##// END OF EJS Templates
bundle: add config option to include phases...
Martin von Zweigbergk -
r33031:e8c8d81e default
parent child Browse files
Show More
@@ -0,0 +1,259 b''
1 $ cat >> $HGRCPATH <<EOF
2 > [experimental]
3 > bundle-phases=yes
4 > [extensions]
5 > strip=
6 > drawdag=$TESTDIR/drawdag.py
7 > EOF
8
9 Set up repo with linear history
10 $ hg init linear
11 $ cd linear
12 $ hg debugdrawdag <<'EOF'
13 > E
14 > |
15 > D
16 > |
17 > C
18 > |
19 > B
20 > |
21 > A
22 > EOF
23 $ hg phase --public A
24 $ hg phase --force --secret D
25 $ hg log -G -T '{desc} {phase}\n'
26 o E secret
27 |
28 o D secret
29 |
30 o C draft
31 |
32 o B draft
33 |
34 o A public
35
36 Phases are restored when unbundling
37 $ hg bundle --base B -r E bundle
38 3 changesets found
39 $ hg debugbundle bundle
40 Stream params: sortdict([('Compression', 'BZ')])
41 changegroup -- "sortdict([('version', '02'), ('nbchanges', '3')])"
42 26805aba1e600a82e93661149f2313866a221a7b
43 f585351a92f85104bff7c284233c338b10eb1df7
44 9bc730a19041f9ec7cb33c626e811aa233efb18c
45 phase-heads -- 'sortdict()'
46 26805aba1e600a82e93661149f2313866a221a7b draft
47 $ hg strip --no-backup C
48 $ hg unbundle -q bundle
49 $ rm bundle
50 $ hg log -G -T '{desc} {phase}\n'
51 o E secret
52 |
53 o D secret
54 |
55 o C draft
56 |
57 o B draft
58 |
59 o A public
60
61 Root revision's phase is preserved
62 $ hg bundle -a bundle
63 5 changesets found
64 $ hg strip --no-backup A
65 $ hg unbundle -q bundle
66 $ rm bundle
67 $ hg log -G -T '{desc} {phase}\n'
68 o E secret
69 |
70 o D secret
71 |
72 o C draft
73 |
74 o B draft
75 |
76 o A public
77
78 Completely public history can be restored
79 $ hg phase --public E
80 $ hg bundle -a bundle
81 5 changesets found
82 $ hg strip --no-backup A
83 $ hg unbundle -q bundle
84 $ rm bundle
85 $ hg log -G -T '{desc} {phase}\n'
86 o E public
87 |
88 o D public
89 |
90 o C public
91 |
92 o B public
93 |
94 o A public
95
96 Direct transition from public to secret can be restored
97 $ hg phase --secret --force D
98 $ hg bundle -a bundle
99 5 changesets found
100 $ hg strip --no-backup A
101 $ hg unbundle -q bundle
102 $ rm bundle
103 $ hg log -G -T '{desc} {phase}\n'
104 o E secret
105 |
106 o D secret
107 |
108 o C public
109 |
110 o B public
111 |
112 o A public
113
114 Revisions within bundle preserve their phase even if parent changes its phase
115 $ hg phase --draft --force B
116 $ hg bundle --base B -r E bundle
117 3 changesets found
118 $ hg strip --no-backup C
119 $ hg phase --public B
120 $ hg unbundle -q bundle
121 $ rm bundle
122 $ hg log -G -T '{desc} {phase}\n'
123 o E secret
124 |
125 o D secret
126 |
127 o C draft
128 |
129 o B public
130 |
131 o A public
132
133 Phase of ancestors of stripped node get advanced to accommodate child
134 $ hg bundle --base B -r E bundle
135 3 changesets found
136 $ hg strip --no-backup C
137 $ hg phase --force --secret B
138 $ hg unbundle -q bundle
139 $ rm bundle
140 $ hg log -G -T '{desc} {phase}\n'
141 o E secret
142 |
143 o D secret
144 |
145 o C draft
146 |
147 o B draft
148 |
149 o A public
150
151 Unbundling advances phases of changesets even if they were already in the repo.
152 To test that, create a bundle of everything in draft phase and then unbundle
153 to see that secret becomes draft, but public remains public.
154 $ hg phase --draft --force A
155 $ hg phase --draft E
156 $ hg bundle -a bundle
157 5 changesets found
158 $ hg phase --public A
159 $ hg phase --secret --force E
160 $ hg unbundle -q bundle
161 $ rm bundle
162 $ hg log -G -T '{desc} {phase}\n'
163 o E draft
164 |
165 o D draft
166 |
167 o C draft
168 |
169 o B draft
170 |
171 o A public
172
173 $ cd ..
174
175 Set up repo with non-linear history
176 $ hg init non-linear
177 $ cd non-linear
178 $ hg debugdrawdag <<'EOF'
179 > D E
180 > |\|
181 > B C
182 > |/
183 > A
184 > EOF
185 $ hg phase --public C
186 $ hg phase --force --secret B
187 $ hg log -G -T '{node|short} {desc} {phase}\n'
188 o 03ca77807e91 E draft
189 |
190 | o 215e7b0814e1 D secret
191 |/|
192 o | dc0947a82db8 C public
193 | |
194 | o 112478962961 B secret
195 |/
196 o 426bada5c675 A public
197
198
199 Restore bundle of entire repo
200 $ hg bundle -a bundle
201 5 changesets found
202 $ hg debugbundle bundle
203 Stream params: sortdict([('Compression', 'BZ')])
204 changegroup -- "sortdict([('version', '02'), ('nbchanges', '5')])"
205 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
206 112478962961147124edd43549aedd1a335e44bf
207 dc0947a82db884575bb76ea10ac97b08536bfa03
208 215e7b0814e1cac8e2614e7284f2a5dc266b4323
209 03ca77807e919db8807c3749086dc36fb478cac0
210 phase-heads -- 'sortdict()'
211 dc0947a82db884575bb76ea10ac97b08536bfa03 public
212 03ca77807e919db8807c3749086dc36fb478cac0 draft
213 $ hg strip --no-backup A
214 $ hg unbundle -q bundle
215 $ rm bundle
216 $ hg log -G -T '{node|short} {desc} {phase}\n'
217 o 03ca77807e91 E draft
218 |
219 | o 215e7b0814e1 D secret
220 |/|
221 o | dc0947a82db8 C public
222 | |
223 | o 112478962961 B secret
224 |/
225 o 426bada5c675 A public
226
227
228 $ hg bundle --base 'A + C' -r D bundle
229 2 changesets found
230 $ hg debugbundle bundle
231 Stream params: sortdict([('Compression', 'BZ')])
232 changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
233 112478962961147124edd43549aedd1a335e44bf
234 215e7b0814e1cac8e2614e7284f2a5dc266b4323
235 phase-heads -- 'sortdict()'
236 $ rm bundle
237
238 $ hg bundle --base A -r D bundle
239 3 changesets found
240 $ hg debugbundle bundle
241 Stream params: sortdict([('Compression', 'BZ')])
242 changegroup -- "sortdict([('version', '02'), ('nbchanges', '3')])"
243 112478962961147124edd43549aedd1a335e44bf
244 dc0947a82db884575bb76ea10ac97b08536bfa03
245 215e7b0814e1cac8e2614e7284f2a5dc266b4323
246 phase-heads -- 'sortdict()'
247 dc0947a82db884575bb76ea10ac97b08536bfa03 public
248 $ rm bundle
249
250 $ hg bundle --base 'B + C' -r 'D + E' bundle
251 2 changesets found
252 $ hg debugbundle bundle
253 Stream params: sortdict([('Compression', 'BZ')])
254 changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
255 215e7b0814e1cac8e2614e7284f2a5dc266b4323
256 03ca77807e919db8807c3749086dc36fb478cac0
257 phase-heads -- 'sortdict()'
258 03ca77807e919db8807c3749086dc36fb478cac0 draft
259 $ rm bundle
@@ -1,1784 +1,1818 b''
1 # bundle2.py - generic container format to transmit arbitrary data.
1 # bundle2.py - generic container format to transmit arbitrary data.
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """Handling of the new bundle2 format
7 """Handling of the new bundle2 format
8
8
9 The goal of bundle2 is to act as an atomically packet to transmit a set of
9 The goal of bundle2 is to act as an atomically packet to transmit a set of
10 payloads in an application agnostic way. It consist in a sequence of "parts"
10 payloads in an application agnostic way. It consist in a sequence of "parts"
11 that will be handed to and processed by the application layer.
11 that will be handed to and processed by the application layer.
12
12
13
13
14 General format architecture
14 General format architecture
15 ===========================
15 ===========================
16
16
17 The format is architectured as follow
17 The format is architectured as follow
18
18
19 - magic string
19 - magic string
20 - stream level parameters
20 - stream level parameters
21 - payload parts (any number)
21 - payload parts (any number)
22 - end of stream marker.
22 - end of stream marker.
23
23
24 the Binary format
24 the Binary format
25 ============================
25 ============================
26
26
27 All numbers are unsigned and big-endian.
27 All numbers are unsigned and big-endian.
28
28
29 stream level parameters
29 stream level parameters
30 ------------------------
30 ------------------------
31
31
32 Binary format is as follow
32 Binary format is as follow
33
33
34 :params size: int32
34 :params size: int32
35
35
36 The total number of Bytes used by the parameters
36 The total number of Bytes used by the parameters
37
37
38 :params value: arbitrary number of Bytes
38 :params value: arbitrary number of Bytes
39
39
40 A blob of `params size` containing the serialized version of all stream level
40 A blob of `params size` containing the serialized version of all stream level
41 parameters.
41 parameters.
42
42
43 The blob contains a space separated list of parameters. Parameters with value
43 The blob contains a space separated list of parameters. Parameters with value
44 are stored in the form `<name>=<value>`. Both name and value are urlquoted.
44 are stored in the form `<name>=<value>`. Both name and value are urlquoted.
45
45
46 Empty name are obviously forbidden.
46 Empty name are obviously forbidden.
47
47
48 Name MUST start with a letter. If this first letter is lower case, the
48 Name MUST start with a letter. If this first letter is lower case, the
49 parameter is advisory and can be safely ignored. However when the first
49 parameter is advisory and can be safely ignored. However when the first
50 letter is capital, the parameter is mandatory and the bundling process MUST
50 letter is capital, the parameter is mandatory and the bundling process MUST
51 stop if he is not able to proceed it.
51 stop if he is not able to proceed it.
52
52
53 Stream parameters use a simple textual format for two main reasons:
53 Stream parameters use a simple textual format for two main reasons:
54
54
55 - Stream level parameters should remain simple and we want to discourage any
55 - Stream level parameters should remain simple and we want to discourage any
56 crazy usage.
56 crazy usage.
57 - Textual data allow easy human inspection of a bundle2 header in case of
57 - Textual data allow easy human inspection of a bundle2 header in case of
58 troubles.
58 troubles.
59
59
60 Any Applicative level options MUST go into a bundle2 part instead.
60 Any Applicative level options MUST go into a bundle2 part instead.
61
61
62 Payload part
62 Payload part
63 ------------------------
63 ------------------------
64
64
65 Binary format is as follow
65 Binary format is as follow
66
66
67 :header size: int32
67 :header size: int32
68
68
69 The total number of Bytes used by the part header. When the header is empty
69 The total number of Bytes used by the part header. When the header is empty
70 (size = 0) this is interpreted as the end of stream marker.
70 (size = 0) this is interpreted as the end of stream marker.
71
71
72 :header:
72 :header:
73
73
74 The header defines how to interpret the part. It contains two piece of
74 The header defines how to interpret the part. It contains two piece of
75 data: the part type, and the part parameters.
75 data: the part type, and the part parameters.
76
76
77 The part type is used to route an application level handler, that can
77 The part type is used to route an application level handler, that can
78 interpret payload.
78 interpret payload.
79
79
80 Part parameters are passed to the application level handler. They are
80 Part parameters are passed to the application level handler. They are
81 meant to convey information that will help the application level object to
81 meant to convey information that will help the application level object to
82 interpret the part payload.
82 interpret the part payload.
83
83
84 The binary format of the header is has follow
84 The binary format of the header is has follow
85
85
86 :typesize: (one byte)
86 :typesize: (one byte)
87
87
88 :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*)
88 :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*)
89
89
90 :partid: A 32bits integer (unique in the bundle) that can be used to refer
90 :partid: A 32bits integer (unique in the bundle) that can be used to refer
91 to this part.
91 to this part.
92
92
93 :parameters:
93 :parameters:
94
94
95 Part's parameter may have arbitrary content, the binary structure is::
95 Part's parameter may have arbitrary content, the binary structure is::
96
96
97 <mandatory-count><advisory-count><param-sizes><param-data>
97 <mandatory-count><advisory-count><param-sizes><param-data>
98
98
99 :mandatory-count: 1 byte, number of mandatory parameters
99 :mandatory-count: 1 byte, number of mandatory parameters
100
100
101 :advisory-count: 1 byte, number of advisory parameters
101 :advisory-count: 1 byte, number of advisory parameters
102
102
103 :param-sizes:
103 :param-sizes:
104
104
105 N couple of bytes, where N is the total number of parameters. Each
105 N couple of bytes, where N is the total number of parameters. Each
106 couple contains (<size-of-key>, <size-of-value) for one parameter.
106 couple contains (<size-of-key>, <size-of-value) for one parameter.
107
107
108 :param-data:
108 :param-data:
109
109
110 A blob of bytes from which each parameter key and value can be
110 A blob of bytes from which each parameter key and value can be
111 retrieved using the list of size couples stored in the previous
111 retrieved using the list of size couples stored in the previous
112 field.
112 field.
113
113
114 Mandatory parameters comes first, then the advisory ones.
114 Mandatory parameters comes first, then the advisory ones.
115
115
116 Each parameter's key MUST be unique within the part.
116 Each parameter's key MUST be unique within the part.
117
117
118 :payload:
118 :payload:
119
119
120 payload is a series of `<chunksize><chunkdata>`.
120 payload is a series of `<chunksize><chunkdata>`.
121
121
122 `chunksize` is an int32, `chunkdata` are plain bytes (as much as
122 `chunksize` is an int32, `chunkdata` are plain bytes (as much as
123 `chunksize` says)` The payload part is concluded by a zero size chunk.
123 `chunksize` says)` The payload part is concluded by a zero size chunk.
124
124
125 The current implementation always produces either zero or one chunk.
125 The current implementation always produces either zero or one chunk.
126 This is an implementation limitation that will ultimately be lifted.
126 This is an implementation limitation that will ultimately be lifted.
127
127
128 `chunksize` can be negative to trigger special case processing. No such
128 `chunksize` can be negative to trigger special case processing. No such
129 processing is in place yet.
129 processing is in place yet.
130
130
131 Bundle processing
131 Bundle processing
132 ============================
132 ============================
133
133
134 Each part is processed in order using a "part handler". Handler are registered
134 Each part is processed in order using a "part handler". Handler are registered
135 for a certain part type.
135 for a certain part type.
136
136
137 The matching of a part to its handler is case insensitive. The case of the
137 The matching of a part to its handler is case insensitive. The case of the
138 part type is used to know if a part is mandatory or advisory. If the Part type
138 part type is used to know if a part is mandatory or advisory. If the Part type
139 contains any uppercase char it is considered mandatory. When no handler is
139 contains any uppercase char it is considered mandatory. When no handler is
140 known for a Mandatory part, the process is aborted and an exception is raised.
140 known for a Mandatory part, the process is aborted and an exception is raised.
141 If the part is advisory and no handler is known, the part is ignored. When the
141 If the part is advisory and no handler is known, the part is ignored. When the
142 process is aborted, the full bundle is still read from the stream to keep the
142 process is aborted, the full bundle is still read from the stream to keep the
143 channel usable. But none of the part read from an abort are processed. In the
143 channel usable. But none of the part read from an abort are processed. In the
144 future, dropping the stream may become an option for channel we do not care to
144 future, dropping the stream may become an option for channel we do not care to
145 preserve.
145 preserve.
146 """
146 """
147
147
148 from __future__ import absolute_import
148 from __future__ import absolute_import
149
149
150 import errno
150 import errno
151 import re
151 import re
152 import string
152 import string
153 import struct
153 import struct
154 import sys
154 import sys
155
155
156 from .i18n import _
156 from .i18n import _
157 from . import (
157 from . import (
158 changegroup,
158 changegroup,
159 error,
159 error,
160 obsolete,
160 obsolete,
161 phases,
161 pushkey,
162 pushkey,
162 pycompat,
163 pycompat,
163 tags,
164 tags,
164 url,
165 url,
165 util,
166 util,
166 )
167 )
167
168
168 urlerr = util.urlerr
169 urlerr = util.urlerr
169 urlreq = util.urlreq
170 urlreq = util.urlreq
170
171
171 _pack = struct.pack
172 _pack = struct.pack
172 _unpack = struct.unpack
173 _unpack = struct.unpack
173
174
174 _fstreamparamsize = '>i'
175 _fstreamparamsize = '>i'
175 _fpartheadersize = '>i'
176 _fpartheadersize = '>i'
176 _fparttypesize = '>B'
177 _fparttypesize = '>B'
177 _fpartid = '>I'
178 _fpartid = '>I'
178 _fpayloadsize = '>i'
179 _fpayloadsize = '>i'
179 _fpartparamcount = '>BB'
180 _fpartparamcount = '>BB'
180
181
182 _fphasesentry = '>i20s'
183
181 preferedchunksize = 4096
184 preferedchunksize = 4096
182
185
183 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
186 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
184
187
185 def outdebug(ui, message):
188 def outdebug(ui, message):
186 """debug regarding output stream (bundling)"""
189 """debug regarding output stream (bundling)"""
187 if ui.configbool('devel', 'bundle2.debug', False):
190 if ui.configbool('devel', 'bundle2.debug', False):
188 ui.debug('bundle2-output: %s\n' % message)
191 ui.debug('bundle2-output: %s\n' % message)
189
192
190 def indebug(ui, message):
193 def indebug(ui, message):
191 """debug on input stream (unbundling)"""
194 """debug on input stream (unbundling)"""
192 if ui.configbool('devel', 'bundle2.debug', False):
195 if ui.configbool('devel', 'bundle2.debug', False):
193 ui.debug('bundle2-input: %s\n' % message)
196 ui.debug('bundle2-input: %s\n' % message)
194
197
195 def validateparttype(parttype):
198 def validateparttype(parttype):
196 """raise ValueError if a parttype contains invalid character"""
199 """raise ValueError if a parttype contains invalid character"""
197 if _parttypeforbidden.search(parttype):
200 if _parttypeforbidden.search(parttype):
198 raise ValueError(parttype)
201 raise ValueError(parttype)
199
202
200 def _makefpartparamsizes(nbparams):
203 def _makefpartparamsizes(nbparams):
201 """return a struct format to read part parameter sizes
204 """return a struct format to read part parameter sizes
202
205
203 The number parameters is variable so we need to build that format
206 The number parameters is variable so we need to build that format
204 dynamically.
207 dynamically.
205 """
208 """
206 return '>'+('BB'*nbparams)
209 return '>'+('BB'*nbparams)
207
210
208 parthandlermapping = {}
211 parthandlermapping = {}
209
212
210 def parthandler(parttype, params=()):
213 def parthandler(parttype, params=()):
211 """decorator that register a function as a bundle2 part handler
214 """decorator that register a function as a bundle2 part handler
212
215
213 eg::
216 eg::
214
217
215 @parthandler('myparttype', ('mandatory', 'param', 'handled'))
218 @parthandler('myparttype', ('mandatory', 'param', 'handled'))
216 def myparttypehandler(...):
219 def myparttypehandler(...):
217 '''process a part of type "my part".'''
220 '''process a part of type "my part".'''
218 ...
221 ...
219 """
222 """
220 validateparttype(parttype)
223 validateparttype(parttype)
221 def _decorator(func):
224 def _decorator(func):
222 lparttype = parttype.lower() # enforce lower case matching.
225 lparttype = parttype.lower() # enforce lower case matching.
223 assert lparttype not in parthandlermapping
226 assert lparttype not in parthandlermapping
224 parthandlermapping[lparttype] = func
227 parthandlermapping[lparttype] = func
225 func.params = frozenset(params)
228 func.params = frozenset(params)
226 return func
229 return func
227 return _decorator
230 return _decorator
228
231
229 class unbundlerecords(object):
232 class unbundlerecords(object):
230 """keep record of what happens during and unbundle
233 """keep record of what happens during and unbundle
231
234
232 New records are added using `records.add('cat', obj)`. Where 'cat' is a
235 New records are added using `records.add('cat', obj)`. Where 'cat' is a
233 category of record and obj is an arbitrary object.
236 category of record and obj is an arbitrary object.
234
237
235 `records['cat']` will return all entries of this category 'cat'.
238 `records['cat']` will return all entries of this category 'cat'.
236
239
237 Iterating on the object itself will yield `('category', obj)` tuples
240 Iterating on the object itself will yield `('category', obj)` tuples
238 for all entries.
241 for all entries.
239
242
240 All iterations happens in chronological order.
243 All iterations happens in chronological order.
241 """
244 """
242
245
243 def __init__(self):
246 def __init__(self):
244 self._categories = {}
247 self._categories = {}
245 self._sequences = []
248 self._sequences = []
246 self._replies = {}
249 self._replies = {}
247
250
248 def add(self, category, entry, inreplyto=None):
251 def add(self, category, entry, inreplyto=None):
249 """add a new record of a given category.
252 """add a new record of a given category.
250
253
251 The entry can then be retrieved in the list returned by
254 The entry can then be retrieved in the list returned by
252 self['category']."""
255 self['category']."""
253 self._categories.setdefault(category, []).append(entry)
256 self._categories.setdefault(category, []).append(entry)
254 self._sequences.append((category, entry))
257 self._sequences.append((category, entry))
255 if inreplyto is not None:
258 if inreplyto is not None:
256 self.getreplies(inreplyto).add(category, entry)
259 self.getreplies(inreplyto).add(category, entry)
257
260
258 def getreplies(self, partid):
261 def getreplies(self, partid):
259 """get the records that are replies to a specific part"""
262 """get the records that are replies to a specific part"""
260 return self._replies.setdefault(partid, unbundlerecords())
263 return self._replies.setdefault(partid, unbundlerecords())
261
264
262 def __getitem__(self, cat):
265 def __getitem__(self, cat):
263 return tuple(self._categories.get(cat, ()))
266 return tuple(self._categories.get(cat, ()))
264
267
265 def __iter__(self):
268 def __iter__(self):
266 return iter(self._sequences)
269 return iter(self._sequences)
267
270
268 def __len__(self):
271 def __len__(self):
269 return len(self._sequences)
272 return len(self._sequences)
270
273
271 def __nonzero__(self):
274 def __nonzero__(self):
272 return bool(self._sequences)
275 return bool(self._sequences)
273
276
274 __bool__ = __nonzero__
277 __bool__ = __nonzero__
275
278
276 class bundleoperation(object):
279 class bundleoperation(object):
277 """an object that represents a single bundling process
280 """an object that represents a single bundling process
278
281
279 Its purpose is to carry unbundle-related objects and states.
282 Its purpose is to carry unbundle-related objects and states.
280
283
281 A new object should be created at the beginning of each bundle processing.
284 A new object should be created at the beginning of each bundle processing.
282 The object is to be returned by the processing function.
285 The object is to be returned by the processing function.
283
286
284 The object has very little content now it will ultimately contain:
287 The object has very little content now it will ultimately contain:
285 * an access to the repo the bundle is applied to,
288 * an access to the repo the bundle is applied to,
286 * a ui object,
289 * a ui object,
287 * a way to retrieve a transaction to add changes to the repo,
290 * a way to retrieve a transaction to add changes to the repo,
288 * a way to record the result of processing each part,
291 * a way to record the result of processing each part,
289 * a way to construct a bundle response when applicable.
292 * a way to construct a bundle response when applicable.
290 """
293 """
291
294
292 def __init__(self, repo, transactiongetter, captureoutput=True):
295 def __init__(self, repo, transactiongetter, captureoutput=True):
293 self.repo = repo
296 self.repo = repo
294 self.ui = repo.ui
297 self.ui = repo.ui
295 self.records = unbundlerecords()
298 self.records = unbundlerecords()
296 self.gettransaction = transactiongetter
299 self.gettransaction = transactiongetter
297 self.reply = None
300 self.reply = None
298 self.captureoutput = captureoutput
301 self.captureoutput = captureoutput
299
302
300 class TransactionUnavailable(RuntimeError):
303 class TransactionUnavailable(RuntimeError):
301 pass
304 pass
302
305
303 def _notransaction():
306 def _notransaction():
304 """default method to get a transaction while processing a bundle
307 """default method to get a transaction while processing a bundle
305
308
306 Raise an exception to highlight the fact that no transaction was expected
309 Raise an exception to highlight the fact that no transaction was expected
307 to be created"""
310 to be created"""
308 raise TransactionUnavailable()
311 raise TransactionUnavailable()
309
312
310 def applybundle(repo, unbundler, tr, source=None, url=None, op=None):
313 def applybundle(repo, unbundler, tr, source=None, url=None, op=None):
311 # transform me into unbundler.apply() as soon as the freeze is lifted
314 # transform me into unbundler.apply() as soon as the freeze is lifted
312 tr.hookargs['bundle2'] = '1'
315 tr.hookargs['bundle2'] = '1'
313 if source is not None and 'source' not in tr.hookargs:
316 if source is not None and 'source' not in tr.hookargs:
314 tr.hookargs['source'] = source
317 tr.hookargs['source'] = source
315 if url is not None and 'url' not in tr.hookargs:
318 if url is not None and 'url' not in tr.hookargs:
316 tr.hookargs['url'] = url
319 tr.hookargs['url'] = url
317 return processbundle(repo, unbundler, lambda: tr, op=op)
320 return processbundle(repo, unbundler, lambda: tr, op=op)
318
321
319 def processbundle(repo, unbundler, transactiongetter=None, op=None):
322 def processbundle(repo, unbundler, transactiongetter=None, op=None):
320 """This function process a bundle, apply effect to/from a repo
323 """This function process a bundle, apply effect to/from a repo
321
324
322 It iterates over each part then searches for and uses the proper handling
325 It iterates over each part then searches for and uses the proper handling
323 code to process the part. Parts are processed in order.
326 code to process the part. Parts are processed in order.
324
327
325 Unknown Mandatory part will abort the process.
328 Unknown Mandatory part will abort the process.
326
329
327 It is temporarily possible to provide a prebuilt bundleoperation to the
330 It is temporarily possible to provide a prebuilt bundleoperation to the
328 function. This is used to ensure output is properly propagated in case of
331 function. This is used to ensure output is properly propagated in case of
329 an error during the unbundling. This output capturing part will likely be
332 an error during the unbundling. This output capturing part will likely be
330 reworked and this ability will probably go away in the process.
333 reworked and this ability will probably go away in the process.
331 """
334 """
332 if op is None:
335 if op is None:
333 if transactiongetter is None:
336 if transactiongetter is None:
334 transactiongetter = _notransaction
337 transactiongetter = _notransaction
335 op = bundleoperation(repo, transactiongetter)
338 op = bundleoperation(repo, transactiongetter)
336 # todo:
339 # todo:
337 # - replace this is a init function soon.
340 # - replace this is a init function soon.
338 # - exception catching
341 # - exception catching
339 unbundler.params
342 unbundler.params
340 if repo.ui.debugflag:
343 if repo.ui.debugflag:
341 msg = ['bundle2-input-bundle:']
344 msg = ['bundle2-input-bundle:']
342 if unbundler.params:
345 if unbundler.params:
343 msg.append(' %i params')
346 msg.append(' %i params')
344 if op.gettransaction is None or op.gettransaction is _notransaction:
347 if op.gettransaction is None or op.gettransaction is _notransaction:
345 msg.append(' no-transaction')
348 msg.append(' no-transaction')
346 else:
349 else:
347 msg.append(' with-transaction')
350 msg.append(' with-transaction')
348 msg.append('\n')
351 msg.append('\n')
349 repo.ui.debug(''.join(msg))
352 repo.ui.debug(''.join(msg))
350 iterparts = enumerate(unbundler.iterparts())
353 iterparts = enumerate(unbundler.iterparts())
351 part = None
354 part = None
352 nbpart = 0
355 nbpart = 0
353 try:
356 try:
354 for nbpart, part in iterparts:
357 for nbpart, part in iterparts:
355 _processpart(op, part)
358 _processpart(op, part)
356 except Exception as exc:
359 except Exception as exc:
357 # Any exceptions seeking to the end of the bundle at this point are
360 # Any exceptions seeking to the end of the bundle at this point are
358 # almost certainly related to the underlying stream being bad.
361 # almost certainly related to the underlying stream being bad.
359 # And, chances are that the exception we're handling is related to
362 # And, chances are that the exception we're handling is related to
360 # getting in that bad state. So, we swallow the seeking error and
363 # getting in that bad state. So, we swallow the seeking error and
361 # re-raise the original error.
364 # re-raise the original error.
362 seekerror = False
365 seekerror = False
363 try:
366 try:
364 for nbpart, part in iterparts:
367 for nbpart, part in iterparts:
365 # consume the bundle content
368 # consume the bundle content
366 part.seek(0, 2)
369 part.seek(0, 2)
367 except Exception:
370 except Exception:
368 seekerror = True
371 seekerror = True
369
372
370 # Small hack to let caller code distinguish exceptions from bundle2
373 # Small hack to let caller code distinguish exceptions from bundle2
371 # processing from processing the old format. This is mostly
374 # processing from processing the old format. This is mostly
372 # needed to handle different return codes to unbundle according to the
375 # needed to handle different return codes to unbundle according to the
373 # type of bundle. We should probably clean up or drop this return code
376 # type of bundle. We should probably clean up or drop this return code
374 # craziness in a future version.
377 # craziness in a future version.
375 exc.duringunbundle2 = True
378 exc.duringunbundle2 = True
376 salvaged = []
379 salvaged = []
377 replycaps = None
380 replycaps = None
378 if op.reply is not None:
381 if op.reply is not None:
379 salvaged = op.reply.salvageoutput()
382 salvaged = op.reply.salvageoutput()
380 replycaps = op.reply.capabilities
383 replycaps = op.reply.capabilities
381 exc._replycaps = replycaps
384 exc._replycaps = replycaps
382 exc._bundle2salvagedoutput = salvaged
385 exc._bundle2salvagedoutput = salvaged
383
386
384 # Re-raising from a variable loses the original stack. So only use
387 # Re-raising from a variable loses the original stack. So only use
385 # that form if we need to.
388 # that form if we need to.
386 if seekerror:
389 if seekerror:
387 raise exc
390 raise exc
388 else:
391 else:
389 raise
392 raise
390 finally:
393 finally:
391 repo.ui.debug('bundle2-input-bundle: %i parts total\n' % nbpart)
394 repo.ui.debug('bundle2-input-bundle: %i parts total\n' % nbpart)
392
395
393 return op
396 return op
394
397
def _processpart(op, part):
    """process a single part from a bundle

    The part is guaranteed to have been fully consumed when the function exits
    (even if an exception is raised)."""
    status = 'unknown' # used by debug output
    hardabort = False
    try:
        try:
            handler = parthandlermapping.get(part.type)
            if handler is None:
                status = 'unsupported-type'
                raise error.BundleUnknownFeatureError(parttype=part.type)
            indebug(op.ui, 'found a handler for part %r' % part.type)
            unknownparams = part.mandatorykeys - handler.params
            if unknownparams:
                unknownparams = list(unknownparams)
                unknownparams.sort()
                status = 'unsupported-params (%s)' % unknownparams
                raise error.BundleUnknownFeatureError(parttype=part.type,
                                                     params=unknownparams)
            status = 'supported'
        except error.BundleUnknownFeatureError as exc:
            if part.mandatory: # mandatory parts
                raise
            indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
            return # skip to part processing
        finally:
            if op.ui.debugflag:
                msg = ['bundle2-input-part: "%s"' % part.type]
                if not part.mandatory:
                    msg.append(' (advisory)')
                nbmp = len(part.mandatorykeys)
                nbap = len(part.params) - nbmp
                if nbmp or nbap:
                    msg.append(' (params:')
                    if nbmp:
                        msg.append(' %i mandatory' % nbmp)
                    if nbap:
                        # fixed: previously printed the mandatory count (nbmp)
                        # for the advisory-parameter total
                        msg.append(' %i advisory' % nbap)
                    msg.append(')')
                msg.append(' %s\n' % status)
                op.ui.debug(''.join(msg))

        # handler is called outside the above try block so that we don't
        # risk catching KeyErrors from anything other than the
        # parthandlermapping lookup (any KeyError raised by handler()
        # itself represents a defect of a different variety).
        output = None
        if op.captureoutput and op.reply is not None:
            op.ui.pushbuffer(error=True, subproc=True)
            output = ''
        try:
            handler(op, part)
        finally:
            if output is not None:
                output = op.ui.popbuffer()
            if output:
                outpart = op.reply.newpart('output', data=output,
                                           mandatory=False)
                outpart.addparam('in-reply-to', str(part.id), mandatory=False)
    # If exiting or interrupted, do not attempt to seek the stream in the
    # finally block below. This makes abort faster.
    except (SystemExit, KeyboardInterrupt):
        hardabort = True
        raise
    finally:
        # consume the part content to not corrupt the stream.
        if not hardabort:
            part.seek(0, 2)
465
468
466
469
def decodecaps(blob):
    """decode a bundle2 caps bytes blob into a dictionary

    The blob is a list of capabilities (one per line)
    Capabilities may have values using a line of the form::

        capability=value1,value2,value3

    The values are always a list."""
    caps = {}
    for line in blob.splitlines():
        # skip blank lines in the blob
        if not line:
            continue
        if '=' in line:
            key, rawvals = line.split('=', 1)
            vals = rawvals.split(',')
        else:
            # capability without any value
            key, vals = line, ()
        # names and values travel url-quoted; decode before storing
        caps[urlreq.unquote(key)] = [urlreq.unquote(v) for v in vals]
    return caps
489
492
def encodecaps(caps):
    """encode a bundle2 caps dictionary into a bytes blob"""
    chunks = []
    # emit capabilities in sorted order for a deterministic blob
    for name in sorted(caps):
        quoted = urlreq.quote(name)
        quotedvals = [urlreq.quote(v) for v in caps[name]]
        if quotedvals:
            chunks.append("%s=%s" % (quoted, ','.join(quotedvals)))
        else:
            # capability without values: bare name
            chunks.append(quoted)
    return '\n'.join(chunks)
501
504
# Map of bundle-type name -> (on-disk header magic, compression name).
# The header is written at the start of the bundle file; the compression
# name selects the engine used for the payload.
bundletypes = {
    "": ("", 'UN'), # only when using unbundle on ssh and old http servers
                    # since the unification ssh accepts a header but there
                    # is no capability signaling it.
    "HG20": (), # special-cased below
    "HG10UN": ("HG10UN", 'UN'),
    "HG10BZ": ("HG10", 'BZ'),
    "HG10GZ": ("HG10GZ", 'GZ'),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
514
517
class bundle20(object):
    """represent an outgoing bundle2 container

    Use the `addparam` method to add stream level parameter. and `newpart` to
    populate it. Then call `getchunks` to retrieve all the binary chunks of
    data that compose the bundle2 container."""

    # four-byte magic emitted at the very start of the stream
    _magicstring = 'HG20'

    def __init__(self, ui, capabilities=()):
        self.ui = ui
        # stream level parameters, as ordered (name, value) pairs
        self._params = []
        # parts composing the payload, in emission order
        self._parts = []
        self.capabilities = dict(capabilities)
        # payload stays uncompressed until setcompression() is called
        self._compengine = util.compengines.forbundletype('UN')
        self._compopts = None

    def setcompression(self, alg, compopts=None):
        """setup core part compression to <alg>"""
        if alg in (None, 'UN'):
            return
        # the Compression stream parameter must only be set once
        assert not any(n.lower() == 'compression' for n, v in self._params)
        self.addparam('Compression', alg)
        self._compengine = util.compengines.forbundletype(alg)
        self._compopts = compopts

    @property
    def nbparts(self):
        """total number of parts added to the bundler"""
        return len(self._parts)

    # methods used to defines the bundle2 content
    def addparam(self, name, value=None):
        """add a stream level parameter"""
        if not name:
            raise ValueError('empty parameter name')
        # NOTE(review): string.letters exists only on Python 2
        if name[0] not in string.letters:
            raise ValueError('non letter first character: %r' % name)
        self._params.append((name, value))

    def addpart(self, part):
        """add a new part to the bundle2 container

        Parts contains the actual applicative payload."""
        # parts may only be assigned an id once, here
        assert part.id is None
        part.id = len(self._parts) # very cheap counter
        self._parts.append(part)

    def newpart(self, typeid, *args, **kwargs):
        """create a new part and add it to the containers

        As the part is directly added to the containers. For now, this means
        that any failure to properly initialize the part after calling
        ``newpart`` should result in a failure of the whole bundling process.

        You can still fall back to manually create and add if you need better
        control."""
        part = bundlepart(typeid, *args, **kwargs)
        self.addpart(part)
        return part

    # methods used to generate the bundle2 stream
    def getchunks(self):
        """yield the binary chunks composing the full bundle2 stream"""
        if self.ui.debugflag:
            msg = ['bundle2-output-bundle: "%s",' % self._magicstring]
            if self._params:
                msg.append(' (%i params)' % len(self._params))
            msg.append(' %i parts total\n' % len(self._parts))
            self.ui.debug(''.join(msg))
        outdebug(self.ui, 'start emission of %s stream' % self._magicstring)
        # magic string, then size-prefixed parameter block
        yield self._magicstring
        param = self._paramchunk()
        outdebug(self.ui, 'bundle parameter: %s' % param)
        yield _pack(_fstreamparamsize, len(param))
        if param:
            yield param
        # everything after the parameters may be compressed
        for chunk in self._compengine.compressstream(self._getcorechunk(),
                                                     self._compopts):
            yield chunk

    def _paramchunk(self):
        """return a encoded version of all stream parameters"""
        blocks = []
        for par, value in self._params:
            # names and values are url-quoted; value-less params are bare
            par = urlreq.quote(par)
            if value is not None:
                value = urlreq.quote(value)
                par = '%s=%s' % (par, value)
            blocks.append(par)
        return ' '.join(blocks)

    def _getcorechunk(self):
        """yield chunk for the core part of the bundle

        (all but headers and parameters)"""
        outdebug(self.ui, 'start of parts')
        for part in self._parts:
            outdebug(self.ui, 'bundle part: "%s"' % part.type)
            for chunk in part.getchunks(ui=self.ui):
                yield chunk
        outdebug(self.ui, 'end of bundle')
        # a zero part-header size marks the end of the bundle
        yield _pack(_fpartheadersize, 0)

    def salvageoutput(self):
        """return a list with a copy of all output parts in the bundle

        This is meant to be used during error handling to make sure we preserve
        server output"""
        salvaged = []
        for part in self._parts:
            if part.type.startswith('output'):
                salvaged.append(part.copy())
        return salvaged
629
632
630
633
class unpackermixin(object):
    """A mixin to extract bytes and struct data from a stream"""

    def __init__(self, fp):
        # underlying file-like object all reads go through
        self._fp = fp

    def _unpack(self, format):
        """unpack this struct format from the stream

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        data = self._readexact(struct.calcsize(format))
        return _unpack(format, data)

    def _readexact(self, size):
        """read exactly <size> bytes from the stream

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        return changegroup.readexactly(self._fp, size)
657
660
def getunbundler(ui, fp, magicstring=None):
    """return a valid unbundler object for a given magicstring"""
    # read the four-byte header unless the caller already consumed it
    if magicstring is None:
        magicstring = changegroup.readexactly(fp, 4)
    magic = magicstring[0:2]
    version = magicstring[2:4]
    if magic != 'HG':
        raise error.Abort(_('not a Mercurial bundle'))
    unbundlerclass = formatmap.get(version)
    if unbundlerclass is None:
        raise error.Abort(_('unknown bundle version %s') % version)
    unbundler = unbundlerclass(ui, fp)
    indebug(ui, 'start processing of %s stream' % magicstring)
    return unbundler
671
674
class unbundle20(unpackermixin):
    """interpret a bundle2 stream

    This class is fed with a binary stream and yields parts through its
    `iterparts` methods."""

    _magicstring = 'HG20'

    def __init__(self, ui, fp):
        """If header is specified, we do not read it out of the stream."""
        self.ui = ui
        # replaced by the 'compression' stream-parameter handler when present
        self._compengine = util.compengines.forbundletype('UN')
        # None until params are read, True once a compression param was seen
        self._compressed = None
        super(unbundle20, self).__init__(fp)

    @util.propertycache
    def params(self):
        """dictionary of stream level parameters"""
        indebug(self.ui, 'reading bundle2 stream parameters')
        params = {}
        paramssize = self._unpack(_fstreamparamsize)[0]
        if paramssize < 0:
            raise error.BundleValueError('negative bundle param size: %i'
                                         % paramssize)
        if paramssize:
            params = self._readexact(paramssize)
            params = self._processallparams(params)
        return params

    def _processallparams(self, paramsblock):
        """decode the space-separated parameter block

        Applies the effect of each known parameter and returns an ordered
        dict of the decoded (name, value) pairs."""
        params = util.sortdict()
        for p in paramsblock.split(' '):
            p = p.split('=', 1)
            p = [urlreq.unquote(i) for i in p]
            if len(p) < 2:
                p.append(None)  # parameter without a value
            self._processparam(*p)
            params[p[0]] = p[1]
        return params

    def _processparam(self, name, value):
        """process a parameter, applying its effect if needed

        Parameter starting with a lower case letter are advisory and will be
        ignored when unknown. Those starting with an upper case letter are
        mandatory and will this function will raise a KeyError when unknown.

        Note: no option are currently supported. Any input will be either
        ignored or failing.
        """
        if not name:
            raise ValueError('empty parameter name')
        # NOTE(review): string.letters exists only on Python 2
        if name[0] not in string.letters:
            raise ValueError('non letter first character: %r' % name)
        try:
            handler = b2streamparamsmap[name.lower()]
        except KeyError:
            if name[0].islower():
                indebug(self.ui, "ignoring unknown parameter %r" % name)
            else:
                raise error.BundleUnknownFeatureError(params=(name,))
        else:
            handler(self, name, value)

    def _forwardchunks(self):
        """utility to transfer a bundle2 as binary

        This is made necessary by the fact the 'getbundle' command over 'ssh'
        have no way to know then the reply end, relying on the bundle to be
        interpreted to know its end. This is terrible and we are sorry, but we
        needed to move forward to get general delta enabled.
        """
        yield self._magicstring
        assert 'params' not in vars(self)
        paramssize = self._unpack(_fstreamparamsize)[0]
        if paramssize < 0:
            raise error.BundleValueError('negative bundle param size: %i'
                                         % paramssize)
        yield _pack(_fstreamparamsize, paramssize)
        if paramssize:
            params = self._readexact(paramssize)
            self._processallparams(params)
            yield params
        assert self._compengine.bundletype == 'UN'
        # From there, payload might need to be decompressed
        self._fp = self._compengine.decompressorreader(self._fp)
        emptycount = 0
        while emptycount < 2:
            # so we can brainlessly loop
            assert _fpartheadersize == _fpayloadsize
            size = self._unpack(_fpartheadersize)[0]
            yield _pack(_fpartheadersize, size)
            if size:
                emptycount = 0
            else:
                emptycount += 1
                continue
            if size == flaginterrupt:
                continue
            elif size < 0:
                # fixed: the '%i' placeholder previously had no argument
                raise error.BundleValueError('negative chunk size: %i' % size)
            yield self._readexact(size)

    def iterparts(self):
        """yield all parts contained in the stream"""
        # make sure param have been loaded
        self.params
        # From there, payload need to be decompressed
        self._fp = self._compengine.decompressorreader(self._fp)
        indebug(self.ui, 'start extraction of bundle2 parts')
        headerblock = self._readpartheader()
        while headerblock is not None:
            part = unbundlepart(self.ui, headerblock, self._fp)
            yield part
            # fully consume the part even if the caller did not, so the
            # stream stays positioned at the next part header
            part.seek(0, 2)
            headerblock = self._readpartheader()
        indebug(self.ui, 'end of bundle2 stream')

    def _readpartheader(self):
        """reads a part header size and return the bytes blob

        returns None if empty"""
        headersize = self._unpack(_fpartheadersize)[0]
        if headersize < 0:
            raise error.BundleValueError('negative part header size: %i'
                                         % headersize)
        indebug(self.ui, 'part header size: %i' % headersize)
        if headersize:
            return self._readexact(headersize)
        return None

    def compressed(self):
        # returns None before params are loaded, True once a compression
        # stream parameter has been processed
        self.params # load params
        return self._compressed

    def close(self):
        """close underlying file"""
        if util.safehasattr(self._fp, 'close'):
            return self._fp.close()
814
817
# bundle2 version string (from the magic header) -> unbundler class
formatmap = {'20': unbundle20}
816
819
# lowercased stream-parameter name -> handler function
b2streamparamsmap = {}

def b2streamparamhandler(name):
    """register a handler for a stream level parameter"""
    def decorator(func):
        # fixed: the duplicate-registration guard previously checked
        # 'formatmap' (the bundle version map) instead of the stream
        # parameter map, so it could never fire
        assert name not in b2streamparamsmap
        b2streamparamsmap[name] = func
        return func
    return decorator
826
829
@b2streamparamhandler('compression')
def processcompression(unbundler, param, value):
    """read compression parameter and install payload decompression"""
    supported = util.compengines.supportedbundletypes
    if value not in supported:
        raise error.BundleUnknownFeatureError(params=(param,),
                                              values=(value,))
    # swap in the engine matching the advertised compression
    unbundler._compengine = util.compengines.forbundletype(value)
    if value is not None:
        unbundler._compressed = True
836
839
837 class bundlepart(object):
840 class bundlepart(object):
838 """A bundle2 part contains application level payload
841 """A bundle2 part contains application level payload
839
842
840 The part `type` is used to route the part to the application level
843 The part `type` is used to route the part to the application level
841 handler.
844 handler.
842
845
843 The part payload is contained in ``part.data``. It could be raw bytes or a
846 The part payload is contained in ``part.data``. It could be raw bytes or a
844 generator of byte chunks.
847 generator of byte chunks.
845
848
846 You can add parameters to the part using the ``addparam`` method.
849 You can add parameters to the part using the ``addparam`` method.
847 Parameters can be either mandatory (default) or advisory. Remote side
850 Parameters can be either mandatory (default) or advisory. Remote side
848 should be able to safely ignore the advisory ones.
851 should be able to safely ignore the advisory ones.
849
852
850 Both data and parameters cannot be modified after the generation has begun.
853 Both data and parameters cannot be modified after the generation has begun.
851 """
854 """
852
855
853 def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
856 def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
854 data='', mandatory=True):
857 data='', mandatory=True):
855 validateparttype(parttype)
858 validateparttype(parttype)
856 self.id = None
859 self.id = None
857 self.type = parttype
860 self.type = parttype
858 self._data = data
861 self._data = data
859 self._mandatoryparams = list(mandatoryparams)
862 self._mandatoryparams = list(mandatoryparams)
860 self._advisoryparams = list(advisoryparams)
863 self._advisoryparams = list(advisoryparams)
861 # checking for duplicated entries
864 # checking for duplicated entries
862 self._seenparams = set()
865 self._seenparams = set()
863 for pname, __ in self._mandatoryparams + self._advisoryparams:
866 for pname, __ in self._mandatoryparams + self._advisoryparams:
864 if pname in self._seenparams:
867 if pname in self._seenparams:
865 raise error.ProgrammingError('duplicated params: %s' % pname)
868 raise error.ProgrammingError('duplicated params: %s' % pname)
866 self._seenparams.add(pname)
869 self._seenparams.add(pname)
867 # status of the part's generation:
870 # status of the part's generation:
868 # - None: not started,
871 # - None: not started,
869 # - False: currently generated,
872 # - False: currently generated,
870 # - True: generation done.
873 # - True: generation done.
871 self._generated = None
874 self._generated = None
872 self.mandatory = mandatory
875 self.mandatory = mandatory
873
876
874 def __repr__(self):
877 def __repr__(self):
875 cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
878 cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
876 return ('<%s object at %x; id: %s; type: %s; mandatory: %s>'
879 return ('<%s object at %x; id: %s; type: %s; mandatory: %s>'
877 % (cls, id(self), self.id, self.type, self.mandatory))
880 % (cls, id(self), self.id, self.type, self.mandatory))
878
881
879 def copy(self):
882 def copy(self):
880 """return a copy of the part
883 """return a copy of the part
881
884
882 The new part have the very same content but no partid assigned yet.
885 The new part have the very same content but no partid assigned yet.
883 Parts with generated data cannot be copied."""
886 Parts with generated data cannot be copied."""
884 assert not util.safehasattr(self.data, 'next')
887 assert not util.safehasattr(self.data, 'next')
885 return self.__class__(self.type, self._mandatoryparams,
888 return self.__class__(self.type, self._mandatoryparams,
886 self._advisoryparams, self._data, self.mandatory)
889 self._advisoryparams, self._data, self.mandatory)
887
890
    # methods used to define the part content
    @property
    def data(self):
        # payload of the part; read-only once generation has started
        return self._data
892
895
    @data.setter
    def data(self, data):
        # mutation is forbidden once streaming of the part has begun
        if self._generated is not None:
            raise error.ReadOnlyPartError('part is being generated')
        self._data = data
898
901
    @property
    def mandatoryparams(self):
        """Mandatory parameters as a read-only tuple of (name, value) pairs."""
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._mandatoryparams)
903
906
    @property
    def advisoryparams(self):
        """Advisory parameters as a read-only tuple of (name, value) pairs."""
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._advisoryparams)
908
911
909 def addparam(self, name, value='', mandatory=True):
912 def addparam(self, name, value='', mandatory=True):
910 """add a parameter to the part
913 """add a parameter to the part
911
914
912 If 'mandatory' is set to True, the remote handler must claim support
915 If 'mandatory' is set to True, the remote handler must claim support
913 for this parameter or the unbundling will be aborted.
916 for this parameter or the unbundling will be aborted.
914
917
915 The 'name' and 'value' cannot exceed 255 bytes each.
918 The 'name' and 'value' cannot exceed 255 bytes each.
916 """
919 """
917 if self._generated is not None:
920 if self._generated is not None:
918 raise error.ReadOnlyPartError('part is being generated')
921 raise error.ReadOnlyPartError('part is being generated')
919 if name in self._seenparams:
922 if name in self._seenparams:
920 raise ValueError('duplicated params: %s' % name)
923 raise ValueError('duplicated params: %s' % name)
921 self._seenparams.add(name)
924 self._seenparams.add(name)
922 params = self._advisoryparams
925 params = self._advisoryparams
923 if mandatory:
926 if mandatory:
924 params = self._mandatoryparams
927 params = self._mandatoryparams
925 params.append((name, value))
928 params.append((name, value))
926
929
927 # methods used to generates the bundle2 stream
930 # methods used to generates the bundle2 stream
928 def getchunks(self, ui):
931 def getchunks(self, ui):
929 if self._generated is not None:
932 if self._generated is not None:
930 raise error.ProgrammingError('part can only be consumed once')
933 raise error.ProgrammingError('part can only be consumed once')
931 self._generated = False
934 self._generated = False
932
935
933 if ui.debugflag:
936 if ui.debugflag:
934 msg = ['bundle2-output-part: "%s"' % self.type]
937 msg = ['bundle2-output-part: "%s"' % self.type]
935 if not self.mandatory:
938 if not self.mandatory:
936 msg.append(' (advisory)')
939 msg.append(' (advisory)')
937 nbmp = len(self.mandatoryparams)
940 nbmp = len(self.mandatoryparams)
938 nbap = len(self.advisoryparams)
941 nbap = len(self.advisoryparams)
939 if nbmp or nbap:
942 if nbmp or nbap:
940 msg.append(' (params:')
943 msg.append(' (params:')
941 if nbmp:
944 if nbmp:
942 msg.append(' %i mandatory' % nbmp)
945 msg.append(' %i mandatory' % nbmp)
943 if nbap:
946 if nbap:
944 msg.append(' %i advisory' % nbmp)
947 msg.append(' %i advisory' % nbmp)
945 msg.append(')')
948 msg.append(')')
946 if not self.data:
949 if not self.data:
947 msg.append(' empty payload')
950 msg.append(' empty payload')
948 elif util.safehasattr(self.data, 'next'):
951 elif util.safehasattr(self.data, 'next'):
949 msg.append(' streamed payload')
952 msg.append(' streamed payload')
950 else:
953 else:
951 msg.append(' %i bytes payload' % len(self.data))
954 msg.append(' %i bytes payload' % len(self.data))
952 msg.append('\n')
955 msg.append('\n')
953 ui.debug(''.join(msg))
956 ui.debug(''.join(msg))
954
957
955 #### header
958 #### header
956 if self.mandatory:
959 if self.mandatory:
957 parttype = self.type.upper()
960 parttype = self.type.upper()
958 else:
961 else:
959 parttype = self.type.lower()
962 parttype = self.type.lower()
960 outdebug(ui, 'part %s: "%s"' % (self.id, parttype))
963 outdebug(ui, 'part %s: "%s"' % (self.id, parttype))
961 ## parttype
964 ## parttype
962 header = [_pack(_fparttypesize, len(parttype)),
965 header = [_pack(_fparttypesize, len(parttype)),
963 parttype, _pack(_fpartid, self.id),
966 parttype, _pack(_fpartid, self.id),
964 ]
967 ]
965 ## parameters
968 ## parameters
966 # count
969 # count
967 manpar = self.mandatoryparams
970 manpar = self.mandatoryparams
968 advpar = self.advisoryparams
971 advpar = self.advisoryparams
969 header.append(_pack(_fpartparamcount, len(manpar), len(advpar)))
972 header.append(_pack(_fpartparamcount, len(manpar), len(advpar)))
970 # size
973 # size
971 parsizes = []
974 parsizes = []
972 for key, value in manpar:
975 for key, value in manpar:
973 parsizes.append(len(key))
976 parsizes.append(len(key))
974 parsizes.append(len(value))
977 parsizes.append(len(value))
975 for key, value in advpar:
978 for key, value in advpar:
976 parsizes.append(len(key))
979 parsizes.append(len(key))
977 parsizes.append(len(value))
980 parsizes.append(len(value))
978 paramsizes = _pack(_makefpartparamsizes(len(parsizes) / 2), *parsizes)
981 paramsizes = _pack(_makefpartparamsizes(len(parsizes) / 2), *parsizes)
979 header.append(paramsizes)
982 header.append(paramsizes)
980 # key, value
983 # key, value
981 for key, value in manpar:
984 for key, value in manpar:
982 header.append(key)
985 header.append(key)
983 header.append(value)
986 header.append(value)
984 for key, value in advpar:
987 for key, value in advpar:
985 header.append(key)
988 header.append(key)
986 header.append(value)
989 header.append(value)
987 ## finalize header
990 ## finalize header
988 headerchunk = ''.join(header)
991 headerchunk = ''.join(header)
989 outdebug(ui, 'header chunk size: %i' % len(headerchunk))
992 outdebug(ui, 'header chunk size: %i' % len(headerchunk))
990 yield _pack(_fpartheadersize, len(headerchunk))
993 yield _pack(_fpartheadersize, len(headerchunk))
991 yield headerchunk
994 yield headerchunk
992 ## payload
995 ## payload
993 try:
996 try:
994 for chunk in self._payloadchunks():
997 for chunk in self._payloadchunks():
995 outdebug(ui, 'payload chunk size: %i' % len(chunk))
998 outdebug(ui, 'payload chunk size: %i' % len(chunk))
996 yield _pack(_fpayloadsize, len(chunk))
999 yield _pack(_fpayloadsize, len(chunk))
997 yield chunk
1000 yield chunk
998 except GeneratorExit:
1001 except GeneratorExit:
999 # GeneratorExit means that nobody is listening for our
1002 # GeneratorExit means that nobody is listening for our
1000 # results anyway, so just bail quickly rather than trying
1003 # results anyway, so just bail quickly rather than trying
1001 # to produce an error part.
1004 # to produce an error part.
1002 ui.debug('bundle2-generatorexit\n')
1005 ui.debug('bundle2-generatorexit\n')
1003 raise
1006 raise
1004 except BaseException as exc:
1007 except BaseException as exc:
1005 # backup exception data for later
1008 # backup exception data for later
1006 ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
1009 ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
1007 % exc)
1010 % exc)
1008 tb = sys.exc_info()[2]
1011 tb = sys.exc_info()[2]
1009 msg = 'unexpected error: %s' % exc
1012 msg = 'unexpected error: %s' % exc
1010 interpart = bundlepart('error:abort', [('message', msg)],
1013 interpart = bundlepart('error:abort', [('message', msg)],
1011 mandatory=False)
1014 mandatory=False)
1012 interpart.id = 0
1015 interpart.id = 0
1013 yield _pack(_fpayloadsize, -1)
1016 yield _pack(_fpayloadsize, -1)
1014 for chunk in interpart.getchunks(ui=ui):
1017 for chunk in interpart.getchunks(ui=ui):
1015 yield chunk
1018 yield chunk
1016 outdebug(ui, 'closing payload chunk')
1019 outdebug(ui, 'closing payload chunk')
1017 # abort current part payload
1020 # abort current part payload
1018 yield _pack(_fpayloadsize, 0)
1021 yield _pack(_fpayloadsize, 0)
1019 pycompat.raisewithtb(exc, tb)
1022 pycompat.raisewithtb(exc, tb)
1020 # end of payload
1023 # end of payload
1021 outdebug(ui, 'closing payload chunk')
1024 outdebug(ui, 'closing payload chunk')
1022 yield _pack(_fpayloadsize, 0)
1025 yield _pack(_fpayloadsize, 0)
1023 self._generated = True
1026 self._generated = True
1024
1027
1025 def _payloadchunks(self):
1028 def _payloadchunks(self):
1026 """yield chunks of a the part payload
1029 """yield chunks of a the part payload
1027
1030
1028 Exists to handle the different methods to provide data to a part."""
1031 Exists to handle the different methods to provide data to a part."""
1029 # we only support fixed size data now.
1032 # we only support fixed size data now.
1030 # This will be improved in the future.
1033 # This will be improved in the future.
1031 if util.safehasattr(self.data, 'next'):
1034 if util.safehasattr(self.data, 'next'):
1032 buff = util.chunkbuffer(self.data)
1035 buff = util.chunkbuffer(self.data)
1033 chunk = buff.read(preferedchunksize)
1036 chunk = buff.read(preferedchunksize)
1034 while chunk:
1037 while chunk:
1035 yield chunk
1038 yield chunk
1036 chunk = buff.read(preferedchunksize)
1039 chunk = buff.read(preferedchunksize)
1037 elif len(self.data):
1040 elif len(self.data):
1038 yield self.data
1041 yield self.data
1039
1042
1040
1043
# special payload-size value signalling an out-of-band interruption part
# embedded in the stream (see interrupthandler)
flaginterrupt = -1
1042
1045
class interrupthandler(unpackermixin):
    """Read a single out-of-band part and process it with restricted power.

    Lets the producer side transmit an exception raised during part
    iteration while the consumer is in the middle of reading a part.

    Parts processed in this manner only have access to a ui object.
    """

    def __init__(self, ui, fp):
        super(interrupthandler, self).__init__(fp)
        self.ui = ui

    def _readpartheader(self):
        """Read a part header size then return the header bytes blob.

        Returns None when the advertised size is zero.
        """
        size = self._unpack(_fpartheadersize)[0]
        if size < 0:
            raise error.BundleValueError('negative part header size: %i'
                                         % size)
        indebug(self.ui, 'part header size: %i\n' % size)
        if not size:
            return None
        return self._readexact(size)

    def __call__(self):
        debug = self.ui.debug
        debug('bundle2-input-stream-interrupt:'
              ' opening out of band context\n')
        indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
        blob = self._readpartheader()
        if blob is None:
            indebug(self.ui, 'no part found during interruption.')
            return
        part = unbundlepart(self.ui, blob, self._fp)
        op = interruptoperation(self.ui)
        _processpart(op, part)
        debug('bundle2-input-stream-interrupt:'
              ' closing out of band context\n')
1082
1085
class interruptoperation(object):
    """Restricted bundle operation for use while handling an interruption.

    Only a ui object is available; any repository or transaction access
    is rejected.
    """

    def __init__(self, ui):
        self.ui = ui
        # no reply bundler and no output capture during an interruption
        self.reply = None
        self.captureoutput = False

    @property
    def repo(self):
        raise error.ProgrammingError('no repo access from stream interruption')

    def gettransaction(self):
        raise TransactionUnavailable('no repo access from stream interruption')
1100
1103
class unbundlepart(unpackermixin):
    """A bundle part read from a bundle.

    Provides a file-like interface (``read``/``tell``/``seek``) over the
    part payload. When the underlying file object is seekable, an index of
    chunk positions is built lazily so the payload can be re-read from an
    arbitrary offset.
    """

    def __init__(self, ui, header, fp):
        super(unbundlepart, self).__init__(fp)
        # seeking within the payload is only possible when the underlying
        # file object itself supports seek/tell
        self._seekable = (util.safehasattr(fp, 'seek') and
                          util.safehasattr(fp, 'tell'))
        self.ui = ui
        # unbundle state attr
        self._headerdata = header
        self._headeroffset = 0
        self._initialized = False
        self.consumed = False
        # part data
        self.id = None
        self.type = None
        self.mandatoryparams = None
        self.advisoryparams = None
        self.params = None
        self.mandatorykeys = ()
        self._payloadstream = None
        self._readheader()
        self._mandatory = None
        self._chunkindex = [] #(payload, file) position tuples for chunk starts
        self._pos = 0

    def _fromheader(self, size):
        """return the next <size> byte from the header"""
        offset = self._headeroffset
        data = self._headerdata[offset:(offset + size)]
        self._headeroffset = offset + size
        return data

    def _unpackheader(self, format):
        """read given format from header

        This automatically compute the size of the format to read."""
        data = self._fromheader(struct.calcsize(format))
        return _unpack(format, data)

    def _initparams(self, mandatoryparams, advisoryparams):
        """internal function to setup all logic related parameters"""
        # make it read only to prevent people touching it by mistake.
        self.mandatoryparams = tuple(mandatoryparams)
        self.advisoryparams = tuple(advisoryparams)
        # user friendly UI
        self.params = util.sortdict(self.mandatoryparams)
        self.params.update(self.advisoryparams)
        self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)

    def _payloadchunks(self, chunknum=0):
        '''seek to specified chunk and start yielding data'''
        if len(self._chunkindex) == 0:
            # first pass: record the position of chunk 0 as we go
            assert chunknum == 0, 'Must start with chunk 0'
            self._chunkindex.append((0, self._tellfp()))
        else:
            # re-read: we can only resume from a chunk seen before
            assert chunknum < len(self._chunkindex), \
                   'Unknown chunk %d' % chunknum
            self._seekfp(self._chunkindex[chunknum][1])

        pos = self._chunkindex[chunknum][0]
        payloadsize = self._unpack(_fpayloadsize)[0]
        indebug(self.ui, 'payload chunk size: %i' % payloadsize)
        while payloadsize:
            if payloadsize == flaginterrupt:
                # interruption detection, the handler will now read a
                # single part and process it.
                interrupthandler(self.ui, self._fp)()
            elif payloadsize < 0:
                msg = 'negative payload chunk size: %i' % payloadsize
                raise error.BundleValueError(msg)
            else:
                result = self._readexact(payloadsize)
                chunknum += 1
                pos += payloadsize
                # extend the chunk index as we discover new chunks
                if chunknum == len(self._chunkindex):
                    self._chunkindex.append((pos, self._tellfp()))
                yield result
            payloadsize = self._unpack(_fpayloadsize)[0]
            indebug(self.ui, 'payload chunk size: %i' % payloadsize)

    def _findchunk(self, pos):
        '''for a given payload position, return a chunk number and offset'''
        for chunk, (ppos, fpos) in enumerate(self._chunkindex):
            if ppos == pos:
                return chunk, 0
            elif ppos > pos:
                # pos falls inside the previous chunk
                return chunk - 1, pos - self._chunkindex[chunk - 1][0]
        raise ValueError('Unknown chunk')

    def _readheader(self):
        """read the header and setup the object"""
        typesize = self._unpackheader(_fparttypesize)[0]
        self.type = self._fromheader(typesize)
        indebug(self.ui, 'part type: "%s"' % self.type)
        self.id = self._unpackheader(_fpartid)[0]
        indebug(self.ui, 'part id: "%s"' % self.id)
        # extract mandatory bit from type (upper-case on the wire means
        # mandatory)
        self.mandatory = (self.type != self.type.lower())
        self.type = self.type.lower()
        ## reading parameters
        # param count
        mancount, advcount = self._unpackheader(_fpartparamcount)
        indebug(self.ui, 'part parameters: %i' % (mancount + advcount))
        # param size
        fparamsizes = _makefpartparamsizes(mancount + advcount)
        paramsizes = self._unpackheader(fparamsizes)
        # make it a list of couple again
        # NOTE(review): relies on zip() returning a sliceable list, i.e.
        # Python 2 semantics
        paramsizes = zip(paramsizes[::2], paramsizes[1::2])
        # split mandatory from advisory
        mansizes = paramsizes[:mancount]
        advsizes = paramsizes[mancount:]
        # retrieve param value
        manparams = []
        for key, value in mansizes:
            manparams.append((self._fromheader(key), self._fromheader(value)))
        advparams = []
        for key, value in advsizes:
            advparams.append((self._fromheader(key), self._fromheader(value)))
        self._initparams(manparams, advparams)
        ## part payload
        self._payloadstream = util.chunkbuffer(self._payloadchunks())
        # we read the data, tell it
        self._initialized = True

    def read(self, size=None):
        """read payload data"""
        if not self._initialized:
            self._readheader()
        if size is None:
            data = self._payloadstream.read()
        else:
            data = self._payloadstream.read(size)
        self._pos += len(data)
        if size is None or len(data) < size:
            # a short read means the payload is exhausted
            if not self.consumed and self._pos:
                self.ui.debug('bundle2-input-part: total payload size %i\n'
                              % self._pos)
            self.consumed = True
        return data

    def tell(self):
        # current position within the payload
        return self._pos

    def seek(self, offset, whence=0):
        # standard whence semantics: 0 = absolute, 1 = relative,
        # 2 = from end (requires consuming the payload to know its size)
        if whence == 0:
            newpos = offset
        elif whence == 1:
            newpos = self._pos + offset
        elif whence == 2:
            if not self.consumed:
                self.read()
            newpos = self._chunkindex[-1][0] - offset
        else:
            raise ValueError('Unknown whence value: %r' % (whence,))

        if newpos > self._chunkindex[-1][0] and not self.consumed:
            # target may lie in a chunk not yet indexed; consume the rest
            self.read()
        if not 0 <= newpos <= self._chunkindex[-1][0]:
            raise ValueError('Offset out of range')

        if self._pos != newpos:
            # rebuild the payload stream starting from the chunk containing
            # newpos, then skip to the exact offset within that chunk
            chunk, internaloffset = self._findchunk(newpos)
            self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
            adjust = self.read(internaloffset)
            if len(adjust) != internaloffset:
                raise error.Abort(_('Seek failed\n'))
            self._pos = newpos

    def _seekfp(self, offset, whence=0):
        """move the underlying file pointer

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            return self._fp.seek(offset, whence)
        else:
            raise NotImplementedError(_('File pointer is not seekable'))

    def _tellfp(self):
        """return the file offset, or None if file is not seekable

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            try:
                return self._fp.tell()
            except IOError as e:
                if e.errno == errno.ESPIPE:
                    # the file turned out to be a pipe; remember that and
                    # degrade gracefully to non-seekable behavior
                    self._seekable = False
                else:
                    raise
        return None
1300
1303
# These are only the static capabilities.
# Check the 'getrepocaps' function for the rest.
# Maps capability name -> tuple of supported sub-values (an empty tuple
# means the capability's mere presence is the whole advertisement).
capabilities = {'HG20': (),
                'error': ('abort', 'unsupportedcontent', 'pushraced',
                          'pushkey'),
                'listkeys': (),
                'pushkey': (),
                'digests': tuple(sorted(util.DIGESTS.keys())),
                'remote-changegroup': ('http', 'https'),
                'hgtagsfnodes': (),
                }
1312
1315
def getrepocaps(repo, allowpushback=False):
    """Compute the bundle2 capabilities for a given repo.

    Kept as a function (rather than a module constant) so extensions such
    as evolution can mutate the result.
    """
    caps = capabilities.copy()
    versions = changegroup.supportedincomingversions(repo)
    caps['changegroup'] = tuple(sorted(versions))
    if obsolete.isenabled(repo, obsolete.exchangeopt):
        caps['obsmarkers'] = tuple('V%i' % v for v in obsolete.formats)
    if allowpushback:
        caps['pushback'] = ()
    cpmode = repo.ui.config('server', 'concurrent-push-mode', 'strict')
    if cpmode == 'check-related':
        caps['checkheads'] = ('related',)
    return caps
1330
1333
def bundle2caps(remote):
    """Return the bundle2 capabilities of a peer as a dict.

    Returns an empty dict when the peer does not advertise bundle2 at all.
    Note that an empty-string advertisement still counts as bundle2
    support (hence the explicit ``!= ''`` check).
    """
    raw = remote.capable('bundle2')
    if not raw and raw != '':
        return {}
    # reuse the value already fetched instead of querying the peer twice
    capsblob = urlreq.unquote(raw)
    return decodecaps(capsblob)
1338
1341
def obsmarkersversion(caps):
    """Extract the supported obsmarkers versions from a bundle2caps dict.

    Entries that do not follow the ``V<number>`` naming are ignored.
    """
    versions = []
    for cap in caps.get('obsmarkers', ()):
        if cap.startswith('V'):
            versions.append(int(cap[1:]))
    return versions
1344
1347
def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
                   vfs=None, compression=None, compopts=None):
    """Write a bundle of the requested type to ``filename``.

    HG10 bundles go through the legacy ``writebundle`` path; HG20 bundles
    are assembled part by part from ``opts``. Any other bundle type is a
    programming error.
    """
    if bundletype.startswith('HG10'):
        cg = changegroup.getchangegroup(repo, source, outgoing, version='01')
        return writebundle(ui, cg, filename, bundletype, vfs=vfs,
                           compression=compression, compopts=compopts)
    if not bundletype.startswith('HG20'):
        raise error.ProgrammingError('unknown bundle type: %s' % bundletype)

    caps = {'obsmarkers': ('V1',)} if 'obsolescence' in opts else {}
    bundler = bundle20(ui, caps)
    bundler.setcompression(compression, compopts)
    _addpartsfromopts(ui, repo, bundler, source, outgoing, opts)
    chunkiter = bundler.getchunks()

    return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
1363
1366
def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
    # We should eventually reconcile this logic with the one behind
    # 'exchange.getbundle2partsgenerator'.
    #
    # The type of input from 'getbundle' and 'writenewbundle' are a bit
    # different right now. So we keep them separated for now for the sake of
    # simplicity.

    # we always want a changegroup in such bundle
    cgversion = opts.get('cg.version')
    if cgversion is None:
        cgversion = changegroup.safeversion(repo)
    cg = changegroup.getchangegroup(repo, source, outgoing,
                                    version=cgversion)
    cgpart = bundler.newpart('changegroup', data=cg.getchunks())
    cgpart.addparam('version', cg.version)
    if 'clcount' in cg.extras:
        cgpart.addparam('nbchanges', str(cg.extras['clcount']),
                        mandatory=False)

    addparttagsfnodescache(repo, bundler, outgoing)

    if opts.get('obsolescence', False):
        obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
        buildobsmarkerspart(bundler, obsmarkers)

    if opts.get('phases', False):
        # one fixed-size (phase, head) record per phase head of the
        # outgoing set, packed with the module-level _fphasesentry format
        headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
        entries = []
        for phase in phases.allphases:
            for head in headsbyphase[phase]:
                entries.append(_pack(_fphasesentry, phase, head))
        bundler.newpart('phase-heads', data=''.join(entries))
1400
def addparttagsfnodescache(repo, bundler, outgoing):
    # we include the tags fnode cache for the bundle changeset
    # (as an optional parts)
    cache = tags.hgtagsfnodescache(repo.unfiltered())
    payload = []

    # .hgtags fnodes are only relevant for head changesets. While we could
    # transfer values for all known nodes, there will likely be little to
    # no benefit.
    #
    # We don't bother using a generator to produce output data because
    # a) we only have 40 bytes per head and even esoteric numbers of heads
    # consume little memory (1M heads is 40MB) b) we don't want to send the
    # part if we don't have entries and knowing if we have entries requires
    # cache lookups.
    for node in outgoing.missingheads:
        # Don't compute missing, as this may slow down serving.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is not None:
            payload.append(node)
            payload.append(fnode)

    if payload:
        bundler.newpart('hgtagsfnodes', data=''.join(payload))
1413
1424
def buildobsmarkerspart(bundler, markers):
    """add an obsmarker part to the bundler with <markers>

    No part is created if markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker format.
    """
    if not markers:
        return None

    # pick the newest obsmarker format both sides understand
    remoteversions = obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(remoteversions)
    if version is None:
        raise ValueError('bundler does not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
1429
1440
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
                compopts=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    if bundletype == "HG20":
        bundler = bundle20(ui)
        bundler.setcompression(compression, compopts)
        part = bundler.newpart('changegroup', data=cg.getchunks())
        part.addparam('version', cg.version)
        if 'clcount' in cg.extras:
            part.addparam('nbchanges', str(cg.extras['clcount']),
                          mandatory=False)
        chunks = bundler.getchunks()
    else:
        # compression argument is only for the bundle2 case
        assert compression is None
        if cg.version != '01':
            raise error.Abort(_('old bundle types only supports v1 '
                                'changegroups'))
        header, comp = bundletypes[bundletype]
        if comp not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % comp)
        compengine = util.compengines.forbundletype(comp)
        def _gen():
            # legacy format: magic header followed by the compressed
            # changegroup stream
            yield header
            for chunk in compengine.compressstream(cg.getchunks(), compopts):
                yield chunk
        chunks = _gen()

    # parse the changegroup data, otherwise we will block
    # in case of sshrepo because we don't know the end of the stream
    return changegroup.writechunks(ui, chunks, filename, vfs=vfs)
1469
1480
@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest'))
def handlechangegroup(op, inpart):
    """apply a changegroup part on the repo

    This is a very early implementation that will massive rework before being
    inflicted to any end-user.
    """
    tr = op.gettransaction()
    unpackerversion = inpart.params.get('version', '01')
    # We should raise an appropriate exception here
    unbundler = changegroup.getunbundler(unpackerversion, inpart, None)
    # the source and url passed here are overwritten by the one contained in
    # the transaction.hookargs argument. So 'bundle2' is a placeholder
    expectedtotal = None
    if 'nbchanges' in inpart.params:
        expectedtotal = int(inpart.params.get('nbchanges'))
    if ('treemanifest' in inpart.params and
        'treemanifest' not in op.repo.requirements):
        # a tree-manifest bundle can only be applied to an empty repo (or
        # one already using tree manifests); flip the requirement on the fly
        if len(op.repo.changelog) != 0:
            raise error.Abort(_(
                "bundle contains tree manifests, but local repo is "
                "non-empty and does not use tree manifests"))
        op.repo.requirements.add('treemanifest')
        op.repo._applyopenerreqs()
        op.repo._writerequirements()
    ret, addednodes = unbundler.apply(op.repo, tr, 'bundle2', 'bundle2',
                                      expectedtotal=expectedtotal)
    op.records.add('changegroup', {
        'return': ret,
        'addednodes': addednodes,
    })
    if op.reply is not None:
        # This is definitely not the final form of this
        # return. But one need to start somewhere.
        part = op.reply.newpart('reply:changegroup', mandatory=False)
        part.addparam('in-reply-to', str(inpart.id), mandatory=False)
        part.addparam('return', '%i' % ret, mandatory=False)
    assert not inpart.read()
1508
1519
_remotechangegroupparams = tuple(['url', 'size', 'digests'] +
    ['digest:%s' % k for k in util.DIGESTS.keys()])
@parthandler('remote-changegroup', _remotechangegroupparams)
def handleremotechangegroup(op, inpart):
    """apply a bundle10 on the repo, given an url and validation information

    All the information about the remote bundle to import are given as
    parameters. The parameters include:
      - url: the url to the bundle10.
      - size: the bundle10 file size. It is used to validate what was
        retrieved by the client matches the server knowledge about the bundle.
      - digests: a space separated list of the digest types provided as
        parameters.
      - digest:<digest-type>: the hexadecimal representation of the digest with
        that name. Like the size, it is used to validate what was retrieved by
        the client matches what the server knows about the bundle.

    When multiple digest types are given, all of them are checked.
    """
    try:
        rawurl = inpart.params['url']
    except KeyError:
        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
    parsedurl = util.url(rawurl)
    if parsedurl.scheme not in capabilities['remote-changegroup']:
        raise error.Abort(_('remote-changegroup does not support %s urls') %
                          parsedurl.scheme)

    try:
        size = int(inpart.params['size'])
    except ValueError:
        raise error.Abort(_('remote-changegroup: invalid value for param "%s"')
                          % 'size')
    except KeyError:
        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')

    # collect every digest the part advertises; all will be verified
    digests = {}
    for typ in inpart.params.get('digests', '').split():
        param = 'digest:%s' % typ
        try:
            value = inpart.params[param]
        except KeyError:
            raise error.Abort(_('remote-changegroup: missing "%s" param') %
                              param)
        digests[typ] = value

    # wrap the remote stream so size/digests are checked as it is consumed
    realpart = util.digestchecker(url.open(op.ui, rawurl), size, digests)

    tr = op.gettransaction()
    # imported here to avoid a module-level import cycle
    from . import exchange
    cg = exchange.readbundle(op.repo.ui, realpart, rawurl)
    if not isinstance(cg, changegroup.cg1unpacker):
        raise error.Abort(_('%s: not a bundle version 1.0') %
                          util.hidepassword(rawurl))
    ret, addednodes = cg.apply(op.repo, tr, 'bundle2', 'bundle2')
    op.records.add('changegroup', {
        'return': ret,
        'addednodes': addednodes,
    })
    if op.reply is not None:
        # This is definitely not the final form of this
        # return. But one need to start somewhere.
        part = op.reply.newpart('reply:changegroup')
        part.addparam('in-reply-to', str(inpart.id), mandatory=False)
        part.addparam('return', '%i' % ret, mandatory=False)
    try:
        realpart.validate()
    except error.Abort as e:
        raise error.Abort(_('bundle at %s is corrupted:\n%s') %
                          (util.hidepassword(rawurl), str(e)))
    assert not inpart.read()
1580
1591
@parthandler('reply:changegroup', ('return', 'in-reply-to'))
def handlereplychangegroup(op, inpart):
    """record the outcome the server reported for a changegroup part"""
    returncode = int(inpart.params['return'])
    replyto = int(inpart.params['in-reply-to'])
    op.records.add('changegroup', {'return': returncode}, replyto)
1586
1597
@parthandler('check:heads')
def handlecheckheads(op, inpart):
    """check that head of the repo did not change

    This is used to detect a push race when using unbundle.
    This replaces the "heads" argument of unbundle."""
    heads = []
    chunk = inpart.read(20)
    while len(chunk) == 20:
        heads.append(chunk)
        chunk = inpart.read(20)
    # payload must be an exact multiple of 20-byte nodes
    assert not chunk
    # Trigger a transaction so that we are guaranteed to have the lock now.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    if sorted(heads) != sorted(op.repo.heads()):
        raise error.PushRaced('repository changed while pushing - '
                              'please try again')
1605
1616
@parthandler('check:updated-heads')
def handlecheckupdatedheads(op, inpart):
    """check for race on the heads touched by a push

    This is similar to 'check:heads' but focus on the heads actually updated
    during the push. If other activities happen on unrelated heads, it is
    ignored.

    This allow server with high traffic to avoid push contention as long as
    unrelated parts of the graph are involved."""
    heads = []
    chunk = inpart.read(20)
    while len(chunk) == 20:
        heads.append(chunk)
        chunk = inpart.read(20)
    # payload must be an exact multiple of 20-byte nodes
    assert not chunk
    # trigger a transaction so that we are guaranteed to have the lock now.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()

    # gather the current heads across all branches
    knownheads = set()
    for branchheads in op.repo.branchmap().itervalues():
        knownheads.update(branchheads)

    for h in heads:
        if h not in knownheads:
            raise error.PushRaced('repository changed while pushing - '
                                  'please try again')
1634
1645
@parthandler('output')
def handleoutput(op, inpart):
    """forward output captured on the server to the client"""
    data = inpart.read()
    for line in data.splitlines():
        op.ui.status(_('remote: %s\n') % line)
1640
1651
@parthandler('replycaps')
def handlereplycaps(op, inpart):
    """Notify that a reply bundle should be created

    The payload contains the capabilities information for the reply"""
    replycaps = decodecaps(inpart.read())
    # only create the reply bundle once; later parts reuse it
    if op.reply is None:
        op.reply = bundle20(op.ui, replycaps)
1649
1660
class AbortFromPart(error.Abort):
    """Sub-class of Abort that denotes an error from a bundle2 part."""
1652
1663
@parthandler('error:abort', ('message', 'hint'))
def handleerrorabort(op, inpart):
    """Used to transmit abort error over the wire"""
    message = inpart.params['message']
    hint = inpart.params.get('hint')
    raise AbortFromPart(message, hint=hint)
1658
1669
@parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret',
                               'in-reply-to'))
def handleerrorpushkey(op, inpart):
    """Used to transmit failure of a mandatory pushkey over the wire"""
    # forward only the parameters that were actually transmitted
    kwargs = dict((name, inpart.params[name])
                  for name in ('namespace', 'key', 'new', 'old', 'ret')
                  if inpart.params.get(name) is not None)
    raise error.PushkeyFailed(inpart.params['in-reply-to'], **kwargs)
1669
1680
@parthandler('error:unsupportedcontent', ('parttype', 'params'))
def handleerrorunsupportedcontent(op, inpart):
    """Used to transmit unknown content error over the wire"""
    kwargs = {}
    parttype = inpart.params.get('parttype')
    if parttype is not None:
        kwargs['parttype'] = parttype
    params = inpart.params.get('params')
    if params is not None:
        # parameter names travel NUL-separated in a single value
        kwargs['params'] = params.split('\0')
    raise error.BundleUnknownFeatureError(**kwargs)
1682
1693
@parthandler('error:pushraced', ('message',))
def handleerrorpushraced(op, inpart):
    """Used to transmit push race error over the wire"""
    raise error.ResponseError(_('push failed:'), inpart.params['message'])
1687
1698
@parthandler('listkeys', ('namespace',))
def handlelistkeys(op, inpart):
    """retrieve pushkey namespace content stored in a bundle2"""
    namespace = inpart.params['namespace']
    contents = pushkey.decodekeys(inpart.read())
    op.records.add('listkeys', (namespace, contents))
1694
1705
@parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
def handlepushkey(op, inpart):
    """process a pushkey request"""
    decode = pushkey.decode
    namespace = decode(inpart.params['namespace'])
    key = decode(inpart.params['key'])
    old = decode(inpart.params['old'])
    new = decode(inpart.params['new'])
    # Grab the transaction to ensure that we have the lock before performing the
    # pushkey.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    ret = op.repo.pushkey(namespace, key, old, new)
    op.records.add('pushkey', {'namespace': namespace,
                               'key': key,
                               'old': old,
                               'new': new})
    if op.reply is not None:
        rpart = op.reply.newpart('reply:pushkey')
        rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
        rpart.addparam('return', '%i' % ret, mandatory=False)
    if inpart.mandatory and not ret:
        # a mandatory pushkey that failed must abort the whole unbundle
        kwargs = dict((name, inpart.params[name])
                      for name in ('namespace', 'key', 'new', 'old', 'ret')
                      if name in inpart.params)
        raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
1723
1734
def _readphaseheads(inpart):
    """decode a phase-heads part payload into per-phase lists of head nodes

    Raises Abort if the payload is not a whole number of fixed-size
    (phase, node) records.
    """
    headsbyphase = [[] for i in phases.allphases]
    entrysize = struct.calcsize(_fphasesentry)
    data = inpart.read(entrysize)
    while len(data) == entrysize:
        phase, node = struct.unpack(_fphasesentry, data)
        headsbyphase[phase].append(node)
        data = inpart.read(entrysize)
    if data:
        # trailing partial record: the part is corrupt
        raise error.Abort(_('bad phase-heads bundle part'))
    return headsbyphase
1747
@parthandler('phase-heads')
def handlephases(op, inpart):
    """apply phases from bundle part to repo"""
    headsbyphase = _readphaseheads(inpart)
    # collect the nodes added by any changegroup parts handled earlier
    addednodes = []
    for record in op.records['changegroup']:
        addednodes.extend(record['addednodes'])
    tr = op.gettransaction()
    phases.updatephases(op.repo.unfiltered(), tr, headsbyphase, addednodes)
1757
@parthandler('reply:pushkey', ('return', 'in-reply-to'))
def handlepushkeyreply(op, inpart):
    """retrieve the result of a pushkey request"""
    returncode = int(inpart.params['return'])
    partid = int(inpart.params['in-reply-to'])
    op.records.add('pushkey', {'return': returncode}, partid)
1730
1764
@parthandler('obsmarkers')
def handleobsmarker(op, inpart):
    """add a stream of obsmarkers to the repo"""
    tr = op.gettransaction()
    markerdata = inpart.read()
    if op.ui.config('experimental', 'obsmarkers-exchange-debug', False):
        op.ui.write(('obsmarker-exchange: %i bytes received\n')
                    % len(markerdata))
    # The mergemarkers call will crash if marker creation is not enabled.
    # we want to avoid this if the part is advisory.
    if not inpart.mandatory and op.repo.obsstore.readonly:
        op.repo.ui.debug('ignoring obsolescence markers, feature not enabled')
        return
    added = op.repo.obsstore.mergemarkers(tr, markerdata)
    op.repo.invalidatevolatilesets()
    if added:
        op.repo.ui.status(_('%i new obsolescence markers\n') % added)
    op.records.add('obsmarkers', {'new': added})
    if op.reply is not None:
        rpart = op.reply.newpart('reply:obsmarkers')
        rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
        rpart.addparam('new', '%i' % added, mandatory=False)
1753
1787
1754
1788
@parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
def handleobsmarkerreply(op, inpart):
    """retrieve the result of an obsmarkers part sent to the server"""
    added = int(inpart.params['new'])
    partid = int(inpart.params['in-reply-to'])
    op.records.add('obsmarkers', {'new': added}, partid)
1761
1795
@parthandler('hgtagsfnodes')
def handlehgtagsfnodes(op, inpart):
    """Applies .hgtags fnodes cache entries to the local repo.

    Payload is pairs of 20 byte changeset nodes and filenodes.
    """
    # Grab the transaction so we ensure that we have the lock at this point.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    cache = tags.hgtagsfnodescache(op.repo.unfiltered())

    applied = 0
    while True:
        node = inpart.read(20)
        fnode = inpart.read(20)
        if len(node) < 20 or len(fnode) < 20:
            # a short read means the payload ended (possibly mid-pair)
            op.ui.debug('ignoring incomplete received .hgtags fnodes data\n')
            break
        cache.setfnode(node, fnode)
        applied += 1

    cache.write()
    op.ui.debug('applied %i hgtags fnodes cache entries\n' % applied)
1818 op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
@@ -1,5400 +1,5402 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import sys
14 import sys
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23 from . import (
23 from . import (
24 archival,
24 archival,
25 bookmarks,
25 bookmarks,
26 bundle2,
26 bundle2,
27 changegroup,
27 changegroup,
28 cmdutil,
28 cmdutil,
29 copies,
29 copies,
30 debugcommands as debugcommandsmod,
30 debugcommands as debugcommandsmod,
31 destutil,
31 destutil,
32 dirstateguard,
32 dirstateguard,
33 discovery,
33 discovery,
34 encoding,
34 encoding,
35 error,
35 error,
36 exchange,
36 exchange,
37 extensions,
37 extensions,
38 formatter,
38 formatter,
39 graphmod,
39 graphmod,
40 hbisect,
40 hbisect,
41 help,
41 help,
42 hg,
42 hg,
43 lock as lockmod,
43 lock as lockmod,
44 merge as mergemod,
44 merge as mergemod,
45 obsolete,
45 obsolete,
46 patch,
46 patch,
47 phases,
47 phases,
48 pycompat,
48 pycompat,
49 rcutil,
49 rcutil,
50 registrar,
50 registrar,
51 revsetlang,
51 revsetlang,
52 scmutil,
52 scmutil,
53 server,
53 server,
54 sshserver,
54 sshserver,
55 streamclone,
55 streamclone,
56 tags as tagsmod,
56 tags as tagsmod,
57 templatekw,
57 templatekw,
58 ui as uimod,
58 ui as uimod,
59 util,
59 util,
60 )
60 )
61
61
62 release = lockmod.release
62 release = lockmod.release
63
63
64 table = {}
64 table = {}
65 table.update(debugcommandsmod.command._table)
65 table.update(debugcommandsmod.command._table)
66
66
67 command = registrar.command(table)
67 command = registrar.command(table)
68
68
69 # common command options
69 # common command options
70
70
71 globalopts = [
71 globalopts = [
72 ('R', 'repository', '',
72 ('R', 'repository', '',
73 _('repository root directory or name of overlay bundle file'),
73 _('repository root directory or name of overlay bundle file'),
74 _('REPO')),
74 _('REPO')),
75 ('', 'cwd', '',
75 ('', 'cwd', '',
76 _('change working directory'), _('DIR')),
76 _('change working directory'), _('DIR')),
77 ('y', 'noninteractive', None,
77 ('y', 'noninteractive', None,
78 _('do not prompt, automatically pick the first choice for all prompts')),
78 _('do not prompt, automatically pick the first choice for all prompts')),
79 ('q', 'quiet', None, _('suppress output')),
79 ('q', 'quiet', None, _('suppress output')),
80 ('v', 'verbose', None, _('enable additional output')),
80 ('v', 'verbose', None, _('enable additional output')),
81 ('', 'color', '',
81 ('', 'color', '',
82 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
82 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
83 # and should not be translated
83 # and should not be translated
84 _("when to colorize (boolean, always, auto, never, or debug)"),
84 _("when to colorize (boolean, always, auto, never, or debug)"),
85 _('TYPE')),
85 _('TYPE')),
86 ('', 'config', [],
86 ('', 'config', [],
87 _('set/override config option (use \'section.name=value\')'),
87 _('set/override config option (use \'section.name=value\')'),
88 _('CONFIG')),
88 _('CONFIG')),
89 ('', 'debug', None, _('enable debugging output')),
89 ('', 'debug', None, _('enable debugging output')),
90 ('', 'debugger', None, _('start debugger')),
90 ('', 'debugger', None, _('start debugger')),
91 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
91 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
92 _('ENCODE')),
92 _('ENCODE')),
93 ('', 'encodingmode', encoding.encodingmode,
93 ('', 'encodingmode', encoding.encodingmode,
94 _('set the charset encoding mode'), _('MODE')),
94 _('set the charset encoding mode'), _('MODE')),
95 ('', 'traceback', None, _('always print a traceback on exception')),
95 ('', 'traceback', None, _('always print a traceback on exception')),
96 ('', 'time', None, _('time how long the command takes')),
96 ('', 'time', None, _('time how long the command takes')),
97 ('', 'profile', None, _('print command execution profile')),
97 ('', 'profile', None, _('print command execution profile')),
98 ('', 'version', None, _('output version information and exit')),
98 ('', 'version', None, _('output version information and exit')),
99 ('h', 'help', None, _('display help and exit')),
99 ('h', 'help', None, _('display help and exit')),
100 ('', 'hidden', False, _('consider hidden changesets')),
100 ('', 'hidden', False, _('consider hidden changesets')),
101 ('', 'pager', 'auto',
101 ('', 'pager', 'auto',
102 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
102 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
103 ]
103 ]
104
104
105 dryrunopts = cmdutil.dryrunopts
105 dryrunopts = cmdutil.dryrunopts
106 remoteopts = cmdutil.remoteopts
106 remoteopts = cmdutil.remoteopts
107 walkopts = cmdutil.walkopts
107 walkopts = cmdutil.walkopts
108 commitopts = cmdutil.commitopts
108 commitopts = cmdutil.commitopts
109 commitopts2 = cmdutil.commitopts2
109 commitopts2 = cmdutil.commitopts2
110 formatteropts = cmdutil.formatteropts
110 formatteropts = cmdutil.formatteropts
111 templateopts = cmdutil.templateopts
111 templateopts = cmdutil.templateopts
112 logopts = cmdutil.logopts
112 logopts = cmdutil.logopts
113 diffopts = cmdutil.diffopts
113 diffopts = cmdutil.diffopts
114 diffwsopts = cmdutil.diffwsopts
114 diffwsopts = cmdutil.diffwsopts
115 diffopts2 = cmdutil.diffopts2
115 diffopts2 = cmdutil.diffopts2
116 mergetoolopts = cmdutil.mergetoolopts
116 mergetoolopts = cmdutil.mergetoolopts
117 similarityopts = cmdutil.similarityopts
117 similarityopts = cmdutil.similarityopts
118 subrepoopts = cmdutil.subrepoopts
118 subrepoopts = cmdutil.subrepoopts
119 debugrevlogopts = cmdutil.debugrevlogopts
119 debugrevlogopts = cmdutil.debugrevlogopts
120
120
121 # Commands start here, listed alphabetically
121 # Commands start here, listed alphabetically
122
122
123 @command('^add',
123 @command('^add',
124 walkopts + subrepoopts + dryrunopts,
124 walkopts + subrepoopts + dryrunopts,
125 _('[OPTION]... [FILE]...'),
125 _('[OPTION]... [FILE]...'),
126 inferrepo=True)
126 inferrepo=True)
127 def add(ui, repo, *pats, **opts):
127 def add(ui, repo, *pats, **opts):
128 """add the specified files on the next commit
128 """add the specified files on the next commit
129
129
130 Schedule files to be version controlled and added to the
130 Schedule files to be version controlled and added to the
131 repository.
131 repository.
132
132
133 The files will be added to the repository at the next commit. To
133 The files will be added to the repository at the next commit. To
134 undo an add before that, see :hg:`forget`.
134 undo an add before that, see :hg:`forget`.
135
135
136 If no names are given, add all files to the repository (except
136 If no names are given, add all files to the repository (except
137 files matching ``.hgignore``).
137 files matching ``.hgignore``).
138
138
139 .. container:: verbose
139 .. container:: verbose
140
140
141 Examples:
141 Examples:
142
142
143 - New (unknown) files are added
143 - New (unknown) files are added
144 automatically by :hg:`add`::
144 automatically by :hg:`add`::
145
145
146 $ ls
146 $ ls
147 foo.c
147 foo.c
148 $ hg status
148 $ hg status
149 ? foo.c
149 ? foo.c
150 $ hg add
150 $ hg add
151 adding foo.c
151 adding foo.c
152 $ hg status
152 $ hg status
153 A foo.c
153 A foo.c
154
154
155 - Specific files to be added can be specified::
155 - Specific files to be added can be specified::
156
156
157 $ ls
157 $ ls
158 bar.c foo.c
158 bar.c foo.c
159 $ hg status
159 $ hg status
160 ? bar.c
160 ? bar.c
161 ? foo.c
161 ? foo.c
162 $ hg add bar.c
162 $ hg add bar.c
163 $ hg status
163 $ hg status
164 A bar.c
164 A bar.c
165 ? foo.c
165 ? foo.c
166
166
167 Returns 0 if all files are successfully added.
167 Returns 0 if all files are successfully added.
168 """
168 """
169
169
170 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
170 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
171 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
171 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
172 return rejected and 1 or 0
172 return rejected and 1 or 0
173
173
174 @command('addremove',
174 @command('addremove',
175 similarityopts + subrepoopts + walkopts + dryrunopts,
175 similarityopts + subrepoopts + walkopts + dryrunopts,
176 _('[OPTION]... [FILE]...'),
176 _('[OPTION]... [FILE]...'),
177 inferrepo=True)
177 inferrepo=True)
178 def addremove(ui, repo, *pats, **opts):
178 def addremove(ui, repo, *pats, **opts):
179 """add all new files, delete all missing files
179 """add all new files, delete all missing files
180
180
181 Add all new files and remove all missing files from the
181 Add all new files and remove all missing files from the
182 repository.
182 repository.
183
183
184 Unless names are given, new files are ignored if they match any of
184 Unless names are given, new files are ignored if they match any of
185 the patterns in ``.hgignore``. As with add, these changes take
185 the patterns in ``.hgignore``. As with add, these changes take
186 effect at the next commit.
186 effect at the next commit.
187
187
188 Use the -s/--similarity option to detect renamed files. This
188 Use the -s/--similarity option to detect renamed files. This
189 option takes a percentage between 0 (disabled) and 100 (files must
189 option takes a percentage between 0 (disabled) and 100 (files must
190 be identical) as its parameter. With a parameter greater than 0,
190 be identical) as its parameter. With a parameter greater than 0,
191 this compares every removed file with every added file and records
191 this compares every removed file with every added file and records
192 those similar enough as renames. Detecting renamed files this way
192 those similar enough as renames. Detecting renamed files this way
193 can be expensive. After using this option, :hg:`status -C` can be
193 can be expensive. After using this option, :hg:`status -C` can be
194 used to check which files were identified as moved or renamed. If
194 used to check which files were identified as moved or renamed. If
195 not specified, -s/--similarity defaults to 100 and only renames of
195 not specified, -s/--similarity defaults to 100 and only renames of
196 identical files are detected.
196 identical files are detected.
197
197
198 .. container:: verbose
198 .. container:: verbose
199
199
200 Examples:
200 Examples:
201
201
202 - A number of files (bar.c and foo.c) are new,
202 - A number of files (bar.c and foo.c) are new,
203 while foobar.c has been removed (without using :hg:`remove`)
203 while foobar.c has been removed (without using :hg:`remove`)
204 from the repository::
204 from the repository::
205
205
206 $ ls
206 $ ls
207 bar.c foo.c
207 bar.c foo.c
208 $ hg status
208 $ hg status
209 ! foobar.c
209 ! foobar.c
210 ? bar.c
210 ? bar.c
211 ? foo.c
211 ? foo.c
212 $ hg addremove
212 $ hg addremove
213 adding bar.c
213 adding bar.c
214 adding foo.c
214 adding foo.c
215 removing foobar.c
215 removing foobar.c
216 $ hg status
216 $ hg status
217 A bar.c
217 A bar.c
218 A foo.c
218 A foo.c
219 R foobar.c
219 R foobar.c
220
220
221 - A file foobar.c was moved to foo.c without using :hg:`rename`.
221 - A file foobar.c was moved to foo.c without using :hg:`rename`.
222 Afterwards, it was edited slightly::
222 Afterwards, it was edited slightly::
223
223
224 $ ls
224 $ ls
225 foo.c
225 foo.c
226 $ hg status
226 $ hg status
227 ! foobar.c
227 ! foobar.c
228 ? foo.c
228 ? foo.c
229 $ hg addremove --similarity 90
229 $ hg addremove --similarity 90
230 removing foobar.c
230 removing foobar.c
231 adding foo.c
231 adding foo.c
232 recording removal of foobar.c as rename to foo.c (94% similar)
232 recording removal of foobar.c as rename to foo.c (94% similar)
233 $ hg status -C
233 $ hg status -C
234 A foo.c
234 A foo.c
235 foobar.c
235 foobar.c
236 R foobar.c
236 R foobar.c
237
237
238 Returns 0 if all files are successfully added.
238 Returns 0 if all files are successfully added.
239 """
239 """
240 opts = pycompat.byteskwargs(opts)
240 opts = pycompat.byteskwargs(opts)
241 try:
241 try:
242 sim = float(opts.get('similarity') or 100)
242 sim = float(opts.get('similarity') or 100)
243 except ValueError:
243 except ValueError:
244 raise error.Abort(_('similarity must be a number'))
244 raise error.Abort(_('similarity must be a number'))
245 if sim < 0 or sim > 100:
245 if sim < 0 or sim > 100:
246 raise error.Abort(_('similarity must be between 0 and 100'))
246 raise error.Abort(_('similarity must be between 0 and 100'))
247 matcher = scmutil.match(repo[None], pats, opts)
247 matcher = scmutil.match(repo[None], pats, opts)
248 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
248 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
249
249
250 @command('^annotate|blame',
250 @command('^annotate|blame',
251 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
251 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
252 ('', 'follow', None,
252 ('', 'follow', None,
253 _('follow copies/renames and list the filename (DEPRECATED)')),
253 _('follow copies/renames and list the filename (DEPRECATED)')),
254 ('', 'no-follow', None, _("don't follow copies and renames")),
254 ('', 'no-follow', None, _("don't follow copies and renames")),
255 ('a', 'text', None, _('treat all files as text')),
255 ('a', 'text', None, _('treat all files as text')),
256 ('u', 'user', None, _('list the author (long with -v)')),
256 ('u', 'user', None, _('list the author (long with -v)')),
257 ('f', 'file', None, _('list the filename')),
257 ('f', 'file', None, _('list the filename')),
258 ('d', 'date', None, _('list the date (short with -q)')),
258 ('d', 'date', None, _('list the date (short with -q)')),
259 ('n', 'number', None, _('list the revision number (default)')),
259 ('n', 'number', None, _('list the revision number (default)')),
260 ('c', 'changeset', None, _('list the changeset')),
260 ('c', 'changeset', None, _('list the changeset')),
261 ('l', 'line-number', None, _('show line number at the first appearance')),
261 ('l', 'line-number', None, _('show line number at the first appearance')),
262 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
262 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
263 ] + diffwsopts + walkopts + formatteropts,
263 ] + diffwsopts + walkopts + formatteropts,
264 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
264 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
265 inferrepo=True)
265 inferrepo=True)
266 def annotate(ui, repo, *pats, **opts):
266 def annotate(ui, repo, *pats, **opts):
267 """show changeset information by line for each file
267 """show changeset information by line for each file
268
268
269 List changes in files, showing the revision id responsible for
269 List changes in files, showing the revision id responsible for
270 each line.
270 each line.
271
271
272 This command is useful for discovering when a change was made and
272 This command is useful for discovering when a change was made and
273 by whom.
273 by whom.
274
274
275 If you include --file, --user, or --date, the revision number is
275 If you include --file, --user, or --date, the revision number is
276 suppressed unless you also include --number.
276 suppressed unless you also include --number.
277
277
278 Without the -a/--text option, annotate will avoid processing files
278 Without the -a/--text option, annotate will avoid processing files
279 it detects as binary. With -a, annotate will annotate the file
279 it detects as binary. With -a, annotate will annotate the file
280 anyway, although the results will probably be neither useful
280 anyway, although the results will probably be neither useful
281 nor desirable.
281 nor desirable.
282
282
283 Returns 0 on success.
283 Returns 0 on success.
284 """
284 """
285 opts = pycompat.byteskwargs(opts)
285 opts = pycompat.byteskwargs(opts)
286 if not pats:
286 if not pats:
287 raise error.Abort(_('at least one filename or pattern is required'))
287 raise error.Abort(_('at least one filename or pattern is required'))
288
288
289 if opts.get('follow'):
289 if opts.get('follow'):
290 # --follow is deprecated and now just an alias for -f/--file
290 # --follow is deprecated and now just an alias for -f/--file
291 # to mimic the behavior of Mercurial before version 1.5
291 # to mimic the behavior of Mercurial before version 1.5
292 opts['file'] = True
292 opts['file'] = True
293
293
294 ctx = scmutil.revsingle(repo, opts.get('rev'))
294 ctx = scmutil.revsingle(repo, opts.get('rev'))
295
295
296 rootfm = ui.formatter('annotate', opts)
296 rootfm = ui.formatter('annotate', opts)
297 if ui.quiet:
297 if ui.quiet:
298 datefunc = util.shortdate
298 datefunc = util.shortdate
299 else:
299 else:
300 datefunc = util.datestr
300 datefunc = util.datestr
301 if ctx.rev() is None:
301 if ctx.rev() is None:
302 def hexfn(node):
302 def hexfn(node):
303 if node is None:
303 if node is None:
304 return None
304 return None
305 else:
305 else:
306 return rootfm.hexfunc(node)
306 return rootfm.hexfunc(node)
307 if opts.get('changeset'):
307 if opts.get('changeset'):
308 # omit "+" suffix which is appended to node hex
308 # omit "+" suffix which is appended to node hex
309 def formatrev(rev):
309 def formatrev(rev):
310 if rev is None:
310 if rev is None:
311 return '%d' % ctx.p1().rev()
311 return '%d' % ctx.p1().rev()
312 else:
312 else:
313 return '%d' % rev
313 return '%d' % rev
314 else:
314 else:
315 def formatrev(rev):
315 def formatrev(rev):
316 if rev is None:
316 if rev is None:
317 return '%d+' % ctx.p1().rev()
317 return '%d+' % ctx.p1().rev()
318 else:
318 else:
319 return '%d ' % rev
319 return '%d ' % rev
320 def formathex(hex):
320 def formathex(hex):
321 if hex is None:
321 if hex is None:
322 return '%s+' % rootfm.hexfunc(ctx.p1().node())
322 return '%s+' % rootfm.hexfunc(ctx.p1().node())
323 else:
323 else:
324 return '%s ' % hex
324 return '%s ' % hex
325 else:
325 else:
326 hexfn = rootfm.hexfunc
326 hexfn = rootfm.hexfunc
327 formatrev = formathex = str
327 formatrev = formathex = str
328
328
329 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
329 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
330 ('number', ' ', lambda x: x[0].rev(), formatrev),
330 ('number', ' ', lambda x: x[0].rev(), formatrev),
331 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
331 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
332 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
332 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
333 ('file', ' ', lambda x: x[0].path(), str),
333 ('file', ' ', lambda x: x[0].path(), str),
334 ('line_number', ':', lambda x: x[1], str),
334 ('line_number', ':', lambda x: x[1], str),
335 ]
335 ]
336 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
336 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
337
337
338 if (not opts.get('user') and not opts.get('changeset')
338 if (not opts.get('user') and not opts.get('changeset')
339 and not opts.get('date') and not opts.get('file')):
339 and not opts.get('date') and not opts.get('file')):
340 opts['number'] = True
340 opts['number'] = True
341
341
342 linenumber = opts.get('line_number') is not None
342 linenumber = opts.get('line_number') is not None
343 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
343 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
344 raise error.Abort(_('at least one of -n/-c is required for -l'))
344 raise error.Abort(_('at least one of -n/-c is required for -l'))
345
345
346 ui.pager('annotate')
346 ui.pager('annotate')
347
347
348 if rootfm.isplain():
348 if rootfm.isplain():
349 def makefunc(get, fmt):
349 def makefunc(get, fmt):
350 return lambda x: fmt(get(x))
350 return lambda x: fmt(get(x))
351 else:
351 else:
352 def makefunc(get, fmt):
352 def makefunc(get, fmt):
353 return get
353 return get
354 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
354 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
355 if opts.get(op)]
355 if opts.get(op)]
356 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
356 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
357 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
357 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
358 if opts.get(op))
358 if opts.get(op))
359
359
360 def bad(x, y):
360 def bad(x, y):
361 raise error.Abort("%s: %s" % (x, y))
361 raise error.Abort("%s: %s" % (x, y))
362
362
363 m = scmutil.match(ctx, pats, opts, badfn=bad)
363 m = scmutil.match(ctx, pats, opts, badfn=bad)
364
364
365 follow = not opts.get('no_follow')
365 follow = not opts.get('no_follow')
366 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
366 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
367 whitespace=True)
367 whitespace=True)
368 skiprevs = opts.get('skip')
368 skiprevs = opts.get('skip')
369 if skiprevs:
369 if skiprevs:
370 skiprevs = scmutil.revrange(repo, skiprevs)
370 skiprevs = scmutil.revrange(repo, skiprevs)
371
371
372 for abs in ctx.walk(m):
372 for abs in ctx.walk(m):
373 fctx = ctx[abs]
373 fctx = ctx[abs]
374 rootfm.startitem()
374 rootfm.startitem()
375 rootfm.data(abspath=abs, path=m.rel(abs))
375 rootfm.data(abspath=abs, path=m.rel(abs))
376 if not opts.get('text') and fctx.isbinary():
376 if not opts.get('text') and fctx.isbinary():
377 rootfm.plain(_("%s: binary file\n")
377 rootfm.plain(_("%s: binary file\n")
378 % ((pats and m.rel(abs)) or abs))
378 % ((pats and m.rel(abs)) or abs))
379 continue
379 continue
380
380
381 fm = rootfm.nested('lines')
381 fm = rootfm.nested('lines')
382 lines = fctx.annotate(follow=follow, linenumber=linenumber,
382 lines = fctx.annotate(follow=follow, linenumber=linenumber,
383 skiprevs=skiprevs, diffopts=diffopts)
383 skiprevs=skiprevs, diffopts=diffopts)
384 if not lines:
384 if not lines:
385 fm.end()
385 fm.end()
386 continue
386 continue
387 formats = []
387 formats = []
388 pieces = []
388 pieces = []
389
389
390 for f, sep in funcmap:
390 for f, sep in funcmap:
391 l = [f(n) for n, dummy in lines]
391 l = [f(n) for n, dummy in lines]
392 if fm.isplain():
392 if fm.isplain():
393 sizes = [encoding.colwidth(x) for x in l]
393 sizes = [encoding.colwidth(x) for x in l]
394 ml = max(sizes)
394 ml = max(sizes)
395 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
395 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
396 else:
396 else:
397 formats.append(['%s' for x in l])
397 formats.append(['%s' for x in l])
398 pieces.append(l)
398 pieces.append(l)
399
399
400 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
400 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
401 fm.startitem()
401 fm.startitem()
402 fm.write(fields, "".join(f), *p)
402 fm.write(fields, "".join(f), *p)
403 fm.write('line', ": %s", l[1])
403 fm.write('line', ": %s", l[1])
404
404
405 if not lines[-1][1].endswith('\n'):
405 if not lines[-1][1].endswith('\n'):
406 fm.plain('\n')
406 fm.plain('\n')
407 fm.end()
407 fm.end()
408
408
409 rootfm.end()
409 rootfm.end()
410
410
411 @command('archive',
411 @command('archive',
412 [('', 'no-decode', None, _('do not pass files through decoders')),
412 [('', 'no-decode', None, _('do not pass files through decoders')),
413 ('p', 'prefix', '', _('directory prefix for files in archive'),
413 ('p', 'prefix', '', _('directory prefix for files in archive'),
414 _('PREFIX')),
414 _('PREFIX')),
415 ('r', 'rev', '', _('revision to distribute'), _('REV')),
415 ('r', 'rev', '', _('revision to distribute'), _('REV')),
416 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
416 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
417 ] + subrepoopts + walkopts,
417 ] + subrepoopts + walkopts,
418 _('[OPTION]... DEST'))
418 _('[OPTION]... DEST'))
419 def archive(ui, repo, dest, **opts):
419 def archive(ui, repo, dest, **opts):
420 '''create an unversioned archive of a repository revision
420 '''create an unversioned archive of a repository revision
421
421
422 By default, the revision used is the parent of the working
422 By default, the revision used is the parent of the working
423 directory; use -r/--rev to specify a different revision.
423 directory; use -r/--rev to specify a different revision.
424
424
425 The archive type is automatically detected based on file
425 The archive type is automatically detected based on file
426 extension (to override, use -t/--type).
426 extension (to override, use -t/--type).
427
427
428 .. container:: verbose
428 .. container:: verbose
429
429
430 Examples:
430 Examples:
431
431
432 - create a zip file containing the 1.0 release::
432 - create a zip file containing the 1.0 release::
433
433
434 hg archive -r 1.0 project-1.0.zip
434 hg archive -r 1.0 project-1.0.zip
435
435
436 - create a tarball excluding .hg files::
436 - create a tarball excluding .hg files::
437
437
438 hg archive project.tar.gz -X ".hg*"
438 hg archive project.tar.gz -X ".hg*"
439
439
440 Valid types are:
440 Valid types are:
441
441
442 :``files``: a directory full of files (default)
442 :``files``: a directory full of files (default)
443 :``tar``: tar archive, uncompressed
443 :``tar``: tar archive, uncompressed
444 :``tbz2``: tar archive, compressed using bzip2
444 :``tbz2``: tar archive, compressed using bzip2
445 :``tgz``: tar archive, compressed using gzip
445 :``tgz``: tar archive, compressed using gzip
446 :``uzip``: zip archive, uncompressed
446 :``uzip``: zip archive, uncompressed
447 :``zip``: zip archive, compressed using deflate
447 :``zip``: zip archive, compressed using deflate
448
448
449 The exact name of the destination archive or directory is given
449 The exact name of the destination archive or directory is given
450 using a format string; see :hg:`help export` for details.
450 using a format string; see :hg:`help export` for details.
451
451
452 Each member added to an archive file has a directory prefix
452 Each member added to an archive file has a directory prefix
453 prepended. Use -p/--prefix to specify a format string for the
453 prepended. Use -p/--prefix to specify a format string for the
454 prefix. The default is the basename of the archive, with suffixes
454 prefix. The default is the basename of the archive, with suffixes
455 removed.
455 removed.
456
456
457 Returns 0 on success.
457 Returns 0 on success.
458 '''
458 '''
459
459
460 opts = pycompat.byteskwargs(opts)
460 opts = pycompat.byteskwargs(opts)
461 ctx = scmutil.revsingle(repo, opts.get('rev'))
461 ctx = scmutil.revsingle(repo, opts.get('rev'))
462 if not ctx:
462 if not ctx:
463 raise error.Abort(_('no working directory: please specify a revision'))
463 raise error.Abort(_('no working directory: please specify a revision'))
464 node = ctx.node()
464 node = ctx.node()
465 dest = cmdutil.makefilename(repo, dest, node)
465 dest = cmdutil.makefilename(repo, dest, node)
466 if os.path.realpath(dest) == repo.root:
466 if os.path.realpath(dest) == repo.root:
467 raise error.Abort(_('repository root cannot be destination'))
467 raise error.Abort(_('repository root cannot be destination'))
468
468
469 kind = opts.get('type') or archival.guesskind(dest) or 'files'
469 kind = opts.get('type') or archival.guesskind(dest) or 'files'
470 prefix = opts.get('prefix')
470 prefix = opts.get('prefix')
471
471
472 if dest == '-':
472 if dest == '-':
473 if kind == 'files':
473 if kind == 'files':
474 raise error.Abort(_('cannot archive plain files to stdout'))
474 raise error.Abort(_('cannot archive plain files to stdout'))
475 dest = cmdutil.makefileobj(repo, dest)
475 dest = cmdutil.makefileobj(repo, dest)
476 if not prefix:
476 if not prefix:
477 prefix = os.path.basename(repo.root) + '-%h'
477 prefix = os.path.basename(repo.root) + '-%h'
478
478
479 prefix = cmdutil.makefilename(repo, prefix, node)
479 prefix = cmdutil.makefilename(repo, prefix, node)
480 matchfn = scmutil.match(ctx, [], opts)
480 matchfn = scmutil.match(ctx, [], opts)
481 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
481 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
482 matchfn, prefix, subrepos=opts.get('subrepos'))
482 matchfn, prefix, subrepos=opts.get('subrepos'))
483
483
484 @command('backout',
484 @command('backout',
485 [('', 'merge', None, _('merge with old dirstate parent after backout')),
485 [('', 'merge', None, _('merge with old dirstate parent after backout')),
486 ('', 'commit', None,
486 ('', 'commit', None,
487 _('commit if no conflicts were encountered (DEPRECATED)')),
487 _('commit if no conflicts were encountered (DEPRECATED)')),
488 ('', 'no-commit', None, _('do not commit')),
488 ('', 'no-commit', None, _('do not commit')),
489 ('', 'parent', '',
489 ('', 'parent', '',
490 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
490 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
491 ('r', 'rev', '', _('revision to backout'), _('REV')),
491 ('r', 'rev', '', _('revision to backout'), _('REV')),
492 ('e', 'edit', False, _('invoke editor on commit messages')),
492 ('e', 'edit', False, _('invoke editor on commit messages')),
493 ] + mergetoolopts + walkopts + commitopts + commitopts2,
493 ] + mergetoolopts + walkopts + commitopts + commitopts2,
494 _('[OPTION]... [-r] REV'))
494 _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

    By default, the pending changeset will have one parent,
    maintaining a linear history. With --merge, the pending
    changeset will instead have two parents: the old parent of the
    working directory and a new child of REV that simply undoes REV.

    Before version 1.7, the behavior without --merge was equivalent
    to specifying --merge followed by :hg:`update --clean .` to
    cancel the merge and leave the child of REV as a head to be
    merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # Hold both the working-directory lock and the store lock for the
    # whole operation; _dobackout does the actual work.  The context
    # managers release the locks in reverse acquisition order, exactly
    # as the previous explicit release(lock, wlock) did.
    with repo.wlock(), repo.lock():
        return _dobackout(ui, repo, node, rev, **opts)
554
554
def _dobackout(ui, repo, node=None, rev=None, **opts):
    """Implementation of :hg:`backout`; the caller holds wlock and lock.

    Returns 0 on success, 1 if nothing changed or there are unresolved
    files (mirroring the return values documented on backout()).
    """
    opts = pycompat.byteskwargs(opts)
    # mutually exclusive option combinations are rejected up front
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    # 'node' is the positional argument form of -r/--rev
    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    # only ancestors of the working directory can be backed out
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires the user to choose, via --parent,
        # which of the two parents to revert to
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        # merge the reverse change into the working directory, then put
        # the original parents back; the dirstateguard rolls the dirstate
        # back if anything fails before close()
        dsguard = dirstateguard.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                # stats[3] counts unresolved files
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            # always clear the temporary forcemerge setting
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        # working directory is at the backed-out node (or --merge given):
        # update to it cleanly and revert files to the chosen parent
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        # commit callback used by cmdutil.commit below
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform,
                                    **pycompat.strkwargs(opts))
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        # human-readable "rev:shorthash" form of a node
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        # with --merge: go back to the old working-directory parent and
        # merge in the freshly committed backout changeset
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0
661
661
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

    See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility: 'hg bisect good|bad|reset|init' spelling
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra:
        raise error.Abort(_('incompatible arguments'))

    # map of option name -> whether it was supplied; used to reject
    # conflicting combinations with a deterministic error message
    incompatibles = {
        '--bad': bad,
        '--command': bool(command),
        '--extend': extend,
        '--good': good,
        '--reset': reset,
        '--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        # sorted() keeps the reported pair stable regardless of dict order
        raise error.Abort(_('%s and %s are incompatible') %
                          tuple(sorted(enabled)[0:2]))

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
        else:
            # no revision given: mark the working directory parent
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state['good'] and state['bad']):
            # cannot bisect until at least one good and one bad are known
            return

    def mayupdate(repo, node, show_stats=True):
        """common used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = cmdutil.show_changeset(ui, repo, {})

    if command:
        # automatic bisection: run 'command' on each candidate and map its
        # exit status to a good/bad/skip transition
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)},
                                   blockedtag='bisect_check')
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            # persist the last tested revision even if the loop aborted
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)
893
893
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    opts = pycompat.byteskwargs(opts)
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    # reject incompatible option combinations before taking any locks
    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise error.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise error.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise error.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # any mutation of the bookmark store happens under both locks and
        # inside a transaction so it can be rolled back as a unit
        with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
            if delete:
                bookmarks.delete(repo, tr, names)
            elif rename:
                # exactly one NAME is required as the new bookmark name
                if not names:
                    raise error.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_("only one new bookmark name allowed"))
                bookmarks.rename(repo, tr, rename, names[0], force, inactive)
            elif names:
                bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
            elif inactive:
                # bare --inactive: deactivate the active bookmark, if any
                if len(repo._bookmarks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
    else: # show bookmarks
        bookmarks.printbookmarks(ui, repo, **opts)
984
984
@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if label:
        label = label.strip()

    # Neither --clean nor a new name: just report the current branch.
    if not label and not opts.get('clean'):
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if opts.get('clean'):
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            # Without --force, refuse to shadow an existing branch unless
            # the working directory is already on a parent of that branch.
            shadows = not opts.get('force') and label in repo.branchmap()
            if shadows and all(p.branch() != label
                               for p in repo[None].parents()):
                raise error.Abort(_('a branch of the same name already'
                                    ' exists'),
                                  # i18n: "it" refers to an existing branch
                                  hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            haveothers = any(name != "default" and not isclosed
                             for name, heads, tip, isclosed
                             in repo.branchmap().iterbranches())
            if not haveothers:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
1049
1049
@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
     ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-c]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('branches')
    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    repoheads = set(repo.heads())
    entries = []
    for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
        # A branch is "active" when at least one of its heads is also a
        # repository head (i.e. it has not been merged away) and it is open.
        isactive = not isclosed and bool(set(heads) & repoheads)
        entries.append((tag, repo[tip], isactive, not isclosed))
    # Sort: active first, then by descending tip revision, name, openness.
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]),
                 reverse=True)

    for tag, ctx, isactive, isopen in entries:
        if active and not isactive:
            continue
        if isactive:
            label = 'branches.active'
            notice = ''
        elif not isopen:
            if not closed:
                continue
            label = 'branches.closed'
            notice = _(' (closed)')
        else:
            label = 'branches.inactive'
            notice = _(' (inactive)')
        current = (tag == repo.dirstate.branch())
        if current:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', tag, label=label)
        rev = ctx.rev()
        # Pad so the "rev:node" column lines up across rows.
        padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
        fmt = ' ' * padsize + ' %d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
                     label='log.changeset changeset.%s' % ctx.phasestr())
        fm.context(ctx=ctx)
        fm.data(active=isactive, closed=not isopen, current=current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1112
1112
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a bundle file

    Generate a bundle file containing data to be added to a repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified.

    You can change bundle format with the -t/--type option. See
    :hg:`help bundlespec` for documentation on this format. By default,
    the most appropriate format is used and compression defaults to
    bzip2.

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    opts = pycompat.byteskwargs(opts)
    revs = None
    if 'rev' in opts:
        revspecs = opts['rev']
        revs = scmutil.revrange(repo, revspecs)
        # Revisions were requested but none resolved: nothing to bundle.
        if revspecs and not revs:
            raise error.Abort(_('no commits to bundle'))

    bundletype = opts.get('type', 'bzip2').lower()
    try:
        bcompression, cgversion, params = exchange.parsebundlespec(
            repo, bundletype, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(str(e),
                          hint=_("see 'hg help bundlespec' for supported "
                                 "values for --type"))

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == 's1':
        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
                          hint=_("use 'hg debugcreatestreamclonebundle'"))

    if opts.get('all'):
        if dest:
            raise error.Abort(_("--all is incompatible with specifying "
                                "a destination"))
        if opts.get('base'):
            ui.warn(_("ignoring --base because --all was specified\n"))
        # --all is equivalent to basing the bundle on the null revision.
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(_("repository does not support bundle version %s") %
                          cgversion)

    if base:
        if dest:
            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
        # With an explicit base there is no remote to negotiate with:
        # compute the outgoing set locally.
        common = [repo.lookup(rev) for rev in base]
        heads = map(repo.lookup, revs) if revs else None
        outgoing = discovery.outgoing(repo, common, heads)
    else:
        # No base given: discover what the (default) destination is missing.
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = map(repo.lookup, revs) if revs else revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)

    if not outgoing.missing:
        scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
        return 1

    # Map the changegroup version to an on-disk bundle container format.
    if cgversion == '01':  # bundle1
        if bcompression is None:
            bcompression = 'UN'
        bversion = 'HG10' + bcompression
        bcompression = None
    elif cgversion in ('02', '03'):
        bversion = 'HG20'
    else:
        raise error.ProgrammingError(
            'bundle: unexpected changegroup version %s' % cgversion)

    # TODO compression options should be derived from bundlespec parsing.
    # This is a temporary hack to allow adjusting bundle compression
    # level without a) formalizing the bundlespec changes to declare it
    # b) introducing a command flag.
    compopts = {}
    complevel = ui.configint('experimental', 'bundlecomplevel')
    if complevel is not None:
        compopts['level'] = complevel

    # Optional bundle parts, gated on experimental config knobs.
    contentopts = {'cg.version': cgversion}
    if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker', False):
        contentopts['obsolescence'] = True
    if repo.ui.configbool('experimental', 'bundle-phases', False):
        contentopts['phases'] = True
    bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
                           contentopts, compression=bcompression,
                           compopts=compopts)
1236
1238
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts + formatteropts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    # '-' (stdout) is treated the same as no output template at all.
    fntemplate = opts.pop('output', '')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = ''

    if not fntemplate:
        # Writing to the terminal: page the output and honor -T/--template.
        ui.pager('cat')
        fm = ui.formatter('cat', opts)
    else:
        fm = formatter.nullformatter(ui, 'cat')
    with fm:
        return cmdutil.cat(ui, repo, ctx, matcher, fm, fntemplate, '', **opts)
1279
1281
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
     _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL. When this is done,
      hooks operating on incoming changesets and changegroups may fire twice,
      once for the bundle fetched from the URL and another for any additional
      data not fetched from this URL. In addition, if an error occurs, the
      repository may be rolled back to a partial clone. This behavior may
      change in future releases. See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('noupdate') and opts.get('updaterev'):
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=opts.get('updaterev') or not opts.get('noupdate'),
                      branch=opts.get('branch'),
                      shareopts=opts.get('shareopts'))

    # A None result from hg.clone signals failure: exit status 1.
    return result is None
1412
1414
@command('^commit|ci',
    [('A', 'addremove', None,
      _('mark new/missing files as added/removed before committing')),
     ('', 'close-branch', None,
      _('mark a branch head as closed')),
     ('', 'amend', None, _('amend the parent of the working directory')),
     ('s', 'secret', None, _('use the secret phase for committing')),
     ('e', 'edit', None, _('invoke editor on commit messages')),
     ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

        Examples:

        - commit all files ending in .py::

            hg commit --include "set:**.py"

        - commit all non-binary files::

            hg commit --exclude "set:binary()"

        - amend the current commit and set the date to now::

            hg commit --amend --date now
    """
    # Hold both the working-directory lock and the store lock for the
    # whole commit; the locks are context managers, so they are released
    # in reverse acquisition order (lock, then wlock) even on error.
    with repo.wlock(), repo.lock():
        return _docommit(ui, repo, *pats, **opts)
1488
1490
def _docommit(ui, repo, *pats, **opts):
    """Implementation of :hg:`commit`; the caller holds the repo locks.

    Returns 1 when nothing changed, otherwise whatever the underlying
    commit helper returns (None on success).
    """
    if opts.get(r'interactive'):
        # Interactive mode delegates to dorecord, which re-enters commit()
        # with the hunks the user selected.
        opts.pop(r'interactive')
        ret = cmdutil.dorecord(ui, repo, commit, None, False,
                               cmdutil.recordfilter, *pats,
                               **opts)
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    opts = pycompat.byteskwargs(opts)
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            # When amending, the commit being replaced must sit on a head
            # of this branch through one of its parents.
            wparents = repo[None].parents()
            if (wparents[0].p1().branch() != branch and
                    wparents[0].p2().branch() != branch):
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        if not old.mutable():
            raise error.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise error.Abort(_('cannot amend changeset with children'))

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
    else:
        def commitfunc(ui, repo, message, match, opts):
            overrides = {}
            if opts.get('secret'):
                overrides[('phases', 'new-commit')] = 'secret'

            # Apply the overrides on both the base ui and the command ui
            # so the phase setting is visible everywhere during commit.
            baseui = repo.baseui
            with baseui.configoverride(overrides, 'commit'), \
                 ui.configoverride(overrides, 'commit'):
                editform = cmdutil.mergeeditform(repo[None],
                                                 'commit.normal')
                editor = cmdutil.getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts))
                return repo.commit(message,
                                   opts.get('user'),
                                   opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            stat = cmdutil.postcommitstatus(repo, pats, opts)
            if stat[3]:
                # stat[3] is the list of missing (deleted) files.
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1588
1590
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))] + formatteropts,
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        # Pick the config file(s) corresponding to the requested scope.
        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        for f in paths:
            if os.path.exists(f):
                break
        else:
            # No existing config file: seed the first candidate path with
            # a commented sample so the user edits something meaningful.
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            # Use a context manager so the handle is closed (and the data
            # flushed) even if the write raises; the original open/close
            # pair could leak the descriptor on error.
            with open(f, "w") as fp:
                fp.write(samplehgrc)

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # Arguments are either bare section names or section.name items;
        # at most one item, and items can't be mixed with sections.
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise error.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('name value', '%s=%s\n', entryname, value)
                    matched = True
                elif v == entryname:
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('value', '%s\n', value)
                    fm.data(name=entryname)
                    matched = True
        else:
            fm.startitem()
            fm.condwrite(ui.debugflag, 'source', '%s: ', source)
            fm.write('name value', '%s=%s\n', entryname, value)
            matched = True
    fm.end()
    if matched:
        return 0
    return 1
1701
1703
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
     ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    # Hold the working-directory lock for the duration of the copy;
    # the False argument presumably disables waiting for the lock —
    # NOTE(review): confirm against repo.wlock's signature.
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts)
1726
1728
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # ``cmd`` and ``args`` are accepted for CLI compatibility but unused:
    # the whole command table is always listed.
    for name, entry in sorted(table.iteritems()):
        # Drop aliases ('commit|ci' -> 'commit') and the '^' marker.
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(opt[1] for opt in entry[1])
        ui.write('%s: %s\n' % (name, optnames))
1734
1736
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # Complete option names: the global options, plus the named
        # command's own option table when a command was given.
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            tables.append(entry[1])
        flags = []
        for opttable in tables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # Otherwise complete command names matching the given prefix.
    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # Verbose mode shows every alias of each matching command.
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1762
1764
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353        # compare against first parent
          hg diff -r 9353^:9353  # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways to pick endpoints.
    if revs and change:
        raise error.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        # --change REV diffs REV against its first parent.
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    matcher = scmutil.match(repo[node2], pats, opts)
    ui.pager('diff')
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, matcher,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
1851
1853
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # positional REV arguments and -r/--rev values are equivalent; merge them
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    ui.pager('export')
    cmdutil.export(repo, revs, fntemplate=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    # None (not '.') so that a bare `hg files` lists the working directory
    # including uncommitted adds
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # NUL separators support `hg files -0 | xargs -0 ...`
    end = '\0' if opts.get('print0') else '\n'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('no files specified'))

    # match against the working directory (repo[None]), not a committed rev
    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    # exit status 1 when any explicitly-named file could not be forgotten
    return 1 if rejected else 0
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     # NOTE: -U is a boolean flag; the stray _('DATE') value metavar it
     # used to carry was dead (flags take no value) and has been dropped.
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    # take the working-directory lock for the whole operation; the real
    # work lives in _dograft
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
def _dograft(ui, repo, *revs, **opts):
    """Implementation of :hg:`graft`; caller must hold the wlock.

    Returns 0 on success, -1 when every requested revision was skipped,
    and raises error.Abort on conflicts or invalid usage.
    """
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0
2295 @command('grep',
2297 @command('grep',
2296 [('0', 'print0', None, _('end fields with NUL')),
2298 [('0', 'print0', None, _('end fields with NUL')),
2297 ('', 'all', None, _('print all revisions that match')),
2299 ('', 'all', None, _('print all revisions that match')),
2298 ('a', 'text', None, _('treat all files as text')),
2300 ('a', 'text', None, _('treat all files as text')),
2299 ('f', 'follow', None,
2301 ('f', 'follow', None,
2300 _('follow changeset history,'
2302 _('follow changeset history,'
2301 ' or file history across copies and renames')),
2303 ' or file history across copies and renames')),
2302 ('i', 'ignore-case', None, _('ignore case when matching')),
2304 ('i', 'ignore-case', None, _('ignore case when matching')),
2303 ('l', 'files-with-matches', None,
2305 ('l', 'files-with-matches', None,
2304 _('print only filenames and revisions that match')),
2306 _('print only filenames and revisions that match')),
2305 ('n', 'line-number', None, _('print matching line numbers')),
2307 ('n', 'line-number', None, _('print matching line numbers')),
2306 ('r', 'rev', [],
2308 ('r', 'rev', [],
2307 _('only search files changed within revision range'), _('REV')),
2309 _('only search files changed within revision range'), _('REV')),
2308 ('u', 'user', None, _('list the author (long with -v)')),
2310 ('u', 'user', None, _('list the author (long with -v)')),
2309 ('d', 'date', None, _('list the date (short with -q)')),
2311 ('d', 'date', None, _('list the date (short with -q)')),
2310 ] + formatteropts + walkopts,
2312 ] + formatteropts + walkopts,
2311 _('[OPTION]... PATTERN [FILE]...'),
2313 _('[OPTION]... PATTERN [FILE]...'),
2312 inferrepo=True)
2314 inferrepo=True)
2313 def grep(ui, repo, pattern, *pats, **opts):
2315 def grep(ui, repo, pattern, *pats, **opts):
2314 """search revision history for a pattern in specified files
2316 """search revision history for a pattern in specified files
2315
2317
2316 Search revision history for a regular expression in the specified
2318 Search revision history for a regular expression in the specified
2317 files or the entire project.
2319 files or the entire project.
2318
2320
2319 By default, grep prints the most recent revision number for each
2321 By default, grep prints the most recent revision number for each
2320 file in which it finds a match. To get it to print every revision
2322 file in which it finds a match. To get it to print every revision
2321 that contains a change in match status ("-" for a match that becomes
2323 that contains a change in match status ("-" for a match that becomes
2322 a non-match, or "+" for a non-match that becomes a match), use the
2324 a non-match, or "+" for a non-match that becomes a match), use the
2323 --all flag.
2325 --all flag.
2324
2326
2325 PATTERN can be any Python (roughly Perl-compatible) regular
2327 PATTERN can be any Python (roughly Perl-compatible) regular
2326 expression.
2328 expression.
2327
2329
2328 If no FILEs are specified (and -f/--follow isn't set), all files in
2330 If no FILEs are specified (and -f/--follow isn't set), all files in
2329 the repository are searched, including those that don't exist in the
2331 the repository are searched, including those that don't exist in the
2330 current branch or have been deleted in a prior changeset.
2332 current branch or have been deleted in a prior changeset.
2331
2333
2332 Returns 0 if a match is found, 1 otherwise.
2334 Returns 0 if a match is found, 1 otherwise.
2333 """
2335 """
2334 opts = pycompat.byteskwargs(opts)
2336 opts = pycompat.byteskwargs(opts)
2335 reflags = re.M
2337 reflags = re.M
2336 if opts.get('ignore_case'):
2338 if opts.get('ignore_case'):
2337 reflags |= re.I
2339 reflags |= re.I
2338 try:
2340 try:
2339 regexp = util.re.compile(pattern, reflags)
2341 regexp = util.re.compile(pattern, reflags)
2340 except re.error as inst:
2342 except re.error as inst:
2341 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2343 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2342 return 1
2344 return 1
2343 sep, eol = ':', '\n'
2345 sep, eol = ':', '\n'
2344 if opts.get('print0'):
2346 if opts.get('print0'):
2345 sep = eol = '\0'
2347 sep = eol = '\0'
2346
2348
2347 getfile = util.lrucachefunc(repo.file)
2349 getfile = util.lrucachefunc(repo.file)
2348
2350
2349 def matchlines(body):
2351 def matchlines(body):
2350 begin = 0
2352 begin = 0
2351 linenum = 0
2353 linenum = 0
2352 while begin < len(body):
2354 while begin < len(body):
2353 match = regexp.search(body, begin)
2355 match = regexp.search(body, begin)
2354 if not match:
2356 if not match:
2355 break
2357 break
2356 mstart, mend = match.span()
2358 mstart, mend = match.span()
2357 linenum += body.count('\n', begin, mstart) + 1
2359 linenum += body.count('\n', begin, mstart) + 1
2358 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2360 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2359 begin = body.find('\n', mend) + 1 or len(body) + 1
2361 begin = body.find('\n', mend) + 1 or len(body) + 1
2360 lend = begin - 1
2362 lend = begin - 1
2361 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2363 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2362
2364
2363 class linestate(object):
2365 class linestate(object):
2364 def __init__(self, line, linenum, colstart, colend):
2366 def __init__(self, line, linenum, colstart, colend):
2365 self.line = line
2367 self.line = line
2366 self.linenum = linenum
2368 self.linenum = linenum
2367 self.colstart = colstart
2369 self.colstart = colstart
2368 self.colend = colend
2370 self.colend = colend
2369
2371
2370 def __hash__(self):
2372 def __hash__(self):
2371 return hash((self.linenum, self.line))
2373 return hash((self.linenum, self.line))
2372
2374
2373 def __eq__(self, other):
2375 def __eq__(self, other):
2374 return self.line == other.line
2376 return self.line == other.line
2375
2377
2376 def findpos(self):
2378 def findpos(self):
2377 """Iterate all (start, end) indices of matches"""
2379 """Iterate all (start, end) indices of matches"""
2378 yield self.colstart, self.colend
2380 yield self.colstart, self.colend
2379 p = self.colend
2381 p = self.colend
2380 while p < len(self.line):
2382 while p < len(self.line):
2381 m = regexp.search(self.line, p)
2383 m = regexp.search(self.line, p)
2382 if not m:
2384 if not m:
2383 break
2385 break
2384 yield m.span()
2386 yield m.span()
2385 p = m.end()
2387 p = m.end()
2386
2388
2387 matches = {}
2389 matches = {}
2388 copies = {}
2390 copies = {}
2389 def grepbody(fn, rev, body):
2391 def grepbody(fn, rev, body):
2390 matches[rev].setdefault(fn, [])
2392 matches[rev].setdefault(fn, [])
2391 m = matches[rev][fn]
2393 m = matches[rev][fn]
2392 for lnum, cstart, cend, line in matchlines(body):
2394 for lnum, cstart, cend, line in matchlines(body):
2393 s = linestate(line, lnum, cstart, cend)
2395 s = linestate(line, lnum, cstart, cend)
2394 m.append(s)
2396 m.append(s)
2395
2397
2396 def difflinestates(a, b):
2398 def difflinestates(a, b):
2397 sm = difflib.SequenceMatcher(None, a, b)
2399 sm = difflib.SequenceMatcher(None, a, b)
2398 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2400 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2399 if tag == 'insert':
2401 if tag == 'insert':
2400 for i in xrange(blo, bhi):
2402 for i in xrange(blo, bhi):
2401 yield ('+', b[i])
2403 yield ('+', b[i])
2402 elif tag == 'delete':
2404 elif tag == 'delete':
2403 for i in xrange(alo, ahi):
2405 for i in xrange(alo, ahi):
2404 yield ('-', a[i])
2406 yield ('-', a[i])
2405 elif tag == 'replace':
2407 elif tag == 'replace':
2406 for i in xrange(alo, ahi):
2408 for i in xrange(alo, ahi):
2407 yield ('-', a[i])
2409 yield ('-', a[i])
2408 for i in xrange(blo, bhi):
2410 for i in xrange(blo, bhi):
2409 yield ('+', b[i])
2411 yield ('+', b[i])
2410
2412
2411 def display(fm, fn, ctx, pstates, states):
2413 def display(fm, fn, ctx, pstates, states):
2412 rev = ctx.rev()
2414 rev = ctx.rev()
2413 if fm.isplain():
2415 if fm.isplain():
2414 formatuser = ui.shortuser
2416 formatuser = ui.shortuser
2415 else:
2417 else:
2416 formatuser = str
2418 formatuser = str
2417 if ui.quiet:
2419 if ui.quiet:
2418 datefmt = '%Y-%m-%d'
2420 datefmt = '%Y-%m-%d'
2419 else:
2421 else:
2420 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2422 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2421 found = False
2423 found = False
2422 @util.cachefunc
2424 @util.cachefunc
2423 def binary():
2425 def binary():
2424 flog = getfile(fn)
2426 flog = getfile(fn)
2425 return util.binary(flog.read(ctx.filenode(fn)))
2427 return util.binary(flog.read(ctx.filenode(fn)))
2426
2428
2427 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2429 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2428 if opts.get('all'):
2430 if opts.get('all'):
2429 iter = difflinestates(pstates, states)
2431 iter = difflinestates(pstates, states)
2430 else:
2432 else:
2431 iter = [('', l) for l in states]
2433 iter = [('', l) for l in states]
2432 for change, l in iter:
2434 for change, l in iter:
2433 fm.startitem()
2435 fm.startitem()
2434 fm.data(node=fm.hexfunc(ctx.node()))
2436 fm.data(node=fm.hexfunc(ctx.node()))
2435 cols = [
2437 cols = [
2436 ('filename', fn, True),
2438 ('filename', fn, True),
2437 ('rev', rev, True),
2439 ('rev', rev, True),
2438 ('linenumber', l.linenum, opts.get('line_number')),
2440 ('linenumber', l.linenum, opts.get('line_number')),
2439 ]
2441 ]
2440 if opts.get('all'):
2442 if opts.get('all'):
2441 cols.append(('change', change, True))
2443 cols.append(('change', change, True))
2442 cols.extend([
2444 cols.extend([
2443 ('user', formatuser(ctx.user()), opts.get('user')),
2445 ('user', formatuser(ctx.user()), opts.get('user')),
2444 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2446 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2445 ])
2447 ])
2446 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2448 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2447 for name, data, cond in cols:
2449 for name, data, cond in cols:
2448 field = fieldnamemap.get(name, name)
2450 field = fieldnamemap.get(name, name)
2449 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2451 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2450 if cond and name != lastcol:
2452 if cond and name != lastcol:
2451 fm.plain(sep, label='grep.sep')
2453 fm.plain(sep, label='grep.sep')
2452 if not opts.get('files_with_matches'):
2454 if not opts.get('files_with_matches'):
2453 fm.plain(sep, label='grep.sep')
2455 fm.plain(sep, label='grep.sep')
2454 if not opts.get('text') and binary():
2456 if not opts.get('text') and binary():
2455 fm.plain(_(" Binary file matches"))
2457 fm.plain(_(" Binary file matches"))
2456 else:
2458 else:
2457 displaymatches(fm.nested('texts'), l)
2459 displaymatches(fm.nested('texts'), l)
2458 fm.plain(eol)
2460 fm.plain(eol)
2459 found = True
2461 found = True
2460 if opts.get('files_with_matches'):
2462 if opts.get('files_with_matches'):
2461 break
2463 break
2462 return found
2464 return found
2463
2465
2464 def displaymatches(fm, l):
2466 def displaymatches(fm, l):
2465 p = 0
2467 p = 0
2466 for s, e in l.findpos():
2468 for s, e in l.findpos():
2467 if p < s:
2469 if p < s:
2468 fm.startitem()
2470 fm.startitem()
2469 fm.write('text', '%s', l.line[p:s])
2471 fm.write('text', '%s', l.line[p:s])
2470 fm.data(matched=False)
2472 fm.data(matched=False)
2471 fm.startitem()
2473 fm.startitem()
2472 fm.write('text', '%s', l.line[s:e], label='grep.match')
2474 fm.write('text', '%s', l.line[s:e], label='grep.match')
2473 fm.data(matched=True)
2475 fm.data(matched=True)
2474 p = e
2476 p = e
2475 if p < len(l.line):
2477 if p < len(l.line):
2476 fm.startitem()
2478 fm.startitem()
2477 fm.write('text', '%s', l.line[p:])
2479 fm.write('text', '%s', l.line[p:])
2478 fm.data(matched=False)
2480 fm.data(matched=False)
2479 fm.end()
2481 fm.end()
2480
2482
2481 skip = {}
2483 skip = {}
2482 revfiles = {}
2484 revfiles = {}
2483 matchfn = scmutil.match(repo[None], pats, opts)
2485 matchfn = scmutil.match(repo[None], pats, opts)
2484 found = False
2486 found = False
2485 follow = opts.get('follow')
2487 follow = opts.get('follow')
2486
2488
2487 def prep(ctx, fns):
2489 def prep(ctx, fns):
2488 rev = ctx.rev()
2490 rev = ctx.rev()
2489 pctx = ctx.p1()
2491 pctx = ctx.p1()
2490 parent = pctx.rev()
2492 parent = pctx.rev()
2491 matches.setdefault(rev, {})
2493 matches.setdefault(rev, {})
2492 matches.setdefault(parent, {})
2494 matches.setdefault(parent, {})
2493 files = revfiles.setdefault(rev, [])
2495 files = revfiles.setdefault(rev, [])
2494 for fn in fns:
2496 for fn in fns:
2495 flog = getfile(fn)
2497 flog = getfile(fn)
2496 try:
2498 try:
2497 fnode = ctx.filenode(fn)
2499 fnode = ctx.filenode(fn)
2498 except error.LookupError:
2500 except error.LookupError:
2499 continue
2501 continue
2500
2502
2501 copied = flog.renamed(fnode)
2503 copied = flog.renamed(fnode)
2502 copy = follow and copied and copied[0]
2504 copy = follow and copied and copied[0]
2503 if copy:
2505 if copy:
2504 copies.setdefault(rev, {})[fn] = copy
2506 copies.setdefault(rev, {})[fn] = copy
2505 if fn in skip:
2507 if fn in skip:
2506 if copy:
2508 if copy:
2507 skip[copy] = True
2509 skip[copy] = True
2508 continue
2510 continue
2509 files.append(fn)
2511 files.append(fn)
2510
2512
2511 if fn not in matches[rev]:
2513 if fn not in matches[rev]:
2512 grepbody(fn, rev, flog.read(fnode))
2514 grepbody(fn, rev, flog.read(fnode))
2513
2515
2514 pfn = copy or fn
2516 pfn = copy or fn
2515 if pfn not in matches[parent]:
2517 if pfn not in matches[parent]:
2516 try:
2518 try:
2517 fnode = pctx.filenode(pfn)
2519 fnode = pctx.filenode(pfn)
2518 grepbody(pfn, parent, flog.read(fnode))
2520 grepbody(pfn, parent, flog.read(fnode))
2519 except error.LookupError:
2521 except error.LookupError:
2520 pass
2522 pass
2521
2523
2522 ui.pager('grep')
2524 ui.pager('grep')
2523 fm = ui.formatter('grep', opts)
2525 fm = ui.formatter('grep', opts)
2524 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2526 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2525 rev = ctx.rev()
2527 rev = ctx.rev()
2526 parent = ctx.p1().rev()
2528 parent = ctx.p1().rev()
2527 for fn in sorted(revfiles.get(rev, [])):
2529 for fn in sorted(revfiles.get(rev, [])):
2528 states = matches[rev][fn]
2530 states = matches[rev][fn]
2529 copy = copies.get(rev, {}).get(fn)
2531 copy = copies.get(rev, {}).get(fn)
2530 if fn in skip:
2532 if fn in skip:
2531 if copy:
2533 if copy:
2532 skip[copy] = True
2534 skip[copy] = True
2533 continue
2535 continue
2534 pstates = matches.get(parent, {}).get(copy or fn, [])
2536 pstates = matches.get(parent, {}).get(copy or fn, [])
2535 if pstates or states:
2537 if pstates or states:
2536 r = display(fm, fn, ctx, pstates, states)
2538 r = display(fm, fn, ctx, pstates, states)
2537 found = found or r
2539 found = found or r
2538 if r and not opts.get('all'):
2540 if r and not opts.get('all'):
2539 skip[fn] = True
2541 skip[fn] = True
2540 if copy:
2542 if copy:
2541 skip[copy] = True
2543 skip[copy] = True
2542 del matches[rev]
2544 del matches[rev]
2543 del revfiles[rev]
2545 del revfiles[rev]
2544 fm.end()
2546 fm.end()
2545
2547
2546 return not found
2548 return not found
2547
2549
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    # Resolve the optional STARTREV to a node; the repo queries below
    # restrict themselves to descendants of this node.
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # Topological heads: ignore named-branch mechanics entirely.
        headctxs = [repo[n] for n in repo.heads(start)]
    else:
        nodes = []
        for branch in repo.branchmap():
            nodes += repo.branchheads(branch, start, opts.get('closed'))
        headctxs = [repo[n] for n in nodes]

    if branchrevs:
        # Keep only heads on the branches of the named revisions.
        wantedbranches = set(repo[br].branch() for br in branchrevs)
        headctxs = [c for c in headctxs if c.branch() in wantedbranches]

    if opts.get('active') and branchrevs:
        # DEPRECATED --active: additionally require a DAG head.
        dagheads = repo.heads(start)
        headctxs = [c for c in headctxs if c.node() in dagheads]

    if branchrevs:
        # Warn about requested branches that contributed no heads.
        haveheads = set(c.branch() for c in headctxs)
        if wantedbranches - haveheads:
            headless = ', '.join(b for b in wantedbranches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not headctxs:
        return 1

    ui.pager('heads')
    # Newest first.
    headctxs = sorted(headctxs, key=lambda c: c.rev(), reverse=True)
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in headctxs:
        displayer.show(ctx)
    displayer.close()
2620
2622
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands')),
     ('k', 'keyword', None, _('show topics matching keyword')),
     ('s', 'system', [], _('show help for specific platform(s)')),
     ],
    _('[-ecks] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    keep = opts.get(r'system') or []
    if not keep:
        # No platform requested explicitly: infer it from the runtime.
        plat = pycompat.sysplatform
        if plat.startswith('win'):
            keep.append('windows')
        elif plat == 'OpenVMS':
            keep.append('vms')
        elif plat == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(plat.lower())
    if ui.verbose:
        keep.append('verbose')

    # The commands module itself is handed to the help machinery so it
    # can enumerate the registered command table.
    commands = sys.modules[__name__]
    formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
    ui.pager('help')
    ui.write(formatted)
2658
2660
2659
2661
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))

    # Full hashes under --debug, short ones otherwise.
    hexfunc = hex if ui.debugflag else short
    # With no selector flags, emit the default id/branch/tags/bookmarks line.
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        # Remote-only peer: we can resolve ids and bookmarks, nothing more.
        if num or branch or tags:
            raise error.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # Remote bookmark names pointing at the resolved revision.
            if 'bookmarks' not in peer.listkeys('namespaces'):
                return []
            hexremoterev = hex(remoterev)
            return sorted(bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                          if bmr == hexremoterev)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        ctx = scmutil.revsingle(repo, rev, None)

        if ctx.rev() is None:
            # Working directory: report its parent(s), flagging local
            # modifications with a trailing '+'.
            ctx = repo[None]
            parents = ctx.parents()
            taglist = []
            for p in parents:
                taglist.extend(p.tags())

            changed = ""
            if default or id or num:
                if (any(repo.status())
                    or any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join(["%d" % p.rev() for p in parents]), changed))
        else:
            # A concrete revision was requested.
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(pycompat.bytestr(ctx.rev()))
            taglist = ctx.tags()

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(taglist)
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(taglist)

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
2810
2812
2811 @command('import|patch',
2813 @command('import|patch',
2812 [('p', 'strip', 1,
2814 [('p', 'strip', 1,
2813 _('directory strip option for patch. This has the same '
2815 _('directory strip option for patch. This has the same '
2814 'meaning as the corresponding patch option'), _('NUM')),
2816 'meaning as the corresponding patch option'), _('NUM')),
2815 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2817 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2816 ('e', 'edit', False, _('invoke editor on commit messages')),
2818 ('e', 'edit', False, _('invoke editor on commit messages')),
2817 ('f', 'force', None,
2819 ('f', 'force', None,
2818 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2820 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2819 ('', 'no-commit', None,
2821 ('', 'no-commit', None,
2820 _("don't commit, just update the working directory")),
2822 _("don't commit, just update the working directory")),
2821 ('', 'bypass', None,
2823 ('', 'bypass', None,
2822 _("apply patch without touching the working directory")),
2824 _("apply patch without touching the working directory")),
2823 ('', 'partial', None,
2825 ('', 'partial', None,
2824 _('commit even if some hunks fail')),
2826 _('commit even if some hunks fail')),
2825 ('', 'exact', None,
2827 ('', 'exact', None,
2826 _('abort if patch would apply lossily')),
2828 _('abort if patch would apply lossily')),
2827 ('', 'prefix', '',
2829 ('', 'prefix', '',
2828 _('apply patch to subdirectory'), _('DIR')),
2830 _('apply patch to subdirectory'), _('DIR')),
2829 ('', 'import-branch', None,
2831 ('', 'import-branch', None,
2830 _('use any branch information in patch (implied by --exact)'))] +
2832 _('use any branch information in patch (implied by --exact)'))] +
2831 commitopts + commitopts2 + similarityopts,
2833 commitopts + commitopts2 + similarityopts,
2832 _('[OPTION]... PATCH...'))
2834 _('[OPTION]... PATCH...'))
2833 def import_(ui, repo, patch1=None, *patches, **opts):
2835 def import_(ui, repo, patch1=None, *patches, **opts):
2834 """import an ordered set of patches
2836 """import an ordered set of patches
2835
2837
2836 Import a list of patches and commit them individually (unless
2838 Import a list of patches and commit them individually (unless
2837 --no-commit is specified).
2839 --no-commit is specified).
2838
2840
2839 To read a patch from standard input (stdin), use "-" as the patch
2841 To read a patch from standard input (stdin), use "-" as the patch
2840 name. If a URL is specified, the patch will be downloaded from
2842 name. If a URL is specified, the patch will be downloaded from
2841 there.
2843 there.
2842
2844
2843 Import first applies changes to the working directory (unless
2845 Import first applies changes to the working directory (unless
2844 --bypass is specified), import will abort if there are outstanding
2846 --bypass is specified), import will abort if there are outstanding
2845 changes.
2847 changes.
2846
2848
2847 Use --bypass to apply and commit patches directly to the
2849 Use --bypass to apply and commit patches directly to the
2848 repository, without affecting the working directory. Without
2850 repository, without affecting the working directory. Without
2849 --exact, patches will be applied on top of the working directory
2851 --exact, patches will be applied on top of the working directory
2850 parent revision.
2852 parent revision.
2851
2853
2852 You can import a patch straight from a mail message. Even patches
2854 You can import a patch straight from a mail message. Even patches
2853 as attachments work (to use the body part, it must have type
2855 as attachments work (to use the body part, it must have type
2854 text/plain or text/x-patch). From and Subject headers of email
2856 text/plain or text/x-patch). From and Subject headers of email
2855 message are used as default committer and commit message. All
2857 message are used as default committer and commit message. All
2856 text/plain body parts before first diff are added to the commit
2858 text/plain body parts before first diff are added to the commit
2857 message.
2859 message.
2858
2860
2859 If the imported patch was generated by :hg:`export`, user and
2861 If the imported patch was generated by :hg:`export`, user and
2860 description from patch override values from message headers and
2862 description from patch override values from message headers and
2861 body. Values given on command line with -m/--message and -u/--user
2863 body. Values given on command line with -m/--message and -u/--user
2862 override these.
2864 override these.
2863
2865
2864 If --exact is specified, import will set the working directory to
2866 If --exact is specified, import will set the working directory to
2865 the parent of each patch before applying it, and will abort if the
2867 the parent of each patch before applying it, and will abort if the
2866 resulting changeset has a different ID than the one recorded in
2868 resulting changeset has a different ID than the one recorded in
2867 the patch. This will guard against various ways that portable
2869 the patch. This will guard against various ways that portable
2868 patch formats and mail systems might fail to transfer Mercurial
2870 patch formats and mail systems might fail to transfer Mercurial
2869 data or metadata. See :hg:`bundle` for lossless transmission.
2871 data or metadata. See :hg:`bundle` for lossless transmission.
2870
2872
2871 Use --partial to ensure a changeset will be created from the patch
2873 Use --partial to ensure a changeset will be created from the patch
2872 even if some hunks fail to apply. Hunks that fail to apply will be
2874 even if some hunks fail to apply. Hunks that fail to apply will be
2873 written to a <target-file>.rej file. Conflicts can then be resolved
2875 written to a <target-file>.rej file. Conflicts can then be resolved
2874 by hand before :hg:`commit --amend` is run to update the created
2876 by hand before :hg:`commit --amend` is run to update the created
2875 changeset. This flag exists to let people import patches that
2877 changeset. This flag exists to let people import patches that
2876 partially apply without losing the associated metadata (author,
2878 partially apply without losing the associated metadata (author,
2877 date, description, ...).
2879 date, description, ...).
2878
2880
2879 .. note::
2881 .. note::
2880
2882
2881 When no hunks apply cleanly, :hg:`import --partial` will create
2883 When no hunks apply cleanly, :hg:`import --partial` will create
2882 an empty changeset, importing only the patch metadata.
2884 an empty changeset, importing only the patch metadata.
2883
2885
2884 With -s/--similarity, hg will attempt to discover renames and
2886 With -s/--similarity, hg will attempt to discover renames and
2885 copies in the patch in the same way as :hg:`addremove`.
2887 copies in the patch in the same way as :hg:`addremove`.
2886
2888
2887 It is possible to use external patch programs to perform the patch
2889 It is possible to use external patch programs to perform the patch
2888 by setting the ``ui.patch`` configuration option. For the default
2890 by setting the ``ui.patch`` configuration option. For the default
2889 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2891 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2890 See :hg:`help config` for more information about configuration
2892 See :hg:`help config` for more information about configuration
2891 files and how to use these options.
2893 files and how to use these options.
2892
2894
2893 See :hg:`help dates` for a list of formats valid for -d/--date.
2895 See :hg:`help dates` for a list of formats valid for -d/--date.
2894
2896
2895 .. container:: verbose
2897 .. container:: verbose
2896
2898
2897 Examples:
2899 Examples:
2898
2900
2899 - import a traditional patch from a website and detect renames::
2901 - import a traditional patch from a website and detect renames::
2900
2902
2901 hg import -s 80 http://example.com/bugfix.patch
2903 hg import -s 80 http://example.com/bugfix.patch
2902
2904
2903 - import a changeset from an hgweb server::
2905 - import a changeset from an hgweb server::
2904
2906
2905 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2907 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2906
2908
2907 - import all the patches in an Unix-style mbox::
2909 - import all the patches in an Unix-style mbox::
2908
2910
2909 hg import incoming-patches.mbox
2911 hg import incoming-patches.mbox
2910
2912
2911 - import patches from stdin::
2913 - import patches from stdin::
2912
2914
2913 hg import -
2915 hg import -
2914
2916
2915 - attempt to exactly restore an exported changeset (not always
2917 - attempt to exactly restore an exported changeset (not always
2916 possible)::
2918 possible)::
2917
2919
2918 hg import --exact proposed-fix.patch
2920 hg import --exact proposed-fix.patch
2919
2921
2920 - use an external tool to apply a patch which is too fuzzy for
2922 - use an external tool to apply a patch which is too fuzzy for
2921 the default internal tool.
2923 the default internal tool.
2922
2924
2923 hg import --config ui.patch="patch --merge" fuzzy.patch
2925 hg import --config ui.patch="patch --merge" fuzzy.patch
2924
2926
2925 - change the default fuzzing from 2 to a less strict 7
2927 - change the default fuzzing from 2 to a less strict 7
2926
2928
2927 hg import --config ui.fuzz=7 fuzz.patch
2929 hg import --config ui.fuzz=7 fuzz.patch
2928
2930
2929 Returns 0 on success, 1 on partial success (see --partial).
2931 Returns 0 on success, 1 on partial success (see --partial).
2930 """
2932 """
2931
2933
2932 opts = pycompat.byteskwargs(opts)
2934 opts = pycompat.byteskwargs(opts)
2933 if not patch1:
2935 if not patch1:
2934 raise error.Abort(_('need at least one patch to import'))
2936 raise error.Abort(_('need at least one patch to import'))
2935
2937
2936 patches = (patch1,) + patches
2938 patches = (patch1,) + patches
2937
2939
2938 date = opts.get('date')
2940 date = opts.get('date')
2939 if date:
2941 if date:
2940 opts['date'] = util.parsedate(date)
2942 opts['date'] = util.parsedate(date)
2941
2943
2942 exact = opts.get('exact')
2944 exact = opts.get('exact')
2943 update = not opts.get('bypass')
2945 update = not opts.get('bypass')
2944 if not update and opts.get('no_commit'):
2946 if not update and opts.get('no_commit'):
2945 raise error.Abort(_('cannot use --no-commit with --bypass'))
2947 raise error.Abort(_('cannot use --no-commit with --bypass'))
2946 try:
2948 try:
2947 sim = float(opts.get('similarity') or 0)
2949 sim = float(opts.get('similarity') or 0)
2948 except ValueError:
2950 except ValueError:
2949 raise error.Abort(_('similarity must be a number'))
2951 raise error.Abort(_('similarity must be a number'))
2950 if sim < 0 or sim > 100:
2952 if sim < 0 or sim > 100:
2951 raise error.Abort(_('similarity must be between 0 and 100'))
2953 raise error.Abort(_('similarity must be between 0 and 100'))
2952 if sim and not update:
2954 if sim and not update:
2953 raise error.Abort(_('cannot use --similarity with --bypass'))
2955 raise error.Abort(_('cannot use --similarity with --bypass'))
2954 if exact:
2956 if exact:
2955 if opts.get('edit'):
2957 if opts.get('edit'):
2956 raise error.Abort(_('cannot use --exact with --edit'))
2958 raise error.Abort(_('cannot use --exact with --edit'))
2957 if opts.get('prefix'):
2959 if opts.get('prefix'):
2958 raise error.Abort(_('cannot use --exact with --prefix'))
2960 raise error.Abort(_('cannot use --exact with --prefix'))
2959
2961
2960 base = opts["base"]
2962 base = opts["base"]
2961 wlock = dsguard = lock = tr = None
2963 wlock = dsguard = lock = tr = None
2962 msgs = []
2964 msgs = []
2963 ret = 0
2965 ret = 0
2964
2966
2965
2967
2966 try:
2968 try:
2967 wlock = repo.wlock()
2969 wlock = repo.wlock()
2968
2970
2969 if update:
2971 if update:
2970 cmdutil.checkunfinished(repo)
2972 cmdutil.checkunfinished(repo)
2971 if (exact or not opts.get('force')):
2973 if (exact or not opts.get('force')):
2972 cmdutil.bailifchanged(repo)
2974 cmdutil.bailifchanged(repo)
2973
2975
2974 if not opts.get('no_commit'):
2976 if not opts.get('no_commit'):
2975 lock = repo.lock()
2977 lock = repo.lock()
2976 tr = repo.transaction('import')
2978 tr = repo.transaction('import')
2977 else:
2979 else:
2978 dsguard = dirstateguard.dirstateguard(repo, 'import')
2980 dsguard = dirstateguard.dirstateguard(repo, 'import')
2979 parents = repo[None].parents()
2981 parents = repo[None].parents()
2980 for patchurl in patches:
2982 for patchurl in patches:
2981 if patchurl == '-':
2983 if patchurl == '-':
2982 ui.status(_('applying patch from stdin\n'))
2984 ui.status(_('applying patch from stdin\n'))
2983 patchfile = ui.fin
2985 patchfile = ui.fin
2984 patchurl = 'stdin' # for error message
2986 patchurl = 'stdin' # for error message
2985 else:
2987 else:
2986 patchurl = os.path.join(base, patchurl)
2988 patchurl = os.path.join(base, patchurl)
2987 ui.status(_('applying %s\n') % patchurl)
2989 ui.status(_('applying %s\n') % patchurl)
2988 patchfile = hg.openpath(ui, patchurl)
2990 patchfile = hg.openpath(ui, patchurl)
2989
2991
2990 haspatch = False
2992 haspatch = False
2991 for hunk in patch.split(patchfile):
2993 for hunk in patch.split(patchfile):
2992 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
2994 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
2993 parents, opts,
2995 parents, opts,
2994 msgs, hg.clean)
2996 msgs, hg.clean)
2995 if msg:
2997 if msg:
2996 haspatch = True
2998 haspatch = True
2997 ui.note(msg + '\n')
2999 ui.note(msg + '\n')
2998 if update or exact:
3000 if update or exact:
2999 parents = repo[None].parents()
3001 parents = repo[None].parents()
3000 else:
3002 else:
3001 parents = [repo[node]]
3003 parents = [repo[node]]
3002 if rej:
3004 if rej:
3003 ui.write_err(_("patch applied partially\n"))
3005 ui.write_err(_("patch applied partially\n"))
3004 ui.write_err(_("(fix the .rej files and run "
3006 ui.write_err(_("(fix the .rej files and run "
3005 "`hg commit --amend`)\n"))
3007 "`hg commit --amend`)\n"))
3006 ret = 1
3008 ret = 1
3007 break
3009 break
3008
3010
3009 if not haspatch:
3011 if not haspatch:
3010 raise error.Abort(_('%s: no diffs found') % patchurl)
3012 raise error.Abort(_('%s: no diffs found') % patchurl)
3011
3013
3012 if tr:
3014 if tr:
3013 tr.close()
3015 tr.close()
3014 if msgs:
3016 if msgs:
3015 repo.savecommitmessage('\n* * *\n'.join(msgs))
3017 repo.savecommitmessage('\n* * *\n'.join(msgs))
3016 if dsguard:
3018 if dsguard:
3017 dsguard.close()
3019 dsguard.close()
3018 return ret
3020 return ret
3019 finally:
3021 finally:
3020 if tr:
3022 if tr:
3021 tr.release()
3023 tr.release()
3022 release(lock, dsguard, wlock)
3024 release(lock, dsguard, wlock)
3023
3025
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # Command kwargs arrive as native strings; normalize to bytes keys.
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        # --graph: reject log flags that cannot be combined with graph
        # output, then render the incoming changesets as an ASCII DAG.
        cmdutil.checkunsupportedgraphflags([], opts)
        def display(other, chlist, displayer):
            # Callback invoked by hg._incoming with the remote peer and
            # the list of incoming changesets to draw.
            revdag = cmdutil.graphrevs(other, chlist, opts)
            cmdutil.displaygraph(ui, repo, revdag, displayer,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise error.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        # -B/--bookmarks: compare bookmarks instead of changesets.
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.pager('incoming')
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.incoming(ui, repo, other)

    # Stash the expanded source on the repo so subrepo operations can
    # reuse it; the finally block guarantees it is removed again.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3120
3122
3121
3123
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Normalize the native-string kwargs to bytes keys, resolve any
    # path alias in the destination, then let the peer machinery create
    # the repository (locally or over ssh).
    byteopts = pycompat.byteskwargs(opts)
    destpath = ui.expandpath(dest)
    hg.peer(ui, byteopts, destpath, create=True)
3139
3141
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # NUL-terminate entries for xargs -0 when --print0 is given.
    terminator = '\0' if opts.get('print0') else '\n'
    node = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ctx = repo[node]
    matcher = scmutil.match(ctx, pats, opts, default='relglob',
                            badfn=lambda x, y: False)

    # Exit status flips to 0 as soon as at least one file matches.
    exitcode = 1
    wantfull = opts.get('fullpath')
    ui.pager('locate')
    for path in ctx.matches(matcher):
        if wantfull:
            ui.write(repo.wjoin(path), terminator)
        else:
            ui.write(((pats and matcher.rel(path)) or path), terminator)
        exitcode = 0

    return exitcode
3189
3191
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    # --follow combined with explicit revisions: rewrite the revspec to
    # "the ancestors of those revisions, newest first" and drop the
    # follow flag so the generic revset path below handles it.
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        # --graph output is rendered by a dedicated code path.
        return cmdutil.graphlog(ui, repo, pats, opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    # getrenamed is only set up when -C/--copies was requested; endrev
    # bounds the rename lookup to the highest requested revision + 1.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        # Skip rename collection for the null revision (rev 0 is falsy
        # here, matching the original behavior).
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer is buffered; only count revisions that actually
        # produced output toward the -l/--limit cap.
        if displayer.flush(ctx):
            count += 1

    displayer.close()
3354
3356
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        # Walk the store directly: every file ever tracked has a
        # "data/<path>.i" revlog, so strip that envelope to recover
        # the file name.
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        names = []
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    names.append(fn[plen:-slen])
        ui.pager('manifest')
        for f in names:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    # positional NODE and -r REV are interchangeable here
    if not node:
        node = rev

    # flag -> display glyph / permission string (verbose output)
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager('manifest')
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
     ('r', 'rev', '', _('revision to merge'), _('REV')),
     ('P', 'preview', None,
      _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    if not node:
        # no revision given anywhere: let destutil pick the merge target
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for n in nodes:
            displayer.show(repo[n])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels)
    finally:
        # always clear the override, even when hg.merge raises
        ui.setconfig('ui', 'forcemerge', '', 'merge')
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'), _('REV')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('B', 'bookmarks', False, _('compare bookmarks')),
     ('b', 'branch', [], _('a specific branch you would like to push'),
      _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            # nothing outgoing: still run the hooks before returning
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    # expose the destination to subrepos for the duration of the call
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # collect the file's node in each parent that actually has it
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        # map each file node back to the changeset that introduced it
        pnodes = [repo.filectx(file_, fileid=fn).node() for fn in filenodes]
    else:
        pnodes = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in pnodes:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    if search:
        # exact-name lookup; an empty result means "not found" below
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    # only plain (human) output masks passwords; machine-readable
    # formats keep the raw URL
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = str
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    ret = 0
    if targetphase is None:
        # no phase flag given: just display the phase of each revision
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # compare phases before/after to report what actually changed
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes arguments below:

    :modheads: change of heads by pull/unbundle
    :optupdate: updating working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    # Nothing was added: no update and no hint message needed.
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # Re-raise with a friendlier message while preserving the hint.
            msg = _("not updating: %s") % str(inst)
            hint = inst.hint
            raise error.UpdateAbort(msg, hint=hint)
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
3817
3819
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmark used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

        # order below depends on implementation of
        # hg.addbranchrevs(). opts['bookmark'] is ignored,
        # because 'checkout' is determined without it.
        if opts.get('rev'):
            brev = opts['rev'][0]
        elif opts.get('branch'):
            brev = opts['branch'][0]
        else:
            brev = branches[0]
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret
3933
3935
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
4046
4048
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # A successful recovery is followed by a full verify of the repo;
    # its result becomes the exit code.
    if repo.recover():
        return hg.verify(repo)
    return 1
4062
4064
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

    # Delegate the real work to cmdutil.remove, which implements the
    # option/state matrix documented above.
    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
4117
4119
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    # rename is copy + remove; cmdutil.copy handles both when rename=True.
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
4142
4144
4143 @command('resolve',
4145 @command('resolve',
4144 [('a', 'all', None, _('select all unresolved files')),
4146 [('a', 'all', None, _('select all unresolved files')),
4145 ('l', 'list', None, _('list state of files needing merge')),
4147 ('l', 'list', None, _('list state of files needing merge')),
4146 ('m', 'mark', None, _('mark files as resolved')),
4148 ('m', 'mark', None, _('mark files as resolved')),
4147 ('u', 'unmark', None, _('mark files as unresolved')),
4149 ('u', 'unmark', None, _('mark files as unresolved')),
4148 ('n', 'no-status', None, _('hide status prefix'))]
4150 ('n', 'no-status', None, _('hide status prefix'))]
4149 + mergetoolopts + walkopts + formatteropts,
4151 + mergetoolopts + walkopts + formatteropts,
4150 _('[OPTION]... [FILE]...'),
4152 _('[OPTION]... [FILE]...'),
4151 inferrepo=True)
4153 inferrepo=True)
4152 def resolve(ui, repo, *pats, **opts):
4154 def resolve(ui, repo, *pats, **opts):
4153 """redo merges or set/view the merge status of files
4155 """redo merges or set/view the merge status of files
4154
4156
4155 Merges with unresolved conflicts are often the result of
4157 Merges with unresolved conflicts are often the result of
4156 non-interactive merging using the ``internal:merge`` configuration
4158 non-interactive merging using the ``internal:merge`` configuration
4157 setting, or a command-line merge tool like ``diff3``. The resolve
4159 setting, or a command-line merge tool like ``diff3``. The resolve
4158 command is used to manage the files involved in a merge, after
4160 command is used to manage the files involved in a merge, after
4159 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4161 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4160 working directory must have two parents). See :hg:`help
4162 working directory must have two parents). See :hg:`help
4161 merge-tools` for information on configuring merge tools.
4163 merge-tools` for information on configuring merge tools.
4162
4164
4163 The resolve command can be used in the following ways:
4165 The resolve command can be used in the following ways:
4164
4166
4165 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4167 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4166 files, discarding any previous merge attempts. Re-merging is not
4168 files, discarding any previous merge attempts. Re-merging is not
4167 performed for files already marked as resolved. Use ``--all/-a``
4169 performed for files already marked as resolved. Use ``--all/-a``
4168 to select all unresolved files. ``--tool`` can be used to specify
4170 to select all unresolved files. ``--tool`` can be used to specify
4169 the merge tool used for the given files. It overrides the HGMERGE
4171 the merge tool used for the given files. It overrides the HGMERGE
4170 environment variable and your configuration files. Previous file
4172 environment variable and your configuration files. Previous file
4171 contents are saved with a ``.orig`` suffix.
4173 contents are saved with a ``.orig`` suffix.
4172
4174
4173 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4175 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4174 (e.g. after having manually fixed-up the files). The default is
4176 (e.g. after having manually fixed-up the files). The default is
4175 to mark all unresolved files.
4177 to mark all unresolved files.
4176
4178
4177 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4179 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4178 default is to mark all resolved files.
4180 default is to mark all resolved files.
4179
4181
4180 - :hg:`resolve -l`: list files which had or still have conflicts.
4182 - :hg:`resolve -l`: list files which had or still have conflicts.
4181 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4183 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4182 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4184 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4183 the list. See :hg:`help filesets` for details.
4185 the list. See :hg:`help filesets` for details.
4184
4186
4185 .. note::
4187 .. note::
4186
4188
4187 Mercurial will not let you commit files with unresolved merge
4189 Mercurial will not let you commit files with unresolved merge
4188 conflicts. You must use :hg:`resolve -m ...` before you can
4190 conflicts. You must use :hg:`resolve -m ...` before you can
4189 commit after a conflicting merge.
4191 commit after a conflicting merge.
4190
4192
4191 Returns 0 on success, 1 if any files fail a resolve attempt.
4193 Returns 0 on success, 1 if any files fail a resolve attempt.
4192 """
4194 """
4193
4195
4194 opts = pycompat.byteskwargs(opts)
4196 opts = pycompat.byteskwargs(opts)
4195 flaglist = 'all mark unmark list no_status'.split()
4197 flaglist = 'all mark unmark list no_status'.split()
4196 all, mark, unmark, show, nostatus = \
4198 all, mark, unmark, show, nostatus = \
4197 [opts.get(o) for o in flaglist]
4199 [opts.get(o) for o in flaglist]
4198
4200
4199 if (show and (mark or unmark)) or (mark and unmark):
4201 if (show and (mark or unmark)) or (mark and unmark):
4200 raise error.Abort(_("too many options specified"))
4202 raise error.Abort(_("too many options specified"))
4201 if pats and all:
4203 if pats and all:
4202 raise error.Abort(_("can't specify --all and patterns"))
4204 raise error.Abort(_("can't specify --all and patterns"))
4203 if not (all or pats or show or mark or unmark):
4205 if not (all or pats or show or mark or unmark):
4204 raise error.Abort(_('no files or directories specified'),
4206 raise error.Abort(_('no files or directories specified'),
4205 hint=('use --all to re-merge all unresolved files'))
4207 hint=('use --all to re-merge all unresolved files'))
4206
4208
4207 if show:
4209 if show:
4208 ui.pager('resolve')
4210 ui.pager('resolve')
4209 fm = ui.formatter('resolve', opts)
4211 fm = ui.formatter('resolve', opts)
4210 ms = mergemod.mergestate.read(repo)
4212 ms = mergemod.mergestate.read(repo)
4211 m = scmutil.match(repo[None], pats, opts)
4213 m = scmutil.match(repo[None], pats, opts)
4212 for f in ms:
4214 for f in ms:
4213 if not m(f):
4215 if not m(f):
4214 continue
4216 continue
4215 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4217 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4216 'd': 'driverresolved'}[ms[f]]
4218 'd': 'driverresolved'}[ms[f]]
4217 fm.startitem()
4219 fm.startitem()
4218 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4220 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4219 fm.write('path', '%s\n', f, label=l)
4221 fm.write('path', '%s\n', f, label=l)
4220 fm.end()
4222 fm.end()
4221 return 0
4223 return 0
4222
4224
4223 with repo.wlock():
4225 with repo.wlock():
4224 ms = mergemod.mergestate.read(repo)
4226 ms = mergemod.mergestate.read(repo)
4225
4227
4226 if not (ms.active() or repo.dirstate.p2() != nullid):
4228 if not (ms.active() or repo.dirstate.p2() != nullid):
4227 raise error.Abort(
4229 raise error.Abort(
4228 _('resolve command not applicable when not merging'))
4230 _('resolve command not applicable when not merging'))
4229
4231
4230 wctx = repo[None]
4232 wctx = repo[None]
4231
4233
4232 if ms.mergedriver and ms.mdstate() == 'u':
4234 if ms.mergedriver and ms.mdstate() == 'u':
4233 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4235 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4234 ms.commit()
4236 ms.commit()
4235 # allow mark and unmark to go through
4237 # allow mark and unmark to go through
4236 if not mark and not unmark and not proceed:
4238 if not mark and not unmark and not proceed:
4237 return 1
4239 return 1
4238
4240
4239 m = scmutil.match(wctx, pats, opts)
4241 m = scmutil.match(wctx, pats, opts)
4240 ret = 0
4242 ret = 0
4241 didwork = False
4243 didwork = False
4242 runconclude = False
4244 runconclude = False
4243
4245
4244 tocomplete = []
4246 tocomplete = []
4245 for f in ms:
4247 for f in ms:
4246 if not m(f):
4248 if not m(f):
4247 continue
4249 continue
4248
4250
4249 didwork = True
4251 didwork = True
4250
4252
4251 # don't let driver-resolved files be marked, and run the conclude
4253 # don't let driver-resolved files be marked, and run the conclude
4252 # step if asked to resolve
4254 # step if asked to resolve
4253 if ms[f] == "d":
4255 if ms[f] == "d":
4254 exact = m.exact(f)
4256 exact = m.exact(f)
4255 if mark:
4257 if mark:
4256 if exact:
4258 if exact:
4257 ui.warn(_('not marking %s as it is driver-resolved\n')
4259 ui.warn(_('not marking %s as it is driver-resolved\n')
4258 % f)
4260 % f)
4259 elif unmark:
4261 elif unmark:
4260 if exact:
4262 if exact:
4261 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4263 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4262 % f)
4264 % f)
4263 else:
4265 else:
4264 runconclude = True
4266 runconclude = True
4265 continue
4267 continue
4266
4268
4267 if mark:
4269 if mark:
4268 ms.mark(f, "r")
4270 ms.mark(f, "r")
4269 elif unmark:
4271 elif unmark:
4270 ms.mark(f, "u")
4272 ms.mark(f, "u")
4271 else:
4273 else:
4272 # backup pre-resolve (merge uses .orig for its own purposes)
4274 # backup pre-resolve (merge uses .orig for its own purposes)
4273 a = repo.wjoin(f)
4275 a = repo.wjoin(f)
4274 try:
4276 try:
4275 util.copyfile(a, a + ".resolve")
4277 util.copyfile(a, a + ".resolve")
4276 except (IOError, OSError) as inst:
4278 except (IOError, OSError) as inst:
4277 if inst.errno != errno.ENOENT:
4279 if inst.errno != errno.ENOENT:
4278 raise
4280 raise
4279
4281
4280 try:
4282 try:
4281 # preresolve file
4283 # preresolve file
4282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4284 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4283 'resolve')
4285 'resolve')
4284 complete, r = ms.preresolve(f, wctx)
4286 complete, r = ms.preresolve(f, wctx)
4285 if not complete:
4287 if not complete:
4286 tocomplete.append(f)
4288 tocomplete.append(f)
4287 elif r:
4289 elif r:
4288 ret = 1
4290 ret = 1
4289 finally:
4291 finally:
4290 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4292 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4291 ms.commit()
4293 ms.commit()
4292
4294
4293 # replace filemerge's .orig file with our resolve file, but only
4295 # replace filemerge's .orig file with our resolve file, but only
4294 # for merges that are complete
4296 # for merges that are complete
4295 if complete:
4297 if complete:
4296 try:
4298 try:
4297 util.rename(a + ".resolve",
4299 util.rename(a + ".resolve",
4298 scmutil.origpath(ui, repo, a))
4300 scmutil.origpath(ui, repo, a))
4299 except OSError as inst:
4301 except OSError as inst:
4300 if inst.errno != errno.ENOENT:
4302 if inst.errno != errno.ENOENT:
4301 raise
4303 raise
4302
4304
4303 for f in tocomplete:
4305 for f in tocomplete:
4304 try:
4306 try:
4305 # resolve file
4307 # resolve file
4306 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4308 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4307 'resolve')
4309 'resolve')
4308 r = ms.resolve(f, wctx)
4310 r = ms.resolve(f, wctx)
4309 if r:
4311 if r:
4310 ret = 1
4312 ret = 1
4311 finally:
4313 finally:
4312 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4314 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4313 ms.commit()
4315 ms.commit()
4314
4316
4315 # replace filemerge's .orig file with our resolve file
4317 # replace filemerge's .orig file with our resolve file
4316 a = repo.wjoin(f)
4318 a = repo.wjoin(f)
4317 try:
4319 try:
4318 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4320 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4319 except OSError as inst:
4321 except OSError as inst:
4320 if inst.errno != errno.ENOENT:
4322 if inst.errno != errno.ENOENT:
4321 raise
4323 raise
4322
4324
4323 ms.commit()
4325 ms.commit()
4324 ms.recordactions()
4326 ms.recordactions()
4325
4327
4326 if not didwork and pats:
4328 if not didwork and pats:
4327 hint = None
4329 hint = None
4328 if not any([p for p in pats if p.find(':') >= 0]):
4330 if not any([p for p in pats if p.find(':') >= 0]):
4329 pats = ['path:%s' % p for p in pats]
4331 pats = ['path:%s' % p for p in pats]
4330 m = scmutil.match(wctx, pats, opts)
4332 m = scmutil.match(wctx, pats, opts)
4331 for f in ms:
4333 for f in ms:
4332 if not m(f):
4334 if not m(f):
4333 continue
4335 continue
4334 flags = ''.join(['-%s ' % o[0] for o in flaglist
4336 flags = ''.join(['-%s ' % o[0] for o in flaglist
4335 if opts.get(o)])
4337 if opts.get(o)])
4336 hint = _("(try: hg resolve %s%s)\n") % (
4338 hint = _("(try: hg resolve %s%s)\n") % (
4337 flags,
4339 flags,
4338 ' '.join(pats))
4340 ' '.join(pats))
4339 break
4341 break
4340 ui.warn(_("arguments do not match paths that need resolving\n"))
4342 ui.warn(_("arguments do not match paths that need resolving\n"))
4341 if hint:
4343 if hint:
4342 ui.warn(hint)
4344 ui.warn(hint)
4343 elif ms.mergedriver and ms.mdstate() != 's':
4345 elif ms.mergedriver and ms.mdstate() != 's':
4344 # run conclude step when either a driver-resolved file is requested
4346 # run conclude step when either a driver-resolved file is requested
4345 # or there are no driver-resolved files
4347 # or there are no driver-resolved files
4346 # we can't use 'ret' to determine whether any files are unresolved
4348 # we can't use 'ret' to determine whether any files are unresolved
4347 # because we might not have tried to resolve some
4349 # because we might not have tried to resolve some
4348 if ((runconclude or not list(ms.driverresolved()))
4350 if ((runconclude or not list(ms.driverresolved()))
4349 and not list(ms.unresolved())):
4351 and not list(ms.unresolved())):
4350 proceed = mergemod.driverconclude(repo, ms, wctx)
4352 proceed = mergemod.driverconclude(repo, ms, wctx)
4351 ms.commit()
4353 ms.commit()
4352 if not proceed:
4354 if not proceed:
4353 return 1
4355 return 1
4354
4356
4355 # Nudge users into finishing an unfinished operation
4357 # Nudge users into finishing an unfinished operation
4356 unresolvedf = list(ms.unresolved())
4358 unresolvedf = list(ms.unresolved())
4357 driverresolvedf = list(ms.driverresolved())
4359 driverresolvedf = list(ms.driverresolved())
4358 if not unresolvedf and not driverresolvedf:
4360 if not unresolvedf and not driverresolvedf:
4359 ui.status(_('(no more unresolved files)\n'))
4361 ui.status(_('(no more unresolved files)\n'))
4360 cmdutil.checkafterresolved(repo)
4362 cmdutil.checkafterresolved(repo)
4361 elif not unresolvedf:
4363 elif not unresolvedf:
4362 ui.status(_('(no more unresolved files -- '
4364 ui.status(_('(no more unresolved files -- '
4363 'run "hg resolve --all" to conclude)\n'))
4365 'run "hg resolve --all" to conclude)\n'))
4364
4366
4365 return ret
4367 return ret
4366
4368
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    # --date is resolved into a concrete revision up front; it is
    # mutually exclusive with an explicit --rev.
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if p2 != nullid and not opts.get('rev'):
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    # Without any file selection (patterns, --include/--exclude, --all,
    # or --interactive) there is nothing to act on; abort with a hint
    # tailored to the current working-directory state.
    targeted = (pats or opts.get('include') or opts.get('exclude') or
                opts.get('all') or opts.get('interactive'))
    if not targeted:
        msg = _("no files or directories specified")
        if p2 != nullid:
            raise error.Abort(msg, hint=_(
                "uncommitted merge, use --all to discard all changes,"
                " or 'hg update -C .' to abort the merge"))
        dirty = any(repo.status())
        if ctx.node() != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4449
4451
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Honor the ui.rollback escape hatch: refuse to run entirely when
    # the command has been disabled in configuration.
    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    dryrun = opts.get(r'dry_run')
    force = opts.get(r'force')
    return repo.rollback(dryrun=dryrun, force=force)
4502
4504
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # Emit the repository root path followed by a newline.
    rootpath = repo.root
    ui.write(rootpath + "\n")
4512
4514
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    # --stdio and --cmdserver are distinct wire-protocol modes and
    # cannot be combined.
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        # --stdio serves a single local repository over stdin/stdout
        # (the ssh wire protocol); a repository is mandatory here.
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        sshsrv = sshserver.sshserver(ui, repo)
        sshsrv.serve_forever()

    # Otherwise build the requested service (hgweb or command server)
    # and hand control to the generic service runner.
    webservice = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=webservice.init,
                             runfn=webservice.run)
4579
4581
4580 @command('^status|st',
4582 @command('^status|st',
4581 [('A', 'all', None, _('show status of all files')),
4583 [('A', 'all', None, _('show status of all files')),
4582 ('m', 'modified', None, _('show only modified files')),
4584 ('m', 'modified', None, _('show only modified files')),
4583 ('a', 'added', None, _('show only added files')),
4585 ('a', 'added', None, _('show only added files')),
4584 ('r', 'removed', None, _('show only removed files')),
4586 ('r', 'removed', None, _('show only removed files')),
4585 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4587 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4586 ('c', 'clean', None, _('show only files without changes')),
4588 ('c', 'clean', None, _('show only files without changes')),
4587 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4589 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4588 ('i', 'ignored', None, _('show only ignored files')),
4590 ('i', 'ignored', None, _('show only ignored files')),
4589 ('n', 'no-status', None, _('hide status prefix')),
4591 ('n', 'no-status', None, _('hide status prefix')),
4590 ('C', 'copies', None, _('show source of copied files')),
4592 ('C', 'copies', None, _('show source of copied files')),
4591 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4593 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4592 ('', 'rev', [], _('show difference from revision'), _('REV')),
4594 ('', 'rev', [], _('show difference from revision'), _('REV')),
4593 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4595 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4594 ] + walkopts + subrepoopts + formatteropts,
4596 ] + walkopts + subrepoopts + formatteropts,
4595 _('[OPTION]... [FILE]...'),
4597 _('[OPTION]... [FILE]...'),
4596 inferrepo=True)
4598 inferrepo=True)
4597 def status(ui, repo, *pats, **opts):
4599 def status(ui, repo, *pats, **opts):
4598 """show changed files in the working directory
4600 """show changed files in the working directory
4599
4601
4600 Show status of files in the repository. If names are given, only
4602 Show status of files in the repository. If names are given, only
4601 files that match are shown. Files that are clean or ignored or
4603 files that match are shown. Files that are clean or ignored or
4602 the source of a copy/move operation, are not listed unless
4604 the source of a copy/move operation, are not listed unless
4603 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4605 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4604 Unless options described with "show only ..." are given, the
4606 Unless options described with "show only ..." are given, the
4605 options -mardu are used.
4607 options -mardu are used.
4606
4608
4607 Option -q/--quiet hides untracked (unknown and ignored) files
4609 Option -q/--quiet hides untracked (unknown and ignored) files
4608 unless explicitly requested with -u/--unknown or -i/--ignored.
4610 unless explicitly requested with -u/--unknown or -i/--ignored.
4609
4611
4610 .. note::
4612 .. note::
4611
4613
4612 :hg:`status` may appear to disagree with diff if permissions have
4614 :hg:`status` may appear to disagree with diff if permissions have
4613 changed or a merge has occurred. The standard diff format does
4615 changed or a merge has occurred. The standard diff format does
4614 not report permission changes and diff only reports changes
4616 not report permission changes and diff only reports changes
4615 relative to one merge parent.
4617 relative to one merge parent.
4616
4618
4617 If one revision is given, it is used as the base revision.
4619 If one revision is given, it is used as the base revision.
4618 If two revisions are given, the differences between them are
4620 If two revisions are given, the differences between them are
4619 shown. The --change option can also be used as a shortcut to list
4621 shown. The --change option can also be used as a shortcut to list
4620 the changed files of a revision from its first parent.
4622 the changed files of a revision from its first parent.
4621
4623
4622 The codes used to show the status of files are::
4624 The codes used to show the status of files are::
4623
4625
4624 M = modified
4626 M = modified
4625 A = added
4627 A = added
4626 R = removed
4628 R = removed
4627 C = clean
4629 C = clean
4628 ! = missing (deleted by non-hg command, but still tracked)
4630 ! = missing (deleted by non-hg command, but still tracked)
4629 ? = not tracked
4631 ? = not tracked
4630 I = ignored
4632 I = ignored
4631 = origin of the previous file (with --copies)
4633 = origin of the previous file (with --copies)
4632
4634
4633 .. container:: verbose
4635 .. container:: verbose
4634
4636
4635 Examples:
4637 Examples:
4636
4638
4637 - show changes in the working directory relative to a
4639 - show changes in the working directory relative to a
4638 changeset::
4640 changeset::
4639
4641
4640 hg status --rev 9353
4642 hg status --rev 9353
4641
4643
4642 - show changes in the working directory relative to the
4644 - show changes in the working directory relative to the
4643 current directory (see :hg:`help patterns` for more information)::
4645 current directory (see :hg:`help patterns` for more information)::
4644
4646
4645 hg status re:
4647 hg status re:
4646
4648
4647 - show all changes including copies in an existing changeset::
4649 - show all changes including copies in an existing changeset::
4648
4650
4649 hg status --copies --change 9353
4651 hg status --copies --change 9353
4650
4652
4651 - get a NUL separated list of added files, suitable for xargs::
4653 - get a NUL separated list of added files, suitable for xargs::
4652
4654
4653 hg status -an0
4655 hg status -an0
4654
4656
4655 Returns 0 on success.
4657 Returns 0 on success.
4656 """
4658 """
4657
4659
4658 opts = pycompat.byteskwargs(opts)
4660 opts = pycompat.byteskwargs(opts)
4659 revs = opts.get('rev')
4661 revs = opts.get('rev')
4660 change = opts.get('change')
4662 change = opts.get('change')
4661
4663
4662 if revs and change:
4664 if revs and change:
4663 msg = _('cannot specify --rev and --change at the same time')
4665 msg = _('cannot specify --rev and --change at the same time')
4664 raise error.Abort(msg)
4666 raise error.Abort(msg)
4665 elif change:
4667 elif change:
4666 node2 = scmutil.revsingle(repo, change, None).node()
4668 node2 = scmutil.revsingle(repo, change, None).node()
4667 node1 = repo[node2].p1().node()
4669 node1 = repo[node2].p1().node()
4668 else:
4670 else:
4669 node1, node2 = scmutil.revpair(repo, revs)
4671 node1, node2 = scmutil.revpair(repo, revs)
4670
4672
4671 if pats or ui.configbool('commands', 'status.relative'):
4673 if pats or ui.configbool('commands', 'status.relative'):
4672 cwd = repo.getcwd()
4674 cwd = repo.getcwd()
4673 else:
4675 else:
4674 cwd = ''
4676 cwd = ''
4675
4677
4676 if opts.get('print0'):
4678 if opts.get('print0'):
4677 end = '\0'
4679 end = '\0'
4678 else:
4680 else:
4679 end = '\n'
4681 end = '\n'
4680 copy = {}
4682 copy = {}
4681 states = 'modified added removed deleted unknown ignored clean'.split()
4683 states = 'modified added removed deleted unknown ignored clean'.split()
4682 show = [k for k in states if opts.get(k)]
4684 show = [k for k in states if opts.get(k)]
4683 if opts.get('all'):
4685 if opts.get('all'):
4684 show += ui.quiet and (states[:4] + ['clean']) or states
4686 show += ui.quiet and (states[:4] + ['clean']) or states
4685 if not show:
4687 if not show:
4686 if ui.quiet:
4688 if ui.quiet:
4687 show = states[:4]
4689 show = states[:4]
4688 else:
4690 else:
4689 show = states[:5]
4691 show = states[:5]
4690
4692
4691 m = scmutil.match(repo[node2], pats, opts)
4693 m = scmutil.match(repo[node2], pats, opts)
4692 stat = repo.status(node1, node2, m,
4694 stat = repo.status(node1, node2, m,
4693 'ignored' in show, 'clean' in show, 'unknown' in show,
4695 'ignored' in show, 'clean' in show, 'unknown' in show,
4694 opts.get('subrepos'))
4696 opts.get('subrepos'))
4695 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4697 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4696
4698
4697 if (opts.get('all') or opts.get('copies')
4699 if (opts.get('all') or opts.get('copies')
4698 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4700 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4699 copy = copies.pathcopies(repo[node1], repo[node2], m)
4701 copy = copies.pathcopies(repo[node1], repo[node2], m)
4700
4702
4701 ui.pager('status')
4703 ui.pager('status')
4702 fm = ui.formatter('status', opts)
4704 fm = ui.formatter('status', opts)
4703 fmt = '%s' + end
4705 fmt = '%s' + end
4704 showchar = not opts.get('no_status')
4706 showchar = not opts.get('no_status')
4705
4707
4706 for state, char, files in changestates:
4708 for state, char, files in changestates:
4707 if state in show:
4709 if state in show:
4708 label = 'status.' + state
4710 label = 'status.' + state
4709 for f in files:
4711 for f in files:
4710 fm.startitem()
4712 fm.startitem()
4711 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4713 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4712 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4714 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4713 if f in copy:
4715 if f in copy:
4714 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4716 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4715 label='status.copied')
4717 label='status.copied')
4716 fm.end()
4718 fm.end()
4717
4719
4718 @command('^summary|sum',
4720 @command('^summary|sum',
4719 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4721 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4720 def summary(ui, repo, **opts):
4722 def summary(ui, repo, **opts):
4721 """summarize working directory state
4723 """summarize working directory state
4722
4724
4723 This generates a brief summary of the working directory state,
4725 This generates a brief summary of the working directory state,
4724 including parents, branch, commit status, phase and available updates.
4726 including parents, branch, commit status, phase and available updates.
4725
4727
4726 With the --remote option, this will check the default paths for
4728 With the --remote option, this will check the default paths for
4727 incoming and outgoing changes. This can be time-consuming.
4729 incoming and outgoing changes. This can be time-consuming.
4728
4730
4729 Returns 0 on success.
4731 Returns 0 on success.
4730 """
4732 """
4731
4733
4732 opts = pycompat.byteskwargs(opts)
4734 opts = pycompat.byteskwargs(opts)
4733 ui.pager('summary')
4735 ui.pager('summary')
4734 ctx = repo[None]
4736 ctx = repo[None]
4735 parents = ctx.parents()
4737 parents = ctx.parents()
4736 pnode = parents[0].node()
4738 pnode = parents[0].node()
4737 marks = []
4739 marks = []
4738
4740
4739 ms = None
4741 ms = None
4740 try:
4742 try:
4741 ms = mergemod.mergestate.read(repo)
4743 ms = mergemod.mergestate.read(repo)
4742 except error.UnsupportedMergeRecords as e:
4744 except error.UnsupportedMergeRecords as e:
4743 s = ' '.join(e.recordtypes)
4745 s = ' '.join(e.recordtypes)
4744 ui.warn(
4746 ui.warn(
4745 _('warning: merge state has unsupported record types: %s\n') % s)
4747 _('warning: merge state has unsupported record types: %s\n') % s)
4746 unresolved = 0
4748 unresolved = 0
4747 else:
4749 else:
4748 unresolved = [f for f in ms if ms[f] == 'u']
4750 unresolved = [f for f in ms if ms[f] == 'u']
4749
4751
4750 for p in parents:
4752 for p in parents:
4751 # label with log.changeset (instead of log.parent) since this
4753 # label with log.changeset (instead of log.parent) since this
4752 # shows a working directory parent *changeset*:
4754 # shows a working directory parent *changeset*:
4753 # i18n: column positioning for "hg summary"
4755 # i18n: column positioning for "hg summary"
4754 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4756 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4755 label=cmdutil._changesetlabels(p))
4757 label=cmdutil._changesetlabels(p))
4756 ui.write(' '.join(p.tags()), label='log.tag')
4758 ui.write(' '.join(p.tags()), label='log.tag')
4757 if p.bookmarks():
4759 if p.bookmarks():
4758 marks.extend(p.bookmarks())
4760 marks.extend(p.bookmarks())
4759 if p.rev() == -1:
4761 if p.rev() == -1:
4760 if not len(repo):
4762 if not len(repo):
4761 ui.write(_(' (empty repository)'))
4763 ui.write(_(' (empty repository)'))
4762 else:
4764 else:
4763 ui.write(_(' (no revision checked out)'))
4765 ui.write(_(' (no revision checked out)'))
4764 if p.obsolete():
4766 if p.obsolete():
4765 ui.write(_(' (obsolete)'))
4767 ui.write(_(' (obsolete)'))
4766 if p.troubled():
4768 if p.troubled():
4767 ui.write(' ('
4769 ui.write(' ('
4768 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4770 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4769 for trouble in p.troubles())
4771 for trouble in p.troubles())
4770 + ')')
4772 + ')')
4771 ui.write('\n')
4773 ui.write('\n')
4772 if p.description():
4774 if p.description():
4773 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4775 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4774 label='log.summary')
4776 label='log.summary')
4775
4777
4776 branch = ctx.branch()
4778 branch = ctx.branch()
4777 bheads = repo.branchheads(branch)
4779 bheads = repo.branchheads(branch)
4778 # i18n: column positioning for "hg summary"
4780 # i18n: column positioning for "hg summary"
4779 m = _('branch: %s\n') % branch
4781 m = _('branch: %s\n') % branch
4780 if branch != 'default':
4782 if branch != 'default':
4781 ui.write(m, label='log.branch')
4783 ui.write(m, label='log.branch')
4782 else:
4784 else:
4783 ui.status(m, label='log.branch')
4785 ui.status(m, label='log.branch')
4784
4786
4785 if marks:
4787 if marks:
4786 active = repo._activebookmark
4788 active = repo._activebookmark
4787 # i18n: column positioning for "hg summary"
4789 # i18n: column positioning for "hg summary"
4788 ui.write(_('bookmarks:'), label='log.bookmark')
4790 ui.write(_('bookmarks:'), label='log.bookmark')
4789 if active is not None:
4791 if active is not None:
4790 if active in marks:
4792 if active in marks:
4791 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
4793 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
4792 marks.remove(active)
4794 marks.remove(active)
4793 else:
4795 else:
4794 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
4796 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
4795 for m in marks:
4797 for m in marks:
4796 ui.write(' ' + m, label='log.bookmark')
4798 ui.write(' ' + m, label='log.bookmark')
4797 ui.write('\n', label='log.bookmark')
4799 ui.write('\n', label='log.bookmark')
4798
4800
4799 status = repo.status(unknown=True)
4801 status = repo.status(unknown=True)
4800
4802
4801 c = repo.dirstate.copies()
4803 c = repo.dirstate.copies()
4802 copied, renamed = [], []
4804 copied, renamed = [], []
4803 for d, s in c.iteritems():
4805 for d, s in c.iteritems():
4804 if s in status.removed:
4806 if s in status.removed:
4805 status.removed.remove(s)
4807 status.removed.remove(s)
4806 renamed.append(d)
4808 renamed.append(d)
4807 else:
4809 else:
4808 copied.append(d)
4810 copied.append(d)
4809 if d in status.added:
4811 if d in status.added:
4810 status.added.remove(d)
4812 status.added.remove(d)
4811
4813
4812 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4814 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4813
4815
4814 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4816 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4815 (ui.label(_('%d added'), 'status.added'), status.added),
4817 (ui.label(_('%d added'), 'status.added'), status.added),
4816 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4818 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4817 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4819 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4818 (ui.label(_('%d copied'), 'status.copied'), copied),
4820 (ui.label(_('%d copied'), 'status.copied'), copied),
4819 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4821 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4820 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4822 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4821 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4823 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4822 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4824 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4823 t = []
4825 t = []
4824 for l, s in labels:
4826 for l, s in labels:
4825 if s:
4827 if s:
4826 t.append(l % len(s))
4828 t.append(l % len(s))
4827
4829
4828 t = ', '.join(t)
4830 t = ', '.join(t)
4829 cleanworkdir = False
4831 cleanworkdir = False
4830
4832
4831 if repo.vfs.exists('graftstate'):
4833 if repo.vfs.exists('graftstate'):
4832 t += _(' (graft in progress)')
4834 t += _(' (graft in progress)')
4833 if repo.vfs.exists('updatestate'):
4835 if repo.vfs.exists('updatestate'):
4834 t += _(' (interrupted update)')
4836 t += _(' (interrupted update)')
4835 elif len(parents) > 1:
4837 elif len(parents) > 1:
4836 t += _(' (merge)')
4838 t += _(' (merge)')
4837 elif branch != parents[0].branch():
4839 elif branch != parents[0].branch():
4838 t += _(' (new branch)')
4840 t += _(' (new branch)')
4839 elif (parents[0].closesbranch() and
4841 elif (parents[0].closesbranch() and
4840 pnode in repo.branchheads(branch, closed=True)):
4842 pnode in repo.branchheads(branch, closed=True)):
4841 t += _(' (head closed)')
4843 t += _(' (head closed)')
4842 elif not (status.modified or status.added or status.removed or renamed or
4844 elif not (status.modified or status.added or status.removed or renamed or
4843 copied or subs):
4845 copied or subs):
4844 t += _(' (clean)')
4846 t += _(' (clean)')
4845 cleanworkdir = True
4847 cleanworkdir = True
4846 elif pnode not in bheads:
4848 elif pnode not in bheads:
4847 t += _(' (new branch head)')
4849 t += _(' (new branch head)')
4848
4850
4849 if parents:
4851 if parents:
4850 pendingphase = max(p.phase() for p in parents)
4852 pendingphase = max(p.phase() for p in parents)
4851 else:
4853 else:
4852 pendingphase = phases.public
4854 pendingphase = phases.public
4853
4855
4854 if pendingphase > phases.newcommitphase(ui):
4856 if pendingphase > phases.newcommitphase(ui):
4855 t += ' (%s)' % phases.phasenames[pendingphase]
4857 t += ' (%s)' % phases.phasenames[pendingphase]
4856
4858
4857 if cleanworkdir:
4859 if cleanworkdir:
4858 # i18n: column positioning for "hg summary"
4860 # i18n: column positioning for "hg summary"
4859 ui.status(_('commit: %s\n') % t.strip())
4861 ui.status(_('commit: %s\n') % t.strip())
4860 else:
4862 else:
4861 # i18n: column positioning for "hg summary"
4863 # i18n: column positioning for "hg summary"
4862 ui.write(_('commit: %s\n') % t.strip())
4864 ui.write(_('commit: %s\n') % t.strip())
4863
4865
4864 # all ancestors of branch heads - all ancestors of parent = new csets
4866 # all ancestors of branch heads - all ancestors of parent = new csets
4865 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4867 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4866 bheads))
4868 bheads))
4867
4869
4868 if new == 0:
4870 if new == 0:
4869 # i18n: column positioning for "hg summary"
4871 # i18n: column positioning for "hg summary"
4870 ui.status(_('update: (current)\n'))
4872 ui.status(_('update: (current)\n'))
4871 elif pnode not in bheads:
4873 elif pnode not in bheads:
4872 # i18n: column positioning for "hg summary"
4874 # i18n: column positioning for "hg summary"
4873 ui.write(_('update: %d new changesets (update)\n') % new)
4875 ui.write(_('update: %d new changesets (update)\n') % new)
4874 else:
4876 else:
4875 # i18n: column positioning for "hg summary"
4877 # i18n: column positioning for "hg summary"
4876 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4878 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4877 (new, len(bheads)))
4879 (new, len(bheads)))
4878
4880
4879 t = []
4881 t = []
4880 draft = len(repo.revs('draft()'))
4882 draft = len(repo.revs('draft()'))
4881 if draft:
4883 if draft:
4882 t.append(_('%d draft') % draft)
4884 t.append(_('%d draft') % draft)
4883 secret = len(repo.revs('secret()'))
4885 secret = len(repo.revs('secret()'))
4884 if secret:
4886 if secret:
4885 t.append(_('%d secret') % secret)
4887 t.append(_('%d secret') % secret)
4886
4888
4887 if draft or secret:
4889 if draft or secret:
4888 ui.status(_('phases: %s\n') % ', '.join(t))
4890 ui.status(_('phases: %s\n') % ', '.join(t))
4889
4891
4890 if obsolete.isenabled(repo, obsolete.createmarkersopt):
4892 if obsolete.isenabled(repo, obsolete.createmarkersopt):
4891 for trouble in ("unstable", "divergent", "bumped"):
4893 for trouble in ("unstable", "divergent", "bumped"):
4892 numtrouble = len(repo.revs(trouble + "()"))
4894 numtrouble = len(repo.revs(trouble + "()"))
4893 # We write all the possibilities to ease translation
4895 # We write all the possibilities to ease translation
4894 troublemsg = {
4896 troublemsg = {
4895 "unstable": _("unstable: %d changesets"),
4897 "unstable": _("unstable: %d changesets"),
4896 "divergent": _("divergent: %d changesets"),
4898 "divergent": _("divergent: %d changesets"),
4897 "bumped": _("bumped: %d changesets"),
4899 "bumped": _("bumped: %d changesets"),
4898 }
4900 }
4899 if numtrouble > 0:
4901 if numtrouble > 0:
4900 ui.status(troublemsg[trouble] % numtrouble + "\n")
4902 ui.status(troublemsg[trouble] % numtrouble + "\n")
4901
4903
4902 cmdutil.summaryhooks(ui, repo)
4904 cmdutil.summaryhooks(ui, repo)
4903
4905
4904 if opts.get('remote'):
4906 if opts.get('remote'):
4905 needsincoming, needsoutgoing = True, True
4907 needsincoming, needsoutgoing = True, True
4906 else:
4908 else:
4907 needsincoming, needsoutgoing = False, False
4909 needsincoming, needsoutgoing = False, False
4908 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
4910 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
4909 if i:
4911 if i:
4910 needsincoming = True
4912 needsincoming = True
4911 if o:
4913 if o:
4912 needsoutgoing = True
4914 needsoutgoing = True
4913 if not needsincoming and not needsoutgoing:
4915 if not needsincoming and not needsoutgoing:
4914 return
4916 return
4915
4917
4916 def getincoming():
4918 def getincoming():
4917 source, branches = hg.parseurl(ui.expandpath('default'))
4919 source, branches = hg.parseurl(ui.expandpath('default'))
4918 sbranch = branches[0]
4920 sbranch = branches[0]
4919 try:
4921 try:
4920 other = hg.peer(repo, {}, source)
4922 other = hg.peer(repo, {}, source)
4921 except error.RepoError:
4923 except error.RepoError:
4922 if opts.get('remote'):
4924 if opts.get('remote'):
4923 raise
4925 raise
4924 return source, sbranch, None, None, None
4926 return source, sbranch, None, None, None
4925 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
4927 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
4926 if revs:
4928 if revs:
4927 revs = [other.lookup(rev) for rev in revs]
4929 revs = [other.lookup(rev) for rev in revs]
4928 ui.debug('comparing with %s\n' % util.hidepassword(source))
4930 ui.debug('comparing with %s\n' % util.hidepassword(source))
4929 repo.ui.pushbuffer()
4931 repo.ui.pushbuffer()
4930 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
4932 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
4931 repo.ui.popbuffer()
4933 repo.ui.popbuffer()
4932 return source, sbranch, other, commoninc, commoninc[1]
4934 return source, sbranch, other, commoninc, commoninc[1]
4933
4935
4934 if needsincoming:
4936 if needsincoming:
4935 source, sbranch, sother, commoninc, incoming = getincoming()
4937 source, sbranch, sother, commoninc, incoming = getincoming()
4936 else:
4938 else:
4937 source = sbranch = sother = commoninc = incoming = None
4939 source = sbranch = sother = commoninc = incoming = None
4938
4940
4939 def getoutgoing():
4941 def getoutgoing():
4940 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4942 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4941 dbranch = branches[0]
4943 dbranch = branches[0]
4942 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4944 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4943 if source != dest:
4945 if source != dest:
4944 try:
4946 try:
4945 dother = hg.peer(repo, {}, dest)
4947 dother = hg.peer(repo, {}, dest)
4946 except error.RepoError:
4948 except error.RepoError:
4947 if opts.get('remote'):
4949 if opts.get('remote'):
4948 raise
4950 raise
4949 return dest, dbranch, None, None
4951 return dest, dbranch, None, None
4950 ui.debug('comparing with %s\n' % util.hidepassword(dest))
4952 ui.debug('comparing with %s\n' % util.hidepassword(dest))
4951 elif sother is None:
4953 elif sother is None:
4952 # there is no explicit destination peer, but source one is invalid
4954 # there is no explicit destination peer, but source one is invalid
4953 return dest, dbranch, None, None
4955 return dest, dbranch, None, None
4954 else:
4956 else:
4955 dother = sother
4957 dother = sother
4956 if (source != dest or (sbranch is not None and sbranch != dbranch)):
4958 if (source != dest or (sbranch is not None and sbranch != dbranch)):
4957 common = None
4959 common = None
4958 else:
4960 else:
4959 common = commoninc
4961 common = commoninc
4960 if revs:
4962 if revs:
4961 revs = [repo.lookup(rev) for rev in revs]
4963 revs = [repo.lookup(rev) for rev in revs]
4962 repo.ui.pushbuffer()
4964 repo.ui.pushbuffer()
4963 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
4965 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
4964 commoninc=common)
4966 commoninc=common)
4965 repo.ui.popbuffer()
4967 repo.ui.popbuffer()
4966 return dest, dbranch, dother, outgoing
4968 return dest, dbranch, dother, outgoing
4967
4969
4968 if needsoutgoing:
4970 if needsoutgoing:
4969 dest, dbranch, dother, outgoing = getoutgoing()
4971 dest, dbranch, dother, outgoing = getoutgoing()
4970 else:
4972 else:
4971 dest = dbranch = dother = outgoing = None
4973 dest = dbranch = dother = outgoing = None
4972
4974
4973 if opts.get('remote'):
4975 if opts.get('remote'):
4974 t = []
4976 t = []
4975 if incoming:
4977 if incoming:
4976 t.append(_('1 or more incoming'))
4978 t.append(_('1 or more incoming'))
4977 o = outgoing.missing
4979 o = outgoing.missing
4978 if o:
4980 if o:
4979 t.append(_('%d outgoing') % len(o))
4981 t.append(_('%d outgoing') % len(o))
4980 other = dother or sother
4982 other = dother or sother
4981 if 'bookmarks' in other.listkeys('namespaces'):
4983 if 'bookmarks' in other.listkeys('namespaces'):
4982 counts = bookmarks.summary(repo, other)
4984 counts = bookmarks.summary(repo, other)
4983 if counts[0] > 0:
4985 if counts[0] > 0:
4984 t.append(_('%d incoming bookmarks') % counts[0])
4986 t.append(_('%d incoming bookmarks') % counts[0])
4985 if counts[1] > 0:
4987 if counts[1] > 0:
4986 t.append(_('%d outgoing bookmarks') % counts[1])
4988 t.append(_('%d outgoing bookmarks') % counts[1])
4987
4989
4988 if t:
4990 if t:
4989 # i18n: column positioning for "hg summary"
4991 # i18n: column positioning for "hg summary"
4990 ui.write(_('remote: %s\n') % (', '.join(t)))
4992 ui.write(_('remote: %s\n') % (', '.join(t)))
4991 else:
4993 else:
4992 # i18n: column positioning for "hg summary"
4994 # i18n: column positioning for "hg summary"
4993 ui.status(_('remote: (synced)\n'))
4995 ui.status(_('remote: (synced)\n'))
4994
4996
4995 cmdutil.summaryremotehooks(ui, repo, opts,
4997 cmdutil.summaryremotehooks(ui, repo, opts,
4996 ((source, sbranch, sother, commoninc),
4998 ((source, sbranch, sother, commoninc),
4997 (dest, dbranch, dother, outgoing)))
4999 (dest, dbranch, dother, outgoing)))
4998
5000
4999 @command('tag',
5001 @command('tag',
5000 [('f', 'force', None, _('force tag')),
5002 [('f', 'force', None, _('force tag')),
5001 ('l', 'local', None, _('make the tag local')),
5003 ('l', 'local', None, _('make the tag local')),
5002 ('r', 'rev', '', _('revision to tag'), _('REV')),
5004 ('r', 'rev', '', _('revision to tag'), _('REV')),
5003 ('', 'remove', None, _('remove a tag')),
5005 ('', 'remove', None, _('remove a tag')),
5004 # -l/--local is already there, commitopts cannot be used
5006 # -l/--local is already there, commitopts cannot be used
5005 ('e', 'edit', None, _('invoke editor on commit messages')),
5007 ('e', 'edit', None, _('invoke editor on commit messages')),
5006 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5008 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5007 ] + commitopts2,
5009 ] + commitopts2,
5008 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5010 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5009 def tag(ui, repo, name1, *names, **opts):
5011 def tag(ui, repo, name1, *names, **opts):
5010 """add one or more tags for the current or given revision
5012 """add one or more tags for the current or given revision
5011
5013
5012 Name a particular revision using <name>.
5014 Name a particular revision using <name>.
5013
5015
5014 Tags are used to name particular revisions of the repository and are
5016 Tags are used to name particular revisions of the repository and are
5015 very useful to compare different revisions, to go back to significant
5017 very useful to compare different revisions, to go back to significant
5016 earlier versions or to mark branch points as releases, etc. Changing
5018 earlier versions or to mark branch points as releases, etc. Changing
5017 an existing tag is normally disallowed; use -f/--force to override.
5019 an existing tag is normally disallowed; use -f/--force to override.
5018
5020
5019 If no revision is given, the parent of the working directory is
5021 If no revision is given, the parent of the working directory is
5020 used.
5022 used.
5021
5023
5022 To facilitate version control, distribution, and merging of tags,
5024 To facilitate version control, distribution, and merging of tags,
5023 they are stored as a file named ".hgtags" which is managed similarly
5025 they are stored as a file named ".hgtags" which is managed similarly
5024 to other project files and can be hand-edited if necessary. This
5026 to other project files and can be hand-edited if necessary. This
5025 also means that tagging creates a new commit. The file
5027 also means that tagging creates a new commit. The file
5026 ".hg/localtags" is used for local tags (not shared among
5028 ".hg/localtags" is used for local tags (not shared among
5027 repositories).
5029 repositories).
5028
5030
5029 Tag commits are usually made at the head of a branch. If the parent
5031 Tag commits are usually made at the head of a branch. If the parent
5030 of the working directory is not a branch head, :hg:`tag` aborts; use
5032 of the working directory is not a branch head, :hg:`tag` aborts; use
5031 -f/--force to force the tag commit to be based on a non-head
5033 -f/--force to force the tag commit to be based on a non-head
5032 changeset.
5034 changeset.
5033
5035
5034 See :hg:`help dates` for a list of formats valid for -d/--date.
5036 See :hg:`help dates` for a list of formats valid for -d/--date.
5035
5037
5036 Since tag names have priority over branch names during revision
5038 Since tag names have priority over branch names during revision
5037 lookup, using an existing branch name as a tag name is discouraged.
5039 lookup, using an existing branch name as a tag name is discouraged.
5038
5040
5039 Returns 0 on success.
5041 Returns 0 on success.
5040 """
5042 """
5041 opts = pycompat.byteskwargs(opts)
5043 opts = pycompat.byteskwargs(opts)
5042 wlock = lock = None
5044 wlock = lock = None
5043 try:
5045 try:
5044 wlock = repo.wlock()
5046 wlock = repo.wlock()
5045 lock = repo.lock()
5047 lock = repo.lock()
5046 rev_ = "."
5048 rev_ = "."
5047 names = [t.strip() for t in (name1,) + names]
5049 names = [t.strip() for t in (name1,) + names]
5048 if len(names) != len(set(names)):
5050 if len(names) != len(set(names)):
5049 raise error.Abort(_('tag names must be unique'))
5051 raise error.Abort(_('tag names must be unique'))
5050 for n in names:
5052 for n in names:
5051 scmutil.checknewlabel(repo, n, 'tag')
5053 scmutil.checknewlabel(repo, n, 'tag')
5052 if not n:
5054 if not n:
5053 raise error.Abort(_('tag names cannot consist entirely of '
5055 raise error.Abort(_('tag names cannot consist entirely of '
5054 'whitespace'))
5056 'whitespace'))
5055 if opts.get('rev') and opts.get('remove'):
5057 if opts.get('rev') and opts.get('remove'):
5056 raise error.Abort(_("--rev and --remove are incompatible"))
5058 raise error.Abort(_("--rev and --remove are incompatible"))
5057 if opts.get('rev'):
5059 if opts.get('rev'):
5058 rev_ = opts['rev']
5060 rev_ = opts['rev']
5059 message = opts.get('message')
5061 message = opts.get('message')
5060 if opts.get('remove'):
5062 if opts.get('remove'):
5061 if opts.get('local'):
5063 if opts.get('local'):
5062 expectedtype = 'local'
5064 expectedtype = 'local'
5063 else:
5065 else:
5064 expectedtype = 'global'
5066 expectedtype = 'global'
5065
5067
5066 for n in names:
5068 for n in names:
5067 if not repo.tagtype(n):
5069 if not repo.tagtype(n):
5068 raise error.Abort(_("tag '%s' does not exist") % n)
5070 raise error.Abort(_("tag '%s' does not exist") % n)
5069 if repo.tagtype(n) != expectedtype:
5071 if repo.tagtype(n) != expectedtype:
5070 if expectedtype == 'global':
5072 if expectedtype == 'global':
5071 raise error.Abort(_("tag '%s' is not a global tag") % n)
5073 raise error.Abort(_("tag '%s' is not a global tag") % n)
5072 else:
5074 else:
5073 raise error.Abort(_("tag '%s' is not a local tag") % n)
5075 raise error.Abort(_("tag '%s' is not a local tag") % n)
5074 rev_ = 'null'
5076 rev_ = 'null'
5075 if not message:
5077 if not message:
5076 # we don't translate commit messages
5078 # we don't translate commit messages
5077 message = 'Removed tag %s' % ', '.join(names)
5079 message = 'Removed tag %s' % ', '.join(names)
5078 elif not opts.get('force'):
5080 elif not opts.get('force'):
5079 for n in names:
5081 for n in names:
5080 if n in repo.tags():
5082 if n in repo.tags():
5081 raise error.Abort(_("tag '%s' already exists "
5083 raise error.Abort(_("tag '%s' already exists "
5082 "(use -f to force)") % n)
5084 "(use -f to force)") % n)
5083 if not opts.get('local'):
5085 if not opts.get('local'):
5084 p1, p2 = repo.dirstate.parents()
5086 p1, p2 = repo.dirstate.parents()
5085 if p2 != nullid:
5087 if p2 != nullid:
5086 raise error.Abort(_('uncommitted merge'))
5088 raise error.Abort(_('uncommitted merge'))
5087 bheads = repo.branchheads()
5089 bheads = repo.branchheads()
5088 if not opts.get('force') and bheads and p1 not in bheads:
5090 if not opts.get('force') and bheads and p1 not in bheads:
5089 raise error.Abort(_('working directory is not at a branch head '
5091 raise error.Abort(_('working directory is not at a branch head '
5090 '(use -f to force)'))
5092 '(use -f to force)'))
5091 r = scmutil.revsingle(repo, rev_).node()
5093 r = scmutil.revsingle(repo, rev_).node()
5092
5094
5093 if not message:
5095 if not message:
5094 # we don't translate commit messages
5096 # we don't translate commit messages
5095 message = ('Added tag %s for changeset %s' %
5097 message = ('Added tag %s for changeset %s' %
5096 (', '.join(names), short(r)))
5098 (', '.join(names), short(r)))
5097
5099
5098 date = opts.get('date')
5100 date = opts.get('date')
5099 if date:
5101 if date:
5100 date = util.parsedate(date)
5102 date = util.parsedate(date)
5101
5103
5102 if opts.get('remove'):
5104 if opts.get('remove'):
5103 editform = 'tag.remove'
5105 editform = 'tag.remove'
5104 else:
5106 else:
5105 editform = 'tag.add'
5107 editform = 'tag.add'
5106 editor = cmdutil.getcommiteditor(editform=editform,
5108 editor = cmdutil.getcommiteditor(editform=editform,
5107 **pycompat.strkwargs(opts))
5109 **pycompat.strkwargs(opts))
5108
5110
5109 # don't allow tagging the null rev
5111 # don't allow tagging the null rev
5110 if (not opts.get('remove') and
5112 if (not opts.get('remove') and
5111 scmutil.revsingle(repo, rev_).rev() == nullrev):
5113 scmutil.revsingle(repo, rev_).rev() == nullrev):
5112 raise error.Abort(_("cannot tag null revision"))
5114 raise error.Abort(_("cannot tag null revision"))
5113
5115
5114 tagsmod.tag(repo, names, r, message, opts.get('local'),
5116 tagsmod.tag(repo, names, r, message, opts.get('local'),
5115 opts.get('user'), date, editor=editor)
5117 opts.get('user'), date, editor=editor)
5116 finally:
5118 finally:
5117 release(lock, wlock)
5119 release(lock, wlock)
5118
5120
5119 @command('tags', formatteropts, '')
5121 @command('tags', formatteropts, '')
5120 def tags(ui, repo, **opts):
5122 def tags(ui, repo, **opts):
5121 """list repository tags
5123 """list repository tags
5122
5124
5123 This lists both regular and local tags. When the -v/--verbose
5125 This lists both regular and local tags. When the -v/--verbose
5124 switch is used, a third column "local" is printed for local tags.
5126 switch is used, a third column "local" is printed for local tags.
5125 When the -q/--quiet switch is used, only the tag name is printed.
5127 When the -q/--quiet switch is used, only the tag name is printed.
5126
5128
5127 Returns 0 on success.
5129 Returns 0 on success.
5128 """
5130 """
5129
5131
5130 opts = pycompat.byteskwargs(opts)
5132 opts = pycompat.byteskwargs(opts)
5131 ui.pager('tags')
5133 ui.pager('tags')
5132 fm = ui.formatter('tags', opts)
5134 fm = ui.formatter('tags', opts)
5133 hexfunc = fm.hexfunc
5135 hexfunc = fm.hexfunc
5134 tagtype = ""
5136 tagtype = ""
5135
5137
5136 for t, n in reversed(repo.tagslist()):
5138 for t, n in reversed(repo.tagslist()):
5137 hn = hexfunc(n)
5139 hn = hexfunc(n)
5138 label = 'tags.normal'
5140 label = 'tags.normal'
5139 tagtype = ''
5141 tagtype = ''
5140 if repo.tagtype(t) == 'local':
5142 if repo.tagtype(t) == 'local':
5141 label = 'tags.local'
5143 label = 'tags.local'
5142 tagtype = 'local'
5144 tagtype = 'local'
5143
5145
5144 fm.startitem()
5146 fm.startitem()
5145 fm.write('tag', '%s', t, label=label)
5147 fm.write('tag', '%s', t, label=label)
5146 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5148 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5147 fm.condwrite(not ui.quiet, 'rev node', fmt,
5149 fm.condwrite(not ui.quiet, 'rev node', fmt,
5148 repo.changelog.rev(n), hn, label=label)
5150 repo.changelog.rev(n), hn, label=label)
5149 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5151 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5150 tagtype, label=label)
5152 tagtype, label=label)
5151 fm.plain('\n')
5153 fm.plain('\n')
5152 fm.end()
5154 fm.end()
5153
5155
5154 @command('tip',
5156 @command('tip',
5155 [('p', 'patch', None, _('show patch')),
5157 [('p', 'patch', None, _('show patch')),
5156 ('g', 'git', None, _('use git extended diff format')),
5158 ('g', 'git', None, _('use git extended diff format')),
5157 ] + templateopts,
5159 ] + templateopts,
5158 _('[-p] [-g]'))
5160 _('[-p] [-g]'))
5159 def tip(ui, repo, **opts):
5161 def tip(ui, repo, **opts):
5160 """show the tip revision (DEPRECATED)
5162 """show the tip revision (DEPRECATED)
5161
5163
5162 The tip revision (usually just called the tip) is the changeset
5164 The tip revision (usually just called the tip) is the changeset
5163 most recently added to the repository (and therefore the most
5165 most recently added to the repository (and therefore the most
5164 recently changed head).
5166 recently changed head).
5165
5167
5166 If you have just made a commit, that commit will be the tip. If
5168 If you have just made a commit, that commit will be the tip. If
5167 you have just pulled changes from another repository, the tip of
5169 you have just pulled changes from another repository, the tip of
5168 that repository becomes the current tip. The "tip" tag is special
5170 that repository becomes the current tip. The "tip" tag is special
5169 and cannot be renamed or assigned to a different changeset.
5171 and cannot be renamed or assigned to a different changeset.
5170
5172
5171 This command is deprecated, please use :hg:`heads` instead.
5173 This command is deprecated, please use :hg:`heads` instead.
5172
5174
5173 Returns 0 on success.
5175 Returns 0 on success.
5174 """
5176 """
5175 opts = pycompat.byteskwargs(opts)
5177 opts = pycompat.byteskwargs(opts)
5176 displayer = cmdutil.show_changeset(ui, repo, opts)
5178 displayer = cmdutil.show_changeset(ui, repo, opts)
5177 displayer.show(repo['tip'])
5179 displayer.show(repo['tip'])
5178 displayer.close()
5180 displayer.close()
5179
5181
5180 @command('unbundle',
5182 @command('unbundle',
5181 [('u', 'update', None,
5183 [('u', 'update', None,
5182 _('update to new branch head if changesets were unbundled'))],
5184 _('update to new branch head if changesets were unbundled'))],
5183 _('[-u] FILE...'))
5185 _('[-u] FILE...'))
5184 def unbundle(ui, repo, fname1, *fnames, **opts):
5186 def unbundle(ui, repo, fname1, *fnames, **opts):
5185 """apply one or more bundle files
5187 """apply one or more bundle files
5186
5188
5187 Apply one or more bundle files generated by :hg:`bundle`.
5189 Apply one or more bundle files generated by :hg:`bundle`.
5188
5190
5189 Returns 0 on success, 1 if an update has unresolved files.
5191 Returns 0 on success, 1 if an update has unresolved files.
5190 """
5192 """
5191 fnames = (fname1,) + fnames
5193 fnames = (fname1,) + fnames
5192
5194
5193 with repo.lock():
5195 with repo.lock():
5194 for fname in fnames:
5196 for fname in fnames:
5195 f = hg.openpath(ui, fname)
5197 f = hg.openpath(ui, fname)
5196 gen = exchange.readbundle(ui, f, fname)
5198 gen = exchange.readbundle(ui, f, fname)
5197 if isinstance(gen, streamclone.streamcloneapplier):
5199 if isinstance(gen, streamclone.streamcloneapplier):
5198 raise error.Abort(
5200 raise error.Abort(
5199 _('packed bundles cannot be applied with '
5201 _('packed bundles cannot be applied with '
5200 '"hg unbundle"'),
5202 '"hg unbundle"'),
5201 hint=_('use "hg debugapplystreamclonebundle"'))
5203 hint=_('use "hg debugapplystreamclonebundle"'))
5202 url = 'bundle:' + fname
5204 url = 'bundle:' + fname
5203 if isinstance(gen, bundle2.unbundle20):
5205 if isinstance(gen, bundle2.unbundle20):
5204 with repo.transaction('unbundle') as tr:
5206 with repo.transaction('unbundle') as tr:
5205 try:
5207 try:
5206 op = bundle2.applybundle(repo, gen, tr,
5208 op = bundle2.applybundle(repo, gen, tr,
5207 source='unbundle',
5209 source='unbundle',
5208 url=url)
5210 url=url)
5209 except error.BundleUnknownFeatureError as exc:
5211 except error.BundleUnknownFeatureError as exc:
5210 raise error.Abort(
5212 raise error.Abort(
5211 _('%s: unknown bundle feature, %s') % (fname, exc),
5213 _('%s: unknown bundle feature, %s') % (fname, exc),
5212 hint=_("see https://mercurial-scm.org/"
5214 hint=_("see https://mercurial-scm.org/"
5213 "wiki/BundleFeature for more "
5215 "wiki/BundleFeature for more "
5214 "information"))
5216 "information"))
5215 changes = [r.get('return', 0)
5217 changes = [r.get('return', 0)
5216 for r in op.records['changegroup']]
5218 for r in op.records['changegroup']]
5217 modheads = changegroup.combineresults(changes)
5219 modheads = changegroup.combineresults(changes)
5218 else:
5220 else:
5219 txnname = 'unbundle\n%s' % util.hidepassword(url)
5221 txnname = 'unbundle\n%s' % util.hidepassword(url)
5220 with repo.transaction(txnname) as tr:
5222 with repo.transaction(txnname) as tr:
5221 modheads, addednodes = gen.apply(repo, tr, 'unbundle', url)
5223 modheads, addednodes = gen.apply(repo, tr, 'unbundle', url)
5222
5224
5223 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5225 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5224
5226
5225 @command('^update|up|checkout|co',
5227 @command('^update|up|checkout|co',
5226 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5228 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5227 ('c', 'check', None, _('require clean working directory')),
5229 ('c', 'check', None, _('require clean working directory')),
5228 ('m', 'merge', None, _('merge uncommitted changes')),
5230 ('m', 'merge', None, _('merge uncommitted changes')),
5229 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5231 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5230 ('r', 'rev', '', _('revision'), _('REV'))
5232 ('r', 'rev', '', _('revision'), _('REV'))
5231 ] + mergetoolopts,
5233 ] + mergetoolopts,
5232 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5234 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5233 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5235 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5234 merge=None, tool=None):
5236 merge=None, tool=None):
5235 """update working directory (or switch revisions)
5237 """update working directory (or switch revisions)
5236
5238
5237 Update the repository's working directory to the specified
5239 Update the repository's working directory to the specified
5238 changeset. If no changeset is specified, update to the tip of the
5240 changeset. If no changeset is specified, update to the tip of the
5239 current named branch and move the active bookmark (see :hg:`help
5241 current named branch and move the active bookmark (see :hg:`help
5240 bookmarks`).
5242 bookmarks`).
5241
5243
5242 Update sets the working directory's parent revision to the specified
5244 Update sets the working directory's parent revision to the specified
5243 changeset (see :hg:`help parents`).
5245 changeset (see :hg:`help parents`).
5244
5246
5245 If the changeset is not a descendant or ancestor of the working
5247 If the changeset is not a descendant or ancestor of the working
5246 directory's parent and there are uncommitted changes, the update is
5248 directory's parent and there are uncommitted changes, the update is
5247 aborted. With the -c/--check option, the working directory is checked
5249 aborted. With the -c/--check option, the working directory is checked
5248 for uncommitted changes; if none are found, the working directory is
5250 for uncommitted changes; if none are found, the working directory is
5249 updated to the specified changeset.
5251 updated to the specified changeset.
5250
5252
5251 .. container:: verbose
5253 .. container:: verbose
5252
5254
5253 The -C/--clean, -c/--check, and -m/--merge options control what
5255 The -C/--clean, -c/--check, and -m/--merge options control what
5254 happens if the working directory contains uncommitted changes.
5256 happens if the working directory contains uncommitted changes.
5255 At most of one of them can be specified.
5257 At most of one of them can be specified.
5256
5258
5257 1. If no option is specified, and if
5259 1. If no option is specified, and if
5258 the requested changeset is an ancestor or descendant of
5260 the requested changeset is an ancestor or descendant of
5259 the working directory's parent, the uncommitted changes
5261 the working directory's parent, the uncommitted changes
5260 are merged into the requested changeset and the merged
5262 are merged into the requested changeset and the merged
5261 result is left uncommitted. If the requested changeset is
5263 result is left uncommitted. If the requested changeset is
5262 not an ancestor or descendant (that is, it is on another
5264 not an ancestor or descendant (that is, it is on another
5263 branch), the update is aborted and the uncommitted changes
5265 branch), the update is aborted and the uncommitted changes
5264 are preserved.
5266 are preserved.
5265
5267
5266 2. With the -m/--merge option, the update is allowed even if the
5268 2. With the -m/--merge option, the update is allowed even if the
5267 requested changeset is not an ancestor or descendant of
5269 requested changeset is not an ancestor or descendant of
5268 the working directory's parent.
5270 the working directory's parent.
5269
5271
5270 3. With the -c/--check option, the update is aborted and the
5272 3. With the -c/--check option, the update is aborted and the
5271 uncommitted changes are preserved.
5273 uncommitted changes are preserved.
5272
5274
5273 4. With the -C/--clean option, uncommitted changes are discarded and
5275 4. With the -C/--clean option, uncommitted changes are discarded and
5274 the working directory is updated to the requested changeset.
5276 the working directory is updated to the requested changeset.
5275
5277
5276 To cancel an uncommitted merge (and lose your changes), use
5278 To cancel an uncommitted merge (and lose your changes), use
5277 :hg:`update --clean .`.
5279 :hg:`update --clean .`.
5278
5280
5279 Use null as the changeset to remove the working directory (like
5281 Use null as the changeset to remove the working directory (like
5280 :hg:`clone -U`).
5282 :hg:`clone -U`).
5281
5283
5282 If you want to revert just one file to an older revision, use
5284 If you want to revert just one file to an older revision, use
5283 :hg:`revert [-r REV] NAME`.
5285 :hg:`revert [-r REV] NAME`.
5284
5286
5285 See :hg:`help dates` for a list of formats valid for -d/--date.
5287 See :hg:`help dates` for a list of formats valid for -d/--date.
5286
5288
5287 Returns 0 on success, 1 if there are unresolved files.
5289 Returns 0 on success, 1 if there are unresolved files.
5288 """
5290 """
5289 if rev and node:
5291 if rev and node:
5290 raise error.Abort(_("please specify just one revision"))
5292 raise error.Abort(_("please specify just one revision"))
5291
5293
5292 if ui.configbool('commands', 'update.requiredest'):
5294 if ui.configbool('commands', 'update.requiredest'):
5293 if not node and not rev and not date:
5295 if not node and not rev and not date:
5294 raise error.Abort(_('you must specify a destination'),
5296 raise error.Abort(_('you must specify a destination'),
5295 hint=_('for example: hg update ".::"'))
5297 hint=_('for example: hg update ".::"'))
5296
5298
5297 if rev is None or rev == '':
5299 if rev is None or rev == '':
5298 rev = node
5300 rev = node
5299
5301
5300 if date and rev is not None:
5302 if date and rev is not None:
5301 raise error.Abort(_("you can't specify a revision and a date"))
5303 raise error.Abort(_("you can't specify a revision and a date"))
5302
5304
5303 if len([x for x in (clean, check, merge) if x]) > 1:
5305 if len([x for x in (clean, check, merge) if x]) > 1:
5304 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5306 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5305 "or -m/merge"))
5307 "or -m/merge"))
5306
5308
5307 updatecheck = None
5309 updatecheck = None
5308 if check:
5310 if check:
5309 updatecheck = 'abort'
5311 updatecheck = 'abort'
5310 elif merge:
5312 elif merge:
5311 updatecheck = 'none'
5313 updatecheck = 'none'
5312
5314
5313 with repo.wlock():
5315 with repo.wlock():
5314 cmdutil.clearunfinished(repo)
5316 cmdutil.clearunfinished(repo)
5315
5317
5316 if date:
5318 if date:
5317 rev = cmdutil.finddate(ui, repo, date)
5319 rev = cmdutil.finddate(ui, repo, date)
5318
5320
5319 # if we defined a bookmark, we have to remember the original name
5321 # if we defined a bookmark, we have to remember the original name
5320 brev = rev
5322 brev = rev
5321 rev = scmutil.revsingle(repo, rev, rev).rev()
5323 rev = scmutil.revsingle(repo, rev, rev).rev()
5322
5324
5323 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5325 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5324
5326
5325 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5327 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5326 updatecheck=updatecheck)
5328 updatecheck=updatecheck)
5327
5329
5328 @command('verify', [])
5330 @command('verify', [])
5329 def verify(ui, repo):
5331 def verify(ui, repo):
5330 """verify the integrity of the repository
5332 """verify the integrity of the repository
5331
5333
5332 Verify the integrity of the current repository.
5334 Verify the integrity of the current repository.
5333
5335
5334 This will perform an extensive check of the repository's
5336 This will perform an extensive check of the repository's
5335 integrity, validating the hashes and checksums of each entry in
5337 integrity, validating the hashes and checksums of each entry in
5336 the changelog, manifest, and tracked files, as well as the
5338 the changelog, manifest, and tracked files, as well as the
5337 integrity of their crosslinks and indices.
5339 integrity of their crosslinks and indices.
5338
5340
5339 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5341 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5340 for more information about recovery from corruption of the
5342 for more information about recovery from corruption of the
5341 repository.
5343 repository.
5342
5344
5343 Returns 0 on success, 1 if errors are encountered.
5345 Returns 0 on success, 1 if errors are encountered.
5344 """
5346 """
5345 return hg.verify(repo)
5347 return hg.verify(repo)
5346
5348
5347 @command('version', [] + formatteropts, norepo=True)
5349 @command('version', [] + formatteropts, norepo=True)
5348 def version_(ui, **opts):
5350 def version_(ui, **opts):
5349 """output version and copyright information"""
5351 """output version and copyright information"""
5350 opts = pycompat.byteskwargs(opts)
5352 opts = pycompat.byteskwargs(opts)
5351 if ui.verbose:
5353 if ui.verbose:
5352 ui.pager('version')
5354 ui.pager('version')
5353 fm = ui.formatter("version", opts)
5355 fm = ui.formatter("version", opts)
5354 fm.startitem()
5356 fm.startitem()
5355 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5357 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5356 util.version())
5358 util.version())
5357 license = _(
5359 license = _(
5358 "(see https://mercurial-scm.org for more information)\n"
5360 "(see https://mercurial-scm.org for more information)\n"
5359 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5361 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5360 "This is free software; see the source for copying conditions. "
5362 "This is free software; see the source for copying conditions. "
5361 "There is NO\nwarranty; "
5363 "There is NO\nwarranty; "
5362 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5364 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5363 )
5365 )
5364 if not ui.quiet:
5366 if not ui.quiet:
5365 fm.plain(license)
5367 fm.plain(license)
5366
5368
5367 if ui.verbose:
5369 if ui.verbose:
5368 fm.plain(_("\nEnabled extensions:\n\n"))
5370 fm.plain(_("\nEnabled extensions:\n\n"))
5369 # format names and versions into columns
5371 # format names and versions into columns
5370 names = []
5372 names = []
5371 vers = []
5373 vers = []
5372 isinternals = []
5374 isinternals = []
5373 for name, module in extensions.extensions():
5375 for name, module in extensions.extensions():
5374 names.append(name)
5376 names.append(name)
5375 vers.append(extensions.moduleversion(module) or None)
5377 vers.append(extensions.moduleversion(module) or None)
5376 isinternals.append(extensions.ismoduleinternal(module))
5378 isinternals.append(extensions.ismoduleinternal(module))
5377 fn = fm.nested("extensions")
5379 fn = fm.nested("extensions")
5378 if names:
5380 if names:
5379 namefmt = " %%-%ds " % max(len(n) for n in names)
5381 namefmt = " %%-%ds " % max(len(n) for n in names)
5380 places = [_("external"), _("internal")]
5382 places = [_("external"), _("internal")]
5381 for n, v, p in zip(names, vers, isinternals):
5383 for n, v, p in zip(names, vers, isinternals):
5382 fn.startitem()
5384 fn.startitem()
5383 fn.condwrite(ui.verbose, "name", namefmt, n)
5385 fn.condwrite(ui.verbose, "name", namefmt, n)
5384 if ui.verbose:
5386 if ui.verbose:
5385 fn.plain("%s " % places[p])
5387 fn.plain("%s " % places[p])
5386 fn.data(bundled=p)
5388 fn.data(bundled=p)
5387 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5389 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5388 if ui.verbose:
5390 if ui.verbose:
5389 fn.plain("\n")
5391 fn.plain("\n")
5390 fn.end()
5392 fn.end()
5391 fm.end()
5393 fm.end()
5392
5394
5393 def loadcmdtable(ui, name, cmdtable):
5395 def loadcmdtable(ui, name, cmdtable):
5394 """Load command functions from specified cmdtable
5396 """Load command functions from specified cmdtable
5395 """
5397 """
5396 overrides = [cmd for cmd in cmdtable if cmd in table]
5398 overrides = [cmd for cmd in cmdtable if cmd in table]
5397 if overrides:
5399 if overrides:
5398 ui.warn(_("extension '%s' overrides commands: %s\n")
5400 ui.warn(_("extension '%s' overrides commands: %s\n")
5399 % (name, " ".join(overrides)))
5401 % (name, " ".join(overrides)))
5400 table.update(cmdtable)
5402 table.update(cmdtable)
@@ -1,2205 +1,2216 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 phases,
50 phases,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 command = registrar.command()
74 command = registrar.command()
75
75
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 def debugancestor(ui, repo, *args):
77 def debugancestor(ui, repo, *args):
78 """find the ancestor revision of two revisions in a given index"""
78 """find the ancestor revision of two revisions in a given index"""
79 if len(args) == 3:
79 if len(args) == 3:
80 index, rev1, rev2 = args
80 index, rev1, rev2 = args
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 lookup = r.lookup
82 lookup = r.lookup
83 elif len(args) == 2:
83 elif len(args) == 2:
84 if not repo:
84 if not repo:
85 raise error.Abort(_('there is no Mercurial repository here '
85 raise error.Abort(_('there is no Mercurial repository here '
86 '(.hg not found)'))
86 '(.hg not found)'))
87 rev1, rev2 = args
87 rev1, rev2 = args
88 r = repo.changelog
88 r = repo.changelog
89 lookup = repo.lookup
89 lookup = repo.lookup
90 else:
90 else:
91 raise error.Abort(_('either two or three arguments required'))
91 raise error.Abort(_('either two or three arguments required'))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94
94
95 @command('debugapplystreamclonebundle', [], 'FILE')
95 @command('debugapplystreamclonebundle', [], 'FILE')
96 def debugapplystreamclonebundle(ui, repo, fname):
96 def debugapplystreamclonebundle(ui, repo, fname):
97 """apply a stream clone bundle file"""
97 """apply a stream clone bundle file"""
98 f = hg.openpath(ui, fname)
98 f = hg.openpath(ui, fname)
99 gen = exchange.readbundle(ui, f, fname)
99 gen = exchange.readbundle(ui, f, fname)
100 gen.apply(repo)
100 gen.apply(repo)
101
101
102 @command('debugbuilddag',
102 @command('debugbuilddag',
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 ('n', 'new-file', None, _('add new file at each rev'))],
105 ('n', 'new-file', None, _('add new file at each rev'))],
106 _('[OPTION]... [TEXT]'))
106 _('[OPTION]... [TEXT]'))
107 def debugbuilddag(ui, repo, text=None,
107 def debugbuilddag(ui, repo, text=None,
108 mergeable_file=False,
108 mergeable_file=False,
109 overwritten_file=False,
109 overwritten_file=False,
110 new_file=False):
110 new_file=False):
111 """builds a repo with a given DAG from scratch in the current empty repo
111 """builds a repo with a given DAG from scratch in the current empty repo
112
112
113 The description of the DAG is read from stdin if not given on the
113 The description of the DAG is read from stdin if not given on the
114 command line.
114 command line.
115
115
116 Elements:
116 Elements:
117
117
118 - "+n" is a linear run of n nodes based on the current default parent
118 - "+n" is a linear run of n nodes based on the current default parent
119 - "." is a single node based on the current default parent
119 - "." is a single node based on the current default parent
120 - "$" resets the default parent to null (implied at the start);
120 - "$" resets the default parent to null (implied at the start);
121 otherwise the default parent is always the last node created
121 otherwise the default parent is always the last node created
122 - "<p" sets the default parent to the backref p
122 - "<p" sets the default parent to the backref p
123 - "*p" is a fork at parent p, which is a backref
123 - "*p" is a fork at parent p, which is a backref
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 - "/p2" is a merge of the preceding node and p2
125 - "/p2" is a merge of the preceding node and p2
126 - ":tag" defines a local tag for the preceding node
126 - ":tag" defines a local tag for the preceding node
127 - "@branch" sets the named branch for subsequent nodes
127 - "@branch" sets the named branch for subsequent nodes
128 - "#...\\n" is a comment up to the end of the line
128 - "#...\\n" is a comment up to the end of the line
129
129
130 Whitespace between the above elements is ignored.
130 Whitespace between the above elements is ignored.
131
131
132 A backref is either
132 A backref is either
133
133
134 - a number n, which references the node curr-n, where curr is the current
134 - a number n, which references the node curr-n, where curr is the current
135 node, or
135 node, or
136 - the name of a local tag you placed earlier using ":tag", or
136 - the name of a local tag you placed earlier using ":tag", or
137 - empty to denote the default parent.
137 - empty to denote the default parent.
138
138
139 All string valued-elements are either strictly alphanumeric, or must
139 All string valued-elements are either strictly alphanumeric, or must
140 be enclosed in double quotes ("..."), with "\\" as escape character.
140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 """
141 """
142
142
143 if text is None:
143 if text is None:
144 ui.status(_("reading DAG from stdin\n"))
144 ui.status(_("reading DAG from stdin\n"))
145 text = ui.fin.read()
145 text = ui.fin.read()
146
146
147 cl = repo.changelog
147 cl = repo.changelog
148 if len(cl) > 0:
148 if len(cl) > 0:
149 raise error.Abort(_('repository is not empty'))
149 raise error.Abort(_('repository is not empty'))
150
150
151 # determine number of revs in DAG
151 # determine number of revs in DAG
152 total = 0
152 total = 0
153 for type, data in dagparser.parsedag(text):
153 for type, data in dagparser.parsedag(text):
154 if type == 'n':
154 if type == 'n':
155 total += 1
155 total += 1
156
156
157 if mergeable_file:
157 if mergeable_file:
158 linesperrev = 2
158 linesperrev = 2
159 # make a file with k lines per rev
159 # make a file with k lines per rev
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 initialmergedlines.append("")
161 initialmergedlines.append("")
162
162
163 tags = []
163 tags = []
164
164
165 wlock = lock = tr = None
165 wlock = lock = tr = None
166 try:
166 try:
167 wlock = repo.wlock()
167 wlock = repo.wlock()
168 lock = repo.lock()
168 lock = repo.lock()
169 tr = repo.transaction("builddag")
169 tr = repo.transaction("builddag")
170
170
171 at = -1
171 at = -1
172 atbranch = 'default'
172 atbranch = 'default'
173 nodeids = []
173 nodeids = []
174 id = 0
174 id = 0
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 for type, data in dagparser.parsedag(text):
176 for type, data in dagparser.parsedag(text):
177 if type == 'n':
177 if type == 'n':
178 ui.note(('node %s\n' % str(data)))
178 ui.note(('node %s\n' % str(data)))
179 id, ps = data
179 id, ps = data
180
180
181 files = []
181 files = []
182 fctxs = {}
182 fctxs = {}
183
183
184 p2 = None
184 p2 = None
185 if mergeable_file:
185 if mergeable_file:
186 fn = "mf"
186 fn = "mf"
187 p1 = repo[ps[0]]
187 p1 = repo[ps[0]]
188 if len(ps) > 1:
188 if len(ps) > 1:
189 p2 = repo[ps[1]]
189 p2 = repo[ps[1]]
190 pa = p1.ancestor(p2)
190 pa = p1.ancestor(p2)
191 base, local, other = [x[fn].data() for x in (pa, p1,
191 base, local, other = [x[fn].data() for x in (pa, p1,
192 p2)]
192 p2)]
193 m3 = simplemerge.Merge3Text(base, local, other)
193 m3 = simplemerge.Merge3Text(base, local, other)
194 ml = [l.strip() for l in m3.merge_lines()]
194 ml = [l.strip() for l in m3.merge_lines()]
195 ml.append("")
195 ml.append("")
196 elif at > 0:
196 elif at > 0:
197 ml = p1[fn].data().split("\n")
197 ml = p1[fn].data().split("\n")
198 else:
198 else:
199 ml = initialmergedlines
199 ml = initialmergedlines
200 ml[id * linesperrev] += " r%i" % id
200 ml[id * linesperrev] += " r%i" % id
201 mergedtext = "\n".join(ml)
201 mergedtext = "\n".join(ml)
202 files.append(fn)
202 files.append(fn)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204
204
205 if overwritten_file:
205 if overwritten_file:
206 fn = "of"
206 fn = "of"
207 files.append(fn)
207 files.append(fn)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209
209
210 if new_file:
210 if new_file:
211 fn = "nf%i" % id
211 fn = "nf%i" % id
212 files.append(fn)
212 files.append(fn)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 if len(ps) > 1:
214 if len(ps) > 1:
215 if not p2:
215 if not p2:
216 p2 = repo[ps[1]]
216 p2 = repo[ps[1]]
217 for fn in p2:
217 for fn in p2:
218 if fn.startswith("nf"):
218 if fn.startswith("nf"):
219 files.append(fn)
219 files.append(fn)
220 fctxs[fn] = p2[fn]
220 fctxs[fn] = p2[fn]
221
221
222 def fctxfn(repo, cx, path):
222 def fctxfn(repo, cx, path):
223 return fctxs.get(path)
223 return fctxs.get(path)
224
224
225 if len(ps) == 0 or ps[0] < 0:
225 if len(ps) == 0 or ps[0] < 0:
226 pars = [None, None]
226 pars = [None, None]
227 elif len(ps) == 1:
227 elif len(ps) == 1:
228 pars = [nodeids[ps[0]], None]
228 pars = [nodeids[ps[0]], None]
229 else:
229 else:
230 pars = [nodeids[p] for p in ps]
230 pars = [nodeids[p] for p in ps]
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 date=(id, 0),
232 date=(id, 0),
233 user="debugbuilddag",
233 user="debugbuilddag",
234 extra={'branch': atbranch})
234 extra={'branch': atbranch})
235 nodeid = repo.commitctx(cx)
235 nodeid = repo.commitctx(cx)
236 nodeids.append(nodeid)
236 nodeids.append(nodeid)
237 at = id
237 at = id
238 elif type == 'l':
238 elif type == 'l':
239 id, name = data
239 id, name = data
240 ui.note(('tag %s\n' % name))
240 ui.note(('tag %s\n' % name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 elif type == 'a':
242 elif type == 'a':
243 ui.note(('branch %s\n' % data))
243 ui.note(('branch %s\n' % data))
244 atbranch = data
244 atbranch = data
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 tr.close()
246 tr.close()
247
247
248 if tags:
248 if tags:
249 repo.vfs.write("localtags", "".join(tags))
249 repo.vfs.write("localtags", "".join(tags))
250 finally:
250 finally:
251 ui.progress(_('building'), None)
251 ui.progress(_('building'), None)
252 release(tr, lock, wlock)
252 release(tr, lock, wlock)
253
253
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the nodes of a changegroup; with 'all', dump full chunk details"""
    pad = ' ' * indent
    if not all:
        # terse mode: one changelog node per line
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        gen.changelogheader()
        prev = None
        for chunkdata in iter(lambda: gen.deltachunk(prev), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (pad, hex(node)))
            prev = node
        return

    ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
             % pad)

    def showchunks(named):
        # walk one delta group, printing every chunk's metadata
        ui.write("\n%s%s\n" % (pad, named))
        prev = None
        for chunkdata in iter(lambda: gen.deltachunk(prev), {}):
            hashes = tuple(hex(chunkdata[key])
                           for key in ('node', 'p1', 'p2', 'cs', 'deltabase'))
            ui.write("%s%s %s %s %s %s %s\n" %
                     ((pad,) + hashes + (len(chunkdata['delta']),)))
            prev = chunkdata['node']

    gen.changelogheader()
    showchunks("changelog")
    gen.manifestheader()
    showchunks("manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata['filename'])
291
291
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report the unrecognized format instead of propagating
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %s (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsolete.marker(None, rawmarker))
    fm.end()
313
313
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    Decodes a 'phase-heads' bundle2 part payload and prints one
    '<hex node> <phase name>' line per head, grouped by phase.
    (The previous docstring was copy-pasted from _debugobsmarkers and
    wrongly claimed this displayed obsolescence markers.)
    """
    indent_string = ' ' * indent
    headsbyphase = bundle2._readphaseheads(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
322
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering
        if wanted and part.type not in wanted:
            continue
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        # part types are mutually exclusive, so an elif chain suffices
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
330
341
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only needs the bundle header, not the payload
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
349
360
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the working
    directory's parents, warning about each inconsistency, then aborts if
    any were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Do not name this local 'error': that shadowed the 'error' module,
        # making 'error.Abort' below raise AttributeError instead of Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
377
388
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured styles; otherwise list raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
388
399
def _debugdisplaycolor(ui):
    """print every color/effect label known to a scratch copy of the ui"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui):
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[len('color.'):]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[len('terminfo.'):]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    for colorname, label in sorted(ui._styles.items(),
                                   key=lambda i: ('_' in i[0], i[0], i[1])):
        ui.write(('%s\n') % colorname, label=label)
406
417
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it maps to"""
    ui.write(_('available style:\n'))
    if not ui._styles:
        # max() below would raise ValueError on an empty style table
        return
    # column width so the effect lists line up
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
418
429
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)
    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436
447
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # emit the DAG of an arbitrary revlog index file
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        wantedrevs = set(int(r) for r in revs)

        def events():
            for r in rlog:
                yield 'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in wantedrevs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # emit the DAG of the repository's changelog
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map revision -> list of tag names pointing at it
            labels = {}
            for name, node in repo.tags().items():
                labels.setdefault(cl.rev(node), []).append(name)

        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    for name in labels.get(r, []):
                        yield 'l', (r, name)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
499
510
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if any(opts.get(o) for o in ('changelog', 'manifest', 'dir')):
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
514
525
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the less common date formats
    parsed = (util.parsedate(date, util.extendeddateformats)
              if opts["extended"] else util.parsedate(date))
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
530
541
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def deltatypeof(rev, entry):
        # classify what the delta for 'rev' was computed against
        base = entry[3]
        if not generaldelta:
            return 'base' if base == rev else 'prev'
        if base == entry[5]:
            return 'p1'
        if base == entry[6]:
            return 'p2'
        if base == rev - 1:
            return 'prev'
        if base == rev:
            return 'base'
        return 'other'

    def revinfo(rev):
        # gather per-revision stats: sizes, delta role, and full chain
        entry = index[rev]
        compsize = entry[1]
        uncompsize = entry[2]
        deltatype = deltatypeof(rev, entry)
        chain = r._deltachain(rev)[0]
        chainsize = sum(index[iterrev][1] for iterrev in chain)
        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains by order of first appearance of their base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # single-element chain: this revision is its own base
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
631
642
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""
    nodates = opts.get('nodates')
    if opts.get('datesort'):
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent is (state, mode, size, mtime); -1 mtime means "unset"
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662
673
663 @command('debugdiscovery',
674 @command('debugdiscovery',
664 [('', 'old', None, _('use old-style discovery')),
675 [('', 'old', None, _('use old-style discovery')),
665 ('', 'nonheads', None,
676 ('', 'nonheads', None,
666 _('use old-style discovery with non-heads included')),
677 _('use old-style discovery with non-heads included')),
667 ] + cmdutil.remoteopts,
678 ] + cmdutil.remoteopts,
668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
679 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 def debugdiscovery(ui, repo, remoteurl="default", **opts):
680 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 """runs the changeset discovery protocol in isolation"""
681 """runs the changeset discovery protocol in isolation"""
671 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
682 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 opts.get('branch'))
683 opts.get('branch'))
673 remote = hg.peer(repo, opts, remoteurl)
684 remote = hg.peer(repo, opts, remoteurl)
674 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
685 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675
686
676 # make sure tests are repeatable
687 # make sure tests are repeatable
677 random.seed(12323)
688 random.seed(12323)
678
689
679 def doit(localheads, remoteheads, remote=remote):
690 def doit(localheads, remoteheads, remote=remote):
680 if opts.get('old'):
691 if opts.get('old'):
681 if localheads:
692 if localheads:
682 raise error.Abort('cannot use localheads with old style '
693 raise error.Abort('cannot use localheads with old style '
683 'discovery')
694 'discovery')
684 if not util.safehasattr(remote, 'branches'):
695 if not util.safehasattr(remote, 'branches'):
685 # enable in-client legacy support
696 # enable in-client legacy support
686 remote = localrepo.locallegacypeer(remote.local())
697 remote = localrepo.locallegacypeer(remote.local())
687 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
698 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 force=True)
699 force=True)
689 common = set(common)
700 common = set(common)
690 if not opts.get('nonheads'):
701 if not opts.get('nonheads'):
691 ui.write(("unpruned common: %s\n") %
702 ui.write(("unpruned common: %s\n") %
692 " ".join(sorted(short(n) for n in common)))
703 " ".join(sorted(short(n) for n in common)))
693 dag = dagutil.revlogdag(repo.changelog)
704 dag = dagutil.revlogdag(repo.changelog)
694 all = dag.ancestorset(dag.internalizeall(common))
705 all = dag.ancestorset(dag.internalizeall(common))
695 common = dag.externalizeall(dag.headsetofconnecteds(all))
706 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 else:
707 else:
697 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
708 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 common = set(common)
709 common = set(common)
699 rheads = set(hds)
710 rheads = set(hds)
700 lheads = set(repo.heads())
711 lheads = set(repo.heads())
701 ui.write(("common heads: %s\n") %
712 ui.write(("common heads: %s\n") %
702 " ".join(sorted(short(n) for n in common)))
713 " ".join(sorted(short(n) for n in common)))
703 if lheads <= common:
714 if lheads <= common:
704 ui.write(("local is subset\n"))
715 ui.write(("local is subset\n"))
705 elif rheads <= common:
716 elif rheads <= common:
706 ui.write(("remote is subset\n"))
717 ui.write(("remote is subset\n"))
707
718
708 serverlogs = opts.get('serverlog')
719 serverlogs = opts.get('serverlog')
709 if serverlogs:
720 if serverlogs:
710 for filename in serverlogs:
721 for filename in serverlogs:
711 with open(filename, 'r') as logfile:
722 with open(filename, 'r') as logfile:
712 line = logfile.readline()
723 line = logfile.readline()
713 while line:
724 while line:
714 parts = line.strip().split(';')
725 parts = line.strip().split(';')
715 op = parts[1]
726 op = parts[1]
716 if op == 'cg':
727 if op == 'cg':
717 pass
728 pass
718 elif op == 'cgss':
729 elif op == 'cgss':
719 doit(parts[2].split(' '), parts[3].split(' '))
730 doit(parts[2].split(' '), parts[3].split(' '))
720 elif op == 'unb':
731 elif op == 'unb':
721 doit(parts[3].split(' '), parts[2].split(' '))
732 doit(parts[3].split(' '), parts[2].split(' '))
722 line = logfile.readline()
733 line = logfile.readline()
723 else:
734 else:
724 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
735 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 opts.get('remote_head'))
736 opts.get('remote_head'))
726 localrevs = opts.get('local_head')
737 localrevs = opts.get('local_head')
727 doit(localrevs, remoterevs)
738 doit(localrevs, remoterevs)
728
739
729 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
740 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
730 def debugextensions(ui, **opts):
741 def debugextensions(ui, **opts):
731 '''show information about active extensions'''
742 '''show information about active extensions'''
732 exts = extensions.extensions(ui)
743 exts = extensions.extensions(ui)
733 hgver = util.version()
744 hgver = util.version()
734 fm = ui.formatter('debugextensions', opts)
745 fm = ui.formatter('debugextensions', opts)
735 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
746 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 isinternal = extensions.ismoduleinternal(extmod)
747 isinternal = extensions.ismoduleinternal(extmod)
737 extsource = pycompat.fsencode(extmod.__file__)
748 extsource = pycompat.fsencode(extmod.__file__)
738 if isinternal:
749 if isinternal:
739 exttestedwith = [] # never expose magic string to users
750 exttestedwith = [] # never expose magic string to users
740 else:
751 else:
741 exttestedwith = getattr(extmod, 'testedwith', '').split()
752 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 extbuglink = getattr(extmod, 'buglink', None)
753 extbuglink = getattr(extmod, 'buglink', None)
743
754
744 fm.startitem()
755 fm.startitem()
745
756
746 if ui.quiet or ui.verbose:
757 if ui.quiet or ui.verbose:
747 fm.write('name', '%s\n', extname)
758 fm.write('name', '%s\n', extname)
748 else:
759 else:
749 fm.write('name', '%s', extname)
760 fm.write('name', '%s', extname)
750 if isinternal or hgver in exttestedwith:
761 if isinternal or hgver in exttestedwith:
751 fm.plain('\n')
762 fm.plain('\n')
752 elif not exttestedwith:
763 elif not exttestedwith:
753 fm.plain(_(' (untested!)\n'))
764 fm.plain(_(' (untested!)\n'))
754 else:
765 else:
755 lasttestedversion = exttestedwith[-1]
766 lasttestedversion = exttestedwith[-1]
756 fm.plain(' (%s!)\n' % lasttestedversion)
767 fm.plain(' (%s!)\n' % lasttestedversion)
757
768
758 fm.condwrite(ui.verbose and extsource, 'source',
769 fm.condwrite(ui.verbose and extsource, 'source',
759 _(' location: %s\n'), extsource or "")
770 _(' location: %s\n'), extsource or "")
760
771
761 if ui.verbose:
772 if ui.verbose:
762 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
773 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 fm.data(bundled=isinternal)
774 fm.data(bundled=isinternal)
764
775
765 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
776 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 _(' tested with: %s\n'),
777 _(' tested with: %s\n'),
767 fm.formatlist(exttestedwith, name='ver'))
778 fm.formatlist(exttestedwith, name='ver'))
768
779
769 fm.condwrite(ui.verbose and extbuglink, 'buglink',
780 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 _(' bug reporting: %s\n'), extbuglink or "")
781 _(' bug reporting: %s\n'), extbuglink or "")
771
782
772 fm.end()
783 fm.end()
773
784
774 @command('debugfileset',
785 @command('debugfileset',
775 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
786 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 _('[-r REV] FILESPEC'))
787 _('[-r REV] FILESPEC'))
777 def debugfileset(ui, repo, expr, **opts):
788 def debugfileset(ui, repo, expr, **opts):
778 '''parse and apply a fileset specification'''
789 '''parse and apply a fileset specification'''
779 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
790 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 if ui.verbose:
791 if ui.verbose:
781 tree = fileset.parse(expr)
792 tree = fileset.parse(expr)
782 ui.note(fileset.prettyformat(tree), "\n")
793 ui.note(fileset.prettyformat(tree), "\n")
783
794
784 for f in ctx.getfileset(expr):
795 for f in ctx.getfileset(expr):
785 ui.write("%s\n" % f)
796 ui.write("%s\n" % f)
786
797
787 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
798 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 def debugfsinfo(ui, path="."):
799 def debugfsinfo(ui, path="."):
789 """show information detected about current filesystem"""
800 """show information detected about current filesystem"""
790 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
801 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
791 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
802 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
792 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
803 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
804 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 casesensitive = '(unknown)'
805 casesensitive = '(unknown)'
795 try:
806 try:
796 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
807 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
797 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
808 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
798 except OSError:
809 except OSError:
799 pass
810 pass
800 ui.write(('case-sensitive: %s\n') % casesensitive)
811 ui.write(('case-sensitive: %s\n') % casesensitive)
801
812
802 @command('debuggetbundle',
813 @command('debuggetbundle',
803 [('H', 'head', [], _('id of head node'), _('ID')),
814 [('H', 'head', [], _('id of head node'), _('ID')),
804 ('C', 'common', [], _('id of common node'), _('ID')),
815 ('C', 'common', [], _('id of common node'), _('ID')),
805 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
816 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
806 _('REPO FILE [-H|-C ID]...'),
817 _('REPO FILE [-H|-C ID]...'),
807 norepo=True)
818 norepo=True)
808 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
819 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
809 """retrieves a bundle from a repo
820 """retrieves a bundle from a repo
810
821
811 Every ID must be a full-length hex node id string. Saves the bundle to the
822 Every ID must be a full-length hex node id string. Saves the bundle to the
812 given file.
823 given file.
813 """
824 """
814 repo = hg.peer(ui, opts, repopath)
825 repo = hg.peer(ui, opts, repopath)
815 if not repo.capable('getbundle'):
826 if not repo.capable('getbundle'):
816 raise error.Abort("getbundle() not supported by target repository")
827 raise error.Abort("getbundle() not supported by target repository")
817 args = {}
828 args = {}
818 if common:
829 if common:
819 args['common'] = [bin(s) for s in common]
830 args['common'] = [bin(s) for s in common]
820 if head:
831 if head:
821 args['heads'] = [bin(s) for s in head]
832 args['heads'] = [bin(s) for s in head]
822 # TODO: get desired bundlecaps from command line.
833 # TODO: get desired bundlecaps from command line.
823 args['bundlecaps'] = None
834 args['bundlecaps'] = None
824 bundle = repo.getbundle('debug', **args)
835 bundle = repo.getbundle('debug', **args)
825
836
826 bundletype = opts.get('type', 'bzip2').lower()
837 bundletype = opts.get('type', 'bzip2').lower()
827 btypes = {'none': 'HG10UN',
838 btypes = {'none': 'HG10UN',
828 'bzip2': 'HG10BZ',
839 'bzip2': 'HG10BZ',
829 'gzip': 'HG10GZ',
840 'gzip': 'HG10GZ',
830 'bundle2': 'HG20'}
841 'bundle2': 'HG20'}
831 bundletype = btypes.get(bundletype)
842 bundletype = btypes.get(bundletype)
832 if bundletype not in bundle2.bundletypes:
843 if bundletype not in bundle2.bundletypes:
833 raise error.Abort(_('unknown bundle type specified with --type'))
844 raise error.Abort(_('unknown bundle type specified with --type'))
834 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
845 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
835
846
836 @command('debugignore', [], '[FILE]')
847 @command('debugignore', [], '[FILE]')
837 def debugignore(ui, repo, *files, **opts):
848 def debugignore(ui, repo, *files, **opts):
838 """display the combined ignore pattern and information about ignored files
849 """display the combined ignore pattern and information about ignored files
839
850
840 With no argument display the combined ignore pattern.
851 With no argument display the combined ignore pattern.
841
852
842 Given space separated file names, shows if the given file is ignored and
853 Given space separated file names, shows if the given file is ignored and
843 if so, show the ignore rule (file and line number) that matched it.
854 if so, show the ignore rule (file and line number) that matched it.
844 """
855 """
845 ignore = repo.dirstate._ignore
856 ignore = repo.dirstate._ignore
846 if not files:
857 if not files:
847 # Show all the patterns
858 # Show all the patterns
848 ui.write("%s\n" % repr(ignore))
859 ui.write("%s\n" % repr(ignore))
849 else:
860 else:
850 for f in files:
861 for f in files:
851 nf = util.normpath(f)
862 nf = util.normpath(f)
852 ignored = None
863 ignored = None
853 ignoredata = None
864 ignoredata = None
854 if nf != '.':
865 if nf != '.':
855 if ignore(nf):
866 if ignore(nf):
856 ignored = nf
867 ignored = nf
857 ignoredata = repo.dirstate._ignorefileandline(nf)
868 ignoredata = repo.dirstate._ignorefileandline(nf)
858 else:
869 else:
859 for p in util.finddirs(nf):
870 for p in util.finddirs(nf):
860 if ignore(p):
871 if ignore(p):
861 ignored = p
872 ignored = p
862 ignoredata = repo.dirstate._ignorefileandline(p)
873 ignoredata = repo.dirstate._ignorefileandline(p)
863 break
874 break
864 if ignored:
875 if ignored:
865 if ignored == nf:
876 if ignored == nf:
866 ui.write(_("%s is ignored\n") % f)
877 ui.write(_("%s is ignored\n") % f)
867 else:
878 else:
868 ui.write(_("%s is ignored because of "
879 ui.write(_("%s is ignored because of "
869 "containing folder %s\n")
880 "containing folder %s\n")
870 % (f, ignored))
881 % (f, ignored))
871 ignorefile, lineno, line = ignoredata
882 ignorefile, lineno, line = ignoredata
872 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
883 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
873 % (ignorefile, lineno, line))
884 % (ignorefile, lineno, line))
874 else:
885 else:
875 ui.write(_("%s is not ignored\n") % f)
886 ui.write(_("%s is not ignored\n") % f)
876
887
877 @command('debugindex', cmdutil.debugrevlogopts +
888 @command('debugindex', cmdutil.debugrevlogopts +
878 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
889 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
879 _('[-f FORMAT] -c|-m|FILE'),
890 _('[-f FORMAT] -c|-m|FILE'),
880 optionalrepo=True)
891 optionalrepo=True)
881 def debugindex(ui, repo, file_=None, **opts):
892 def debugindex(ui, repo, file_=None, **opts):
882 """dump the contents of an index file"""
893 """dump the contents of an index file"""
883 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
894 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
884 format = opts.get('format', 0)
895 format = opts.get('format', 0)
885 if format not in (0, 1):
896 if format not in (0, 1):
886 raise error.Abort(_("unknown format %d") % format)
897 raise error.Abort(_("unknown format %d") % format)
887
898
888 generaldelta = r.version & revlog.FLAG_GENERALDELTA
899 generaldelta = r.version & revlog.FLAG_GENERALDELTA
889 if generaldelta:
900 if generaldelta:
890 basehdr = ' delta'
901 basehdr = ' delta'
891 else:
902 else:
892 basehdr = ' base'
903 basehdr = ' base'
893
904
894 if ui.debugflag:
905 if ui.debugflag:
895 shortfn = hex
906 shortfn = hex
896 else:
907 else:
897 shortfn = short
908 shortfn = short
898
909
899 # There might not be anything in r, so have a sane default
910 # There might not be anything in r, so have a sane default
900 idlen = 12
911 idlen = 12
901 for i in r:
912 for i in r:
902 idlen = len(shortfn(r.node(i)))
913 idlen = len(shortfn(r.node(i)))
903 break
914 break
904
915
905 if format == 0:
916 if format == 0:
906 ui.write((" rev offset length " + basehdr + " linkrev"
917 ui.write((" rev offset length " + basehdr + " linkrev"
907 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
918 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
908 elif format == 1:
919 elif format == 1:
909 ui.write((" rev flag offset length"
920 ui.write((" rev flag offset length"
910 " size " + basehdr + " link p1 p2"
921 " size " + basehdr + " link p1 p2"
911 " %s\n") % "nodeid".rjust(idlen))
922 " %s\n") % "nodeid".rjust(idlen))
912
923
913 for i in r:
924 for i in r:
914 node = r.node(i)
925 node = r.node(i)
915 if generaldelta:
926 if generaldelta:
916 base = r.deltaparent(i)
927 base = r.deltaparent(i)
917 else:
928 else:
918 base = r.chainbase(i)
929 base = r.chainbase(i)
919 if format == 0:
930 if format == 0:
920 try:
931 try:
921 pp = r.parents(node)
932 pp = r.parents(node)
922 except Exception:
933 except Exception:
923 pp = [nullid, nullid]
934 pp = [nullid, nullid]
924 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
935 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
925 i, r.start(i), r.length(i), base, r.linkrev(i),
936 i, r.start(i), r.length(i), base, r.linkrev(i),
926 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
937 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
927 elif format == 1:
938 elif format == 1:
928 pr = r.parentrevs(i)
939 pr = r.parentrevs(i)
929 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
940 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
930 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
941 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
931 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
942 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932
943
933 @command('debugindexdot', cmdutil.debugrevlogopts,
944 @command('debugindexdot', cmdutil.debugrevlogopts,
934 _('-c|-m|FILE'), optionalrepo=True)
945 _('-c|-m|FILE'), optionalrepo=True)
935 def debugindexdot(ui, repo, file_=None, **opts):
946 def debugindexdot(ui, repo, file_=None, **opts):
936 """dump an index DAG as a graphviz dot file"""
947 """dump an index DAG as a graphviz dot file"""
937 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
948 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
938 ui.write(("digraph G {\n"))
949 ui.write(("digraph G {\n"))
939 for i in r:
950 for i in r:
940 node = r.node(i)
951 node = r.node(i)
941 pp = r.parents(node)
952 pp = r.parents(node)
942 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
953 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
943 if pp[1] != nullid:
954 if pp[1] != nullid:
944 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
955 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
945 ui.write("}\n")
956 ui.write("}\n")
946
957
947 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
958 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
948 def debuginstall(ui, **opts):
959 def debuginstall(ui, **opts):
949 '''test Mercurial installation
960 '''test Mercurial installation
950
961
951 Returns 0 on success.
962 Returns 0 on success.
952 '''
963 '''
953
964
954 def writetemp(contents):
965 def writetemp(contents):
955 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
966 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
956 f = os.fdopen(fd, pycompat.sysstr("wb"))
967 f = os.fdopen(fd, pycompat.sysstr("wb"))
957 f.write(contents)
968 f.write(contents)
958 f.close()
969 f.close()
959 return name
970 return name
960
971
961 problems = 0
972 problems = 0
962
973
963 fm = ui.formatter('debuginstall', opts)
974 fm = ui.formatter('debuginstall', opts)
964 fm.startitem()
975 fm.startitem()
965
976
966 # encoding
977 # encoding
967 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
978 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
968 err = None
979 err = None
969 try:
980 try:
970 encoding.fromlocal("test")
981 encoding.fromlocal("test")
971 except error.Abort as inst:
982 except error.Abort as inst:
972 err = inst
983 err = inst
973 problems += 1
984 problems += 1
974 fm.condwrite(err, 'encodingerror', _(" %s\n"
985 fm.condwrite(err, 'encodingerror', _(" %s\n"
975 " (check that your locale is properly set)\n"), err)
986 " (check that your locale is properly set)\n"), err)
976
987
977 # Python
988 # Python
978 fm.write('pythonexe', _("checking Python executable (%s)\n"),
989 fm.write('pythonexe', _("checking Python executable (%s)\n"),
979 pycompat.sysexecutable)
990 pycompat.sysexecutable)
980 fm.write('pythonver', _("checking Python version (%s)\n"),
991 fm.write('pythonver', _("checking Python version (%s)\n"),
981 ("%d.%d.%d" % sys.version_info[:3]))
992 ("%d.%d.%d" % sys.version_info[:3]))
982 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
993 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
983 os.path.dirname(pycompat.fsencode(os.__file__)))
994 os.path.dirname(pycompat.fsencode(os.__file__)))
984
995
985 security = set(sslutil.supportedprotocols)
996 security = set(sslutil.supportedprotocols)
986 if sslutil.hassni:
997 if sslutil.hassni:
987 security.add('sni')
998 security.add('sni')
988
999
989 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1000 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
990 fm.formatlist(sorted(security), name='protocol',
1001 fm.formatlist(sorted(security), name='protocol',
991 fmt='%s', sep=','))
1002 fmt='%s', sep=','))
992
1003
993 # These are warnings, not errors. So don't increment problem count. This
1004 # These are warnings, not errors. So don't increment problem count. This
994 # may change in the future.
1005 # may change in the future.
995 if 'tls1.2' not in security:
1006 if 'tls1.2' not in security:
996 fm.plain(_(' TLS 1.2 not supported by Python install; '
1007 fm.plain(_(' TLS 1.2 not supported by Python install; '
997 'network connections lack modern security\n'))
1008 'network connections lack modern security\n'))
998 if 'sni' not in security:
1009 if 'sni' not in security:
999 fm.plain(_(' SNI not supported by Python install; may have '
1010 fm.plain(_(' SNI not supported by Python install; may have '
1000 'connectivity issues with some servers\n'))
1011 'connectivity issues with some servers\n'))
1001
1012
1002 # TODO print CA cert info
1013 # TODO print CA cert info
1003
1014
1004 # hg version
1015 # hg version
1005 hgver = util.version()
1016 hgver = util.version()
1006 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1017 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1007 hgver.split('+')[0])
1018 hgver.split('+')[0])
1008 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1019 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1009 '+'.join(hgver.split('+')[1:]))
1020 '+'.join(hgver.split('+')[1:]))
1010
1021
1011 # compiled modules
1022 # compiled modules
1012 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1023 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1013 policy.policy)
1024 policy.policy)
1014 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1025 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1015 os.path.dirname(pycompat.fsencode(__file__)))
1026 os.path.dirname(pycompat.fsencode(__file__)))
1016
1027
1017 if policy.policy in ('c', 'allow'):
1028 if policy.policy in ('c', 'allow'):
1018 err = None
1029 err = None
1019 try:
1030 try:
1020 from .cext import (
1031 from .cext import (
1021 base85,
1032 base85,
1022 bdiff,
1033 bdiff,
1023 mpatch,
1034 mpatch,
1024 osutil,
1035 osutil,
1025 )
1036 )
1026 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1037 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1027 except Exception as inst:
1038 except Exception as inst:
1028 err = inst
1039 err = inst
1029 problems += 1
1040 problems += 1
1030 fm.condwrite(err, 'extensionserror', " %s\n", err)
1041 fm.condwrite(err, 'extensionserror', " %s\n", err)
1031
1042
1032 compengines = util.compengines._engines.values()
1043 compengines = util.compengines._engines.values()
1033 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1044 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1034 fm.formatlist(sorted(e.name() for e in compengines),
1045 fm.formatlist(sorted(e.name() for e in compengines),
1035 name='compengine', fmt='%s', sep=', '))
1046 name='compengine', fmt='%s', sep=', '))
1036 fm.write('compenginesavail', _('checking available compression engines '
1047 fm.write('compenginesavail', _('checking available compression engines '
1037 '(%s)\n'),
1048 '(%s)\n'),
1038 fm.formatlist(sorted(e.name() for e in compengines
1049 fm.formatlist(sorted(e.name() for e in compengines
1039 if e.available()),
1050 if e.available()),
1040 name='compengine', fmt='%s', sep=', '))
1051 name='compengine', fmt='%s', sep=', '))
1041 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1052 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1042 fm.write('compenginesserver', _('checking available compression engines '
1053 fm.write('compenginesserver', _('checking available compression engines '
1043 'for wire protocol (%s)\n'),
1054 'for wire protocol (%s)\n'),
1044 fm.formatlist([e.name() for e in wirecompengines
1055 fm.formatlist([e.name() for e in wirecompengines
1045 if e.wireprotosupport()],
1056 if e.wireprotosupport()],
1046 name='compengine', fmt='%s', sep=', '))
1057 name='compengine', fmt='%s', sep=', '))
1047
1058
1048 # templates
1059 # templates
1049 p = templater.templatepaths()
1060 p = templater.templatepaths()
1050 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1061 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1051 fm.condwrite(not p, '', _(" no template directories found\n"))
1062 fm.condwrite(not p, '', _(" no template directories found\n"))
1052 if p:
1063 if p:
1053 m = templater.templatepath("map-cmdline.default")
1064 m = templater.templatepath("map-cmdline.default")
1054 if m:
1065 if m:
1055 # template found, check if it is working
1066 # template found, check if it is working
1056 err = None
1067 err = None
1057 try:
1068 try:
1058 templater.templater.frommapfile(m)
1069 templater.templater.frommapfile(m)
1059 except Exception as inst:
1070 except Exception as inst:
1060 err = inst
1071 err = inst
1061 p = None
1072 p = None
1062 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1073 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1063 else:
1074 else:
1064 p = None
1075 p = None
1065 fm.condwrite(p, 'defaulttemplate',
1076 fm.condwrite(p, 'defaulttemplate',
1066 _("checking default template (%s)\n"), m)
1077 _("checking default template (%s)\n"), m)
1067 fm.condwrite(not m, 'defaulttemplatenotfound',
1078 fm.condwrite(not m, 'defaulttemplatenotfound',
1068 _(" template '%s' not found\n"), "default")
1079 _(" template '%s' not found\n"), "default")
1069 if not p:
1080 if not p:
1070 problems += 1
1081 problems += 1
1071 fm.condwrite(not p, '',
1082 fm.condwrite(not p, '',
1072 _(" (templates seem to have been installed incorrectly)\n"))
1083 _(" (templates seem to have been installed incorrectly)\n"))
1073
1084
1074 # editor
1085 # editor
1075 editor = ui.geteditor()
1086 editor = ui.geteditor()
1076 editor = util.expandpath(editor)
1087 editor = util.expandpath(editor)
1077 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1088 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1078 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1089 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1079 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1090 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1080 _(" No commit editor set and can't find %s in PATH\n"
1091 _(" No commit editor set and can't find %s in PATH\n"
1081 " (specify a commit editor in your configuration"
1092 " (specify a commit editor in your configuration"
1082 " file)\n"), not cmdpath and editor == 'vi' and editor)
1093 " file)\n"), not cmdpath and editor == 'vi' and editor)
1083 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1094 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1084 _(" Can't find editor '%s' in PATH\n"
1095 _(" Can't find editor '%s' in PATH\n"
1085 " (specify a commit editor in your configuration"
1096 " (specify a commit editor in your configuration"
1086 " file)\n"), not cmdpath and editor)
1097 " file)\n"), not cmdpath and editor)
1087 if not cmdpath and editor != 'vi':
1098 if not cmdpath and editor != 'vi':
1088 problems += 1
1099 problems += 1
1089
1100
1090 # check username
1101 # check username
1091 username = None
1102 username = None
1092 err = None
1103 err = None
1093 try:
1104 try:
1094 username = ui.username()
1105 username = ui.username()
1095 except error.Abort as e:
1106 except error.Abort as e:
1096 err = e
1107 err = e
1097 problems += 1
1108 problems += 1
1098
1109
1099 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1110 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1100 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1111 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1101 " (specify a username in your configuration file)\n"), err)
1112 " (specify a username in your configuration file)\n"), err)
1102
1113
1103 fm.condwrite(not problems, '',
1114 fm.condwrite(not problems, '',
1104 _("no problems detected\n"))
1115 _("no problems detected\n"))
1105 if not problems:
1116 if not problems:
1106 fm.data(problems=problems)
1117 fm.data(problems=problems)
1107 fm.condwrite(problems, 'problems',
1118 fm.condwrite(problems, 'problems',
1108 _("%d problems detected,"
1119 _("%d problems detected,"
1109 " please check your install!\n"), problems)
1120 " please check your install!\n"), problems)
1110 fm.end()
1121 fm.end()
1111
1122
1112 return problems
1123 return problems
1113
1124
1114 @command('debugknown', [], _('REPO ID...'), norepo=True)
1125 @command('debugknown', [], _('REPO ID...'), norepo=True)
1115 def debugknown(ui, repopath, *ids, **opts):
1126 def debugknown(ui, repopath, *ids, **opts):
1116 """test whether node ids are known to a repo
1127 """test whether node ids are known to a repo
1117
1128
1118 Every ID must be a full-length hex node id string. Returns a list of 0s
1129 Every ID must be a full-length hex node id string. Returns a list of 0s
1119 and 1s indicating unknown/known.
1130 and 1s indicating unknown/known.
1120 """
1131 """
1121 repo = hg.peer(ui, opts, repopath)
1132 repo = hg.peer(ui, opts, repopath)
1122 if not repo.capable('known'):
1133 if not repo.capable('known'):
1123 raise error.Abort("known() not supported by target repository")
1134 raise error.Abort("known() not supported by target repository")
1124 flags = repo.known([bin(s) for s in ids])
1135 flags = repo.known([bin(s) for s in ids])
1125 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1136 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1126
1137
1127 @command('debuglabelcomplete', [], _('LABEL...'))
1138 @command('debuglabelcomplete', [], _('LABEL...'))
1128 def debuglabelcomplete(ui, repo, *args):
1139 def debuglabelcomplete(ui, repo, *args):
1129 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1140 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1130 debugnamecomplete(ui, repo, *args)
1141 debugnamecomplete(ui, repo, *args)
1131
1142
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # Fix: the second operand previously re-tested 'force_lock', so
    # '--force-wlock' on its own freed the lock but then fell through to
    # the reporting code below instead of returning.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; release it
            # immediately so we do not hold it ourselves
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and our stat: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1203
1214
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump every record of the requested merge-state format version;
        # v1records/v2records are closed over from the enclosing scope
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver name and its state, NUL-joined
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record: NUL-separated fields; v2 adds the
                # other-side node and flags after the first seven fields
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other and (optionally) base labels
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort in 'LOml' order, unknown ones after,
        # ordered by their payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # mismatching states: fall back to v1, and with --verbose also show
        # the v2 records so the discrepancy can be inspected
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1302
1313
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(branch for (branch, heads, tip, closed)
                      in repo.branchmap().iterbranches() if not closed)

    # no argument means "complete everything" (empty prefix)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates
                       if name.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1322
1333
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Parse a full hex node id into binary, aborting on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove the markers at the given obsstore indices
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker for precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # lock is taken before the transaction and released after it
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1436
1447
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec; 'acceptable' is a
        # string of dirstate state characters to match against.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # anything outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir):]
        # dirstate paths always use '/', so translate OS separators
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # bind the bound methods once; this loop runs per dirstate entry
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags;
    # empty means "no filter given", handled below with 'nmar'
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1501
1512
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # --tool is applied as a temporary ui.forcemerge override so that the
    # normal tool-selection machinery sees it
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # unless --debug is given, swallow the selection machinery's
                # own output so only 'FILE = TOOL' lines are emitted
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1579
1590
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for name, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(name),
                                   util.escapestr(value)))
        return
    # update mode: attempt the compare-and-set and report the result;
    # exit status is inverted (0 on success)
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
1600
1611
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvecs) of two revisions and print their
    # depths, hamming delta, distance and relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Previously 'rel' was left unbound when none of the comparisons
        # held, making the final ui.write raise NameError. The four cases
        # are presumably exhaustive for pvecs — TODO confirm — but report
        # an unknown relation defensively instead of crashing.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1621
1632
1622 @command('debugrebuilddirstate|debugrebuildstate',
1633 @command('debugrebuilddirstate|debugrebuildstate',
1623 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1634 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1624 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1635 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1625 'the working copy parent')),
1636 'the working copy parent')),
1626 ],
1637 ],
1627 _('[-r REV]'))
1638 _('[-r REV]'))
1628 def debugrebuilddirstate(ui, repo, rev, **opts):
1639 def debugrebuilddirstate(ui, repo, rev, **opts):
1629 """rebuild the dirstate as it would look like for the given revision
1640 """rebuild the dirstate as it would look like for the given revision
1630
1641
1631 If no revision is specified the first current parent will be used.
1642 If no revision is specified the first current parent will be used.
1632
1643
1633 The dirstate will be set to the files of the given revision.
1644 The dirstate will be set to the files of the given revision.
1634 The actual working directory content or existing dirstate
1645 The actual working directory content or existing dirstate
1635 information such as adds or removes is not considered.
1646 information such as adds or removes is not considered.
1636
1647
1637 ``minimal`` will only rebuild the dirstate status for files that claim to be
1648 ``minimal`` will only rebuild the dirstate status for files that claim to be
1638 tracked but are not in the parent manifest, or that exist in the parent
1649 tracked but are not in the parent manifest, or that exist in the parent
1639 manifest but are not in the dirstate. It will not change adds, removes, or
1650 manifest but are not in the dirstate. It will not change adds, removes, or
1640 modified files that are in the working copy parent.
1651 modified files that are in the working copy parent.
1641
1652
1642 One use of this command is to make the next :hg:`status` invocation
1653 One use of this command is to make the next :hg:`status` invocation
1643 check the actual file content.
1654 check the actual file content.
1644 """
1655 """
1645 ctx = scmutil.revsingle(repo, rev)
1656 ctx = scmutil.revsingle(repo, rev)
1646 with repo.wlock():
1657 with repo.wlock():
1647 dirstate = repo.dirstate
1658 dirstate = repo.dirstate
1648 changedfiles = None
1659 changedfiles = None
1649 # See command doc for what minimal does.
1660 # See command doc for what minimal does.
1650 if opts.get('minimal'):
1661 if opts.get('minimal'):
1651 manifestfiles = set(ctx.manifest().keys())
1662 manifestfiles = set(ctx.manifest().keys())
1652 dirstatefiles = set(dirstate)
1663 dirstatefiles = set(dirstate)
1653 manifestonly = manifestfiles - dirstatefiles
1664 manifestonly = manifestfiles - dirstatefiles
1654 dsonly = dirstatefiles - manifestfiles
1665 dsonly = dirstatefiles - manifestfiles
1655 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1666 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1656 changedfiles = manifestonly | dsnotadded
1667 changedfiles = manifestonly | dsnotadded
1657
1668
1658 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1669 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1659
1670
1660 @command('debugrebuildfncache', [], '')
1671 @command('debugrebuildfncache', [], '')
1661 def debugrebuildfncache(ui, repo):
1672 def debugrebuildfncache(ui, repo):
1662 """rebuild the fncache file"""
1673 """rebuild the fncache file"""
1663 repair.rebuildfncache(ui, repo)
1674 repair.rebuildfncache(ui, repo)
1664
1675
1665 @command('debugrename',
1676 @command('debugrename',
1666 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1677 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1667 _('[-r REV] FILE'))
1678 _('[-r REV] FILE'))
1668 def debugrename(ui, repo, file1, *pats, **opts):
1679 def debugrename(ui, repo, file1, *pats, **opts):
1669 """dump rename information"""
1680 """dump rename information"""
1670
1681
1671 ctx = scmutil.revsingle(repo, opts.get('rev'))
1682 ctx = scmutil.revsingle(repo, opts.get('rev'))
1672 m = scmutil.match(ctx, (file1,) + pats, opts)
1683 m = scmutil.match(ctx, (file1,) + pats, opts)
1673 for abs in ctx.walk(m):
1684 for abs in ctx.walk(m):
1674 fctx = ctx[abs]
1685 fctx = ctx[abs]
1675 o = fctx.filelog().renamed(fctx.filenode())
1686 o = fctx.filelog().renamed(fctx.filenode())
1676 rel = m.rel(abs)
1687 rel = m.rel(abs)
1677 if o:
1688 if o:
1678 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1689 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1679 else:
1690 else:
1680 ui.write(_("%s not renamed\n") % rel)
1691 ui.write(_("%s not renamed\n") % rel)
1681
1692
1682 @command('debugrevlog', cmdutil.debugrevlogopts +
1693 @command('debugrevlog', cmdutil.debugrevlogopts +
1683 [('d', 'dump', False, _('dump index data'))],
1694 [('d', 'dump', False, _('dump index data'))],
1684 _('-c|-m|FILE'),
1695 _('-c|-m|FILE'),
1685 optionalrepo=True)
1696 optionalrepo=True)
1686 def debugrevlog(ui, repo, file_=None, **opts):
1697 def debugrevlog(ui, repo, file_=None, **opts):
1687 """show data and statistics about a revlog"""
1698 """show data and statistics about a revlog"""
1688 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1699 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1689
1700
1690 if opts.get("dump"):
1701 if opts.get("dump"):
1691 numrevs = len(r)
1702 numrevs = len(r)
1692 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1703 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1693 " rawsize totalsize compression heads chainlen\n"))
1704 " rawsize totalsize compression heads chainlen\n"))
1694 ts = 0
1705 ts = 0
1695 heads = set()
1706 heads = set()
1696
1707
1697 for rev in xrange(numrevs):
1708 for rev in xrange(numrevs):
1698 dbase = r.deltaparent(rev)
1709 dbase = r.deltaparent(rev)
1699 if dbase == -1:
1710 if dbase == -1:
1700 dbase = rev
1711 dbase = rev
1701 cbase = r.chainbase(rev)
1712 cbase = r.chainbase(rev)
1702 clen = r.chainlen(rev)
1713 clen = r.chainlen(rev)
1703 p1, p2 = r.parentrevs(rev)
1714 p1, p2 = r.parentrevs(rev)
1704 rs = r.rawsize(rev)
1715 rs = r.rawsize(rev)
1705 ts = ts + rs
1716 ts = ts + rs
1706 heads -= set(r.parentrevs(rev))
1717 heads -= set(r.parentrevs(rev))
1707 heads.add(rev)
1718 heads.add(rev)
1708 try:
1719 try:
1709 compression = ts / r.end(rev)
1720 compression = ts / r.end(rev)
1710 except ZeroDivisionError:
1721 except ZeroDivisionError:
1711 compression = 0
1722 compression = 0
1712 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1723 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1713 "%11d %5d %8d\n" %
1724 "%11d %5d %8d\n" %
1714 (rev, p1, p2, r.start(rev), r.end(rev),
1725 (rev, p1, p2, r.start(rev), r.end(rev),
1715 r.start(dbase), r.start(cbase),
1726 r.start(dbase), r.start(cbase),
1716 r.start(p1), r.start(p2),
1727 r.start(p1), r.start(p2),
1717 rs, ts, compression, len(heads), clen))
1728 rs, ts, compression, len(heads), clen))
1718 return 0
1729 return 0
1719
1730
1720 v = r.version
1731 v = r.version
1721 format = v & 0xFFFF
1732 format = v & 0xFFFF
1722 flags = []
1733 flags = []
1723 gdelta = False
1734 gdelta = False
1724 if v & revlog.FLAG_INLINE_DATA:
1735 if v & revlog.FLAG_INLINE_DATA:
1725 flags.append('inline')
1736 flags.append('inline')
1726 if v & revlog.FLAG_GENERALDELTA:
1737 if v & revlog.FLAG_GENERALDELTA:
1727 gdelta = True
1738 gdelta = True
1728 flags.append('generaldelta')
1739 flags.append('generaldelta')
1729 if not flags:
1740 if not flags:
1730 flags = ['(none)']
1741 flags = ['(none)']
1731
1742
1732 nummerges = 0
1743 nummerges = 0
1733 numfull = 0
1744 numfull = 0
1734 numprev = 0
1745 numprev = 0
1735 nump1 = 0
1746 nump1 = 0
1736 nump2 = 0
1747 nump2 = 0
1737 numother = 0
1748 numother = 0
1738 nump1prev = 0
1749 nump1prev = 0
1739 nump2prev = 0
1750 nump2prev = 0
1740 chainlengths = []
1751 chainlengths = []
1741
1752
1742 datasize = [None, 0, 0]
1753 datasize = [None, 0, 0]
1743 fullsize = [None, 0, 0]
1754 fullsize = [None, 0, 0]
1744 deltasize = [None, 0, 0]
1755 deltasize = [None, 0, 0]
1745 chunktypecounts = {}
1756 chunktypecounts = {}
1746 chunktypesizes = {}
1757 chunktypesizes = {}
1747
1758
1748 def addsize(size, l):
1759 def addsize(size, l):
1749 if l[0] is None or size < l[0]:
1760 if l[0] is None or size < l[0]:
1750 l[0] = size
1761 l[0] = size
1751 if size > l[1]:
1762 if size > l[1]:
1752 l[1] = size
1763 l[1] = size
1753 l[2] += size
1764 l[2] += size
1754
1765
1755 numrevs = len(r)
1766 numrevs = len(r)
1756 for rev in xrange(numrevs):
1767 for rev in xrange(numrevs):
1757 p1, p2 = r.parentrevs(rev)
1768 p1, p2 = r.parentrevs(rev)
1758 delta = r.deltaparent(rev)
1769 delta = r.deltaparent(rev)
1759 if format > 0:
1770 if format > 0:
1760 addsize(r.rawsize(rev), datasize)
1771 addsize(r.rawsize(rev), datasize)
1761 if p2 != nullrev:
1772 if p2 != nullrev:
1762 nummerges += 1
1773 nummerges += 1
1763 size = r.length(rev)
1774 size = r.length(rev)
1764 if delta == nullrev:
1775 if delta == nullrev:
1765 chainlengths.append(0)
1776 chainlengths.append(0)
1766 numfull += 1
1777 numfull += 1
1767 addsize(size, fullsize)
1778 addsize(size, fullsize)
1768 else:
1779 else:
1769 chainlengths.append(chainlengths[delta] + 1)
1780 chainlengths.append(chainlengths[delta] + 1)
1770 addsize(size, deltasize)
1781 addsize(size, deltasize)
1771 if delta == rev - 1:
1782 if delta == rev - 1:
1772 numprev += 1
1783 numprev += 1
1773 if delta == p1:
1784 if delta == p1:
1774 nump1prev += 1
1785 nump1prev += 1
1775 elif delta == p2:
1786 elif delta == p2:
1776 nump2prev += 1
1787 nump2prev += 1
1777 elif delta == p1:
1788 elif delta == p1:
1778 nump1 += 1
1789 nump1 += 1
1779 elif delta == p2:
1790 elif delta == p2:
1780 nump2 += 1
1791 nump2 += 1
1781 elif delta != nullrev:
1792 elif delta != nullrev:
1782 numother += 1
1793 numother += 1
1783
1794
1784 # Obtain data on the raw chunks in the revlog.
1795 # Obtain data on the raw chunks in the revlog.
1785 segment = r._getsegmentforrevs(rev, rev)[1]
1796 segment = r._getsegmentforrevs(rev, rev)[1]
1786 if segment:
1797 if segment:
1787 chunktype = segment[0]
1798 chunktype = segment[0]
1788 else:
1799 else:
1789 chunktype = 'empty'
1800 chunktype = 'empty'
1790
1801
1791 if chunktype not in chunktypecounts:
1802 if chunktype not in chunktypecounts:
1792 chunktypecounts[chunktype] = 0
1803 chunktypecounts[chunktype] = 0
1793 chunktypesizes[chunktype] = 0
1804 chunktypesizes[chunktype] = 0
1794
1805
1795 chunktypecounts[chunktype] += 1
1806 chunktypecounts[chunktype] += 1
1796 chunktypesizes[chunktype] += size
1807 chunktypesizes[chunktype] += size
1797
1808
1798 # Adjust size min value for empty cases
1809 # Adjust size min value for empty cases
1799 for size in (datasize, fullsize, deltasize):
1810 for size in (datasize, fullsize, deltasize):
1800 if size[0] is None:
1811 if size[0] is None:
1801 size[0] = 0
1812 size[0] = 0
1802
1813
1803 numdeltas = numrevs - numfull
1814 numdeltas = numrevs - numfull
1804 numoprev = numprev - nump1prev - nump2prev
1815 numoprev = numprev - nump1prev - nump2prev
1805 totalrawsize = datasize[2]
1816 totalrawsize = datasize[2]
1806 datasize[2] /= numrevs
1817 datasize[2] /= numrevs
1807 fulltotal = fullsize[2]
1818 fulltotal = fullsize[2]
1808 fullsize[2] /= numfull
1819 fullsize[2] /= numfull
1809 deltatotal = deltasize[2]
1820 deltatotal = deltasize[2]
1810 if numrevs - numfull > 0:
1821 if numrevs - numfull > 0:
1811 deltasize[2] /= numrevs - numfull
1822 deltasize[2] /= numrevs - numfull
1812 totalsize = fulltotal + deltatotal
1823 totalsize = fulltotal + deltatotal
1813 avgchainlen = sum(chainlengths) / numrevs
1824 avgchainlen = sum(chainlengths) / numrevs
1814 maxchainlen = max(chainlengths)
1825 maxchainlen = max(chainlengths)
1815 compratio = 1
1826 compratio = 1
1816 if totalsize:
1827 if totalsize:
1817 compratio = totalrawsize / totalsize
1828 compratio = totalrawsize / totalsize
1818
1829
1819 basedfmtstr = '%%%dd\n'
1830 basedfmtstr = '%%%dd\n'
1820 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1831 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1821
1832
1822 def dfmtstr(max):
1833 def dfmtstr(max):
1823 return basedfmtstr % len(str(max))
1834 return basedfmtstr % len(str(max))
1824 def pcfmtstr(max, padding=0):
1835 def pcfmtstr(max, padding=0):
1825 return basepcfmtstr % (len(str(max)), ' ' * padding)
1836 return basepcfmtstr % (len(str(max)), ' ' * padding)
1826
1837
1827 def pcfmt(value, total):
1838 def pcfmt(value, total):
1828 if total:
1839 if total:
1829 return (value, 100 * float(value) / total)
1840 return (value, 100 * float(value) / total)
1830 else:
1841 else:
1831 return value, 100.0
1842 return value, 100.0
1832
1843
1833 ui.write(('format : %d\n') % format)
1844 ui.write(('format : %d\n') % format)
1834 ui.write(('flags : %s\n') % ', '.join(flags))
1845 ui.write(('flags : %s\n') % ', '.join(flags))
1835
1846
1836 ui.write('\n')
1847 ui.write('\n')
1837 fmt = pcfmtstr(totalsize)
1848 fmt = pcfmtstr(totalsize)
1838 fmt2 = dfmtstr(totalsize)
1849 fmt2 = dfmtstr(totalsize)
1839 ui.write(('revisions : ') + fmt2 % numrevs)
1850 ui.write(('revisions : ') + fmt2 % numrevs)
1840 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1851 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1841 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1852 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1842 ui.write(('revisions : ') + fmt2 % numrevs)
1853 ui.write(('revisions : ') + fmt2 % numrevs)
1843 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1854 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1844 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1855 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1845 ui.write(('revision size : ') + fmt2 % totalsize)
1856 ui.write(('revision size : ') + fmt2 % totalsize)
1846 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1857 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1847 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1858 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1848
1859
1849 def fmtchunktype(chunktype):
1860 def fmtchunktype(chunktype):
1850 if chunktype == 'empty':
1861 if chunktype == 'empty':
1851 return ' %s : ' % chunktype
1862 return ' %s : ' % chunktype
1852 elif chunktype in string.ascii_letters:
1863 elif chunktype in string.ascii_letters:
1853 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1864 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1854 else:
1865 else:
1855 return ' 0x%s : ' % hex(chunktype)
1866 return ' 0x%s : ' % hex(chunktype)
1856
1867
1857 ui.write('\n')
1868 ui.write('\n')
1858 ui.write(('chunks : ') + fmt2 % numrevs)
1869 ui.write(('chunks : ') + fmt2 % numrevs)
1859 for chunktype in sorted(chunktypecounts):
1870 for chunktype in sorted(chunktypecounts):
1860 ui.write(fmtchunktype(chunktype))
1871 ui.write(fmtchunktype(chunktype))
1861 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1872 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1862 ui.write(('chunks size : ') + fmt2 % totalsize)
1873 ui.write(('chunks size : ') + fmt2 % totalsize)
1863 for chunktype in sorted(chunktypecounts):
1874 for chunktype in sorted(chunktypecounts):
1864 ui.write(fmtchunktype(chunktype))
1875 ui.write(fmtchunktype(chunktype))
1865 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1876 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1866
1877
1867 ui.write('\n')
1878 ui.write('\n')
1868 fmt = dfmtstr(max(avgchainlen, compratio))
1879 fmt = dfmtstr(max(avgchainlen, compratio))
1869 ui.write(('avg chain length : ') + fmt % avgchainlen)
1880 ui.write(('avg chain length : ') + fmt % avgchainlen)
1870 ui.write(('max chain length : ') + fmt % maxchainlen)
1881 ui.write(('max chain length : ') + fmt % maxchainlen)
1871 ui.write(('compression ratio : ') + fmt % compratio)
1882 ui.write(('compression ratio : ') + fmt % compratio)
1872
1883
1873 if format > 0:
1884 if format > 0:
1874 ui.write('\n')
1885 ui.write('\n')
1875 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1886 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1876 % tuple(datasize))
1887 % tuple(datasize))
1877 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1888 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1878 % tuple(fullsize))
1889 % tuple(fullsize))
1879 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1890 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1880 % tuple(deltasize))
1891 % tuple(deltasize))
1881
1892
1882 if numdeltas > 0:
1893 if numdeltas > 0:
1883 ui.write('\n')
1894 ui.write('\n')
1884 fmt = pcfmtstr(numdeltas)
1895 fmt = pcfmtstr(numdeltas)
1885 fmt2 = pcfmtstr(numdeltas, 4)
1896 fmt2 = pcfmtstr(numdeltas, 4)
1886 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1897 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1887 if numprev > 0:
1898 if numprev > 0:
1888 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1899 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1889 numprev))
1900 numprev))
1890 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1901 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1891 numprev))
1902 numprev))
1892 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1903 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1893 numprev))
1904 numprev))
1894 if gdelta:
1905 if gdelta:
1895 ui.write(('deltas against p1 : ')
1906 ui.write(('deltas against p1 : ')
1896 + fmt % pcfmt(nump1, numdeltas))
1907 + fmt % pcfmt(nump1, numdeltas))
1897 ui.write(('deltas against p2 : ')
1908 ui.write(('deltas against p2 : ')
1898 + fmt % pcfmt(nump2, numdeltas))
1909 + fmt % pcfmt(nump2, numdeltas))
1899 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1910 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1900 numdeltas))
1911 numdeltas))
1901
1912
1902 @command('debugrevspec',
1913 @command('debugrevspec',
1903 [('', 'optimize', None,
1914 [('', 'optimize', None,
1904 _('print parsed tree after optimizing (DEPRECATED)')),
1915 _('print parsed tree after optimizing (DEPRECATED)')),
1905 ('', 'show-revs', True, _('print list of result revisions (default)')),
1916 ('', 'show-revs', True, _('print list of result revisions (default)')),
1906 ('s', 'show-set', None, _('print internal representation of result set')),
1917 ('s', 'show-set', None, _('print internal representation of result set')),
1907 ('p', 'show-stage', [],
1918 ('p', 'show-stage', [],
1908 _('print parsed tree at the given stage'), _('NAME')),
1919 _('print parsed tree at the given stage'), _('NAME')),
1909 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1920 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1910 ('', 'verify-optimized', False, _('verify optimized result')),
1921 ('', 'verify-optimized', False, _('verify optimized result')),
1911 ],
1922 ],
1912 ('REVSPEC'))
1923 ('REVSPEC'))
1913 def debugrevspec(ui, repo, expr, **opts):
1924 def debugrevspec(ui, repo, expr, **opts):
1914 """parse and apply a revision specification
1925 """parse and apply a revision specification
1915
1926
1916 Use -p/--show-stage option to print the parsed tree at the given stages.
1927 Use -p/--show-stage option to print the parsed tree at the given stages.
1917 Use -p all to print tree at every stage.
1928 Use -p all to print tree at every stage.
1918
1929
1919 Use --no-show-revs option with -s or -p to print only the set
1930 Use --no-show-revs option with -s or -p to print only the set
1920 representation or the parsed tree respectively.
1931 representation or the parsed tree respectively.
1921
1932
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1933 Use --verify-optimized to compare the optimized result with the unoptimized
1923 one. Returns 1 if the optimized result differs.
1934 one. Returns 1 if the optimized result differs.
1924 """
1935 """
1925 stages = [
1936 stages = [
1926 ('parsed', lambda tree: tree),
1937 ('parsed', lambda tree: tree),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1938 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1928 ('concatenated', revsetlang.foldconcat),
1939 ('concatenated', revsetlang.foldconcat),
1929 ('analyzed', revsetlang.analyze),
1940 ('analyzed', revsetlang.analyze),
1930 ('optimized', revsetlang.optimize),
1941 ('optimized', revsetlang.optimize),
1931 ]
1942 ]
1932 if opts['no_optimized']:
1943 if opts['no_optimized']:
1933 stages = stages[:-1]
1944 stages = stages[:-1]
1934 if opts['verify_optimized'] and opts['no_optimized']:
1945 if opts['verify_optimized'] and opts['no_optimized']:
1935 raise error.Abort(_('cannot use --verify-optimized with '
1946 raise error.Abort(_('cannot use --verify-optimized with '
1936 '--no-optimized'))
1947 '--no-optimized'))
1937 stagenames = set(n for n, f in stages)
1948 stagenames = set(n for n, f in stages)
1938
1949
1939 showalways = set()
1950 showalways = set()
1940 showchanged = set()
1951 showchanged = set()
1941 if ui.verbose and not opts['show_stage']:
1952 if ui.verbose and not opts['show_stage']:
1942 # show parsed tree by --verbose (deprecated)
1953 # show parsed tree by --verbose (deprecated)
1943 showalways.add('parsed')
1954 showalways.add('parsed')
1944 showchanged.update(['expanded', 'concatenated'])
1955 showchanged.update(['expanded', 'concatenated'])
1945 if opts['optimize']:
1956 if opts['optimize']:
1946 showalways.add('optimized')
1957 showalways.add('optimized')
1947 if opts['show_stage'] and opts['optimize']:
1958 if opts['show_stage'] and opts['optimize']:
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1959 raise error.Abort(_('cannot use --optimize with --show-stage'))
1949 if opts['show_stage'] == ['all']:
1960 if opts['show_stage'] == ['all']:
1950 showalways.update(stagenames)
1961 showalways.update(stagenames)
1951 else:
1962 else:
1952 for n in opts['show_stage']:
1963 for n in opts['show_stage']:
1953 if n not in stagenames:
1964 if n not in stagenames:
1954 raise error.Abort(_('invalid stage name: %s') % n)
1965 raise error.Abort(_('invalid stage name: %s') % n)
1955 showalways.update(opts['show_stage'])
1966 showalways.update(opts['show_stage'])
1956
1967
1957 treebystage = {}
1968 treebystage = {}
1958 printedtree = None
1969 printedtree = None
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1970 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1960 for n, f in stages:
1971 for n, f in stages:
1961 treebystage[n] = tree = f(tree)
1972 treebystage[n] = tree = f(tree)
1962 if n in showalways or (n in showchanged and tree != printedtree):
1973 if n in showalways or (n in showchanged and tree != printedtree):
1963 if opts['show_stage'] or n != 'parsed':
1974 if opts['show_stage'] or n != 'parsed':
1964 ui.write(("* %s:\n") % n)
1975 ui.write(("* %s:\n") % n)
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1976 ui.write(revsetlang.prettyformat(tree), "\n")
1966 printedtree = tree
1977 printedtree = tree
1967
1978
1968 if opts['verify_optimized']:
1979 if opts['verify_optimized']:
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1980 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1981 brevs = revset.makematcher(treebystage['optimized'])(repo)
1971 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1982 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1972 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1983 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1973 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1984 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1974 arevs = list(arevs)
1985 arevs = list(arevs)
1975 brevs = list(brevs)
1986 brevs = list(brevs)
1976 if arevs == brevs:
1987 if arevs == brevs:
1977 return 0
1988 return 0
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1989 ui.write(('--- analyzed\n'), label='diff.file_a')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1990 ui.write(('+++ optimized\n'), label='diff.file_b')
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1991 sm = difflib.SequenceMatcher(None, arevs, brevs)
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1992 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1982 if tag in ('delete', 'replace'):
1993 if tag in ('delete', 'replace'):
1983 for c in arevs[alo:ahi]:
1994 for c in arevs[alo:ahi]:
1984 ui.write('-%s\n' % c, label='diff.deleted')
1995 ui.write('-%s\n' % c, label='diff.deleted')
1985 if tag in ('insert', 'replace'):
1996 if tag in ('insert', 'replace'):
1986 for c in brevs[blo:bhi]:
1997 for c in brevs[blo:bhi]:
1987 ui.write('+%s\n' % c, label='diff.inserted')
1998 ui.write('+%s\n' % c, label='diff.inserted')
1988 if tag == 'equal':
1999 if tag == 'equal':
1989 for c in arevs[alo:ahi]:
2000 for c in arevs[alo:ahi]:
1990 ui.write(' %s\n' % c)
2001 ui.write(' %s\n' % c)
1991 return 1
2002 return 1
1992
2003
1993 func = revset.makematcher(tree)
2004 func = revset.makematcher(tree)
1994 revs = func(repo)
2005 revs = func(repo)
1995 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2006 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1996 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2007 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1997 if not opts['show_revs']:
2008 if not opts['show_revs']:
1998 return
2009 return
1999 for c in revs:
2010 for c in revs:
2000 ui.write("%s\n" % c)
2011 ui.write("%s\n" % c)
2001
2012
2002 @command('debugsetparents', [], _('REV1 [REV2]'))
2013 @command('debugsetparents', [], _('REV1 [REV2]'))
2003 def debugsetparents(ui, repo, rev1, rev2=None):
2014 def debugsetparents(ui, repo, rev1, rev2=None):
2004 """manually set the parents of the current working directory
2015 """manually set the parents of the current working directory
2005
2016
2006 This is useful for writing repository conversion tools, but should
2017 This is useful for writing repository conversion tools, but should
2007 be used with care. For example, neither the working directory nor the
2018 be used with care. For example, neither the working directory nor the
2008 dirstate is updated, so file status may be incorrect after running this
2019 dirstate is updated, so file status may be incorrect after running this
2009 command.
2020 command.
2010
2021
2011 Returns 0 on success.
2022 Returns 0 on success.
2012 """
2023 """
2013
2024
2014 r1 = scmutil.revsingle(repo, rev1).node()
2025 r1 = scmutil.revsingle(repo, rev1).node()
2015 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2026 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2016
2027
2017 with repo.wlock():
2028 with repo.wlock():
2018 repo.setparents(r1, r2)
2029 repo.setparents(r1, r2)
2019
2030
2020 @command('debugsub',
2031 @command('debugsub',
2021 [('r', 'rev', '',
2032 [('r', 'rev', '',
2022 _('revision to check'), _('REV'))],
2033 _('revision to check'), _('REV'))],
2023 _('[-r REV] [REV]'))
2034 _('[-r REV] [REV]'))
2024 def debugsub(ui, repo, rev=None):
2035 def debugsub(ui, repo, rev=None):
2025 ctx = scmutil.revsingle(repo, rev, None)
2036 ctx = scmutil.revsingle(repo, rev, None)
2026 for k, v in sorted(ctx.substate.items()):
2037 for k, v in sorted(ctx.substate.items()):
2027 ui.write(('path %s\n') % k)
2038 ui.write(('path %s\n') % k)
2028 ui.write((' source %s\n') % v[0])
2039 ui.write((' source %s\n') % v[0])
2029 ui.write((' revision %s\n') % v[1])
2040 ui.write((' revision %s\n') % v[1])
2030
2041
2031 @command('debugsuccessorssets',
2042 @command('debugsuccessorssets',
2032 [],
2043 [],
2033 _('[REV]'))
2044 _('[REV]'))
2034 def debugsuccessorssets(ui, repo, *revs):
2045 def debugsuccessorssets(ui, repo, *revs):
2035 """show set of successors for revision
2046 """show set of successors for revision
2036
2047
2037 A successors set of changeset A is a consistent group of revisions that
2048 A successors set of changeset A is a consistent group of revisions that
2038 succeed A. It contains non-obsolete changesets only.
2049 succeed A. It contains non-obsolete changesets only.
2039
2050
2040 In most cases a changeset A has a single successors set containing a single
2051 In most cases a changeset A has a single successors set containing a single
2041 successor (changeset A replaced by A').
2052 successor (changeset A replaced by A').
2042
2053
2043 A changeset that is made obsolete with no successors are called "pruned".
2054 A changeset that is made obsolete with no successors are called "pruned".
2044 Such changesets have no successors sets at all.
2055 Such changesets have no successors sets at all.
2045
2056
2046 A changeset that has been "split" will have a successors set containing
2057 A changeset that has been "split" will have a successors set containing
2047 more than one successor.
2058 more than one successor.
2048
2059
2049 A changeset that has been rewritten in multiple different ways is called
2060 A changeset that has been rewritten in multiple different ways is called
2050 "divergent". Such changesets have multiple successor sets (each of which
2061 "divergent". Such changesets have multiple successor sets (each of which
2051 may also be split, i.e. have multiple successors).
2062 may also be split, i.e. have multiple successors).
2052
2063
2053 Results are displayed as follows::
2064 Results are displayed as follows::
2054
2065
2055 <rev1>
2066 <rev1>
2056 <successors-1A>
2067 <successors-1A>
2057 <rev2>
2068 <rev2>
2058 <successors-2A>
2069 <successors-2A>
2059 <successors-2B1> <successors-2B2> <successors-2B3>
2070 <successors-2B1> <successors-2B2> <successors-2B3>
2060
2071
2061 Here rev2 has two possible (i.e. divergent) successors sets. The first
2072 Here rev2 has two possible (i.e. divergent) successors sets. The first
2062 holds one element, whereas the second holds three (i.e. the changeset has
2073 holds one element, whereas the second holds three (i.e. the changeset has
2063 been split).
2074 been split).
2064 """
2075 """
2065 # passed to successorssets caching computation from one call to another
2076 # passed to successorssets caching computation from one call to another
2066 cache = {}
2077 cache = {}
2067 ctx2str = str
2078 ctx2str = str
2068 node2str = short
2079 node2str = short
2069 if ui.debug():
2080 if ui.debug():
2070 def ctx2str(ctx):
2081 def ctx2str(ctx):
2071 return ctx.hex()
2082 return ctx.hex()
2072 node2str = hex
2083 node2str = hex
2073 for rev in scmutil.revrange(repo, revs):
2084 for rev in scmutil.revrange(repo, revs):
2074 ctx = repo[rev]
2085 ctx = repo[rev]
2075 ui.write('%s\n'% ctx2str(ctx))
2086 ui.write('%s\n'% ctx2str(ctx))
2076 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2087 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2077 if succsset:
2088 if succsset:
2078 ui.write(' ')
2089 ui.write(' ')
2079 ui.write(node2str(succsset[0]))
2090 ui.write(node2str(succsset[0]))
2080 for node in succsset[1:]:
2091 for node in succsset[1:]:
2081 ui.write(' ')
2092 ui.write(' ')
2082 ui.write(node2str(node))
2093 ui.write(node2str(node))
2083 ui.write('\n')
2094 ui.write('\n')
2084
2095
2085 @command('debugtemplate',
2096 @command('debugtemplate',
2086 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2097 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2087 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2098 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2088 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2099 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2089 optionalrepo=True)
2100 optionalrepo=True)
2090 def debugtemplate(ui, repo, tmpl, **opts):
2101 def debugtemplate(ui, repo, tmpl, **opts):
2091 """parse and apply a template
2102 """parse and apply a template
2092
2103
2093 If -r/--rev is given, the template is processed as a log template and
2104 If -r/--rev is given, the template is processed as a log template and
2094 applied to the given changesets. Otherwise, it is processed as a generic
2105 applied to the given changesets. Otherwise, it is processed as a generic
2095 template.
2106 template.
2096
2107
2097 Use --verbose to print the parsed tree.
2108 Use --verbose to print the parsed tree.
2098 """
2109 """
2099 revs = None
2110 revs = None
2100 if opts['rev']:
2111 if opts['rev']:
2101 if repo is None:
2112 if repo is None:
2102 raise error.RepoError(_('there is no Mercurial repository here '
2113 raise error.RepoError(_('there is no Mercurial repository here '
2103 '(.hg not found)'))
2114 '(.hg not found)'))
2104 revs = scmutil.revrange(repo, opts['rev'])
2115 revs = scmutil.revrange(repo, opts['rev'])
2105
2116
2106 props = {}
2117 props = {}
2107 for d in opts['define']:
2118 for d in opts['define']:
2108 try:
2119 try:
2109 k, v = (e.strip() for e in d.split('=', 1))
2120 k, v = (e.strip() for e in d.split('=', 1))
2110 if not k or k == 'ui':
2121 if not k or k == 'ui':
2111 raise ValueError
2122 raise ValueError
2112 props[k] = v
2123 props[k] = v
2113 except ValueError:
2124 except ValueError:
2114 raise error.Abort(_('malformed keyword definition: %s') % d)
2125 raise error.Abort(_('malformed keyword definition: %s') % d)
2115
2126
2116 if ui.verbose:
2127 if ui.verbose:
2117 aliases = ui.configitems('templatealias')
2128 aliases = ui.configitems('templatealias')
2118 tree = templater.parse(tmpl)
2129 tree = templater.parse(tmpl)
2119 ui.note(templater.prettyformat(tree), '\n')
2130 ui.note(templater.prettyformat(tree), '\n')
2120 newtree = templater.expandaliases(tree, aliases)
2131 newtree = templater.expandaliases(tree, aliases)
2121 if newtree != tree:
2132 if newtree != tree:
2122 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2133 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2123
2134
2124 if revs is None:
2135 if revs is None:
2125 t = formatter.maketemplater(ui, tmpl)
2136 t = formatter.maketemplater(ui, tmpl)
2126 props['ui'] = ui
2137 props['ui'] = ui
2127 ui.write(t.render(props))
2138 ui.write(t.render(props))
2128 else:
2139 else:
2129 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2140 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2130 for r in revs:
2141 for r in revs:
2131 displayer.show(repo[r], **props)
2142 displayer.show(repo[r], **props)
2132 displayer.close()
2143 displayer.close()
2133
2144
2134 @command('debugupdatecaches', [])
2145 @command('debugupdatecaches', [])
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2146 def debugupdatecaches(ui, repo, *pats, **opts):
2136 """warm all known caches in the repository"""
2147 """warm all known caches in the repository"""
2137 with repo.wlock():
2148 with repo.wlock():
2138 with repo.lock():
2149 with repo.lock():
2139 repo.updatecaches()
2150 repo.updatecaches()
2140
2151
2141 @command('debugupgraderepo', [
2152 @command('debugupgraderepo', [
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2153 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2143 ('', 'run', False, _('performs an upgrade')),
2154 ('', 'run', False, _('performs an upgrade')),
2144 ])
2155 ])
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2156 def debugupgraderepo(ui, repo, run=False, optimize=None):
2146 """upgrade a repository to use different features
2157 """upgrade a repository to use different features
2147
2158
2148 If no arguments are specified, the repository is evaluated for upgrade
2159 If no arguments are specified, the repository is evaluated for upgrade
2149 and a list of problems and potential optimizations is printed.
2160 and a list of problems and potential optimizations is printed.
2150
2161
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2162 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2152 can be influenced via additional arguments. More details will be provided
2163 can be influenced via additional arguments. More details will be provided
2153 by the command output when run without ``--run``.
2164 by the command output when run without ``--run``.
2154
2165
2155 During the upgrade, the repository will be locked and no writes will be
2166 During the upgrade, the repository will be locked and no writes will be
2156 allowed.
2167 allowed.
2157
2168
2158 At the end of the upgrade, the repository may not be readable while new
2169 At the end of the upgrade, the repository may not be readable while new
2159 repository data is swapped in. This window will be as long as it takes to
2170 repository data is swapped in. This window will be as long as it takes to
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2171 rename some directories inside the ``.hg`` directory. On most machines, this
2161 should complete almost instantaneously and the chances of a consumer being
2172 should complete almost instantaneously and the chances of a consumer being
2162 unable to access the repository should be low.
2173 unable to access the repository should be low.
2163 """
2174 """
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2175 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2165
2176
2166 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2177 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2167 inferrepo=True)
2178 inferrepo=True)
2168 def debugwalk(ui, repo, *pats, **opts):
2179 def debugwalk(ui, repo, *pats, **opts):
2169 """show how files match on given patterns"""
2180 """show how files match on given patterns"""
2170 m = scmutil.match(repo[None], pats, opts)
2181 m = scmutil.match(repo[None], pats, opts)
2171 ui.write(('matcher: %r\n' % m))
2182 ui.write(('matcher: %r\n' % m))
2172 items = list(repo[None].walk(m))
2183 items = list(repo[None].walk(m))
2173 if not items:
2184 if not items:
2174 return
2185 return
2175 f = lambda fn: fn
2186 f = lambda fn: fn
2176 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2187 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2177 f = lambda fn: util.normpath(fn)
2188 f = lambda fn: util.normpath(fn)
2178 fmt = 'f %%-%ds %%-%ds %%s' % (
2189 fmt = 'f %%-%ds %%-%ds %%s' % (
2179 max([len(abs) for abs in items]),
2190 max([len(abs) for abs in items]),
2180 max([len(m.rel(abs)) for abs in items]))
2191 max([len(m.rel(abs)) for abs in items]))
2181 for abs in items:
2192 for abs in items:
2182 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2193 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2183 ui.write("%s\n" % line.rstrip())
2194 ui.write("%s\n" % line.rstrip())
2184
2195
2185 @command('debugwireargs',
2196 @command('debugwireargs',
2186 [('', 'three', '', 'three'),
2197 [('', 'three', '', 'three'),
2187 ('', 'four', '', 'four'),
2198 ('', 'four', '', 'four'),
2188 ('', 'five', '', 'five'),
2199 ('', 'five', '', 'five'),
2189 ] + cmdutil.remoteopts,
2200 ] + cmdutil.remoteopts,
2190 _('REPO [OPTIONS]... [ONE [TWO]]'),
2201 _('REPO [OPTIONS]... [ONE [TWO]]'),
2191 norepo=True)
2202 norepo=True)
2192 def debugwireargs(ui, repopath, *vals, **opts):
2203 def debugwireargs(ui, repopath, *vals, **opts):
2193 repo = hg.peer(ui, opts, repopath)
2204 repo = hg.peer(ui, opts, repopath)
2194 for opt in cmdutil.remoteopts:
2205 for opt in cmdutil.remoteopts:
2195 del opts[opt[1]]
2206 del opts[opt[1]]
2196 args = {}
2207 args = {}
2197 for k, v in opts.iteritems():
2208 for k, v in opts.iteritems():
2198 if v:
2209 if v:
2199 args[k] = v
2210 args[k] = v
2200 # run twice to check that we don't mess up the stream for the next command
2211 # run twice to check that we don't mess up the stream for the next command
2201 res1 = repo.debugwireargs(*vals, **args)
2212 res1 = repo.debugwireargs(*vals, **args)
2202 res2 = repo.debugwireargs(*vals, **args)
2213 res2 = repo.debugwireargs(*vals, **args)
2203 ui.write("%s\n" % res1)
2214 ui.write("%s\n" % res1)
2204 if res1 != res2:
2215 if res1 != res2:
2205 ui.warn("%s\n" % res2)
2216 ui.warn("%s\n" % res2)
@@ -1,493 +1,519 b''
1 """ Mercurial phases support code
1 """ Mercurial phases support code
2
2
3 ---
3 ---
4
4
5 Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
5 Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
6 Logilab SA <contact@logilab.fr>
6 Logilab SA <contact@logilab.fr>
7 Augie Fackler <durin42@gmail.com>
7 Augie Fackler <durin42@gmail.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License version 2 or any later version.
10 of the GNU General Public License version 2 or any later version.
11
11
12 ---
12 ---
13
13
14 This module implements most phase logic in mercurial.
14 This module implements most phase logic in mercurial.
15
15
16
16
17 Basic Concept
17 Basic Concept
18 =============
18 =============
19
19
20 A 'changeset phase' is an indicator that tells us how a changeset is
20 A 'changeset phase' is an indicator that tells us how a changeset is
21 manipulated and communicated. The details of each phase is described
21 manipulated and communicated. The details of each phase is described
22 below, here we describe the properties they have in common.
22 below, here we describe the properties they have in common.
23
23
24 Like bookmarks, phases are not stored in history and thus are not
24 Like bookmarks, phases are not stored in history and thus are not
25 permanent and leave no audit trail.
25 permanent and leave no audit trail.
26
26
27 First, no changeset can be in two phases at once. Phases are ordered,
27 First, no changeset can be in two phases at once. Phases are ordered,
28 so they can be considered from lowest to highest. The default, lowest
28 so they can be considered from lowest to highest. The default, lowest
29 phase is 'public' - this is the normal phase of existing changesets. A
29 phase is 'public' - this is the normal phase of existing changesets. A
30 child changeset can not be in a lower phase than its parents.
30 child changeset can not be in a lower phase than its parents.
31
31
32 These phases share a hierarchy of traits:
32 These phases share a hierarchy of traits:
33
33
34 immutable shared
34 immutable shared
35 public: X X
35 public: X X
36 draft: X
36 draft: X
37 secret:
37 secret:
38
38
39 Local commits are draft by default.
39 Local commits are draft by default.
40
40
41 Phase Movement and Exchange
41 Phase Movement and Exchange
42 ===========================
42 ===========================
43
43
44 Phase data is exchanged by pushkey on pull and push. Some servers have
44 Phase data is exchanged by pushkey on pull and push. Some servers have
45 a publish option set, we call such a server a "publishing server".
45 a publish option set, we call such a server a "publishing server".
46 Pushing a draft changeset to a publishing server changes the phase to
46 Pushing a draft changeset to a publishing server changes the phase to
47 public.
47 public.
48
48
49 A small list of fact/rules define the exchange of phase:
49 A small list of fact/rules define the exchange of phase:
50
50
51 * old client never changes server states
51 * old client never changes server states
52 * pull never changes server states
52 * pull never changes server states
53 * publish and old server changesets are seen as public by client
53 * publish and old server changesets are seen as public by client
54 * any secret changeset seen in another repository is lowered to at
54 * any secret changeset seen in another repository is lowered to at
55 least draft
55 least draft
56
56
57 Here is the final table summing up the 49 possible use cases of phase
57 Here is the final table summing up the 49 possible use cases of phase
58 exchange:
58 exchange:
59
59
60 server
60 server
61 old publish non-publish
61 old publish non-publish
62 N X N D P N D P
62 N X N D P N D P
63 old client
63 old client
64 pull
64 pull
65 N - X/X - X/D X/P - X/D X/P
65 N - X/X - X/D X/P - X/D X/P
66 X - X/X - X/D X/P - X/D X/P
66 X - X/X - X/D X/P - X/D X/P
67 push
67 push
68 X X/X X/X X/P X/P X/P X/D X/D X/P
68 X X/X X/X X/P X/P X/P X/D X/D X/P
69 new client
69 new client
70 pull
70 pull
71 N - P/X - P/D P/P - D/D P/P
71 N - P/X - P/D P/P - D/D P/P
72 D - P/X - P/D P/P - D/D P/P
72 D - P/X - P/D P/P - D/D P/P
73 P - P/X - P/D P/P - P/D P/P
73 P - P/X - P/D P/P - P/D P/P
74 push
74 push
75 D P/X P/X P/P P/P P/P D/D D/D P/P
75 D P/X P/X P/P P/P P/P D/D D/D P/P
76 P P/X P/X P/P P/P P/P P/P P/P P/P
76 P P/X P/X P/P P/P P/P P/P P/P P/P
77
77
78 Legend:
78 Legend:
79
79
80 A/B = final state on client / state on server
80 A/B = final state on client / state on server
81
81
82 * N = new/not present,
82 * N = new/not present,
83 * P = public,
83 * P = public,
84 * D = draft,
84 * D = draft,
85 * X = not tracked (i.e., the old client or server has no internal
85 * X = not tracked (i.e., the old client or server has no internal
86 way of recording the phase.)
86 way of recording the phase.)
87
87
88 passive = only pushes
88 passive = only pushes
89
89
90
90
91 A cell here can be read like this:
91 A cell here can be read like this:
92
92
93 "When a new client pushes a draft changeset (D) to a publishing
93 "When a new client pushes a draft changeset (D) to a publishing
94 server where it's not present (N), it's marked public on both
94 server where it's not present (N), it's marked public on both
95 sides (P/P)."
95 sides (P/P)."
96
96
97 Note: old client behave as a publishing server with draft only content
97 Note: old client behave as a publishing server with draft only content
98 - other people see it as public
98 - other people see it as public
99 - content is pushed as draft
99 - content is pushed as draft
100
100
101 """
101 """
102
102
103 from __future__ import absolute_import
103 from __future__ import absolute_import
104
104
105 import errno
105 import errno
106
106
107 from .i18n import _
107 from .i18n import _
108 from .node import (
108 from .node import (
109 bin,
109 bin,
110 hex,
110 hex,
111 nullid,
111 nullid,
112 nullrev,
112 nullrev,
113 short,
113 short,
114 )
114 )
115 from . import (
115 from . import (
116 error,
116 error,
117 smartset,
117 smartset,
118 txnutil,
118 txnutil,
119 util,
119 util,
120 )
120 )
121
121
122 allphases = public, draft, secret = range(3)
122 allphases = public, draft, secret = range(3)
123 trackedphases = allphases[1:]
123 trackedphases = allphases[1:]
124 phasenames = ['public', 'draft', 'secret']
124 phasenames = ['public', 'draft', 'secret']
125
125
126 def _readroots(repo, phasedefaults=None):
126 def _readroots(repo, phasedefaults=None):
127 """Read phase roots from disk
127 """Read phase roots from disk
128
128
129 phasedefaults is a list of fn(repo, roots) callable, which are
129 phasedefaults is a list of fn(repo, roots) callable, which are
130 executed if the phase roots file does not exist. When phases are
130 executed if the phase roots file does not exist. When phases are
131 being initialized on an existing repository, this could be used to
131 being initialized on an existing repository, this could be used to
132 set selected changesets phase to something else than public.
132 set selected changesets phase to something else than public.
133
133
134 Return (roots, dirty) where dirty is true if roots differ from
134 Return (roots, dirty) where dirty is true if roots differ from
135 what is being stored.
135 what is being stored.
136 """
136 """
137 repo = repo.unfiltered()
137 repo = repo.unfiltered()
138 dirty = False
138 dirty = False
139 roots = [set() for i in allphases]
139 roots = [set() for i in allphases]
140 try:
140 try:
141 f, pending = txnutil.trypending(repo.root, repo.svfs, 'phaseroots')
141 f, pending = txnutil.trypending(repo.root, repo.svfs, 'phaseroots')
142 try:
142 try:
143 for line in f:
143 for line in f:
144 phase, nh = line.split()
144 phase, nh = line.split()
145 roots[int(phase)].add(bin(nh))
145 roots[int(phase)].add(bin(nh))
146 finally:
146 finally:
147 f.close()
147 f.close()
148 except IOError as inst:
148 except IOError as inst:
149 if inst.errno != errno.ENOENT:
149 if inst.errno != errno.ENOENT:
150 raise
150 raise
151 if phasedefaults:
151 if phasedefaults:
152 for f in phasedefaults:
152 for f in phasedefaults:
153 roots = f(repo, roots)
153 roots = f(repo, roots)
154 dirty = True
154 dirty = True
155 return roots, dirty
155 return roots, dirty
156
156
157 class phasecache(object):
157 class phasecache(object):
158 def __init__(self, repo, phasedefaults, _load=True):
158 def __init__(self, repo, phasedefaults, _load=True):
159 if _load:
159 if _load:
160 # Cheap trick to allow shallow-copy without copy module
160 # Cheap trick to allow shallow-copy without copy module
161 self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
161 self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
162 self._phaserevs = None
162 self._phaserevs = None
163 self._phasesets = None
163 self._phasesets = None
164 self.filterunknown(repo)
164 self.filterunknown(repo)
165 self.opener = repo.svfs
165 self.opener = repo.svfs
166
166
167 def getrevset(self, repo, phases):
167 def getrevset(self, repo, phases):
168 """return a smartset for the given phases"""
168 """return a smartset for the given phases"""
169 self.loadphaserevs(repo) # ensure phase's sets are loaded
169 self.loadphaserevs(repo) # ensure phase's sets are loaded
170
170
171 if self._phasesets and all(self._phasesets[p] is not None
171 if self._phasesets and all(self._phasesets[p] is not None
172 for p in phases):
172 for p in phases):
173 # fast path - use _phasesets
173 # fast path - use _phasesets
174 revs = self._phasesets[phases[0]]
174 revs = self._phasesets[phases[0]]
175 if len(phases) > 1:
175 if len(phases) > 1:
176 revs = revs.copy() # only copy when needed
176 revs = revs.copy() # only copy when needed
177 for p in phases[1:]:
177 for p in phases[1:]:
178 revs.update(self._phasesets[p])
178 revs.update(self._phasesets[p])
179 if repo.changelog.filteredrevs:
179 if repo.changelog.filteredrevs:
180 revs = revs - repo.changelog.filteredrevs
180 revs = revs - repo.changelog.filteredrevs
181 return smartset.baseset(revs)
181 return smartset.baseset(revs)
182 else:
182 else:
183 # slow path - enumerate all revisions
183 # slow path - enumerate all revisions
184 phase = self.phase
184 phase = self.phase
185 revs = (r for r in repo if phase(repo, r) in phases)
185 revs = (r for r in repo if phase(repo, r) in phases)
186 return smartset.generatorset(revs, iterasc=True)
186 return smartset.generatorset(revs, iterasc=True)
187
187
188 def copy(self):
188 def copy(self):
189 # Shallow copy meant to ensure isolation in
189 # Shallow copy meant to ensure isolation in
190 # advance/retractboundary(), nothing more.
190 # advance/retractboundary(), nothing more.
191 ph = self.__class__(None, None, _load=False)
191 ph = self.__class__(None, None, _load=False)
192 ph.phaseroots = self.phaseroots[:]
192 ph.phaseroots = self.phaseroots[:]
193 ph.dirty = self.dirty
193 ph.dirty = self.dirty
194 ph.opener = self.opener
194 ph.opener = self.opener
195 ph._phaserevs = self._phaserevs
195 ph._phaserevs = self._phaserevs
196 ph._phasesets = self._phasesets
196 ph._phasesets = self._phasesets
197 return ph
197 return ph
198
198
199 def replace(self, phcache):
199 def replace(self, phcache):
200 """replace all values in 'self' with content of phcache"""
200 """replace all values in 'self' with content of phcache"""
201 for a in ('phaseroots', 'dirty', 'opener', '_phaserevs', '_phasesets'):
201 for a in ('phaseroots', 'dirty', 'opener', '_phaserevs', '_phasesets'):
202 setattr(self, a, getattr(phcache, a))
202 setattr(self, a, getattr(phcache, a))
203
203
204 def _getphaserevsnative(self, repo):
204 def _getphaserevsnative(self, repo):
205 repo = repo.unfiltered()
205 repo = repo.unfiltered()
206 nativeroots = []
206 nativeroots = []
207 for phase in trackedphases:
207 for phase in trackedphases:
208 nativeroots.append(map(repo.changelog.rev, self.phaseroots[phase]))
208 nativeroots.append(map(repo.changelog.rev, self.phaseroots[phase]))
209 return repo.changelog.computephases(nativeroots)
209 return repo.changelog.computephases(nativeroots)
210
210
211 def _computephaserevspure(self, repo):
211 def _computephaserevspure(self, repo):
212 repo = repo.unfiltered()
212 repo = repo.unfiltered()
213 revs = [public] * len(repo.changelog)
213 revs = [public] * len(repo.changelog)
214 self._phaserevs = revs
214 self._phaserevs = revs
215 self._populatephaseroots(repo)
215 self._populatephaseroots(repo)
216 for phase in trackedphases:
216 for phase in trackedphases:
217 roots = list(map(repo.changelog.rev, self.phaseroots[phase]))
217 roots = list(map(repo.changelog.rev, self.phaseroots[phase]))
218 if roots:
218 if roots:
219 for rev in roots:
219 for rev in roots:
220 revs[rev] = phase
220 revs[rev] = phase
221 for rev in repo.changelog.descendants(roots):
221 for rev in repo.changelog.descendants(roots):
222 revs[rev] = phase
222 revs[rev] = phase
223
223
224 def loadphaserevs(self, repo):
224 def loadphaserevs(self, repo):
225 """ensure phase information is loaded in the object"""
225 """ensure phase information is loaded in the object"""
226 if self._phaserevs is None:
226 if self._phaserevs is None:
227 try:
227 try:
228 res = self._getphaserevsnative(repo)
228 res = self._getphaserevsnative(repo)
229 self._phaserevs, self._phasesets = res
229 self._phaserevs, self._phasesets = res
230 except AttributeError:
230 except AttributeError:
231 self._computephaserevspure(repo)
231 self._computephaserevspure(repo)
232
232
233 def invalidate(self):
233 def invalidate(self):
234 self._phaserevs = None
234 self._phaserevs = None
235 self._phasesets = None
235 self._phasesets = None
236
236
237 def _populatephaseroots(self, repo):
237 def _populatephaseroots(self, repo):
238 """Fills the _phaserevs cache with phases for the roots.
238 """Fills the _phaserevs cache with phases for the roots.
239 """
239 """
240 cl = repo.changelog
240 cl = repo.changelog
241 phaserevs = self._phaserevs
241 phaserevs = self._phaserevs
242 for phase in trackedphases:
242 for phase in trackedphases:
243 roots = map(cl.rev, self.phaseroots[phase])
243 roots = map(cl.rev, self.phaseroots[phase])
244 for root in roots:
244 for root in roots:
245 phaserevs[root] = phase
245 phaserevs[root] = phase
246
246
247 def phase(self, repo, rev):
247 def phase(self, repo, rev):
248 # We need a repo argument here to be able to build _phaserevs
248 # We need a repo argument here to be able to build _phaserevs
249 # if necessary. The repository instance is not stored in
249 # if necessary. The repository instance is not stored in
250 # phasecache to avoid reference cycles. The changelog instance
250 # phasecache to avoid reference cycles. The changelog instance
251 # is not stored because it is a filecache() property and can
251 # is not stored because it is a filecache() property and can
252 # be replaced without us being notified.
252 # be replaced without us being notified.
253 if rev == nullrev:
253 if rev == nullrev:
254 return public
254 return public
255 if rev < nullrev:
255 if rev < nullrev:
256 raise ValueError(_('cannot lookup negative revision'))
256 raise ValueError(_('cannot lookup negative revision'))
257 if self._phaserevs is None or rev >= len(self._phaserevs):
257 if self._phaserevs is None or rev >= len(self._phaserevs):
258 self.invalidate()
258 self.invalidate()
259 self.loadphaserevs(repo)
259 self.loadphaserevs(repo)
260 return self._phaserevs[rev]
260 return self._phaserevs[rev]
261
261
262 def write(self):
262 def write(self):
263 if not self.dirty:
263 if not self.dirty:
264 return
264 return
265 f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
265 f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
266 try:
266 try:
267 self._write(f)
267 self._write(f)
268 finally:
268 finally:
269 f.close()
269 f.close()
270
270
271 def _write(self, fp):
271 def _write(self, fp):
272 for phase, roots in enumerate(self.phaseroots):
272 for phase, roots in enumerate(self.phaseroots):
273 for h in roots:
273 for h in roots:
274 fp.write('%i %s\n' % (phase, hex(h)))
274 fp.write('%i %s\n' % (phase, hex(h)))
275 self.dirty = False
275 self.dirty = False
276
276
277 def _updateroots(self, phase, newroots, tr):
277 def _updateroots(self, phase, newroots, tr):
278 self.phaseroots[phase] = newroots
278 self.phaseroots[phase] = newroots
279 self.invalidate()
279 self.invalidate()
280 self.dirty = True
280 self.dirty = True
281
281
282 tr.addfilegenerator('phase', ('phaseroots',), self._write)
282 tr.addfilegenerator('phase', ('phaseroots',), self._write)
283 tr.hookargs['phases_moved'] = '1'
283 tr.hookargs['phases_moved'] = '1'
284
284
285 def advanceboundary(self, repo, tr, targetphase, nodes):
285 def advanceboundary(self, repo, tr, targetphase, nodes):
286 # Be careful to preserve shallow-copied values: do not update
286 # Be careful to preserve shallow-copied values: do not update
287 # phaseroots values, replace them.
287 # phaseroots values, replace them.
288
288
289 repo = repo.unfiltered()
289 repo = repo.unfiltered()
290 delroots = [] # set of root deleted by this path
290 delroots = [] # set of root deleted by this path
291 for phase in xrange(targetphase + 1, len(allphases)):
291 for phase in xrange(targetphase + 1, len(allphases)):
292 # filter nodes that are not in a compatible phase already
292 # filter nodes that are not in a compatible phase already
293 nodes = [n for n in nodes
293 nodes = [n for n in nodes
294 if self.phase(repo, repo[n].rev()) >= phase]
294 if self.phase(repo, repo[n].rev()) >= phase]
295 if not nodes:
295 if not nodes:
296 break # no roots to move anymore
296 break # no roots to move anymore
297 olds = self.phaseroots[phase]
297 olds = self.phaseroots[phase]
298 roots = set(ctx.node() for ctx in repo.set(
298 roots = set(ctx.node() for ctx in repo.set(
299 'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
299 'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
300 if olds != roots:
300 if olds != roots:
301 self._updateroots(phase, roots, tr)
301 self._updateroots(phase, roots, tr)
302 # some roots may need to be declared for lower phases
302 # some roots may need to be declared for lower phases
303 delroots.extend(olds - roots)
303 delroots.extend(olds - roots)
304 # declare deleted root in the target phase
304 # declare deleted root in the target phase
305 if targetphase != 0:
305 if targetphase != 0:
306 self.retractboundary(repo, tr, targetphase, delroots)
306 self.retractboundary(repo, tr, targetphase, delroots)
307 repo.invalidatevolatilesets()
307 repo.invalidatevolatilesets()
308
308
309 def retractboundary(self, repo, tr, targetphase, nodes):
309 def retractboundary(self, repo, tr, targetphase, nodes):
310 # Be careful to preserve shallow-copied values: do not update
310 # Be careful to preserve shallow-copied values: do not update
311 # phaseroots values, replace them.
311 # phaseroots values, replace them.
312
312
313 repo = repo.unfiltered()
313 repo = repo.unfiltered()
314 currentroots = self.phaseroots[targetphase]
314 currentroots = self.phaseroots[targetphase]
315 newroots = [n for n in nodes
315 newroots = [n for n in nodes
316 if self.phase(repo, repo[n].rev()) < targetphase]
316 if self.phase(repo, repo[n].rev()) < targetphase]
317 if newroots:
317 if newroots:
318 if nullid in newroots:
318 if nullid in newroots:
319 raise error.Abort(_('cannot change null revision phase'))
319 raise error.Abort(_('cannot change null revision phase'))
320 currentroots = currentroots.copy()
320 currentroots = currentroots.copy()
321 currentroots.update(newroots)
321 currentroots.update(newroots)
322
322
323 # Only compute new roots for revs above the roots that are being
323 # Only compute new roots for revs above the roots that are being
324 # retracted.
324 # retracted.
325 minnewroot = min(repo[n].rev() for n in newroots)
325 minnewroot = min(repo[n].rev() for n in newroots)
326 aboveroots = [n for n in currentroots
326 aboveroots = [n for n in currentroots
327 if repo[n].rev() >= minnewroot]
327 if repo[n].rev() >= minnewroot]
328 updatedroots = repo.set('roots(%ln::)', aboveroots)
328 updatedroots = repo.set('roots(%ln::)', aboveroots)
329
329
330 finalroots = set(n for n in currentroots if repo[n].rev() <
330 finalroots = set(n for n in currentroots if repo[n].rev() <
331 minnewroot)
331 minnewroot)
332 finalroots.update(ctx.node() for ctx in updatedroots)
332 finalroots.update(ctx.node() for ctx in updatedroots)
333
333
334 self._updateroots(targetphase, finalroots, tr)
334 self._updateroots(targetphase, finalroots, tr)
335 repo.invalidatevolatilesets()
335 repo.invalidatevolatilesets()
336
336
337 def filterunknown(self, repo):
337 def filterunknown(self, repo):
338 """remove unknown nodes from the phase boundary
338 """remove unknown nodes from the phase boundary
339
339
340 Nothing is lost as unknown nodes only hold data for their descendants.
340 Nothing is lost as unknown nodes only hold data for their descendants.
341 """
341 """
342 filtered = False
342 filtered = False
343 nodemap = repo.changelog.nodemap # to filter unknown nodes
343 nodemap = repo.changelog.nodemap # to filter unknown nodes
344 for phase, nodes in enumerate(self.phaseroots):
344 for phase, nodes in enumerate(self.phaseroots):
345 missing = sorted(node for node in nodes if node not in nodemap)
345 missing = sorted(node for node in nodes if node not in nodemap)
346 if missing:
346 if missing:
347 for mnode in missing:
347 for mnode in missing:
348 repo.ui.debug(
348 repo.ui.debug(
349 'removing unknown node %s from %i-phase boundary\n'
349 'removing unknown node %s from %i-phase boundary\n'
350 % (short(mnode), phase))
350 % (short(mnode), phase))
351 nodes.symmetric_difference_update(missing)
351 nodes.symmetric_difference_update(missing)
352 filtered = True
352 filtered = True
353 if filtered:
353 if filtered:
354 self.dirty = True
354 self.dirty = True
355 # filterunknown is called by repo.destroyed, we may have no changes in
355 # filterunknown is called by repo.destroyed, we may have no changes in
356 # root but phaserevs contents is certainly invalid (or at least we
356 # root but phaserevs contents is certainly invalid (or at least we
357 # have not proper way to check that). related to issue 3858.
357 # have not proper way to check that). related to issue 3858.
358 #
358 #
359 # The other caller is __init__ that have no _phaserevs initialized
359 # The other caller is __init__ that have no _phaserevs initialized
360 # anyway. If this change we should consider adding a dedicated
360 # anyway. If this change we should consider adding a dedicated
361 # "destroyed" function to phasecache or a proper cache key mechanism
361 # "destroyed" function to phasecache or a proper cache key mechanism
362 # (see branchmap one)
362 # (see branchmap one)
363 self.invalidate()
363 self.invalidate()
364
364
365 def advanceboundary(repo, tr, targetphase, nodes):
365 def advanceboundary(repo, tr, targetphase, nodes):
366 """Add nodes to a phase changing other nodes phases if necessary.
366 """Add nodes to a phase changing other nodes phases if necessary.
367
367
368 This function move boundary *forward* this means that all nodes
368 This function move boundary *forward* this means that all nodes
369 are set in the target phase or kept in a *lower* phase.
369 are set in the target phase or kept in a *lower* phase.
370
370
371 Simplify boundary to contains phase roots only."""
371 Simplify boundary to contains phase roots only."""
372 phcache = repo._phasecache.copy()
372 phcache = repo._phasecache.copy()
373 phcache.advanceboundary(repo, tr, targetphase, nodes)
373 phcache.advanceboundary(repo, tr, targetphase, nodes)
374 repo._phasecache.replace(phcache)
374 repo._phasecache.replace(phcache)
375
375
def retractboundary(repo, tr, targetphase, nodes):
    """Move the phase boundary backward so `nodes` reach `targetphase`.

    Every node in `nodes` ends up in `targetphase` or is kept in a
    *higher* (more private) phase; no node ever moves to a lower phase.

    The boundary is simplified so that it contains phase roots only.
    """
    # Mutate a copy first so repo._phasecache is swapped in atomically.
    updated = repo._phasecache.copy()
    updated.retractboundary(repo, tr, targetphase, nodes)
    repo._phasecache.replace(updated)
387
387
def listphases(repo):
    """Return the draft phase roots as a pushkey-serializable mapping.

    Keys are hex node ids mapped to the draft phase number (as a
    string).  A sorted dict keeps the serialization deterministic.
    """
    keys = util.sortdict()
    draftstr = '%i' % draft
    for node in repo._phasecache.phaseroots[draft]:
        keys[hex(node)] = draftstr

    if repo.publishing():
        # Advertise that this is a publishing repository so remotes do
        # not mistake it for a pre-phases (old) server.
        #
        # A push exchanges phase data as well as changesets: new phase
        # information may apply to changesets that are already common
        # and therefore never transferred.  A very simple example:
        #
        #   1) repo A pushes changeset X as draft to repo B
        #   2) repo B turns changeset X public
        #   3) repo B pushes to repo A; X itself is not sent, but the
        #      fact that X is now public still must reach A
        #
        # The server cannot work this out on its own because it knows
        # nothing about the client's phase data.
        keys['publishing'] = 'True'
    return keys
415
415
def pushphase(repo, nhex, oldphasestr, newphasestr):
    """Handle a pushkey request moving node `nhex` between phases.

    Returns True when the boundary was advanced, or when a concurrent
    update already produced the requested result; False otherwise.
    """
    repo = repo.unfiltered()
    with repo.lock():
        currentphase = repo[nhex].phase()
        # abs() guards against a negative index sneaking into phase lists
        newphase = abs(int(newphasestr))
        oldphase = abs(int(oldphasestr))
        if currentphase == oldphase and newphase < oldphase:
            with repo.transaction('pushkey-phase') as tr:
                advanceboundary(repo, tr, newphase, [bin(nhex)])
            return True
        if currentphase == newphase:
            # raced with another update, but it left the correct result
            return True
        return False
432
432
def subsetphaseheads(repo, subset):
    """Find the phase heads for a subset of a history.

    Returns a list indexed by phase number where each item is a list of
    phase head nodes.
    """
    tonode = repo.changelog.node
    headsbyphase = [[] for _ in allphases]
    # The secret slot is left empty on purpose: any head of the subset
    # not mentioned in another phase is implicitly secret.
    for phase in allphases[:-1]:
        revset = "heads(%%ln & %s())" % phasenames[phase]
        headsbyphase[phase] = [tonode(r) for r in repo.revs(revset, subset)]
    return headsbyphase
448
def updatephases(repo, tr, headsbyphase, addednodes):
    """Update the repo with the given phase heads.

    changegroup.apply() currently adds revisions as draft, so the added
    nodes are first retracted to secret, after which the boundary of
    every non-secret phase is advanced to its recorded heads.
    """
    retractboundary(repo, tr, secret, addednodes)
    for phase in allphases[:-1]:
        advanceboundary(repo, tr, phase, headsbyphase[phase])
458
def analyzeremotephases(repo, subset, roots):
    """Compute phase heads and roots in a subset of nodes from a root dict.

    * `subset`: heads of the subset
    * `roots`: {<nodeid> => phase} mapping; keys and values are strings

    Unknown elements in the input are tolerated and reported.
    """
    repo = repo.unfiltered()
    nodemap = repo.changelog.nodemap  # used to drop unknown nodes
    # turn the root dictionary into a list of known draft roots
    draftroots = []
    for nhex, phasestr in roots.iteritems():
        if nhex == 'publishing':  # ignore data related to publish option
            continue
        node = bin(nhex)
        phasenum = int(phasestr)
        if phasenum == draft:
            if node in nodemap:
                draftroots.append(node)
        elif phasenum == public:
            # only the null node may legitimately be a public root
            if node != nullid:
                repo.ui.warn(_('ignoring inconsistent public root'
                               ' from remote: %s\n') % nhex)
        else:
            repo.ui.warn(_('ignoring unexpected root from remote: %i %s\n')
                         % (phasenum, nhex))
    # compute heads
    publicheads = newheads(repo, subset, draftroots)
    return publicheads, draftroots
463
489
def newheads(repo, heads, roots):
    """Compute the heads of one subset minus another.

    * `heads`: defines the first subset
    * `roots`: defines the second subset, subtracted from the first
    """
    repo = repo.unfiltered()
    remaining = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))',
                         heads, roots, roots, heads)
    return [ctx.node() for ctx in remaining]
473
499
474
500
def newcommitphase(ui):
    """Return the target phase of a new commit.

    Handles every possible value of the phases.new-commit option: a
    symbolic phase name or a raw phase number.
    """
    v = ui.config('phases', 'new-commit', draft)
    # Try a symbolic phase name first, then fall back to an integer.
    try:
        return phasenames.index(v)
    except ValueError:
        pass
    try:
        return int(v)
    except ValueError:
        msg = _("phases.new-commit: not a valid phase name ('%s')")
        raise error.ConfigError(msg % v)
490
516
def hassecret(repo):
    """Tell whether the repository contains any secret changeset."""
    # phase number 2 is the secret phase; a non-empty root list means
    # at least one secret changeset exists
    return len(repo._phasecache.phaseroots[2]) > 0
General Comments 0
You need to be logged in to leave comments. Login now