Merge with crew
Bryan O'Sullivan -
r5676:9ed65758 merge default
@@ -0,0 +1,108 b''
1 #!/bin/sh
2
3 echo "[extensions]" >> $HGRCPATH
4 echo "mq=" >> $HGRCPATH
5 echo "[diff]" >> $HGRCPATH
6 echo "nodates=true" >> $HGRCPATH
7
8
9 catlog() {
10 cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /"
11 hg log --template "{rev}: {desc} - {author}\n"
12 }
13
14
15 echo ==== init
16 hg init a
17 cd a
18 hg qinit
19
20
21 echo ==== qnew -U
22 hg qnew -U 1.patch
23 catlog 1
24
25 echo ==== qref
26 echo "1" >1
27 hg add
28 hg qref
29 catlog 1
30
31 echo ==== qref -u
32 hg qref -u mary
33 catlog 1
34
35 echo ==== qnew
36 hg qnew 2.patch
37 echo "2" >2
38 hg add
39 hg qref
40 catlog 2
41
42 echo ==== qref -u
43 hg qref -u jane
44 catlog 2
45
46
47 echo ==== qnew -U -m
48 hg qnew -U -m "Three" 3.patch
49 catlog 3
50
51 echo ==== qref
52 echo "3" >3
53 hg add
54 hg qref
55 catlog 3
56
57 echo ==== qref -m
58 hg qref -m "Drei"
59 catlog 3
60
61 echo ==== qref -u
62 hg qref -u mary
63 catlog 3
64
65 echo ==== qref -u -m
66 hg qref -u maria -m "Three (again)"
67 catlog 3
68
69 echo ==== qnew -m
70 hg qnew -m "Four" 4.patch
71 echo "4" >4
72 hg add
73 hg qref
74 catlog 4
75
76 echo ==== qref -u
77 hg qref -u jane
78 catlog 4
79
80
81 echo ==== qnew with HG header
82 hg qnew 5.patch
83 hg qpop
84 echo "# HG changeset patch" >>.hg/patches/5.patch
85 echo "# User johndoe" >>.hg/patches/5.patch
86 # Drop patch specific error line
87 hg qpush 2>&1 | grep -v garbage
88 catlog 5
89
90 echo ==== hg qref
91 echo "5" >5
92 hg add
93 hg qref
94 catlog 5
95
96 echo ==== hg qref -U
97 hg qref -U
98 catlog 5
99
100 echo ==== hg qref -u
101 hg qref -u johndeere
102 catlog 5
103
104
105 echo ==== "qpop -a / qpush -a"
106 hg qpop -a
107 hg qpush -a
108 hg log --template "{rev}: {desc} - {author}\n"
@@ -0,0 +1,200 b''
1 ==== init
2 ==== qnew -U
3 From: test
4
5 0: [mq]: 1.patch - test
6 ==== qref
7 adding 1
8 From: test
9
10 diff -r ... 1
11 --- /dev/null
12 +++ b/1
13 @@ -0,0 +1,1 @@
14 +1
15 0: [mq]: 1.patch - test
16 ==== qref -u
17 From: mary
18
19 diff -r ... 1
20 --- /dev/null
21 +++ b/1
22 @@ -0,0 +1,1 @@
23 +1
24 0: [mq]: 1.patch - mary
25 ==== qnew
26 adding 2
27 diff -r ... 2
28 --- /dev/null
29 +++ b/2
30 @@ -0,0 +1,1 @@
31 +2
32 1: [mq]: 2.patch - test
33 0: [mq]: 1.patch - mary
34 ==== qref -u
35 From: jane
36
37
38 diff -r ... 2
39 --- /dev/null
40 +++ b/2
41 @@ -0,0 +1,1 @@
42 +2
43 1: [mq]: 2.patch - jane
44 0: [mq]: 1.patch - mary
45 ==== qnew -U -m
46 From: test
47
48 Three
49 2: Three - test
50 1: [mq]: 2.patch - jane
51 0: [mq]: 1.patch - mary
52 ==== qref
53 adding 3
54 From: test
55
56 Three
57
58 diff -r ... 3
59 --- /dev/null
60 +++ b/3
61 @@ -0,0 +1,1 @@
62 +3
63 2: Three - test
64 1: [mq]: 2.patch - jane
65 0: [mq]: 1.patch - mary
66 ==== qref -m
67 From: test
68
69 Drei
70
71 diff -r ... 3
72 --- /dev/null
73 +++ b/3
74 @@ -0,0 +1,1 @@
75 +3
76 2: Drei - test
77 1: [mq]: 2.patch - jane
78 0: [mq]: 1.patch - mary
79 ==== qref -u
80 From: mary
81
82 Drei
83
84 diff -r ... 3
85 --- /dev/null
86 +++ b/3
87 @@ -0,0 +1,1 @@
88 +3
89 2: Drei - mary
90 1: [mq]: 2.patch - jane
91 0: [mq]: 1.patch - mary
92 ==== qref -u -m
93 From: maria
94
95 Three (again)
96
97 diff -r ... 3
98 --- /dev/null
99 +++ b/3
100 @@ -0,0 +1,1 @@
101 +3
102 2: Three (again) - maria
103 1: [mq]: 2.patch - jane
104 0: [mq]: 1.patch - mary
105 ==== qnew -m
106 adding 4
107 Four
108
109 diff -r ... 4
110 --- /dev/null
111 +++ b/4
112 @@ -0,0 +1,1 @@
113 +4
114 3: Four - test
115 2: Three (again) - maria
116 1: [mq]: 2.patch - jane
117 0: [mq]: 1.patch - mary
118 ==== qref -u
119 From: jane
120
121 Four
122
123 diff -r ... 4
124 --- /dev/null
125 +++ b/4
126 @@ -0,0 +1,1 @@
127 +4
128 3: Four - jane
129 2: Three (again) - maria
130 1: [mq]: 2.patch - jane
131 0: [mq]: 1.patch - mary
132 ==== qnew with HG header
133 Now at: 4.patch
134 applying 5.patch
135 patch failed, unable to continue (try -v)
136 patch 5.patch is empty
137 Now at: 5.patch
138 # HG changeset patch
139 # User johndoe
140 4: imported patch 5.patch - johndoe
141 3: Four - jane
142 2: Three (again) - maria
143 1: [mq]: 2.patch - jane
144 0: [mq]: 1.patch - mary
145 ==== hg qref
146 adding 5
147 # HG changeset patch
148 # User johndoe
149
150 diff -r ... 5
151 --- /dev/null
152 +++ b/5
153 @@ -0,0 +1,1 @@
154 +5
155 4: [mq]: 5.patch - johndoe
156 3: Four - jane
157 2: Three (again) - maria
158 1: [mq]: 2.patch - jane
159 0: [mq]: 1.patch - mary
160 ==== hg qref -U
161 # HG changeset patch
162 # User test
163
164 diff -r ... 5
165 --- /dev/null
166 +++ b/5
167 @@ -0,0 +1,1 @@
168 +5
169 4: [mq]: 5.patch - test
170 3: Four - jane
171 2: Three (again) - maria
172 1: [mq]: 2.patch - jane
173 0: [mq]: 1.patch - mary
174 ==== hg qref -u
175 # HG changeset patch
176 # User johndeere
177
178 diff -r ... 5
179 --- /dev/null
180 +++ b/5
181 @@ -0,0 +1,1 @@
182 +5
183 4: [mq]: 5.patch - johndeere
184 3: Four - jane
185 2: Three (again) - maria
186 1: [mq]: 2.patch - jane
187 0: [mq]: 1.patch - mary
188 ==== qpop -a / qpush -a
189 Patch queue now empty
190 applying 1.patch
191 applying 2.patch
192 applying 3.patch
193 applying 4.patch
194 applying 5.patch
195 Now at: 5.patch
196 4: imported patch 5.patch - johndeere
197 3: Four - jane
198 2: Three (again) - maria
199 1: imported patch 2.patch - jane
200 0: imported patch 1.patch - mary
@@ -1,583 +1,583 b''
1 1 HGRC(5)
2 2 =======
3 3 Bryan O'Sullivan <bos@serpentine.com>
4 4
5 5 NAME
6 6 ----
7 7 hgrc - configuration files for Mercurial
8 8
9 9 SYNOPSIS
10 10 --------
11 11
12 12 The Mercurial system uses a set of configuration files to control
13 13 aspects of its behaviour.
14 14
15 15 FILES
16 16 -----
17 17
18 18 Mercurial reads configuration data from several files, if they exist.
19 19 The names of these files depend on the system on which Mercurial is
20 20 installed. Windows registry keys contain PATH-like strings; every
21 21 part must reference a Mercurial.ini file or be a directory where *.rc
22 22 files will be read.
23 23
24 24 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
25 25 (Unix) <install-root>/etc/mercurial/hgrc::
26 26 Per-installation configuration files, searched for in the
27 27 directory where Mercurial is installed. For example, if installed
28 28 in /shared/tools, Mercurial will look in
29 29 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
30 30 all Mercurial commands executed by any user in any directory.
31 31
32 32 (Unix) /etc/mercurial/hgrc.d/*.rc::
33 33 (Unix) /etc/mercurial/hgrc::
34 34 (Windows) HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial::
35 35 or::
36 36 (Windows) C:\Mercurial\Mercurial.ini::
37 37 Per-system configuration files, for the system on which Mercurial
38 38 is running. Options in these files apply to all Mercurial
39 39 commands executed by any user in any directory. Options in these
40 40 files override per-installation options.
41 41
42 42 (Unix) $HOME/.hgrc::
43 43 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
44 44 (Windows) $HOME\Mercurial.ini::
45 45 Per-user configuration file, for the user running Mercurial.
46 46 Options in this file apply to all Mercurial commands executed by
47 47 this user in any directory. Options in this file override
48 48 per-installation and per-system options.
49 49 On Windows systems, one of these is chosen exclusively according
50 50 to the definition of the HOME environment variable.
51 51
52 52 (Unix, Windows) <repo>/.hg/hgrc::
53 53 Per-repository configuration options that only apply in a
54 54 particular repository. This file is not version-controlled, and
55 55 will not get transferred during a "clone" operation. Options in
56 56 this file override options in all other configuration files.
57 57 On Unix, most of this file will be ignored if it doesn't belong
58 58 to a trusted user or to a trusted group. See the documentation
59 59 for the trusted section below for more details.
60 60
61 61 SYNTAX
62 62 ------
63 63
64 64 A configuration file consists of sections, led by a "[section]" header
65 65 and followed by "name: value" entries; "name=value" is also accepted.
66 66
67 67 [spam]
68 68 eggs=ham
69 69 green=
70 70 eggs
71 71
72 72 Each line contains one entry. If the lines that follow are indented,
73 73 they are treated as continuations of that entry.
74 74
75 75 Leading whitespace is removed from values. Empty lines are skipped.
76 76
77 77 The optional values can contain format strings which refer to other
78 78 values in the same section, or values in a special DEFAULT section.
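
  For example, assuming the Python ConfigParser-style "%(name)s"
  interpolation this refers to, and extending the section above
  (the values are illustrative):

    [spam]
    eggs = ham
    dinner = %(eggs)s and toast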
79 79
80 80 Lines beginning with "#" or ";" are ignored and may be used to provide
81 81 comments.
82 82
83 83 SECTIONS
84 84 --------
85 85
86 86 This section describes the different sections that may appear in a
87 87 Mercurial "hgrc" file, the purpose of each section, its possible
88 88 keys, and their possible values.
89 89
90 90 decode/encode::
91 91 Filters for transforming files on checkout/checkin. This would
92 92 typically be used for newline processing or other
93 93 localization/canonicalization of files.
94 94
95 95 Filters consist of a filter pattern followed by a filter command.
96 96 Filter patterns are globs by default, rooted at the repository
97 97 root. For example, to match any file ending in ".txt" in the root
98 98 directory only, use the pattern "*.txt". To match any file ending
99 99 in ".c" anywhere in the repository, use the pattern "**.c".
100 100
101 101 The filter command can start with a specifier, either "pipe:" or
102 102 "tempfile:". If no specifier is given, "pipe:" is used by default.
103 103
104 104 A "pipe:" command must accept data on stdin and return the
105 105 transformed data on stdout.
106 106
107 107 Pipe example:
108 108
109 109 [encode]
110 110 # uncompress gzip files on checkin to improve delta compression
111 111 # note: not necessarily a good idea, just an example
112 112 *.gz = pipe: gunzip
113 113
114 114 [decode]
115 115 # recompress gzip files when writing them to the working dir (we
116 116 # can safely omit "pipe:", because it's the default)
117 117 *.gz = gzip
118 118
119 119 A "tempfile:" command is a template. The string INFILE is replaced
120 120 with the name of a temporary file that contains the data to be
121 121 filtered by the command. The string OUTFILE is replaced with the
122 122 name of an empty temporary file, where the filtered data must be
123 123 written by the command.
124 124
125 125 NOTE: the tempfile mechanism is recommended for Windows systems,
126 126 where the standard shell I/O redirection operators often have
127 127 strange effects. In particular, if you are doing line ending
128 128 conversion on Windows using the popular dos2unix and unix2dos
129 129 programs, you *must* use the tempfile mechanism, as using pipes will
130 130 corrupt the contents of your files.
131 131
132 132 Tempfile example:
133 133
134 134 [encode]
135 135 # convert files to unix line ending conventions on checkin
136 136 **.txt = tempfile: dos2unix -n INFILE OUTFILE
137 137
138 138 [decode]
139 139 # convert files to windows line ending conventions when writing
140 140 # them to the working dir
141 141 **.txt = tempfile: unix2dos -n INFILE OUTFILE
142 142
143 143 defaults::
144 144 Use the [defaults] section to define command defaults, i.e. the
145 145 default options/arguments to pass to the specified commands.
146 146
147 147 The following example makes 'hg log' run in verbose mode, and
148 148 'hg status' show only the modified files, by default.
149 149
150 150 [defaults]
151 151 log = -v
152 152 status = -m
153 153
154 154 The actual commands, instead of their aliases, must be used when
155 155 defining command defaults. The command defaults will also be
156 156 applied to the aliases of the commands defined.
157 157
158 158 diff::
159 159 Settings used when displaying diffs. They are all boolean and
160 160 default to False.
161 161 git;;
162 162 Use git extended diff format.
163 163 nodates;;
164 164 Don't include dates in diff headers.
165 165 showfunc;;
166 166 Show which function each change is in.
167 167 ignorews;;
168 168 Ignore white space when comparing lines.
169 169 ignorewsamount;;
170 170 Ignore changes in the amount of white space.
171 171 ignoreblanklines;;
172 172 Ignore changes whose lines are all blank.
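
  For example, a [diff] section enabling a few of these (the chosen
  options are illustrative):

    [diff]
    git = true
    showfunc = true
    nodates = true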
173 173
174 174 email::
175 175 Settings for extensions that send email messages.
176 176 from;;
177 177 Optional. Email address to use in "From" header and SMTP envelope
178 178 of outgoing messages.
179 179 to;;
180 180 Optional. Comma-separated list of recipients' email addresses.
181 181 cc;;
182 182 Optional. Comma-separated list of carbon copy recipients'
183 183 email addresses.
184 184 bcc;;
185 185 Optional. Comma-separated list of blind carbon copy
186 186 recipients' email addresses. Cannot be set interactively.
187 187 method;;
188 188 Optional. Method to use to send email messages. If value is
189 189 "smtp" (default), use SMTP (see section "[smtp]" for
190 190 configuration). Otherwise, use as name of program to run that
191 191 acts like sendmail (takes "-f" option for sender, list of
192 192 recipients on command line, message on stdin). Normally, setting
193 193 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
194 194 sendmail to send messages.
195 195
196 196 Email example:
197 197
198 198 [email]
199 199 from = Joseph User <joe.user@example.com>
200 200 method = /usr/sbin/sendmail
201 201
202 202 extensions::
203 203 Mercurial has an extension mechanism for adding new features. To
204 204 enable an extension, create an entry for it in this section.
205 205
206 206 If you know that the extension is already in Python's search path,
207 207 you can give the name of the module, followed by "=", with nothing
208 208 after the "=".
209 209
210 210 Otherwise, give a name that you choose, followed by "=", followed by
211 211 the path to the ".py" file (including the file name extension) that
212 212 defines the extension.
213 213
214 214 Example for ~/.hgrc:
215 215
216 216 [extensions]
217 217 # (the mq extension will get loaded from mercurial's path)
218 218 hgext.mq =
219 219 # (this extension will get loaded from the file specified)
220 220 myfeature = ~/.hgext/myfeature.py
221 221
222 222 format::
223 223
224 224 usestore;;
225 225 Enable or disable the "store" repository format which improves
226 226 compatibility with systems that fold case or otherwise mangle
227 227 filenames. Enabled by default. Disabling this option will allow
228 228 you to store longer filenames in some situations at the expense of
229 229 compatibility.
230 230
231 231 hooks::
232 232 Commands or Python functions that get automatically executed by
233 233 various actions such as starting or finishing a commit. Multiple
234 234 hooks can be run for the same action by appending a suffix to the
235 235 action. Overriding a site-wide hook can be done by changing its
236 236 value or setting it to an empty string.
237 237
238 238 Example .hg/hgrc:
239 239
240 240 [hooks]
241 241 # do not use the site-wide hook
242 242 incoming =
243 243 incoming.email = /my/email/hook
244 244 incoming.autobuild = /my/build/hook
245 245
246 246 Most hooks are run with environment variables set that give added
247 247 useful information. For each hook below, the environment variables
248 248 it is passed are listed with names of the form "$HG_foo".
249 249
250 250 changegroup;;
251 251 Run after a changegroup has been added via push, pull or
252 252 unbundle. ID of the first new changeset is in $HG_NODE. URL from
253 253 which changes came is in $HG_URL.
254 254 commit;;
255 255 Run after a changeset has been created in the local repository.
256 256 ID of the newly created changeset is in $HG_NODE. Parent
257 257 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
258 258 incoming;;
259 259 Run after a changeset has been pulled, pushed, or unbundled into
260 260 the local repository. The ID of the newly arrived changeset is in
261 261 $HG_NODE. URL that was source of changes is in $HG_URL.
262 262 outgoing;;
263 263 Run after sending changes from local repository to another. ID of
264 264 first changeset sent is in $HG_NODE. Source of operation is in
265 265 $HG_SOURCE; see "preoutgoing" hook for description.
266 266 post-<command>;;
267 267 Run after successful invocations of the associated command. The
268 268 contents of the command line are passed as $HG_ARGS and the result
269 269 code in $HG_RESULT. Hook failure is ignored.
270 270 pre-<command>;;
271 271 Run before executing the associated command. The contents of the
272 272 command line are passed as $HG_ARGS. If the hook returns failure,
273 273 the command doesn't execute and Mercurial returns the failure code.
274 274 prechangegroup;;
275 275 Run before a changegroup is added via push, pull or unbundle.
276 276 Exit status 0 allows the changegroup to proceed. Non-zero status
277 277 will cause the push, pull or unbundle to fail. URL from which
278 278 changes will come is in $HG_URL.
279 279 precommit;;
280 280 Run before starting a local commit. Exit status 0 allows the
281 281 commit to proceed. Non-zero status will cause the commit to fail.
282 282 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
283 283 preoutgoing;;
284 Run before computing changes to send from the local repository to
284 Run before collecting changes to send from the local repository to
285 285 another. Non-zero status will cause failure. This lets you
286 286 prevent pull over http or ssh. It also applies to local pull,
287 287 push (outbound) or bundle commands, but is not effective there, since
288 288 you can simply copy the files instead. Source of operation is in
289 289 $HG_SOURCE. If "serve", operation is happening on behalf of
290 290 remote ssh or http repository. If "push", "pull" or "bundle",
291 291 operation is happening on behalf of repository on same system.
292 292 pretag;;
293 293 Run before creating a tag. Exit status 0 allows the tag to be
294 294 created. Non-zero status will cause the tag to fail. ID of
295 295 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
296 296 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
297 297 pretxnchangegroup;;
298 298 Run after a changegroup has been added via push, pull or unbundle,
299 299 but before the transaction has been committed. Changegroup is
300 300 visible to hook program. This lets you validate incoming changes
301 301 before accepting them. Passed the ID of the first new changeset
302 302 in $HG_NODE. Exit status 0 allows the transaction to commit.
303 303 Non-zero status will cause the transaction to be rolled back and
304 304 the push, pull or unbundle will fail. URL that was source of
305 305 changes is in $HG_URL.
306 306 pretxncommit;;
307 307 Run after a changeset has been created but the transaction not yet
308 308 committed. Changeset is visible to hook program. This lets you
309 309 validate commit message and changes. Exit status 0 allows the
310 310 commit to proceed. Non-zero status will cause the transaction to
311 311 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
312 312 IDs are in $HG_PARENT1 and $HG_PARENT2.
313 313 preupdate;;
314 314 Run before updating the working directory. Exit status 0 allows
315 315 the update to proceed. Non-zero status will prevent the update.
316 316 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
317 317 of second new parent is in $HG_PARENT2.
318 318 tag;;
319 319 Run after a tag is created. ID of tagged changeset is in
320 320 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
321 321 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
322 322 update;;
323 323 Run after updating the working directory. Changeset ID of first
324 324 new parent is in $HG_PARENT1. If merge, ID of second new parent
325 325 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
326 326 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
327 327
328 328 Note: it is generally better to use standard hooks rather than the
329 329 generic pre- and post- command hooks as they are guaranteed to be
330 330 called in the appropriate contexts for influencing transactions.
331 331 Also, hooks like "commit" will be called in all contexts that
332 332 generate a commit (e.g. tag) and not just the commit command.
333 333
334 334 Note2: Environment variables with empty values may not be passed to
335 335 hooks on platforms like Windows. For instance, $HG_PARENT2 will
336 336 not be available under Windows for non-merge changesets while being
337 337 set to an empty value under Unix-like systems.
338 338
339 339 The syntax for Python hooks is as follows:
340 340
341 341 hookname = python:modulename.submodule.callable
342 342
343 343 Python hooks are run within the Mercurial process. Each hook is
344 344 called with at least three keyword arguments: a ui object (keyword
345 345 "ui"), a repository object (keyword "repo"), and a "hooktype"
346 346 keyword that tells what kind of hook is used. Arguments listed as
347 347 environment variables above are passed as keyword arguments, with no
348 348 "HG_" prefix, and names in lower case.
349 349
350 350 If a Python hook returns a "true" value or raises an exception, this
351 351 is treated as failure of the hook.
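
  As a minimal sketch (the module name, hook name, and policy here are
  illustrative, not part of Mercurial), a pretxncommit hook that rejects
  changesets with an empty description might look like:

    # checkmsg.py - hypothetical module on Python's search path,
    # enabled with:
    #   [hooks]
    #   pretxncommit.checkmsg = python:checkmsg.rejectempty

    def rejectempty(ui, repo, hooktype, node=None, **kwargs):
        # $HG_NODE arrives as the "node" keyword argument; a true return
        # value (or an exception) is treated as hook failure and rolls
        # the pretxncommit transaction back
        if not repo.changectx(node).description().strip():
            ui.warn('rejecting commit with empty description\n')
            return True
        return False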
352 352
353 353 http_proxy::
354 354 Used to access web-based Mercurial repositories through an HTTP
355 355 proxy.
356 356 host;;
357 357 Host name and (optional) port of the proxy server, for example
358 358 "myproxy:8000".
359 359 no;;
360 360 Optional. Comma-separated list of host names that should bypass
361 361 the proxy.
362 362 passwd;;
363 363 Optional. Password to authenticate with at the proxy server.
364 364 user;;
365 365 Optional. User name to authenticate with at the proxy server.
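
  A sketch of an [http_proxy] section (host, credentials and bypass
  list are placeholders):

    [http_proxy]
    host = myproxy:8000
    no = localhost, intranet.example.com
    user = alice
    passwd = secret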
366 366
367 367 smtp::
368 368 Configuration for extensions that need to send email messages.
369 369 host;;
370 370 Host name of mail server, e.g. "mail.example.com".
371 371 port;;
372 372 Optional. Port to connect to on mail server. Default: 25.
373 373 tls;;
374 374 Optional. Whether to connect to mail server using TLS. True or
375 375 False. Default: False.
376 376 username;;
377 377 Optional. User name to authenticate to SMTP server with.
378 378 If username is specified, password must also be specified.
379 379 Default: none.
380 380 password;;
381 381 Optional. Password to authenticate to SMTP server with.
382 382 If username is specified, password must also be specified.
383 383 Default: none.
384 384 local_hostname;;
385 385 Optional. The hostname that the sender can use to identify itself
386 386 to the MTA.
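
  A sketch of an [smtp] section (server and account details are
  placeholders):

    [smtp]
    host = mail.example.com
    port = 25
    tls = true
    username = hguser
    password = secret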
387 387
388 388 paths::
389 389 Assigns symbolic names to repositories. The left side is the
390 390 symbolic name, and the right gives the directory or URL that is the
391 391 location of the repository. Default paths can be declared by
392 392 setting the following entries.
393 393 default;;
394 394 Directory or URL to use when pulling if no source is specified.
395 395 Default is set to repository from which the current repository
396 396 was cloned.
397 397 default-push;;
398 398 Optional. Directory or URL to use when pushing if no destination
399 399 is specified.
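
  For example (the URLs and names are placeholders):

    [paths]
    default = http://hg.example.com/main
    default-push = ssh://hg@hg.example.com/main
    stable = ../stable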
400 400
401 401 server::
402 402 Controls generic server settings.
403 403 uncompressed;;
404 404 Whether to allow clients to clone a repo using the uncompressed
405 405 streaming protocol. This transfers about 40% more data than a
406 406 regular clone, but uses less memory and CPU on both server and
407 407 client. Over a LAN (100Mbps or better) or a very fast WAN, an
408 408 uncompressed streaming clone is a lot faster (~10x) than a regular
409 409 clone. Over most WAN connections (anything slower than about
410 410 6Mbps), uncompressed streaming is slower, because of the extra
411 411 data transfer overhead. Default is False.
412 412
413 413 trusted::
414 414 For security reasons, Mercurial will not use the settings in
415 415 the .hg/hgrc file from a repository if it doesn't belong to a
416 416 trusted user or to a trusted group. The main exception is the
417 417 web interface, which automatically uses some safe settings, since
418 418 it's common to serve repositories from different users.
419 419
420 420 This section specifies what users and groups are trusted. The
421 421 current user is always trusted. To trust everybody, list a user
422 422 or a group with name "*".
423 423
424 424 users;;
425 425 Comma-separated list of trusted users.
426 426 groups;;
427 427 Comma-separated list of trusted groups.
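
  For example (user and group names are placeholders):

    [trusted]
    users = alice, bob
    groups = developers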
428 428
429 429 ui::
430 430 User interface controls.
431 431 debug;;
432 432 Print debugging information. True or False. Default is False.
433 433 editor;;
434 434 The editor to use during a commit. Default is $EDITOR or "vi".
435 435 fallbackencoding;;
436 436 Encoding to try if it's not possible to decode the changelog using
437 437 UTF-8. Default is ISO-8859-1.
438 438 ignore;;
439 439 A file to read per-user ignore patterns from. This file should be in
440 440 the same format as a repository-wide .hgignore file. This option
441 441 supports hook syntax, so if you want to specify multiple ignore
442 442 files, you can do so by setting something like
443 443 "ignore.other = ~/.hgignore2". For details of the ignore file
444 444 format, see the hgignore(5) man page.
445 445 interactive;;
446 446 Allow prompting the user. True or False. Default is True.
447 447 logtemplate;;
448 448 Template string for commands that print changesets.
449 449 merge;;
450 450 The conflict resolution program to use during a manual merge.
451 451 Default is "hgmerge".
452 452 patch;;
453 453 command to use to apply patches. If unset, 'gpatch' or 'patch' is
454 454 looked for in PATH.
455 455 quiet;;
456 456 Reduce the amount of output printed. True or False. Default is False.
457 457 remotecmd;;
458 458 remote command to use for clone/push/pull operations. Default is 'hg'.
459 459 report_untrusted;;
460 460 Warn if a .hg/hgrc file is ignored due to not being owned by a
461 461 trusted user or group. True or False. Default is True.
462 462 slash;;
463 463 Display paths using a slash ("/") as the path separator. This only
464 464 makes a difference on systems where the default path separator is not
465 465 the slash character (e.g. Windows uses the backslash character ("\")).
466 466 Default is False.
467 467 ssh;;
468 468 command to use for SSH connections. Default is 'ssh'.
469 469 strict;;
470 470 Require exact command names, instead of allowing unambiguous
471 471 abbreviations. True or False. Default is False.
472 472 style;;
473 473 Name of style to use for command output.
474 474 timeout;;
475 475 The timeout used when a lock is held (in seconds), a negative value
476 476 means no timeout. Default is 600.
477 477 username;;
478 478 The committer of a changeset created when running "commit".
479 479 Typically a person's name and email address, e.g. "Fred Widget
480 480 <fred@example.com>". Default is $EMAIL or username@hostname.
481 481 If the username in hgrc is empty, it has to be specified manually or
482 482 in a different hgrc file (e.g. $HOME/.hgrc, if the admin set "username ="
483 483 in the system hgrc).
484 484 verbose;;
485 485 Increase the amount of output printed. True or False. Default is False.
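
  A sketch of a typical [ui] section (values are placeholders):

    [ui]
    username = Fred Widget <fred@example.com>
    editor = vim
    merge = hgmerge
    ssh = ssh -C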
486 486
487 487
488 488 web::
489 489 Web interface configuration.
490 490 accesslog;;
491 491 Where to output the access log. Default is stdout.
492 492 address;;
493 493 Interface address to bind to. Default is all.
494 494 allow_archive;;
495 495 List of archive formats (bz2, gz, zip) allowed for downloading.
496 496 Default is empty.
497 497 allowbz2;;
498 498 (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
499 499 Default is false.
500 500 allowgz;;
501 501 (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
502 502 Default is false.
503 503 allowpull;;
504 504 Whether to allow pulling from the repository. Default is true.
505 505 allow_push;;
506 506 Whether to allow pushing to the repository. If empty or not set,
507 507 push is not allowed. If the special value "*", any remote user
508 508 can push, including unauthenticated users. Otherwise, the remote
509 509 user must have been authenticated, and the authenticated user name
510 510 must be present in this list (separated by whitespace or ",").
511 511 The contents of the allow_push list are examined after the
512 512 deny_push list.
513 513 allowzip;;
514 514 (DEPRECATED) Whether to allow .zip downloading of repo revisions.
515 515 Default is false. This feature creates temporary files.
516 516 baseurl;;
517 517 Base URL to use when publishing URLs in other locations, so
518 518 third-party tools like email notification hooks can construct URLs.
519 519 Example: "http://hgserver/repos/"
520 520 contact;;
521 521 Name or email address of the person in charge of the repository.
522 522 Default is "unknown".
523 523 deny_push;;
524 524 Whether to deny pushing to the repository. If empty or not set,
525 525 push is not denied. If the special value "*", all remote users
526 526 are denied push. Otherwise, unauthenticated users are all denied,
527 527 and any authenticated user name present in this list (separated by
528 528 whitespace or ",") is also denied. The contents of the deny_push
529 529 list are examined before the allow_push list.
530 530 description;;
531 531 Textual description of the repository's purpose or contents.
532 532 Default is "unknown".
533 533 encoding;;
534 534 Character encoding name.
535 535 Example: "UTF-8"
536 536 errorlog;;
537 537 Where to output the error log. Default is stderr.
538 538 hidden;;
539 539 Whether to hide the repository in the hgwebdir index. Default is false.
540 540 ipv6;;
541 541 Whether to use IPv6. Default is false.
542 542 name;;
543 543 Repository name to use in the web interface. Default is current
544 544 working directory.
545 545 maxchanges;;
546 546 Maximum number of changes to list on the changelog. Default is 10.
547 547 maxfiles;;
548 548 Maximum number of files to list per changeset. Default is 10.
549 549 port;;
550 550 Port to listen on. Default is 8000.
551 551 push_ssl;;
552 552 Whether to require that inbound pushes be transported over SSL to
553 553 prevent password sniffing. Default is true.
554 554 staticurl;;
555 555 Base URL to use for static files. If unset, static files (e.g.
556 556 the hgicon.png favicon) will be served by the CGI script itself.
557 557 Use this setting to serve them directly with the HTTP server.
558 558 Example: "http://hgserver/static/"
559 559 stripes;;
560 560 How many lines a "zebra stripe" should span in multiline output.
561 561 Default is 1; set to 0 to disable.
562 562 style;;
563 563 Which template map style to use.
564 564 templates;;
565 565 Where to find the HTML templates. Default is install path.
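
  A sketch of a [web] section for a small internal server (values are
  placeholders; allowing push without SSL is only sensible on a
  trusted network):

    [web]
    address = 0.0.0.0
    port = 8000
    contact = Fred Widget <fred@example.com>
    allow_push = *
    push_ssl = false
    allow_archive = gz, zip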
566 566
567 567
568 568 AUTHOR
569 569 ------
570 570 Bryan O'Sullivan <bos@serpentine.com>.
571 571
572 572 Mercurial was written by Matt Mackall <mpm@selenic.com>.
573 573
574 574 SEE ALSO
575 575 --------
576 576 hg(1), hgignore(5)
577 577
578 578 COPYING
579 579 -------
580 580 This manual page is copyright 2005 Bryan O'Sullivan.
581 581 Mercurial is copyright 2005-2007 Matt Mackall.
582 582 Free use of this software is granted under the terms of the GNU General
583 583 Public License (GPL).
@@ -1,2258 +1,2292 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone"
38 38
39 39 # Patch names looks like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head)
341 341 self.strip(repo, n, update=False, backup='strip')
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 348 if n == None:
349 349 raise util.Abort(_("repo commit failed"))
350 350 try:
351 351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 352 except:
353 353 raise util.Abort(_("unable to read %s") % patch)
354 354
355 355 patchf = self.opener(patch, "w")
356 356 if comments:
357 357 comments = "\n".join(comments) + '\n\n'
358 358 patchf.write(comments)
359 359 self.printdiff(repo, head, n, fp=patchf)
360 360 patchf.close()
361 361 self.removeundo(repo)
362 362 return (0, n)
363 363
364 364 def qparents(self, repo, rev=None):
365 365 if rev is None:
366 366 (p1, p2) = repo.dirstate.parents()
367 367 if p2 == revlog.nullid:
368 368 return p1
369 369 if len(self.applied) == 0:
370 370 return None
371 371 return revlog.bin(self.applied[-1].rev)
372 372 pp = repo.changelog.parents(rev)
373 373 if pp[1] != revlog.nullid:
374 374 arevs = [ x.rev for x in self.applied ]
375 375 p0 = revlog.hex(pp[0])
376 376 p1 = revlog.hex(pp[1])
377 377 if p0 in arevs:
378 378 return pp[0]
379 379 if p1 in arevs:
380 380 return pp[1]
381 381 return pp[0]
382 382
383 383 def mergepatch(self, repo, mergeq, series):
384 384 if len(self.applied) == 0:
385 385 # each of the patches merged in will have two parents. This
386 386 # can confuse the qrefresh, qdiff, and strip code because it
387 387 # needs to know which parent is actually in the patch queue.
388 388 # so, we insert a merge marker with only one parent. This way
389 389 # the first patch in the queue is never a merge patch
390 390 #
391 391 pname = ".hg.patches.merge.marker"
392 392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 393 self.removeundo(repo)
394 394 self.applied.append(statusentry(revlog.hex(n), pname))
395 395 self.applied_dirty = 1
396 396
397 397 head = self.qparents(repo)
398 398
399 399 for patch in series:
400 400 patch = mergeq.lookup(patch, strict=True)
401 401 if not patch:
402 402 self.ui.warn("patch %s does not exist\n" % patch)
403 403 return (1, None)
404 404 pushable, reason = self.pushable(patch)
405 405 if not pushable:
406 406 self.explain_pushable(patch, all_patches=True)
407 407 continue
408 408 info = mergeq.isapplied(patch)
409 409 if not info:
410 410 self.ui.warn("patch %s is not applied\n" % patch)
411 411 return (1, None)
412 412 rev = revlog.bin(info[1])
413 413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 414 if head:
415 415 self.applied.append(statusentry(revlog.hex(head), patch))
416 416 self.applied_dirty = 1
417 417 if err:
418 418 return (err, head)
419 419 self.save_dirty()
420 420 return (0, head)
421 421
422 422 def patch(self, repo, patchfile):
423 423 '''Apply patchfile to the working directory.
424 424 patchfile: file name of patch'''
425 425 files = {}
426 426 try:
427 427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 428 files=files)
429 429 except Exception, inst:
430 430 self.ui.note(str(inst) + '\n')
431 431 if not self.ui.verbose:
432 432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 433 return (False, files, False)
434 434
435 435 return (True, files, fuzz)
436 436
437 437 def apply(self, repo, series, list=False, update_status=True,
438 438 strict=False, patchdir=None, merge=None, all_files={}):
439 439 wlock = lock = tr = None
440 440 try:
441 441 wlock = repo.wlock()
442 442 lock = repo.lock()
443 443 tr = repo.transaction()
444 444 try:
445 445 ret = self._apply(repo, series, list, update_status,
446 446 strict, patchdir, merge, all_files=all_files)
447 447 tr.close()
448 448 self.save_dirty()
449 449 return ret
450 450 except:
451 451 try:
452 452 tr.abort()
453 453 finally:
454 454 repo.invalidate()
455 455 repo.dirstate.invalidate()
456 456 raise
457 457 finally:
458 458 del tr, lock, wlock
459 459 self.removeundo(repo)
460 460
461 461 def _apply(self, repo, series, list=False, update_status=True,
462 462 strict=False, patchdir=None, merge=None, all_files={}):
463 463 # TODO unify with commands.py
464 464 if not patchdir:
465 465 patchdir = self.path
466 466 err = 0
467 467 n = None
468 468 for patchname in series:
469 469 pushable, reason = self.pushable(patchname)
470 470 if not pushable:
471 471 self.explain_pushable(patchname, all_patches=True)
472 472 continue
473 473 self.ui.warn("applying %s\n" % patchname)
474 474 pf = os.path.join(patchdir, patchname)
475 475
476 476 try:
477 477 message, comments, user, date, patchfound = self.readheaders(patchname)
478 478 except:
479 479 self.ui.warn("Unable to read %s\n" % patchname)
480 480 err = 1
481 481 break
482 482
483 483 if not message:
484 484 message = "imported patch %s\n" % patchname
485 485 else:
486 486 if list:
487 487 message.append("\nimported patch %s" % patchname)
488 488 message = '\n'.join(message)
489 489
490 490 (patcherr, files, fuzz) = self.patch(repo, pf)
491 491 all_files.update(files)
492 492 patcherr = not patcherr
493 493
494 494 if merge and files:
495 495 # Mark as removed/merged and update dirstate parent info
496 496 removed = []
497 497 merged = []
498 498 for f in files:
499 499 if os.path.exists(repo.wjoin(f)):
500 500 merged.append(f)
501 501 else:
502 502 removed.append(f)
503 503 for f in removed:
504 504 repo.dirstate.remove(f)
505 505 for f in merged:
506 506 repo.dirstate.merge(f)
507 507 p1, p2 = repo.dirstate.parents()
508 508 repo.dirstate.setparents(p1, merge)
509 509 files = patch.updatedir(self.ui, repo, files)
510 510 n = repo.commit(files, message, user, date, force=1)
511 511
512 512 if n == None:
513 513 raise util.Abort(_("repo commit failed"))
514 514
515 515 if update_status:
516 516 self.applied.append(statusentry(revlog.hex(n), patchname))
517 517
518 518 if patcherr:
519 519 if not patchfound:
520 520 self.ui.warn("patch %s is empty\n" % patchname)
521 521 err = 0
522 522 else:
523 523 self.ui.warn("patch failed, rejects left in working dir\n")
524 524 err = 1
525 525 break
526 526
527 527 if fuzz and strict:
528 528 self.ui.warn("fuzz found when applying patch, stopping\n")
529 529 err = 1
530 530 break
531 531 return (err, n)
532 532
533 533 def delete(self, repo, patches, opts):
534 534 if not patches and not opts.get('rev'):
535 535 raise util.Abort(_('qdelete requires at least one revision or '
536 536 'patch name'))
537 537
538 538 realpatches = []
539 539 for patch in patches:
540 540 patch = self.lookup(patch, strict=True)
541 541 info = self.isapplied(patch)
542 542 if info:
543 543 raise util.Abort(_("cannot delete applied patch %s") % patch)
544 544 if patch not in self.series:
545 545 raise util.Abort(_("patch %s not in series file") % patch)
546 546 realpatches.append(patch)
547 547
548 548 appliedbase = 0
549 549 if opts.get('rev'):
550 550 if not self.applied:
551 551 raise util.Abort(_('no patches applied'))
552 552 revs = cmdutil.revrange(repo, opts['rev'])
553 553 if len(revs) > 1 and revs[0] > revs[1]:
554 554 revs.reverse()
555 555 for rev in revs:
556 556 if appliedbase >= len(self.applied):
557 557 raise util.Abort(_("revision %d is not managed") % rev)
558 558
559 559 base = revlog.bin(self.applied[appliedbase].rev)
560 560 node = repo.changelog.node(rev)
561 561 if node != base:
562 562 raise util.Abort(_("cannot delete revision %d above "
563 563 "applied patches") % rev)
564 564 realpatches.append(self.applied[appliedbase].name)
565 565 appliedbase += 1
566 566
567 567 if not opts.get('keep'):
568 568 r = self.qrepo()
569 569 if r:
570 570 r.remove(realpatches, True)
571 571 else:
572 572 for p in realpatches:
573 573 os.unlink(self.join(p))
574 574
575 575 if appliedbase:
576 576 del self.applied[:appliedbase]
577 577 self.applied_dirty = 1
578 578 indices = [self.find_series(p) for p in realpatches]
579 579 indices.sort()
580 580 for i in indices[-1::-1]:
581 581 del self.full_series[i]
582 582 self.parse_series()
583 583 self.series_dirty = 1
584 584
585 585 def check_toppatch(self, repo):
586 586 if len(self.applied) > 0:
587 587 top = revlog.bin(self.applied[-1].rev)
588 588 pp = repo.dirstate.parents()
589 589 if top not in pp:
590 590 raise util.Abort(_("working directory revision is not qtip"))
591 591 return top
592 592 return None
593 593 def check_localchanges(self, repo, force=False, refresh=True):
594 594 m, a, r, d = repo.status()[:4]
595 595 if m or a or r or d:
596 596 if not force:
597 597 if refresh:
598 598 raise util.Abort(_("local changes found, refresh first"))
599 599 else:
600 600 raise util.Abort(_("local changes found"))
601 601 return m, a, r, d
602 602
603 603 def new(self, repo, patch, *pats, **opts):
604 604 msg = opts.get('msg')
605 605 force = opts.get('force')
606 user = opts.get('user')
606 607 if os.path.exists(self.join(patch)):
607 608 raise util.Abort(_('patch "%s" already exists') % patch)
608 609 if opts.get('include') or opts.get('exclude') or pats:
609 610 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
610 611 m, a, r, d = repo.status(files=fns, match=match)[:4]
611 612 else:
612 613 m, a, r, d = self.check_localchanges(repo, force)
613 614 fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
614 615 commitfiles = m + a + r
615 616 self.check_toppatch(repo)
616 617 wlock = repo.wlock()
617 618 try:
618 619 insert = self.full_series_end()
619 620 commitmsg = msg and msg or ("[mq]: %s" % patch)
620 n = repo.commit(commitfiles, commitmsg, match=match, force=True)
621 n = repo.commit(commitfiles, commitmsg, user, match=match, force=True)
621 622 if n == None:
622 623 raise util.Abort(_("repo commit failed"))
623 624 self.full_series[insert:insert] = [patch]
624 625 self.applied.append(statusentry(revlog.hex(n), patch))
625 626 self.parse_series()
626 627 self.series_dirty = 1
627 628 self.applied_dirty = 1
628 629 p = self.opener(patch, "w")
630 if user:
631 p.write("From: " + user + "\n\n")
629 632 if msg:
630 633 msg = msg + "\n"
631 634 p.write(msg)
632 635 p.close()
633 636 wlock = None
634 637 r = self.qrepo()
635 638 if r: r.add([patch])
636 639 if commitfiles:
637 640 self.refresh(repo, short=True, git=opts.get('git'))
638 641 self.removeundo(repo)
639 642 finally:
640 643 del wlock
641 644
642 645 def strip(self, repo, rev, update=True, backup="all"):
643 646 wlock = lock = None
644 647 try:
645 648 wlock = repo.wlock()
646 649 lock = repo.lock()
647 650
648 651 if update:
649 652 self.check_localchanges(repo, refresh=False)
650 653 urev = self.qparents(repo, rev)
651 654 hg.clean(repo, urev)
652 655 repo.dirstate.write()
653 656
654 657 self.removeundo(repo)
655 658 repair.strip(self.ui, repo, rev, backup)
656 659 # strip may have unbundled a set of backed up revisions after
657 660 # the actual strip
658 661 self.removeundo(repo)
659 662 finally:
660 663 del lock, wlock
661 664
662 665 def isapplied(self, patch):
663 666 """returns (index, rev, patch)"""
664 667 for i in xrange(len(self.applied)):
665 668 a = self.applied[i]
666 669 if a.name == patch:
667 670 return (i, a.rev, a.name)
668 671 return None
669 672
670 673 # if the exact patch name does not exist, we try a few
671 674 # variations. If strict is passed, we try only #1
672 675 #
673 676 # 1) a number to indicate an offset in the series file
674 677 # 2) a unique substring of the patch name was given
675 678 # 3) patchname[-+]num to indicate an offset in the series file
676 679 def lookup(self, patch, strict=False):
677 680 patch = patch and str(patch)
678 681
679 682 def partial_name(s):
680 683 if s in self.series:
681 684 return s
682 685 matches = [x for x in self.series if s in x]
683 686 if len(matches) > 1:
684 687 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
685 688 for m in matches:
686 689 self.ui.warn(' %s\n' % m)
687 690 return None
688 691 if matches:
689 692 return matches[0]
690 693 if len(self.series) > 0 and len(self.applied) > 0:
691 694 if s == 'qtip':
692 695 return self.series[self.series_end(True)-1]
693 696 if s == 'qbase':
694 697 return self.series[0]
695 698 return None
696 699 if patch == None:
697 700 return None
698 701
699 702 # we don't want to return a partial match until we make
700 703 # sure the file name passed in does not exist (checked below)
701 704 res = partial_name(patch)
702 705 if res and res == patch:
703 706 return res
704 707
705 708 if not os.path.isfile(self.join(patch)):
706 709 try:
707 710 sno = int(patch)
708 711 except(ValueError, OverflowError):
709 712 pass
710 713 else:
711 714 if sno < len(self.series):
712 715 return self.series[sno]
713 716 if not strict:
714 717 # return any partial match made above
715 718 if res:
716 719 return res
717 720 minus = patch.rfind('-')
718 721 if minus >= 0:
719 722 res = partial_name(patch[:minus])
720 723 if res:
721 724 i = self.series.index(res)
722 725 try:
723 726 off = int(patch[minus+1:] or 1)
724 727 except(ValueError, OverflowError):
725 728 pass
726 729 else:
727 730 if i - off >= 0:
728 731 return self.series[i - off]
729 732 plus = patch.rfind('+')
730 733 if plus >= 0:
731 734 res = partial_name(patch[:plus])
732 735 if res:
733 736 i = self.series.index(res)
734 737 try:
735 738 off = int(patch[plus+1:] or 1)
736 739 except(ValueError, OverflowError):
737 740 pass
738 741 else:
739 742 if i + off < len(self.series):
740 743 return self.series[i + off]
741 744 raise util.Abort(_("patch %s not in series") % patch)
742 745
743 746 def push(self, repo, patch=None, force=False, list=False,
744 747 mergeq=None):
745 748 wlock = repo.wlock()
746 749 try:
747 750 patch = self.lookup(patch)
748 751 # Suppose our series file is: A B C and the current 'top'
749 752 # patch is B. qpush C should be performed (moving forward)
750 753 # qpush B is a NOP (no change) qpush A is an error (can't
751 754 # go backwards with qpush)
752 755 if patch:
753 756 info = self.isapplied(patch)
754 757 if info:
755 758 if info[0] < len(self.applied) - 1:
756 759 raise util.Abort(
757 760 _("cannot push to a previous patch: %s") % patch)
758 761 if info[0] < len(self.series) - 1:
759 762 self.ui.warn(
760 763 _('qpush: %s is already at the top\n') % patch)
761 764 else:
762 765 self.ui.warn(_('all patches are currently applied\n'))
763 766 return
764 767
765 768 # Following the above example, starting at 'top' of B:
766 769 # qpush should be performed (pushes C), but a subsequent
767 770 # qpush without an argument is an error (nothing to
768 771 # apply). This allows a loop of "...while hg qpush..." to
769 772 # work as it detects an error when done
770 773 if self.series_end() == len(self.series):
771 774 self.ui.warn(_('patch series already fully applied\n'))
772 775 return 1
773 776 if not force:
774 777 self.check_localchanges(repo)
775 778
776 779 self.applied_dirty = 1;
777 780 start = self.series_end()
778 781 if start > 0:
779 782 self.check_toppatch(repo)
780 783 if not patch:
781 784 patch = self.series[start]
782 785 end = start + 1
783 786 else:
784 787 end = self.series.index(patch, start) + 1
785 788 s = self.series[start:end]
786 789 all_files = {}
787 790 try:
788 791 if mergeq:
789 792 ret = self.mergepatch(repo, mergeq, s)
790 793 else:
791 794 ret = self.apply(repo, s, list, all_files=all_files)
792 795 except:
793 796 self.ui.warn(_('cleaning up working directory...'))
794 797 node = repo.dirstate.parents()[0]
795 798 hg.revert(repo, node, None)
796 799 unknown = repo.status()[4]
797 800 # only remove unknown files that we know we touched or
798 801 # created while patching
799 802 for f in unknown:
800 803 if f in all_files:
801 804 util.unlink(repo.wjoin(f))
802 805 self.ui.warn(_('done\n'))
803 806 raise
804 807 top = self.applied[-1].name
805 808 if ret[0]:
806 809 self.ui.write(
807 810 "Errors during apply, please fix and refresh %s\n" % top)
808 811 else:
809 812 self.ui.write("Now at: %s\n" % top)
810 813 return ret[0]
811 814 finally:
812 815 del wlock
813 816
814 817 def pop(self, repo, patch=None, force=False, update=True, all=False):
815 818 def getfile(f, rev, flags):
816 819 t = repo.file(f).read(rev)
817 820 repo.wwrite(f, t, flags)
818 821
819 822 wlock = repo.wlock()
820 823 try:
821 824 if patch:
822 825 # index, rev, patch
823 826 info = self.isapplied(patch)
824 827 if not info:
825 828 patch = self.lookup(patch)
826 829 info = self.isapplied(patch)
827 830 if not info:
828 831 raise util.Abort(_("patch %s is not applied") % patch)
829 832
830 833 if len(self.applied) == 0:
831 834 # Allow qpop -a to work repeatedly,
832 835 # but not qpop without an argument
833 836 self.ui.warn(_("no patches applied\n"))
834 837 return not all
835 838
836 839 if not update:
837 840 parents = repo.dirstate.parents()
838 841 rr = [ revlog.bin(x.rev) for x in self.applied ]
839 842 for p in parents:
840 843 if p in rr:
841 844 self.ui.warn("qpop: forcing dirstate update\n")
842 845 update = True
843 846
844 847 if not force and update:
845 848 self.check_localchanges(repo)
846 849
847 850 self.applied_dirty = 1;
848 851 end = len(self.applied)
849 852 if not patch:
850 853 if all:
851 854 popi = 0
852 855 else:
853 856 popi = len(self.applied) - 1
854 857 else:
855 858 popi = info[0] + 1
856 859 if popi >= end:
857 860 self.ui.warn("qpop: %s is already at the top\n" % patch)
858 861 return
859 862 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
860 863
861 864 start = info[0]
862 865 rev = revlog.bin(info[1])
863 866
864 867 # we know there are no local changes, so we can make a simplified
865 868 # form of hg.update.
866 869 if update:
867 870 top = self.check_toppatch(repo)
868 871 qp = self.qparents(repo, rev)
869 872 changes = repo.changelog.read(qp)
870 873 mmap = repo.manifest.read(changes[0])
871 874 m, a, r, d, u = repo.status(qp, top)[:5]
872 875 if d:
873 876 raise util.Abort("deletions found between repo revs")
874 877 for f in m:
875 878 getfile(f, mmap[f], mmap.flags(f))
876 879 for f in r:
877 880 getfile(f, mmap[f], mmap.flags(f))
878 881 for f in m + r:
879 882 repo.dirstate.normal(f)
880 883 for f in a:
881 884 try:
882 885 os.unlink(repo.wjoin(f))
883 886 except OSError, e:
884 887 if e.errno != errno.ENOENT:
885 888 raise
886 889 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
887 890 except: pass
888 891 repo.dirstate.forget(f)
889 892 repo.dirstate.setparents(qp, revlog.nullid)
890 893 self.strip(repo, rev, update=False, backup='strip')
891 894 del self.applied[start:end]
892 895 if len(self.applied):
893 896 self.ui.write("Now at: %s\n" % self.applied[-1].name)
894 897 else:
895 898 self.ui.write("Patch queue now empty\n")
896 899 finally:
897 900 del wlock
898 901
899 902 def diff(self, repo, pats, opts):
900 903 top = self.check_toppatch(repo)
901 904 if not top:
902 905 self.ui.write("No patches applied\n")
903 906 return
904 907 qp = self.qparents(repo, top)
905 908 if opts.get('git'):
906 909 self.diffopts().git = True
907 910 self.printdiff(repo, qp, files=pats, opts=opts)
908 911
909 912 def refresh(self, repo, pats=None, **opts):
910 913 if len(self.applied) == 0:
911 914 self.ui.write("No patches applied\n")
912 915 return 1
913 916 wlock = repo.wlock()
914 917 try:
915 918 self.check_toppatch(repo)
916 919 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
917 920 top = revlog.bin(top)
918 921 cparents = repo.changelog.parents(top)
919 922 patchparent = self.qparents(repo, top)
920 923 message, comments, user, date, patchfound = self.readheaders(patchfn)
921 924
922 925 patchf = self.opener(patchfn, 'r+')
923 926
924 927 # if the patch was a git patch, refresh it as a git patch
925 928 for line in patchf:
926 929 if line.startswith('diff --git'):
927 930 self.diffopts().git = True
928 931 break
929 932
930 933 msg = opts.get('msg', '').rstrip()
931 934 if msg and comments:
932 935 # Remove the existing message, keeping the rest of the
933 936 # comments fields.
934 937 # If comments contains a 'subject: ' line, message will begin
935 938 # with that field followed by a blank line.
936 939 if message:
937 940 subj = 'subject: ' + message[0].lower()
938 941 for i in xrange(len(comments)):
939 942 if subj == comments[i].lower():
940 943 del comments[i]
941 944 message = message[2:]
942 945 break
943 946 ci = 0
944 947 for mi in xrange(len(message)):
945 948 while message[mi] != comments[ci]:
946 949 ci += 1
947 950 del comments[ci]
951
952 newuser = opts.get('user')
953 if newuser:
954 # Update all references to a user in the patch header.
955 # If none found, add "From: " header.
956 needfrom = True
957 for prefix in ['# User ', 'From: ']:
958 for i in xrange(len(comments)):
959 if comments[i].startswith(prefix):
960 comments[i] = prefix + newuser
961 needfrom = False
962 break
963 if needfrom:
964 comments = ['From: ' + newuser, ''] + comments
965 user = newuser
966
948 967 if msg:
949 968 comments.append(msg)
950 969
951 970 patchf.seek(0)
952 971 patchf.truncate()
953 972
954 973 if comments:
955 974 comments = "\n".join(comments) + '\n\n'
956 975 patchf.write(comments)
957 976
958 977 if opts.get('git'):
959 978 self.diffopts().git = True
960 979 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
961 980 tip = repo.changelog.tip()
962 981 if top == tip:
963 982 # if the top of our patch queue is also the tip, there is an
964 983 # optimization here. We update the dirstate in place and strip
965 984 # off the tip commit. Then just commit the current directory
966 985 # tree. We can also send repo.commit the list of files
967 986 # changed to speed up the diff
968 987 #
969 988 # in short mode, we only diff the files included in the
970 989 # patch already
971 990 #
972 991 # this should really read:
973 992 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
974 993 # but we do it backwards to take advantage of manifest/chlog
975 994 # caching against the next repo.status call
976 995 #
977 996 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
978 997 changes = repo.changelog.read(tip)
979 998 man = repo.manifest.read(changes[0])
980 999 aaa = aa[:]
981 1000 if opts.get('short'):
982 1001 filelist = mm + aa + dd
983 1002 match = dict.fromkeys(filelist).__contains__
984 1003 else:
985 1004 filelist = None
986 1005 match = util.always
987 1006 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
988 1007
989 1008 # we might end up with files that were added between
990 1009 # tip and the dirstate parent, but then changed in the
991 1010 # local dirstate. in this case, we want them to only
992 1011 # show up in the added section
993 1012 for x in m:
994 1013 if x not in aa:
995 1014 mm.append(x)
996 1015 # we might end up with files added by the local dirstate that
997 1016 # were deleted by the patch. In this case, they should only
998 1017 # show up in the changed section.
999 1018 for x in a:
1000 1019 if x in dd:
1001 1020 del dd[dd.index(x)]
1002 1021 mm.append(x)
1003 1022 else:
1004 1023 aa.append(x)
1005 1024 # make sure any files deleted in the local dirstate
1006 1025 # are not in the add or change column of the patch
1007 1026 forget = []
1008 1027 for x in d + r:
1009 1028 if x in aa:
1010 1029 del aa[aa.index(x)]
1011 1030 forget.append(x)
1012 1031 continue
1013 1032 elif x in mm:
1014 1033 del mm[mm.index(x)]
1015 1034 dd.append(x)
1016 1035
1017 1036 m = util.unique(mm)
1018 1037 r = util.unique(dd)
1019 1038 a = util.unique(aa)
1020 1039 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1021 1040 filelist = util.unique(c[0] + c[1] + c[2])
1022 1041 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1023 1042 fp=patchf, changes=c, opts=self.diffopts())
1024 1043 patchf.close()
1025 1044
1026 1045 repo.dirstate.setparents(*cparents)
1027 1046 copies = {}
1028 1047 for dst in a:
1029 1048 src = repo.dirstate.copied(dst)
1030 1049 if src is not None:
1031 1050 copies.setdefault(src, []).append(dst)
1032 1051 repo.dirstate.add(dst)
1033 1052 # remember the copies between patchparent and tip
1034 1053 # this may be slow, so don't do it if we're not tracking copies
1035 1054 if self.diffopts().git:
1036 1055 for dst in aaa:
1037 1056 f = repo.file(dst)
1038 1057 src = f.renamed(man[dst])
1039 1058 if src:
1040 1059 copies[src[0]] = copies.get(dst, [])
1041 1060 if dst in a:
1042 1061 copies[src[0]].append(dst)
1043 1062 # we can't copy a file created by the patch itself
1044 1063 if dst in copies:
1045 1064 del copies[dst]
1046 1065 for src, dsts in copies.iteritems():
1047 1066 for dst in dsts:
1048 1067 repo.dirstate.copy(src, dst)
1049 1068 for f in r:
1050 1069 repo.dirstate.remove(f)
1051 1070 # if the patch excludes a modified file, mark that
1052 1071 # file with mtime=0 so status can see it.
1053 1072 mm = []
1054 1073 for i in xrange(len(m)-1, -1, -1):
1055 1074 if not matchfn(m[i]):
1056 1075 mm.append(m[i])
1057 1076 del m[i]
1058 1077 for f in m:
1059 1078 repo.dirstate.normal(f)
1060 1079 for f in mm:
1061 1080 repo.dirstate.normallookup(f)
1062 1081 for f in forget:
1063 1082 repo.dirstate.forget(f)
1064 1083
1065 1084 if not msg:
1066 1085 if not message:
1067 1086 message = "[mq]: %s\n" % patchfn
1068 1087 else:
1069 1088 message = "\n".join(message)
1070 1089 else:
1071 1090 message = msg
1072 1091
1092 if not user:
1093 user = changes[1]
1094
1073 1095 self.strip(repo, top, update=False,
1074 1096 backup='strip')
1075 n = repo.commit(filelist, message, changes[1], match=matchfn,
1097 n = repo.commit(filelist, message, user, match=matchfn,
1076 1098 force=1)
1077 1099 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1078 1100 self.applied_dirty = 1
1079 1101 self.removeundo(repo)
1080 1102 else:
1081 1103 self.printdiff(repo, patchparent, fp=patchf)
1082 1104 patchf.close()
1083 1105 added = repo.status()[1]
1084 1106 for a in added:
1085 1107 f = repo.wjoin(a)
1086 1108 try:
1087 1109 os.unlink(f)
1088 1110 except OSError, e:
1089 1111 if e.errno != errno.ENOENT:
1090 1112 raise
1091 1113 try: os.removedirs(os.path.dirname(f))
1092 1114 except: pass
1093 1115 # forget the file copies in the dirstate
1094 1116 # push should readd the files later on
1095 1117 repo.dirstate.forget(a)
1096 1118 self.pop(repo, force=True)
1097 1119 self.push(repo, force=True)
1098 1120 finally:
1099 1121 del wlock
1100 1122
1101 1123 def init(self, repo, create=False):
1102 1124 if not create and os.path.isdir(self.path):
1103 1125 raise util.Abort(_("patch queue directory already exists"))
1104 1126 try:
1105 1127 os.mkdir(self.path)
1106 1128 except OSError, inst:
1107 1129 if inst.errno != errno.EEXIST or not create:
1108 1130 raise
1109 1131 if create:
1110 1132 return self.qrepo(create=True)
1111 1133
1112 1134 def unapplied(self, repo, patch=None):
1113 1135 if patch and patch not in self.series:
1114 1136 raise util.Abort(_("patch %s is not in series file") % patch)
1115 1137 if not patch:
1116 1138 start = self.series_end()
1117 1139 else:
1118 1140 start = self.series.index(patch) + 1
1119 1141 unapplied = []
1120 1142 for i in xrange(start, len(self.series)):
1121 1143 pushable, reason = self.pushable(i)
1122 1144 if pushable:
1123 1145 unapplied.append((i, self.series[i]))
1124 1146 self.explain_pushable(i)
1125 1147 return unapplied
1126 1148
1127 1149 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1128 1150 summary=False):
1129 1151 def displayname(patchname):
1130 1152 if summary:
1131 1153 msg = self.readheaders(patchname)[0]
1132 1154 msg = msg and ': ' + msg[0] or ': '
1133 1155 else:
1134 1156 msg = ''
1135 1157 return '%s%s' % (patchname, msg)
1136 1158
1137 1159 applied = dict.fromkeys([p.name for p in self.applied])
1138 1160 if length is None:
1139 1161 length = len(self.series) - start
1140 1162 if not missing:
1141 1163 for i in xrange(start, start+length):
1142 1164 patch = self.series[i]
1143 1165 if patch in applied:
1144 1166 stat = 'A'
1145 1167 elif self.pushable(i)[0]:
1146 1168 stat = 'U'
1147 1169 else:
1148 1170 stat = 'G'
1149 1171 pfx = ''
1150 1172 if self.ui.verbose:
1151 1173 pfx = '%d %s ' % (i, stat)
1152 1174 elif status and status != stat:
1153 1175 continue
1154 1176 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1155 1177 else:
1156 1178 msng_list = []
1157 1179 for root, dirs, files in os.walk(self.path):
1158 1180 d = root[len(self.path) + 1:]
1159 1181 for f in files:
1160 1182 fl = os.path.join(d, f)
1161 1183 if (fl not in self.series and
1162 1184 fl not in (self.status_path, self.series_path,
1163 1185 self.guards_path)
1164 1186 and not fl.startswith('.')):
1165 1187 msng_list.append(fl)
1166 1188 msng_list.sort()
1167 1189 for x in msng_list:
1168 1190 pfx = self.ui.verbose and ('D ') or ''
1169 1191 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1170 1192
1171 1193 def issaveline(self, l):
1172 1194 if l.name == '.hg.patches.save.line':
1173 1195 return True
1174 1196
1175 1197 def qrepo(self, create=False):
1176 1198 if create or os.path.isdir(self.join(".hg")):
1177 1199 return hg.repository(self.ui, path=self.path, create=create)
1178 1200
1179 1201 def restore(self, repo, rev, delete=None, qupdate=None):
1180 1202 c = repo.changelog.read(rev)
1181 1203 desc = c[4].strip()
1182 1204 lines = desc.splitlines()
1183 1205 i = 0
1184 1206 datastart = None
1185 1207 series = []
1186 1208 applied = []
1187 1209 qpp = None
1188 1210 for i in xrange(0, len(lines)):
1189 1211 if lines[i] == 'Patch Data:':
1190 1212 datastart = i + 1
1191 1213 elif lines[i].startswith('Dirstate:'):
1192 1214 l = lines[i].rstrip()
1193 1215 l = l[10:].split(' ')
1194 1216 qpp = [ hg.bin(x) for x in l ]
1195 1217 elif datastart != None:
1196 1218 l = lines[i].rstrip()
1197 1219 se = statusentry(l)
1198 1220 file_ = se.name
1199 1221 if se.rev:
1200 1222 applied.append(se)
1201 1223 else:
1202 1224 series.append(file_)
1203 1225 if datastart == None:
1204 1226 self.ui.warn("No saved patch data found\n")
1205 1227 return 1
1206 1228 self.ui.warn("restoring status: %s\n" % lines[0])
1207 1229 self.full_series = series
1208 1230 self.applied = applied
1209 1231 self.parse_series()
1210 1232 self.series_dirty = 1
1211 1233 self.applied_dirty = 1
1212 1234 heads = repo.changelog.heads()
1213 1235 if delete:
1214 1236 if rev not in heads:
1215 1237 self.ui.warn("save entry has children, leaving it alone\n")
1216 1238 else:
1217 1239 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1218 1240 pp = repo.dirstate.parents()
1219 1241 if rev in pp:
1220 1242 update = True
1221 1243 else:
1222 1244 update = False
1223 1245 self.strip(repo, rev, update=update, backup='strip')
1224 1246 if qpp:
1225 1247 self.ui.warn("saved queue repository parents: %s %s\n" %
1226 1248 (hg.short(qpp[0]), hg.short(qpp[1])))
1227 1249 if qupdate:
1228 1250 print "queue directory updating"
1229 1251 r = self.qrepo()
1230 1252 if not r:
1231 1253 self.ui.warn("Unable to load queue repository\n")
1232 1254 return 1
1233 1255 hg.clean(r, qpp[0])
1234 1256
1235 1257 def save(self, repo, msg=None):
1236 1258 if len(self.applied) == 0:
1237 1259 self.ui.warn("save: no patches applied, exiting\n")
1238 1260 return 1
1239 1261 if self.issaveline(self.applied[-1]):
1240 1262 self.ui.warn("status is already saved\n")
1241 1263 return 1
1242 1264
1243 1265 ar = [ ':' + x for x in self.full_series ]
1244 1266 if not msg:
1245 1267 msg = "hg patches saved state"
1246 1268 else:
1247 1269 msg = "hg patches: " + msg.rstrip('\r\n')
1248 1270 r = self.qrepo()
1249 1271 if r:
1250 1272 pp = r.dirstate.parents()
1251 1273 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1252 1274 msg += "\n\nPatch Data:\n"
1253 1275 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1254 1276 "\n".join(ar) + '\n' or "")
1255 1277 n = repo.commit(None, text, user=None, force=1)
1256 1278 if not n:
1257 1279 self.ui.warn("repo commit failed\n")
1258 1280 return 1
1259 1281 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1260 1282 self.applied_dirty = 1
1261 1283 self.removeundo(repo)
1262 1284
1263 1285 def full_series_end(self):
1264 1286 if len(self.applied) > 0:
1265 1287 p = self.applied[-1].name
1266 1288 end = self.find_series(p)
1267 1289 if end == None:
1268 1290 return len(self.full_series)
1269 1291 return end + 1
1270 1292 return 0
1271 1293
1272 1294 def series_end(self, all_patches=False):
1273 1295 """If all_patches is False, return the index of the next pushable patch
1274 1296 in the series, or the series length. If all_patches is True, return the
1275 1297 index of the first patch past the last applied one.
1276 1298 """
1277 1299 end = 0
1278 1300 def next(start):
1279 1301 if all_patches:
1280 1302 return start
1281 1303 i = start
1282 1304 while i < len(self.series):
1283 1305 p, reason = self.pushable(i)
1284 1306 if p:
1285 1307 break
1286 1308 self.explain_pushable(i)
1287 1309 i += 1
1288 1310 return i
1289 1311 if len(self.applied) > 0:
1290 1312 p = self.applied[-1].name
1291 1313 try:
1292 1314 end = self.series.index(p)
1293 1315 except ValueError:
1294 1316 return 0
1295 1317 return next(end + 1)
1296 1318 return next(end)
1297 1319
1298 1320 def appliedname(self, index):
1299 1321 pname = self.applied[index].name
1300 1322 if not self.ui.verbose:
1301 1323 p = pname
1302 1324 else:
1303 1325 p = str(self.series.index(pname)) + " " + pname
1304 1326 return p
1305 1327
1306 1328 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1307 1329 force=None, git=False):
1308 1330 def checkseries(patchname):
1309 1331 if patchname in self.series:
1310 1332 raise util.Abort(_('patch %s is already in the series file')
1311 1333 % patchname)
1312 1334 def checkfile(patchname):
1313 1335 if not force and os.path.exists(self.join(patchname)):
1314 1336 raise util.Abort(_('patch "%s" already exists')
1315 1337 % patchname)
1316 1338
1317 1339 if rev:
1318 1340 if files:
1319 1341 raise util.Abort(_('option "-r" not valid when importing '
1320 1342 'files'))
1321 1343 rev = cmdutil.revrange(repo, rev)
1322 1344 rev.sort(lambda x, y: cmp(y, x))
1323 1345 if (len(files) > 1 or len(rev) > 1) and patchname:
1324 1346 raise util.Abort(_('option "-n" not valid when importing multiple '
1325 1347 'patches'))
1326 1348 i = 0
1327 1349 added = []
1328 1350 if rev:
1329 1351 # If mq patches are applied, we can only import revisions
1330 1352 # that form a linear path to qbase.
1331 1353 # Otherwise, they should form a linear path to a head.
1332 1354 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1333 1355 if len(heads) > 1:
1334 1356 raise util.Abort(_('revision %d is the root of more than one '
1335 1357 'branch') % rev[-1])
1336 1358 if self.applied:
1337 1359 base = revlog.hex(repo.changelog.node(rev[0]))
1338 1360 if base in [n.rev for n in self.applied]:
1339 1361 raise util.Abort(_('revision %d is already managed')
1340 1362 % rev[0])
1341 1363 if heads != [revlog.bin(self.applied[-1].rev)]:
1342 1364 raise util.Abort(_('revision %d is not the parent of '
1343 1365 'the queue') % rev[0])
1344 1366 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1345 1367 lastparent = repo.changelog.parentrevs(base)[0]
1346 1368 else:
1347 1369 if heads != [repo.changelog.node(rev[0])]:
1348 1370 raise util.Abort(_('revision %d has unmanaged children')
1349 1371 % rev[0])
1350 1372 lastparent = None
1351 1373
1352 1374 if git:
1353 1375 self.diffopts().git = True
1354 1376
1355 1377 for r in rev:
1356 1378 p1, p2 = repo.changelog.parentrevs(r)
1357 1379 n = repo.changelog.node(r)
1358 1380 if p2 != revlog.nullrev:
1359 1381 raise util.Abort(_('cannot import merge revision %d') % r)
1360 1382 if lastparent and lastparent != r:
1361 1383 raise util.Abort(_('revision %d is not the parent of %d')
1362 1384 % (r, lastparent))
1363 1385 lastparent = p1
1364 1386
1365 1387 if not patchname:
1366 1388 patchname = normname('%d.diff' % r)
1367 1389 checkseries(patchname)
1368 1390 checkfile(patchname)
1369 1391 self.full_series.insert(0, patchname)
1370 1392
1371 1393 patchf = self.opener(patchname, "w")
1372 1394 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1373 1395 patchf.close()
1374 1396
1375 1397 se = statusentry(revlog.hex(n), patchname)
1376 1398 self.applied.insert(0, se)
1377 1399
1378 1400 added.append(patchname)
1379 1401 patchname = None
1380 1402 self.parse_series()
1381 1403 self.applied_dirty = 1
1382 1404
1383 1405 for filename in files:
1384 1406 if existing:
1385 1407 if filename == '-':
1386 1408 raise util.Abort(_('-e is incompatible with import from -'))
1387 1409 if not patchname:
1388 1410 patchname = normname(filename)
1389 1411 if not os.path.isfile(self.join(patchname)):
1390 1412 raise util.Abort(_("patch %s does not exist") % patchname)
1391 1413 else:
1392 1414 try:
1393 1415 if filename == '-':
1394 1416 if not patchname:
1395 1417 raise util.Abort(_('need --name to import a patch from -'))
1396 1418 text = sys.stdin.read()
1397 1419 else:
1398 1420 text = file(filename).read()
1399 1421 except IOError:
1400 1422 raise util.Abort(_("unable to read %s") % patchname)
1401 1423 if not patchname:
1402 1424 patchname = normname(os.path.basename(filename))
1403 1425 checkfile(patchname)
1404 1426 patchf = self.opener(patchname, "w")
1405 1427 patchf.write(text)
1406 1428 checkseries(patchname)
1407 1429 index = self.full_series_end() + i
1408 1430 self.full_series[index:index] = [patchname]
1409 1431 self.parse_series()
1410 1432 self.ui.warn("adding %s to series file\n" % patchname)
1411 1433 i += 1
1412 1434 added.append(patchname)
1413 1435 patchname = None
1414 1436 self.series_dirty = 1
1415 1437 qrepo = self.qrepo()
1416 1438 if qrepo:
1417 1439 qrepo.add(added)
1418 1440
1419 1441 def delete(ui, repo, *patches, **opts):
1420 1442 """remove patches from queue
1421 1443
1422 1444 The patches must not be applied, unless they are arguments to
1423 1445 the --rev parameter. At least one patch or revision is required.
1424 1446
1425 1447 With --rev, mq will stop managing the named revisions (converting
1426 1448 them to regular mercurial changesets). The patches must be applied
1427 1449 and at the base of the stack. This option is useful when the patches
1428 1450 have been applied upstream.
1429 1451
1430 1452 With --keep, the patch files are preserved in the patch directory."""
1431 1453 q = repo.mq
1432 1454 q.delete(repo, patches, opts)
1433 1455 q.save_dirty()
1434 1456 return 0
1435 1457
1436 1458 def applied(ui, repo, patch=None, **opts):
1437 1459 """print the patches already applied"""
1438 1460 q = repo.mq
1439 1461 if patch:
1440 1462 if patch not in q.series:
1441 1463 raise util.Abort(_("patch %s is not in series file") % patch)
1442 1464 end = q.series.index(patch) + 1
1443 1465 else:
1444 1466 end = q.series_end(True)
1445 1467 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1446 1468
1447 1469 def unapplied(ui, repo, patch=None, **opts):
1448 1470 """print the patches not yet applied"""
1449 1471 q = repo.mq
1450 1472 if patch:
1451 1473 if patch not in q.series:
1452 1474 raise util.Abort(_("patch %s is not in series file") % patch)
1453 1475 start = q.series.index(patch) + 1
1454 1476 else:
1455 1477 start = q.series_end(True)
1456 1478 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1457 1479
1458 1480 def qimport(ui, repo, *filename, **opts):
1459 1481 """import a patch
1460 1482
1461 1483 The patch will have the same name as its source file unless you
1462 1484 give it a new one with --name.
1463 1485
1464 1486 You can register an existing patch inside the patch directory
1465 1487 with the --existing flag.
1466 1488
1467 1489 With --force, an existing patch of the same name will be overwritten.
1468 1490
1469 1491 An existing changeset may be placed under mq control with --rev
1470 1492 (e.g. qimport --rev tip -n patch will place tip under mq control).
1471 1493 With --git, patches imported with --rev will use the git diff
1472 1494 format.
1473 1495 """
1474 1496 q = repo.mq
1475 1497 q.qimport(repo, filename, patchname=opts['name'],
1476 1498 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1477 1499 git=opts['git'])
1478 1500 q.save_dirty()
1479 1501 return 0
1480 1502
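A usage sketch for the qimport modes described above (the file and patch names here are hypothetical):

    hg qimport ../fix-typo.patch             # copy the file into the patch directory and series
    hg qimport -e already-in-patchdir.patch  # register a patch already in the patch directory
    hg qimport --rev tip -n tip.patch        # place the tip changeset under mq control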
1481 1503 def init(ui, repo, **opts):
1482 1504 """init a new queue repository
1483 1505
1484 1506 The queue repository is unversioned by default. If -c is
1485 1507 specified, qinit will create a separate nested repository
1486 1508 for patches (qinit -c may also be run later to convert
1487 1509 an unversioned patch repository into a versioned one).
1488 1510 You can use qcommit to commit changes to this queue repository."""
1489 1511 q = repo.mq
1490 1512 r = q.init(repo, create=opts['create_repo'])
1491 1513 q.save_dirty()
1492 1514 if r:
1493 1515 if not os.path.exists(r.wjoin('.hgignore')):
1494 1516 fp = r.wopener('.hgignore', 'w')
1495 1517 fp.write('syntax: glob\n')
1496 1518 fp.write('status\n')
1497 1519 fp.write('guards\n')
1498 1520 fp.close()
1499 1521 if not os.path.exists(r.wjoin('series')):
1500 1522 r.wopener('series', 'w').close()
1501 1523 r.add(['.hgignore', 'series'])
1502 1524 commands.add(ui, r)
1503 1525 return 0
1504 1526
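A brief sketch of the two qinit modes documented above (illustrative only):

    hg qinit                            # unversioned patch directory in .hg/patches
    hg qinit -c                         # versioned patch repository (or convert an existing one)
    hg qcommit -m "snapshot patches"    # commit changes to the versioned queue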
1505 1527 def clone(ui, source, dest=None, **opts):
1506 1528 '''clone main and patch repository at the same time
1507 1529
1508 1530 If source is local, destination will have no patches applied. If
1509 1531 source is remote, this command cannot check whether patches are
1510 1532 applied in source, so it cannot guarantee that patches are not
1511 1533 applied in destination. If you clone a remote repository, first
1512 1534 make sure it has no patches applied.
1513 1535
1514 1536 The source patch repository is looked for in <src>/.hg/patches by
1515 1537 default. Use -p <url> to change it.
1516 1538
1517 1539 The patch directory must be a nested mercurial repository, as
1518 1540 would be created by qinit -c.
1519 1541 '''
1520 1542 def patchdir(repo):
1521 1543 url = repo.url()
1522 1544 if url.endswith('/'):
1523 1545 url = url[:-1]
1524 1546 return url + '/.hg/patches'
1525 1547 cmdutil.setremoteconfig(ui, opts)
1526 1548 if dest is None:
1527 1549 dest = hg.defaultdest(source)
1528 1550 sr = hg.repository(ui, ui.expandpath(source))
1529 1551 patchespath = opts['patches'] or patchdir(sr)
1530 1552 try:
1531 1553 pr = hg.repository(ui, patchespath)
1532 1554 except hg.RepoError:
1533 1555 raise util.Abort(_('versioned patch repository not found'
1534 1556 ' (see qinit -c)'))
1535 1557 qbase, destrev = None, None
1536 1558 if sr.local():
1537 1559 if sr.mq.applied:
1538 1560 qbase = revlog.bin(sr.mq.applied[0].rev)
1539 1561 if not hg.islocal(dest):
1540 1562 heads = dict.fromkeys(sr.heads())
1541 1563 for h in sr.heads(qbase):
1542 1564 del heads[h]
1543 1565 destrev = heads.keys()
1544 1566 destrev.append(sr.changelog.parents(qbase)[0])
1545 1567 ui.note(_('cloning main repo\n'))
1546 1568 sr, dr = hg.clone(ui, sr.url(), dest,
1547 1569 pull=opts['pull'],
1548 1570 rev=destrev,
1549 1571 update=False,
1550 1572 stream=opts['uncompressed'])
1551 1573 ui.note(_('cloning patch repo\n'))
1552 1574 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1553 1575 pull=opts['pull'], update=not opts['noupdate'],
1554 1576 stream=opts['uncompressed'])
1555 1577 if dr.local():
1556 1578 if qbase:
1557 1579 ui.note(_('stripping applied patches from destination repo\n'))
1558 1580 dr.mq.strip(dr, qbase, update=False, backup=None)
1559 1581 if not opts['noupdate']:
1560 1582 ui.note(_('updating destination repo\n'))
1561 1583 hg.update(dr, dr.changelog.tip())
1562 1584
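Illustrative qclone usage; the URLs and destination are placeholders. The patch repository is taken from <src>/.hg/patches unless -p points elsewhere:

    hg qclone http://example.com/hg/project
    hg qclone -p http://example.com/hg/project-patches http://example.com/hg/project project-clone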
1563 1585 def commit(ui, repo, *pats, **opts):
1564 1586 """commit changes in the queue repository"""
1565 1587 q = repo.mq
1566 1588 r = q.qrepo()
1567 1589 if not r: raise util.Abort('no queue repository')
1568 1590 commands.commit(r.ui, r, *pats, **opts)
1569 1591
1570 1592 def series(ui, repo, **opts):
1571 1593 """print the entire series file"""
1572 1594 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1573 1595 return 0
1574 1596
1575 1597 def top(ui, repo, **opts):
1576 1598 """print the name of the current patch"""
1577 1599 q = repo.mq
1578 1600 t = q.applied and q.series_end(True) or 0
1579 1601 if t:
1580 1602 return q.qseries(repo, start=t-1, length=1, status='A',
1581 1603 summary=opts.get('summary'))
1582 1604 else:
1583 1605 ui.write("No patches applied\n")
1584 1606 return 1
1585 1607
1586 1608 def next(ui, repo, **opts):
1587 1609 """print the name of the next patch"""
1588 1610 q = repo.mq
1589 1611 end = q.series_end()
1590 1612 if end == len(q.series):
1591 1613 ui.write("All patches applied\n")
1592 1614 return 1
1593 1615 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1594 1616
1595 1617 def prev(ui, repo, **opts):
1596 1618 """print the name of the previous patch"""
1597 1619 q = repo.mq
1598 1620 l = len(q.applied)
1599 1621 if l == 1:
1600 1622 ui.write("Only one patch applied\n")
1601 1623 return 1
1602 1624 if not l:
1603 1625 ui.write("No patches applied\n")
1604 1626 return 1
1605 1627 return q.qseries(repo, start=l-2, length=1, status='A',
1606 1628 summary=opts.get('summary'))
1607 1629
1630 def setupheaderopts(ui, opts):
1631 def do(opt,val):
1632 if not opts[opt] and opts['current' + opt]:
1633 opts[opt] = val
1634 do('user', ui.username())
1635
1608 1636 def new(ui, repo, patch, *args, **opts):
1609 1637 """create a new patch
1610 1638
1611 1639 qnew creates a new patch on top of the currently-applied patch
1612 1640 (if any). It will refuse to run if there are any outstanding
1613 1641 changes unless -f is specified, in which case the patch will
1614 1642 be initialised with them. You may also use -I, -X, and/or a list of
1615 1643 files after the patch name to add only changes to matching files
1616 1644 to the new patch, leaving the rest as uncommitted modifications.
1617 1645
1618 1646 -e, -m or -l set the patch header as well as the commit message.
1619 1647 If none is specified, the patch header is empty and the
1620 1648 commit message is '[mq]: PATCH'"""
1621 1649 q = repo.mq
1622 1650 message = cmdutil.logmessage(opts)
1623 1651 if opts['edit']:
1624 1652 message = ui.edit(message, ui.username())
1625 1653 opts['msg'] = message
1654 setupheaderopts(ui, opts)
1626 1655 q.new(repo, patch, *args, **opts)
1627 1656 q.save_dirty()
1628 1657 return 0
1629 1658
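With the header options introduced in this change, qnew can record an author in the new patch. A sketch (the user name is illustrative, and it assumes queue.new honours the user option set up by setupheaderopts, which is not shown in this hunk):

    hg qnew -U fix.patch                       # add "From: <current user>" to the patch header
    hg qnew -u mary -m "Fix bug" fix2.patch    # add "From: mary" plus a commit message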
1630 1659 def refresh(ui, repo, *pats, **opts):
1631 1660 """update the current patch
1632 1661
1633 1662 If any file patterns are provided, the refreshed patch will contain only
1634 1663 the modifications that match those patterns; the remaining modifications
1635 1664 will remain in the working directory.
1636 1665
1637 1666 hg add/remove/copy/rename work as usual, though you might want to use
1638 1667 git-style patches (--git or [diff] git=1) to track copies and renames.
1639 1668 """
1640 1669 q = repo.mq
1641 1670 message = cmdutil.logmessage(opts)
1642 1671 if opts['edit']:
1643 1672 if not q.applied:
1644 1673 ui.write(_("No patches applied\n"))
1645 1674 return 1
1646 1675 if message:
1647 1676 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1648 1677 patch = q.applied[-1].name
1649 1678 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1650 1679 message = ui.edit('\n'.join(message), user or ui.username())
1680 setupheaderopts(ui, opts)
1651 1681 ret = q.refresh(repo, pats, msg=message, **opts)
1652 1682 q.save_dirty()
1653 1683 return ret
1654 1684
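The user-handling block added to queue.refresh above rewrites an existing '# User' or 'From:' line, or inserts a 'From:' header if none is present. Illustrative usage (the name is made up):

    hg qrefresh -u mary    # set or replace the author recorded in the top patch
    hg qrefresh -U         # record the current username from the hg configuration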
1655 1685 def diff(ui, repo, *pats, **opts):
1656 1686 """diff of the current patch"""
1657 1687 repo.mq.diff(repo, pats, opts)
1658 1688 return 0
1659 1689
1660 1690 def fold(ui, repo, *files, **opts):
1661 1691 """fold the named patches into the current patch
1662 1692
1663 1693 Patches must not yet be applied. Each patch will be successively
1664 1694 applied to the current patch in the order given. If all the
1665 1695 patches apply successfully, the current patch will be refreshed
1666 1696 with the new cumulative patch, and the folded patches will
1667 1697 be deleted. With -k/--keep, the folded patch files will not
1668 1698 be removed afterwards.
1669 1699
1670 1700 The header for each folded patch will be concatenated with
1671 1701 the current patch header, separated by a line of '* * *'."""
1672 1702
1673 1703 q = repo.mq
1674 1704
1675 1705 if not files:
1676 1706 raise util.Abort(_('qfold requires at least one patch name'))
1677 1707 if not q.check_toppatch(repo):
1678 1708 raise util.Abort(_('No patches applied'))
1679 1709
1680 1710 message = cmdutil.logmessage(opts)
1681 1711 if opts['edit']:
1682 1712 if message:
1683 1713 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1684 1714
1685 1715 parent = q.lookup('qtip')
1686 1716 patches = []
1687 1717 messages = []
1688 1718 for f in files:
1689 1719 p = q.lookup(f)
1690 1720 if p in patches or p == parent:
1691 1721 ui.warn(_('Skipping already folded patch %s') % p)
1692 1722 if q.isapplied(p):
1693 1723 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1694 1724 patches.append(p)
1695 1725
1696 1726 for p in patches:
1697 1727 if not message:
1698 1728 messages.append(q.readheaders(p)[0])
1699 1729 pf = q.join(p)
1700 1730 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1701 1731 if not patchsuccess:
1702 1732 raise util.Abort(_('Error folding patch %s') % p)
1703 1733 patch.updatedir(ui, repo, files)
1704 1734
1705 1735 if not message:
1706 1736 message, comments, user = q.readheaders(parent)[0:3]
1707 1737 for msg in messages:
1708 1738 message.append('* * *')
1709 1739 message.extend(msg)
1710 1740 message = '\n'.join(message)
1711 1741
1712 1742 if opts['edit']:
1713 1743 message = ui.edit(message, user or ui.username())
1714 1744
1715 1745 q.refresh(repo, msg=message)
1716 1746 q.delete(repo, patches, opts)
1717 1747 q.save_dirty()
1718 1748
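A minimal qfold sketch; the patch names are hypothetical. The folded patches must be unapplied and are merged into the current top patch:

    hg qpush feature-base.patch
    hg qfold -e feature-extra.patch    # fold and edit the combined header
    hg qfold -k cleanup.patch          # fold but keep the folded patch file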
1719 1749 def goto(ui, repo, patch, **opts):
1720 1750 '''push or pop patches until named patch is at top of stack'''
1721 1751 q = repo.mq
1722 1752 patch = q.lookup(patch)
1723 1753 if q.isapplied(patch):
1724 1754 ret = q.pop(repo, patch, force=opts['force'])
1725 1755 else:
1726 1756 ret = q.push(repo, patch, force=opts['force'])
1727 1757 q.save_dirty()
1728 1758 return ret
1729 1759
1730 1760 def guard(ui, repo, *args, **opts):
1731 1761 '''set or print guards for a patch
1732 1762
1733 1763 Guards control whether a patch can be pushed. A patch with no
1734 1764 guards is always pushed. A patch with a positive guard ("+foo") is
1735 1765 pushed only if the qselect command has activated it. A patch with
1736 1766 a negative guard ("-foo") is never pushed if the qselect command
1737 1767 has activated it.
1738 1768
1739 1769 With no arguments, print the currently active guards.
1740 1770 With arguments, set guards for the named patch.
1741 1771
1742 1772 To set a negative guard "-foo" on topmost patch ("--" is needed so
1743 1773 hg will not interpret "-foo" as an option):
1744 1774 hg qguard -- -foo
1745 1775
1746 1776 To set guards on another patch:
1747 1777 hg qguard other.patch +2.6.17 -stable
1748 1778 '''
1749 1779 def status(idx):
1750 1780 guards = q.series_guards[idx] or ['unguarded']
1751 1781 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1752 1782 q = repo.mq
1753 1783 patch = None
1754 1784 args = list(args)
1755 1785 if opts['list']:
1756 1786 if args or opts['none']:
1757 1787 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1758 1788 for i in xrange(len(q.series)):
1759 1789 status(i)
1760 1790 return
1761 1791 if not args or args[0][0:1] in '-+':
1762 1792 if not q.applied:
1763 1793 raise util.Abort(_('no patches applied'))
1764 1794 patch = q.applied[-1].name
1765 1795 if patch is None and args[0][0:1] not in '-+':
1766 1796 patch = args.pop(0)
1767 1797 if patch is None:
1768 1798 raise util.Abort(_('no patch to work with'))
1769 1799 if args or opts['none']:
1770 1800 idx = q.find_series(patch)
1771 1801 if idx is None:
1772 1802 raise util.Abort(_('no patch named %s') % patch)
1773 1803 q.set_guards(idx, args)
1774 1804 q.save_dirty()
1775 1805 else:
1776 1806 status(q.series.index(q.lookup(patch)))
1777 1807
1778 1808 def header(ui, repo, patch=None):
1779 1809 """Print the header of the topmost or specified patch"""
1780 1810 q = repo.mq
1781 1811
1782 1812 if patch:
1783 1813 patch = q.lookup(patch)
1784 1814 else:
1785 1815 if not q.applied:
1786 1816 ui.write('No patches applied\n')
1787 1817 return 1
1788 1818 patch = q.lookup('qtip')
1789 1819 message = repo.mq.readheaders(patch)[0]
1790 1820
1791 1821 ui.write('\n'.join(message) + '\n')
1792 1822
1793 1823 def lastsavename(path):
1794 1824 (directory, base) = os.path.split(path)
1795 1825 names = os.listdir(directory)
1796 1826 namere = re.compile("%s.([0-9]+)" % base)
1797 1827 maxindex = None
1798 1828 maxname = None
1799 1829 for f in names:
1800 1830 m = namere.match(f)
1801 1831 if m:
1802 1832 index = int(m.group(1))
1803 1833 if maxindex == None or index > maxindex:
1804 1834 maxindex = index
1805 1835 maxname = f
1806 1836 if maxname:
1807 1837 return (os.path.join(directory, maxname), maxindex)
1808 1838 return (None, None)
1809 1839
1810 1840 def savename(path):
1811 1841 (last, index) = lastsavename(path)
1812 1842 if last is None:
1813 1843 index = 0
1814 1844 newpath = path + ".%d" % (index + 1)
1815 1845 return newpath
1816 1846
1817 1847 def push(ui, repo, patch=None, **opts):
1818 1848 """push the next patch onto the stack"""
1819 1849 q = repo.mq
1820 1850 mergeq = None
1821 1851
1822 1852 if opts['all']:
1823 1853 if not q.series:
1824 1854 ui.warn(_('no patches in series\n'))
1825 1855 return 0
1826 1856 patch = q.series[-1]
1827 1857 if opts['merge']:
1828 1858 if opts['name']:
1829 1859 newpath = opts['name']
1830 1860 else:
1831 1861 newpath, i = lastsavename(q.path)
1832 1862 if not newpath:
1833 1863 ui.warn("no saved queues found, please use -n\n")
1834 1864 return 1
1835 1865 mergeq = queue(ui, repo.join(""), newpath)
1836 1866 ui.warn("merging with queue at: %s\n" % mergeq.path)
1837 1867 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1838 1868 mergeq=mergeq)
1839 1869 return ret
1840 1870
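The -m/--merge branch above rebuilds patches against a previously saved queue, located via lastsavename unless -n names one explicitly. The commonly described workflow is sketched here with some hedging, since only qpush itself appears in this hunk:

    hg qsave -e -c    # save queue state and set the patches aside
    hg pull           # bring in upstream changesets
    hg qpush -a -m    # reapply all patches, merging against the saved queue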
1841 1871 def pop(ui, repo, patch=None, **opts):
1842 1872 """pop the current patch off the stack"""
1843 1873 localupdate = True
1844 1874 if opts['name']:
1845 1875 q = queue(ui, repo.join(""), repo.join(opts['name']))
1846 1876 ui.warn('using patch queue: %s\n' % q.path)
1847 1877 localupdate = False
1848 1878 else:
1849 1879 q = repo.mq
1850 1880 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1851 1881 all=opts['all'])
1852 1882 q.save_dirty()
1853 1883 return ret
1854 1884
1855 1885 def rename(ui, repo, patch, name=None, **opts):
1856 1886 """rename a patch
1857 1887
1858 1888 With one argument, renames the current patch to PATCH1.
1859 1889 With two arguments, renames PATCH1 to PATCH2."""
1860 1890
1861 1891 q = repo.mq
1862 1892
1863 1893 if not name:
1864 1894 name = patch
1865 1895 patch = None
1866 1896
1867 1897 if patch:
1868 1898 patch = q.lookup(patch)
1869 1899 else:
1870 1900 if not q.applied:
1871 1901 ui.write(_('No patches applied\n'))
1872 1902 return
1873 1903 patch = q.lookup('qtip')
1874 1904 absdest = q.join(name)
1875 1905 if os.path.isdir(absdest):
1876 1906 name = normname(os.path.join(name, os.path.basename(patch)))
1877 1907 absdest = q.join(name)
1878 1908 if os.path.exists(absdest):
1879 1909 raise util.Abort(_('%s already exists') % absdest)
1880 1910
1881 1911 if name in q.series:
1882 1912 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1883 1913
1884 1914 if ui.verbose:
1885 1915 ui.write('Renaming %s to %s\n' % (patch, name))
1886 1916 i = q.find_series(patch)
1887 1917 guards = q.guard_re.findall(q.full_series[i])
1888 1918 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1889 1919 q.parse_series()
1890 1920 q.series_dirty = 1
1891 1921
1892 1922 info = q.isapplied(patch)
1893 1923 if info:
1894 1924 q.applied[info[0]] = statusentry(info[1], name)
1895 1925 q.applied_dirty = 1
1896 1926
1897 1927 util.rename(q.join(patch), absdest)
1898 1928 r = q.qrepo()
1899 1929 if r:
1900 1930 wlock = r.wlock()
1901 1931 try:
1902 1932 if r.dirstate[name] == 'r':
1903 1933 r.undelete([name])
1904 1934 r.copy(patch, name)
1905 1935 r.remove([patch], False)
1906 1936 finally:
1907 1937 del wlock
1908 1938
1909 1939 q.save_dirty()
1910 1940
1911 1941 def restore(ui, repo, rev, **opts):
1912 1942 """restore the queue state saved by a rev"""
1913 1943 rev = repo.lookup(rev)
1914 1944 q = repo.mq
1915 1945 q.restore(repo, rev, delete=opts['delete'],
1916 1946 qupdate=opts['update'])
1917 1947 q.save_dirty()
1918 1948 return 0
1919 1949
1920 1950 def save(ui, repo, **opts):
1921 1951 """save current queue state"""
1922 1952 q = repo.mq
1923 1953 message = cmdutil.logmessage(opts)
1924 1954 ret = q.save(repo, msg=message)
1925 1955 if ret:
1926 1956 return ret
1927 1957 q.save_dirty()
1928 1958 if opts['copy']:
1929 1959 path = q.path
1930 1960 if opts['name']:
1931 1961 newpath = os.path.join(q.basepath, opts['name'])
1932 1962 if os.path.exists(newpath):
1933 1963 if not os.path.isdir(newpath):
1934 1964 raise util.Abort(_('destination %s exists and is not '
1935 1965 'a directory') % newpath)
1936 1966 if not opts['force']:
1937 1967 raise util.Abort(_('destination %s exists, '
1938 1968 'use -f to force') % newpath)
1939 1969 else:
1940 1970 newpath = savename(path)
1941 1971 ui.warn("copy %s to %s\n" % (path, newpath))
1942 1972 util.copyfiles(path, newpath)
1943 1973 if opts['empty']:
1944 1974 try:
1945 1975 os.unlink(q.join(q.status_path))
1946 1976 except:
1947 1977 pass
1948 1978 return 0
1949 1979
1950 1980 def strip(ui, repo, rev, **opts):
1951 1981 """strip a revision and all later revs on the same branch"""
1952 1982 rev = repo.lookup(rev)
1953 1983 backup = 'all'
1954 1984 if opts['backup']:
1955 1985 backup = 'strip'
1956 1986 elif opts['nobackup']:
1957 1987 backup = 'none'
1958 1988 update = repo.dirstate.parents()[0] != revlog.nullid
1959 1989 repo.mq.strip(repo, rev, backup=backup, update=update)
1960 1990 return 0
1961 1991
1962 1992 def select(ui, repo, *args, **opts):
1963 1993 '''set or print guarded patches to push
1964 1994
1965 1995 Use the qguard command to set or print guards on a patch, then use
1966 1996 qselect to tell mq which guards to use. A patch will be pushed if it
1967 1997 has no guards, or if any positive guard matches the currently selected
1968 1998 guard; it will not be pushed if any negative guard matches the current guard.
1969 1999 For example:
1970 2000
1971 2001 qguard foo.patch -stable (negative guard)
1972 2002 qguard bar.patch +stable (positive guard)
1973 2003 qselect stable
1974 2004
1975 2005 This activates the "stable" guard. mq will skip foo.patch (because
1976 2006 it has a negative match) but push bar.patch (because it
1977 2007 has a positive match).
1978 2008
1979 2009 With no arguments, prints the currently active guards.
1980 2010 With one argument, sets the active guard.
1981 2011
1982 2012 Use -n/--none to deactivate guards (no other arguments needed).
1983 2013 When no guards are active, patches with positive guards are skipped
1984 2014 and patches with negative guards are pushed.
1985 2015
1986 2016 qselect can change the guards on applied patches. It does not pop
1987 2017 guarded patches by default. Use --pop to pop back to the last applied
1988 2018 patch that is not guarded. Use --reapply (which implies --pop) to push
1989 2019 back to the current patch afterwards, but skip guarded patches.
1990 2020
1991 2021 Use -s/--series to print a list of all guards in the series file (no
1992 2022 other arguments needed). Use -v for more information.'''
1993 2023
1994 2024 q = repo.mq
1995 2025 guards = q.active()
1996 2026 if args or opts['none']:
1997 2027 old_unapplied = q.unapplied(repo)
1998 2028 old_guarded = [i for i in xrange(len(q.applied)) if
1999 2029 not q.pushable(i)[0]]
2000 2030 q.set_active(args)
2001 2031 q.save_dirty()
2002 2032 if not args:
2003 2033 ui.status(_('guards deactivated\n'))
2004 2034 if not opts['pop'] and not opts['reapply']:
2005 2035 unapplied = q.unapplied(repo)
2006 2036 guarded = [i for i in xrange(len(q.applied))
2007 2037 if not q.pushable(i)[0]]
2008 2038 if len(unapplied) != len(old_unapplied):
2009 2039 ui.status(_('number of unguarded, unapplied patches has '
2010 2040 'changed from %d to %d\n') %
2011 2041 (len(old_unapplied), len(unapplied)))
2012 2042 if len(guarded) != len(old_guarded):
2013 2043 ui.status(_('number of guarded, applied patches has changed '
2014 2044 'from %d to %d\n') %
2015 2045 (len(old_guarded), len(guarded)))
2016 2046 elif opts['series']:
2017 2047 guards = {}
2018 2048 noguards = 0
2019 2049 for gs in q.series_guards:
2020 2050 if not gs:
2021 2051 noguards += 1
2022 2052 for g in gs:
2023 2053 guards.setdefault(g, 0)
2024 2054 guards[g] += 1
2025 2055 if ui.verbose:
2026 2056 guards['NONE'] = noguards
2027 2057 guards = guards.items()
2028 2058 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2029 2059 if guards:
2030 2060 ui.note(_('guards in series file:\n'))
2031 2061 for guard, count in guards:
2032 2062 ui.note('%2d ' % count)
2033 2063 ui.write(guard, '\n')
2034 2064 else:
2035 2065 ui.note(_('no guards in series file\n'))
2036 2066 else:
2037 2067 if guards:
2038 2068 ui.note(_('active guards:\n'))
2039 2069 for g in guards:
2040 2070 ui.write(g, '\n')
2041 2071 else:
2042 2072 ui.write(_('no active guards\n'))
2043 2073 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2044 2074 popped = False
2045 2075 if opts['pop'] or opts['reapply']:
2046 2076 for i in xrange(len(q.applied)):
2047 2077 pushable, reason = q.pushable(i)
2048 2078 if not pushable:
2049 2079 ui.status(_('popping guarded patches\n'))
2050 2080 popped = True
2051 2081 if i == 0:
2052 2082 q.pop(repo, all=True)
2053 2083 else:
2054 2084 q.pop(repo, i-1)
2055 2085 break
2056 2086 if popped:
2057 2087 try:
2058 2088 if reapply:
2059 2089 ui.status(_('reapplying unguarded patches\n'))
2060 2090 q.push(repo, reapply)
2061 2091 finally:
2062 2092 q.save_dirty()
2063 2093
2064 2094 def reposetup(ui, repo):
2065 2095 class mqrepo(repo.__class__):
2066 2096 def abort_if_wdir_patched(self, errmsg, force=False):
2067 2097 if self.mq.applied and not force:
2068 2098 parent = revlog.hex(self.dirstate.parents()[0])
2069 2099 if parent in [s.rev for s in self.mq.applied]:
2070 2100 raise util.Abort(errmsg)
2071 2101
2072 2102 def commit(self, *args, **opts):
2073 2103 if len(args) >= 6:
2074 2104 force = args[5]
2075 2105 else:
2076 2106 force = opts.get('force')
2077 2107 self.abort_if_wdir_patched(
2078 2108 _('cannot commit over an applied mq patch'),
2079 2109 force)
2080 2110
2081 2111 return super(mqrepo, self).commit(*args, **opts)
2082 2112
2083 2113 def push(self, remote, force=False, revs=None):
2084 2114 if self.mq.applied and not force and not revs:
2085 2115 raise util.Abort(_('source has mq patches applied'))
2086 2116 return super(mqrepo, self).push(remote, force, revs)
2087 2117
2088 2118 def tags(self):
2089 2119 if self.tagscache:
2090 2120 return self.tagscache
2091 2121
2092 2122 tagscache = super(mqrepo, self).tags()
2093 2123
2094 2124 q = self.mq
2095 2125 if not q.applied:
2096 2126 return tagscache
2097 2127
2098 2128 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2099 2129 mqtags.append((mqtags[-1][0], 'qtip'))
2100 2130 mqtags.append((mqtags[0][0], 'qbase'))
2101 2131 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2102 2132 for patch in mqtags:
2103 2133 if patch[1] in tagscache:
2104 2134 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2105 2135 else:
2106 2136 tagscache[patch[1]] = patch[0]
2107 2137
2108 2138 return tagscache
2109 2139
2110 2140 def _branchtags(self):
2111 2141 q = self.mq
2112 2142 if not q.applied:
2113 2143 return super(mqrepo, self)._branchtags()
2114 2144
2115 2145 self.branchcache = {} # avoid recursion in changectx
2116 2146 cl = self.changelog
2117 2147 partial, last, lrev = self._readbranchcache()
2118 2148
2119 2149 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2120 2150 start = lrev + 1
2121 2151 if start < qbase:
2122 2152 # update the cache (excluding the patches) and save it
2123 2153 self._updatebranchcache(partial, lrev+1, qbase)
2124 2154 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2125 2155 start = qbase
2126 2156 # if start = qbase, the cache is as updated as it should be.
2127 2157 # if start > qbase, the cache includes (part of) the patches.
2128 2158 # we might as well use it, but we won't save it.
2129 2159
2130 2160 # update the cache up to the tip
2131 2161 self._updatebranchcache(partial, start, cl.count())
2132 2162
2133 2163 return partial
2134 2164
2135 2165 if repo.local():
2136 2166 repo.__class__ = mqrepo
2137 2167 repo.mq = queue(ui, repo.join(""))
2138 2168
2139 2169 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2140 2170
2171 headeropts = [
2172 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2173 ('u', 'user', '', _('add "From: <given user>" to patch'))]
2174
2141 2175 cmdtable = {
2142 2176 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2143 2177 "qclone":
2144 2178 (clone,
2145 2179 [('', 'pull', None, _('use pull protocol to copy metadata')),
2146 2180 ('U', 'noupdate', None, _('do not update the new working directories')),
2147 2181 ('', 'uncompressed', None,
2148 2182 _('use uncompressed transfer (fast over LAN)')),
2149 2183 ('p', 'patches', '', _('location of source patch repo')),
2150 2184 ] + commands.remoteopts,
2151 2185 _('hg qclone [OPTION]... SOURCE [DEST]')),
2152 2186 "qcommit|qci":
2153 2187 (commit,
2154 2188 commands.table["^commit|ci"][1],
2155 2189 _('hg qcommit [OPTION]... [FILE]...')),
2156 2190 "^qdiff":
2157 2191 (diff,
2158 2192 [('g', 'git', None, _('use git extended diff format')),
2159 2193 ] + commands.walkopts,
2160 2194 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2161 2195 "qdelete|qremove|qrm":
2162 2196 (delete,
2163 2197 [('k', 'keep', None, _('keep patch file')),
2164 2198 ('r', 'rev', [], _('stop managing a revision'))],
2165 2199 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2166 2200 'qfold':
2167 2201 (fold,
2168 2202 [('e', 'edit', None, _('edit patch header')),
2169 2203 ('k', 'keep', None, _('keep folded patch files')),
2170 2204 ] + commands.commitopts,
2171 2205 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2172 2206 'qgoto':
2173 2207 (goto,
2174 2208 [('f', 'force', None, _('overwrite any local changes'))],
2175 2209 _('hg qgoto [OPTION]... PATCH')),
2176 2210 'qguard':
2177 2211 (guard,
2178 2212 [('l', 'list', None, _('list all patches and guards')),
2179 2213 ('n', 'none', None, _('drop all guards'))],
2180 2214 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2181 2215 'qheader': (header, [], _('hg qheader [PATCH]')),
2182 2216 "^qimport":
2183 2217 (qimport,
2184 2218 [('e', 'existing', None, 'import file in patch dir'),
2185 2219 ('n', 'name', '', 'patch file name'),
2186 2220 ('f', 'force', None, 'overwrite existing files'),
2187 2221 ('r', 'rev', [], 'place existing revisions under mq control'),
2188 2222 ('g', 'git', None, _('use git extended diff format'))],
2189 2223 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2190 2224 "^qinit":
2191 2225 (init,
2192 2226 [('c', 'create-repo', None, 'create queue repository')],
2193 2227 _('hg qinit [-c]')),
2194 2228 "qnew":
2195 2229 (new,
2196 2230 [('e', 'edit', None, _('edit commit message')),
2197 2231 ('f', 'force', None, _('import uncommitted changes into patch')),
2198 2232 ('g', 'git', None, _('use git extended diff format')),
2199 ] + commands.walkopts + commands.commitopts,
2233 ] + commands.walkopts + commands.commitopts + headeropts,
2200 2234 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2201 2235 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2202 2236 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2203 2237 "^qpop":
2204 2238 (pop,
2205 2239 [('a', 'all', None, _('pop all patches')),
2206 2240 ('n', 'name', '', _('queue name to pop')),
2207 2241 ('f', 'force', None, _('forget any local changes'))],
2208 2242 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2209 2243 "^qpush":
2210 2244 (push,
2211 2245 [('f', 'force', None, _('apply if the patch has rejects')),
2212 2246 ('l', 'list', None, _('list patch name in commit text')),
2213 2247 ('a', 'all', None, _('apply all patches')),
2214 2248 ('m', 'merge', None, _('merge from another queue')),
2215 2249 ('n', 'name', '', _('merge queue name'))],
2216 2250 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2217 2251 "^qrefresh":
2218 2252 (refresh,
2219 2253 [('e', 'edit', None, _('edit commit message')),
2220 2254 ('g', 'git', None, _('use git extended diff format')),
2221 2255 ('s', 'short', None, _('refresh only files already in the patch')),
2222 ] + commands.walkopts + commands.commitopts,
2256 ] + commands.walkopts + commands.commitopts + headeropts,
2223 2257 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2224 2258 'qrename|qmv':
2225 2259 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2226 2260 "qrestore":
2227 2261 (restore,
2228 2262 [('d', 'delete', None, _('delete save entry')),
2229 2263 ('u', 'update', None, _('update queue working dir'))],
2230 2264 _('hg qrestore [-d] [-u] REV')),
2231 2265 "qsave":
2232 2266 (save,
2233 2267 [('c', 'copy', None, _('copy patch directory')),
2234 2268 ('n', 'name', '', _('copy directory name')),
2235 2269 ('e', 'empty', None, _('clear queue status file')),
2236 2270 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2237 2271 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2238 2272 "qselect":
2239 2273 (select,
2240 2274 [('n', 'none', None, _('disable all guards')),
2241 2275 ('s', 'series', None, _('list all guards in series file')),
2242 2276 ('', 'pop', None, _('pop to before first guarded applied patch')),
2243 2277 ('', 'reapply', None, _('pop, then reapply patches'))],
2244 2278 _('hg qselect [OPTION]... [GUARD]...')),
2245 2279 "qseries":
2246 2280 (series,
2247 2281 [('m', 'missing', None, _('print patches not in series')),
2248 2282 ] + seriesopts,
2249 2283 _('hg qseries [-ms]')),
2250 2284 "^strip":
2251 2285 (strip,
2252 2286 [('f', 'force', None, _('force multi-head removal')),
2253 2287 ('b', 'backup', None, _('bundle unrelated changesets')),
2254 2288 ('n', 'nobackup', None, _('no backups'))],
2255 2289 _('hg strip [-f] [-b] [-n] REV')),
2256 2290 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2257 2291 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2258 2292 }
@@ -1,2965 +1,2966 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 13 import errno, version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 15
16 16 # Commands start here, listed alphabetically
17 17
18 18 def add(ui, repo, *pats, **opts):
19 19 """add the specified files on the next commit
20 20
21 21 Schedule files to be version controlled and added to the repository.
22 22
23 23 The files will be added to the repository at the next commit. To
24 24 undo an add before that, see hg revert.
25 25
26 26 If no names are given, add all files in the repository.
27 27 """
28 28
29 29 names = []
30 30 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 31 if exact:
32 32 if ui.verbose:
33 33 ui.status(_('adding %s\n') % rel)
34 34 names.append(abs)
35 35 elif abs not in repo.dirstate:
36 36 ui.status(_('adding %s\n') % rel)
37 37 names.append(abs)
38 38 if not opts.get('dry_run'):
39 39 repo.add(names)
40 40
41 41 def addremove(ui, repo, *pats, **opts):
42 42 """add all new files, delete all missing files
43 43
44 44 Add all new files and remove all missing files from the repository.
45 45
46 46 New files are ignored if they match any of the patterns in .hgignore. As
47 47 with add, these changes take effect at the next commit.
48 48
49 49 Use the -s option to detect renamed files. With a parameter > 0,
50 50 this compares every removed file with every added file and records
51 51 those similar enough as renames. This option takes a percentage
52 52 between 0 (disabled) and 100 (files must be identical) as its
53 53 parameter. Detecting renamed files this way can be expensive.
54 54 """
55 55 try:
56 56 sim = float(opts.get('similarity') or 0)
57 57 except ValueError:
58 58 raise util.Abort(_('similarity must be a number'))
59 59 if sim < 0 or sim > 100:
60 60 raise util.Abort(_('similarity must be between 0 and 100'))
61 61 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
62 62
63 63 def annotate(ui, repo, *pats, **opts):
64 64 """show changeset information per file line
65 65
66 66 List changes in files, showing the revision id responsible for each line
67 67
68 68 This command is useful to discover who did a change or when a change took
69 69 place.
70 70
71 71 Without the -a option, annotate will avoid processing files it
72 72 detects as binary. With -a, annotate will generate an annotation
73 73 anyway, probably with undesirable results.
74 74 """
75 75 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
76 76
77 77 if not pats:
78 78 raise util.Abort(_('at least one file name or pattern required'))
79 79
80 80 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
81 81 ('number', lambda x: str(x[0].rev())),
82 82 ('changeset', lambda x: short(x[0].node())),
83 83 ('date', getdate),
84 84 ('follow', lambda x: x[0].path()),
85 85 ]
86 86
87 87 if (not opts['user'] and not opts['changeset'] and not opts['date']
88 88 and not opts['follow']):
89 89 opts['number'] = 1
90 90
91 91 linenumber = opts.get('line_number') is not None
92 92 if (linenumber and (not opts['changeset']) and (not opts['number'])):
93 93 raise util.Abort(_('at least one of -n/-c is required for -l'))
94 94
95 95 funcmap = [func for op, func in opmap if opts.get(op)]
96 96 if linenumber:
97 97 lastfunc = funcmap[-1]
98 98 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
99 99
100 100 ctx = repo.changectx(opts['rev'])
101 101
102 102 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
103 103 node=ctx.node()):
104 104 fctx = ctx.filectx(abs)
105 105 if not opts['text'] and util.binary(fctx.data()):
106 106 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
107 107 continue
108 108
109 109 lines = fctx.annotate(follow=opts.get('follow'),
110 110 linenumber=linenumber)
111 111 pieces = []
112 112
113 113 for f in funcmap:
114 114 l = [f(n) for n, dummy in lines]
115 115 if l:
116 116 m = max(map(len, l))
117 117 pieces.append(["%*s" % (m, x) for x in l])
118 118
119 119 if pieces:
120 120 for p, l in zip(zip(*pieces), lines):
121 121 ui.write("%s: %s" % (" ".join(p), l[1]))
122 122
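# Sketch of the column-alignment technique used by annotate above, with
# plain lists instead of filectx data: every field is right-justified to
# the widest value in its column, then the columns are zipped back into
# per-line prefixes.
def align_columns(columns):
    padded = []
    for col in columns:
        width = max(map(len, col))
        padded.append(["%*s" % (width, x) for x in col])
    return [" ".join(row) for row in zip(*padded)]

# align_columns([['mpm', 'bos'], ['12', '5676']])
# -> ['mpm   12', 'bos 5676']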
123 123 def archive(ui, repo, dest, **opts):
124 124 '''create unversioned archive of a repository revision
125 125
126 126 By default, the revision used is the parent of the working
127 127 directory; use "-r" to specify a different revision.
128 128
129 129 To specify the type of archive to create, use "-t". Valid
130 130 types are:
131 131
132 132 "files" (default): a directory full of files
133 133 "tar": tar archive, uncompressed
134 134 "tbz2": tar archive, compressed using bzip2
135 135 "tgz": tar archive, compressed using gzip
136 136 "uzip": zip archive, uncompressed
137 137 "zip": zip archive, compressed using deflate
138 138
139 139 The exact name of the destination archive or directory is given
140 140 using a format string; see "hg help export" for details.
141 141
142 142 Each member added to an archive file has a directory prefix
143 143 prepended. Use "-p" to specify a format string for the prefix.
144 144 The default is the basename of the archive, with suffixes removed.
145 145 '''
146 146
147 147 ctx = repo.changectx(opts['rev'])
148 148 if not ctx:
149 149 raise util.Abort(_('repository has no revisions'))
150 150 node = ctx.node()
151 151 dest = cmdutil.make_filename(repo, dest, node)
152 152 if os.path.realpath(dest) == repo.root:
153 153 raise util.Abort(_('repository root cannot be destination'))
154 154 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
155 155 kind = opts.get('type') or 'files'
156 156 prefix = opts['prefix']
157 157 if dest == '-':
158 158 if kind == 'files':
159 159 raise util.Abort(_('cannot archive plain files to stdout'))
160 160 dest = sys.stdout
161 161 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
162 162 prefix = cmdutil.make_filename(repo, prefix, node)
163 163 archival.archive(repo, dest, node, kind, not opts['no_decode'],
164 164 matchfn, prefix)
165 165
166 166 def backout(ui, repo, node=None, rev=None, **opts):
167 167 '''reverse effect of earlier changeset
168 168
169 169 Commit the backed out changes as a new changeset. The new
170 170 changeset is a child of the backed out changeset.
171 171
172 172 If you back out a changeset other than the tip, a new head is
173 173 created. This head is the parent of the working directory. If
174 174 you back out an old changeset, your working directory will appear
175 175 old after the backout. You should merge the backout changeset
176 176 with another head.
177 177
178 178 The --merge option remembers the parent of the working directory
179 179 before starting the backout, then merges the new head with that
180 180 changeset afterwards. This saves you from doing the merge by
181 181 hand. The result of this merge is not committed, as for a normal
182 182 merge.'''
183 183 if rev and node:
184 184 raise util.Abort(_("please specify just one revision"))
185 185
186 186 if not rev:
187 187 rev = node
188 188
189 189 if not rev:
190 190 raise util.Abort(_("please specify a revision to backout"))
191 191
192 192 cmdutil.bail_if_changed(repo)
193 193 op1, op2 = repo.dirstate.parents()
194 194 if op2 != nullid:
195 195 raise util.Abort(_('outstanding uncommitted merge'))
196 196 node = repo.lookup(rev)
197 197
198 198 a = repo.changelog.ancestor(op1, node)
199 199 if a != node:
200 200 raise util.Abort(_('cannot back out change on a different branch'))
201 201
202 202 p1, p2 = repo.changelog.parents(node)
203 203 if p1 == nullid:
204 204 raise util.Abort(_('cannot back out a change with no parents'))
205 205 if p2 != nullid:
206 206 if not opts['parent']:
207 207 raise util.Abort(_('cannot back out a merge changeset without '
208 208 '--parent'))
209 209 p = repo.lookup(opts['parent'])
210 210 if p not in (p1, p2):
211 211 raise util.Abort(_('%s is not a parent of %s') %
212 212 (short(p), short(node)))
213 213 parent = p
214 214 else:
215 215 if opts['parent']:
216 216 raise util.Abort(_('cannot use --parent on non-merge changeset'))
217 217 parent = p1
218 218
219 219 hg.clean(repo, node, show_stats=False)
220 220 revert_opts = opts.copy()
221 221 revert_opts['date'] = None
222 222 revert_opts['all'] = True
223 223 revert_opts['rev'] = hex(parent)
224 224 revert(ui, repo, **revert_opts)
225 225 commit_opts = opts.copy()
226 226 commit_opts['addremove'] = False
227 227 if not commit_opts['message'] and not commit_opts['logfile']:
228 228 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
229 229 commit_opts['force_editor'] = True
230 230 commit(ui, repo, **commit_opts)
231 231 def nice(node):
232 232 return '%d:%s' % (repo.changelog.rev(node), short(node))
233 233 ui.status(_('changeset %s backs out changeset %s\n') %
234 234 (nice(repo.changelog.tip()), nice(node)))
235 235 if op1 != node:
236 236 if opts['merge']:
237 237 ui.status(_('merging with changeset %s\n') % nice(op1))
238 238 hg.merge(repo, hex(op1))
239 239 else:
240 240 ui.status(_('the backout changeset is a new head - '
241 241 'do not forget to merge\n'))
242 242 ui.status(_('(use "backout --merge" '
243 243 'if you want to auto-merge)\n'))
244 244
245 245 def branch(ui, repo, label=None, **opts):
246 246 """set or show the current branch name
247 247
248 248 With no argument, show the current branch name. With one argument,
249 249 set the working directory branch name (the branch does not exist in
250 250 the repository until the next commit).
251 251
252 252 Unless --force is specified, branch will not let you set a
253 253 branch name that shadows an existing branch.
254 254 """
255 255
256 256 if label:
257 257 if not opts.get('force') and label in repo.branchtags():
258 258 if label not in [p.branch() for p in repo.workingctx().parents()]:
259 259 raise util.Abort(_('a branch of the same name already exists'
260 260 ' (use --force to override)'))
261 261 repo.dirstate.setbranch(util.fromlocal(label))
262 262 ui.status(_('marked working directory as branch %s\n') % label)
263 263 else:
264 264 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
265 265
266 266 def branches(ui, repo, active=False):
267 267 """list repository named branches
268 268
269 269 List the repository's named branches, indicating which ones are
270 270 inactive. If active is specified, only show active branches.
271 271
272 272 A branch is considered active if it contains unmerged heads.
273 273 """
274 274 b = repo.branchtags()
275 275 heads = dict.fromkeys(repo.heads(), 1)
276 276 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
277 277 l.sort()
278 278 l.reverse()
279 279 for ishead, r, n, t in l:
280 280 if active and not ishead:
281 281 # If we're only displaying active branches, abort the loop on
282 282 # encountering the first inactive head
283 283 break
284 284 else:
285 285 hexfunc = ui.debugflag and hex or short
286 286 if ui.quiet:
287 287 ui.write("%s\n" % t)
288 288 else:
289 289 spaces = " " * (30 - util.locallen(t))
290 290 # The code only gets here if inactive branches are being
291 291 # displayed or the branch is active.
292 292 isinactive = ((not ishead) and " (inactive)") or ''
293 293 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
294 294
295 295 def bundle(ui, repo, fname, dest=None, **opts):
296 296 """create a changegroup file
297 297
298 298 Generate a compressed changegroup file collecting changesets not
299 299 found in the other repository.
300 300
301 301 If no destination repository is specified the destination is assumed
302 302 to have all the nodes specified by one or more --base parameters.
303 303
304 304 The bundle file can then be transferred using conventional means and
305 305 applied to another repository with the unbundle or pull command.
306 306 This is useful when direct push and pull are not available or when
307 307 exporting an entire repository is undesirable.
308 308
309 309 Applying bundles preserves all changeset contents including
310 310 permissions, copy/rename information, and revision history.
311 311 """
312 312 revs = opts.get('rev') or None
313 313 if revs:
314 314 revs = [repo.lookup(rev) for rev in revs]
315 315 base = opts.get('base')
316 316 if base:
317 317 if dest:
318 318 raise util.Abort(_("--base is incompatible with specifying "
319 319 "a destination"))
320 320 base = [repo.lookup(rev) for rev in base]
321 321 # create the right base
322 322 # XXX: nodesbetween / changegroup* should be "fixed" instead
323 323 o = []
324 324 has = {nullid: None}
325 325 for n in base:
326 326 has.update(repo.changelog.reachable(n))
327 327 if revs:
328 328 visit = list(revs)
329 329 else:
330 330 visit = repo.changelog.heads()
331 331 seen = {}
332 332 while visit:
333 333 n = visit.pop(0)
334 334 parents = [p for p in repo.changelog.parents(n) if p not in has]
335 335 if len(parents) == 0:
336 336 o.insert(0, n)
337 337 else:
338 338 for p in parents:
339 339 if p not in seen:
340 340 seen[p] = 1
341 341 visit.append(p)
342 342 else:
343 343 cmdutil.setremoteconfig(ui, opts)
344 344 dest, revs, checkout = hg.parseurl(
345 345 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
346 346 other = hg.repository(ui, dest)
347 347 o = repo.findoutgoing(other, force=opts['force'])
348 348
349 349 if revs:
350 350 cg = repo.changegroupsubset(o, revs, 'bundle')
351 351 else:
352 352 cg = repo.changegroup(o, 'bundle')
353 353 changegroup.writebundle(cg, fname, "HG10BZ")
354 354
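# Standalone sketch of the --base walk above: starting from the requested
# heads, follow parent links and collect the nodes whose parents are all
# reachable from the base set.  'parents' is assumed to be a plain dict
# of node -> parent list; 'has' stands in for the result of
# repo.changelog.reachable().
def outgoing_roots(heads, parents, has):
    roots, seen, visit = [], {}, list(heads)
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents.get(n, []) if p not in has]
        if not missing:
            roots.insert(0, n)
        else:
            for p in missing:
                if p not in seen:
                    seen[p] = 1
                    visit.append(p)
    return roots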
355 355 def cat(ui, repo, file1, *pats, **opts):
356 356 """output the current or given revision of files
357 357
358 358 Print the specified files as they were at the given revision.
359 359 If no revision is given, the parent of the working directory is used,
360 360 or tip if no revision is checked out.
361 361
362 362 Output may be to a file, in which case the name of the file is
363 363 given using a format string. The formatting rules are the same as
364 364 for the export command, with the following additions:
365 365
366 366 %s basename of file being printed
367 367 %d dirname of file being printed, or '.' if in repo root
368 368 %p root-relative path name of file being printed
369 369 """
370 370 ctx = repo.changectx(opts['rev'])
371 371 err = 1
372 372 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
373 373 ctx.node()):
374 374 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
375 375 fp.write(ctx.filectx(abs).data())
376 376 err = 0
377 377 return err
378 378
379 379 def clone(ui, source, dest=None, **opts):
380 380 """make a copy of an existing repository
381 381
382 382 Create a copy of an existing repository in a new directory.
383 383
384 384 If no destination directory name is specified, it defaults to the
385 385 basename of the source.
386 386
387 387 The location of the source is added to the new repository's
388 388 .hg/hgrc file, as the default to be used for future pulls.
389 389
390 390 For efficiency, hardlinks are used for cloning whenever the source
391 391 and destination are on the same filesystem (note this applies only
392 392 to the repository data, not to the checked out files). Some
393 393 filesystems, such as AFS, implement hardlinking incorrectly, but
394 394 do not report errors. In these cases, use the --pull option to
395 395 avoid hardlinking.
396 396
397 397 You can safely clone repositories and checked out files using full
398 398 hardlinks with
399 399
400 400 $ cp -al REPO REPOCLONE
401 401
402 402 which is the fastest way to clone. However, the operation is not
403 403 atomic (making sure REPO is not modified during the operation is
404 404 up to you) and you have to make sure your editor breaks hardlinks
405 405 (Emacs and most Linux Kernel tools do so).
406 406
407 407 If you use the -r option to clone up to a specific revision, no
408 408 subsequent revisions will be present in the cloned repository.
409 409 This option implies --pull, even on local repositories.
410 410
411 411 See pull for valid source format details.
412 412
413 413 It is possible to specify an ssh:// URL as the destination, but no
414 414 .hg/hgrc and working directory will be created on the remote side.
415 415 Look at the help text for the pull command for important details
416 416 about ssh:// URLs.
417 417 """
418 418 cmdutil.setremoteconfig(ui, opts)
419 419 hg.clone(ui, source, dest,
420 420 pull=opts['pull'],
421 421 stream=opts['uncompressed'],
422 422 rev=opts['rev'],
423 423 update=not opts['noupdate'])
424 424
425 425 def commit(ui, repo, *pats, **opts):
426 426 """commit the specified files or all outstanding changes
427 427
428 428 Commit changes to the given files into the repository.
429 429
430 430 If a list of files is omitted, all changes reported by "hg status"
431 431 will be committed.
432 432
433 433 If no commit message is specified, the editor configured in your hgrc
434 434 or in the EDITOR environment variable is started to enter a message.
435 435 """
436 436 def commitfunc(ui, repo, files, message, match, opts):
437 437 return repo.commit(files, message, opts['user'], opts['date'], match,
438 438 force_editor=opts.get('force_editor'))
439 439 cmdutil.commit(ui, repo, commitfunc, pats, opts)
440 440
441 441 def copy(ui, repo, *pats, **opts):
442 442 """mark files as copied for the next commit
443 443
444 444 Mark dest as having copies of source files. If dest is a
445 445 directory, copies are put in that directory. If dest is a file,
446 446 there can only be one source.
447 447
448 448 By default, this command copies the contents of files as they
449 449 stand in the working directory. If invoked with --after, the
450 450 operation is recorded, but no copying is performed.
451 451
452 452 This command takes effect in the next commit. To undo a copy
453 453 before that, see hg revert.
454 454 """
455 455 wlock = repo.wlock(False)
456 456 try:
457 457 return cmdutil.copy(ui, repo, pats, opts)
458 458 finally:
459 459 del wlock
460 460
461 461 def debugancestor(ui, index, rev1, rev2):
462 462 """find the ancestor revision of two revisions in a given index"""
463 463 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
464 464 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
465 465 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
466 466
467 467 def debugcomplete(ui, cmd='', **opts):
468 468 """returns the completion list associated with the given command"""
469 469
470 470 if opts['options']:
471 471 options = []
472 472 otables = [globalopts]
473 473 if cmd:
474 474 aliases, entry = cmdutil.findcmd(ui, cmd, table)
475 475 otables.append(entry[1])
476 476 for t in otables:
477 477 for o in t:
478 478 if o[0]:
479 479 options.append('-%s' % o[0])
480 480 options.append('--%s' % o[1])
481 481 ui.write("%s\n" % "\n".join(options))
482 482 return
483 483
484 484 clist = cmdutil.findpossible(ui, cmd, table).keys()
485 485 clist.sort()
486 486 ui.write("%s\n" % "\n".join(clist))
487 487
488 488 def debugrebuildstate(ui, repo, rev=""):
489 489 """rebuild the dirstate as it would look like for the given revision"""
490 490 if rev == "":
491 491 rev = repo.changelog.tip()
492 492 ctx = repo.changectx(rev)
493 493 files = ctx.manifest()
494 494 wlock = repo.wlock()
495 495 try:
496 496 repo.dirstate.rebuild(rev, files)
497 497 finally:
498 498 del wlock
499 499
500 500 def debugcheckstate(ui, repo):
501 501 """validate the correctness of the current dirstate"""
502 502 parent1, parent2 = repo.dirstate.parents()
503 503 m1 = repo.changectx(parent1).manifest()
504 504 m2 = repo.changectx(parent2).manifest()
505 505 errors = 0
506 506 for f in repo.dirstate:
507 507 state = repo.dirstate[f]
508 508 if state in "nr" and f not in m1:
509 509 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
510 510 errors += 1
511 511 if state in "a" and f in m1:
512 512 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
513 513 errors += 1
514 514 if state in "m" and f not in m1 and f not in m2:
515 515 ui.warn(_("%s in state %s, but not in either manifest\n") %
516 516 (f, state))
517 517 errors += 1
518 518 for f in m1:
519 519 state = repo.dirstate[f]
520 520 if state not in "nrm":
521 521 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
522 522 errors += 1
523 523 if errors:
524 524 error = _(".hg/dirstate inconsistent with current parent's manifest")
525 525 raise util.Abort(error)
526 526
527 527 def showconfig(ui, repo, *values, **opts):
528 528 """show combined config settings from all hgrc files
529 529
530 530 With no args, print names and values of all config items.
531 531
532 532 With one arg of the form section.name, print just the value of
533 533 that config item.
534 534
535 535 With multiple args, print names and values of all config items
536 536 with matching section names."""
537 537
538 538 untrusted = bool(opts.get('untrusted'))
539 539 if values:
540 540 if len([v for v in values if '.' in v]) > 1:
541 541 raise util.Abort(_('only one config item permitted'))
542 542 for section, name, value in ui.walkconfig(untrusted=untrusted):
543 543 sectname = section + '.' + name
544 544 if values:
545 545 for v in values:
546 546 if v == section:
547 547 ui.write('%s=%s\n' % (sectname, value))
548 548 elif v == sectname:
549 549 ui.write(value, '\n')
550 550 else:
551 551 ui.write('%s=%s\n' % (sectname, value))
552 552
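# Sketch of the filtering logic above with plain data: given flat
# (section, name, value) triples, emit everything, a whole section, or a
# single section.name item, mirroring the three showconfig forms.
def filter_config(items, values):
    out = []
    for section, name, value in items:
        sectname = '%s.%s' % (section, name)
        if not values:
            out.append('%s=%s' % (sectname, value))
            continue
        for v in values:
            if v == section:
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:
                out.append(value)
    return out

# filter_config([('ui', 'username', 'mpm')], ['ui']) -> ['ui.username=mpm']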
553 553 def debugsetparents(ui, repo, rev1, rev2=None):
554 554 """manually set the parents of the current working directory
555 555
556 556 This is useful for writing repository conversion tools, but should
557 557 be used with care.
558 558 """
559 559
560 560 if not rev2:
561 561 rev2 = hex(nullid)
562 562
563 563 wlock = repo.wlock()
564 564 try:
565 565 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
566 566 finally:
567 567 del wlock
568 568
569 569 def debugstate(ui, repo):
570 570 """show the contents of the current dirstate"""
571 571 k = repo.dirstate._map.items()
572 572 k.sort()
573 573 for file_, ent in k:
574 574 if ent[3] == -1:
575 575 # Pad or slice to locale representation
576 576 locale_len = len(time.strftime("%x %X", time.localtime(0)))
577 577 timestr = 'unset'
578 578 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
579 579 else:
580 580 timestr = time.strftime("%x %X", time.localtime(ent[3]))
581 581 if ent[1] & 020000:
582 582 mode = 'lnk'
583 583 else:
584 584 mode = '%3o' % (ent[1] & 0777)
585 585 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
586 586 for f in repo.dirstate.copies():
587 587 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
588 588
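# Sketch of the mode decoding above (Python 2 octal literals, as in the
# surrounding code): dirstate entries store the raw st_mode, so the
# symlink bit and the permission bits come out with simple masks.
def describe_mode(st_mode):
    if st_mode & 020000:              # symlink bit, as tested above
        return 'lnk'
    return '%3o' % (st_mode & 0777)   # e.g. 0100644 -> '644'

# describe_mode(0100644) -> '644'; describe_mode(0120777) -> 'lnk'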
589 589 def debugdata(ui, file_, rev):
590 590 """dump the contents of a data file revision"""
591 591 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
592 592 try:
593 593 ui.write(r.revision(r.lookup(rev)))
594 594 except KeyError:
595 595 raise util.Abort(_('invalid revision identifier %s') % rev)
596 596
597 597 def debugdate(ui, date, range=None, **opts):
598 598 """parse and display a date"""
599 599 if opts["extended"]:
600 600 d = util.parsedate(date, util.extendeddateformats)
601 601 else:
602 602 d = util.parsedate(date)
603 603 ui.write("internal: %s %s\n" % d)
604 604 ui.write("standard: %s\n" % util.datestr(d))
605 605 if range:
606 606 m = util.matchdate(range)
607 607 ui.write("match: %s\n" % m(d[0]))
608 608
609 609 def debugindex(ui, file_):
610 610 """dump the contents of an index file"""
611 611 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
612 612 ui.write(" rev offset length base linkrev" +
613 613 " nodeid p1 p2\n")
614 614 for i in xrange(r.count()):
615 615 node = r.node(i)
616 616 try:
617 617 pp = r.parents(node)
618 618 except:
619 619 pp = [nullid, nullid]
620 620 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
621 621 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
622 622 short(node), short(pp[0]), short(pp[1])))
623 623
624 624 def debugindexdot(ui, file_):
625 625 """dump an index DAG as a .dot file"""
626 626 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
627 627 ui.write("digraph G {\n")
628 628 for i in xrange(r.count()):
629 629 node = r.node(i)
630 630 pp = r.parents(node)
631 631 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
632 632 if pp[1] != nullid:
633 633 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
634 634 ui.write("}\n")
635 635
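# Generic sketch of the DAG-to-Graphviz dump above, with the revlog
# replaced by a plain {rev: (p1rev, p2rev)} dict; -1 stands in for the
# null revision, as nullid does in the real code.
def dag_to_dot(parentmap):
    lines = ["digraph G {"]
    for rev in sorted(parentmap):
        p1, p2 = parentmap[rev]
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# dag_to_dot({0: (-1, -1), 1: (0, -1), 2: (1, 0)})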
636 636 def debuginstall(ui):
637 637 '''test Mercurial installation'''
638 638
639 639 def writetemp(contents):
640 640 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
641 641 f = os.fdopen(fd, "wb")
642 642 f.write(contents)
643 643 f.close()
644 644 return name
645 645
646 646 problems = 0
647 647
648 648 # encoding
649 649 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
650 650 try:
651 651 util.fromlocal("test")
652 652 except util.Abort, inst:
653 653 ui.write(" %s\n" % inst)
654 654 ui.write(_(" (check that your locale is properly set)\n"))
655 655 problems += 1
656 656
657 657 # compiled modules
658 658 ui.status(_("Checking extensions...\n"))
659 659 try:
660 660 import bdiff, mpatch, base85
661 661 except Exception, inst:
662 662 ui.write(" %s\n" % inst)
663 663 ui.write(_(" One or more extensions could not be found"))
664 664 ui.write(_(" (check that you compiled the extensions)\n"))
665 665 problems += 1
666 666
667 667 # templates
668 668 ui.status(_("Checking templates...\n"))
669 669 try:
670 670 import templater
671 671 t = templater.templater(templater.templatepath("map-cmdline.default"))
672 672 except Exception, inst:
673 673 ui.write(" %s\n" % inst)
674 674 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
675 675 problems += 1
676 676
677 677 # patch
678 678 ui.status(_("Checking patch...\n"))
679 679 patchproblems = 0
680 680 a = "1\n2\n3\n4\n"
681 681 b = "1\n2\n3\ninsert\n4\n"
682 682 fa = writetemp(a)
683 683 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
684 684 os.path.basename(fa))
685 685 fd = writetemp(d)
686 686
687 687 files = {}
688 688 try:
689 689 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
690 690 except util.Abort, e:
691 691 ui.write(_(" patch call failed:\n"))
692 692 ui.write(" " + str(e) + "\n")
693 693 patchproblems += 1
694 694 else:
695 695 if list(files) != [os.path.basename(fa)]:
696 696 ui.write(_(" unexpected patch output!\n"))
697 697 patchproblems += 1
698 698 a = file(fa).read()
699 699 if a != b:
700 700 ui.write(_(" patch test failed!\n"))
701 701 patchproblems += 1
702 702
703 703 if patchproblems:
704 704 if ui.config('ui', 'patch'):
705 705 ui.write(_(" (Current patch tool may be incompatible with patch,"
706 706 " or misconfigured. Please check your .hgrc file)\n"))
707 707 else:
708 708 ui.write(_(" Internal patcher failure, please report this error"
709 709 " to http://www.selenic.com/mercurial/bts\n"))
710 710 problems += patchproblems
711 711
712 712 os.unlink(fa)
713 713 os.unlink(fd)
714 714
715 715 # merge helper
716 716 ui.status(_("Checking merge helper...\n"))
717 717 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
718 718 or "hgmerge")
719 719 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
720 720 if not cmdpath:
721 721 if cmd == 'hgmerge':
722 722 ui.write(_(" No merge helper set and can't find default"
723 723 " hgmerge script in PATH\n"))
724 724 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
725 725 else:
726 726 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
727 727 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
728 728 problems += 1
729 729 else:
730 730 # actually attempt a patch here
731 731 fa = writetemp("1\n2\n3\n4\n")
732 732 fl = writetemp("1\n2\n3\ninsert\n4\n")
733 733 fr = writetemp("begin\n1\n2\n3\n4\n")
734 734 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
735 735 if r:
736 736 ui.write(_(" Got unexpected merge error %d!\n") % r)
737 737 problems += 1
738 738 m = file(fl).read()
739 739 if m != "begin\n1\n2\n3\ninsert\n4\n":
740 740 ui.write(_(" Got unexpected merge results!\n"))
741 741 ui.write(_(" (your merge helper may have the"
742 742 " wrong argument order)\n"))
743 743 ui.write(_(" Result: %r\n") % m)
744 744 problems += 1
745 745 os.unlink(fa)
746 746 os.unlink(fl)
747 747 os.unlink(fr)
748 748
749 749 # editor
750 750 ui.status(_("Checking commit editor...\n"))
751 751 editor = (os.environ.get("HGEDITOR") or
752 752 ui.config("ui", "editor") or
753 753 os.environ.get("EDITOR", "vi"))
754 754 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
755 755 if not cmdpath:
756 756 if editor == 'vi':
757 757 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
758 758 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
759 759 else:
760 760 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
761 761 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
762 762 problems += 1
763 763
764 764 # check username
765 765 ui.status(_("Checking username...\n"))
766 766 user = os.environ.get("HGUSER")
767 767 if user is None:
768 768 user = ui.config("ui", "username")
769 769 if user is None:
770 770 user = os.environ.get("EMAIL")
771 771 if not user:
772 772 ui.warn(" ")
773 773 ui.username()
774 774 ui.write(_(" (specify a username in your .hgrc file)\n"))
775 775
776 776 if not problems:
777 777 ui.status(_("No problems detected\n"))
778 778 else:
779 779 ui.write(_("%s problems detected,"
780 780 " please check your install!\n") % problems)
781 781
782 782 return problems
783 783
784 784 def debugrename(ui, repo, file1, *pats, **opts):
785 785 """dump rename information"""
786 786
787 787 ctx = repo.changectx(opts.get('rev', 'tip'))
788 788 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
789 789 ctx.node()):
790 790 m = ctx.filectx(abs).renamed()
791 791 if m:
792 792 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
793 793 else:
794 794 ui.write(_("%s not renamed\n") % rel)
795 795
796 796 def debugwalk(ui, repo, *pats, **opts):
797 797 """show how files match on given patterns"""
798 798 items = list(cmdutil.walk(repo, pats, opts))
799 799 if not items:
800 800 return
801 801 fmt = '%%s %%-%ds %%-%ds %%s' % (
802 802 max([len(abs) for (src, abs, rel, exact) in items]),
803 803 max([len(rel) for (src, abs, rel, exact) in items]))
804 804 for src, abs, rel, exact in items:
805 805 line = fmt % (src, abs, rel, exact and 'exact' or '')
806 806 ui.write("%s\n" % line.rstrip())
807 807
808 808 def diff(ui, repo, *pats, **opts):
809 809 """diff repository (or selected files)
810 810
811 811 Show differences between revisions for the specified files.
812 812
813 813 Differences between files are shown using the unified diff format.
814 814
815 815 NOTE: diff may generate unexpected results for merges, as it will
816 816 default to comparing against the working directory's first parent
817 817 changeset if no revisions are specified.
818 818
819 819 When two revision arguments are given, then changes are shown
820 820 between those revisions. If only one revision is specified then
821 821 that revision is compared to the working directory, and, when no
822 822 revisions are specified, the working directory files are compared
823 823 to its parent.
824 824
825 825 Without the -a option, diff will avoid generating diffs of files
826 826 it detects as binary. With -a, diff will generate a diff anyway,
827 827 probably with undesirable results.
828 828 """
829 829 node1, node2 = cmdutil.revpair(repo, opts['rev'])
830 830
831 831 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
832 832
833 833 patch.diff(repo, node1, node2, fns, match=matchfn,
834 834 opts=patch.diffopts(ui, opts))
835 835
836 836 def export(ui, repo, *changesets, **opts):
837 837 """dump the header and diffs for one or more changesets
838 838
839 839 Print the changeset header and diffs for one or more revisions.
840 840
841 841 The information shown in the changeset header is: author,
842 842 changeset hash, parent(s) and commit comment.
843 843
844 844 NOTE: export may generate unexpected diff output for merge changesets,
845 845 as it will compare the merge changeset against its first parent only.
846 846
847 847 Output may be to a file, in which case the name of the file is
848 848 given using a format string. The formatting rules are as follows:
849 849
850 850 %% literal "%" character
851 851 %H changeset hash (40 bytes of hexadecimal)
852 852 %N number of patches being generated
853 853 %R changeset revision number
854 854 %b basename of the exporting repository
855 855 %h short-form changeset hash (12 bytes of hexadecimal)
856 856 %n zero-padded sequence number, starting at 1
857 857 %r zero-padded changeset revision number
858 858
859 859 Without the -a option, export will avoid generating diffs of files
860 860 it detects as binary. With -a, export will generate a diff anyway,
861 861 probably with undesirable results.
862 862
863 863 With the --switch-parent option, the diff will be against the second
864 864 parent. It can be useful to review a merge.
865 865 """
866 866 if not changesets:
867 867 raise util.Abort(_("export requires at least one changeset"))
868 868 revs = cmdutil.revrange(repo, changesets)
869 869 if len(revs) > 1:
870 870 ui.note(_('exporting patches:\n'))
871 871 else:
872 872 ui.note(_('exporting patch:\n'))
873 873 patch.export(repo, revs, template=opts['output'],
874 874 switch_parent=opts['switch_parent'],
875 875 opts=patch.diffopts(ui, opts))
876 876
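# Hypothetical illustration of the output-filename format documented
# above; the real expansion lives in cmdutil.make_filename, so the names
# and the padding width chosen here are for illustration only.
def expand_export_name(fmt, rev, node, seqno, total, reponame):
    subs = {'%': '%',
            'H': node,            # full changeset hash
            'h': node[:12],       # short-form hash
            'R': str(rev),        # revision number
            'r': str(rev).zfill(len(str(total))),
            'N': str(total),      # number of patches generated
            'n': str(seqno).zfill(len(str(total))),
            'b': reponame}        # basename of the exporting repo
    out, i = [], 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] in subs:
            out.append(subs[fmt[i + 1]])
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

# expand_export_name('%b-%n.patch', 5676, '9ed65758' * 5, 1, 3, 'hg')
# -> 'hg-1.patch'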
877 877 def grep(ui, repo, pattern, *pats, **opts):
878 878 """search for a pattern in specified files and revisions
879 879
880 880 Search revisions of files for a regular expression.
881 881
882 882 This command behaves differently from Unix grep. It only accepts
883 883 Python/Perl regexps. It searches repository history, not the
884 884 working directory. It always prints the revision number in which
885 885 a match appears.
886 886
887 887 By default, grep only prints output for the first revision of a
888 888 file in which it finds a match. To get it to print every revision
889 889 that contains a change in match status ("-" for a match that
890 890 becomes a non-match, or "+" for a non-match that becomes a match),
891 891 use the --all flag.
892 892 """
893 893 reflags = 0
894 894 if opts['ignore_case']:
895 895 reflags |= re.I
896 896 try:
897 897 regexp = re.compile(pattern, reflags)
898 898 except Exception, inst:
899 899 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
900 900 return None
901 901 sep, eol = ':', '\n'
902 902 if opts['print0']:
903 903 sep = eol = '\0'
904 904
905 905 fcache = {}
906 906 def getfile(fn):
907 907 if fn not in fcache:
908 908 fcache[fn] = repo.file(fn)
909 909 return fcache[fn]
910 910
911 911 def matchlines(body):
912 912 begin = 0
913 913 linenum = 0
914 914 while True:
915 915 match = regexp.search(body, begin)
916 916 if not match:
917 917 break
918 918 mstart, mend = match.span()
919 919 linenum += body.count('\n', begin, mstart) + 1
920 920 lstart = body.rfind('\n', begin, mstart) + 1 or begin
921 921 lend = body.find('\n', mend)
922 922 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
923 923 begin = lend + 1
924 924
925 925 class linestate(object):
926 926 def __init__(self, line, linenum, colstart, colend):
927 927 self.line = line
928 928 self.linenum = linenum
929 929 self.colstart = colstart
930 930 self.colend = colend
931 931
932 932 def __eq__(self, other):
933 933 return self.line == other.line
934 934
935 935 matches = {}
936 936 copies = {}
937 937 def grepbody(fn, rev, body):
938 938 matches[rev].setdefault(fn, [])
939 939 m = matches[rev][fn]
940 940 for lnum, cstart, cend, line in matchlines(body):
941 941 s = linestate(line, lnum, cstart, cend)
942 942 m.append(s)
943 943
944 944 def difflinestates(a, b):
945 945 sm = difflib.SequenceMatcher(None, a, b)
946 946 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
947 947 if tag == 'insert':
948 948 for i in xrange(blo, bhi):
949 949 yield ('+', b[i])
950 950 elif tag == 'delete':
951 951 for i in xrange(alo, ahi):
952 952 yield ('-', a[i])
953 953 elif tag == 'replace':
954 954 for i in xrange(alo, ahi):
955 955 yield ('-', a[i])
956 956 for i in xrange(blo, bhi):
957 957 yield ('+', b[i])
958 958
959 959 prev = {}
960 960 def display(fn, rev, states, prevstates):
961 961 found = False
962 962 filerevmatches = {}
963 963 r = prev.get(fn, -1)
964 964 if opts['all']:
965 965 iter = difflinestates(states, prevstates)
966 966 else:
967 967 iter = [('', l) for l in prevstates]
968 968 for change, l in iter:
969 969 cols = [fn, str(r)]
970 970 if opts['line_number']:
971 971 cols.append(str(l.linenum))
972 972 if opts['all']:
973 973 cols.append(change)
974 974 if opts['user']:
975 975 cols.append(ui.shortuser(get(r)[1]))
976 976 if opts['files_with_matches']:
977 977 c = (fn, r)
978 978 if c in filerevmatches:
979 979 continue
980 980 filerevmatches[c] = 1
981 981 else:
982 982 cols.append(l.line)
983 983 ui.write(sep.join(cols), eol)
984 984 found = True
985 985 return found
986 986
987 987 fstate = {}
988 988 skip = {}
989 989 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
990 990 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
991 991 found = False
992 992 follow = opts.get('follow')
993 993 for st, rev, fns in changeiter:
994 994 if st == 'window':
995 995 matches.clear()
996 996 elif st == 'add':
997 997 mf = repo.changectx(rev).manifest()
998 998 matches[rev] = {}
999 999 for fn in fns:
1000 1000 if fn in skip:
1001 1001 continue
1002 1002 try:
1003 1003 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1004 1004 fstate.setdefault(fn, [])
1005 1005 if follow:
1006 1006 copied = getfile(fn).renamed(mf[fn])
1007 1007 if copied:
1008 1008 copies.setdefault(rev, {})[fn] = copied[0]
1009 1009 except KeyError:
1010 1010 pass
1011 1011 elif st == 'iter':
1012 1012 states = matches[rev].items()
1013 1013 states.sort()
1014 1014 for fn, m in states:
1015 1015 copy = copies.get(rev, {}).get(fn)
1016 1016 if fn in skip:
1017 1017 if copy:
1018 1018 skip[copy] = True
1019 1019 continue
1020 1020 if fn in prev or fstate[fn]:
1021 1021 r = display(fn, rev, m, fstate[fn])
1022 1022 found = found or r
1023 1023 if r and not opts['all']:
1024 1024 skip[fn] = True
1025 1025 if copy:
1026 1026 skip[copy] = True
1027 1027 fstate[fn] = m
1028 1028 if copy:
1029 1029 fstate[copy] = m
1030 1030 prev[fn] = rev
1031 1031
1032 1032 fstate = fstate.items()
1033 1033 fstate.sort()
1034 1034 for fn, state in fstate:
1035 1035 if fn in skip:
1036 1036 continue
1037 1037 if fn not in copies.get(prev[fn], {}):
1038 1038 found = display(fn, rev, {}, state) or found
1039 1039 return (not found and 1) or 0
1040 1040
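# Standalone sketch of difflinestates() above, with plain strings in
# place of linestate objects: difflib.SequenceMatcher turns the match
# lists of two revisions into the "+"/"-" change markers that
# `hg grep --all` prints.
import difflib

def diffstates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield '-', line
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield '+', line

# list(diffstates(['foo'], ['foo', 'bar'])) -> [('+', 'bar')]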
1041 1041 def heads(ui, repo, *branchrevs, **opts):
1042 1042 """show current repository heads or show branch heads
1043 1043
1044 1044 With no arguments, show all repository head changesets.
1045 1045
1046 1046 If branch or revision names are given, this will show the heads of
1047 1047 the specified branches or the branches those revisions are tagged
1048 1048 with.
1049 1049
1050 1050 Repository "heads" are changesets that don't have child
1051 1051 changesets. They are where development generally takes place and
1052 1052 are the usual targets for update and merge operations.
1053 1053
1054 1054 Branch heads are changesets that have a given branch tag, but have
1055 1055 no child changesets with that tag. They are usually where
1056 1056 development on the given branch takes place.
1057 1057 """
1058 1058 if opts['rev']:
1059 1059 start = repo.lookup(opts['rev'])
1060 1060 else:
1061 1061 start = None
1062 1062 if not branchrevs:
1063 1063 # Assume we're looking repo-wide heads if no revs were specified.
1064 1064 heads = repo.heads(start)
1065 1065 else:
1066 1066 heads = []
1067 1067 visitedset = util.set()
1068 1068 for branchrev in branchrevs:
1069 1069 branch = repo.changectx(branchrev).branch()
1070 1070 if branch in visitedset:
1071 1071 continue
1072 1072 visitedset.add(branch)
1073 1073 bheads = repo.branchheads(branch, start)
1074 1074 if not bheads:
1075 1075 if branch != branchrev:
1076 1076 ui.warn(_("no changes on branch %s containing %s are "
1077 1077 "reachable from %s\n")
1078 1078 % (branch, branchrev, opts['rev']))
1079 1079 else:
1080 1080 ui.warn(_("no changes on branch %s are reachable from %s\n")
1081 1081 % (branch, opts['rev']))
1082 1082 heads.extend(bheads)
1083 1083 if not heads:
1084 1084 return 1
1085 1085 displayer = cmdutil.show_changeset(ui, repo, opts)
1086 1086 for n in heads:
1087 1087 displayer.show(changenode=n)
1088 1088
1089 1089 def help_(ui, name=None, with_version=False):
1090 1090 """show help for a command, extension, or list of commands
1091 1091
1092 1092 With no arguments, print a list of commands and short help.
1093 1093
1094 1094 Given a command name, print help for that command.
1095 1095
1096 1096 Given an extension name, print help for that extension, and the
1097 1097 commands it provides."""
1098 1098 option_lists = []
1099 1099
1100 1100 def addglobalopts(aliases):
1101 1101 if ui.verbose:
1102 1102 option_lists.append((_("global options:"), globalopts))
1103 1103 if name == 'shortlist':
1104 1104 option_lists.append((_('use "hg help" for the full list '
1105 1105 'of commands'), ()))
1106 1106 else:
1107 1107 if name == 'shortlist':
1108 1108 msg = _('use "hg help" for the full list of commands '
1109 1109 'or "hg -v" for details')
1110 1110 elif aliases:
1111 1111 msg = _('use "hg -v help%s" to show aliases and '
1112 1112 'global options') % (name and " " + name or "")
1113 1113 else:
1114 1114 msg = _('use "hg -v help %s" to show global options') % name
1115 1115 option_lists.append((msg, ()))
1116 1116
1117 1117 def helpcmd(name):
1118 1118 if with_version:
1119 1119 version_(ui)
1120 1120 ui.write('\n')
1121 1121 aliases, i = cmdutil.findcmd(ui, name, table)
1122 1122 # synopsis
1123 1123 ui.write("%s\n\n" % i[2])
1124 1124
1125 1125 # description
1126 1126 doc = i[0].__doc__
1127 1127 if not doc:
1128 1128 doc = _("(No help text available)")
1129 1129 if ui.quiet:
1130 1130 doc = doc.splitlines(0)[0]
1131 1131 ui.write("%s\n" % doc.rstrip())
1132 1132
1133 1133 if not ui.quiet:
1134 1134 # aliases
1135 1135 if len(aliases) > 1:
1136 1136 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1137 1137
1138 1138 # options
1139 1139 if i[1]:
1140 1140 option_lists.append((_("options:\n"), i[1]))
1141 1141
1142 1142 addglobalopts(False)
1143 1143
1144 1144 def helplist(header, select=None):
1145 1145 h = {}
1146 1146 cmds = {}
1147 1147 for c, e in table.items():
1148 1148 f = c.split("|", 1)[0]
1149 1149 if select and not select(f):
1150 1150 continue
1151 1151 if name == "shortlist" and not f.startswith("^"):
1152 1152 continue
1153 1153 f = f.lstrip("^")
1154 1154 if not ui.debugflag and f.startswith("debug"):
1155 1155 continue
1156 1156 doc = e[0].__doc__
1157 1157 if not doc:
1158 1158 doc = _("(No help text available)")
1159 1159 h[f] = doc.splitlines(0)[0].rstrip()
1160 1160 cmds[f] = c.lstrip("^")
1161 1161
1162 1162 if not h:
1163 1163 ui.status(_('no commands defined\n'))
1164 1164 return
1165 1165
1166 1166 ui.status(header)
1167 1167 fns = h.keys()
1168 1168 fns.sort()
1169 1169 m = max(map(len, fns))
1170 1170 for f in fns:
1171 1171 if ui.verbose:
1172 1172 commands = cmds[f].replace("|",", ")
1173 1173 ui.write(" %s:\n %s\n"%(commands, h[f]))
1174 1174 else:
1175 1175 ui.write(' %-*s %s\n' % (m, f, h[f]))
1176 1176
1177 1177 if not ui.quiet:
1178 1178 addglobalopts(True)
1179 1179
1180 1180 def helptopic(name):
1181 1181 v = None
1182 1182 for i in help.helptable:
1183 1183 l = i.split('|')
1184 1184 if name in l:
1185 1185 v = i
1186 1186 header = l[-1]
1187 1187 if not v:
1188 1188 raise cmdutil.UnknownCommand(name)
1189 1189
1190 1190 # description
1191 1191 doc = help.helptable[v]
1192 1192 if not doc:
1193 1193 doc = _("(No help text available)")
1194 1194 if callable(doc):
1195 1195 doc = doc()
1196 1196
1197 1197 ui.write("%s\n" % header)
1198 1198 ui.write("%s\n" % doc.rstrip())
1199 1199
1200 1200 def helpext(name):
1201 1201 try:
1202 1202 mod = extensions.find(name)
1203 1203 except KeyError:
1204 1204 raise cmdutil.UnknownCommand(name)
1205 1205
1206 1206 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1207 1207 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1208 1208 for d in doc[1:]:
1209 1209 ui.write(d, '\n')
1210 1210
1211 1211 ui.status('\n')
1212 1212
1213 1213 try:
1214 1214 ct = mod.cmdtable
1215 1215 except AttributeError:
1216 1216 ct = {}
1217 1217
1218 1218 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1219 1219 helplist(_('list of commands:\n\n'), modcmds.has_key)
1220 1220
1221 1221 if name and name != 'shortlist':
1222 1222 i = None
1223 1223 for f in (helpcmd, helptopic, helpext):
1224 1224 try:
1225 1225 f(name)
1226 1226 i = None
1227 1227 break
1228 1228 except cmdutil.UnknownCommand, inst:
1229 1229 i = inst
1230 1230 if i:
1231 1231 raise i
1232 1232
1233 1233 else:
1234 1234 # program name
1235 1235 if ui.verbose or with_version:
1236 1236 version_(ui)
1237 1237 else:
1238 1238 ui.status(_("Mercurial Distributed SCM\n"))
1239 1239 ui.status('\n')
1240 1240
1241 1241 # list of commands
1242 1242 if name == "shortlist":
1243 1243 header = _('basic commands:\n\n')
1244 1244 else:
1245 1245 header = _('list of commands:\n\n')
1246 1246
1247 1247 helplist(header)
1248 1248
1249 1249 # list all option lists
1250 1250 opt_output = []
1251 1251 for title, options in option_lists:
1252 1252 opt_output.append(("\n%s" % title, None))
1253 1253 for shortopt, longopt, default, desc in options:
1254 1254 if "DEPRECATED" in desc and not ui.verbose: continue
1255 1255 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1256 1256 longopt and " --%s" % longopt),
1257 1257 "%s%s" % (desc,
1258 1258 default
1259 1259 and _(" (default: %s)") % default
1260 1260 or "")))
1261 1261
1262 1262 if opt_output:
1263 1263 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1264 1264 for first, second in opt_output:
1265 1265 if second:
1266 1266 ui.write(" %-*s %s\n" % (opts_len, first, second))
1267 1267 else:
1268 1268 ui.write("%s\n" % first)
1269 1269
1270 1270 def identify(ui, repo, source=None,
1271 1271 rev=None, num=None, id=None, branch=None, tags=None):
1272 1272 """identify the working copy or specified revision
1273 1273
1274 1274 With no revision, print a summary of the current state of the repo.
1275 1275
1276 1276 With a path, do a lookup in another repository.
1277 1277
1278 1278 This summary identifies the repository state using one or two parent
1279 1279 hash identifiers, followed by a "+" if there are uncommitted changes
1280 1280 in the working directory, a list of tags for this revision and a branch
1281 1281 name for non-default branches.
1282 1282 """
1283 1283
1284 1284 if not repo and not source:
1285 1285 raise util.Abort(_("There is no Mercurial repository here "
1286 1286 "(.hg not found)"))
1287 1287
1288 1288 hexfunc = ui.debugflag and hex or short
1289 1289 default = not (num or id or branch or tags)
1290 1290 output = []
1291 1291
1292 1292 if source:
1293 1293 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1294 1294 srepo = hg.repository(ui, source)
1295 1295 if not rev and revs:
1296 1296 rev = revs[0]
1297 1297 if not rev:
1298 1298 rev = "tip"
1299 1299 if num or branch or tags:
1300 1300 raise util.Abort(
1301 1301 "can't query remote revision number, branch, or tags")
1302 1302 output = [hexfunc(srepo.lookup(rev))]
1303 1303 elif not rev:
1304 1304 ctx = repo.workingctx()
1305 1305 parents = ctx.parents()
1306 1306 changed = False
1307 1307 if default or id or num:
1308 1308 changed = ctx.files() + ctx.deleted()
1309 1309 if default or id:
1310 1310 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1311 1311 (changed) and "+" or "")]
1312 1312 if num:
1313 1313 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1314 1314 (changed) and "+" or ""))
1315 1315 else:
1316 1316 ctx = repo.changectx(rev)
1317 1317 if default or id:
1318 1318 output = [hexfunc(ctx.node())]
1319 1319 if num:
1320 1320 output.append(str(ctx.rev()))
1321 1321
1322 1322 if not source and default and not ui.quiet:
1323 1323 b = util.tolocal(ctx.branch())
1324 1324 if b != 'default':
1325 1325 output.append("(%s)" % b)
1326 1326
1327 1327 # multiple tags for a single parent separated by '/'
1328 1328 t = "/".join(ctx.tags())
1329 1329 if t:
1330 1330 output.append(t)
1331 1331
1332 1332 if branch:
1333 1333 output.append(util.tolocal(ctx.branch()))
1334 1334
1335 1335 if tags:
1336 1336 output.extend(ctx.tags())
1337 1337
1338 1338 ui.write("%s\n" % ' '.join(output))
1339 1339
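# Sketch of the identify output assembly above: parent hashes joined with
# '+', plus a trailing '+' when the working directory has uncommitted
# changes.
def format_identity(parent_hashes, dirty):
    return '+'.join(parent_hashes) + (dirty and '+' or '')

# format_identity(['9ed65758'], True) -> '9ed65758+'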
1340 1340 def import_(ui, repo, patch1, *patches, **opts):
1341 1341 """import an ordered set of patches
1342 1342
1343 1343 Import a list of patches and commit them individually.
1344 1344
1345 1345 If there are outstanding changes in the working directory, import
1346 1346 will abort unless given the -f flag.
1347 1347
1348 1348 You can import a patch straight from a mail message. Even patches
1349 1349 attached to the message work (the body part must be of type
1350 1350 text/plain or text/x-patch to be used). The From and Subject headers
1351 1351 of the email message are used as the default committer and commit
1352 1352 message. All text/plain body parts before the first diff are added
1353 1353 to the commit message.
1354 1354
1355 1355 If the imported patch was generated by hg export, user and description
1356 1356 from patch override values from message headers and body. Values
1357 1357 given on command line with -m and -u override these.
1358 1358
1359 1359 If --exact is specified, import will set the working directory
1360 1360 to the parent of each patch before applying it, and will abort
1361 1361 if the resulting changeset has a different ID than the one
1362 1362 recorded in the patch. This may happen due to character set
1363 1363 problems or other deficiencies in the text patch format.
1364 1364
1365 1365 To read a patch from standard input, use patch name "-".
1366 1366 """
1367 1367 patches = (patch1,) + patches
1368 1368
1369 1369 if opts.get('exact') or not opts['force']:
1370 1370 cmdutil.bail_if_changed(repo)
1371 1371
1372 1372 d = opts["base"]
1373 1373 strip = opts["strip"]
1374 1374 wlock = lock = None
1375 1375 try:
1376 1376 wlock = repo.wlock()
1377 1377 lock = repo.lock()
1378 1378 for p in patches:
1379 1379 pf = os.path.join(d, p)
1380 1380
1381 1381 if pf == '-':
1382 1382 ui.status(_("applying patch from stdin\n"))
1383 1383 data = patch.extract(ui, sys.stdin)
1384 1384 else:
1385 1385 ui.status(_("applying %s\n") % p)
1386 1386 if os.path.exists(pf):
1387 1387 data = patch.extract(ui, file(pf, 'rb'))
1388 1388 else:
1389 1389 data = patch.extract(ui, urllib.urlopen(pf))
1390 1390 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1391 1391
1392 1392 if tmpname is None:
1393 1393 raise util.Abort(_('no diffs found'))
1394 1394
1395 1395 try:
1396 1396 cmdline_message = cmdutil.logmessage(opts)
1397 1397 if cmdline_message:
1398 1398 # pickup the cmdline msg
1399 1399 message = cmdline_message
1400 1400 elif message:
1401 1401 # pickup the patch msg
1402 1402 message = message.strip()
1403 1403 else:
1404 1404 # launch the editor
1405 1405 message = None
1406 1406 ui.debug(_('message:\n%s\n') % message)
1407 1407
1408 1408 wp = repo.workingctx().parents()
1409 1409 if opts.get('exact'):
1410 1410 if not nodeid or not p1:
1411 1411 raise util.Abort(_('not a mercurial patch'))
1412 1412 p1 = repo.lookup(p1)
1413 1413 p2 = repo.lookup(p2 or hex(nullid))
1414 1414
1415 1415 if p1 != wp[0].node():
1416 1416 hg.clean(repo, p1)
1417 1417 repo.dirstate.setparents(p1, p2)
1418 1418 elif p2:
1419 1419 try:
1420 1420 p1 = repo.lookup(p1)
1421 1421 p2 = repo.lookup(p2)
1422 1422 if p1 == wp[0].node():
1423 1423 repo.dirstate.setparents(p1, p2)
1424 1424 except hg.RepoError:
1425 1425 pass
1426 1426 if opts.get('exact') or opts.get('import_branch'):
1427 1427 repo.dirstate.setbranch(branch or 'default')
1428 1428
1429 1429 files = {}
1430 1430 try:
1431 1431 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1432 1432 files=files)
1433 1433 finally:
1434 1434 files = patch.updatedir(ui, repo, files)
1435 1435 n = repo.commit(files, message, user, date)
1436 1436 if opts.get('exact'):
1437 1437 if hex(n) != nodeid:
1438 1438 repo.rollback()
1439 1439 raise util.Abort(_('patch is damaged'
1440 1440 ' or loses information'))
1441 1441 finally:
1442 1442 os.unlink(tmpname)
1443 1443 finally:
1444 1444 del lock, wlock
1445 1445
1446 1446 def incoming(ui, repo, source="default", **opts):
1447 1447 """show new changesets found in source
1448 1448
1449 1449 Show new changesets found in the specified path/URL or the default
1450 1450 pull location. These are the changesets that would be pulled if a pull
1451 1451 was requested.
1452 1452
1453 1453 For a remote repository, using --bundle avoids downloading the changesets
1454 1454 twice if the incoming is followed by a pull.
1455 1455
1456 1456 See pull for valid source format details.
1457 1457 """
1458 1458 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1459 1459 cmdutil.setremoteconfig(ui, opts)
1460 1460
1461 1461 other = hg.repository(ui, source)
1462 1462 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1463 1463 if revs:
1464 1464 revs = [other.lookup(rev) for rev in revs]
1465 1465 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1466 1466 if not incoming:
1467 1467 try:
1468 1468 os.unlink(opts["bundle"])
1469 1469 except:
1470 1470 pass
1471 1471 ui.status(_("no changes found\n"))
1472 1472 return 1
1473 1473
1474 1474 cleanup = None
1475 1475 try:
1476 1476 fname = opts["bundle"]
1477 1477 if fname or not other.local():
1478 1478 # create a bundle (uncompressed if other repo is not local)
1479 1479 if revs is None:
1480 1480 cg = other.changegroup(incoming, "incoming")
1481 1481 else:
1482 1482 cg = other.changegroupsubset(incoming, revs, 'incoming')
1483 1483 bundletype = other.local() and "HG10BZ" or "HG10UN"
1484 1484 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1485 1485 # keep written bundle?
1486 1486 if opts["bundle"]:
1487 1487 cleanup = None
1488 1488 if not other.local():
1489 1489 # use the created uncompressed bundlerepo
1490 1490 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1491 1491
1492 1492 o = other.changelog.nodesbetween(incoming, revs)[0]
1493 1493 if opts['newest_first']:
1494 1494 o.reverse()
1495 1495 displayer = cmdutil.show_changeset(ui, other, opts)
1496 1496 for n in o:
1497 1497 parents = [p for p in other.changelog.parents(n) if p != nullid]
1498 1498 if opts['no_merges'] and len(parents) == 2:
1499 1499 continue
1500 1500 displayer.show(changenode=n)
1501 1501 finally:
1502 1502 if hasattr(other, 'close'):
1503 1503 other.close()
1504 1504 if cleanup:
1505 1505 os.unlink(cleanup)
1506 1506
1507 1507 def init(ui, dest=".", **opts):
1508 1508 """create a new repository in the given directory
1509 1509
1510 1510 Initialize a new repository in the given directory. If the given
1511 1511 directory does not exist, it is created.
1512 1512
1513 1513 If no directory is given, the current directory is used.
1514 1514
1515 1515 It is possible to specify an ssh:// URL as the destination.
1516 1516 Look at the help text for the pull command for important details
1517 1517 about ssh:// URLs.
1518 1518 """
1519 1519 cmdutil.setremoteconfig(ui, opts)
1520 1520 hg.repository(ui, dest, create=1)
1521 1521
1522 1522 def locate(ui, repo, *pats, **opts):
1523 1523 """locate files matching specific patterns
1524 1524
1525 1525 Print all files under Mercurial control whose names match the
1526 1526 given patterns.
1527 1527
1528 1528 This command searches the entire repository by default. To search
1529 1529 just the current directory and its subdirectories, use
1530 1530 "--include .".
1531 1531
1532 1532 If no patterns are given to match, this command prints all file
1533 1533 names.
1534 1534
1535 1535 If you want to feed the output of this command into the "xargs"
1536 1536 command, use the "-0" option to both this command and "xargs".
1537 1537 This will avoid the problem of "xargs" treating single filenames
1538 1538 that contain white space as multiple filenames.
1539 1539 """
1540 1540 end = opts['print0'] and '\0' or '\n'
1541 1541 rev = opts['rev']
1542 1542 if rev:
1543 1543 node = repo.lookup(rev)
1544 1544 else:
1545 1545 node = None
1546 1546
1547 1547 ret = 1
1548 1548 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1549 1549 badmatch=util.always,
1550 1550 default='relglob'):
1551 1551 if src == 'b':
1552 1552 continue
1553 1553 if not node and abs not in repo.dirstate:
1554 1554 continue
1555 1555 if opts['fullpath']:
1556 1556 ui.write(os.path.join(repo.root, abs), end)
1557 1557 else:
1558 1558 ui.write(((pats and rel) or abs), end)
1559 1559 ret = 0
1560 1560
1561 1561 return ret
1562 1562
1563 1563 def log(ui, repo, *pats, **opts):
1564 1564 """show revision history of entire repository or files
1565 1565
1566 1566 Print the revision history of the specified files or the entire
1567 1567 project.
1568 1568
1569 1569 File history is shown without following rename or copy history of
1570 1570 files. Use -f/--follow with a file name to follow history across
1571 1571 renames and copies. --follow without a file name will only show
1572 1572 ancestors or descendants of the starting revision. --follow-first
1573 1573 only follows the first parent of merge revisions.
1574 1574
1575 1575 If no revision range is specified, the default is tip:0 unless
1576 1576 --follow is set, in which case the working directory parent is
1577 1577 used as the starting revision.
1578 1578
1579 1579 By default this command outputs: changeset id and hash, tags,
1580 1580 non-trivial parents, user, date and time, and a summary for each
1581 1581 commit. When the -v/--verbose switch is used, the list of changed
1582 1582 files and full commit message is shown.
1583 1583
1584 1584 NOTE: log -p may generate unexpected diff output for merge
1585 1585 changesets, as it will compare the merge changeset against its
1586 1586 first parent only. Also, the files: list will only reflect files
1587 1587 that are different from BOTH parents.
1588 1588
1589 1589 """
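# Illustrative invocations of the options described above (file names,
# keywords and limits are hypothetical):
#   hg log -l 10              (the ten most recent changesets)
#   hg log -k crash           (changesets mentioning 'crash' in user, file list or description)
#   hg log -f -p somefile.c   (history of somefile.c across renames, with diffs)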
1590 1590
1591 1591 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1592 1592 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1593 1593
1594 1594 if opts['limit']:
1595 1595 try:
1596 1596 limit = int(opts['limit'])
1597 1597 except ValueError:
1598 1598 raise util.Abort(_('limit must be a positive integer'))
1599 1599 if limit <= 0: raise util.Abort(_('limit must be positive'))
1600 1600 else:
1601 1601 limit = sys.maxint
1602 1602 count = 0
1603 1603
1604 1604 if opts['copies'] and opts['rev']:
1605 1605 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1606 1606 else:
1607 1607 endrev = repo.changelog.count()
1608 1608 rcache = {}
1609 1609 ncache = {}
1610 1610 dcache = []
1611 1611 def getrenamed(fn, rev, man):
1612 1612 '''looks up all renames for a file (up to endrev) the first
1613 1613 time the file is given. It indexes on the changerev and only
1614 1614 parses the manifest if linkrev != changerev.
1615 1615 Returns rename info for fn at changerev rev.'''
1616 1616 if fn not in rcache:
1617 1617 rcache[fn] = {}
1618 1618 ncache[fn] = {}
1619 1619 fl = repo.file(fn)
1620 1620 for i in xrange(fl.count()):
1621 1621 node = fl.node(i)
1622 1622 lr = fl.linkrev(node)
1623 1623 renamed = fl.renamed(node)
1624 1624 rcache[fn][lr] = renamed
1625 1625 if renamed:
1626 1626 ncache[fn][node] = renamed
1627 1627 if lr >= endrev:
1628 1628 break
1629 1629 if rev in rcache[fn]:
1630 1630 return rcache[fn][rev]
1631 1631 mr = repo.manifest.rev(man)
1632 1632 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1633 1633 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1634 1634 if not dcache or dcache[0] != man:
1635 1635 dcache[:] = [man, repo.manifest.readdelta(man)]
1636 1636 if fn in dcache[1]:
1637 1637 return ncache[fn].get(dcache[1][fn])
1638 1638 return None
1639 1639
1640 1640 df = False
1641 1641 if opts["date"]:
1642 1642 df = util.matchdate(opts["date"])
1643 1643
1644 1644 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1645 1645 for st, rev, fns in changeiter:
1646 1646 if st == 'add':
1647 1647 changenode = repo.changelog.node(rev)
1648 1648 parents = [p for p in repo.changelog.parentrevs(rev)
1649 1649 if p != nullrev]
1650 1650 if opts['no_merges'] and len(parents) == 2:
1651 1651 continue
1652 1652 if opts['only_merges'] and len(parents) != 2:
1653 1653 continue
1654 1654
1655 1655 if df:
1656 1656 changes = get(rev)
1657 1657 if not df(changes[2][0]):
1658 1658 continue
1659 1659
1660 1660 if opts['keyword']:
1661 1661 changes = get(rev)
1662 1662 miss = 0
1663 1663 for k in [kw.lower() for kw in opts['keyword']]:
1664 1664 if not (k in changes[1].lower() or
1665 1665 k in changes[4].lower() or
1666 1666 k in " ".join(changes[3]).lower()):
1667 1667 miss = 1
1668 1668 break
1669 1669 if miss:
1670 1670 continue
1671 1671
1672 1672 copies = []
1673 1673 if opts.get('copies') and rev:
1674 1674 mf = get(rev)[0]
1675 1675 for fn in get(rev)[3]:
1676 1676 rename = getrenamed(fn, rev, mf)
1677 1677 if rename:
1678 1678 copies.append((fn, rename[0]))
1679 1679 displayer.show(rev, changenode, copies=copies)
1680 1680 elif st == 'iter':
1681 1681 if count == limit: break
1682 1682 if displayer.flush(rev):
1683 1683 count += 1
1684 1684
1685 1685 def manifest(ui, repo, node=None, rev=None):
1686 1686 """output the current or given revision of the project manifest
1687 1687
1688 1688 Print a list of version controlled files for the given revision.
1689 1689 If no revision is given, the parent of the working directory is used,
1690 1690 or tip if no revision is checked out.
1691 1691
1692 1692 The manifest is the list of files being version controlled.
1694 1694
1695 1695 With -v flag, print file permissions, symlink and executable bits. With
1696 1696 --debug flag, print file revision hashes.
1697 1697 """
1698 1698
1699 1699 if rev and node:
1700 1700 raise util.Abort(_("please specify just one revision"))
1701 1701
1702 1702 if not node:
1703 1703 node = rev
1704 1704
1705 1705 m = repo.changectx(node).manifest()
1706 1706 files = m.keys()
1707 1707 files.sort()
1708 1708
1709 1709 for f in files:
1710 1710 if ui.debugflag:
1711 1711 ui.write("%40s " % hex(m[f]))
1712 1712 if ui.verbose:
1713 1713 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1714 1714 perm = m.execf(f) and "755" or "644"
1715 1715 ui.write("%3s %1s " % (perm, type))
1716 1716 ui.write("%s\n" % f)
1717 1717
1718 1718 def merge(ui, repo, node=None, force=None, rev=None):
1719 1719 """merge working directory with another revision
1720 1720
1721 1721 Merge the contents of the current working directory and the
1722 1722 requested revision. Files that changed between either parent are
1723 1723 marked as changed for the next commit and a commit must be
1724 1724 performed before any further updates are allowed.
1725 1725
1726 1726 If no revision is specified, the working directory's parent is a
1727 1727 head revision, and the repository contains exactly one other head,
1728 1728 the working directory is merged with that other head by default.
1729 1729 Otherwise, an explicit revision to merge with must be provided.
1730 1730 """
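# Illustrative sequence for the two-head case described above (the revision
# number is hypothetical):
#   hg merge             (merge the only other head into the working directory)
#   hg merge -r 1723     (or name the revision explicitly when there are more heads)
#   hg commit -m merged  (the merge must be committed before further updates)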
1731 1731
1732 1732 if rev and node:
1733 1733 raise util.Abort(_("please specify just one revision"))
1734 1734 if not node:
1735 1735 node = rev
1736 1736
1737 1737 if not node:
1738 1738 heads = repo.heads()
1739 1739 if len(heads) > 2:
1740 1740 raise util.Abort(_('repo has %d heads - '
1741 1741 'please merge with an explicit rev') %
1742 1742 len(heads))
1743 1743 parent = repo.dirstate.parents()[0]
1744 1744 if len(heads) == 1:
1745 1745 msg = _('there is nothing to merge')
1746 1746 if parent != repo.lookup(repo.workingctx().branch()):
1747 1747 msg = _('%s - use "hg update" instead') % msg
1748 1748 raise util.Abort(msg)
1749 1749
1750 1750 if parent not in heads:
1751 1751 raise util.Abort(_('working dir not at a head rev - '
1752 1752 'use "hg update" or merge with an explicit rev'))
1753 1753 node = parent == heads[0] and heads[-1] or heads[0]
1754 1754 return hg.merge(repo, node, force=force)
1755 1755
1756 1756 def outgoing(ui, repo, dest=None, **opts):
1757 1757 """show changesets not found in destination
1758 1758
1759 1759 Show changesets not found in the specified destination repository or
1760 1760 the default push location. These are the changesets that would be pushed
1761 1761 if a push was requested.
1762 1762
1763 1763 See pull for valid destination format details.
1764 1764 """
1765 1765 dest, revs, checkout = hg.parseurl(
1766 1766 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1767 1767 cmdutil.setremoteconfig(ui, opts)
1768 1768 if revs:
1769 1769 revs = [repo.lookup(rev) for rev in revs]
1770 1770
1771 1771 other = hg.repository(ui, dest)
1772 1772 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1773 1773 o = repo.findoutgoing(other, force=opts['force'])
1774 1774 if not o:
1775 1775 ui.status(_("no changes found\n"))
1776 1776 return 1
1777 1777 o = repo.changelog.nodesbetween(o, revs)[0]
1778 1778 if opts['newest_first']:
1779 1779 o.reverse()
1780 1780 displayer = cmdutil.show_changeset(ui, repo, opts)
1781 1781 for n in o:
1782 1782 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1783 1783 if opts['no_merges'] and len(parents) == 2:
1784 1784 continue
1785 1785 displayer.show(changenode=n)
1786 1786
1787 1787 def parents(ui, repo, file_=None, **opts):
1788 1788 """show the parents of the working dir or revision
1789 1789
1790 1790 Print the working directory's parent revisions. If a
1791 1791 revision is given via --rev, the parent of that revision
1792 1792 will be printed. If a file argument is given, the revision in
1793 1793 which the file was last changed (before the working directory
1794 1794 revision or the argument to --rev if given) is printed.
1795 1795 """
1796 1796 rev = opts.get('rev')
1797 1797 if rev:
1798 1798 ctx = repo.changectx(rev)
1799 1799 else:
1800 1800 ctx = repo.workingctx()
1801 1801
1802 1802 if file_:
1803 1803 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1804 1804 if anypats or len(files) != 1:
1805 1805 raise util.Abort(_('can only specify an explicit file name'))
1806 1806 file_ = files[0]
1807 1807 filenodes = []
1808 1808 for cp in ctx.parents():
1809 1809 if not cp:
1810 1810 continue
1811 1811 try:
1812 1812 filenodes.append(cp.filenode(file_))
1813 1813 except revlog.LookupError:
1814 1814 pass
1815 1815 if not filenodes:
1816 1816 raise util.Abort(_("'%s' not found in manifest!") % file_)
1817 1817 fl = repo.file(file_)
1818 1818 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1819 1819 else:
1820 1820 p = [cp.node() for cp in ctx.parents()]
1821 1821
1822 1822 displayer = cmdutil.show_changeset(ui, repo, opts)
1823 1823 for n in p:
1824 1824 if n != nullid:
1825 1825 displayer.show(changenode=n)
1826 1826
1827 1827 def paths(ui, repo, search=None):
1828 1828 """show definition of symbolic path names
1829 1829
1830 1830 Show definition of symbolic path name NAME. If no name is given, show
1831 1831 definition of available names.
1832 1832
1833 1833 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1834 1834 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1835 1835 """
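# Illustrative [paths] section for an hgrc file; the names and URLs are
# hypothetical and only echo the behaviour described above:
#   [paths]
#   default = http://example.com/hg/project
#   stable  = ssh://hg@example.com/project-stable
# "hg paths" would list both entries, "hg paths stable" just the second URL.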
1836 1836 if search:
1837 1837 for name, path in ui.configitems("paths"):
1838 1838 if name == search:
1839 1839 ui.write("%s\n" % path)
1840 1840 return
1841 1841 ui.warn(_("not found!\n"))
1842 1842 return 1
1843 1843 else:
1844 1844 for name, path in ui.configitems("paths"):
1845 1845 ui.write("%s = %s\n" % (name, path))
1846 1846
1847 1847 def postincoming(ui, repo, modheads, optupdate, checkout):
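# Shared helper for pull/unbundle: modheads is the number of heads added by
# the preceding operation (0 means nothing new), optupdate is the caller's
# -u/--update flag, and checkout is an optional revision to update to.
# An update is only attempted when at most one head was added or an explicit
# checkout revision is known; otherwise hints are printed instead.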
1848 1848 if modheads == 0:
1849 1849 return
1850 1850 if optupdate:
1851 1851 if modheads <= 1 or checkout:
1852 1852 return hg.update(repo, checkout)
1853 1853 else:
1854 1854 ui.status(_("not updating, since new heads added\n"))
1855 1855 if modheads > 1:
1856 1856 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1857 1857 else:
1858 1858 ui.status(_("(run 'hg update' to get a working copy)\n"))
1859 1859
1860 1860 def pull(ui, repo, source="default", **opts):
1861 1861 """pull changes from the specified source
1862 1862
1863 1863 Pull changes from a remote repository to a local one.
1864 1864
1865 1865 This finds all changes from the repository at the specified path
1866 1866 or URL and adds them to the local repository. By default, this
1867 1867 does not update the copy of the project in the working directory.
1868 1868
1869 1869 Valid URLs are of the form:
1870 1870
1871 1871 local/filesystem/path (or file://local/filesystem/path)
1872 1872 http://[user@]host[:port]/[path]
1873 1873 https://[user@]host[:port]/[path]
1874 1874 ssh://[user@]host[:port]/[path]
1875 1875 static-http://host[:port]/[path]
1876 1876
1877 1877 Paths in the local filesystem can either point to Mercurial
1878 1878 repositories or to bundle files (as created by 'hg bundle' or
1879 1879 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1880 1880 allows access to a Mercurial repository where you simply use a web
1881 1881 server to publish the .hg directory as static content.
1882 1882
1883 1883 An optional identifier after # indicates a particular branch, tag,
1884 1884 or changeset to pull.
1885 1885
1886 1886 Some notes about using SSH with Mercurial:
1887 1887 - SSH requires an accessible shell account on the destination machine
1888 1888 and a copy of hg in the remote path, or one specified with the --remotecmd option.
1889 1889 - path is relative to the remote user's home directory by default.
1890 1890 Use an extra slash at the start of a path to specify an absolute path:
1891 1891 ssh://example.com//tmp/repository
1892 1892 - Mercurial doesn't use its own compression via SSH; the right thing
1893 1893 to do is to configure it in your ~/.ssh/config, e.g.:
1894 1894 Host *.mylocalnetwork.example.com
1895 1895 Compression no
1896 1896 Host *
1897 1897 Compression yes
1898 1898 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1899 1899 with the --ssh command line option.
1900 1900 """
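# Illustrative invocations of the URL forms described above; host names,
# paths and identifiers are hypothetical:
#   hg pull                                      (pull from the 'default' path)
#   hg pull -u ssh://user@example.com//srv/repo  (absolute remote path, then update)
#   hg pull http://example.com/hg/proj#stable    (pull up to the 'stable' head)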
1901 1901 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1902 1902 cmdutil.setremoteconfig(ui, opts)
1903 1903
1904 1904 other = hg.repository(ui, source)
1905 1905 ui.status(_('pulling from %s\n') % util.hidepassword(source))
1906 1906 if revs:
1907 1907 try:
1908 1908 revs = [other.lookup(rev) for rev in revs]
1909 1909 except repo.NoCapability:
1910 1910 error = _("Other repository doesn't support revision lookup, "
1911 1911 "so a rev cannot be specified.")
1912 1912 raise util.Abort(error)
1913 1913
1914 1914 modheads = repo.pull(other, heads=revs, force=opts['force'])
1915 1915 return postincoming(ui, repo, modheads, opts['update'], checkout)
1916 1916
1917 1917 def push(ui, repo, dest=None, **opts):
1918 1918 """push changes to the specified destination
1919 1919
1920 1920 Push changes from the local repository to the given destination.
1921 1921
1922 1922 This is the symmetrical operation for pull. It helps to move
1923 1923 changes from the current repository to a different one. If the
1924 1924 destination is local this is identical to a pull in that directory
1925 1925 from the current one.
1926 1926
1927 1927 By default, push will refuse to run if it detects the result would
1928 1928 increase the number of remote heads. This generally indicates that
1929 1929 the client has forgotten to pull and merge before pushing.
1930 1930
1931 1931 Valid URLs are of the form:
1932 1932
1933 1933 local/filesystem/path (or file://local/filesystem/path)
1934 1934 ssh://[user@]host[:port]/[path]
1935 1935 http://[user@]host[:port]/[path]
1936 1936 https://[user@]host[:port]/[path]
1937 1937
1938 1938 An optional identifier after # indicates a particular branch, tag,
1939 1939 or changeset to push.
1940 1940
1941 1941 Look at the help text for the pull command for important details
1942 1942 about ssh:// URLs.
1943 1943
1944 1944 Pushing to http:// and https:// URLs is only possible if this
1945 1945 feature is explicitly enabled on the remote Mercurial server.
1946 1946 """
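# Illustrative invocations matching the description above; destinations and
# revisions are hypothetical:
#   hg push                                   (push to the 'default-push' or 'default' path)
#   hg push -r 1.0 ssh://hg@example.com/proj  (push only ancestors of tag/rev 1.0)
# If the push would add new remote heads, pull and merge first, or use -f to force.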
1947 1947 dest, revs, checkout = hg.parseurl(
1948 1948 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1949 1949 cmdutil.setremoteconfig(ui, opts)
1950 1950
1951 1951 other = hg.repository(ui, dest)
1952 1952 ui.status('pushing to %s\n' % util.hidepassword(dest))
1953 1953 if revs:
1954 1954 revs = [repo.lookup(rev) for rev in revs]
1955 1955 r = repo.push(other, opts['force'], revs=revs)
1956 1956 return r == 0
1957 1957
1958 1958 def rawcommit(ui, repo, *pats, **opts):
1959 1959 """raw commit interface (DEPRECATED)
1960 1960
1961 1961 (DEPRECATED)
1962 1962 Lowlevel commit, for use in helper scripts.
1963 1963
1964 1964 This command is not intended to be used by normal users, as it is
1965 1965 primarily useful for importing from other SCMs.
1966 1966
1967 1967 This command is now deprecated and will be removed in a future
1968 1968 release; please use debugsetparents and commit instead.
1969 1969 """
1970 1970
1971 1971 ui.warn(_("(the rawcommit command is deprecated)\n"))
1972 1972
1973 1973 message = cmdutil.logmessage(opts)
1974 1974
1975 1975 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1976 1976 if opts['files']:
1977 1977 files += open(opts['files']).read().splitlines()
1978 1978
1979 1979 parents = [repo.lookup(p) for p in opts['parent']]
1980 1980
1981 1981 try:
1982 1982 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1983 1983 except ValueError, inst:
1984 1984 raise util.Abort(str(inst))
1985 1985
1986 1986 def recover(ui, repo):
1987 1987 """roll back an interrupted transaction
1988 1988
1989 1989 Recover from an interrupted commit or pull.
1990 1990
1991 1991 This command tries to fix the repository status after an interrupted
1992 1992 operation. It should only be necessary when Mercurial suggests it.
1993 1993 """
1994 1994 if repo.recover():
1995 1995 return hg.verify(repo)
1996 1996 return 1
1997 1997
1998 1998 def remove(ui, repo, *pats, **opts):
1999 1999 """remove the specified files on the next commit
2000 2000
2001 2001 Schedule the indicated files for removal from the repository.
2002 2002
2003 2003 This only removes files from the current branch, not from the
2004 2004 entire project history. If the files still exist in the working
2005 2005 directory, they will be deleted from it. If invoked with --after,
2006 2006 files are marked as removed, but not actually unlinked unless --force
2007 2007 is also given. Without exact file names, --after will only mark
2008 2008 files as removed if they are no longer in the working directory.
2009 2009
2010 2010 This command schedules the files to be removed at the next commit.
2011 2011 To undo a remove before that, see hg revert.
2012 2012
2013 2013 Modified files and added files are not removed by default. To
2014 2014 remove them, use the -f/--force option.
2015 2015 """
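# Illustrative usage of the flags discussed above (file names are hypothetical):
#   hg remove foo.c           (delete foo.c and schedule its removal)
#   rm bar.c && hg remove -A  (record removal of files already deleted from disk)
#   hg remove -f changed.c    (force removal of a locally modified file)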
2016 2016 if not opts['after'] and not pats:
2017 2017 raise util.Abort(_('no files specified'))
2018 2018 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2019 2019 exact = dict.fromkeys(files)
2020 2020 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2021 2021 modified, added, removed, deleted, unknown = mardu
2022 2022 remove, forget = [], []
2023 2023 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2024 2024 reason = None
2025 2025 if abs in modified and not opts['force']:
2026 2026 reason = _('is modified (use -f to force removal)')
2027 2027 elif abs in added:
2028 2028 if opts['force']:
2029 2029 forget.append(abs)
2030 2030 continue
2031 2031 reason = _('has been marked for add (use -f to force removal)')
2032 exact = 1 # force the message
2032 2033 elif abs not in repo.dirstate:
2033 2034 reason = _('is not managed')
2034 2035 elif opts['after'] and not exact and abs not in deleted:
2035 2036 continue
2036 2037 elif abs in removed:
2037 2038 continue
2038 2039 if reason:
2039 2040 if exact:
2040 2041 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2041 2042 else:
2042 2043 if ui.verbose or not exact:
2043 2044 ui.status(_('removing %s\n') % rel)
2044 2045 remove.append(abs)
2045 2046 repo.forget(forget)
2046 2047 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2047 2048
2048 2049 def rename(ui, repo, *pats, **opts):
2049 2050 """rename files; equivalent of copy + remove
2050 2051
2051 2052 Mark dest as copies of sources; mark sources for deletion. If
2052 2053 dest is a directory, copies are put in that directory. If dest is
2053 2054 a file, there can only be one source.
2054 2055
2055 2056 By default, this command copies the contents of files as they
2056 2057 stand in the working directory. If invoked with --after, the
2057 2058 operation is recorded, but no copying is performed.
2058 2059
2059 2060 This command takes effect in the next commit. To undo a rename
2060 2061 before that, see hg revert.
2061 2062 """
2062 2063 wlock = repo.wlock(False)
2063 2064 try:
2064 2065 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2065 2066 finally:
2066 2067 del wlock
2067 2068
2068 2069 def revert(ui, repo, *pats, **opts):
2069 2070 """restore individual files or dirs to an earlier state
2070 2071
2071 2072 (use update -r to check out earlier revisions; revert does not
2072 2073 change the working dir parents)
2073 2074
2074 2075 With no revision specified, revert the named files or directories
2075 2076 to the contents they had in the parent of the working directory.
2076 2077 This restores the contents of the affected files to an unmodified
2077 2078 state and unschedules adds, removes, copies, and renames. If the
2078 2079 working directory has two parents, you must explicitly specify the
2079 2080 revision to revert to.
2080 2081
2081 2082 Using the -r option, revert the given files or directories to their
2082 2083 contents as of a specific revision. This can be helpful to "roll
2083 2084 back" some or all of an earlier change.
2084 2085
2085 2086 Revert modifies the working directory. It does not commit any
2086 2087 changes, or change the parent of the working directory. If you
2087 2088 revert to a revision other than the parent of the working
2088 2089 directory, the reverted files will thus appear modified
2089 2090 afterwards.
2090 2091
2091 2092 If a file has been deleted, it is restored. If the executable
2092 2093 mode of a file was changed, it is reset.
2093 2094
2094 2095 If names are given, all files matching the names are reverted.
2095 2096
2096 2097 If no arguments are given, no files are reverted.
2097 2098
2098 2099 Modified files are saved with a .orig suffix before reverting.
2099 2100 To disable these backups, use --no-backup.
2100 2101 """
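# Illustrative invocations of the behaviour described above (names and
# revisions are hypothetical):
#   hg revert foo.c              (restore foo.c to the working dir's parent)
#   hg revert -r 20 foo.c        (restore foo.c as of revision 20)
#   hg revert --all --no-backup  (revert everything, without .orig backups)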
2101 2102
2102 2103 if opts["date"]:
2103 2104 if opts["rev"]:
2104 2105 raise util.Abort(_("you can't specify a revision and a date"))
2105 2106 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2106 2107
2107 2108 if not pats and not opts['all']:
2108 2109 raise util.Abort(_('no files or directories specified; '
2109 2110 'use --all to revert the whole repo'))
2110 2111
2111 2112 parent, p2 = repo.dirstate.parents()
2112 2113 if not opts['rev'] and p2 != nullid:
2113 2114 raise util.Abort(_('uncommitted merge - please provide a '
2114 2115 'specific revision'))
2115 2116 ctx = repo.changectx(opts['rev'])
2116 2117 node = ctx.node()
2117 2118 mf = ctx.manifest()
2118 2119 if node == parent:
2119 2120 pmf = mf
2120 2121 else:
2121 2122 pmf = None
2122 2123
2123 2124 # need all matching names in dirstate and manifest of target rev,
2124 2125 # so have to walk both. do not print errors if files exist in one
2125 2126 # but not other.
2126 2127
2127 2128 names = {}
2128 2129 target_only = {}
2129 2130
2130 2131 wlock = repo.wlock()
2131 2132 try:
2132 2133 # walk dirstate.
2133 2134 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2134 2135 badmatch=mf.has_key):
2135 2136 names[abs] = (rel, exact)
2136 2137 if src == 'b':
2137 2138 target_only[abs] = True
2138 2139
2139 2140 # walk target manifest.
2140 2141
2141 2142 def badmatch(path):
2142 2143 if path in names:
2143 2144 return True
2144 2145 path_ = path + '/'
2145 2146 for f in names:
2146 2147 if f.startswith(path_):
2147 2148 return True
2148 2149 return False
2149 2150
2150 2151 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2151 2152 badmatch=badmatch):
2152 2153 if abs in names or src == 'b':
2153 2154 continue
2154 2155 names[abs] = (rel, exact)
2155 2156 target_only[abs] = True
2156 2157
2157 2158 changes = repo.status(match=names.has_key)[:5]
2158 2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2159 2160
2160 2161 # if f is a rename, also revert the source
2161 2162 cwd = repo.getcwd()
2162 2163 for f in added:
2163 2164 src = repo.dirstate.copied(f)
2164 2165 if src and src not in names and repo.dirstate[src] == 'r':
2165 2166 removed[src] = None
2166 2167 names[src] = (repo.pathto(src, cwd), True)
2167 2168
2168 2169 revert = ([], _('reverting %s\n'))
2169 2170 add = ([], _('adding %s\n'))
2170 2171 remove = ([], _('removing %s\n'))
2171 2172 forget = ([], _('forgetting %s\n'))
2172 2173 undelete = ([], _('undeleting %s\n'))
2173 2174 update = {}
2174 2175
2175 2176 disptable = (
2176 2177 # dispatch table:
2177 2178 # file state
2178 2179 # action if in target manifest
2179 2180 # action if not in target manifest
2180 2181 # make backup if in target manifest
2181 2182 # make backup if not in target manifest
2182 2183 (modified, revert, remove, True, True),
2183 2184 (added, revert, forget, True, False),
2184 2185 (removed, undelete, None, False, False),
2185 2186 (deleted, revert, remove, False, False),
2186 2187 (unknown, add, None, True, False),
2187 2188 (target_only, add, None, False, False),
2188 2189 )
2189 2190
2190 2191 entries = names.items()
2191 2192 entries.sort()
2192 2193
2193 2194 for abs, (rel, exact) in entries:
2194 2195 mfentry = mf.get(abs)
2195 2196 target = repo.wjoin(abs)
2196 2197 def handle(xlist, dobackup):
2197 2198 xlist[0].append(abs)
2198 2199 update[abs] = 1
2199 2200 if dobackup and not opts['no_backup'] and util.lexists(target):
2200 2201 bakname = "%s.orig" % rel
2201 2202 ui.note(_('saving current version of %s as %s\n') %
2202 2203 (rel, bakname))
2203 2204 if not opts.get('dry_run'):
2204 2205 util.copyfile(target, bakname)
2205 2206 if ui.verbose or not exact:
2206 2207 ui.status(xlist[1] % rel)
2207 2208 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2208 2209 if abs not in table: continue
2209 2210 # file has changed in dirstate
2210 2211 if mfentry:
2211 2212 handle(hitlist, backuphit)
2212 2213 elif misslist is not None:
2213 2214 handle(misslist, backupmiss)
2214 2215 else:
2215 2216 if exact: ui.warn(_('file not managed: %s\n') % rel)
2216 2217 break
2217 2218 else:
2218 2219 # file has not changed in dirstate
2219 2220 if node == parent:
2220 2221 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2221 2222 continue
2222 2223 if pmf is None:
2223 2224 # only need parent manifest in this unlikely case,
2224 2225 # so do not read by default
2225 2226 pmf = repo.changectx(parent).manifest()
2226 2227 if abs in pmf:
2227 2228 if mfentry:
2228 2229 # if version of file is same in parent and target
2229 2230 # manifests, do nothing
2230 2231 if pmf[abs] != mfentry:
2231 2232 handle(revert, False)
2232 2233 else:
2233 2234 handle(remove, False)
2234 2235
2235 2236 if not opts.get('dry_run'):
2236 2237 for f in forget[0]:
2237 2238 repo.dirstate.forget(f)
2238 2239 r = hg.revert(repo, node, update.has_key)
2239 2240 for f in add[0]:
2240 2241 repo.dirstate.add(f)
2241 2242 for f in undelete[0]:
2242 2243 repo.dirstate.normal(f)
2243 2244 for f in remove[0]:
2244 2245 repo.dirstate.remove(f)
2245 2246 return r
2246 2247 finally:
2247 2248 del wlock
2248 2249
2249 2250 def rollback(ui, repo):
2250 2251 """roll back the last transaction
2251 2252
2252 2253 This command should be used with care. There is only one level of
2253 2254 rollback, and there is no way to undo a rollback. It will also
2254 2255 restore the dirstate at the time of the last transaction, losing
2255 2256 any dirstate changes since that time.
2256 2257
2257 2258 Transactions are used to encapsulate the effects of all commands
2258 2259 that create new changesets or propagate existing changesets into a
2259 2260 repository. For example, the following commands are transactional,
2260 2261 and their effects can be rolled back:
2261 2262
2262 2263 commit
2263 2264 import
2264 2265 pull
2265 2266 push (with this repository as destination)
2266 2267 unbundle
2267 2268
2268 2269 This command is not intended for use on public repositories. Once
2269 2270 changes are visible for pull by other users, rolling a transaction
2270 2271 back locally is ineffective (someone else may already have pulled
2271 2272 the changes). Furthermore, a race is possible with readers of the
2272 2273 repository; for example an in-progress pull from the repository
2273 2274 may fail if a rollback is performed.
2274 2275 """
2275 2276 repo.rollback()
2276 2277
2277 2278 def root(ui, repo):
2278 2279 """print the root (top) of the current working dir
2279 2280
2280 2281 Print the root directory of the current repository.
2281 2282 """
2282 2283 ui.write(repo.root + "\n")
2283 2284
2284 2285 def serve(ui, repo, **opts):
2285 2286 """export the repository via HTTP
2286 2287
2287 2288 Start a local HTTP repository browser and pull server.
2288 2289
2289 2290 By default, the server logs accesses to stdout and errors to
2290 2291 stderr. Use the "-A" and "-E" options to log to files.
2291 2292 """
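# Illustrative invocation; the port, name and log file names are hypothetical:
#   hg serve -p 8080 -n "my project" -A access.log -E error.log
# serves the repository on port 8080 and logs accesses and errors to the two files.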
2292 2293
2293 2294 if opts["stdio"]:
2294 2295 if repo is None:
2295 2296 raise hg.RepoError(_("There is no Mercurial repository here"
2296 2297 " (.hg not found)"))
2297 2298 s = sshserver.sshserver(ui, repo)
2298 2299 s.serve_forever()
2299 2300
2300 2301 parentui = ui.parentui or ui
2301 2302 optlist = ("name templates style address port ipv6"
2302 2303 " accesslog errorlog webdir_conf certificate")
2303 2304 for o in optlist.split():
2304 2305 if opts[o]:
2305 2306 parentui.setconfig("web", o, str(opts[o]))
2306 2307 if (repo is not None) and (repo.ui != parentui):
2307 2308 repo.ui.setconfig("web", o, str(opts[o]))
2308 2309
2309 2310 if repo is None and not ui.config("web", "webdir_conf"):
2310 2311 raise hg.RepoError(_("There is no Mercurial repository here"
2311 2312 " (.hg not found)"))
2312 2313
2313 2314 class service:
2314 2315 def init(self):
2315 2316 util.set_signal_handler()
2316 2317 try:
2317 2318 self.httpd = hgweb.server.create_server(parentui, repo)
2318 2319 except socket.error, inst:
2319 2320 raise util.Abort(_('cannot start server: ') + inst.args[1])
2320 2321
2321 2322 if not ui.verbose: return
2322 2323
2323 2324 if self.httpd.port != 80:
2324 2325 ui.status(_('listening at http://%s:%d/\n') %
2325 2326 (self.httpd.addr, self.httpd.port))
2326 2327 else:
2327 2328 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2328 2329
2329 2330 def run(self):
2330 2331 self.httpd.serve_forever()
2331 2332
2332 2333 service = service()
2333 2334
2334 2335 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2335 2336
2336 2337 def status(ui, repo, *pats, **opts):
2337 2338 """show changed files in the working directory
2338 2339
2339 2340 Show status of files in the repository. If names are given, only
2340 2341 files that match are shown. Files that are clean or ignored are
2341 2342 not listed unless -c (clean), -i (ignored) or -A is given.
2342 2343
2343 2344 NOTE: status may appear to disagree with diff if permissions have
2344 2345 changed or a merge has occurred. The standard diff format does not
2345 2346 report permission changes and diff only reports changes relative
2346 2347 to one merge parent.
2347 2348
2348 2349 If one revision is given, it is used as the base revision.
2349 2350 If two revisions are given, the difference between them is shown.
2350 2351
2351 2352 The codes used to show the status of files are:
2352 2353 M = modified
2353 2354 A = added
2354 2355 R = removed
2355 2356 C = clean
2356 2357 ! = deleted, but still tracked
2357 2358 ? = not tracked
2358 2359 I = ignored (not shown by default)
2359 2360 = the previous added file was copied from here
2360 2361 """
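# Illustrative output for the codes listed above (file names are hypothetical):
#   M src/main.c     (modified)
#   A doc/notes.txt  (added)
#   R old/unused.c   (removed)
#   ? scratch.tmp    (not tracked)
# "hg status -an0" would instead print only added files, NUL-separated, for xargs.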
2361 2362
2362 2363 all = opts['all']
2363 2364 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2364 2365
2365 2366 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2366 2367 cwd = (pats and repo.getcwd()) or ''
2367 2368 modified, added, removed, deleted, unknown, ignored, clean = [
2368 2369 n for n in repo.status(node1=node1, node2=node2, files=files,
2369 2370 match=matchfn,
2370 2371 list_ignored=all or opts['ignored'],
2371 2372 list_clean=all or opts['clean'])]
2372 2373
2373 2374 changetypes = (('modified', 'M', modified),
2374 2375 ('added', 'A', added),
2375 2376 ('removed', 'R', removed),
2376 2377 ('deleted', '!', deleted),
2377 2378 ('unknown', '?', unknown),
2378 2379 ('ignored', 'I', ignored))
2379 2380
2380 2381 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2381 2382
2382 2383 end = opts['print0'] and '\0' or '\n'
2383 2384
2384 2385 for opt, char, changes in ([ct for ct in explicit_changetypes
2385 2386 if all or opts[ct[0]]]
2386 2387 or changetypes):
2387 2388 if opts['no_status']:
2388 2389 format = "%%s%s" % end
2389 2390 else:
2390 2391 format = "%s %%s%s" % (char, end)
2391 2392
2392 2393 for f in changes:
2393 2394 ui.write(format % repo.pathto(f, cwd))
2394 2395 if ((all or opts.get('copies')) and not opts.get('no_status')):
2395 2396 copied = repo.dirstate.copied(f)
2396 2397 if copied:
2397 2398 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2398 2399
2399 2400 def tag(ui, repo, name, rev_=None, **opts):
2400 2401 """add a tag for the current or given revision
2401 2402
2402 2403 Name a particular revision using <name>.
2403 2404
2404 2405 Tags are used to name particular revisions of the repository and are
2405 2406 very useful to compare different revisions, to go back to significant
2406 2407 earlier versions or to mark branch points as releases, etc.
2407 2408
2408 2409 If no revision is given, the parent of the working directory is used,
2409 2410 or tip if no revision is checked out.
2410 2411
2411 2412 To facilitate version control, distribution, and merging of tags,
2412 2413 they are stored as a file named ".hgtags" which is managed
2413 2414 similarly to other project files and can be hand-edited if
2414 2415 necessary. The file '.hg/localtags' is used for local tags (not
2415 2416 shared among repositories).
2416 2417 """
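# Illustrative invocations of the forms described above (tag names and
# revisions are hypothetical):
#   hg tag -r 3 v1.0        (tag revision 3 as v1.0 in .hgtags)
#   hg tag -l experimental  (local tag, kept in .hg/localtags only)
#   hg tag --remove v1.0    (record removal of the v1.0 tag)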
2417 2418 if name in ['tip', '.', 'null']:
2418 2419 raise util.Abort(_("the name '%s' is reserved") % name)
2419 2420 if rev_ is not None:
2420 2421 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2421 2422 "please use 'hg tag [-r REV] NAME' instead\n"))
2422 2423 if opts['rev']:
2423 2424 raise util.Abort(_("use only one form to specify the revision"))
2424 2425 if opts['rev'] and opts['remove']:
2425 2426 raise util.Abort(_("--rev and --remove are incompatible"))
2426 2427 if opts['rev']:
2427 2428 rev_ = opts['rev']
2428 2429 message = opts['message']
2429 2430 if opts['remove']:
2430 2431 if not name in repo.tags():
2431 2432 raise util.Abort(_('tag %s does not exist') % name)
2432 2433 rev_ = nullid
2433 2434 if not message:
2434 2435 message = _('Removed tag %s') % name
2435 2436 elif name in repo.tags() and not opts['force']:
2436 2437 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2437 2438 % name)
2438 2439 if not rev_ and repo.dirstate.parents()[1] != nullid:
2439 2440 raise util.Abort(_('uncommitted merge - please provide a '
2440 2441 'specific revision'))
2441 2442 r = repo.changectx(rev_).node()
2442 2443
2443 2444 if not message:
2444 2445 message = _('Added tag %s for changeset %s') % (name, short(r))
2445 2446
2446 2447 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2447 2448
2448 2449 def tags(ui, repo):
2449 2450 """list repository tags
2450 2451
2451 2452 List the repository tags.
2452 2453
2453 2454 This lists both regular and local tags.
2454 2455 """
2455 2456
2456 2457 l = repo.tagslist()
2457 2458 l.reverse()
2458 2459 hexfunc = ui.debugflag and hex or short
2459 2460 for t, n in l:
2460 2461 try:
2461 2462 hn = hexfunc(n)
2462 2463 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2463 2464 except revlog.LookupError:
2464 2465 r = " ?:%s" % hn
2465 2466 if ui.quiet:
2466 2467 ui.write("%s\n" % t)
2467 2468 else:
2468 2469 spaces = " " * (30 - util.locallen(t))
2469 2470 ui.write("%s%s %s\n" % (t, spaces, r))
2470 2471
2471 2472 def tip(ui, repo, **opts):
2472 2473 """show the tip revision
2473 2474
2474 2475 Show the tip revision.
2475 2476 """
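# nullrev is -1, so nullrev + changelog.count() is the highest revision
# number, i.e. the tip.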
2476 2477 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2477 2478
2478 2479 def unbundle(ui, repo, fname1, *fnames, **opts):
2479 2480 """apply one or more changegroup files
2480 2481
2481 2482 Apply one or more compressed changegroup files generated by the
2482 2483 bundle command.
2483 2484 """
2484 2485 fnames = (fname1,) + fnames
2485 2486 for fname in fnames:
2486 2487 if os.path.exists(fname):
2487 2488 f = open(fname, "rb")
2488 2489 else:
2489 2490 f = urllib.urlopen(fname)
2490 2491 gen = changegroup.readbundle(f, fname)
2491 2492 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2492 2493
2493 2494 return postincoming(ui, repo, modheads, opts['update'], None)
2494 2495
2495 2496 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2496 2497 """update working directory
2497 2498
2498 2499 Update the working directory to the specified revision, or the
2499 2500 tip of the current branch if none is specified.
2500 2501
2501 2502 If there are no outstanding changes in the working directory and
2502 2503 there is a linear relationship between the current version and the
2503 2504 requested version, the result is the requested version.
2504 2505
2505 2506 To merge the working directory with another revision, use the
2506 2507 merge command.
2507 2508
2508 2509 By default, update will refuse to run if doing so would require
2509 2510 discarding local changes.
2510 2511 """
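# Illustrative invocations of the behaviour described above (revisions and
# dates are hypothetical):
#   hg update -r 1.1           (update the working directory to tag/rev 1.1)
#   hg update -C               (discard local changes, update to the branch tip)
#   hg update -d "2007-05-01"  (update to the tipmost revision matching the date)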
2511 2512 if rev and node:
2512 2513 raise util.Abort(_("please specify just one revision"))
2513 2514
2514 2515 if not rev:
2515 2516 rev = node
2516 2517
2517 2518 if date:
2518 2519 if rev:
2519 2520 raise util.Abort(_("you can't specify a revision and a date"))
2520 2521 rev = cmdutil.finddate(ui, repo, date)
2521 2522
2522 2523 if clean:
2523 2524 return hg.clean(repo, rev)
2524 2525 else:
2525 2526 return hg.update(repo, rev)
2526 2527
2527 2528 def verify(ui, repo):
2528 2529 """verify the integrity of the repository
2529 2530
2530 2531 Verify the integrity of the current repository.
2531 2532
2532 2533 This will perform an extensive check of the repository's
2533 2534 integrity, validating the hashes and checksums of each entry in
2534 2535 the changelog, manifest, and tracked files, as well as the
2535 2536 integrity of their crosslinks and indices.
2536 2537 """
2537 2538 return hg.verify(repo)
2538 2539
2539 2540 def version_(ui):
2540 2541 """output version and copyright information"""
2541 2542 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2542 2543 % version.get_version())
2543 2544 ui.status(_(
2544 2545 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2545 2546 "This is free software; see the source for copying conditions. "
2546 2547 "There is NO\nwarranty; "
2547 2548 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2548 2549 ))
2549 2550
2550 2551 # Command options and aliases are listed here, alphabetically
2551 2552
2552 2553 globalopts = [
2553 2554 ('R', 'repository', '',
2554 2555 _('repository root directory or symbolic path name')),
2555 2556 ('', 'cwd', '', _('change working directory')),
2556 2557 ('y', 'noninteractive', None,
2557 2558 _('do not prompt, assume \'yes\' for any required answers')),
2558 2559 ('q', 'quiet', None, _('suppress output')),
2559 2560 ('v', 'verbose', None, _('enable additional output')),
2560 2561 ('', 'config', [], _('set/override config option')),
2561 2562 ('', 'debug', None, _('enable debugging output')),
2562 2563 ('', 'debugger', None, _('start debugger')),
2563 2564 ('', 'encoding', util._encoding, _('set the charset encoding')),
2564 2565 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2565 2566 ('', 'lsprof', None, _('print improved command execution profile')),
2566 2567 ('', 'traceback', None, _('print traceback on exception')),
2567 2568 ('', 'time', None, _('time how long the command takes')),
2568 2569 ('', 'profile', None, _('print command execution profile')),
2569 2570 ('', 'version', None, _('output version information and exit')),
2570 2571 ('h', 'help', None, _('display help and exit')),
2571 2572 ]
2572 2573
2573 2574 dryrunopts = [('n', 'dry-run', None,
2574 2575 _('do not perform actions, just print output'))]
2575 2576
2576 2577 remoteopts = [
2577 2578 ('e', 'ssh', '', _('specify ssh command to use')),
2578 2579 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2579 2580 ]
2580 2581
2581 2582 walkopts = [
2582 2583 ('I', 'include', [], _('include names matching the given patterns')),
2583 2584 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2584 2585 ]
2585 2586
2586 2587 commitopts = [
2587 2588 ('m', 'message', '', _('use <text> as commit message')),
2588 2589 ('l', 'logfile', '', _('read commit message from <file>')),
2589 2590 ]
2590 2591
2591 2592 commitopts2 = [
2592 2593 ('d', 'date', '', _('record datecode as commit date')),
2593 2594 ('u', 'user', '', _('record user as committer')),
2594 2595 ]
2595 2596
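# Each table entry maps a command name to (function, options, synopsis).
# Aliases are separated by '|' in the key, and a leading '^' marks commands
# that appear in the short help listing.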
2596 2597 table = {
2597 2598 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2598 2599 "addremove":
2599 2600 (addremove,
2600 2601 [('s', 'similarity', '',
2601 2602 _('guess renamed files by similarity (0<=s<=100)')),
2602 2603 ] + walkopts + dryrunopts,
2603 2604 _('hg addremove [OPTION]... [FILE]...')),
2604 2605 "^annotate":
2605 2606 (annotate,
2606 2607 [('r', 'rev', '', _('annotate the specified revision')),
2607 2608 ('f', 'follow', None, _('follow file copies and renames')),
2608 2609 ('a', 'text', None, _('treat all files as text')),
2609 2610 ('u', 'user', None, _('list the author')),
2610 2611 ('d', 'date', None, _('list the date')),
2611 2612 ('n', 'number', None, _('list the revision number (default)')),
2612 2613 ('c', 'changeset', None, _('list the changeset')),
2613 2614 ('l', 'line-number', None,
2614 2615 _('show line number at the first appearance'))
2615 2616 ] + walkopts,
2616 2617 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2617 2618 "archive":
2618 2619 (archive,
2619 2620 [('', 'no-decode', None, _('do not pass files through decoders')),
2620 2621 ('p', 'prefix', '', _('directory prefix for files in archive')),
2621 2622 ('r', 'rev', '', _('revision to distribute')),
2622 2623 ('t', 'type', '', _('type of distribution to create')),
2623 2624 ] + walkopts,
2624 2625 _('hg archive [OPTION]... DEST')),
2625 2626 "backout":
2626 2627 (backout,
2627 2628 [('', 'merge', None,
2628 2629 _('merge with old dirstate parent after backout')),
2629 2630 ('', 'parent', '', _('parent to choose when backing out merge')),
2630 2631 ('r', 'rev', '', _('revision to backout')),
2631 2632 ] + walkopts + commitopts + commitopts2,
2632 2633 _('hg backout [OPTION]... [-r] REV')),
2633 2634 "branch":
2634 2635 (branch,
2635 2636 [('f', 'force', None,
2636 2637 _('set branch name even if it shadows an existing branch'))],
2637 2638 _('hg branch [NAME]')),
2638 2639 "branches":
2639 2640 (branches,
2640 2641 [('a', 'active', False,
2641 2642 _('show only branches that have unmerged heads'))],
2642 2643 _('hg branches [-a]')),
2643 2644 "bundle":
2644 2645 (bundle,
2645 2646 [('f', 'force', None,
2646 2647 _('run even when remote repository is unrelated')),
2647 2648 ('r', 'rev', [],
2648 2649 _('a changeset you would like to bundle')),
2649 2650 ('', 'base', [],
2650 2651 _('a base changeset to specify instead of a destination')),
2651 2652 ] + remoteopts,
2652 2653 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2653 2654 "cat":
2654 2655 (cat,
2655 2656 [('o', 'output', '', _('print output to file with formatted name')),
2656 2657 ('r', 'rev', '', _('print the given revision')),
2657 2658 ] + walkopts,
2658 2659 _('hg cat [OPTION]... FILE...')),
2659 2660 "^clone":
2660 2661 (clone,
2661 2662 [('U', 'noupdate', None, _('do not update the new working directory')),
2662 2663 ('r', 'rev', [],
2663 2664 _('a changeset you would like to have after cloning')),
2664 2665 ('', 'pull', None, _('use pull protocol to copy metadata')),
2665 2666 ('', 'uncompressed', None,
2666 2667 _('use uncompressed transfer (fast over LAN)')),
2667 2668 ] + remoteopts,
2668 2669 _('hg clone [OPTION]... SOURCE [DEST]')),
2669 2670 "^commit|ci":
2670 2671 (commit,
2671 2672 [('A', 'addremove', None,
2672 2673 _('mark new/missing files as added/removed before committing')),
2673 2674 ] + walkopts + commitopts + commitopts2,
2674 2675 _('hg commit [OPTION]... [FILE]...')),
2675 2676 "copy|cp":
2676 2677 (copy,
2677 2678 [('A', 'after', None, _('record a copy that has already occurred')),
2678 2679 ('f', 'force', None,
2679 2680 _('forcibly copy over an existing managed file')),
2680 2681 ] + walkopts + dryrunopts,
2681 2682 _('hg copy [OPTION]... [SOURCE]... DEST')),
2682 2683 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2683 2684 "debugcomplete":
2684 2685 (debugcomplete,
2685 2686 [('o', 'options', None, _('show the command options'))],
2686 2687 _('debugcomplete [-o] CMD')),
2687 2688 "debuginstall": (debuginstall, [], _('debuginstall')),
2688 2689 "debugrebuildstate":
2689 2690 (debugrebuildstate,
2690 2691 [('r', 'rev', '', _('revision to rebuild to'))],
2691 2692 _('debugrebuildstate [-r REV] [REV]')),
2692 2693 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2693 2694 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2694 2695 "debugstate": (debugstate, [], _('debugstate')),
2695 2696 "debugdate":
2696 2697 (debugdate,
2697 2698 [('e', 'extended', None, _('try extended date formats'))],
2698 2699 _('debugdate [-e] DATE [RANGE]')),
2699 2700 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2700 2701 "debugindex": (debugindex, [], _('debugindex FILE')),
2701 2702 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2702 2703 "debugrename":
2703 2704 (debugrename,
2704 2705 [('r', 'rev', '', _('revision to debug'))],
2705 2706 _('debugrename [-r REV] FILE')),
2706 2707 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2707 2708 "^diff":
2708 2709 (diff,
2709 2710 [('r', 'rev', [], _('revision')),
2710 2711 ('a', 'text', None, _('treat all files as text')),
2711 2712 ('p', 'show-function', None,
2712 2713 _('show which function each change is in')),
2713 2714 ('g', 'git', None, _('use git extended diff format')),
2714 2715 ('', 'nodates', None, _("don't include dates in diff headers")),
2715 2716 ('w', 'ignore-all-space', None,
2716 2717 _('ignore white space when comparing lines')),
2717 2718 ('b', 'ignore-space-change', None,
2718 2719 _('ignore changes in the amount of white space')),
2719 2720 ('B', 'ignore-blank-lines', None,
2720 2721 _('ignore changes whose lines are all blank')),
2721 2722 ] + walkopts,
2722 2723 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2723 2724 "^export":
2724 2725 (export,
2725 2726 [('o', 'output', '', _('print output to file with formatted name')),
2726 2727 ('a', 'text', None, _('treat all files as text')),
2727 2728 ('g', 'git', None, _('use git extended diff format')),
2728 2729 ('', 'nodates', None, _("don't include dates in diff headers")),
2729 2730 ('', 'switch-parent', None, _('diff against the second parent'))],
2730 2731 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2731 2732 "grep":
2732 2733 (grep,
2733 2734 [('0', 'print0', None, _('end fields with NUL')),
2734 2735 ('', 'all', None, _('print all revisions that match')),
2735 2736 ('f', 'follow', None,
2736 2737 _('follow changeset history, or file history across copies and renames')),
2737 2738 ('i', 'ignore-case', None, _('ignore case when matching')),
2738 2739 ('l', 'files-with-matches', None,
2739 2740 _('print only filenames and revs that match')),
2740 2741 ('n', 'line-number', None, _('print matching line numbers')),
2741 2742 ('r', 'rev', [], _('search in given revision range')),
2742 2743 ('u', 'user', None, _('print user who committed change')),
2743 2744 ] + walkopts,
2744 2745 _('hg grep [OPTION]... PATTERN [FILE]...')),
2745 2746 "heads":
2746 2747 (heads,
2747 2748 [('', 'style', '', _('display using template map file')),
2748 2749 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2749 2750 ('', 'template', '', _('display with template'))],
2750 2751 _('hg heads [-r REV] [REV]...')),
2751 2752 "help": (help_, [], _('hg help [COMMAND]')),
2752 2753 "identify|id":
2753 2754 (identify,
2754 2755 [('r', 'rev', '', _('identify the specified rev')),
2755 2756 ('n', 'num', None, _('show local revision number')),
2756 2757 ('i', 'id', None, _('show global revision id')),
2757 2758 ('b', 'branch', None, _('show branch')),
2758 2759 ('t', 'tags', None, _('show tags'))],
2759 2760 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2760 2761 "import|patch":
2761 2762 (import_,
2762 2763 [('p', 'strip', 1,
2763 2764 _('directory strip option for patch. This has the same\n'
2764 2765 'meaning as the corresponding patch option')),
2765 2766 ('b', 'base', '', _('base path')),
2766 2767 ('f', 'force', None,
2767 2768 _('skip check for outstanding uncommitted changes')),
2768 2769 ('', 'exact', None,
2769 2770 _('apply patch to the nodes from which it was generated')),
2770 2771 ('', 'import-branch', None,
2771 2772 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2772 2773 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2773 2774 "incoming|in": (incoming,
2774 2775 [('M', 'no-merges', None, _('do not show merges')),
2775 2776 ('f', 'force', None,
2776 2777 _('run even when remote repository is unrelated')),
2777 2778 ('', 'style', '', _('display using template map file')),
2778 2779 ('n', 'newest-first', None, _('show newest record first')),
2779 2780 ('', 'bundle', '', _('file to store the bundles into')),
2780 2781 ('p', 'patch', None, _('show patch')),
2781 2782 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2782 2783 ('', 'template', '', _('display with template')),
2783 2784 ] + remoteopts,
2784 2785 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2785 2786 ' [--bundle FILENAME] [SOURCE]')),
2786 2787 "^init":
2787 2788 (init,
2788 2789 remoteopts,
2789 2790 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2790 2791 "locate":
2791 2792 (locate,
2792 2793 [('r', 'rev', '', _('search the repository as it stood at rev')),
2793 2794 ('0', 'print0', None,
2794 2795 _('end filenames with NUL, for use with xargs')),
2795 2796 ('f', 'fullpath', None,
2796 2797 _('print complete paths from the filesystem root')),
2797 2798 ] + walkopts,
2798 2799 _('hg locate [OPTION]... [PATTERN]...')),
2799 2800 "^log|history":
2800 2801 (log,
2801 2802 [('f', 'follow', None,
2802 2803 _('follow changeset history, or file history across copies and renames')),
2803 2804 ('', 'follow-first', None,
2804 2805 _('only follow the first parent of merge changesets')),
2805 2806 ('d', 'date', '', _('show revs matching date spec')),
2806 2807 ('C', 'copies', None, _('show copied files')),
2807 2808 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2808 2809 ('l', 'limit', '', _('limit number of changes displayed')),
2809 2810 ('r', 'rev', [], _('show the specified revision or range')),
2810 2811 ('', 'removed', None, _('include revs where files were removed')),
2811 2812 ('M', 'no-merges', None, _('do not show merges')),
2812 2813 ('', 'style', '', _('display using template map file')),
2813 2814 ('m', 'only-merges', None, _('show only merges')),
2814 2815 ('p', 'patch', None, _('show patch')),
2815 2816 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2816 2817 ('', 'template', '', _('display with template')),
2817 2818 ] + walkopts,
2818 2819 _('hg log [OPTION]... [FILE]')),
2819 2820 "manifest": (manifest, [('r', 'rev', '', _('revision to display'))],
2820 2821 _('hg manifest [-r REV]')),
2821 2822 "^merge":
2822 2823 (merge,
2823 2824 [('f', 'force', None, _('force a merge with outstanding changes')),
2824 2825 ('r', 'rev', '', _('revision to merge')),
2825 2826 ],
2826 2827 _('hg merge [-f] [[-r] REV]')),
2827 2828 "outgoing|out": (outgoing,
2828 2829 [('M', 'no-merges', None, _('do not show merges')),
2829 2830 ('f', 'force', None,
2830 2831 _('run even when remote repository is unrelated')),
2831 2832 ('p', 'patch', None, _('show patch')),
2832 2833 ('', 'style', '', _('display using template map file')),
2833 2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2834 2835 ('n', 'newest-first', None, _('show newest record first')),
2835 2836 ('', 'template', '', _('display with template')),
2836 2837 ] + remoteopts,
2837 2838 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2838 2839 "^parents":
2839 2840 (parents,
2840 2841 [('r', 'rev', '', _('show parents from the specified rev')),
2841 2842 ('', 'style', '', _('display using template map file')),
2842 2843 ('', 'template', '', _('display with template'))],
2843 2844 _('hg parents [-r REV] [FILE]')),
2844 2845 "paths": (paths, [], _('hg paths [NAME]')),
2845 2846 "^pull":
2846 2847 (pull,
2847 2848 [('u', 'update', None,
2848 2849 _('update to new tip if changesets were pulled')),
2849 2850 ('f', 'force', None,
2850 2851 _('run even when remote repository is unrelated')),
2851 2852 ('r', 'rev', [],
2852 2853 _('a specific revision up to which you would like to pull')),
2853 2854 ] + remoteopts,
2854 2855 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2855 2856 "^push":
2856 2857 (push,
2857 2858 [('f', 'force', None, _('force push')),
2858 2859 ('r', 'rev', [], _('a specific revision you would like to push')),
2859 2860 ] + remoteopts,
2860 2861 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2861 2862 "debugrawcommit|rawcommit":
2862 2863 (rawcommit,
2863 2864 [('p', 'parent', [], _('parent')),
2864 2865 ('F', 'files', '', _('file list'))
2865 2866 ] + commitopts + commitopts2,
2866 2867 _('hg debugrawcommit [OPTION]... [FILE]...')),
2867 2868 "recover": (recover, [], _('hg recover')),
2868 2869 "^remove|rm":
2869 2870 (remove,
2870 2871 [('A', 'after', None, _('record remove without deleting')),
2871 2872 ('f', 'force', None, _('remove file even if modified')),
2872 2873 ] + walkopts,
2873 2874 _('hg remove [OPTION]... FILE...')),
2874 2875 "rename|mv":
2875 2876 (rename,
2876 2877 [('A', 'after', None, _('record a rename that has already occurred')),
2877 2878 ('f', 'force', None,
2878 2879 _('forcibly copy over an existing managed file')),
2879 2880 ] + walkopts + dryrunopts,
2880 2881 _('hg rename [OPTION]... SOURCE... DEST')),
2881 2882 "revert":
2882 2883 (revert,
2883 2884 [('a', 'all', None, _('revert all changes when no arguments given')),
2884 2885 ('d', 'date', '', _('tipmost revision matching date')),
2885 2886 ('r', 'rev', '', _('revision to revert to')),
2886 2887 ('', 'no-backup', None, _('do not save backup copies of files')),
2887 2888 ] + walkopts + dryrunopts,
2888 2889 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2889 2890 "rollback": (rollback, [], _('hg rollback')),
2890 2891 "root": (root, [], _('hg root')),
2891 2892 "showconfig|debugconfig":
2892 2893 (showconfig,
2893 2894 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2894 2895 _('showconfig [-u] [NAME]...')),
2895 2896 "^serve":
2896 2897 (serve,
2897 2898 [('A', 'accesslog', '', _('name of access log file to write to')),
2898 2899 ('d', 'daemon', None, _('run server in background')),
2899 2900 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2900 2901 ('E', 'errorlog', '', _('name of error log file to write to')),
2901 2902 ('p', 'port', 0, _('port to use (default: 8000)')),
2902 2903 ('a', 'address', '', _('address to use')),
2903 2904 ('n', 'name', '',
2904 2905 _('name to show in web pages (default: working dir)')),
2905 2906 ('', 'webdir-conf', '', _('name of the webdir config file'
2906 2907 ' (serve more than one repo)')),
2907 2908 ('', 'pid-file', '', _('name of file to write process ID to')),
2908 2909 ('', 'stdio', None, _('for remote clients')),
2909 2910 ('t', 'templates', '', _('web templates to use')),
2910 2911 ('', 'style', '', _('template style to use')),
2911 2912 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
2912 2913 ('', 'certificate', '', _('SSL certificate file'))],
2913 2914 _('hg serve [OPTION]...')),
2914 2915 "^status|st":
2915 2916 (status,
2916 2917 [('A', 'all', None, _('show status of all files')),
2917 2918 ('m', 'modified', None, _('show only modified files')),
2918 2919 ('a', 'added', None, _('show only added files')),
2919 2920 ('r', 'removed', None, _('show only removed files')),
2920 2921 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2921 2922 ('c', 'clean', None, _('show only files without changes')),
2922 2923 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2923 2924 ('i', 'ignored', None, _('show only ignored files')),
2924 2925 ('n', 'no-status', None, _('hide status prefix')),
2925 2926 ('C', 'copies', None, _('show source of copied files')),
2926 2927 ('0', 'print0', None,
2927 2928 _('end filenames with NUL, for use with xargs')),
2928 2929 ('', 'rev', [], _('show difference from revision')),
2929 2930 ] + walkopts,
2930 2931 _('hg status [OPTION]... [FILE]...')),
2931 2932 "tag":
2932 2933 (tag,
2933 2934 [('f', 'force', None, _('replace existing tag')),
2934 2935 ('l', 'local', None, _('make the tag local')),
2935 2936 ('r', 'rev', '', _('revision to tag')),
2936 2937 ('', 'remove', None, _('remove a tag')),
2937 2938 # -l/--local is already there, commitopts cannot be used
2938 2939 ('m', 'message', '', _('use <text> as commit message')),
2939 2940 ] + commitopts2,
2940 2941 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2941 2942 "tags": (tags, [], _('hg tags')),
2942 2943 "tip":
2943 2944 (tip,
2944 2945 [('', 'style', '', _('display using template map file')),
2945 2946 ('p', 'patch', None, _('show patch')),
2946 2947 ('', 'template', '', _('display with template'))],
2947 2948 _('hg tip [-p]')),
2948 2949 "unbundle":
2949 2950 (unbundle,
2950 2951 [('u', 'update', None,
2951 2952 _('update to new tip if changesets were unbundled'))],
2952 2953 _('hg unbundle [-u] FILE...')),
2953 2954 "^update|up|checkout|co":
2954 2955 (update,
2955 2956 [('C', 'clean', None, _('overwrite locally modified files')),
2956 2957 ('d', 'date', '', _('tipmost revision matching date')),
2957 2958 ('r', 'rev', '', _('revision'))],
2958 2959 _('hg update [-C] [-d DATE] [[-r] REV]')),
2959 2960 "verify": (verify, [], _('hg verify')),
2960 2961 "version": (version_, [], _('hg version')),
2961 2962 }
2962 2963
2963 2964 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2964 2965 " debugindex debugindexdot debugdate debuginstall")
2965 2966 optionalrepo = ("identify paths serve showconfig")
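The table above maps each command name to a (function, options, synopsis) tuple; a leading '^' marks commands shown in the short help list and '|' separates aliases. A minimal sketch of the same layout as an extension might supply it through a cmdtable dict (the command, its option, and its behaviour here are hypothetical; only the tuple format and markers come from the table above):

    from mercurial.i18n import _

    def hello(ui, repo, **opts):
        """print a greeting (hypothetical example command)"""
        ui.write("hello from %s\n" % repo.root)

    cmdtable = {
        "^hello|hi":
            (hello,
             [('g', 'greeting', '', _('greeting to use'))],
             _('hg hello [-g TEXT]')),
    }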
@@ -1,131 +1,131 b''
1 1 # demandimport.py - global demand-loading of modules for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''
9 9 demandimport - automatic demandloading of modules
10 10
11 11 To enable this module, do:
12 12
13 13 import demandimport; demandimport.enable()
14 14
15 15 Imports of the following forms will be demand-loaded:
16 16
17 17 import a, b.c
18 18 import a.b as c
19 19 from a import b,c # a will be loaded immediately
20 20
21 21 These imports will not be delayed:
22 22
23 23 from a import *
24 24 b = __import__(a)
25 25 '''
26 26
27 27 _origimport = __import__
28 28
29 29 class _demandmod(object):
30 30 """module demand-loader and proxy"""
31 31 def __init__(self, name, globals, locals):
32 32 if '.' in name:
33 33 head, rest = name.split('.', 1)
34 34 after = [rest]
35 35 else:
36 36 head = name
37 37 after = []
38 38 object.__setattr__(self, "_data", (head, globals, locals, after))
39 39 object.__setattr__(self, "_module", None)
40 40 def _extend(self, name):
41 41 """add to the list of submodules to load"""
42 42 self._data[3].append(name)
43 43 def _load(self):
44 44 if not self._module:
45 45 head, globals, locals, after = self._data
46 46 mod = _origimport(head, globals, locals)
47 47 # load submodules
48 48 def subload(mod, p):
49 49 h, t = p, None
50 50 if '.' in p:
51 51 h, t = p.split('.', 1)
52 52 if not hasattr(mod, h):
53 53 setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
54 54 elif t:
55 55 subload(getattr(mod, h), t)
56 56
57 57 for x in after:
58 58 subload(mod, x)
59 59
60 60 # are we in the locals dictionary still?
61 61 if locals and locals.get(head) == self:
62 62 locals[head] = mod
63 63 object.__setattr__(self, "_module", mod)
64 64
65 65 def __repr__(self):
66 66 if self._module:
67 67 return "<proxied module '%s'>" % self._data[0]
68 68 return "<unloaded module '%s'>" % self._data[0]
69 69 def __call__(self, *args, **kwargs):
70 raise TypeError("'unloaded module' object is not callable")
70 raise TypeError("%s object is not callable" % repr(self))
71 71 def __getattribute__(self, attr):
72 72 if attr in ('_data', '_extend', '_load', '_module'):
73 73 return object.__getattribute__(self, attr)
74 74 self._load()
75 75 return getattr(self._module, attr)
76 76 def __setattr__(self, attr, val):
77 77 self._load()
78 78 setattr(self._module, attr, val)
79 79
80 80 def _demandimport(name, globals=None, locals=None, fromlist=None):
81 81 if not locals or name in ignore or fromlist == ('*',):
82 82 # these cases we can't really delay
83 83 return _origimport(name, globals, locals, fromlist)
84 84 elif not fromlist:
85 85 # import a [as b]
86 86 if '.' in name: # a.b
87 87 base, rest = name.split('.', 1)
88 88 # email.__init__ loading email.mime
89 89 if globals and globals.get('__name__', None) == base:
90 90 return _origimport(name, globals, locals, fromlist)
91 91 # if a is already demand-loaded, add b to its submodule list
92 92 if base in locals:
93 93 if isinstance(locals[base], _demandmod):
94 94 locals[base]._extend(rest)
95 95 return locals[base]
96 96 return _demandmod(name, globals, locals)
97 97 else:
98 98 # from a import b,c,d
99 99 mod = _origimport(name, globals, locals)
100 100 # recurse down the module chain
101 101 for comp in name.split('.')[1:]:
102 102 if not hasattr(mod, comp):
103 103 setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
104 104 mod = getattr(mod, comp)
105 105 for x in fromlist:
106 106 # set requested submodules for demand load
107 107 if not(hasattr(mod, x)):
108 108 setattr(mod, x, _demandmod(x, mod.__dict__, locals))
109 109 return mod
110 110
111 111 ignore = [
112 112 '_hashlib',
113 113 '_xmlplus',
114 114 'fcntl',
115 115 'win32com.gen_py',
116 116 # imported by tarfile, not available under Windows
117 117 'pwd',
118 118 'grp',
119 119 # imported by profile, itself imported by hotshot.stats,
120 120 # not available under Windows
121 121 'resource',
122 122 ]
123 123
124 124 def enable():
125 125 "enable global demand-loading of modules"
126 126 __builtins__["__import__"] = _demandimport
127 127
128 128 def disable():
129 129 "disable global demand-loading of modules"
130 130 __builtins__["__import__"] = _origimport
131 131
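For context, a small usage sketch of the module above (the imported module name is just an example, and it assumes demandimport is importable as shown): once enable() replaces __import__, a plain import binds a _demandmod proxy, and the real module is only loaded on first attribute access.

    import demandimport
    demandimport.enable()

    import smtplib            # bound to an unloaded _demandmod proxy
    # ... startup code runs without paying for the import ...
    smtplib.SMTP              # first attribute access triggers the real import

    demandimport.disable()    # restore the original __import__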
@@ -1,401 +1,403 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
11 11 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
12 12 import cmdutil
13 13 import ui as _ui
14 14
15 15 class ParseError(Exception):
16 16 """Exception raised on errors in parsing the command line."""
17 17
18 18 def run():
19 19 "run the command in sys.argv"
20 20 sys.exit(dispatch(sys.argv[1:]))
21 21
22 22 def dispatch(args):
23 23 "run the command specified in args"
24 24 try:
25 25 u = _ui.ui(traceback='--traceback' in args)
26 26 except util.Abort, inst:
27 27 sys.stderr.write(_("abort: %s\n") % inst)
28 28 return -1
29 29 return _runcatch(u, args)
30 30
31 31 def _runcatch(ui, args):
32 32 def catchterm(*args):
33 33 raise util.SignalInterrupt
34 34
35 35 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 36 num = getattr(signal, name, None)
37 37 if num: signal.signal(num, catchterm)
38 38
39 39 try:
40 40 try:
41 41 # enter the debugger before command execution
42 42 if '--debugger' in args:
43 43 pdb.set_trace()
44 44 try:
45 45 return _dispatch(ui, args)
46 46 finally:
47 47 ui.flush()
48 48 except:
49 49 # enter the debugger when we hit an exception
50 50 if '--debugger' in args:
51 51 pdb.post_mortem(sys.exc_info()[2])
52 52 ui.print_exc()
53 53 raise
54 54
55 55 except ParseError, inst:
56 56 if inst.args[0]:
57 57 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
58 58 commands.help_(ui, inst.args[0])
59 59 else:
60 60 ui.warn(_("hg: %s\n") % inst.args[1])
61 61 commands.help_(ui, 'shortlist')
62 62 except cmdutil.AmbiguousCommand, inst:
63 63 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
64 64 (inst.args[0], " ".join(inst.args[1])))
65 65 except cmdutil.UnknownCommand, inst:
66 66 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
67 67 commands.help_(ui, 'shortlist')
68 68 except hg.RepoError, inst:
69 69 ui.warn(_("abort: %s!\n") % inst)
70 70 except lock.LockHeld, inst:
71 71 if inst.errno == errno.ETIMEDOUT:
72 72 reason = _('timed out waiting for lock held by %s') % inst.locker
73 73 else:
74 74 reason = _('lock held by %s') % inst.locker
75 75 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
76 76 except lock.LockUnavailable, inst:
77 77 ui.warn(_("abort: could not lock %s: %s\n") %
78 78 (inst.desc or inst.filename, inst.strerror))
79 79 except revlog.RevlogError, inst:
80 80 ui.warn(_("abort: %s!\n") % inst)
81 81 except util.SignalInterrupt:
82 82 ui.warn(_("killed!\n"))
83 83 except KeyboardInterrupt:
84 84 try:
85 85 ui.warn(_("interrupted!\n"))
86 86 except IOError, inst:
87 87 if inst.errno == errno.EPIPE:
88 88 if ui.debugflag:
89 89 ui.warn(_("\nbroken pipe\n"))
90 90 else:
91 91 raise
92 92 except socket.error, inst:
93 93 ui.warn(_("abort: %s\n") % inst[1])
94 94 except IOError, inst:
95 95 if hasattr(inst, "code"):
96 96 ui.warn(_("abort: %s\n") % inst)
97 97 elif hasattr(inst, "reason"):
98 98 try: # usually it is in the form (errno, strerror)
99 99 reason = inst.reason.args[1]
100 100 except: # it might be anything, for example a string
101 101 reason = inst.reason
102 102 ui.warn(_("abort: error: %s\n") % reason)
103 103 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
104 104 if ui.debugflag:
105 105 ui.warn(_("broken pipe\n"))
106 106 elif getattr(inst, "strerror", None):
107 107 if getattr(inst, "filename", None):
108 108 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
109 109 else:
110 110 ui.warn(_("abort: %s\n") % inst.strerror)
111 111 else:
112 112 raise
113 113 except OSError, inst:
114 114 if getattr(inst, "filename", None):
115 115 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
116 116 else:
117 117 ui.warn(_("abort: %s\n") % inst.strerror)
118 118 except util.UnexpectedOutput, inst:
119 119 ui.warn(_("abort: %s") % inst[0])
120 120 if not isinstance(inst[1], basestring):
121 121 ui.warn(" %r\n" % (inst[1],))
122 122 elif not inst[1]:
123 123 ui.warn(_(" empty string\n"))
124 124 else:
125 125 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
126 126 except ImportError, inst:
127 127 m = str(inst).split()[-1]
128 128 ui.warn(_("abort: could not import module %s!\n") % m)
129 129 if m in "mpatch bdiff".split():
130 130 ui.warn(_("(did you forget to compile extensions?)\n"))
131 131 elif m in "zlib".split():
132 132 ui.warn(_("(is your Python install correct?)\n"))
133 133
134 134 except util.Abort, inst:
135 135 ui.warn(_("abort: %s\n") % inst)
136 except MemoryError:
137 ui.warn(_("abort: out of memory\n"))
136 138 except SystemExit, inst:
137 139 # Commands shouldn't sys.exit directly, but give a return code.
138 140 # Just in case catch this and and pass exit code to caller.
139 141 return inst.code
140 142 except:
141 143 ui.warn(_("** unknown exception encountered, details follow\n"))
142 144 ui.warn(_("** report bug details to "
143 145 "http://www.selenic.com/mercurial/bts\n"))
144 146 ui.warn(_("** or mercurial@selenic.com\n"))
145 147 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
146 148 % version.get_version())
147 149 raise
148 150
149 151 return -1
150 152
151 153 def _findrepo():
152 154 p = os.getcwd()
153 155 while not os.path.isdir(os.path.join(p, ".hg")):
154 156 oldp, p = p, os.path.dirname(p)
155 157 if p == oldp:
156 158 return None
157 159
158 160 return p
159 161
160 162 def _parse(ui, args):
161 163 options = {}
162 164 cmdoptions = {}
163 165
164 166 try:
165 167 args = fancyopts.fancyopts(args, commands.globalopts, options)
166 168 except fancyopts.getopt.GetoptError, inst:
167 169 raise ParseError(None, inst)
168 170
169 171 if args:
170 172 cmd, args = args[0], args[1:]
171 173 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
172 174 cmd = aliases[0]
173 175 defaults = ui.config("defaults", cmd)
174 176 if defaults:
175 177 args = shlex.split(defaults) + args
176 178 c = list(i[1])
177 179 else:
178 180 cmd = None
179 181 c = []
180 182
181 183 # combine global options into local
182 184 for o in commands.globalopts:
183 185 c.append((o[0], o[1], options[o[1]], o[3]))
184 186
185 187 try:
186 188 args = fancyopts.fancyopts(args, c, cmdoptions)
187 189 except fancyopts.getopt.GetoptError, inst:
188 190 raise ParseError(cmd, inst)
189 191
190 192 # separate global options back out
191 193 for o in commands.globalopts:
192 194 n = o[1]
193 195 options[n] = cmdoptions[n]
194 196 del cmdoptions[n]
195 197
196 198 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
197 199
198 200 def _parseconfig(config):
199 201 """parse the --config options from the command line"""
200 202 parsed = []
201 203 for cfg in config:
202 204 try:
203 205 name, value = cfg.split('=', 1)
204 206 section, name = name.split('.', 1)
205 207 if not section or not name:
206 208 raise IndexError
207 209 parsed.append((section, name, value))
208 210 except (IndexError, ValueError):
209 211 raise util.Abort(_('malformed --config option: %s') % cfg)
210 212 return parsed
211 213
212 214 def _earlygetopt(aliases, args):
213 215 """Return list of values for an option (or aliases).
214 216
215 217 The values are listed in the order they appear in args.
216 218 The options and values are removed from args.
217 219 """
218 220 try:
219 221 argcount = args.index("--")
220 222 except ValueError:
221 223 argcount = len(args)
222 224 shortopts = [opt for opt in aliases if len(opt) == 2]
223 225 values = []
224 226 pos = 0
225 227 while pos < argcount:
226 228 if args[pos] in aliases:
227 229 if pos + 1 >= argcount:
228 230 # ignore and let getopt report an error if there is no value
229 231 break
230 232 del args[pos]
231 233 values.append(args.pop(pos))
232 234 argcount -= 2
233 235 elif args[pos][:2] in shortopts:
234 236 # short option can have no following space, e.g. hg log -Rfoo
235 237 values.append(args.pop(pos)[2:])
236 238 argcount -= 1
237 239 else:
238 240 pos += 1
239 241 return values
240 242
241 243 _loaded = {}
242 244 def _dispatch(ui, args):
243 245 # read --config before doing anything else
244 246 # (e.g. to change trust settings for reading .hg/hgrc)
245 247 config = _earlygetopt(['--config'], args)
246 248 if config:
247 249 ui.updateopts(config=_parseconfig(config))
248 250
249 251 # check for cwd
250 252 cwd = _earlygetopt(['--cwd'], args)
251 253 if cwd:
252 254 os.chdir(cwd[-1])
253 255
254 256 # read the local repository .hgrc into a local ui object
255 257 path = _findrepo() or ""
256 258 if not path:
257 259 lui = ui
258 260 if path:
259 261 try:
260 262 lui = _ui.ui(parentui=ui)
261 263 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
262 264 except IOError:
263 265 pass
264 266
265 267 # now we can expand paths, even ones in .hg/hgrc
266 268 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
267 269 if rpath:
268 270 path = lui.expandpath(rpath[-1])
269 271 lui = _ui.ui(parentui=ui)
270 272 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
271 273
272 274 extensions.loadall(lui)
273 275 for name, module in extensions.extensions():
274 276 if name in _loaded:
275 277 continue
276 278 cmdtable = getattr(module, 'cmdtable', {})
277 279 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
278 280 if overrides:
279 281 ui.warn(_("extension '%s' overrides commands: %s\n")
280 282 % (name, " ".join(overrides)))
281 283 commands.table.update(cmdtable)
282 284 _loaded[name] = 1
283 285 # check for fallback encoding
284 286 fallback = lui.config('ui', 'fallbackencoding')
285 287 if fallback:
286 288 util._fallbackencoding = fallback
287 289
288 290 fullargs = args
289 291 cmd, func, args, options, cmdoptions = _parse(lui, args)
290 292
291 293 if options["config"]:
292 294 raise util.Abort(_("Option --config may not be abbreviated!"))
293 295 if options["cwd"]:
294 296 raise util.Abort(_("Option --cwd may not be abbreviated!"))
295 297 if options["repository"]:
296 298 raise util.Abort(_(
297 299 "Option -R has to be separated from other options (i.e. not -qR) "
298 300 "and --repository may only be abbreviated as --repo!"))
299 301
300 302 if options["encoding"]:
301 303 util._encoding = options["encoding"]
302 304 if options["encodingmode"]:
303 305 util._encodingmode = options["encodingmode"]
304 306 if options["time"]:
305 307 def get_times():
306 308 t = os.times()
307 309 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
308 310 t = (t[0], t[1], t[2], t[3], time.clock())
309 311 return t
310 312 s = get_times()
311 313 def print_time():
312 314 t = get_times()
313 315 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
314 316 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
315 317 atexit.register(print_time)
316 318
317 319 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
318 320 not options["noninteractive"], options["traceback"])
319 321
320 322 if options['help']:
321 323 return commands.help_(ui, cmd, options['version'])
322 324 elif options['version']:
323 325 return commands.version_(ui)
324 326 elif not cmd:
325 327 return commands.help_(ui, 'shortlist')
326 328
327 329 repo = None
328 330 if cmd not in commands.norepo.split():
329 331 try:
330 332 repo = hg.repository(ui, path=path)
331 333 ui = repo.ui
332 334 if not repo.local():
333 335 raise util.Abort(_("repository '%s' is not local") % path)
334 336 except hg.RepoError:
335 337 if cmd not in commands.optionalrepo.split():
336 338 if not path:
337 339 raise hg.RepoError(_("There is no Mercurial repository here"
338 340 " (.hg not found)"))
339 341 raise
340 342 d = lambda: func(ui, repo, *args, **cmdoptions)
341 343 else:
342 344 d = lambda: func(ui, *args, **cmdoptions)
343 345
344 346 # run pre-hook, and abort if it fails
345 347 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
346 348 if ret:
347 349 return ret
348 350 ret = _runcommand(ui, options, cmd, d)
349 351 # run post-hook, passing command result
350 352 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
351 353 result = ret)
352 354 return ret
353 355
354 356 def _runcommand(ui, options, cmd, cmdfunc):
355 357 def checkargs():
356 358 try:
357 359 return cmdfunc()
358 360 except TypeError, inst:
359 361 # was this an argument error?
360 362 tb = traceback.extract_tb(sys.exc_info()[2])
361 363 if len(tb) != 2: # no
362 364 raise
363 365 raise ParseError(cmd, _("invalid arguments"))
364 366
365 367 if options['profile']:
366 368 import hotshot, hotshot.stats
367 369 prof = hotshot.Profile("hg.prof")
368 370 try:
369 371 try:
370 372 return prof.runcall(checkargs)
371 373 except:
372 374 try:
373 375 ui.warn(_('exception raised - generating '
374 376 'profile anyway\n'))
375 377 except:
376 378 pass
377 379 raise
378 380 finally:
379 381 prof.close()
380 382 stats = hotshot.stats.load("hg.prof")
381 383 stats.strip_dirs()
382 384 stats.sort_stats('time', 'calls')
383 385 stats.print_stats(40)
384 386 elif options['lsprof']:
385 387 try:
386 388 from mercurial import lsprof
387 389 except ImportError:
388 390 raise util.Abort(_(
389 391 'lsprof not available - install from '
390 392 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
391 393 p = lsprof.Profiler()
392 394 p.enable(subcalls=True)
393 395 try:
394 396 return checkargs()
395 397 finally:
396 398 p.disable()
397 399 stats = lsprof.Stats(p.getstats())
398 400 stats.sort()
399 401 stats.pprint(top=10, file=sys.stderr, climit=5)
400 402 else:
401 403 return checkargs()
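A rough illustration of the _earlygetopt helper defined above (the argument list is made up, and since _earlygetopt is an internal helper the import path is an assumption): matching option values are pulled out of args in place, and anything after '--' is left untouched.

    from mercurial import dispatch

    args = ['-R', 'myrepo', 'log', '-Rother', '--', '-R', 'kept']
    values = dispatch._earlygetopt(['-R', '--repository', '--repo'], args)
    # values == ['myrepo', 'other']
    # args   == ['log', '--', '-R', 'kept']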
@@ -1,35 +1,74 b''
1 1 import getopt
2 2
3 3 def fancyopts(args, options, state):
4 long = []
5 short = ''
6 map = {}
7 dt = {}
4 """
5 read args, parse options, and store options in state
6
7 each option is a tuple of:
8
9 short option or ''
10 long option
11 default value
12 description
13
14 option types include:
15
16 boolean or none - option sets variable in state to true
17 string - parameter string is stored in state
18 list - parameter string is added to a list
19 integer - parameter string is stored as int
20 function - call function with parameter
8 21
9 for s, l, d, c in options:
10 pl = l.replace('-', '_')
11 map['-'+s] = map['--'+l] = pl
12 if isinstance(d, list):
13 state[pl] = d[:]
22 non-option args are returned
23 """
24 namelist = []
25 shortlist = ''
26 argmap = {}
27 defmap = {}
28
29 for short, name, default, comment in options:
30 # convert opts to getopt format
31 oname = name
32 name = name.replace('-', '_')
33
34 argmap['-' + short] = argmap['--' + oname] = name
35 defmap[name] = default
36
37 # copy defaults to state
38 if isinstance(default, list):
39 state[name] = default[:]
40 elif callable(default):
41 print "whoa", name, default
42 state[name] = None
14 43 else:
15 state[pl] = d
16 dt[pl] = type(d)
17 if (d is not None and d is not True and d is not False and
18 not callable(d)):
19 if s: s += ':'
20 if l: l += '='
21 if s: short = short + s
22 if l: long.append(l)
44 state[name] = default
23 45
24 opts, args = getopt.getopt(args, short, long)
46 # does it take a parameter?
47 if not (default is None or default is True or default is False):
48 if short: short += ':'
49 if oname: oname += '='
50 if short:
51 shortlist += short
52 if name:
53 namelist.append(oname)
54
55 # parse arguments
56 opts, args = getopt.getopt(args, shortlist, namelist)
25 57
26 for opt, arg in opts:
27 if dt[map[opt]] is type(fancyopts): state[map[opt]](state, map[opt], arg)
28 elif dt[map[opt]] is type(1): state[map[opt]] = int(arg)
29 elif dt[map[opt]] is type(''): state[map[opt]] = arg
30 elif dt[map[opt]] is type([]): state[map[opt]].append(arg)
31 elif dt[map[opt]] is type(None): state[map[opt]] = True
32 elif dt[map[opt]] is type(False): state[map[opt]] = True
58 # transfer result to state
59 for opt, val in opts:
60 name = argmap[opt]
61 t = type(defmap[name])
62 if t is type(fancyopts):
63 state[name] = defmap[name](val)
64 elif t is type(1):
65 state[name] = int(val)
66 elif t is type(''):
67 state[name] = val
68 elif t is type([]):
69 state[name].append(val)
70 elif t is type(None) or t is type(False):
71 state[name] = True
33 72
73 # return unparsed args
34 74 return args
35
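A minimal sketch of calling the rewritten fancyopts (the option table and argument list are invented for illustration): each entry's default value doubles as its type, and unparsed arguments are returned.

    from mercurial import fancyopts

    table = [
        ('v', 'verbose', None, 'enable additional output'),  # boolean flag
        ('r', 'rev', [], 'revisions'),                        # list option
        ('p', 'port', 8000, 'port to use'),                   # integer option
        ('n', 'name', '', 'name to show'),                    # string option
    ]
    state = {}
    rest = fancyopts.fancyopts(['-v', '--rev', 'tip', 'file1'], table, state)
    # state == {'verbose': True, 'rev': ['tip'], 'port': 8000, 'name': ''}
    # rest  == ['file1']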
@@ -1,312 +1,312 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from repo import *
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import errno, lock, os, shutil, util, extensions
14 14 import merge as _merge
15 15 import verify as _verify
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 def parseurl(url, revs):
22 22 '''parse url#branch, returning url, branch + revs'''
23 23
24 24 if '#' not in url:
25 25 return url, (revs or None), None
26 26
27 27 url, rev = url.split('#', 1)
28 28 return url, revs + [rev], rev
29 29
30 30 schemes = {
31 31 'bundle': bundlerepo,
32 32 'file': _local,
33 33 'http': httprepo,
34 34 'https': httprepo,
35 35 'ssh': sshrepo,
36 36 'static-http': statichttprepo,
37 37 }
38 38
39 39 def _lookup(path):
40 40 scheme = 'file'
41 41 if path:
42 42 c = path.find(':')
43 43 if c > 0:
44 44 scheme = path[:c]
45 45 thing = schemes.get(scheme) or schemes['file']
46 46 try:
47 47 return thing(path)
48 48 except TypeError:
49 49 return thing
50 50
51 51 def islocal(repo):
52 52 '''return true if repo or path is local'''
53 53 if isinstance(repo, str):
54 54 try:
55 55 return _lookup(repo).islocal(repo)
56 56 except AttributeError:
57 57 return False
58 58 return repo.local()
59 59
60 60 def repository(ui, path='', create=False):
61 61 """return a repository object for the specified path"""
62 62 repo = _lookup(path).instance(ui, path, create)
63 63 ui = getattr(repo, "ui", ui)
64 64 for name, module in extensions.extensions():
65 65 hook = getattr(module, 'reposetup', None)
66 66 if hook:
67 67 hook(ui, repo)
68 68 return repo
69 69
70 70 def defaultdest(source):
71 71 '''return default destination of clone if none is given'''
72 72 return os.path.basename(os.path.normpath(source))
73 73
74 74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
75 75 stream=False):
76 76 """Make a copy of an existing repository.
77 77
78 78 Create a copy of an existing repository in a new directory. The
79 79 source and destination are URLs, as passed to the repository
80 80 function. Returns a pair of repository objects, the source and
81 81 newly created destination.
82 82
83 83 The location of the source is added to the new repository's
84 84 .hg/hgrc file, as the default to be used for future pulls and
85 85 pushes.
86 86
87 87 If an exception is raised, the partly cloned/updated destination
88 88 repository will be deleted.
89 89
90 90 Arguments:
91 91
92 92 source: repository object or URL
93 93
94 94 dest: URL of destination repository to create (defaults to base
95 95 name of source repository)
96 96
97 97 pull: always pull from source repository, even in local case
98 98
99 99 stream: stream raw data uncompressed from repository (fast over
100 100 LAN, slow over WAN)
101 101
102 102 rev: revision to clone up to (implies pull=True)
103 103
104 104 update: update working directory after clone completes, if
105 105 destination is local repository
106 106 """
107 107
108 108 origsource = source
109 109 source, rev, checkout = parseurl(ui.expandpath(source), rev)
110 110
111 111 if isinstance(source, str):
112 112 src_repo = repository(ui, source)
113 113 else:
114 114 src_repo = source
115 115 source = src_repo.url()
116 116
117 117 if dest is None:
118 118 dest = defaultdest(source)
119 119 ui.status(_("destination directory: %s\n") % dest)
120 120
121 121 def localpath(path):
122 122 if path.startswith('file://'):
123 123 return path[7:]
124 124 if path.startswith('file:'):
125 125 return path[5:]
126 126 return path
127 127
128 128 dest = localpath(dest)
129 129 source = localpath(source)
130 130
131 131 if os.path.exists(dest):
132 132 raise util.Abort(_("destination '%s' already exists") % dest)
133 133
134 134 class DirCleanup(object):
135 135 def __init__(self, dir_):
136 136 self.rmtree = shutil.rmtree
137 137 self.dir_ = dir_
138 138 def close(self):
139 139 self.dir_ = None
140 140 def __del__(self):
141 141 if self.dir_:
142 142 self.rmtree(self.dir_, True)
143 143
144 144 src_lock = dest_lock = dir_cleanup = None
145 145 try:
146 146 if islocal(dest):
147 147 dir_cleanup = DirCleanup(dest)
148 148
149 149 abspath = origsource
150 150 copy = False
151 151 if src_repo.local() and islocal(dest):
152 152 abspath = os.path.abspath(util.drop_scheme('file', origsource))
153 153 copy = not pull and not rev
154 154
155 155 if copy:
156 156 try:
157 157 # we use a lock here because if we race with commit, we
158 158 # can end up with extra data in the cloned revlogs that's
159 159 # not pointed to by changesets, thus causing verify to
160 160 # fail
161 161 src_lock = src_repo.lock()
162 162 except lock.LockException:
163 163 copy = False
164 164
165 165 if copy:
166 166 def force_copy(src, dst):
167 167 try:
168 168 util.copyfiles(src, dst)
169 169 except OSError, inst:
170 170 if inst.errno != errno.ENOENT:
171 171 raise
172 172
173 173 src_store = os.path.realpath(src_repo.spath)
174 174 if not os.path.exists(dest):
175 175 os.mkdir(dest)
176 176 try:
177 177 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
178 178 os.mkdir(dest_path)
179 179 except OSError, inst:
180 180 if inst.errno == errno.EEXIST:
181 181 dir_cleanup.close()
182 182 raise util.Abort(_("destination '%s' already exists")
183 183 % dest)
184 184 raise
185 185 if src_repo.spath != src_repo.path:
186 186 # XXX racy
187 187 dummy_changelog = os.path.join(dest_path, "00changelog.i")
188 188 # copy the dummy changelog
189 189 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
190 190 dest_store = os.path.join(dest_path, "store")
191 191 os.mkdir(dest_store)
192 192 else:
193 193 dest_store = dest_path
194 194 # copy the requires file
195 195 force_copy(src_repo.join("requires"),
196 196 os.path.join(dest_path, "requires"))
197 197 # we lock here to avoid premature writing to the target
198 198 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
199 199
200 200 files = ("data",
201 201 "00manifest.d", "00manifest.i",
202 202 "00changelog.d", "00changelog.i")
203 203 for f in files:
204 204 src = os.path.join(src_store, f)
205 205 dst = os.path.join(dest_store, f)
206 206 force_copy(src, dst)
207 207
208 208 # we need to re-init the repo after manually copying the data
209 209 # into it
210 210 dest_repo = repository(ui, dest)
211 211
212 212 else:
213 213 try:
214 214 dest_repo = repository(ui, dest, create=True)
215 215 except OSError, inst:
216 216 if inst.errno == errno.EEXIST:
217 217 dir_cleanup.close()
218 218 raise util.Abort(_("destination '%s' already exists")
219 219 % dest)
220 220 raise
221 221
222 222 revs = None
223 223 if rev:
224 224 if 'lookup' not in src_repo.capabilities:
225 225 raise util.Abort(_("src repository does not support revision "
226 226 "lookup and so doesn't support clone by "
227 227 "revision"))
228 228 revs = [src_repo.lookup(r) for r in rev]
229 229
230 230 if dest_repo.local():
231 231 dest_repo.clone(src_repo, heads=revs, stream=stream)
232 232 elif src_repo.local():
233 233 src_repo.push(dest_repo, revs=revs)
234 234 else:
235 235 raise util.Abort(_("clone from remote to remote not supported"))
236 236
237 237 if dir_cleanup:
238 238 dir_cleanup.close()
239 239
240 240 if dest_repo.local():
241 241 fp = dest_repo.opener("hgrc", "w", text=True)
242 242 fp.write("[paths]\n")
243 243 fp.write("default = %s\n" % abspath)
244 244 fp.close()
245 245
246 246 if update:
247 247 if not checkout:
248 248 try:
249 249 checkout = dest_repo.lookup("default")
250 250 except:
251 251 checkout = dest_repo.changelog.tip()
252 252 _update(dest_repo, checkout)
253 253
254 254 return src_repo, dest_repo
255 255 finally:
256 256 del src_lock, dest_lock, dir_cleanup
257 257
258 258 def _showstats(repo, stats):
259 259 stats = ((stats[0], _("updated")),
260 260 (stats[1], _("merged")),
261 261 (stats[2], _("removed")),
262 262 (stats[3], _("unresolved")))
263 263 note = ", ".join([_("%d files %s") % s for s in stats])
264 264 repo.ui.status("%s\n" % note)
265 265
266 266 def _update(repo, node): return update(repo, node)
267 267
268 268 def update(repo, node):
269 269 """update the working directory to node, merging linear changes"""
270 270 pl = repo.parents()
271 271 stats = _merge.update(repo, node, False, False, None)
272 272 _showstats(repo, stats)
273 273 if stats[3]:
274 274 repo.ui.status(_("There are unresolved merges with"
275 275 " locally modified files.\n"))
276 276 if stats[1]:
277 277 repo.ui.status(_("You can finish the partial merge using:\n"))
278 278 else:
279 279 repo.ui.status(_("You can redo the full merge using:\n"))
280 280 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
281 281 repo.ui.status(_(" hg update %s\n hg update %s\n")
282 282 % (pl[0].rev(), repo.changectx(node).rev()))
283 return stats[3]
283 return stats[3] > 0
284 284
285 285 def clean(repo, node, show_stats=True):
286 286 """forcibly switch the working directory to node, clobbering changes"""
287 287 stats = _merge.update(repo, node, False, True, None)
288 288 if show_stats: _showstats(repo, stats)
289 return stats[3]
289 return stats[3] > 0
290 290
291 291 def merge(repo, node, force=None, remind=True):
292 292 """branch merge with node, resolving changes"""
293 293 stats = _merge.update(repo, node, True, force, False)
294 294 _showstats(repo, stats)
295 295 if stats[3]:
296 296 pl = repo.parents()
297 297 repo.ui.status(_("There are unresolved merges,"
298 298 " you can redo the full merge using:\n"
299 299 " hg update -C %s\n"
300 300 " hg merge %s\n")
301 301 % (pl[0].rev(), pl[1].rev()))
302 302 elif remind:
303 303 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
304 return stats[3]
304 return stats[3] > 0
305 305
306 306 def revert(repo, node, choose):
307 307 """revert changes to revision in node without updating dirstate"""
308 return _merge.update(repo, node, False, True, choose)[3]
308 return _merge.update(repo, node, False, True, choose)[3] > 0
309 309
310 310 def verify(repo):
311 311 """verify the consistency of a repository"""
312 312 return _verify.verify(repo)
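A hedged sketch of how a caller might use the return values changed above (update, clean, merge and revert now return a plain boolean, stats[3] > 0, rather than the raw unresolved count; the repository path here is hypothetical):

    from mercurial import ui, hg

    u = ui.ui()
    repo = hg.repository(u, path='.')     # assumes the cwd is a local repository
    node = repo.lookup('tip')
    unresolved = hg.update(repo, node)    # True if any files were left unresolved
    if unresolved:
        u.warn("update left unresolved files\n")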
@@ -1,87 +1,90 b''
1 1 # ignore.py - ignored file handling for mercurial
2 2 #
3 3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 import util
9 import util, re
10
11 _commentre = None
10 12
11 13 def _parselines(fp):
12 14 for line in fp:
13 if not line.endswith('\n'):
14 line += '\n'
15 escape = False
16 for i in xrange(len(line)):
17 if escape: escape = False
18 elif line[i] == '\\': escape = True
19 elif line[i] == '#': break
20 line = line[:i].rstrip()
15 if "#" in line:
16 global _commentre
17 if not _commentre:
18 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
19 # remove comments prefixed by an even number of escapes
20 line = _commentre.sub(r'\1', line)
21 # fixup properly escaped comments that survived the above
22 line = line.replace("\\#", "#")
23 line = line.rstrip()
21 24 if line:
22 25 yield line
23 26
24 27 def ignore(root, files, warn):
25 28 '''return the contents of .hgignore files as a list of patterns.
26 29
27 30 the files parsed for patterns include:
28 31 .hgignore in the repository root
29 32 any additional files specified in the [ui] section of ~/.hgrc
30 33
31 34 trailing white space is dropped.
32 35 the escape character is backslash.
33 36 comments start with #.
34 37 empty lines are skipped.
35 38
36 39 lines can be of the following formats:
37 40
38 41 syntax: regexp # defaults following lines to non-rooted regexps
39 42 syntax: glob # defaults following lines to non-rooted globs
40 43 re:pattern # non-rooted regular expression
41 44 glob:pattern # non-rooted glob
42 45 pattern # pattern of the current default type'''
43 46
44 47 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
45 48 pats = {}
46 49 for f in files:
47 50 try:
48 51 pats[f] = []
49 52 fp = open(f)
50 53 syntax = 'relre:'
51 54 for line in _parselines(fp):
52 55 if line.startswith('syntax:'):
53 56 s = line[7:].strip()
54 57 try:
55 58 syntax = syntaxes[s]
56 59 except KeyError:
57 60 warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
58 61 continue
59 62 pat = syntax + line
60 63 for s, rels in syntaxes.items():
61 64 if line.startswith(rels):
62 65 pat = line
63 66 break
64 67 elif line.startswith(s+':'):
65 68 pat = rels + line[len(s)+1:]
66 69 break
67 70 pats[f].append(pat)
68 71 except IOError, inst:
69 72 if f != files[0]:
70 73 warn(_("skipping unreadable ignore file '%s': %s\n") %
71 74 (f, inst.strerror))
72 75
73 76 allpats = []
74 77 [allpats.extend(patlist) for patlist in pats.values()]
75 78 if not allpats:
76 79 return util.never
77 80
78 81 try:
79 82 files, ignorefunc, anypats = (
80 83 util.matcher(root, inc=allpats, src='.hgignore'))
81 84 except util.Abort:
82 85 # Re-raise an exception where the src is the right file
83 86 for f, patlist in pats.items():
84 87 files, ignorefunc, anypats = (
85 88 util.matcher(root, inc=patlist, src=f))
86 89
87 90 return ignorefunc
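The regex-based comment stripping introduced above removes a '#' comment only when it is preceded by an even number of backslashes, then un-escapes any surviving '\#'. A small sketch with made-up .hgignore lines:

    import re

    _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')

    line = 'foo.pyc  # compiled files'
    line = _commentre.sub(r'\1', line).replace("\\#", "#").rstrip()
    # line == 'foo.pyc'

    line = r'literal\#hash'               # escaped '#' is not a comment
    line = _commentre.sub(r'\1', line).replace("\\#", "#").rstrip()
    # line == 'literal#hash'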
@@ -1,1996 +1,2001 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __init__(self, parentui, path=None, create=0):
20 20 repo.repository.__init__(self)
21 21 self.root = os.path.realpath(path)
22 22 self.path = os.path.join(self.root, ".hg")
23 23 self.origroot = path
24 24 self.opener = util.opener(self.path)
25 25 self.wopener = util.opener(self.root)
26 26
27 27 if not os.path.isdir(self.path):
28 28 if create:
29 29 if not os.path.exists(path):
30 30 os.mkdir(path)
31 31 os.mkdir(self.path)
32 32 requirements = ["revlogv1"]
33 33 if parentui.configbool('format', 'usestore', True):
34 34 os.mkdir(os.path.join(self.path, "store"))
35 35 requirements.append("store")
36 36 # create an invalid changelog
37 37 self.opener("00changelog.i", "a").write(
38 38 '\0\0\0\2' # represents revlogv2
39 39 ' dummy changelog to prevent using the old repo layout'
40 40 )
41 41 reqfile = self.opener("requires", "w")
42 42 for r in requirements:
43 43 reqfile.write("%s\n" % r)
44 44 reqfile.close()
45 45 else:
46 46 raise repo.RepoError(_("repository %s not found") % path)
47 47 elif create:
48 48 raise repo.RepoError(_("repository %s already exists") % path)
49 49 else:
50 50 # find requirements
51 51 try:
52 52 requirements = self.opener("requires").read().splitlines()
53 53 except IOError, inst:
54 54 if inst.errno != errno.ENOENT:
55 55 raise
56 56 requirements = []
57 57 # check them
58 58 for r in requirements:
59 59 if r not in self.supported:
60 60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61 61
62 62 # setup store
63 63 if "store" in requirements:
64 64 self.encodefn = util.encodefilename
65 65 self.decodefn = util.decodefilename
66 66 self.spath = os.path.join(self.path, "store")
67 67 else:
68 68 self.encodefn = lambda x: x
69 69 self.decodefn = lambda x: x
70 70 self.spath = self.path
71 71 self.sopener = util.encodedopener(util.opener(self.spath),
72 72 self.encodefn)
73 73
74 74 self.ui = ui.ui(parentui=parentui)
75 75 try:
76 76 self.ui.readconfig(self.join("hgrc"), self.root)
77 77 extensions.loadall(self.ui)
78 78 except IOError:
79 79 pass
80 80
81 81 self.tagscache = None
82 82 self.branchcache = None
83 83 self.nodetagscache = None
84 84 self.filterpats = {}
85 85 self._transref = self._lockref = self._wlockref = None
86 86
87 87 def __getattr__(self, name):
88 88 if name == 'changelog':
89 89 self.changelog = changelog.changelog(self.sopener)
90 90 self.sopener.defversion = self.changelog.version
91 91 return self.changelog
92 92 if name == 'manifest':
93 93 self.changelog
94 94 self.manifest = manifest.manifest(self.sopener)
95 95 return self.manifest
96 96 if name == 'dirstate':
97 97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 98 return self.dirstate
99 99 else:
100 100 raise AttributeError, name
101 101
102 102 def url(self):
103 103 return 'file:' + self.root
104 104
105 105 def hook(self, name, throw=False, **args):
106 106 return hook.hook(self.ui, self, name, throw, **args)
107 107
108 108 tag_disallowed = ':\r\n'
109 109
110 110 def _tag(self, name, node, message, local, user, date, parent=None,
111 111 extra={}):
112 112 use_dirstate = parent is None
113 113
114 114 for c in self.tag_disallowed:
115 115 if c in name:
116 116 raise util.Abort(_('%r cannot be used in a tag name') % c)
117 117
118 118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119 119
120 120 def writetag(fp, name, munge, prevtags):
121 121 if prevtags and prevtags[-1] != '\n':
122 122 fp.write('\n')
123 123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 124 fp.close()
125 125 self.hook('tag', node=hex(node), tag=name, local=local)
126 126
127 127 prevtags = ''
128 128 if local:
129 129 try:
130 130 fp = self.opener('localtags', 'r+')
131 131 except IOError, err:
132 132 fp = self.opener('localtags', 'a')
133 133 else:
134 134 prevtags = fp.read()
135 135
136 136 # local tags are stored in the current charset
137 137 writetag(fp, name, None, prevtags)
138 138 return
139 139
140 140 if use_dirstate:
141 141 try:
142 142 fp = self.wfile('.hgtags', 'rb+')
143 143 except IOError, err:
144 144 fp = self.wfile('.hgtags', 'ab')
145 145 else:
146 146 prevtags = fp.read()
147 147 else:
148 148 try:
149 149 prevtags = self.filectx('.hgtags', parent).data()
150 150 except revlog.LookupError:
151 151 pass
152 152 fp = self.wfile('.hgtags', 'wb')
153 153 if prevtags:
154 154 fp.write(prevtags)
155 155
156 156 # committed tags are stored in UTF-8
157 157 writetag(fp, name, util.fromlocal, prevtags)
158 158
159 159 if use_dirstate and '.hgtags' not in self.dirstate:
160 160 self.add(['.hgtags'])
161 161
162 162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 163 extra=extra)
164 164
165 165 self.hook('tag', node=hex(node), tag=name, local=local)
166 166
167 167 return tagnode
168 168
169 169 def tag(self, name, node, message, local, user, date):
170 170 '''tag a revision with a symbolic name.
171 171
172 172 if local is True, the tag is stored in a per-repository file.
173 173 otherwise, it is stored in the .hgtags file, and a new
174 174 changeset is committed with the change.
175 175
176 176 keyword arguments:
177 177
178 178 local: whether to store tag in non-version-controlled file
179 179 (default False)
180 180
181 181 message: commit message to use if committing
182 182
183 183 user: name of user to use if committing
184 184
185 185 date: date tuple to use if committing'''
186 186
187 187 for x in self.status()[:5]:
188 188 if '.hgtags' in x:
189 189 raise util.Abort(_('working copy of .hgtags is changed '
190 190 '(please commit .hgtags manually)'))
191 191
192 192
193 193 self._tag(name, node, message, local, user, date)
194 194
195 195 def tags(self):
196 196 '''return a mapping of tag to node'''
197 197 if self.tagscache:
198 198 return self.tagscache
199 199
200 200 globaltags = {}
201 201
202 202 def readtags(lines, fn):
203 203 filetags = {}
204 204 count = 0
205 205
206 206 def warn(msg):
207 207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208 208
209 209 for l in lines:
210 210 count += 1
211 211 if not l:
212 212 continue
213 213 s = l.split(" ", 1)
214 214 if len(s) != 2:
215 215 warn(_("cannot parse entry"))
216 216 continue
217 217 node, key = s
218 218 key = util.tolocal(key.strip()) # stored in UTF-8
219 219 try:
220 220 bin_n = bin(node)
221 221 except TypeError:
222 222 warn(_("node '%s' is not well formed") % node)
223 223 continue
224 224 if bin_n not in self.changelog.nodemap:
225 225 warn(_("tag '%s' refers to unknown node") % key)
226 226 continue
227 227
228 228 h = []
229 229 if key in filetags:
230 230 n, h = filetags[key]
231 231 h.append(n)
232 232 filetags[key] = (bin_n, h)
233 233
234 234 for k, nh in filetags.items():
235 235 if k not in globaltags:
236 236 globaltags[k] = nh
237 237 continue
238 238 # we prefer the global tag if:
239 239 # it supersedes us OR
240 240 # mutual supersedes and it has a higher rank
241 241 # otherwise we win because we're tip-most
242 242 an, ah = nh
243 243 bn, bh = globaltags[k]
244 244 if (bn != an and an in bh and
245 245 (bn not in ah or len(bh) > len(ah))):
246 246 an = bn
247 247 ah.extend([n for n in bh if n not in ah])
248 248 globaltags[k] = an, ah
249 249
250 250 # read the tags file from each head, ending with the tip
251 251 f = None
252 252 for rev, node, fnode in self._hgtagsnodes():
253 253 f = (f and f.filectx(fnode) or
254 254 self.filectx('.hgtags', fileid=fnode))
255 255 readtags(f.data().splitlines(), f)
256 256
257 257 try:
258 258 data = util.fromlocal(self.opener("localtags").read())
259 259 # localtags are stored in the local character set
260 260 # while the internal tag table is stored in UTF-8
261 261 readtags(data.splitlines(), "localtags")
262 262 except IOError:
263 263 pass
264 264
265 265 self.tagscache = {}
266 266 for k,nh in globaltags.items():
267 267 n = nh[0]
268 268 if n != nullid:
269 269 self.tagscache[k] = n
270 270 self.tagscache['tip'] = self.changelog.tip()
271 271
272 272 return self.tagscache
273 273
274 274 def _hgtagsnodes(self):
275 275 heads = self.heads()
276 276 heads.reverse()
277 277 last = {}
278 278 ret = []
279 279 for node in heads:
280 280 c = self.changectx(node)
281 281 rev = c.rev()
282 282 try:
283 283 fnode = c.filenode('.hgtags')
284 284 except revlog.LookupError:
285 285 continue
286 286 ret.append((rev, node, fnode))
287 287 if fnode in last:
288 288 ret[last[fnode]] = None
289 289 last[fnode] = len(ret) - 1
290 290 return [item for item in ret if item]
291 291
292 292 def tagslist(self):
293 293 '''return a list of tags ordered by revision'''
294 294 l = []
295 295 for t, n in self.tags().items():
296 296 try:
297 297 r = self.changelog.rev(n)
298 298 except:
299 299 r = -2 # sort to the beginning of the list if unknown
300 300 l.append((r, t, n))
301 301 l.sort()
302 302 return [(t, n) for r, t, n in l]
303 303
304 304 def nodetags(self, node):
305 305 '''return the tags associated with a node'''
306 306 if not self.nodetagscache:
307 307 self.nodetagscache = {}
308 308 for t, n in self.tags().items():
309 309 self.nodetagscache.setdefault(n, []).append(t)
310 310 return self.nodetagscache.get(node, [])
311 311
312 312 def _branchtags(self):
313 313 partial, last, lrev = self._readbranchcache()
314 314
315 315 tiprev = self.changelog.count() - 1
316 316 if lrev != tiprev:
317 317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319 319
320 320 return partial
321 321
322 322 def branchtags(self):
323 323 if self.branchcache is not None:
324 324 return self.branchcache
325 325
326 326 self.branchcache = {} # avoid recursion in changectx
327 327 partial = self._branchtags()
328 328
329 329 # the branch cache is stored on disk as UTF-8, but in the local
330 330 # charset internally
331 331 for k, v in partial.items():
332 332 self.branchcache[util.tolocal(k)] = v
333 333 return self.branchcache
334 334
335 335 def _readbranchcache(self):
336 336 partial = {}
337 337 try:
338 338 f = self.opener("branch.cache")
339 339 lines = f.read().split('\n')
340 340 f.close()
341 341 except (IOError, OSError):
342 342 return {}, nullid, nullrev
343 343
344 344 try:
345 345 last, lrev = lines.pop(0).split(" ", 1)
346 346 last, lrev = bin(last), int(lrev)
347 347 if not (lrev < self.changelog.count() and
348 348 self.changelog.node(lrev) == last): # sanity check
349 349 # invalidate the cache
350 350 raise ValueError('Invalid branch cache: unknown tip')
351 351 for l in lines:
352 352 if not l: continue
353 353 node, label = l.split(" ", 1)
354 354 partial[label.strip()] = bin(node)
355 355 except (KeyboardInterrupt, util.SignalInterrupt):
356 356 raise
357 357 except Exception, inst:
358 358 if self.ui.debugflag:
359 359 self.ui.warn(str(inst), '\n')
360 360 partial, last, lrev = {}, nullid, nullrev
361 361 return partial, last, lrev
362 362
363 363 def _writebranchcache(self, branches, tip, tiprev):
364 364 try:
365 365 f = self.opener("branch.cache", "w", atomictemp=True)
366 366 f.write("%s %s\n" % (hex(tip), tiprev))
367 367 for label, node in branches.iteritems():
368 368 f.write("%s %s\n" % (hex(node), label))
369 369 f.rename()
370 370 except (IOError, OSError):
371 371 pass
372 372
373 373 def _updatebranchcache(self, partial, start, end):
374 374 for r in xrange(start, end):
375 375 c = self.changectx(r)
376 376 b = c.branch()
377 377 partial[b] = c.node()
378 378
379 379 def lookup(self, key):
380 380 if key == '.':
381 381 key, second = self.dirstate.parents()
382 382 if key == nullid:
383 383 raise repo.RepoError(_("no revision checked out"))
384 384 if second != nullid:
385 385 self.ui.warn(_("warning: working directory has two parents, "
386 386 "tag '.' uses the first\n"))
387 387 elif key == 'null':
388 388 return nullid
389 389 n = self.changelog._match(key)
390 390 if n:
391 391 return n
392 392 if key in self.tags():
393 393 return self.tags()[key]
394 394 if key in self.branchtags():
395 395 return self.branchtags()[key]
396 396 n = self.changelog._partialmatch(key)
397 397 if n:
398 398 return n
399 399 try:
400 400 if len(key) == 20:
401 401 key = hex(key)
402 402 except:
403 403 pass
404 404 raise repo.RepoError(_("unknown revision '%s'") % key)
405 405
406 406 def dev(self):
407 407 return os.lstat(self.path).st_dev
408 408
409 409 def local(self):
410 410 return True
411 411
412 412 def join(self, f):
413 413 return os.path.join(self.path, f)
414 414
415 415 def sjoin(self, f):
416 416 f = self.encodefn(f)
417 417 return os.path.join(self.spath, f)
418 418
419 419 def wjoin(self, f):
420 420 return os.path.join(self.root, f)
421 421
422 422 def file(self, f):
423 423 if f[0] == '/':
424 424 f = f[1:]
425 425 return filelog.filelog(self.sopener, f)
426 426
427 427 def changectx(self, changeid=None):
428 428 return context.changectx(self, changeid)
429 429
430 430 def workingctx(self):
431 431 return context.workingctx(self)
432 432
433 433 def parents(self, changeid=None):
434 434 '''
435 435 get list of changectxs for parents of changeid or working directory
436 436 '''
437 437 if changeid is None:
438 438 pl = self.dirstate.parents()
439 439 else:
440 440 n = self.changelog.lookup(changeid)
441 441 pl = self.changelog.parents(n)
442 442 if pl[1] == nullid:
443 443 return [self.changectx(pl[0])]
444 444 return [self.changectx(pl[0]), self.changectx(pl[1])]
445 445
446 446 def filectx(self, path, changeid=None, fileid=None):
447 447 """changeid can be a changeset revision, node, or tag.
448 448 fileid can be a file revision or node."""
449 449 return context.filectx(self, path, changeid, fileid)
450 450
451 451 def getcwd(self):
452 452 return self.dirstate.getcwd()
453 453
454 454 def pathto(self, f, cwd=None):
455 455 return self.dirstate.pathto(f, cwd)
456 456
457 457 def wfile(self, f, mode='r'):
458 458 return self.wopener(f, mode)
459 459
460 460 def _link(self, f):
461 461 return os.path.islink(self.wjoin(f))
462 462
463 463 def _filter(self, filter, filename, data):
464 464 if filter not in self.filterpats:
465 465 l = []
466 466 for pat, cmd in self.ui.configitems(filter):
467 467 mf = util.matcher(self.root, "", [pat], [], [])[1]
468 468 l.append((mf, cmd))
469 469 self.filterpats[filter] = l
470 470
471 471 for mf, cmd in self.filterpats[filter]:
472 472 if mf(filename):
473 473 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
474 474 data = util.filter(data, cmd)
475 475 break
476 476
477 477 return data
478 478
479 479 def wread(self, filename):
480 480 if self._link(filename):
481 481 data = os.readlink(self.wjoin(filename))
482 482 else:
483 483 data = self.wopener(filename, 'r').read()
484 484 return self._filter("encode", filename, data)
485 485
486 486 def wwrite(self, filename, data, flags):
487 487 data = self._filter("decode", filename, data)
488 488 if "l" in flags:
489 489 self.wopener.symlink(data, filename)
490 490 else:
491 491 try:
492 492 if self._link(filename):
493 493 os.unlink(self.wjoin(filename))
494 494 except OSError:
495 495 pass
496 496 self.wopener(filename, 'w').write(data)
497 497 util.set_exec(self.wjoin(filename), "x" in flags)
498 498
499 499 def wwritedata(self, filename, data):
500 500 return self._filter("decode", filename, data)
501 501
502 502 def transaction(self):
503 503 if self._transref and self._transref():
504 504 return self._transref().nest()
505 505
506 506 # save dirstate for rollback
507 507 try:
508 508 ds = self.opener("dirstate").read()
509 509 except IOError:
510 510 ds = ""
511 511 self.opener("journal.dirstate", "w").write(ds)
512 512
513 513 renames = [(self.sjoin("journal"), self.sjoin("undo")),
514 514 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
515 515 tr = transaction.transaction(self.ui.warn, self.sopener,
516 516 self.sjoin("journal"),
517 517 aftertrans(renames))
518 518 self._transref = weakref.ref(tr)
519 519 return tr
520 520
521 521 def recover(self):
522 522 l = self.lock()
523 523 try:
524 524 if os.path.exists(self.sjoin("journal")):
525 525 self.ui.status(_("rolling back interrupted transaction\n"))
526 526 transaction.rollback(self.sopener, self.sjoin("journal"))
527 527 self.invalidate()
528 528 return True
529 529 else:
530 530 self.ui.warn(_("no interrupted transaction available\n"))
531 531 return False
532 532 finally:
533 533 del l
534 534
535 535 def rollback(self):
536 536 wlock = lock = None
537 537 try:
538 538 wlock = self.wlock()
539 539 lock = self.lock()
540 540 if os.path.exists(self.sjoin("undo")):
541 541 self.ui.status(_("rolling back last transaction\n"))
542 542 transaction.rollback(self.sopener, self.sjoin("undo"))
543 543 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
544 544 self.invalidate()
545 545 self.dirstate.invalidate()
546 546 else:
547 547 self.ui.warn(_("no rollback information available\n"))
548 548 finally:
549 549 del lock, wlock
550 550
551 551 def invalidate(self):
552 552 for a in "changelog manifest".split():
553 553 if hasattr(self, a):
554 554 self.__delattr__(a)
555 555 self.tagscache = None
556 556 self.nodetagscache = None
557 557
558 558 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
559 559 try:
560 560 l = lock.lock(lockname, 0, releasefn, desc=desc)
561 561 except lock.LockHeld, inst:
562 562 if not wait:
563 563 raise
564 564 self.ui.warn(_("waiting for lock on %s held by %r\n") %
565 565 (desc, inst.locker))
566 566 # default to 600 seconds timeout
567 567 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
568 568 releasefn, desc=desc)
569 569 if acquirefn:
570 570 acquirefn()
571 571 return l
572 572
573 573 def lock(self, wait=True):
574 574 if self._lockref and self._lockref():
575 575 return self._lockref()
576 576
577 577 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
578 578 _('repository %s') % self.origroot)
579 579 self._lockref = weakref.ref(l)
580 580 return l
581 581
582 582 def wlock(self, wait=True):
583 583 if self._wlockref and self._wlockref():
584 584 return self._wlockref()
585 585
586 586 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
587 587 self.dirstate.invalidate, _('working directory of %s') %
588 588 self.origroot)
589 589 self._wlockref = weakref.ref(l)
590 590 return l
591 591
592 592 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
593 593 """
594 594 commit an individual file as part of a larger transaction
595 595 """
596 596
597 597 t = self.wread(fn)
598 598 fl = self.file(fn)
599 599 fp1 = manifest1.get(fn, nullid)
600 600 fp2 = manifest2.get(fn, nullid)
601 601
602 602 meta = {}
603 603 cp = self.dirstate.copied(fn)
604 604 if cp:
605 605 # Mark the new revision of this file as a copy of another
606 606 # file. This copy data will effectively act as a parent
607 607 # of this new revision. If this is a merge, the first
608 608 # parent will be the nullid (meaning "look up the copy data")
609 609 # and the second one will be the other parent. For example:
610 610 #
611 611 # 0 --- 1 --- 3 rev1 changes file foo
612 612 # \ / rev2 renames foo to bar and changes it
613 613 # \- 2 -/ rev3 should have bar with all changes and
614 614 # should record that bar descends from
615 615 # bar in rev2 and foo in rev1
616 616 #
617 617 # this allows this merge to succeed:
618 618 #
619 619 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
620 620 # \ / merging rev3 and rev4 should use bar@rev2
621 621 # \- 2 --- 4 as the merge base
622 622 #
623 623 meta["copy"] = cp
624 624 if not manifest2: # not a branch merge
625 625 meta["copyrev"] = hex(manifest1.get(cp, nullid))
626 626 fp2 = nullid
627 627 elif fp2 != nullid: # copied on remote side
628 628 meta["copyrev"] = hex(manifest1.get(cp, nullid))
629 629 elif fp1 != nullid: # copied on local side, reversed
630 630 meta["copyrev"] = hex(manifest2.get(cp))
631 631 fp2 = fp1
632 632 elif cp in manifest2: # directory rename on local side
633 633 meta["copyrev"] = hex(manifest2[cp])
634 634 else: # directory rename on remote side
635 635 meta["copyrev"] = hex(manifest1.get(cp, nullid))
636 636 self.ui.debug(_(" %s: copy %s:%s\n") %
637 637 (fn, cp, meta["copyrev"]))
638 638 fp1 = nullid
639 639 elif fp2 != nullid:
640 640 # is one parent an ancestor of the other?
641 641 fpa = fl.ancestor(fp1, fp2)
642 642 if fpa == fp1:
643 643 fp1, fp2 = fp2, nullid
644 644 elif fpa == fp2:
645 645 fp2 = nullid
646 646
647 647 # is the file unmodified from the parent? report existing entry
648 648 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
649 649 return fp1
650 650
651 651 changelist.append(fn)
652 652 return fl.add(t, meta, tr, linkrev, fp1, fp2)
653 653
654 654 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
655 655 if p1 is None:
656 656 p1, p2 = self.dirstate.parents()
657 657 return self.commit(files=files, text=text, user=user, date=date,
658 658 p1=p1, p2=p2, extra=extra, empty_ok=True)
659 659
660 660 def commit(self, files=None, text="", user=None, date=None,
661 661 match=util.always, force=False, force_editor=False,
662 662 p1=None, p2=None, extra={}, empty_ok=False):
663 663 wlock = lock = tr = None
664 valid = 0 # don't save the dirstate if this isn't set
664 665 try:
665 666 commit = []
666 667 remove = []
667 668 changed = []
668 669 use_dirstate = (p1 is None) # not rawcommit
669 670 extra = extra.copy()
670 671
671 672 if use_dirstate:
672 673 if files:
673 674 for f in files:
674 675 s = self.dirstate[f]
675 676 if s in 'nma':
676 677 commit.append(f)
677 678 elif s == 'r':
678 679 remove.append(f)
679 680 else:
680 681 self.ui.warn(_("%s not tracked!\n") % f)
681 682 else:
682 683 changes = self.status(match=match)[:5]
683 684 modified, added, removed, deleted, unknown = changes
684 685 commit = modified + added
685 686 remove = removed
686 687 else:
687 688 commit = files
688 689
689 690 if use_dirstate:
690 691 p1, p2 = self.dirstate.parents()
691 692 update_dirstate = True
692 693 else:
693 694 p1, p2 = p1, p2 or nullid
694 695 update_dirstate = (self.dirstate.parents()[0] == p1)
695 696
696 697 c1 = self.changelog.read(p1)
697 698 c2 = self.changelog.read(p2)
698 699 m1 = self.manifest.read(c1[0]).copy()
699 700 m2 = self.manifest.read(c2[0])
700 701
701 702 if use_dirstate:
702 703 branchname = self.workingctx().branch()
703 704 try:
704 705 branchname = branchname.decode('UTF-8').encode('UTF-8')
705 706 except UnicodeDecodeError:
706 707 raise util.Abort(_('branch name not in UTF-8!'))
707 708 else:
708 709 branchname = ""
709 710
710 711 if use_dirstate:
711 712 oldname = c1[5].get("branch") # stored in UTF-8
712 713 if (not commit and not remove and not force and p2 == nullid
713 714 and branchname == oldname):
714 715 self.ui.status(_("nothing changed\n"))
715 716 return None
716 717
717 718 xp1 = hex(p1)
718 719 if p2 == nullid: xp2 = ''
719 720 else: xp2 = hex(p2)
720 721
721 722 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
722 723
723 724 wlock = self.wlock()
724 725 lock = self.lock()
725 726 tr = self.transaction()
726 727 trp = weakref.proxy(tr)
727 728
728 729 # check in files
729 730 new = {}
730 731 linkrev = self.changelog.count()
731 732 commit.sort()
732 733 is_exec = util.execfunc(self.root, m1.execf)
733 734 is_link = util.linkfunc(self.root, m1.linkf)
734 735 for f in commit:
735 736 self.ui.note(f + "\n")
736 737 try:
737 738 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
738 739 new_exec = is_exec(f)
739 740 new_link = is_link(f)
740 741 if ((not changed or changed[-1] != f) and
741 742 m2.get(f) != new[f]):
742 743 # mention the file in the changelog if some
743 744 # flag changed, even if there was no content
744 745 # change.
745 746 old_exec = m1.execf(f)
746 747 old_link = m1.linkf(f)
747 748 if old_exec != new_exec or old_link != new_link:
748 749 changed.append(f)
749 750 m1.set(f, new_exec, new_link)
751 if use_dirstate:
752 self.dirstate.normal(f)
753
750 754 except (OSError, IOError):
751 755 if use_dirstate:
752 756 self.ui.warn(_("trouble committing %s!\n") % f)
753 757 raise
754 758 else:
755 759 remove.append(f)
756 760
757 761 # update manifest
758 762 m1.update(new)
759 763 remove.sort()
760 764 removed = []
761 765
762 766 for f in remove:
763 767 if f in m1:
764 768 del m1[f]
765 769 removed.append(f)
766 770 elif f in m2:
767 771 removed.append(f)
768 772 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
769 773 (new, removed))
770 774
771 775 # add changeset
772 776 new = new.keys()
773 777 new.sort()
774 778
775 779 user = user or self.ui.username()
776 780 if (not empty_ok and not text) or force_editor:
777 781 edittext = []
778 782 if text:
779 783 edittext.append(text)
780 784 edittext.append("")
781 785 edittext.append("HG: user: %s" % user)
782 786 if p2 != nullid:
783 787 edittext.append("HG: branch merge")
784 788 if branchname:
785 789 edittext.append("HG: branch %s" % util.tolocal(branchname))
786 790 edittext.extend(["HG: changed %s" % f for f in changed])
787 791 edittext.extend(["HG: removed %s" % f for f in removed])
788 792 if not changed and not remove:
789 793 edittext.append("HG: no files changed")
790 794 edittext.append("")
791 795 # run editor in the repository root
792 796 olddir = os.getcwd()
793 797 os.chdir(self.root)
794 798 text = self.ui.edit("\n".join(edittext), user)
795 799 os.chdir(olddir)
796 800
797 801 if branchname:
798 802 extra["branch"] = branchname
799 803
800 804 if use_dirstate:
801 805 lines = [line.rstrip() for line in text.rstrip().splitlines()]
802 806 while lines and not lines[0]:
803 807 del lines[0]
804 808 if not lines:
805 809 return None
806 810 text = '\n'.join(lines)
807 811
808 812 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
809 813 user, date, extra)
810 814 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
811 815 parent2=xp2)
812 816 tr.close()
813 817
814 818 if self.branchcache and "branch" in extra:
815 819 self.branchcache[util.tolocal(extra["branch"])] = n
816 820
817 821 if use_dirstate or update_dirstate:
818 822 self.dirstate.setparents(n)
819 823 if use_dirstate:
820 for f in new:
821 self.dirstate.normal(f)
822 824 for f in removed:
823 825 self.dirstate.forget(f)
826 valid = 1 # our dirstate updates are complete
824 827
825 828 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
826 829 return n
827 830 finally:
831 if not valid: # don't save our updated dirstate
832 self.dirstate.invalidate()
828 833 del tr, lock, wlock
829 834
830 835 def walk(self, node=None, files=[], match=util.always, badmatch=None):
831 836 '''
832 837 walk recursively through the directory tree or a given
833 838 changeset, finding all files matched by the match
834 839 function
835 840
836 841 results are yielded in a tuple (src, filename), where src
837 842 is one of:
838 843 'f' the file was found in the directory tree
839 844 'm' the file was only in the dirstate and not in the tree
840 845 'b' file was not found and matched badmatch
841 846 '''
842 847
843 848 if node:
844 849 fdict = dict.fromkeys(files)
845 850 # for dirstate.walk, files=['.'] means "walk the whole tree".
846 851 # follow that here, too
847 852 fdict.pop('.', None)
848 853 mdict = self.manifest.read(self.changelog.read(node)[0])
849 854 mfiles = mdict.keys()
850 855 mfiles.sort()
851 856 for fn in mfiles:
852 857 for ffn in fdict:
853 858 # match if the file is the exact name or a directory
854 859 if ffn == fn or fn.startswith("%s/" % ffn):
855 860 del fdict[ffn]
856 861 break
857 862 if match(fn):
858 863 yield 'm', fn
859 864 ffiles = fdict.keys()
860 865 ffiles.sort()
861 866 for fn in ffiles:
862 867 if badmatch and badmatch(fn):
863 868 if match(fn):
864 869 yield 'b', fn
865 870 else:
866 871 self.ui.warn(_('%s: No such file in rev %s\n')
867 872 % (self.pathto(fn), short(node)))
868 873 else:
869 874 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
870 875 yield src, fn
871 876
872 877 def status(self, node1=None, node2=None, files=[], match=util.always,
873 878 list_ignored=False, list_clean=False):
874 879 """return status of files between two nodes or node and working directory
875 880
876 881 If node1 is None, use the first dirstate parent instead.
877 882 If node2 is None, compare node1 with working directory.
878 883 """
879 884
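# fcmp reports whether the working copy of fn differs from the revision
# returned by getnode (a small helper used by the comparisons below)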
880 885 def fcmp(fn, getnode):
881 886 t1 = self.wread(fn)
882 887 return self.file(fn).cmp(getnode(fn), t1)
883 888
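# mfmatches returns the manifest of the given changeset, restricted to
# the files accepted by the match function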
884 889 def mfmatches(node):
885 890 change = self.changelog.read(node)
886 891 mf = self.manifest.read(change[0]).copy()
887 892 for fn in mf.keys():
888 893 if not match(fn):
889 894 del mf[fn]
890 895 return mf
891 896
892 897 modified, added, removed, deleted, unknown = [], [], [], [], []
893 898 ignored, clean = [], []
894 899
895 900 compareworking = False
896 901 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
897 902 compareworking = True
898 903
899 904 if not compareworking:
900 905 # read the manifest from node1 before the manifest from node2,
901 906 # so that we'll hit the manifest cache if we're going through
902 907 # all the revisions in parent->child order.
903 908 mf1 = mfmatches(node1)
904 909
905 910 # are we comparing the working directory?
906 911 if not node2:
907 912 (lookup, modified, added, removed, deleted, unknown,
908 913 ignored, clean) = self.dirstate.status(files, match,
909 914 list_ignored, list_clean)
910 915
911 916 # are we comparing working dir against its parent?
912 917 if compareworking:
913 918 if lookup:
914 919 fixup = []
915 920 # do a full compare of any files that might have changed
916 921 ctx = self.changectx()
917 922 for f in lookup:
918 923 if f not in ctx or ctx[f].cmp(self.wread(f)):
919 924 modified.append(f)
920 925 else:
921 926 fixup.append(f)
922 927 if list_clean:
923 928 clean.append(f)
924 929
925 930 # update dirstate for files that are actually clean
926 931 if fixup:
927 932 wlock = None
928 933 try:
929 934 try:
930 935 wlock = self.wlock(False)
931 936 except lock.LockException:
932 937 pass
933 938 if wlock:
934 939 for f in fixup:
935 940 self.dirstate.normal(f)
936 941 finally:
937 942 del wlock
938 943 else:
939 944 # we are comparing working dir against non-parent
940 945 # generate a pseudo-manifest for the working dir
941 946 # XXX: create it in dirstate.py ?
942 947 mf2 = mfmatches(self.dirstate.parents()[0])
943 948 is_exec = util.execfunc(self.root, mf2.execf)
944 949 is_link = util.linkfunc(self.root, mf2.linkf)
945 950 for f in lookup + modified + added:
946 951 mf2[f] = ""
947 952 mf2.set(f, is_exec(f), is_link(f))
948 953 for f in removed:
949 954 if f in mf2:
950 955 del mf2[f]
951 956
952 957 else:
953 958 # we are comparing two revisions
954 959 mf2 = mfmatches(node2)
955 960
956 961 if not compareworking:
957 962 # flush lists from dirstate before comparing manifests
958 963 modified, added, clean = [], [], []
959 964
960 965 # make sure to sort the files so we talk to the disk in a
961 966 # reasonable order
962 967 mf2keys = mf2.keys()
963 968 mf2keys.sort()
964 969 getnode = lambda fn: mf1.get(fn, nullid)
965 970 for fn in mf2keys:
966 971 if mf1.has_key(fn):
967 972 if (mf1.flags(fn) != mf2.flags(fn) or
968 973 (mf1[fn] != mf2[fn] and
969 974 (mf2[fn] != "" or fcmp(fn, getnode)))):
970 975 modified.append(fn)
971 976 elif list_clean:
972 977 clean.append(fn)
973 978 del mf1[fn]
974 979 else:
975 980 added.append(fn)
976 981
977 982 removed = mf1.keys()
978 983
979 984 # sort and return results:
980 985 for l in modified, added, removed, deleted, unknown, ignored, clean:
981 986 l.sort()
982 987 return (modified, added, removed, deleted, unknown, ignored, clean)
983 988
984 989 def add(self, list):
985 990 wlock = self.wlock()
986 991 try:
987 992 for f in list:
988 993 p = self.wjoin(f)
989 994 try:
990 995 st = os.lstat(p)
991 996 except:
992 997 self.ui.warn(_("%s does not exist!\n") % f)
993 998 continue
994 999 if st.st_size > 10000000:
995 1000 self.ui.warn(_("%s: files over 10MB may cause memory and"
996 1001 " performance problems\n"
997 1002 "(use 'hg revert %s' to unadd the file)\n")
998 1003 % (f, f))
999 1004 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1000 1005 self.ui.warn(_("%s not added: only files and symlinks "
1001 1006 "supported currently\n") % f)
1002 1007 elif self.dirstate[f] in 'amn':
1003 1008 self.ui.warn(_("%s already tracked!\n") % f)
1004 1009 elif self.dirstate[f] == 'r':
1005 1010 self.dirstate.normallookup(f)
1006 1011 else:
1007 1012 self.dirstate.add(f)
1008 1013 finally:
1009 1014 del wlock
1010 1015
1011 1016 def forget(self, list):
1012 1017 wlock = self.wlock()
1013 1018 try:
1014 1019 for f in list:
1015 1020 if self.dirstate[f] != 'a':
1016 1021 self.ui.warn(_("%s not added!\n") % f)
1017 1022 else:
1018 1023 self.dirstate.forget(f)
1019 1024 finally:
1020 1025 del wlock
1021 1026
1022 1027 def remove(self, list, unlink=False):
1023 1028 wlock = None
1024 1029 try:
1025 1030 if unlink:
1026 1031 for f in list:
1027 1032 try:
1028 1033 util.unlink(self.wjoin(f))
1029 1034 except OSError, inst:
1030 1035 if inst.errno != errno.ENOENT:
1031 1036 raise
1032 1037 wlock = self.wlock()
1033 1038 for f in list:
1034 1039 if unlink and os.path.exists(self.wjoin(f)):
1035 1040 self.ui.warn(_("%s still exists!\n") % f)
1036 1041 elif self.dirstate[f] == 'a':
1037 1042 self.dirstate.forget(f)
1038 1043 elif f not in self.dirstate:
1039 1044 self.ui.warn(_("%s not tracked!\n") % f)
1040 1045 else:
1041 1046 self.dirstate.remove(f)
1042 1047 finally:
1043 1048 del wlock
1044 1049
1045 1050 def undelete(self, list):
1046 1051 wlock = None
1047 1052 try:
1048 1053 manifests = [self.manifest.read(self.changelog.read(p)[0])
1049 1054 for p in self.dirstate.parents() if p != nullid]
1050 1055 wlock = self.wlock()
1051 1056 for f in list:
1052 1057 if self.dirstate[f] != 'r':
1053 1058 self.ui.warn("%s not removed!\n" % f)
1054 1059 else:
1055 1060 m = f in manifests[0] and manifests[0] or manifests[1]
1056 1061 t = self.file(f).read(m[f])
1057 1062 self.wwrite(f, t, m.flags(f))
1058 1063 self.dirstate.normal(f)
1059 1064 finally:
1060 1065 del wlock
1061 1066
1062 1067 def copy(self, source, dest):
1063 1068 wlock = None
1064 1069 try:
1065 1070 p = self.wjoin(dest)
1066 1071 if not (os.path.exists(p) or os.path.islink(p)):
1067 1072 self.ui.warn(_("%s does not exist!\n") % dest)
1068 1073 elif not (os.path.isfile(p) or os.path.islink(p)):
1069 1074 self.ui.warn(_("copy failed: %s is not a file or a "
1070 1075 "symbolic link\n") % dest)
1071 1076 else:
1072 1077 wlock = self.wlock()
1073 1078 if dest not in self.dirstate:
1074 1079 self.dirstate.add(dest)
1075 1080 self.dirstate.copy(source, dest)
1076 1081 finally:
1077 1082 del wlock
1078 1083
1079 1084 def heads(self, start=None):
1080 1085 heads = self.changelog.heads(start)
1081 1086 # sort the output in rev descending order
1082 1087 heads = [(-self.changelog.rev(h), h) for h in heads]
1083 1088 heads.sort()
1084 1089 return [n for (r, n) in heads]
1085 1090
1086 1091 def branchheads(self, branch, start=None):
1087 1092 branches = self.branchtags()
1088 1093 if branch not in branches:
1089 1094 return []
1090 1095 # The basic algorithm is this:
1091 1096 #
1092 1097 # Start from the branch tip since there are no later revisions that can
1093 1098 # possibly be in this branch, and the tip is a guaranteed head.
1094 1099 #
1095 1100 # Remember the tip's parents as the first ancestors, since these by
1096 1101 # definition are not heads.
1097 1102 #
1098 1103 # Step backwards from the branch tip through all the revisions. We are
1099 1104 # guaranteed by the rules of Mercurial that we will now be visiting the
1100 1105 # nodes in reverse topological order (children before parents).
1101 1106 #
1102 1107 # If a revision is one of the ancestors of a head then we can toss it
1103 1108 # out of the ancestors set (we've already found it and won't be
1104 1109 # visiting it again) and put its parents in the ancestors set.
1105 1110 #
1106 1111 # Otherwise, if a revision is in the branch it's another head, since it
1107 1112 # wasn't in the ancestor list of an existing head. So add it to the
1108 1113 # head list, and add its parents to the ancestor list.
1109 1114 #
1110 1115 # If it is not in the branch ignore it.
1111 1116 #
1112 1117 # Once we have a list of heads, use nodesbetween to filter out all the
1113 1118 # heads that cannot be reached from startrev. There may be a more
1114 1119 # efficient way to do this as part of the previous algorithm.
1115 1120
1116 1121 set = util.set
1117 1122 heads = [self.changelog.rev(branches[branch])]
1118 1123 # Don't care if ancestors contains nullrev or not.
1119 1124 ancestors = set(self.changelog.parentrevs(heads[0]))
1120 1125 for rev in xrange(heads[0] - 1, nullrev, -1):
1121 1126 if rev in ancestors:
1122 1127 ancestors.update(self.changelog.parentrevs(rev))
1123 1128 ancestors.remove(rev)
1124 1129 elif self.changectx(rev).branch() == branch:
1125 1130 heads.append(rev)
1126 1131 ancestors.update(self.changelog.parentrevs(rev))
1127 1132 heads = [self.changelog.node(rev) for rev in heads]
1128 1133 if start is not None:
1129 1134 heads = self.changelog.nodesbetween([start], heads)[2]
1130 1135 return heads
1131 1136
1132 1137 def branches(self, nodes):
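# for each starting node, follow first parents until a merge or a root
# is reached, and report the linear segment as a tuple of
# (head, root, first parent of root, second parent of root) -- the
# 'branch' tuples that findincoming expects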
1133 1138 if not nodes:
1134 1139 nodes = [self.changelog.tip()]
1135 1140 b = []
1136 1141 for n in nodes:
1137 1142 t = n
1138 1143 while 1:
1139 1144 p = self.changelog.parents(n)
1140 1145 if p[1] != nullid or p[0] == nullid:
1141 1146 b.append((t, n, p[0], p[1]))
1142 1147 break
1143 1148 n = p[0]
1144 1149 return b
1145 1150
1146 1151 def between(self, pairs):
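# for each (top, bottom) pair, walk first parents from top towards
# bottom and record the nodes seen at exponentially growing distances
# (1, 2, 4, ...); findincoming uses these samples to narrow down the
# first missing changeset within a branch segment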
1147 1152 r = []
1148 1153
1149 1154 for top, bottom in pairs:
1150 1155 n, l, i = top, [], 0
1151 1156 f = 1
1152 1157
1153 1158 while n != bottom:
1154 1159 p = self.changelog.parents(n)[0]
1155 1160 if i == f:
1156 1161 l.append(n)
1157 1162 f = f * 2
1158 1163 n = p
1159 1164 i += 1
1160 1165
1161 1166 r.append(l)
1162 1167
1163 1168 return r
1164 1169
1165 1170 def findincoming(self, remote, base=None, heads=None, force=False):
1166 1171 """Return list of roots of the subsets of missing nodes from remote
1167 1172
1168 1173 If base dict is specified, assume that these nodes and their parents
1169 1174 exist on the remote side and that no child of a node of base exists
1170 1175 in both remote and self.
1171 1176 Furthermore, base will be updated to include the nodes that exist
1172 1177 in both self and remote but whose children do not exist in both.
1173 1178 If a list of heads is specified, return only nodes which are heads
1174 1179 or ancestors of these heads.
1175 1180
1176 1181 All the ancestors of base are in self and in remote.
1177 1182 All the descendants of the list returned are missing in self.
1178 1183 (and so we know that the rest of the nodes are missing in remote, see
1179 1184 outgoing)
1180 1185 """
1181 1186 m = self.changelog.nodemap
1182 1187 search = []
1183 1188 fetch = {}
1184 1189 seen = {}
1185 1190 seenbranch = {}
1186 1191 if base == None:
1187 1192 base = {}
1188 1193
1189 1194 if not heads:
1190 1195 heads = remote.heads()
1191 1196
1192 1197 if self.changelog.tip() == nullid:
1193 1198 base[nullid] = 1
1194 1199 if heads != [nullid]:
1195 1200 return [nullid]
1196 1201 return []
1197 1202
1198 1203 # assume we're closer to the tip than the root
1199 1204 # and start by examining the heads
1200 1205 self.ui.status(_("searching for changes\n"))
1201 1206
1202 1207 unknown = []
1203 1208 for h in heads:
1204 1209 if h not in m:
1205 1210 unknown.append(h)
1206 1211 else:
1207 1212 base[h] = 1
1208 1213
1209 1214 if not unknown:
1210 1215 return []
1211 1216
1212 1217 req = dict.fromkeys(unknown)
1213 1218 reqcnt = 0
1214 1219
1215 1220 # search through remote branches
1216 1221 # a 'branch' here is a linear segment of history, with four parts:
1217 1222 # head, root, first parent, second parent
1218 1223 # (a branch always has two parents (or none) by definition)
1219 1224 unknown = remote.branches(unknown)
1220 1225 while unknown:
1221 1226 r = []
1222 1227 while unknown:
1223 1228 n = unknown.pop(0)
1224 1229 if n[0] in seen:
1225 1230 continue
1226 1231
1227 1232 self.ui.debug(_("examining %s:%s\n")
1228 1233 % (short(n[0]), short(n[1])))
1229 1234 if n[0] == nullid: # found the end of the branch
1230 1235 pass
1231 1236 elif n in seenbranch:
1232 1237 self.ui.debug(_("branch already found\n"))
1233 1238 continue
1234 1239 elif n[1] and n[1] in m: # do we know the base?
1235 1240 self.ui.debug(_("found incomplete branch %s:%s\n")
1236 1241 % (short(n[0]), short(n[1])))
1237 1242 search.append(n) # schedule branch range for scanning
1238 1243 seenbranch[n] = 1
1239 1244 else:
1240 1245 if n[1] not in seen and n[1] not in fetch:
1241 1246 if n[2] in m and n[3] in m:
1242 1247 self.ui.debug(_("found new changeset %s\n") %
1243 1248 short(n[1]))
1244 1249 fetch[n[1]] = 1 # earliest unknown
1245 1250 for p in n[2:4]:
1246 1251 if p in m:
1247 1252 base[p] = 1 # latest known
1248 1253
1249 1254 for p in n[2:4]:
1250 1255 if p not in req and p not in m:
1251 1256 r.append(p)
1252 1257 req[p] = 1
1253 1258 seen[n[0]] = 1
1254 1259
1255 1260 if r:
1256 1261 reqcnt += 1
1257 1262 self.ui.debug(_("request %d: %s\n") %
1258 1263 (reqcnt, " ".join(map(short, r))))
1259 1264 for p in xrange(0, len(r), 10):
1260 1265 for b in remote.branches(r[p:p+10]):
1261 1266 self.ui.debug(_("received %s:%s\n") %
1262 1267 (short(b[0]), short(b[1])))
1263 1268 unknown.append(b)
1264 1269
1265 1270 # do binary search on the branches we found
1266 1271 while search:
1267 1272 n = search.pop(0)
1268 1273 reqcnt += 1
1269 1274 l = remote.between([(n[0], n[1])])[0]
1270 1275 l.append(n[1])
1271 1276 p = n[0]
1272 1277 f = 1
1273 1278 for i in l:
1274 1279 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1275 1280 if i in m:
1276 1281 if f <= 2:
1277 1282 self.ui.debug(_("found new branch changeset %s\n") %
1278 1283 short(p))
1279 1284 fetch[p] = 1
1280 1285 base[i] = 1
1281 1286 else:
1282 1287 self.ui.debug(_("narrowed branch search to %s:%s\n")
1283 1288 % (short(p), short(i)))
1284 1289 search.append((p, i))
1285 1290 break
1286 1291 p, f = i, f * 2
1287 1292
1288 1293 # sanity check our fetch list
1289 1294 for f in fetch.keys():
1290 1295 if f in m:
1291 1296 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1292 1297
1293 1298 if base.keys() == [nullid]:
1294 1299 if force:
1295 1300 self.ui.warn(_("warning: repository is unrelated\n"))
1296 1301 else:
1297 1302 raise util.Abort(_("repository is unrelated"))
1298 1303
1299 1304 self.ui.debug(_("found new changesets starting at ") +
1300 1305 " ".join([short(f) for f in fetch]) + "\n")
1301 1306
1302 1307 self.ui.debug(_("%d total queries\n") % reqcnt)
1303 1308
1304 1309 return fetch.keys()
1305 1310
1306 1311 def findoutgoing(self, remote, base=None, heads=None, force=False):
1307 1312 """Return list of nodes that are roots of subsets not in remote
1308 1313
1309 1314 If base dict is specified, assume that these nodes and their parents
1310 1315 exist on the remote side.
1311 1316 If a list of heads is specified, return only nodes which are heads
1312 1317 or ancestors of these heads, and return a second element which
1313 1318 contains all remote heads which get new children.
1314 1319 """
1315 1320 if base == None:
1316 1321 base = {}
1317 1322 self.findincoming(remote, base, heads, force=force)
1318 1323
1319 1324 self.ui.debug(_("common changesets up to ")
1320 1325 + " ".join(map(short, base.keys())) + "\n")
1321 1326
1322 1327 remain = dict.fromkeys(self.changelog.nodemap)
1323 1328
1324 1329 # prune everything remote has from the tree
1325 1330 del remain[nullid]
1326 1331 remove = base.keys()
1327 1332 while remove:
1328 1333 n = remove.pop(0)
1329 1334 if n in remain:
1330 1335 del remain[n]
1331 1336 for p in self.changelog.parents(n):
1332 1337 remove.append(p)
1333 1338
1334 1339 # find every node whose parents have been pruned
1335 1340 subset = []
1336 1341 # find every remote head that will get new children
1337 1342 updated_heads = {}
1338 1343 for n in remain:
1339 1344 p1, p2 = self.changelog.parents(n)
1340 1345 if p1 not in remain and p2 not in remain:
1341 1346 subset.append(n)
1342 1347 if heads:
1343 1348 if p1 in heads:
1344 1349 updated_heads[p1] = True
1345 1350 if p2 in heads:
1346 1351 updated_heads[p2] = True
1347 1352
1348 1353 # this is the set of all roots we have to push
1349 1354 if heads:
1350 1355 return subset, updated_heads.keys()
1351 1356 else:
1352 1357 return subset
1353 1358
1354 1359 def pull(self, remote, heads=None, force=False):
1355 1360 lock = self.lock()
1356 1361 try:
1357 1362 fetch = self.findincoming(remote, heads=heads, force=force)
1358 1363 if fetch == [nullid]:
1359 1364 self.ui.status(_("requesting all changes\n"))
1360 1365
1361 1366 if not fetch:
1362 1367 self.ui.status(_("no changes found\n"))
1363 1368 return 0
1364 1369
1365 1370 if heads is None:
1366 1371 cg = remote.changegroup(fetch, 'pull')
1367 1372 else:
1368 1373 if 'changegroupsubset' not in remote.capabilities:
1369 1374 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1370 1375 cg = remote.changegroupsubset(fetch, heads, 'pull')
1371 1376 return self.addchangegroup(cg, 'pull', remote.url())
1372 1377 finally:
1373 1378 del lock
1374 1379
1375 1380 def push(self, remote, force=False, revs=None):
1376 1381 # there are two ways to push to remote repo:
1377 1382 #
1378 1383 # addchangegroup assumes local user can lock remote
1379 1384 # repo (local filesystem, old ssh servers).
1380 1385 #
1381 1386 # unbundle assumes local user cannot lock remote repo (new ssh
1382 1387 # servers, http servers).
1383 1388
1384 1389 if remote.capable('unbundle'):
1385 1390 return self.push_unbundle(remote, force, revs)
1386 1391 return self.push_addchangegroup(remote, force, revs)
1387 1392
1388 1393 def prepush(self, remote, force, revs):
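# work out what needs to be pushed; unless force is set, refuse pushes
# that would create new remote heads. Returns (changegroup, remote heads)
# on success, or (None, 1) when there is nothing to push or the push
# would add heads.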
1389 1394 base = {}
1390 1395 remote_heads = remote.heads()
1391 1396 inc = self.findincoming(remote, base, remote_heads, force=force)
1392 1397
1393 1398 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1394 1399 if revs is not None:
1395 1400 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1396 1401 else:
1397 1402 bases, heads = update, self.changelog.heads()
1398 1403
1399 1404 if not bases:
1400 1405 self.ui.status(_("no changes found\n"))
1401 1406 return None, 1
1402 1407 elif not force:
1403 1408 # check if we're creating new remote heads
1404 1409 # to be a remote head after push, node must be either
1405 1410 # - unknown locally
1406 1411 # - a local outgoing head descended from update
1407 1412 # - a remote head that's known locally and not
1408 1413 # ancestral to an outgoing head
1409 1414
1410 1415 warn = 0
1411 1416
1412 1417 if remote_heads == [nullid]:
1413 1418 warn = 0
1414 1419 elif not revs and len(heads) > len(remote_heads):
1415 1420 warn = 1
1416 1421 else:
1417 1422 newheads = list(heads)
1418 1423 for r in remote_heads:
1419 1424 if r in self.changelog.nodemap:
1420 1425 desc = self.changelog.heads(r, heads)
1421 1426 l = [h for h in heads if h in desc]
1422 1427 if not l:
1423 1428 newheads.append(r)
1424 1429 else:
1425 1430 newheads.append(r)
1426 1431 if len(newheads) > len(remote_heads):
1427 1432 warn = 1
1428 1433
1429 1434 if warn:
1430 1435 self.ui.warn(_("abort: push creates new remote branches!\n"))
1431 1436 self.ui.status(_("(did you forget to merge?"
1432 1437 " use push -f to force)\n"))
1433 1438 return None, 1
1434 1439 elif inc:
1435 1440 self.ui.warn(_("note: unsynced remote changes!\n"))
1436 1441
1437 1442
1438 1443 if revs is None:
1439 1444 cg = self.changegroup(update, 'push')
1440 1445 else:
1441 1446 cg = self.changegroupsubset(update, revs, 'push')
1442 1447 return cg, remote_heads
1443 1448
1444 1449 def push_addchangegroup(self, remote, force, revs):
1445 1450 lock = remote.lock()
1446 1451 try:
1447 1452 ret = self.prepush(remote, force, revs)
1448 1453 if ret[0] is not None:
1449 1454 cg, remote_heads = ret
1450 1455 return remote.addchangegroup(cg, 'push', self.url())
1451 1456 return ret[1]
1452 1457 finally:
1453 1458 del lock
1454 1459
1455 1460 def push_unbundle(self, remote, force, revs):
1456 1461 # local repo finds heads on server, finds out what revs it
1457 1462 # must push. once revs transferred, if server finds it has
1458 1463 # different heads (someone else won commit/push race), server
1459 1464 # aborts.
1460 1465
1461 1466 ret = self.prepush(remote, force, revs)
1462 1467 if ret[0] is not None:
1463 1468 cg, remote_heads = ret
1464 1469 if force: remote_heads = ['force']
1465 1470 return remote.unbundle(cg, remote_heads, 'push')
1466 1471 return ret[1]
1467 1472
1468 1473 def changegroupinfo(self, nodes):
1469 1474 self.ui.note(_("%d changesets found\n") % len(nodes))
1470 1475 if self.ui.debugflag:
1471 1476 self.ui.debug(_("List of changesets:\n"))
1472 1477 for node in nodes:
1473 1478 self.ui.debug("%s\n" % hex(node))
1474 1479
1475 1480 def changegroupsubset(self, bases, heads, source):
1476 1481 """This function generates a changegroup consisting of all the nodes
1477 1482 that are descendants of any of the bases, and ancestors of any of
1478 1483 the heads.
1479 1484
1480 1485 It is fairly complex as determining which filenodes and which
1481 1486 manifest nodes need to be included for the changeset to be complete
1482 1487 is non-trivial.
1483 1488
1484 1489 Another wrinkle is doing the reverse, figuring out which changeset in
1485 1490 the changegroup a particular filenode or manifestnode belongs to."""
1486 1491
1487 1492 self.hook('preoutgoing', throw=True, source=source)
1488 1493
1489 1494 # Set up some initial variables
1490 1495 # Make it easy to refer to self.changelog
1491 1496 cl = self.changelog
1492 1497 # msng is short for missing - compute the list of changesets in this
1493 1498 # changegroup.
1494 1499 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1495 1500 self.changegroupinfo(msng_cl_lst)
1496 1501 # Some bases may turn out to be superfluous, and some heads may be
1497 1502 # too. nodesbetween will return the minimal set of bases and heads
1498 1503 # necessary to re-create the changegroup.
1499 1504
1500 1505 # Known heads are the list of heads that it is assumed the recipient
1501 1506 # of this changegroup will know about.
1502 1507 knownheads = {}
1503 1508 # We assume that all parents of bases are known heads.
1504 1509 for n in bases:
1505 1510 for p in cl.parents(n):
1506 1511 if p != nullid:
1507 1512 knownheads[p] = 1
1508 1513 knownheads = knownheads.keys()
1509 1514 if knownheads:
1510 1515 # Now that we know what heads are known, we can compute which
1511 1516 # changesets are known. The recipient must know about all
1512 1517 # changesets required to reach the known heads from the null
1513 1518 # changeset.
1514 1519 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1515 1520 junk = None
1516 1521 # Transform the list into an ersatz set.
1517 1522 has_cl_set = dict.fromkeys(has_cl_set)
1518 1523 else:
1519 1524 # If there were no known heads, the recipient cannot be assumed to
1520 1525 # know about any changesets.
1521 1526 has_cl_set = {}
1522 1527
1523 1528 # Make it easy to refer to self.manifest
1524 1529 mnfst = self.manifest
1525 1530 # We don't know which manifests are missing yet
1526 1531 msng_mnfst_set = {}
1527 1532 # Nor do we know which filenodes are missing.
1528 1533 msng_filenode_set = {}
1529 1534
1530 1535 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1531 1536 junk = None
1532 1537
1533 1538 # A changeset always belongs to itself, so the changenode lookup
1534 1539 # function for a changenode is identity.
1535 1540 def identity(x):
1536 1541 return x
1537 1542
1538 1543 # A function generating function. Sets up an environment for the
1539 1544 # inner function.
1540 1545 def cmp_by_rev_func(revlog):
1541 1546 # Compare two nodes by their revision number in the environment's
1542 1547 # revision history. Since the revision number both represents the
1543 1548 # most efficient order to read the nodes in, and represents a
1544 1549 # topological sorting of the nodes, this function is often useful.
1545 1550 def cmp_by_rev(a, b):
1546 1551 return cmp(revlog.rev(a), revlog.rev(b))
1547 1552 return cmp_by_rev
1548 1553
1549 1554 # If we determine that a particular file or manifest node must be a
1550 1555 # node that the recipient of the changegroup will already have, we can
1551 1556 # also assume the recipient will have all the parents. This function
1552 1557 # prunes them from the set of missing nodes.
1553 1558 def prune_parents(revlog, hasset, msngset):
1554 1559 haslst = hasset.keys()
1555 1560 haslst.sort(cmp_by_rev_func(revlog))
1556 1561 for node in haslst:
1557 1562 parentlst = [p for p in revlog.parents(node) if p != nullid]
1558 1563 while parentlst:
1559 1564 n = parentlst.pop()
1560 1565 if n not in hasset:
1561 1566 hasset[n] = 1
1562 1567 p = [p for p in revlog.parents(n) if p != nullid]
1563 1568 parentlst.extend(p)
1564 1569 for n in hasset:
1565 1570 msngset.pop(n, None)
1566 1571
1567 1572 # This is a function generating function used to set up an environment
1568 1573 # for the inner function to execute in.
1569 1574 def manifest_and_file_collector(changedfileset):
1570 1575 # This is an information gathering function that gathers
1571 1576 # information from each changeset node that goes out as part of
1572 1577 # the changegroup. The information gathered is a list of which
1573 1578 # manifest nodes are potentially required (the recipient may
1574 1579 # already have them) and total list of all files which were
1575 1580 # changed in any changeset in the changegroup.
1576 1581 #
1577 1582 # We also remember the first changenode we saw any manifest
1578 1583 # referenced by so we can later determine which changenode 'owns'
1579 1584 # the manifest.
1580 1585 def collect_manifests_and_files(clnode):
1581 1586 c = cl.read(clnode)
1582 1587 for f in c[3]:
1583 1588 # This is to make sure we only have one instance of each
1584 1589 # filename string for each filename.
1585 1590 changedfileset.setdefault(f, f)
1586 1591 msng_mnfst_set.setdefault(c[0], clnode)
1587 1592 return collect_manifests_and_files
1588 1593
1589 1594 # Figure out which manifest nodes (of the ones we think might be part
1590 1595 # of the changegroup) the recipient must know about and remove them
1591 1596 # from the changegroup.
1592 1597 def prune_manifests():
1593 1598 has_mnfst_set = {}
1594 1599 for n in msng_mnfst_set:
1595 1600 # If a 'missing' manifest thinks it belongs to a changenode
1596 1601 # the recipient is assumed to have, obviously the recipient
1597 1602 # must have that manifest.
1598 1603 linknode = cl.node(mnfst.linkrev(n))
1599 1604 if linknode in has_cl_set:
1600 1605 has_mnfst_set[n] = 1
1601 1606 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1602 1607
1603 1608 # Use the information collected in collect_manifests_and_files to say
1604 1609 # which changenode any manifestnode belongs to.
1605 1610 def lookup_manifest_link(mnfstnode):
1606 1611 return msng_mnfst_set[mnfstnode]
1607 1612
1608 1613 # A function generating function that sets up the initial environment
1609 1614 # for the inner function.
1610 1615 def filenode_collector(changedfiles):
1611 1616 next_rev = [0]
1612 1617 # This gathers information from each manifestnode included in the
1613 1618 # changegroup about which filenodes the manifest node references
1614 1619 # so we can include those in the changegroup too.
1615 1620 #
1616 1621 # It also remembers which changenode each filenode belongs to. It
1617 1622 # does this by assuming that a filenode belongs to the changenode
1618 1623 # the first manifest that references it belongs to.
1619 1624 def collect_msng_filenodes(mnfstnode):
1620 1625 r = mnfst.rev(mnfstnode)
1621 1626 if r == next_rev[0]:
1622 1627 # If the last rev we looked at was the one just previous,
1623 1628 # we only need to see a diff.
1624 1629 deltamf = mnfst.readdelta(mnfstnode)
1625 1630 # For each line in the delta
1626 1631 for f, fnode in deltamf.items():
1627 1632 f = changedfiles.get(f, None)
1628 1633 # And if the file is in the list of files we care
1629 1634 # about.
1630 1635 if f is not None:
1631 1636 # Get the changenode this manifest belongs to
1632 1637 clnode = msng_mnfst_set[mnfstnode]
1633 1638 # Create the set of filenodes for the file if
1634 1639 # there isn't one already.
1635 1640 ndset = msng_filenode_set.setdefault(f, {})
1636 1641 # And set the filenode's changelog node to the
1637 1642 # manifest's if it hasn't been set already.
1638 1643 ndset.setdefault(fnode, clnode)
1639 1644 else:
1640 1645 # Otherwise we need a full manifest.
1641 1646 m = mnfst.read(mnfstnode)
1642 1647 # For every file we care about.
1643 1648 for f in changedfiles:
1644 1649 fnode = m.get(f, None)
1645 1650 # If it's in the manifest
1646 1651 if fnode is not None:
1647 1652 # See comments above.
1648 1653 clnode = msng_mnfst_set[mnfstnode]
1649 1654 ndset = msng_filenode_set.setdefault(f, {})
1650 1655 ndset.setdefault(fnode, clnode)
1651 1656 # Remember the revision we hope to see next.
1652 1657 next_rev[0] = r + 1
1653 1658 return collect_msng_filenodes
1654 1659
1655 1660 # We have a list of filenodes we think we need for a file; let's remove
1656 1661 # all those we know the recipient must have.
1657 1662 def prune_filenodes(f, filerevlog):
1658 1663 msngset = msng_filenode_set[f]
1659 1664 hasset = {}
1660 1665 # If a 'missing' filenode thinks it belongs to a changenode we
1661 1666 # assume the recipient must have, then the recipient must have
1662 1667 # that filenode.
1663 1668 for n in msngset:
1664 1669 clnode = cl.node(filerevlog.linkrev(n))
1665 1670 if clnode in has_cl_set:
1666 1671 hasset[n] = 1
1667 1672 prune_parents(filerevlog, hasset, msngset)
1668 1673
1669 1674 # A function generator function that sets up a context for the
1670 1675 # inner function.
1671 1676 def lookup_filenode_link_func(fname):
1672 1677 msngset = msng_filenode_set[fname]
1673 1678 # Lookup the changenode the filenode belongs to.
1674 1679 def lookup_filenode_link(fnode):
1675 1680 return msngset[fnode]
1676 1681 return lookup_filenode_link
1677 1682
1678 1683 # Now that we have all these utility functions to help out and
1679 1684 # logically divide up the task, generate the group.
1680 1685 def gengroup():
1681 1686 # The set of changed files starts empty.
1682 1687 changedfiles = {}
1683 1688 # Create a changenode group generator that will call our functions
1684 1689 # back to lookup the owning changenode and collect information.
1685 1690 group = cl.group(msng_cl_lst, identity,
1686 1691 manifest_and_file_collector(changedfiles))
1687 1692 for chnk in group:
1688 1693 yield chnk
1689 1694
1690 1695 # The list of manifests has been collected by the generator
1691 1696 # calling our functions back.
1692 1697 prune_manifests()
1693 1698 msng_mnfst_lst = msng_mnfst_set.keys()
1694 1699 # Sort the manifestnodes by revision number.
1695 1700 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1696 1701 # Create a generator for the manifestnodes that calls our lookup
1697 1702 # and data collection functions back.
1698 1703 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1699 1704 filenode_collector(changedfiles))
1700 1705 for chnk in group:
1701 1706 yield chnk
1702 1707
1703 1708 # These are no longer needed, dereference and toss the memory for
1704 1709 # them.
1705 1710 msng_mnfst_lst = None
1706 1711 msng_mnfst_set.clear()
1707 1712
1708 1713 changedfiles = changedfiles.keys()
1709 1714 changedfiles.sort()
1710 1715 # Go through all our files in order sorted by name.
1711 1716 for fname in changedfiles:
1712 1717 filerevlog = self.file(fname)
1713 1718 if filerevlog.count() == 0:
1714 1719 raise util.Abort(_("empty or missing revlog for %s") % fname)
1715 1720 # Toss out the filenodes that the recipient isn't really
1716 1721 # missing.
1717 1722 if msng_filenode_set.has_key(fname):
1718 1723 prune_filenodes(fname, filerevlog)
1719 1724 msng_filenode_lst = msng_filenode_set[fname].keys()
1720 1725 else:
1721 1726 msng_filenode_lst = []
1722 1727 # If any filenodes are left, generate the group for them,
1723 1728 # otherwise don't bother.
1724 1729 if len(msng_filenode_lst) > 0:
1725 1730 yield changegroup.chunkheader(len(fname))
1726 1731 yield fname
1727 1732 # Sort the filenodes by their revision #
1728 1733 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1729 1734 # Create a group generator and only pass in a changenode
1730 1735 # lookup function as we need to collect no information
1731 1736 # from filenodes.
1732 1737 group = filerevlog.group(msng_filenode_lst,
1733 1738 lookup_filenode_link_func(fname))
1734 1739 for chnk in group:
1735 1740 yield chnk
1736 1741 if msng_filenode_set.has_key(fname):
1737 1742 # Don't need this anymore, toss it to free memory.
1738 1743 del msng_filenode_set[fname]
1739 1744 # Signal that no more groups are left.
1740 1745 yield changegroup.closechunk()
1741 1746
1742 1747 if msng_cl_lst:
1743 1748 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1744 1749
1745 1750 return util.chunkbuffer(gengroup())
1746 1751
1747 1752 def changegroup(self, basenodes, source):
1748 1753 """Generate a changegroup of all nodes that we have that a recipient
1749 1754 doesn't.
1750 1755
1751 1756 This is much easier than the previous function as we can assume that
1752 1757 the recipient has any changenode we aren't sending them."""
1753 1758
1754 1759 self.hook('preoutgoing', throw=True, source=source)
1755 1760
1756 1761 cl = self.changelog
1757 1762 nodes = cl.nodesbetween(basenodes, None)[0]
1758 1763 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1759 1764 self.changegroupinfo(nodes)
1760 1765
1761 1766 def identity(x):
1762 1767 return x
1763 1768
1764 1769 def gennodelst(revlog):
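# yield the nodes of the given revlog whose linkrev belongs to the
# changesets being bundled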
1765 1770 for r in xrange(0, revlog.count()):
1766 1771 n = revlog.node(r)
1767 1772 if revlog.linkrev(n) in revset:
1768 1773 yield n
1769 1774
1770 1775 def changed_file_collector(changedfileset):
1771 1776 def collect_changed_files(clnode):
1772 1777 c = cl.read(clnode)
1773 1778 for fname in c[3]:
1774 1779 changedfileset[fname] = 1
1775 1780 return collect_changed_files
1776 1781
1777 1782 def lookuprevlink_func(revlog):
1778 1783 def lookuprevlink(n):
1779 1784 return cl.node(revlog.linkrev(n))
1780 1785 return lookuprevlink
1781 1786
1782 1787 def gengroup():
1783 1788 # construct a list of all changed files
1784 1789 changedfiles = {}
1785 1790
1786 1791 for chnk in cl.group(nodes, identity,
1787 1792 changed_file_collector(changedfiles)):
1788 1793 yield chnk
1789 1794 changedfiles = changedfiles.keys()
1790 1795 changedfiles.sort()
1791 1796
1792 1797 mnfst = self.manifest
1793 1798 nodeiter = gennodelst(mnfst)
1794 1799 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1795 1800 yield chnk
1796 1801
1797 1802 for fname in changedfiles:
1798 1803 filerevlog = self.file(fname)
1799 1804 if filerevlog.count() == 0:
1800 1805 raise util.Abort(_("empty or missing revlog for %s") % fname)
1801 1806 nodeiter = gennodelst(filerevlog)
1802 1807 nodeiter = list(nodeiter)
1803 1808 if nodeiter:
1804 1809 yield changegroup.chunkheader(len(fname))
1805 1810 yield fname
1806 1811 lookup = lookuprevlink_func(filerevlog)
1807 1812 for chnk in filerevlog.group(nodeiter, lookup):
1808 1813 yield chnk
1809 1814
1810 1815 yield changegroup.closechunk()
1811 1816
1812 1817 if nodes:
1813 1818 self.hook('outgoing', node=hex(nodes[0]), source=source)
1814 1819
1815 1820 return util.chunkbuffer(gengroup())
1816 1821
1817 1822 def addchangegroup(self, source, srctype, url):
1818 1823 """add changegroup to repo.
1819 1824
1820 1825 return values:
1821 1826 - nothing changed or no source: 0
1822 1827 - more heads than before: 1+added heads (2..n)
1823 1828 - fewer heads than before: -1-removed heads (-2..-n)
1824 1829 - number of heads stays the same: 1
1825 1830 """
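# the changelog links to itself, so each incoming changeset takes the
# next revision number as its linkrev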
1826 1831 def csmap(x):
1827 1832 self.ui.debug(_("add changeset %s\n") % short(x))
1828 1833 return cl.count()
1829 1834
1830 1835 def revmap(x):
1831 1836 return cl.rev(x)
1832 1837
1833 1838 if not source:
1834 1839 return 0
1835 1840
1836 1841 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1837 1842
1838 1843 changesets = files = revisions = 0
1839 1844
1840 1845 # write changelog data to temp files so concurrent readers will not see
1841 1846 # inconsistent view
1842 1847 cl = self.changelog
1843 1848 cl.delayupdate()
1844 1849 oldheads = len(cl.heads())
1845 1850
1846 1851 tr = self.transaction()
1847 1852 try:
1848 1853 trp = weakref.proxy(tr)
1849 1854 # pull off the changeset group
1850 1855 self.ui.status(_("adding changesets\n"))
1851 1856 cor = cl.count() - 1
1852 1857 chunkiter = changegroup.chunkiter(source)
1853 1858 if cl.addgroup(chunkiter, csmap, trp, 1) is None:
1854 1859 raise util.Abort(_("received changelog group is empty"))
1855 1860 cnr = cl.count() - 1
1856 1861 changesets = cnr - cor
1857 1862
1858 1863 # pull off the manifest group
1859 1864 self.ui.status(_("adding manifests\n"))
1860 1865 chunkiter = changegroup.chunkiter(source)
1861 1866 # no need to check for empty manifest group here:
1862 1867 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1863 1868 # no new manifest will be created and the manifest group will
1864 1869 # be empty during the pull
1865 1870 self.manifest.addgroup(chunkiter, revmap, trp)
1866 1871
1867 1872 # process the files
1868 1873 self.ui.status(_("adding file changes\n"))
1869 1874 while 1:
1870 1875 f = changegroup.getchunk(source)
1871 1876 if not f:
1872 1877 break
1873 1878 self.ui.debug(_("adding %s revisions\n") % f)
1874 1879 fl = self.file(f)
1875 1880 o = fl.count()
1876 1881 chunkiter = changegroup.chunkiter(source)
1877 1882 if fl.addgroup(chunkiter, revmap, trp) is None:
1878 1883 raise util.Abort(_("received file revlog group is empty"))
1879 1884 revisions += fl.count() - o
1880 1885 files += 1
1881 1886
1882 1887 # make changelog see real files again
1883 1888 cl.finalize(trp)
1884 1889
1885 1890 newheads = len(self.changelog.heads())
1886 1891 heads = ""
1887 1892 if oldheads and newheads != oldheads:
1888 1893 heads = _(" (%+d heads)") % (newheads - oldheads)
1889 1894
1890 1895 self.ui.status(_("added %d changesets"
1891 1896 " with %d changes to %d files%s\n")
1892 1897 % (changesets, revisions, files, heads))
1893 1898
1894 1899 if changesets > 0:
1895 1900 self.hook('pretxnchangegroup', throw=True,
1896 1901 node=hex(self.changelog.node(cor+1)), source=srctype,
1897 1902 url=url)
1898 1903
1899 1904 tr.close()
1900 1905 finally:
1901 1906 del tr
1902 1907
1903 1908 if changesets > 0:
1904 1909 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1905 1910 source=srctype, url=url)
1906 1911
1907 1912 for i in xrange(cor + 1, cnr + 1):
1908 1913 self.hook("incoming", node=hex(self.changelog.node(i)),
1909 1914 source=srctype, url=url)
1910 1915
1911 1916 # never return 0 here:
1912 1917 if newheads < oldheads:
1913 1918 return newheads - oldheads - 1
1914 1919 else:
1915 1920 return newheads - oldheads + 1
1916 1921
1917 1922
1918 1923 def stream_in(self, remote):
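# stream format, as consumed below: a numeric status line (0 = ok,
# 1 = operation forbidden, 2 = remote locking failed), then a
# '<total files> <total bytes>' line, then for each file a
# '<name>\0<size>' header followed by <size> bytes of raw store data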
1919 1924 fp = remote.stream_out()
1920 1925 l = fp.readline()
1921 1926 try:
1922 1927 resp = int(l)
1923 1928 except ValueError:
1924 1929 raise util.UnexpectedOutput(
1925 1930 _('Unexpected response from remote server:'), l)
1926 1931 if resp == 1:
1927 1932 raise util.Abort(_('operation forbidden by server'))
1928 1933 elif resp == 2:
1929 1934 raise util.Abort(_('locking the remote repository failed'))
1930 1935 elif resp != 0:
1931 1936 raise util.Abort(_('the server sent an unknown error code'))
1932 1937 self.ui.status(_('streaming all changes\n'))
1933 1938 l = fp.readline()
1934 1939 try:
1935 1940 total_files, total_bytes = map(int, l.split(' ', 1))
1936 1941 except (ValueError, TypeError):
1937 1942 raise util.UnexpectedOutput(
1938 1943 _('Unexpected response from remote server:'), l)
1939 1944 self.ui.status(_('%d files to transfer, %s of data\n') %
1940 1945 (total_files, util.bytecount(total_bytes)))
1941 1946 start = time.time()
1942 1947 for i in xrange(total_files):
1943 1948 # XXX doesn't support '\n' or '\r' in filenames
1944 1949 l = fp.readline()
1945 1950 try:
1946 1951 name, size = l.split('\0', 1)
1947 1952 size = int(size)
1948 1953 except (ValueError, TypeError):
1949 1954 raise util.UnexpectedOutput(
1950 1955 _('Unexpected response from remote server:'), l)
1951 1956 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1952 1957 ofp = self.sopener(name, 'w')
1953 1958 for chunk in util.filechunkiter(fp, limit=size):
1954 1959 ofp.write(chunk)
1955 1960 ofp.close()
1956 1961 elapsed = time.time() - start
1957 1962 if elapsed <= 0:
1958 1963 elapsed = 0.001
1959 1964 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1960 1965 (util.bytecount(total_bytes), elapsed,
1961 1966 util.bytecount(total_bytes / elapsed)))
1962 1967 self.invalidate()
1963 1968 return len(self.heads()) + 1
1964 1969
1965 1970 def clone(self, remote, heads=[], stream=False):
1966 1971 '''clone remote repository.
1967 1972
1968 1973 keyword arguments:
1969 1974 heads: list of revs to clone (forces use of pull)
1970 1975 stream: use streaming clone if possible'''
1971 1976
1972 1977 # now, all clients that can request uncompressed clones can
1973 1978 # read repo formats supported by all servers that can serve
1974 1979 # them.
1975 1980
1976 1981 # if revlog format changes, client will have to check version
1977 1982 # and format flags on "stream" capability, and use
1978 1983 # uncompressed only if compatible.
1979 1984
1980 1985 if stream and not heads and remote.capable('stream'):
1981 1986 return self.stream_in(remote)
1982 1987 return self.pull(remote, heads)
1983 1988
1984 1989 # used to avoid circular references so destructors work
1985 1990 def aftertrans(files):
1986 1991 renamefiles = [tuple(t) for t in files]
1987 1992 def a():
1988 1993 for src, dest in renamefiles:
1989 1994 util.rename(src, dest)
1990 1995 return a
1991 1996
1992 1997 def instance(ui, path, create):
1993 1998 return localrepository(ui, util.drop_scheme('file', path), create)
1994 1999
1995 2000 def islocal(path):
1996 2001 return True
@@ -1,1359 +1,1384 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import *
11 11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 12 import cStringIO, email.Parser, os, popen2, re, sha, errno
13 13 import sys, tempfile, zlib
14 14
15 15 class PatchError(Exception):
16 16 pass
17 17
18 18 class NoHunks(PatchError):
19 19 pass
20 20
21 21 # helper functions
22 22
23 23 def copyfile(src, dst, basedir=None):
24 24 if not basedir:
25 25 basedir = os.getcwd()
26 26
27 27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 28 if os.path.exists(absdst):
29 29 raise util.Abort(_("cannot create %s: destination already exists") %
30 30 dst)
31 31
32 32 targetdir = os.path.dirname(absdst)
33 33 if not os.path.isdir(targetdir):
34 34 os.makedirs(targetdir)
35 35
36 36 util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
40 40 def extract(ui, fileobj):
41 41 '''extract patch from data read from fileobj.
42 42
43 43 patch can be a normal patch or contained in an email message.
44 44
45 45 return tuple (filename, message, user, date, branch, node, p1, p2).
46 46 Any item in the returned tuple can be None. If filename is None,
47 47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48 48
49 49 # attempt to detect the start of a patch
50 50 # (this heuristic is borrowed from quilt)
51 51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54 54
55 55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 56 tmpfp = os.fdopen(fd, 'w')
57 57 try:
58 58 msg = email.Parser.Parser().parse(fileobj)
59 59
60 60 subject = msg['Subject']
61 61 user = msg['From']
62 62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
63 63 # should try to parse msg['Date']
64 64 date = None
65 65 nodeid = None
66 66 branch = None
67 67 parents = []
68 68
69 69 if subject:
70 70 if subject.startswith('[PATCH'):
71 71 pend = subject.find(']')
72 72 if pend >= 0:
73 73 subject = subject[pend+1:].lstrip()
74 74 subject = subject.replace('\n\t', ' ')
75 75 ui.debug('Subject: %s\n' % subject)
76 76 if user:
77 77 ui.debug('From: %s\n' % user)
78 78 diffs_seen = 0
79 79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
80 80 message = ''
81 81 for part in msg.walk():
82 82 content_type = part.get_content_type()
83 83 ui.debug('Content-Type: %s\n' % content_type)
84 84 if content_type not in ok_types:
85 85 continue
86 86 payload = part.get_payload(decode=True)
87 87 m = diffre.search(payload)
88 88 if m:
89 89 hgpatch = False
90 90 ignoretext = False
91 91
92 92 ui.debug(_('found patch at byte %d\n') % m.start(0))
93 93 diffs_seen += 1
94 94 cfp = cStringIO.StringIO()
95 95 for line in payload[:m.start(0)].splitlines():
96 96 if line.startswith('# HG changeset patch'):
97 97 ui.debug(_('patch generated by hg export\n'))
98 98 hgpatch = True
99 99 # drop earlier commit message content
100 100 cfp.seek(0)
101 101 cfp.truncate()
102 102 subject = None
103 103 elif hgpatch:
104 104 if line.startswith('# User '):
105 105 user = line[7:]
106 106 ui.debug('From: %s\n' % user)
107 107 elif line.startswith("# Date "):
108 108 date = line[7:]
109 109 elif line.startswith("# Branch "):
110 110 branch = line[9:]
111 111 elif line.startswith("# Node ID "):
112 112 nodeid = line[10:]
113 113 elif line.startswith("# Parent "):
114 114 parents.append(line[10:])
115 115 elif line == '---' and gitsendmail:
116 116 ignoretext = True
117 117 if not line.startswith('# ') and not ignoretext:
118 118 cfp.write(line)
119 119 cfp.write('\n')
120 120 message = cfp.getvalue()
121 121 if tmpfp:
122 122 tmpfp.write(payload)
123 123 if not payload.endswith('\n'):
124 124 tmpfp.write('\n')
125 125 elif not diffs_seen and message and content_type == 'text/plain':
126 126 message += '\n' + payload
127 127 except:
128 128 tmpfp.close()
129 129 os.unlink(tmpname)
130 130 raise
131 131
132 132 if subject and not message.startswith(subject):
133 133 message = '%s\n%s' % (subject, message)
134 134 tmpfp.close()
135 135 if not diffs_seen:
136 136 os.unlink(tmpname)
137 137 return None, message, user, date, branch, None, None, None
138 138 p1 = parents and parents.pop(0) or None
139 139 p2 = parents and parents.pop(0) or None
140 140 return tmpname, message, user, date, branch, nodeid, p1, p2
141 141
142 142 GP_PATCH = 1 << 0 # we have to run patch
143 143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 144 GP_BINARY = 1 << 2 # there's a binary patch
145 145
146 146 def readgitpatch(fp, firstline=None):
147 147 """extract git-style metadata about patches from <patchname>"""
148 148 class gitpatch:
149 149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
150 150 def __init__(self, path):
151 151 self.path = path
152 152 self.oldpath = None
153 153 self.mode = None
154 154 self.op = 'MODIFY'
155 155 self.lineno = 0
156 156 self.binary = False
157 157
158 158 def reader(fp, firstline):
159 159 if firstline is not None:
160 160 yield firstline
161 161 for line in fp:
162 162 yield line
163 163
164 164 # Filter patch for git information
165 165 gitre = re.compile('diff --git a/(.*) b/(.*)')
166 166 gp = None
167 167 gitpatches = []
168 168 # Can have a git patch with only metadata, causing patch to complain
169 169 dopatch = 0
170 170
171 171 lineno = 0
172 172 for line in reader(fp, firstline):
173 173 lineno += 1
174 174 if line.startswith('diff --git'):
175 175 m = gitre.match(line)
176 176 if m:
177 177 if gp:
178 178 gitpatches.append(gp)
179 179 src, dst = m.group(1, 2)
180 180 gp = gitpatch(dst)
181 181 gp.lineno = lineno
182 182 elif gp:
183 183 if line.startswith('--- '):
184 184 if gp.op in ('COPY', 'RENAME'):
185 185 dopatch |= GP_FILTER
186 186 gitpatches.append(gp)
187 187 gp = None
188 188 dopatch |= GP_PATCH
189 189 continue
190 190 if line.startswith('rename from '):
191 191 gp.op = 'RENAME'
192 192 gp.oldpath = line[12:].rstrip()
193 193 elif line.startswith('rename to '):
194 194 gp.path = line[10:].rstrip()
195 195 elif line.startswith('copy from '):
196 196 gp.op = 'COPY'
197 197 gp.oldpath = line[10:].rstrip()
198 198 elif line.startswith('copy to '):
199 199 gp.path = line[8:].rstrip()
200 200 elif line.startswith('deleted file'):
201 201 gp.op = 'DELETE'
202 202 elif line.startswith('new file mode '):
203 203 gp.op = 'ADD'
204 204 gp.mode = int(line.rstrip()[-6:], 8)
205 205 elif line.startswith('new mode '):
206 206 gp.mode = int(line.rstrip()[-6:], 8)
207 207 elif line.startswith('GIT binary patch'):
208 208 dopatch |= GP_BINARY
209 209 gp.binary = True
210 210 if gp:
211 211 gitpatches.append(gp)
212 212
213 213 if not gitpatches:
214 214 dopatch = GP_PATCH
215 215
216 216 return (dopatch, gitpatches)
217 217
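A quick sketch of what readgitpatch() reports, assuming this module is importable as mercurial.patch and feeding it a small rename patch (the file names and content here are made up for illustration):

    import cStringIO
    from mercurial import patch          # assumption: this module installs as mercurial.patch

    text = ('diff --git a/old.txt b/new.txt\n'
            'rename from old.txt\n'
            'rename to new.txt\n'
            '--- a/old.txt\n'
            '+++ b/new.txt\n'
            '@@ -1,1 +1,1 @@\n'
            '-foo\n'
            '+bar\n')
    dopatch, gps = patch.readgitpatch(cStringIO.StringIO(text))
    print bool(dopatch & patch.GP_PATCH), bool(dopatch & patch.GP_FILTER)   # True True
    print gps[0].op, gps[0].oldpath, gps[0].path                            # RENAME old.txt new.txt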
218 218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 219 """apply <patchname> to the working directory.
220 220 returns whether patch was applied with fuzz factor."""
221 221 patcher = ui.config('ui', 'patch')
222 222 args = []
223 223 try:
224 224 if patcher:
225 225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 226 files)
227 227 else:
228 228 try:
229 229 return internalpatch(patchname, ui, strip, cwd, files)
230 230 except NoHunks:
231 231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 233 patcher)
234 234 if util.needbinarypatch():
235 235 args.append('--binary')
236 236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 237 files)
238 238 except PatchError, err:
239 239 s = str(err)
240 240 if s:
241 241 raise util.Abort(s)
242 242 else:
243 243 raise util.Abort(_('patch failed to apply'))
244 244
245 245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 246 """use <patcher> to apply <patchname> to the working directory.
247 247 returns whether patch was applied with fuzz factor."""
248 248
249 249 fuzz = False
250 250 if cwd:
251 251 args.append('-d %s' % util.shellquote(cwd))
252 252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 253 util.shellquote(patchname)))
254 254
255 255 for line in fp:
256 256 line = line.rstrip()
257 257 ui.note(line + '\n')
258 258 if line.startswith('patching file '):
259 259 pf = util.parse_patch_output(line)
260 260 printed_file = False
261 261 files.setdefault(pf, (None, None))
262 262 elif line.find('with fuzz') >= 0:
263 263 fuzz = True
264 264 if not printed_file:
265 265 ui.warn(pf + '\n')
266 266 printed_file = True
267 267 ui.warn(line + '\n')
268 268 elif line.find('saving rejects to file') >= 0:
269 269 ui.warn(line + '\n')
270 270 elif line.find('FAILED') >= 0:
271 271 if not printed_file:
272 272 ui.warn(pf + '\n')
273 273 printed_file = True
274 274 ui.warn(line + '\n')
275 275 code = fp.close()
276 276 if code:
277 277 raise PatchError(_("patch command failed: %s") %
278 278 util.explain_exit(code)[0])
279 279 return fuzz
280 280
281 281 def internalpatch(patchobj, ui, strip, cwd, files={}):
282 282 """use builtin patch to apply <patchobj> to the working directory.
283 283 returns whether patch was applied with fuzz factor."""
284 284 try:
285 285 fp = file(patchobj, 'rb')
286 286 except TypeError:
287 287 fp = patchobj
288 288 if cwd:
289 289 curdir = os.getcwd()
290 290 os.chdir(cwd)
291 291 try:
292 292 ret = applydiff(ui, fp, files, strip=strip)
293 293 finally:
294 294 if cwd:
295 295 os.chdir(curdir)
296 296 if ret < 0:
297 297 raise PatchError
298 298 return ret > 0
299 299
300 300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
301 301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
302 302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303 303
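For reference, the unified hunk header regexp decomposes a header like this (a standalone sketch using only the re module):

    import re
    unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
    print unidesc.match('@@ -1,5 +1,6 @@').groups()   # ('1', ',5', '5', '1', ',6', '6')
    print unidesc.match('@@ -0,0 +1 @@').groups()     # ('0', ',0', '0', '1', None, None)
    # read_unified_hunk() below treats a missing length group as a length of 1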
304 304 class patchfile:
305 def __init__(self, ui, fname):
305 def __init__(self, ui, fname, missing=False):
306 306 self.fname = fname
307 307 self.ui = ui
308 try:
309 fp = file(fname, 'rb')
310 self.lines = fp.readlines()
311 self.exists = True
312 except IOError:
308 self.lines = []
309 self.exists = False
310 self.missing = missing
311 if not missing:
312 try:
313 fp = file(fname, 'rb')
314 self.lines = fp.readlines()
315 self.exists = True
316 except IOError:
317 pass
318 else:
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
320
321 if not self.exists:
313 322 dirname = os.path.dirname(fname)
314 323 if dirname and not os.path.isdir(dirname):
315 dirs = dirname.split(os.path.sep)
316 d = ""
317 for x in dirs:
318 d = os.path.join(d, x)
319 if not os.path.isdir(d):
320 os.mkdir(d)
321 self.lines = []
322 self.exists = False
324 os.makedirs(dirname)
323 325
324 326 self.hash = {}
325 327 self.dirty = 0
326 328 self.offset = 0
327 329 self.rej = []
328 330 self.fileprinted = False
329 331 self.printfile(False)
330 332 self.hunks = 0
331 333
332 334 def printfile(self, warn):
333 335 if self.fileprinted:
334 336 return
335 337 if warn or self.ui.verbose:
336 338 self.fileprinted = True
337 339 s = _("patching file %s\n") % self.fname
338 340 if warn:
339 341 self.ui.warn(s)
340 342 else:
341 343 self.ui.note(s)
342 344
343 345
344 346 def findlines(self, l, linenum):
345 347 # looks through the hash and finds candidate lines. The
346 348 # result is a list of line numbers sorted based on distance
347 349 # from linenum
348 350 def sorter(a, b):
349 351 vala = abs(a - linenum)
350 352 valb = abs(b - linenum)
351 353 return cmp(vala, valb)
352 354
353 355 try:
354 356 cand = self.hash[l]
355 357 except:
356 358 return []
357 359
358 360 if len(cand) > 1:
359 361 # resort our list of potentials forward then back.
360 362 cand.sort(sorter)
361 363 return cand
362 364
363 365 def hashlines(self):
364 366 self.hash = {}
365 367 for x in xrange(len(self.lines)):
366 368 s = self.lines[x]
367 369 self.hash.setdefault(s, []).append(x)
368 370
369 371 def write_rej(self):
370 372 # our rejects are a little different from patch(1). This always
371 373 # creates rejects in the same form as the original patch. A file
372 374 # header is inserted so that you can run the reject through patch again
373 375 # without having to type the filename.
374 376
375 377 if not self.rej:
376 378 return
377 379 if self.hunks != 1:
378 380 hunkstr = "s"
379 381 else:
380 382 hunkstr = ""
381 383
382 384 fname = self.fname + ".rej"
383 385 self.ui.warn(
384 386 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
385 387 (len(self.rej), self.hunks, hunkstr, fname))
386 388 try: os.unlink(fname)
387 389 except:
388 390 pass
389 391 fp = file(fname, 'wb')
390 392 base = os.path.basename(self.fname)
391 393 fp.write("--- %s\n+++ %s\n" % (base, base))
392 394 for x in self.rej:
393 395 for l in x.hunk:
394 396 fp.write(l)
395 397 if l[-1] != '\n':
396 398 fp.write("\n\ No newline at end of file\n")
397 399
398 400 def write(self, dest=None):
399 401 if self.dirty:
400 402 if not dest:
401 403 dest = self.fname
402 404 st = None
403 405 try:
404 406 st = os.lstat(dest)
405 407 except OSError, inst:
406 408 if inst.errno != errno.ENOENT:
407 409 raise
408 410 if st and st.st_nlink > 1:
409 411 os.unlink(dest)
410 412 fp = file(dest, 'wb')
411 413 if st and st.st_nlink > 1:
412 414 os.chmod(dest, st.st_mode)
413 415 fp.writelines(self.lines)
414 416 fp.close()
415 417
416 418 def close(self):
417 419 self.write()
418 420 self.write_rej()
419 421
420 422 def apply(self, h, reverse):
421 423 if not h.complete():
422 424 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
423 425 (h.number, h.desc, len(h.a), h.lena, len(h.b),
424 426 h.lenb))
425 427
426 428 self.hunks += 1
427 429 if reverse:
428 430 h.reverse()
429 431
432 if self.missing:
433 self.rej.append(h)
434 return -1
435
430 436 if self.exists and h.createfile():
431 437 self.ui.warn(_("file %s already exists\n") % self.fname)
432 438 self.rej.append(h)
433 439 return -1
434 440
435 441 if isinstance(h, binhunk):
436 442 if h.rmfile():
437 443 os.unlink(self.fname)
438 444 else:
439 445 self.lines[:] = h.new()
440 446 self.offset += len(h.new())
441 447 self.dirty = 1
442 448 return 0
443 449
444 450 # fast case first, no offsets, no fuzz
445 451 old = h.old()
446 452 # patch starts counting at 1 unless we are adding the file
447 453 if h.starta == 0:
448 454 start = 0
449 455 else:
450 456 start = h.starta + self.offset - 1
451 457 orig_start = start
452 458 if diffhelpers.testhunk(old, self.lines, start) == 0:
453 459 if h.rmfile():
454 460 os.unlink(self.fname)
455 461 else:
456 462 self.lines[start : start + h.lena] = h.new()
457 463 self.offset += h.lenb - h.lena
458 464 self.dirty = 1
459 465 return 0
460 466
461 467 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
462 468 self.hashlines()
463 469 if h.hunk[-1][0] != ' ':
464 470 # if the hunk tried to put something at the bottom of the file
465 471 # override the start line and use eof here
466 472 search_start = len(self.lines)
467 473 else:
468 474 search_start = orig_start
469 475
470 476 for fuzzlen in xrange(3):
471 477 for toponly in [ True, False ]:
472 478 old = h.old(fuzzlen, toponly)
473 479
474 480 cand = self.findlines(old[0][1:], search_start)
475 481 for l in cand:
476 482 if diffhelpers.testhunk(old, self.lines, l) == 0:
477 483 newlines = h.new(fuzzlen, toponly)
478 484 self.lines[l : l + len(old)] = newlines
479 485 self.offset += len(newlines) - len(old)
480 486 self.dirty = 1
481 487 if fuzzlen:
482 488 fuzzstr = "with fuzz %d " % fuzzlen
483 489 f = self.ui.warn
484 490 self.printfile(True)
485 491 else:
486 492 fuzzstr = ""
487 493 f = self.ui.note
488 494 offset = l - orig_start - fuzzlen
489 495 if offset == 1:
490 496 linestr = "line"
491 497 else:
492 498 linestr = "lines"
493 499 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
494 500 (h.number, l+1, fuzzstr, offset, linestr))
495 501 return fuzzlen
496 502 self.printfile(True)
497 503 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
498 504 self.rej.append(h)
499 505 return -1
500 506
501 507 class hunk:
502 508 def __init__(self, desc, num, lr, context):
503 509 self.number = num
504 510 self.desc = desc
505 511 self.hunk = [ desc ]
506 512 self.a = []
507 513 self.b = []
508 514 if context:
509 515 self.read_context_hunk(lr)
510 516 else:
511 517 self.read_unified_hunk(lr)
512 518
513 519 def read_unified_hunk(self, lr):
514 520 m = unidesc.match(self.desc)
515 521 if not m:
516 522 raise PatchError(_("bad hunk #%d") % self.number)
517 523 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
518 524 if self.lena == None:
519 525 self.lena = 1
520 526 else:
521 527 self.lena = int(self.lena)
522 528 if self.lenb == None:
523 529 self.lenb = 1
524 530 else:
525 531 self.lenb = int(self.lenb)
526 532 self.starta = int(self.starta)
527 533 self.startb = int(self.startb)
528 534 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
529 535 # if we hit eof before finishing out the hunk, the last line will
530 536 # be zero length. Let's try to fix it up.
531 537 while len(self.hunk[-1]) == 0:
532 538 del self.hunk[-1]
533 539 del self.a[-1]
534 540 del self.b[-1]
535 541 self.lena -= 1
536 542 self.lenb -= 1
537 543
538 544 def read_context_hunk(self, lr):
539 545 self.desc = lr.readline()
540 546 m = contextdesc.match(self.desc)
541 547 if not m:
542 548 raise PatchError(_("bad hunk #%d") % self.number)
543 549 foo, self.starta, foo2, aend, foo3 = m.groups()
544 550 self.starta = int(self.starta)
545 551 if aend == None:
546 552 aend = self.starta
547 553 self.lena = int(aend) - self.starta
548 554 if self.starta:
549 555 self.lena += 1
550 556 for x in xrange(self.lena):
551 557 l = lr.readline()
552 558 if l.startswith('---'):
553 559 lr.push(l)
554 560 break
555 561 s = l[2:]
556 562 if l.startswith('- ') or l.startswith('! '):
557 563 u = '-' + s
558 564 elif l.startswith(' '):
559 565 u = ' ' + s
560 566 else:
561 567 raise PatchError(_("bad hunk #%d old text line %d") %
562 568 (self.number, x))
563 569 self.a.append(u)
564 570 self.hunk.append(u)
565 571
566 572 l = lr.readline()
567 573 if l.startswith('\ '):
568 574 s = self.a[-1][:-1]
569 575 self.a[-1] = s
570 576 self.hunk[-1] = s
571 577 l = lr.readline()
572 578 m = contextdesc.match(l)
573 579 if not m:
574 580 raise PatchError(_("bad hunk #%d") % self.number)
575 581 foo, self.startb, foo2, bend, foo3 = m.groups()
576 582 self.startb = int(self.startb)
577 583 if bend == None:
578 584 bend = self.startb
579 585 self.lenb = int(bend) - self.startb
580 586 if self.startb:
581 587 self.lenb += 1
582 588 hunki = 1
583 589 for x in xrange(self.lenb):
584 590 l = lr.readline()
585 591 if l.startswith('\ '):
586 592 s = self.b[-1][:-1]
587 593 self.b[-1] = s
588 594 self.hunk[hunki-1] = s
589 595 continue
590 596 if not l:
591 597 lr.push(l)
592 598 break
593 599 s = l[2:]
594 600 if l.startswith('+ ') or l.startswith('! '):
595 601 u = '+' + s
596 602 elif l.startswith(' '):
597 603 u = ' ' + s
598 604 elif len(self.b) == 0:
599 605 # this can happen when the hunk does not add any lines
600 606 lr.push(l)
601 607 break
602 608 else:
603 609 raise PatchError(_("bad hunk #%d old text line %d") %
604 610 (self.number, x))
605 611 self.b.append(s)
606 612 while True:
607 613 if hunki >= len(self.hunk):
608 614 h = ""
609 615 else:
610 616 h = self.hunk[hunki]
611 617 hunki += 1
612 618 if h == u:
613 619 break
614 620 elif h.startswith('-'):
615 621 continue
616 622 else:
617 623 self.hunk.insert(hunki-1, u)
618 624 break
619 625
620 626 if not self.a:
621 627 # this happens when lines were only added to the hunk
622 628 for x in self.hunk:
623 629 if x.startswith('-') or x.startswith(' '):
624 630 self.a.append(x)
625 631 if not self.b:
626 632 # this happens when lines were only deleted from the hunk
627 633 for x in self.hunk:
628 634 if x.startswith('+') or x.startswith(' '):
629 635 self.b.append(x[1:])
630 636 # @@ -start,len +start,len @@
631 637 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
632 638 self.startb, self.lenb)
633 639 self.hunk[0] = self.desc
634 640
635 641 def reverse(self):
636 642 origlena = self.lena
637 643 origstarta = self.starta
638 644 self.lena = self.lenb
639 645 self.starta = self.startb
640 646 self.lenb = origlena
641 647 self.startb = origstarta
642 648 self.a = []
643 649 self.b = []
644 650 # self.hunk[0] is the @@ description
645 651 for x in xrange(1, len(self.hunk)):
646 652 o = self.hunk[x]
647 653 if o.startswith('-'):
648 654 n = '+' + o[1:]
649 655 self.b.append(o[1:])
650 656 elif o.startswith('+'):
651 657 n = '-' + o[1:]
652 658 self.a.append(n)
653 659 else:
654 660 n = o
655 661 self.b.append(o[1:])
656 662 self.a.append(o)
657 663 self.hunk[x] = n
658 664
659 665 def fix_newline(self):
660 666 diffhelpers.fix_newline(self.hunk, self.a, self.b)
661 667
662 668 def complete(self):
663 669 return len(self.a) == self.lena and len(self.b) == self.lenb
664 670
665 671 def createfile(self):
666 672 return self.starta == 0 and self.lena == 0
667 673
668 674 def rmfile(self):
669 675 return self.startb == 0 and self.lenb == 0
670 676
671 677 def fuzzit(self, l, fuzz, toponly):
672 678 # this removes context lines from the top and bottom of list 'l'. It
673 679 # checks the hunk to make sure only context lines are removed, and then
674 680 # returns a new shortened list of lines.
675 681 fuzz = min(fuzz, len(l)-1)
676 682 if fuzz:
677 683 top = 0
678 684 bot = 0
679 685 hlen = len(self.hunk)
680 686 for x in xrange(hlen-1):
681 687 # the hunk starts with the @@ line, so use x+1
682 688 if self.hunk[x+1][0] == ' ':
683 689 top += 1
684 690 else:
685 691 break
686 692 if not toponly:
687 693 for x in xrange(hlen-1):
688 694 if self.hunk[hlen-bot-1][0] == ' ':
689 695 bot += 1
690 696 else:
691 697 break
692 698
693 699 # top and bot now count context in the hunk
694 700 # adjust them if either one is short
695 701 context = max(top, bot, 3)
696 702 if bot < context:
697 703 bot = max(0, fuzz - (context - bot))
698 704 else:
699 705 bot = min(fuzz, bot)
700 706 if top < context:
701 707 top = max(0, fuzz - (context - top))
702 708 else:
703 709 top = min(fuzz, top)
704 710
705 711 return l[top:len(l)-bot]
706 712 return l
707 713
708 714 def old(self, fuzz=0, toponly=False):
709 715 return self.fuzzit(self.a, fuzz, toponly)
710 716
711 717 def newctrl(self):
712 718 res = []
713 719 for x in self.hunk:
714 720 c = x[0]
715 721 if c == ' ' or c == '+':
716 722 res.append(x)
717 723 return res
718 724
719 725 def new(self, fuzz=0, toponly=False):
720 726 return self.fuzzit(self.b, fuzz, toponly)
721 727
722 728 class binhunk:
723 729 'A binary patch file. Only understands literals so far.'
724 730 def __init__(self, gitpatch):
725 731 self.gitpatch = gitpatch
726 732 self.text = None
727 733 self.hunk = ['GIT binary patch\n']
728 734
729 735 def createfile(self):
730 736 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
731 737
732 738 def rmfile(self):
733 739 return self.gitpatch.op == 'DELETE'
734 740
735 741 def complete(self):
736 742 return self.text is not None
737 743
738 744 def new(self):
739 745 return [self.text]
740 746
741 747 def extract(self, fp):
742 748 line = fp.readline()
743 749 self.hunk.append(line)
744 750 while line and not line.startswith('literal '):
745 751 line = fp.readline()
746 752 self.hunk.append(line)
747 753 if not line:
748 754 raise PatchError(_('could not extract binary patch'))
749 755 size = int(line[8:].rstrip())
750 756 dec = []
751 757 line = fp.readline()
752 758 self.hunk.append(line)
753 759 while len(line) > 1:
754 760 l = line[0]
755 761 if l <= 'Z' and l >= 'A':
756 762 l = ord(l) - ord('A') + 1
757 763 else:
758 764 l = ord(l) - ord('a') + 27
759 765 dec.append(base85.b85decode(line[1:-1])[:l])
760 766 line = fp.readline()
761 767 self.hunk.append(line)
762 768 text = zlib.decompress(''.join(dec))
763 769 if len(text) != size:
764 770 raise PatchError(_('binary patch is %d bytes, not %d') %
765 771 (len(text), size))
766 772 self.text = text
767 773
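The per-line length prefix decoded in extract() is git's base85 framing: 'A'..'Z' stand for payload lengths 1..26 and 'a'..'z' for 27..52, the same mapping fmtline() uses when encoding in b85diff() further down. A small hypothetical round-trip mirror:

    def lenchar(l):
        # mirrors the prefix fmtline() writes in b85diff() below
        if l <= 26:
            return chr(ord('A') + l - 1)
        return chr(l - 26 + ord('a') - 1)

    def charlen(c):
        # mirrors the decode in binhunk.extract() above
        if c >= 'A' and c <= 'Z':
            return ord(c) - ord('A') + 1
        return ord(c) - ord('a') + 27

    print lenchar(26), charlen('Z')   # Z 26
    print lenchar(52), charlen('z')   # z 52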
768 774 def parsefilename(str):
769 775 # --- filename \t|space stuff
770 776 s = str[4:]
771 777 i = s.find('\t')
772 778 if i < 0:
773 779 i = s.find(' ')
774 780 if i < 0:
775 781 return s
776 782 return s[:i]
777 783
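For instance, assuming the function above is in scope: it peels off the '--- '/'+++ ' marker and anything after the first tab or space; when neither is present the raw remainder (newline included) comes back and is rstripped later by pathstrip() inside selectfile() just below.

    print parsefilename('--- a/foo.c\tThu Jan 01 00:00:00 1970 +0000\n')   # a/foo.c
    print repr(parsefilename('+++ b/foo.c\n'))                             # 'b/foo.c\n'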
778 784 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
779 785 def pathstrip(path, count=1):
780 786 pathlen = len(path)
781 787 i = 0
782 788 if count == 0:
783 789 return path.rstrip()
784 790 while count > 0:
785 791 i = path.find('/', i)
786 792 if i == -1:
787 793 raise PatchError(_("unable to strip away %d dirs from %s") %
788 794 (count, path))
789 795 i += 1
790 796 # consume '//' in the path
791 797 while i < pathlen - 1 and path[i] == '/':
792 798 i += 1
793 799 count -= 1
794 800 return path[i:].rstrip()
795 801
796 802 nulla = afile_orig == "/dev/null"
797 803 nullb = bfile_orig == "/dev/null"
798 804 afile = pathstrip(afile_orig, strip)
799 gooda = os.path.exists(afile) and not nulla
805 gooda = not nulla and os.path.exists(afile)
800 806 bfile = pathstrip(bfile_orig, strip)
801 807 if afile == bfile:
802 808 goodb = gooda
803 809 else:
804 goodb = os.path.exists(bfile) and not nullb
810 goodb = not nullb and os.path.exists(bfile)
805 811 createfunc = hunk.createfile
806 812 if reverse:
807 813 createfunc = hunk.rmfile
808 if not goodb and not gooda and not createfunc():
809 raise PatchError(_("unable to find %s or %s for patching") %
810 (afile, bfile))
811 if gooda and goodb:
812 fname = bfile
813 if afile in bfile:
814 missing = not goodb and not gooda and not createfunc()
815 fname = None
816 if not missing:
817 if gooda and goodb:
818 fname = (afile in bfile) and afile or bfile
819 elif gooda:
814 820 fname = afile
815 elif gooda:
816 fname = afile
817 elif not nullb:
818 fname = bfile
819 if afile in bfile:
821
822 if not fname:
823 if not nullb:
824 fname = (afile in bfile) and afile or bfile
825 elif not nulla:
820 826 fname = afile
821 elif not nulla:
822 fname = afile
823 return fname
827 else:
828 raise PatchError(_("undefined source and destination files"))
829
830 return fname, missing
824 831
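The nested pathstrip() above implements the same -p/strip semantics as patch(1): drop the first 'count' directory components. A hypothetical simplified mirror, ignoring the '//' collapsing and the error reporting:

    def pstrip(path, count=1):
        # simplified stand-in for pathstrip() above, for illustration only
        for dummy in xrange(count):
            path = path[path.index('/') + 1:]
        return path.rstrip()

    print pstrip('a/src/main.c')      # src/main.c  (what strip=1 sees)
    print pstrip('a/src/main.c', 2)   # main.c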
825 832 class linereader:
826 833 # simple class to allow pushing lines back into the input stream
827 834 def __init__(self, fp):
828 835 self.fp = fp
829 836 self.buf = []
830 837
831 838 def push(self, line):
832 839 self.buf.append(line)
833 840
834 841 def readline(self):
835 842 if self.buf:
836 843 l = self.buf[0]
837 844 del self.buf[0]
838 845 return l
839 846 return self.fp.readline()
840 847
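linereader is just a one-line pushback buffer; roughly, assuming this module imports as mercurial.patch:

    import cStringIO
    from mercurial.patch import linereader   # assumption: module path

    lr = linereader(cStringIO.StringIO('first\nsecond\n'))
    l = lr.readline()            # 'first\n'
    lr.push(l)                   # give it back
    print repr(lr.readline())    # 'first\n' again, served from the buffer
    print repr(lr.readline())    # 'second\n'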
841 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
842 rejmerge=None, updatedir=None):
843 """reads a patch from fp and tries to apply it. The dict 'changed' is
844 filled in with all of the filenames changed by the patch. Returns 0
845 for a clean patch, -1 if any rejects were found and 1 if there was
846 any fuzz."""
848 def iterhunks(ui, fp, sourcefile=None):
849 """Read a patch and yield the following events:
850 - ("file", afile, bfile, firsthunk): select a new target file.
851 - ("hunk", hunk): a new hunk is ready to be applied, follows a
852 "file" event.
853 - ("git", gitchanges): current diff is in git format, gitchanges
854 maps filenames to gitpatch records. Unique event.
855 """
847 856
848 def scangitpatch(fp, firstline, cwd=None):
857 def scangitpatch(fp, firstline):
849 858 '''git patches can modify a file, then copy that file to
850 859 a new file, but expect the source to be the unmodified form.
851 860 So we scan the patch looking for that case so we can do
852 861 the copies ahead of time.'''
853 862
854 863 pos = 0
855 864 try:
856 865 pos = fp.tell()
857 866 except IOError:
858 867 fp = cStringIO.StringIO(fp.read())
859 868
860 869 (dopatch, gitpatches) = readgitpatch(fp, firstline)
861 for gp in gitpatches:
862 if gp.op in ('COPY', 'RENAME'):
863 copyfile(gp.oldpath, gp.path, basedir=cwd)
864
865 870 fp.seek(pos)
866 871
867 872 return fp, dopatch, gitpatches
868 873
874 changed = {}
869 875 current_hunk = None
870 current_file = None
871 876 afile = ""
872 877 bfile = ""
873 878 state = None
874 879 hunknum = 0
875 rejects = 0
880 emitfile = False
876 881
877 882 git = False
878 883 gitre = re.compile('diff --git (a/.*) (b/.*)')
879 884
880 885 # our states
881 886 BFILE = 1
882 err = 0
883 887 context = None
884 888 lr = linereader(fp)
885 889 dopatch = True
886 890 gitworkdone = False
887 891
888 def getpatchfile(afile, bfile, hunk):
889 try:
890 if sourcefile:
891 targetfile = patchfile(ui, sourcefile)
892 else:
893 targetfile = selectfile(afile, bfile, hunk,
894 strip, reverse)
895 targetfile = patchfile(ui, targetfile)
896 return targetfile
897 except PatchError, err:
898 ui.warn(str(err) + '\n')
899 return None
900
901 892 while True:
902 893 newfile = False
903 894 x = lr.readline()
904 895 if not x:
905 896 break
906 897 if current_hunk:
907 898 if x.startswith('\ '):
908 899 current_hunk.fix_newline()
909 ret = current_file.apply(current_hunk, reverse)
910 if ret >= 0:
911 changed.setdefault(current_file.fname, (None, None))
912 if ret > 0:
913 err = 1
900 yield 'hunk', current_hunk
914 901 current_hunk = None
915 902 gitworkdone = False
916 903 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
917 904 ((context or context == None) and x.startswith('***************')))):
918 905 try:
919 906 if context == None and x.startswith('***************'):
920 907 context = True
921 908 current_hunk = hunk(x, hunknum + 1, lr, context)
922 909 except PatchError, err:
923 910 ui.debug(err)
924 911 current_hunk = None
925 912 continue
926 913 hunknum += 1
927 if not current_file:
928 current_file = getpatchfile(afile, bfile, current_hunk)
929 if not current_file:
930 current_file, current_hunk = None, None
931 rejects += 1
932 continue
914 if emitfile:
915 emitfile = False
916 yield 'file', (afile, bfile, current_hunk)
933 917 elif state == BFILE and x.startswith('GIT binary patch'):
934 918 current_hunk = binhunk(changed[bfile[2:]][1])
935 919 hunknum += 1
936 if not current_file:
937 current_file = getpatchfile(afile, bfile, current_hunk)
938 if not current_file:
939 current_file, current_hunk = None, None
940 rejects += 1
941 continue
920 if emitfile:
921 emitfile = False
922 yield 'file', (afile, bfile, current_hunk)
942 923 current_hunk.extract(fp)
943 924 elif x.startswith('diff --git'):
944 925 # check for git diff, scanning the whole patch file if needed
945 926 m = gitre.match(x)
946 927 if m:
947 928 afile, bfile = m.group(1, 2)
948 929 if not git:
949 930 git = True
950 931 fp, dopatch, gitpatches = scangitpatch(fp, x)
932 yield 'git', gitpatches
951 933 for gp in gitpatches:
952 934 changed[gp.path] = (gp.op, gp)
953 935 # else error?
954 936 # copy/rename + modify should modify target, not source
955 937 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
956 938 'RENAME'):
957 939 afile = bfile
958 940 gitworkdone = True
959 941 newfile = True
960 942 elif x.startswith('---'):
961 943 # check for a unified diff
962 944 l2 = lr.readline()
963 945 if not l2.startswith('+++'):
964 946 lr.push(l2)
965 947 continue
966 948 newfile = True
967 949 context = False
968 950 afile = parsefilename(x)
969 951 bfile = parsefilename(l2)
970 952 elif x.startswith('***'):
971 953 # check for a context diff
972 954 l2 = lr.readline()
973 955 if not l2.startswith('---'):
974 956 lr.push(l2)
975 957 continue
976 958 l3 = lr.readline()
977 959 lr.push(l3)
978 960 if not l3.startswith("***************"):
979 961 lr.push(l2)
980 962 continue
981 963 newfile = True
982 964 context = True
983 965 afile = parsefilename(x)
984 966 bfile = parsefilename(l2)
985 967
986 968 if newfile:
987 if current_file:
988 current_file.close()
989 if rejmerge:
990 rejmerge(current_file)
991 rejects += len(current_file.rej)
969 emitfile = True
992 970 state = BFILE
993 current_file = None
994 971 hunknum = 0
995 972 if current_hunk:
996 973 if current_hunk.complete():
974 yield 'hunk', current_hunk
975 else:
976 raise PatchError(_("malformed patch %s %s") % (afile,
977 current_hunk.desc))
978
979 if hunknum == 0 and dopatch and not gitworkdone:
980 raise NoHunks
981
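A sketch of consuming the event stream documented above, again assuming a Mercurial installation of this vintage (mercurial.patch plus its diffhelpers helper) and faking the little bit of ui that iterhunks() touches:

    import cStringIO
    from mercurial import patch

    class fakeui:                 # hypothetical stub, enough for iterhunks()
        def debug(self, *a): pass
        def warn(self, *a): pass

    diff = ('--- a/x.txt\n'
            '+++ b/x.txt\n'
            '@@ -1,1 +1,1 @@\n'
            '-old\n'
            '+new\n')
    for state, values in patch.iterhunks(fakeui(), cStringIO.StringIO(diff)):
        print state               # 'file', then 'hunk'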
982 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
983 rejmerge=None, updatedir=None):
984 """reads a patch from fp and tries to apply it. The dict 'changed' is
985 filled in with all of the filenames changed by the patch. Returns 0
986 for a clean patch, -1 if any rejects were found and 1 if there was
987 any fuzz."""
988
989 rejects = 0
990 err = 0
991 current_file = None
992 gitpatches = None
993
994 def closefile():
995 if not current_file:
996 return 0
997 current_file.close()
998 if rejmerge:
999 rejmerge(current_file)
1000 return len(current_file.rej)
1001
1002 for state, values in iterhunks(ui, fp, sourcefile):
1003 if state == 'hunk':
1004 if not current_file:
1005 continue
1006 current_hunk = values
997 1007 ret = current_file.apply(current_hunk, reverse)
998 1008 if ret >= 0:
999 1009 changed.setdefault(current_file.fname, (None, None))
1000 1010 if ret > 0:
1001 1011 err = 1
1012 elif state == 'file':
1013 rejects += closefile()
1014 afile, bfile, first_hunk = values
1015 try:
1016 if sourcefile:
1017 current_file = patchfile(ui, sourcefile)
1018 else:
1019 current_file, missing = selectfile(afile, bfile, first_hunk,
1020 strip, reverse)
1021 current_file = patchfile(ui, current_file, missing)
1022 except PatchError, err:
1023 ui.warn(str(err) + '\n')
1024 current_file, current_hunk = None, None
1025 rejects += 1
1026 continue
1027 elif state == 'git':
1028 gitpatches = values
1029 for gp in gitpatches:
1030 if gp.op in ('COPY', 'RENAME'):
1031 copyfile(gp.oldpath, gp.path)
1032 changed[gp.path] = (gp.op, gp)
1002 1033 else:
1003 fname = current_file and current_file.fname or None
1004 raise PatchError(_("malformed patch %s %s") % (fname,
1005 current_hunk.desc))
1006 if current_file:
1007 current_file.close()
1008 if rejmerge:
1009 rejmerge(current_file)
1010 rejects += len(current_file.rej)
1011 if updatedir and git:
1034 raise util.Abort(_('unsupported parser state: %s') % state)
1035
1036 rejects += closefile()
1037
1038 if updatedir and gitpatches:
1012 1039 updatedir(gitpatches)
1013 1040 if rejects:
1014 1041 return -1
1015 if hunknum == 0 and dopatch and not gitworkdone:
1016 raise NoHunks
1017 1042 return err
1018 1043
1019 1044 def diffopts(ui, opts={}, untrusted=False):
1020 1045 def get(key, name=None):
1021 1046 return (opts.get(key) or
1022 1047 ui.configbool('diff', name or key, None, untrusted=untrusted))
1023 1048 return mdiff.diffopts(
1024 1049 text=opts.get('text'),
1025 1050 git=get('git'),
1026 1051 nodates=get('nodates'),
1027 1052 showfunc=get('show_function', 'showfunc'),
1028 1053 ignorews=get('ignore_all_space', 'ignorews'),
1029 1054 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1030 1055 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1031 1056
1032 1057 def updatedir(ui, repo, patches):
1033 1058 '''Update dirstate after patch application according to metadata'''
1034 1059 if not patches:
1035 1060 return
1036 1061 copies = []
1037 1062 removes = {}
1038 1063 cfiles = patches.keys()
1039 1064 cwd = repo.getcwd()
1040 1065 if cwd:
1041 1066 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1042 1067 for f in patches:
1043 1068 ctype, gp = patches[f]
1044 1069 if ctype == 'RENAME':
1045 1070 copies.append((gp.oldpath, gp.path))
1046 1071 removes[gp.oldpath] = 1
1047 1072 elif ctype == 'COPY':
1048 1073 copies.append((gp.oldpath, gp.path))
1049 1074 elif ctype == 'DELETE':
1050 1075 removes[gp.path] = 1
1051 1076 for src, dst in copies:
1052 1077 repo.copy(src, dst)
1053 1078 removes = removes.keys()
1054 1079 if removes:
1055 1080 removes.sort()
1056 1081 repo.remove(removes, True)
1057 1082 for f in patches:
1058 1083 ctype, gp = patches[f]
1059 1084 if gp and gp.mode:
1060 1085 x = gp.mode & 0100 != 0
1061 1086 l = gp.mode & 020000 != 0
1062 1087 dst = os.path.join(repo.root, gp.path)
1063 1088 # patch won't create empty files
1064 1089 if ctype == 'ADD' and not os.path.exists(dst):
1065 1090 repo.wwrite(gp.path, '', x and 'x' or '')
1066 1091 else:
1067 1092 util.set_link(dst, l)
1068 1093 if not l:
1069 1094 util.set_exec(dst, x)
1070 1095 cmdutil.addremove(repo, cfiles)
1071 1096 files = patches.keys()
1072 1097 files.extend([r for r in removes if r not in files])
1073 1098 files.sort()
1074 1099
1075 1100 return files
1076 1101
1077 1102 def b85diff(to, tn):
1078 1103 '''print base85-encoded binary diff'''
1079 1104 def gitindex(text):
1080 1105 if not text:
1081 1106 return '0' * 40
1082 1107 l = len(text)
1083 1108 s = sha.new('blob %d\0' % l)
1084 1109 s.update(text)
1085 1110 return s.hexdigest()
1086 1111
1087 1112 def fmtline(line):
1088 1113 l = len(line)
1089 1114 if l <= 26:
1090 1115 l = chr(ord('A') + l - 1)
1091 1116 else:
1092 1117 l = chr(l - 26 + ord('a') - 1)
1093 1118 return '%c%s\n' % (l, base85.b85encode(line, True))
1094 1119
1095 1120 def chunk(text, csize=52):
1096 1121 l = len(text)
1097 1122 i = 0
1098 1123 while i < l:
1099 1124 yield text[i:i+csize]
1100 1125 i += csize
1101 1126
1102 1127 tohash = gitindex(to)
1103 1128 tnhash = gitindex(tn)
1104 1129 if tohash == tnhash:
1105 1130 return ""
1106 1131
1107 1132 # TODO: deltas
1108 1133 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1109 1134 (tohash, tnhash, len(tn))]
1110 1135 for l in chunk(zlib.compress(tn)):
1111 1136 ret.append(fmtline(l))
1112 1137 ret.append('\n')
1113 1138 return ''.join(ret)
1114 1139
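gitindex() above is simply git's blob hash: SHA-1 over 'blob <size>\0' followed by the content, so the ids on the 'index' line match what git itself computes. For example:

    import sha
    text = 'hello\n'
    print sha.new('blob %d\0' % len(text) + text).hexdigest()
    # ce013625030ba8dba906f756967f9e9ca394464a -- the id 'git hash-object' reports for 'hello\n'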
1115 1140 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1116 1141 fp=None, changes=None, opts=None):
1117 1142 '''print diff of changes to files between two nodes, or node and
1118 1143 working directory.
1119 1144
1120 1145 if node1 is None, use first dirstate parent instead.
1121 1146 if node2 is None, compare node1 with working directory.'''
1122 1147
1123 1148 if opts is None:
1124 1149 opts = mdiff.defaultopts
1125 1150 if fp is None:
1126 1151 fp = repo.ui
1127 1152
1128 1153 if not node1:
1129 1154 node1 = repo.dirstate.parents()[0]
1130 1155
1131 1156 ccache = {}
1132 1157 def getctx(r):
1133 1158 if r not in ccache:
1134 1159 ccache[r] = context.changectx(repo, r)
1135 1160 return ccache[r]
1136 1161
1137 1162 flcache = {}
1138 1163 def getfilectx(f, ctx):
1139 1164 flctx = ctx.filectx(f, filelog=flcache.get(f))
1140 1165 if f not in flcache:
1141 1166 flcache[f] = flctx._filelog
1142 1167 return flctx
1143 1168
1144 1169 # reading the data for node1 early allows it to play nicely
1145 1170 # with repo.status and the revlog cache.
1146 1171 ctx1 = context.changectx(repo, node1)
1147 1172 # force manifest reading
1148 1173 man1 = ctx1.manifest()
1149 1174 date1 = util.datestr(ctx1.date())
1150 1175
1151 1176 if not changes:
1152 1177 changes = repo.status(node1, node2, files, match=match)[:5]
1153 1178 modified, added, removed, deleted, unknown = changes
1154 1179
1155 1180 if not modified and not added and not removed:
1156 1181 return
1157 1182
1158 1183 if node2:
1159 1184 ctx2 = context.changectx(repo, node2)
1160 1185 execf2 = ctx2.manifest().execf
1161 1186 linkf2 = ctx2.manifest().linkf
1162 1187 else:
1163 1188 ctx2 = context.workingctx(repo)
1164 1189 execf2 = util.execfunc(repo.root, None)
1165 1190 linkf2 = util.linkfunc(repo.root, None)
1166 1191 if execf2 is None:
1167 1192 mc = ctx2.parents()[0].manifest().copy()
1168 1193 execf2 = mc.execf
1169 1194 linkf2 = mc.linkf
1170 1195
1171 1196 # returns False if there was no rename between ctx1 and ctx2
1172 1197 # returns None if the file was created between ctx1 and ctx2
1173 1198 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1174 1199 # This will only really work if c1 is the Nth 1st parent of c2.
1175 1200 def renamed(c1, c2, man, f):
1176 1201 startrev = c1.rev()
1177 1202 c = c2
1178 1203 crev = c.rev()
1179 1204 if crev is None:
1180 1205 crev = repo.changelog.count()
1181 1206 orig = f
1182 1207 files = (f,)
1183 1208 while crev > startrev:
1184 1209 if f in files:
1185 1210 try:
1186 1211 src = getfilectx(f, c).renamed()
1187 1212 except revlog.LookupError:
1188 1213 return None
1189 1214 if src:
1190 1215 f = src[0]
1191 1216 crev = c.parents()[0].rev()
1192 1217 # try to reuse
1193 1218 c = getctx(crev)
1194 1219 files = c.files()
1195 1220 if f not in man:
1196 1221 return None
1197 1222 if f == orig:
1198 1223 return False
1199 1224 return f
1200 1225
1201 1226 if repo.ui.quiet:
1202 1227 r = None
1203 1228 else:
1204 1229 hexfunc = repo.ui.debugflag and hex or short
1205 1230 r = [hexfunc(node) for node in [node1, node2] if node]
1206 1231
1207 1232 if opts.git:
1208 1233 copied = {}
1209 1234 c1, c2 = ctx1, ctx2
1210 1235 files = added
1211 1236 man = man1
1212 1237 if node2 and ctx1.rev() >= ctx2.rev():
1213 1238 # renamed() starts at c2 and walks back in history until c1.
1214 1239 # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
1215 1240 # detect (inverted) copies.
1216 1241 c1, c2 = ctx2, ctx1
1217 1242 files = removed
1218 1243 man = ctx2.manifest()
1219 1244 for f in files:
1220 1245 src = renamed(c1, c2, man, f)
1221 1246 if src:
1222 1247 copied[f] = src
1223 1248 if ctx1 == c2:
1224 1249 # invert the copied dict
1225 1250 copied = dict([(v, k) for (k, v) in copied.iteritems()])
1226 1251 # If we've renamed file foo to bar (copied['bar'] = 'foo'),
1227 1252 # avoid showing a diff for foo if we're going to show
1228 1253 # the rename to bar.
1229 1254 srcs = [x[1] for x in copied.iteritems() if x[0] in added]
1230 1255
1231 1256 all = modified + added + removed
1232 1257 all.sort()
1233 1258 gone = {}
1234 1259
1235 1260 for f in all:
1236 1261 to = None
1237 1262 tn = None
1238 1263 dodiff = True
1239 1264 header = []
1240 1265 if f in man1:
1241 1266 to = getfilectx(f, ctx1).data()
1242 1267 if f not in removed:
1243 1268 tn = getfilectx(f, ctx2).data()
1244 1269 a, b = f, f
1245 1270 if opts.git:
1246 1271 def gitmode(x, l):
1247 1272 return l and '120000' or (x and '100755' or '100644')
1248 1273 def addmodehdr(header, omode, nmode):
1249 1274 if omode != nmode:
1250 1275 header.append('old mode %s\n' % omode)
1251 1276 header.append('new mode %s\n' % nmode)
1252 1277
1253 1278 if f in added:
1254 1279 mode = gitmode(execf2(f), linkf2(f))
1255 1280 if f in copied:
1256 1281 a = copied[f]
1257 1282 omode = gitmode(man1.execf(a), man1.linkf(a))
1258 1283 addmodehdr(header, omode, mode)
1259 1284 if a in removed and a not in gone:
1260 1285 op = 'rename'
1261 1286 gone[a] = 1
1262 1287 else:
1263 1288 op = 'copy'
1264 1289 header.append('%s from %s\n' % (op, a))
1265 1290 header.append('%s to %s\n' % (op, f))
1266 1291 to = getfilectx(a, ctx1).data()
1267 1292 else:
1268 1293 header.append('new file mode %s\n' % mode)
1269 1294 if util.binary(tn):
1270 1295 dodiff = 'binary'
1271 1296 elif f in removed:
1272 1297 if f in srcs:
1273 1298 dodiff = False
1274 1299 else:
1275 1300 mode = gitmode(man1.execf(f), man1.linkf(f))
1276 1301 header.append('deleted file mode %s\n' % mode)
1277 1302 else:
1278 1303 omode = gitmode(man1.execf(f), man1.linkf(f))
1279 1304 nmode = gitmode(execf2(f), linkf2(f))
1280 1305 addmodehdr(header, omode, nmode)
1281 1306 if util.binary(to) or util.binary(tn):
1282 1307 dodiff = 'binary'
1283 1308 r = None
1284 1309 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1285 1310 if dodiff:
1286 1311 if dodiff == 'binary':
1287 1312 text = b85diff(to, tn)
1288 1313 else:
1289 1314 text = mdiff.unidiff(to, date1,
1290 1315 # ctx2 date may be dynamic
1291 1316 tn, util.datestr(ctx2.date()),
1292 1317 a, b, r, opts=opts)
1293 1318 if text or len(header) > 1:
1294 1319 fp.write(''.join(header))
1295 1320 fp.write(text)
1296 1321
1297 1322 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1298 1323 opts=None):
1299 1324 '''export changesets as hg patches.'''
1300 1325
1301 1326 total = len(revs)
1302 1327 revwidth = max([len(str(rev)) for rev in revs])
1303 1328
1304 1329 def single(rev, seqno, fp):
1305 1330 ctx = repo.changectx(rev)
1306 1331 node = ctx.node()
1307 1332 parents = [p.node() for p in ctx.parents() if p]
1308 1333 branch = ctx.branch()
1309 1334 if switch_parent:
1310 1335 parents.reverse()
1311 1336 prev = (parents and parents[0]) or nullid
1312 1337
1313 1338 if not fp:
1314 1339 fp = cmdutil.make_file(repo, template, node, total=total,
1315 1340 seqno=seqno, revwidth=revwidth)
1316 1341 if fp != sys.stdout and hasattr(fp, 'name'):
1317 1342 repo.ui.note("%s\n" % fp.name)
1318 1343
1319 1344 fp.write("# HG changeset patch\n")
1320 1345 fp.write("# User %s\n" % ctx.user())
1321 1346 fp.write("# Date %d %d\n" % ctx.date())
1322 1347 if branch and (branch != 'default'):
1323 1348 fp.write("# Branch %s\n" % branch)
1324 1349 fp.write("# Node ID %s\n" % hex(node))
1325 1350 fp.write("# Parent %s\n" % hex(prev))
1326 1351 if len(parents) > 1:
1327 1352 fp.write("# Parent %s\n" % hex(parents[1]))
1328 1353 fp.write(ctx.description().rstrip())
1329 1354 fp.write("\n\n")
1330 1355
1331 1356 diff(repo, prev, node, fp=fp, opts=opts)
1332 1357 if fp not in (sys.stdout, repo.ui):
1333 1358 fp.close()
1334 1359
1335 1360 for seqno, rev in enumerate(revs):
1336 1361 single(rev, seqno+1, fp)
1337 1362
1338 1363 def diffstat(patchlines):
1339 1364 if not util.find_exe('diffstat'):
1340 1365 return
1341 1366 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1342 1367 try:
1343 1368 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1344 1369 try:
1345 1370 for line in patchlines: print >> p.tochild, line
1346 1371 p.tochild.close()
1347 1372 if p.wait(): return
1348 1373 fp = os.fdopen(fd, 'r')
1349 1374 stat = []
1350 1375 for line in fp: stat.append(line.lstrip())
1351 1376 last = stat.pop()
1352 1377 stat.insert(0, last)
1353 1378 stat = ''.join(stat)
1354 1379 if stat.startswith('0 files'): raise ValueError
1355 1380 return stat
1356 1381 except: raise
1357 1382 finally:
1358 1383 try: os.unlink(name)
1359 1384 except: pass
@@ -1,1742 +1,1750 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
17 17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 18 import re, urlparse
19 19
20 20 try:
21 21 set = set
22 22 frozenset = frozenset
23 23 except NameError:
24 24 from sets import Set as set, ImmutableSet as frozenset
25 25
26 26 try:
27 27 _encoding = os.environ.get("HGENCODING")
28 28 if sys.platform == 'darwin' and not _encoding:
29 29 # On darwin, getpreferredencoding ignores the locale environment and
30 30 # always returns mac-roman. We override this if the environment is
31 31 # not C (has been customized by the user).
32 32 locale.setlocale(locale.LC_CTYPE, '')
33 33 _encoding = locale.getlocale()[1]
34 34 if not _encoding:
35 35 _encoding = locale.getpreferredencoding() or 'ascii'
36 36 except locale.Error:
37 37 _encoding = 'ascii'
38 38 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
39 39 _fallbackencoding = 'ISO-8859-1'
40 40
41 41 def tolocal(s):
42 42 """
43 43 Convert a string from internal UTF-8 to local encoding
44 44
45 45 All internal strings should be UTF-8 but some repos before the
46 46 implementation of locale support may contain latin1 or possibly
47 47 other character sets. We attempt to decode everything strictly
48 48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
49 49 replace unknown characters.
50 50 """
51 51 for e in ('UTF-8', _fallbackencoding):
52 52 try:
53 53 u = s.decode(e) # attempt strict decoding
54 54 return u.encode(_encoding, "replace")
55 55 except LookupError, k:
56 56 raise Abort(_("%s, please check your locale settings") % k)
57 57 except UnicodeDecodeError:
58 58 pass
59 59 u = s.decode("utf-8", "replace") # last ditch
60 60 return u.encode(_encoding, "replace")
61 61
62 62 def fromlocal(s):
63 63 """
64 64 Convert a string from the local character encoding to UTF-8
65 65
66 66 We attempt to decode strings using the encoding mode set by
67 67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
68 68 characters will cause an error message. Other modes include
69 69 'replace', which replaces unknown characters with a special
70 70 Unicode character, and 'ignore', which drops the character.
71 71 """
72 72 try:
73 73 return s.decode(_encoding, _encodingmode).encode("utf-8")
74 74 except UnicodeDecodeError, inst:
75 75 sub = s[max(0, inst.start-10):inst.start+10]
76 76 raise Abort("decoding near '%s': %s!" % (sub, inst))
77 77 except LookupError, k:
78 78 raise Abort(_("%s, please check your locale settings") % k)
79 79
80 80 def locallen(s):
81 81 """Find the length in characters of a local string"""
82 82 return len(s.decode(_encoding, "replace"))
83 83
84 84 def localsub(s, a, b=None):
85 85 try:
86 86 u = s.decode(_encoding, _encodingmode)
87 87 if b is not None:
88 88 u = u[a:b]
89 89 else:
90 90 u = u[:a]
91 91 return u.encode(_encoding, _encodingmode)
92 92 except UnicodeDecodeError, inst:
93 93 sub = s[max(0, inst.start-10):inst.start+10]
94 94 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
95 95
96 96 # used by parsedate
97 97 defaultdateformats = (
98 98 '%Y-%m-%d %H:%M:%S',
99 99 '%Y-%m-%d %I:%M:%S%p',
100 100 '%Y-%m-%d %H:%M',
101 101 '%Y-%m-%d %I:%M%p',
102 102 '%Y-%m-%d',
103 103 '%m-%d',
104 104 '%m/%d',
105 105 '%m/%d/%y',
106 106 '%m/%d/%Y',
107 107 '%a %b %d %H:%M:%S %Y',
108 108 '%a %b %d %I:%M:%S%p %Y',
109 109 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
110 110 '%b %d %H:%M:%S %Y',
111 111 '%b %d %I:%M:%S%p %Y',
112 112 '%b %d %H:%M:%S',
113 113 '%b %d %I:%M:%S%p',
114 114 '%b %d %H:%M',
115 115 '%b %d %I:%M%p',
116 116 '%b %d %Y',
117 117 '%b %d',
118 118 '%H:%M:%S',
119 119 '%I:%M:%S%p',
120 120 '%H:%M',
121 121 '%I:%M%p',
122 122 )
123 123
124 124 extendeddateformats = defaultdateformats + (
125 125 "%Y",
126 126 "%Y-%m",
127 127 "%b",
128 128 "%b %Y",
129 129 )
130 130
131 131 class SignalInterrupt(Exception):
132 132 """Exception raised on SIGTERM and SIGHUP."""
133 133
134 134 # differences from SafeConfigParser:
135 135 # - case-sensitive keys
136 136 # - allows values that are not strings (this means that you may not
137 137 # be able to save the configuration to a file)
138 138 class configparser(ConfigParser.SafeConfigParser):
139 139 def optionxform(self, optionstr):
140 140 return optionstr
141 141
142 142 def set(self, section, option, value):
143 143 return ConfigParser.ConfigParser.set(self, section, option, value)
144 144
145 145 def _interpolate(self, section, option, rawval, vars):
146 146 if not isinstance(rawval, basestring):
147 147 return rawval
148 148 return ConfigParser.SafeConfigParser._interpolate(self, section,
149 149 option, rawval, vars)
150 150
151 151 def cachefunc(func):
152 152 '''cache the result of function calls'''
153 153 # XXX doesn't handle keywords args
154 154 cache = {}
155 155 if func.func_code.co_argcount == 1:
156 156 # we gain a small amount of time because
157 157 # we don't need to pack/unpack the list
158 158 def f(arg):
159 159 if arg not in cache:
160 160 cache[arg] = func(arg)
161 161 return cache[arg]
162 162 else:
163 163 def f(*args):
164 164 if args not in cache:
165 165 cache[args] = func(*args)
166 166 return cache[args]
167 167
168 168 return f
169 169
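Roughly, assuming this util.py is importable as mercurial.util:

    from mercurial import util

    calls = []
    def square(x):
        calls.append(x)
        return x * x

    csquare = util.cachefunc(square)
    print csquare(4), csquare(4)   # 16 16
    print len(calls)               # 1 -- the second call came from the cache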
170 170 def pipefilter(s, cmd):
171 171 '''filter string S through command CMD, returning its output'''
172 172 (pin, pout) = os.popen2(cmd, 'b')
173 173 def writer():
174 174 try:
175 175 pin.write(s)
176 176 pin.close()
177 177 except IOError, inst:
178 178 if inst.errno != errno.EPIPE:
179 179 raise
180 180
181 181 # we should use select instead on UNIX, but this will work on most
182 182 # systems, including Windows
183 183 w = threading.Thread(target=writer)
184 184 w.start()
185 185 f = pout.read()
186 186 pout.close()
187 187 w.join()
188 188 return f
189 189
190 190 def tempfilter(s, cmd):
191 191 '''filter string S through a pair of temporary files with CMD.
192 192 CMD is used as a template to create the real command to be run,
193 193 with the strings INFILE and OUTFILE replaced by the real names of
194 194 the temporary files generated.'''
195 195 inname, outname = None, None
196 196 try:
197 197 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
198 198 fp = os.fdopen(infd, 'wb')
199 199 fp.write(s)
200 200 fp.close()
201 201 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
202 202 os.close(outfd)
203 203 cmd = cmd.replace('INFILE', inname)
204 204 cmd = cmd.replace('OUTFILE', outname)
205 205 code = os.system(cmd)
206 206 if sys.platform == 'OpenVMS' and code & 1:
207 207 code = 0
208 208 if code: raise Abort(_("command '%s' failed: %s") %
209 209 (cmd, explain_exit(code)))
210 210 return open(outname, 'rb').read()
211 211 finally:
212 212 try:
213 213 if inname: os.unlink(inname)
214 214 except: pass
215 215 try:
216 216 if outname: os.unlink(outname)
217 217 except: pass
218 218
219 219 filtertable = {
220 220 'tempfile:': tempfilter,
221 221 'pipe:': pipefilter,
222 222 }
223 223
224 224 def filter(s, cmd):
225 225 "filter a string through a command that transforms its input to its output"
226 226 for name, fn in filtertable.iteritems():
227 227 if cmd.startswith(name):
228 228 return fn(s, cmd[len(name):].lstrip())
229 229 return pipefilter(s, cmd)
230 230
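As a usage sketch (a Unix-like shell with tr and sort is assumed), both prefixes route through the table above:

    from mercurial import util     # assumption: importable as mercurial.util

    print util.filter('hello\n', 'pipe:tr a-z A-Z')                  # HELLO
    print util.filter('b\na\n', 'tempfile:sort INFILE > OUTFILE')    # a\nb\n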
231 231 def binary(s):
232 232 """return true if a string is binary data using diff's heuristic"""
233 233 if s and '\0' in s[:4096]:
234 234 return True
235 235 return False
236 236
237 237 def unique(g):
238 238 """return the unique elements of iterable g"""
239 239 seen = {}
240 240 l = []
241 241 for f in g:
242 242 if f not in seen:
243 243 seen[f] = 1
244 244 l.append(f)
245 245 return l
246 246
247 247 class Abort(Exception):
248 248 """Raised if a command needs to print an error and exit."""
249 249
250 250 class UnexpectedOutput(Abort):
251 251 """Raised to print an error with part of output and exit."""
252 252
253 253 def always(fn): return True
254 254 def never(fn): return False
255 255
256 256 def expand_glob(pats):
257 257 '''On Windows, expand the implicit globs in a list of patterns'''
258 258 if os.name != 'nt':
259 259 return list(pats)
260 260 ret = []
261 261 for p in pats:
262 262 kind, name = patkind(p, None)
263 263 if kind is None:
264 264 globbed = glob.glob(name)
265 265 if globbed:
266 266 ret.extend(globbed)
267 267 continue
268 268 # if we couldn't expand the glob, just keep it around
269 269 ret.append(p)
270 270 return ret
271 271
272 272 def patkind(name, dflt_pat='glob'):
273 273 """Split a string into an optional pattern kind prefix and the
274 274 actual pattern."""
275 275 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
276 276 if name.startswith(prefix + ':'): return name.split(':', 1)
277 277 return dflt_pat, name
278 278
279 279 def globre(pat, head='^', tail='$'):
280 280 "convert a glob pattern into a regexp"
281 281 i, n = 0, len(pat)
282 282 res = ''
283 283 group = False
284 284 def peek(): return i < n and pat[i]
285 285 while i < n:
286 286 c = pat[i]
287 287 i = i+1
288 288 if c == '*':
289 289 if peek() == '*':
290 290 i += 1
291 291 res += '.*'
292 292 else:
293 293 res += '[^/]*'
294 294 elif c == '?':
295 295 res += '.'
296 296 elif c == '[':
297 297 j = i
298 298 if j < n and pat[j] in '!]':
299 299 j += 1
300 300 while j < n and pat[j] != ']':
301 301 j += 1
302 302 if j >= n:
303 303 res += '\\['
304 304 else:
305 305 stuff = pat[i:j].replace('\\','\\\\')
306 306 i = j + 1
307 307 if stuff[0] == '!':
308 308 stuff = '^' + stuff[1:]
309 309 elif stuff[0] == '^':
310 310 stuff = '\\' + stuff
311 311 res = '%s[%s]' % (res, stuff)
312 312 elif c == '{':
313 313 group = True
314 314 res += '(?:'
315 315 elif c == '}' and group:
316 316 res += ')'
317 317 group = False
318 318 elif c == ',' and group:
319 319 res += '|'
320 320 elif c == '\\':
321 321 p = peek()
322 322 if p:
323 323 i += 1
324 324 res += re.escape(p)
325 325 else:
326 326 res += re.escape(c)
327 327 else:
328 328 res += re.escape(c)
329 329 return head + res + tail
330 330
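Concretely, '*' stops at a slash while '**' crosses directories, matching the translation above (assuming mercurial.util is importable):

    import re
    from mercurial import util

    print util.globre('*.py')                                     # ^[^/]*\.py$
    print bool(re.match(util.globre('*.py'), 'setup.py'))         # True
    print bool(re.match(util.globre('*.py'), 'hgext/mq.py'))      # False
    print bool(re.match(util.globre('**/*.py'), 'hgext/mq.py'))   # True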
331 331 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
332 332
333 333 def pathto(root, n1, n2):
334 334 '''return the relative path from one place to another.
335 335 root should use os.sep to separate directories
336 336 n1 should use os.sep to separate directories
337 337 n2 should use "/" to separate directories
338 338 returns an os.sep-separated path.
339 339
340 340 If n1 is a relative path, it's assumed it's
341 341 relative to root.
342 342 n2 should always be relative to root.
343 343 '''
344 344 if not n1: return localpath(n2)
345 345 if os.path.isabs(n1):
346 346 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
347 347 return os.path.join(root, localpath(n2))
348 348 n2 = '/'.join((pconvert(root), n2))
349 349 a, b = n1.split(os.sep), n2.split('/')
350 350 a.reverse()
351 351 b.reverse()
352 352 while a and b and a[-1] == b[-1]:
353 353 a.pop()
354 354 b.pop()
355 355 b.reverse()
356 356 return os.sep.join((['..'] * len(a)) + b)
357 357
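For instance, on a POSIX box (n1 may use os.sep, n2 always uses '/'), assuming mercurial.util imports:

    from mercurial import util
    print util.pathto('/repo', 'a/b', 'a/c/file.txt')   # ../c/file.txt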
358 358 def canonpath(root, cwd, myname):
359 359 """return the canonical path of myname, given cwd and root"""
360 360 if root == os.sep:
361 361 rootsep = os.sep
362 362 elif root.endswith(os.sep):
363 363 rootsep = root
364 364 else:
365 365 rootsep = root + os.sep
366 366 name = myname
367 367 if not os.path.isabs(name):
368 368 name = os.path.join(root, cwd, name)
369 369 name = os.path.normpath(name)
370 370 audit_path = path_auditor(root)
371 371 if name != rootsep and name.startswith(rootsep):
372 372 name = name[len(rootsep):]
373 373 audit_path(name)
374 374 return pconvert(name)
375 375 elif name == root:
376 376 return ''
377 377 else:
378 378 # Determine whether `name' is in the hierarchy at or beneath `root',
379 379 # by iterating name=dirname(name) until that causes no change (can't
380 380 # check name == '/', because that doesn't work on windows). For each
381 381 # `name', compare dev/inode numbers. If they match, the list `rel'
382 382 # holds the reversed list of components making up the relative file
383 383 # name we want.
384 384 root_st = os.stat(root)
385 385 rel = []
386 386 while True:
387 387 try:
388 388 name_st = os.stat(name)
389 389 except OSError:
390 390 break
391 391 if samestat(name_st, root_st):
392 392 if not rel:
393 393 # name was actually the same as root (maybe a symlink)
394 394 return ''
395 395 rel.reverse()
396 396 name = os.path.join(*rel)
397 397 audit_path(name)
398 398 return pconvert(name)
399 399 dirname, basename = os.path.split(name)
400 400 rel.append(basename)
401 401 if dirname == name:
402 402 break
403 403 name = dirname
404 404
405 405 raise Abort('%s not under root' % myname)
406 406
407 407 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
408 408 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
409 409
410 410 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
411 411 globbed=False, default=None):
412 412 default = default or 'relpath'
413 413 if default == 'relpath' and not globbed:
414 414 names = expand_glob(names)
415 415 return _matcher(canonroot, cwd, names, inc, exc, default, src)
416 416
417 417 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
418 418 """build a function to match a set of file patterns
419 419
420 420 arguments:
421 421 canonroot - the canonical root of the tree you're matching against
422 422 cwd - the current working directory, if relevant
423 423 names - patterns to find
424 424 inc - patterns to include
425 425 exc - patterns to exclude
426 426 dflt_pat - if a pattern in names has no explicit type, assume this one
427 427 src - where these patterns came from (e.g. .hgignore)
428 428
429 429 a pattern is one of:
430 430 'glob:<glob>' - a glob relative to cwd
431 431 're:<regexp>' - a regular expression
432 432 'path:<path>' - a path relative to canonroot
433 433 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
434 434 'relpath:<path>' - a path relative to cwd
435 435 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
436 436 '<something>' - one of the cases above, selected by the dflt_pat argument
437 437
438 438 returns:
439 439 a 3-tuple containing
440 440 - list of roots (places where one should start a recursive walk of the fs);
441 441 this often matches the explicit non-pattern names passed in, but also
442 442 includes the initial part of glob: patterns that has no glob characters
443 443 - a bool match(filename) function
444 444 - a bool indicating if any patterns were passed in
445 445 """
446 446
447 447 # a common case: no patterns at all
448 448 if not names and not inc and not exc:
449 449 return [], always, False
450 450
451 451 def contains_glob(name):
452 452 for c in name:
453 453 if c in _globchars: return True
454 454 return False
455 455
456 456 def regex(kind, name, tail):
457 457 '''convert a pattern into a regular expression'''
458 458 if not name:
459 459 return ''
460 460 if kind == 're':
461 461 return name
462 462 elif kind == 'path':
463 463 return '^' + re.escape(name) + '(?:/|$)'
464 464 elif kind == 'relglob':
465 465 return globre(name, '(?:|.*/)', tail)
466 466 elif kind == 'relpath':
467 467 return re.escape(name) + '(?:/|$)'
468 468 elif kind == 'relre':
469 469 if name.startswith('^'):
470 470 return name
471 471 return '.*' + name
472 472 return globre(name, '', tail)
473 473
474 474 def matchfn(pats, tail):
475 475 """build a matching function from a set of patterns"""
476 476 if not pats:
477 477 return
478 478 try:
479 479 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
480 480 return re.compile(pat).match
481 481 except OverflowError:
482 482 # We're using a Python with a tiny regex engine and we
483 483 # made it explode, so we'll divide the pattern list in two
484 484 # until it works
485 485 l = len(pats)
486 486 if l < 2:
487 487 raise
488 488 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
489 489 return lambda s: a(s) or b(s)
490 490 except re.error:
491 491 for k, p in pats:
492 492 try:
493 493 re.compile('(?:%s)' % regex(k, p, tail))
494 494 except re.error:
495 495 if src:
496 496 raise Abort("%s: invalid pattern (%s): %s" %
497 497 (src, k, p))
498 498 else:
499 499 raise Abort("invalid pattern (%s): %s" % (k, p))
500 500 raise Abort("invalid pattern")
501 501
502 502 def globprefix(pat):
503 503 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
504 504 root = []
505 505 for p in pat.split('/'):
506 506 if contains_glob(p): break
507 507 root.append(p)
508 508 return '/'.join(root) or '.'
509 509
510 510 def normalizepats(names, default):
511 511 pats = []
512 512 roots = []
513 513 anypats = False
514 514 for kind, name in [patkind(p, default) for p in names]:
515 515 if kind in ('glob', 'relpath'):
516 516 name = canonpath(canonroot, cwd, name)
517 517 elif kind in ('relglob', 'path'):
518 518 name = normpath(name)
519 519
520 520 pats.append((kind, name))
521 521
522 522 if kind in ('glob', 're', 'relglob', 'relre'):
523 523 anypats = True
524 524
525 525 if kind == 'glob':
526 526 root = globprefix(name)
527 527 roots.append(root)
528 528 elif kind in ('relpath', 'path'):
529 529 roots.append(name or '.')
530 530 elif kind == 'relglob':
531 531 roots.append('.')
532 532 return roots, pats, anypats
533 533
534 534 roots, pats, anypats = normalizepats(names, dflt_pat)
535 535
536 536 patmatch = matchfn(pats, '$') or always
537 537 incmatch = always
538 538 if inc:
539 539 dummy, inckinds, dummy = normalizepats(inc, 'glob')
540 540 incmatch = matchfn(inckinds, '(?:/|$)')
541 541 excmatch = lambda fn: False
542 542 if exc:
543 543 dummy, exckinds, dummy = normalizepats(exc, 'glob')
544 544 excmatch = matchfn(exckinds, '(?:/|$)')
545 545
546 546 if not names and inc and not exc:
547 547 # common case: hgignore patterns
548 548 match = incmatch
549 549 else:
550 550 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
551 551
552 552 return (roots, match, (inc or exc or anypats) and True)
553 553
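The pattern kinds listed in _matcher's docstring are turned into regular expressions by the regex() helper above. A minimal standalone sketch (demo_regex and the sample paths are made-up illustrations, not part of this module) of how the anchored 'path:' kind differs from a bare prefix match:

import re

def demo_regex(kind, name):
    # mirrors the 're:', 'path:' and 'relpath:' cases of regex() above
    if kind == 're':
        return name
    if kind == 'path':
        return '^' + re.escape(name) + '(?:/|$)'
    if kind == 'relpath':
        return re.escape(name) + '(?:/|$)'
    raise ValueError('unhandled kind: %s' % kind)

# 'path:src' matches src itself and anything under it, but not 'srcx/...'
assert re.match(demo_regex('path', 'src'), 'src/util.py')
assert re.match(demo_regex('path', 'src'), 'src')
assert not re.match(demo_regex('path', 'src'), 'srcx/util.py')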
554 554 _hgexecutable = None
555 555
556 556 def hgexecutable():
557 557 """return location of the 'hg' executable.
558 558
559 559 Defaults to $HG or 'hg' in the search path.
560 560 """
561 561 if _hgexecutable is None:
562 562 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
563 563 return _hgexecutable
564 564
565 565 def set_hgexecutable(path):
566 566 """set location of the 'hg' executable"""
567 567 global _hgexecutable
568 568 _hgexecutable = path
569 569
570 570 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
571 571 '''enhanced shell command execution.
572 572 run with the environment possibly modified, possibly in a different dir.
573 573
574 574 if the command fails and onerr is None, return the exit status. if
575 575 onerr is a ui object, print an error message and return the status;
576 576 otherwise raise onerr as an exception.'''
577 577 def py2shell(val):
578 578 'convert python object into string that is useful to shell'
579 579 if val in (None, False):
580 580 return '0'
581 581 if val == True:
582 582 return '1'
583 583 return str(val)
584 584 oldenv = {}
585 585 for k in environ:
586 586 oldenv[k] = os.environ.get(k)
587 587 if cwd is not None:
588 588 oldcwd = os.getcwd()
589 589 origcmd = cmd
590 590 if os.name == 'nt':
591 591 cmd = '"%s"' % cmd
592 592 try:
593 593 for k, v in environ.iteritems():
594 594 os.environ[k] = py2shell(v)
595 595 os.environ['HG'] = hgexecutable()
596 596 if cwd is not None and oldcwd != cwd:
597 597 os.chdir(cwd)
598 598 rc = os.system(cmd)
599 599 if sys.platform == 'OpenVMS' and rc & 1:
600 600 rc = 0
601 601 if rc and onerr:
602 602 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
603 603 explain_exit(rc)[0])
604 604 if errprefix:
605 605 errmsg = '%s: %s' % (errprefix, errmsg)
606 606 try:
607 607 onerr.warn(errmsg + '\n')
608 608 except AttributeError:
609 609 raise onerr(errmsg)
610 610 return rc
611 611 finally:
612 612 for k, v in oldenv.iteritems():
613 613 if v is None:
614 614 del os.environ[k]
615 615 else:
616 616 os.environ[k] = v
617 617 if cwd is not None and oldcwd != cwd:
618 618 os.chdir(oldcwd)
619 619
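system() saves the old values of every overridden environment variable and restores them in a finally block, so the override never leaks past the call. A hedged standalone sketch of that save/override/restore pattern (demo_with_env and HGDEMO are made up for illustration):

import os

def demo_with_env(environ, fn):
    # remember the previous values, override, run, then restore
    oldenv = dict((k, os.environ.get(k)) for k in environ)
    try:
        os.environ.update(environ)
        return fn()
    finally:
        for k, v in oldenv.items():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v

print(demo_with_env({'HGDEMO': 'x'}, lambda: os.environ.get('HGDEMO')))   # x
print('HGDEMO' in os.environ)   # False again (assuming it was not set beforehand)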
620 620 # os.path.lexists is not available on python2.3
621 621 def lexists(filename):
622 622 "test whether a file with this name exists. does not follow symlinks"
623 623 try:
624 624 os.lstat(filename)
625 625 except:
626 626 return False
627 627 return True
628 628
629 629 def rename(src, dst):
630 630 """forcibly rename a file"""
631 631 try:
632 632 os.rename(src, dst)
633 633 except OSError, err: # FIXME: check err (EEXIST ?)
634 634 # on windows, rename to existing file is not allowed, so we
635 635 # must delete destination first. but if file is open, unlink
636 636 # schedules it for delete but does not delete it. rename
637 637 # happens immediately even for open files, so we create
638 638 # temporary file, delete it, rename destination to that name,
639 639 # then delete that. then rename is safe to do.
640 640 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
641 641 os.close(fd)
642 642 os.unlink(temp)
643 643 os.rename(dst, temp)
644 644 os.unlink(temp)
645 645 os.rename(src, dst)
646 646
647 647 def unlink(f):
648 648 """unlink and remove the directory if it is empty"""
649 649 os.unlink(f)
650 650 # try removing directories that might now be empty
651 651 try:
652 652 os.removedirs(os.path.dirname(f))
653 653 except OSError:
654 654 pass
655 655
656 656 def copyfile(src, dest):
657 657 "copy a file, preserving mode"
658 658 if os.path.islink(src):
659 659 try:
660 660 os.unlink(dest)
661 661 except:
662 662 pass
663 663 os.symlink(os.readlink(src), dest)
664 664 else:
665 665 try:
666 666 shutil.copyfile(src, dest)
667 667 shutil.copymode(src, dest)
668 668 except shutil.Error, inst:
669 669 raise Abort(str(inst))
670 670
671 671 def copyfiles(src, dst, hardlink=None):
672 672 """Copy a directory tree using hardlinks if possible"""
673 673
674 674 if hardlink is None:
675 675 hardlink = (os.stat(src).st_dev ==
676 676 os.stat(os.path.dirname(dst)).st_dev)
677 677
678 678 if os.path.isdir(src):
679 679 os.mkdir(dst)
680 680 for name, kind in osutil.listdir(src):
681 681 srcname = os.path.join(src, name)
682 682 dstname = os.path.join(dst, name)
683 683 copyfiles(srcname, dstname, hardlink)
684 684 else:
685 685 if hardlink:
686 686 try:
687 687 os_link(src, dst)
688 688 except (IOError, OSError):
689 689 hardlink = False
690 690 shutil.copy(src, dst)
691 691 else:
692 692 shutil.copy(src, dst)
693 693
694 694 class path_auditor(object):
695 695 '''ensure that a filesystem path contains no banned components.
696 696 a path is rejected if it has any of the following properties:
697 697
698 698 - under top-level .hg
699 699 - starts at the root of a windows drive
700 700 - contains ".."
701 701 - traverses a symlink (e.g. a/symlink_here/b)
702 702 - inside a nested repository'''
703 703
704 704 def __init__(self, root):
705 705 self.audited = set()
706 706 self.auditeddir = set()
707 707 self.root = root
708 708
709 709 def __call__(self, path):
710 710 if path in self.audited:
711 711 return
712 712 normpath = os.path.normcase(path)
713 713 parts = normpath.split(os.sep)
714 714 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
715 715 or os.pardir in parts):
716 716 raise Abort(_("path contains illegal component: %s") % path)
717 717 def check(prefix):
718 718 curpath = os.path.join(self.root, prefix)
719 719 try:
720 720 st = os.lstat(curpath)
721 721 except OSError, err:
722 722 # EINVAL can be raised for invalid path syntax under win32.
723 723 # It must be ignored because patterns may be checked too.
724 724 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
725 725 raise
726 726 else:
727 727 if stat.S_ISLNK(st.st_mode):
728 728 raise Abort(_('path %r traverses symbolic link %r') %
729 729 (path, prefix))
730 730 elif (stat.S_ISDIR(st.st_mode) and
731 731 os.path.isdir(os.path.join(curpath, '.hg'))):
732 732 raise Abort(_('path %r is inside repo %r') %
733 733 (path, prefix))
734 734
735 735 prefixes = []
736 736 for c in strutil.rfindall(normpath, os.sep):
737 737 prefix = normpath[:c]
738 738 if prefix in self.auditeddir:
739 739 break
740 740 check(prefix)
741 741 prefixes.append(prefix)
742 742
743 743 self.audited.add(path)
744 744 # only add prefixes to the cache after checking everything: we don't
745 745 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
746 746 self.auditeddir.update(prefixes)
747 747
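The first test in path_auditor.__call__ rejects clearly illegal paths before touching the filesystem at all. A standalone approximation of that check (demo_illegal is illustrative, not the module's API):

import os

def demo_illegal(path):
    # drive letters, a leading '.hg' or empty component, and '..' are all banned
    parts = os.path.normcase(path).split(os.sep)
    return bool(os.path.splitdrive(path)[0] or parts[0] in ('.hg', '') or os.pardir in parts)

assert demo_illegal('../escape')
assert demo_illegal('.hg/hgrc')
assert not demo_illegal('src/util.py')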
748 748 def _makelock_file(info, pathname):
749 749 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
750 750 os.write(ld, info)
751 751 os.close(ld)
752 752
753 753 def _readlock_file(pathname):
754 754 return posixfile(pathname).read()
755 755
756 756 def nlinks(pathname):
757 757 """Return number of hardlinks for the given file."""
758 758 return os.lstat(pathname).st_nlink
759 759
760 760 if hasattr(os, 'link'):
761 761 os_link = os.link
762 762 else:
763 763 def os_link(src, dst):
764 764 raise OSError(0, _("Hardlinks not supported"))
765 765
766 766 def fstat(fp):
767 767 '''stat file object that may not have fileno method.'''
768 768 try:
769 769 return os.fstat(fp.fileno())
770 770 except AttributeError:
771 771 return os.stat(fp.name)
772 772
773 773 posixfile = file
774 774
775 775 def is_win_9x():
776 776 '''return true if run on windows 95, 98 or me.'''
777 777 try:
778 778 return sys.getwindowsversion()[3] == 1
779 779 except AttributeError:
780 780 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
781 781
782 782 getuser_fallback = None
783 783
784 784 def getuser():
785 785 '''return name of current user'''
786 786 try:
787 787 return getpass.getuser()
788 788 except ImportError:
789 789 # import of pwd will fail on windows - try fallback
790 790 if getuser_fallback:
791 791 return getuser_fallback()
792 792 # raised if win32api not available
793 793 raise Abort(_('user name not available - set USERNAME '
794 794 'environment variable'))
795 795
796 796 def username(uid=None):
797 797 """Return the name of the user with the given uid.
798 798
799 799 If uid is None, return the name of the current user."""
800 800 try:
801 801 import pwd
802 802 if uid is None:
803 803 uid = os.getuid()
804 804 try:
805 805 return pwd.getpwuid(uid)[0]
806 806 except KeyError:
807 807 return str(uid)
808 808 except ImportError:
809 809 return None
810 810
811 811 def groupname(gid=None):
812 812 """Return the name of the group with the given gid.
813 813
814 814 If gid is None, return the name of the current group."""
815 815 try:
816 816 import grp
817 817 if gid is None:
818 818 gid = os.getgid()
819 819 try:
820 820 return grp.getgrgid(gid)[0]
821 821 except KeyError:
822 822 return str(gid)
823 823 except ImportError:
824 824 return None
825 825
826 826 # File system features
827 827
828 828 def checkfolding(path):
829 829 """
830 830 Check whether the given path is on a case-sensitive filesystem
831 831
832 832 Requires a path (like /foo/.hg) ending with a foldable final
833 833 directory component.
834 834 """
835 835 s1 = os.stat(path)
836 836 d, b = os.path.split(path)
837 837 p2 = os.path.join(d, b.upper())
838 838 if path == p2:
839 839 p2 = os.path.join(d, b.lower())
840 840 try:
841 841 s2 = os.stat(p2)
842 842 if s2 == s1:
843 843 return False
844 844 return True
845 845 except:
846 846 return True
847 847
848 848 def checkexec(path):
849 849 """
850 850 Check whether the given path is on a filesystem with UNIX-like exec flags
851 851
852 852 Requires a directory (like /foo/.hg)
853 853 """
854 854 try:
855 855 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
856 856 fh, fn = tempfile.mkstemp("", "", path)
857 857 os.close(fh)
858 858 m = os.stat(fn).st_mode
859 859 # VFAT on Linux can flip mode, but the change doesn't persist across a FS remount.
860 860 # frequently we can detect it because new files are created with the exec bit on.
861 861 new_file_has_exec = m & EXECFLAGS
862 862 os.chmod(fn, m ^ EXECFLAGS)
863 863 exec_flags_cannot_flip = (os.stat(fn).st_mode == m)
864 864 os.unlink(fn)
865 865 except (IOError,OSError):
866 866 # we don't care, the user probably won't be able to commit anyway
867 867 return False
868 868 return not (new_file_has_exec or exec_flags_cannot_flip)
869 869
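checkexec() works by XOR-flipping all three exec bits at once and seeing whether the change sticks. The bit arithmetic on its own (the mode values are just examples):

import stat
EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH   # user, group and other exec bits
print(oct(0o644 ^ EXECFLAGS))   # 644 -> 755: all three exec bits turned on
print(oct(0o755 ^ EXECFLAGS))   # 755 -> 644: a second flip turns them off again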
870 870 def execfunc(path, fallback):
871 871 '''return an is_exec() function with default to fallback'''
872 872 if checkexec(path):
873 873 return lambda x: is_exec(os.path.join(path, x))
874 874 return fallback
875 875
876 876 def checklink(path):
877 877 """check whether the given path is on a symlink-capable filesystem"""
878 878 # mktemp is not racy because symlink creation will fail if the
879 879 # file already exists
880 880 name = tempfile.mktemp(dir=path)
881 881 try:
882 882 os.symlink(".", name)
883 883 os.unlink(name)
884 884 return True
885 885 except (OSError, AttributeError):
886 886 return False
887 887
888 888 def linkfunc(path, fallback):
889 889 '''return an is_link() function with default to fallback'''
890 890 if checklink(path):
891 891 return lambda x: os.path.islink(os.path.join(path, x))
892 892 return fallback
893 893
894 894 _umask = os.umask(0)
895 895 os.umask(_umask)
896 896
897 897 def needbinarypatch():
898 898 """return True if patches should be applied in binary mode by default."""
899 899 return os.name == 'nt'
900 900
901 901 # Platform specific variants
902 902 if os.name == 'nt':
903 903 import msvcrt
904 904 nulldev = 'NUL:'
905 905
906 906 class winstdout:
907 907 '''stdout on windows misbehaves if sent through a pipe'''
908 908
909 909 def __init__(self, fp):
910 910 self.fp = fp
911 911
912 912 def __getattr__(self, key):
913 913 return getattr(self.fp, key)
914 914
915 915 def close(self):
916 916 try:
917 917 self.fp.close()
918 918 except: pass
919 919
920 920 def write(self, s):
921 921 try:
922 return self.fp.write(s)
922 # This is a workaround for the "Not enough space" error seen when
923 # writing a large amount of data to the console.
924 limit = 16000
925 l = len(s)
926 start = 0
927 while start < l:
928 end = start + limit
929 self.fp.write(s[start:end])
930 start = end
923 931 except IOError, inst:
924 932 if inst.errno != 0: raise
925 933 self.close()
926 934 raise IOError(errno.EPIPE, 'Broken pipe')
927 935
928 936 def flush(self):
929 937 try:
930 938 return self.fp.flush()
931 939 except IOError, inst:
932 940 if inst.errno != errno.EINVAL: raise
933 941 self.close()
934 942 raise IOError(errno.EPIPE, 'Broken pipe')
935 943
936 944 sys.stdout = winstdout(sys.stdout)
937 945
938 946 def system_rcpath():
939 947 try:
940 948 return system_rcpath_win32()
941 949 except:
942 950 return [r'c:\mercurial\mercurial.ini']
943 951
944 952 def user_rcpath():
945 953 '''return os-specific hgrc search path to the user dir'''
946 954 try:
947 955 userrc = user_rcpath_win32()
948 956 except:
949 957 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
950 958 path = [userrc]
951 959 userprofile = os.environ.get('USERPROFILE')
952 960 if userprofile:
953 961 path.append(os.path.join(userprofile, 'mercurial.ini'))
954 962 return path
955 963
956 964 def parse_patch_output(output_line):
957 965 """parses the output produced by patch and returns the file name"""
958 966 pf = output_line[14:]
959 967 if pf[0] == '`':
960 968 pf = pf[1:-1] # Remove the quotes
961 969 return pf
962 970
963 971 def sshargs(sshcmd, host, user, port):
964 972 '''Build argument list for ssh or Plink'''
965 973 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
966 974 args = user and ("%s@%s" % (user, host)) or host
967 975 return port and ("%s %s %s" % (args, pflag, port)) or args
968 976
969 977 def testpid(pid):
970 978 '''return False if pid dead, True if running or not known'''
971 979 return True
972 980
973 981 def set_exec(f, mode):
974 982 pass
975 983
976 984 def set_link(f, mode):
977 985 pass
978 986
979 987 def set_binary(fd):
980 988 msvcrt.setmode(fd.fileno(), os.O_BINARY)
981 989
982 990 def pconvert(path):
983 991 return path.replace("\\", "/")
984 992
985 993 def localpath(path):
986 994 return path.replace('/', '\\')
987 995
988 996 def normpath(path):
989 997 return pconvert(os.path.normpath(path))
990 998
991 999 makelock = _makelock_file
992 1000 readlock = _readlock_file
993 1001
994 1002 def samestat(s1, s2):
995 1003 return False
996 1004
997 1005 # A sequence of backslashes is special iff it precedes a double quote:
998 1006 # - if there's an even number of backslashes, the double quote is not
999 1007 # quoted (i.e. it ends the quoted region)
1000 1008 # - if there's an odd number of backslashes, the double quote is quoted
1001 1009 # - in both cases, every pair of backslashes is unquoted into a single
1002 1010 # backslash
1003 1011 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1004 1012 # So, to quote a string, we must surround it in double quotes, double
1005 1013 # the number of backslashes that precede double quotes and add another
1006 1014 # backslash before every double quote (being careful with the double
1007 1015 # quote we've appended to the end)
1008 1016 _quotere = None
1009 1017 def shellquote(s):
1010 1018 global _quotere
1011 1019 if _quotere is None:
1012 1020 _quotere = re.compile(r'(\\*)("|\\$)')
1013 1021 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1014 1022
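The substitution in shellquote() implements the backslash rule described in the comment above. A standalone illustration using the same regular expression (demo_winquote and the sample strings are made up):

import re
_q = re.compile(r'(\\*)("|\\$)')

def demo_winquote(s):
    # double any backslash run before a '"' (or before the end of string), then escape the '"'
    return '"%s"' % _q.sub(r'\1\1\\\2', s)

print(demo_winquote('say "hi"'))    # "say \"hi\""
print(demo_winquote('C:\\Temp\\'))  # the trailing backslash is doubled so it cannot escape the closing quote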
1015 1023 def quotecommand(cmd):
1016 1024 """Build a command string suitable for os.popen* calls."""
1017 1025 # The extra quotes are needed because popen* runs the command
1018 1026 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1019 1027 return '"' + cmd + '"'
1020 1028
1021 1029 def popen(command):
1022 1030 # Work around "popen spawned process may not write to stdout
1023 1031 # under windows"
1024 1032 # http://bugs.python.org/issue1366
1025 1033 command += " 2> %s" % nulldev
1026 1034 return os.popen(quotecommand(command))
1027 1035
1028 1036 def explain_exit(code):
1029 1037 return _("exited with status %d") % code, code
1030 1038
1031 1039 # if you change this stub into a real check, please try to implement the
1032 1040 # username and groupname functions above, too.
1033 1041 def isowner(fp, st=None):
1034 1042 return True
1035 1043
1036 1044 def find_in_path(name, path, default=None):
1037 1045 '''find name in search path. path can be string (will be split
1038 1046 with os.pathsep), or iterable thing that returns strings. if name
1039 1047 found, return path to name. else return default. name is looked up
1040 1048 using cmd.exe rules, using PATHEXT.'''
1041 1049 if isinstance(path, str):
1042 1050 path = path.split(os.pathsep)
1043 1051
1044 1052 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1045 1053 pathext = pathext.lower().split(os.pathsep)
1046 1054 isexec = os.path.splitext(name)[1].lower() in pathext
1047 1055
1048 1056 for p in path:
1049 1057 p_name = os.path.join(p, name)
1050 1058
1051 1059 if isexec and os.path.exists(p_name):
1052 1060 return p_name
1053 1061
1054 1062 for ext in pathext:
1055 1063 p_name_ext = p_name + ext
1056 1064 if os.path.exists(p_name_ext):
1057 1065 return p_name_ext
1058 1066 return default
1059 1067
1060 1068 def set_signal_handler():
1061 1069 try:
1062 1070 set_signal_handler_win32()
1063 1071 except NameError:
1064 1072 pass
1065 1073
1066 1074 try:
1067 1075 # override functions with win32 versions if possible
1068 1076 from util_win32 import *
1069 1077 if not is_win_9x():
1070 1078 posixfile = posixfile_nt
1071 1079 except ImportError:
1072 1080 pass
1073 1081
1074 1082 else:
1075 1083 nulldev = '/dev/null'
1076 1084
1077 1085 def rcfiles(path):
1078 1086 rcs = [os.path.join(path, 'hgrc')]
1079 1087 rcdir = os.path.join(path, 'hgrc.d')
1080 1088 try:
1081 1089 rcs.extend([os.path.join(rcdir, f)
1082 1090 for f, kind in osutil.listdir(rcdir)
1083 1091 if f.endswith(".rc")])
1084 1092 except OSError:
1085 1093 pass
1086 1094 return rcs
1087 1095
1088 1096 def system_rcpath():
1089 1097 path = []
1090 1098 # old mod_python does not set sys.argv
1091 1099 if len(getattr(sys, 'argv', [])) > 0:
1092 1100 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1093 1101 '/../etc/mercurial'))
1094 1102 path.extend(rcfiles('/etc/mercurial'))
1095 1103 return path
1096 1104
1097 1105 def user_rcpath():
1098 1106 return [os.path.expanduser('~/.hgrc')]
1099 1107
1100 1108 def parse_patch_output(output_line):
1101 1109 """parses the output produced by patch and returns the file name"""
1102 1110 pf = output_line[14:]
1103 1111 if os.sys.platform == 'OpenVMS':
1104 1112 if pf[0] == '`':
1105 1113 pf = pf[1:-1] # Remove the quotes
1106 1114 else:
1107 1115 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1108 1116 pf = pf[1:-1] # Remove the quotes
1109 1117 return pf
1110 1118
1111 1119 def sshargs(sshcmd, host, user, port):
1112 1120 '''Build argument list for ssh'''
1113 1121 args = user and ("%s@%s" % (user, host)) or host
1114 1122 return port and ("%s -p %s" % (args, port)) or args
1115 1123
1116 1124 def is_exec(f):
1117 1125 """check whether a file is executable"""
1118 1126 return (os.lstat(f).st_mode & 0100 != 0)
1119 1127
1120 1128 def set_exec(f, mode):
1121 1129 s = os.lstat(f).st_mode
1122 1130 if stat.S_ISLNK(s) or (s & 0100 != 0) == mode:
1123 1131 return
1124 1132 if mode:
1125 1133 # Turn on +x for every +r bit when making a file executable
1126 1134 # and obey umask.
1127 1135 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1128 1136 else:
1129 1137 os.chmod(f, s & 0666)
1130 1138
1131 1139 def set_link(f, mode):
1132 1140 """make a file a symbolic link/regular file
1133 1141
1134 1142 if a file is changed to a link, its contents become the link data
1135 1143 if a link is changed to a file, its link data become its contents
1136 1144 """
1137 1145
1138 1146 m = os.path.islink(f)
1139 1147 if m == bool(mode):
1140 1148 return
1141 1149
1142 1150 if mode: # switch file to link
1143 1151 data = file(f).read()
1144 1152 os.unlink(f)
1145 1153 os.symlink(data, f)
1146 1154 else:
1147 1155 data = os.readlink(f)
1148 1156 os.unlink(f)
1149 1157 file(f, "w").write(data)
1150 1158
1151 1159 def set_binary(fd):
1152 1160 pass
1153 1161
1154 1162 def pconvert(path):
1155 1163 return path
1156 1164
1157 1165 def localpath(path):
1158 1166 return path
1159 1167
1160 1168 normpath = os.path.normpath
1161 1169 samestat = os.path.samestat
1162 1170
1163 1171 def makelock(info, pathname):
1164 1172 try:
1165 1173 os.symlink(info, pathname)
1166 1174 except OSError, why:
1167 1175 if why.errno == errno.EEXIST:
1168 1176 raise
1169 1177 else:
1170 1178 _makelock_file(info, pathname)
1171 1179
1172 1180 def readlock(pathname):
1173 1181 try:
1174 1182 return os.readlink(pathname)
1175 1183 except OSError, why:
1176 1184 if why.errno in (errno.EINVAL, errno.ENOSYS):
1177 1185 return _readlock_file(pathname)
1178 1186 else:
1179 1187 raise
1180 1188
1181 1189 def shellquote(s):
1182 1190 if os.sys.platform == 'OpenVMS':
1183 1191 return '"%s"' % s
1184 1192 else:
1185 1193 return "'%s'" % s.replace("'", "'\\''")
1186 1194
1187 1195 def quotecommand(cmd):
1188 1196 return cmd
1189 1197
1190 1198 def popen(command):
1191 1199 return os.popen(command)
1192 1200
1193 1201 def testpid(pid):
1194 1202 '''return False if pid dead, True if running or not sure'''
1195 1203 if os.sys.platform == 'OpenVMS':
1196 1204 return True
1197 1205 try:
1198 1206 os.kill(pid, 0)
1199 1207 return True
1200 1208 except OSError, inst:
1201 1209 return inst.errno != errno.ESRCH
1202 1210
1203 1211 def explain_exit(code):
1204 1212 """return a 2-tuple (desc, code) describing a process's status"""
1205 1213 if os.WIFEXITED(code):
1206 1214 val = os.WEXITSTATUS(code)
1207 1215 return _("exited with status %d") % val, val
1208 1216 elif os.WIFSIGNALED(code):
1209 1217 val = os.WTERMSIG(code)
1210 1218 return _("killed by signal %d") % val, val
1211 1219 elif os.WIFSTOPPED(code):
1212 1220 val = os.WSTOPSIG(code)
1213 1221 return _("stopped by signal %d") % val, val
1214 1222 raise ValueError(_("invalid exit code"))
1215 1223
1216 1224 def isowner(fp, st=None):
1217 1225 """Return True if the file object f belongs to the current user.
1218 1226
1219 1227 The return value of a util.fstat(f) may be passed as the st argument.
1220 1228 """
1221 1229 if st is None:
1222 1230 st = fstat(fp)
1223 1231 return st.st_uid == os.getuid()
1224 1232
1225 1233 def find_in_path(name, path, default=None):
1226 1234 '''find name in search path. path can be string (will be split
1227 1235 with os.pathsep), or iterable thing that returns strings. if name
1228 1236 found, return path to name. else return default.'''
1229 1237 if isinstance(path, str):
1230 1238 path = path.split(os.pathsep)
1231 1239 for p in path:
1232 1240 p_name = os.path.join(p, name)
1233 1241 if os.path.exists(p_name):
1234 1242 return p_name
1235 1243 return default
1236 1244
1237 1245 def set_signal_handler():
1238 1246 pass
1239 1247
1240 1248 def find_exe(name, default=None):
1241 1249 '''find path of an executable.
1242 1250 if name contains a path component, return it as is. otherwise,
1243 1251 use normal executable search path.'''
1244 1252
1245 1253 if os.sep in name or sys.platform == 'OpenVMS':
1246 1254 # don't check the executable bit. if the file isn't
1247 1255 # executable, whoever tries to actually run it will give a
1248 1256 # much more useful error message.
1249 1257 return name
1250 1258 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1251 1259
1252 1260 def _buildencodefun():
1253 1261 e = '_'
1254 1262 win_reserved = [ord(x) for x in '\\:*?"<>|']
1255 1263 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1256 1264 for x in (range(32) + range(126, 256) + win_reserved):
1257 1265 cmap[chr(x)] = "~%02x" % x
1258 1266 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1259 1267 cmap[chr(x)] = e + chr(x).lower()
1260 1268 dmap = {}
1261 1269 for k, v in cmap.iteritems():
1262 1270 dmap[v] = k
1263 1271 def decode(s):
1264 1272 i = 0
1265 1273 while i < len(s):
1266 1274 for l in xrange(1, 4):
1267 1275 try:
1268 1276 yield dmap[s[i:i+l]]
1269 1277 i += l
1270 1278 break
1271 1279 except KeyError:
1272 1280 pass
1273 1281 else:
1274 1282 raise KeyError
1275 1283 return (lambda s: "".join([cmap[c] for c in s]),
1276 1284 lambda s: "".join(list(decode(s))))
1277 1285
1278 1286 encodefilename, decodefilename = _buildencodefun()
1279 1287
1280 1288 def encodedopener(openerfn, fn):
1281 1289 def o(path, *args, **kw):
1282 1290 return openerfn(fn(path), *args, **kw)
1283 1291 return o
1284 1292
1285 1293 def mktempcopy(name, emptyok=False):
1286 1294 """Create a temporary file with the same contents from name
1287 1295
1288 1296 The permission bits are copied from the original file.
1289 1297
1290 1298 If the temporary file is going to be truncated immediately, you
1291 1299 can use emptyok=True as an optimization.
1292 1300
1293 1301 Returns the name of the temporary file.
1294 1302 """
1295 1303 d, fn = os.path.split(name)
1296 1304 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1297 1305 os.close(fd)
1298 1306 # Temporary files are created with mode 0600, which is usually not
1299 1307 # what we want. If the original file already exists, just copy
1300 1308 # its mode. Otherwise, manually obey umask.
1301 1309 try:
1302 1310 st_mode = os.lstat(name).st_mode
1303 1311 except OSError, inst:
1304 1312 if inst.errno != errno.ENOENT:
1305 1313 raise
1306 1314 st_mode = 0666 & ~_umask
1307 1315 os.chmod(temp, st_mode)
1308 1316 if emptyok:
1309 1317 return temp
1310 1318 try:
1311 1319 try:
1312 1320 ifp = posixfile(name, "rb")
1313 1321 except IOError, inst:
1314 1322 if inst.errno == errno.ENOENT:
1315 1323 return temp
1316 1324 if not getattr(inst, 'filename', None):
1317 1325 inst.filename = name
1318 1326 raise
1319 1327 ofp = posixfile(temp, "wb")
1320 1328 for chunk in filechunkiter(ifp):
1321 1329 ofp.write(chunk)
1322 1330 ifp.close()
1323 1331 ofp.close()
1324 1332 except:
1325 1333 try: os.unlink(temp)
1326 1334 except: pass
1327 1335 raise
1328 1336 return temp
1329 1337
1330 1338 class atomictempfile(posixfile):
1331 1339 """file-like object that atomically updates a file
1332 1340
1333 1341 All writes will be redirected to a temporary copy of the original
1334 1342 file. When rename is called, the copy is renamed to the original
1335 1343 name, making the changes visible.
1336 1344 """
1337 1345 def __init__(self, name, mode):
1338 1346 self.__name = name
1339 1347 self.temp = mktempcopy(name, emptyok=('w' in mode))
1340 1348 posixfile.__init__(self, self.temp, mode)
1341 1349
1342 1350 def rename(self):
1343 1351 if not self.closed:
1344 1352 posixfile.close(self)
1345 1353 rename(self.temp, localpath(self.__name))
1346 1354
1347 1355 def __del__(self):
1348 1356 if not self.closed:
1349 1357 try:
1350 1358 os.unlink(self.temp)
1351 1359 except: pass
1352 1360 posixfile.close(self)
1353 1361
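mktempcopy() and atomictempfile together give "write to a copy, then rename it over the original" semantics. A simplified standalone sketch of the same idea (demo_atomic_write and the file name are illustrative; the class above additionally copies permission bits and relies on rename() for the Windows workaround):

import os, tempfile

def demo_atomic_write(name, data):
    d = os.path.dirname(name) or '.'
    fd, temp = tempfile.mkstemp(prefix='.%s-' % os.path.basename(name), dir=d)
    try:
        os.write(fd, data)
    finally:
        os.close(fd)
    # readers see either the old file or the new one, never a half-written file
    # (POSIX rename semantics; see rename() above for the Windows case)
    os.rename(temp, name)

demo_atomic_write('demo.txt', b'hello\n')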
1354 1362 class opener(object):
1355 1363 """Open files relative to a base directory
1356 1364
1357 1365 This class is used to hide the details of COW semantics and
1358 1366 remote file access from higher level code.
1359 1367 """
1360 1368 def __init__(self, base, audit=True):
1361 1369 self.base = base
1362 1370 if audit:
1363 1371 self.audit_path = path_auditor(base)
1364 1372 else:
1365 1373 self.audit_path = always
1366 1374
1367 1375 def __getattr__(self, name):
1368 1376 if name == '_can_symlink':
1369 1377 self._can_symlink = checklink(self.base)
1370 1378 return self._can_symlink
1371 1379 raise AttributeError(name)
1372 1380
1373 1381 def __call__(self, path, mode="r", text=False, atomictemp=False):
1374 1382 self.audit_path(path)
1375 1383 f = os.path.join(self.base, path)
1376 1384
1377 1385 if not text and "b" not in mode:
1378 1386 mode += "b" # for that other OS
1379 1387
1380 1388 if mode[0] != "r":
1381 1389 try:
1382 1390 nlink = nlinks(f)
1383 1391 except OSError:
1384 1392 nlink = 0
1385 1393 d = os.path.dirname(f)
1386 1394 if not os.path.isdir(d):
1387 1395 os.makedirs(d)
1388 1396 if atomictemp:
1389 1397 return atomictempfile(f, mode)
1390 1398 if nlink > 1:
1391 1399 rename(mktempcopy(f), f)
1392 1400 return posixfile(f, mode)
1393 1401
1394 1402 def symlink(self, src, dst):
1395 1403 self.audit_path(dst)
1396 1404 linkname = os.path.join(self.base, dst)
1397 1405 try:
1398 1406 os.unlink(linkname)
1399 1407 except OSError:
1400 1408 pass
1401 1409
1402 1410 dirname = os.path.dirname(linkname)
1403 1411 if not os.path.exists(dirname):
1404 1412 os.makedirs(dirname)
1405 1413
1406 1414 if self._can_symlink:
1407 1415 try:
1408 1416 os.symlink(src, linkname)
1409 1417 except OSError, err:
1410 1418 raise OSError(err.errno, _('could not symlink to %r: %s') %
1411 1419 (src, err.strerror), linkname)
1412 1420 else:
1413 1421 f = self(dst, "w")
1414 1422 f.write(src)
1415 1423 f.close()
1416 1424
1417 1425 class chunkbuffer(object):
1418 1426 """Allow arbitrary sized chunks of data to be efficiently read from an
1419 1427 iterator over chunks of arbitrary size."""
1420 1428
1421 1429 def __init__(self, in_iter):
1422 1430 """in_iter is the iterator that's iterating over the input chunks.
1423 1431 self.targetsize is how big an internal buffer to try to maintain."""
1424 1432 self.iter = iter(in_iter)
1425 1433 self.buf = ''
1426 1434 self.targetsize = 2**16
1427 1435
1428 1436 def read(self, l):
1429 1437 """Read L bytes of data from the iterator of chunks of data.
1430 1438 Returns less than L bytes if the iterator runs dry."""
1431 1439 if l > len(self.buf) and self.iter:
1432 1440 # gather at least l bytes, but no less than self.targetsize
1433 1441 targetsize = max(l, self.targetsize)
1434 1442 collector = cStringIO.StringIO()
1435 1443 collector.write(self.buf)
1436 1444 collected = len(self.buf)
1437 1445 for chunk in self.iter:
1438 1446 collector.write(chunk)
1439 1447 collected += len(chunk)
1440 1448 if collected >= targetsize:
1441 1449 break
1442 1450 if collected < targetsize:
1443 1451 self.iter = False
1444 1452 self.buf = collector.getvalue()
1445 1453 if len(self.buf) == l:
1446 1454 s, self.buf = str(self.buf), ''
1447 1455 else:
1448 1456 s, self.buf = self.buf[:l], buffer(self.buf, l)
1449 1457 return s
1450 1458
1451 1459 def filechunkiter(f, size=65536, limit=None):
1452 1460 """Create a generator that produces the data in the file size
1453 1461 (default 65536) bytes at a time, up to optional limit (default is
1454 1462 to read all data). Chunks may be less than size bytes if the
1455 1463 chunk is the last chunk in the file, or the file is a socket or
1456 1464 some other type of file that sometimes reads less data than is
1457 1465 requested."""
1458 1466 assert size >= 0
1459 1467 assert limit is None or limit >= 0
1460 1468 while True:
1461 1469 if limit is None: nbytes = size
1462 1470 else: nbytes = min(limit, size)
1463 1471 s = nbytes and f.read(nbytes)
1464 1472 if not s: break
1465 1473 if limit: limit -= len(s)
1466 1474 yield s
1467 1475
1468 1476 def makedate():
1469 1477 lt = time.localtime()
1470 1478 if lt[8] == 1 and time.daylight:
1471 1479 tz = time.altzone
1472 1480 else:
1473 1481 tz = time.timezone
1474 1482 return time.mktime(lt), tz
1475 1483
1476 1484 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
1477 1485 """represent a (unixtime, offset) tuple as a localized time.
1478 1486 unixtime is seconds since the epoch, and offset is the time zone's
1479 1487 number of seconds away from UTC. if timezone is false, do not
1480 1488 append time zone to string."""
1481 1489 t, tz = date or makedate()
1482 1490 s = time.strftime(format, time.gmtime(float(t) - tz))
1483 1491 if timezone:
1484 1492 s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
1485 1493 return s
1486 1494
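datestr() treats the offset as the number of seconds the zone lies behind UTC, so rendering local time means subtracting the offset and flipping its sign for the "+HHMM" suffix. A quick check of that arithmetic (the sample timestamp is arbitrary):

import time
t, tz = 1167652800, -7200   # 2007-01-01 12:00:00 UTC, viewed from a UTC+2 zone (offset -7200)
print(time.strftime('%a %b %d %H:%M:%S %Y', time.gmtime(float(t) - tz)))   # Mon Jan 01 14:00:00 2007
print('%+03d%02d' % (-tz // 3600, (-tz % 3600) // 60))                     # +0200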
1487 1495 def strdate(string, format, defaults=[]):
1488 1496 """parse a localized time string and return a (unixtime, offset) tuple.
1489 1497 if the string cannot be parsed, ValueError is raised."""
1490 1498 def timezone(string):
1491 1499 tz = string.split()[-1]
1492 1500 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1493 1501 tz = int(tz)
1494 1502 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1495 1503 return offset
1496 1504 if tz == "GMT" or tz == "UTC":
1497 1505 return 0
1498 1506 return None
1499 1507
1500 1508 # NOTE: unixtime = localunixtime + offset
1501 1509 offset, date = timezone(string), string
1502 1510 if offset != None:
1503 1511 date = " ".join(string.split()[:-1])
1504 1512
1505 1513 # add missing elements from defaults
1506 1514 for part in defaults:
1507 1515 found = [True for p in part if ("%"+p) in format]
1508 1516 if not found:
1509 1517 date += "@" + defaults[part]
1510 1518 format += "@%" + part[0]
1511 1519
1512 1520 timetuple = time.strptime(date, format)
1513 1521 localunixtime = int(calendar.timegm(timetuple))
1514 1522 if offset is None:
1515 1523 # local timezone
1516 1524 unixtime = int(time.mktime(timetuple))
1517 1525 offset = unixtime - localunixtime
1518 1526 else:
1519 1527 unixtime = localunixtime + offset
1520 1528 return unixtime, offset
1521 1529
1522 1530 def parsedate(string, formats=None, defaults=None):
1523 1531 """parse a localized time string and return a (unixtime, offset) tuple.
1524 1532 The date may be a "unixtime offset" string or in one of the specified
1525 1533 formats."""
1526 1534 if not string:
1527 1535 return 0, 0
1528 1536 if not formats:
1529 1537 formats = defaultdateformats
1530 1538 string = string.strip()
1531 1539 try:
1532 1540 when, offset = map(int, string.split(' '))
1533 1541 except ValueError:
1534 1542 # fill out defaults
1535 1543 if not defaults:
1536 1544 defaults = {}
1537 1545 now = makedate()
1538 1546 for part in "d mb yY HI M S".split():
1539 1547 if part not in defaults:
1540 1548 if part[0] in "HMS":
1541 1549 defaults[part] = "00"
1542 1550 elif part[0] in "dm":
1543 1551 defaults[part] = "1"
1544 1552 else:
1545 1553 defaults[part] = datestr(now, "%" + part[0], False)
1546 1554
1547 1555 for format in formats:
1548 1556 try:
1549 1557 when, offset = strdate(string, format, defaults)
1550 1558 except ValueError:
1551 1559 pass
1552 1560 else:
1553 1561 break
1554 1562 else:
1555 1563 raise Abort(_('invalid date: %r ') % string)
1556 1564 # validate explicit (probably user-specified) date and
1557 1565 # time zone offset. values must fit in signed 32 bits for
1558 1566 # current 32-bit linux runtimes. timezones go from UTC-12
1559 1567 # to UTC+14
1560 1568 if abs(when) > 0x7fffffff:
1561 1569 raise Abort(_('date exceeds 32 bits: %d') % when)
1562 1570 if offset < -50400 or offset > 43200:
1563 1571 raise Abort(_('impossible time zone offset: %d') % offset)
1564 1572 return when, offset
1565 1573
1566 1574 def matchdate(date):
1567 1575 """Return a function that matches a given date match specifier
1568 1576
1569 1577 Formats include:
1570 1578
1571 1579 '{date}' match a given date to the accuracy provided
1572 1580
1573 1581 '<{date}' on or before a given date
1574 1582
1575 1583 '>{date}' on or after a given date
1576 1584
1577 1585 """
1578 1586
1579 1587 def lower(date):
1580 1588 return parsedate(date, extendeddateformats)[0]
1581 1589
1582 1590 def upper(date):
1583 1591 d = dict(mb="12", HI="23", M="59", S="59")
1584 1592 for days in "31 30 29".split():
1585 1593 try:
1586 1594 d["d"] = days
1587 1595 return parsedate(date, extendeddateformats, d)[0]
1588 1596 except:
1589 1597 pass
1590 1598 d["d"] = "28"
1591 1599 return parsedate(date, extendeddateformats, d)[0]
1592 1600
1593 1601 if date[0] == "<":
1594 1602 when = upper(date[1:])
1595 1603 return lambda x: x <= when
1596 1604 elif date[0] == ">":
1597 1605 when = lower(date[1:])
1598 1606 return lambda x: x >= when
1599 1607 elif date[0] == "-":
1600 1608 try:
1601 1609 days = int(date[1:])
1602 1610 except ValueError:
1603 1611 raise Abort(_("invalid day spec: %s") % date[1:])
1604 1612 when = makedate()[0] - days * 3600 * 24
1605 1613 return lambda x: x >= when
1606 1614 elif " to " in date:
1607 1615 a, b = date.split(" to ")
1608 1616 start, stop = lower(a), upper(b)
1609 1617 return lambda x: x >= start and x <= stop
1610 1618 else:
1611 1619 start, stop = lower(date), upper(date)
1612 1620 return lambda x: x >= start and x <= stop
1613 1621
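The comparator returned by matchdate() is just a closure over a boundary timestamp. A standalone sketch of the '>{date}' case (demo_after and the fixed date format are illustrative assumptions; the real code goes through parsedate and its format lists):

import calendar, time

def demo_after(day):
    # lower bound: midnight UTC of the given day
    when = calendar.timegm(time.strptime(day, '%Y-%m-%d'))
    return lambda x: x >= when

m = demo_after('2007-01-01')
print(m(calendar.timegm(time.strptime('2007-06-01', '%Y-%m-%d'))))   # True
print(m(calendar.timegm(time.strptime('2006-06-01', '%Y-%m-%d'))))   # False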
1614 1622 def shortuser(user):
1615 1623 """Return a short representation of a user name or email address."""
1616 1624 f = user.find('@')
1617 1625 if f >= 0:
1618 1626 user = user[:f]
1619 1627 f = user.find('<')
1620 1628 if f >= 0:
1621 1629 user = user[f+1:]
1622 1630 f = user.find(' ')
1623 1631 if f >= 0:
1624 1632 user = user[:f]
1625 1633 f = user.find('.')
1626 1634 if f >= 0:
1627 1635 user = user[:f]
1628 1636 return user
1629 1637
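shortuser() trims at '@', '<', space and '.' in turn. Tracing one example address (illustrative; the final call assumes a Mercurial installation that exposes this module as mercurial.util):

# 'Joe Bloggs <joe.bloggs@example.com>'
#   cut at '@'    -> 'Joe Bloggs <joe.bloggs'
#   cut after '<' -> 'joe.bloggs'
#   cut at '.'    -> 'joe'
from mercurial import util
print(util.shortuser('Joe Bloggs <joe.bloggs@example.com>'))   # joe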
1630 1638 def ellipsis(text, maxlength=400):
1631 1639 """Trim string to at most maxlength (default: 400) characters."""
1632 1640 if len(text) <= maxlength:
1633 1641 return text
1634 1642 else:
1635 1643 return "%s..." % (text[:maxlength-3])
1636 1644
1637 1645 def walkrepos(path):
1638 1646 '''yield every hg repository under path, recursively.'''
1639 1647 def errhandler(err):
1640 1648 if err.filename == path:
1641 1649 raise err
1642 1650
1643 1651 for root, dirs, files in os.walk(path, onerror=errhandler):
1644 1652 for d in dirs:
1645 1653 if d == '.hg':
1646 1654 yield root
1647 1655 dirs[:] = []
1648 1656 break
1649 1657
1650 1658 _rcpath = None
1651 1659
1652 1660 def os_rcpath():
1653 1661 '''return default os-specific hgrc search path'''
1654 1662 path = system_rcpath()
1655 1663 path.extend(user_rcpath())
1656 1664 path = [os.path.normpath(f) for f in path]
1657 1665 return path
1658 1666
1659 1667 def rcpath():
1660 1668 '''return hgrc search path. if env var HGRCPATH is set, use it.
1661 1669 for each item in path, if directory, use files ending in .rc,
1662 1670 else use item.
1663 1671 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1664 1672 if no HGRCPATH, use default os-specific path.'''
1665 1673 global _rcpath
1666 1674 if _rcpath is None:
1667 1675 if 'HGRCPATH' in os.environ:
1668 1676 _rcpath = []
1669 1677 for p in os.environ['HGRCPATH'].split(os.pathsep):
1670 1678 if not p: continue
1671 1679 if os.path.isdir(p):
1672 1680 for f, kind in osutil.listdir(p):
1673 1681 if f.endswith('.rc'):
1674 1682 _rcpath.append(os.path.join(p, f))
1675 1683 else:
1676 1684 _rcpath.append(p)
1677 1685 else:
1678 1686 _rcpath = os_rcpath()
1679 1687 return _rcpath
1680 1688
1681 1689 def bytecount(nbytes):
1682 1690 '''return byte count formatted as readable string, with units'''
1683 1691
1684 1692 units = (
1685 1693 (100, 1<<30, _('%.0f GB')),
1686 1694 (10, 1<<30, _('%.1f GB')),
1687 1695 (1, 1<<30, _('%.2f GB')),
1688 1696 (100, 1<<20, _('%.0f MB')),
1689 1697 (10, 1<<20, _('%.1f MB')),
1690 1698 (1, 1<<20, _('%.2f MB')),
1691 1699 (100, 1<<10, _('%.0f KB')),
1692 1700 (10, 1<<10, _('%.1f KB')),
1693 1701 (1, 1<<10, _('%.2f KB')),
1694 1702 (1, 1, _('%.0f bytes')),
1695 1703 )
1696 1704
1697 1705 for multiplier, divisor, format in units:
1698 1706 if nbytes >= divisor * multiplier:
1699 1707 return format % (nbytes / float(divisor))
1700 1708 return units[-1][2] % nbytes
1701 1709
1702 1710 def drop_scheme(scheme, path):
1703 1711 sc = scheme + ':'
1704 1712 if path.startswith(sc):
1705 1713 path = path[len(sc):]
1706 1714 if path.startswith('//'):
1707 1715 path = path[2:]
1708 1716 return path
1709 1717
1710 1718 def uirepr(s):
1711 1719 # Avoid double backslash in Windows path repr()
1712 1720 return repr(s).replace('\\\\', '\\')
1713 1721
1714 1722 def hidepassword(url):
1715 1723 '''replaces the password in the url string by three asterisks (***)
1716 1724
1717 1725 >>> hidepassword('http://www.example.com/some/path#fragment')
1718 1726 'http://www.example.com/some/path#fragment'
1719 1727 >>> hidepassword('http://me@www.example.com/some/path#fragment')
1720 1728 'http://me@www.example.com/some/path#fragment'
1721 1729 >>> hidepassword('http://me:simplepw@www.example.com/path#frag')
1722 1730 'http://me:***@www.example.com/path#frag'
1723 1731 >>> hidepassword('http://me:complex:pw@www.example.com/path#frag')
1724 1732 'http://me:***@www.example.com/path#frag'
1725 1733 >>> hidepassword('/path/to/repo')
1726 1734 '/path/to/repo'
1727 1735 >>> hidepassword('relative/path/to/repo')
1728 1736 'relative/path/to/repo'
1729 1737 >>> hidepassword('c:\\\\path\\\\to\\\\repo')
1730 1738 'c:\\\\path\\\\to\\\\repo'
1731 1739 >>> hidepassword('c:/path/to/repo')
1732 1740 'c:/path/to/repo'
1733 1741 >>> hidepassword('bundle://path/to/bundle')
1734 1742 'bundle://path/to/bundle'
1735 1743 '''
1736 1744 url_parts = list(urlparse.urlparse(url))
1737 1745 host_with_pw_pattern = re.compile('^([^:]*):([^@]*)@(.*)$')
1738 1746 if host_with_pw_pattern.match(url_parts[1]):
1739 1747 url_parts[1] = re.sub(host_with_pw_pattern, r'\1:***@\3',
1740 1748 url_parts[1])
1741 1749 return urlparse.urlunparse(url_parts)
1742 1750
@@ -1,69 +1,73 b''
1 1 #!/bin/sh
2 2
3 3 # Test issue835:
4 4 # qpush fails immediately when patching a missing file, but
5 5 # remaining added files are still created empty which will
6 6 # trick a future qrefresh.
7 7
8 8 cat > writelines.py <<EOF
9 9 import sys
10 10 path = sys.argv[1]
11 11 args = sys.argv[2:]
12 12 assert (len(args) % 2) == 0
13 13
14 14 f = file(path, 'wb')
15 15 for i in xrange(len(args)/2):
16 16 count, s = args[2*i:2*i+2]
17 17 count = int(count)
18 18 s = s.decode('string_escape')
19 19 f.write(s*count)
20 20 f.close()
21 21
22 22 EOF
23 23
24 24 echo "[extensions]" >> $HGRCPATH
25 25 echo "mq=" >> $HGRCPATH
26 26
27 27 hg init normal
28 28 cd normal
29 29 python ../writelines.py b 10 'a\n'
30 30 hg ci -Am addb
31 31 echo a > a
32 32 python ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
33 33 echo c > c
34 34 hg add a c
35 35 hg qnew -f changeb
36 36 hg qpop
37 37 hg rm b
38 38 hg ci -Am rmb
39 39 echo % push patch with missing target
40 40 hg qpush
41 41 echo % display added files
42 42 cat a
43 43 cat c
44 echo % display rejections
45 cat b.rej
44 46 cd ..
45 47
46 48
47 49 echo "[diff]" >> $HGRCPATH
48 50 echo "git=1" >> $HGRCPATH
49 51
50 52 hg init git
51 53 cd git
52 54 python ../writelines.py b 1 '\x00'
53 55 hg ci -Am addb
54 56 echo a > a
55 57 python ../writelines.py b 1 '\x01' 1 '\x00'
56 58 echo c > c
57 59 hg add a c
58 60 hg qnew -f changeb
59 61 hg qpop
60 62 hg rm b
61 63 hg ci -Am rmb
62 64 echo % push git patch with missing target
63 65 hg qpush 2>&1 | sed -e 's/b:.*/b: No such file or directory/'
64 66 hg st
65 67 echo % display added files
66 68 cat a
67 69 cat c
70 echo % display rejections
71 cat b.rej
68 72 cd ..
69 73
@@ -1,25 +1,49 b''
1 1 adding b
2 2 Patch queue now empty
3 3 % push patch with missing target
4 4 applying changeb
5 unable to find b or b for patching
6 unable to find b or b for patching
5 unable to find 'b' for patching
6 2 out of 2 hunks FAILED -- saving rejects to file b.rej
7 7 patch failed, unable to continue (try -v)
8 8 patch failed, rejects left in working dir
9 9 Errors during apply, please fix and refresh changeb
10 10 % display added files
11 11 a
12 12 c
13 % display rejections
14 --- b
15 +++ b
16 @@ -1,3 +1,5 @@ a
17 +b
18 +b
19 a
20 a
21 a
22 @@ -8,3 +10,5 @@ a
23 a
24 a
25 a
26 +c
27 +c
13 28 adding b
14 29 Patch queue now empty
15 30 % push git patch with missing target
16 31 applying changeb
17 unable to find b or b for patching
32 unable to find 'b' for patching
33 1 out of 1 hunk FAILED -- saving rejects to file b.rej
18 34 patch failed, unable to continue (try -v)
19 35 b: No such file or directory
20 36 b not tracked!
21 37 patch failed, rejects left in working dir
22 38 Errors during apply, please fix and refresh changeb
39 ? b.rej
23 40 % display added files
24 41 a
25 42 c
43 % display rejections
44 --- b
45 +++ b
46 GIT binary patch
47 literal 2
48 Jc${No0000400IC2
49