Merge with crew-stable
Alexis S. L. Carvalho
r4059:431f3c1d merge default
@@ -0,0 +1,77 b''
1 #!/bin/sh
2 # check that renames are correctly saved by a commit after a merge
3
4 HGMERGE=merge
5 export HGMERGE
6
7 # test with the merge on 3 having the rename on the local parent
8 hg init a
9 cd a
10
11 echo line1 > foo
12 hg add foo
13 hg ci -m '0: add foo' -d '0 0'
14
15 echo line2 >> foo
16 hg ci -m '1: change foo' -d '0 0'
17
18 hg up -C 0
19 hg mv foo bar
20 rm bar
21 echo line0 > bar
22 echo line1 >> bar
23 hg ci -m '2: mv foo bar; change bar' -d '0 0'
24
25 hg merge 1
26 echo '% contents of bar should be line0 line1 line2'
27 cat bar
28 hg ci -m '3: merge with local rename' -d '0 0'
29 hg debugindex .hg/store/data/bar.i
30 hg debugrename bar
31 hg debugindex .hg/store/data/foo.i
32
33 # revert the content change from rev 2
34 hg up -C 2
35 rm bar
36 echo line1 > bar
37 hg ci -m '4: revert content change from rev 2' -d '0 0'
38
39 hg log --template '#rev#:#node|short# #parents#\n'
40 echo '% this should use bar@rev2 as the ancestor'
41 hg --debug merge 3
42 echo '% contents of bar should be line1 line2'
43 cat bar
44 hg ci -m '5: merge' -d '0 0'
45 hg debugindex .hg/store/data/bar.i
46
47
48 # same thing, but with the merge on 3 having the rename on the remote parent
49 echo
50 echo
51 cd ..
52 hg clone -U -r 1 -r 2 a b
53 cd b
54
55 hg up -C 1
56 hg merge 2
57 echo '% contents of bar should be line0 line1 line2'
58 cat bar
59 hg ci -m '3: merge with remote rename' -d '0 0'
60 hg debugindex .hg/store/data/bar.i
61 hg debugrename bar
62 hg debugindex .hg/store/data/foo.i
63
64 # revert the content change from rev 2
65 hg up -C 2
66 rm bar
67 echo line1 > bar
68 hg ci -m '4: revert content change from rev 2' -d '0 0'
69
70 hg log --template '#rev#:#node|short# #parents#\n'
71 echo '% this should use bar@rev2 as the ancestor'
72 hg --debug merge 3
73 echo '% contents of bar should be line1 line2'
74 cat bar
75 hg ci -m '5: merge' -d '0 0'
76 hg debugindex .hg/store/data/bar.i
77
@@ -0,0 +1,83 b''
1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
2 merging bar and foo
3 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
4 (branch merge, don't forget to commit)
5 % contents of bar should be line0 line1 line2
6 line0
7 line1
8 line2
9 rev offset length base linkrev nodeid p1 p2
10 0 0 77 0 2 da78c0659611 000000000000 000000000000
11 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
12 bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
13 rev offset length base linkrev nodeid p1 p2
14 0 0 7 0 0 690b295714ae 000000000000 000000000000
15 1 7 13 1 1 9e25c27b8757 690b295714ae 000000000000
16 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
17 4:2d2f9a22c82b 2:0a3ab4856510
18 3:7d3b554bfdf1 2:0a3ab4856510 1:5cd961e4045d
19 2:0a3ab4856510 0:2665aaee66e9
20 1:5cd961e4045d
21 0:2665aaee66e9
22 % this should use bar@rev2 as the ancestor
23 resolving manifests
24 overwrite None partial False
25 ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 7d3b554bfdf1
26 bar: versions differ -> m
27 merging bar
28 my bar@2d2f9a22c82b+ other bar@7d3b554bfdf1 ancestor bar@0a3ab4856510
29 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
30 (branch merge, don't forget to commit)
31 % contents of bar should be line1 line2
32 line1
33 line2
34 rev offset length base linkrev nodeid p1 p2
35 0 0 77 0 2 da78c0659611 000000000000 000000000000
36 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
37 2 153 7 2 4 4defe5eec418 da78c0659611 000000000000
38 3 160 13 3 5 4663501da27b 4defe5eec418 4b358025380b
39
40
41 requesting all changes
42 adding changesets
43 adding manifests
44 adding file changes
45 added 3 changesets with 3 changes to 2 files (+1 heads)
46 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
47 merging foo and bar
48 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
49 (branch merge, don't forget to commit)
50 % contents of bar should be line0 line1 line2
51 line0
52 line1
53 line2
54 rev offset length base linkrev nodeid p1 p2
55 0 0 77 0 2 da78c0659611 000000000000 000000000000
56 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
57 bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
58 rev offset length base linkrev nodeid p1 p2
59 0 0 7 0 0 690b295714ae 000000000000 000000000000
60 1 7 13 1 1 9e25c27b8757 690b295714ae 000000000000
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 4:2d2f9a22c82b 2:0a3ab4856510
63 3:96ab80c60897 1:5cd961e4045d 2:0a3ab4856510
64 2:0a3ab4856510 0:2665aaee66e9
65 1:5cd961e4045d
66 0:2665aaee66e9
67 % this should use bar@rev2 as the ancestor
68 resolving manifests
69 overwrite None partial False
70 ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 96ab80c60897
71 bar: versions differ -> m
72 merging bar
73 my bar@2d2f9a22c82b+ other bar@96ab80c60897 ancestor bar@0a3ab4856510
74 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
75 (branch merge, don't forget to commit)
76 % contents of bar should be line1 line2
77 line1
78 line2
79 rev offset length base linkrev nodeid p1 p2
80 0 0 77 0 2 da78c0659611 000000000000 000000000000
81 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
82 2 153 7 2 4 4defe5eec418 da78c0659611 000000000000
83 3 160 13 3 5 4663501da27b 4defe5eec418 4b358025380b
@@ -1,531 +1,534 b''
1 1 HGRC(5)
2 2 =======
3 3 Bryan O'Sullivan <bos@serpentine.com>
4 4
5 5 NAME
6 6 ----
7 7 hgrc - configuration files for Mercurial
8 8
9 9 SYNOPSIS
10 10 --------
11 11
12 12 The Mercurial system uses a set of configuration files to control
13 13 aspects of its behaviour.
14 14
15 15 FILES
16 16 -----
17 17
18 18 Mercurial reads configuration data from several files, if they exist.
19 19 The names of these files depend on the system on which Mercurial is
20 20 installed.
21 21
22 22 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
23 23 (Unix) <install-root>/etc/mercurial/hgrc::
24 24 Per-installation configuration files, searched for in the
25 25 directory where Mercurial is installed. For example, if installed
26 26 in /shared/tools, Mercurial will look in
27 27 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
28 28 all Mercurial commands executed by any user in any directory.
29 29
30 30 (Unix) /etc/mercurial/hgrc.d/*.rc::
31 31 (Unix) /etc/mercurial/hgrc::
32 32 (Windows) C:\Mercurial\Mercurial.ini::
33 33 Per-system configuration files, for the system on which Mercurial
34 34 is running. Options in these files apply to all Mercurial
35 35 commands executed by any user in any directory. Options in these
36 36 files override per-installation options.
37 37
38 38 (Unix) $HOME/.hgrc::
39 39 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
40 40 (Windows) $HOME\Mercurial.ini::
41 41 Per-user configuration file, for the user running Mercurial.
42 42 Options in this file apply to all Mercurial commands executed by
43 43 this user in any directory. Options in this file override
44 44 per-installation and per-system options.
45 45 On Windows systems, one of these is chosen exclusively according
46 46 to the definition of the HOME environment variable.
47 47
48 48 (Unix, Windows) <repo>/.hg/hgrc::
49 49 Per-repository configuration options that only apply in a
50 50 particular repository. This file is not version-controlled, and
51 51 will not get transferred during a "clone" operation. Options in
52 52 this file override options in all other configuration files.
53 53 On Unix, most of this file will be ignored if it doesn't belong
54 54 to a trusted user or to a trusted group. See the documentation
55 55 for the trusted section below for more details.
56 56
57 57 SYNTAX
58 58 ------
59 59
60 60 A configuration file consists of sections, led by a "[section]" header
61 61 and followed by "name: value" entries; "name=value" is also accepted.
62 62
63 63 [spam]
64 64 eggs=ham
65 65 green=
66 66 eggs
67 67
68 68 Each line contains one entry. If the lines that follow are indented,
69 69 they are treated as continuations of that entry.
70 70
71 71 Leading whitespace is removed from values. Empty lines are skipped.
72 72
73 73 Values can optionally contain format strings which refer to other
74 74 values in the same section, or to values in a special DEFAULT section.
75 75
76 76 Lines beginning with "#" or ";" are ignored and may be used to provide
77 77 comments.
78 78
79 79 SECTIONS
80 80 --------
81 81
82 82 This section describes the different sections that may appear in a
83 83 Mercurial "hgrc" file, the purpose of each section, its possible
84 84 keys, and their possible values.
85 85
86 86 decode/encode::
87 87 Filters for transforming files on checkout/checkin. This would
88 88 typically be used for newline processing or other
89 89 localization/canonicalization of files.
90 90
91 91 Filters consist of a filter pattern followed by a filter command.
92 92 Filter patterns are globs by default, rooted at the repository
93 93 root. For example, to match any file ending in ".txt" in the root
94 94 directory only, use the pattern "*.txt". To match any file ending
95 95 in ".c" anywhere in the repository, use the pattern "**.c".
96 96
97 97 The filter command can start with a specifier, either "pipe:" or
98 98 "tempfile:". If no specifier is given, "pipe:" is used by default.
99 99
100 100 A "pipe:" command must accept data on stdin and return the
101 101 transformed data on stdout.
102 102
103 103 Pipe example:
104 104
105 105 [encode]
106 106 # uncompress gzip files on checkin to improve delta compression
107 107 # note: not necessarily a good idea, just an example
108 108 *.gz = pipe: gunzip
109 109
110 110 [decode]
111 111 # recompress gzip files when writing them to the working dir (we
112 112 # can safely omit "pipe:", because it's the default)
113 113 *.gz = gzip
114 114
115 115 A "tempfile:" command is a template. The string INFILE is replaced
116 116 with the name of a temporary file that contains the data to be
117 117 filtered by the command. The string OUTFILE is replaced with the
118 118 name of an empty temporary file, where the filtered data must be
119 119 written by the command.
120 120
121 121 NOTE: the tempfile mechanism is recommended for Windows systems,
122 122 where the standard shell I/O redirection operators often have
123 123 strange effects. In particular, if you are doing line ending
124 124 conversion on Windows using the popular dos2unix and unix2dos
125 125 programs, you *must* use the tempfile mechanism, as using pipes will
126 126 corrupt the contents of your files.
127 127
128 128 Tempfile example:
129 129
130 130 [encode]
131 131 # convert files to unix line ending conventions on checkin
132 132 **.txt = tempfile: dos2unix -n INFILE OUTFILE
133 133
134 134 [decode]
135 135 # convert files to windows line ending conventions when writing
136 136 # them to the working dir
137 137 **.txt = tempfile: unix2dos -n INFILE OUTFILE
138 138
139 139 defaults::
140 140 Use the [defaults] section to define command defaults, i.e. the
141 141 default options/arguments to pass to the specified commands.
142 142
143 143 The following example makes 'hg log' run in verbose mode, and
144 144 'hg status' show only the modified files, by default.
145 145
146 146 [defaults]
147 147 log = -v
148 148 status = -m
149 149
150 150 The actual commands, instead of their aliases, must be used when
151 151 defining command defaults. The command defaults will also be
152 152 applied to the aliases of the commands defined.
153 153
154 154 diff::
155 155 Settings used when displaying diffs. They are all boolean and
156 156 default to False.
157 157 git;;
158 158 Use git extended diff format.
159 159 nodates;;
160 160 Don't include dates in diff headers.
161 161 showfunc;;
162 162 Show which function each change is in.
163 163 ignorews;;
164 164 Ignore white space when comparing lines.
165 165 ignorewsamount;;
166 166 Ignore changes in the amount of white space.
167 167 ignoreblanklines;;
168 168 Ignore changes whose lines are all blank.
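
  For illustration, a possible [diff] section turning some of these on
  (any combination works, since each key is an independent boolean):

    [diff]
    git = True
    showfunc = True
    nodates = True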
169 169
170 170 email::
171 171 Settings for extensions that send email messages.
172 172 from;;
173 173 Optional. Email address to use in "From" header and SMTP envelope
174 174 of outgoing messages.
175 175 to;;
176 176 Optional. Comma-separated list of recipients' email addresses.
177 177 cc;;
178 178 Optional. Comma-separated list of carbon copy recipients'
179 179 email addresses.
180 180 bcc;;
181 181 Optional. Comma-separated list of blind carbon copy
182 182 recipients' email addresses. Cannot be set interactively.
183 183 method;;
184 184 Optional. Method to use to send email messages. If value is
185 185 "smtp" (default), use SMTP (see section "[smtp]" for
186 186 configuration). Otherwise, use as name of program to run that
187 187 acts like sendmail (takes "-f" option for sender, list of
188 188 recipients on command line, message on stdin). Normally, setting
189 189 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
190 190 sendmail to send messages.
191 191
192 192 Email example:
193 193
194 194 [email]
195 195 from = Joseph User <joe.user@example.com>
196 196 method = /usr/sbin/sendmail
197 197
198 198 extensions::
199 199 Mercurial has an extension mechanism for adding new features. To
200 200 enable an extension, create an entry for it in this section.
201 201
202 202 If you know that the extension is already in Python's search path,
203 203 you can give the name of the module, followed by "=", with nothing
204 204 after the "=".
205 205
206 206 Otherwise, give a name that you choose, followed by "=", followed by
207 207 the path to the ".py" file (including the file name extension) that
208 208 defines the extension.
209 209
210 210 Example for ~/.hgrc:
211 211
212 212 [extensions]
213 213 # (the mq extension will get loaded from mercurial's path)
214 214 hgext.mq =
215 215 # (this extension will get loaded from the file specified)
216 216 myfeature = ~/.hgext/myfeature.py
217 217
218 218 hooks::
219 219 Commands or Python functions that get automatically executed by
220 220 various actions such as starting or finishing a commit. Multiple
221 221 hooks can be run for the same action by appending a suffix to the
222 222 action. Overriding a site-wide hook can be done by changing its
223 223 value or setting it to an empty string.
224 224
225 225 Example .hg/hgrc:
226 226
227 227 [hooks]
228 228 # do not use the site-wide hook
229 229 incoming =
230 230 incoming.email = /my/email/hook
231 231 incoming.autobuild = /my/build/hook
232 232
233 233 Most hooks are run with environment variables set that give added
234 234 useful information. For each hook below, the environment variables
235 235 it is passed are listed with names of the form "$HG_foo".
236 236
237 237 changegroup;;
238 238 Run after a changegroup has been added via push, pull or
239 239 unbundle. ID of the first new changeset is in $HG_NODE. URL from
240 240 which changes came is in $HG_URL.
241 241 commit;;
242 242 Run after a changeset has been created in the local repository.
243 243 ID of the newly created changeset is in $HG_NODE. Parent
244 244 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
245 245 incoming;;
246 246 Run after a changeset has been pulled, pushed, or unbundled into
247 247 the local repository. The ID of the newly arrived changeset is in
248 248 $HG_NODE. URL that was the source of changes is in $HG_URL.
249 249 outgoing;;
250 250 Run after sending changes from local repository to another. ID of
251 251 first changeset sent is in $HG_NODE. Source of operation is in
252 252 $HG_SOURCE; see "preoutgoing" hook for description.
253 253 prechangegroup;;
254 254 Run before a changegroup is added via push, pull or unbundle.
255 255 Exit status 0 allows the changegroup to proceed. Non-zero status
256 256 will cause the push, pull or unbundle to fail. URL from which
257 257 changes will come is in $HG_URL.
258 258 precommit;;
259 259 Run before starting a local commit. Exit status 0 allows the
260 260 commit to proceed. Non-zero status will cause the commit to fail.
261 261 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
262 262 preoutgoing;;
263 263 Run before computing changes to send from the local repository to
264 264 another. Non-zero status will cause failure. This lets you
265 265 prevent a pull over http or ssh. It also applies to local pull,
266 266 push (outbound) and bundle commands, but is not effective there,
267 267 since you can just copy files instead. Source of operation is in
268 268 $HG_SOURCE. If "serve", operation is happening on behalf of
269 269 remote ssh or http repository. If "push", "pull" or "bundle",
270 270 operation is happening on behalf of repository on same system.
271 271 pretag;;
272 272 Run before creating a tag. Exit status 0 allows the tag to be
273 273 created. Non-zero status will cause the tag to fail. ID of
274 274 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
275 275 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
276 276 pretxnchangegroup;;
277 277 Run after a changegroup has been added via push, pull or unbundle,
278 278 but before the transaction has been committed. Changegroup is
279 279 visible to hook program. This lets you validate incoming changes
280 280 before accepting them. Passed the ID of the first new changeset
281 281 in $HG_NODE. Exit status 0 allows the transaction to commit.
282 282 Non-zero status will cause the transaction to be rolled back and
283 283 the push, pull or unbundle will fail. URL that was source of
284 284 changes is in $HG_URL.
285 285 pretxncommit;;
286 286 Run after a changeset has been created but the transaction not yet
287 287 committed. Changeset is visible to hook program. This lets you
288 288 validate commit message and changes. Exit status 0 allows the
289 289 commit to proceed. Non-zero status will cause the transaction to
290 290 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
291 291 IDs are in $HG_PARENT1 and $HG_PARENT2.
292 292 preupdate;;
293 293 Run before updating the working directory. Exit status 0 allows
294 294 the update to proceed. Non-zero status will prevent the update.
295 295 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
296 296 of second new parent is in $HG_PARENT2.
297 297 tag;;
298 298 Run after a tag is created. ID of tagged changeset is in
299 299 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
300 300 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
301 301 update;;
302 302 Run after updating the working directory. Changeset ID of first
303 303 new parent is in $HG_PARENT1. If merge, ID of second new parent
304 304 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
305 305 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
306 306
307 307 Note: In earlier releases, the names of hook environment variables
308 308 did not have a "HG_" prefix. The old unprefixed names are no longer
309 309 provided in the environment.
310 310
311 311 The syntax for Python hooks is as follows:
312 312
313 313 hookname = python:modulename.submodule.callable
314 314
315 315 Python hooks are run within the Mercurial process. Each hook is
316 316 called with at least three keyword arguments: a ui object (keyword
317 317 "ui"), a repository object (keyword "repo"), and a "hooktype"
318 318 keyword that tells what kind of hook is used. Arguments listed as
319 319 environment variables above are passed as keyword arguments, with no
320 320 "HG_" prefix, and names in lower case.
321 321
322 322 If a Python hook returns a "true" value or raises an exception, this
323 323 is treated as failure of the hook.
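
  As an illustrative sketch only (the module and function names here are
  hypothetical, not part of Mercurial), a Python hook and its matching
  hgrc entry could look like this:

    # mymodule.py, somewhere on Python's search path
    def checkcommit(ui, repo, hooktype, node=None, **kwargs):
        ui.status("running %s hook for %s\n" % (hooktype, node))
        return False   # a true return value (or an exception) fails the hook

    [hooks]
    pretxncommit.check = python:mymodule.checkcommit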
324 324
325 325 http_proxy::
326 326 Used to access web-based Mercurial repositories through a HTTP
327 327 proxy.
328 328 host;;
329 329 Host name and (optional) port of the proxy server, for example
330 330 "myproxy:8000".
331 331 no;;
332 332 Optional. Comma-separated list of host names that should bypass
333 333 the proxy.
334 334 passwd;;
335 335 Optional. Password to authenticate with at the proxy server.
336 336 user;;
337 337 Optional. User name to authenticate with at the proxy server.
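
  An illustrative [http_proxy] section (host name, user and password are
  placeholders):

    [http_proxy]
    host = myproxy:8000
    no = localhost, example.com
    user = proxyuser
    passwd = secret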
338 338
339 339 smtp::
340 340 Configuration for extensions that need to send email messages.
341 341 host;;
342 342 Host name of mail server, e.g. "mail.example.com".
343 343 port;;
344 344 Optional. Port to connect to on mail server. Default: 25.
345 345 tls;;
346 346 Optional. Whether to connect to mail server using TLS. True or
347 347 False. Default: False.
348 348 username;;
349 349 Optional. User name to authenticate to SMTP server with.
350 350 If username is specified, password must also be specified.
351 351 Default: none.
352 352 password;;
353 353 Optional. Password to authenticate to SMTP server with.
354 354 If username is specified, password must also be specified.
355 355 Default: none.
356 356 local_hostname;;
357 357 Optional. The hostname that the sender can use to identify itself
358 358 to the MTA.
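
  An illustrative [smtp] section (all values are placeholders):

    [smtp]
    host = mail.example.com
    port = 25
    tls = True
    username = hguser
    password = secret
    local_hostname = workstation.example.com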
359 359
360 360 paths::
361 361 Assigns symbolic names to repositories. The left side is the
362 362 symbolic name, and the right gives the directory or URL that is the
363 363 location of the repository. Default paths can be declared by
364 364 setting the following entries.
365 365 default;;
366 366 Directory or URL to use when pulling if no source is specified.
367 367 Default is the repository from which the current repository
368 368 was cloned.
369 369 default-push;;
370 370 Optional. Directory or URL to use when pushing if no destination
371 371 is specified.
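
  An illustrative [paths] section (the names and URLs are placeholders):

    [paths]
    default = http://hg.example.com/project
    default-push = ssh://hg@example.com/project
    stable = /data/repos/project-stable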
372 372
373 373 server::
374 374 Controls generic server settings.
375 375 uncompressed;;
376 376 Whether to allow clients to clone a repo using the uncompressed
377 377 streaming protocol. This transfers about 40% more data than a
378 378 regular clone, but uses less memory and CPU on both server and
379 379 client. Over a LAN (100Mbps or better) or a very fast WAN, an
380 380 uncompressed streaming clone is a lot faster (~10x) than a regular
381 381 clone. Over most WAN connections (anything slower than about
382 382 6Mbps), uncompressed streaming is slower, because of the extra
383 383 data transfer overhead. Default is False.
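
  For example, a server on a fast local network might enable it like this:

    [server]
    uncompressed = True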
384 384
385 385 trusted::
386 386 For security reasons, Mercurial will not use the settings in
387 387 the .hg/hgrc file from a repository if it doesn't belong to a
388 388 trusted user or to a trusted group. The main exception is the
389 389 web interface, which automatically uses some safe settings, since
390 390 it's common to serve repositories from different users.
391 391
392 392 This section specifies what users and groups are trusted. The
393 393 current user is always trusted. To trust everybody, list a user
394 394 or a group with name "*".
395 395
396 396 users;;
397 397 Comma-separated list of trusted users.
398 398 groups;;
399 399 Comma-separated list of trusted groups.
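
  An illustrative [trusted] section (user and group names are placeholders):

    [trusted]
    users = alice, bob
    groups = devel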
400 400
401 401 ui::
402 402 User interface controls.
403 403 debug;;
404 404 Print debugging information. True or False. Default is False.
405 405 editor;;
406 406 The editor to use during a commit. Default is $EDITOR or "vi".
407 407 fallbackencoding;;
408 408 Encoding to try if it's not possible to decode the changelog using
409 409 UTF-8. Default is ISO-8859-1.
410 410 ignore;;
411 411 A file to read per-user ignore patterns from. This file should be in
412 412 the same format as a repository-wide .hgignore file. This option
413 413 supports hook syntax, so if you want to specify multiple ignore
414 414 files, you can do so by setting something like
415 415 "ignore.other = ~/.hgignore2". For details of the ignore file
416 416 format, see the hgignore(5) man page.
417 417 interactive;;
418 418 Allow prompting the user. True or False. Default is True.
419 419 logtemplate;;
420 420 Template string for commands that print changesets.
421 421 style;;
422 422 Name of style to use for command output.
423 423 merge;;
424 424 The conflict resolution program to use during a manual merge.
425 425 Default is "hgmerge".
426 426 quiet;;
427 427 Reduce the amount of output printed. True or False. Default is False.
428 428 remotecmd;;
429 429 Remote command to use for clone/push/pull operations. Default is 'hg'.
430 430 ssh;;
431 431 Command to use for SSH connections. Default is 'ssh'.
432 432 strict;;
433 433 Require exact command names, instead of allowing unambiguous
434 434 abbreviations. True or False. Default is False.
435 435 timeout;;
436 436 The timeout used when a lock is held (in seconds); a negative value
437 437 means no timeout. Default is 600.
438 438 username;;
439 439 The committer of a changeset created when running "commit".
440 440 Typically a person's name and email address, e.g. "Fred Widget
441 441 <fred@example.com>". Default is $EMAIL or username@hostname.
442 If the username in hgrc is empty, it has to be specified manually or
443 in a different hgrc file (e.g. $HOME/.hgrc, if the admin set "username ="
444 in the system hgrc).
442 445 verbose;;
443 446 Increase the amount of output printed. True or False. Default is False.
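
  An illustrative [ui] section combining a few of these settings (the
  username is a placeholder, as in the entry above):

    [ui]
    username = Fred Widget <fred@example.com>
    editor = vi
    merge = hgmerge
    verbose = True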
444 447
445 448
446 449 web::
447 450 Web interface configuration.
448 451 accesslog;;
449 452 Where to output the access log. Default is stdout.
450 453 address;;
451 454 Interface address to bind to. Default is all.
452 455 allow_archive;;
453 456 List of archive formats (bz2, gz, zip) allowed for downloading.
454 457 Default is empty.
455 458 allowbz2;;
456 459 (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
457 460 Default is false.
458 461 allowgz;;
459 462 (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
460 463 Default is false.
461 464 allowpull;;
462 465 Whether to allow pulling from the repository. Default is true.
463 466 allow_push;;
464 467 Whether to allow pushing to the repository. If empty or not set,
465 468 push is not allowed. If the special value "*", any remote user
466 469 can push, including unauthenticated users. Otherwise, the remote
467 470 user must have been authenticated, and the authenticated user name
468 471 must be present in this list (separated by whitespace or ",").
469 472 The contents of the allow_push list are examined after the
470 473 deny_push list.
471 474 allowzip;;
472 475 (DEPRECATED) Whether to allow .zip downloading of repo revisions.
473 476 Default is false. This feature creates temporary files.
474 477 baseurl;;
475 478 Base URL to use when publishing URLs in other locations, so
476 479 third-party tools like email notification hooks can construct URLs.
477 480 Example: "http://hgserver/repos/"
478 481 contact;;
479 482 Name or email address of the person in charge of the repository.
480 483 Default is "unknown".
481 484 deny_push;;
482 485 Whether to deny pushing to the repository. If empty or not set,
483 486 push is not denied. If the special value "*", all remote users
484 487 are denied push. Otherwise, unauthenticated users are all denied,
485 488 and any authenticated user name present in this list (separated by
486 489 whitespace or ",") is also denied. The contents of the deny_push
487 490 list are examined before the allow_push list.
488 491 description;;
489 492 Textual description of the repository's purpose or contents.
490 493 Default is "unknown".
491 494 errorlog;;
492 495 Where to output the error log. Default is stderr.
493 496 ipv6;;
494 497 Whether to use IPv6. Default is false.
495 498 name;;
496 499 Repository name to use in the web interface. Default is current
497 500 working directory.
498 501 maxchanges;;
499 502 Maximum number of changes to list on the changelog. Default is 10.
500 503 maxfiles;;
501 504 Maximum number of files to list per changeset. Default is 10.
502 505 port;;
503 506 Port to listen on. Default is 8000.
504 507 push_ssl;;
505 508 Whether to require that inbound pushes be transported over SSL to
506 509 prevent password sniffing. Default is true.
507 510 stripes;;
508 511 How many lines a "zebra stripe" should span in multiline output.
509 512 Default is 1; set to 0 to disable.
510 513 style;;
511 514 Which template map style to use.
512 515 templates;;
513 516 Where to find the HTML templates. Default is install path.
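
  An illustrative [web] section (name, contact and the allowed pusher are
  placeholders):

    [web]
    name = project
    contact = Fred Widget <fred@example.com>
    allow_archive = gz zip
    push_ssl = true
    allow_push = fred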
514 517
515 518
516 519 AUTHOR
517 520 ------
518 521 Bryan O'Sullivan <bos@serpentine.com>.
519 522
520 523 Mercurial was written by Matt Mackall <mpm@selenic.com>.
521 524
522 525 SEE ALSO
523 526 --------
524 527 hg(1), hgignore(5)
525 528
526 529 COPYING
527 530 -------
528 531 This manual page is copyright 2005 Bryan O'Sullivan.
529 532 Mercurial is copyright 2005, 2006 Matt Mackall.
530 533 Free use of this software is granted under the terms of the GNU General
531 534 Public License (GPL).
@@ -1,753 +1,755 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, mdiff, util, templater, patch
11 11
12 12 revrangesep = ':'
13 13
14 14 def revpair(repo, revs):
15 15 '''return pair of nodes, given list of revisions. second item can
16 16 be None, meaning use working dir.'''
17 17
18 18 def revfix(repo, val, defval):
19 19 if not val and val != 0 and defval is not None:
20 20 val = defval
21 21 return repo.lookup(val)
22 22
23 23 if not revs:
24 24 return repo.dirstate.parents()[0], None
25 25 end = None
26 26 if len(revs) == 1:
27 27 if revrangesep in revs[0]:
28 28 start, end = revs[0].split(revrangesep, 1)
29 29 start = revfix(repo, start, 0)
30 30 end = revfix(repo, end, repo.changelog.count() - 1)
31 31 else:
32 32 start = revfix(repo, revs[0], None)
33 33 elif len(revs) == 2:
34 34 if revrangesep in revs[0] or revrangesep in revs[1]:
35 35 raise util.Abort(_('too many revisions specified'))
36 36 start = revfix(repo, revs[0], None)
37 37 end = revfix(repo, revs[1], None)
38 38 else:
39 39 raise util.Abort(_('too many revisions specified'))
40 40 return start, end
41 41
42 42 def revrange(repo, revs):
43 43 """Yield revision as strings from a list of revision specifications."""
44 44
45 45 def revfix(repo, val, defval):
46 46 if not val and val != 0 and defval is not None:
47 47 return defval
48 48 return repo.changelog.rev(repo.lookup(val))
49 49
50 50 seen, l = {}, []
51 51 for spec in revs:
52 52 if revrangesep in spec:
53 53 start, end = spec.split(revrangesep, 1)
54 54 start = revfix(repo, start, 0)
55 55 end = revfix(repo, end, repo.changelog.count() - 1)
56 56 step = start > end and -1 or 1
57 57 for rev in xrange(start, end+step, step):
58 58 if rev in seen:
59 59 continue
60 60 seen[rev] = 1
61 61 l.append(rev)
62 62 else:
63 63 rev = revfix(repo, spec, None)
64 64 if rev in seen:
65 65 continue
66 66 seen[rev] = 1
67 67 l.append(rev)
68 68
69 69 return l
70 70
71 71 def make_filename(repo, pat, node,
72 72 total=None, seqno=None, revwidth=None, pathname=None):
73 73 node_expander = {
74 74 'H': lambda: hex(node),
75 75 'R': lambda: str(repo.changelog.rev(node)),
76 76 'h': lambda: short(node),
77 77 }
78 78 expander = {
79 79 '%': lambda: '%',
80 80 'b': lambda: os.path.basename(repo.root),
81 81 }
82 82
83 83 try:
84 84 if node:
85 85 expander.update(node_expander)
86 86 if node and revwidth is not None:
87 87 expander['r'] = (lambda:
88 88 str(repo.changelog.rev(node)).zfill(revwidth))
89 89 if total is not None:
90 90 expander['N'] = lambda: str(total)
91 91 if seqno is not None:
92 92 expander['n'] = lambda: str(seqno)
93 93 if total is not None and seqno is not None:
94 94 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
95 95 if pathname is not None:
96 96 expander['s'] = lambda: os.path.basename(pathname)
97 97 expander['d'] = lambda: os.path.dirname(pathname) or '.'
98 98 expander['p'] = lambda: pathname
99 99
100 100 newname = []
101 101 patlen = len(pat)
102 102 i = 0
103 103 while i < patlen:
104 104 c = pat[i]
105 105 if c == '%':
106 106 i += 1
107 107 c = pat[i]
108 108 c = expander[c]()
109 109 newname.append(c)
110 110 i += 1
111 111 return ''.join(newname)
112 112 except KeyError, inst:
113 113 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
114 114 inst.args[0])
115 115
116 116 def make_file(repo, pat, node=None,
117 117 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
118 118 if not pat or pat == '-':
119 119 return 'w' in mode and sys.stdout or sys.stdin
120 120 if hasattr(pat, 'write') and 'w' in mode:
121 121 return pat
122 122 if hasattr(pat, 'read') and 'r' in mode:
123 123 return pat
124 124 return open(make_filename(repo, pat, node, total, seqno, revwidth,
125 125 pathname),
126 126 mode)
127 127
128 def matchpats(repo, pats=[], opts={}, head=''):
128 def matchpats(repo, pats=[], opts={}, head='', globbed=False):
129 129 cwd = repo.getcwd()
130 130 if not pats and cwd:
131 131 opts['include'] = [os.path.join(cwd, i)
132 132 for i in opts.get('include', [])]
133 133 opts['exclude'] = [os.path.join(cwd, x)
134 134 for x in opts.get('exclude', [])]
135 135 cwd = ''
136 136 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
137 opts.get('exclude'), head)
137 opts.get('exclude'), head, globbed=globbed)
138 138
139 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
140 files, matchfn, anypats = matchpats(repo, pats, opts, head)
139 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
140 globbed=False):
141 files, matchfn, anypats = matchpats(repo, pats, opts, head,
142 globbed=globbed)
141 143 exact = dict.fromkeys(files)
142 144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
143 145 badmatch=badmatch):
144 146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
145 147
146 148 def findrenames(repo, added=None, removed=None, threshold=0.5):
147 149 if added is None or removed is None:
148 150 added, removed = repo.status()[1:3]
149 151 ctx = repo.changectx()
150 152 for a in added:
151 153 aa = repo.wread(a)
152 154 bestscore, bestname = None, None
153 155 for r in removed:
154 156 rr = ctx.filectx(r).data()
155 157 delta = mdiff.textdiff(aa, rr)
156 158 if len(delta) < len(aa):
157 159 myscore = 1.0 - (float(len(delta)) / len(aa))
158 160 if bestscore is None or myscore > bestscore:
159 161 bestscore, bestname = myscore, r
160 162 if bestname and bestscore >= threshold:
161 163 yield bestname, a, bestscore
162 164
163 165 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
164 166 similarity=None):
165 167 if dry_run is None:
166 168 dry_run = opts.get('dry_run')
167 169 if similarity is None:
168 170 similarity = float(opts.get('similarity') or 0)
169 171 add, remove = [], []
170 172 mapping = {}
171 173 for src, abs, rel, exact in walk(repo, pats, opts):
172 174 if src == 'f' and repo.dirstate.state(abs) == '?':
173 175 add.append(abs)
174 176 mapping[abs] = rel, exact
175 177 if repo.ui.verbose or not exact:
176 178 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
177 179 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
178 180 remove.append(abs)
179 181 mapping[abs] = rel, exact
180 182 if repo.ui.verbose or not exact:
181 183 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
182 184 if not dry_run:
183 185 repo.add(add, wlock=wlock)
184 186 repo.remove(remove, wlock=wlock)
185 187 if similarity > 0:
186 188 for old, new, score in findrenames(repo, add, remove, similarity):
187 189 oldrel, oldexact = mapping[old]
188 190 newrel, newexact = mapping[new]
189 191 if repo.ui.verbose or not oldexact or not newexact:
190 192 repo.ui.status(_('recording removal of %s as rename to %s '
191 193 '(%d%% similar)\n') %
192 194 (oldrel, newrel, score * 100))
193 195 if not dry_run:
194 196 repo.copy(old, new, wlock=wlock)
195 197
196 198 class changeset_printer(object):
197 199 '''show changeset information when templating not requested.'''
198 200
199 201 def __init__(self, ui, repo, patch, buffered):
200 202 self.ui = ui
201 203 self.repo = repo
202 204 self.buffered = buffered
203 205 self.patch = patch
204 206 self.header = {}
205 207 self.hunk = {}
206 208 self.lastheader = None
207 209
208 210 def flush(self, rev):
209 211 if rev in self.header:
210 212 h = self.header[rev]
211 213 if h != self.lastheader:
212 214 self.lastheader = h
213 215 self.ui.write(h)
214 216 del self.header[rev]
215 217 if rev in self.hunk:
216 218 self.ui.write(self.hunk[rev])
217 219 del self.hunk[rev]
218 220 return 1
219 221 return 0
220 222
221 223 def show(self, rev=0, changenode=None, copies=None, **props):
222 224 if self.buffered:
223 225 self.ui.pushbuffer()
224 226 self._show(rev, changenode, copies, props)
225 227 self.hunk[rev] = self.ui.popbuffer()
226 228 else:
227 229 self._show(rev, changenode, copies, props)
228 230
229 231 def _show(self, rev, changenode, copies, props):
230 232 '''show a single changeset or file revision'''
231 233 log = self.repo.changelog
232 234 if changenode is None:
233 235 changenode = log.node(rev)
234 236 elif not rev:
235 237 rev = log.rev(changenode)
236 238
237 239 if self.ui.quiet:
238 240 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 241 return
240 242
241 243 changes = log.read(changenode)
242 244 date = util.datestr(changes[2])
243 245 extra = changes[5]
244 246 branch = extra.get("branch")
245 247
246 248 hexfunc = self.ui.debugflag and hex or short
247 249
248 250 parents = log.parentrevs(rev)
249 251 if not self.ui.debugflag:
250 252 if parents[1] == nullrev:
251 253 if parents[0] >= rev - 1:
252 254 parents = []
253 255 else:
254 256 parents = [parents[0]]
255 257 parents = [(p, hexfunc(log.node(p))) for p in parents]
256 258
257 259 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258 260
259 261 if branch:
260 262 branch = util.tolocal(branch)
261 263 self.ui.write(_("branch: %s\n") % branch)
262 264 for tag in self.repo.nodetags(changenode):
263 265 self.ui.write(_("tag: %s\n") % tag)
264 266 for parent in parents:
265 267 self.ui.write(_("parent: %d:%s\n") % parent)
266 268
267 269 if self.ui.debugflag:
268 270 self.ui.write(_("manifest: %d:%s\n") %
269 271 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
270 272 self.ui.write(_("user: %s\n") % changes[1])
271 273 self.ui.write(_("date: %s\n") % date)
272 274
273 275 if self.ui.debugflag:
274 276 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
275 277 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
276 278 files):
277 279 if value:
278 280 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
279 281 elif changes[3] and self.ui.verbose:
280 282 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
281 283 if copies and self.ui.verbose:
282 284 copies = ['%s (%s)' % c for c in copies]
283 285 self.ui.write(_("copies: %s\n") % ' '.join(copies))
284 286
285 287 if extra and self.ui.debugflag:
286 288 extraitems = extra.items()
287 289 extraitems.sort()
288 290 for key, value in extraitems:
289 291 self.ui.write(_("extra: %s=%s\n")
290 292 % (key, value.encode('string_escape')))
291 293
292 294 description = changes[4].strip()
293 295 if description:
294 296 if self.ui.verbose:
295 297 self.ui.write(_("description:\n"))
296 298 self.ui.write(description)
297 299 self.ui.write("\n\n")
298 300 else:
299 301 self.ui.write(_("summary: %s\n") %
300 302 description.splitlines()[0])
301 303 self.ui.write("\n")
302 304
303 305 self.showpatch(changenode)
304 306
305 307 def showpatch(self, node):
306 308 if self.patch:
307 309 prev = self.repo.changelog.parents(node)[0]
308 310 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
309 311 self.ui.write("\n")
310 312
311 313 class changeset_templater(changeset_printer):
312 314 '''format changeset information.'''
313 315
314 316 def __init__(self, ui, repo, patch, mapfile, buffered):
315 317 changeset_printer.__init__(self, ui, repo, patch, buffered)
316 318 self.t = templater.templater(mapfile, templater.common_filters,
317 319 cache={'parent': '{rev}:{node|short} ',
318 320 'manifest': '{rev}:{node|short}',
319 321 'filecopy': '{name} ({source})'})
320 322
321 323 def use_template(self, t):
322 324 '''set template string to use'''
323 325 self.t.cache['changeset'] = t
324 326
325 327 def _show(self, rev, changenode, copies, props):
326 328 '''show a single changeset or file revision'''
327 329 log = self.repo.changelog
328 330 if changenode is None:
329 331 changenode = log.node(rev)
330 332 elif not rev:
331 333 rev = log.rev(changenode)
332 334
333 335 changes = log.read(changenode)
334 336
335 337 def showlist(name, values, plural=None, **args):
336 338 '''expand set of values.
337 339 name is name of key in template map.
338 340 values is list of strings or dicts.
339 341 plural is plural of name, if not simply name + 's'.
340 342
341 343 expansion works like this, given name 'foo'.
342 344
343 345 if values is empty, expand 'no_foos'.
344 346
345 347 if 'foo' not in template map, return values as a string,
346 348 joined by space.
347 349
348 350 expand 'start_foos'.
349 351
350 352 for each value, expand 'foo'. if 'last_foo' in template
351 353 map, expand it instead of 'foo' for last key.
352 354
353 355 expand 'end_foos'.
354 356 '''
355 357 if plural: names = plural
356 358 else: names = name + 's'
357 359 if not values:
358 360 noname = 'no_' + names
359 361 if noname in self.t:
360 362 yield self.t(noname, **args)
361 363 return
362 364 if name not in self.t:
363 365 if isinstance(values[0], str):
364 366 yield ' '.join(values)
365 367 else:
366 368 for v in values:
367 369 yield dict(v, **args)
368 370 return
369 371 startname = 'start_' + names
370 372 if startname in self.t:
371 373 yield self.t(startname, **args)
372 374 vargs = args.copy()
373 375 def one(v, tag=name):
374 376 try:
375 377 vargs.update(v)
376 378 except (AttributeError, ValueError):
377 379 try:
378 380 for a, b in v:
379 381 vargs[a] = b
380 382 except ValueError:
381 383 vargs[name] = v
382 384 return self.t(tag, **vargs)
383 385 lastname = 'last_' + name
384 386 if lastname in self.t:
385 387 last = values.pop()
386 388 else:
387 389 last = None
388 390 for v in values:
389 391 yield one(v)
390 392 if last is not None:
391 393 yield one(last, tag=lastname)
392 394 endname = 'end_' + names
393 395 if endname in self.t:
394 396 yield self.t(endname, **args)
395 397
396 398 def showbranches(**args):
397 399 branch = changes[5].get("branch")
398 400 if branch:
399 401 branch = util.tolocal(branch)
400 402 return showlist('branch', [branch], plural='branches', **args)
401 403
402 404 def showparents(**args):
403 405 parents = [[('rev', log.rev(p)), ('node', hex(p))]
404 406 for p in log.parents(changenode)
405 407 if self.ui.debugflag or p != nullid]
406 408 if (not self.ui.debugflag and len(parents) == 1 and
407 409 parents[0][0][1] == rev - 1):
408 410 return
409 411 return showlist('parent', parents, **args)
410 412
411 413 def showtags(**args):
412 414 return showlist('tag', self.repo.nodetags(changenode), **args)
413 415
414 416 def showextras(**args):
415 417 extras = changes[5].items()
416 418 extras.sort()
417 419 for key, value in extras:
418 420 args = args.copy()
419 421 args.update(dict(key=key, value=value))
420 422 yield self.t('extra', **args)
421 423
422 424 def showcopies(**args):
423 425 c = [{'name': x[0], 'source': x[1]} for x in copies]
424 426 return showlist('file_copy', c, plural='file_copies', **args)
425 427
426 428 if self.ui.debugflag:
427 429 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
428 430 def showfiles(**args):
429 431 return showlist('file', files[0], **args)
430 432 def showadds(**args):
431 433 return showlist('file_add', files[1], **args)
432 434 def showdels(**args):
433 435 return showlist('file_del', files[2], **args)
434 436 def showmanifest(**args):
435 437 args = args.copy()
436 438 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
437 439 node=hex(changes[0])))
438 440 return self.t('manifest', **args)
439 441 else:
440 442 def showfiles(**args):
441 443 return showlist('file', changes[3], **args)
442 444 showadds = ''
443 445 showdels = ''
444 446 showmanifest = ''
445 447
446 448 defprops = {
447 449 'author': changes[1],
448 450 'branches': showbranches,
449 451 'date': changes[2],
450 452 'desc': changes[4],
451 453 'file_adds': showadds,
452 454 'file_dels': showdels,
453 455 'files': showfiles,
454 456 'file_copies': showcopies,
455 457 'manifest': showmanifest,
456 458 'node': hex(changenode),
457 459 'parents': showparents,
458 460 'rev': rev,
459 461 'tags': showtags,
460 462 'extras': showextras,
461 463 }
462 464 props = props.copy()
463 465 props.update(defprops)
464 466
465 467 try:
466 468 if self.ui.debugflag and 'header_debug' in self.t:
467 469 key = 'header_debug'
468 470 elif self.ui.quiet and 'header_quiet' in self.t:
469 471 key = 'header_quiet'
470 472 elif self.ui.verbose and 'header_verbose' in self.t:
471 473 key = 'header_verbose'
472 474 elif 'header' in self.t:
473 475 key = 'header'
474 476 else:
475 477 key = ''
476 478 if key:
477 479 h = templater.stringify(self.t(key, **props))
478 480 if self.buffered:
479 481 self.header[rev] = h
480 482 else:
481 483 self.ui.write(h)
482 484 if self.ui.debugflag and 'changeset_debug' in self.t:
483 485 key = 'changeset_debug'
484 486 elif self.ui.quiet and 'changeset_quiet' in self.t:
485 487 key = 'changeset_quiet'
486 488 elif self.ui.verbose and 'changeset_verbose' in self.t:
487 489 key = 'changeset_verbose'
488 490 else:
489 491 key = 'changeset'
490 492 self.ui.write(templater.stringify(self.t(key, **props)))
491 493 self.showpatch(changenode)
492 494 except KeyError, inst:
493 495 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
494 496 inst.args[0]))
495 497 except SyntaxError, inst:
496 498 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
497 499
498 500 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
499 501 """show one changeset using template or regular display.
500 502
501 503 Display format will be the first non-empty hit of:
502 504 1. option 'template'
503 505 2. option 'style'
504 506 3. [ui] setting 'logtemplate'
505 507 4. [ui] setting 'style'
506 508 If all of these values are either the unset or the empty string,
507 509 regular display via changeset_printer() is done.
508 510 """
509 511 # options
510 512 patch = False
511 513 if opts.get('patch'):
512 514 patch = matchfn or util.always
513 515
514 516 tmpl = opts.get('template')
515 517 mapfile = None
516 518 if tmpl:
517 519 tmpl = templater.parsestring(tmpl, quoted=False)
518 520 else:
519 521 mapfile = opts.get('style')
520 522 # ui settings
521 523 if not mapfile:
522 524 tmpl = ui.config('ui', 'logtemplate')
523 525 if tmpl:
524 526 tmpl = templater.parsestring(tmpl)
525 527 else:
526 528 mapfile = ui.config('ui', 'style')
527 529
528 530 if tmpl or mapfile:
529 531 if mapfile:
530 532 if not os.path.split(mapfile)[0]:
531 533 mapname = (templater.templatepath('map-cmdline.' + mapfile)
532 534 or templater.templatepath(mapfile))
533 535 if mapname: mapfile = mapname
534 536 try:
535 537 t = changeset_templater(ui, repo, patch, mapfile, buffered)
536 538 except SyntaxError, inst:
537 539 raise util.Abort(inst.args[0])
538 540 if tmpl: t.use_template(tmpl)
539 541 return t
540 542 return changeset_printer(ui, repo, patch, buffered)
541 543
542 544 def finddate(ui, repo, date):
543 545 """Find the tipmost changeset that matches the given date spec"""
544 546 df = util.matchdate(date + " to " + date)
545 547 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
546 548 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
547 549 results = {}
548 550 for st, rev, fns in changeiter:
549 551 if st == 'add':
550 552 d = get(rev)[2]
551 553 if df(d[0]):
552 554 results[rev] = d
553 555 elif st == 'iter':
554 556 if rev in results:
555 557 ui.status("Found revision %s from %s\n" %
556 558 (rev, util.datestr(results[rev])))
557 559 return str(rev)
558 560
559 561 raise util.Abort(_("revision matching date not found"))
560 562
561 563 def walkchangerevs(ui, repo, pats, change, opts):
562 564 '''Iterate over files and the revs they changed in.
563 565
564 566 Callers most commonly need to iterate backwards over the history
565 567 it is interested in. Doing so has awful (quadratic-looking)
566 568 performance, so we use iterators in a "windowed" way.
567 569
568 570 We walk a window of revisions in the desired order. Within the
569 571 window, we first walk forwards to gather data, then in the desired
570 572 order (usually backwards) to display it.
571 573
572 574 This function returns an (iterator, matchfn) tuple. The iterator
573 575 yields 3-tuples. They will be of one of the following forms:
574 576
575 577 "window", incrementing, lastrev: stepping through a window,
576 578 positive if walking forwards through revs, last rev in the
577 579 sequence iterated over - use to reset state for the current window
578 580
579 581 "add", rev, fns: out-of-order traversal of the given file names
580 582 fns, which changed during revision rev - use to gather data for
581 583 possible display
582 584
583 585 "iter", rev, None: in-order traversal of the revs earlier iterated
584 586 over with "add" - use to display data'''
585 587
586 588 def increasing_windows(start, end, windowsize=8, sizelimit=512):
587 589 if start < end:
588 590 while start < end:
589 591 yield start, min(windowsize, end-start)
590 592 start += windowsize
591 593 if windowsize < sizelimit:
592 594 windowsize *= 2
593 595 else:
594 596 while start > end:
595 597 yield start, min(windowsize, start-end-1)
596 598 start -= windowsize
597 599 if windowsize < sizelimit:
598 600 windowsize *= 2
599 601
600 602 files, matchfn, anypats = matchpats(repo, pats, opts)
601 603 follow = opts.get('follow') or opts.get('follow_first')
602 604
603 605 if repo.changelog.count() == 0:
604 606 return [], matchfn
605 607
606 608 if follow:
607 609 defrange = '%s:0' % repo.changectx().rev()
608 610 else:
609 611 defrange = 'tip:0'
610 612 revs = revrange(repo, opts['rev'] or [defrange])
611 613 wanted = {}
612 614 slowpath = anypats or opts.get('removed')
613 615 fncache = {}
614 616
615 617 if not slowpath and not files:
616 618 # No files, no patterns. Display all revs.
617 619 wanted = dict.fromkeys(revs)
618 620 copies = []
619 621 if not slowpath:
620 622 # Only files, no patterns. Check the history of each file.
621 623 def filerevgen(filelog, node):
622 624 cl_count = repo.changelog.count()
623 625 if node is None:
624 626 last = filelog.count() - 1
625 627 else:
626 628 last = filelog.rev(node)
627 629 for i, window in increasing_windows(last, nullrev):
628 630 revs = []
629 631 for j in xrange(i - window, i + 1):
630 632 n = filelog.node(j)
631 633 revs.append((filelog.linkrev(n),
632 634 follow and filelog.renamed(n)))
633 635 revs.reverse()
634 636 for rev in revs:
635 637 # only yield rev for which we have the changelog, it can
636 638 # happen while doing "hg log" during a pull or commit
637 639 if rev[0] < cl_count:
638 640 yield rev
639 641 def iterfiles():
640 642 for filename in files:
641 643 yield filename, None
642 644 for filename_node in copies:
643 645 yield filename_node
644 646 minrev, maxrev = min(revs), max(revs)
645 647 for file_, node in iterfiles():
646 648 filelog = repo.file(file_)
647 649 # A zero count may be a directory or deleted file, so
648 650 # try to find matching entries on the slow path.
649 651 if filelog.count() == 0:
650 652 slowpath = True
651 653 break
652 654 for rev, copied in filerevgen(filelog, node):
653 655 if rev <= maxrev:
654 656 if rev < minrev:
655 657 break
656 658 fncache.setdefault(rev, [])
657 659 fncache[rev].append(file_)
658 660 wanted[rev] = 1
659 661 if follow and copied:
660 662 copies.append(copied)
661 663 if slowpath:
662 664 if follow:
663 665 raise util.Abort(_('can only follow copies/renames for explicit '
664 666 'file names'))
665 667
666 668 # The slow path checks files modified in every changeset.
667 669 def changerevgen():
668 670 for i, window in increasing_windows(repo.changelog.count()-1,
669 671 nullrev):
670 672 for j in xrange(i - window, i + 1):
671 673 yield j, change(j)[3]
672 674
673 675 for rev, changefiles in changerevgen():
674 676 matches = filter(matchfn, changefiles)
675 677 if matches:
676 678 fncache[rev] = matches
677 679 wanted[rev] = 1
678 680
679 681 class followfilter:
680 682 def __init__(self, onlyfirst=False):
681 683 self.startrev = nullrev
682 684 self.roots = []
683 685 self.onlyfirst = onlyfirst
684 686
685 687 def match(self, rev):
686 688 def realparents(rev):
687 689 if self.onlyfirst:
688 690 return repo.changelog.parentrevs(rev)[0:1]
689 691 else:
690 692 return filter(lambda x: x != nullrev,
691 693 repo.changelog.parentrevs(rev))
692 694
693 695 if self.startrev == nullrev:
694 696 self.startrev = rev
695 697 return True
696 698
697 699 if rev > self.startrev:
698 700 # forward: all descendants
699 701 if not self.roots:
700 702 self.roots.append(self.startrev)
701 703 for parent in realparents(rev):
702 704 if parent in self.roots:
703 705 self.roots.append(rev)
704 706 return True
705 707 else:
706 708 # backwards: all parents
707 709 if not self.roots:
708 710 self.roots.extend(realparents(self.startrev))
709 711 if rev in self.roots:
710 712 self.roots.remove(rev)
711 713 self.roots.extend(realparents(rev))
712 714 return True
713 715
714 716 return False
715 717
716 718 # it might be worthwhile to do this in the iterator if the rev range
717 719 # is descending and the prune args are all within that range
718 720 for rev in opts.get('prune', ()):
719 721 rev = repo.changelog.rev(repo.lookup(rev))
720 722 ff = followfilter()
721 723 stop = min(revs[0], revs[-1])
722 724 for x in xrange(rev, stop-1, -1):
723 725 if ff.match(x) and x in wanted:
724 726 del wanted[x]
725 727
726 728 def iterate():
727 729 if follow and not files:
728 730 ff = followfilter(onlyfirst=opts.get('follow_first'))
729 731 def want(rev):
730 732 if ff.match(rev) and rev in wanted:
731 733 return True
732 734 return False
733 735 else:
734 736 def want(rev):
735 737 return rev in wanted
736 738
737 739 for i, window in increasing_windows(0, len(revs)):
738 740 yield 'window', revs[0] < revs[-1], revs[-1]
739 741 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
740 742 srevs = list(nrevs)
741 743 srevs.sort()
742 744 for rev in srevs:
743 745 fns = fncache.get(rev)
744 746 if not fns:
745 747 def fns_generator():
746 748 for f in change(rev)[3]:
747 749 if matchfn(f):
748 750 yield f
749 751 fns = fns_generator()
750 752 yield 'add', rev, fns
751 753 for rev in nrevs:
752 754 yield 'iter', rev, None
753 755 return iterate(), matchfn
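
The two-pass protocol documented in the walkchangerevs docstring above is easiest
to see in a small consumer. A minimal sketch, modelled on finddate and assuming
ui and repo objects (and the cmdutil and util modules) are already in scope; it
is not part of this patch:

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, {'rev': None})
    summaries = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # out-of-order pass: gather whatever is needed for display
            desc = get(rev)[4].strip() or "(no description)"
            summaries[rev] = desc.splitlines()[0]
        elif st == 'iter':
            # in-order pass: emit output in the requested order
            ui.write("%d: %s\n" % (rev, summaries[rev]))
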
@@ -1,3296 +1,3307 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import traceback, errno, version, atexit, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted = repo.status()[:4]
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def logmessage(opts):
28 28 """ get the log message according to -m and -l option """
29 29 message = opts['message']
30 30 logfile = opts['logfile']
31 31
32 32 if message and logfile:
33 33 raise util.Abort(_('options --message and --logfile are mutually '
34 34 'exclusive'))
35 35 if not message and logfile:
36 36 try:
37 37 if logfile == '-':
38 38 message = sys.stdin.read()
39 39 else:
40 40 message = open(logfile).read()
41 41 except IOError, inst:
42 42 raise util.Abort(_("can't read commit message '%s': %s") %
43 43 (logfile, inst.strerror))
44 44 return message
45 45
46 46 def setremoteconfig(ui, opts):
47 47 "copy remote options to ui tree"
48 48 if opts.get('ssh'):
49 49 ui.setconfig("ui", "ssh", opts['ssh'])
50 50 if opts.get('remotecmd'):
51 51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52 52
53 53 # Commands start here, listed alphabetically
54 54
55 55 def add(ui, repo, *pats, **opts):
56 56 """add the specified files on the next commit
57 57
58 58 Schedule files to be version controlled and added to the repository.
59 59
60 60 The files will be added to the repository at the next commit. To
61 61 undo an add before that, see hg revert.
62 62
63 63 If no names are given, add all files in the repository.
64 64 """
65 65
66 66 names = []
67 67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 68 if exact:
69 69 if ui.verbose:
70 70 ui.status(_('adding %s\n') % rel)
71 71 names.append(abs)
72 72 elif repo.dirstate.state(abs) == '?':
73 73 ui.status(_('adding %s\n') % rel)
74 74 names.append(abs)
75 75 if not opts.get('dry_run'):
76 76 repo.add(names)
77 77
78 78 def addremove(ui, repo, *pats, **opts):
79 79 """add all new files, delete all missing files
80 80
81 81 Add all new files and remove all missing files from the repository.
82 82
83 83 New files are ignored if they match any of the patterns in .hgignore. As
84 84 with add, these changes take effect at the next commit.
85 85
86 86 Use the -s option to detect renamed files. With a parameter > 0,
87 87 this compares every removed file with every added file and records
88 88 those similar enough as renames. This option takes a percentage
89 89 between 0 (disabled) and 100 (files must be identical) as its
90 90 parameter. Detecting renamed files this way can be expensive.
91 91 """
92 92 sim = float(opts.get('similarity') or 0)
93 93 if sim < 0 or sim > 100:
94 94 raise util.Abort(_('similarity must be between 0 and 100'))
95 95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96 96
97 97 def annotate(ui, repo, *pats, **opts):
98 98 """show changeset information per file line
99 99
100 100 List changes in files, showing the revision id responsible for each line
101 101
102 102 This command is useful to discover who did a change or when a change took
103 103 place.
104 104
105 105 Without the -a option, annotate will avoid processing files it
106 106 detects as binary. With -a, annotate will generate an annotation
107 107 anyway, probably with undesirable results.
108 108 """
109 109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110 110
111 111 if not pats:
112 112 raise util.Abort(_('at least one file name or pattern required'))
113 113
114 114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 115 ['number', lambda x: str(x.rev())],
116 116 ['changeset', lambda x: short(x.node())],
117 117 ['date', getdate], ['follow', lambda x: x.path()]]
118 118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 119 and not opts['follow']):
120 120 opts['number'] = 1
121 121
122 122 ctx = repo.changectx(opts['rev'])
123 123
124 124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 125 node=ctx.node()):
126 126 fctx = ctx.filectx(abs)
127 127 if not opts['text'] and util.binary(fctx.data()):
128 128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 129 continue
130 130
131 131 lines = fctx.annotate(follow=opts.get('follow'))
132 132 pieces = []
133 133
134 134 for o, f in opmap:
135 135 if opts[o]:
136 136 l = [f(n) for n, dummy in lines]
137 137 if l:
138 138 m = max(map(len, l))
139 139 pieces.append(["%*s" % (m, x) for x in l])
140 140
141 141 if pieces:
142 142 for p, l in zip(zip(*pieces), lines):
143 143 ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
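The per-field padding above (`"%*s" % (m, x)`) right-aligns each annotation column to its own widest entry before the columns are zipped back together line by line. A standalone sketch of the same idiom, with hypothetical input:

    def align_columns(columns):
        # pad every entry of a column to that column's maximum width,
        # then re-join the columns row by row
        padded = []
        for col in columns:
            width = max(len(item) for item in col)
            padded.append(["%*s" % (width, item) for item in col])
        return [" ".join(row) for row in zip(*padded)]

    # align_columns([["mpm", "alexis"], ["1", "42"]])
    #   -> ['   mpm  1', 'alexis 42']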
145 145 def archive(ui, repo, dest, **opts):
146 146 '''create unversioned archive of a repository revision
147 147
148 148 By default, the revision used is the parent of the working
149 149 directory; use "-r" to specify a different revision.
150 150
151 151 To specify the type of archive to create, use "-t". Valid
152 152 types are:
153 153
154 154 "files" (default): a directory full of files
155 155 "tar": tar archive, uncompressed
156 156 "tbz2": tar archive, compressed using bzip2
157 157 "tgz": tar archive, compressed using gzip
158 158 "uzip": zip archive, uncompressed
159 159 "zip": zip archive, compressed using deflate
160 160
161 161 The exact name of the destination archive or directory is given
162 162 using a format string; see "hg help export" for details.
163 163
164 164 Each member added to an archive file has a directory prefix
165 165 prepended. Use "-p" to specify a format string for the prefix.
166 166 The default is the basename of the archive, with suffixes removed.
167 167 '''
168 168
169 169 node = repo.changectx(opts['rev']).node()
170 170 dest = cmdutil.make_filename(repo, dest, node)
171 171 if os.path.realpath(dest) == repo.root:
172 172 raise util.Abort(_('repository root cannot be destination'))
173 173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 174 kind = opts.get('type') or 'files'
175 175 prefix = opts['prefix']
176 176 if dest == '-':
177 177 if kind == 'files':
178 178 raise util.Abort(_('cannot archive plain files to stdout'))
179 179 dest = sys.stdout
180 180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 181 prefix = cmdutil.make_filename(repo, prefix, node)
182 182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 183 matchfn, prefix)
184 184
185 185 def backout(ui, repo, rev, **opts):
186 186 '''reverse effect of earlier changeset
187 187
188 188 Commit the backed out changes as a new changeset. The new
189 189 changeset is a child of the backed out changeset.
190 190
191 191 If you back out a changeset other than the tip, a new head is
192 192 created. This head is the parent of the working directory. If
193 193 you back out an old changeset, your working directory will appear
194 194 old after the backout. You should merge the backout changeset
195 195 with another head.
196 196
197 197 The --merge option remembers the parent of the working directory
198 198 before starting the backout, then merges the new head with that
199 199 changeset afterwards. This saves you from doing the merge by
200 200 hand. The result of this merge is not committed, as for a normal
201 201 merge.'''
202 202
203 203 bail_if_changed(repo)
204 204 op1, op2 = repo.dirstate.parents()
205 205 if op2 != nullid:
206 206 raise util.Abort(_('outstanding uncommitted merge'))
207 207 node = repo.lookup(rev)
208 208 p1, p2 = repo.changelog.parents(node)
209 209 if p1 == nullid:
210 210 raise util.Abort(_('cannot back out a change with no parents'))
211 211 if p2 != nullid:
212 212 if not opts['parent']:
213 213 raise util.Abort(_('cannot back out a merge changeset without '
214 214 '--parent'))
215 215 p = repo.lookup(opts['parent'])
216 216 if p not in (p1, p2):
217 217 raise util.Abort(_('%s is not a parent of %s') %
218 218 (short(p), short(node)))
219 219 parent = p
220 220 else:
221 221 if opts['parent']:
222 222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 223 parent = p1
224 224 hg.clean(repo, node, show_stats=False)
225 225 revert_opts = opts.copy()
226 226 revert_opts['date'] = None
227 227 revert_opts['all'] = True
228 228 revert_opts['rev'] = hex(parent)
229 229 revert(ui, repo, **revert_opts)
230 230 commit_opts = opts.copy()
231 231 commit_opts['addremove'] = False
232 232 if not commit_opts['message'] and not commit_opts['logfile']:
233 233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 234 commit_opts['force_editor'] = True
235 235 commit(ui, repo, **commit_opts)
236 236 def nice(node):
237 237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 238 ui.status(_('changeset %s backs out changeset %s\n') %
239 239 (nice(repo.changelog.tip()), nice(node)))
240 240 if op1 != node:
241 241 if opts['merge']:
242 242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 243 hg.merge(repo, hex(op1))
244 244 else:
245 245 ui.status(_('the backout changeset is a new head - '
246 246 'do not forget to merge\n'))
247 247 ui.status(_('(use "backout --merge" '
248 248 'if you want to auto-merge)\n'))
249 249
250 250 def branch(ui, repo, label=None):
251 251 """set or show the current branch name
252 252
253 253 With <name>, set the current branch name. Otherwise, show the
254 254 current branch name.
255 255 """
256 256
257 257 if label is not None:
258 258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
259 259 else:
260 260 b = util.tolocal(repo.workingctx().branch())
261 261 if b:
262 262 ui.write("%s\n" % b)
263 263
264 264 def branches(ui, repo):
265 265 """list repository named branches
266 266
267 267 List the repository's named branches.
268 268 """
269 269 b = repo.branchtags()
270 270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
271 271 l.sort()
272 272 for r, n, t in l:
273 273 hexfunc = ui.debugflag and hex or short
274 274 if ui.quiet:
275 275 ui.write("%s\n" % t)
276 276 else:
277 277 t = util.localsub(t, 30)
278 278 t += " " * (30 - util.locallen(t))
279 279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
280 280
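Sorting on the negated revision number above lists the most recently committed branch heads first. A small sketch of the same ordering, where `rev_of` stands in for `repo.changelog.rev`:

    def sort_branch_heads(branchtags, rev_of):
        # branchtags maps branch name -> head node; newest heads sort first
        entries = [(-rev_of(node), node, tag) for tag, node in branchtags.items()]
        entries.sort()
        return [(tag, node) for negrev, node, tag in entries]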
281 281 def bundle(ui, repo, fname, dest=None, **opts):
282 282 """create a changegroup file
283 283
284 284 Generate a compressed changegroup file collecting changesets not
285 285 found in the other repository.
286 286
287 287 If no destination repository is specified the destination is assumed
288 288 to have all the nodes specified by one or more --base parameters.
289 289
290 290 The bundle file can then be transferred using conventional means and
291 291 applied to another repository with the unbundle or pull command.
292 292 This is useful when direct push and pull are not available or when
293 293 exporting an entire repository is undesirable.
294 294
295 295 Applying bundles preserves all changeset contents including
296 296 permissions, copy/rename information, and revision history.
297 297 """
298 298 revs = opts.get('rev') or None
299 299 if revs:
300 300 revs = [repo.lookup(rev) for rev in revs]
301 301 base = opts.get('base')
302 302 if base:
303 303 if dest:
304 304 raise util.Abort(_("--base is incompatible with specifying "
305 305 "a destination"))
306 306 base = [repo.lookup(rev) for rev in base]
307 307 # create the right base
308 308 # XXX: nodesbetween / changegroup* should be "fixed" instead
309 309 o = []
310 310 has = {nullid: None}
311 311 for n in base:
312 312 has.update(repo.changelog.reachable(n))
313 313 if revs:
314 314 visit = list(revs)
315 315 else:
316 316 visit = repo.changelog.heads()
317 317 seen = {}
318 318 while visit:
319 319 n = visit.pop(0)
320 320 parents = [p for p in repo.changelog.parents(n) if p not in has]
321 321 if len(parents) == 0:
322 322 o.insert(0, n)
323 323 else:
324 324 for p in parents:
325 325 if p not in seen:
326 326 seen[p] = 1
327 327 visit.append(p)
328 328 else:
329 329 setremoteconfig(ui, opts)
330 330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
331 331 other = hg.repository(ui, dest)
332 332 o = repo.findoutgoing(other, force=opts['force'])
333 333
334 334 if revs:
335 335 cg = repo.changegroupsubset(o, revs, 'bundle')
336 336 else:
337 337 cg = repo.changegroup(o, 'bundle')
338 338 changegroup.writebundle(cg, fname, "HG10BZ")
339 339
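The --base handling above walks from the requested heads toward the roots, stopping at changesets whose parents are all reachable from a base node; those roots are what changegroupsubset is fed. A minimal sketch of that walk, assuming a changelog-like object with `reachable` and `parents`:

    def outgoing_roots(changelog, heads, basenodes, nullid):
        # everything reachable from a --base node is assumed present remotely
        has = {nullid: None}
        for n in basenodes:
            has.update(changelog.reachable(n))
        roots, seen, visit = [], {}, list(heads)
        while visit:
            n = visit.pop(0)
            parents = [p for p in changelog.parents(n) if p not in has]
            if not parents:
                # all parents are on the remote side: this is a root to send
                roots.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
        return roots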
340 340 def cat(ui, repo, file1, *pats, **opts):
341 341 """output the current or given revision of files
342 342
343 343 Print the specified files as they were at the given revision.
344 344 If no revision is given, the parent of the working directory is used,
345 345 or tip if no revision is checked out.
346 346
347 347 Output may be to a file, in which case the name of the file is
348 348 given using a format string. The formatting rules are the same as
349 349 for the export command, with the following additions:
350 350
351 351 %s basename of file being printed
352 352 %d dirname of file being printed, or '.' if in repo root
353 353 %p root-relative path name of file being printed
354 354 """
355 355 ctx = repo.changectx(opts['rev'])
356 356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
357 357 ctx.node()):
358 358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
359 359 fp.write(ctx.filectx(abs).data())
360 360
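The extra %s/%d/%p keys listed in the docstring are expanded per file when building the output name. An illustrative sketch of that expansion (not the real cmdutil.make_file, which also handles the export keys):

    import os

    def expand_filename(template, pathname):
        # %s -> basename, %d -> dirname (or '.'), %p -> repo-relative path
        return (template.replace('%s', os.path.basename(pathname))
                        .replace('%d', os.path.dirname(pathname) or '.')
                        .replace('%p', pathname))

    # expand_filename('out/%d/%s', 'lib/util.py') -> 'out/lib/util.py'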
361 361 def clone(ui, source, dest=None, **opts):
362 362 """make a copy of an existing repository
363 363
364 364 Create a copy of an existing repository in a new directory.
365 365
366 366 If no destination directory name is specified, it defaults to the
367 367 basename of the source.
368 368
369 369 The location of the source is added to the new repository's
370 370 .hg/hgrc file, as the default to be used for future pulls.
371 371
372 372 For efficiency, hardlinks are used for cloning whenever the source
373 373 and destination are on the same filesystem (note this applies only
374 374 to the repository data, not to the checked out files). Some
375 375 filesystems, such as AFS, implement hardlinking incorrectly, but
376 376 do not report errors. In these cases, use the --pull option to
377 377 avoid hardlinking.
378 378
379 379 You can safely clone repositories and checked out files using full
380 380 hardlinks with
381 381
382 382 $ cp -al REPO REPOCLONE
383 383
384 384 which is the fastest way to clone. However, the operation is not
385 385 atomic (making sure REPO is not modified during the operation is
386 386 up to you) and you have to make sure your editor breaks hardlinks
387 387 (Emacs and most Linux Kernel tools do so).
388 388
389 389 If you use the -r option to clone up to a specific revision, no
390 390 subsequent revisions will be present in the cloned repository.
391 391 This option implies --pull, even on local repositories.
392 392
393 393 See pull for valid source format details.
394 394
395 395 It is possible to specify an ssh:// URL as the destination, but no
396 396 .hg/hgrc and working directory will be created on the remote side.
397 397 Look at the help text for the pull command for important details
398 398 about ssh:// URLs.
399 399 """
400 400 setremoteconfig(ui, opts)
401 401 hg.clone(ui, ui.expandpath(source), dest,
402 402 pull=opts['pull'],
403 403 stream=opts['uncompressed'],
404 404 rev=opts['rev'],
405 405 update=not opts['noupdate'])
406 406
407 407 def commit(ui, repo, *pats, **opts):
408 408 """commit the specified files or all outstanding changes
409 409
410 410 Commit changes to the given files into the repository.
411 411
412 412 If a list of files is omitted, all changes reported by "hg status"
413 413 will be committed.
414 414
415 415 If no commit message is specified, the editor configured in your hgrc
416 416 or in the EDITOR environment variable is started to enter a message.
417 417 """
418 418 message = logmessage(opts)
419 419
420 420 if opts['addremove']:
421 421 cmdutil.addremove(repo, pats, opts)
422 422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
423 423 if pats:
424 424 status = repo.status(files=fns, match=match)
425 425 modified, added, removed, deleted, unknown = status[:5]
426 426 files = modified + added + removed
427 427 slist = None
428 428 for f in fns:
429 429 if f not in files:
430 430 rf = repo.wjoin(f)
431 431 if f in unknown:
432 432 raise util.Abort(_("file %s not tracked!") % rf)
433 433 try:
434 434 mode = os.lstat(rf)[stat.ST_MODE]
435 435 except OSError:
436 436 raise util.Abort(_("file %s not found!") % rf)
437 437 if stat.S_ISDIR(mode):
438 438 name = f + '/'
439 439 if slist is None:
440 440 slist = list(files)
441 441 slist.sort()
442 442 i = bisect.bisect(slist, name)
443 443 if i >= len(slist) or not slist[i].startswith(name):
444 444 raise util.Abort(_("no match under directory %s!")
445 445 % rf)
446 446 elif not stat.S_ISREG(mode):
447 447 raise util.Abort(_("can't commit %s: "
448 448 "unsupported file type!") % rf)
449 449 else:
450 450 files = []
451 451 try:
452 452 repo.commit(files, message, opts['user'], opts['date'], match,
453 453 force_editor=opts.get('force_editor'))
454 454 except ValueError, inst:
455 455 raise util.Abort(str(inst))
456 456
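The directory check above relies on the fact that, in a sorted list of repository paths, any file under a directory sorts immediately at or after the directory prefix itself, so a single bisect plus a startswith test is enough. A standalone sketch of that trick:

    import bisect

    def has_file_under(sorted_files, directory):
        # sorted_files must be sorted; directory is given without a trailing slash
        prefix = directory.rstrip('/') + '/'
        i = bisect.bisect(sorted_files, prefix)
        return i < len(sorted_files) and sorted_files[i].startswith(prefix)

    # has_file_under(['a/b', 'a/c', 'd'], 'a') -> True
    # has_file_under(['a/b', 'a/c', 'd'], 'b') -> False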
457 457 def docopy(ui, repo, pats, opts, wlock):
458 458 # called with the repo lock held
459 459 #
460 460 # hgsep => pathname that uses "/" to separate directories
461 461 # ossep => pathname that uses os.sep to separate directories
462 462 cwd = repo.getcwd()
463 463 errors = 0
464 464 copied = []
465 465 targets = {}
466 466
467 467 # abs: hgsep
468 468 # rel: ossep
469 469 # return: hgsep
470 470 def okaytocopy(abs, rel, exact):
471 471 reasons = {'?': _('is not managed'),
472 472 'a': _('has been marked for add'),
473 473 'r': _('has been marked for remove')}
474 474 state = repo.dirstate.state(abs)
475 475 reason = reasons.get(state)
476 476 if reason:
477 477 if state == 'a':
478 478 origsrc = repo.dirstate.copied(abs)
479 479 if origsrc is not None:
480 480 return origsrc
481 481 if exact:
482 482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 483 else:
484 484 return abs
485 485
486 486 # origsrc: hgsep
487 487 # abssrc: hgsep
488 488 # relsrc: ossep
489 489 # target: ossep
490 490 def copy(origsrc, abssrc, relsrc, target, exact):
491 491 abstarget = util.canonpath(repo.root, cwd, target)
492 492 reltarget = util.pathto(cwd, abstarget)
493 493 prevsrc = targets.get(abstarget)
494 494 if prevsrc is not None:
495 495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 496 (reltarget, util.localpath(abssrc),
497 497 util.localpath(prevsrc)))
498 498 return
499 499 if (not opts['after'] and os.path.exists(reltarget) or
500 500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 501 if not opts['force']:
502 502 ui.warn(_('%s: not overwriting - file exists\n') %
503 503 reltarget)
504 504 return
505 505 if not opts['after'] and not opts.get('dry_run'):
506 506 os.unlink(reltarget)
507 507 if opts['after']:
508 508 if not os.path.exists(reltarget):
509 509 return
510 510 else:
511 511 targetdir = os.path.dirname(reltarget) or '.'
512 512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 513 os.makedirs(targetdir)
514 514 try:
515 515 restore = repo.dirstate.state(abstarget) == 'r'
516 516 if restore and not opts.get('dry_run'):
517 517 repo.undelete([abstarget], wlock)
518 518 try:
519 519 if not opts.get('dry_run'):
520 520 util.copyfile(relsrc, reltarget)
521 521 restore = False
522 522 finally:
523 523 if restore:
524 524 repo.remove([abstarget], wlock)
525 525 except IOError, inst:
526 526 if inst.errno == errno.ENOENT:
527 527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 528 else:
529 529 ui.warn(_('%s: cannot copy - %s\n') %
530 530 (relsrc, inst.strerror))
531 531 errors += 1
532 532 return
533 533 if ui.verbose or not exact:
534 534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 535 targets[abstarget] = abssrc
536 536 if abstarget != origsrc and not opts.get('dry_run'):
537 537 repo.copy(origsrc, abstarget, wlock)
538 538 copied.append((abssrc, relsrc, exact))
539 539
540 540 # pat: ossep
541 541 # dest ossep
542 542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 543 # return: function that takes hgsep and returns ossep
544 544 def targetpathfn(pat, dest, srcs):
545 545 if os.path.isdir(pat):
546 546 abspfx = util.canonpath(repo.root, cwd, pat)
547 547 abspfx = util.localpath(abspfx)
548 548 if destdirexists:
549 549 striplen = len(os.path.split(abspfx)[0])
550 550 else:
551 551 striplen = len(abspfx)
552 552 if striplen:
553 553 striplen += len(os.sep)
554 554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 555 elif destdirexists:
556 556 res = lambda p: os.path.join(dest,
557 557 os.path.basename(util.localpath(p)))
558 558 else:
559 559 res = lambda p: dest
560 560 return res
561 561
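The strip length computed above decides how much of each source path is removed before the remainder is re-rooted under the destination: when the destination directory already exists, the last source directory component is kept; otherwise only its contents are copied. A simplified sketch of that mapping (paths shown with '/' as the separator):

    import os

    def map_into(dest, source_dir, path, dest_exists):
        if dest_exists:
            # keep the 'source_dir/' component itself
            striplen = len(os.path.split(source_dir)[0])
        else:
            # copy only the contents of source_dir
            striplen = len(source_dir)
        if striplen:
            striplen += len(os.sep)
        return os.path.join(dest, path[striplen:])

    # map_into('out', 'src', 'src/a/b.txt', dest_exists=True)  -> 'out/src/a/b.txt'
    # map_into('out', 'src', 'src/a/b.txt', dest_exists=False) -> 'out/a/b.txt'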
562 562 # pat: ossep
563 563 # dest ossep
564 564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 565 # return: function that takes hgsep and returns ossep
566 566 def targetpathafterfn(pat, dest, srcs):
567 567 if util.patkind(pat, None)[0]:
568 568 # a mercurial pattern
569 569 res = lambda p: os.path.join(dest,
570 570 os.path.basename(util.localpath(p)))
571 571 else:
572 572 abspfx = util.canonpath(repo.root, cwd, pat)
573 573 if len(abspfx) < len(srcs[0][0]):
574 574 # A directory. Either the target path contains the last
575 575 # component of the source path or it does not.
576 576 def evalpath(striplen):
577 577 score = 0
578 578 for s in srcs:
579 579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 580 if os.path.exists(t):
581 581 score += 1
582 582 return score
583 583
584 584 abspfx = util.localpath(abspfx)
585 585 striplen = len(abspfx)
586 586 if striplen:
587 587 striplen += len(os.sep)
588 588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 589 score = evalpath(striplen)
590 590 striplen1 = len(os.path.split(abspfx)[0])
591 591 if striplen1:
592 592 striplen1 += len(os.sep)
593 593 if evalpath(striplen1) > score:
594 594 striplen = striplen1
595 595 res = lambda p: os.path.join(dest,
596 596 util.localpath(p)[striplen:])
597 597 else:
598 598 # a file
599 599 if destdirexists:
600 600 res = lambda p: os.path.join(dest,
601 601 os.path.basename(util.localpath(p)))
602 602 else:
603 603 res = lambda p: dest
604 604 return res
605 605
606 606
607 pats = list(pats)
607 pats = util.expand_glob(pats)
608 608 if not pats:
609 609 raise util.Abort(_('no source or destination specified'))
610 610 if len(pats) == 1:
611 611 raise util.Abort(_('no destination specified'))
612 612 dest = pats.pop()
613 613 destdirexists = os.path.isdir(dest)
614 614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 615 raise util.Abort(_('with multiple sources, destination must be an '
616 616 'existing directory'))
617 617 if opts['after']:
618 618 tfn = targetpathafterfn
619 619 else:
620 620 tfn = targetpathfn
621 621 copylist = []
622 622 for pat in pats:
623 623 srcs = []
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
625 globbed=True):
625 626 origsrc = okaytocopy(abssrc, relsrc, exact)
626 627 if origsrc:
627 628 srcs.append((origsrc, abssrc, relsrc, exact))
628 629 if not srcs:
629 630 continue
630 631 copylist.append((tfn(pat, dest, srcs), srcs))
631 632 if not copylist:
632 633 raise util.Abort(_('no files to copy'))
633 634
634 635 for targetpath, srcs in copylist:
635 636 for origsrc, abssrc, relsrc, exact in srcs:
636 637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637 638
638 639 if errors:
639 640 ui.warn(_('(consider using --after)\n'))
640 641 return errors, copied
641 642
642 643 def copy(ui, repo, *pats, **opts):
643 644 """mark files as copied for the next commit
644 645
645 646 Mark dest as having copies of source files. If dest is a
646 647 directory, copies are put in that directory. If dest is a file,
647 648 there can only be one source.
648 649
649 650 By default, this command copies the contents of files as they
650 651 stand in the working directory. If invoked with --after, the
651 652 operation is recorded, but no copying is performed.
652 653
653 654 This command takes effect in the next commit. To undo a copy
654 655 before that, see hg revert.
655 656 """
656 657 wlock = repo.wlock(0)
657 658 errs, copied = docopy(ui, repo, pats, opts, wlock)
658 659 return errs
659 660
660 661 def debugancestor(ui, index, rev1, rev2):
661 662 """find the ancestor revision of two revisions in a given index"""
662 663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
663 664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
664 665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
665 666
666 667 def debugcomplete(ui, cmd='', **opts):
667 668 """returns the completion list associated with the given command"""
668 669
669 670 if opts['options']:
670 671 options = []
671 672 otables = [globalopts]
672 673 if cmd:
673 674 aliases, entry = findcmd(ui, cmd)
674 675 otables.append(entry[1])
675 676 for t in otables:
676 677 for o in t:
677 678 if o[0]:
678 679 options.append('-%s' % o[0])
679 680 options.append('--%s' % o[1])
680 681 ui.write("%s\n" % "\n".join(options))
681 682 return
682 683
683 684 clist = findpossible(ui, cmd).keys()
684 685 clist.sort()
685 686 ui.write("%s\n" % "\n".join(clist))
686 687
687 688 def debugrebuildstate(ui, repo, rev=""):
688 689 """rebuild the dirstate as it would look like for the given revision"""
689 690 if rev == "":
690 691 rev = repo.changelog.tip()
691 692 ctx = repo.changectx(rev)
692 693 files = ctx.manifest()
693 694 wlock = repo.wlock()
694 695 repo.dirstate.rebuild(rev, files)
695 696
696 697 def debugcheckstate(ui, repo):
697 698 """validate the correctness of the current dirstate"""
698 699 parent1, parent2 = repo.dirstate.parents()
699 700 repo.dirstate.read()
700 701 dc = repo.dirstate.map
701 702 keys = dc.keys()
702 703 keys.sort()
703 704 m1 = repo.changectx(parent1).manifest()
704 705 m2 = repo.changectx(parent2).manifest()
705 706 errors = 0
706 707 for f in dc:
707 708 state = repo.dirstate.state(f)
708 709 if state in "nr" and f not in m1:
709 710 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
710 711 errors += 1
711 712 if state in "a" and f in m1:
712 713 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
713 714 errors += 1
714 715 if state in "m" and f not in m1 and f not in m2:
715 716 ui.warn(_("%s in state %s, but not in either manifest\n") %
716 717 (f, state))
717 718 errors += 1
718 719 for f in m1:
719 720 state = repo.dirstate.state(f)
720 721 if state not in "nrm":
721 722 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
722 723 errors += 1
723 724 if errors:
724 725 error = _(".hg/dirstate inconsistent with current parent's manifest")
725 726 raise util.Abort(error)
726 727
727 728 def showconfig(ui, repo, *values, **opts):
728 729 """show combined config settings from all hgrc files
729 730
730 731 With no args, print names and values of all config items.
731 732
732 733 With one arg of the form section.name, print just the value of
733 734 that config item.
734 735
735 736 With multiple args, print names and values of all config items
736 737 with matching section names."""
737 738
738 739 untrusted = bool(opts.get('untrusted'))
739 740 if values:
740 741 if len([v for v in values if '.' in v]) > 1:
741 742 raise util.Abort(_('only one config item permitted'))
742 743 for section, name, value in ui.walkconfig(untrusted=untrusted):
743 744 sectname = section + '.' + name
744 745 if values:
745 746 for v in values:
746 747 if v == section:
747 748 ui.write('%s=%s\n' % (sectname, value))
748 749 elif v == sectname:
749 750 ui.write(value, '\n')
750 751 else:
751 752 ui.write('%s=%s\n' % (sectname, value))
752 753
753 754 def debugsetparents(ui, repo, rev1, rev2=None):
754 755 """manually set the parents of the current working directory
755 756
756 757 This is useful for writing repository conversion tools, but should
757 758 be used with care.
758 759 """
759 760
760 761 if not rev2:
761 762 rev2 = hex(nullid)
762 763
763 764 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
764 765
765 766 def debugstate(ui, repo):
766 767 """show the contents of the current dirstate"""
767 768 repo.dirstate.read()
768 769 dc = repo.dirstate.map
769 770 keys = dc.keys()
770 771 keys.sort()
771 772 for file_ in keys:
773 if dc[file_][3] == -1:
774 # Pad or slice to locale representation
775 locale_len = len(time.strftime("%x %X", time.localtime(0)))
776 timestr = 'unset'
777 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
778 else:
779 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
772 780 ui.write("%c %3o %10d %s %s\n"
773 781 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
774 time.strftime("%x %X",
775 time.localtime(dc[file_][3])), file_))
782 timestr, file_))
776 783 for f in repo.dirstate.copies():
777 784 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
778 785
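The change above keeps the columns aligned when a dirstate entry carries no valid timestamp: 'unset' is padded to the width of a locale-formatted time. A small sketch of the same formatting:

    import time

    def format_mtime(mtime):
        # width of a formatted timestamp in the current locale
        locale_len = len(time.strftime("%x %X", time.localtime(0)))
        if mtime == -1:
            return 'unset'.ljust(locale_len)
        return time.strftime("%x %X", time.localtime(mtime))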
779 786 def debugdata(ui, file_, rev):
780 787 """dump the contents of an data file revision"""
781 788 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
782 789 file_[:-2] + ".i", file_, 0)
783 790 try:
784 791 ui.write(r.revision(r.lookup(rev)))
785 792 except KeyError:
786 793 raise util.Abort(_('invalid revision identifier %s') % rev)
787 794
788 795 def debugdate(ui, date, range=None, **opts):
789 796 """parse and display a date"""
790 797 if opts["extended"]:
791 798 d = util.parsedate(date, util.extendeddateformats)
792 799 else:
793 800 d = util.parsedate(date)
794 801 ui.write("internal: %s %s\n" % d)
795 802 ui.write("standard: %s\n" % util.datestr(d))
796 803 if range:
797 804 m = util.matchdate(range)
798 805 ui.write("match: %s\n" % m(d[0]))
799 806
800 807 def debugindex(ui, file_):
801 808 """dump the contents of an index file"""
802 809 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
803 810 ui.write(" rev offset length base linkrev" +
804 811 " nodeid p1 p2\n")
805 812 for i in xrange(r.count()):
806 813 node = r.node(i)
807 814 pp = r.parents(node)
808 815 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
809 816 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
810 817 short(node), short(pp[0]), short(pp[1])))
811 818
812 819 def debugindexdot(ui, file_):
813 820 """dump an index DAG as a .dot file"""
814 821 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
815 822 ui.write("digraph G {\n")
816 823 for i in xrange(r.count()):
817 824 node = r.node(i)
818 825 pp = r.parents(node)
819 826 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
820 827 if pp[1] != nullid:
821 828 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
822 829 ui.write("}\n")
823 830
824 831 def debuginstall(ui):
825 832 '''test Mercurial installation'''
826 833
827 834 def writetemp(contents):
828 835 (fd, name) = tempfile.mkstemp()
829 836 f = os.fdopen(fd, "wb")
830 837 f.write(contents)
831 838 f.close()
832 839 return name
833 840
834 841 problems = 0
835 842
836 843 # encoding
837 844 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
838 845 try:
839 846 util.fromlocal("test")
840 847 except util.Abort, inst:
841 848 ui.write(" %s\n" % inst)
842 849 ui.write(_(" (check that your locale is properly set)\n"))
843 850 problems += 1
844 851
845 852 # compiled modules
846 853 ui.status(_("Checking extensions...\n"))
847 854 try:
848 855 import bdiff, mpatch, base85
849 856 except Exception, inst:
850 857 ui.write(" %s\n" % inst)
851 858 ui.write(_(" One or more extensions could not be found"))
852 859 ui.write(_(" (check that you compiled the extensions)\n"))
853 860 problems += 1
854 861
855 862 # templates
856 863 ui.status(_("Checking templates...\n"))
857 864 try:
858 865 import templater
859 866 t = templater.templater(templater.templatepath("map-cmdline.default"))
860 867 except Exception, inst:
861 868 ui.write(" %s\n" % inst)
862 869 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
863 870 problems += 1
864 871
865 872 # patch
866 873 ui.status(_("Checking patch...\n"))
867 874 path = os.environ.get('PATH', '')
868 875 patcher = util.find_in_path('gpatch', path,
869 876 util.find_in_path('patch', path, None))
870 877 if not patcher:
871 878 ui.write(_(" Can't find patch or gpatch in PATH\n"))
872 879 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
873 880 problems += 1
874 881 else:
875 882 # actually attempt a patch here
876 883 a = "1\n2\n3\n4\n"
877 884 b = "1\n2\n3\ninsert\n4\n"
878 885 d = mdiff.unidiff(a, None, b, None, "a")
879 886 fa = writetemp(a)
880 887 fd = writetemp(d)
881 888 fp = os.popen('%s %s %s' % (patcher, fa, fd))
882 889 files = []
883 890 output = ""
884 891 for line in fp:
885 892 output += line
886 893 if line.startswith('patching file '):
887 894 pf = util.parse_patch_output(line.rstrip())
888 895 files.append(pf)
889 896 if files != [fa]:
890 897 ui.write(_(" unexpected patch output!"))
891 898 ui.write(_(" (you may have an incompatible version of patch)\n"))
892 899 ui.write(output)
893 900 problems += 1
894 901 a = file(fa).read()
895 902 if a != b:
896 903 ui.write(_(" patch test failed!"))
897 904 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 905 problems += 1
899 906 os.unlink(fa)
900 907 os.unlink(fd)
901 908
902 909 # merge helper
903 910 ui.status(_("Checking merge helper...\n"))
904 911 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
905 912 or "hgmerge")
906 913 cmdpath = util.find_in_path(cmd, path)
907 914 if not cmdpath:
908 915 cmdpath = util.find_in_path(cmd.split()[0], path)
909 916 if not cmdpath:
910 917 if cmd == 'hgmerge':
911 918 ui.write(_(" No merge helper set and can't find default"
912 919 " hgmerge script in PATH\n"))
913 920 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
914 921 else:
915 922 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
916 923 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
917 924 problems += 1
918 925 else:
919 926 # actually attempt a patch here
920 927 fa = writetemp("1\n2\n3\n4\n")
921 928 fl = writetemp("1\n2\n3\ninsert\n4\n")
922 929 fr = writetemp("begin\n1\n2\n3\n4\n")
923 930 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
924 931 if r:
925 932 ui.write(_(" got unexpected merge error %d!") % r)
926 933 problems += 1
927 934 m = file(fl).read()
928 935 if m != "begin\n1\n2\n3\ninsert\n4\n":
929 936 ui.write(_(" got unexpected merge results!") % r)
930 937 ui.write(_(" (your merge helper may have the"
931 938 " wrong argument order)\n"))
932 939 ui.write(m)
933 940 os.unlink(fa)
934 941 os.unlink(fl)
935 942 os.unlink(fr)
936 943
937 944 # editor
938 945 ui.status(_("Checking commit editor...\n"))
939 946 editor = (os.environ.get("HGEDITOR") or
940 947 ui.config("ui", "editor") or
941 948 os.environ.get("EDITOR", "vi"))
942 949 cmdpath = util.find_in_path(editor, path)
943 950 if not cmdpath:
944 951 cmdpath = util.find_in_path(editor.split()[0], path)
945 952 if not cmdpath:
946 953 if editor == 'vi':
947 954 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
948 955 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
949 956 else:
950 957 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
951 958 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
952 959 problems += 1
953 960
954 961 # check username
955 962 ui.status(_("Checking username...\n"))
956 963 user = os.environ.get("HGUSER")
957 964 if user is None:
958 965 user = ui.config("ui", "username")
959 966 if user is None:
960 967 user = os.environ.get("EMAIL")
961 968 if not user:
962 969 ui.warn(" ")
963 970 ui.username()
964 971 ui.write(_(" (specify a username in your .hgrc file)\n"))
965 972
966 973 if not problems:
967 974 ui.status(_("No problems detected\n"))
968 975 else:
969 976 ui.write(_("%s problems detected,"
970 977 " please check your install!\n") % problems)
971 978
972 979 return problems
973 980
974 981 def debugrename(ui, repo, file1, *pats, **opts):
975 982 """dump rename information"""
976 983
977 984 ctx = repo.changectx(opts.get('rev', 'tip'))
978 985 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
979 986 ctx.node()):
980 987 m = ctx.filectx(abs).renamed()
981 988 if m:
982 989 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
983 990 else:
984 991 ui.write(_("%s not renamed\n") % rel)
985 992
986 993 def debugwalk(ui, repo, *pats, **opts):
987 994 """show how files match on given patterns"""
988 995 items = list(cmdutil.walk(repo, pats, opts))
989 996 if not items:
990 997 return
991 998 fmt = '%%s %%-%ds %%-%ds %%s' % (
992 999 max([len(abs) for (src, abs, rel, exact) in items]),
993 1000 max([len(rel) for (src, abs, rel, exact) in items]))
994 1001 for src, abs, rel, exact in items:
995 1002 line = fmt % (src, abs, rel, exact and 'exact' or '')
996 1003 ui.write("%s\n" % line.rstrip())
997 1004
998 1005 def diff(ui, repo, *pats, **opts):
999 1006 """diff repository (or selected files)
1000 1007
1001 1008 Show differences between revisions for the specified files.
1002 1009
1003 1010 Differences between files are shown using the unified diff format.
1004 1011
1005 1012 NOTE: diff may generate unexpected results for merges, as it will
1006 1013 default to comparing against the working directory's first parent
1007 1014 changeset if no revisions are specified.
1008 1015
1009 1016 When two revision arguments are given, then changes are shown
1010 1017 between those revisions. If only one revision is specified then
1011 1018 that revision is compared to the working directory, and, when no
1012 1019 revisions are specified, the working directory files are compared
1013 1020 to its parent.
1014 1021
1015 1022 Without the -a option, diff will avoid generating diffs of files
1016 1023 it detects as binary. With -a, diff will generate a diff anyway,
1017 1024 probably with undesirable results.
1018 1025 """
1019 1026 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1020 1027
1021 1028 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1022 1029
1023 1030 patch.diff(repo, node1, node2, fns, match=matchfn,
1024 1031 opts=patch.diffopts(ui, opts))
1025 1032
1026 1033 def export(ui, repo, *changesets, **opts):
1027 1034 """dump the header and diffs for one or more changesets
1028 1035
1029 1036 Print the changeset header and diffs for one or more revisions.
1030 1037
1031 1038 The information shown in the changeset header is: author,
1032 1039 changeset hash, parent(s) and commit comment.
1033 1040
1034 1041 NOTE: export may generate unexpected diff output for merge changesets,
1035 1042 as it will compare the merge changeset against its first parent only.
1036 1043
1037 1044 Output may be to a file, in which case the name of the file is
1038 1045 given using a format string. The formatting rules are as follows:
1039 1046
1040 1047 %% literal "%" character
1041 1048 %H changeset hash (40 bytes of hexadecimal)
1042 1049 %N number of patches being generated
1043 1050 %R changeset revision number
1044 1051 %b basename of the exporting repository
1045 1052 %h short-form changeset hash (12 bytes of hexadecimal)
1046 1053 %n zero-padded sequence number, starting at 1
1047 1054 %r zero-padded changeset revision number
1048 1055
1049 1056 Without the -a option, export will avoid generating diffs of files
1050 1057 it detects as binary. With -a, export will generate a diff anyway,
1051 1058 probably with undesirable results.
1052 1059
1053 1060 With the --switch-parent option, the diff will be against the second
1054 1061 parent. It can be useful to review a merge.
1055 1062 """
1056 1063 if not changesets:
1057 1064 raise util.Abort(_("export requires at least one changeset"))
1058 1065 revs = cmdutil.revrange(repo, changesets)
1059 1066 if len(revs) > 1:
1060 1067 ui.note(_('exporting patches:\n'))
1061 1068 else:
1062 1069 ui.note(_('exporting patch:\n'))
1063 1070 patch.export(repo, revs, template=opts['output'],
1064 1071 switch_parent=opts['switch_parent'],
1065 1072 opts=patch.diffopts(ui, opts))
1066 1073
1067 1074 def grep(ui, repo, pattern, *pats, **opts):
1068 1075 """search for a pattern in specified files and revisions
1069 1076
1070 1077 Search revisions of files for a regular expression.
1071 1078
1072 1079 This command behaves differently than Unix grep. It only accepts
1073 1080 Python/Perl regexps. It searches repository history, not the
1074 1081 working directory. It always prints the revision number in which
1075 1082 a match appears.
1076 1083
1077 1084 By default, grep only prints output for the first revision of a
1078 1085 file in which it finds a match. To get it to print every revision
1079 1086 that contains a change in match status ("-" for a match that
1080 1087 becomes a non-match, or "+" for a non-match that becomes a match),
1081 1088 use the --all flag.
1082 1089 """
1083 1090 reflags = 0
1084 1091 if opts['ignore_case']:
1085 1092 reflags |= re.I
1086 1093 regexp = re.compile(pattern, reflags)
1087 1094 sep, eol = ':', '\n'
1088 1095 if opts['print0']:
1089 1096 sep = eol = '\0'
1090 1097
1091 1098 fcache = {}
1092 1099 def getfile(fn):
1093 1100 if fn not in fcache:
1094 1101 fcache[fn] = repo.file(fn)
1095 1102 return fcache[fn]
1096 1103
1097 1104 def matchlines(body):
1098 1105 begin = 0
1099 1106 linenum = 0
1100 1107 while True:
1101 1108 match = regexp.search(body, begin)
1102 1109 if not match:
1103 1110 break
1104 1111 mstart, mend = match.span()
1105 1112 linenum += body.count('\n', begin, mstart) + 1
1106 1113 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1107 1114 lend = body.find('\n', mend)
1108 1115 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1109 1116 begin = lend + 1
1110 1117
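matchlines yields one (linenum, colstart, colend, line) tuple per regexp hit, scanning the body once while keeping a running line count. A hypothetical caller sketch showing how those tuples could be turned into grep-style output with the match bracketed:

    def print_matches(fn, tuples, write):
        for linenum, cstart, cend, line in tuples:
            marked = line[:cstart] + '[' + line[cstart:cend] + ']' + line[cend:]
            write("%s:%d:%s\n" % (fn, linenum, marked))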
1111 1118 class linestate(object):
1112 1119 def __init__(self, line, linenum, colstart, colend):
1113 1120 self.line = line
1114 1121 self.linenum = linenum
1115 1122 self.colstart = colstart
1116 1123 self.colend = colend
1117 1124
1118 1125 def __eq__(self, other):
1119 1126 return self.line == other.line
1120 1127
1121 1128 matches = {}
1122 1129 copies = {}
1123 1130 def grepbody(fn, rev, body):
1124 1131 matches[rev].setdefault(fn, [])
1125 1132 m = matches[rev][fn]
1126 1133 for lnum, cstart, cend, line in matchlines(body):
1127 1134 s = linestate(line, lnum, cstart, cend)
1128 1135 m.append(s)
1129 1136
1130 1137 def difflinestates(a, b):
1131 1138 sm = difflib.SequenceMatcher(None, a, b)
1132 1139 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1133 1140 if tag == 'insert':
1134 1141 for i in xrange(blo, bhi):
1135 1142 yield ('+', b[i])
1136 1143 elif tag == 'delete':
1137 1144 for i in xrange(alo, ahi):
1138 1145 yield ('-', a[i])
1139 1146 elif tag == 'replace':
1140 1147 for i in xrange(alo, ahi):
1141 1148 yield ('-', a[i])
1142 1149 for i in xrange(blo, bhi):
1143 1150 yield ('+', b[i])
1144 1151
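difflinestates turns a SequenceMatcher opcode stream into the '+'/'-' markers used by --all: a changed line appears as a removal plus an addition. A worked sketch using plain strings in place of linestate objects (which compare by line text):

    import difflib

    def diff_states(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for item in a[alo:ahi]:
                    yield '-', item
            if tag in ('insert', 'replace'):
                for item in b[blo:bhi]:
                    yield '+', item

    # list(diff_states(['foo', 'bar'], ['foo', 'baz', 'qux']))
    #   -> [('-', 'bar'), ('+', 'baz'), ('+', 'qux')]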
1145 1152 prev = {}
1146 1153 def display(fn, rev, states, prevstates):
1147 1154 found = False
1148 1155 filerevmatches = {}
1149 1156 r = prev.get(fn, -1)
1150 1157 if opts['all']:
1151 1158 iter = difflinestates(states, prevstates)
1152 1159 else:
1153 1160 iter = [('', l) for l in prevstates]
1154 1161 for change, l in iter:
1155 1162 cols = [fn, str(r)]
1156 1163 if opts['line_number']:
1157 1164 cols.append(str(l.linenum))
1158 1165 if opts['all']:
1159 1166 cols.append(change)
1160 1167 if opts['user']:
1161 1168 cols.append(ui.shortuser(get(r)[1]))
1162 1169 if opts['files_with_matches']:
1163 1170 c = (fn, r)
1164 1171 if c in filerevmatches:
1165 1172 continue
1166 1173 filerevmatches[c] = 1
1167 1174 else:
1168 1175 cols.append(l.line)
1169 1176 ui.write(sep.join(cols), eol)
1170 1177 found = True
1171 1178 return found
1172 1179
1173 1180 fstate = {}
1174 1181 skip = {}
1175 1182 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1176 1183 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1177 1184 found = False
1178 1185 follow = opts.get('follow')
1179 1186 for st, rev, fns in changeiter:
1180 1187 if st == 'window':
1181 1188 matches.clear()
1182 1189 elif st == 'add':
1183 1190 mf = repo.changectx(rev).manifest()
1184 1191 matches[rev] = {}
1185 1192 for fn in fns:
1186 1193 if fn in skip:
1187 1194 continue
1188 1195 fstate.setdefault(fn, {})
1189 1196 try:
1190 1197 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1191 1198 if follow:
1192 1199 copied = getfile(fn).renamed(mf[fn])
1193 1200 if copied:
1194 1201 copies.setdefault(rev, {})[fn] = copied[0]
1195 1202 except KeyError:
1196 1203 pass
1197 1204 elif st == 'iter':
1198 1205 states = matches[rev].items()
1199 1206 states.sort()
1200 1207 for fn, m in states:
1201 1208 copy = copies.get(rev, {}).get(fn)
1202 1209 if fn in skip:
1203 1210 if copy:
1204 1211 skip[copy] = True
1205 1212 continue
1206 1213 if fn in prev or fstate[fn]:
1207 1214 r = display(fn, rev, m, fstate[fn])
1208 1215 found = found or r
1209 1216 if r and not opts['all']:
1210 1217 skip[fn] = True
1211 1218 if copy:
1212 1219 skip[copy] = True
1213 1220 fstate[fn] = m
1214 1221 if copy:
1215 1222 fstate[copy] = m
1216 1223 prev[fn] = rev
1217 1224
1218 1225 fstate = fstate.items()
1219 1226 fstate.sort()
1220 1227 for fn, state in fstate:
1221 1228 if fn in skip:
1222 1229 continue
1223 1230 if fn not in copies.get(prev[fn], {}):
1224 1231 found = display(fn, rev, {}, state) or found
1225 1232 return (not found and 1) or 0
1226 1233
1227 1234 def heads(ui, repo, **opts):
1228 1235 """show current repository heads
1229 1236
1230 1237 Show all repository head changesets.
1231 1238
1232 1239 Repository "heads" are changesets that don't have children
1233 1240 changesets. They are where development generally takes place and
1234 1241 are the usual targets for update and merge operations.
1235 1242 """
1236 1243 if opts['rev']:
1237 1244 heads = repo.heads(repo.lookup(opts['rev']))
1238 1245 else:
1239 1246 heads = repo.heads()
1240 1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1241 1248 for n in heads:
1242 1249 displayer.show(changenode=n)
1243 1250
1244 1251 def help_(ui, name=None, with_version=False):
1245 1252 """show help for a command, extension, or list of commands
1246 1253
1247 1254 With no arguments, print a list of commands and short help.
1248 1255
1249 1256 Given a command name, print help for that command.
1250 1257
1251 1258 Given an extension name, print help for that extension, and the
1252 1259 commands it provides."""
1253 1260 option_lists = []
1254 1261
1255 1262 def helpcmd(name):
1256 1263 if with_version:
1257 1264 version_(ui)
1258 1265 ui.write('\n')
1259 1266 aliases, i = findcmd(ui, name)
1260 1267 # synopsis
1261 1268 ui.write("%s\n\n" % i[2])
1262 1269
1263 1270 # description
1264 1271 doc = i[0].__doc__
1265 1272 if not doc:
1266 1273 doc = _("(No help text available)")
1267 1274 if ui.quiet:
1268 1275 doc = doc.splitlines(0)[0]
1269 1276 ui.write("%s\n" % doc.rstrip())
1270 1277
1271 1278 if not ui.quiet:
1272 1279 # aliases
1273 1280 if len(aliases) > 1:
1274 1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1275 1282
1276 1283 # options
1277 1284 if i[1]:
1278 1285 option_lists.append(("options", i[1]))
1279 1286
1280 1287 def helplist(select=None):
1281 1288 h = {}
1282 1289 cmds = {}
1283 1290 for c, e in table.items():
1284 1291 f = c.split("|", 1)[0]
1285 1292 if select and not select(f):
1286 1293 continue
1287 1294 if name == "shortlist" and not f.startswith("^"):
1288 1295 continue
1289 1296 f = f.lstrip("^")
1290 1297 if not ui.debugflag and f.startswith("debug"):
1291 1298 continue
1292 1299 doc = e[0].__doc__
1293 1300 if not doc:
1294 1301 doc = _("(No help text available)")
1295 1302 h[f] = doc.splitlines(0)[0].rstrip()
1296 1303 cmds[f] = c.lstrip("^")
1297 1304
1298 1305 fns = h.keys()
1299 1306 fns.sort()
1300 1307 m = max(map(len, fns))
1301 1308 for f in fns:
1302 1309 if ui.verbose:
1303 1310 commands = cmds[f].replace("|",", ")
1304 1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1305 1312 else:
1306 1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1307 1314
1308 1315 def helptopic(name):
1309 1316 v = None
1310 1317 for i in help.helptable:
1311 1318 l = i.split('|')
1312 1319 if name in l:
1313 1320 v = i
1314 1321 header = l[-1]
1315 1322 if not v:
1316 1323 raise UnknownCommand(name)
1317 1324
1318 1325 # description
1319 1326 doc = help.helptable[v]
1320 1327 if not doc:
1321 1328 doc = _("(No help text available)")
1322 1329 if callable(doc):
1323 1330 doc = doc()
1324 1331
1325 1332 ui.write("%s\n" % header)
1326 1333 ui.write("%s\n" % doc.rstrip())
1327 1334
1328 1335 def helpext(name):
1329 1336 try:
1330 1337 mod = findext(name)
1331 1338 except KeyError:
1332 1339 raise UnknownCommand(name)
1333 1340
1334 1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1335 1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1336 1343 for d in doc[1:]:
1337 1344 ui.write(d, '\n')
1338 1345
1339 1346 ui.status('\n')
1340 1347
1341 1348 try:
1342 1349 ct = mod.cmdtable
1343 1350 except AttributeError:
1344 1351 ui.status(_('no commands defined\n'))
1345 1352 return
1346 1353
1347 1354 if ui.verbose:
1348 1355 ui.status(_('list of commands:\n\n'))
1349 1356 else:
1350 1357 ui.status(_('list of commands (use "hg help -v %s" '
1351 1358 'to show aliases and global options):\n\n') % name)
1352 1359
1353 1360 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1354 1361 helplist(modcmds.has_key)
1355 1362
1356 1363 if name and name != 'shortlist':
1357 1364 i = None
1358 1365 for f in (helpcmd, helptopic, helpext):
1359 1366 try:
1360 1367 f(name)
1361 1368 i = None
1362 1369 break
1363 1370 except UnknownCommand, inst:
1364 1371 i = inst
1365 1372 if i:
1366 1373 raise i
1367 1374
1368 1375 else:
1369 1376 # program name
1370 1377 if ui.verbose or with_version:
1371 1378 version_(ui)
1372 1379 else:
1373 1380 ui.status(_("Mercurial Distributed SCM\n"))
1374 1381 ui.status('\n')
1375 1382
1376 1383 # list of commands
1377 1384 if name == "shortlist":
1378 1385 ui.status(_('basic commands (use "hg help" '
1379 1386 'for the full list or option "-v" for details):\n\n'))
1380 1387 elif ui.verbose:
1381 1388 ui.status(_('list of commands:\n\n'))
1382 1389 else:
1383 1390 ui.status(_('list of commands (use "hg help -v" '
1384 1391 'to show aliases and global options):\n\n'))
1385 1392
1386 1393 helplist()
1387 1394
1388 1395 # global options
1389 1396 if ui.verbose:
1390 1397 option_lists.append(("global options", globalopts))
1391 1398
1392 1399 # list all option lists
1393 1400 opt_output = []
1394 1401 for title, options in option_lists:
1395 1402 opt_output.append(("\n%s:\n" % title, None))
1396 1403 for shortopt, longopt, default, desc in options:
1397 1404 if "DEPRECATED" in desc and not ui.verbose: continue
1398 1405 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 1406 longopt and " --%s" % longopt),
1400 1407 "%s%s" % (desc,
1401 1408 default
1402 1409 and _(" (default: %s)") % default
1403 1410 or "")))
1404 1411
1405 1412 if opt_output:
1406 1413 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1407 1414 for first, second in opt_output:
1408 1415 if second:
1409 1416 ui.write(" %-*s %s\n" % (opts_len, first, second))
1410 1417 else:
1411 1418 ui.write("%s\n" % first)
1412 1419
1413 1420 def identify(ui, repo):
1414 1421 """print information about the working copy
1415 1422
1416 1423 Print a short summary of the current state of the repo.
1417 1424
1418 1425 This summary identifies the repository state using one or two parent
1419 1426 hash identifiers, followed by a "+" if there are uncommitted changes
1420 1427 in the working directory, followed by a list of tags for this revision.
1421 1428 """
1422 1429 parents = [p for p in repo.dirstate.parents() if p != nullid]
1423 1430 if not parents:
1424 1431 ui.write(_("unknown\n"))
1425 1432 return
1426 1433
1427 1434 hexfunc = ui.debugflag and hex or short
1428 1435 modified, added, removed, deleted = repo.status()[:4]
1429 1436 output = ["%s%s" %
1430 1437 ('+'.join([hexfunc(parent) for parent in parents]),
1431 1438 (modified or added or removed or deleted) and "+" or "")]
1432 1439
1433 1440 if not ui.quiet:
1434 1441
1435 1442 branch = util.tolocal(repo.workingctx().branch())
1436 1443 if branch:
1437 1444 output.append("(%s)" % branch)
1438 1445
1439 1446 # multiple tags for a single parent separated by '/'
1440 1447 parenttags = ['/'.join(tags)
1441 1448 for tags in map(repo.nodetags, parents) if tags]
1442 1449 # tags for multiple parents separated by ' + '
1443 1450 if parenttags:
1444 1451 output.append(' + '.join(parenttags))
1445 1452
1446 1453 ui.write("%s\n" % ' '.join(output))
1447 1454
1448 1455 def import_(ui, repo, patch1, *patches, **opts):
1449 1456 """import an ordered set of patches
1450 1457
1451 1458 Import a list of patches and commit them individually.
1452 1459
1453 1460 If there are outstanding changes in the working directory, import
1454 1461 will abort unless given the -f flag.
1455 1462
1456 1463 You can import a patch straight from a mail message. Even patches
1457 1464 as attachments work (body part must be type text/plain or
1458 1465 text/x-patch to be used). From and Subject headers of email
1459 1466 message are used as default committer and commit message. All
1460 1467 text/plain body parts before first diff are added to commit
1461 1468 message.
1462 1469
1463 1470 If imported patch was generated by hg export, user and description
1464 1471 from patch override values from message headers and body. Values
1465 1472 given on command line with -m and -u override these.
1466 1473
1467 1474 To read a patch from standard input, use patch name "-".
1468 1475 """
1469 1476 patches = (patch1,) + patches
1470 1477
1471 1478 if not opts['force']:
1472 1479 bail_if_changed(repo)
1473 1480
1474 1481 d = opts["base"]
1475 1482 strip = opts["strip"]
1476 1483
1477 1484 wlock = repo.wlock()
1478 1485 lock = repo.lock()
1479 1486
1480 1487 for p in patches:
1481 1488 pf = os.path.join(d, p)
1482 1489
1483 1490 if pf == '-':
1484 1491 ui.status(_("applying patch from stdin\n"))
1485 1492 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1486 1493 else:
1487 1494 ui.status(_("applying %s\n") % p)
1488 1495 tmpname, message, user, date = patch.extract(ui, file(pf))
1489 1496
1490 1497 if tmpname is None:
1491 1498 raise util.Abort(_('no diffs found'))
1492 1499
1493 1500 try:
1494 1501 cmdline_message = logmessage(opts)
1495 1502 if cmdline_message:
1496 1503 # pickup the cmdline msg
1497 1504 message = cmdline_message
1498 1505 elif message:
1499 1506 # pickup the patch msg
1500 1507 message = message.strip()
1501 1508 else:
1502 1509 # launch the editor
1503 1510 message = None
1504 1511 ui.debug(_('message:\n%s\n') % message)
1505 1512
1506 1513 files = {}
1507 1514 try:
1508 1515 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1509 1516 files=files)
1510 1517 finally:
1511 1518 files = patch.updatedir(ui, repo, files, wlock=wlock)
1512 1519 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1513 1520 finally:
1514 1521 os.unlink(tmpname)
1515 1522
1516 1523 def incoming(ui, repo, source="default", **opts):
1517 1524 """show new changesets found in source
1518 1525
1519 1526 Show new changesets found in the specified path/URL or the default
1520 1527 pull location. These are the changesets that would be pulled if a pull
1521 1528 was requested.
1522 1529
1523 1530 For a remote repository, using --bundle avoids downloading the changesets
1524 1531 twice if the incoming is followed by a pull.
1525 1532
1526 1533 See pull for valid source format details.
1527 1534 """
1528 1535 source = ui.expandpath(source)
1529 1536 setremoteconfig(ui, opts)
1530 1537
1531 1538 other = hg.repository(ui, source)
1532 1539 incoming = repo.findincoming(other, force=opts["force"])
1533 1540 if not incoming:
1534 1541 try:
1535 1542 os.unlink(opts["bundle"])
1536 1543 except:
1537 1544 pass
1538 1545 ui.status(_("no changes found\n"))
1539 1546 return 1
1540 1547
1541 1548 cleanup = None
1542 1549 try:
1543 1550 fname = opts["bundle"]
1544 1551 if fname or not other.local():
1545 1552 # create a bundle (uncompressed if other repo is not local)
1546 1553 cg = other.changegroup(incoming, "incoming")
1547 1554 bundletype = other.local() and "HG10BZ" or "HG10UN"
1548 1555 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1549 1556 # keep written bundle?
1550 1557 if opts["bundle"]:
1551 1558 cleanup = None
1552 1559 if not other.local():
1553 1560 # use the created uncompressed bundlerepo
1554 1561 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1555 1562
1556 1563 revs = None
1557 1564 if opts['rev']:
1558 1565 revs = [other.lookup(rev) for rev in opts['rev']]
1559 1566 o = other.changelog.nodesbetween(incoming, revs)[0]
1560 1567 if opts['newest_first']:
1561 1568 o.reverse()
1562 1569 displayer = cmdutil.show_changeset(ui, other, opts)
1563 1570 for n in o:
1564 1571 parents = [p for p in other.changelog.parents(n) if p != nullid]
1565 1572 if opts['no_merges'] and len(parents) == 2:
1566 1573 continue
1567 1574 displayer.show(changenode=n)
1568 1575 finally:
1569 1576 if hasattr(other, 'close'):
1570 1577 other.close()
1571 1578 if cleanup:
1572 1579 os.unlink(cleanup)
1573 1580
1574 1581 def init(ui, dest=".", **opts):
1575 1582 """create a new repository in the given directory
1576 1583
1577 1584 Initialize a new repository in the given directory. If the given
1578 1585 directory does not exist, it is created.
1579 1586
1580 1587 If no directory is given, the current directory is used.
1581 1588
1582 1589 It is possible to specify an ssh:// URL as the destination.
1583 1590 Look at the help text for the pull command for important details
1584 1591 about ssh:// URLs.
1585 1592 """
1586 1593 setremoteconfig(ui, opts)
1587 1594 hg.repository(ui, dest, create=1)
1588 1595
1589 1596 def locate(ui, repo, *pats, **opts):
1590 1597 """locate files matching specific patterns
1591 1598
1592 1599 Print all files under Mercurial control whose names match the
1593 1600 given patterns.
1594 1601
1595 1602 This command searches the current directory and its
1596 1603 subdirectories. To search an entire repository, move to the root
1597 1604 of the repository.
1598 1605
1599 1606 If no patterns are given to match, this command prints all file
1600 1607 names.
1601 1608
1602 1609 If you want to feed the output of this command into the "xargs"
1603 1610 command, use the "-0" option to both this command and "xargs".
1604 1611 This will avoid the problem of "xargs" treating single filenames
1605 1612 that contain white space as multiple filenames.
1606 1613 """
1607 1614 end = opts['print0'] and '\0' or '\n'
1608 1615 rev = opts['rev']
1609 1616 if rev:
1610 1617 node = repo.lookup(rev)
1611 1618 else:
1612 1619 node = None
1613 1620
1614 1621 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1615 1622 head='(?:.*/|)'):
1616 1623 if not node and repo.dirstate.state(abs) == '?':
1617 1624 continue
1618 1625 if opts['fullpath']:
1619 1626 ui.write(os.path.join(repo.root, abs), end)
1620 1627 else:
1621 1628 ui.write(((pats and rel) or abs), end)
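# Illustrative usage (assumed example): NUL-terminated output fed to xargs,
# as recommended in the docstring above for names containing whitespace:
#   hg locate -0 | xargs -0 grep -l TODO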
1622 1629
1623 1630 def log(ui, repo, *pats, **opts):
1624 1631 """show revision history of entire repository or files
1625 1632
1626 1633 Print the revision history of the specified files or the entire
1627 1634 project.
1628 1635
1629 1636 File history is shown without following rename or copy history of
1630 1637 files. Use -f/--follow with a file name to follow history across
1631 1638 renames and copies. --follow without a file name will only show
1632 1639 ancestors or descendants of the starting revision. --follow-first
1633 1640 only follows the first parent of merge revisions.
1634 1641
1635 1642 If no revision range is specified, the default is tip:0 unless
1636 1643 --follow is set, in which case the working directory parent is
1637 1644 used as the starting revision.
1638 1645
1639 1646 By default this command outputs: changeset id and hash, tags,
1640 1647 non-trivial parents, user, date and time, and a summary for each
1641 1648 commit. When the -v/--verbose switch is used, the list of changed
1642 1649 files and full commit message is shown.
1643 1650
1644 1651 NOTE: log -p may generate unexpected diff output for merge
1645 1652 changesets, as it will compare the merge changeset against its
1646 1653 first parent only. Also, the files: list will only reflect files
1647 1654 that are different from BOTH parents.
1648 1655
1649 1656 """
1650 1657
1651 1658 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1652 1659 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1653 1660
1654 1661 if opts['limit']:
1655 1662 try:
1656 1663 limit = int(opts['limit'])
1657 1664 except ValueError:
1658 1665 raise util.Abort(_('limit must be a positive integer'))
1659 1666 if limit <= 0: raise util.Abort(_('limit must be positive'))
1660 1667 else:
1661 1668 limit = sys.maxint
1662 1669 count = 0
1663 1670
1664 1671 if opts['copies'] and opts['rev']:
1665 1672 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1666 1673 else:
1667 1674 endrev = repo.changelog.count()
1668 1675 rcache = {}
1669 1676 ncache = {}
1670 1677 dcache = []
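# (descriptive note, added for clarity) rcache maps a filename to
# {linkrev: rename info}, ncache maps a filename to {filenode: rename info},
# and dcache holds the manifest node of the last changeset inspected together
# with its readdelta(), so consecutive log entries can reuse it.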
1671 1678 def getrenamed(fn, rev, man):
1672 1679 '''looks up all renames for a file (up to endrev) the first
1673 1680 time the file is given. It indexes on the changerev and only
1674 1681 parses the manifest if linkrev != changerev.
1675 1682 Returns rename info for fn at changerev rev.'''
1676 1683 if fn not in rcache:
1677 1684 rcache[fn] = {}
1678 1685 ncache[fn] = {}
1679 1686 fl = repo.file(fn)
1680 1687 for i in xrange(fl.count()):
1681 1688 node = fl.node(i)
1682 1689 lr = fl.linkrev(node)
1683 1690 renamed = fl.renamed(node)
1684 1691 rcache[fn][lr] = renamed
1685 1692 if renamed:
1686 1693 ncache[fn][node] = renamed
1687 1694 if lr >= endrev:
1688 1695 break
1689 1696 if rev in rcache[fn]:
1690 1697 return rcache[fn][rev]
1691 1698 mr = repo.manifest.rev(man)
1692 1699 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1693 1700 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1694 1701 if not dcache or dcache[0] != man:
1695 1702 dcache[:] = [man, repo.manifest.readdelta(man)]
1696 1703 if fn in dcache[1]:
1697 1704 return ncache[fn].get(dcache[1][fn])
1698 1705 return None
1699 1706
1700 1707 df = False
1701 1708 if opts["date"]:
1702 1709 df = util.matchdate(opts["date"])
1703 1710
1704 1711 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1705 1712 for st, rev, fns in changeiter:
1706 1713 if st == 'add':
1707 1714 changenode = repo.changelog.node(rev)
1708 1715 parents = [p for p in repo.changelog.parentrevs(rev)
1709 1716 if p != nullrev]
1710 1717 if opts['no_merges'] and len(parents) == 2:
1711 1718 continue
1712 1719 if opts['only_merges'] and len(parents) != 2:
1713 1720 continue
1714 1721
1715 1722 if df:
1716 1723 changes = get(rev)
1717 1724 if not df(changes[2][0]):
1718 1725 continue
1719 1726
1720 1727 if opts['keyword']:
1721 1728 changes = get(rev)
1722 1729 miss = 0
1723 1730 for k in [kw.lower() for kw in opts['keyword']]:
1724 1731 if not (k in changes[1].lower() or
1725 1732 k in changes[4].lower() or
1726 1733 k in " ".join(changes[3][:20]).lower()):
1727 1734 miss = 1
1728 1735 break
1729 1736 if miss:
1730 1737 continue
1731 1738
1732 1739 copies = []
1733 1740 if opts.get('copies') and rev:
1734 1741 mf = get(rev)[0]
1735 1742 for fn in get(rev)[3]:
1736 1743 rename = getrenamed(fn, rev, mf)
1737 1744 if rename:
1738 1745 copies.append((fn, rename[0]))
1739 1746 displayer.show(rev, changenode, copies=copies)
1740 1747 elif st == 'iter':
1741 1748 if count == limit: break
1742 1749 if displayer.flush(rev):
1743 1750 count += 1
1744 1751
1745 1752 def manifest(ui, repo, rev=None):
1746 1753 """output the current or given revision of the project manifest
1747 1754
1748 1755 Print a list of version controlled files for the given revision.
1749 1756 If no revision is given, the parent of the working directory is used,
1750 1757 or tip if no revision is checked out.
1751 1758
1752 1759 The manifest is the list of files being version controlled.
1754 1761
1755 1762 With -v flag, print file permissions. With --debug flag, print
1756 1763 file revision hashes.
1757 1764 """
1758 1765
1759 1766 m = repo.changectx(rev).manifest()
1760 1767 files = m.keys()
1761 1768 files.sort()
1762 1769
1763 1770 for f in files:
1764 1771 if ui.debugflag:
1765 1772 ui.write("%40s " % hex(m[f]))
1766 1773 if ui.verbose:
1767 1774 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1768 1775 ui.write("%s\n" % f)
1769 1776
1770 1777 def merge(ui, repo, node=None, force=None):
1771 1778 """merge working directory with another revision
1772 1779
1773 1780 Merge the contents of the current working directory and the
1774 1781 requested revision. Files that changed between either parent are
1775 1782 marked as changed for the next commit and a commit must be
1776 1783 performed before any further updates are allowed.
1777 1784
1778 1785 If no revision is specified, and the working directory's parent is a
1779 1786 head revision with exactly one other head in the repository, that
1780 1787 other head is merged with by default. Otherwise, an explicit
1781 1788 revision to merge with must be provided.
1782 1789 """
1783 1790
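# Illustrative behaviour (assumed example): with exactly two heads and the
# working directory parent at one of them, "hg merge" picks the other head;
# with three or more heads, an explicit revision is required, e.g.
#   hg merge <rev>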
1784 1791 if not node:
1785 1792 heads = repo.heads()
1786 1793 if len(heads) > 2:
1787 1794 raise util.Abort(_('repo has %d heads - '
1788 1795 'please merge with an explicit rev') %
1789 1796 len(heads))
1790 1797 if len(heads) == 1:
1791 1798 raise util.Abort(_('there is nothing to merge - '
1792 1799 'use "hg update" instead'))
1793 1800 parent = repo.dirstate.parents()[0]
1794 1801 if parent not in heads:
1795 1802 raise util.Abort(_('working dir not at a head rev - '
1796 1803 'use "hg update" or merge with an explicit rev'))
1797 1804 node = parent == heads[0] and heads[-1] or heads[0]
1798 1805 return hg.merge(repo, node, force=force)
1799 1806
1800 1807 def outgoing(ui, repo, dest=None, **opts):
1801 1808 """show changesets not found in destination
1802 1809
1803 1810 Show changesets not found in the specified destination repository or
1804 1811 the default push location. These are the changesets that would be pushed
1805 1812 if a push was requested.
1806 1813
1807 1814 See pull for valid destination format details.
1808 1815 """
1809 1816 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1810 1817 setremoteconfig(ui, opts)
1811 1818 revs = None
1812 1819 if opts['rev']:
1813 1820 revs = [repo.lookup(rev) for rev in opts['rev']]
1814 1821
1815 1822 other = hg.repository(ui, dest)
1816 1823 o = repo.findoutgoing(other, force=opts['force'])
1817 1824 if not o:
1818 1825 ui.status(_("no changes found\n"))
1819 1826 return 1
1820 1827 o = repo.changelog.nodesbetween(o, revs)[0]
1821 1828 if opts['newest_first']:
1822 1829 o.reverse()
1823 1830 displayer = cmdutil.show_changeset(ui, repo, opts)
1824 1831 for n in o:
1825 1832 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1826 1833 if opts['no_merges'] and len(parents) == 2:
1827 1834 continue
1828 1835 displayer.show(changenode=n)
1829 1836
1830 1837 def parents(ui, repo, file_=None, **opts):
1831 1838 """show the parents of the working dir or revision
1832 1839
1833 1840 Print the working directory's parent revisions. If a revision is
1834 1841 given via -r/--rev, the parents of that revision are printed instead.
1834 1841 """
1835 1842 rev = opts.get('rev')
1836 1843 if rev:
1837 1844 if file_:
1838 1845 ctx = repo.filectx(file_, changeid=rev)
1839 1846 else:
1840 1847 ctx = repo.changectx(rev)
1841 1848 p = [cp.node() for cp in ctx.parents()]
1842 1849 else:
1843 1850 p = repo.dirstate.parents()
1844 1851
1845 1852 displayer = cmdutil.show_changeset(ui, repo, opts)
1846 1853 for n in p:
1847 1854 if n != nullid:
1848 1855 displayer.show(changenode=n)
1849 1856
1850 1857 def paths(ui, repo, search=None):
1851 1858 """show definition of symbolic path names
1852 1859
1853 1860 Show definition of symbolic path name NAME. If no name is given, show
1854 1861 definition of available names.
1855 1862
1856 1863 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1857 1864 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1858 1865 """
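# Illustrative [paths] section (assumed example values) that this command
# would report, typically taken from $HOME/.hgrc or .hg/hgrc:
#   [paths]
#   default = http://example.com/hg/project
#   default-push = ssh://user@example.com//srv/hg/project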
1859 1866 if search:
1860 1867 for name, path in ui.configitems("paths"):
1861 1868 if name == search:
1862 1869 ui.write("%s\n" % path)
1863 1870 return
1864 1871 ui.warn(_("not found!\n"))
1865 1872 return 1
1866 1873 else:
1867 1874 for name, path in ui.configitems("paths"):
1868 1875 ui.write("%s = %s\n" % (name, path))
1869 1876
1870 1877 def postincoming(ui, repo, modheads, optupdate):
1871 1878 if modheads == 0:
1872 1879 return
1873 1880 if optupdate:
1874 1881 if modheads == 1:
1875 1882 return hg.update(repo, repo.changelog.tip()) # update
1876 1883 else:
1877 1884 ui.status(_("not updating, since new heads added\n"))
1878 1885 if modheads > 1:
1879 1886 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1880 1887 else:
1881 1888 ui.status(_("(run 'hg update' to get a working copy)\n"))
1882 1889
1883 1890 def pull(ui, repo, source="default", **opts):
1884 1891 """pull changes from the specified source
1885 1892
1886 1893 Pull changes from a remote repository to a local one.
1887 1894
1888 1895 This finds all changes from the repository at the specified path
1889 1896 or URL and adds them to the local repository. By default, this
1890 1897 does not update the copy of the project in the working directory.
1891 1898
1892 1899 Valid URLs are of the form:
1893 1900
1894 1901 local/filesystem/path (or file://local/filesystem/path)
1895 1902 http://[user@]host[:port]/[path]
1896 1903 https://[user@]host[:port]/[path]
1897 1904 ssh://[user@]host[:port]/[path]
1898 1905 static-http://host[:port]/[path]
1899 1906
1900 1907 Paths in the local filesystem can either point to Mercurial
1901 1908 repositories or to bundle files (as created by 'hg bundle' or
1902 1909 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1903 1910 allows access to a Mercurial repository where you simply use a web
1904 1911 server to publish the .hg directory as static content.
1905 1912
1906 1913 Some notes about using SSH with Mercurial:
1907 1914 - SSH requires an accessible shell account on the destination machine
1908 1915 and a copy of hg in the remote path, or one specified with the --remotecmd option.
1909 1916 - path is relative to the remote user's home directory by default.
1910 1917 Use an extra slash at the start of a path to specify an absolute path:
1911 1918 ssh://example.com//tmp/repository
1912 1919 - Mercurial doesn't use its own compression via SSH; the right thing
1913 1920 to do is to configure it in your ~/.ssh/config, e.g.:
1914 1921 Host *.mylocalnetwork.example.com
1915 1922 Compression no
1916 1923 Host *
1917 1924 Compression yes
1918 1925 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1919 1926 with the --ssh command line option.
1920 1927 """
1921 1928 source = ui.expandpath(source)
1922 1929 setremoteconfig(ui, opts)
1923 1930
1924 1931 other = hg.repository(ui, source)
1925 1932 ui.status(_('pulling from %s\n') % (source))
1926 1933 revs = None
1927 1934 if opts['rev']:
1928 1935 if 'lookup' in other.capabilities:
1929 1936 revs = [other.lookup(rev) for rev in opts['rev']]
1930 1937 else:
1931 1938 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1932 1939 raise util.Abort(error)
1933 1940 modheads = repo.pull(other, heads=revs, force=opts['force'])
1934 1941 return postincoming(ui, repo, modheads, opts['update'])
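# Illustrative usage (assumed example): pull only up to a given revision over
# ssh, using an absolute remote path (note the double slash described above):
#   hg pull -r <rev> ssh://user@example.com//srv/hg/project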
1935 1942
1936 1943 def push(ui, repo, dest=None, **opts):
1937 1944 """push changes to the specified destination
1938 1945
1939 1946 Push changes from the local repository to the given destination.
1940 1947
1941 1948 This is the symmetrical operation for pull. It helps to move
1942 1949 changes from the current repository to a different one. If the
1943 1950 destination is local this is identical to a pull in that directory
1944 1951 from the current one.
1945 1952
1946 1953 By default, push will refuse to run if it detects the result would
1947 1954 increase the number of remote heads. This generally indicates that
1948 1955 the client has forgotten to sync and merge before pushing.
1949 1956
1950 1957 Valid URLs are of the form:
1951 1958
1952 1959 local/filesystem/path (or file://local/filesystem/path)
1953 1960 ssh://[user@]host[:port]/[path]
1954 1961 http://[user@]host[:port]/[path]
1955 1962 https://[user@]host[:port]/[path]
1956 1963
1957 1964 Look at the help text for the pull command for important details
1958 1965 about ssh:// URLs.
1959 1966
1960 1967 Pushing to http:// and https:// URLs is only possible if this
1961 1968 feature is explicitly enabled on the remote Mercurial server.
1962 1969 """
1963 1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1964 1971 setremoteconfig(ui, opts)
1965 1972
1966 1973 other = hg.repository(ui, dest)
1967 1974 ui.status('pushing to %s\n' % (dest))
1968 1975 revs = None
1969 1976 if opts['rev']:
1970 1977 revs = [repo.lookup(rev) for rev in opts['rev']]
1971 1978 r = repo.push(other, opts['force'], revs=revs)
1972 1979 return r == 0
1973 1980
1974 1981 def rawcommit(ui, repo, *pats, **opts):
1975 1982 """raw commit interface (DEPRECATED)
1976 1983
1977 1984 (DEPRECATED)
1978 1985 Lowlevel commit, for use in helper scripts.
1979 1986
1980 1987 This command is not intended to be used by normal users, as it is
1981 1988 primarily useful for importing from other SCMs.
1982 1989
1983 1990 This command is now deprecated and will be removed in a future
1984 1991 release; please use debugsetparents and commit instead.
1985 1992 """
1986 1993
1987 1994 ui.warn(_("(the rawcommit command is deprecated)\n"))
1988 1995
1989 1996 message = logmessage(opts)
1990 1997
1991 1998 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1992 1999 if opts['files']:
1993 2000 files += open(opts['files']).read().splitlines()
1994 2001
1995 2002 parents = [repo.lookup(p) for p in opts['parent']]
1996 2003
1997 2004 try:
1998 2005 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1999 2006 except ValueError, inst:
2000 2007 raise util.Abort(str(inst))
2001 2008
2002 2009 def recover(ui, repo):
2003 2010 """roll back an interrupted transaction
2004 2011
2005 2012 Recover from an interrupted commit or pull.
2006 2013
2007 2014 This command tries to fix the repository status after an interrupted
2008 2015 operation. It should only be necessary when Mercurial suggests it.
2009 2016 """
2010 2017 if repo.recover():
2011 2018 return hg.verify(repo)
2012 2019 return 1
2013 2020
2014 2021 def remove(ui, repo, *pats, **opts):
2015 2022 """remove the specified files on the next commit
2016 2023
2017 2024 Schedule the indicated files for removal from the repository.
2018 2025
2019 2026 This only removes files from the current branch, not from the
2020 2027 entire project history. If the files still exist in the working
2021 2028 directory, they will be deleted from it. If invoked with --after,
2022 2029 files that have been manually deleted are marked as removed.
2023 2030
2024 2031 This command schedules the files to be removed at the next commit.
2025 2032 To undo a remove before that, see hg revert.
2026 2033
2027 2034 Modified files and added files are not removed by default. To
2028 2035 remove them, use the -f/--force option.
2029 2036 """
2030 2037 names = []
2031 2038 if not opts['after'] and not pats:
2032 2039 raise util.Abort(_('no files specified'))
2033 2040 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2034 2041 exact = dict.fromkeys(files)
2035 2042 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2036 2043 modified, added, removed, deleted, unknown = mardu
2037 2044 remove, forget = [], []
2038 2045 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2039 2046 reason = None
2040 2047 if abs not in deleted and opts['after']:
2041 2048 reason = _('is still present')
2042 2049 elif abs in modified and not opts['force']:
2043 2050 reason = _('is modified (use -f to force removal)')
2044 2051 elif abs in added:
2045 2052 if opts['force']:
2046 2053 forget.append(abs)
2047 2054 continue
2048 2055 reason = _('has been marked for add (use -f to force removal)')
2049 2056 elif abs in unknown:
2050 2057 reason = _('is not managed')
2051 2058 elif abs in removed:
2052 2059 continue
2053 2060 if reason:
2054 2061 if exact:
2055 2062 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2056 2063 else:
2057 2064 if ui.verbose or not exact:
2058 2065 ui.status(_('removing %s\n') % rel)
2059 2066 remove.append(abs)
2060 2067 repo.forget(forget)
2061 2068 repo.remove(remove, unlink=not opts['after'])
2062 2069
2063 2070 def rename(ui, repo, *pats, **opts):
2064 2071 """rename files; equivalent of copy + remove
2065 2072
2066 2073 Mark dest as copies of sources; mark sources for deletion. If
2067 2074 dest is a directory, copies are put in that directory. If dest is
2068 2075 a file, there can only be one source.
2069 2076
2070 2077 By default, this command copies the contents of files as they
2071 2078 stand in the working directory. If invoked with --after, the
2072 2079 operation is recorded, but no copying is performed.
2073 2080
2074 2081 This command takes effect in the next commit. To undo a rename
2075 2082 before that, see hg revert.
2076 2083 """
2077 2084 wlock = repo.wlock(0)
2078 2085 errs, copied = docopy(ui, repo, pats, opts, wlock)
2079 2086 names = []
2080 2087 for abs, rel, exact in copied:
2081 2088 if ui.verbose or not exact:
2082 2089 ui.status(_('removing %s\n') % rel)
2083 2090 names.append(abs)
2084 2091 if not opts.get('dry_run'):
2085 2092 repo.remove(names, True, wlock)
2086 2093 return errs
2087 2094
2088 2095 def revert(ui, repo, *pats, **opts):
2089 2096 """revert files or dirs to their states as of some revision
2090 2097
2091 2098 With no revision specified, revert the named files or directories
2092 2099 to the contents they had in the parent of the working directory.
2093 2100 This restores the contents of the affected files to an unmodified
2094 2101 state and unschedules adds, removes, copies, and renames. If the
2095 2102 working directory has two parents, you must explicitly specify the
2096 2103 revision to revert to.
2097 2104
2098 2105 Modified files are saved with a .orig suffix before reverting.
2099 2106 To disable these backups, use --no-backup.
2100 2107
2101 2108 Using the -r option, revert the given files or directories to their
2102 2109 contents as of a specific revision. This can be helpful to "roll
2103 2110 back" some or all of a change that should not have been committed.
2104 2111
2105 2112 Revert modifies the working directory. It does not commit any
2106 2113 changes, or change the parent of the working directory. If you
2107 2114 revert to a revision other than the parent of the working
2108 2115 directory, the reverted files will thus appear modified
2109 2116 afterwards.
2110 2117
2111 2118 If a file has been deleted, it is recreated. If the executable
2112 2119 mode of a file was changed, it is reset.
2113 2120
2114 2121 If names are given, all files matching the names are reverted.
2115 2122
2116 2123 If no arguments are given, no files are reverted.
2117 2124 """
2118 2125
2119 2126 if opts["date"]:
2120 2127 if opts["rev"]:
2121 2128 raise util.Abort(_("you can't specify a revision and a date"))
2122 2129 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2123 2130
2124 2131 if not pats and not opts['all']:
2125 2132 raise util.Abort(_('no files or directories specified; '
2126 2133 'use --all to revert the whole repo'))
2127 2134
2128 2135 parent, p2 = repo.dirstate.parents()
2129 2136 if not opts['rev'] and p2 != nullid:
2130 2137 raise util.Abort(_('uncommitted merge - please provide a '
2131 2138 'specific revision'))
2132 2139 ctx = repo.changectx(opts['rev'])
2133 2140 node = ctx.node()
2134 2141 mf = ctx.manifest()
2135 2142 if node == parent:
2136 2143 pmf = mf
2137 2144 else:
2138 2145 pmf = None
2139 2146
2140 2147 wlock = repo.wlock()
2141 2148
2142 2149 # need all matching names in dirstate and manifest of target rev,
2143 2150 # so have to walk both. do not print errors if files exist in one
2144 2151 # but not other.
2145 2152
2146 2153 names = {}
2147 2154 target_only = {}
2148 2155
2149 2156 # walk dirstate.
2150 2157
2151 2158 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2152 2159 badmatch=mf.has_key):
2153 2160 names[abs] = (rel, exact)
2154 2161 if src == 'b':
2155 2162 target_only[abs] = True
2156 2163
2157 2164 # walk target manifest.
2158 2165
2159 2166 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2160 2167 badmatch=names.has_key):
2161 2168 if abs in names: continue
2162 2169 names[abs] = (rel, exact)
2163 2170 target_only[abs] = True
2164 2171
2165 2172 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2166 2173 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2167 2174
2168 2175 revert = ([], _('reverting %s\n'))
2169 2176 add = ([], _('adding %s\n'))
2170 2177 remove = ([], _('removing %s\n'))
2171 2178 forget = ([], _('forgetting %s\n'))
2172 2179 undelete = ([], _('undeleting %s\n'))
2173 2180 update = {}
2174 2181
2175 2182 disptable = (
2176 2183 # dispatch table:
2177 2184 # file state
2178 2185 # action if in target manifest
2179 2186 # action if not in target manifest
2180 2187 # make backup if in target manifest
2181 2188 # make backup if not in target manifest
2182 2189 (modified, revert, remove, True, True),
2183 2190 (added, revert, forget, True, False),
2184 2191 (removed, undelete, None, False, False),
2185 2192 (deleted, revert, remove, False, False),
2186 2193 (unknown, add, None, True, False),
2187 2194 (target_only, add, None, False, False),
2188 2195 )
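# (descriptive note, added for clarity) reading the first row above: a file
# that is locally modified and still present in the target manifest is
# reverted, with a .orig backup; if it is missing from the target manifest
# it is removed instead, also after a backup.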
2189 2196
2190 2197 entries = names.items()
2191 2198 entries.sort()
2192 2199
2193 2200 for abs, (rel, exact) in entries:
2194 2201 mfentry = mf.get(abs)
2195 2202 def handle(xlist, dobackup):
2196 2203 xlist[0].append(abs)
2197 2204 update[abs] = 1
2198 2205 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2199 2206 bakname = "%s.orig" % rel
2200 2207 ui.note(_('saving current version of %s as %s\n') %
2201 2208 (rel, bakname))
2202 2209 if not opts.get('dry_run'):
2203 2210 util.copyfile(rel, bakname)
2204 2211 if ui.verbose or not exact:
2205 2212 ui.status(xlist[1] % rel)
2206 2213 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2207 2214 if abs not in table: continue
2208 2215 # file has changed in dirstate
2209 2216 if mfentry:
2210 2217 handle(hitlist, backuphit)
2211 2218 elif misslist is not None:
2212 2219 handle(misslist, backupmiss)
2213 2220 else:
2214 2221 if exact: ui.warn(_('file not managed: %s\n') % rel)
2215 2222 break
2216 2223 else:
2217 2224 # file has not changed in dirstate
2218 2225 if node == parent:
2219 2226 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2220 2227 continue
2221 2228 if pmf is None:
2222 2229 # only need parent manifest in this unlikely case,
2223 2230 # so do not read by default
2224 2231 pmf = repo.changectx(parent).manifest()
2225 2232 if abs in pmf:
2226 2233 if mfentry:
2227 2234 # if version of file is same in parent and target
2228 2235 # manifests, do nothing
2229 2236 if pmf[abs] != mfentry:
2230 2237 handle(revert, False)
2231 2238 else:
2232 2239 handle(remove, False)
2233 2240
2234 2241 if not opts.get('dry_run'):
2235 2242 repo.dirstate.forget(forget[0])
2236 2243 r = hg.revert(repo, node, update.has_key, wlock)
2237 2244 repo.dirstate.update(add[0], 'a')
2238 2245 repo.dirstate.update(undelete[0], 'n')
2239 2246 repo.dirstate.update(remove[0], 'r')
2240 2247 return r
2241 2248
2242 2249 def rollback(ui, repo):
2243 2250 """roll back the last transaction in this repository
2244 2251
2245 2252 Roll back the last transaction in this repository, restoring the
2246 2253 project to its state prior to the transaction.
2247 2254
2248 2255 Transactions are used to encapsulate the effects of all commands
2249 2256 that create new changesets or propagate existing changesets into a
2250 2257 repository. For example, the following commands are transactional,
2251 2258 and their effects can be rolled back:
2252 2259
2253 2260 commit
2254 2261 import
2255 2262 pull
2256 2263 push (with this repository as destination)
2257 2264 unbundle
2258 2265
2259 2266 This command should be used with care. There is only one level of
2260 2267 rollback, and there is no way to undo a rollback.
2261 2268
2262 2269 This command is not intended for use on public repositories. Once
2263 2270 changes are visible for pull by other users, rolling a transaction
2264 2271 back locally is ineffective (someone else may already have pulled
2265 2272 the changes). Furthermore, a race is possible with readers of the
2266 2273 repository; for example an in-progress pull from the repository
2267 2274 may fail if a rollback is performed.
2268 2275 """
2269 2276 repo.rollback()
2270 2277
2271 2278 def root(ui, repo):
2272 2279 """print the root (top) of the current working dir
2273 2280
2274 2281 Print the root directory of the current repository.
2275 2282 """
2276 2283 ui.write(repo.root + "\n")
2277 2284
2278 2285 def serve(ui, repo, **opts):
2279 2286 """export the repository via HTTP
2280 2287
2281 2288 Start a local HTTP repository browser and pull server.
2282 2289
2283 2290 By default, the server logs accesses to stdout and errors to
2284 2291 stderr. Use the "-A" and "-E" options to log to files.
2285 2292 """
2286 2293
2287 2294 if opts["stdio"]:
2288 2295 if repo is None:
2289 2296 raise hg.RepoError(_("There is no Mercurial repository here"
2290 2297 " (.hg not found)"))
2291 2298 s = sshserver.sshserver(ui, repo)
2292 2299 s.serve_forever()
2293 2300
2294 2301 optlist = ("name templates style address port ipv6"
2295 2302 " accesslog errorlog webdir_conf")
2296 2303 for o in optlist.split():
2297 2304 if opts[o]:
2298 2305 ui.setconfig("web", o, str(opts[o]))
2299 2306
2300 2307 if repo is None and not ui.config("web", "webdir_conf"):
2301 2308 raise hg.RepoError(_("There is no Mercurial repository here"
2302 2309 " (.hg not found)"))
2303 2310
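# (descriptive note, added for clarity) daemon mode works as a two-step
# handshake: the parent re-spawns itself with --daemon-pipefds and blocks on
# the read end of the pipe, while the child, once ready to serve, writes a
# byte back, detaches from the terminal, and keeps running.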
2304 2311 if opts['daemon'] and not opts['daemon_pipefds']:
2305 2312 rfd, wfd = os.pipe()
2306 2313 args = sys.argv[:]
2307 2314 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2308 2315 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2309 2316 args[0], args)
2310 2317 os.close(wfd)
2311 2318 os.read(rfd, 1)
2312 2319 os._exit(0)
2313 2320
2314 2321 httpd = hgweb.server.create_server(ui, repo)
2315 2322
2316 2323 if ui.verbose:
2317 2324 if httpd.port != 80:
2318 2325 ui.status(_('listening at http://%s:%d/\n') %
2319 2326 (httpd.addr, httpd.port))
2320 2327 else:
2321 2328 ui.status(_('listening at http://%s/\n') % httpd.addr)
2322 2329
2323 2330 if opts['pid_file']:
2324 2331 fp = open(opts['pid_file'], 'w')
2325 2332 fp.write(str(os.getpid()) + '\n')
2326 2333 fp.close()
2327 2334
2328 2335 if opts['daemon_pipefds']:
2329 2336 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2330 2337 os.close(rfd)
2331 2338 os.write(wfd, 'y')
2332 2339 os.close(wfd)
2333 2340 sys.stdout.flush()
2334 2341 sys.stderr.flush()
2335 2342 fd = os.open(util.nulldev, os.O_RDWR)
2336 2343 if fd != 0: os.dup2(fd, 0)
2337 2344 if fd != 1: os.dup2(fd, 1)
2338 2345 if fd != 2: os.dup2(fd, 2)
2339 2346 if fd not in (0, 1, 2): os.close(fd)
2340 2347
2341 2348 httpd.serve_forever()
2342 2349
2343 2350 def status(ui, repo, *pats, **opts):
2344 2351 """show changed files in the working directory
2345 2352
2346 2353 Show status of files in the repository. If names are given, only
2347 2354 files that match are shown. Files that are clean or ignored are
2348 2355 not listed unless -c (clean), -i (ignored) or -A is given.
2349 2356
2350 2357 NOTE: status may appear to disagree with diff if permissions have
2351 2358 changed or a merge has occurred. The standard diff format does not
2352 2359 report permission changes and diff only reports changes relative
2353 2360 to one merge parent.
2354 2361
2355 2362 If one revision is given, it is used as the base revision.
2356 2363 If two revisions are given, the difference between them is shown.
2357 2364
2358 2365 The codes used to show the status of files are:
2359 2366 M = modified
2360 2367 A = added
2361 2368 R = removed
2362 2369 C = clean
2363 2370 ! = deleted, but still tracked
2364 2371 ? = not tracked
2365 2372 I = ignored (not shown by default)
2366 2373 = the previous added file was copied from here
2367 2374 """
2368 2375
2369 2376 all = opts['all']
2370 2377 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2371 2378
2372 2379 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2373 2380 cwd = (pats and repo.getcwd()) or ''
2374 2381 modified, added, removed, deleted, unknown, ignored, clean = [
2375 2382 n for n in repo.status(node1=node1, node2=node2, files=files,
2376 2383 match=matchfn,
2377 2384 list_ignored=all or opts['ignored'],
2378 2385 list_clean=all or opts['clean'])]
2379 2386
2380 2387 changetypes = (('modified', 'M', modified),
2381 2388 ('added', 'A', added),
2382 2389 ('removed', 'R', removed),
2383 2390 ('deleted', '!', deleted),
2384 2391 ('unknown', '?', unknown),
2385 2392 ('ignored', 'I', ignored))
2386 2393
2387 2394 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2388 2395
2389 2396 end = opts['print0'] and '\0' or '\n'
2390 2397
2391 2398 for opt, char, changes in ([ct for ct in explicit_changetypes
2392 2399 if all or opts[ct[0]]]
2393 2400 or changetypes):
2394 2401 if opts['no_status']:
2395 2402 format = "%%s%s" % end
2396 2403 else:
2397 2404 format = "%s %%s%s" % (char, end)
2398 2405
2399 2406 for f in changes:
2400 2407 ui.write(format % util.pathto(cwd, f))
2401 2408 if ((all or opts.get('copies')) and not opts.get('no_status')):
2402 2409 copied = repo.dirstate.copied(f)
2403 2410 if copied:
2404 2411 ui.write(' %s%s' % (util.pathto(cwd, copied), end))
2405 2412
2406 2413 def tag(ui, repo, name, rev_=None, **opts):
2407 2414 """add a tag for the current or given revision
2408 2415
2409 2416 Name a particular revision using <name>.
2410 2417
2411 2418 Tags are used to name particular revisions of the repository and are
2412 2419 very useful to compare different revisions, to go back to significant
2413 2420 earlier versions or to mark branch points as releases, etc.
2414 2421
2415 2422 If no revision is given, the parent of the working directory is used,
2416 2423 or tip if no revision is checked out.
2417 2424
2418 2425 To facilitate version control, distribution, and merging of tags,
2419 2426 they are stored as a file named ".hgtags" which is managed
2420 2427 similarly to other project files and can be hand-edited if
2421 2428 necessary. The file '.hg/localtags' is used for local tags (not
2422 2429 shared among repositories).
2423 2430 """
2424 2431 if name in ['tip', '.', 'null']:
2425 2432 raise util.Abort(_("the name '%s' is reserved") % name)
2426 2433 if rev_ is not None:
2427 2434 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2428 2435 "please use 'hg tag [-r REV] NAME' instead\n"))
2429 2436 if opts['rev']:
2430 2437 raise util.Abort(_("use only one form to specify the revision"))
2431 2438 if opts['rev']:
2432 2439 rev_ = opts['rev']
2433 2440 if not rev_ and repo.dirstate.parents()[1] != nullid:
2434 2441 raise util.Abort(_('uncommitted merge - please provide a '
2435 2442 'specific revision'))
2436 2443 r = repo.changectx(rev_).node()
2437 2444
2438 2445 message = opts['message']
2439 2446 if not message:
2440 2447 message = _('Added tag %s for changeset %s') % (name, short(r))
2441 2448
2442 2449 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
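# Illustrative usage (assumed example): tag the working directory's parent,
# or a specific older changeset:
#   hg tag v1.0
#   hg tag -r 1 v0.9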
2443 2450
2444 2451 def tags(ui, repo):
2445 2452 """list repository tags
2446 2453
2447 2454 List the repository tags.
2448 2455
2449 2456 This lists both regular and local tags.
2450 2457 """
2451 2458
2452 2459 l = repo.tagslist()
2453 2460 l.reverse()
2454 2461 hexfunc = ui.debugflag and hex or short
2455 2462 for t, n in l:
2456 2463 try:
2457 2464 hn = hexfunc(n)
2458 2465 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2459 2466 except revlog.LookupError:
2460 2467 r = " ?:%s" % hn
2461 2468 if ui.quiet:
2462 2469 ui.write("%s\n" % t)
2463 2470 else:
2464 2471 t = util.localsub(t, 30)
2465 2472 t += " " * (30 - util.locallen(t))
2466 2473 ui.write("%s %s\n" % (t, r))
2467 2474
2468 2475 def tip(ui, repo, **opts):
2469 2476 """show the tip revision
2470 2477
2471 2478 Show the tip revision.
2472 2479 """
2473 2480 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2474 2481
2475 2482 def unbundle(ui, repo, fname, **opts):
2476 2483 """apply a changegroup file
2477 2484
2478 2485 Apply a compressed changegroup file generated by the bundle
2479 2486 command.
2480 2487 """
2481 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2488 if os.path.exists(fname):
2489 f = open(fname)
2490 else:
2491 f = urllib.urlopen(fname)
2492 gen = changegroup.readbundle(f, fname)
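# (descriptive note, added for clarity) local bundle files are opened
# directly, so urllib is only used when fname refers to a remote URL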
2482 2493 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2483 2494 return postincoming(ui, repo, modheads, opts['update'])
2484 2495
2485 2496 def update(ui, repo, node=None, clean=False, date=None):
2486 2497 """update working directory
2487 2498
2488 2499 Update the working directory to the specified revision.
2489 2500
2490 2501 If there are no outstanding changes in the working directory and
2491 2502 there is a linear relationship between the current version and the
2492 2503 requested version, the result is the requested version.
2493 2504
2494 2505 To merge the working directory with another revision, use the
2495 2506 merge command.
2496 2507
2497 2508 By default, update will refuse to run if doing so would require
2498 2509 discarding local changes.
2499 2510 """
2500 2511 if date:
2501 2512 if node:
2502 2513 raise util.Abort(_("you can't specify a revision and a date"))
2503 2514 node = cmdutil.finddate(ui, repo, date)
2504 2515
2505 2516 if clean:
2506 2517 return hg.clean(repo, node)
2507 2518 else:
2508 2519 return hg.update(repo, node)
2509 2520
2510 2521 def verify(ui, repo):
2511 2522 """verify the integrity of the repository
2512 2523
2513 2524 Verify the integrity of the current repository.
2514 2525
2515 2526 This will perform an extensive check of the repository's
2516 2527 integrity, validating the hashes and checksums of each entry in
2517 2528 the changelog, manifest, and tracked files, as well as the
2518 2529 integrity of their crosslinks and indices.
2519 2530 """
2520 2531 return hg.verify(repo)
2521 2532
2522 2533 def version_(ui):
2523 2534 """output version and copyright information"""
2524 2535 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2525 2536 % version.get_version())
2526 2537 ui.status(_(
2527 2538 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2528 2539 "This is free software; see the source for copying conditions. "
2529 2540 "There is NO\nwarranty; "
2530 2541 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2531 2542 ))
2532 2543
2533 2544 # Command options and aliases are listed here, alphabetically
2534 2545
2535 2546 globalopts = [
2536 2547 ('R', 'repository', '',
2537 2548 _('repository root directory or symbolic path name')),
2538 2549 ('', 'cwd', '', _('change working directory')),
2539 2550 ('y', 'noninteractive', None,
2540 2551 _('do not prompt, assume \'yes\' for any required answers')),
2541 2552 ('q', 'quiet', None, _('suppress output')),
2542 2553 ('v', 'verbose', None, _('enable additional output')),
2543 2554 ('', 'config', [], _('set/override config option')),
2544 2555 ('', 'debug', None, _('enable debugging output')),
2545 2556 ('', 'debugger', None, _('start debugger')),
2546 2557 ('', 'encoding', util._encoding, _('set the charset encoding')),
2547 2558 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2548 2559 ('', 'lsprof', None, _('print improved command execution profile')),
2549 2560 ('', 'traceback', None, _('print traceback on exception')),
2550 2561 ('', 'time', None, _('time how long the command takes')),
2551 2562 ('', 'profile', None, _('print command execution profile')),
2552 2563 ('', 'version', None, _('output version information and exit')),
2553 2564 ('h', 'help', None, _('display help and exit')),
2554 2565 ]
2555 2566
2556 2567 dryrunopts = [('n', 'dry-run', None,
2557 2568 _('do not perform actions, just print output'))]
2558 2569
2559 2570 remoteopts = [
2560 2571 ('e', 'ssh', '', _('specify ssh command to use')),
2561 2572 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2562 2573 ]
2563 2574
2564 2575 walkopts = [
2565 2576 ('I', 'include', [], _('include names matching the given patterns')),
2566 2577 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2567 2578 ]
2568 2579
2569 2580 commitopts = [
2570 2581 ('m', 'message', '', _('use <text> as commit message')),
2571 2582 ('l', 'logfile', '', _('read commit message from <file>')),
2572 2583 ]
2573 2584
2574 2585 table = {
2575 2586 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2576 2587 "addremove":
2577 2588 (addremove,
2578 2589 [('s', 'similarity', '',
2579 2590 _('guess renamed files by similarity (0<=s<=100)')),
2580 2591 ] + walkopts + dryrunopts,
2581 2592 _('hg addremove [OPTION]... [FILE]...')),
2582 2593 "^annotate":
2583 2594 (annotate,
2584 2595 [('r', 'rev', '', _('annotate the specified revision')),
2585 2596 ('f', 'follow', None, _('follow file copies and renames')),
2586 2597 ('a', 'text', None, _('treat all files as text')),
2587 2598 ('u', 'user', None, _('list the author')),
2588 2599 ('d', 'date', None, _('list the date')),
2589 2600 ('n', 'number', None, _('list the revision number (default)')),
2590 2601 ('c', 'changeset', None, _('list the changeset')),
2591 2602 ] + walkopts,
2592 2603 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2593 2604 "archive":
2594 2605 (archive,
2595 2606 [('', 'no-decode', None, _('do not pass files through decoders')),
2596 2607 ('p', 'prefix', '', _('directory prefix for files in archive')),
2597 2608 ('r', 'rev', '', _('revision to distribute')),
2598 2609 ('t', 'type', '', _('type of distribution to create')),
2599 2610 ] + walkopts,
2600 2611 _('hg archive [OPTION]... DEST')),
2601 2612 "backout":
2602 2613 (backout,
2603 2614 [('', 'merge', None,
2604 2615 _('merge with old dirstate parent after backout')),
2605 2616 ('d', 'date', '', _('record datecode as commit date')),
2606 2617 ('', 'parent', '', _('parent to choose when backing out merge')),
2607 2618 ('u', 'user', '', _('record user as committer')),
2608 2619 ] + walkopts + commitopts,
2609 2620 _('hg backout [OPTION]... REV')),
2610 2621 "branch": (branch, [], _('hg branch [NAME]')),
2611 2622 "branches": (branches, [], _('hg branches')),
2612 2623 "bundle":
2613 2624 (bundle,
2614 2625 [('f', 'force', None,
2615 2626 _('run even when remote repository is unrelated')),
2616 2627 ('r', 'rev', [],
2617 2628 _('a changeset you would like to bundle')),
2618 2629 ('', 'base', [],
2619 2630 _('a base changeset to specify instead of a destination')),
2620 2631 ] + remoteopts,
2621 2632 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2622 2633 "cat":
2623 2634 (cat,
2624 2635 [('o', 'output', '', _('print output to file with formatted name')),
2625 2636 ('r', 'rev', '', _('print the given revision')),
2626 2637 ] + walkopts,
2627 2638 _('hg cat [OPTION]... FILE...')),
2628 2639 "^clone":
2629 2640 (clone,
2630 2641 [('U', 'noupdate', None, _('do not update the new working directory')),
2631 2642 ('r', 'rev', [],
2632 2643 _('a changeset you would like to have after cloning')),
2633 2644 ('', 'pull', None, _('use pull protocol to copy metadata')),
2634 2645 ('', 'uncompressed', None,
2635 2646 _('use uncompressed transfer (fast over LAN)')),
2636 2647 ] + remoteopts,
2637 2648 _('hg clone [OPTION]... SOURCE [DEST]')),
2638 2649 "^commit|ci":
2639 2650 (commit,
2640 2651 [('A', 'addremove', None,
2641 2652 _('mark new/missing files as added/removed before committing')),
2642 2653 ('d', 'date', '', _('record datecode as commit date')),
2643 2654 ('u', 'user', '', _('record user as committer')),
2644 2655 ] + walkopts + commitopts,
2645 2656 _('hg commit [OPTION]... [FILE]...')),
2646 2657 "copy|cp":
2647 2658 (copy,
2648 2659 [('A', 'after', None, _('record a copy that has already occurred')),
2649 2660 ('f', 'force', None,
2650 2661 _('forcibly copy over an existing managed file')),
2651 2662 ] + walkopts + dryrunopts,
2652 2663 _('hg copy [OPTION]... [SOURCE]... DEST')),
2653 2664 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2654 2665 "debugcomplete":
2655 2666 (debugcomplete,
2656 2667 [('o', 'options', None, _('show the command options'))],
2657 2668 _('debugcomplete [-o] CMD')),
2658 2669 "debuginstall": (debuginstall, [], _('debuginstall')),
2659 2670 "debugrebuildstate":
2660 2671 (debugrebuildstate,
2661 2672 [('r', 'rev', '', _('revision to rebuild to'))],
2662 2673 _('debugrebuildstate [-r REV] [REV]')),
2663 2674 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2664 2675 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2665 2676 "debugstate": (debugstate, [], _('debugstate')),
2666 2677 "debugdate":
2667 2678 (debugdate,
2668 2679 [('e', 'extended', None, _('try extended date formats'))],
2669 2680 _('debugdate [-e] DATE [RANGE]')),
2670 2681 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2671 2682 "debugindex": (debugindex, [], _('debugindex FILE')),
2672 2683 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2673 2684 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2674 2685 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2675 2686 "^diff":
2676 2687 (diff,
2677 2688 [('r', 'rev', [], _('revision')),
2678 2689 ('a', 'text', None, _('treat all files as text')),
2679 2690 ('p', 'show-function', None,
2680 2691 _('show which function each change is in')),
2681 2692 ('g', 'git', None, _('use git extended diff format')),
2682 2693 ('', 'nodates', None, _("don't include dates in diff headers")),
2683 2694 ('w', 'ignore-all-space', None,
2684 2695 _('ignore white space when comparing lines')),
2685 2696 ('b', 'ignore-space-change', None,
2686 2697 _('ignore changes in the amount of white space')),
2687 2698 ('B', 'ignore-blank-lines', None,
2688 2699 _('ignore changes whose lines are all blank')),
2689 2700 ] + walkopts,
2690 2701 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2691 2702 "^export":
2692 2703 (export,
2693 2704 [('o', 'output', '', _('print output to file with formatted name')),
2694 2705 ('a', 'text', None, _('treat all files as text')),
2695 2706 ('g', 'git', None, _('use git extended diff format')),
2696 2707 ('', 'nodates', None, _("don't include dates in diff headers")),
2697 2708 ('', 'switch-parent', None, _('diff against the second parent'))],
2698 2709 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2699 2710 "grep":
2700 2711 (grep,
2701 2712 [('0', 'print0', None, _('end fields with NUL')),
2702 2713 ('', 'all', None, _('print all revisions that match')),
2703 2714 ('f', 'follow', None,
2704 2715 _('follow changeset history, or file history across copies and renames')),
2705 2716 ('i', 'ignore-case', None, _('ignore case when matching')),
2706 2717 ('l', 'files-with-matches', None,
2707 2718 _('print only filenames and revs that match')),
2708 2719 ('n', 'line-number', None, _('print matching line numbers')),
2709 2720 ('r', 'rev', [], _('search in given revision range')),
2710 2721 ('u', 'user', None, _('print user who committed change')),
2711 2722 ] + walkopts,
2712 2723 _('hg grep [OPTION]... PATTERN [FILE]...')),
2713 2724 "heads":
2714 2725 (heads,
2715 2726 [('', 'style', '', _('display using template map file')),
2716 2727 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2717 2728 ('', 'template', '', _('display with template'))],
2718 2729 _('hg heads [-r REV]')),
2719 2730 "help": (help_, [], _('hg help [COMMAND]')),
2720 2731 "identify|id": (identify, [], _('hg identify')),
2721 2732 "import|patch":
2722 2733 (import_,
2723 2734 [('p', 'strip', 1,
2724 2735 _('directory strip option for patch. This has the same\n'
2725 2736 'meaning as the corresponding patch option')),
2726 2737 ('b', 'base', '', _('base path')),
2727 2738 ('f', 'force', None,
2728 2739 _('skip check for outstanding uncommitted changes'))] + commitopts,
2729 2740 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2730 2741 "incoming|in": (incoming,
2731 2742 [('M', 'no-merges', None, _('do not show merges')),
2732 2743 ('f', 'force', None,
2733 2744 _('run even when remote repository is unrelated')),
2734 2745 ('', 'style', '', _('display using template map file')),
2735 2746 ('n', 'newest-first', None, _('show newest record first')),
2736 2747 ('', 'bundle', '', _('file to store the bundles into')),
2737 2748 ('p', 'patch', None, _('show patch')),
2738 2749 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2739 2750 ('', 'template', '', _('display with template')),
2740 2751 ] + remoteopts,
2741 2752 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2742 2753 ' [--bundle FILENAME] [SOURCE]')),
2743 2754 "^init":
2744 2755 (init,
2745 2756 remoteopts,
2746 2757 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2747 2758 "locate":
2748 2759 (locate,
2749 2760 [('r', 'rev', '', _('search the repository as it stood at rev')),
2750 2761 ('0', 'print0', None,
2751 2762 _('end filenames with NUL, for use with xargs')),
2752 2763 ('f', 'fullpath', None,
2753 2764 _('print complete paths from the filesystem root')),
2754 2765 ] + walkopts,
2755 2766 _('hg locate [OPTION]... [PATTERN]...')),
2756 2767 "^log|history":
2757 2768 (log,
2758 2769 [('f', 'follow', None,
2759 2770 _('follow changeset history, or file history across copies and renames')),
2760 2771 ('', 'follow-first', None,
2761 2772 _('only follow the first parent of merge changesets')),
2762 2773 ('d', 'date', '', _('show revs matching date spec')),
2763 2774 ('C', 'copies', None, _('show copied files')),
2764 2775 ('k', 'keyword', [], _('search for a keyword')),
2765 2776 ('l', 'limit', '', _('limit number of changes displayed')),
2766 2777 ('r', 'rev', [], _('show the specified revision or range')),
2767 2778 ('', 'removed', None, _('include revs where files were removed')),
2768 2779 ('M', 'no-merges', None, _('do not show merges')),
2769 2780 ('', 'style', '', _('display using template map file')),
2770 2781 ('m', 'only-merges', None, _('show only merges')),
2771 2782 ('p', 'patch', None, _('show patch')),
2772 2783 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2773 2784 ('', 'template', '', _('display with template')),
2774 2785 ] + walkopts,
2775 2786 _('hg log [OPTION]... [FILE]')),
2776 2787 "manifest": (manifest, [], _('hg manifest [REV]')),
2777 2788 "^merge":
2778 2789 (merge,
2779 2790 [('f', 'force', None, _('force a merge with outstanding changes'))],
2780 2791 _('hg merge [-f] [REV]')),
2781 2792 "outgoing|out": (outgoing,
2782 2793 [('M', 'no-merges', None, _('do not show merges')),
2783 2794 ('f', 'force', None,
2784 2795 _('run even when remote repository is unrelated')),
2785 2796 ('p', 'patch', None, _('show patch')),
2786 2797 ('', 'style', '', _('display using template map file')),
2787 2798 ('r', 'rev', [], _('a specific revision you would like to push')),
2788 2799 ('n', 'newest-first', None, _('show newest record first')),
2789 2800 ('', 'template', '', _('display with template')),
2790 2801 ] + remoteopts,
2791 2802 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2792 2803 "^parents":
2793 2804 (parents,
2794 2805 [('r', 'rev', '', _('show parents from the specified rev')),
2795 2806 ('', 'style', '', _('display using template map file')),
2796 2807 ('', 'template', '', _('display with template'))],
2797 2808 _('hg parents [-r REV] [FILE]')),
2798 2809 "paths": (paths, [], _('hg paths [NAME]')),
2799 2810 "^pull":
2800 2811 (pull,
2801 2812 [('u', 'update', None,
2802 2813 _('update to new tip if changesets were pulled')),
2803 2814 ('f', 'force', None,
2804 2815 _('run even when remote repository is unrelated')),
2805 2816 ('r', 'rev', [],
2806 2817 _('a specific revision up to which you would like to pull')),
2807 2818 ] + remoteopts,
2808 2819 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2809 2820 "^push":
2810 2821 (push,
2811 2822 [('f', 'force', None, _('force push')),
2812 2823 ('r', 'rev', [], _('a specific revision you would like to push')),
2813 2824 ] + remoteopts,
2814 2825 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2815 2826 "debugrawcommit|rawcommit":
2816 2827 (rawcommit,
2817 2828 [('p', 'parent', [], _('parent')),
2818 2829 ('d', 'date', '', _('date code')),
2819 2830 ('u', 'user', '', _('user')),
2820 2831 ('F', 'files', '', _('file list'))
2821 2832 ] + commitopts,
2822 2833 _('hg debugrawcommit [OPTION]... [FILE]...')),
2823 2834 "recover": (recover, [], _('hg recover')),
2824 2835 "^remove|rm":
2825 2836 (remove,
2826 2837 [('A', 'after', None, _('record remove that has already occurred')),
2827 2838 ('f', 'force', None, _('remove file even if modified')),
2828 2839 ] + walkopts,
2829 2840 _('hg remove [OPTION]... FILE...')),
2830 2841 "rename|mv":
2831 2842 (rename,
2832 2843 [('A', 'after', None, _('record a rename that has already occurred')),
2833 2844 ('f', 'force', None,
2834 2845 _('forcibly copy over an existing managed file')),
2835 2846 ] + walkopts + dryrunopts,
2836 2847 _('hg rename [OPTION]... SOURCE... DEST')),
2837 2848 "^revert":
2838 2849 (revert,
2839 2850 [('a', 'all', None, _('revert all changes when no arguments given')),
2840 2851 ('d', 'date', '', _('tipmost revision matching date')),
2841 2852 ('r', 'rev', '', _('revision to revert to')),
2842 2853 ('', 'no-backup', None, _('do not save backup copies of files')),
2843 2854 ] + walkopts + dryrunopts,
2844 2855 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2845 2856 "rollback": (rollback, [], _('hg rollback')),
2846 2857 "root": (root, [], _('hg root')),
2847 2858 "showconfig|debugconfig":
2848 2859 (showconfig,
2849 2860 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2850 2861 _('showconfig [-u] [NAME]...')),
2851 2862 "^serve":
2852 2863 (serve,
2853 2864 [('A', 'accesslog', '', _('name of access log file to write to')),
2854 2865 ('d', 'daemon', None, _('run server in background')),
2855 2866 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2856 2867 ('E', 'errorlog', '', _('name of error log file to write to')),
2857 2868 ('p', 'port', 0, _('port to use (default: 8000)')),
2858 2869 ('a', 'address', '', _('address to use')),
2859 2870 ('n', 'name', '',
2860 2871 _('name to show in web pages (default: working dir)')),
2861 2872 ('', 'webdir-conf', '', _('name of the webdir config file'
2862 2873 ' (serve more than one repo)')),
2863 2874 ('', 'pid-file', '', _('name of file to write process ID to')),
2864 2875 ('', 'stdio', None, _('for remote clients')),
2865 2876 ('t', 'templates', '', _('web templates to use')),
2866 2877 ('', 'style', '', _('template style to use')),
2867 2878 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2868 2879 _('hg serve [OPTION]...')),
2869 2880 "^status|st":
2870 2881 (status,
2871 2882 [('A', 'all', None, _('show status of all files')),
2872 2883 ('m', 'modified', None, _('show only modified files')),
2873 2884 ('a', 'added', None, _('show only added files')),
2874 2885 ('r', 'removed', None, _('show only removed files')),
2875 2886 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2876 2887 ('c', 'clean', None, _('show only files without changes')),
2877 2888 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2878 2889 ('i', 'ignored', None, _('show ignored files')),
2879 2890 ('n', 'no-status', None, _('hide status prefix')),
2880 2891 ('C', 'copies', None, _('show source of copied files')),
2881 2892 ('0', 'print0', None,
2882 2893 _('end filenames with NUL, for use with xargs')),
2883 2894 ('', 'rev', [], _('show difference from revision')),
2884 2895 ] + walkopts,
2885 2896 _('hg status [OPTION]... [FILE]...')),
2886 2897 "tag":
2887 2898 (tag,
2888 2899 [('l', 'local', None, _('make the tag local')),
2889 2900 ('m', 'message', '', _('message for tag commit log entry')),
2890 2901 ('d', 'date', '', _('record datecode as commit date')),
2891 2902 ('u', 'user', '', _('record user as committer')),
2892 2903 ('r', 'rev', '', _('revision to tag'))],
2893 2904 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2894 2905 "tags": (tags, [], _('hg tags')),
2895 2906 "tip":
2896 2907 (tip,
2897 2908 [('', 'style', '', _('display using template map file')),
2898 2909 ('p', 'patch', None, _('show patch')),
2899 2910 ('', 'template', '', _('display with template'))],
2900 2911 _('hg tip [-p]')),
2901 2912 "unbundle":
2902 2913 (unbundle,
2903 2914 [('u', 'update', None,
2904 2915 _('update to new tip if changesets were unbundled'))],
2905 2916 _('hg unbundle [-u] FILE')),
2906 2917 "^update|up|checkout|co":
2907 2918 (update,
2908 2919 [('C', 'clean', None, _('overwrite locally modified files')),
2909 2920 ('d', 'date', '', _('tipmost revision matching date'))],
2910 2921 _('hg update [-C] [-d DATE] [REV]')),
2911 2922 "verify": (verify, [], _('hg verify')),
2912 2923 "version": (version_, [], _('hg version')),
2913 2924 }
2914 2925
2915 2926 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2916 2927 " debugindex debugindexdot debugdate debuginstall")
2917 2928 optionalrepo = ("paths serve showconfig")
2918 2929
2919 2930 def findpossible(ui, cmd):
2920 2931 """
2921 2932 Return cmd -> (aliases, command table entry)
2922 2933 for each matching command.
2923 2934 Return debug commands (or their aliases) only if no normal command matches.
2924 2935 """
2925 2936 choice = {}
2926 2937 debugchoice = {}
2927 2938 for e in table.keys():
2928 2939 aliases = e.lstrip("^").split("|")
2929 2940 found = None
2930 2941 if cmd in aliases:
2931 2942 found = cmd
2932 2943 elif not ui.config("ui", "strict"):
2933 2944 for a in aliases:
2934 2945 if a.startswith(cmd):
2935 2946 found = a
2936 2947 break
2937 2948 if found is not None:
2938 2949 if aliases[0].startswith("debug") or found.startswith("debug"):
2939 2950 debugchoice[found] = (aliases, table[e])
2940 2951 else:
2941 2952 choice[found] = (aliases, table[e])
2942 2953
2943 2954 if not choice and debugchoice:
2944 2955 choice = debugchoice
2945 2956
2946 2957 return choice
2947 2958
2948 2959 def findcmd(ui, cmd):
2949 2960 """Return (aliases, command table entry) for command string."""
2950 2961 choice = findpossible(ui, cmd)
2951 2962
2952 2963 if choice.has_key(cmd):
2953 2964 return choice[cmd]
2954 2965
2955 2966 if len(choice) > 1:
2956 2967 clist = choice.keys()
2957 2968 clist.sort()
2958 2969 raise AmbiguousCommand(cmd, clist)
2959 2970
2960 2971 if choice:
2961 2972 return choice.values()[0]
2962 2973
2963 2974 raise UnknownCommand(cmd)
2964 2975
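findpossible and findcmd above implement Mercurial's command abbreviation rules: an exact alias always wins, otherwise any unambiguous prefix of an alias matches, and debug commands are only considered when nothing else matches. A minimal standalone sketch of that prefix-resolution idea (resolve and sample are made-up names, not part of this changeset) could look like this:

    def resolve(cmd, table):
        # keys look like "^status|st"; the leading '^' only marks commands
        # listed in the short help, so strip it before splitting the aliases
        matches = {}
        for key, entry in table.items():
            aliases = key.lstrip("^").split("|")
            if cmd in aliases:
                return aliases[0], entry            # exact alias always wins
            for a in aliases:
                if a.startswith(cmd):
                    matches[aliases[0]] = entry     # remember prefix matches
                    break
        if len(matches) > 1:
            raise ValueError("ambiguous command: " + ", ".join(sorted(matches)))
        if matches:
            return matches.popitem()
        raise KeyError("unknown command: " + cmd)

    sample = {"^status|st": 1, "^serve": 2, "showconfig": 3}
    print(resolve("st", sample))     # exact alias   -> ('status', 1)
    print(resolve("se", sample))     # unique prefix -> ('serve', 2)
    # resolve("s", sample) raises: ambiguous command: serve, showconfig, status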
2965 2976 def catchterm(*args):
2966 2977 raise util.SignalInterrupt
2967 2978
2968 2979 def run():
2969 2980 sys.exit(dispatch(sys.argv[1:]))
2970 2981
2971 2982 class ParseError(Exception):
2972 2983 """Exception raised on errors in parsing the command line."""
2973 2984
2974 2985 def parse(ui, args):
2975 2986 options = {}
2976 2987 cmdoptions = {}
2977 2988
2978 2989 try:
2979 2990 args = fancyopts.fancyopts(args, globalopts, options)
2980 2991 except fancyopts.getopt.GetoptError, inst:
2981 2992 raise ParseError(None, inst)
2982 2993
2983 2994 if args:
2984 2995 cmd, args = args[0], args[1:]
2985 2996 aliases, i = findcmd(ui, cmd)
2986 2997 cmd = aliases[0]
2987 2998 defaults = ui.config("defaults", cmd)
2988 2999 if defaults:
2989 3000 args = shlex.split(defaults) + args
2990 3001 c = list(i[1])
2991 3002 else:
2992 3003 cmd = None
2993 3004 c = []
2994 3005
2995 3006 # combine global options into local
2996 3007 for o in globalopts:
2997 3008 c.append((o[0], o[1], options[o[1]], o[3]))
2998 3009
2999 3010 try:
3000 3011 args = fancyopts.fancyopts(args, c, cmdoptions)
3001 3012 except fancyopts.getopt.GetoptError, inst:
3002 3013 raise ParseError(cmd, inst)
3003 3014
3004 3015 # separate global options back out
3005 3016 for o in globalopts:
3006 3017 n = o[1]
3007 3018 options[n] = cmdoptions[n]
3008 3019 del cmdoptions[n]
3009 3020
3010 3021 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3011 3022
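parse above also prepends per-command defaults from the [defaults] hgrc section before re-parsing the command's own options, so a configuration entry effectively becomes extra command-line arguments. A small illustration of that prepending (the hgrc contents are an example, not taken from this changeset):

    # hgrc:
    #   [defaults]
    #   log = -v -l 10
    #
    # then "hg log foo.py" is parsed as if the user had typed:
    #   hg log -v -l 10 foo.py
    import shlex
    print(shlex.split("-v -l 10") + ["foo.py"])   # ['-v', '-l', '10', 'foo.py']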
3012 3023 external = {}
3013 3024
3014 3025 def findext(name):
3015 3026 '''return module with given extension name'''
3016 3027 try:
3017 3028 return sys.modules[external[name]]
3018 3029 except KeyError:
3019 3030 for k, v in external.iteritems():
3020 3031 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3021 3032 return sys.modules[v]
3022 3033 raise KeyError(name)
3023 3034
3024 3035 def load_extensions(ui):
3025 3036 added = []
3026 3037 for ext_name, load_from_name in ui.extensions():
3027 3038 if ext_name in external:
3028 3039 continue
3029 3040 try:
3030 3041 if load_from_name:
3031 3042 # the module will be loaded in sys.modules
3032 3043 # choose a unique name so that it doesn't
3033 3044 # conflict with other modules
3034 3045 module_name = "hgext_%s" % ext_name.replace('.', '_')
3035 3046 mod = imp.load_source(module_name, load_from_name)
3036 3047 else:
3037 3048 def importh(name):
3038 3049 mod = __import__(name)
3039 3050 components = name.split('.')
3040 3051 for comp in components[1:]:
3041 3052 mod = getattr(mod, comp)
3042 3053 return mod
3043 3054 try:
3044 3055 mod = importh("hgext.%s" % ext_name)
3045 3056 except ImportError:
3046 3057 mod = importh(ext_name)
3047 3058 external[ext_name] = mod.__name__
3048 3059 added.append((mod, ext_name))
3049 3060 except (util.SignalInterrupt, KeyboardInterrupt):
3050 3061 raise
3051 3062 except Exception, inst:
3052 3063 ui.warn(_("*** failed to import extension %s: %s\n") %
3053 3064 (ext_name, inst))
3054 3065 if ui.print_exc():
3055 3066 return 1
3056 3067
3057 3068 for mod, name in added:
3058 3069 uisetup = getattr(mod, 'uisetup', None)
3059 3070 if uisetup:
3060 3071 uisetup(ui)
3061 3072 cmdtable = getattr(mod, 'cmdtable', {})
3062 3073 overrides = [cmd for cmd in cmdtable if cmd in table]
3063 3074 if overrides:
3064 3075 ui.warn(_("extension '%s' overrides commands: %s\n")
3065 3076 % (name, " ".join(overrides)))
3066 3077 table.update(cmdtable)
3067 3078
3068 3079 def parseconfig(config):
3069 3080 """parse the --config options from the command line"""
3070 3081 parsed = []
3071 3082 for cfg in config:
3072 3083 try:
3073 3084 name, value = cfg.split('=', 1)
3074 3085 section, name = name.split('.', 1)
3075 3086 if not section or not name:
3076 3087 raise IndexError
3077 3088 parsed.append((section, name, value))
3078 3089 except (IndexError, ValueError):
3079 3090 raise util.Abort(_('malformed --config option: %s') % cfg)
3080 3091 return parsed
3081 3092
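parseconfig above accepts each --config argument only in the form section.name=value and turns it into a (section, name, value) tuple; anything else is reported as malformed. The same split shown on its own, with example inputs (parse_one is a hypothetical helper, not part of this changeset):

    def parse_one(cfg):
        name, value = cfg.split('=', 1)        # ValueError if there is no '='
        section, name = name.split('.', 1)     # ValueError if there is no '.'
        if not section or not name:
            raise ValueError('empty section or name')
        return section, name, value

    print(parse_one('ui.username=Alice <alice@example.com>'))
    # -> ('ui', 'username', 'Alice <alice@example.com>')
    # parse_one('uiusername=alice') and parse_one('ui.username') both raise,
    # which the real parseconfig reports as a malformed --config option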
3082 3093 def dispatch(args):
3083 3094 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3084 3095 num = getattr(signal, name, None)
3085 3096 if num: signal.signal(num, catchterm)
3086 3097
3087 3098 try:
3088 3099 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3089 3100 except util.Abort, inst:
3090 3101 sys.stderr.write(_("abort: %s\n") % inst)
3091 3102 return -1
3092 3103
3093 3104 load_extensions(u)
3094 3105 u.addreadhook(load_extensions)
3095 3106
3096 3107 try:
3097 3108 cmd, func, args, options, cmdoptions = parse(u, args)
3098 3109 if options["encoding"]:
3099 3110 util._encoding = options["encoding"]
3100 3111 if options["encodingmode"]:
3101 3112 util._encodingmode = options["encodingmode"]
3102 3113 if options["time"]:
3103 3114 def get_times():
3104 3115 t = os.times()
3105 3116 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3106 3117 t = (t[0], t[1], t[2], t[3], time.clock())
3107 3118 return t
3108 3119 s = get_times()
3109 3120 def print_time():
3110 3121 t = get_times()
3111 3122 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3112 3123 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3113 3124 atexit.register(print_time)
3114 3125
3115 3126 # enter the debugger before command execution
3116 3127 if options['debugger']:
3117 3128 pdb.set_trace()
3118 3129
3119 3130 try:
3120 3131 if options['cwd']:
3121 3132 os.chdir(options['cwd'])
3122 3133
3123 3134 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3124 3135 not options["noninteractive"], options["traceback"],
3125 3136 parseconfig(options["config"]))
3126 3137
3127 3138 path = u.expandpath(options["repository"]) or ""
3128 3139 repo = path and hg.repository(u, path=path) or None
3129 3140 if repo and not repo.local():
3130 3141 raise util.Abort(_("repository '%s' is not local") % path)
3131 3142
3132 3143 if options['help']:
3133 3144 return help_(u, cmd, options['version'])
3134 3145 elif options['version']:
3135 3146 return version_(u)
3136 3147 elif not cmd:
3137 3148 return help_(u, 'shortlist')
3138 3149
3139 3150 if cmd not in norepo.split():
3140 3151 try:
3141 3152 if not repo:
3142 3153 repo = hg.repository(u, path=path)
3143 3154 u = repo.ui
3144 3155 for name in external.itervalues():
3145 3156 mod = sys.modules[name]
3146 3157 if hasattr(mod, 'reposetup'):
3147 3158 mod.reposetup(u, repo)
3148 3159 hg.repo_setup_hooks.append(mod.reposetup)
3149 3160 except hg.RepoError:
3150 3161 if cmd not in optionalrepo.split():
3151 3162 raise
3152 3163 d = lambda: func(u, repo, *args, **cmdoptions)
3153 3164 else:
3154 3165 d = lambda: func(u, *args, **cmdoptions)
3155 3166
3156 3167 try:
3157 3168 if options['profile']:
3158 3169 import hotshot, hotshot.stats
3159 3170 prof = hotshot.Profile("hg.prof")
3160 3171 try:
3161 3172 try:
3162 3173 return prof.runcall(d)
3163 3174 except:
3164 3175 try:
3165 3176 u.warn(_('exception raised - generating '
3166 3177 'profile anyway\n'))
3167 3178 except:
3168 3179 pass
3169 3180 raise
3170 3181 finally:
3171 3182 prof.close()
3172 3183 stats = hotshot.stats.load("hg.prof")
3173 3184 stats.strip_dirs()
3174 3185 stats.sort_stats('time', 'calls')
3175 3186 stats.print_stats(40)
3176 3187 elif options['lsprof']:
3177 3188 try:
3178 3189 from mercurial import lsprof
3179 3190 except ImportError:
3180 3191 raise util.Abort(_(
3181 3192 'lsprof not available - install from '
3182 3193 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3183 3194 p = lsprof.Profiler()
3184 3195 p.enable(subcalls=True)
3185 3196 try:
3186 3197 return d()
3187 3198 finally:
3188 3199 p.disable()
3189 3200 stats = lsprof.Stats(p.getstats())
3190 3201 stats.sort()
3191 3202 stats.pprint(top=10, file=sys.stderr, climit=5)
3192 3203 else:
3193 3204 return d()
3194 3205 finally:
3195 3206 u.flush()
3196 3207 except:
3197 3208 # enter the debugger when we hit an exception
3198 3209 if options['debugger']:
3199 3210 pdb.post_mortem(sys.exc_info()[2])
3200 3211 u.print_exc()
3201 3212 raise
3202 3213 except ParseError, inst:
3203 3214 if inst.args[0]:
3204 3215 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3205 3216 help_(u, inst.args[0])
3206 3217 else:
3207 3218 u.warn(_("hg: %s\n") % inst.args[1])
3208 3219 help_(u, 'shortlist')
3209 3220 except AmbiguousCommand, inst:
3210 3221 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3211 3222 (inst.args[0], " ".join(inst.args[1])))
3212 3223 except UnknownCommand, inst:
3213 3224 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3214 3225 help_(u, 'shortlist')
3215 3226 except hg.RepoError, inst:
3216 3227 u.warn(_("abort: %s!\n") % inst)
3217 3228 except lock.LockHeld, inst:
3218 3229 if inst.errno == errno.ETIMEDOUT:
3219 3230 reason = _('timed out waiting for lock held by %s') % inst.locker
3220 3231 else:
3221 3232 reason = _('lock held by %s') % inst.locker
3222 3233 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3223 3234 except lock.LockUnavailable, inst:
3224 3235 u.warn(_("abort: could not lock %s: %s\n") %
3225 3236 (inst.desc or inst.filename, inst.strerror))
3226 3237 except revlog.RevlogError, inst:
3227 3238 u.warn(_("abort: %s!\n") % inst)
3228 3239 except util.SignalInterrupt:
3229 3240 u.warn(_("killed!\n"))
3230 3241 except KeyboardInterrupt:
3231 3242 try:
3232 3243 u.warn(_("interrupted!\n"))
3233 3244 except IOError, inst:
3234 3245 if inst.errno == errno.EPIPE:
3235 3246 if u.debugflag:
3236 3247 u.warn(_("\nbroken pipe\n"))
3237 3248 else:
3238 3249 raise
3239 3250 except socket.error, inst:
3240 3251 u.warn(_("abort: %s\n") % inst[1])
3241 3252 except IOError, inst:
3242 3253 if hasattr(inst, "code"):
3243 3254 u.warn(_("abort: %s\n") % inst)
3244 3255 elif hasattr(inst, "reason"):
3245 3256 try: # usually it is in the form (errno, strerror)
3246 3257 reason = inst.reason.args[1]
3247 3258 except: # it might be anything, for example a string
3248 3259 reason = inst.reason
3249 3260 u.warn(_("abort: error: %s\n") % reason)
3250 3261 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3251 3262 if u.debugflag:
3252 3263 u.warn(_("broken pipe\n"))
3253 3264 elif getattr(inst, "strerror", None):
3254 3265 if getattr(inst, "filename", None):
3255 3266 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3256 3267 else:
3257 3268 u.warn(_("abort: %s\n") % inst.strerror)
3258 3269 else:
3259 3270 raise
3260 3271 except OSError, inst:
3261 3272 if getattr(inst, "filename", None):
3262 3273 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3263 3274 else:
3264 3275 u.warn(_("abort: %s\n") % inst.strerror)
3265 3276 except util.UnexpectedOutput, inst:
3266 3277 u.warn(_("abort: %s") % inst[0])
3267 3278 if not isinstance(inst[1], basestring):
3268 3279 u.warn(" %r\n" % (inst[1],))
3269 3280 elif not inst[1]:
3270 3281 u.warn(_(" empty string\n"))
3271 3282 else:
3272 3283 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3273 3284 except util.Abort, inst:
3274 3285 u.warn(_("abort: %s\n") % inst)
3275 3286 except TypeError, inst:
3276 3287 # was this an argument error?
3277 3288 tb = traceback.extract_tb(sys.exc_info()[2])
3278 3289 if len(tb) > 2: # no
3279 3290 raise
3280 3291 u.debug(inst, "\n")
3281 3292 u.warn(_("%s: invalid arguments\n") % cmd)
3282 3293 help_(u, cmd)
3283 3294 except SystemExit, inst:
3284 3295 # Commands shouldn't sys.exit directly, but give a return code.
3285 3296 # Just in case, catch this and pass the exit code to the caller.
3286 3297 return inst.code
3287 3298 except:
3288 3299 u.warn(_("** unknown exception encountered, details follow\n"))
3289 3300 u.warn(_("** report bug details to "
3290 3301 "http://www.selenic.com/mercurial/bts\n"))
3291 3302 u.warn(_("** or mercurial@selenic.com\n"))
3292 3303 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3293 3304 % version.get_version())
3294 3305 raise
3295 3306
3296 3307 return -1
@@ -1,1885 +1,1903 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, appendfile, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 if not path:
24 24 p = os.getcwd()
25 25 while not os.path.isdir(os.path.join(p, ".hg")):
26 26 oldp = p
27 27 p = os.path.dirname(p)
28 28 if p == oldp:
29 29 raise repo.RepoError(_("There is no Mercurial repository"
30 30 " here (.hg not found)"))
31 31 path = p
32 32
33 33 self.path = os.path.join(path, ".hg")
34 34 self.root = os.path.realpath(path)
35 35 self.origroot = path
36 36 self.opener = util.opener(self.path)
37 37 self.wopener = util.opener(self.root)
38 38
39 39 if not os.path.isdir(self.path):
40 40 if create:
41 41 if not os.path.exists(path):
42 42 os.mkdir(path)
43 43 os.mkdir(self.path)
44 44 os.mkdir(os.path.join(self.path, "store"))
45 45 requirements = ("revlogv1", "store")
46 46 reqfile = self.opener("requires", "w")
47 47 for r in requirements:
48 48 reqfile.write("%s\n" % r)
49 49 reqfile.close()
50 50 # create an invalid changelog
51 51 self.opener("00changelog.i", "a").write(
52 52 '\0\0\0\2' # represents revlogv2
53 53 ' dummy changelog to prevent using the old repo layout'
54 54 )
55 55 else:
56 56 raise repo.RepoError(_("repository %s not found") % path)
57 57 elif create:
58 58 raise repo.RepoError(_("repository %s already exists") % path)
59 59 else:
60 60 # find requirements
61 61 try:
62 62 requirements = self.opener("requires").read().splitlines()
63 63 except IOError, inst:
64 64 if inst.errno != errno.ENOENT:
65 65 raise
66 66 requirements = []
67 67 # check them
68 68 for r in requirements:
69 69 if r not in self.supported:
70 70 raise repo.RepoError(_("requirement '%s' not supported") % r)
71 71
72 72 # setup store
73 73 if "store" in requirements:
74 74 self.encodefn = util.encodefilename
75 75 self.decodefn = util.decodefilename
76 76 self.spath = os.path.join(self.path, "store")
77 77 else:
78 78 self.encodefn = lambda x: x
79 79 self.decodefn = lambda x: x
80 80 self.spath = self.path
81 81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
82 82
83 83 self.ui = ui.ui(parentui=parentui)
84 84 try:
85 85 self.ui.readconfig(self.join("hgrc"), self.root)
86 86 except IOError:
87 87 pass
88 88
89 89 v = self.ui.configrevlog()
90 90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
91 91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
92 92 fl = v.get('flags', None)
93 93 flags = 0
94 94 if fl != None:
95 95 for x in fl.split():
96 96 flags |= revlog.flagstr(x)
97 97 elif self.revlogv1:
98 98 flags = revlog.REVLOG_DEFAULT_FLAGS
99 99
100 100 v = self.revlogversion | flags
101 101 self.manifest = manifest.manifest(self.sopener, v)
102 102 self.changelog = changelog.changelog(self.sopener, v)
103 103
104 104 fallback = self.ui.config('ui', 'fallbackencoding')
105 105 if fallback:
106 106 util._fallbackencoding = fallback
107 107
108 108 # the changelog might not have the inline index flag
109 109 # on. If the format of the changelog is the same as found in
110 110 # .hgrc, apply any flags found in the .hgrc as well.
111 111 # Otherwise, just version from the changelog
112 112 v = self.changelog.version
113 113 if v == self.revlogversion:
114 114 v |= flags
115 115 self.revlogversion = v
116 116
117 117 self.tagscache = None
118 118 self.branchcache = None
119 119 self.nodetagscache = None
120 120 self.filterpats = {}
121 121 self.transhandle = None
122 122
123 123 self._link = lambda x: False
124 124 if util.checklink(self.root):
125 125 r = self.root # avoid circular reference in lambda
126 126 self._link = lambda x: util.is_link(os.path.join(r, x))
127 127
128 128 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
129 129
130 130 def url(self):
131 131 return 'file:' + self.root
132 132
133 133 def hook(self, name, throw=False, **args):
134 134 def callhook(hname, funcname):
135 135 '''call python hook. hook is callable object, looked up as
136 136 name in python module. if callable returns "true", hook
137 137 fails, else passes. if hook raises exception, treated as
138 138 hook failure. exception propagates if throw is "true".
139 139
140 140 reason for "true" meaning "hook failed" is so that
141 141 unmodified commands (e.g. mercurial.commands.update) can
142 142 be run as hooks without wrappers to convert return values.'''
143 143
144 144 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
145 145 d = funcname.rfind('.')
146 146 if d == -1:
147 147 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
148 148 % (hname, funcname))
149 149 modname = funcname[:d]
150 150 try:
151 151 obj = __import__(modname)
152 152 except ImportError:
153 153 try:
154 154 # extensions are loaded with hgext_ prefix
155 155 obj = __import__("hgext_%s" % modname)
156 156 except ImportError:
157 157 raise util.Abort(_('%s hook is invalid '
158 158 '(import of "%s" failed)') %
159 159 (hname, modname))
160 160 try:
161 161 for p in funcname.split('.')[1:]:
162 162 obj = getattr(obj, p)
163 163 except AttributeError, err:
164 164 raise util.Abort(_('%s hook is invalid '
165 165 '("%s" is not defined)') %
166 166 (hname, funcname))
167 167 if not callable(obj):
168 168 raise util.Abort(_('%s hook is invalid '
169 169 '("%s" is not callable)') %
170 170 (hname, funcname))
171 171 try:
172 172 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
173 173 except (KeyboardInterrupt, util.SignalInterrupt):
174 174 raise
175 175 except Exception, exc:
176 176 if isinstance(exc, util.Abort):
177 177 self.ui.warn(_('error: %s hook failed: %s\n') %
178 178 (hname, exc.args[0]))
179 179 else:
180 180 self.ui.warn(_('error: %s hook raised an exception: '
181 181 '%s\n') % (hname, exc))
182 182 if throw:
183 183 raise
184 184 self.ui.print_exc()
185 185 return True
186 186 if r:
187 187 if throw:
188 188 raise util.Abort(_('%s hook failed') % hname)
189 189 self.ui.warn(_('warning: %s hook failed\n') % hname)
190 190 return r
191 191
192 192 def runhook(name, cmd):
193 193 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
194 194 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
195 195 r = util.system(cmd, environ=env, cwd=self.root)
196 196 if r:
197 197 desc, r = util.explain_exit(r)
198 198 if throw:
199 199 raise util.Abort(_('%s hook %s') % (name, desc))
200 200 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
201 201 return r
202 202
203 203 r = False
204 204 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
205 205 if hname.split(".", 1)[0] == name and cmd]
206 206 hooks.sort()
207 207 for hname, cmd in hooks:
208 208 if cmd.startswith('python:'):
209 209 r = callhook(hname, cmd[7:].strip()) or r
210 210 else:
211 211 r = runhook(hname, cmd) or r
212 212 return r
213 213
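As the callhook docstring above notes, an in-process hook is just a Python callable named in hgrc with a python: prefix; it is called with ui, repo, hooktype and the hook's keyword arguments, and a truthy return value (or an exception) counts as failure. A small illustrative hook, with hypothetical module and function names:

    # myhooks.py -- hypothetical module somewhere on Python's import path
    def forbid_merges(ui, repo, hooktype, **kwargs):
        '''abort any commit that has two parents'''
        if kwargs.get('parent2'):
            ui.warn('merge commits are not allowed in this repository\n')
            return True     # "true" means the hook failed, so the commit aborts
        return False        # a falsy return lets the operation continue

    # enabled from an hgrc file with something like:
    #   [hooks]
    #   pretxncommit.nomerges = python:myhooks.forbid_merges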
214 214 tag_disallowed = ':\r\n'
215 215
216 216 def tag(self, name, node, message, local, user, date):
217 217 '''tag a revision with a symbolic name.
218 218
219 219 if local is True, the tag is stored in a per-repository file.
220 220 otherwise, it is stored in the .hgtags file, and a new
221 221 changeset is committed with the change.
222 222
223 223 keyword arguments:
224 224
225 225 local: whether to store tag in non-version-controlled file
226 226 (default False)
227 227
228 228 message: commit message to use if committing
229 229
230 230 user: name of user to use if committing
231 231
232 232 date: date tuple to use if committing'''
233 233
234 234 for c in self.tag_disallowed:
235 235 if c in name:
236 236 raise util.Abort(_('%r cannot be used in a tag name') % c)
237 237
238 238 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
239 239
240 240 if local:
241 241 # local tags are stored in the current charset
242 242 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
243 243 self.hook('tag', node=hex(node), tag=name, local=local)
244 244 return
245 245
246 246 for x in self.status()[:5]:
247 247 if '.hgtags' in x:
248 248 raise util.Abort(_('working copy of .hgtags is changed '
249 249 '(please commit .hgtags manually)'))
250 250
251 251 # committed tags are stored in UTF-8
252 252 line = '%s %s\n' % (hex(node), util.fromlocal(name))
253 253 self.wfile('.hgtags', 'ab').write(line)
254 254 if self.dirstate.state('.hgtags') == '?':
255 255 self.add(['.hgtags'])
256 256
257 257 self.commit(['.hgtags'], message, user, date)
258 258 self.hook('tag', node=hex(node), tag=name, local=local)
259 259
260 260 def tags(self):
261 261 '''return a mapping of tag to node'''
262 262 if not self.tagscache:
263 263 self.tagscache = {}
264 264
265 265 def parsetag(line, context):
266 266 if not line:
267 267 return
268 268 s = l.split(" ", 1)
269 269 if len(s) != 2:
270 270 self.ui.warn(_("%s: cannot parse entry\n") % context)
271 271 return
272 272 node, key = s
273 273 key = util.tolocal(key.strip()) # stored in UTF-8
274 274 try:
275 275 bin_n = bin(node)
276 276 except TypeError:
277 277 self.ui.warn(_("%s: node '%s' is not well formed\n") %
278 278 (context, node))
279 279 return
280 280 if bin_n not in self.changelog.nodemap:
281 281 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
282 282 (context, key))
283 283 return
284 284 self.tagscache[key] = bin_n
285 285
286 286 # read the tags file from each head, ending with the tip,
287 287 # and add each tag found to the map, with "newer" ones
288 288 # taking precedence
289 289 f = None
290 290 for rev, node, fnode in self._hgtagsnodes():
291 291 f = (f and f.filectx(fnode) or
292 292 self.filectx('.hgtags', fileid=fnode))
293 293 count = 0
294 294 for l in f.data().splitlines():
295 295 count += 1
296 296 parsetag(l, _("%s, line %d") % (str(f), count))
297 297
298 298 try:
299 299 f = self.opener("localtags")
300 300 count = 0
301 301 for l in f:
302 302 # localtags are stored in the local character set
303 303 # while the internal tag table is stored in UTF-8
304 304 l = util.fromlocal(l)
305 305 count += 1
306 306 parsetag(l, _("localtags, line %d") % count)
307 307 except IOError:
308 308 pass
309 309
310 310 self.tagscache['tip'] = self.changelog.tip()
311 311
312 312 return self.tagscache
313 313
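tags() above builds its cache by reading .hgtags from every head, oldest first and tip last, so a later entry for the same name simply overwrites an earlier one; localtags are layered on afterwards and 'tip' is always added at the end. The precedence rule by itself can be sketched as follows (mergetags is a made-up name, and the node hashes are dummies):

    def mergetags(hgtags_contents):
        '''hgtags_contents: .hgtags data from each head, oldest first, tip last'''
        tags = {}
        for data in hgtags_contents:
            for line in data.splitlines():
                if not line.strip():
                    continue
                node, name = line.split(" ", 1)
                tags[name.strip()] = node          # newer entries win
        return tags

    older = "1111111111111111111111111111111111111111 release\n"
    newer = "2222222222222222222222222222222222222222 release\n"
    print(mergetags([older, newer])["release"])    # -> 2222...2222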
314 314 def _hgtagsnodes(self):
315 315 heads = self.heads()
316 316 heads.reverse()
317 317 last = {}
318 318 ret = []
319 319 for node in heads:
320 320 c = self.changectx(node)
321 321 rev = c.rev()
322 322 try:
323 323 fnode = c.filenode('.hgtags')
324 324 except revlog.LookupError:
325 325 continue
326 326 ret.append((rev, node, fnode))
327 327 if fnode in last:
328 328 ret[last[fnode]] = None
329 329 last[fnode] = len(ret) - 1
330 330 return [item for item in ret if item]
331 331
332 332 def tagslist(self):
333 333 '''return a list of tags ordered by revision'''
334 334 l = []
335 335 for t, n in self.tags().items():
336 336 try:
337 337 r = self.changelog.rev(n)
338 338 except:
339 339 r = -2 # sort to the beginning of the list if unknown
340 340 l.append((r, t, n))
341 341 l.sort()
342 342 return [(t, n) for r, t, n in l]
343 343
344 344 def nodetags(self, node):
345 345 '''return the tags associated with a node'''
346 346 if not self.nodetagscache:
347 347 self.nodetagscache = {}
348 348 for t, n in self.tags().items():
349 349 self.nodetagscache.setdefault(n, []).append(t)
350 350 return self.nodetagscache.get(node, [])
351 351
352 352 def _branchtags(self):
353 353 partial, last, lrev = self._readbranchcache()
354 354
355 355 tiprev = self.changelog.count() - 1
356 356 if lrev != tiprev:
357 357 self._updatebranchcache(partial, lrev+1, tiprev+1)
358 358 self._writebranchcache(partial, self.changelog.tip(), tiprev)
359 359
360 360 return partial
361 361
362 362 def branchtags(self):
363 363 if self.branchcache is not None:
364 364 return self.branchcache
365 365
366 366 self.branchcache = {} # avoid recursion in changectx
367 367 partial = self._branchtags()
368 368
369 369 # the branch cache is stored on disk as UTF-8, but in the local
370 370 # charset internally
371 371 for k, v in partial.items():
372 372 self.branchcache[util.tolocal(k)] = v
373 373 return self.branchcache
374 374
375 375 def _readbranchcache(self):
376 376 partial = {}
377 377 try:
378 378 f = self.opener("branches.cache")
379 379 lines = f.read().split('\n')
380 380 f.close()
381 381 last, lrev = lines.pop(0).rstrip().split(" ", 1)
382 382 last, lrev = bin(last), int(lrev)
383 383 if not (lrev < self.changelog.count() and
384 384 self.changelog.node(lrev) == last): # sanity check
385 385 # invalidate the cache
386 386 raise ValueError('Invalid branch cache: unknown tip')
387 387 for l in lines:
388 388 if not l: continue
389 389 node, label = l.rstrip().split(" ", 1)
390 390 partial[label] = bin(node)
391 391 except (KeyboardInterrupt, util.SignalInterrupt):
392 392 raise
393 393 except Exception, inst:
394 394 if self.ui.debugflag:
395 395 self.ui.warn(str(inst), '\n')
396 396 partial, last, lrev = {}, nullid, nullrev
397 397 return partial, last, lrev
398 398
399 399 def _writebranchcache(self, branches, tip, tiprev):
400 400 try:
401 401 f = self.opener("branches.cache", "w")
402 402 f.write("%s %s\n" % (hex(tip), tiprev))
403 403 for label, node in branches.iteritems():
404 404 f.write("%s %s\n" % (hex(node), label))
405 405 except IOError:
406 406 pass
407 407
408 408 def _updatebranchcache(self, partial, start, end):
409 409 for r in xrange(start, end):
410 410 c = self.changectx(r)
411 411 b = c.branch()
412 412 if b:
413 413 partial[b] = c.node()
414 414
415 415 def lookup(self, key):
416 416 if key == '.':
417 417 key = self.dirstate.parents()[0]
418 418 if key == nullid:
419 419 raise repo.RepoError(_("no revision checked out"))
420 420 elif key == 'null':
421 421 return nullid
422 422 n = self.changelog._match(key)
423 423 if n:
424 424 return n
425 425 if key in self.tags():
426 426 return self.tags()[key]
427 427 if key in self.branchtags():
428 428 return self.branchtags()[key]
429 429 n = self.changelog._partialmatch(key)
430 430 if n:
431 431 return n
432 432 raise repo.RepoError(_("unknown revision '%s'") % key)
433 433
434 434 def dev(self):
435 435 return os.lstat(self.path).st_dev
436 436
437 437 def local(self):
438 438 return True
439 439
440 440 def join(self, f):
441 441 return os.path.join(self.path, f)
442 442
443 443 def sjoin(self, f):
444 444 f = self.encodefn(f)
445 445 return os.path.join(self.spath, f)
446 446
447 447 def wjoin(self, f):
448 448 return os.path.join(self.root, f)
449 449
450 450 def file(self, f):
451 451 if f[0] == '/':
452 452 f = f[1:]
453 453 return filelog.filelog(self.sopener, f, self.revlogversion)
454 454
455 455 def changectx(self, changeid=None):
456 456 return context.changectx(self, changeid)
457 457
458 458 def workingctx(self):
459 459 return context.workingctx(self)
460 460
461 461 def parents(self, changeid=None):
462 462 '''
463 463 get list of changectxs for parents of changeid or working directory
464 464 '''
465 465 if changeid is None:
466 466 pl = self.dirstate.parents()
467 467 else:
468 468 n = self.changelog.lookup(changeid)
469 469 pl = self.changelog.parents(n)
470 470 if pl[1] == nullid:
471 471 return [self.changectx(pl[0])]
472 472 return [self.changectx(pl[0]), self.changectx(pl[1])]
473 473
474 474 def filectx(self, path, changeid=None, fileid=None):
475 475 """changeid can be a changeset revision, node, or tag.
476 476 fileid can be a file revision or node."""
477 477 return context.filectx(self, path, changeid, fileid)
478 478
479 479 def getcwd(self):
480 480 return self.dirstate.getcwd()
481 481
482 482 def wfile(self, f, mode='r'):
483 483 return self.wopener(f, mode)
484 484
485 485 def _filter(self, filter, filename, data):
486 486 if filter not in self.filterpats:
487 487 l = []
488 488 for pat, cmd in self.ui.configitems(filter):
489 489 mf = util.matcher(self.root, "", [pat], [], [])[1]
490 490 l.append((mf, cmd))
491 491 self.filterpats[filter] = l
492 492
493 493 for mf, cmd in self.filterpats[filter]:
494 494 if mf(filename):
495 495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 496 data = util.filter(data, cmd)
497 497 break
498 498
499 499 return data
500 500
501 501 def wread(self, filename):
502 502 if self._link(filename):
503 503 data = os.readlink(self.wjoin(filename))
504 504 else:
505 505 data = self.wopener(filename, 'r').read()
506 506 return self._filter("encode", filename, data)
507 507
508 508 def wwrite(self, filename, data, flags):
509 509 data = self._filter("decode", filename, data)
510 510 if "l" in flags:
511 511 try:
512 512 os.unlink(self.wjoin(filename))
513 513 except OSError:
514 514 pass
515 515 os.symlink(data, self.wjoin(filename))
516 516 else:
517 517 try:
518 518 if self._link(filename):
519 519 os.unlink(self.wjoin(filename))
520 520 except OSError:
521 521 pass
522 522 self.wopener(filename, 'w').write(data)
523 523 util.set_exec(self.wjoin(filename), "x" in flags)
524 524
525 525 def wwritedata(self, filename, data):
526 526 return self._filter("decode", filename, data)
527 527
528 528 def transaction(self):
529 529 tr = self.transhandle
530 530 if tr != None and tr.running():
531 531 return tr.nest()
532 532
533 533 # save dirstate for rollback
534 534 try:
535 535 ds = self.opener("dirstate").read()
536 536 except IOError:
537 537 ds = ""
538 538 self.opener("journal.dirstate", "w").write(ds)
539 539
540 540 renames = [(self.sjoin("journal"), self.sjoin("undo")),
541 541 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
542 542 tr = transaction.transaction(self.ui.warn, self.sopener,
543 543 self.sjoin("journal"),
544 544 aftertrans(renames))
545 545 self.transhandle = tr
546 546 return tr
547 547
548 548 def recover(self):
549 549 l = self.lock()
550 550 if os.path.exists(self.sjoin("journal")):
551 551 self.ui.status(_("rolling back interrupted transaction\n"))
552 552 transaction.rollback(self.sopener, self.sjoin("journal"))
553 553 self.reload()
554 554 return True
555 555 else:
556 556 self.ui.warn(_("no interrupted transaction available\n"))
557 557 return False
558 558
559 559 def rollback(self, wlock=None):
560 560 if not wlock:
561 561 wlock = self.wlock()
562 562 l = self.lock()
563 563 if os.path.exists(self.sjoin("undo")):
564 564 self.ui.status(_("rolling back last transaction\n"))
565 565 transaction.rollback(self.sopener, self.sjoin("undo"))
566 566 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
567 567 self.reload()
568 568 self.wreload()
569 569 else:
570 570 self.ui.warn(_("no rollback information available\n"))
571 571
572 572 def wreload(self):
573 573 self.dirstate.read()
574 574
575 575 def reload(self):
576 576 self.changelog.load()
577 577 self.manifest.load()
578 578 self.tagscache = None
579 579 self.nodetagscache = None
580 580
581 581 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
582 582 desc=None):
583 583 try:
584 584 l = lock.lock(lockname, 0, releasefn, desc=desc)
585 585 except lock.LockHeld, inst:
586 586 if not wait:
587 587 raise
588 588 self.ui.warn(_("waiting for lock on %s held by %r\n") %
589 589 (desc, inst.locker))
590 590 # default to 600 seconds timeout
591 591 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
592 592 releasefn, desc=desc)
593 593 if acquirefn:
594 594 acquirefn()
595 595 return l
596 596
597 597 def lock(self, wait=1):
598 598 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
599 599 desc=_('repository %s') % self.origroot)
600 600
601 601 def wlock(self, wait=1):
602 602 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
603 603 self.wreload,
604 604 desc=_('working directory of %s') % self.origroot)
605 605
606 606 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
607 607 """
608 608 commit an individual file as part of a larger transaction
609 609 """
610 610
611 611 t = self.wread(fn)
612 612 fl = self.file(fn)
613 613 fp1 = manifest1.get(fn, nullid)
614 614 fp2 = manifest2.get(fn, nullid)
615 615
616 616 meta = {}
617 617 cp = self.dirstate.copied(fn)
618 618 if cp:
619 # Mark the new revision of this file as a copy of another
620 # file. This copy data will effectively act as a parent
621 # of this new revision. If this is a merge, the first
622 # parent will be the nullid (meaning "look up the copy data")
623 # and the second one will be the other parent. For example:
624 #
625 # 0 --- 1 --- 3 rev1 changes file foo
626 # \ / rev2 renames foo to bar and changes it
627 # \- 2 -/ rev3 should have bar with all changes and
628 # should record that bar descends from
629 # bar in rev2 and foo in rev1
630 #
631 # this allows this merge to succeed:
632 #
633 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
634 # \ / merging rev3 and rev4 should use bar@rev2
635 # \- 2 --- 4 as the merge base
636 #
619 637 meta["copy"] = cp
620 638 if not manifest2: # not a branch merge
621 639 meta["copyrev"] = hex(manifest1.get(cp, nullid))
622 640 fp2 = nullid
623 641 elif fp2 != nullid: # copied on remote side
624 642 meta["copyrev"] = hex(manifest1.get(cp, nullid))
625 643 elif fp1 != nullid: # copied on local side, reversed
626 644 meta["copyrev"] = hex(manifest2.get(cp))
627 fp2 = nullid
645 fp2 = fp1
628 646 else: # directory rename
629 647 meta["copyrev"] = hex(manifest1.get(cp, nullid))
630 648 self.ui.debug(_(" %s: copy %s:%s\n") %
631 649 (fn, cp, meta["copyrev"]))
632 650 fp1 = nullid
633 651 elif fp2 != nullid:
634 652 # is one parent an ancestor of the other?
635 653 fpa = fl.ancestor(fp1, fp2)
636 654 if fpa == fp1:
637 655 fp1, fp2 = fp2, nullid
638 656 elif fpa == fp2:
639 657 fp2 = nullid
640 658
641 659 # is the file unmodified from the parent? report existing entry
642 660 if fp2 == nullid and not fl.cmp(fp1, t):
643 661 return fp1
644 662
645 663 changelist.append(fn)
646 664 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
647 665
648 666 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
649 667 if p1 is None:
650 668 p1, p2 = self.dirstate.parents()
651 669 return self.commit(files=files, text=text, user=user, date=date,
652 670 p1=p1, p2=p2, wlock=wlock, extra=extra)
653 671
654 672 def commit(self, files=None, text="", user=None, date=None,
655 673 match=util.always, force=False, lock=None, wlock=None,
656 674 force_editor=False, p1=None, p2=None, extra={}):
657 675
658 676 commit = []
659 677 remove = []
660 678 changed = []
661 679 use_dirstate = (p1 is None) # not rawcommit
662 680 extra = extra.copy()
663 681
664 682 if use_dirstate:
665 683 if files:
666 684 for f in files:
667 685 s = self.dirstate.state(f)
668 686 if s in 'nmai':
669 687 commit.append(f)
670 688 elif s == 'r':
671 689 remove.append(f)
672 690 else:
673 691 self.ui.warn(_("%s not tracked!\n") % f)
674 692 else:
675 693 changes = self.status(match=match)[:5]
676 694 modified, added, removed, deleted, unknown = changes
677 695 commit = modified + added
678 696 remove = removed
679 697 else:
680 698 commit = files
681 699
682 700 if use_dirstate:
683 701 p1, p2 = self.dirstate.parents()
684 702 update_dirstate = True
685 703 else:
686 704 p1, p2 = p1, p2 or nullid
687 705 update_dirstate = (self.dirstate.parents()[0] == p1)
688 706
689 707 c1 = self.changelog.read(p1)
690 708 c2 = self.changelog.read(p2)
691 709 m1 = self.manifest.read(c1[0]).copy()
692 710 m2 = self.manifest.read(c2[0])
693 711
694 712 if use_dirstate:
695 713 branchname = self.workingctx().branch()
696 714 try:
697 715 branchname = branchname.decode('UTF-8').encode('UTF-8')
698 716 except UnicodeDecodeError:
699 717 raise util.Abort(_('branch name not in UTF-8!'))
700 718 else:
701 719 branchname = ""
702 720
703 721 if use_dirstate:
704 722 oldname = c1[5].get("branch", "") # stored in UTF-8
705 723 if not commit and not remove and not force and p2 == nullid and \
706 724 branchname == oldname:
707 725 self.ui.status(_("nothing changed\n"))
708 726 return None
709 727
710 728 xp1 = hex(p1)
711 729 if p2 == nullid: xp2 = ''
712 730 else: xp2 = hex(p2)
713 731
714 732 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
715 733
716 734 if not wlock:
717 735 wlock = self.wlock()
718 736 if not lock:
719 737 lock = self.lock()
720 738 tr = self.transaction()
721 739
722 740 # check in files
723 741 new = {}
724 742 linkrev = self.changelog.count()
725 743 commit.sort()
726 744 is_exec = util.execfunc(self.root, m1.execf)
727 745 is_link = util.linkfunc(self.root, m1.linkf)
728 746 for f in commit:
729 747 self.ui.note(f + "\n")
730 748 try:
731 749 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
732 750 m1.set(f, is_exec(f), is_link(f))
733 751 except OSError:
734 752 if use_dirstate:
735 753 self.ui.warn(_("trouble committing %s!\n") % f)
736 754 raise
737 755 else:
738 756 remove.append(f)
739 757
740 758 # update manifest
741 759 m1.update(new)
742 760 remove.sort()
743 761 removed = []
744 762
745 763 for f in remove:
746 764 if f in m1:
747 765 del m1[f]
748 766 removed.append(f)
749 767 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
750 768
751 769 # add changeset
752 770 new = new.keys()
753 771 new.sort()
754 772
755 773 user = user or self.ui.username()
756 774 if not text or force_editor:
757 775 edittext = []
758 776 if text:
759 777 edittext.append(text)
760 778 edittext.append("")
761 779 edittext.append("HG: user: %s" % user)
762 780 if p2 != nullid:
763 781 edittext.append("HG: branch merge")
764 782 if branchname:
765 783 edittext.append("HG: branch %s" % util.tolocal(branchname))
766 784 edittext.extend(["HG: changed %s" % f for f in changed])
767 785 edittext.extend(["HG: removed %s" % f for f in removed])
768 786 if not changed and not remove:
769 787 edittext.append("HG: no files changed")
770 788 edittext.append("")
771 789 # run editor in the repository root
772 790 olddir = os.getcwd()
773 791 os.chdir(self.root)
774 792 text = self.ui.edit("\n".join(edittext), user)
775 793 os.chdir(olddir)
776 794
777 795 lines = [line.rstrip() for line in text.rstrip().splitlines()]
778 796 while lines and not lines[0]:
779 797 del lines[0]
780 798 if not lines:
781 799 return None
782 800 text = '\n'.join(lines)
783 801 if branchname:
784 802 extra["branch"] = branchname
785 803 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
786 804 user, date, extra)
787 805 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
788 806 parent2=xp2)
789 807 tr.close()
790 808
791 809 if self.branchcache and "branch" in extra:
792 810 self.branchcache[util.tolocal(extra["branch"])] = n
793 811
794 812 if use_dirstate or update_dirstate:
795 813 self.dirstate.setparents(n)
796 814 if use_dirstate:
797 815 self.dirstate.update(new, "n")
798 816 self.dirstate.forget(removed)
799 817
800 818 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
801 819 return n
802 820
803 821 def walk(self, node=None, files=[], match=util.always, badmatch=None):
804 822 '''
805 823 walk recursively through the directory tree or a given
806 824 changeset, finding all files matched by the match
807 825 function
808 826
809 827 results are yielded in a tuple (src, filename), where src
810 828 is one of:
811 829 'f' the file was found in the directory tree
812 830 'm' the file was only in the dirstate and not in the tree
813 831 'b' file was not found and matched badmatch
814 832 '''
815 833
816 834 if node:
817 835 fdict = dict.fromkeys(files)
818 836 for fn in self.manifest.read(self.changelog.read(node)[0]):
819 837 for ffn in fdict:
820 838 # match if the file is the exact name or a directory
821 839 if ffn == fn or fn.startswith("%s/" % ffn):
822 840 del fdict[ffn]
823 841 break
824 842 if match(fn):
825 843 yield 'm', fn
826 844 for fn in fdict:
827 845 if badmatch and badmatch(fn):
828 846 if match(fn):
829 847 yield 'b', fn
830 848 else:
831 849 self.ui.warn(_('%s: No such file in rev %s\n') % (
832 850 util.pathto(self.getcwd(), fn), short(node)))
833 851 else:
834 852 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
835 853 yield src, fn
836 854
837 855 def status(self, node1=None, node2=None, files=[], match=util.always,
838 856 wlock=None, list_ignored=False, list_clean=False):
839 857 """return status of files between two nodes or node and working directory
840 858
841 859 If node1 is None, use the first dirstate parent instead.
842 860 If node2 is None, compare node1 with working directory.
843 861 """
844 862
845 863 def fcmp(fn, mf):
846 864 t1 = self.wread(fn)
847 865 return self.file(fn).cmp(mf.get(fn, nullid), t1)
848 866
849 867 def mfmatches(node):
850 868 change = self.changelog.read(node)
851 869 mf = self.manifest.read(change[0]).copy()
852 870 for fn in mf.keys():
853 871 if not match(fn):
854 872 del mf[fn]
855 873 return mf
856 874
857 875 modified, added, removed, deleted, unknown = [], [], [], [], []
858 876 ignored, clean = [], []
859 877
860 878 compareworking = False
861 879 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
862 880 compareworking = True
863 881
864 882 if not compareworking:
865 883 # read the manifest from node1 before the manifest from node2,
866 884 # so that we'll hit the manifest cache if we're going through
867 885 # all the revisions in parent->child order.
868 886 mf1 = mfmatches(node1)
869 887
870 888 # are we comparing the working directory?
871 889 if not node2:
872 890 if not wlock:
873 891 try:
874 892 wlock = self.wlock(wait=0)
875 893 except lock.LockException:
876 894 wlock = None
877 895 (lookup, modified, added, removed, deleted, unknown,
878 896 ignored, clean) = self.dirstate.status(files, match,
879 897 list_ignored, list_clean)
880 898
881 899 # are we comparing working dir against its parent?
882 900 if compareworking:
883 901 if lookup:
884 902 # do a full compare of any files that might have changed
885 903 mf2 = mfmatches(self.dirstate.parents()[0])
886 904 for f in lookup:
887 905 if fcmp(f, mf2):
888 906 modified.append(f)
889 907 else:
890 908 clean.append(f)
891 909 if wlock is not None:
892 910 self.dirstate.update([f], "n")
893 911 else:
894 912 # we are comparing working dir against non-parent
895 913 # generate a pseudo-manifest for the working dir
896 914 # XXX: create it in dirstate.py ?
897 915 mf2 = mfmatches(self.dirstate.parents()[0])
898 916 is_exec = util.execfunc(self.root, mf2.execf)
899 917 is_link = util.linkfunc(self.root, mf2.linkf)
900 918 for f in lookup + modified + added:
901 919 mf2[f] = ""
902 920 mf2.set(f, is_exec(f), is_link(f))
903 921 for f in removed:
904 922 if f in mf2:
905 923 del mf2[f]
906 924 else:
907 925 # we are comparing two revisions
908 926 mf2 = mfmatches(node2)
909 927
910 928 if not compareworking:
911 929 # flush lists from dirstate before comparing manifests
912 930 modified, added, clean = [], [], []
913 931
914 932 # make sure to sort the files so we talk to the disk in a
915 933 # reasonable order
916 934 mf2keys = mf2.keys()
917 935 mf2keys.sort()
918 936 for fn in mf2keys:
919 937 if mf1.has_key(fn):
920 938 if mf1.flags(fn) != mf2.flags(fn) or \
921 939 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
922 940 modified.append(fn)
923 941 elif list_clean:
924 942 clean.append(fn)
925 943 del mf1[fn]
926 944 else:
927 945 added.append(fn)
928 946
929 947 removed = mf1.keys()
930 948
931 949 # sort and return results:
932 950 for l in modified, added, removed, deleted, unknown, ignored, clean:
933 951 l.sort()
934 952 return (modified, added, removed, deleted, unknown, ignored, clean)
935 953
936 954 def add(self, list, wlock=None):
937 955 if not wlock:
938 956 wlock = self.wlock()
939 957 for f in list:
940 958 p = self.wjoin(f)
941 959 islink = os.path.islink(p)
942 960 if not islink and not os.path.exists(p):
943 961 self.ui.warn(_("%s does not exist!\n") % f)
944 962 elif not islink and not os.path.isfile(p):
945 963 self.ui.warn(_("%s not added: only files and symlinks "
946 964 "supported currently\n") % f)
947 965 elif self.dirstate.state(f) in 'an':
948 966 self.ui.warn(_("%s already tracked!\n") % f)
949 967 else:
950 968 self.dirstate.update([f], "a")
951 969
952 970 def forget(self, list, wlock=None):
953 971 if not wlock:
954 972 wlock = self.wlock()
955 973 for f in list:
956 974 if self.dirstate.state(f) not in 'ai':
957 975 self.ui.warn(_("%s not added!\n") % f)
958 976 else:
959 977 self.dirstate.forget([f])
960 978
961 979 def remove(self, list, unlink=False, wlock=None):
962 980 if unlink:
963 981 for f in list:
964 982 try:
965 983 util.unlink(self.wjoin(f))
966 984 except OSError, inst:
967 985 if inst.errno != errno.ENOENT:
968 986 raise
969 987 if not wlock:
970 988 wlock = self.wlock()
971 989 for f in list:
972 990 p = self.wjoin(f)
973 991 if os.path.exists(p):
974 992 self.ui.warn(_("%s still exists!\n") % f)
975 993 elif self.dirstate.state(f) == 'a':
976 994 self.dirstate.forget([f])
977 995 elif f not in self.dirstate:
978 996 self.ui.warn(_("%s not tracked!\n") % f)
979 997 else:
980 998 self.dirstate.update([f], "r")
981 999
982 1000 def undelete(self, list, wlock=None):
983 1001 p = self.dirstate.parents()[0]
984 1002 mn = self.changelog.read(p)[0]
985 1003 m = self.manifest.read(mn)
986 1004 if not wlock:
987 1005 wlock = self.wlock()
988 1006 for f in list:
989 1007 if self.dirstate.state(f) not in "r":
990 1008 self.ui.warn("%s not removed!\n" % f)
991 1009 else:
992 1010 t = self.file(f).read(m[f])
993 1011 self.wwrite(f, t, m.flags(f))
994 1012 self.dirstate.update([f], "n")
995 1013
996 1014 def copy(self, source, dest, wlock=None):
997 1015 p = self.wjoin(dest)
998 1016 if not os.path.exists(p):
999 1017 self.ui.warn(_("%s does not exist!\n") % dest)
1000 1018 elif not os.path.isfile(p):
1001 1019 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1002 1020 else:
1003 1021 if not wlock:
1004 1022 wlock = self.wlock()
1005 1023 if self.dirstate.state(dest) == '?':
1006 1024 self.dirstate.update([dest], "a")
1007 1025 self.dirstate.copy(source, dest)
1008 1026
1009 1027 def heads(self, start=None):
1010 1028 heads = self.changelog.heads(start)
1011 1029 # sort the output in rev descending order
1012 1030 heads = [(-self.changelog.rev(h), h) for h in heads]
1013 1031 heads.sort()
1014 1032 return [n for (r, n) in heads]
1015 1033
1016 1034 def branches(self, nodes):
1017 1035 if not nodes:
1018 1036 nodes = [self.changelog.tip()]
1019 1037 b = []
1020 1038 for n in nodes:
1021 1039 t = n
1022 1040 while 1:
1023 1041 p = self.changelog.parents(n)
1024 1042 if p[1] != nullid or p[0] == nullid:
1025 1043 b.append((t, n, p[0], p[1]))
1026 1044 break
1027 1045 n = p[0]
1028 1046 return b
1029 1047
1030 1048 def between(self, pairs):
1031 1049 r = []
1032 1050
1033 1051 for top, bottom in pairs:
1034 1052 n, l, i = top, [], 0
1035 1053 f = 1
1036 1054
1037 1055 while n != bottom:
1038 1056 p = self.changelog.parents(n)[0]
1039 1057 if i == f:
1040 1058 l.append(n)
1041 1059 f = f * 2
1042 1060 n = p
1043 1061 i += 1
1044 1062
1045 1063 r.append(l)
1046 1064
1047 1065 return r
1048 1066
1049 1067 def findincoming(self, remote, base=None, heads=None, force=False):
1050 1068 """Return list of roots of the subsets of missing nodes from remote
1051 1069
1052 1070 If base dict is specified, assume that these nodes and their parents
1053 1071 exist on the remote side and that no child of a node of base exists
1054 1072 in both remote and self.
1055 1073 Furthermore base will be updated to include the nodes that exist
1056 1074 in both self and remote but none of whose children exist in both.
1057 1075 If a list of heads is specified, return only nodes which are heads
1058 1076 or ancestors of these heads.
1059 1077
1060 1078 All the ancestors of base are in self and in remote.
1061 1079 All the descendants of the list returned are missing in self.
1062 1080 (and so we know that the rest of the nodes are missing in remote, see
1063 1081 outgoing)
1064 1082 """
1065 1083 m = self.changelog.nodemap
1066 1084 search = []
1067 1085 fetch = {}
1068 1086 seen = {}
1069 1087 seenbranch = {}
1070 1088 if base == None:
1071 1089 base = {}
1072 1090
1073 1091 if not heads:
1074 1092 heads = remote.heads()
1075 1093
1076 1094 if self.changelog.tip() == nullid:
1077 1095 base[nullid] = 1
1078 1096 if heads != [nullid]:
1079 1097 return [nullid]
1080 1098 return []
1081 1099
1082 1100 # assume we're closer to the tip than the root
1083 1101 # and start by examining the heads
1084 1102 self.ui.status(_("searching for changes\n"))
1085 1103
1086 1104 unknown = []
1087 1105 for h in heads:
1088 1106 if h not in m:
1089 1107 unknown.append(h)
1090 1108 else:
1091 1109 base[h] = 1
1092 1110
1093 1111 if not unknown:
1094 1112 return []
1095 1113
1096 1114 req = dict.fromkeys(unknown)
1097 1115 reqcnt = 0
1098 1116
1099 1117 # search through remote branches
1100 1118 # a 'branch' here is a linear segment of history, with four parts:
1101 1119 # head, root, first parent, second parent
1102 1120 # (a branch always has two parents (or none) by definition)
1103 1121 unknown = remote.branches(unknown)
1104 1122 while unknown:
1105 1123 r = []
1106 1124 while unknown:
1107 1125 n = unknown.pop(0)
1108 1126 if n[0] in seen:
1109 1127 continue
1110 1128
1111 1129 self.ui.debug(_("examining %s:%s\n")
1112 1130 % (short(n[0]), short(n[1])))
1113 1131 if n[0] == nullid: # found the end of the branch
1114 1132 pass
1115 1133 elif n in seenbranch:
1116 1134 self.ui.debug(_("branch already found\n"))
1117 1135 continue
1118 1136 elif n[1] and n[1] in m: # do we know the base?
1119 1137 self.ui.debug(_("found incomplete branch %s:%s\n")
1120 1138 % (short(n[0]), short(n[1])))
1121 1139 search.append(n) # schedule branch range for scanning
1122 1140 seenbranch[n] = 1
1123 1141 else:
1124 1142 if n[1] not in seen and n[1] not in fetch:
1125 1143 if n[2] in m and n[3] in m:
1126 1144 self.ui.debug(_("found new changeset %s\n") %
1127 1145 short(n[1]))
1128 1146 fetch[n[1]] = 1 # earliest unknown
1129 1147 for p in n[2:4]:
1130 1148 if p in m:
1131 1149 base[p] = 1 # latest known
1132 1150
1133 1151 for p in n[2:4]:
1134 1152 if p not in req and p not in m:
1135 1153 r.append(p)
1136 1154 req[p] = 1
1137 1155 seen[n[0]] = 1
1138 1156
1139 1157 if r:
1140 1158 reqcnt += 1
1141 1159 self.ui.debug(_("request %d: %s\n") %
1142 1160 (reqcnt, " ".join(map(short, r))))
1143 1161 for p in xrange(0, len(r), 10):
1144 1162 for b in remote.branches(r[p:p+10]):
1145 1163 self.ui.debug(_("received %s:%s\n") %
1146 1164 (short(b[0]), short(b[1])))
1147 1165 unknown.append(b)
1148 1166
1149 1167 # do binary search on the branches we found
1150 1168 while search:
1151 1169 n = search.pop(0)
1152 1170 reqcnt += 1
1153 1171 l = remote.between([(n[0], n[1])])[0]
1154 1172 l.append(n[1])
1155 1173 p = n[0]
1156 1174 f = 1
1157 1175 for i in l:
1158 1176 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1159 1177 if i in m:
1160 1178 if f <= 2:
1161 1179 self.ui.debug(_("found new branch changeset %s\n") %
1162 1180 short(p))
1163 1181 fetch[p] = 1
1164 1182 base[i] = 1
1165 1183 else:
1166 1184 self.ui.debug(_("narrowed branch search to %s:%s\n")
1167 1185 % (short(p), short(i)))
1168 1186 search.append((p, i))
1169 1187 break
1170 1188 p, f = i, f * 2
1171 1189
1172 1190 # sanity check our fetch list
1173 1191 for f in fetch.keys():
1174 1192 if f in m:
1175 1193 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1176 1194
1177 1195 if base.keys() == [nullid]:
1178 1196 if force:
1179 1197 self.ui.warn(_("warning: repository is unrelated\n"))
1180 1198 else:
1181 1199 raise util.Abort(_("repository is unrelated"))
1182 1200
1183 1201 self.ui.debug(_("found new changesets starting at ") +
1184 1202 " ".join([short(f) for f in fetch]) + "\n")
1185 1203
1186 1204 self.ui.debug(_("%d total queries\n") % reqcnt)
1187 1205
1188 1206 return fetch.keys()
1189 1207
1190 1208 def findoutgoing(self, remote, base=None, heads=None, force=False):
1191 1209 """Return list of nodes that are roots of subsets not in remote
1192 1210
1193 1211 If base dict is specified, assume that these nodes and their parents
1194 1212 exist on the remote side.
1195 1213 If a list of heads is specified, return only nodes which are heads
1196 1214 or ancestors of these heads, and return a second element which
1197 1215 contains all remote heads which get new children.
1198 1216 """
1199 1217 if base == None:
1200 1218 base = {}
1201 1219 self.findincoming(remote, base, heads, force=force)
1202 1220
1203 1221 self.ui.debug(_("common changesets up to ")
1204 1222 + " ".join(map(short, base.keys())) + "\n")
1205 1223
1206 1224 remain = dict.fromkeys(self.changelog.nodemap)
1207 1225
1208 1226 # prune everything remote has from the tree
1209 1227 del remain[nullid]
1210 1228 remove = base.keys()
1211 1229 while remove:
1212 1230 n = remove.pop(0)
1213 1231 if n in remain:
1214 1232 del remain[n]
1215 1233 for p in self.changelog.parents(n):
1216 1234 remove.append(p)
1217 1235
1218 1236 # find every node whose parents have been pruned
1219 1237 subset = []
1220 1238 # find every remote head that will get new children
1221 1239 updated_heads = {}
1222 1240 for n in remain:
1223 1241 p1, p2 = self.changelog.parents(n)
1224 1242 if p1 not in remain and p2 not in remain:
1225 1243 subset.append(n)
1226 1244 if heads:
1227 1245 if p1 in heads:
1228 1246 updated_heads[p1] = True
1229 1247 if p2 in heads:
1230 1248 updated_heads[p2] = True
1231 1249
1232 1250 # this is the set of all roots we have to push
1233 1251 if heads:
1234 1252 return subset, updated_heads.keys()
1235 1253 else:
1236 1254 return subset
1237 1255
1238 1256 def pull(self, remote, heads=None, force=False, lock=None):
1239 1257 mylock = False
1240 1258 if not lock:
1241 1259 lock = self.lock()
1242 1260 mylock = True
1243 1261
1244 1262 try:
1245 1263 fetch = self.findincoming(remote, force=force)
1246 1264 if fetch == [nullid]:
1247 1265 self.ui.status(_("requesting all changes\n"))
1248 1266
1249 1267 if not fetch:
1250 1268 self.ui.status(_("no changes found\n"))
1251 1269 return 0
1252 1270
1253 1271 if heads is None:
1254 1272 cg = remote.changegroup(fetch, 'pull')
1255 1273 else:
1256 1274 if 'changegroupsubset' not in remote.capabilities:
1257 1275 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1258 1276 cg = remote.changegroupsubset(fetch, heads, 'pull')
1259 1277 return self.addchangegroup(cg, 'pull', remote.url())
1260 1278 finally:
1261 1279 if mylock:
1262 1280 lock.release()
1263 1281
1264 1282 def push(self, remote, force=False, revs=None):
1265 1283 # there are two ways to push to remote repo:
1266 1284 #
1267 1285 # addchangegroup assumes local user can lock remote
1268 1286 # repo (local filesystem, old ssh servers).
1269 1287 #
1270 1288 # unbundle assumes local user cannot lock remote repo (new ssh
1271 1289 # servers, http servers).
1272 1290
1273 1291 if remote.capable('unbundle'):
1274 1292 return self.push_unbundle(remote, force, revs)
1275 1293 return self.push_addchangegroup(remote, force, revs)
1276 1294
1277 1295 def prepush(self, remote, force, revs):
1278 1296 base = {}
1279 1297 remote_heads = remote.heads()
1280 1298 inc = self.findincoming(remote, base, remote_heads, force=force)
1281 1299
1282 1300 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1283 1301 if revs is not None:
1284 1302 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1285 1303 else:
1286 1304 bases, heads = update, self.changelog.heads()
1287 1305
1288 1306 if not bases:
1289 1307 self.ui.status(_("no changes found\n"))
1290 1308 return None, 1
1291 1309 elif not force:
1292 1310 # check if we're creating new remote heads
1293 1311 # to be a remote head after push, node must be either
1294 1312 # - unknown locally
1295 1313 # - a local outgoing head descended from update
1296 1314 # - a remote head that's known locally and not
1297 1315 # ancestral to an outgoing head
1298 1316
1299 1317 warn = 0
1300 1318
1301 1319 if remote_heads == [nullid]:
1302 1320 warn = 0
1303 1321 elif not revs and len(heads) > len(remote_heads):
1304 1322 warn = 1
1305 1323 else:
1306 1324 newheads = list(heads)
1307 1325 for r in remote_heads:
1308 1326 if r in self.changelog.nodemap:
1309 1327 desc = self.changelog.heads(r, heads)
1310 1328 l = [h for h in heads if h in desc]
1311 1329 if not l:
1312 1330 newheads.append(r)
1313 1331 else:
1314 1332 newheads.append(r)
1315 1333 if len(newheads) > len(remote_heads):
1316 1334 warn = 1
1317 1335
1318 1336 if warn:
1319 1337 self.ui.warn(_("abort: push creates new remote branches!\n"))
1320 1338 self.ui.status(_("(did you forget to merge?"
1321 1339 " use push -f to force)\n"))
1322 1340 return None, 1
1323 1341 elif inc:
1324 1342 self.ui.warn(_("note: unsynced remote changes!\n"))
1325 1343
1326 1344
1327 1345 if revs is None:
1328 1346 cg = self.changegroup(update, 'push')
1329 1347 else:
1330 1348 cg = self.changegroupsubset(update, revs, 'push')
1331 1349 return cg, remote_heads
1332 1350
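# --- Editor's illustrative sketch (not part of the original source). ---
# The head-counting logic above warns when a push would leave the remote with
# more heads than it already has.  A reduced standalone model, assuming a
# caller-supplied descends(h, r) predicate in place of changelog ancestry:
def _would_add_heads(local_heads, remote_heads, descends):
    newheads = list(local_heads)
    for r in remote_heads:
        # a remote head that no outgoing head descends from stays a head
        if not [h for h in local_heads if descends(h, r)]:
            newheads.append(r)
    return len(newheads) > len(remote_heads)

# pushing one head that descends from the single remote head: no warning
# _would_add_heads(['x'], ['a'], lambda h, r: True) == False
# pushing a second, unrelated head: warning
# _would_add_heads(['x', 'y'], ['a'], lambda h, r: h == 'x') == True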
1333 1351 def push_addchangegroup(self, remote, force, revs):
1334 1352 lock = remote.lock()
1335 1353
1336 1354 ret = self.prepush(remote, force, revs)
1337 1355 if ret[0] is not None:
1338 1356 cg, remote_heads = ret
1339 1357 return remote.addchangegroup(cg, 'push', self.url())
1340 1358 return ret[1]
1341 1359
1342 1360 def push_unbundle(self, remote, force, revs):
1343 1361 # local repo finds heads on server, finds out what revs it
1344 1362 # must push. once revs transferred, if server finds it has
1345 1363 # different heads (someone else won commit/push race), server
1346 1364 # aborts.
1347 1365
1348 1366 ret = self.prepush(remote, force, revs)
1349 1367 if ret[0] is not None:
1350 1368 cg, remote_heads = ret
1351 1369 if force: remote_heads = ['force']
1352 1370 return remote.unbundle(cg, remote_heads, 'push')
1353 1371 return ret[1]
1354 1372
1355 1373 def changegroupinfo(self, nodes):
1356 1374 self.ui.note(_("%d changesets found\n") % len(nodes))
1357 1375 if self.ui.debugflag:
1358 1376 self.ui.debug(_("List of changesets:\n"))
1359 1377 for node in nodes:
1360 1378 self.ui.debug("%s\n" % hex(node))
1361 1379
1362 1380 def changegroupsubset(self, bases, heads, source):
1363 1381 """This function generates a changegroup consisting of all the nodes
1364 1382 that are descendants of any of the bases, and ancestors of any of
1365 1383 the heads.
1366 1384
1367 1385 It is fairly complex as determining which filenodes and which
1368 1386 manifest nodes need to be included for the changeset to be complete
1369 1387 is non-trivial.
1370 1388
1371 1389 Another wrinkle is doing the reverse, figuring out which changeset in
1372 1390 the changegroup a particular filenode or manifestnode belongs to."""
1373 1391
1374 1392 self.hook('preoutgoing', throw=True, source=source)
1375 1393
1376 1394 # Set up some initial variables
1377 1395 # Make it easy to refer to self.changelog
1378 1396 cl = self.changelog
1379 1397 # msng is short for missing - compute the list of changesets in this
1380 1398 # changegroup.
1381 1399 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1382 1400 self.changegroupinfo(msng_cl_lst)
1383 1401 # Some bases may turn out to be superfluous, and some heads may be
1384 1402 # too. nodesbetween will return the minimal set of bases and heads
1385 1403 # necessary to re-create the changegroup.
1386 1404
1387 1405 # Known heads are the list of heads that it is assumed the recipient
1388 1406 # of this changegroup will know about.
1389 1407 knownheads = {}
1390 1408 # We assume that all parents of bases are known heads.
1391 1409 for n in bases:
1392 1410 for p in cl.parents(n):
1393 1411 if p != nullid:
1394 1412 knownheads[p] = 1
1395 1413 knownheads = knownheads.keys()
1396 1414 if knownheads:
1397 1415 # Now that we know what heads are known, we can compute which
1398 1416 # changesets are known. The recipient must know about all
1399 1417 # changesets required to reach the known heads from the null
1400 1418 # changeset.
1401 1419 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1402 1420 junk = None
1403 1421 # Transform the list into an ersatz set.
1404 1422 has_cl_set = dict.fromkeys(has_cl_set)
1405 1423 else:
1406 1424 # If there were no known heads, the recipient cannot be assumed to
1407 1425 # know about any changesets.
1408 1426 has_cl_set = {}
1409 1427
1410 1428 # Make it easy to refer to self.manifest
1411 1429 mnfst = self.manifest
1412 1430 # We don't know which manifests are missing yet
1413 1431 msng_mnfst_set = {}
1414 1432 # Nor do we know which filenodes are missing.
1415 1433 msng_filenode_set = {}
1416 1434
1417 1435 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1418 1436 junk = None
1419 1437
1420 1438 # A changeset always belongs to itself, so the changenode lookup
1421 1439 # function for a changenode is identity.
1422 1440 def identity(x):
1423 1441 return x
1424 1442
1425 1443 # A function generating function. Sets up an environment for the
1426 1444 # inner function.
1427 1445 def cmp_by_rev_func(revlog):
1428 1446 # Compare two nodes by their revision number in the environment's
1429 1447 # revision history. Since the revision number both represents the
1430 1448 # most efficient order to read the nodes in, and represents a
1431 1449 # topological sorting of the nodes, this function is often useful.
1432 1450 def cmp_by_rev(a, b):
1433 1451 return cmp(revlog.rev(a), revlog.rev(b))
1434 1452 return cmp_by_rev
1435 1453
1436 1454 # If we determine that a particular file or manifest node must be a
1437 1455 # node that the recipient of the changegroup will already have, we can
1438 1456 # also assume the recipient will have all the parents. This function
1439 1457 # prunes them from the set of missing nodes.
1440 1458 def prune_parents(revlog, hasset, msngset):
1441 1459 haslst = hasset.keys()
1442 1460 haslst.sort(cmp_by_rev_func(revlog))
1443 1461 for node in haslst:
1444 1462 parentlst = [p for p in revlog.parents(node) if p != nullid]
1445 1463 while parentlst:
1446 1464 n = parentlst.pop()
1447 1465 if n not in hasset:
1448 1466 hasset[n] = 1
1449 1467 p = [p for p in revlog.parents(n) if p != nullid]
1450 1468 parentlst.extend(p)
1451 1469 for n in hasset:
1452 1470 msngset.pop(n, None)
1453 1471
1454 1472 # This is a function generating function used to set up an environment
1455 1473 # for the inner function to execute in.
1456 1474 def manifest_and_file_collector(changedfileset):
1457 1475 # This is an information gathering function that gathers
1458 1476 # information from each changeset node that goes out as part of
1459 1477 # the changegroup. The information gathered is a list of which
1460 1478 # manifest nodes are potentially required (the recipient may
1461 1479 # already have them) and total list of all files which were
1462 1480 # changed in any changeset in the changegroup.
1463 1481 #
1464 1482 # We also remember the first changenode each manifest was
1465 1483 # referenced by, so we can later determine which changenode
1466 1484 # 'owns' the manifest.
1467 1485 def collect_manifests_and_files(clnode):
1468 1486 c = cl.read(clnode)
1469 1487 for f in c[3]:
1470 1488 # This is to make sure we only have one instance of each
1471 1489 # filename string for each filename.
1472 1490 changedfileset.setdefault(f, f)
1473 1491 msng_mnfst_set.setdefault(c[0], clnode)
1474 1492 return collect_manifests_and_files
1475 1493
1476 1494 # Figure out which manifest nodes (of the ones we think might be part
1477 1495 # of the changegroup) the recipient must know about and remove them
1478 1496 # from the changegroup.
1479 1497 def prune_manifests():
1480 1498 has_mnfst_set = {}
1481 1499 for n in msng_mnfst_set:
1482 1500 # If a 'missing' manifest thinks it belongs to a changenode
1483 1501 # the recipient is assumed to have, obviously the recipient
1484 1502 # must have that manifest.
1485 1503 linknode = cl.node(mnfst.linkrev(n))
1486 1504 if linknode in has_cl_set:
1487 1505 has_mnfst_set[n] = 1
1488 1506 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1489 1507
1490 1508 # Use the information collected in collect_manifests_and_files to say
1491 1509 # which changenode any manifestnode belongs to.
1492 1510 def lookup_manifest_link(mnfstnode):
1493 1511 return msng_mnfst_set[mnfstnode]
1494 1512
1495 1513 # A function generating function that sets up the initial environment
1496 1514 # for the inner function.
1497 1515 def filenode_collector(changedfiles):
1498 1516 next_rev = [0]
1499 1517 # This gathers information from each manifestnode included in the
1500 1518 # changegroup about which filenodes the manifest node references
1501 1519 # so we can include those in the changegroup too.
1502 1520 #
1503 1521 # It also remembers which changenode each filenode belongs to. It
1504 1522 # does this by assuming that a filenode belongs to the changenode
1505 1523 # that the first manifest referencing it belongs to.
1506 1524 def collect_msng_filenodes(mnfstnode):
1507 1525 r = mnfst.rev(mnfstnode)
1508 1526 if r == next_rev[0]:
1509 1527 # If the last rev we looked at was the one just previous,
1510 1528 # we only need to see a diff.
1511 1529 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1512 1530 # For each line in the delta
1513 1531 for dline in delta.splitlines():
1514 1532 # get the filename and filenode for that line
1515 1533 f, fnode = dline.split('\0')
1516 1534 fnode = bin(fnode[:40])
1517 1535 f = changedfiles.get(f, None)
1518 1536 # And if the file is in the list of files we care
1519 1537 # about.
1520 1538 if f is not None:
1521 1539 # Get the changenode this manifest belongs to
1522 1540 clnode = msng_mnfst_set[mnfstnode]
1523 1541 # Create the set of filenodes for the file if
1524 1542 # there isn't one already.
1525 1543 ndset = msng_filenode_set.setdefault(f, {})
1526 1544 # And set the filenode's changelog node to the
1527 1545 # manifest's if it hasn't been set already.
1528 1546 ndset.setdefault(fnode, clnode)
1529 1547 else:
1530 1548 # Otherwise we need a full manifest.
1531 1549 m = mnfst.read(mnfstnode)
1532 1550 # For every file we care about.
1533 1551 for f in changedfiles:
1534 1552 fnode = m.get(f, None)
1535 1553 # If it's in the manifest
1536 1554 if fnode is not None:
1537 1555 # See comments above.
1538 1556 clnode = msng_mnfst_set[mnfstnode]
1539 1557 ndset = msng_filenode_set.setdefault(f, {})
1540 1558 ndset.setdefault(fnode, clnode)
1541 1559 # Remember the revision we hope to see next.
1542 1560 next_rev[0] = r + 1
1543 1561 return collect_msng_filenodes
1544 1562
1545 1563 # We have a list of filenodes we think we need for a file, let's remove
1546 1564 # all those we know the recipient must have.
1547 1565 def prune_filenodes(f, filerevlog):
1548 1566 msngset = msng_filenode_set[f]
1549 1567 hasset = {}
1550 1568 # If a 'missing' filenode thinks it belongs to a changenode we
1551 1569 # assume the recipient must have, then the recipient must have
1552 1570 # that filenode.
1553 1571 for n in msngset:
1554 1572 clnode = cl.node(filerevlog.linkrev(n))
1555 1573 if clnode in has_cl_set:
1556 1574 hasset[n] = 1
1557 1575 prune_parents(filerevlog, hasset, msngset)
1558 1576
1559 1577 # A function generating function that sets up a context for the
1560 1578 # inner function.
1561 1579 def lookup_filenode_link_func(fname):
1562 1580 msngset = msng_filenode_set[fname]
1563 1581 # Lookup the changenode the filenode belongs to.
1564 1582 def lookup_filenode_link(fnode):
1565 1583 return msngset[fnode]
1566 1584 return lookup_filenode_link
1567 1585
1568 1586 # Now that we have all these utility functions to help out and
1569 1587 # logically divide up the task, generate the group.
1570 1588 def gengroup():
1571 1589 # The set of changed files starts empty.
1572 1590 changedfiles = {}
1573 1591 # Create a changenode group generator that will call our functions
1574 1592 # back to lookup the owning changenode and collect information.
1575 1593 group = cl.group(msng_cl_lst, identity,
1576 1594 manifest_and_file_collector(changedfiles))
1577 1595 for chnk in group:
1578 1596 yield chnk
1579 1597
1580 1598 # The list of manifests has been collected by the generator
1581 1599 # calling our functions back.
1582 1600 prune_manifests()
1583 1601 msng_mnfst_lst = msng_mnfst_set.keys()
1584 1602 # Sort the manifestnodes by revision number.
1585 1603 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1586 1604 # Create a generator for the manifestnodes that calls our lookup
1587 1605 # and data collection functions back.
1588 1606 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1589 1607 filenode_collector(changedfiles))
1590 1608 for chnk in group:
1591 1609 yield chnk
1592 1610
1593 1611 # These are no longer needed, dereference and toss the memory for
1594 1612 # them.
1595 1613 msng_mnfst_lst = None
1596 1614 msng_mnfst_set.clear()
1597 1615
1598 1616 changedfiles = changedfiles.keys()
1599 1617 changedfiles.sort()
1600 1618 # Go through all our files in order sorted by name.
1601 1619 for fname in changedfiles:
1602 1620 filerevlog = self.file(fname)
1603 1621 # Toss out the filenodes that the recipient isn't really
1604 1622 # missing.
1605 1623 if msng_filenode_set.has_key(fname):
1606 1624 prune_filenodes(fname, filerevlog)
1607 1625 msng_filenode_lst = msng_filenode_set[fname].keys()
1608 1626 else:
1609 1627 msng_filenode_lst = []
1610 1628 # If any filenodes are left, generate the group for them,
1611 1629 # otherwise don't bother.
1612 1630 if len(msng_filenode_lst) > 0:
1613 1631 yield changegroup.genchunk(fname)
1614 1632 # Sort the filenodes by their revision #
1615 1633 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1616 1634 # Create a group generator and only pass in a changenode
1617 1635 # lookup function, as we don't need to collect any information
1618 1636 # from filenodes.
1619 1637 group = filerevlog.group(msng_filenode_lst,
1620 1638 lookup_filenode_link_func(fname))
1621 1639 for chnk in group:
1622 1640 yield chnk
1623 1641 if msng_filenode_set.has_key(fname):
1624 1642 # Don't need this anymore, toss it to free memory.
1625 1643 del msng_filenode_set[fname]
1626 1644 # Signal that no more groups are left.
1627 1645 yield changegroup.closechunk()
1628 1646
1629 1647 if msng_cl_lst:
1630 1648 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1631 1649
1632 1650 return util.chunkbuffer(gengroup())
1633 1651
1634 1652 def changegroup(self, basenodes, source):
1635 1653 """Generate a changegroup of all nodes that we have that a recipient
1636 1654 doesn't.
1637 1655
1638 1656 This is much easier than the previous function as we can assume that
1639 1657 the recipient has any changenode we aren't sending them."""
1640 1658
1641 1659 self.hook('preoutgoing', throw=True, source=source)
1642 1660
1643 1661 cl = self.changelog
1644 1662 nodes = cl.nodesbetween(basenodes, None)[0]
1645 1663 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1646 1664 self.changegroupinfo(nodes)
1647 1665
1648 1666 def identity(x):
1649 1667 return x
1650 1668
1651 1669 def gennodelst(revlog):
1652 1670 for r in xrange(0, revlog.count()):
1653 1671 n = revlog.node(r)
1654 1672 if revlog.linkrev(n) in revset:
1655 1673 yield n
1656 1674
1657 1675 def changed_file_collector(changedfileset):
1658 1676 def collect_changed_files(clnode):
1659 1677 c = cl.read(clnode)
1660 1678 for fname in c[3]:
1661 1679 changedfileset[fname] = 1
1662 1680 return collect_changed_files
1663 1681
1664 1682 def lookuprevlink_func(revlog):
1665 1683 def lookuprevlink(n):
1666 1684 return cl.node(revlog.linkrev(n))
1667 1685 return lookuprevlink
1668 1686
1669 1687 def gengroup():
1670 1688 # construct a list of all changed files
1671 1689 changedfiles = {}
1672 1690
1673 1691 for chnk in cl.group(nodes, identity,
1674 1692 changed_file_collector(changedfiles)):
1675 1693 yield chnk
1676 1694 changedfiles = changedfiles.keys()
1677 1695 changedfiles.sort()
1678 1696
1679 1697 mnfst = self.manifest
1680 1698 nodeiter = gennodelst(mnfst)
1681 1699 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1682 1700 yield chnk
1683 1701
1684 1702 for fname in changedfiles:
1685 1703 filerevlog = self.file(fname)
1686 1704 nodeiter = gennodelst(filerevlog)
1687 1705 nodeiter = list(nodeiter)
1688 1706 if nodeiter:
1689 1707 yield changegroup.genchunk(fname)
1690 1708 lookup = lookuprevlink_func(filerevlog)
1691 1709 for chnk in filerevlog.group(nodeiter, lookup):
1692 1710 yield chnk
1693 1711
1694 1712 yield changegroup.closechunk()
1695 1713
1696 1714 if nodes:
1697 1715 self.hook('outgoing', node=hex(nodes[0]), source=source)
1698 1716
1699 1717 return util.chunkbuffer(gengroup())
1700 1718
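# --- Editor's illustrative sketch (not part of the original source). ---
# Both gengroup() generators above produce the same overall stream shape:
# the changelog group, then the manifest group, then one (filename, filelog
# group) pair per changed file, then a closing chunk.  A toy producer showing
# just that ordering, with plain placeholders standing in for the real
# changegroup chunk framing:
def _toy_group_stream(cl_chunks, mnfst_chunks, files):
    # files: list of (fname, chunks) pairs
    for c in cl_chunks:
        yield c
    for c in mnfst_chunks:
        yield c
    for fname, chunks in files:
        yield fname              # stands in for changegroup.genchunk(fname)
        for c in chunks:
            yield c
    yield ''                     # stands in for changegroup.closechunk()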
1701 1719 def addchangegroup(self, source, srctype, url):
1702 1720 """add changegroup to repo.
1703 1721
1704 1722 return values:
1705 1723 - nothing changed or no source: 0
1706 1724 - more heads than before: 1+added heads (2..n)
1707 1725 - fewer heads than before: -1-removed heads (-2..-n)
1708 1726 - number of heads stays the same: 1
1709 1727 """
1710 1728 def csmap(x):
1711 1729 self.ui.debug(_("add changeset %s\n") % short(x))
1712 1730 return cl.count()
1713 1731
1714 1732 def revmap(x):
1715 1733 return cl.rev(x)
1716 1734
1717 1735 if not source:
1718 1736 return 0
1719 1737
1720 1738 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1721 1739
1722 1740 changesets = files = revisions = 0
1723 1741
1724 1742 tr = self.transaction()
1725 1743
1726 1744 # write changelog data to temp files so concurrent readers will not see
1727 1745 # inconsistent view
1728 1746 cl = None
1729 1747 try:
1730 1748 cl = appendfile.appendchangelog(self.sopener,
1731 1749 self.changelog.version)
1732 1750
1733 1751 oldheads = len(cl.heads())
1734 1752
1735 1753 # pull off the changeset group
1736 1754 self.ui.status(_("adding changesets\n"))
1737 1755 cor = cl.count() - 1
1738 1756 chunkiter = changegroup.chunkiter(source)
1739 1757 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1740 1758 raise util.Abort(_("received changelog group is empty"))
1741 1759 cnr = cl.count() - 1
1742 1760 changesets = cnr - cor
1743 1761
1744 1762 # pull off the manifest group
1745 1763 self.ui.status(_("adding manifests\n"))
1746 1764 chunkiter = changegroup.chunkiter(source)
1747 1765 # no need to check for empty manifest group here:
1748 1766 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1749 1767 # no new manifest will be created and the manifest group will
1750 1768 # be empty during the pull
1751 1769 self.manifest.addgroup(chunkiter, revmap, tr)
1752 1770
1753 1771 # process the files
1754 1772 self.ui.status(_("adding file changes\n"))
1755 1773 while 1:
1756 1774 f = changegroup.getchunk(source)
1757 1775 if not f:
1758 1776 break
1759 1777 self.ui.debug(_("adding %s revisions\n") % f)
1760 1778 fl = self.file(f)
1761 1779 o = fl.count()
1762 1780 chunkiter = changegroup.chunkiter(source)
1763 1781 if fl.addgroup(chunkiter, revmap, tr) is None:
1764 1782 raise util.Abort(_("received file revlog group is empty"))
1765 1783 revisions += fl.count() - o
1766 1784 files += 1
1767 1785
1768 1786 cl.writedata()
1769 1787 finally:
1770 1788 if cl:
1771 1789 cl.cleanup()
1772 1790
1773 1791 # make changelog see real files again
1774 1792 self.changelog = changelog.changelog(self.sopener,
1775 1793 self.changelog.version)
1776 1794 self.changelog.checkinlinesize(tr)
1777 1795
1778 1796 newheads = len(self.changelog.heads())
1779 1797 heads = ""
1780 1798 if oldheads and newheads != oldheads:
1781 1799 heads = _(" (%+d heads)") % (newheads - oldheads)
1782 1800
1783 1801 self.ui.status(_("added %d changesets"
1784 1802 " with %d changes to %d files%s\n")
1785 1803 % (changesets, revisions, files, heads))
1786 1804
1787 1805 if changesets > 0:
1788 1806 self.hook('pretxnchangegroup', throw=True,
1789 1807 node=hex(self.changelog.node(cor+1)), source=srctype,
1790 1808 url=url)
1791 1809
1792 1810 tr.close()
1793 1811
1794 1812 if changesets > 0:
1795 1813 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1796 1814 source=srctype, url=url)
1797 1815
1798 1816 for i in xrange(cor + 1, cnr + 1):
1799 1817 self.hook("incoming", node=hex(self.changelog.node(i)),
1800 1818 source=srctype, url=url)
1801 1819
1802 1820 # never return 0 here:
1803 1821 if newheads < oldheads:
1804 1822 return newheads - oldheads - 1
1805 1823 else:
1806 1824 return newheads - oldheads + 1
1807 1825
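# --- Editor's illustrative sketch (not part of the original source). ---
# The return-value convention documented on addchangegroup() above encodes the
# change in head count while never returning 0 for a successful pull.  A small
# standalone decoder for that convention:
def _heads_delta(ret):
    '''map an addchangegroup() return value back to the change in head count'''
    if ret == 0:
        return None              # nothing changed or no source
    if ret > 0:
        return ret - 1           # 1 -> unchanged, 2..n -> heads added
    return ret + 1               # -2..-n -> heads removed

# _heads_delta(1) == 0, _heads_delta(3) == 2, _heads_delta(-2) == -1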
1808 1826
1809 1827 def stream_in(self, remote):
1810 1828 fp = remote.stream_out()
1811 1829 l = fp.readline()
1812 1830 try:
1813 1831 resp = int(l)
1814 1832 except ValueError:
1815 1833 raise util.UnexpectedOutput(
1816 1834 _('Unexpected response from remote server:'), l)
1817 1835 if resp == 1:
1818 1836 raise util.Abort(_('operation forbidden by server'))
1819 1837 elif resp == 2:
1820 1838 raise util.Abort(_('locking the remote repository failed'))
1821 1839 elif resp != 0:
1822 1840 raise util.Abort(_('the server sent an unknown error code'))
1823 1841 self.ui.status(_('streaming all changes\n'))
1824 1842 l = fp.readline()
1825 1843 try:
1826 1844 total_files, total_bytes = map(int, l.split(' ', 1))
1827 1845 except (ValueError, TypeError):
1828 1846 raise util.UnexpectedOutput(
1829 1847 _('Unexpected response from remote server:'), l)
1830 1848 self.ui.status(_('%d files to transfer, %s of data\n') %
1831 1849 (total_files, util.bytecount(total_bytes)))
1832 1850 start = time.time()
1833 1851 for i in xrange(total_files):
1834 1852 # XXX doesn't support '\n' or '\r' in filenames
1835 1853 l = fp.readline()
1836 1854 try:
1837 1855 name, size = l.split('\0', 1)
1838 1856 size = int(size)
1839 1857 except (ValueError, TypeError):
1840 1858 raise util.UnexpectedOutput(
1841 1859 _('Unexpected response from remote server:'), l)
1842 1860 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1843 1861 ofp = self.sopener(name, 'w')
1844 1862 for chunk in util.filechunkiter(fp, limit=size):
1845 1863 ofp.write(chunk)
1846 1864 ofp.close()
1847 1865 elapsed = time.time() - start
1848 1866 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1849 1867 (util.bytecount(total_bytes), elapsed,
1850 1868 util.bytecount(total_bytes / elapsed)))
1851 1869 self.reload()
1852 1870 return len(self.heads()) + 1
1853 1871
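# --- Editor's illustrative sketch (not part of the original source). ---
# stream_in() above expects, after the response-code line, a header of
# "<file count> <byte count>\n" followed by one "<name>\0<size>\n" line and
# <size> raw bytes per file.  A toy producer of that payload (header and file
# entries only, without the leading response code):
def _toy_stream_payload(files):
    # files: list of (name, data) pairs
    total = 0
    for name, data in files:
        total += len(data)
    out = ['%d %d\n' % (len(files), total)]
    for name, data in files:
        out.append('%s\0%d\n' % (name, len(data)))
        out.append(data)
    return ''.join(out)

# _toy_stream_payload([('data/a.i', 'xyz')]) == '1 3\ndata/a.i\x003\nxyz'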
1854 1872 def clone(self, remote, heads=[], stream=False):
1855 1873 '''clone remote repository.
1856 1874
1857 1875 keyword arguments:
1858 1876 heads: list of revs to clone (forces use of pull)
1859 1877 stream: use streaming clone if possible'''
1860 1878
1861 1879 # now, all clients that can request uncompressed clones can
1862 1880 # read repo formats supported by all servers that can serve
1863 1881 # them.
1864 1882
1865 1883 # if revlog format changes, client will have to check version
1866 1884 # and format flags on "stream" capability, and use
1867 1885 # uncompressed only if compatible.
1868 1886
1869 1887 if stream and not heads and remote.capable('stream'):
1870 1888 return self.stream_in(remote)
1871 1889 return self.pull(remote, heads)
1872 1890
1873 1891 # used to avoid circular references so destructors work
1874 1892 def aftertrans(files):
1875 1893 renamefiles = [tuple(t) for t in files]
1876 1894 def a():
1877 1895 for src, dest in renamefiles:
1878 1896 util.rename(src, dest)
1879 1897 return a
1880 1898
1881 1899 def instance(ui, path, create):
1882 1900 return localrepository(ui, util.drop_scheme('file', path), create)
1883 1901
1884 1902 def islocal(path):
1885 1903 return True
@@ -1,1386 +1,1404 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale
17 import os, threading, time, calendar, ConfigParser, locale, glob
18 18
19 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
20 or "ascii"
19 try:
20 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
21 or "ascii"
22 except locale.Error:
23 _encoding = 'ascii'
21 24 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
22 25 _fallbackencoding = 'ISO-8859-1'
23 26
24 27 def tolocal(s):
25 28 """
26 29 Convert a string from internal UTF-8 to local encoding
27 30
28 31 All internal strings should be UTF-8 but some repos before the
29 32 implementation of locale support may contain latin1 or possibly
30 33 other character sets. We attempt to decode everything strictly
31 34 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
32 35 replace unknown characters.
33 36 """
34 37 for e in ('UTF-8', _fallbackencoding):
35 38 try:
36 39 u = s.decode(e) # attempt strict decoding
37 40 return u.encode(_encoding, "replace")
38 41 except LookupError, k:
39 42 raise Abort(_("%s, please check your locale settings") % k)
40 43 except UnicodeDecodeError:
41 44 pass
42 45 u = s.decode("utf-8", "replace") # last ditch
43 46 return u.encode(_encoding, "replace")
44 47
45 48 def fromlocal(s):
46 49 """
47 50 Convert a string from the local character encoding to UTF-8
48 51
49 52 We attempt to decode strings using the encoding mode set by
50 53 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
51 54 characters will cause an error message. Other modes include
52 55 'replace', which replaces unknown characters with a special
53 56 Unicode character, and 'ignore', which drops the character.
54 57 """
55 58 try:
56 59 return s.decode(_encoding, _encodingmode).encode("utf-8")
57 60 except UnicodeDecodeError, inst:
58 61 sub = s[max(0, inst.start-10):inst.start+10]
59 62 raise Abort("decoding near '%s': %s!" % (sub, inst))
60 63 except LookupError, k:
61 64 raise Abort(_("%s, please check your locale settings") % k)
62 65
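# --- Editor's illustrative sketch (not part of the original source). ---
# tolocal()/fromlocal() above keep UTF-8 inside the repository and the local
# encoding at the boundaries.  A reduced round trip with the local encoding
# hard-coded to 'latin-1' instead of being taken from HGENCODING:
def _toy_fromlocal(s, local='latin-1'):
    return s.decode(local, 'strict').encode('utf-8')

def _toy_tolocal(s, local='latin-1'):
    return s.decode('utf-8', 'replace').encode(local, 'replace')

# _toy_tolocal(_toy_fromlocal('caf\xe9')) == 'caf\xe9'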
63 66 def locallen(s):
64 67 """Find the length in characters of a local string"""
65 68 return len(s.decode(_encoding, "replace"))
66 69
67 70 def localsub(s, a, b=None):
68 71 try:
69 72 u = s.decode(_encoding, _encodingmode)
70 73 if b is not None:
71 74 u = u[a:b]
72 75 else:
73 76 u = u[:a]
74 77 return u.encode(_encoding, _encodingmode)
75 78 except UnicodeDecodeError, inst:
76 79 sub = s[max(0, inst.start-10):inst.start+10]
77 80 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
78 81
79 82 # used by parsedate
80 83 defaultdateformats = (
81 84 '%Y-%m-%d %H:%M:%S',
82 85 '%Y-%m-%d %I:%M:%S%p',
83 86 '%Y-%m-%d %H:%M',
84 87 '%Y-%m-%d %I:%M%p',
85 88 '%Y-%m-%d',
86 89 '%m-%d',
87 90 '%m/%d',
88 91 '%m/%d/%y',
89 92 '%m/%d/%Y',
90 93 '%a %b %d %H:%M:%S %Y',
91 94 '%a %b %d %I:%M:%S%p %Y',
92 95 '%b %d %H:%M:%S %Y',
93 96 '%b %d %I:%M:%S%p %Y',
94 97 '%b %d %H:%M:%S',
95 98 '%b %d %I:%M:%S%p',
96 99 '%b %d %H:%M',
97 100 '%b %d %I:%M%p',
98 101 '%b %d %Y',
99 102 '%b %d',
100 103 '%H:%M:%S',
101 104 '%I:%M:%S%p',
102 105 '%H:%M',
103 106 '%I:%M%p',
104 107 )
105 108
106 109 extendeddateformats = defaultdateformats + (
107 110 "%Y",
108 111 "%Y-%m",
109 112 "%b",
110 113 "%b %Y",
111 114 )
112 115
113 116 class SignalInterrupt(Exception):
114 117 """Exception raised on SIGTERM and SIGHUP."""
115 118
116 119 # like SafeConfigParser but with case-sensitive keys
117 120 class configparser(ConfigParser.SafeConfigParser):
118 121 def optionxform(self, optionstr):
119 122 return optionstr
120 123
121 124 def cachefunc(func):
122 125 '''cache the result of function calls'''
123 126 # XXX doesn't handle keywords args
124 127 cache = {}
125 128 if func.func_code.co_argcount == 1:
126 129 # we gain a small amount of time because
127 130 # we don't need to pack/unpack the list
128 131 def f(arg):
129 132 if arg not in cache:
130 133 cache[arg] = func(arg)
131 134 return cache[arg]
132 135 else:
133 136 def f(*args):
134 137 if args not in cache:
135 138 cache[args] = func(*args)
136 139 return cache[args]
137 140
138 141 return f
139 142
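# --- Editor's illustrative usage example (not part of the original source). ---
# cachefunc() above memoizes by argument: the wrapped function runs once per
# distinct argument and later calls return the cached result.
def _cachefunc_example():
    calls = []
    def square(x):
        calls.append(x)
        return x * x
    square = cachefunc(square)
    assert square(3) == 9 and square(3) == 9
    assert calls == [3]          # the underlying function ran only once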
140 143 def pipefilter(s, cmd):
141 144 '''filter string S through command CMD, returning its output'''
142 145 (pout, pin) = popen2.popen2(cmd, -1, 'b')
143 146 def writer():
144 147 try:
145 148 pin.write(s)
146 149 pin.close()
147 150 except IOError, inst:
148 151 if inst.errno != errno.EPIPE:
149 152 raise
150 153
151 154 # we should use select instead on UNIX, but this will work on most
152 155 # systems, including Windows
153 156 w = threading.Thread(target=writer)
154 157 w.start()
155 158 f = pout.read()
156 159 pout.close()
157 160 w.join()
158 161 return f
159 162
160 163 def tempfilter(s, cmd):
161 164 '''filter string S through a pair of temporary files with CMD.
162 165 CMD is used as a template to create the real command to be run,
163 166 with the strings INFILE and OUTFILE replaced by the real names of
164 167 the temporary files generated.'''
165 168 inname, outname = None, None
166 169 try:
167 170 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 171 fp = os.fdopen(infd, 'wb')
169 172 fp.write(s)
170 173 fp.close()
171 174 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 175 os.close(outfd)
173 176 cmd = cmd.replace('INFILE', inname)
174 177 cmd = cmd.replace('OUTFILE', outname)
175 178 code = os.system(cmd)
176 179 if code: raise Abort(_("command '%s' failed: %s") %
177 180 (cmd, explain_exit(code)))
178 181 return open(outname, 'rb').read()
179 182 finally:
180 183 try:
181 184 if inname: os.unlink(inname)
182 185 except: pass
183 186 try:
184 187 if outname: os.unlink(outname)
185 188 except: pass
186 189
187 190 filtertable = {
188 191 'tempfile:': tempfilter,
189 192 'pipe:': pipefilter,
190 193 }
191 194
192 195 def filter(s, cmd):
193 196 "filter a string through a command that transforms its input to its output"
194 197 for name, fn in filtertable.iteritems():
195 198 if cmd.startswith(name):
196 199 return fn(s, cmd[len(name):].lstrip())
197 200 return pipefilter(s, cmd)
198 201
199 202 def find_in_path(name, path, default=None):
200 203 '''find name in search path. path can be string (will be split
201 204 with os.pathsep), or iterable thing that returns strings. if name
202 205 found, return path to name. else return default.'''
203 206 if isinstance(path, str):
204 207 path = path.split(os.pathsep)
205 208 for p in path:
206 209 p_name = os.path.join(p, name)
207 210 if os.path.exists(p_name):
208 211 return p_name
209 212 return default
210 213
211 214 def binary(s):
212 215 """return true if a string is binary data using diff's heuristic"""
213 216 if s and '\0' in s[:4096]:
214 217 return True
215 218 return False
216 219
217 220 def unique(g):
218 221 """return the uniq elements of iterable g"""
219 222 seen = {}
220 223 l = []
221 224 for f in g:
222 225 if f not in seen:
223 226 seen[f] = 1
224 227 l.append(f)
225 228 return l
226 229
227 230 class Abort(Exception):
228 231 """Raised if a command needs to print an error and exit."""
229 232
230 233 class UnexpectedOutput(Abort):
231 234 """Raised to print an error with part of output and exit."""
232 235
233 236 def always(fn): return True
234 237 def never(fn): return False
235 238
239 def expand_glob(pats):
240 '''On Windows, expand the implicit globs in a list of patterns'''
241 if os.name != 'nt':
242 return list(pats)
243 ret = []
244 for p in pats:
245 kind, name = patkind(p, None)
246 if kind is None:
247 globbed = glob.glob(name)
248 if globbed:
249 ret.extend(globbed)
250 continue
251 # if we couldn't expand the glob, just keep it around
252 ret.append(p)
253 return ret
254
236 255 def patkind(name, dflt_pat='glob'):
237 256 """Split a string into an optional pattern kind prefix and the
238 257 actual pattern."""
239 258 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
240 259 if name.startswith(prefix + ':'): return name.split(':', 1)
241 260 return dflt_pat, name
242 261
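# --- Editor's illustrative usage example (not part of the original source). ---
# patkind() above splits an optional "kind:" prefix off a pattern; note that a
# recognized prefix yields a list while the default kind yields a tuple.
def _patkind_example():
    assert patkind('re:.*\.c$') == ['re', '.*\.c$']
    assert patkind('*.c') == ('glob', '*.c')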
243 262 def globre(pat, head='^', tail='$'):
244 263 "convert a glob pattern into a regexp"
245 264 i, n = 0, len(pat)
246 265 res = ''
247 266 group = False
248 267 def peek(): return i < n and pat[i]
249 268 while i < n:
250 269 c = pat[i]
251 270 i = i+1
252 271 if c == '*':
253 272 if peek() == '*':
254 273 i += 1
255 274 res += '.*'
256 275 else:
257 276 res += '[^/]*'
258 277 elif c == '?':
259 278 res += '.'
260 279 elif c == '[':
261 280 j = i
262 281 if j < n and pat[j] in '!]':
263 282 j += 1
264 283 while j < n and pat[j] != ']':
265 284 j += 1
266 285 if j >= n:
267 286 res += '\\['
268 287 else:
269 288 stuff = pat[i:j].replace('\\','\\\\')
270 289 i = j + 1
271 290 if stuff[0] == '!':
272 291 stuff = '^' + stuff[1:]
273 292 elif stuff[0] == '^':
274 293 stuff = '\\' + stuff
275 294 res = '%s[%s]' % (res, stuff)
276 295 elif c == '{':
277 296 group = True
278 297 res += '(?:'
279 298 elif c == '}' and group:
280 299 res += ')'
281 300 group = False
282 301 elif c == ',' and group:
283 302 res += '|'
284 303 elif c == '\\':
285 304 p = peek()
286 305 if p:
287 306 i += 1
288 307 res += re.escape(p)
289 308 else:
290 309 res += re.escape(c)
291 310 else:
292 311 res += re.escape(c)
293 312 return head + res + tail
294 313
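# --- Editor's illustrative usage example (not part of the original source). ---
# globre() above turns glob syntax into an anchored regular expression;
# '*' stays within a single path component while '**' may cross '/':
def _globre_example():
    assert re.match(globre('*.py'), 'a.py')
    assert not re.match(globre('*.py'), 'sub/a.py')     # '*' stops at '/'
    assert re.match(globre('**/*.py'), 'sub/a.py')      # '**' crosses '/'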
295 314 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
296 315
297 316 def pathto(n1, n2):
298 317 '''return the relative path from one place to another.
299 318 n1 should use os.sep to separate directories
300 319 n2 should use "/" to separate directories
301 320 returns an os.sep-separated path.
302 321 '''
303 322 if not n1: return localpath(n2)
304 323 a, b = n1.split(os.sep), n2.split('/')
305 324 a.reverse()
306 325 b.reverse()
307 326 while a and b and a[-1] == b[-1]:
308 327 a.pop()
309 328 b.pop()
310 329 b.reverse()
311 330 return os.sep.join((['..'] * len(a)) + b)
312 331
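# --- Editor's illustrative usage example (not part of the original source). ---
# pathto() above builds the relative path from one repository-relative name to
# another; on a platform where os.sep is '/':
def _pathto_example():
    if os.sep == '/':
        assert pathto('foo/bar', 'baz/quux') == '../../baz/quux'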
313 332 def canonpath(root, cwd, myname):
314 333 """return the canonical path of myname, given cwd and root"""
315 334 if root == os.sep:
316 335 rootsep = os.sep
317 336 elif root.endswith(os.sep):
318 337 rootsep = root
319 338 else:
320 339 rootsep = root + os.sep
321 340 name = myname
322 341 if not os.path.isabs(name):
323 342 name = os.path.join(root, cwd, name)
324 343 name = os.path.normpath(name)
325 344 if name != rootsep and name.startswith(rootsep):
326 345 name = name[len(rootsep):]
327 346 audit_path(name)
328 347 return pconvert(name)
329 348 elif name == root:
330 349 return ''
331 350 else:
332 351 # Determine whether `name' is in the hierarchy at or beneath `root',
333 352 # by iterating name=dirname(name) until that causes no change (can't
334 353 # check name == '/', because that doesn't work on windows). For each
335 354 # `name', compare dev/inode numbers. If they match, the list `rel'
336 355 # holds the reversed list of components making up the relative file
337 356 # name we want.
338 357 root_st = os.stat(root)
339 358 rel = []
340 359 while True:
341 360 try:
342 361 name_st = os.stat(name)
343 362 except OSError:
344 363 break
345 364 if samestat(name_st, root_st):
346 365 rel.reverse()
347 366 name = os.path.join(*rel)
348 367 audit_path(name)
349 368 return pconvert(name)
350 369 dirname, basename = os.path.split(name)
351 370 rel.append(basename)
352 371 if dirname == name:
353 372 break
354 373 name = dirname
355 374
356 375 raise Abort('%s not under root' % myname)
357 376
358 377 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
359 378 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
360 379
361 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
362 if os.name == 'nt':
363 dflt_pat = 'glob'
364 else:
365 dflt_pat = 'relpath'
366 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
380 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='',
381 src=None, globbed=False):
382 if not globbed:
383 names = expand_glob(names)
384 return _matcher(canonroot, cwd, names, inc, exc, head, 'relpath', src)
367 385
368 386 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
369 387 """build a function to match a set of file patterns
370 388
371 389 arguments:
372 390 canonroot - the canonical root of the tree you're matching against
373 391 cwd - the current working directory, if relevant
374 392 names - patterns to find
375 393 inc - patterns to include
376 394 exc - patterns to exclude
377 395 head - a regex to prepend to patterns to control whether a match is rooted
378 396
379 397 a pattern is one of:
380 398 'glob:<rooted glob>'
381 399 're:<rooted regexp>'
382 400 'path:<rooted path>'
383 401 'relglob:<relative glob>'
384 402 'relpath:<relative path>'
385 403 'relre:<relative regexp>'
386 404 '<rooted path or regexp>'
387 405
388 406 returns:
389 407 a 3-tuple containing
390 408 - list of explicit non-pattern names passed in
391 409 - a bool match(filename) function
392 410 - a bool indicating if any patterns were passed in
393 411
394 412 todo:
395 413 make head regex a rooted bool
396 414 """
397 415
398 416 def contains_glob(name):
399 417 for c in name:
400 418 if c in _globchars: return True
401 419 return False
402 420
403 421 def regex(kind, name, tail):
404 422 '''convert a pattern into a regular expression'''
405 423 if kind == 're':
406 424 return name
407 425 elif kind == 'path':
408 426 return '^' + re.escape(name) + '(?:/|$)'
409 427 elif kind == 'relglob':
410 428 return head + globre(name, '(?:|.*/)', tail)
411 429 elif kind == 'relpath':
412 430 return head + re.escape(name) + tail
413 431 elif kind == 'relre':
414 432 if name.startswith('^'):
415 433 return name
416 434 return '.*' + name
417 435 return head + globre(name, '', tail)
418 436
419 437 def matchfn(pats, tail):
420 438 """build a matching function from a set of patterns"""
421 439 if not pats:
422 440 return
423 441 matches = []
424 442 for k, p in pats:
425 443 try:
426 444 pat = '(?:%s)' % regex(k, p, tail)
427 445 matches.append(re.compile(pat).match)
428 446 except re.error:
429 447 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
430 448 else: raise Abort("invalid pattern (%s): %s" % (k, p))
431 449
432 450 def buildfn(text):
433 451 for m in matches:
434 452 r = m(text)
435 453 if r:
436 454 return r
437 455
438 456 return buildfn
439 457
440 458 def globprefix(pat):
441 459 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
442 460 root = []
443 461 for p in pat.split(os.sep):
444 462 if contains_glob(p): break
445 463 root.append(p)
446 464 return '/'.join(root)
447 465
448 466 pats = []
449 467 files = []
450 468 roots = []
451 469 for kind, name in [patkind(p, dflt_pat) for p in names]:
452 470 if kind in ('glob', 'relpath'):
453 471 name = canonpath(canonroot, cwd, name)
454 472 if name == '':
455 473 kind, name = 'glob', '**'
456 474 if kind in ('glob', 'path', 're'):
457 475 pats.append((kind, name))
458 476 if kind == 'glob':
459 477 root = globprefix(name)
460 478 if root: roots.append(root)
461 479 elif kind == 'relpath':
462 480 files.append((kind, name))
463 481 roots.append(name)
464 482
465 483 patmatch = matchfn(pats, '$') or always
466 484 filematch = matchfn(files, '(?:/|$)') or always
467 485 incmatch = always
468 486 if inc:
469 487 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
470 488 incmatch = matchfn(inckinds, '(?:/|$)')
471 489 excmatch = lambda fn: False
472 490 if exc:
473 491 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
474 492 excmatch = matchfn(exckinds, '(?:/|$)')
475 493
476 494 return (roots,
477 495 lambda fn: (incmatch(fn) and not excmatch(fn) and
478 496 (fn.endswith('/') or
479 497 (not pats and not files) or
480 498 (pats and patmatch(fn)) or
481 499 (files and filematch(fn)))),
482 500 (inc or exc or (pats and pats != [('glob', '**')])) and True)
483 501
484 502 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
485 503 '''enhanced shell command execution.
486 504 run with environment maybe modified, maybe in different dir.
487 505
488 506 if command fails and onerr is None, return status. if ui object,
489 507 print error message and return status, else raise onerr object as
490 508 exception.'''
491 509 def py2shell(val):
492 510 'convert python object into string that is useful to shell'
493 511 if val in (None, False):
494 512 return '0'
495 513 if val == True:
496 514 return '1'
497 515 return str(val)
498 516 oldenv = {}
499 517 for k in environ:
500 518 oldenv[k] = os.environ.get(k)
501 519 if cwd is not None:
502 520 oldcwd = os.getcwd()
503 521 origcmd = cmd
504 522 if os.name == 'nt':
505 523 cmd = '"%s"' % cmd
506 524 try:
507 525 for k, v in environ.iteritems():
508 526 os.environ[k] = py2shell(v)
509 527 if cwd is not None and oldcwd != cwd:
510 528 os.chdir(cwd)
511 529 rc = os.system(cmd)
512 530 if rc and onerr:
513 531 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
514 532 explain_exit(rc)[0])
515 533 if errprefix:
516 534 errmsg = '%s: %s' % (errprefix, errmsg)
517 535 try:
518 536 onerr.warn(errmsg + '\n')
519 537 except AttributeError:
520 538 raise onerr(errmsg)
521 539 return rc
522 540 finally:
523 541 for k, v in oldenv.iteritems():
524 542 if v is None:
525 543 del os.environ[k]
526 544 else:
527 545 os.environ[k] = v
528 546 if cwd is not None and oldcwd != cwd:
529 547 os.chdir(oldcwd)
530 548
531 549 def rename(src, dst):
532 550 """forcibly rename a file"""
533 551 try:
534 552 os.rename(src, dst)
535 553 except OSError, err:
536 554 # on windows, rename to existing file is not allowed, so we
537 555 # must delete destination first. but if file is open, unlink
538 556 # schedules it for delete but does not delete it. rename
539 557 # happens immediately even for open files, so we create
540 558 # temporary file, delete it, rename destination to that name,
541 559 # then delete that. then rename is safe to do.
542 560 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
543 561 os.close(fd)
544 562 os.unlink(temp)
545 563 os.rename(dst, temp)
546 564 os.unlink(temp)
547 565 os.rename(src, dst)
548 566
549 567 def unlink(f):
550 568 """unlink and remove the directory if it is empty"""
551 569 os.unlink(f)
552 570 # try removing directories that might now be empty
553 571 try:
554 572 os.removedirs(os.path.dirname(f))
555 573 except OSError:
556 574 pass
557 575
558 576 def copyfile(src, dest):
559 577 "copy a file, preserving mode"
560 578 try:
561 579 shutil.copyfile(src, dest)
562 580 shutil.copymode(src, dest)
563 581 except shutil.Error, inst:
564 582 raise Abort(str(inst))
565 583
566 584 def copyfiles(src, dst, hardlink=None):
567 585 """Copy a directory tree using hardlinks if possible"""
568 586
569 587 if hardlink is None:
570 588 hardlink = (os.stat(src).st_dev ==
571 589 os.stat(os.path.dirname(dst)).st_dev)
572 590
573 591 if os.path.isdir(src):
574 592 os.mkdir(dst)
575 593 for name in os.listdir(src):
576 594 srcname = os.path.join(src, name)
577 595 dstname = os.path.join(dst, name)
578 596 copyfiles(srcname, dstname, hardlink)
579 597 else:
580 598 if hardlink:
581 599 try:
582 600 os_link(src, dst)
583 601 except (IOError, OSError):
584 602 hardlink = False
585 603 shutil.copy(src, dst)
586 604 else:
587 605 shutil.copy(src, dst)
588 606
589 607 def audit_path(path):
590 608 """Abort if path contains dangerous components"""
591 609 parts = os.path.normcase(path).split(os.sep)
592 610 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
593 611 or os.pardir in parts):
594 612 raise Abort(_("path contains illegal component: %s\n") % path)
595 613
596 614 def _makelock_file(info, pathname):
597 615 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
598 616 os.write(ld, info)
599 617 os.close(ld)
600 618
601 619 def _readlock_file(pathname):
602 620 return posixfile(pathname).read()
603 621
604 622 def nlinks(pathname):
605 623 """Return number of hardlinks for the given file."""
606 624 return os.lstat(pathname).st_nlink
607 625
608 626 if hasattr(os, 'link'):
609 627 os_link = os.link
610 628 else:
611 629 def os_link(src, dst):
612 630 raise OSError(0, _("Hardlinks not supported"))
613 631
614 632 def fstat(fp):
615 633 '''stat file object that may not have fileno method.'''
616 634 try:
617 635 return os.fstat(fp.fileno())
618 636 except AttributeError:
619 637 return os.stat(fp.name)
620 638
621 639 posixfile = file
622 640
623 641 def is_win_9x():
624 642 '''return true if run on windows 95, 98 or me.'''
625 643 try:
626 644 return sys.getwindowsversion()[3] == 1
627 645 except AttributeError:
628 646 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
629 647
630 648 getuser_fallback = None
631 649
632 650 def getuser():
633 651 '''return name of current user'''
634 652 try:
635 653 return getpass.getuser()
636 654 except ImportError:
637 655 # import of pwd will fail on windows - try fallback
638 656 if getuser_fallback:
639 657 return getuser_fallback()
640 658 # raised if win32api not available
641 659 raise Abort(_('user name not available - set USERNAME '
642 660 'environment variable'))
643 661
644 662 def username(uid=None):
645 663 """Return the name of the user with the given uid.
646 664
647 665 If uid is None, return the name of the current user."""
648 666 try:
649 667 import pwd
650 668 if uid is None:
651 669 uid = os.getuid()
652 670 try:
653 671 return pwd.getpwuid(uid)[0]
654 672 except KeyError:
655 673 return str(uid)
656 674 except ImportError:
657 675 return None
658 676
659 677 def groupname(gid=None):
660 678 """Return the name of the group with the given gid.
661 679
662 680 If gid is None, return the name of the current group."""
663 681 try:
664 682 import grp
665 683 if gid is None:
666 684 gid = os.getgid()
667 685 try:
668 686 return grp.getgrgid(gid)[0]
669 687 except KeyError:
670 688 return str(gid)
671 689 except ImportError:
672 690 return None
673 691
674 692 # File system features
675 693
676 694 def checkfolding(path):
677 695 """
678 696 Check whether the given path is on a case-sensitive filesystem
679 697
680 698 Requires a path (like /foo/.hg) ending with a foldable final
681 699 directory component.
682 700 """
683 701 s1 = os.stat(path)
684 702 d, b = os.path.split(path)
685 703 p2 = os.path.join(d, b.upper())
686 704 if path == p2:
687 705 p2 = os.path.join(d, b.lower())
688 706 try:
689 707 s2 = os.stat(p2)
690 708 if s2 == s1:
691 709 return False
692 710 return True
693 711 except:
694 712 return True
695 713
696 714 def checkexec(path):
697 715 """
698 716 Check whether the given path is on a filesystem with UNIX-like exec flags
699 717
700 718 Requires a directory (like /foo/.hg)
701 719 """
702 720 fh, fn = tempfile.mkstemp("", "", path)
703 721 os.close(fh)
704 722 m = os.stat(fn).st_mode
705 723 os.chmod(fn, m ^ 0111)
706 724 r = (os.stat(fn).st_mode != m)
707 725 os.unlink(fn)
708 726 return r
709 727
710 728 def execfunc(path, fallback):
711 729 '''return an is_exec() function with default to fallback'''
712 730 if checkexec(path):
713 731 return lambda x: is_exec(os.path.join(path, x))
714 732 return fallback
715 733
716 734 def checklink(path):
717 735 """check whether the given path is on a symlink-capable filesystem"""
718 736 # mktemp is not racy because symlink creation will fail if the
719 737 # file already exists
720 738 name = tempfile.mktemp(dir=path)
721 739 try:
722 740 os.symlink(".", name)
723 741 os.unlink(name)
724 742 return True
725 743 except (OSError, AttributeError):
726 744 return False
727 745
728 746 def linkfunc(path, fallback):
729 747 '''return an is_link() function with default to fallback'''
730 748 if checklink(path):
731 749 return lambda x: is_link(os.path.join(path, x))
732 750 return fallback
733 751
734 752 # Platform specific variants
735 753 if os.name == 'nt':
736 754 import msvcrt
737 755 nulldev = 'NUL:'
738 756
739 757 class winstdout:
740 758 '''stdout on windows misbehaves if sent through a pipe'''
741 759
742 760 def __init__(self, fp):
743 761 self.fp = fp
744 762
745 763 def __getattr__(self, key):
746 764 return getattr(self.fp, key)
747 765
748 766 def close(self):
749 767 try:
750 768 self.fp.close()
751 769 except: pass
752 770
753 771 def write(self, s):
754 772 try:
755 773 return self.fp.write(s)
756 774 except IOError, inst:
757 775 if inst.errno != 0: raise
758 776 self.close()
759 777 raise IOError(errno.EPIPE, 'Broken pipe')
760 778
761 779 sys.stdout = winstdout(sys.stdout)
762 780
763 781 def system_rcpath():
764 782 try:
765 783 return system_rcpath_win32()
766 784 except:
767 785 return [r'c:\mercurial\mercurial.ini']
768 786
769 787 def os_rcpath():
770 788 '''return default os-specific hgrc search path'''
771 789 path = system_rcpath()
772 790 path.append(user_rcpath())
773 791 userprofile = os.environ.get('USERPROFILE')
774 792 if userprofile:
775 793 path.append(os.path.join(userprofile, 'mercurial.ini'))
776 794 return path
777 795
778 796 def user_rcpath():
779 797 '''return os-specific hgrc search path to the user dir'''
780 798 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
781 799
782 800 def parse_patch_output(output_line):
783 801 """parses the output produced by patch and returns the file name"""
784 802 pf = output_line[14:]
785 803 if pf[0] == '`':
786 804 pf = pf[1:-1] # Remove the quotes
787 805 return pf
788 806
789 807 def testpid(pid):
790 808 '''return False if pid dead, True if running or not known'''
791 809 return True
792 810
793 811 def set_exec(f, mode):
794 812 pass
795 813
796 814 def set_link(f, mode):
797 815 pass
798 816
799 817 def set_binary(fd):
800 818 msvcrt.setmode(fd.fileno(), os.O_BINARY)
801 819
802 820 def pconvert(path):
803 821 return path.replace("\\", "/")
804 822
805 823 def localpath(path):
806 824 return path.replace('/', '\\')
807 825
808 826 def normpath(path):
809 827 return pconvert(os.path.normpath(path))
810 828
811 829 makelock = _makelock_file
812 830 readlock = _readlock_file
813 831
814 832 def samestat(s1, s2):
815 833 return False
816 834
817 835 def shellquote(s):
818 836 return '"%s"' % s.replace('"', '\\"')
819 837
820 838 def explain_exit(code):
821 839 return _("exited with status %d") % code, code
822 840
823 841 # if you change this stub into a real check, please try to implement the
824 842 # username and groupname functions above, too.
825 843 def isowner(fp, st=None):
826 844 return True
827 845
828 846 try:
829 847 # override functions with win32 versions if possible
830 848 from util_win32 import *
831 849 if not is_win_9x():
832 850 posixfile = posixfile_nt
833 851 except ImportError:
834 852 pass
835 853
836 854 else:
837 855 nulldev = '/dev/null'
838 856 _umask = os.umask(0)
839 857 os.umask(_umask)
840 858
841 859 def rcfiles(path):
842 860 rcs = [os.path.join(path, 'hgrc')]
843 861 rcdir = os.path.join(path, 'hgrc.d')
844 862 try:
845 863 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
846 864 if f.endswith(".rc")])
847 865 except OSError:
848 866 pass
849 867 return rcs
850 868
851 869 def os_rcpath():
852 870 '''return default os-specific hgrc search path'''
853 871 path = []
854 872 # old mod_python does not set sys.argv
855 873 if len(getattr(sys, 'argv', [])) > 0:
856 874 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
857 875 '/../etc/mercurial'))
858 876 path.extend(rcfiles('/etc/mercurial'))
859 877 path.append(os.path.expanduser('~/.hgrc'))
860 878 path = [os.path.normpath(f) for f in path]
861 879 return path
862 880
863 881 def parse_patch_output(output_line):
864 882 """parses the output produced by patch and returns the file name"""
865 883 pf = output_line[14:]
866 884 if pf.startswith("'") and pf.endswith("'") and " " in pf:
867 885 pf = pf[1:-1] # Remove the quotes
868 886 return pf
869 887
870 888 def is_exec(f):
871 889 """check whether a file is executable"""
872 890 return (os.lstat(f).st_mode & 0100 != 0)
873 891
874 892 def set_exec(f, mode):
875 893 s = os.lstat(f).st_mode
876 894 if (s & 0100 != 0) == mode:
877 895 return
878 896 if mode:
879 897 # Turn on +x for every +r bit when making a file executable
880 898 # and obey umask.
881 899 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
882 900 else:
883 901 os.chmod(f, s & 0666)
884 902
885 903 def is_link(f):
886 904 """check whether a file is a symlink"""
887 905 return (os.lstat(f).st_mode & 0120000 == 0120000)
888 906
889 907 def set_link(f, mode):
890 908 """make a file a symbolic link/regular file
891 909
892 910 if a file is changed to a link, its contents become the link data
893 911 if a link is changed to a file, its link data become its contents
894 912 """
895 913
896 914 m = is_link(f)
897 915 if m == bool(mode):
898 916 return
899 917
900 918 if mode: # switch file to link
901 919 data = file(f).read()
902 920 os.unlink(f)
903 921 os.symlink(data, f)
904 922 else:
905 923 data = os.readlink(f)
906 924 os.unlink(f)
907 925 file(f, "w").write(data)
908 926
909 927 def set_binary(fd):
910 928 pass
911 929
912 930 def pconvert(path):
913 931 return path
914 932
915 933 def localpath(path):
916 934 return path
917 935
918 936 normpath = os.path.normpath
919 937 samestat = os.path.samestat
920 938
921 939 def makelock(info, pathname):
922 940 try:
923 941 os.symlink(info, pathname)
924 942 except OSError, why:
925 943 if why.errno == errno.EEXIST:
926 944 raise
927 945 else:
928 946 _makelock_file(info, pathname)
929 947
930 948 def readlock(pathname):
931 949 try:
932 950 return os.readlink(pathname)
933 951 except OSError, why:
934 952 if why.errno == errno.EINVAL:
935 953 return _readlock_file(pathname)
936 954 else:
937 955 raise
938 956
939 957 def shellquote(s):
940 958 return "'%s'" % s.replace("'", "'\\''")
941 959
942 960 def testpid(pid):
943 961 '''return False if pid dead, True if running or not sure'''
944 962 try:
945 963 os.kill(pid, 0)
946 964 return True
947 965 except OSError, inst:
948 966 return inst.errno != errno.ESRCH
949 967
950 968 def explain_exit(code):
951 969 """return a 2-tuple (desc, code) describing a process's status"""
952 970 if os.WIFEXITED(code):
953 971 val = os.WEXITSTATUS(code)
954 972 return _("exited with status %d") % val, val
955 973 elif os.WIFSIGNALED(code):
956 974 val = os.WTERMSIG(code)
957 975 return _("killed by signal %d") % val, val
958 976 elif os.WIFSTOPPED(code):
959 977 val = os.WSTOPSIG(code)
960 978 return _("stopped by signal %d") % val, val
961 979 raise ValueError(_("invalid exit code"))
962 980
963 981 def isowner(fp, st=None):
964 982 """Return True if the file object f belongs to the current user.
965 983
966 984 The return value of a util.fstat(f) may be passed as the st argument.
967 985 """
968 986 if st is None:
969 987 st = fstat(fp)
970 988 return st.st_uid == os.getuid()
971 989
972 990 def _buildencodefun():
973 991 e = '_'
974 992 win_reserved = [ord(x) for x in '\\:*?"<>|']
975 993 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
976 994 for x in (range(32) + range(126, 256) + win_reserved):
977 995 cmap[chr(x)] = "~%02x" % x
978 996 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
979 997 cmap[chr(x)] = e + chr(x).lower()
980 998 dmap = {}
981 999 for k, v in cmap.iteritems():
982 1000 dmap[v] = k
983 1001 def decode(s):
984 1002 i = 0
985 1003 while i < len(s):
986 1004 for l in xrange(1, 4):
987 1005 try:
988 1006 yield dmap[s[i:i+l]]
989 1007 i += l
990 1008 break
991 1009 except KeyError:
992 1010 pass
993 1011 else:
994 1012 raise KeyError
995 1013 return (lambda s: "".join([cmap[c] for c in s]),
996 1014 lambda s: "".join(list(decode(s))))
997 1015
998 1016 encodefilename, decodefilename = _buildencodefun()
999 1017
1000 1018 def encodedopener(openerfn, fn):
1001 1019 def o(path, *args, **kw):
1002 1020 return openerfn(fn(path), *args, **kw)
1003 1021 return o
1004 1022
1005 1023 def opener(base, audit=True):
1006 1024 """
1007 1025 return a function that opens files relative to base
1008 1026
1009 1027 this function is used to hide the details of COW semantics and
1010 1028 remote file access from higher level code.
1011 1029 """
1012 1030 p = base
1013 1031 audit_p = audit
1014 1032
1015 1033 def mktempcopy(name):
1016 1034 d, fn = os.path.split(name)
1017 1035 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1018 1036 os.close(fd)
1019 1037 ofp = posixfile(temp, "wb")
1020 1038 try:
1021 1039 try:
1022 1040 ifp = posixfile(name, "rb")
1023 1041 except IOError, inst:
1024 1042 if not getattr(inst, 'filename', None):
1025 1043 inst.filename = name
1026 1044 raise
1027 1045 for chunk in filechunkiter(ifp):
1028 1046 ofp.write(chunk)
1029 1047 ifp.close()
1030 1048 ofp.close()
1031 1049 except:
1032 1050 try: os.unlink(temp)
1033 1051 except: pass
1034 1052 raise
1035 1053 st = os.lstat(name)
1036 1054 os.chmod(temp, st.st_mode)
1037 1055 return temp
1038 1056
1039 1057 class atomictempfile(posixfile):
1040 1058 """the file will only be copied when rename is called"""
1041 1059 def __init__(self, name, mode):
1042 1060 self.__name = name
1043 1061 self.temp = mktempcopy(name)
1044 1062 posixfile.__init__(self, self.temp, mode)
1045 1063 def rename(self):
1046 1064 if not self.closed:
1047 1065 posixfile.close(self)
1048 1066 rename(self.temp, localpath(self.__name))
1049 1067 def __del__(self):
1050 1068 if not self.closed:
1051 1069 try:
1052 1070 os.unlink(self.temp)
1053 1071 except: pass
1054 1072 posixfile.close(self)
1055 1073
1056 1074 class atomicfile(atomictempfile):
1057 1075 """the file will only be copied on close"""
1058 1076 def __init__(self, name, mode):
1059 1077 atomictempfile.__init__(self, name, mode)
1060 1078 def close(self):
1061 1079 self.rename()
1062 1080 def __del__(self):
1063 1081 self.rename()
1064 1082
1065 1083 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1066 1084 if audit_p:
1067 1085 audit_path(path)
1068 1086 f = os.path.join(p, path)
1069 1087
1070 1088 if not text:
1071 1089 mode += "b" # for that other OS
1072 1090
1073 1091 if mode[0] != "r":
1074 1092 try:
1075 1093 nlink = nlinks(f)
1076 1094 except OSError:
1077 1095 d = os.path.dirname(f)
1078 1096 if not os.path.isdir(d):
1079 1097 os.makedirs(d)
1080 1098 else:
1081 1099 if atomic:
1082 1100 return atomicfile(f, mode)
1083 1101 elif atomictemp:
1084 1102 return atomictempfile(f, mode)
1085 1103 if nlink > 1:
1086 1104 rename(mktempcopy(f), f)
1087 1105 return posixfile(f, mode)
1088 1106
1089 1107 return o
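
# Illustrative sketch for opener(): the returned callable resolves paths
# relative to base and creates missing directories on write.  The store name
# and file path below are hypothetical.
op = opener('example-store', audit=False)
f = op('data/notes.txt', 'w', text=True)
f.write('hello\n')
f.close()
assert op('data/notes.txt', text=True).read() == 'hello\n'
# atomic/atomictemp writes only take the special path above when the target
# file already exists (the else branch of the nlinks() probe).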
1090 1108
1091 1109 class chunkbuffer(object):
1092 1110 """Allow arbitrary sized chunks of data to be efficiently read from an
1093 1111 iterator over chunks of arbitrary size."""
1094 1112
1095 1113 def __init__(self, in_iter, targetsize = 2**16):
1096 1114 """in_iter is the iterator that's iterating over the input chunks.
1097 1115 targetsize is how big a buffer to try to maintain."""
1098 1116 self.in_iter = iter(in_iter)
1099 1117 self.buf = ''
1100 1118 self.targetsize = int(targetsize)
1101 1119 if self.targetsize <= 0:
1102 1120 raise ValueError(_("targetsize must be greater than 0, was %d") %
1103 1121 targetsize)
1104 1122 self.iterempty = False
1105 1123
1106 1124 def fillbuf(self):
1107 1125 """Ignore target size; read every chunk from iterator until empty."""
1108 1126 if not self.iterempty:
1109 1127 collector = cStringIO.StringIO()
1110 1128 collector.write(self.buf)
1111 1129 for ch in self.in_iter:
1112 1130 collector.write(ch)
1113 1131 self.buf = collector.getvalue()
1114 1132 self.iterempty = True
1115 1133
1116 1134 def read(self, l):
1117 1135 """Read L bytes of data from the iterator of chunks of data.
1118 1136 Returns less than L bytes if the iterator runs dry."""
1119 1137 if l > len(self.buf) and not self.iterempty:
1120 1138 # Clamp to a multiple of self.targetsize
1121 1139 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1122 1140 collector = cStringIO.StringIO()
1123 1141 collector.write(self.buf)
1124 1142 collected = len(self.buf)
1125 1143 for chunk in self.in_iter:
1126 1144 collector.write(chunk)
1127 1145 collected += len(chunk)
1128 1146 if collected >= targetsize:
1129 1147 break
1130 1148 if collected < targetsize:
1131 1149 self.iterempty = True
1132 1150 self.buf = collector.getvalue()
1133 1151 s, self.buf = self.buf[:l], buffer(self.buf, l)
1134 1152 return s
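
# Illustrative sketch for chunkbuffer: it regroups an iterator of odd-sized
# chunks into reads of whatever size the caller asks for.
cb = chunkbuffer(iter(['abc', 'defg', 'hi']))
assert cb.read(5) == 'abcde'
assert cb.read(3) == 'fgh'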
1135 1153
1136 1154 def filechunkiter(f, size=65536, limit=None):
1137 1155     """Create a generator that produces the data in the file, size
1138 1156     (default 65536) bytes at a time, up to an optional limit (default is
1139 1157 to read all data). Chunks may be less than size bytes if the
1140 1158 chunk is the last chunk in the file, or the file is a socket or
1141 1159 some other type of file that sometimes reads less data than is
1142 1160 requested."""
1143 1161 assert size >= 0
1144 1162 assert limit is None or limit >= 0
1145 1163 while True:
1146 1164 if limit is None: nbytes = size
1147 1165 else: nbytes = min(limit, size)
1148 1166 s = nbytes and f.read(nbytes)
1149 1167 if not s: break
1150 1168 if limit: limit -= len(s)
1151 1169 yield s
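
# Illustrative sketch for filechunkiter(): copy a (hypothetical) file in 8 KB
# chunks, reading at most 64 KB in total via the limit argument.
src = posixfile('input.bin', 'rb')
dst = posixfile('copy.bin', 'wb')
for chunk in filechunkiter(src, size=8192, limit=65536):
    dst.write(chunk)
dst.close()
src.close()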
1152 1170
1153 1171 def makedate():
1154 1172 lt = time.localtime()
1155 1173 if lt[8] == 1 and time.daylight:
1156 1174 tz = time.altzone
1157 1175 else:
1158 1176 tz = time.timezone
1159 1177 return time.mktime(lt), tz
1160 1178
1161 1179 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1162 1180 """represent a (unixtime, offset) tuple as a localized time.
1163 1181 unixtime is seconds since the epoch, and offset is the time zone's
1164 1182 number of seconds away from UTC. if timezone is false, do not
1165 1183 append time zone to string."""
1166 1184 t, tz = date or makedate()
1167 1185 s = time.strftime(format, time.gmtime(float(t) - tz))
1168 1186 if timezone:
1169 1187 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1170 1188 return s
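
# Illustrative sketch tying makedate() and datestr() together; the exact
# weekday/month names depend on the locale (C locale assumed here).
when = makedate()                   # (seconds since epoch, offset from UTC)
stamp = datestr(when)               # current time rendered in the local zone
assert datestr((0, 0)) == 'Thu Jan 01 00:00:00 1970 +0000'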
1171 1189
1172 1190 def strdate(string, format, defaults):
1173 1191 """parse a localized time string and return a (unixtime, offset) tuple.
1174 1192 if the string cannot be parsed, ValueError is raised."""
1175 1193 def timezone(string):
1176 1194 tz = string.split()[-1]
1177 1195 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1178 1196 tz = int(tz)
1179 1197 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1180 1198 return offset
1181 1199 if tz == "GMT" or tz == "UTC":
1182 1200 return 0
1183 1201 return None
1184 1202
1185 1203 # NOTE: unixtime = localunixtime + offset
1186 1204 offset, date = timezone(string), string
1187 1205 if offset != None:
1188 1206 date = " ".join(string.split()[:-1])
1189 1207
1190 1208 # add missing elements from defaults
1191 1209 for part in defaults:
1192 1210 found = [True for p in part if ("%"+p) in format]
1193 1211 if not found:
1194 1212 date += "@" + defaults[part]
1195 1213 format += "@%" + part[0]
1196 1214
1197 1215 timetuple = time.strptime(date, format)
1198 1216 localunixtime = int(calendar.timegm(timetuple))
1199 1217 if offset is None:
1200 1218 # local timezone
1201 1219 unixtime = int(time.mktime(timetuple))
1202 1220 offset = unixtime - localunixtime
1203 1221 else:
1204 1222 unixtime = localunixtime + offset
1205 1223 return unixtime, offset
1206 1224
1207 1225 def parsedate(string, formats=None, defaults=None):
1208 1226 """parse a localized time string and return a (unixtime, offset) tuple.
1209 1227 The date may be a "unixtime offset" string or in one of the specified
1210 1228 formats."""
1211 1229 if not string:
1212 1230 return 0, 0
1213 1231 if not formats:
1214 1232 formats = defaultdateformats
1215 1233 string = string.strip()
1216 1234 try:
1217 1235 when, offset = map(int, string.split(' '))
1218 1236 except ValueError:
1219 1237 # fill out defaults
1220 1238 if not defaults:
1221 1239 defaults = {}
1222 1240 now = makedate()
1223 1241 for part in "d mb yY HI M S".split():
1224 1242 if part not in defaults:
1225 1243 if part[0] in "HMS":
1226 1244 defaults[part] = "00"
1227 1245 elif part[0] in "dm":
1228 1246 defaults[part] = "1"
1229 1247 else:
1230 1248 defaults[part] = datestr(now, "%" + part[0], False)
1231 1249
1232 1250 for format in formats:
1233 1251 try:
1234 1252 when, offset = strdate(string, format, defaults)
1235 1253 except ValueError:
1236 1254 pass
1237 1255 else:
1238 1256 break
1239 1257 else:
1240 1258         raise Abort(_('invalid date: %r') % string)
1241 1259 # validate explicit (probably user-specified) date and
1242 1260 # time zone offset. values must fit in signed 32 bits for
1243 1261 # current 32-bit linux runtimes. timezones go from UTC-12
1244 1262 # to UTC+14
1245 1263 if abs(when) > 0x7fffffff:
1246 1264 raise Abort(_('date exceeds 32 bits: %d') % when)
1247 1265 if offset < -50400 or offset > 43200:
1248 1266 raise Abort(_('impossible time zone offset: %d') % offset)
1249 1267 return when, offset
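
# Illustrative sketch for parsedate(): a raw 'unixtime offset' string takes
# the int-parsing fast path; anything else is tried against the formats list
# (defaultdateformats unless the caller supplies one) via strdate().
assert parsedate('1000000000 -7200') == (1000000000, -7200)
assert parsedate('') == (0, 0)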
1250 1268
1251 1269 def matchdate(date):
1252 1270 """Return a function that matches a given date match specifier
1253 1271
1254 1272 Formats include:
1255 1273
1256 1274 '{date}' match a given date to the accuracy provided
1257 1275
1258 1276 '<{date}' on or before a given date
1259 1277
1260 1278 '>{date}' on or after a given date
1261 1279
1262 1280 """
1263 1281
1264 1282 def lower(date):
1265 1283 return parsedate(date, extendeddateformats)[0]
1266 1284
1267 1285 def upper(date):
1268 1286 d = dict(mb="12", HI="23", M="59", S="59")
1269 1287 for days in "31 30 29".split():
1270 1288 try:
1271 1289 d["d"] = days
1272 1290 return parsedate(date, extendeddateformats, d)[0]
1273 1291 except:
1274 1292 pass
1275 1293 d["d"] = "28"
1276 1294 return parsedate(date, extendeddateformats, d)[0]
1277 1295
1278 1296 if date[0] == "<":
1279 1297 when = upper(date[1:])
1280 1298 return lambda x: x <= when
1281 1299 elif date[0] == ">":
1282 1300 when = lower(date[1:])
1283 1301 return lambda x: x >= when
1284 1302 elif date[0] == "-":
1285 1303 try:
1286 1304 days = int(date[1:])
1287 1305 except ValueError:
1288 1306 raise Abort(_("invalid day spec: %s") % date[1:])
1289 1307 when = makedate()[0] - days * 3600 * 24
1290 1308 return lambda x: x >= when
1291 1309 elif " to " in date:
1292 1310 a, b = date.split(" to ")
1293 1311 start, stop = lower(a), upper(b)
1294 1312 return lambda x: x >= start and x <= stop
1295 1313 else:
1296 1314 start, stop = lower(date), upper(date)
1297 1315 return lambda x: x >= start and x <= stop
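
# Illustrative sketch for matchdate(): besides the three forms listed in the
# docstring, the code also accepts '-{days}' (within the last N days) and
# '{date} to {date}' (an inclusive range).
m = matchdate('-30')                # changesets from the last 30 days
assert m(makedate()[0])             # "now" certainly falls inside the window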
1298 1316
1299 1317 def shortuser(user):
1300 1318 """Return a short representation of a user name or email address."""
1301 1319 f = user.find('@')
1302 1320 if f >= 0:
1303 1321 user = user[:f]
1304 1322 f = user.find('<')
1305 1323 if f >= 0:
1306 1324 user = user[f+1:]
1307 1325 f = user.find(' ')
1308 1326 if f >= 0:
1309 1327 user = user[:f]
1310 1328 f = user.find('.')
1311 1329 if f >= 0:
1312 1330 user = user[:f]
1313 1331 return user
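
# Illustrative sketch for shortuser(): it strips, in order, everything from
# the '@', the real-name part before '<', anything after a space, and
# everything from the first '.', leaving a short login-like name.
assert shortuser('Jane Doe <jane.doe@example.com>') == 'jane'
assert shortuser('plainuser') == 'plainuser'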
1314 1332
1315 1333 def ellipsis(text, maxlength=400):
1316 1334 """Trim string to at most maxlength (default: 400) characters."""
1317 1335 if len(text) <= maxlength:
1318 1336 return text
1319 1337 else:
1320 1338 return "%s..." % (text[:maxlength-3])
1321 1339
1322 1340 def walkrepos(path):
1323 1341 '''yield every hg repository under path, recursively.'''
1324 1342 def errhandler(err):
1325 1343 if err.filename == path:
1326 1344 raise err
1327 1345
1328 1346 for root, dirs, files in os.walk(path, onerror=errhandler):
1329 1347 for d in dirs:
1330 1348 if d == '.hg':
1331 1349 yield root
1332 1350 dirs[:] = []
1333 1351 break
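
# Illustrative sketch for walkrepos(): collect every repository root below a
# (hypothetical) directory; the dirs[:] = [] above stops os.walk from
# descending into a repository once its '.hg' has been found.
repos = list(walkrepos('/srv/hg'))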
1334 1352
1335 1353 _rcpath = None
1336 1354
1337 1355 def rcpath():
1338 1356 '''return hgrc search path. if env var HGRCPATH is set, use it.
1339 1357 for each item in path, if directory, use files ending in .rc,
1340 1358 else use item.
1341 1359 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1342 1360 if no HGRCPATH, use default os-specific path.'''
1343 1361 global _rcpath
1344 1362 if _rcpath is None:
1345 1363 if 'HGRCPATH' in os.environ:
1346 1364 _rcpath = []
1347 1365 for p in os.environ['HGRCPATH'].split(os.pathsep):
1348 1366 if not p: continue
1349 1367 if os.path.isdir(p):
1350 1368 for f in os.listdir(p):
1351 1369 if f.endswith('.rc'):
1352 1370 _rcpath.append(os.path.join(p, f))
1353 1371 else:
1354 1372 _rcpath.append(p)
1355 1373 else:
1356 1374 _rcpath = os_rcpath()
1357 1375 return _rcpath
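
# Illustrative sketch for rcpath(): with HGRCPATH set, directory entries
# contribute their '*.rc' files and plain entries are used verbatim.  The
# result is cached in _rcpath, so the variable must be set before the first
# call; the paths below are hypothetical.
os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial-extra', '/home/user/.hgrc'])
configs = rcpath()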
1358 1376
1359 1377 def bytecount(nbytes):
1360 1378 '''return byte count formatted as readable string, with units'''
1361 1379
1362 1380 units = (
1363 1381 (100, 1<<30, _('%.0f GB')),
1364 1382 (10, 1<<30, _('%.1f GB')),
1365 1383 (1, 1<<30, _('%.2f GB')),
1366 1384 (100, 1<<20, _('%.0f MB')),
1367 1385 (10, 1<<20, _('%.1f MB')),
1368 1386 (1, 1<<20, _('%.2f MB')),
1369 1387 (100, 1<<10, _('%.0f KB')),
1370 1388 (10, 1<<10, _('%.1f KB')),
1371 1389 (1, 1<<10, _('%.2f KB')),
1372 1390 (1, 1, _('%.0f bytes')),
1373 1391 )
1374 1392
1375 1393 for multiplier, divisor, format in units:
1376 1394 if nbytes >= divisor * multiplier:
1377 1395 return format % (nbytes / float(divisor))
1378 1396 return units[-1][2] % nbytes
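
# Illustrative sketch for bytecount(): the units table is ordered so the
# first matching (multiplier, divisor) pair picks both the unit and the
# precision (assuming no message translation is active).
assert bytecount(1) == '1 bytes'
assert bytecount(2048) == '2.00 KB'
assert bytecount(105 * (1 << 20)) == '105 MB'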
1379 1397
1380 1398 def drop_scheme(scheme, path):
1381 1399 sc = scheme + ':'
1382 1400 if path.startswith(sc):
1383 1401 path = path[len(sc):]
1384 1402 if path.startswith('//'):
1385 1403 path = path[2:]
1386 1404 return path
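
# Illustrative sketch for drop_scheme(): strip a known scheme prefix, plus
# the '//' authority marker when present; other paths pass through untouched.
assert drop_scheme('file', 'file:///repo/path') == '/repo/path'
assert drop_scheme('file', '/repo/path') == '/repo/path'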