Merge with crew-stable
Alexis S. L. Carvalho
r4059:431f3c1d merge default
@@ -0,0 +1,77 b''
1 #!/bin/sh
2 # check that renames are correctly saved by a commit after a merge
3
4 HGMERGE=merge
5 export HGMERGE
6
7 # test with the merge on 3 having the rename on the local parent
8 hg init a
9 cd a
10
11 echo line1 > foo
12 hg add foo
13 hg ci -m '0: add foo' -d '0 0'
14
15 echo line2 >> foo
16 hg ci -m '1: change foo' -d '0 0'
17
18 hg up -C 0
19 hg mv foo bar
20 rm bar
21 echo line0 > bar
22 echo line1 >> bar
23 hg ci -m '2: mv foo bar; change bar' -d '0 0'
24
25 hg merge 1
26 echo '% contents of bar should be line0 line1 line2'
27 cat bar
28 hg ci -m '3: merge with local rename' -d '0 0'
29 hg debugindex .hg/store/data/bar.i
30 hg debugrename bar
31 hg debugindex .hg/store/data/foo.i
32
33 # revert the content change from rev 2
34 hg up -C 2
35 rm bar
36 echo line1 > bar
37 hg ci -m '4: revert content change from rev 2' -d '0 0'
38
39 hg log --template '#rev#:#node|short# #parents#\n'
40 echo '% this should use bar@rev2 as the ancestor'
41 hg --debug merge 3
42 echo '% contents of bar should be line1 line2'
43 cat bar
44 hg ci -m '5: merge' -d '0 0'
45 hg debugindex .hg/store/data/bar.i
46
47
48 # same thing, but with the merge on 3 having the rename on the remote parent
49 echo
50 echo
51 cd ..
52 hg clone -U -r 1 -r 2 a b
53 cd b
54
55 hg up -C 1
56 hg merge 2
57 echo '% contents of bar should be line0 line1 line2'
58 cat bar
59 hg ci -m '3: merge with remote rename' -d '0 0'
60 hg debugindex .hg/store/data/bar.i
61 hg debugrename bar
62 hg debugindex .hg/store/data/foo.i
63
64 # revert the content change from rev 2
65 hg up -C 2
66 rm bar
67 echo line1 > bar
68 hg ci -m '4: revert content change from rev 2' -d '0 0'
69
70 hg log --template '#rev#:#node|short# #parents#\n'
71 echo '% this should use bar@rev2 as the ancestor'
72 hg --debug merge 3
73 echo '% contents of bar should be line1 line2'
74 cat bar
75 hg ci -m '5: merge' -d '0 0'
76 hg debugindex .hg/store/data/bar.i
77
@@ -0,0 +1,83 b''
1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
2 merging bar and foo
3 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
4 (branch merge, don't forget to commit)
5 % contents of bar should be line0 line1 line2
6 line0
7 line1
8 line2
9 rev offset length base linkrev nodeid p1 p2
10 0 0 77 0 2 da78c0659611 000000000000 000000000000
11 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
12 bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
13 rev offset length base linkrev nodeid p1 p2
14 0 0 7 0 0 690b295714ae 000000000000 000000000000
15 1 7 13 1 1 9e25c27b8757 690b295714ae 000000000000
16 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
17 4:2d2f9a22c82b 2:0a3ab4856510
18 3:7d3b554bfdf1 2:0a3ab4856510 1:5cd961e4045d
19 2:0a3ab4856510 0:2665aaee66e9
20 1:5cd961e4045d
21 0:2665aaee66e9
22 % this should use bar@rev2 as the ancestor
23 resolving manifests
24 overwrite None partial False
25 ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 7d3b554bfdf1
26 bar: versions differ -> m
27 merging bar
28 my bar@2d2f9a22c82b+ other bar@7d3b554bfdf1 ancestor bar@0a3ab4856510
29 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
30 (branch merge, don't forget to commit)
31 % contents of bar should be line1 line2
32 line1
33 line2
34 rev offset length base linkrev nodeid p1 p2
35 0 0 77 0 2 da78c0659611 000000000000 000000000000
36 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
37 2 153 7 2 4 4defe5eec418 da78c0659611 000000000000
38 3 160 13 3 5 4663501da27b 4defe5eec418 4b358025380b
39
40
41 requesting all changes
42 adding changesets
43 adding manifests
44 adding file changes
45 added 3 changesets with 3 changes to 2 files (+1 heads)
46 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
47 merging foo and bar
48 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
49 (branch merge, don't forget to commit)
50 % contents of bar should be line0 line1 line2
51 line0
52 line1
53 line2
54 rev offset length base linkrev nodeid p1 p2
55 0 0 77 0 2 da78c0659611 000000000000 000000000000
56 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
57 bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2
58 rev offset length base linkrev nodeid p1 p2
59 0 0 7 0 0 690b295714ae 000000000000 000000000000
60 1 7 13 1 1 9e25c27b8757 690b295714ae 000000000000
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 4:2d2f9a22c82b 2:0a3ab4856510
63 3:96ab80c60897 1:5cd961e4045d 2:0a3ab4856510
64 2:0a3ab4856510 0:2665aaee66e9
65 1:5cd961e4045d
66 0:2665aaee66e9
67 % this should use bar@rev2 as the ancestor
68 resolving manifests
69 overwrite None partial False
70 ancestor 0a3ab4856510 local 2d2f9a22c82b+ remote 96ab80c60897
71 bar: versions differ -> m
72 merging bar
73 my bar@2d2f9a22c82b+ other bar@96ab80c60897 ancestor bar@0a3ab4856510
74 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
75 (branch merge, don't forget to commit)
76 % contents of bar should be line1 line2
77 line1
78 line2
79 rev offset length base linkrev nodeid p1 p2
80 0 0 77 0 2 da78c0659611 000000000000 000000000000
81 1 77 76 0 3 4b358025380b 000000000000 da78c0659611
82 2 153 7 2 4 4defe5eec418 da78c0659611 000000000000
83 3 160 13 3 5 4663501da27b 4defe5eec418 4b358025380b
@@ -1,531 +1,534 b''
1 HGRC(5)
1 HGRC(5)
2 =======
2 =======
3 Bryan O'Sullivan <bos@serpentine.com>
3 Bryan O'Sullivan <bos@serpentine.com>
4
4
5 NAME
5 NAME
6 ----
6 ----
7 hgrc - configuration files for Mercurial
7 hgrc - configuration files for Mercurial
8
8
9 SYNOPSIS
9 SYNOPSIS
10 --------
10 --------
11
11
12 The Mercurial system uses a set of configuration files to control
12 The Mercurial system uses a set of configuration files to control
13 aspects of its behaviour.
13 aspects of its behaviour.
14
14
15 FILES
15 FILES
16 -----
16 -----
17
17
18 Mercurial reads configuration data from several files, if they exist.
18 Mercurial reads configuration data from several files, if they exist.
19 The names of these files depend on the system on which Mercurial is
19 The names of these files depend on the system on which Mercurial is
20 installed.
20 installed.
21
21
22 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
22 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
23 (Unix) <install-root>/etc/mercurial/hgrc::
23 (Unix) <install-root>/etc/mercurial/hgrc::
24 Per-installation configuration files, searched for in the
24 Per-installation configuration files, searched for in the
25 directory where Mercurial is installed. For example, if installed
25 directory where Mercurial is installed. For example, if installed
26 in /shared/tools, Mercurial will look in
26 in /shared/tools, Mercurial will look in
27 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
27 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
28 all Mercurial commands executed by any user in any directory.
28 all Mercurial commands executed by any user in any directory.
29
29
30 (Unix) /etc/mercurial/hgrc.d/*.rc::
30 (Unix) /etc/mercurial/hgrc.d/*.rc::
31 (Unix) /etc/mercurial/hgrc::
31 (Unix) /etc/mercurial/hgrc::
32 (Windows) C:\Mercurial\Mercurial.ini::
32 (Windows) C:\Mercurial\Mercurial.ini::
33 Per-system configuration files, for the system on which Mercurial
33 Per-system configuration files, for the system on which Mercurial
34 is running. Options in these files apply to all Mercurial
34 is running. Options in these files apply to all Mercurial
35 commands executed by any user in any directory. Options in these
35 commands executed by any user in any directory. Options in these
36 files override per-installation options.
36 files override per-installation options.
37
37
38 (Unix) $HOME/.hgrc::
38 (Unix) $HOME/.hgrc::
39 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
39 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
40 (Windows) $HOME\Mercurial.ini::
40 (Windows) $HOME\Mercurial.ini::
41 Per-user configuration file, for the user running Mercurial.
41 Per-user configuration file, for the user running Mercurial.
42 Options in this file apply to all Mercurial commands executed by
42 Options in this file apply to all Mercurial commands executed by
43 this user in any directory. Options in this file override
43 this user in any directory. Options in this file override
44 per-installation and per-system options.
44 per-installation and per-system options.
45 On Windows systems, one of these is chosen exclusively according
45 On Windows systems, one of these is chosen exclusively according
46 to the definition of the HOME environment variable.
46 to the definition of the HOME environment variable.
47
47
48 (Unix, Windows) <repo>/.hg/hgrc::
48 (Unix, Windows) <repo>/.hg/hgrc::
49 Per-repository configuration options that only apply in a
49 Per-repository configuration options that only apply in a
50 particular repository. This file is not version-controlled, and
50 particular repository. This file is not version-controlled, and
51 will not get transferred during a "clone" operation. Options in
51 will not get transferred during a "clone" operation. Options in
52 this file override options in all other configuration files.
52 this file override options in all other configuration files.
53 On Unix, most of this file will be ignored if it doesn't belong
53 On Unix, most of this file will be ignored if it doesn't belong
54 to a trusted user or to a trusted group. See the documentation
54 to a trusted user or to a trusted group. See the documentation
55 for the trusted section below for more details.
55 for the trusted section below for more details.
56
56
57 SYNTAX
57 SYNTAX
58 ------
58 ------
59
59
60 A configuration file consists of sections, led by a "[section]" header
60 A configuration file consists of sections, led by a "[section]" header
61 and followed by "name: value" entries; "name=value" is also accepted.
61 and followed by "name: value" entries; "name=value" is also accepted.
62
62
63 [spam]
63 [spam]
64 eggs=ham
64 eggs=ham
65 green=
65 green=
66 eggs
66 eggs
67
67
68 Each line contains one entry. If the lines that follow are indented,
68 Each line contains one entry. If the lines that follow are indented,
69 they are treated as continuations of that entry.
69 they are treated as continuations of that entry.
70
70
71 Leading whitespace is removed from values. Empty lines are skipped.
71 Leading whitespace is removed from values. Empty lines are skipped.
72
72
73 The optional values can contain format strings which refer to other
73 The optional values can contain format strings which refer to other
74 values in the same section, or values in a special DEFAULT section.
74 values in the same section, or values in a special DEFAULT section.
75
75
76 Lines beginning with "#" or ";" are ignored and may be used to provide
76 Lines beginning with "#" or ";" are ignored and may be used to provide
77 comments.
77 comments.
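
    An illustrative sketch extending the [spam] example above with a comment
    and a value that refers to another value (the "%(name)s" interpolation
    form is an assumption based on the format-string note; the keys are
    invented for the example):

      [spam]
      base = /var/spool
      ; a comment line
      eggs = %(base)s/eggs
      green =
        eggs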
78
78
79 SECTIONS
79 SECTIONS
80 --------
80 --------
81
81
82 This section describes the different sections that may appear in a
82 This section describes the different sections that may appear in a
83 Mercurial "hgrc" file, the purpose of each section, its possible
83 Mercurial "hgrc" file, the purpose of each section, its possible
84 keys, and their possible values.
84 keys, and their possible values.
85
85
86 decode/encode::
86 decode/encode::
87 Filters for transforming files on checkout/checkin. This would
87 Filters for transforming files on checkout/checkin. This would
88 typically be used for newline processing or other
88 typically be used for newline processing or other
89 localization/canonicalization of files.
89 localization/canonicalization of files.
90
90
91 Filters consist of a filter pattern followed by a filter command.
91 Filters consist of a filter pattern followed by a filter command.
92 Filter patterns are globs by default, rooted at the repository
92 Filter patterns are globs by default, rooted at the repository
93 root. For example, to match any file ending in ".txt" in the root
93 root. For example, to match any file ending in ".txt" in the root
94 directory only, use the pattern "*.txt". To match any file ending
94 directory only, use the pattern "*.txt". To match any file ending
95 in ".c" anywhere in the repository, use the pattern "**.c".
95 in ".c" anywhere in the repository, use the pattern "**.c".
96
96
97 The filter command can start with a specifier, either "pipe:" or
97 The filter command can start with a specifier, either "pipe:" or
98 "tempfile:". If no specifier is given, "pipe:" is used by default.
98 "tempfile:". If no specifier is given, "pipe:" is used by default.
99
99
100 A "pipe:" command must accept data on stdin and return the
100 A "pipe:" command must accept data on stdin and return the
101 transformed data on stdout.
101 transformed data on stdout.
102
102
103 Pipe example:
103 Pipe example:
104
104
105 [encode]
105 [encode]
106 # uncompress gzip files on checkin to improve delta compression
106 # uncompress gzip files on checkin to improve delta compression
107 # note: not necessarily a good idea, just an example
107 # note: not necessarily a good idea, just an example
108 *.gz = pipe: gunzip
108 *.gz = pipe: gunzip
109
109
110 [decode]
110 [decode]
111 # recompress gzip files when writing them to the working dir (we
111 # recompress gzip files when writing them to the working dir (we
112 # can safely omit "pipe:", because it's the default)
112 # can safely omit "pipe:", because it's the default)
113 *.gz = gzip
113 *.gz = gzip
114
114
115 A "tempfile:" command is a template. The string INFILE is replaced
115 A "tempfile:" command is a template. The string INFILE is replaced
116 with the name of a temporary file that contains the data to be
116 with the name of a temporary file that contains the data to be
117 filtered by the command. The string OUTFILE is replaced with the
117 filtered by the command. The string OUTFILE is replaced with the
118 name of an empty temporary file, where the filtered data must be
118 name of an empty temporary file, where the filtered data must be
119 written by the command.
119 written by the command.
120
120
121 NOTE: the tempfile mechanism is recommended for Windows systems,
121 NOTE: the tempfile mechanism is recommended for Windows systems,
122 where the standard shell I/O redirection operators often have
122 where the standard shell I/O redirection operators often have
123 strange effects. In particular, if you are doing line ending
123 strange effects. In particular, if you are doing line ending
124 conversion on Windows using the popular dos2unix and unix2dos
124 conversion on Windows using the popular dos2unix and unix2dos
125 programs, you *must* use the tempfile mechanism, as using pipes will
125 programs, you *must* use the tempfile mechanism, as using pipes will
126 corrupt the contents of your files.
126 corrupt the contents of your files.
127
127
128 Tempfile example:
128 Tempfile example:
129
129
130 [encode]
130 [encode]
131 # convert files to unix line ending conventions on checkin
131 # convert files to unix line ending conventions on checkin
132 **.txt = tempfile: dos2unix -n INFILE OUTFILE
132 **.txt = tempfile: dos2unix -n INFILE OUTFILE
133
133
134 [decode]
134 [decode]
135 # convert files to windows line ending conventions when writing
135 # convert files to windows line ending conventions when writing
136 # them to the working dir
136 # them to the working dir
137 **.txt = tempfile: unix2dos -n INFILE OUTFILE
137 **.txt = tempfile: unix2dos -n INFILE OUTFILE
138
138
139 defaults::
139 defaults::
140 Use the [defaults] section to define command defaults, i.e. the
140 Use the [defaults] section to define command defaults, i.e. the
141 default options/arguments to pass to the specified commands.
141 default options/arguments to pass to the specified commands.
142
142
143 The following example makes 'hg log' run in verbose mode, and
143 The following example makes 'hg log' run in verbose mode, and
144 'hg status' show only the modified files, by default.
144 'hg status' show only the modified files, by default.
145
145
146 [defaults]
146 [defaults]
147 log = -v
147 log = -v
148 status = -m
148 status = -m
149
149
150 The actual commands, instead of their aliases, must be used when
150 The actual commands, instead of their aliases, must be used when
151 defining command defaults. The command defaults will also be
151 defining command defaults. The command defaults will also be
152 applied to the aliases of the commands defined.
152 applied to the aliases of the commands defined.
153
153
154 diff::
154 diff::
155 Settings used when displaying diffs. They are all boolean and
155 Settings used when displaying diffs. They are all boolean and
156 default to False.
156 default to False.
157 git;;
157 git;;
158 Use git extended diff format.
158 Use git extended diff format.
159 nodates;;
159 nodates;;
160 Don't include dates in diff headers.
160 Don't include dates in diff headers.
161 showfunc;;
161 showfunc;;
162 Show which function each change is in.
162 Show which function each change is in.
163 ignorews;;
163 ignorews;;
164 Ignore white space when comparing lines.
164 Ignore white space when comparing lines.
165 ignorewsamount;;
165 ignorewsamount;;
166 Ignore changes in the amount of white space.
166 Ignore changes in the amount of white space.
167 ignoreblanklines;;
167 ignoreblanklines;;
168 Ignore changes whose lines are all blank.
168 Ignore changes whose lines are all blank.
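
    An illustrative [diff] block using the keys above (the particular values
    are arbitrary examples, not recommended settings):

      [diff]
      git = True
      showfunc = True
      nodates = True
      ignorews = False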
169
169
170 email::
170 email::
171 Settings for extensions that send email messages.
171 Settings for extensions that send email messages.
172 from;;
172 from;;
173 Optional. Email address to use in "From" header and SMTP envelope
173 Optional. Email address to use in "From" header and SMTP envelope
174 of outgoing messages.
174 of outgoing messages.
175 to;;
175 to;;
176 Optional. Comma-separated list of recipients' email addresses.
176 Optional. Comma-separated list of recipients' email addresses.
177 cc;;
177 cc;;
178 Optional. Comma-separated list of carbon copy recipients'
178 Optional. Comma-separated list of carbon copy recipients'
179 email addresses.
179 email addresses.
180 bcc;;
180 bcc;;
181 Optional. Comma-separated list of blind carbon copy
181 Optional. Comma-separated list of blind carbon copy
182 recipients' email addresses. Cannot be set interactively.
182 recipients' email addresses. Cannot be set interactively.
183 method;;
183 method;;
184 Optional. Method to use to send email messages. If value is
184 Optional. Method to use to send email messages. If value is
185 "smtp" (default), use SMTP (see section "[smtp]" for
185 "smtp" (default), use SMTP (see section "[smtp]" for
186 configuration). Otherwise, the value is the name of a program to run
186 configuration). Otherwise, the value is the name of a program to run
187 that acts like sendmail (takes "-f" option for sender, list of
187 that acts like sendmail (takes "-f" option for sender, list of
188 recipients on command line, message on stdin). Normally, setting
188 recipients on command line, message on stdin). Normally, setting
189 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
189 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
190 sendmail to send messages.
190 sendmail to send messages.
191
191
192 Email example:
192 Email example:
193
193
194 [email]
194 [email]
195 from = Joseph User <joe.user@example.com>
195 from = Joseph User <joe.user@example.com>
196 method = /usr/sbin/sendmail
196 method = /usr/sbin/sendmail
197
197
198 extensions::
198 extensions::
199 Mercurial has an extension mechanism for adding new features. To
199 Mercurial has an extension mechanism for adding new features. To
200 enable an extension, create an entry for it in this section.
200 enable an extension, create an entry for it in this section.
201
201
202 If you know that the extension is already in Python's search path,
202 If you know that the extension is already in Python's search path,
203 you can give the name of the module, followed by "=", with nothing
203 you can give the name of the module, followed by "=", with nothing
204 after the "=".
204 after the "=".
205
205
206 Otherwise, give a name that you choose, followed by "=", followed by
206 Otherwise, give a name that you choose, followed by "=", followed by
207 the path to the ".py" file (including the file name extension) that
207 the path to the ".py" file (including the file name extension) that
208 defines the extension.
208 defines the extension.
209
209
210 Example for ~/.hgrc:
210 Example for ~/.hgrc:
211
211
212 [extensions]
212 [extensions]
213 # (the mq extension will get loaded from mercurial's path)
213 # (the mq extension will get loaded from mercurial's path)
214 hgext.mq =
214 hgext.mq =
215 # (this extension will get loaded from the file specified)
215 # (this extension will get loaded from the file specified)
216 myfeature = ~/.hgext/myfeature.py
216 myfeature = ~/.hgext/myfeature.py
217
217
218 hooks::
218 hooks::
219 Commands or Python functions that get automatically executed by
219 Commands or Python functions that get automatically executed by
220 various actions such as starting or finishing a commit. Multiple
220 various actions such as starting or finishing a commit. Multiple
221 hooks can be run for the same action by appending a suffix to the
221 hooks can be run for the same action by appending a suffix to the
222 action. Overriding a site-wide hook can be done by changing its
222 action. Overriding a site-wide hook can be done by changing its
223 value or setting it to an empty string.
223 value or setting it to an empty string.
224
224
225 Example .hg/hgrc:
225 Example .hg/hgrc:
226
226
227 [hooks]
227 [hooks]
228 # do not use the site-wide hook
228 # do not use the site-wide hook
229 incoming =
229 incoming =
230 incoming.email = /my/email/hook
230 incoming.email = /my/email/hook
231 incoming.autobuild = /my/build/hook
231 incoming.autobuild = /my/build/hook
232
232
233 Most hooks are run with environment variables set that give added
233 Most hooks are run with environment variables set that give added
234 useful information. For each hook below, the environment variables
234 useful information. For each hook below, the environment variables
235 it is passed are listed with names of the form "$HG_foo".
235 it is passed are listed with names of the form "$HG_foo".
236
236
237 changegroup;;
237 changegroup;;
238 Run after a changegroup has been added via push, pull or
238 Run after a changegroup has been added via push, pull or
239 unbundle. ID of the first new changeset is in $HG_NODE. URL from
239 unbundle. ID of the first new changeset is in $HG_NODE. URL from
240 which changes came is in $HG_URL.
240 which changes came is in $HG_URL.
241 commit;;
241 commit;;
242 Run after a changeset has been created in the local repository.
242 Run after a changeset has been created in the local repository.
243 ID of the newly created changeset is in $HG_NODE. Parent
243 ID of the newly created changeset is in $HG_NODE. Parent
244 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
244 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
245 incoming;;
245 incoming;;
246 Run after a changeset has been pulled, pushed, or unbundled into
246 Run after a changeset has been pulled, pushed, or unbundled into
247 the local repository. The ID of the newly arrived changeset is in
247 the local repository. The ID of the newly arrived changeset is in
248 $HG_NODE. URL that was source of changes is in $HG_URL.
248 $HG_NODE. URL that was source of changes is in $HG_URL.
249 outgoing;;
249 outgoing;;
250 Run after sending changes from local repository to another. ID of
250 Run after sending changes from local repository to another. ID of
251 first changeset sent is in $HG_NODE. Source of operation is in
251 first changeset sent is in $HG_NODE. Source of operation is in
252 $HG_SOURCE; see "preoutgoing" hook for description.
252 $HG_SOURCE; see "preoutgoing" hook for description.
253 prechangegroup;;
253 prechangegroup;;
254 Run before a changegroup is added via push, pull or unbundle.
254 Run before a changegroup is added via push, pull or unbundle.
255 Exit status 0 allows the changegroup to proceed. Non-zero status
255 Exit status 0 allows the changegroup to proceed. Non-zero status
256 will cause the push, pull or unbundle to fail. URL from which
256 will cause the push, pull or unbundle to fail. URL from which
257 changes will come is in $HG_URL.
257 changes will come is in $HG_URL.
258 precommit;;
258 precommit;;
259 Run before starting a local commit. Exit status 0 allows the
259 Run before starting a local commit. Exit status 0 allows the
260 commit to proceed. Non-zero status will cause the commit to fail.
260 commit to proceed. Non-zero status will cause the commit to fail.
261 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
261 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
262 preoutgoing;;
262 preoutgoing;;
263 Run before computing changes to send from the local repository to
263 Run before computing changes to send from the local repository to
264 another. Non-zero status will cause failure. This lets you
264 another. Non-zero status will cause failure. This lets you
265 prevent pull over http or ssh. It also runs for local pull,
265 prevent pull over http or ssh. It also runs for local pull,
266 push (outbound) or bundle commands, but is not effective there, since
266 push (outbound) or bundle commands, but is not effective there, since
267 you can simply copy the files instead. Source of operation is in
267 you can simply copy the files instead. Source of operation is in
268 $HG_SOURCE. If "serve", operation is happening on behalf of
268 $HG_SOURCE. If "serve", operation is happening on behalf of
269 remote ssh or http repository. If "push", "pull" or "bundle",
269 remote ssh or http repository. If "push", "pull" or "bundle",
270 operation is happening on behalf of repository on same system.
270 operation is happening on behalf of repository on same system.
271 pretag;;
271 pretag;;
272 Run before creating a tag. Exit status 0 allows the tag to be
272 Run before creating a tag. Exit status 0 allows the tag to be
273 created. Non-zero status will cause the tag to fail. ID of
273 created. Non-zero status will cause the tag to fail. ID of
274 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
274 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
275 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
275 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
276 pretxnchangegroup;;
276 pretxnchangegroup;;
277 Run after a changegroup has been added via push, pull or unbundle,
277 Run after a changegroup has been added via push, pull or unbundle,
278 but before the transaction has been committed. Changegroup is
278 but before the transaction has been committed. Changegroup is
279 visible to hook program. This lets you validate incoming changes
279 visible to hook program. This lets you validate incoming changes
280 before accepting them. Passed the ID of the first new changeset
280 before accepting them. Passed the ID of the first new changeset
281 in $HG_NODE. Exit status 0 allows the transaction to commit.
281 in $HG_NODE. Exit status 0 allows the transaction to commit.
282 Non-zero status will cause the transaction to be rolled back and
282 Non-zero status will cause the transaction to be rolled back and
283 the push, pull or unbundle will fail. URL that was source of
283 the push, pull or unbundle will fail. URL that was source of
284 changes is in $HG_URL.
284 changes is in $HG_URL.
285 pretxncommit;;
285 pretxncommit;;
286 Run after a changeset has been created but the transaction not yet
286 Run after a changeset has been created but the transaction not yet
287 committed. Changeset is visible to hook program. This lets you
287 committed. Changeset is visible to hook program. This lets you
288 validate commit message and changes. Exit status 0 allows the
288 validate commit message and changes. Exit status 0 allows the
289 commit to proceed. Non-zero status will cause the transaction to
289 commit to proceed. Non-zero status will cause the transaction to
290 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
290 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
291 IDs are in $HG_PARENT1 and $HG_PARENT2.
291 IDs are in $HG_PARENT1 and $HG_PARENT2.
292 preupdate;;
292 preupdate;;
293 Run before updating the working directory. Exit status 0 allows
293 Run before updating the working directory. Exit status 0 allows
294 the update to proceed. Non-zero status will prevent the update.
294 the update to proceed. Non-zero status will prevent the update.
295 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
295 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
296 of second new parent is in $HG_PARENT2.
296 of second new parent is in $HG_PARENT2.
297 tag;;
297 tag;;
298 Run after a tag is created. ID of tagged changeset is in
298 Run after a tag is created. ID of tagged changeset is in
299 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
299 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
300 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
300 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
301 update;;
301 update;;
302 Run after updating the working directory. Changeset ID of first
302 Run after updating the working directory. Changeset ID of first
303 new parent is in $HG_PARENT1. If merge, ID of second new parent
303 new parent is in $HG_PARENT1. If merge, ID of second new parent
304 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
304 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
305 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
305 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
306
306
307 Note: In earlier releases, the names of hook environment variables
307 Note: In earlier releases, the names of hook environment variables
308 did not have a "HG_" prefix. The old unprefixed names are no longer
308 did not have a "HG_" prefix. The old unprefixed names are no longer
309 provided in the environment.
309 provided in the environment.
310
310
311 The syntax for Python hooks is as follows:
311 The syntax for Python hooks is as follows:
312
312
313 hookname = python:modulename.submodule.callable
313 hookname = python:modulename.submodule.callable
314
314
315 Python hooks are run within the Mercurial process. Each hook is
315 Python hooks are run within the Mercurial process. Each hook is
316 called with at least three keyword arguments: a ui object (keyword
316 called with at least three keyword arguments: a ui object (keyword
317 "ui"), a repository object (keyword "repo"), and a "hooktype"
317 "ui"), a repository object (keyword "repo"), and a "hooktype"
318 keyword that tells what kind of hook is used. Arguments listed as
318 keyword that tells what kind of hook is used. Arguments listed as
319 environment variables above are passed as keyword arguments, with no
319 environment variables above are passed as keyword arguments, with no
320 "HG_" prefix, and names in lower case.
320 "HG_" prefix, and names in lower case.
321
321
322 If a Python hook returns a "true" value or raises an exception, this
322 If a Python hook returns a "true" value or raises an exception, this
323 is treated as failure of the hook.
323 is treated as failure of the hook.
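
    As an illustration, a minimal Python hook sketch. The module name
    "myhooks", the function name and the ten-character rule are assumptions
    made for this example, not part of Mercurial; the module must be
    importable from Python's search path:

      # myhooks.py
      def checkmessage(ui, repo, hooktype, node=None, **kwargs):
          # pretxncommit passes the ID of the new changeset as "node"
          desc = repo.changectx(node).description()
          if len(desc.strip()) < 10:
              ui.warn('commit message too short\n')
              return True    # a true return value makes the hook fail
          return False

    It would be enabled with an entry such as:

      [hooks]
      pretxncommit.checkmessage = python:myhooks.checkmessage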
324
324
325 http_proxy::
325 http_proxy::
326 Used to access web-based Mercurial repositories through a HTTP
326 Used to access web-based Mercurial repositories through a HTTP
327 proxy.
327 proxy.
328 host;;
328 host;;
329 Host name and (optional) port of the proxy server, for example
329 Host name and (optional) port of the proxy server, for example
330 "myproxy:8000".
330 "myproxy:8000".
331 no;;
331 no;;
332 Optional. Comma-separated list of host names that should bypass
332 Optional. Comma-separated list of host names that should bypass
333 the proxy.
333 the proxy.
334 passwd;;
334 passwd;;
335 Optional. Password to authenticate with at the proxy server.
335 Optional. Password to authenticate with at the proxy server.
336 user;;
336 user;;
337 Optional. User name to authenticate with at the proxy server.
337 Optional. User name to authenticate with at the proxy server.
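
    An illustrative [http_proxy] block (host names and credentials are
    placeholders):

      [http_proxy]
      host = myproxy:8000
      no = localhost, hg.example.com
      user = proxyuser
      passwd = secret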
338
338
339 smtp::
339 smtp::
340 Configuration for extensions that need to send email messages.
340 Configuration for extensions that need to send email messages.
341 host;;
341 host;;
342 Host name of mail server, e.g. "mail.example.com".
342 Host name of mail server, e.g. "mail.example.com".
343 port;;
343 port;;
344 Optional. Port to connect to on mail server. Default: 25.
344 Optional. Port to connect to on mail server. Default: 25.
345 tls;;
345 tls;;
346 Optional. Whether to connect to mail server using TLS. True or
346 Optional. Whether to connect to mail server using TLS. True or
347 False. Default: False.
347 False. Default: False.
348 username;;
348 username;;
349 Optional. User name to authenticate to SMTP server with.
349 Optional. User name to authenticate to SMTP server with.
350 If username is specified, password must also be specified.
350 If username is specified, password must also be specified.
351 Default: none.
351 Default: none.
352 password;;
352 password;;
353 Optional. Password to authenticate to SMTP server with.
353 Optional. Password to authenticate to SMTP server with.
354 If username is specified, password must also be specified.
354 If username is specified, password must also be specified.
355 Default: none.
355 Default: none.
356 local_hostname;;
356 local_hostname;;
357 Optional. The hostname that the sender can use to identify itself
357 Optional. The hostname that the sender can use to identify itself
358 to the MTA.
358 to the MTA.
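
    An illustrative [smtp] block (the host and account details are
    placeholders):

      [smtp]
      host = mail.example.com
      port = 25
      tls = True
      username = hguser
      password = secret
      local_hostname = workstation.example.com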
359
359
360 paths::
360 paths::
361 Assigns symbolic names to repositories. The left side is the
361 Assigns symbolic names to repositories. The left side is the
362 symbolic name, and the right gives the directory or URL that is the
362 symbolic name, and the right gives the directory or URL that is the
363 location of the repository. Default paths can be declared by
363 location of the repository. Default paths can be declared by
364 setting the following entries.
364 setting the following entries.
365 default;;
365 default;;
366 Directory or URL to use when pulling if no source is specified.
366 Directory or URL to use when pulling if no source is specified.
367 Default is set to repository from which the current repository
367 Default is set to repository from which the current repository
368 was cloned.
368 was cloned.
369 default-push;;
369 default-push;;
370 Optional. Directory or URL to use when pushing if no destination
370 Optional. Directory or URL to use when pushing if no destination
371 is specified.
371 is specified.
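
    An illustrative [paths] block (the locations are placeholders):

      [paths]
      default = http://hg.example.com/main
      default-push = ssh://hg@example.com/main
      mirror = /srv/hg/main-mirror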
372
372
373 server::
373 server::
374 Controls generic server settings.
374 Controls generic server settings.
375 uncompressed;;
375 uncompressed;;
376 Whether to allow clients to clone a repo using the uncompressed
376 Whether to allow clients to clone a repo using the uncompressed
377 streaming protocol. This transfers about 40% more data than a
377 streaming protocol. This transfers about 40% more data than a
378 regular clone, but uses less memory and CPU on both server and
378 regular clone, but uses less memory and CPU on both server and
379 client. Over a LAN (100Mbps or better) or a very fast WAN, an
379 client. Over a LAN (100Mbps or better) or a very fast WAN, an
380 uncompressed streaming clone is a lot faster (~10x) than a regular
380 uncompressed streaming clone is a lot faster (~10x) than a regular
381 clone. Over most WAN connections (anything slower than about
381 clone. Over most WAN connections (anything slower than about
382 6Mbps), uncompressed streaming is slower, because of the extra
382 6Mbps), uncompressed streaming is slower, because of the extra
383 data transfer overhead. Default is False.
383 data transfer overhead. Default is False.
384
384
385 trusted::
385 trusted::
386 For security reasons, Mercurial will not use the settings in
386 For security reasons, Mercurial will not use the settings in
387 the .hg/hgrc file from a repository if it doesn't belong to a
387 the .hg/hgrc file from a repository if it doesn't belong to a
388 trusted user or to a trusted group. The main exception is the
388 trusted user or to a trusted group. The main exception is the
389 web interface, which automatically uses some safe settings, since
389 web interface, which automatically uses some safe settings, since
390 it's common to serve repositories from different users.
390 it's common to serve repositories from different users.
391
391
392 This section specifies what users and groups are trusted. The
392 This section specifies what users and groups are trusted. The
393 current user is always trusted. To trust everybody, list a user
393 current user is always trusted. To trust everybody, list a user
394 or a group with name "*".
394 or a group with name "*".
395
395
396 users;;
396 users;;
397 Comma-separated list of trusted users.
397 Comma-separated list of trusted users.
398 groups;;
398 groups;;
399 Comma-separated list of trusted groups.
399 Comma-separated list of trusted groups.
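
    An illustrative [trusted] block (the user and group names are
    placeholders):

      [trusted]
      users = alice, bob
      groups = wheel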
400
400
401 ui::
401 ui::
402 User interface controls.
402 User interface controls.
403 debug;;
403 debug;;
404 Print debugging information. True or False. Default is False.
404 Print debugging information. True or False. Default is False.
405 editor;;
405 editor;;
406 The editor to use during a commit. Default is $EDITOR or "vi".
406 The editor to use during a commit. Default is $EDITOR or "vi".
407 fallbackencoding;;
407 fallbackencoding;;
408 Encoding to try if it's not possible to decode the changelog using
408 Encoding to try if it's not possible to decode the changelog using
409 UTF-8. Default is ISO-8859-1.
409 UTF-8. Default is ISO-8859-1.
410 ignore;;
410 ignore;;
411 A file to read per-user ignore patterns from. This file should be in
411 A file to read per-user ignore patterns from. This file should be in
412 the same format as a repository-wide .hgignore file. This option
412 the same format as a repository-wide .hgignore file. This option
413 supports hook syntax, so if you want to specify multiple ignore
413 supports hook syntax, so if you want to specify multiple ignore
414 files, you can do so by setting something like
414 files, you can do so by setting something like
415 "ignore.other = ~/.hgignore2". For details of the ignore file
415 "ignore.other = ~/.hgignore2". For details of the ignore file
416 format, see the hgignore(5) man page.
416 format, see the hgignore(5) man page.
417 interactive;;
417 interactive;;
418 Whether to allow prompting the user. True or False. Default is True.
418 Whether to allow prompting the user. True or False. Default is True.
419 logtemplate;;
419 logtemplate;;
420 Template string for commands that print changesets.
420 Template string for commands that print changesets.
421 style;;
421 style;;
422 Name of style to use for command output.
422 Name of style to use for command output.
423 merge;;
423 merge;;
424 The conflict resolution program to use during a manual merge.
424 The conflict resolution program to use during a manual merge.
425 Default is "hgmerge".
425 Default is "hgmerge".
426 quiet;;
426 quiet;;
427 Reduce the amount of output printed. True or False. Default is False.
427 Reduce the amount of output printed. True or False. Default is False.
428 remotecmd;;
428 remotecmd;;
429 Remote command to use for clone/push/pull operations. Default is 'hg'.
429 Remote command to use for clone/push/pull operations. Default is 'hg'.
430 ssh;;
430 ssh;;
431 Command to use for SSH connections. Default is 'ssh'.
431 Command to use for SSH connections. Default is 'ssh'.
432 strict;;
432 strict;;
433 Require exact command names, instead of allowing unambiguous
433 Require exact command names, instead of allowing unambiguous
434 abbreviations. True or False. Default is False.
434 abbreviations. True or False. Default is False.
435 timeout;;
435 timeout;;
436 The timeout used when a lock is held (in seconds); a negative value
436 The timeout used when a lock is held (in seconds); a negative value
437 means no timeout. Default is 600.
437 means no timeout. Default is 600.
438 username;;
438 username;;
439 The committer of a changeset created when running "commit".
439 The committer of a changeset created when running "commit".
440 Typically a person's name and email address, e.g. "Fred Widget
440 Typically a person's name and email address, e.g. "Fred Widget
441 <fred@example.com>". Default is $EMAIL or username@hostname.
441 <fred@example.com>". Default is $EMAIL or username@hostname.
442 If the username in hgrc is empty, it has to be specified manually or
443 in a different hgrc file (e.g. $HOME/.hgrc, if the admin set "username ="
444 in the system hgrc).
442 verbose;;
445 verbose;;
443 Increase the amount of output printed. True or False. Default is False.
446 Increase the amount of output printed. True or False. Default is False.
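
    An illustrative [ui] block using a few of the keys above (the values are
    placeholders):

      [ui]
      username = Fred Widget <fred@example.com>
      editor = vi
      merge = hgmerge
      verbose = True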
444
447
445
448
446 web::
449 web::
447 Web interface configuration.
450 Web interface configuration.
448 accesslog;;
451 accesslog;;
449 Where to output the access log. Default is stdout.
452 Where to output the access log. Default is stdout.
450 address;;
453 address;;
451 Interface address to bind to. Default is all.
454 Interface address to bind to. Default is all.
452 allow_archive;;
455 allow_archive;;
453 List of archive formats (bz2, gz, zip) allowed for downloading.
456 List of archive formats (bz2, gz, zip) allowed for downloading.
454 Default is empty.
457 Default is empty.
455 allowbz2;;
458 allowbz2;;
456 (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
459 (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
457 Default is false.
460 Default is false.
458 allowgz;;
461 allowgz;;
459 (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
462 (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
460 Default is false.
463 Default is false.
461 allowpull;;
464 allowpull;;
462 Whether to allow pulling from the repository. Default is true.
465 Whether to allow pulling from the repository. Default is true.
463 allow_push;;
466 allow_push;;
464 Whether to allow pushing to the repository. If empty or not set,
467 Whether to allow pushing to the repository. If empty or not set,
465 push is not allowed. If the special value "*", any remote user
468 push is not allowed. If the special value "*", any remote user
466 can push, including unauthenticated users. Otherwise, the remote
469 can push, including unauthenticated users. Otherwise, the remote
467 user must have been authenticated, and the authenticated user name
470 user must have been authenticated, and the authenticated user name
468 must be present in this list (separated by whitespace or ",").
471 must be present in this list (separated by whitespace or ",").
469 The contents of the allow_push list are examined after the
472 The contents of the allow_push list are examined after the
470 deny_push list.
473 deny_push list.
471 allowzip;;
474 allowzip;;
472 (DEPRECATED) Whether to allow .zip downloading of repo revisions.
475 (DEPRECATED) Whether to allow .zip downloading of repo revisions.
473 Default is false. This feature creates temporary files.
476 Default is false. This feature creates temporary files.
474 baseurl;;
477 baseurl;;
475 Base URL to use when publishing URLs in other locations, so
478 Base URL to use when publishing URLs in other locations, so
476 third-party tools like email notification hooks can construct URLs.
479 third-party tools like email notification hooks can construct URLs.
477 Example: "http://hgserver/repos/"
480 Example: "http://hgserver/repos/"
478 contact;;
481 contact;;
479 Name or email address of the person in charge of the repository.
482 Name or email address of the person in charge of the repository.
480 Default is "unknown".
483 Default is "unknown".
481 deny_push;;
484 deny_push;;
482 Whether to deny pushing to the repository. If empty or not set,
485 Whether to deny pushing to the repository. If empty or not set,
483 push is not denied. If the special value "*", all remote users
486 push is not denied. If the special value "*", all remote users
484 are denied push. Otherwise, unauthenticated users are all denied,
487 are denied push. Otherwise, unauthenticated users are all denied,
485 and any authenticated user name present in this list (separated by
488 and any authenticated user name present in this list (separated by
486 whitespace or ",") is also denied. The contents of the deny_push
489 whitespace or ",") is also denied. The contents of the deny_push
487 list are examined before the allow_push list.
490 list are examined before the allow_push list.
488 description;;
491 description;;
489 Textual description of the repository's purpose or contents.
492 Textual description of the repository's purpose or contents.
490 Default is "unknown".
493 Default is "unknown".
491 errorlog;;
494 errorlog;;
492 Where to output the error log. Default is stderr.
495 Where to output the error log. Default is stderr.
493 ipv6;;
496 ipv6;;
494 Whether to use IPv6. Default is false.
497 Whether to use IPv6. Default is false.
495 name;;
498 name;;
496 Repository name to use in the web interface. Default is current
499 Repository name to use in the web interface. Default is current
497 working directory.
500 working directory.
498 maxchanges;;
501 maxchanges;;
499 Maximum number of changes to list on the changelog. Default is 10.
502 Maximum number of changes to list on the changelog. Default is 10.
500 maxfiles;;
503 maxfiles;;
501 Maximum number of files to list per changeset. Default is 10.
504 Maximum number of files to list per changeset. Default is 10.
502 port;;
505 port;;
503 Port to listen on. Default is 8000.
506 Port to listen on. Default is 8000.
504 push_ssl;;
507 push_ssl;;
505 Whether to require that inbound pushes be transported over SSL to
508 Whether to require that inbound pushes be transported over SSL to
506 prevent password sniffing. Default is true.
509 prevent password sniffing. Default is true.
507 stripes;;
510 stripes;;
508 How many lines a "zebra stripe" should span in multiline output.
511 How many lines a "zebra stripe" should span in multiline output.
509 Default is 1; set to 0 to disable.
512 Default is 1; set to 0 to disable.
510 style;;
513 style;;
511 Which template map style to use.
514 Which template map style to use.
512 templates;;
515 templates;;
513 Where to find the HTML templates. Default is install path.
516 Where to find the HTML templates. Default is install path.
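
    An illustrative [web] block (the contact, description and user names are
    placeholders):

      [web]
      contact = Fred Widget <fred@example.com>
      description = main development repository
      allow_archive = gz, zip
      push_ssl = true
      allow_push = alice, bob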
514
517
515
518
516 AUTHOR
519 AUTHOR
517 ------
520 ------
518 Bryan O'Sullivan <bos@serpentine.com>.
521 Bryan O'Sullivan <bos@serpentine.com>.
519
522
520 Mercurial was written by Matt Mackall <mpm@selenic.com>.
523 Mercurial was written by Matt Mackall <mpm@selenic.com>.
521
524
522 SEE ALSO
525 SEE ALSO
523 --------
526 --------
524 hg(1), hgignore(5)
527 hg(1), hgignore(5)
525
528
526 COPYING
529 COPYING
527 -------
530 -------
528 This manual page is copyright 2005 Bryan O'Sullivan.
531 This manual page is copyright 2005 Bryan O'Sullivan.
529 Mercurial is copyright 2005, 2006 Matt Mackall.
532 Mercurial is copyright 2005, 2006 Matt Mackall.
530 Free use of this software is granted under the terms of the GNU General
533 Free use of this software is granted under the terms of the GNU General
531 Public License (GPL).
534 Public License (GPL).
@@ -1,753 +1,755 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, mdiff, util, templater, patch
10 import os, sys, mdiff, util, templater, patch
11
11
12 revrangesep = ':'
12 revrangesep = ':'
13
13
14 def revpair(repo, revs):
14 def revpair(repo, revs):
15 '''return pair of nodes, given list of revisions. second item can
15 '''return pair of nodes, given list of revisions. second item can
16 be None, meaning use working dir.'''
16 be None, meaning use working dir.'''
17
17
18 def revfix(repo, val, defval):
18 def revfix(repo, val, defval):
19 if not val and val != 0 and defval is not None:
19 if not val and val != 0 and defval is not None:
20 val = defval
20 val = defval
21 return repo.lookup(val)
21 return repo.lookup(val)
22
22
23 if not revs:
23 if not revs:
24 return repo.dirstate.parents()[0], None
24 return repo.dirstate.parents()[0], None
25 end = None
25 end = None
26 if len(revs) == 1:
26 if len(revs) == 1:
27 if revrangesep in revs[0]:
27 if revrangesep in revs[0]:
28 start, end = revs[0].split(revrangesep, 1)
28 start, end = revs[0].split(revrangesep, 1)
29 start = revfix(repo, start, 0)
29 start = revfix(repo, start, 0)
30 end = revfix(repo, end, repo.changelog.count() - 1)
30 end = revfix(repo, end, repo.changelog.count() - 1)
31 else:
31 else:
32 start = revfix(repo, revs[0], None)
32 start = revfix(repo, revs[0], None)
33 elif len(revs) == 2:
33 elif len(revs) == 2:
34 if revrangesep in revs[0] or revrangesep in revs[1]:
34 if revrangesep in revs[0] or revrangesep in revs[1]:
35 raise util.Abort(_('too many revisions specified'))
35 raise util.Abort(_('too many revisions specified'))
36 start = revfix(repo, revs[0], None)
36 start = revfix(repo, revs[0], None)
37 end = revfix(repo, revs[1], None)
37 end = revfix(repo, revs[1], None)
38 else:
38 else:
39 raise util.Abort(_('too many revisions specified'))
39 raise util.Abort(_('too many revisions specified'))
40 return start, end
40 return start, end
41
41
42 def revrange(repo, revs):
42 def revrange(repo, revs):
43 """Yield revision as strings from a list of revision specifications."""
43 """Yield revision as strings from a list of revision specifications."""
44
44
45 def revfix(repo, val, defval):
45 def revfix(repo, val, defval):
46 if not val and val != 0 and defval is not None:
46 if not val and val != 0 and defval is not None:
47 return defval
47 return defval
48 return repo.changelog.rev(repo.lookup(val))
48 return repo.changelog.rev(repo.lookup(val))
49
49
50 seen, l = {}, []
50 seen, l = {}, []
51 for spec in revs:
51 for spec in revs:
52 if revrangesep in spec:
52 if revrangesep in spec:
53 start, end = spec.split(revrangesep, 1)
53 start, end = spec.split(revrangesep, 1)
54 start = revfix(repo, start, 0)
54 start = revfix(repo, start, 0)
55 end = revfix(repo, end, repo.changelog.count() - 1)
55 end = revfix(repo, end, repo.changelog.count() - 1)
56 step = start > end and -1 or 1
56 step = start > end and -1 or 1
57 for rev in xrange(start, end+step, step):
57 for rev in xrange(start, end+step, step):
58 if rev in seen:
58 if rev in seen:
59 continue
59 continue
60 seen[rev] = 1
60 seen[rev] = 1
61 l.append(rev)
61 l.append(rev)
62 else:
62 else:
63 rev = revfix(repo, spec, None)
63 rev = revfix(repo, spec, None)
64 if rev in seen:
64 if rev in seen:
65 continue
65 continue
66 seen[rev] = 1
66 seen[rev] = 1
67 l.append(rev)
67 l.append(rev)
68
68
69 return l
69 return l
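# Illustrative usage sketch (not part of this module): with revisions 0..9
# in a repository, revrange(repo, ['2:4', '8']) returns [2, 3, 4, 8]; a
# reversed spec such as ['4:2'] yields [4, 3, 2], and duplicate revisions
# are skipped.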
70
70
71 def make_filename(repo, pat, node,
71 def make_filename(repo, pat, node,
72 total=None, seqno=None, revwidth=None, pathname=None):
72 total=None, seqno=None, revwidth=None, pathname=None):
73 node_expander = {
73 node_expander = {
74 'H': lambda: hex(node),
74 'H': lambda: hex(node),
75 'R': lambda: str(repo.changelog.rev(node)),
75 'R': lambda: str(repo.changelog.rev(node)),
76 'h': lambda: short(node),
76 'h': lambda: short(node),
77 }
77 }
78 expander = {
78 expander = {
79 '%': lambda: '%',
79 '%': lambda: '%',
80 'b': lambda: os.path.basename(repo.root),
80 'b': lambda: os.path.basename(repo.root),
81 }
81 }
82
82
83 try:
83 try:
84 if node:
84 if node:
85 expander.update(node_expander)
85 expander.update(node_expander)
86 if node and revwidth is not None:
86 if node and revwidth is not None:
87 expander['r'] = (lambda:
87 expander['r'] = (lambda:
88 str(repo.changelog.rev(node)).zfill(revwidth))
88 str(repo.changelog.rev(node)).zfill(revwidth))
89 if total is not None:
89 if total is not None:
90 expander['N'] = lambda: str(total)
90 expander['N'] = lambda: str(total)
91 if seqno is not None:
91 if seqno is not None:
92 expander['n'] = lambda: str(seqno)
92 expander['n'] = lambda: str(seqno)
93 if total is not None and seqno is not None:
93 if total is not None and seqno is not None:
94 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
94 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
95 if pathname is not None:
95 if pathname is not None:
96 expander['s'] = lambda: os.path.basename(pathname)
96 expander['s'] = lambda: os.path.basename(pathname)
97 expander['d'] = lambda: os.path.dirname(pathname) or '.'
97 expander['d'] = lambda: os.path.dirname(pathname) or '.'
98 expander['p'] = lambda: pathname
98 expander['p'] = lambda: pathname
99
99
100 newname = []
100 newname = []
101 patlen = len(pat)
101 patlen = len(pat)
102 i = 0
102 i = 0
103 while i < patlen:
103 while i < patlen:
104 c = pat[i]
104 c = pat[i]
105 if c == '%':
105 if c == '%':
106 i += 1
106 i += 1
107 c = pat[i]
107 c = pat[i]
108 c = expander[c]()
108 c = expander[c]()
109 newname.append(c)
109 newname.append(c)
110 i += 1
110 i += 1
111 return ''.join(newname)
111 return ''.join(newname)
112 except KeyError, inst:
112 except KeyError, inst:
113 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
113 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
114 inst.args[0])
114 inst.args[0])
115
115
116 def make_file(repo, pat, node=None,
116 def make_file(repo, pat, node=None,
117 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
117 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
118 if not pat or pat == '-':
118 if not pat or pat == '-':
119 return 'w' in mode and sys.stdout or sys.stdin
119 return 'w' in mode and sys.stdout or sys.stdin
120 if hasattr(pat, 'write') and 'w' in mode:
120 if hasattr(pat, 'write') and 'w' in mode:
121 return pat
121 return pat
122 if hasattr(pat, 'read') and 'r' in mode:
122 if hasattr(pat, 'read') and 'r' in mode:
123 return pat
123 return pat
124 return open(make_filename(repo, pat, node, total, seqno, revwidth,
124 return open(make_filename(repo, pat, node, total, seqno, revwidth,
125 pathname),
125 pathname),
126 mode)
126 mode)
127
127
128 def matchpats(repo, pats=[], opts={}, head=''):
128 def matchpats(repo, pats=[], opts={}, head='', globbed=False):
129 cwd = repo.getcwd()
129 cwd = repo.getcwd()
130 if not pats and cwd:
130 if not pats and cwd:
131 opts['include'] = [os.path.join(cwd, i)
131 opts['include'] = [os.path.join(cwd, i)
132 for i in opts.get('include', [])]
132 for i in opts.get('include', [])]
133 opts['exclude'] = [os.path.join(cwd, x)
133 opts['exclude'] = [os.path.join(cwd, x)
134 for x in opts.get('exclude', [])]
134 for x in opts.get('exclude', [])]
135 cwd = ''
135 cwd = ''
136 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
136 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
137 opts.get('exclude'), head)
137 opts.get('exclude'), head, globbed=globbed)
138
138
139 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
139 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
140 files, matchfn, anypats = matchpats(repo, pats, opts, head)
140 globbed=False):
141 files, matchfn, anypats = matchpats(repo, pats, opts, head,
142 globbed=globbed)
141 exact = dict.fromkeys(files)
143 exact = dict.fromkeys(files)
142 for src, fn in repo.walk(node=node, files=files, match=matchfn,
144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
143 badmatch=badmatch):
145 badmatch=badmatch):
144 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
145
147
146 def findrenames(repo, added=None, removed=None, threshold=0.5):
148 def findrenames(repo, added=None, removed=None, threshold=0.5):
147 if added is None or removed is None:
149 if added is None or removed is None:
148 added, removed = repo.status()[1:3]
150 added, removed = repo.status()[1:3]
149 ctx = repo.changectx()
151 ctx = repo.changectx()
150 for a in added:
152 for a in added:
151 aa = repo.wread(a)
153 aa = repo.wread(a)
152 bestscore, bestname = None, None
154 bestscore, bestname = None, None
153 for r in removed:
155 for r in removed:
154 rr = ctx.filectx(r).data()
156 rr = ctx.filectx(r).data()
155 delta = mdiff.textdiff(aa, rr)
157 delta = mdiff.textdiff(aa, rr)
156 if len(delta) < len(aa):
158 if len(delta) < len(aa):
157 myscore = 1.0 - (float(len(delta)) / len(aa))
159 myscore = 1.0 - (float(len(delta)) / len(aa))
158 if bestscore is None or myscore > bestscore:
160 if bestscore is None or myscore > bestscore:
159 bestscore, bestname = myscore, r
161 bestscore, bestname = myscore, r
160 if bestname and bestscore >= threshold:
162 if bestname and bestscore >= threshold:
161 yield bestname, a, bestscore
163 yield bestname, a, bestscore
162
164
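# Illustrative sketch (not part of the changeset): the scoring rule used by
# findrenames above, pulled out as a hypothetical helper.  A pair is
# reported as a rename when the best score reaches the caller's threshold
# (addremove -s 80 below translates to a threshold of 0.8).
def _rename_score(added_data, removed_data):
    # 1.0 minus the relative size of the binary delta; 0.0 when the delta
    # is no smaller than the added file itself
    delta = mdiff.textdiff(added_data, removed_data)
    if len(delta) >= len(added_data):
        return 0.0
    return 1.0 - float(len(delta)) / len(added_data)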
163 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
165 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
164 similarity=None):
166 similarity=None):
165 if dry_run is None:
167 if dry_run is None:
166 dry_run = opts.get('dry_run')
168 dry_run = opts.get('dry_run')
167 if similarity is None:
169 if similarity is None:
168 similarity = float(opts.get('similarity') or 0)
170 similarity = float(opts.get('similarity') or 0)
169 add, remove = [], []
171 add, remove = [], []
170 mapping = {}
172 mapping = {}
171 for src, abs, rel, exact in walk(repo, pats, opts):
173 for src, abs, rel, exact in walk(repo, pats, opts):
172 if src == 'f' and repo.dirstate.state(abs) == '?':
174 if src == 'f' and repo.dirstate.state(abs) == '?':
173 add.append(abs)
175 add.append(abs)
174 mapping[abs] = rel, exact
176 mapping[abs] = rel, exact
175 if repo.ui.verbose or not exact:
177 if repo.ui.verbose or not exact:
176 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
178 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
177 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
179 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
178 remove.append(abs)
180 remove.append(abs)
179 mapping[abs] = rel, exact
181 mapping[abs] = rel, exact
180 if repo.ui.verbose or not exact:
182 if repo.ui.verbose or not exact:
181 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
183 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
182 if not dry_run:
184 if not dry_run:
183 repo.add(add, wlock=wlock)
185 repo.add(add, wlock=wlock)
184 repo.remove(remove, wlock=wlock)
186 repo.remove(remove, wlock=wlock)
185 if similarity > 0:
187 if similarity > 0:
186 for old, new, score in findrenames(repo, add, remove, similarity):
188 for old, new, score in findrenames(repo, add, remove, similarity):
187 oldrel, oldexact = mapping[old]
189 oldrel, oldexact = mapping[old]
188 newrel, newexact = mapping[new]
190 newrel, newexact = mapping[new]
189 if repo.ui.verbose or not oldexact or not newexact:
191 if repo.ui.verbose or not oldexact or not newexact:
190 repo.ui.status(_('recording removal of %s as rename to %s '
192 repo.ui.status(_('recording removal of %s as rename to %s '
191 '(%d%% similar)\n') %
193 '(%d%% similar)\n') %
192 (oldrel, newrel, score * 100))
194 (oldrel, newrel, score * 100))
193 if not dry_run:
195 if not dry_run:
194 repo.copy(old, new, wlock=wlock)
196 repo.copy(old, new, wlock=wlock)
195
197
196 class changeset_printer(object):
198 class changeset_printer(object):
197 '''show changeset information when templating not requested.'''
199 '''show changeset information when templating not requested.'''
198
200
199 def __init__(self, ui, repo, patch, buffered):
201 def __init__(self, ui, repo, patch, buffered):
200 self.ui = ui
202 self.ui = ui
201 self.repo = repo
203 self.repo = repo
202 self.buffered = buffered
204 self.buffered = buffered
203 self.patch = patch
205 self.patch = patch
204 self.header = {}
206 self.header = {}
205 self.hunk = {}
207 self.hunk = {}
206 self.lastheader = None
208 self.lastheader = None
207
209
208 def flush(self, rev):
210 def flush(self, rev):
209 if rev in self.header:
211 if rev in self.header:
210 h = self.header[rev]
212 h = self.header[rev]
211 if h != self.lastheader:
213 if h != self.lastheader:
212 self.lastheader = h
214 self.lastheader = h
213 self.ui.write(h)
215 self.ui.write(h)
214 del self.header[rev]
216 del self.header[rev]
215 if rev in self.hunk:
217 if rev in self.hunk:
216 self.ui.write(self.hunk[rev])
218 self.ui.write(self.hunk[rev])
217 del self.hunk[rev]
219 del self.hunk[rev]
218 return 1
220 return 1
219 return 0
221 return 0
220
222
221 def show(self, rev=0, changenode=None, copies=None, **props):
223 def show(self, rev=0, changenode=None, copies=None, **props):
222 if self.buffered:
224 if self.buffered:
223 self.ui.pushbuffer()
225 self.ui.pushbuffer()
224 self._show(rev, changenode, copies, props)
226 self._show(rev, changenode, copies, props)
225 self.hunk[rev] = self.ui.popbuffer()
227 self.hunk[rev] = self.ui.popbuffer()
226 else:
228 else:
227 self._show(rev, changenode, copies, props)
229 self._show(rev, changenode, copies, props)
228
230
229 def _show(self, rev, changenode, copies, props):
231 def _show(self, rev, changenode, copies, props):
230 '''show a single changeset or file revision'''
232 '''show a single changeset or file revision'''
231 log = self.repo.changelog
233 log = self.repo.changelog
232 if changenode is None:
234 if changenode is None:
233 changenode = log.node(rev)
235 changenode = log.node(rev)
234 elif not rev:
236 elif not rev:
235 rev = log.rev(changenode)
237 rev = log.rev(changenode)
236
238
237 if self.ui.quiet:
239 if self.ui.quiet:
238 self.ui.write("%d:%s\n" % (rev, short(changenode)))
240 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 return
241 return
240
242
241 changes = log.read(changenode)
243 changes = log.read(changenode)
242 date = util.datestr(changes[2])
244 date = util.datestr(changes[2])
243 extra = changes[5]
245 extra = changes[5]
244 branch = extra.get("branch")
246 branch = extra.get("branch")
245
247
246 hexfunc = self.ui.debugflag and hex or short
248 hexfunc = self.ui.debugflag and hex or short
247
249
248 parents = log.parentrevs(rev)
250 parents = log.parentrevs(rev)
249 if not self.ui.debugflag:
251 if not self.ui.debugflag:
250 if parents[1] == nullrev:
252 if parents[1] == nullrev:
251 if parents[0] >= rev - 1:
253 if parents[0] >= rev - 1:
252 parents = []
254 parents = []
253 else:
255 else:
254 parents = [parents[0]]
256 parents = [parents[0]]
255 parents = [(p, hexfunc(log.node(p))) for p in parents]
257 parents = [(p, hexfunc(log.node(p))) for p in parents]
256
258
257 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
259 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258
260
259 if branch:
261 if branch:
260 branch = util.tolocal(branch)
262 branch = util.tolocal(branch)
261 self.ui.write(_("branch: %s\n") % branch)
263 self.ui.write(_("branch: %s\n") % branch)
262 for tag in self.repo.nodetags(changenode):
264 for tag in self.repo.nodetags(changenode):
263 self.ui.write(_("tag: %s\n") % tag)
265 self.ui.write(_("tag: %s\n") % tag)
264 for parent in parents:
266 for parent in parents:
265 self.ui.write(_("parent: %d:%s\n") % parent)
267 self.ui.write(_("parent: %d:%s\n") % parent)
266
268
267 if self.ui.debugflag:
269 if self.ui.debugflag:
268 self.ui.write(_("manifest: %d:%s\n") %
270 self.ui.write(_("manifest: %d:%s\n") %
269 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
271 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
270 self.ui.write(_("user: %s\n") % changes[1])
272 self.ui.write(_("user: %s\n") % changes[1])
271 self.ui.write(_("date: %s\n") % date)
273 self.ui.write(_("date: %s\n") % date)
272
274
273 if self.ui.debugflag:
275 if self.ui.debugflag:
274 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
276 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
275 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
277 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
276 files):
278 files):
277 if value:
279 if value:
278 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
280 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
279 elif changes[3] and self.ui.verbose:
281 elif changes[3] and self.ui.verbose:
280 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
282 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
281 if copies and self.ui.verbose:
283 if copies and self.ui.verbose:
282 copies = ['%s (%s)' % c for c in copies]
284 copies = ['%s (%s)' % c for c in copies]
283 self.ui.write(_("copies: %s\n") % ' '.join(copies))
285 self.ui.write(_("copies: %s\n") % ' '.join(copies))
284
286
285 if extra and self.ui.debugflag:
287 if extra and self.ui.debugflag:
286 extraitems = extra.items()
288 extraitems = extra.items()
287 extraitems.sort()
289 extraitems.sort()
288 for key, value in extraitems:
290 for key, value in extraitems:
289 self.ui.write(_("extra: %s=%s\n")
291 self.ui.write(_("extra: %s=%s\n")
290 % (key, value.encode('string_escape')))
292 % (key, value.encode('string_escape')))
291
293
292 description = changes[4].strip()
294 description = changes[4].strip()
293 if description:
295 if description:
294 if self.ui.verbose:
296 if self.ui.verbose:
295 self.ui.write(_("description:\n"))
297 self.ui.write(_("description:\n"))
296 self.ui.write(description)
298 self.ui.write(description)
297 self.ui.write("\n\n")
299 self.ui.write("\n\n")
298 else:
300 else:
299 self.ui.write(_("summary: %s\n") %
301 self.ui.write(_("summary: %s\n") %
300 description.splitlines()[0])
302 description.splitlines()[0])
301 self.ui.write("\n")
303 self.ui.write("\n")
302
304
303 self.showpatch(changenode)
305 self.showpatch(changenode)
304
306
305 def showpatch(self, node):
307 def showpatch(self, node):
306 if self.patch:
308 if self.patch:
307 prev = self.repo.changelog.parents(node)[0]
309 prev = self.repo.changelog.parents(node)[0]
308 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
310 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
309 self.ui.write("\n")
311 self.ui.write("\n")
310
312
311 class changeset_templater(changeset_printer):
313 class changeset_templater(changeset_printer):
312 '''format changeset information.'''
314 '''format changeset information.'''
313
315
314 def __init__(self, ui, repo, patch, mapfile, buffered):
316 def __init__(self, ui, repo, patch, mapfile, buffered):
315 changeset_printer.__init__(self, ui, repo, patch, buffered)
317 changeset_printer.__init__(self, ui, repo, patch, buffered)
316 self.t = templater.templater(mapfile, templater.common_filters,
318 self.t = templater.templater(mapfile, templater.common_filters,
317 cache={'parent': '{rev}:{node|short} ',
319 cache={'parent': '{rev}:{node|short} ',
318 'manifest': '{rev}:{node|short}',
320 'manifest': '{rev}:{node|short}',
319 'filecopy': '{name} ({source})'})
321 'filecopy': '{name} ({source})'})
320
322
321 def use_template(self, t):
323 def use_template(self, t):
322 '''set template string to use'''
324 '''set template string to use'''
323 self.t.cache['changeset'] = t
325 self.t.cache['changeset'] = t
324
326
325 def _show(self, rev, changenode, copies, props):
327 def _show(self, rev, changenode, copies, props):
326 '''show a single changeset or file revision'''
328 '''show a single changeset or file revision'''
327 log = self.repo.changelog
329 log = self.repo.changelog
328 if changenode is None:
330 if changenode is None:
329 changenode = log.node(rev)
331 changenode = log.node(rev)
330 elif not rev:
332 elif not rev:
331 rev = log.rev(changenode)
333 rev = log.rev(changenode)
332
334
333 changes = log.read(changenode)
335 changes = log.read(changenode)
334
336
335 def showlist(name, values, plural=None, **args):
337 def showlist(name, values, plural=None, **args):
336 '''expand set of values.
338 '''expand set of values.
337 name is name of key in template map.
339 name is name of key in template map.
338 values is list of strings or dicts.
340 values is list of strings or dicts.
339 plural is plural of name, if not simply name + 's'.
341 plural is plural of name, if not simply name + 's'.
340
342
341 expansion works like this, given name 'foo'.
343 expansion works like this, given name 'foo'.
342
344
343 if values is empty, expand 'no_foos'.
345 if values is empty, expand 'no_foos'.
344
346
345 if 'foo' not in template map, return values as a string,
347 if 'foo' not in template map, return values as a string,
346 joined by space.
348 joined by space.
347
349
348 expand 'start_foos'.
350 expand 'start_foos'.
349
351
350 for each value, expand 'foo'. if 'last_foo' in template
352 for each value, expand 'foo'. if 'last_foo' in template
351 map, expand it instead of 'foo' for last key.
353 map, expand it instead of 'foo' for last key.
352
354
353 expand 'end_foos'.
355 expand 'end_foos'.
354 '''
356 '''
355 if plural: names = plural
357 if plural: names = plural
356 else: names = name + 's'
358 else: names = name + 's'
357 if not values:
359 if not values:
358 noname = 'no_' + names
360 noname = 'no_' + names
359 if noname in self.t:
361 if noname in self.t:
360 yield self.t(noname, **args)
362 yield self.t(noname, **args)
361 return
363 return
362 if name not in self.t:
364 if name not in self.t:
363 if isinstance(values[0], str):
365 if isinstance(values[0], str):
364 yield ' '.join(values)
366 yield ' '.join(values)
365 else:
367 else:
366 for v in values:
368 for v in values:
367 yield dict(v, **args)
369 yield dict(v, **args)
368 return
370 return
369 startname = 'start_' + names
371 startname = 'start_' + names
370 if startname in self.t:
372 if startname in self.t:
371 yield self.t(startname, **args)
373 yield self.t(startname, **args)
372 vargs = args.copy()
374 vargs = args.copy()
373 def one(v, tag=name):
375 def one(v, tag=name):
374 try:
376 try:
375 vargs.update(v)
377 vargs.update(v)
376 except (AttributeError, ValueError):
378 except (AttributeError, ValueError):
377 try:
379 try:
378 for a, b in v:
380 for a, b in v:
379 vargs[a] = b
381 vargs[a] = b
380 except ValueError:
382 except ValueError:
381 vargs[name] = v
383 vargs[name] = v
382 return self.t(tag, **vargs)
384 return self.t(tag, **vargs)
383 lastname = 'last_' + name
385 lastname = 'last_' + name
384 if lastname in self.t:
386 if lastname in self.t:
385 last = values.pop()
387 last = values.pop()
386 else:
388 else:
387 last = None
389 last = None
388 for v in values:
390 for v in values:
389 yield one(v)
391 yield one(v)
390 if last is not None:
392 if last is not None:
391 yield one(last, tag=lastname)
393 yield one(last, tag=lastname)
392 endname = 'end_' + names
394 endname = 'end_' + names
393 if endname in self.t:
395 if endname in self.t:
394 yield self.t(endname, **args)
396 yield self.t(endname, **args)
395
397
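# Illustrative note (not part of the original code): for a call like
# showlist('tag', ['v1', 'stable'], **args), the docstring above implies
# this lookup order in the template map:
#   'no_tags'    - expanded only when the value list is empty
#   'start_tags' - expanded once before the values
#   'tag'        - expanded once per value ('last_tag' is used instead for
#                  the final value when that key is defined)
#   'end_tags'   - expanded once after the values
# If 'tag' itself is not in the map, the string values are simply joined
# with spaces.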
396 def showbranches(**args):
398 def showbranches(**args):
397 branch = changes[5].get("branch")
399 branch = changes[5].get("branch")
398 if branch:
400 if branch:
399 branch = util.tolocal(branch)
401 branch = util.tolocal(branch)
400 return showlist('branch', [branch], plural='branches', **args)
402 return showlist('branch', [branch], plural='branches', **args)
401
403
402 def showparents(**args):
404 def showparents(**args):
403 parents = [[('rev', log.rev(p)), ('node', hex(p))]
405 parents = [[('rev', log.rev(p)), ('node', hex(p))]
404 for p in log.parents(changenode)
406 for p in log.parents(changenode)
405 if self.ui.debugflag or p != nullid]
407 if self.ui.debugflag or p != nullid]
406 if (not self.ui.debugflag and len(parents) == 1 and
408 if (not self.ui.debugflag and len(parents) == 1 and
407 parents[0][0][1] == rev - 1):
409 parents[0][0][1] == rev - 1):
408 return
410 return
409 return showlist('parent', parents, **args)
411 return showlist('parent', parents, **args)
410
412
411 def showtags(**args):
413 def showtags(**args):
412 return showlist('tag', self.repo.nodetags(changenode), **args)
414 return showlist('tag', self.repo.nodetags(changenode), **args)
413
415
414 def showextras(**args):
416 def showextras(**args):
415 extras = changes[5].items()
417 extras = changes[5].items()
416 extras.sort()
418 extras.sort()
417 for key, value in extras:
419 for key, value in extras:
418 args = args.copy()
420 args = args.copy()
419 args.update(dict(key=key, value=value))
421 args.update(dict(key=key, value=value))
420 yield self.t('extra', **args)
422 yield self.t('extra', **args)
421
423
422 def showcopies(**args):
424 def showcopies(**args):
423 c = [{'name': x[0], 'source': x[1]} for x in copies]
425 c = [{'name': x[0], 'source': x[1]} for x in copies]
424 return showlist('file_copy', c, plural='file_copies', **args)
426 return showlist('file_copy', c, plural='file_copies', **args)
425
427
426 if self.ui.debugflag:
428 if self.ui.debugflag:
427 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
429 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
428 def showfiles(**args):
430 def showfiles(**args):
429 return showlist('file', files[0], **args)
431 return showlist('file', files[0], **args)
430 def showadds(**args):
432 def showadds(**args):
431 return showlist('file_add', files[1], **args)
433 return showlist('file_add', files[1], **args)
432 def showdels(**args):
434 def showdels(**args):
433 return showlist('file_del', files[2], **args)
435 return showlist('file_del', files[2], **args)
434 def showmanifest(**args):
436 def showmanifest(**args):
435 args = args.copy()
437 args = args.copy()
436 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
438 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
437 node=hex(changes[0])))
439 node=hex(changes[0])))
438 return self.t('manifest', **args)
440 return self.t('manifest', **args)
439 else:
441 else:
440 def showfiles(**args):
442 def showfiles(**args):
441 return showlist('file', changes[3], **args)
443 return showlist('file', changes[3], **args)
442 showadds = ''
444 showadds = ''
443 showdels = ''
445 showdels = ''
444 showmanifest = ''
446 showmanifest = ''
445
447
446 defprops = {
448 defprops = {
447 'author': changes[1],
449 'author': changes[1],
448 'branches': showbranches,
450 'branches': showbranches,
449 'date': changes[2],
451 'date': changes[2],
450 'desc': changes[4],
452 'desc': changes[4],
451 'file_adds': showadds,
453 'file_adds': showadds,
452 'file_dels': showdels,
454 'file_dels': showdels,
453 'files': showfiles,
455 'files': showfiles,
454 'file_copies': showcopies,
456 'file_copies': showcopies,
455 'manifest': showmanifest,
457 'manifest': showmanifest,
456 'node': hex(changenode),
458 'node': hex(changenode),
457 'parents': showparents,
459 'parents': showparents,
458 'rev': rev,
460 'rev': rev,
459 'tags': showtags,
461 'tags': showtags,
460 'extras': showextras,
462 'extras': showextras,
461 }
463 }
462 props = props.copy()
464 props = props.copy()
463 props.update(defprops)
465 props.update(defprops)
464
466
465 try:
467 try:
466 if self.ui.debugflag and 'header_debug' in self.t:
468 if self.ui.debugflag and 'header_debug' in self.t:
467 key = 'header_debug'
469 key = 'header_debug'
468 elif self.ui.quiet and 'header_quiet' in self.t:
470 elif self.ui.quiet and 'header_quiet' in self.t:
469 key = 'header_quiet'
471 key = 'header_quiet'
470 elif self.ui.verbose and 'header_verbose' in self.t:
472 elif self.ui.verbose and 'header_verbose' in self.t:
471 key = 'header_verbose'
473 key = 'header_verbose'
472 elif 'header' in self.t:
474 elif 'header' in self.t:
473 key = 'header'
475 key = 'header'
474 else:
476 else:
475 key = ''
477 key = ''
476 if key:
478 if key:
477 h = templater.stringify(self.t(key, **props))
479 h = templater.stringify(self.t(key, **props))
478 if self.buffered:
480 if self.buffered:
479 self.header[rev] = h
481 self.header[rev] = h
480 else:
482 else:
481 self.ui.write(h)
483 self.ui.write(h)
482 if self.ui.debugflag and 'changeset_debug' in self.t:
484 if self.ui.debugflag and 'changeset_debug' in self.t:
483 key = 'changeset_debug'
485 key = 'changeset_debug'
484 elif self.ui.quiet and 'changeset_quiet' in self.t:
486 elif self.ui.quiet and 'changeset_quiet' in self.t:
485 key = 'changeset_quiet'
487 key = 'changeset_quiet'
486 elif self.ui.verbose and 'changeset_verbose' in self.t:
488 elif self.ui.verbose and 'changeset_verbose' in self.t:
487 key = 'changeset_verbose'
489 key = 'changeset_verbose'
488 else:
490 else:
489 key = 'changeset'
491 key = 'changeset'
490 self.ui.write(templater.stringify(self.t(key, **props)))
492 self.ui.write(templater.stringify(self.t(key, **props)))
491 self.showpatch(changenode)
493 self.showpatch(changenode)
492 except KeyError, inst:
494 except KeyError, inst:
493 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
495 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
494 inst.args[0]))
496 inst.args[0]))
495 except SyntaxError, inst:
497 except SyntaxError, inst:
496 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
498 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
497
499
498 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
500 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
499 """show one changeset using template or regular display.
501 """show one changeset using template or regular display.
500
502
501 Display format will be the first non-empty hit of:
503 Display format will be the first non-empty hit of:
502 1. option 'template'
504 1. option 'template'
503 2. option 'style'
505 2. option 'style'
504 3. [ui] setting 'logtemplate'
506 3. [ui] setting 'logtemplate'
505 4. [ui] setting 'style'
507 4. [ui] setting 'style'
506 If all of these values are either unset or the empty string,
508 If all of these values are either unset or the empty string,
507 regular display via changeset_printer() is done.
509 regular display via changeset_printer() is done.
508 """
510 """
509 # options
511 # options
510 patch = False
512 patch = False
511 if opts.get('patch'):
513 if opts.get('patch'):
512 patch = matchfn or util.always
514 patch = matchfn or util.always
513
515
514 tmpl = opts.get('template')
516 tmpl = opts.get('template')
515 mapfile = None
517 mapfile = None
516 if tmpl:
518 if tmpl:
517 tmpl = templater.parsestring(tmpl, quoted=False)
519 tmpl = templater.parsestring(tmpl, quoted=False)
518 else:
520 else:
519 mapfile = opts.get('style')
521 mapfile = opts.get('style')
520 # ui settings
522 # ui settings
521 if not mapfile:
523 if not mapfile:
522 tmpl = ui.config('ui', 'logtemplate')
524 tmpl = ui.config('ui', 'logtemplate')
523 if tmpl:
525 if tmpl:
524 tmpl = templater.parsestring(tmpl)
526 tmpl = templater.parsestring(tmpl)
525 else:
527 else:
526 mapfile = ui.config('ui', 'style')
528 mapfile = ui.config('ui', 'style')
527
529
528 if tmpl or mapfile:
530 if tmpl or mapfile:
529 if mapfile:
531 if mapfile:
530 if not os.path.split(mapfile)[0]:
532 if not os.path.split(mapfile)[0]:
531 mapname = (templater.templatepath('map-cmdline.' + mapfile)
533 mapname = (templater.templatepath('map-cmdline.' + mapfile)
532 or templater.templatepath(mapfile))
534 or templater.templatepath(mapfile))
533 if mapname: mapfile = mapname
535 if mapname: mapfile = mapname
534 try:
536 try:
535 t = changeset_templater(ui, repo, patch, mapfile, buffered)
537 t = changeset_templater(ui, repo, patch, mapfile, buffered)
536 except SyntaxError, inst:
538 except SyntaxError, inst:
537 raise util.Abort(inst.args[0])
539 raise util.Abort(inst.args[0])
538 if tmpl: t.use_template(tmpl)
540 if tmpl: t.use_template(tmpl)
539 return t
541 return t
540 return changeset_printer(ui, repo, patch, buffered)
542 return changeset_printer(ui, repo, patch, buffered)
541
543
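# Illustrative sketch (hypothetical option values, not part of the
# changeset): the precedence documented in show_changeset means an explicit
# template always wins, e.g.
#
#   show_changeset(ui, repo, {'template': '{rev}:{node|short}\n'})
#       -> changeset_templater with that string as the 'changeset' template
#   show_changeset(ui, repo, {'style': 'compact'})
#       -> changeset_templater reading map-cmdline.compact (if that map
#          file is found on the template path)
#   show_changeset(ui, repo, {})
#       -> falls back to [ui] logtemplate, then [ui] style, and finally the
#          plain changeset_printer if neither is configured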
542 def finddate(ui, repo, date):
544 def finddate(ui, repo, date):
543 """Find the tipmost changeset that matches the given date spec"""
545 """Find the tipmost changeset that matches the given date spec"""
544 df = util.matchdate(date + " to " + date)
546 df = util.matchdate(date + " to " + date)
545 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
547 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
546 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
548 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
547 results = {}
549 results = {}
548 for st, rev, fns in changeiter:
550 for st, rev, fns in changeiter:
549 if st == 'add':
551 if st == 'add':
550 d = get(rev)[2]
552 d = get(rev)[2]
551 if df(d[0]):
553 if df(d[0]):
552 results[rev] = d
554 results[rev] = d
553 elif st == 'iter':
555 elif st == 'iter':
554 if rev in results:
556 if rev in results:
555 ui.status("Found revision %s from %s\n" %
557 ui.status("Found revision %s from %s\n" %
556 (rev, util.datestr(results[rev])))
558 (rev, util.datestr(results[rev])))
557 return str(rev)
559 return str(rev)
558
560
559 raise util.Abort(_("revision matching date not found"))
561 raise util.Abort(_("revision matching date not found"))
560
562
561 def walkchangerevs(ui, repo, pats, change, opts):
563 def walkchangerevs(ui, repo, pats, change, opts):
562 '''Iterate over files and the revs they changed in.
564 '''Iterate over files and the revs they changed in.
563
565
564 Callers most commonly need to iterate backwards over the history
566 Callers most commonly need to iterate backwards over the history
565 they are interested in. Doing so has awful (quadratic-looking)
567 they are interested in. Doing so has awful (quadratic-looking)
566 performance, so we use iterators in a "windowed" way.
568 performance, so we use iterators in a "windowed" way.
567
569
568 We walk a window of revisions in the desired order. Within the
570 We walk a window of revisions in the desired order. Within the
569 window, we first walk forwards to gather data, then in the desired
571 window, we first walk forwards to gather data, then in the desired
570 order (usually backwards) to display it.
572 order (usually backwards) to display it.
571
573
572 This function returns an (iterator, matchfn) tuple. The iterator
574 This function returns an (iterator, matchfn) tuple. The iterator
573 yields 3-tuples. They will be of one of the following forms:
575 yields 3-tuples. They will be of one of the following forms:
574
576
575 "window", incrementing, lastrev: stepping through a window,
577 "window", incrementing, lastrev: stepping through a window,
576 positive if walking forwards through revs, last rev in the
578 positive if walking forwards through revs, last rev in the
577 sequence iterated over - use to reset state for the current window
579 sequence iterated over - use to reset state for the current window
578
580
579 "add", rev, fns: out-of-order traversal of the given file names
581 "add", rev, fns: out-of-order traversal of the given file names
580 fns, which changed during revision rev - use to gather data for
582 fns, which changed during revision rev - use to gather data for
581 possible display
583 possible display
582
584
583 "iter", rev, None: in-order traversal of the revs earlier iterated
585 "iter", rev, None: in-order traversal of the revs earlier iterated
584 over with "add" - use to display data'''
586 over with "add" - use to display data'''
585
587
586 def increasing_windows(start, end, windowsize=8, sizelimit=512):
588 def increasing_windows(start, end, windowsize=8, sizelimit=512):
587 if start < end:
589 if start < end:
588 while start < end:
590 while start < end:
589 yield start, min(windowsize, end-start)
591 yield start, min(windowsize, end-start)
590 start += windowsize
592 start += windowsize
591 if windowsize < sizelimit:
593 if windowsize < sizelimit:
592 windowsize *= 2
594 windowsize *= 2
593 else:
595 else:
594 while start > end:
596 while start > end:
595 yield start, min(windowsize, start-end-1)
597 yield start, min(windowsize, start-end-1)
596 start -= windowsize
598 start -= windowsize
597 if windowsize < sizelimit:
599 if windowsize < sizelimit:
598 windowsize *= 2
600 windowsize *= 2
599
601
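# Illustrative note (not part of the original code): walking forwards from
# rev 0 with 100 revisions to cover, increasing_windows(0, 100) yields
#   (0, 8), (8, 16), (24, 32), (56, 44)
# i.e. the window size doubles each step (up to sizelimit) until the range
# is exhausted; the descending branch behaves the same way in the other
# direction.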
600 files, matchfn, anypats = matchpats(repo, pats, opts)
602 files, matchfn, anypats = matchpats(repo, pats, opts)
601 follow = opts.get('follow') or opts.get('follow_first')
603 follow = opts.get('follow') or opts.get('follow_first')
602
604
603 if repo.changelog.count() == 0:
605 if repo.changelog.count() == 0:
604 return [], matchfn
606 return [], matchfn
605
607
606 if follow:
608 if follow:
607 defrange = '%s:0' % repo.changectx().rev()
609 defrange = '%s:0' % repo.changectx().rev()
608 else:
610 else:
609 defrange = 'tip:0'
611 defrange = 'tip:0'
610 revs = revrange(repo, opts['rev'] or [defrange])
612 revs = revrange(repo, opts['rev'] or [defrange])
611 wanted = {}
613 wanted = {}
612 slowpath = anypats or opts.get('removed')
614 slowpath = anypats or opts.get('removed')
613 fncache = {}
615 fncache = {}
614
616
615 if not slowpath and not files:
617 if not slowpath and not files:
616 # No files, no patterns. Display all revs.
618 # No files, no patterns. Display all revs.
617 wanted = dict.fromkeys(revs)
619 wanted = dict.fromkeys(revs)
618 copies = []
620 copies = []
619 if not slowpath:
621 if not slowpath:
620 # Only files, no patterns. Check the history of each file.
622 # Only files, no patterns. Check the history of each file.
621 def filerevgen(filelog, node):
623 def filerevgen(filelog, node):
622 cl_count = repo.changelog.count()
624 cl_count = repo.changelog.count()
623 if node is None:
625 if node is None:
624 last = filelog.count() - 1
626 last = filelog.count() - 1
625 else:
627 else:
626 last = filelog.rev(node)
628 last = filelog.rev(node)
627 for i, window in increasing_windows(last, nullrev):
629 for i, window in increasing_windows(last, nullrev):
628 revs = []
630 revs = []
629 for j in xrange(i - window, i + 1):
631 for j in xrange(i - window, i + 1):
630 n = filelog.node(j)
632 n = filelog.node(j)
631 revs.append((filelog.linkrev(n),
633 revs.append((filelog.linkrev(n),
632 follow and filelog.renamed(n)))
634 follow and filelog.renamed(n)))
633 revs.reverse()
635 revs.reverse()
634 for rev in revs:
636 for rev in revs:
635 # only yield rev for which we have the changelog, it can
637 # only yield rev for which we have the changelog, it can
636 # happen while doing "hg log" during a pull or commit
638 # happen while doing "hg log" during a pull or commit
637 if rev[0] < cl_count:
639 if rev[0] < cl_count:
638 yield rev
640 yield rev
639 def iterfiles():
641 def iterfiles():
640 for filename in files:
642 for filename in files:
641 yield filename, None
643 yield filename, None
642 for filename_node in copies:
644 for filename_node in copies:
643 yield filename_node
645 yield filename_node
644 minrev, maxrev = min(revs), max(revs)
646 minrev, maxrev = min(revs), max(revs)
645 for file_, node in iterfiles():
647 for file_, node in iterfiles():
646 filelog = repo.file(file_)
648 filelog = repo.file(file_)
647 # A zero count may be a directory or deleted file, so
649 # A zero count may be a directory or deleted file, so
648 # try to find matching entries on the slow path.
650 # try to find matching entries on the slow path.
649 if filelog.count() == 0:
651 if filelog.count() == 0:
650 slowpath = True
652 slowpath = True
651 break
653 break
652 for rev, copied in filerevgen(filelog, node):
654 for rev, copied in filerevgen(filelog, node):
653 if rev <= maxrev:
655 if rev <= maxrev:
654 if rev < minrev:
656 if rev < minrev:
655 break
657 break
656 fncache.setdefault(rev, [])
658 fncache.setdefault(rev, [])
657 fncache[rev].append(file_)
659 fncache[rev].append(file_)
658 wanted[rev] = 1
660 wanted[rev] = 1
659 if follow and copied:
661 if follow and copied:
660 copies.append(copied)
662 copies.append(copied)
661 if slowpath:
663 if slowpath:
662 if follow:
664 if follow:
663 raise util.Abort(_('can only follow copies/renames for explicit '
665 raise util.Abort(_('can only follow copies/renames for explicit '
664 'file names'))
666 'file names'))
665
667
666 # The slow path checks files modified in every changeset.
668 # The slow path checks files modified in every changeset.
667 def changerevgen():
669 def changerevgen():
668 for i, window in increasing_windows(repo.changelog.count()-1,
670 for i, window in increasing_windows(repo.changelog.count()-1,
669 nullrev):
671 nullrev):
670 for j in xrange(i - window, i + 1):
672 for j in xrange(i - window, i + 1):
671 yield j, change(j)[3]
673 yield j, change(j)[3]
672
674
673 for rev, changefiles in changerevgen():
675 for rev, changefiles in changerevgen():
674 matches = filter(matchfn, changefiles)
676 matches = filter(matchfn, changefiles)
675 if matches:
677 if matches:
676 fncache[rev] = matches
678 fncache[rev] = matches
677 wanted[rev] = 1
679 wanted[rev] = 1
678
680
679 class followfilter:
681 class followfilter:
680 def __init__(self, onlyfirst=False):
682 def __init__(self, onlyfirst=False):
681 self.startrev = nullrev
683 self.startrev = nullrev
682 self.roots = []
684 self.roots = []
683 self.onlyfirst = onlyfirst
685 self.onlyfirst = onlyfirst
684
686
685 def match(self, rev):
687 def match(self, rev):
686 def realparents(rev):
688 def realparents(rev):
687 if self.onlyfirst:
689 if self.onlyfirst:
688 return repo.changelog.parentrevs(rev)[0:1]
690 return repo.changelog.parentrevs(rev)[0:1]
689 else:
691 else:
690 return filter(lambda x: x != nullrev,
692 return filter(lambda x: x != nullrev,
691 repo.changelog.parentrevs(rev))
693 repo.changelog.parentrevs(rev))
692
694
693 if self.startrev == nullrev:
695 if self.startrev == nullrev:
694 self.startrev = rev
696 self.startrev = rev
695 return True
697 return True
696
698
697 if rev > self.startrev:
699 if rev > self.startrev:
698 # forward: all descendants
700 # forward: all descendants
699 if not self.roots:
701 if not self.roots:
700 self.roots.append(self.startrev)
702 self.roots.append(self.startrev)
701 for parent in realparents(rev):
703 for parent in realparents(rev):
702 if parent in self.roots:
704 if parent in self.roots:
703 self.roots.append(rev)
705 self.roots.append(rev)
704 return True
706 return True
705 else:
707 else:
706 # backwards: all parents
708 # backwards: all parents
707 if not self.roots:
709 if not self.roots:
708 self.roots.extend(realparents(self.startrev))
710 self.roots.extend(realparents(self.startrev))
709 if rev in self.roots:
711 if rev in self.roots:
710 self.roots.remove(rev)
712 self.roots.remove(rev)
711 self.roots.extend(realparents(rev))
713 self.roots.extend(realparents(rev))
712 return True
714 return True
713
715
714 return False
716 return False
715
717
716 # it might be worthwhile to do this in the iterator if the rev range
718 # it might be worthwhile to do this in the iterator if the rev range
717 # is descending and the prune args are all within that range
719 # is descending and the prune args are all within that range
718 for rev in opts.get('prune', ()):
720 for rev in opts.get('prune', ()):
719 rev = repo.changelog.rev(repo.lookup(rev))
721 rev = repo.changelog.rev(repo.lookup(rev))
720 ff = followfilter()
722 ff = followfilter()
721 stop = min(revs[0], revs[-1])
723 stop = min(revs[0], revs[-1])
722 for x in xrange(rev, stop-1, -1):
724 for x in xrange(rev, stop-1, -1):
723 if ff.match(x) and x in wanted:
725 if ff.match(x) and x in wanted:
724 del wanted[x]
726 del wanted[x]
725
727
726 def iterate():
728 def iterate():
727 if follow and not files:
729 if follow and not files:
728 ff = followfilter(onlyfirst=opts.get('follow_first'))
730 ff = followfilter(onlyfirst=opts.get('follow_first'))
729 def want(rev):
731 def want(rev):
730 if ff.match(rev) and rev in wanted:
732 if ff.match(rev) and rev in wanted:
731 return True
733 return True
732 return False
734 return False
733 else:
735 else:
734 def want(rev):
736 def want(rev):
735 return rev in wanted
737 return rev in wanted
736
738
737 for i, window in increasing_windows(0, len(revs)):
739 for i, window in increasing_windows(0, len(revs)):
738 yield 'window', revs[0] < revs[-1], revs[-1]
740 yield 'window', revs[0] < revs[-1], revs[-1]
739 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
741 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
740 srevs = list(nrevs)
742 srevs = list(nrevs)
741 srevs.sort()
743 srevs.sort()
742 for rev in srevs:
744 for rev in srevs:
743 fns = fncache.get(rev)
745 fns = fncache.get(rev)
744 if not fns:
746 if not fns:
745 def fns_generator():
747 def fns_generator():
746 for f in change(rev)[3]:
748 for f in change(rev)[3]:
747 if matchfn(f):
749 if matchfn(f):
748 yield f
750 yield f
749 fns = fns_generator()
751 fns = fns_generator()
750 yield 'add', rev, fns
752 yield 'add', rev, fns
751 for rev in nrevs:
753 for rev in nrevs:
752 yield 'iter', rev, None
754 yield 'iter', rev, None
753 return iterate(), matchfn
755 return iterate(), matchfn
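# Illustrative sketch (not part of the changeset): a typical consumer of
# walkchangerevs, following the protocol documented in its docstring -
# gather data on 'add', display it on 'iter' (finddate above uses the same
# shape).  gather() and display() are hypothetical callbacks.
#
#   get = util.cachefunc(lambda r: repo.changectx(r).changeset())
#   changeiter, matchfn = walkchangerevs(ui, repo, pats, get, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass                     # reset per-window state here
#       elif st == 'add':
#           gather(rev, fns)         # out-of-order data gathering
#       elif st == 'iter':
#           display(rev)             # in-order display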
@@ -1,3296 +1,3307 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import traceback, errno, version, atexit, socket
14 import traceback, errno, version, atexit, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted = repo.status()[:4]
23 modified, added, removed, deleted = repo.status()[:4]
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
46 def setremoteconfig(ui, opts):
46 def setremoteconfig(ui, opts):
47 "copy remote options to ui tree"
47 "copy remote options to ui tree"
48 if opts.get('ssh'):
48 if opts.get('ssh'):
49 ui.setconfig("ui", "ssh", opts['ssh'])
49 ui.setconfig("ui", "ssh", opts['ssh'])
50 if opts.get('remotecmd'):
50 if opts.get('remotecmd'):
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
55 def add(ui, repo, *pats, **opts):
55 def add(ui, repo, *pats, **opts):
56 """add the specified files on the next commit
56 """add the specified files on the next commit
57
57
58 Schedule files to be version controlled and added to the repository.
58 Schedule files to be version controlled and added to the repository.
59
59
60 The files will be added to the repository at the next commit. To
60 The files will be added to the repository at the next commit. To
61 undo an add before that, see hg revert.
61 undo an add before that, see hg revert.
62
62
63 If no names are given, add all files in the repository.
63 If no names are given, add all files in the repository.
64 """
64 """
65
65
66 names = []
66 names = []
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 if exact:
68 if exact:
69 if ui.verbose:
69 if ui.verbose:
70 ui.status(_('adding %s\n') % rel)
70 ui.status(_('adding %s\n') % rel)
71 names.append(abs)
71 names.append(abs)
72 elif repo.dirstate.state(abs) == '?':
72 elif repo.dirstate.state(abs) == '?':
73 ui.status(_('adding %s\n') % rel)
73 ui.status(_('adding %s\n') % rel)
74 names.append(abs)
74 names.append(abs)
75 if not opts.get('dry_run'):
75 if not opts.get('dry_run'):
76 repo.add(names)
76 repo.add(names)
77
77
78 def addremove(ui, repo, *pats, **opts):
78 def addremove(ui, repo, *pats, **opts):
79 """add all new files, delete all missing files
79 """add all new files, delete all missing files
80
80
81 Add all new files and remove all missing files from the repository.
81 Add all new files and remove all missing files from the repository.
82
82
83 New files are ignored if they match any of the patterns in .hgignore. As
83 New files are ignored if they match any of the patterns in .hgignore. As
84 with add, these changes take effect at the next commit.
84 with add, these changes take effect at the next commit.
85
85
86 Use the -s option to detect renamed files. With a parameter > 0,
86 Use the -s option to detect renamed files. With a parameter > 0,
87 this compares every removed file with every added file and records
87 this compares every removed file with every added file and records
88 those similar enough as renames. This option takes a percentage
88 those similar enough as renames. This option takes a percentage
89 between 0 (disabled) and 100 (files must be identical) as its
89 between 0 (disabled) and 100 (files must be identical) as its
90 parameter. Detecting renamed files this way can be expensive.
90 parameter. Detecting renamed files this way can be expensive.
91 """
91 """
92 sim = float(opts.get('similarity') or 0)
92 sim = float(opts.get('similarity') or 0)
93 if sim < 0 or sim > 100:
93 if sim < 0 or sim > 100:
94 raise util.Abort(_('similarity must be between 0 and 100'))
94 raise util.Abort(_('similarity must be between 0 and 100'))
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
97 def annotate(ui, repo, *pats, **opts):
97 def annotate(ui, repo, *pats, **opts):
98 """show changeset information per file line
98 """show changeset information per file line
99
99
100 List changes in files, showing the revision id responsible for each line.
100 List changes in files, showing the revision id responsible for each line.
101
101
102 This command is useful to discover who did a change or when a change took
102 This command is useful to discover who did a change or when a change took
103 place.
103 place.
104
104
105 Without the -a option, annotate will avoid processing files it
105 Without the -a option, annotate will avoid processing files it
106 detects as binary. With -a, annotate will generate an annotation
106 detects as binary. With -a, annotate will generate an annotation
107 anyway, probably with undesirable results.
107 anyway, probably with undesirable results.
108 """
108 """
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
110
111 if not pats:
111 if not pats:
112 raise util.Abort(_('at least one file name or pattern required'))
112 raise util.Abort(_('at least one file name or pattern required'))
113
113
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 ['number', lambda x: str(x.rev())],
115 ['number', lambda x: str(x.rev())],
116 ['changeset', lambda x: short(x.node())],
116 ['changeset', lambda x: short(x.node())],
117 ['date', getdate], ['follow', lambda x: x.path()]]
117 ['date', getdate], ['follow', lambda x: x.path()]]
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 and not opts['follow']):
119 and not opts['follow']):
120 opts['number'] = 1
120 opts['number'] = 1
121
121
122 ctx = repo.changectx(opts['rev'])
122 ctx = repo.changectx(opts['rev'])
123
123
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 node=ctx.node()):
125 node=ctx.node()):
126 fctx = ctx.filectx(abs)
126 fctx = ctx.filectx(abs)
127 if not opts['text'] and util.binary(fctx.data()):
127 if not opts['text'] and util.binary(fctx.data()):
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 continue
129 continue
130
130
131 lines = fctx.annotate(follow=opts.get('follow'))
131 lines = fctx.annotate(follow=opts.get('follow'))
132 pieces = []
132 pieces = []
133
133
134 for o, f in opmap:
134 for o, f in opmap:
135 if opts[o]:
135 if opts[o]:
136 l = [f(n) for n, dummy in lines]
136 l = [f(n) for n, dummy in lines]
137 if l:
137 if l:
138 m = max(map(len, l))
138 m = max(map(len, l))
139 pieces.append(["%*s" % (m, x) for x in l])
139 pieces.append(["%*s" % (m, x) for x in l])
140
140
141 if pieces:
141 if pieces:
142 for p, l in zip(zip(*pieces), lines):
142 for p, l in zip(zip(*pieces), lines):
143 ui.write("%s: %s" % (" ".join(p), l[1]))
143 ui.write("%s: %s" % (" ".join(p), l[1]))
144
144
145 def archive(ui, repo, dest, **opts):
145 def archive(ui, repo, dest, **opts):
146 '''create unversioned archive of a repository revision
146 '''create unversioned archive of a repository revision
147
147
148 By default, the revision used is the parent of the working
148 By default, the revision used is the parent of the working
149 directory; use "-r" to specify a different revision.
149 directory; use "-r" to specify a different revision.
150
150
151 To specify the type of archive to create, use "-t". Valid
151 To specify the type of archive to create, use "-t". Valid
152 types are:
152 types are:
153
153
154 "files" (default): a directory full of files
154 "files" (default): a directory full of files
155 "tar": tar archive, uncompressed
155 "tar": tar archive, uncompressed
156 "tbz2": tar archive, compressed using bzip2
156 "tbz2": tar archive, compressed using bzip2
157 "tgz": tar archive, compressed using gzip
157 "tgz": tar archive, compressed using gzip
158 "uzip": zip archive, uncompressed
158 "uzip": zip archive, uncompressed
159 "zip": zip archive, compressed using deflate
159 "zip": zip archive, compressed using deflate
160
160
161 The exact name of the destination archive or directory is given
161 The exact name of the destination archive or directory is given
162 using a format string; see "hg help export" for details.
162 using a format string; see "hg help export" for details.
163
163
164 Each member added to an archive file has a directory prefix
164 Each member added to an archive file has a directory prefix
165 prepended. Use "-p" to specify a format string for the prefix.
165 prepended. Use "-p" to specify a format string for the prefix.
166 The default is the basename of the archive, with suffixes removed.
166 The default is the basename of the archive, with suffixes removed.
167 '''
167 '''
168
168
169 node = repo.changectx(opts['rev']).node()
169 node = repo.changectx(opts['rev']).node()
170 dest = cmdutil.make_filename(repo, dest, node)
170 dest = cmdutil.make_filename(repo, dest, node)
171 if os.path.realpath(dest) == repo.root:
171 if os.path.realpath(dest) == repo.root:
172 raise util.Abort(_('repository root cannot be destination'))
172 raise util.Abort(_('repository root cannot be destination'))
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 kind = opts.get('type') or 'files'
174 kind = opts.get('type') or 'files'
175 prefix = opts['prefix']
175 prefix = opts['prefix']
176 if dest == '-':
176 if dest == '-':
177 if kind == 'files':
177 if kind == 'files':
178 raise util.Abort(_('cannot archive plain files to stdout'))
178 raise util.Abort(_('cannot archive plain files to stdout'))
179 dest = sys.stdout
179 dest = sys.stdout
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 prefix = cmdutil.make_filename(repo, prefix, node)
181 prefix = cmdutil.make_filename(repo, prefix, node)
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 matchfn, prefix)
183 matchfn, prefix)
184
184
185 def backout(ui, repo, rev, **opts):
185 def backout(ui, repo, rev, **opts):
186 '''reverse effect of earlier changeset
186 '''reverse effect of earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as with a normal
200 hand. The result of this merge is not committed, as with a normal
201 merge.'''
201 merge.'''
202
202
203 bail_if_changed(repo)
203 bail_if_changed(repo)
204 op1, op2 = repo.dirstate.parents()
204 op1, op2 = repo.dirstate.parents()
205 if op2 != nullid:
205 if op2 != nullid:
206 raise util.Abort(_('outstanding uncommitted merge'))
206 raise util.Abort(_('outstanding uncommitted merge'))
207 node = repo.lookup(rev)
207 node = repo.lookup(rev)
208 p1, p2 = repo.changelog.parents(node)
208 p1, p2 = repo.changelog.parents(node)
209 if p1 == nullid:
209 if p1 == nullid:
210 raise util.Abort(_('cannot back out a change with no parents'))
210 raise util.Abort(_('cannot back out a change with no parents'))
211 if p2 != nullid:
211 if p2 != nullid:
212 if not opts['parent']:
212 if not opts['parent']:
213 raise util.Abort(_('cannot back out a merge changeset without '
213 raise util.Abort(_('cannot back out a merge changeset without '
214 '--parent'))
214 '--parent'))
215 p = repo.lookup(opts['parent'])
215 p = repo.lookup(opts['parent'])
216 if p not in (p1, p2):
216 if p not in (p1, p2):
217 raise util.Abort(_('%s is not a parent of %s') %
217 raise util.Abort(_('%s is not a parent of %s') %
218 (short(p), short(node)))
218 (short(p), short(node)))
219 parent = p
219 parent = p
220 else:
220 else:
221 if opts['parent']:
221 if opts['parent']:
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 parent = p1
223 parent = p1
224 hg.clean(repo, node, show_stats=False)
224 hg.clean(repo, node, show_stats=False)
225 revert_opts = opts.copy()
225 revert_opts = opts.copy()
226 revert_opts['date'] = None
226 revert_opts['date'] = None
227 revert_opts['all'] = True
227 revert_opts['all'] = True
228 revert_opts['rev'] = hex(parent)
228 revert_opts['rev'] = hex(parent)
229 revert(ui, repo, **revert_opts)
229 revert(ui, repo, **revert_opts)
230 commit_opts = opts.copy()
230 commit_opts = opts.copy()
231 commit_opts['addremove'] = False
231 commit_opts['addremove'] = False
232 if not commit_opts['message'] and not commit_opts['logfile']:
232 if not commit_opts['message'] and not commit_opts['logfile']:
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 commit_opts['force_editor'] = True
234 commit_opts['force_editor'] = True
235 commit(ui, repo, **commit_opts)
235 commit(ui, repo, **commit_opts)
236 def nice(node):
236 def nice(node):
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 ui.status(_('changeset %s backs out changeset %s\n') %
238 ui.status(_('changeset %s backs out changeset %s\n') %
239 (nice(repo.changelog.tip()), nice(node)))
239 (nice(repo.changelog.tip()), nice(node)))
240 if op1 != node:
240 if op1 != node:
241 if opts['merge']:
241 if opts['merge']:
242 ui.status(_('merging with changeset %s\n') % nice(op1))
242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 hg.merge(repo, hex(op1))
243 hg.merge(repo, hex(op1))
244 else:
244 else:
245 ui.status(_('the backout changeset is a new head - '
245 ui.status(_('the backout changeset is a new head - '
246 'do not forget to merge\n'))
246 'do not forget to merge\n'))
247 ui.status(_('(use "backout --merge" '
247 ui.status(_('(use "backout --merge" '
248 'if you want to auto-merge)\n'))
248 'if you want to auto-merge)\n'))
249
249
250 def branch(ui, repo, label=None):
250 def branch(ui, repo, label=None):
251 """set or show the current branch name
251 """set or show the current branch name
252
252
253 With <name>, set the current branch name. Otherwise, show the
253 With <name>, set the current branch name. Otherwise, show the
254 current branch name.
254 current branch name.
255 """
255 """
256
256
257 if label is not None:
257 if label is not None:
258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
258 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
259 else:
259 else:
260 b = util.tolocal(repo.workingctx().branch())
260 b = util.tolocal(repo.workingctx().branch())
261 if b:
261 if b:
262 ui.write("%s\n" % b)
262 ui.write("%s\n" % b)
263
263
264 def branches(ui, repo):
264 def branches(ui, repo):
265 """list repository named branches
265 """list repository named branches
266
266
267 List the repository's named branches.
267 List the repository's named branches.
268 """
268 """
269 b = repo.branchtags()
269 b = repo.branchtags()
270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
270 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
271 l.sort()
271 l.sort()
272 for r, n, t in l:
272 for r, n, t in l:
273 hexfunc = ui.debugflag and hex or short
273 hexfunc = ui.debugflag and hex or short
274 if ui.quiet:
274 if ui.quiet:
275 ui.write("%s\n" % t)
275 ui.write("%s\n" % t)
276 else:
276 else:
277 t = util.localsub(t, 30)
277 t = util.localsub(t, 30)
278 t += " " * (30 - util.locallen(t))
278 t += " " * (30 - util.locallen(t))
279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
279 ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
280
280
281 def bundle(ui, repo, fname, dest=None, **opts):
281 def bundle(ui, repo, fname, dest=None, **opts):
282 """create a changegroup file
282 """create a changegroup file
283
283
284 Generate a compressed changegroup file collecting changesets not
284 Generate a compressed changegroup file collecting changesets not
285 found in the other repository.
285 found in the other repository.
286
286
287 If no destination repository is specified the destination is assumed
287 If no destination repository is specified the destination is assumed
288 to have all the nodes specified by one or more --base parameters.
288 to have all the nodes specified by one or more --base parameters.
289
289
290 The bundle file can then be transferred using conventional means and
290 The bundle file can then be transferred using conventional means and
291 applied to another repository with the unbundle or pull command.
291 applied to another repository with the unbundle or pull command.
292 This is useful when direct push and pull are not available or when
292 This is useful when direct push and pull are not available or when
293 exporting an entire repository is undesirable.
293 exporting an entire repository is undesirable.
294
294
295 Applying bundles preserves all changeset contents including
295 Applying bundles preserves all changeset contents including
296 permissions, copy/rename information, and revision history.
296 permissions, copy/rename information, and revision history.
297 """
297 """
298 revs = opts.get('rev') or None
298 revs = opts.get('rev') or None
299 if revs:
299 if revs:
300 revs = [repo.lookup(rev) for rev in revs]
300 revs = [repo.lookup(rev) for rev in revs]
301 base = opts.get('base')
301 base = opts.get('base')
302 if base:
302 if base:
303 if dest:
303 if dest:
304 raise util.Abort(_("--base is incompatible with specifying "
304 raise util.Abort(_("--base is incompatible with specifying "
305 "a destination"))
305 "a destination"))
306 base = [repo.lookup(rev) for rev in base]
306 base = [repo.lookup(rev) for rev in base]
307 # create the right base
307 # create the right base
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
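# Walk back from the requested heads (or all heads) through their ancestors,
# stopping at anything already reachable from a --base node; the boundary
# nodes collected in o become the roots passed to changegroupsubset below.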
309 o = []
309 o = []
310 has = {nullid: None}
310 has = {nullid: None}
311 for n in base:
311 for n in base:
312 has.update(repo.changelog.reachable(n))
312 has.update(repo.changelog.reachable(n))
313 if revs:
313 if revs:
314 visit = list(revs)
314 visit = list(revs)
315 else:
315 else:
316 visit = repo.changelog.heads()
316 visit = repo.changelog.heads()
317 seen = {}
317 seen = {}
318 while visit:
318 while visit:
319 n = visit.pop(0)
319 n = visit.pop(0)
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
321 if len(parents) == 0:
321 if len(parents) == 0:
322 o.insert(0, n)
322 o.insert(0, n)
323 else:
323 else:
324 for p in parents:
324 for p in parents:
325 if p not in seen:
325 if p not in seen:
326 seen[p] = 1
326 seen[p] = 1
327 visit.append(p)
327 visit.append(p)
328 else:
328 else:
329 setremoteconfig(ui, opts)
329 setremoteconfig(ui, opts)
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
331 other = hg.repository(ui, dest)
331 other = hg.repository(ui, dest)
332 o = repo.findoutgoing(other, force=opts['force'])
332 o = repo.findoutgoing(other, force=opts['force'])
333
333
334 if revs:
334 if revs:
335 cg = repo.changegroupsubset(o, revs, 'bundle')
335 cg = repo.changegroupsubset(o, revs, 'bundle')
336 else:
336 else:
337 cg = repo.changegroup(o, 'bundle')
337 cg = repo.changegroup(o, 'bundle')
338 changegroup.writebundle(cg, fname, "HG10BZ")
338 changegroup.writebundle(cg, fname, "HG10BZ")
339
339
340 def cat(ui, repo, file1, *pats, **opts):
340 def cat(ui, repo, file1, *pats, **opts):
341 """output the current or given revision of files
341 """output the current or given revision of files
342
342
343 Print the specified files as they were at the given revision.
343 Print the specified files as they were at the given revision.
344 If no revision is given, the parent of the working directory is used,
344 If no revision is given, the parent of the working directory is used,
345 or tip if no revision is checked out.
345 or tip if no revision is checked out.
346
346
347 Output may be to a file, in which case the name of the file is
347 Output may be to a file, in which case the name of the file is
348 given using a format string. The formatting rules are the same as
348 given using a format string. The formatting rules are the same as
349 for the export command, with the following additions:
349 for the export command, with the following additions:
350
350
351 %s basename of file being printed
351 %s basename of file being printed
352 %d dirname of file being printed, or '.' if in repo root
352 %d dirname of file being printed, or '.' if in repo root
353 %p root-relative path name of file being printed
353 %p root-relative path name of file being printed
354 """
354 """
355 ctx = repo.changectx(opts['rev'])
355 ctx = repo.changectx(opts['rev'])
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
357 ctx.node()):
357 ctx.node()):
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
359 fp.write(ctx.filectx(abs).data())
359 fp.write(ctx.filectx(abs).data())
360
360
361 def clone(ui, source, dest=None, **opts):
361 def clone(ui, source, dest=None, **opts):
362 """make a copy of an existing repository
362 """make a copy of an existing repository
363
363
364 Create a copy of an existing repository in a new directory.
364 Create a copy of an existing repository in a new directory.
365
365
366 If no destination directory name is specified, it defaults to the
366 If no destination directory name is specified, it defaults to the
367 basename of the source.
367 basename of the source.
368
368
369 The location of the source is added to the new repository's
369 The location of the source is added to the new repository's
370 .hg/hgrc file, as the default to be used for future pulls.
370 .hg/hgrc file, as the default to be used for future pulls.
371
371
372 For efficiency, hardlinks are used for cloning whenever the source
372 For efficiency, hardlinks are used for cloning whenever the source
373 and destination are on the same filesystem (note this applies only
373 and destination are on the same filesystem (note this applies only
374 to the repository data, not to the checked out files). Some
374 to the repository data, not to the checked out files). Some
375 filesystems, such as AFS, implement hardlinking incorrectly, but
375 filesystems, such as AFS, implement hardlinking incorrectly, but
376 do not report errors. In these cases, use the --pull option to
376 do not report errors. In these cases, use the --pull option to
377 avoid hardlinking.
377 avoid hardlinking.
378
378
379 You can safely clone repositories and checked out files using full
379 You can safely clone repositories and checked out files using full
380 hardlinks with
380 hardlinks with
381
381
382 $ cp -al REPO REPOCLONE
382 $ cp -al REPO REPOCLONE
383
383
384 which is the fastest way to clone. However, the operation is not
384 which is the fastest way to clone. However, the operation is not
385 atomic (making sure REPO is not modified during the operation is
385 atomic (making sure REPO is not modified during the operation is
386 up to you) and you have to make sure your editor breaks hardlinks
386 up to you) and you have to make sure your editor breaks hardlinks
387 (Emacs and most Linux kernel tools do so).
387 (Emacs and most Linux kernel tools do so).
388
388
389 If you use the -r option to clone up to a specific revision, no
389 If you use the -r option to clone up to a specific revision, no
390 subsequent revisions will be present in the cloned repository.
390 subsequent revisions will be present in the cloned repository.
391 This option implies --pull, even on local repositories.
391 This option implies --pull, even on local repositories.
392
392
393 See pull for valid source format details.
393 See pull for valid source format details.
394
394
395 It is possible to specify an ssh:// URL as the destination, but no
395 It is possible to specify an ssh:// URL as the destination, but no
396 .hg/hgrc and working directory will be created on the remote side.
396 .hg/hgrc and working directory will be created on the remote side.
397 Look at the help text for the pull command for important details
397 Look at the help text for the pull command for important details
398 about ssh:// URLs.
398 about ssh:// URLs.
399 """
399 """
400 setremoteconfig(ui, opts)
400 setremoteconfig(ui, opts)
401 hg.clone(ui, ui.expandpath(source), dest,
401 hg.clone(ui, ui.expandpath(source), dest,
402 pull=opts['pull'],
402 pull=opts['pull'],
403 stream=opts['uncompressed'],
403 stream=opts['uncompressed'],
404 rev=opts['rev'],
404 rev=opts['rev'],
405 update=not opts['noupdate'])
405 update=not opts['noupdate'])
406
406
407 def commit(ui, repo, *pats, **opts):
407 def commit(ui, repo, *pats, **opts):
408 """commit the specified files or all outstanding changes
408 """commit the specified files or all outstanding changes
409
409
410 Commit changes to the given files into the repository.
410 Commit changes to the given files into the repository.
411
411
412 If a list of files is omitted, all changes reported by "hg status"
412 If a list of files is omitted, all changes reported by "hg status"
413 will be committed.
413 will be committed.
414
414
415 If no commit message is specified, the editor configured in your hgrc
415 If no commit message is specified, the editor configured in your hgrc
416 or in the EDITOR environment variable is started to enter a message.
416 or in the EDITOR environment variable is started to enter a message.
417 """
417 """
418 message = logmessage(opts)
418 message = logmessage(opts)
419
419
420 if opts['addremove']:
420 if opts['addremove']:
421 cmdutil.addremove(repo, pats, opts)
421 cmdutil.addremove(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
423 if pats:
423 if pats:
424 status = repo.status(files=fns, match=match)
424 status = repo.status(files=fns, match=match)
425 modified, added, removed, deleted, unknown = status[:5]
425 modified, added, removed, deleted, unknown = status[:5]
426 files = modified + added + removed
426 files = modified + added + removed
427 slist = None
427 slist = None
428 for f in fns:
428 for f in fns:
429 if f not in files:
429 if f not in files:
430 rf = repo.wjoin(f)
430 rf = repo.wjoin(f)
431 if f in unknown:
431 if f in unknown:
432 raise util.Abort(_("file %s not tracked!") % rf)
432 raise util.Abort(_("file %s not tracked!") % rf)
433 try:
433 try:
434 mode = os.lstat(rf)[stat.ST_MODE]
434 mode = os.lstat(rf)[stat.ST_MODE]
435 except OSError:
435 except OSError:
436 raise util.Abort(_("file %s not found!") % rf)
436 raise util.Abort(_("file %s not found!") % rf)
437 if stat.S_ISDIR(mode):
437 if stat.S_ISDIR(mode):
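# A directory was named explicitly: bisect into the sorted list of
# changed files and check whether the entry at the insertion point
# still starts with "f + '/'", i.e. whether anything under that
# directory actually changed.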
438 name = f + '/'
438 name = f + '/'
439 if slist is None:
439 if slist is None:
440 slist = list(files)
440 slist = list(files)
441 slist.sort()
441 slist.sort()
442 i = bisect.bisect(slist, name)
442 i = bisect.bisect(slist, name)
443 if i >= len(slist) or not slist[i].startswith(name):
443 if i >= len(slist) or not slist[i].startswith(name):
444 raise util.Abort(_("no match under directory %s!")
444 raise util.Abort(_("no match under directory %s!")
445 % rf)
445 % rf)
446 elif not stat.S_ISREG(mode):
446 elif not stat.S_ISREG(mode):
447 raise util.Abort(_("can't commit %s: "
447 raise util.Abort(_("can't commit %s: "
448 "unsupported file type!") % rf)
448 "unsupported file type!") % rf)
449 else:
449 else:
450 files = []
450 files = []
451 try:
451 try:
452 repo.commit(files, message, opts['user'], opts['date'], match,
452 repo.commit(files, message, opts['user'], opts['date'], match,
453 force_editor=opts.get('force_editor'))
453 force_editor=opts.get('force_editor'))
454 except ValueError, inst:
454 except ValueError, inst:
455 raise util.Abort(str(inst))
455 raise util.Abort(str(inst))
456
456
457 def docopy(ui, repo, pats, opts, wlock):
457 def docopy(ui, repo, pats, opts, wlock):
458 # called with the repo lock held
458 # called with the repo lock held
459 #
459 #
460 # hgsep => pathname that uses "/" to separate directories
460 # hgsep => pathname that uses "/" to separate directories
461 # ossep => pathname that uses os.sep to separate directories
461 # ossep => pathname that uses os.sep to separate directories
462 cwd = repo.getcwd()
462 cwd = repo.getcwd()
463 errors = 0
463 errors = 0
464 copied = []
464 copied = []
465 targets = {}
465 targets = {}
466
466
467 # abs: hgsep
467 # abs: hgsep
468 # rel: ossep
468 # rel: ossep
469 # return: hgsep
469 # return: hgsep
470 def okaytocopy(abs, rel, exact):
470 def okaytocopy(abs, rel, exact):
471 reasons = {'?': _('is not managed'),
471 reasons = {'?': _('is not managed'),
472 'a': _('has been marked for add'),
472 'a': _('has been marked for add'),
473 'r': _('has been marked for remove')}
473 'r': _('has been marked for remove')}
474 state = repo.dirstate.state(abs)
474 state = repo.dirstate.state(abs)
475 reason = reasons.get(state)
475 reason = reasons.get(state)
476 if reason:
476 if reason:
477 if state == 'a':
477 if state == 'a':
478 origsrc = repo.dirstate.copied(abs)
478 origsrc = repo.dirstate.copied(abs)
479 if origsrc is not None:
479 if origsrc is not None:
480 return origsrc
480 return origsrc
481 if exact:
481 if exact:
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 else:
483 else:
484 return abs
484 return abs
485
485
486 # origsrc: hgsep
486 # origsrc: hgsep
487 # abssrc: hgsep
487 # abssrc: hgsep
488 # relsrc: ossep
488 # relsrc: ossep
489 # target: ossep
489 # target: ossep
490 def copy(origsrc, abssrc, relsrc, target, exact):
490 def copy(origsrc, abssrc, relsrc, target, exact):
491 abstarget = util.canonpath(repo.root, cwd, target)
491 abstarget = util.canonpath(repo.root, cwd, target)
492 reltarget = util.pathto(cwd, abstarget)
492 reltarget = util.pathto(cwd, abstarget)
493 prevsrc = targets.get(abstarget)
493 prevsrc = targets.get(abstarget)
494 if prevsrc is not None:
494 if prevsrc is not None:
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 (reltarget, util.localpath(abssrc),
496 (reltarget, util.localpath(abssrc),
497 util.localpath(prevsrc)))
497 util.localpath(prevsrc)))
498 return
498 return
499 if (not opts['after'] and os.path.exists(reltarget) or
499 if (not opts['after'] and os.path.exists(reltarget) or
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 if not opts['force']:
501 if not opts['force']:
502 ui.warn(_('%s: not overwriting - file exists\n') %
502 ui.warn(_('%s: not overwriting - file exists\n') %
503 reltarget)
503 reltarget)
504 return
504 return
505 if not opts['after'] and not opts.get('dry_run'):
505 if not opts['after'] and not opts.get('dry_run'):
506 os.unlink(reltarget)
506 os.unlink(reltarget)
507 if opts['after']:
507 if opts['after']:
508 if not os.path.exists(reltarget):
508 if not os.path.exists(reltarget):
509 return
509 return
510 else:
510 else:
511 targetdir = os.path.dirname(reltarget) or '.'
511 targetdir = os.path.dirname(reltarget) or '.'
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 os.makedirs(targetdir)
513 os.makedirs(targetdir)
514 try:
514 try:
515 restore = repo.dirstate.state(abstarget) == 'r'
515 restore = repo.dirstate.state(abstarget) == 'r'
516 if restore and not opts.get('dry_run'):
516 if restore and not opts.get('dry_run'):
517 repo.undelete([abstarget], wlock)
517 repo.undelete([abstarget], wlock)
518 try:
518 try:
519 if not opts.get('dry_run'):
519 if not opts.get('dry_run'):
520 util.copyfile(relsrc, reltarget)
520 util.copyfile(relsrc, reltarget)
521 restore = False
521 restore = False
522 finally:
522 finally:
523 if restore:
523 if restore:
524 repo.remove([abstarget], wlock)
524 repo.remove([abstarget], wlock)
525 except IOError, inst:
525 except IOError, inst:
526 if inst.errno == errno.ENOENT:
526 if inst.errno == errno.ENOENT:
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 else:
528 else:
529 ui.warn(_('%s: cannot copy - %s\n') %
529 ui.warn(_('%s: cannot copy - %s\n') %
530 (relsrc, inst.strerror))
530 (relsrc, inst.strerror))
531 errors += 1
531 errors += 1
532 return
532 return
533 if ui.verbose or not exact:
533 if ui.verbose or not exact:
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 targets[abstarget] = abssrc
535 targets[abstarget] = abssrc
536 if abstarget != origsrc and not opts.get('dry_run'):
536 if abstarget != origsrc and not opts.get('dry_run'):
537 repo.copy(origsrc, abstarget, wlock)
537 repo.copy(origsrc, abstarget, wlock)
538 copied.append((abssrc, relsrc, exact))
538 copied.append((abssrc, relsrc, exact))
539
539
540 # pat: ossep
540 # pat: ossep
541 # dest: ossep
541 # dest: ossep
542 # srcs: list of (hgsep, hgsep, ossep, bool)
542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 # return: function that takes hgsep and returns ossep
543 # return: function that takes hgsep and returns ossep
544 def targetpathfn(pat, dest, srcs):
544 def targetpathfn(pat, dest, srcs):
545 if os.path.isdir(pat):
545 if os.path.isdir(pat):
546 abspfx = util.canonpath(repo.root, cwd, pat)
546 abspfx = util.canonpath(repo.root, cwd, pat)
547 abspfx = util.localpath(abspfx)
547 abspfx = util.localpath(abspfx)
548 if destdirexists:
548 if destdirexists:
549 striplen = len(os.path.split(abspfx)[0])
549 striplen = len(os.path.split(abspfx)[0])
550 else:
550 else:
551 striplen = len(abspfx)
551 striplen = len(abspfx)
552 if striplen:
552 if striplen:
553 striplen += len(os.sep)
553 striplen += len(os.sep)
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 elif destdirexists:
555 elif destdirexists:
556 res = lambda p: os.path.join(dest,
556 res = lambda p: os.path.join(dest,
557 os.path.basename(util.localpath(p)))
557 os.path.basename(util.localpath(p)))
558 else:
558 else:
559 res = lambda p: dest
559 res = lambda p: dest
560 return res
560 return res
561
561
562 # pat: ossep
562 # pat: ossep
563 # dest: ossep
563 # dest: ossep
564 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 # return: function that takes hgsep and returns ossep
565 # return: function that takes hgsep and returns ossep
566 def targetpathafterfn(pat, dest, srcs):
566 def targetpathafterfn(pat, dest, srcs):
567 if util.patkind(pat, None)[0]:
567 if util.patkind(pat, None)[0]:
568 # a mercurial pattern
568 # a mercurial pattern
569 res = lambda p: os.path.join(dest,
569 res = lambda p: os.path.join(dest,
570 os.path.basename(util.localpath(p)))
570 os.path.basename(util.localpath(p)))
571 else:
571 else:
572 abspfx = util.canonpath(repo.root, cwd, pat)
572 abspfx = util.canonpath(repo.root, cwd, pat)
573 if len(abspfx) < len(srcs[0][0]):
573 if len(abspfx) < len(srcs[0][0]):
574 # A directory. Either the target path contains the last
574 # A directory. Either the target path contains the last
575 # component of the source path or it does not.
575 # component of the source path or it does not.
576 def evalpath(striplen):
576 def evalpath(striplen):
577 score = 0
577 score = 0
578 for s in srcs:
578 for s in srcs:
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 if os.path.exists(t):
580 if os.path.exists(t):
581 score += 1
581 score += 1
582 return score
582 return score
583
583
584 abspfx = util.localpath(abspfx)
584 abspfx = util.localpath(abspfx)
585 striplen = len(abspfx)
585 striplen = len(abspfx)
586 if striplen:
586 if striplen:
587 striplen += len(os.sep)
587 striplen += len(os.sep)
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 score = evalpath(striplen)
589 score = evalpath(striplen)
590 striplen1 = len(os.path.split(abspfx)[0])
590 striplen1 = len(os.path.split(abspfx)[0])
591 if striplen1:
591 if striplen1:
592 striplen1 += len(os.sep)
592 striplen1 += len(os.sep)
593 if evalpath(striplen1) > score:
593 if evalpath(striplen1) > score:
594 striplen = striplen1
594 striplen = striplen1
595 res = lambda p: os.path.join(dest,
595 res = lambda p: os.path.join(dest,
596 util.localpath(p)[striplen:])
596 util.localpath(p)[striplen:])
597 else:
597 else:
598 # a file
598 # a file
599 if destdirexists:
599 if destdirexists:
600 res = lambda p: os.path.join(dest,
600 res = lambda p: os.path.join(dest,
601 os.path.basename(util.localpath(p)))
601 os.path.basename(util.localpath(p)))
602 else:
602 else:
603 res = lambda p: dest
603 res = lambda p: dest
604 return res
604 return res
605
605
606
606
607 pats = list(pats)
607 pats = util.expand_glob(pats)
608 if not pats:
608 if not pats:
609 raise util.Abort(_('no source or destination specified'))
609 raise util.Abort(_('no source or destination specified'))
610 if len(pats) == 1:
610 if len(pats) == 1:
611 raise util.Abort(_('no destination specified'))
611 raise util.Abort(_('no destination specified'))
612 dest = pats.pop()
612 dest = pats.pop()
613 destdirexists = os.path.isdir(dest)
613 destdirexists = os.path.isdir(dest)
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 raise util.Abort(_('with multiple sources, destination must be an '
615 raise util.Abort(_('with multiple sources, destination must be an '
616 'existing directory'))
616 'existing directory'))
617 if opts['after']:
617 if opts['after']:
618 tfn = targetpathafterfn
618 tfn = targetpathafterfn
619 else:
619 else:
620 tfn = targetpathfn
620 tfn = targetpathfn
621 copylist = []
621 copylist = []
622 for pat in pats:
622 for pat in pats:
623 srcs = []
623 srcs = []
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
625 globbed=True):
625 origsrc = okaytocopy(abssrc, relsrc, exact)
626 origsrc = okaytocopy(abssrc, relsrc, exact)
626 if origsrc:
627 if origsrc:
627 srcs.append((origsrc, abssrc, relsrc, exact))
628 srcs.append((origsrc, abssrc, relsrc, exact))
628 if not srcs:
629 if not srcs:
629 continue
630 continue
630 copylist.append((tfn(pat, dest, srcs), srcs))
631 copylist.append((tfn(pat, dest, srcs), srcs))
631 if not copylist:
632 if not copylist:
632 raise util.Abort(_('no files to copy'))
633 raise util.Abort(_('no files to copy'))
633
634
634 for targetpath, srcs in copylist:
635 for targetpath, srcs in copylist:
635 for origsrc, abssrc, relsrc, exact in srcs:
636 for origsrc, abssrc, relsrc, exact in srcs:
636 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637
638
638 if errors:
639 if errors:
639 ui.warn(_('(consider using --after)\n'))
640 ui.warn(_('(consider using --after)\n'))
640 return errors, copied
641 return errors, copied
641
642
642 def copy(ui, repo, *pats, **opts):
643 def copy(ui, repo, *pats, **opts):
643 """mark files as copied for the next commit
644 """mark files as copied for the next commit
644
645
645 Mark dest as having copies of source files. If dest is a
646 Mark dest as having copies of source files. If dest is a
646 directory, copies are put in that directory. If dest is a file,
647 directory, copies are put in that directory. If dest is a file,
647 there can only be one source.
648 there can only be one source.
648
649
649 By default, this command copies the contents of files as they
650 By default, this command copies the contents of files as they
650 stand in the working directory. If invoked with --after, the
651 stand in the working directory. If invoked with --after, the
651 operation is recorded, but no copying is performed.
652 operation is recorded, but no copying is performed.
652
653
653 This command takes effect in the next commit. To undo a copy
654 This command takes effect in the next commit. To undo a copy
654 before that, see hg revert.
655 before that, see hg revert.
655 """
656 """
656 wlock = repo.wlock(0)
657 wlock = repo.wlock(0)
657 errs, copied = docopy(ui, repo, pats, opts, wlock)
658 errs, copied = docopy(ui, repo, pats, opts, wlock)
658 return errs
659 return errs
659
660
660 def debugancestor(ui, index, rev1, rev2):
661 def debugancestor(ui, index, rev1, rev2):
661 """find the ancestor revision of two revisions in a given index"""
662 """find the ancestor revision of two revisions in a given index"""
662 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
663 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
664 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
665
666
666 def debugcomplete(ui, cmd='', **opts):
667 def debugcomplete(ui, cmd='', **opts):
667 """returns the completion list associated with the given command"""
668 """returns the completion list associated with the given command"""
668
669
669 if opts['options']:
670 if opts['options']:
670 options = []
671 options = []
671 otables = [globalopts]
672 otables = [globalopts]
672 if cmd:
673 if cmd:
673 aliases, entry = findcmd(ui, cmd)
674 aliases, entry = findcmd(ui, cmd)
674 otables.append(entry[1])
675 otables.append(entry[1])
675 for t in otables:
676 for t in otables:
676 for o in t:
677 for o in t:
677 if o[0]:
678 if o[0]:
678 options.append('-%s' % o[0])
679 options.append('-%s' % o[0])
679 options.append('--%s' % o[1])
680 options.append('--%s' % o[1])
680 ui.write("%s\n" % "\n".join(options))
681 ui.write("%s\n" % "\n".join(options))
681 return
682 return
682
683
683 clist = findpossible(ui, cmd).keys()
684 clist = findpossible(ui, cmd).keys()
684 clist.sort()
685 clist.sort()
685 ui.write("%s\n" % "\n".join(clist))
686 ui.write("%s\n" % "\n".join(clist))
686
687
687 def debugrebuildstate(ui, repo, rev=""):
688 def debugrebuildstate(ui, repo, rev=""):
688 """rebuild the dirstate as it would look like for the given revision"""
689 """rebuild the dirstate as it would look like for the given revision"""
689 if rev == "":
690 if rev == "":
690 rev = repo.changelog.tip()
691 rev = repo.changelog.tip()
691 ctx = repo.changectx(rev)
692 ctx = repo.changectx(rev)
692 files = ctx.manifest()
693 files = ctx.manifest()
693 wlock = repo.wlock()
694 wlock = repo.wlock()
694 repo.dirstate.rebuild(rev, files)
695 repo.dirstate.rebuild(rev, files)
695
696
696 def debugcheckstate(ui, repo):
697 def debugcheckstate(ui, repo):
697 """validate the correctness of the current dirstate"""
698 """validate the correctness of the current dirstate"""
698 parent1, parent2 = repo.dirstate.parents()
699 parent1, parent2 = repo.dirstate.parents()
699 repo.dirstate.read()
700 repo.dirstate.read()
700 dc = repo.dirstate.map
701 dc = repo.dirstate.map
701 keys = dc.keys()
702 keys = dc.keys()
702 keys.sort()
703 keys.sort()
703 m1 = repo.changectx(parent1).manifest()
704 m1 = repo.changectx(parent1).manifest()
704 m2 = repo.changectx(parent2).manifest()
705 m2 = repo.changectx(parent2).manifest()
705 errors = 0
706 errors = 0
706 for f in dc:
707 for f in dc:
707 state = repo.dirstate.state(f)
708 state = repo.dirstate.state(f)
708 if state in "nr" and f not in m1:
709 if state in "nr" and f not in m1:
709 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
710 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
710 errors += 1
711 errors += 1
711 if state in "a" and f in m1:
712 if state in "a" and f in m1:
712 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
713 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
713 errors += 1
714 errors += 1
714 if state in "m" and f not in m1 and f not in m2:
715 if state in "m" and f not in m1 and f not in m2:
715 ui.warn(_("%s in state %s, but not in either manifest\n") %
716 ui.warn(_("%s in state %s, but not in either manifest\n") %
716 (f, state))
717 (f, state))
717 errors += 1
718 errors += 1
718 for f in m1:
719 for f in m1:
719 state = repo.dirstate.state(f)
720 state = repo.dirstate.state(f)
720 if state not in "nrm":
721 if state not in "nrm":
721 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
722 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
722 errors += 1
723 errors += 1
723 if errors:
724 if errors:
724 error = _(".hg/dirstate inconsistent with current parent's manifest")
725 error = _(".hg/dirstate inconsistent with current parent's manifest")
725 raise util.Abort(error)
726 raise util.Abort(error)
726
727
727 def showconfig(ui, repo, *values, **opts):
728 def showconfig(ui, repo, *values, **opts):
728 """show combined config settings from all hgrc files
729 """show combined config settings from all hgrc files
729
730
730 With no args, print names and values of all config items.
731 With no args, print names and values of all config items.
731
732
732 With one arg of the form section.name, print just the value of
733 With one arg of the form section.name, print just the value of
733 that config item.
734 that config item.
734
735
735 With multiple args, print names and values of all config items
736 With multiple args, print names and values of all config items
736 with matching section names."""
737 with matching section names."""
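# Illustrative usage (hypothetical configuration):
#   hg showconfig ui.username     -> print just that value
#   hg showconfig ui paths        -> print every item in the ui and paths sections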
737
738
738 untrusted = bool(opts.get('untrusted'))
739 untrusted = bool(opts.get('untrusted'))
739 if values:
740 if values:
740 if len([v for v in values if '.' in v]) > 1:
741 if len([v for v in values if '.' in v]) > 1:
741 raise util.Abort(_('only one config item permitted'))
742 raise util.Abort(_('only one config item permitted'))
742 for section, name, value in ui.walkconfig(untrusted=untrusted):
743 for section, name, value in ui.walkconfig(untrusted=untrusted):
743 sectname = section + '.' + name
744 sectname = section + '.' + name
744 if values:
745 if values:
745 for v in values:
746 for v in values:
746 if v == section:
747 if v == section:
747 ui.write('%s=%s\n' % (sectname, value))
748 ui.write('%s=%s\n' % (sectname, value))
748 elif v == sectname:
749 elif v == sectname:
749 ui.write(value, '\n')
750 ui.write(value, '\n')
750 else:
751 else:
751 ui.write('%s=%s\n' % (sectname, value))
752 ui.write('%s=%s\n' % (sectname, value))
752
753
753 def debugsetparents(ui, repo, rev1, rev2=None):
754 def debugsetparents(ui, repo, rev1, rev2=None):
754 """manually set the parents of the current working directory
755 """manually set the parents of the current working directory
755
756
756 This is useful for writing repository conversion tools, but should
757 This is useful for writing repository conversion tools, but should
757 be used with care.
758 be used with care.
758 """
759 """
759
760
760 if not rev2:
761 if not rev2:
761 rev2 = hex(nullid)
762 rev2 = hex(nullid)
762
763
763 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
764 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
764
765
765 def debugstate(ui, repo):
766 def debugstate(ui, repo):
766 """show the contents of the current dirstate"""
767 """show the contents of the current dirstate"""
767 repo.dirstate.read()
768 repo.dirstate.read()
768 dc = repo.dirstate.map
769 dc = repo.dirstate.map
769 keys = dc.keys()
770 keys = dc.keys()
770 keys.sort()
771 keys.sort()
771 for file_ in keys:
772 for file_ in keys:
773 if dc[file_][3] == -1:
774 # Pad or slice to locale representation
775 locale_len = len(time.strftime("%x %X", time.localtime(0)))
776 timestr = 'unset'
777 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
778 else:
779 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
772 ui.write("%c %3o %10d %s %s\n"
780 ui.write("%c %3o %10d %s %s\n"
773 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
781 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
774 time.strftime("%x %X",
782 timestr, file_))
775 time.localtime(dc[file_][3])), file_))
776 for f in repo.dirstate.copies():
783 for f in repo.dirstate.copies():
777 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
784 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
778
785
779 def debugdata(ui, file_, rev):
786 def debugdata(ui, file_, rev):
780 """dump the contents of an data file revision"""
787 """dump the contents of an data file revision"""
781 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
788 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
782 file_[:-2] + ".i", file_, 0)
789 file_[:-2] + ".i", file_, 0)
783 try:
790 try:
784 ui.write(r.revision(r.lookup(rev)))
791 ui.write(r.revision(r.lookup(rev)))
785 except KeyError:
792 except KeyError:
786 raise util.Abort(_('invalid revision identifier %s') % rev)
793 raise util.Abort(_('invalid revision identifier %s') % rev)
787
794
788 def debugdate(ui, date, range=None, **opts):
795 def debugdate(ui, date, range=None, **opts):
789 """parse and display a date"""
796 """parse and display a date"""
790 if opts["extended"]:
797 if opts["extended"]:
791 d = util.parsedate(date, util.extendeddateformats)
798 d = util.parsedate(date, util.extendeddateformats)
792 else:
799 else:
793 d = util.parsedate(date)
800 d = util.parsedate(date)
794 ui.write("internal: %s %s\n" % d)
801 ui.write("internal: %s %s\n" % d)
795 ui.write("standard: %s\n" % util.datestr(d))
802 ui.write("standard: %s\n" % util.datestr(d))
796 if range:
803 if range:
797 m = util.matchdate(range)
804 m = util.matchdate(range)
798 ui.write("match: %s\n" % m(d[0]))
805 ui.write("match: %s\n" % m(d[0]))
799
806
800 def debugindex(ui, file_):
807 def debugindex(ui, file_):
801 """dump the contents of an index file"""
808 """dump the contents of an index file"""
802 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
809 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
803 ui.write(" rev offset length base linkrev" +
810 ui.write(" rev offset length base linkrev" +
804 " nodeid p1 p2\n")
811 " nodeid p1 p2\n")
805 for i in xrange(r.count()):
812 for i in xrange(r.count()):
806 node = r.node(i)
813 node = r.node(i)
807 pp = r.parents(node)
814 pp = r.parents(node)
808 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
815 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
809 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
816 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
810 short(node), short(pp[0]), short(pp[1])))
817 short(node), short(pp[0]), short(pp[1])))
811
818
812 def debugindexdot(ui, file_):
819 def debugindexdot(ui, file_):
813 """dump an index DAG as a .dot file"""
820 """dump an index DAG as a .dot file"""
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
821 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
815 ui.write("digraph G {\n")
822 ui.write("digraph G {\n")
816 for i in xrange(r.count()):
823 for i in xrange(r.count()):
817 node = r.node(i)
824 node = r.node(i)
818 pp = r.parents(node)
825 pp = r.parents(node)
819 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
826 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
820 if pp[1] != nullid:
827 if pp[1] != nullid:
821 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
828 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
822 ui.write("}\n")
829 ui.write("}\n")
823
830
824 def debuginstall(ui):
831 def debuginstall(ui):
825 '''test Mercurial installation'''
832 '''test Mercurial installation'''
826
833
827 def writetemp(contents):
834 def writetemp(contents):
828 (fd, name) = tempfile.mkstemp()
835 (fd, name) = tempfile.mkstemp()
829 f = os.fdopen(fd, "wb")
836 f = os.fdopen(fd, "wb")
830 f.write(contents)
837 f.write(contents)
831 f.close()
838 f.close()
832 return name
839 return name
833
840
834 problems = 0
841 problems = 0
835
842
836 # encoding
843 # encoding
837 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
844 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
838 try:
845 try:
839 util.fromlocal("test")
846 util.fromlocal("test")
840 except util.Abort, inst:
847 except util.Abort, inst:
841 ui.write(" %s\n" % inst)
848 ui.write(" %s\n" % inst)
842 ui.write(_(" (check that your locale is properly set)\n"))
849 ui.write(_(" (check that your locale is properly set)\n"))
843 problems += 1
850 problems += 1
844
851
845 # compiled modules
852 # compiled modules
846 ui.status(_("Checking extensions...\n"))
853 ui.status(_("Checking extensions...\n"))
847 try:
854 try:
848 import bdiff, mpatch, base85
855 import bdiff, mpatch, base85
849 except Exception, inst:
856 except Exception, inst:
850 ui.write(" %s\n" % inst)
857 ui.write(" %s\n" % inst)
851 ui.write(_(" One or more extensions could not be found"))
858 ui.write(_(" One or more extensions could not be found"))
852 ui.write(_(" (check that you compiled the extensions)\n"))
859 ui.write(_(" (check that you compiled the extensions)\n"))
853 problems += 1
860 problems += 1
854
861
855 # templates
862 # templates
856 ui.status(_("Checking templates...\n"))
863 ui.status(_("Checking templates...\n"))
857 try:
864 try:
858 import templater
865 import templater
859 t = templater.templater(templater.templatepath("map-cmdline.default"))
866 t = templater.templater(templater.templatepath("map-cmdline.default"))
860 except Exception, inst:
867 except Exception, inst:
861 ui.write(" %s\n" % inst)
868 ui.write(" %s\n" % inst)
862 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
869 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
863 problems += 1
870 problems += 1
864
871
865 # patch
872 # patch
866 ui.status(_("Checking patch...\n"))
873 ui.status(_("Checking patch...\n"))
867 path = os.environ.get('PATH', '')
874 path = os.environ.get('PATH', '')
868 patcher = util.find_in_path('gpatch', path,
875 patcher = util.find_in_path('gpatch', path,
869 util.find_in_path('patch', path, None))
876 util.find_in_path('patch', path, None))
870 if not patcher:
877 if not patcher:
871 ui.write(_(" Can't find patch or gpatch in PATH\n"))
878 ui.write(_(" Can't find patch or gpatch in PATH\n"))
872 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
879 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
873 problems += 1
880 problems += 1
874 else:
881 else:
875 # actually attempt a patch here
882 # actually attempt a patch here
876 a = "1\n2\n3\n4\n"
883 a = "1\n2\n3\n4\n"
877 b = "1\n2\n3\ninsert\n4\n"
884 b = "1\n2\n3\ninsert\n4\n"
878 d = mdiff.unidiff(a, None, b, None, "a")
885 d = mdiff.unidiff(a, None, b, None, "a")
879 fa = writetemp(a)
886 fa = writetemp(a)
880 fd = writetemp(d)
887 fd = writetemp(d)
881 fp = os.popen('%s %s %s' % (patcher, fa, fd))
888 fp = os.popen('%s %s %s' % (patcher, fa, fd))
882 files = []
889 files = []
883 output = ""
890 output = ""
884 for line in fp:
891 for line in fp:
885 output += line
892 output += line
886 if line.startswith('patching file '):
893 if line.startswith('patching file '):
887 pf = util.parse_patch_output(line.rstrip())
894 pf = util.parse_patch_output(line.rstrip())
888 files.append(pf)
895 files.append(pf)
889 if files != [fa]:
896 if files != [fa]:
890 ui.write(_(" unexpected patch output!"))
897 ui.write(_(" unexpected patch output!"))
891 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 ui.write(_(" (you may have an incompatible version of patch)\n"))
892 ui.write(output)
899 ui.write(output)
893 problems += 1
900 problems += 1
894 a = file(fa).read()
901 a = file(fa).read()
895 if a != b:
902 if a != b:
896 ui.write(_(" patch test failed!"))
903 ui.write(_(" patch test failed!"))
897 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 ui.write(_(" (you may have an incompatible version of patch)\n"))
898 problems += 1
905 problems += 1
899 os.unlink(fa)
906 os.unlink(fa)
900 os.unlink(fd)
907 os.unlink(fd)
901
908
902 # merge helper
909 # merge helper
903 ui.status(_("Checking merge helper...\n"))
910 ui.status(_("Checking merge helper...\n"))
904 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
911 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
905 or "hgmerge")
912 or "hgmerge")
906 cmdpath = util.find_in_path(cmd, path)
913 cmdpath = util.find_in_path(cmd, path)
907 if not cmdpath:
914 if not cmdpath:
908 cmdpath = util.find_in_path(cmd.split()[0], path)
915 cmdpath = util.find_in_path(cmd.split()[0], path)
909 if not cmdpath:
916 if not cmdpath:
910 if cmd == 'hgmerge':
917 if cmd == 'hgmerge':
911 ui.write(_(" No merge helper set and can't find default"
918 ui.write(_(" No merge helper set and can't find default"
912 " hgmerge script in PATH\n"))
919 " hgmerge script in PATH\n"))
913 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
920 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
914 else:
921 else:
915 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
922 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
916 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
923 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
917 problems += 1
924 problems += 1
918 else:
925 else:
919 # actually attempt a patch here
926 # actually attempt a patch here
920 fa = writetemp("1\n2\n3\n4\n")
927 fa = writetemp("1\n2\n3\n4\n")
921 fl = writetemp("1\n2\n3\ninsert\n4\n")
928 fl = writetemp("1\n2\n3\ninsert\n4\n")
922 fr = writetemp("begin\n1\n2\n3\n4\n")
929 fr = writetemp("begin\n1\n2\n3\n4\n")
923 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
930 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
924 if r:
931 if r:
925 ui.write(_(" got unexpected merge error %d!") % r)
932 ui.write(_(" got unexpected merge error %d!") % r)
926 problems += 1
933 problems += 1
927 m = file(fl).read()
934 m = file(fl).read()
928 if m != "begin\n1\n2\n3\ninsert\n4\n":
935 if m != "begin\n1\n2\n3\ninsert\n4\n":
929 ui.write(_(" got unexpected merge results!") % r)
936 ui.write(_(" got unexpected merge results!") % r)
930 ui.write(_(" (your merge helper may have the"
937 ui.write(_(" (your merge helper may have the"
931 " wrong argument order)\n"))
938 " wrong argument order)\n"))
932 ui.write(m)
939 ui.write(m)
933 os.unlink(fa)
940 os.unlink(fa)
934 os.unlink(fl)
941 os.unlink(fl)
935 os.unlink(fr)
942 os.unlink(fr)
936
943
937 # editor
944 # editor
938 ui.status(_("Checking commit editor...\n"))
945 ui.status(_("Checking commit editor...\n"))
939 editor = (os.environ.get("HGEDITOR") or
946 editor = (os.environ.get("HGEDITOR") or
940 ui.config("ui", "editor") or
947 ui.config("ui", "editor") or
941 os.environ.get("EDITOR", "vi"))
948 os.environ.get("EDITOR", "vi"))
942 cmdpath = util.find_in_path(editor, path)
949 cmdpath = util.find_in_path(editor, path)
943 if not cmdpath:
950 if not cmdpath:
944 cmdpath = util.find_in_path(editor.split()[0], path)
951 cmdpath = util.find_in_path(editor.split()[0], path)
945 if not cmdpath:
952 if not cmdpath:
946 if editor == 'vi':
953 if editor == 'vi':
947 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
954 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
948 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
955 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
949 else:
956 else:
950 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
957 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
951 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
958 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
952 problems += 1
959 problems += 1
953
960
954 # check username
961 # check username
955 ui.status(_("Checking username...\n"))
962 ui.status(_("Checking username...\n"))
956 user = os.environ.get("HGUSER")
963 user = os.environ.get("HGUSER")
957 if user is None:
964 if user is None:
958 user = ui.config("ui", "username")
965 user = ui.config("ui", "username")
959 if user is None:
966 if user is None:
960 user = os.environ.get("EMAIL")
967 user = os.environ.get("EMAIL")
961 if not user:
968 if not user:
962 ui.warn(" ")
969 ui.warn(" ")
963 ui.username()
970 ui.username()
964 ui.write(_(" (specify a username in your .hgrc file)\n"))
971 ui.write(_(" (specify a username in your .hgrc file)\n"))
965
972
966 if not problems:
973 if not problems:
967 ui.status(_("No problems detected\n"))
974 ui.status(_("No problems detected\n"))
968 else:
975 else:
969 ui.write(_("%s problems detected,"
976 ui.write(_("%s problems detected,"
970 " please check your install!\n") % problems)
977 " please check your install!\n") % problems)
971
978
972 return problems
979 return problems
973
980
974 def debugrename(ui, repo, file1, *pats, **opts):
981 def debugrename(ui, repo, file1, *pats, **opts):
975 """dump rename information"""
982 """dump rename information"""
976
983
977 ctx = repo.changectx(opts.get('rev', 'tip'))
984 ctx = repo.changectx(opts.get('rev', 'tip'))
978 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
985 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
979 ctx.node()):
986 ctx.node()):
980 m = ctx.filectx(abs).renamed()
987 m = ctx.filectx(abs).renamed()
981 if m:
988 if m:
982 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
989 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
983 else:
990 else:
984 ui.write(_("%s not renamed\n") % rel)
991 ui.write(_("%s not renamed\n") % rel)
985
992
986 def debugwalk(ui, repo, *pats, **opts):
993 def debugwalk(ui, repo, *pats, **opts):
987 """show how files match on given patterns"""
994 """show how files match on given patterns"""
988 items = list(cmdutil.walk(repo, pats, opts))
995 items = list(cmdutil.walk(repo, pats, opts))
989 if not items:
996 if not items:
990 return
997 return
991 fmt = '%%s %%-%ds %%-%ds %%s' % (
998 fmt = '%%s %%-%ds %%-%ds %%s' % (
992 max([len(abs) for (src, abs, rel, exact) in items]),
999 max([len(abs) for (src, abs, rel, exact) in items]),
993 max([len(rel) for (src, abs, rel, exact) in items]))
1000 max([len(rel) for (src, abs, rel, exact) in items]))
994 for src, abs, rel, exact in items:
1001 for src, abs, rel, exact in items:
995 line = fmt % (src, abs, rel, exact and 'exact' or '')
1002 line = fmt % (src, abs, rel, exact and 'exact' or '')
996 ui.write("%s\n" % line.rstrip())
1003 ui.write("%s\n" % line.rstrip())
997
1004
998 def diff(ui, repo, *pats, **opts):
1005 def diff(ui, repo, *pats, **opts):
999 """diff repository (or selected files)
1006 """diff repository (or selected files)
1000
1007
1001 Show differences between revisions for the specified files.
1008 Show differences between revisions for the specified files.
1002
1009
1003 Differences between files are shown using the unified diff format.
1010 Differences between files are shown using the unified diff format.
1004
1011
1005 NOTE: diff may generate unexpected results for merges, as it will
1012 NOTE: diff may generate unexpected results for merges, as it will
1006 default to comparing against the working directory's first parent
1013 default to comparing against the working directory's first parent
1007 changeset if no revisions are specified.
1014 changeset if no revisions are specified.
1008
1015
1009 When two revision arguments are given, then changes are shown
1016 When two revision arguments are given, then changes are shown
1010 between those revisions. If only one revision is specified then
1017 between those revisions. If only one revision is specified then
1011 that revision is compared to the working directory, and, when no
1018 that revision is compared to the working directory, and, when no
1012 revisions are specified, the working directory files are compared
1019 revisions are specified, the working directory files are compared
1013 to its parent.
1020 to its parent.
1014
1021
1015 Without the -a option, diff will avoid generating diffs of files
1022 Without the -a option, diff will avoid generating diffs of files
1016 it detects as binary. With -a, diff will generate a diff anyway,
1023 it detects as binary. With -a, diff will generate a diff anyway,
1017 probably with undesirable results.
1024 probably with undesirable results.
1018 """
1025 """
1019 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1026 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1020
1027
1021 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1028 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1022
1029
1023 patch.diff(repo, node1, node2, fns, match=matchfn,
1030 patch.diff(repo, node1, node2, fns, match=matchfn,
1024 opts=patch.diffopts(ui, opts))
1031 opts=patch.diffopts(ui, opts))
1025
1032
1026 def export(ui, repo, *changesets, **opts):
1033 def export(ui, repo, *changesets, **opts):
1027 """dump the header and diffs for one or more changesets
1034 """dump the header and diffs for one or more changesets
1028
1035
1029 Print the changeset header and diffs for one or more revisions.
1036 Print the changeset header and diffs for one or more revisions.
1030
1037
1031 The information shown in the changeset header is: author,
1038 The information shown in the changeset header is: author,
1032 changeset hash, parent(s) and commit comment.
1039 changeset hash, parent(s) and commit comment.
1033
1040
1034 NOTE: export may generate unexpected diff output for merge changesets,
1041 NOTE: export may generate unexpected diff output for merge changesets,
1035 as it will compare the merge changeset against its first parent only.
1042 as it will compare the merge changeset against its first parent only.
1036
1043
1037 Output may be to a file, in which case the name of the file is
1044 Output may be to a file, in which case the name of the file is
1038 given using a format string. The formatting rules are as follows:
1045 given using a format string. The formatting rules are as follows:
1039
1046
1040 %% literal "%" character
1047 %% literal "%" character
1041 %H changeset hash (40 bytes of hexadecimal)
1048 %H changeset hash (40 bytes of hexadecimal)
1042 %N number of patches being generated
1049 %N number of patches being generated
1043 %R changeset revision number
1050 %R changeset revision number
1044 %b basename of the exporting repository
1051 %b basename of the exporting repository
1045 %h short-form changeset hash (12 bytes of hexadecimal)
1052 %h short-form changeset hash (12 bytes of hexadecimal)
1046 %n zero-padded sequence number, starting at 1
1053 %n zero-padded sequence number, starting at 1
1047 %r zero-padded changeset revision number
1054 %r zero-padded changeset revision number
1048
1055
1049 Without the -a option, export will avoid generating diffs of files
1056 Without the -a option, export will avoid generating diffs of files
1050 it detects as binary. With -a, export will generate a diff anyway,
1057 it detects as binary. With -a, export will generate a diff anyway,
1051 probably with undesirable results.
1058 probably with undesirable results.
1052
1059
1053 With the --switch-parent option, the diff will be against the second
1060 With the --switch-parent option, the diff will be against the second
1054 parent. It can be useful to review a merge.
1061 parent. It can be useful to review a merge.
1055 """
1062 """
1056 if not changesets:
1063 if not changesets:
1057 raise util.Abort(_("export requires at least one changeset"))
1064 raise util.Abort(_("export requires at least one changeset"))
1058 revs = cmdutil.revrange(repo, changesets)
1065 revs = cmdutil.revrange(repo, changesets)
1059 if len(revs) > 1:
1066 if len(revs) > 1:
1060 ui.note(_('exporting patches:\n'))
1067 ui.note(_('exporting patches:\n'))
1061 else:
1068 else:
1062 ui.note(_('exporting patch:\n'))
1069 ui.note(_('exporting patch:\n'))
1063 patch.export(repo, revs, template=opts['output'],
1070 patch.export(repo, revs, template=opts['output'],
1064 switch_parent=opts['switch_parent'],
1071 switch_parent=opts['switch_parent'],
1065 opts=patch.diffopts(ui, opts))
1072 opts=patch.diffopts(ui, opts))
1066
1073
1067 def grep(ui, repo, pattern, *pats, **opts):
1074 def grep(ui, repo, pattern, *pats, **opts):
1068 """search for a pattern in specified files and revisions
1075 """search for a pattern in specified files and revisions
1069
1076
1070 Search revisions of files for a regular expression.
1077 Search revisions of files for a regular expression.
1071
1078
1072 This command behaves differently than Unix grep. It only accepts
1079 This command behaves differently than Unix grep. It only accepts
1073 Python/Perl regexps. It searches repository history, not the
1080 Python/Perl regexps. It searches repository history, not the
1074 working directory. It always prints the revision number in which
1081 working directory. It always prints the revision number in which
1075 a match appears.
1082 a match appears.
1076
1083
1077 By default, grep only prints output for the first revision of a
1084 By default, grep only prints output for the first revision of a
1078 file in which it finds a match. To get it to print every revision
1085 file in which it finds a match. To get it to print every revision
1079 that contains a change in match status ("-" for a match that
1086 that contains a change in match status ("-" for a match that
1080 becomes a non-match, or "+" for a non-match that becomes a match),
1087 becomes a non-match, or "+" for a non-match that becomes a match),
1081 use the --all flag.
1088 use the --all flag.
1082 """
1089 """
1083 reflags = 0
1090 reflags = 0
1084 if opts['ignore_case']:
1091 if opts['ignore_case']:
1085 reflags |= re.I
1092 reflags |= re.I
1086 regexp = re.compile(pattern, reflags)
1093 regexp = re.compile(pattern, reflags)
1087 sep, eol = ':', '\n'
1094 sep, eol = ':', '\n'
1088 if opts['print0']:
1095 if opts['print0']:
1089 sep = eol = '\0'
1096 sep = eol = '\0'
1090
1097
1091 fcache = {}
1098 fcache = {}
1092 def getfile(fn):
1099 def getfile(fn):
1093 if fn not in fcache:
1100 if fn not in fcache:
1094 fcache[fn] = repo.file(fn)
1101 fcache[fn] = repo.file(fn)
1095 return fcache[fn]
1102 return fcache[fn]
1096
1103
1097 def matchlines(body):
1104 def matchlines(body):
1098 begin = 0
1105 begin = 0
1099 linenum = 0
1106 linenum = 0
1100 while True:
1107 while True:
1101 match = regexp.search(body, begin)
1108 match = regexp.search(body, begin)
1102 if not match:
1109 if not match:
1103 break
1110 break
1104 mstart, mend = match.span()
1111 mstart, mend = match.span()
1105 linenum += body.count('\n', begin, mstart) + 1
1112 linenum += body.count('\n', begin, mstart) + 1
1106 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1113 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1107 lend = body.find('\n', mend)
1114 lend = body.find('\n', mend)
1108 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1115 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1109 begin = lend + 1
1116 begin = lend + 1
1110
1117
1111 class linestate(object):
1118 class linestate(object):
1112 def __init__(self, line, linenum, colstart, colend):
1119 def __init__(self, line, linenum, colstart, colend):
1113 self.line = line
1120 self.line = line
1114 self.linenum = linenum
1121 self.linenum = linenum
1115 self.colstart = colstart
1122 self.colstart = colstart
1116 self.colend = colend
1123 self.colend = colend
1117
1124
1118 def __eq__(self, other):
1125 def __eq__(self, other):
1119 return self.line == other.line
1126 return self.line == other.line
1120
1127
1121 matches = {}
1128 matches = {}
1122 copies = {}
1129 copies = {}
1123 def grepbody(fn, rev, body):
1130 def grepbody(fn, rev, body):
1124 matches[rev].setdefault(fn, [])
1131 matches[rev].setdefault(fn, [])
1125 m = matches[rev][fn]
1132 m = matches[rev][fn]
1126 for lnum, cstart, cend, line in matchlines(body):
1133 for lnum, cstart, cend, line in matchlines(body):
1127 s = linestate(line, lnum, cstart, cend)
1134 s = linestate(line, lnum, cstart, cend)
1128 m.append(s)
1135 m.append(s)
1129
1136
1130 def difflinestates(a, b):
1137 def difflinestates(a, b):
1131 sm = difflib.SequenceMatcher(None, a, b)
1138 sm = difflib.SequenceMatcher(None, a, b)
1132 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1139 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1133 if tag == 'insert':
1140 if tag == 'insert':
1134 for i in xrange(blo, bhi):
1141 for i in xrange(blo, bhi):
1135 yield ('+', b[i])
1142 yield ('+', b[i])
1136 elif tag == 'delete':
1143 elif tag == 'delete':
1137 for i in xrange(alo, ahi):
1144 for i in xrange(alo, ahi):
1138 yield ('-', a[i])
1145 yield ('-', a[i])
1139 elif tag == 'replace':
1146 elif tag == 'replace':
1140 for i in xrange(alo, ahi):
1147 for i in xrange(alo, ahi):
1141 yield ('-', a[i])
1148 yield ('-', a[i])
1142 for i in xrange(blo, bhi):
1149 for i in xrange(blo, bhi):
1143 yield ('+', b[i])
1150 yield ('+', b[i])
1144
1151
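The generator above is what powers grep --all: SequenceMatcher opcodes between the previous and current match states of a file are translated into "-" entries (a match that disappeared) and "+" entries (a new match). A standalone sketch with plain strings standing in for the linestate objects (illustrative only):

import difflib

def difflinestates(a, b):
    # Emit ('-', line) for matches that vanish and ('+', line) for new ones,
    # exactly as the --all output does with its linestate objects.
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for i in range(blo, bhi):
                yield ('+', b[i])
        elif tag == 'delete':
            for i in range(alo, ahi):
                yield ('-', a[i])
        elif tag == 'replace':
            for i in range(alo, ahi):
                yield ('-', a[i])
            for i in range(blo, bhi):
                yield ('+', b[i])

old = ['foo = 1', 'bar = 2']
new = ['foo = 1', 'baz = 3']
print(list(difflinestates(old, new)))   # [('-', 'bar = 2'), ('+', 'baz = 3')]
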
1145 prev = {}
1152 prev = {}
1146 def display(fn, rev, states, prevstates):
1153 def display(fn, rev, states, prevstates):
1147 found = False
1154 found = False
1148 filerevmatches = {}
1155 filerevmatches = {}
1149 r = prev.get(fn, -1)
1156 r = prev.get(fn, -1)
1150 if opts['all']:
1157 if opts['all']:
1151 iter = difflinestates(states, prevstates)
1158 iter = difflinestates(states, prevstates)
1152 else:
1159 else:
1153 iter = [('', l) for l in prevstates]
1160 iter = [('', l) for l in prevstates]
1154 for change, l in iter:
1161 for change, l in iter:
1155 cols = [fn, str(r)]
1162 cols = [fn, str(r)]
1156 if opts['line_number']:
1163 if opts['line_number']:
1157 cols.append(str(l.linenum))
1164 cols.append(str(l.linenum))
1158 if opts['all']:
1165 if opts['all']:
1159 cols.append(change)
1166 cols.append(change)
1160 if opts['user']:
1167 if opts['user']:
1161 cols.append(ui.shortuser(get(r)[1]))
1168 cols.append(ui.shortuser(get(r)[1]))
1162 if opts['files_with_matches']:
1169 if opts['files_with_matches']:
1163 c = (fn, r)
1170 c = (fn, r)
1164 if c in filerevmatches:
1171 if c in filerevmatches:
1165 continue
1172 continue
1166 filerevmatches[c] = 1
1173 filerevmatches[c] = 1
1167 else:
1174 else:
1168 cols.append(l.line)
1175 cols.append(l.line)
1169 ui.write(sep.join(cols), eol)
1176 ui.write(sep.join(cols), eol)
1170 found = True
1177 found = True
1171 return found
1178 return found
1172
1179
1173 fstate = {}
1180 fstate = {}
1174 skip = {}
1181 skip = {}
1175 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1182 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1176 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1183 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1177 found = False
1184 found = False
1178 follow = opts.get('follow')
1185 follow = opts.get('follow')
1179 for st, rev, fns in changeiter:
1186 for st, rev, fns in changeiter:
1180 if st == 'window':
1187 if st == 'window':
1181 matches.clear()
1188 matches.clear()
1182 elif st == 'add':
1189 elif st == 'add':
1183 mf = repo.changectx(rev).manifest()
1190 mf = repo.changectx(rev).manifest()
1184 matches[rev] = {}
1191 matches[rev] = {}
1185 for fn in fns:
1192 for fn in fns:
1186 if fn in skip:
1193 if fn in skip:
1187 continue
1194 continue
1188 fstate.setdefault(fn, {})
1195 fstate.setdefault(fn, {})
1189 try:
1196 try:
1190 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1197 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1191 if follow:
1198 if follow:
1192 copied = getfile(fn).renamed(mf[fn])
1199 copied = getfile(fn).renamed(mf[fn])
1193 if copied:
1200 if copied:
1194 copies.setdefault(rev, {})[fn] = copied[0]
1201 copies.setdefault(rev, {})[fn] = copied[0]
1195 except KeyError:
1202 except KeyError:
1196 pass
1203 pass
1197 elif st == 'iter':
1204 elif st == 'iter':
1198 states = matches[rev].items()
1205 states = matches[rev].items()
1199 states.sort()
1206 states.sort()
1200 for fn, m in states:
1207 for fn, m in states:
1201 copy = copies.get(rev, {}).get(fn)
1208 copy = copies.get(rev, {}).get(fn)
1202 if fn in skip:
1209 if fn in skip:
1203 if copy:
1210 if copy:
1204 skip[copy] = True
1211 skip[copy] = True
1205 continue
1212 continue
1206 if fn in prev or fstate[fn]:
1213 if fn in prev or fstate[fn]:
1207 r = display(fn, rev, m, fstate[fn])
1214 r = display(fn, rev, m, fstate[fn])
1208 found = found or r
1215 found = found or r
1209 if r and not opts['all']:
1216 if r and not opts['all']:
1210 skip[fn] = True
1217 skip[fn] = True
1211 if copy:
1218 if copy:
1212 skip[copy] = True
1219 skip[copy] = True
1213 fstate[fn] = m
1220 fstate[fn] = m
1214 if copy:
1221 if copy:
1215 fstate[copy] = m
1222 fstate[copy] = m
1216 prev[fn] = rev
1223 prev[fn] = rev
1217
1224
1218 fstate = fstate.items()
1225 fstate = fstate.items()
1219 fstate.sort()
1226 fstate.sort()
1220 for fn, state in fstate:
1227 for fn, state in fstate:
1221 if fn in skip:
1228 if fn in skip:
1222 continue
1229 continue
1223 if fn not in copies.get(prev[fn], {}):
1230 if fn not in copies.get(prev[fn], {}):
1224 found = display(fn, rev, {}, state) or found
1231 found = display(fn, rev, {}, state) or found
1225 return (not found and 1) or 0
1232 return (not found and 1) or 0
1226
1233
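Both grep above and log further down wrap the changeset lookup in util.cachefunc, so repeated get(rev) calls inside a window reuse the first answer. The real helper lives in mercurial/util.py; a rough single-argument approximation of the idea, with hypothetical names, looks like this:

def cachefunc(func):
    # Memoize a one-argument function: repeated calls with the same
    # argument reuse the first result (a simplified stand-in for
    # util.cachefunc as used by grep and log).
    cache = {}
    def wrapper(arg):
        if arg not in cache:
            cache[arg] = func(arg)
        return cache[arg]
    return wrapper

calls = []
def lookup(rev):
    calls.append(rev)              # track how often the expensive lookup runs
    return 'changeset data for rev %d' % rev

get = cachefunc(lookup)
get(5); get(5); get(7)
print(calls)                       # [5, 7] -- each revision was looked up once
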
1227 def heads(ui, repo, **opts):
1234 def heads(ui, repo, **opts):
1228 """show current repository heads
1235 """show current repository heads
1229
1236
1230 Show all repository head changesets.
1237 Show all repository head changesets.
1231
1238
1232 Repository "heads" are changesets that don't have children
1239 Repository "heads" are changesets that don't have children
1233 changesets. They are where development generally takes place and
1240 changesets. They are where development generally takes place and
1234 are the usual targets for update and merge operations.
1241 are the usual targets for update and merge operations.
1235 """
1242 """
1236 if opts['rev']:
1243 if opts['rev']:
1237 heads = repo.heads(repo.lookup(opts['rev']))
1244 heads = repo.heads(repo.lookup(opts['rev']))
1238 else:
1245 else:
1239 heads = repo.heads()
1246 heads = repo.heads()
1240 displayer = cmdutil.show_changeset(ui, repo, opts)
1247 displayer = cmdutil.show_changeset(ui, repo, opts)
1241 for n in heads:
1248 for n in heads:
1242 displayer.show(changenode=n)
1249 displayer.show(changenode=n)
1243
1250
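Conceptually a head is just a revision that no other revision lists as a parent. Purely as an illustration of that definition (not repository code), heads could be computed from a parent map like so:

def find_heads(parents):
    # parents maps each revision to the list of its parent revisions.
    # A head is any revision that never appears as someone else's parent.
    has_child = set()
    for rev, ps in parents.items():
        has_child.update(ps)
    return sorted(rev for rev in parents if rev not in has_child)

# Linear history 0-1-2 plus a branch 1-3: both 2 and 3 are heads.
print(find_heads({0: [], 1: [0], 2: [1], 3: [1]}))   # [2, 3]
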
1244 def help_(ui, name=None, with_version=False):
1251 def help_(ui, name=None, with_version=False):
1245 """show help for a command, extension, or list of commands
1252 """show help for a command, extension, or list of commands
1246
1253
1247 With no arguments, print a list of commands and short help.
1254 With no arguments, print a list of commands and short help.
1248
1255
1249 Given a command name, print help for that command.
1256 Given a command name, print help for that command.
1250
1257
1251 Given an extension name, print help for that extension, and the
1258 Given an extension name, print help for that extension, and the
1252 commands it provides."""
1259 commands it provides."""
1253 option_lists = []
1260 option_lists = []
1254
1261
1255 def helpcmd(name):
1262 def helpcmd(name):
1256 if with_version:
1263 if with_version:
1257 version_(ui)
1264 version_(ui)
1258 ui.write('\n')
1265 ui.write('\n')
1259 aliases, i = findcmd(ui, name)
1266 aliases, i = findcmd(ui, name)
1260 # synopsis
1267 # synopsis
1261 ui.write("%s\n\n" % i[2])
1268 ui.write("%s\n\n" % i[2])
1262
1269
1263 # description
1270 # description
1264 doc = i[0].__doc__
1271 doc = i[0].__doc__
1265 if not doc:
1272 if not doc:
1266 doc = _("(No help text available)")
1273 doc = _("(No help text available)")
1267 if ui.quiet:
1274 if ui.quiet:
1268 doc = doc.splitlines(0)[0]
1275 doc = doc.splitlines(0)[0]
1269 ui.write("%s\n" % doc.rstrip())
1276 ui.write("%s\n" % doc.rstrip())
1270
1277
1271 if not ui.quiet:
1278 if not ui.quiet:
1272 # aliases
1279 # aliases
1273 if len(aliases) > 1:
1280 if len(aliases) > 1:
1274 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1281 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1275
1282
1276 # options
1283 # options
1277 if i[1]:
1284 if i[1]:
1278 option_lists.append(("options", i[1]))
1285 option_lists.append(("options", i[1]))
1279
1286
1280 def helplist(select=None):
1287 def helplist(select=None):
1281 h = {}
1288 h = {}
1282 cmds = {}
1289 cmds = {}
1283 for c, e in table.items():
1290 for c, e in table.items():
1284 f = c.split("|", 1)[0]
1291 f = c.split("|", 1)[0]
1285 if select and not select(f):
1292 if select and not select(f):
1286 continue
1293 continue
1287 if name == "shortlist" and not f.startswith("^"):
1294 if name == "shortlist" and not f.startswith("^"):
1288 continue
1295 continue
1289 f = f.lstrip("^")
1296 f = f.lstrip("^")
1290 if not ui.debugflag and f.startswith("debug"):
1297 if not ui.debugflag and f.startswith("debug"):
1291 continue
1298 continue
1292 doc = e[0].__doc__
1299 doc = e[0].__doc__
1293 if not doc:
1300 if not doc:
1294 doc = _("(No help text available)")
1301 doc = _("(No help text available)")
1295 h[f] = doc.splitlines(0)[0].rstrip()
1302 h[f] = doc.splitlines(0)[0].rstrip()
1296 cmds[f] = c.lstrip("^")
1303 cmds[f] = c.lstrip("^")
1297
1304
1298 fns = h.keys()
1305 fns = h.keys()
1299 fns.sort()
1306 fns.sort()
1300 m = max(map(len, fns))
1307 m = max(map(len, fns))
1301 for f in fns:
1308 for f in fns:
1302 if ui.verbose:
1309 if ui.verbose:
1303 commands = cmds[f].replace("|",", ")
1310 commands = cmds[f].replace("|",", ")
1304 ui.write(" %s:\n %s\n"%(commands, h[f]))
1311 ui.write(" %s:\n %s\n"%(commands, h[f]))
1305 else:
1312 else:
1306 ui.write(' %-*s %s\n' % (m, f, h[f]))
1313 ui.write(' %-*s %s\n' % (m, f, h[f]))
1307
1314
1308 def helptopic(name):
1315 def helptopic(name):
1309 v = None
1316 v = None
1310 for i in help.helptable:
1317 for i in help.helptable:
1311 l = i.split('|')
1318 l = i.split('|')
1312 if name in l:
1319 if name in l:
1313 v = i
1320 v = i
1314 header = l[-1]
1321 header = l[-1]
1315 if not v:
1322 if not v:
1316 raise UnknownCommand(name)
1323 raise UnknownCommand(name)
1317
1324
1318 # description
1325 # description
1319 doc = help.helptable[v]
1326 doc = help.helptable[v]
1320 if not doc:
1327 if not doc:
1321 doc = _("(No help text available)")
1328 doc = _("(No help text available)")
1322 if callable(doc):
1329 if callable(doc):
1323 doc = doc()
1330 doc = doc()
1324
1331
1325 ui.write("%s\n" % header)
1332 ui.write("%s\n" % header)
1326 ui.write("%s\n" % doc.rstrip())
1333 ui.write("%s\n" % doc.rstrip())
1327
1334
1328 def helpext(name):
1335 def helpext(name):
1329 try:
1336 try:
1330 mod = findext(name)
1337 mod = findext(name)
1331 except KeyError:
1338 except KeyError:
1332 raise UnknownCommand(name)
1339 raise UnknownCommand(name)
1333
1340
1334 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1341 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1335 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1342 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1336 for d in doc[1:]:
1343 for d in doc[1:]:
1337 ui.write(d, '\n')
1344 ui.write(d, '\n')
1338
1345
1339 ui.status('\n')
1346 ui.status('\n')
1340
1347
1341 try:
1348 try:
1342 ct = mod.cmdtable
1349 ct = mod.cmdtable
1343 except AttributeError:
1350 except AttributeError:
1344 ui.status(_('no commands defined\n'))
1351 ui.status(_('no commands defined\n'))
1345 return
1352 return
1346
1353
1347 if ui.verbose:
1354 if ui.verbose:
1348 ui.status(_('list of commands:\n\n'))
1355 ui.status(_('list of commands:\n\n'))
1349 else:
1356 else:
1350 ui.status(_('list of commands (use "hg help -v %s" '
1357 ui.status(_('list of commands (use "hg help -v %s" '
1351 'to show aliases and global options):\n\n') % name)
1358 'to show aliases and global options):\n\n') % name)
1352
1359
1353 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1360 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1354 helplist(modcmds.has_key)
1361 helplist(modcmds.has_key)
1355
1362
1356 if name and name != 'shortlist':
1363 if name and name != 'shortlist':
1357 i = None
1364 i = None
1358 for f in (helpcmd, helptopic, helpext):
1365 for f in (helpcmd, helptopic, helpext):
1359 try:
1366 try:
1360 f(name)
1367 f(name)
1361 i = None
1368 i = None
1362 break
1369 break
1363 except UnknownCommand, inst:
1370 except UnknownCommand, inst:
1364 i = inst
1371 i = inst
1365 if i:
1372 if i:
1366 raise i
1373 raise i
1367
1374
1368 else:
1375 else:
1369 # program name
1376 # program name
1370 if ui.verbose or with_version:
1377 if ui.verbose or with_version:
1371 version_(ui)
1378 version_(ui)
1372 else:
1379 else:
1373 ui.status(_("Mercurial Distributed SCM\n"))
1380 ui.status(_("Mercurial Distributed SCM\n"))
1374 ui.status('\n')
1381 ui.status('\n')
1375
1382
1376 # list of commands
1383 # list of commands
1377 if name == "shortlist":
1384 if name == "shortlist":
1378 ui.status(_('basic commands (use "hg help" '
1385 ui.status(_('basic commands (use "hg help" '
1379 'for the full list or option "-v" for details):\n\n'))
1386 'for the full list or option "-v" for details):\n\n'))
1380 elif ui.verbose:
1387 elif ui.verbose:
1381 ui.status(_('list of commands:\n\n'))
1388 ui.status(_('list of commands:\n\n'))
1382 else:
1389 else:
1383 ui.status(_('list of commands (use "hg help -v" '
1390 ui.status(_('list of commands (use "hg help -v" '
1384 'to show aliases and global options):\n\n'))
1391 'to show aliases and global options):\n\n'))
1385
1392
1386 helplist()
1393 helplist()
1387
1394
1388 # global options
1395 # global options
1389 if ui.verbose:
1396 if ui.verbose:
1390 option_lists.append(("global options", globalopts))
1397 option_lists.append(("global options", globalopts))
1391
1398
1392 # list all option lists
1399 # list all option lists
1393 opt_output = []
1400 opt_output = []
1394 for title, options in option_lists:
1401 for title, options in option_lists:
1395 opt_output.append(("\n%s:\n" % title, None))
1402 opt_output.append(("\n%s:\n" % title, None))
1396 for shortopt, longopt, default, desc in options:
1403 for shortopt, longopt, default, desc in options:
1397 if "DEPRECATED" in desc and not ui.verbose: continue
1404 if "DEPRECATED" in desc and not ui.verbose: continue
1398 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1405 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 longopt and " --%s" % longopt),
1406 longopt and " --%s" % longopt),
1400 "%s%s" % (desc,
1407 "%s%s" % (desc,
1401 default
1408 default
1402 and _(" (default: %s)") % default
1409 and _(" (default: %s)") % default
1403 or "")))
1410 or "")))
1404
1411
1405 if opt_output:
1412 if opt_output:
1406 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1413 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1407 for first, second in opt_output:
1414 for first, second in opt_output:
1408 if second:
1415 if second:
1409 ui.write(" %-*s %s\n" % (opts_len, first, second))
1416 ui.write(" %-*s %s\n" % (opts_len, first, second))
1410 else:
1417 else:
1411 ui.write("%s\n" % first)
1418 ui.write("%s\n" % first)
1412
1419
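The option-table loop above builds a "-s --long" column and aligns the descriptions on the widest entry, appending the default value when there is one. A condensed, illustrative restatement of that formatting (hypothetical helper name):

def format_options(options, verbose=False):
    # options: (shortopt, longopt, default, desc) tuples, as in the table above.
    rows = []
    for shortopt, longopt, default, desc in options:
        if 'DEPRECATED' in desc and not verbose:
            continue
        flags = '%2s%s' % (shortopt and '-%s' % shortopt or '',
                           longopt and ' --%s' % longopt or '')
        if default:
            desc = '%s (default: %s)' % (desc, default)
        rows.append((flags, desc))
    width = max(len(flags) for flags, _ in rows)
    return '\n'.join(' %-*s  %s' % (width, flags, desc) for flags, desc in rows)

print(format_options([('r', 'rev', '', 'show the specified revision'),
                      ('', 'style', 'default', 'display using template map file')]))
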
1413 def identify(ui, repo):
1420 def identify(ui, repo):
1414 """print information about the working copy
1421 """print information about the working copy
1415
1422
1416 Print a short summary of the current state of the repo.
1423 Print a short summary of the current state of the repo.
1417
1424
1418 This summary identifies the repository state using one or two parent
1425 This summary identifies the repository state using one or two parent
1419 hash identifiers, followed by a "+" if there are uncommitted changes
1426 hash identifiers, followed by a "+" if there are uncommitted changes
1420 in the working directory, followed by a list of tags for this revision.
1427 in the working directory, followed by a list of tags for this revision.
1421 """
1428 """
1422 parents = [p for p in repo.dirstate.parents() if p != nullid]
1429 parents = [p for p in repo.dirstate.parents() if p != nullid]
1423 if not parents:
1430 if not parents:
1424 ui.write(_("unknown\n"))
1431 ui.write(_("unknown\n"))
1425 return
1432 return
1426
1433
1427 hexfunc = ui.debugflag and hex or short
1434 hexfunc = ui.debugflag and hex or short
1428 modified, added, removed, deleted = repo.status()[:4]
1435 modified, added, removed, deleted = repo.status()[:4]
1429 output = ["%s%s" %
1436 output = ["%s%s" %
1430 ('+'.join([hexfunc(parent) for parent in parents]),
1437 ('+'.join([hexfunc(parent) for parent in parents]),
1431 (modified or added or removed or deleted) and "+" or "")]
1438 (modified or added or removed or deleted) and "+" or "")]
1432
1439
1433 if not ui.quiet:
1440 if not ui.quiet:
1434
1441
1435 branch = util.tolocal(repo.workingctx().branch())
1442 branch = util.tolocal(repo.workingctx().branch())
1436 if branch:
1443 if branch:
1437 output.append("(%s)" % branch)
1444 output.append("(%s)" % branch)
1438
1445
1439 # multiple tags for a single parent separated by '/'
1446 # multiple tags for a single parent separated by '/'
1440 parenttags = ['/'.join(tags)
1447 parenttags = ['/'.join(tags)
1441 for tags in map(repo.nodetags, parents) if tags]
1448 for tags in map(repo.nodetags, parents) if tags]
1442 # tags for multiple parents separated by ' + '
1449 # tags for multiple parents separated by ' + '
1443 if parenttags:
1450 if parenttags:
1444 output.append(' + '.join(parenttags))
1451 output.append(' + '.join(parenttags))
1445
1452
1446 ui.write("%s\n" % ' '.join(output))
1453 ui.write("%s\n" % ' '.join(output))
1447
1454
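identify therefore prints the short parent hashes joined by "+", a trailing "+" when the working directory has uncommitted changes, then the branch in parentheses and any tags. A small sketch assembling that string from plain values (the helper and sample hash are hypothetical):

def format_identity(parent_hashes, dirty, branch=None, tags=()):
    # Mirror the output assembled above: short hashes joined by '+',
    # a '+' suffix for local modifications, then branch and tags.
    output = ['%s%s' % ('+'.join(parent_hashes), '+' if dirty else '')]
    if branch:
        output.append('(%s)' % branch)
    if tags:
        output.append(' + '.join(tags))
    return ' '.join(output)

print(format_identity(['2d2f9a22c82b'], dirty=True, tags=['tip']))
# 2d2f9a22c82b+ tip
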
1448 def import_(ui, repo, patch1, *patches, **opts):
1455 def import_(ui, repo, patch1, *patches, **opts):
1449 """import an ordered set of patches
1456 """import an ordered set of patches
1450
1457
1451 Import a list of patches and commit them individually.
1458 Import a list of patches and commit them individually.
1452
1459
1453 If there are outstanding changes in the working directory, import
1460 If there are outstanding changes in the working directory, import
1454 will abort unless given the -f flag.
1461 will abort unless given the -f flag.
1455
1462
1456 You can import a patch straight from a mail message. Even patches
1463 You can import a patch straight from a mail message. Even patches
1457 as attachments work (the body part must be of type text/plain or
1464 as attachments work (the body part must be of type text/plain or
1458 text/x-patch to be used). The From and Subject headers of the email
1465 text/x-patch to be used). The From and Subject headers of the email
1459 message are used as the default committer and commit message. All
1466 message are used as the default committer and commit message. All
1460 text/plain body parts before the first diff are added to the commit
1467 text/plain body parts before the first diff are added to the commit
1461 message.
1468 message.
1462
1469
1463 If the imported patch was generated by hg export, the user and
1470 If the imported patch was generated by hg export, the user and
1464 description from the patch override values from the message headers
1471 description from the patch override values from the message headers
1465 and body. Values given on the command line with -m and -u override these.
1472 and body. Values given on the command line with -m and -u override these.
1466
1473
1467 To read a patch from standard input, use patch name "-".
1474 To read a patch from standard input, use patch name "-".
1468 """
1475 """
1469 patches = (patch1,) + patches
1476 patches = (patch1,) + patches
1470
1477
1471 if not opts['force']:
1478 if not opts['force']:
1472 bail_if_changed(repo)
1479 bail_if_changed(repo)
1473
1480
1474 d = opts["base"]
1481 d = opts["base"]
1475 strip = opts["strip"]
1482 strip = opts["strip"]
1476
1483
1477 wlock = repo.wlock()
1484 wlock = repo.wlock()
1478 lock = repo.lock()
1485 lock = repo.lock()
1479
1486
1480 for p in patches:
1487 for p in patches:
1481 pf = os.path.join(d, p)
1488 pf = os.path.join(d, p)
1482
1489
1483 if pf == '-':
1490 if pf == '-':
1484 ui.status(_("applying patch from stdin\n"))
1491 ui.status(_("applying patch from stdin\n"))
1485 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1492 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1486 else:
1493 else:
1487 ui.status(_("applying %s\n") % p)
1494 ui.status(_("applying %s\n") % p)
1488 tmpname, message, user, date = patch.extract(ui, file(pf))
1495 tmpname, message, user, date = patch.extract(ui, file(pf))
1489
1496
1490 if tmpname is None:
1497 if tmpname is None:
1491 raise util.Abort(_('no diffs found'))
1498 raise util.Abort(_('no diffs found'))
1492
1499
1493 try:
1500 try:
1494 cmdline_message = logmessage(opts)
1501 cmdline_message = logmessage(opts)
1495 if cmdline_message:
1502 if cmdline_message:
1496 # pickup the cmdline msg
1503 # pickup the cmdline msg
1497 message = cmdline_message
1504 message = cmdline_message
1498 elif message:
1505 elif message:
1499 # pickup the patch msg
1506 # pickup the patch msg
1500 message = message.strip()
1507 message = message.strip()
1501 else:
1508 else:
1502 # launch the editor
1509 # launch the editor
1503 message = None
1510 message = None
1504 ui.debug(_('message:\n%s\n') % message)
1511 ui.debug(_('message:\n%s\n') % message)
1505
1512
1506 files = {}
1513 files = {}
1507 try:
1514 try:
1508 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1515 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1509 files=files)
1516 files=files)
1510 finally:
1517 finally:
1511 files = patch.updatedir(ui, repo, files, wlock=wlock)
1518 files = patch.updatedir(ui, repo, files, wlock=wlock)
1512 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1519 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1513 finally:
1520 finally:
1514 os.unlink(tmpname)
1521 os.unlink(tmpname)
1515
1522
1516 def incoming(ui, repo, source="default", **opts):
1523 def incoming(ui, repo, source="default", **opts):
1517 """show new changesets found in source
1524 """show new changesets found in source
1518
1525
1519 Show new changesets found in the specified path/URL or the default
1526 Show new changesets found in the specified path/URL or the default
1520 pull location. These are the changesets that would be pulled if a pull
1527 pull location. These are the changesets that would be pulled if a pull
1521 was requested.
1528 was requested.
1522
1529
1523 For remote repositories, using --bundle avoids downloading the changesets
1530 For remote repositories, using --bundle avoids downloading the changesets
1524 twice if the incoming command is followed by a pull.
1531 twice if the incoming command is followed by a pull.
1525
1532
1526 See pull for valid source format details.
1533 See pull for valid source format details.
1527 """
1534 """
1528 source = ui.expandpath(source)
1535 source = ui.expandpath(source)
1529 setremoteconfig(ui, opts)
1536 setremoteconfig(ui, opts)
1530
1537
1531 other = hg.repository(ui, source)
1538 other = hg.repository(ui, source)
1532 incoming = repo.findincoming(other, force=opts["force"])
1539 incoming = repo.findincoming(other, force=opts["force"])
1533 if not incoming:
1540 if not incoming:
1534 try:
1541 try:
1535 os.unlink(opts["bundle"])
1542 os.unlink(opts["bundle"])
1536 except OSError:
1543 except OSError:

1537 pass
1544 pass
1538 ui.status(_("no changes found\n"))
1545 ui.status(_("no changes found\n"))
1539 return 1
1546 return 1
1540
1547
1541 cleanup = None
1548 cleanup = None
1542 try:
1549 try:
1543 fname = opts["bundle"]
1550 fname = opts["bundle"]
1544 if fname or not other.local():
1551 if fname or not other.local():
1545 # create a bundle (uncompressed if other repo is not local)
1552 # create a bundle (uncompressed if other repo is not local)
1546 cg = other.changegroup(incoming, "incoming")
1553 cg = other.changegroup(incoming, "incoming")
1547 bundletype = other.local() and "HG10BZ" or "HG10UN"
1554 bundletype = other.local() and "HG10BZ" or "HG10UN"
1548 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1555 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1549 # keep written bundle?
1556 # keep written bundle?
1550 if opts["bundle"]:
1557 if opts["bundle"]:
1551 cleanup = None
1558 cleanup = None
1552 if not other.local():
1559 if not other.local():
1553 # use the created uncompressed bundlerepo
1560 # use the created uncompressed bundlerepo
1554 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1561 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1555
1562
1556 revs = None
1563 revs = None
1557 if opts['rev']:
1564 if opts['rev']:
1558 revs = [other.lookup(rev) for rev in opts['rev']]
1565 revs = [other.lookup(rev) for rev in opts['rev']]
1559 o = other.changelog.nodesbetween(incoming, revs)[0]
1566 o = other.changelog.nodesbetween(incoming, revs)[0]
1560 if opts['newest_first']:
1567 if opts['newest_first']:
1561 o.reverse()
1568 o.reverse()
1562 displayer = cmdutil.show_changeset(ui, other, opts)
1569 displayer = cmdutil.show_changeset(ui, other, opts)
1563 for n in o:
1570 for n in o:
1564 parents = [p for p in other.changelog.parents(n) if p != nullid]
1571 parents = [p for p in other.changelog.parents(n) if p != nullid]
1565 if opts['no_merges'] and len(parents) == 2:
1572 if opts['no_merges'] and len(parents) == 2:
1566 continue
1573 continue
1567 displayer.show(changenode=n)
1574 displayer.show(changenode=n)
1568 finally:
1575 finally:
1569 if hasattr(other, 'close'):
1576 if hasattr(other, 'close'):
1570 other.close()
1577 other.close()
1571 if cleanup:
1578 if cleanup:
1572 os.unlink(cleanup)
1579 os.unlink(cleanup)
1573
1580
1574 def init(ui, dest=".", **opts):
1581 def init(ui, dest=".", **opts):
1575 """create a new repository in the given directory
1582 """create a new repository in the given directory
1576
1583
1577 Initialize a new repository in the given directory. If the given
1584 Initialize a new repository in the given directory. If the given
1578 directory does not exist, it is created.
1585 directory does not exist, it is created.
1579
1586
1580 If no directory is given, the current directory is used.
1587 If no directory is given, the current directory is used.
1581
1588
1582 It is possible to specify an ssh:// URL as the destination.
1589 It is possible to specify an ssh:// URL as the destination.
1583 Look at the help text for the pull command for important details
1590 Look at the help text for the pull command for important details
1584 about ssh:// URLs.
1591 about ssh:// URLs.
1585 """
1592 """
1586 setremoteconfig(ui, opts)
1593 setremoteconfig(ui, opts)
1587 hg.repository(ui, dest, create=1)
1594 hg.repository(ui, dest, create=1)
1588
1595
1589 def locate(ui, repo, *pats, **opts):
1596 def locate(ui, repo, *pats, **opts):
1590 """locate files matching specific patterns
1597 """locate files matching specific patterns
1591
1598
1592 Print all files under Mercurial control whose names match the
1599 Print all files under Mercurial control whose names match the
1593 given patterns.
1600 given patterns.
1594
1601
1595 This command searches the current directory and its
1602 This command searches the current directory and its
1596 subdirectories. To search an entire repository, move to the root
1603 subdirectories. To search an entire repository, move to the root
1597 of the repository.
1604 of the repository.
1598
1605
1599 If no patterns are given to match, this command prints all file
1606 If no patterns are given to match, this command prints all file
1600 names.
1607 names.
1601
1608
1602 If you want to feed the output of this command into the "xargs"
1609 If you want to feed the output of this command into the "xargs"
1603 command, use the "-0" option to both this command and "xargs".
1610 command, use the "-0" option to both this command and "xargs".
1604 This will avoid the problem of "xargs" treating single filenames
1611 This will avoid the problem of "xargs" treating single filenames
1605 that contain white space as multiple filenames.
1612 that contain white space as multiple filenames.
1606 """
1613 """
1607 end = opts['print0'] and '\0' or '\n'
1614 end = opts['print0'] and '\0' or '\n'
1608 rev = opts['rev']
1615 rev = opts['rev']
1609 if rev:
1616 if rev:
1610 node = repo.lookup(rev)
1617 node = repo.lookup(rev)
1611 else:
1618 else:
1612 node = None
1619 node = None
1613
1620
1614 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1621 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1615 head='(?:.*/|)'):
1622 head='(?:.*/|)'):
1616 if not node and repo.dirstate.state(abs) == '?':
1623 if not node and repo.dirstate.state(abs) == '?':
1617 continue
1624 continue
1618 if opts['fullpath']:
1625 if opts['fullpath']:
1619 ui.write(os.path.join(repo.root, abs), end)
1626 ui.write(os.path.join(repo.root, abs), end)
1620 else:
1627 else:
1621 ui.write(((pats and rel) or abs), end)
1628 ui.write(((pats and rel) or abs), end)
1622
1629
1623 def log(ui, repo, *pats, **opts):
1630 def log(ui, repo, *pats, **opts):
1624 """show revision history of entire repository or files
1631 """show revision history of entire repository or files
1625
1632
1626 Print the revision history of the specified files or the entire
1633 Print the revision history of the specified files or the entire
1627 project.
1634 project.
1628
1635
1629 File history is shown without following rename or copy history of
1636 File history is shown without following rename or copy history of
1630 files. Use -f/--follow with a file name to follow history across
1637 files. Use -f/--follow with a file name to follow history across
1631 renames and copies. --follow without a file name will only show
1638 renames and copies. --follow without a file name will only show
1632 ancestors or descendants of the starting revision. --follow-first
1639 ancestors or descendants of the starting revision. --follow-first
1633 only follows the first parent of merge revisions.
1640 only follows the first parent of merge revisions.
1634
1641
1635 If no revision range is specified, the default is tip:0 unless
1642 If no revision range is specified, the default is tip:0 unless
1636 --follow is set, in which case the working directory parent is
1643 --follow is set, in which case the working directory parent is
1637 used as the starting revision.
1644 used as the starting revision.
1638
1645
1639 By default this command outputs: changeset id and hash, tags,
1646 By default this command outputs: changeset id and hash, tags,
1640 non-trivial parents, user, date and time, and a summary for each
1647 non-trivial parents, user, date and time, and a summary for each
1641 commit. When the -v/--verbose switch is used, the list of changed
1648 commit. When the -v/--verbose switch is used, the list of changed
1642 files and full commit message is shown.
1649 files and full commit message is shown.
1643
1650
1644 NOTE: log -p may generate unexpected diff output for merge
1651 NOTE: log -p may generate unexpected diff output for merge
1645 changesets, as it will compare the merge changeset against its
1652 changesets, as it will compare the merge changeset against its
1646 first parent only. Also, the files: list will only reflect files
1653 first parent only. Also, the files: list will only reflect files
1647 that are different from BOTH parents.
1654 that are different from BOTH parents.
1648
1655
1649 """
1656 """
1650
1657
1651 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1658 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1652 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1659 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1653
1660
1654 if opts['limit']:
1661 if opts['limit']:
1655 try:
1662 try:
1656 limit = int(opts['limit'])
1663 limit = int(opts['limit'])
1657 except ValueError:
1664 except ValueError:
1658 raise util.Abort(_('limit must be a positive integer'))
1665 raise util.Abort(_('limit must be a positive integer'))
1659 if limit <= 0: raise util.Abort(_('limit must be positive'))
1666 if limit <= 0: raise util.Abort(_('limit must be positive'))
1660 else:
1667 else:
1661 limit = sys.maxint
1668 limit = sys.maxint
1662 count = 0
1669 count = 0
1663
1670
1664 if opts['copies'] and opts['rev']:
1671 if opts['copies'] and opts['rev']:
1665 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1672 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1666 else:
1673 else:
1667 endrev = repo.changelog.count()
1674 endrev = repo.changelog.count()
1668 rcache = {}
1675 rcache = {}
1669 ncache = {}
1676 ncache = {}
1670 dcache = []
1677 dcache = []
1671 def getrenamed(fn, rev, man):
1678 def getrenamed(fn, rev, man):
1672 '''looks up all renames for a file (up to endrev) the first
1679 '''looks up all renames for a file (up to endrev) the first
1673 time the file is given. It indexes on the changerev and only
1680 time the file is given. It indexes on the changerev and only
1674 parses the manifest if linkrev != changerev.
1681 parses the manifest if linkrev != changerev.
1675 Returns rename info for fn at changerev rev.'''
1682 Returns rename info for fn at changerev rev.'''
1676 if fn not in rcache:
1683 if fn not in rcache:
1677 rcache[fn] = {}
1684 rcache[fn] = {}
1678 ncache[fn] = {}
1685 ncache[fn] = {}
1679 fl = repo.file(fn)
1686 fl = repo.file(fn)
1680 for i in xrange(fl.count()):
1687 for i in xrange(fl.count()):
1681 node = fl.node(i)
1688 node = fl.node(i)
1682 lr = fl.linkrev(node)
1689 lr = fl.linkrev(node)
1683 renamed = fl.renamed(node)
1690 renamed = fl.renamed(node)
1684 rcache[fn][lr] = renamed
1691 rcache[fn][lr] = renamed
1685 if renamed:
1692 if renamed:
1686 ncache[fn][node] = renamed
1693 ncache[fn][node] = renamed
1687 if lr >= endrev:
1694 if lr >= endrev:
1688 break
1695 break
1689 if rev in rcache[fn]:
1696 if rev in rcache[fn]:
1690 return rcache[fn][rev]
1697 return rcache[fn][rev]
1691 mr = repo.manifest.rev(man)
1698 mr = repo.manifest.rev(man)
1692 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1699 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1693 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1700 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1694 if not dcache or dcache[0] != man:
1701 if not dcache or dcache[0] != man:
1695 dcache[:] = [man, repo.manifest.readdelta(man)]
1702 dcache[:] = [man, repo.manifest.readdelta(man)]
1696 if fn in dcache[1]:
1703 if fn in dcache[1]:
1697 return ncache[fn].get(dcache[1][fn])
1704 return ncache[fn].get(dcache[1][fn])
1698 return None
1705 return None
1699
1706
1700 df = False
1707 df = False
1701 if opts["date"]:
1708 if opts["date"]:
1702 df = util.matchdate(opts["date"])
1709 df = util.matchdate(opts["date"])
1703
1710
1704 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1711 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1705 for st, rev, fns in changeiter:
1712 for st, rev, fns in changeiter:
1706 if st == 'add':
1713 if st == 'add':
1707 changenode = repo.changelog.node(rev)
1714 changenode = repo.changelog.node(rev)
1708 parents = [p for p in repo.changelog.parentrevs(rev)
1715 parents = [p for p in repo.changelog.parentrevs(rev)
1709 if p != nullrev]
1716 if p != nullrev]
1710 if opts['no_merges'] and len(parents) == 2:
1717 if opts['no_merges'] and len(parents) == 2:
1711 continue
1718 continue
1712 if opts['only_merges'] and len(parents) != 2:
1719 if opts['only_merges'] and len(parents) != 2:
1713 continue
1720 continue
1714
1721
1715 if df:
1722 if df:
1716 changes = get(rev)
1723 changes = get(rev)
1717 if not df(changes[2][0]):
1724 if not df(changes[2][0]):
1718 continue
1725 continue
1719
1726
1720 if opts['keyword']:
1727 if opts['keyword']:
1721 changes = get(rev)
1728 changes = get(rev)
1722 miss = 0
1729 miss = 0
1723 for k in [kw.lower() for kw in opts['keyword']]:
1730 for k in [kw.lower() for kw in opts['keyword']]:
1724 if not (k in changes[1].lower() or
1731 if not (k in changes[1].lower() or
1725 k in changes[4].lower() or
1732 k in changes[4].lower() or
1726 k in " ".join(changes[3][:20]).lower()):
1733 k in " ".join(changes[3][:20]).lower()):
1727 miss = 1
1734 miss = 1
1728 break
1735 break
1729 if miss:
1736 if miss:
1730 continue
1737 continue
1731
1738
1732 copies = []
1739 copies = []
1733 if opts.get('copies') and rev:
1740 if opts.get('copies') and rev:
1734 mf = get(rev)[0]
1741 mf = get(rev)[0]
1735 for fn in get(rev)[3]:
1742 for fn in get(rev)[3]:
1736 rename = getrenamed(fn, rev, mf)
1743 rename = getrenamed(fn, rev, mf)
1737 if rename:
1744 if rename:
1738 copies.append((fn, rename[0]))
1745 copies.append((fn, rename[0]))
1739 displayer.show(rev, changenode, copies=copies)
1746 displayer.show(rev, changenode, copies=copies)
1740 elif st == 'iter':
1747 elif st == 'iter':
1741 if count == limit: break
1748 if count == limit: break
1742 if displayer.flush(rev):
1749 if displayer.flush(rev):
1743 count += 1
1750 count += 1
1744
1751
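Of the filters above, --keyword is the least obvious: a revision survives only if every keyword appears, case-insensitively, in the committer name, the description, or the first twenty changed file names. An equivalent predicate in isolation (hypothetical names and sample data):

def matches_keywords(keywords, user, description, files):
    # Every keyword must occur, case-insensitively, in the user name,
    # the commit message, or the first twenty changed file names.
    haystacks = [user.lower(), description.lower(), ' '.join(files[:20]).lower()]
    return all(any(k.lower() in h for h in haystacks) for k in keywords)

print(matches_keywords(['bug', 'alice'],
                       'Alice <alice@example.com>',
                       'fix bug in revlog',
                       ['mercurial/revlog.py']))   # True
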
1745 def manifest(ui, repo, rev=None):
1752 def manifest(ui, repo, rev=None):
1746 """output the current or given revision of the project manifest
1753 """output the current or given revision of the project manifest
1747
1754
1748 Print a list of version controlled files for the given revision.
1755 Print a list of version controlled files for the given revision.
1749 If no revision is given, the parent of the working directory is used,
1756 If no revision is given, the parent of the working directory is used,
1750 or tip if no revision is checked out.
1757 or tip if no revision is checked out.
1751
1758
1752 The manifest is the list of files being version controlled.
1759 The manifest is the list of files being version controlled.
1754
1761
1755 With -v flag, print file permissions. With --debug flag, print
1762 With -v flag, print file permissions. With --debug flag, print
1756 file revision hashes.
1763 file revision hashes.
1757 """
1764 """
1758
1765
1759 m = repo.changectx(rev).manifest()
1766 m = repo.changectx(rev).manifest()
1760 files = m.keys()
1767 files = m.keys()
1761 files.sort()
1768 files.sort()
1762
1769
1763 for f in files:
1770 for f in files:
1764 if ui.debugflag:
1771 if ui.debugflag:
1765 ui.write("%40s " % hex(m[f]))
1772 ui.write("%40s " % hex(m[f]))
1766 if ui.verbose:
1773 if ui.verbose:
1767 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1774 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1768 ui.write("%s\n" % f)
1775 ui.write("%s\n" % f)
1769
1776
1770 def merge(ui, repo, node=None, force=None):
1777 def merge(ui, repo, node=None, force=None):
1771 """merge working directory with another revision
1778 """merge working directory with another revision
1772
1779
1773 Merge the contents of the current working directory and the
1780 Merge the contents of the current working directory and the
1774 requested revision. Files that changed relative to either parent are
1781 requested revision. Files that changed relative to either parent are
1775 marked as changed for the next commit and a commit must be
1782 marked as changed for the next commit and a commit must be
1776 performed before any further updates are allowed.
1783 performed before any further updates are allowed.
1777
1784
1778 If no revision is specified, the working directory's parent is a
1785 If no revision is specified, the working directory's parent is a
1779 head revision, and the repository contains exactly one other head,
1786 head revision, and the repository contains exactly one other head,
1780 the other head is merged with by default. Otherwise, an explicit
1787 the other head is merged with by default. Otherwise, an explicit
1781 revision to merge with must be provided.
1788 revision to merge with must be provided.
1782 """
1789 """
1783
1790
1784 if not node:
1791 if not node:
1785 heads = repo.heads()
1792 heads = repo.heads()
1786 if len(heads) > 2:
1793 if len(heads) > 2:
1787 raise util.Abort(_('repo has %d heads - '
1794 raise util.Abort(_('repo has %d heads - '
1788 'please merge with an explicit rev') %
1795 'please merge with an explicit rev') %
1789 len(heads))
1796 len(heads))
1790 if len(heads) == 1:
1797 if len(heads) == 1:
1791 raise util.Abort(_('there is nothing to merge - '
1798 raise util.Abort(_('there is nothing to merge - '
1792 'use "hg update" instead'))
1799 'use "hg update" instead'))
1793 parent = repo.dirstate.parents()[0]
1800 parent = repo.dirstate.parents()[0]
1794 if parent not in heads:
1801 if parent not in heads:
1795 raise util.Abort(_('working dir not at a head rev - '
1802 raise util.Abort(_('working dir not at a head rev - '
1796 'use "hg update" or merge with an explicit rev'))
1803 'use "hg update" or merge with an explicit rev'))
1797 node = parent == heads[0] and heads[-1] or heads[0]
1804 node = parent == heads[0] and heads[-1] or heads[0]
1798 return hg.merge(repo, node, force=force)
1805 return hg.merge(repo, node, force=force)
1799
1806
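The head-selection logic above only auto-picks a merge target in the unambiguous case: exactly two heads, with the working directory parent being one of them. A compact restatement, with a hypothetical helper name and plain strings for nodes:

def pick_merge_target(heads, parent):
    # Mimic the selection above: only an unambiguous two-head repository
    # lets merge choose the "other" head automatically.
    if len(heads) > 2:
        raise ValueError('repo has %d heads - please merge with an explicit rev'
                         % len(heads))
    if len(heads) == 1:
        raise ValueError('there is nothing to merge - use "hg update" instead')
    if parent not in heads:
        raise ValueError('working dir not at a head rev')
    return heads[-1] if parent == heads[0] else heads[0]

print(pick_merge_target(['a', 'b'], parent='a'))   # b
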
1800 def outgoing(ui, repo, dest=None, **opts):
1807 def outgoing(ui, repo, dest=None, **opts):
1801 """show changesets not found in destination
1808 """show changesets not found in destination
1802
1809
1803 Show changesets not found in the specified destination repository or
1810 Show changesets not found in the specified destination repository or
1804 the default push location. These are the changesets that would be pushed
1811 the default push location. These are the changesets that would be pushed
1805 if a push was requested.
1812 if a push was requested.
1806
1813
1807 See pull for valid destination format details.
1814 See pull for valid destination format details.
1808 """
1815 """
1809 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1816 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1810 setremoteconfig(ui, opts)
1817 setremoteconfig(ui, opts)
1811 revs = None
1818 revs = None
1812 if opts['rev']:
1819 if opts['rev']:
1813 revs = [repo.lookup(rev) for rev in opts['rev']]
1820 revs = [repo.lookup(rev) for rev in opts['rev']]
1814
1821
1815 other = hg.repository(ui, dest)
1822 other = hg.repository(ui, dest)
1816 o = repo.findoutgoing(other, force=opts['force'])
1823 o = repo.findoutgoing(other, force=opts['force'])
1817 if not o:
1824 if not o:
1818 ui.status(_("no changes found\n"))
1825 ui.status(_("no changes found\n"))
1819 return 1
1826 return 1
1820 o = repo.changelog.nodesbetween(o, revs)[0]
1827 o = repo.changelog.nodesbetween(o, revs)[0]
1821 if opts['newest_first']:
1828 if opts['newest_first']:
1822 o.reverse()
1829 o.reverse()
1823 displayer = cmdutil.show_changeset(ui, repo, opts)
1830 displayer = cmdutil.show_changeset(ui, repo, opts)
1824 for n in o:
1831 for n in o:
1825 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1832 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1826 if opts['no_merges'] and len(parents) == 2:
1833 if opts['no_merges'] and len(parents) == 2:
1827 continue
1834 continue
1828 displayer.show(changenode=n)
1835 displayer.show(changenode=n)
1829
1836
1830 def parents(ui, repo, file_=None, **opts):
1837 def parents(ui, repo, file_=None, **opts):
1831 """show the parents of the working dir or revision
1838 """show the parents of the working dir or revision
1832
1839
1833 Print the working directory's parent revisions.
1840 Print the working directory's parent revisions.
1834 """
1841 """
1835 rev = opts.get('rev')
1842 rev = opts.get('rev')
1836 if rev:
1843 if rev:
1837 if file_:
1844 if file_:
1838 ctx = repo.filectx(file_, changeid=rev)
1845 ctx = repo.filectx(file_, changeid=rev)
1839 else:
1846 else:
1840 ctx = repo.changectx(rev)
1847 ctx = repo.changectx(rev)
1841 p = [cp.node() for cp in ctx.parents()]
1848 p = [cp.node() for cp in ctx.parents()]
1842 else:
1849 else:
1843 p = repo.dirstate.parents()
1850 p = repo.dirstate.parents()
1844
1851
1845 displayer = cmdutil.show_changeset(ui, repo, opts)
1852 displayer = cmdutil.show_changeset(ui, repo, opts)
1846 for n in p:
1853 for n in p:
1847 if n != nullid:
1854 if n != nullid:
1848 displayer.show(changenode=n)
1855 displayer.show(changenode=n)
1849
1856
1850 def paths(ui, repo, search=None):
1857 def paths(ui, repo, search=None):
1851 """show definition of symbolic path names
1858 """show definition of symbolic path names
1852
1859
1853 Show definition of symbolic path name NAME. If no name is given, show
1860 Show definition of symbolic path name NAME. If no name is given, show
1854 definition of available names.
1861 definition of available names.
1855
1862
1856 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1863 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1857 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1864 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1858 """
1865 """
1859 if search:
1866 if search:
1860 for name, path in ui.configitems("paths"):
1867 for name, path in ui.configitems("paths"):
1861 if name == search:
1868 if name == search:
1862 ui.write("%s\n" % path)
1869 ui.write("%s\n" % path)
1863 return
1870 return
1864 ui.warn(_("not found!\n"))
1871 ui.warn(_("not found!\n"))
1865 return 1
1872 return 1
1866 else:
1873 else:
1867 for name, path in ui.configitems("paths"):
1874 for name, path in ui.configitems("paths"):
1868 ui.write("%s = %s\n" % (name, path))
1875 ui.write("%s = %s\n" % (name, path))
1869
1876
1870 def postincoming(ui, repo, modheads, optupdate):
1877 def postincoming(ui, repo, modheads, optupdate):
1871 if modheads == 0:
1878 if modheads == 0:
1872 return
1879 return
1873 if optupdate:
1880 if optupdate:
1874 if modheads == 1:
1881 if modheads == 1:
1875 return hg.update(repo, repo.changelog.tip()) # update
1882 return hg.update(repo, repo.changelog.tip()) # update
1876 else:
1883 else:
1877 ui.status(_("not updating, since new heads added\n"))
1884 ui.status(_("not updating, since new heads added\n"))
1878 if modheads > 1:
1885 if modheads > 1:
1879 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1886 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1880 else:
1887 else:
1881 ui.status(_("(run 'hg update' to get a working copy)\n"))
1888 ui.status(_("(run 'hg update' to get a working copy)\n"))
1882
1889
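postincoming turns the number of heads added by a pull or unbundle into either an automatic update or a hint. Restated as a pure function for clarity (illustrative only; the real code calls hg.update and ui.status directly):

def after_pull(modheads, want_update):
    # modheads: number of new heads the pull added; 0 means nothing came in.
    messages = []
    if modheads == 0:
        return messages
    if want_update:
        if modheads == 1:
            return ['update working directory to tip']
        messages.append('not updating, since new heads added')
    if modheads > 1:
        messages.append("run 'hg heads' to see heads, 'hg merge' to merge")
    else:
        messages.append("run 'hg update' to get a working copy")
    return messages

print(after_pull(2, want_update=True))
# ['not updating, since new heads added',
#  "run 'hg heads' to see heads, 'hg merge' to merge"]
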
1883 def pull(ui, repo, source="default", **opts):
1890 def pull(ui, repo, source="default", **opts):
1884 """pull changes from the specified source
1891 """pull changes from the specified source
1885
1892
1886 Pull changes from a remote repository to a local one.
1893 Pull changes from a remote repository to a local one.
1887
1894
1888 This finds all changes from the repository at the specified path
1895 This finds all changes from the repository at the specified path
1889 or URL and adds them to the local repository. By default, this
1896 or URL and adds them to the local repository. By default, this
1890 does not update the copy of the project in the working directory.
1897 does not update the copy of the project in the working directory.
1891
1898
1892 Valid URLs are of the form:
1899 Valid URLs are of the form:
1893
1900
1894 local/filesystem/path (or file://local/filesystem/path)
1901 local/filesystem/path (or file://local/filesystem/path)
1895 http://[user@]host[:port]/[path]
1902 http://[user@]host[:port]/[path]
1896 https://[user@]host[:port]/[path]
1903 https://[user@]host[:port]/[path]
1897 ssh://[user@]host[:port]/[path]
1904 ssh://[user@]host[:port]/[path]
1898 static-http://host[:port]/[path]
1905 static-http://host[:port]/[path]
1899
1906
1900 Paths in the local filesystem can either point to Mercurial
1907 Paths in the local filesystem can either point to Mercurial
1901 repositories or to bundle files (as created by 'hg bundle' or
1908 repositories or to bundle files (as created by 'hg bundle' or
1902 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1909 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1903 allows access to a Mercurial repository where you simply use a web
1910 allows access to a Mercurial repository where you simply use a web
1904 server to publish the .hg directory as static content.
1911 server to publish the .hg directory as static content.
1905
1912
1906 Some notes about using SSH with Mercurial:
1913 Some notes about using SSH with Mercurial:
1907 - SSH requires an accessible shell account on the destination machine
1914 - SSH requires an accessible shell account on the destination machine
1908 and a copy of hg in the remote path, or specified with the remotecmd option.
1915 and a copy of hg in the remote path, or specified with the remotecmd option.
1909 - path is relative to the remote user's home directory by default.
1916 - path is relative to the remote user's home directory by default.
1910 Use an extra slash at the start of a path to specify an absolute path:
1917 Use an extra slash at the start of a path to specify an absolute path:
1911 ssh://example.com//tmp/repository
1918 ssh://example.com//tmp/repository
1912 - Mercurial doesn't use its own compression via SSH; the right thing
1919 - Mercurial doesn't use its own compression via SSH; the right thing
1913 to do is to configure it in your ~/.ssh/config, e.g.:
1920 to do is to configure it in your ~/.ssh/config, e.g.:
1914 Host *.mylocalnetwork.example.com
1921 Host *.mylocalnetwork.example.com
1915 Compression no
1922 Compression no
1916 Host *
1923 Host *
1917 Compression yes
1924 Compression yes
1918 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1925 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1919 with the --ssh command line option.
1926 with the --ssh command line option.
1920 """
1927 """
1921 source = ui.expandpath(source)
1928 source = ui.expandpath(source)
1922 setremoteconfig(ui, opts)
1929 setremoteconfig(ui, opts)
1923
1930
1924 other = hg.repository(ui, source)
1931 other = hg.repository(ui, source)
1925 ui.status(_('pulling from %s\n') % (source))
1932 ui.status(_('pulling from %s\n') % (source))
1926 revs = None
1933 revs = None
1927 if opts['rev']:
1934 if opts['rev']:
1928 if 'lookup' in other.capabilities:
1935 if 'lookup' in other.capabilities:
1929 revs = [other.lookup(rev) for rev in opts['rev']]
1936 revs = [other.lookup(rev) for rev in opts['rev']]
1930 else:
1937 else:
1931 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1938 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1932 raise util.Abort(error)
1939 raise util.Abort(error)
1933 modheads = repo.pull(other, heads=revs, force=opts['force'])
1940 modheads = repo.pull(other, heads=revs, force=opts['force'])
1934 return postincoming(ui, repo, modheads, opts['update'])
1941 return postincoming(ui, repo, modheads, opts['update'])
1935
1942
1936 def push(ui, repo, dest=None, **opts):
1943 def push(ui, repo, dest=None, **opts):
1937 """push changes to the specified destination
1944 """push changes to the specified destination
1938
1945
1939 Push changes from the local repository to the given destination.
1946 Push changes from the local repository to the given destination.
1940
1947
1941 This is the symmetrical operation for pull. It helps to move
1948 This is the symmetrical operation for pull. It helps to move
1942 changes from the current repository to a different one. If the
1949 changes from the current repository to a different one. If the
1943 destination is local this is identical to a pull in that directory
1950 destination is local this is identical to a pull in that directory
1944 from the current one.
1951 from the current one.
1945
1952
1946 By default, push will refuse to run if it detects the result would
1953 By default, push will refuse to run if it detects the result would
1947 increase the number of remote heads. This generally indicates that
1954 increase the number of remote heads. This generally indicates that
1948 the client has forgotten to sync and merge before pushing.
1955 the client has forgotten to sync and merge before pushing.
1949
1956
1950 Valid URLs are of the form:
1957 Valid URLs are of the form:
1951
1958
1952 local/filesystem/path (or file://local/filesystem/path)
1959 local/filesystem/path (or file://local/filesystem/path)
1953 ssh://[user@]host[:port]/[path]
1960 ssh://[user@]host[:port]/[path]
1954 http://[user@]host[:port]/[path]
1961 http://[user@]host[:port]/[path]
1955 https://[user@]host[:port]/[path]
1962 https://[user@]host[:port]/[path]
1956
1963
1957 Look at the help text for the pull command for important details
1964 Look at the help text for the pull command for important details
1958 about ssh:// URLs.
1965 about ssh:// URLs.
1959
1966
1960 Pushing to http:// and https:// URLs is only possible if this
1967 Pushing to http:// and https:// URLs is only possible if this
1961 feature is explicitly enabled on the remote Mercurial server.
1968 feature is explicitly enabled on the remote Mercurial server.
1962 """
1969 """
1963 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1964 setremoteconfig(ui, opts)
1971 setremoteconfig(ui, opts)
1965
1972
1966 other = hg.repository(ui, dest)
1973 other = hg.repository(ui, dest)
1967 ui.status('pushing to %s\n' % (dest))
1974 ui.status('pushing to %s\n' % (dest))
1968 revs = None
1975 revs = None
1969 if opts['rev']:
1976 if opts['rev']:
1970 revs = [repo.lookup(rev) for rev in opts['rev']]
1977 revs = [repo.lookup(rev) for rev in opts['rev']]
1971 r = repo.push(other, opts['force'], revs=revs)
1978 r = repo.push(other, opts['force'], revs=revs)
1972 return r == 0
1979 return r == 0
1973
1980
1974 def rawcommit(ui, repo, *pats, **opts):
1981 def rawcommit(ui, repo, *pats, **opts):
1975 """raw commit interface (DEPRECATED)
1982 """raw commit interface (DEPRECATED)
1976
1983
1977 (DEPRECATED)
1984 (DEPRECATED)
1978 Low-level commit, for use in helper scripts.
1985 Low-level commit, for use in helper scripts.
1979
1986
1980 This command is not intended to be used by normal users, as it is
1987 This command is not intended to be used by normal users, as it is
1981 primarily useful for importing from other SCMs.
1988 primarily useful for importing from other SCMs.
1982
1989
1983 This command is now deprecated and will be removed in a future
1990 This command is now deprecated and will be removed in a future
1984 release, please use debugsetparents and commit instead.
1991 release, please use debugsetparents and commit instead.
1985 """
1992 """
1986
1993
1987 ui.warn(_("(the rawcommit command is deprecated)\n"))
1994 ui.warn(_("(the rawcommit command is deprecated)\n"))
1988
1995
1989 message = logmessage(opts)
1996 message = logmessage(opts)
1990
1997
1991 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1998 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1992 if opts['files']:
1999 if opts['files']:
1993 files += open(opts['files']).read().splitlines()
2000 files += open(opts['files']).read().splitlines()
1994
2001
1995 parents = [repo.lookup(p) for p in opts['parent']]
2002 parents = [repo.lookup(p) for p in opts['parent']]
1996
2003
1997 try:
2004 try:
1998 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2005 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1999 except ValueError, inst:
2006 except ValueError, inst:
2000 raise util.Abort(str(inst))
2007 raise util.Abort(str(inst))
2001
2008
2009 def recover(ui, repo):
2010 """roll back an interrupted transaction
2011
2012 Recover from an interrupted commit or pull.
2013
2014 This command tries to fix the repository status after an interrupted
2015 operation. It should only be necessary when Mercurial suggests it.
2016 """
2017 if repo.recover():
2018 return hg.verify(repo)
2019 return 1
2020
2021 def remove(ui, repo, *pats, **opts):
2022 """remove the specified files on the next commit
2023
2024 Schedule the indicated files for removal from the repository.
2025
2026 This only removes files from the current branch, not from the
2027 entire project history. If the files still exist in the working
2028 directory, they will be deleted from it. If invoked with --after,
2029 files that have been manually deleted are marked as removed.
2030
2031 This command schedules the files to be removed at the next commit.
2032 To undo a remove before that, see hg revert.
2033
2034 Modified files and added files are not removed by default. To
2035 remove them, use the -f/--force option.
2036 """
2037 names = []
2038 if not opts['after'] and not pats:
2039 raise util.Abort(_('no files specified'))
2040 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2041 exact = dict.fromkeys(files)
2042 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2043 modified, added, removed, deleted, unknown = mardu
2044 remove, forget = [], []
2045 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2046 reason = None
2047 if abs not in deleted and opts['after']:
2048 reason = _('is still present')
2049 elif abs in modified and not opts['force']:
2050 reason = _('is modified (use -f to force removal)')
2051 elif abs in added:
2052 if opts['force']:
2053 forget.append(abs)
2054 continue
2055 reason = _('has been marked for add (use -f to force removal)')
2056 elif abs in unknown:
2057 reason = _('is not managed')
2058 elif abs in removed:
2059 continue
2060 if reason:
2061 if exact:
2062 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2063 else:
2064 if ui.verbose or not exact:
2065 ui.status(_('removing %s\n') % rel)
2066 remove.append(abs)
2067 repo.forget(forget)
2068 repo.remove(remove, unlink=not opts['after'])
2069
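# Editorial aside (not part of this changeset): the walk above decides, per
# file, whether removal is allowed, based on its dirstate status and the
# --after/--force flags.  A minimal standalone sketch of that decision logic,
# collapsing the separate status dicts into a single hypothetical 'status'
# string for illustration:
def _removal_action(status, after, force):
    # returns (action, reason); action is 'remove', 'forget', 'skip' or 'refuse'
    if status != 'deleted' and after:
        return ('refuse', 'is still present')
    if status == 'modified' and not force:
        return ('refuse', 'is modified (use -f to force removal)')
    if status == 'added':
        if force:
            return ('forget', None)
        return ('refuse', 'has been marked for add (use -f to force removal)')
    if status == 'unknown':
        return ('refuse', 'is not managed')
    if status == 'removed':
        return ('skip', None)
    return ('remove', None)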
2070 def rename(ui, repo, *pats, **opts):
2071 """rename files; equivalent of copy + remove
2072
2073 Mark dest as copies of sources; mark sources for deletion. If
2074 dest is a directory, copies are put in that directory. If dest is
2075 a file, there can only be one source.
2076
2077 By default, this command copies the contents of files as they
2078 stand in the working directory. If invoked with --after, the
2079 operation is recorded, but no copying is performed.
2080
2081 This command takes effect in the next commit. To undo a rename
2082 before that, see hg revert.
2083 """
2084 wlock = repo.wlock(0)
2085 errs, copied = docopy(ui, repo, pats, opts, wlock)
2086 names = []
2087 for abs, rel, exact in copied:
2088 if ui.verbose or not exact:
2089 ui.status(_('removing %s\n') % rel)
2090 names.append(abs)
2091 if not opts.get('dry_run'):
2092 repo.remove(names, True, wlock)
2093 return errs
2094
2095 def revert(ui, repo, *pats, **opts):
2096 """revert files or dirs to their states as of some revision
2097
2098 With no revision specified, revert the named files or directories
2099 to the contents they had in the parent of the working directory.
2100 This restores the contents of the affected files to an unmodified
2101 state and unschedules adds, removes, copies, and renames. If the
2102 working directory has two parents, you must explicitly specify the
2103 revision to revert to.
2104
2105 Modified files are saved with a .orig suffix before reverting.
2106 To disable these backups, use --no-backup.
2107
2108 Using the -r option, revert the given files or directories to their
2109 contents as of a specific revision. This can be helpful to "roll
2110 back" some or all of a change that should not have been committed.
2111
2112 Revert modifies the working directory. It does not commit any
2113 changes, or change the parent of the working directory. If you
2114 revert to a revision other than the parent of the working
2115 directory, the reverted files will thus appear modified
2116 afterwards.
2117
2118 If a file has been deleted, it is recreated. If the executable
2119 mode of a file was changed, it is reset.
2120
2121 If names are given, all files matching the names are reverted.
2122
2123 If no arguments are given, no files are reverted.
2124 """
2125
2126 if opts["date"]:
2127 if opts["rev"]:
2128 raise util.Abort(_("you can't specify a revision and a date"))
2129 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2130
2131 if not pats and not opts['all']:
2132 raise util.Abort(_('no files or directories specified; '
2133 'use --all to revert the whole repo'))
2134
2135 parent, p2 = repo.dirstate.parents()
2136 if not opts['rev'] and p2 != nullid:
2137 raise util.Abort(_('uncommitted merge - please provide a '
2138 'specific revision'))
2139 ctx = repo.changectx(opts['rev'])
2140 node = ctx.node()
2141 mf = ctx.manifest()
2142 if node == parent:
2143 pmf = mf
2144 else:
2145 pmf = None
2146
2147 wlock = repo.wlock()
2148
2149 # need all matching names in dirstate and manifest of target rev,
2150 # so have to walk both. do not print errors if files exist in one
2151 # but not other.
2152
2153 names = {}
2154 target_only = {}
2155
2156 # walk dirstate.
2157
2158 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2159 badmatch=mf.has_key):
2160 names[abs] = (rel, exact)
2161 if src == 'b':
2162 target_only[abs] = True
2163
2164 # walk target manifest.
2165
2166 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2167 badmatch=names.has_key):
2168 if abs in names: continue
2169 names[abs] = (rel, exact)
2170 target_only[abs] = True
2171
2172 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2173 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2174
2175 revert = ([], _('reverting %s\n'))
2176 add = ([], _('adding %s\n'))
2177 remove = ([], _('removing %s\n'))
2178 forget = ([], _('forgetting %s\n'))
2179 undelete = ([], _('undeleting %s\n'))
2180 update = {}
2181
2182 disptable = (
2183 # dispatch table:
2184 # file state
2185 # action if in target manifest
2186 # action if not in target manifest
2187 # make backup if in target manifest
2188 # make backup if not in target manifest
2189 (modified, revert, remove, True, True),
2190 (added, revert, forget, True, False),
2191 (removed, undelete, None, False, False),
2192 (deleted, revert, remove, False, False),
2193 (unknown, add, None, True, False),
2194 (target_only, add, None, False, False),
2195 )
2196
2197 entries = names.items()
2198 entries.sort()
2199
2200 for abs, (rel, exact) in entries:
2201 mfentry = mf.get(abs)
2202 def handle(xlist, dobackup):
2203 xlist[0].append(abs)
2204 update[abs] = 1
2205 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2206 bakname = "%s.orig" % rel
2207 ui.note(_('saving current version of %s as %s\n') %
2208 (rel, bakname))
2209 if not opts.get('dry_run'):
2210 util.copyfile(rel, bakname)
2211 if ui.verbose or not exact:
2212 ui.status(xlist[1] % rel)
2213 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2214 if abs not in table: continue
2215 # file has changed in dirstate
2216 if mfentry:
2217 handle(hitlist, backuphit)
2218 elif misslist is not None:
2219 handle(misslist, backupmiss)
2220 else:
2221 if exact: ui.warn(_('file not managed: %s\n') % rel)
2222 break
2223 else:
2224 # file has not changed in dirstate
2225 if node == parent:
2226 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2227 continue
2228 if pmf is None:
2229 # only need parent manifest in this unlikely case,
2230 # so do not read by default
2231 pmf = repo.changectx(parent).manifest()
2232 if abs in pmf:
2233 if mfentry:
2234 # if version of file is same in parent and target
2235 # manifests, do nothing
2236 if pmf[abs] != mfentry:
2237 handle(revert, False)
2238 else:
2239 handle(remove, False)
2240
2241 if not opts.get('dry_run'):
2242 repo.dirstate.forget(forget[0])
2243 r = hg.revert(repo, node, update.has_key, wlock)
2244 repo.dirstate.update(add[0], 'a')
2245 repo.dirstate.update(undelete[0], 'n')
2246 repo.dirstate.update(remove[0], 'r')
2247 return r
2248
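# Editorial aside (not part of this changeset): each disptable row above maps a
# dirstate status bucket to (action if the file is in the target manifest,
# action if it is not, backup-on-hit, backup-on-miss).  A standalone sketch of
# that lookup, using plain strings in place of the action lists; a None action
# corresponds to the 'file not managed' warning in the real code:
DISPATCH = {
    'modified':    ('revert',   'remove', True,  True),
    'added':       ('revert',   'forget', True,  False),
    'removed':     ('undelete', None,     False, False),
    'deleted':     ('revert',   'remove', False, False),
    'unknown':     ('add',      None,     True,  False),
    'target_only': ('add',      None,     False, False),
}

def _revert_action(status, in_target_manifest):
    hit, miss, backup_hit, backup_miss = DISPATCH[status]
    if in_target_manifest:
        return hit, backup_hit
    return miss, backup_miss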
2249 def rollback(ui, repo):
2250 """roll back the last transaction in this repository
2251
2252 Roll back the last transaction in this repository, restoring the
2253 project to its state prior to the transaction.
2254
2255 Transactions are used to encapsulate the effects of all commands
2256 that create new changesets or propagate existing changesets into a
2257 repository. For example, the following commands are transactional,
2258 and their effects can be rolled back:
2259
2260 commit
2261 import
2262 pull
2263 push (with this repository as destination)
2264 unbundle
2265
2266 This command should be used with care. There is only one level of
2267 rollback, and there is no way to undo a rollback.
2268
2269 This command is not intended for use on public repositories. Once
2270 changes are visible for pull by other users, rolling a transaction
2271 back locally is ineffective (someone else may already have pulled
2272 the changes). Furthermore, a race is possible with readers of the
2273 repository; for example an in-progress pull from the repository
2274 may fail if a rollback is performed.
2275 """
2276 repo.rollback()
2277
2278 def root(ui, repo):
2279 """print the root (top) of the current working dir
2280
2281 Print the root directory of the current repository.
2282 """
2283 ui.write(repo.root + "\n")
2284
2285 def serve(ui, repo, **opts):
2286 """export the repository via HTTP
2287
2288 Start a local HTTP repository browser and pull server.
2289
2290 By default, the server logs accesses to stdout and errors to
2291 stderr. Use the "-A" and "-E" options to log to files.
2292 """
2293
2294 if opts["stdio"]:
2295 if repo is None:
2296 raise hg.RepoError(_("There is no Mercurial repository here"
2297 " (.hg not found)"))
2298 s = sshserver.sshserver(ui, repo)
2299 s.serve_forever()
2300
2301 optlist = ("name templates style address port ipv6"
2302 " accesslog errorlog webdir_conf")
2303 for o in optlist.split():
2304 if opts[o]:
2305 ui.setconfig("web", o, str(opts[o]))
2306
2307 if repo is None and not ui.config("web", "webdir_conf"):
2308 raise hg.RepoError(_("There is no Mercurial repository here"
2309 " (.hg not found)"))
2310
2311 if opts['daemon'] and not opts['daemon_pipefds']:
2312 rfd, wfd = os.pipe()
2313 args = sys.argv[:]
2314 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2315 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2316 args[0], args)
2317 os.close(wfd)
2318 os.read(rfd, 1)
2319 os._exit(0)
2320
2321 httpd = hgweb.server.create_server(ui, repo)
2322
2323 if ui.verbose:
2324 if httpd.port != 80:
2325 ui.status(_('listening at http://%s:%d/\n') %
2326 (httpd.addr, httpd.port))
2327 else:
2328 ui.status(_('listening at http://%s/\n') % httpd.addr)
2329
2330 if opts['pid_file']:
2331 fp = open(opts['pid_file'], 'w')
2332 fp.write(str(os.getpid()) + '\n')
2333 fp.close()
2334
2335 if opts['daemon_pipefds']:
2336 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2337 os.close(rfd)
2338 os.write(wfd, 'y')
2339 os.close(wfd)
2340 sys.stdout.flush()
2341 sys.stderr.flush()
2342 fd = os.open(util.nulldev, os.O_RDWR)
2343 if fd != 0: os.dup2(fd, 0)
2344 if fd != 1: os.dup2(fd, 1)
2345 if fd != 2: os.dup2(fd, 2)
2346 if fd not in (0, 1, 2): os.close(fd)
2347
2348 httpd.serve_forever()
2349
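# Editorial aside (not part of this changeset): --daemon works as a handshake
# over a pipe: the parent re-execs itself with --daemon-pipefds and blocks on
# the read end until the child, having created the server, writes one byte.
# A minimal standalone sketch of the same handshake, assuming a POSIX fork
# instead of the spawnvp re-exec used above (Python 2 style, like the module):
import os

def _daemonize_and_wait(start_server):
    rfd, wfd = os.pipe()
    if os.fork():                 # parent: wait until the child is ready
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)
    os.close(rfd)
    start_server()                # child: bind the server socket first ...
    os.write(wfd, 'y')            # ... then release the waiting parent
    os.close(wfd)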
2350 def status(ui, repo, *pats, **opts):
2351 """show changed files in the working directory
2352
2353 Show status of files in the repository. If names are given, only
2354 files that match are shown. Files that are clean or ignored are
2355 not listed unless -c (clean), -i (ignored) or -A is given.
2356
2357 NOTE: status may appear to disagree with diff if permissions have
2358 changed or a merge has occurred. The standard diff format does not
2359 report permission changes and diff only reports changes relative
2360 to one merge parent.
2361
2362 If one revision is given, it is used as the base revision.
2363 If two revisions are given, the difference between them is shown.
2364
2365 The codes used to show the status of files are:
2366 M = modified
2367 A = added
2368 R = removed
2369 C = clean
2370 ! = deleted, but still tracked
2371 ? = not tracked
2372 I = ignored (not shown by default)
2373 = the previous added file was copied from here
2374 """
2375
2376 all = opts['all']
2377 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2378
2379 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2380 cwd = (pats and repo.getcwd()) or ''
2381 modified, added, removed, deleted, unknown, ignored, clean = [
2382 n for n in repo.status(node1=node1, node2=node2, files=files,
2383 match=matchfn,
2384 list_ignored=all or opts['ignored'],
2385 list_clean=all or opts['clean'])]
2386
2387 changetypes = (('modified', 'M', modified),
2388 ('added', 'A', added),
2389 ('removed', 'R', removed),
2390 ('deleted', '!', deleted),
2391 ('unknown', '?', unknown),
2392 ('ignored', 'I', ignored))
2393
2394 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2395
2396 end = opts['print0'] and '\0' or '\n'
2397
2398 for opt, char, changes in ([ct for ct in explicit_changetypes
2399 if all or opts[ct[0]]]
2400 or changetypes):
2401 if opts['no_status']:
2402 format = "%%s%s" % end
2403 else:
2404 format = "%s %%s%s" % (char, end)
2405
2406 for f in changes:
2407 ui.write(format % util.pathto(cwd, f))
2408 if ((all or opts.get('copies')) and not opts.get('no_status')):
2409 copied = repo.dirstate.copied(f)
2410 if copied:
2411 ui.write(' %s%s' % (util.pathto(cwd, copied), end))
2412
2413 def tag(ui, repo, name, rev_=None, **opts):
2414 """add a tag for the current or given revision
2415
2416 Name a particular revision using <name>.
2417
2418 Tags are used to name particular revisions of the repository and are
2419 very useful to compare different revisions, to go back to significant
2420 earlier versions or to mark branch points as releases, etc.
2421
2422 If no revision is given, the parent of the working directory is used,
2423 or tip if no revision is checked out.
2424
2425 To facilitate version control, distribution, and merging of tags,
2426 they are stored as a file named ".hgtags" which is managed
2427 similarly to other project files and can be hand-edited if
2428 necessary. The file '.hg/localtags' is used for local tags (not
2429 shared among repositories).
2430 """
2431 if name in ['tip', '.', 'null']:
2432 raise util.Abort(_("the name '%s' is reserved") % name)
2433 if rev_ is not None:
2434 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2435 "please use 'hg tag [-r REV] NAME' instead\n"))
2436 if opts['rev']:
2437 raise util.Abort(_("use only one form to specify the revision"))
2438 if opts['rev']:
2439 rev_ = opts['rev']
2440 if not rev_ and repo.dirstate.parents()[1] != nullid:
2441 raise util.Abort(_('uncommitted merge - please provide a '
2442 'specific revision'))
2443 r = repo.changectx(rev_).node()
2444
2445 message = opts['message']
2446 if not message:
2447 message = _('Added tag %s for changeset %s') % (name, short(r))
2448
2449 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2450
2451 def tags(ui, repo):
2452 """list repository tags
2453
2454 List the repository tags.
2455
2456 This lists both regular and local tags.
2457 """
2458
2459 l = repo.tagslist()
2460 l.reverse()
2461 hexfunc = ui.debugflag and hex or short
2462 for t, n in l:
2463 try:
2464 hn = hexfunc(n)
2465 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2466 except revlog.LookupError:
2467 r = " ?:%s" % hn
2468 if ui.quiet:
2469 ui.write("%s\n" % t)
2470 else:
2471 t = util.localsub(t, 30)
2472 t += " " * (30 - util.locallen(t))
2473 ui.write("%s %s\n" % (t, r))
2474
2475 def tip(ui, repo, **opts):
2476 """show the tip revision
2477
2478 Show the tip revision.
2479 """
2480 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2481
2482 def unbundle(ui, repo, fname, **opts):
2483 """apply a changegroup file
2484
2485 Apply a compressed changegroup file generated by the bundle
2486 command.
2487 """
2481 gen = changegroup.readbundle(urllib.urlopen(fname), fname)
2488 if os.path.exists(fname):
2489 f = open(fname)
2490 else:
2491 f = urllib.urlopen(fname)
2492 gen = changegroup.readbundle(f, fname)
2493 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2494 return postincoming(ui, repo, modheads, opts['update'])
2495
2496 def update(ui, repo, node=None, clean=False, date=None):
2497 """update working directory
2498
2499 Update the working directory to the specified revision.
2500
2501 If there are no outstanding changes in the working directory and
2502 there is a linear relationship between the current version and the
2503 requested version, the result is the requested version.
2504
2505 To merge the working directory with another revision, use the
2506 merge command.
2507
2508 By default, update will refuse to run if doing so would require
2509 discarding local changes.
2510 """
2511 if date:
2512 if node:
2513 raise util.Abort(_("you can't specify a revision and a date"))
2514 node = cmdutil.finddate(ui, repo, date)
2515
2516 if clean:
2517 return hg.clean(repo, node)
2518 else:
2519 return hg.update(repo, node)
2520
2521 def verify(ui, repo):
2522 """verify the integrity of the repository
2523
2524 Verify the integrity of the current repository.
2525
2526 This will perform an extensive check of the repository's
2527 integrity, validating the hashes and checksums of each entry in
2528 the changelog, manifest, and tracked files, as well as the
2529 integrity of their crosslinks and indices.
2530 """
2531 return hg.verify(repo)
2532
2533 def version_(ui):
2534 """output version and copyright information"""
2535 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2536 % version.get_version())
2537 ui.status(_(
2538 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2539 "This is free software; see the source for copying conditions. "
2540 "There is NO\nwarranty; "
2541 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2542 ))
2543
2544 # Command options and aliases are listed here, alphabetically
2545
2546 globalopts = [
2547 ('R', 'repository', '',
2548 _('repository root directory or symbolic path name')),
2549 ('', 'cwd', '', _('change working directory')),
2550 ('y', 'noninteractive', None,
2551 _('do not prompt, assume \'yes\' for any required answers')),
2552 ('q', 'quiet', None, _('suppress output')),
2553 ('v', 'verbose', None, _('enable additional output')),
2554 ('', 'config', [], _('set/override config option')),
2555 ('', 'debug', None, _('enable debugging output')),
2556 ('', 'debugger', None, _('start debugger')),
2557 ('', 'encoding', util._encoding, _('set the charset encoding')),
2558 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2559 ('', 'lsprof', None, _('print improved command execution profile')),
2560 ('', 'traceback', None, _('print traceback on exception')),
2561 ('', 'time', None, _('time how long the command takes')),
2562 ('', 'profile', None, _('print command execution profile')),
2563 ('', 'version', None, _('output version information and exit')),
2564 ('h', 'help', None, _('display help and exit')),
2565 ]
2566
2567 dryrunopts = [('n', 'dry-run', None,
2568 _('do not perform actions, just print output'))]
2569
2570 remoteopts = [
2571 ('e', 'ssh', '', _('specify ssh command to use')),
2572 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2573 ]
2574
2575 walkopts = [
2576 ('I', 'include', [], _('include names matching the given patterns')),
2577 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2578 ]
2579
2580 commitopts = [
2581 ('m', 'message', '', _('use <text> as commit message')),
2582 ('l', 'logfile', '', _('read commit message from <file>')),
2583 ]
2584
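# Editorial aside (not part of this changeset): every entry in the command
# table that follows is (function, option list, synopsis).  The shared option
# lists defined above (walkopts, remoteopts, dryrunopts, commitopts) are simply
# concatenated onto a command's own options; '|' in a key separates aliases and
# a leading '^' marks commands listed in the short help.  Shape of one
# hypothetical entry, for illustration only:
_example_table = {
    "^example|ex": (lambda ui, repo: None,                    # command function
                    [('n', 'dry-run', None, 'no-op switch')], # its options
                    'hg example [OPTION]...'),                # synopsis
}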
2585 table = {
2586 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2587 "addremove":
2588 (addremove,
2589 [('s', 'similarity', '',
2590 _('guess renamed files by similarity (0<=s<=100)')),
2591 ] + walkopts + dryrunopts,
2592 _('hg addremove [OPTION]... [FILE]...')),
2593 "^annotate":
2594 (annotate,
2595 [('r', 'rev', '', _('annotate the specified revision')),
2596 ('f', 'follow', None, _('follow file copies and renames')),
2597 ('a', 'text', None, _('treat all files as text')),
2598 ('u', 'user', None, _('list the author')),
2599 ('d', 'date', None, _('list the date')),
2600 ('n', 'number', None, _('list the revision number (default)')),
2601 ('c', 'changeset', None, _('list the changeset')),
2602 ] + walkopts,
2603 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2604 "archive":
2605 (archive,
2606 [('', 'no-decode', None, _('do not pass files through decoders')),
2607 ('p', 'prefix', '', _('directory prefix for files in archive')),
2608 ('r', 'rev', '', _('revision to distribute')),
2609 ('t', 'type', '', _('type of distribution to create')),
2610 ] + walkopts,
2611 _('hg archive [OPTION]... DEST')),
2612 "backout":
2613 (backout,
2614 [('', 'merge', None,
2615 _('merge with old dirstate parent after backout')),
2616 ('d', 'date', '', _('record datecode as commit date')),
2617 ('', 'parent', '', _('parent to choose when backing out merge')),
2618 ('u', 'user', '', _('record user as committer')),
2619 ] + walkopts + commitopts,
2620 _('hg backout [OPTION]... REV')),
2621 "branch": (branch, [], _('hg branch [NAME]')),
2622 "branches": (branches, [], _('hg branches')),
2623 "bundle":
2624 (bundle,
2625 [('f', 'force', None,
2626 _('run even when remote repository is unrelated')),
2627 ('r', 'rev', [],
2628 _('a changeset you would like to bundle')),
2629 ('', 'base', [],
2630 _('a base changeset to specify instead of a destination')),
2631 ] + remoteopts,
2632 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2633 "cat":
2634 (cat,
2635 [('o', 'output', '', _('print output to file with formatted name')),
2636 ('r', 'rev', '', _('print the given revision')),
2637 ] + walkopts,
2638 _('hg cat [OPTION]... FILE...')),
2639 "^clone":
2640 (clone,
2641 [('U', 'noupdate', None, _('do not update the new working directory')),
2642 ('r', 'rev', [],
2643 _('a changeset you would like to have after cloning')),
2644 ('', 'pull', None, _('use pull protocol to copy metadata')),
2645 ('', 'uncompressed', None,
2646 _('use uncompressed transfer (fast over LAN)')),
2647 ] + remoteopts,
2648 _('hg clone [OPTION]... SOURCE [DEST]')),
2649 "^commit|ci":
2650 (commit,
2651 [('A', 'addremove', None,
2652 _('mark new/missing files as added/removed before committing')),
2653 ('d', 'date', '', _('record datecode as commit date')),
2654 ('u', 'user', '', _('record user as committer')),
2655 ] + walkopts + commitopts,
2656 _('hg commit [OPTION]... [FILE]...')),
2657 "copy|cp":
2658 (copy,
2659 [('A', 'after', None, _('record a copy that has already occurred')),
2660 ('f', 'force', None,
2661 _('forcibly copy over an existing managed file')),
2662 ] + walkopts + dryrunopts,
2663 _('hg copy [OPTION]... [SOURCE]... DEST')),
2664 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2665 "debugcomplete":
2666 (debugcomplete,
2667 [('o', 'options', None, _('show the command options'))],
2668 _('debugcomplete [-o] CMD')),
2669 "debuginstall": (debuginstall, [], _('debuginstall')),
2670 "debugrebuildstate":
2671 (debugrebuildstate,
2672 [('r', 'rev', '', _('revision to rebuild to'))],
2673 _('debugrebuildstate [-r REV] [REV]')),
2674 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2675 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2676 "debugstate": (debugstate, [], _('debugstate')),
2677 "debugdate":
2678 (debugdate,
2679 [('e', 'extended', None, _('try extended date formats'))],
2680 _('debugdate [-e] DATE [RANGE]')),
2681 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2682 "debugindex": (debugindex, [], _('debugindex FILE')),
2683 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2684 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2685 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2686 "^diff":
2687 (diff,
2688 [('r', 'rev', [], _('revision')),
2689 ('a', 'text', None, _('treat all files as text')),
2690 ('p', 'show-function', None,
2691 _('show which function each change is in')),
2692 ('g', 'git', None, _('use git extended diff format')),
2693 ('', 'nodates', None, _("don't include dates in diff headers")),
2694 ('w', 'ignore-all-space', None,
2695 _('ignore white space when comparing lines')),
2696 ('b', 'ignore-space-change', None,
2697 _('ignore changes in the amount of white space')),
2698 ('B', 'ignore-blank-lines', None,
2699 _('ignore changes whose lines are all blank')),
2700 ] + walkopts,
2701 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2702 "^export":
2703 (export,
2704 [('o', 'output', '', _('print output to file with formatted name')),
2705 ('a', 'text', None, _('treat all files as text')),
2706 ('g', 'git', None, _('use git extended diff format')),
2707 ('', 'nodates', None, _("don't include dates in diff headers")),
2708 ('', 'switch-parent', None, _('diff against the second parent'))],
2709 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2710 "grep":
2711 (grep,
2712 [('0', 'print0', None, _('end fields with NUL')),
2713 ('', 'all', None, _('print all revisions that match')),
2714 ('f', 'follow', None,
2715 _('follow changeset history, or file history across copies and renames')),
2716 ('i', 'ignore-case', None, _('ignore case when matching')),
2717 ('l', 'files-with-matches', None,
2718 _('print only filenames and revs that match')),
2719 ('n', 'line-number', None, _('print matching line numbers')),
2720 ('r', 'rev', [], _('search in given revision range')),
2721 ('u', 'user', None, _('print user who committed change')),
2722 ] + walkopts,
2723 _('hg grep [OPTION]... PATTERN [FILE]...')),
2724 "heads":
2725 (heads,
2726 [('', 'style', '', _('display using template map file')),
2727 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2728 ('', 'template', '', _('display with template'))],
2729 _('hg heads [-r REV]')),
2730 "help": (help_, [], _('hg help [COMMAND]')),
2731 "identify|id": (identify, [], _('hg identify')),
2732 "import|patch":
2733 (import_,
2734 [('p', 'strip', 1,
2735 _('directory strip option for patch. This has the same\n'
2736 'meaning as the corresponding patch option')),
2737 ('b', 'base', '', _('base path')),
2738 ('f', 'force', None,
2739 _('skip check for outstanding uncommitted changes'))] + commitopts,
2740 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2741 "incoming|in": (incoming,
2742 [('M', 'no-merges', None, _('do not show merges')),
2743 ('f', 'force', None,
2744 _('run even when remote repository is unrelated')),
2745 ('', 'style', '', _('display using template map file')),
2746 ('n', 'newest-first', None, _('show newest record first')),
2747 ('', 'bundle', '', _('file to store the bundles into')),
2748 ('p', 'patch', None, _('show patch')),
2749 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2750 ('', 'template', '', _('display with template')),
2751 ] + remoteopts,
2752 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2753 ' [--bundle FILENAME] [SOURCE]')),
2754 "^init":
2755 (init,
2756 remoteopts,
2757 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2758 "locate":
2759 (locate,
2760 [('r', 'rev', '', _('search the repository as it stood at rev')),
2761 ('0', 'print0', None,
2762 _('end filenames with NUL, for use with xargs')),
2763 ('f', 'fullpath', None,
2764 _('print complete paths from the filesystem root')),
2765 ] + walkopts,
2766 _('hg locate [OPTION]... [PATTERN]...')),
2767 "^log|history":
2768 (log,
2769 [('f', 'follow', None,
2770 _('follow changeset history, or file history across copies and renames')),
2771 ('', 'follow-first', None,
2772 _('only follow the first parent of merge changesets')),
2773 ('d', 'date', '', _('show revs matching date spec')),
2774 ('C', 'copies', None, _('show copied files')),
2775 ('k', 'keyword', [], _('search for a keyword')),
2776 ('l', 'limit', '', _('limit number of changes displayed')),
2777 ('r', 'rev', [], _('show the specified revision or range')),
2778 ('', 'removed', None, _('include revs where files were removed')),
2779 ('M', 'no-merges', None, _('do not show merges')),
2780 ('', 'style', '', _('display using template map file')),
2781 ('m', 'only-merges', None, _('show only merges')),
2782 ('p', 'patch', None, _('show patch')),
2783 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2784 ('', 'template', '', _('display with template')),
2785 ] + walkopts,
2786 _('hg log [OPTION]... [FILE]')),
2787 "manifest": (manifest, [], _('hg manifest [REV]')),
2788 "^merge":
2789 (merge,
2790 [('f', 'force', None, _('force a merge with outstanding changes'))],
2791 _('hg merge [-f] [REV]')),
2792 "outgoing|out": (outgoing,
2793 [('M', 'no-merges', None, _('do not show merges')),
2794 ('f', 'force', None,
2795 _('run even when remote repository is unrelated')),
2796 ('p', 'patch', None, _('show patch')),
2797 ('', 'style', '', _('display using template map file')),
2798 ('r', 'rev', [], _('a specific revision you would like to push')),
2799 ('n', 'newest-first', None, _('show newest record first')),
2800 ('', 'template', '', _('display with template')),
2801 ] + remoteopts,
2802 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2803 "^parents":
2804 (parents,
2805 [('r', 'rev', '', _('show parents from the specified rev')),
2806 ('', 'style', '', _('display using template map file')),
2807 ('', 'template', '', _('display with template'))],
2808 _('hg parents [-r REV] [FILE]')),
2809 "paths": (paths, [], _('hg paths [NAME]')),
2810 "^pull":
2811 (pull,
2812 [('u', 'update', None,
2813 _('update to new tip if changesets were pulled')),
2814 ('f', 'force', None,
2815 _('run even when remote repository is unrelated')),
2816 ('r', 'rev', [],
2817 _('a specific revision up to which you would like to pull')),
2818 ] + remoteopts,
2819 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2820 "^push":
2821 (push,
2822 [('f', 'force', None, _('force push')),
2823 ('r', 'rev', [], _('a specific revision you would like to push')),
2824 ] + remoteopts,
2825 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2826 "debugrawcommit|rawcommit":
2827 (rawcommit,
2828 [('p', 'parent', [], _('parent')),
2829 ('d', 'date', '', _('date code')),
2830 ('u', 'user', '', _('user')),
2831 ('F', 'files', '', _('file list'))
2832 ] + commitopts,
2833 _('hg debugrawcommit [OPTION]... [FILE]...')),
2823 "recover": (recover, [], _('hg recover')),
2834 "recover": (recover, [], _('hg recover')),
2824 "^remove|rm":
2835 "^remove|rm":
2825 (remove,
2836 (remove,
2826 [('A', 'after', None, _('record remove that has already occurred')),
2837 [('A', 'after', None, _('record remove that has already occurred')),
2827 ('f', 'force', None, _('remove file even if modified')),
2838 ('f', 'force', None, _('remove file even if modified')),
2828 ] + walkopts,
2839 ] + walkopts,
2829 _('hg remove [OPTION]... FILE...')),
2840 _('hg remove [OPTION]... FILE...')),
2830 "rename|mv":
2841 "rename|mv":
2831 (rename,
2842 (rename,
2832 [('A', 'after', None, _('record a rename that has already occurred')),
2843 [('A', 'after', None, _('record a rename that has already occurred')),
2833 ('f', 'force', None,
2844 ('f', 'force', None,
2834 _('forcibly copy over an existing managed file')),
2845 _('forcibly copy over an existing managed file')),
2835 ] + walkopts + dryrunopts,
2846 ] + walkopts + dryrunopts,
2836 _('hg rename [OPTION]... SOURCE... DEST')),
2847 _('hg rename [OPTION]... SOURCE... DEST')),
2837 "^revert":
2848 "^revert":
2838 (revert,
2849 (revert,
2839 [('a', 'all', None, _('revert all changes when no arguments given')),
2850 [('a', 'all', None, _('revert all changes when no arguments given')),
2840 ('d', 'date', '', _('tipmost revision matching date')),
2851 ('d', 'date', '', _('tipmost revision matching date')),
2841 ('r', 'rev', '', _('revision to revert to')),
2852 ('r', 'rev', '', _('revision to revert to')),
2842 ('', 'no-backup', None, _('do not save backup copies of files')),
2853 ('', 'no-backup', None, _('do not save backup copies of files')),
2843 ] + walkopts + dryrunopts,
2854 ] + walkopts + dryrunopts,
2844 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2855 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2845 "rollback": (rollback, [], _('hg rollback')),
2856 "rollback": (rollback, [], _('hg rollback')),
2846 "root": (root, [], _('hg root')),
2857 "root": (root, [], _('hg root')),
2847 "showconfig|debugconfig":
2858 "showconfig|debugconfig":
2848 (showconfig,
2859 (showconfig,
2849 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2860 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2850 _('hg showconfig [-u] [NAME]...')),
2861 _('hg showconfig [-u] [NAME]...')),
2851 "^serve":
2862 "^serve":
2852 (serve,
2863 (serve,
2853 [('A', 'accesslog', '', _('name of access log file to write to')),
2864 [('A', 'accesslog', '', _('name of access log file to write to')),
2854 ('d', 'daemon', None, _('run server in background')),
2865 ('d', 'daemon', None, _('run server in background')),
2855 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2866 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2856 ('E', 'errorlog', '', _('name of error log file to write to')),
2867 ('E', 'errorlog', '', _('name of error log file to write to')),
2857 ('p', 'port', 0, _('port to use (default: 8000)')),
2868 ('p', 'port', 0, _('port to use (default: 8000)')),
2858 ('a', 'address', '', _('address to use')),
2869 ('a', 'address', '', _('address to use')),
2859 ('n', 'name', '',
2870 ('n', 'name', '',
2860 _('name to show in web pages (default: working dir)')),
2871 _('name to show in web pages (default: working dir)')),
2861 ('', 'webdir-conf', '', _('name of the webdir config file'
2872 ('', 'webdir-conf', '', _('name of the webdir config file'
2862 ' (serve more than one repo)')),
2873 ' (serve more than one repo)')),
2863 ('', 'pid-file', '', _('name of file to write process ID to')),
2874 ('', 'pid-file', '', _('name of file to write process ID to')),
2864 ('', 'stdio', None, _('for remote clients')),
2875 ('', 'stdio', None, _('for remote clients')),
2865 ('t', 'templates', '', _('web templates to use')),
2876 ('t', 'templates', '', _('web templates to use')),
2866 ('', 'style', '', _('template style to use')),
2877 ('', 'style', '', _('template style to use')),
2867 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2878 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2868 _('hg serve [OPTION]...')),
2879 _('hg serve [OPTION]...')),
2869 "^status|st":
2880 "^status|st":
2870 (status,
2881 (status,
2871 [('A', 'all', None, _('show status of all files')),
2882 [('A', 'all', None, _('show status of all files')),
2872 ('m', 'modified', None, _('show only modified files')),
2883 ('m', 'modified', None, _('show only modified files')),
2873 ('a', 'added', None, _('show only added files')),
2884 ('a', 'added', None, _('show only added files')),
2874 ('r', 'removed', None, _('show only removed files')),
2885 ('r', 'removed', None, _('show only removed files')),
2875 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2886 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2876 ('c', 'clean', None, _('show only files without changes')),
2887 ('c', 'clean', None, _('show only files without changes')),
2877 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2888 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2878 ('i', 'ignored', None, _('show ignored files')),
2889 ('i', 'ignored', None, _('show ignored files')),
2879 ('n', 'no-status', None, _('hide status prefix')),
2890 ('n', 'no-status', None, _('hide status prefix')),
2880 ('C', 'copies', None, _('show source of copied files')),
2891 ('C', 'copies', None, _('show source of copied files')),
2881 ('0', 'print0', None,
2892 ('0', 'print0', None,
2882 _('end filenames with NUL, for use with xargs')),
2893 _('end filenames with NUL, for use with xargs')),
2883 ('', 'rev', [], _('show difference from revision')),
2894 ('', 'rev', [], _('show difference from revision')),
2884 ] + walkopts,
2895 ] + walkopts,
2885 _('hg status [OPTION]... [FILE]...')),
2896 _('hg status [OPTION]... [FILE]...')),
2886 "tag":
2897 "tag":
2887 (tag,
2898 (tag,
2888 [('l', 'local', None, _('make the tag local')),
2899 [('l', 'local', None, _('make the tag local')),
2889 ('m', 'message', '', _('message for tag commit log entry')),
2900 ('m', 'message', '', _('message for tag commit log entry')),
2890 ('d', 'date', '', _('record datecode as commit date')),
2901 ('d', 'date', '', _('record datecode as commit date')),
2891 ('u', 'user', '', _('record user as committer')),
2902 ('u', 'user', '', _('record user as committer')),
2892 ('r', 'rev', '', _('revision to tag'))],
2903 ('r', 'rev', '', _('revision to tag'))],
2893 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2904 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2894 "tags": (tags, [], _('hg tags')),
2905 "tags": (tags, [], _('hg tags')),
2895 "tip":
2906 "tip":
2896 (tip,
2907 (tip,
2897 [('', 'style', '', _('display using template map file')),
2908 [('', 'style', '', _('display using template map file')),
2898 ('p', 'patch', None, _('show patch')),
2909 ('p', 'patch', None, _('show patch')),
2899 ('', 'template', '', _('display with template'))],
2910 ('', 'template', '', _('display with template'))],
2900 _('hg tip [-p]')),
2911 _('hg tip [-p]')),
2901 "unbundle":
2912 "unbundle":
2902 (unbundle,
2913 (unbundle,
2903 [('u', 'update', None,
2914 [('u', 'update', None,
2904 _('update to new tip if changesets were unbundled'))],
2915 _('update to new tip if changesets were unbundled'))],
2905 _('hg unbundle [-u] FILE')),
2916 _('hg unbundle [-u] FILE')),
2906 "^update|up|checkout|co":
2917 "^update|up|checkout|co":
2907 (update,
2918 (update,
2908 [('C', 'clean', None, _('overwrite locally modified files')),
2919 [('C', 'clean', None, _('overwrite locally modified files')),
2909 ('d', 'date', '', _('tipmost revision matching date'))],
2920 ('d', 'date', '', _('tipmost revision matching date'))],
2910 _('hg update [-C] [-d DATE] [REV]')),
2921 _('hg update [-C] [-d DATE] [REV]')),
2911 "verify": (verify, [], _('hg verify')),
2922 "verify": (verify, [], _('hg verify')),
2912 "version": (version_, [], _('hg version')),
2923 "version": (version_, [], _('hg version')),
2913 }
2924 }
2914
2925
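Each entry in the table above maps a command name (with "|"-separated aliases, and a leading "^" marking it for the short help list) to a tuple of (function, option list, synopsis). As a purely illustrative sketch, not part of the real table, a hypothetical entry could look like this (the hello command and its option are invented):

def hello(ui, repo, **opts):
    """print a short greeting (hypothetical example, not a real command)"""
    ui.write("hello from %s\n" % repo.root)

example_entry = {
    "hello|hi":
        (hello,
         [('g', 'greeting', '', 'alternative text to print (invented option)')],
         'hg hello [-g TEXT]'),
}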
2915 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2926 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2916 " debugindex debugindexdot debugdate debuginstall")
2927 " debugindex debugindexdot debugdate debuginstall")
2917 optionalrepo = ("paths serve showconfig")
2928 optionalrepo = ("paths serve showconfig")
2918
2929
2919 def findpossible(ui, cmd):
2930 def findpossible(ui, cmd):
2920 """
2931 """
2921 Return cmd -> (aliases, command table entry)
2932 Return cmd -> (aliases, command table entry)
2922 for each matching command.
2933 for each matching command.
2923 Return debug commands (or their aliases) only if no normal command matches.
2934 Return debug commands (or their aliases) only if no normal command matches.
2924 """
2935 """
2925 choice = {}
2936 choice = {}
2926 debugchoice = {}
2937 debugchoice = {}
2927 for e in table.keys():
2938 for e in table.keys():
2928 aliases = e.lstrip("^").split("|")
2939 aliases = e.lstrip("^").split("|")
2929 found = None
2940 found = None
2930 if cmd in aliases:
2941 if cmd in aliases:
2931 found = cmd
2942 found = cmd
2932 elif not ui.config("ui", "strict"):
2943 elif not ui.config("ui", "strict"):
2933 for a in aliases:
2944 for a in aliases:
2934 if a.startswith(cmd):
2945 if a.startswith(cmd):
2935 found = a
2946 found = a
2936 break
2947 break
2937 if found is not None:
2948 if found is not None:
2938 if aliases[0].startswith("debug") or found.startswith("debug"):
2949 if aliases[0].startswith("debug") or found.startswith("debug"):
2939 debugchoice[found] = (aliases, table[e])
2950 debugchoice[found] = (aliases, table[e])
2940 else:
2951 else:
2941 choice[found] = (aliases, table[e])
2952 choice[found] = (aliases, table[e])
2942
2953
2943 if not choice and debugchoice:
2954 if not choice and debugchoice:
2944 choice = debugchoice
2955 choice = debugchoice
2945
2956
2946 return choice
2957 return choice
2947
2958
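The net effect is that any unambiguous prefix of a command or alias is accepted unless ui.strict is set. A rough standalone sketch of that matching, using a plain dict in place of the real command table (names invented):

def match_prefix(cmd, aliases_by_entry):
    """simplified findpossible: map each matching alias to its entry"""
    matches = {}
    for entry, aliases in aliases_by_entry.items():
        if cmd in aliases:                 # exact match wins for this entry
            matches[cmd] = entry
            continue
        for a in aliases:
            if a.startswith(cmd):          # otherwise accept a prefix
                matches[a] = entry
                break
    return matches

# match_prefix("st", {"status": ["status", "st"], "strip": ["strip"]})
# returns {'st': 'status', 'strip': 'strip'}; with two candidate entries the
# real dispatcher raises AmbiguousCommand, with exactly one it runs that command.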
2948 def findcmd(ui, cmd):
2959 def findcmd(ui, cmd):
2949 """Return (aliases, command table entry) for command string."""
2960 """Return (aliases, command table entry) for command string."""
2950 choice = findpossible(ui, cmd)
2961 choice = findpossible(ui, cmd)
2951
2962
2952 if choice.has_key(cmd):
2963 if choice.has_key(cmd):
2953 return choice[cmd]
2964 return choice[cmd]
2954
2965
2955 if len(choice) > 1:
2966 if len(choice) > 1:
2956 clist = choice.keys()
2967 clist = choice.keys()
2957 clist.sort()
2968 clist.sort()
2958 raise AmbiguousCommand(cmd, clist)
2969 raise AmbiguousCommand(cmd, clist)
2959
2970
2960 if choice:
2971 if choice:
2961 return choice.values()[0]
2972 return choice.values()[0]
2962
2973
2963 raise UnknownCommand(cmd)
2974 raise UnknownCommand(cmd)
2964
2975
2965 def catchterm(*args):
2976 def catchterm(*args):
2966 raise util.SignalInterrupt
2977 raise util.SignalInterrupt
2967
2978
2968 def run():
2979 def run():
2969 sys.exit(dispatch(sys.argv[1:]))
2980 sys.exit(dispatch(sys.argv[1:]))
2970
2981
2971 class ParseError(Exception):
2982 class ParseError(Exception):
2972 """Exception raised on errors in parsing the command line."""
2983 """Exception raised on errors in parsing the command line."""
2973
2984
2974 def parse(ui, args):
2985 def parse(ui, args):
2975 options = {}
2986 options = {}
2976 cmdoptions = {}
2987 cmdoptions = {}
2977
2988
2978 try:
2989 try:
2979 args = fancyopts.fancyopts(args, globalopts, options)
2990 args = fancyopts.fancyopts(args, globalopts, options)
2980 except fancyopts.getopt.GetoptError, inst:
2991 except fancyopts.getopt.GetoptError, inst:
2981 raise ParseError(None, inst)
2992 raise ParseError(None, inst)
2982
2993
2983 if args:
2994 if args:
2984 cmd, args = args[0], args[1:]
2995 cmd, args = args[0], args[1:]
2985 aliases, i = findcmd(ui, cmd)
2996 aliases, i = findcmd(ui, cmd)
2986 cmd = aliases[0]
2997 cmd = aliases[0]
2987 defaults = ui.config("defaults", cmd)
2998 defaults = ui.config("defaults", cmd)
2988 if defaults:
2999 if defaults:
2989 args = shlex.split(defaults) + args
3000 args = shlex.split(defaults) + args
2990 c = list(i[1])
3001 c = list(i[1])
2991 else:
3002 else:
2992 cmd = None
3003 cmd = None
2993 c = []
3004 c = []
2994
3005
2995 # combine global options into local
3006 # combine global options into local
2996 for o in globalopts:
3007 for o in globalopts:
2997 c.append((o[0], o[1], options[o[1]], o[3]))
3008 c.append((o[0], o[1], options[o[1]], o[3]))
2998
3009
2999 try:
3010 try:
3000 args = fancyopts.fancyopts(args, c, cmdoptions)
3011 args = fancyopts.fancyopts(args, c, cmdoptions)
3001 except fancyopts.getopt.GetoptError, inst:
3012 except fancyopts.getopt.GetoptError, inst:
3002 raise ParseError(cmd, inst)
3013 raise ParseError(cmd, inst)
3003
3014
3004 # separate global options back out
3015 # separate global options back out
3005 for o in globalopts:
3016 for o in globalopts:
3006 n = o[1]
3017 n = o[1]
3007 options[n] = cmdoptions[n]
3018 options[n] = cmdoptions[n]
3008 del cmdoptions[n]
3019 del cmdoptions[n]
3009
3020
3010 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3021 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3011
3022
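Note how per-command defaults work in parse(): the [defaults] config value is split with shlex and prepended to whatever the user typed, before the command options are parsed. A small illustration with invented values:

import shlex

# hgrc:
#   [defaults]
#   log = -l 10 --style compact
defaults = "-l 10 --style compact"     # what ui.config("defaults", "log") might return
args = ["-k", "bug"]                   # what the user typed after "hg log"
args = shlex.split(defaults) + args
# args == ['-l', '10', '--style', 'compact', '-k', 'bug']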
3012 external = {}
3023 external = {}
3013
3024
3014 def findext(name):
3025 def findext(name):
3015 '''return module with given extension name'''
3026 '''return module with given extension name'''
3016 try:
3027 try:
3017 return sys.modules[external[name]]
3028 return sys.modules[external[name]]
3018 except KeyError:
3029 except KeyError:
3019 for k, v in external.iteritems():
3030 for k, v in external.iteritems():
3020 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3031 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3021 return sys.modules[v]
3032 return sys.modules[v]
3022 raise KeyError(name)
3033 raise KeyError(name)
3023
3034
3024 def load_extensions(ui):
3035 def load_extensions(ui):
3025 added = []
3036 added = []
3026 for ext_name, load_from_name in ui.extensions():
3037 for ext_name, load_from_name in ui.extensions():
3027 if ext_name in external:
3038 if ext_name in external:
3028 continue
3039 continue
3029 try:
3040 try:
3030 if load_from_name:
3041 if load_from_name:
3031 # the module will be loaded in sys.modules
3042 # the module will be loaded in sys.modules
3032 # choose a unique name so that it doesn't
3043 # choose a unique name so that it doesn't
3033 # conflict with other modules
3044 # conflict with other modules

3034 module_name = "hgext_%s" % ext_name.replace('.', '_')
3045 module_name = "hgext_%s" % ext_name.replace('.', '_')
3035 mod = imp.load_source(module_name, load_from_name)
3046 mod = imp.load_source(module_name, load_from_name)
3036 else:
3047 else:
3037 def importh(name):
3048 def importh(name):
3038 mod = __import__(name)
3049 mod = __import__(name)
3039 components = name.split('.')
3050 components = name.split('.')
3040 for comp in components[1:]:
3051 for comp in components[1:]:
3041 mod = getattr(mod, comp)
3052 mod = getattr(mod, comp)
3042 return mod
3053 return mod
3043 try:
3054 try:
3044 mod = importh("hgext.%s" % ext_name)
3055 mod = importh("hgext.%s" % ext_name)
3045 except ImportError:
3056 except ImportError:
3046 mod = importh(ext_name)
3057 mod = importh(ext_name)
3047 external[ext_name] = mod.__name__
3058 external[ext_name] = mod.__name__
3048 added.append((mod, ext_name))
3059 added.append((mod, ext_name))
3049 except (util.SignalInterrupt, KeyboardInterrupt):
3060 except (util.SignalInterrupt, KeyboardInterrupt):
3050 raise
3061 raise
3051 except Exception, inst:
3062 except Exception, inst:
3052 ui.warn(_("*** failed to import extension %s: %s\n") %
3063 ui.warn(_("*** failed to import extension %s: %s\n") %
3053 (ext_name, inst))
3064 (ext_name, inst))
3054 if ui.print_exc():
3065 if ui.print_exc():
3055 return 1
3066 return 1
3056
3067
3057 for mod, name in added:
3068 for mod, name in added:
3058 uisetup = getattr(mod, 'uisetup', None)
3069 uisetup = getattr(mod, 'uisetup', None)
3059 if uisetup:
3070 if uisetup:
3060 uisetup(ui)
3071 uisetup(ui)
3061 cmdtable = getattr(mod, 'cmdtable', {})
3072 cmdtable = getattr(mod, 'cmdtable', {})
3062 overrides = [cmd for cmd in cmdtable if cmd in table]
3073 overrides = [cmd for cmd in cmdtable if cmd in table]
3063 if overrides:
3074 if overrides:
3064 ui.warn(_("extension '%s' overrides commands: %s\n")
3075 ui.warn(_("extension '%s' overrides commands: %s\n")
3065 % (name, " ".join(overrides)))
3076 % (name, " ".join(overrides)))
3066 table.update(cmdtable)
3077 table.update(cmdtable)
3067
3078
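For reference, the minimal shape of a module this loader can consume is sketched below; every name and path here is invented. An extension may define uisetup(ui), a reposetup(ui, repo) hook that dispatch() wires up later, and a cmdtable dict that gets merged into the global command table:

# hypothetical file /path/to/example.py, enabled in hgrc with
#   [extensions]
#   example = /path/to/example.py
def uisetup(ui):
    ui.note("example extension loaded\n")

def reposetup(ui, repo):
    pass    # called once a local repository object exists

def hello(ui, repo, **opts):
    """print hello (illustrative command only)"""
    ui.write("hello\n")

cmdtable = {
    "hello": (hello, [], 'hg hello'),
}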
3068 def parseconfig(config):
3079 def parseconfig(config):
3069 """parse the --config options from the command line"""
3080 """parse the --config options from the command line"""
3070 parsed = []
3081 parsed = []
3071 for cfg in config:
3082 for cfg in config:
3072 try:
3083 try:
3073 name, value = cfg.split('=', 1)
3084 name, value = cfg.split('=', 1)
3074 section, name = name.split('.', 1)
3085 section, name = name.split('.', 1)
3075 if not section or not name:
3086 if not section or not name:
3076 raise IndexError
3087 raise IndexError
3077 parsed.append((section, name, value))
3088 parsed.append((section, name, value))
3078 except (IndexError, ValueError):
3089 except (IndexError, ValueError):
3079 raise util.Abort(_('malformed --config option: %s') % cfg)
3090 raise util.Abort(_('malformed --config option: %s') % cfg)
3080 return parsed
3091 return parsed
3081
3092
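In other words a --config value must contain both a dot and an '='; everything after the first '=' is the value. For example (values invented, results shown as comments):

# parseconfig(['ui.username=alice', 'web.port=8080'])
#   -> [('ui', 'username', 'alice'), ('web', 'port', '8080')]
# parseconfig(['username=alice'])
#   -> util.Abort: malformed --config option: username=alice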
3082 def dispatch(args):
3093 def dispatch(args):
3083 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3094 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3084 num = getattr(signal, name, None)
3095 num = getattr(signal, name, None)
3085 if num: signal.signal(num, catchterm)
3096 if num: signal.signal(num, catchterm)
3086
3097
3087 try:
3098 try:
3088 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3099 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3089 except util.Abort, inst:
3100 except util.Abort, inst:
3090 sys.stderr.write(_("abort: %s\n") % inst)
3101 sys.stderr.write(_("abort: %s\n") % inst)
3091 return -1
3102 return -1
3092
3103
3093 load_extensions(u)
3104 load_extensions(u)
3094 u.addreadhook(load_extensions)
3105 u.addreadhook(load_extensions)
3095
3106
3096 try:
3107 try:
3097 cmd, func, args, options, cmdoptions = parse(u, args)
3108 cmd, func, args, options, cmdoptions = parse(u, args)
3098 if options["encoding"]:
3109 if options["encoding"]:
3099 util._encoding = options["encoding"]
3110 util._encoding = options["encoding"]
3100 if options["encodingmode"]:
3111 if options["encodingmode"]:
3101 util._encodingmode = options["encodingmode"]
3112 util._encodingmode = options["encodingmode"]
3102 if options["time"]:
3113 if options["time"]:
3103 def get_times():
3114 def get_times():
3104 t = os.times()
3115 t = os.times()
3105 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3116 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3106 t = (t[0], t[1], t[2], t[3], time.clock())
3117 t = (t[0], t[1], t[2], t[3], time.clock())
3107 return t
3118 return t
3108 s = get_times()
3119 s = get_times()
3109 def print_time():
3120 def print_time():
3110 t = get_times()
3121 t = get_times()
3111 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3122 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3112 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3123 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3113 atexit.register(print_time)
3124 atexit.register(print_time)
3114
3125
3115 # enter the debugger before command execution
3126 # enter the debugger before command execution
3116 if options['debugger']:
3127 if options['debugger']:
3117 pdb.set_trace()
3128 pdb.set_trace()
3118
3129
3119 try:
3130 try:
3120 if options['cwd']:
3131 if options['cwd']:
3121 os.chdir(options['cwd'])
3132 os.chdir(options['cwd'])
3122
3133
3123 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3134 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3124 not options["noninteractive"], options["traceback"],
3135 not options["noninteractive"], options["traceback"],
3125 parseconfig(options["config"]))
3136 parseconfig(options["config"]))
3126
3137
3127 path = u.expandpath(options["repository"]) or ""
3138 path = u.expandpath(options["repository"]) or ""
3128 repo = path and hg.repository(u, path=path) or None
3139 repo = path and hg.repository(u, path=path) or None
3129 if repo and not repo.local():
3140 if repo and not repo.local():
3130 raise util.Abort(_("repository '%s' is not local") % path)
3141 raise util.Abort(_("repository '%s' is not local") % path)
3131
3142
3132 if options['help']:
3143 if options['help']:
3133 return help_(u, cmd, options['version'])
3144 return help_(u, cmd, options['version'])
3134 elif options['version']:
3145 elif options['version']:
3135 return version_(u)
3146 return version_(u)
3136 elif not cmd:
3147 elif not cmd:
3137 return help_(u, 'shortlist')
3148 return help_(u, 'shortlist')
3138
3149
3139 if cmd not in norepo.split():
3150 if cmd not in norepo.split():
3140 try:
3151 try:
3141 if not repo:
3152 if not repo:
3142 repo = hg.repository(u, path=path)
3153 repo = hg.repository(u, path=path)
3143 u = repo.ui
3154 u = repo.ui
3144 for name in external.itervalues():
3155 for name in external.itervalues():
3145 mod = sys.modules[name]
3156 mod = sys.modules[name]
3146 if hasattr(mod, 'reposetup'):
3157 if hasattr(mod, 'reposetup'):
3147 mod.reposetup(u, repo)
3158 mod.reposetup(u, repo)
3148 hg.repo_setup_hooks.append(mod.reposetup)
3159 hg.repo_setup_hooks.append(mod.reposetup)
3149 except hg.RepoError:
3160 except hg.RepoError:
3150 if cmd not in optionalrepo.split():
3161 if cmd not in optionalrepo.split():
3151 raise
3162 raise
3152 d = lambda: func(u, repo, *args, **cmdoptions)
3163 d = lambda: func(u, repo, *args, **cmdoptions)
3153 else:
3164 else:
3154 d = lambda: func(u, *args, **cmdoptions)
3165 d = lambda: func(u, *args, **cmdoptions)
3155
3166
3156 try:
3167 try:
3157 if options['profile']:
3168 if options['profile']:
3158 import hotshot, hotshot.stats
3169 import hotshot, hotshot.stats
3159 prof = hotshot.Profile("hg.prof")
3170 prof = hotshot.Profile("hg.prof")
3160 try:
3171 try:
3161 try:
3172 try:
3162 return prof.runcall(d)
3173 return prof.runcall(d)
3163 except:
3174 except:
3164 try:
3175 try:
3165 u.warn(_('exception raised - generating '
3176 u.warn(_('exception raised - generating '
3166 'profile anyway\n'))
3177 'profile anyway\n'))
3167 except:
3178 except:
3168 pass
3179 pass
3169 raise
3180 raise
3170 finally:
3181 finally:
3171 prof.close()
3182 prof.close()
3172 stats = hotshot.stats.load("hg.prof")
3183 stats = hotshot.stats.load("hg.prof")
3173 stats.strip_dirs()
3184 stats.strip_dirs()
3174 stats.sort_stats('time', 'calls')
3185 stats.sort_stats('time', 'calls')
3175 stats.print_stats(40)
3186 stats.print_stats(40)
3176 elif options['lsprof']:
3187 elif options['lsprof']:
3177 try:
3188 try:
3178 from mercurial import lsprof
3189 from mercurial import lsprof
3179 except ImportError:
3190 except ImportError:
3180 raise util.Abort(_(
3191 raise util.Abort(_(
3181 'lsprof not available - install from '
3192 'lsprof not available - install from '
3182 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3193 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3183 p = lsprof.Profiler()
3194 p = lsprof.Profiler()
3184 p.enable(subcalls=True)
3195 p.enable(subcalls=True)
3185 try:
3196 try:
3186 return d()
3197 return d()
3187 finally:
3198 finally:
3188 p.disable()
3199 p.disable()
3189 stats = lsprof.Stats(p.getstats())
3200 stats = lsprof.Stats(p.getstats())
3190 stats.sort()
3201 stats.sort()
3191 stats.pprint(top=10, file=sys.stderr, climit=5)
3202 stats.pprint(top=10, file=sys.stderr, climit=5)
3192 else:
3203 else:
3193 return d()
3204 return d()
3194 finally:
3205 finally:
3195 u.flush()
3206 u.flush()
3196 except:
3207 except:
3197 # enter the debugger when we hit an exception
3208 # enter the debugger when we hit an exception
3198 if options['debugger']:
3209 if options['debugger']:
3199 pdb.post_mortem(sys.exc_info()[2])
3210 pdb.post_mortem(sys.exc_info()[2])
3200 u.print_exc()
3211 u.print_exc()
3201 raise
3212 raise
3202 except ParseError, inst:
3213 except ParseError, inst:
3203 if inst.args[0]:
3214 if inst.args[0]:
3204 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3215 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3205 help_(u, inst.args[0])
3216 help_(u, inst.args[0])
3206 else:
3217 else:
3207 u.warn(_("hg: %s\n") % inst.args[1])
3218 u.warn(_("hg: %s\n") % inst.args[1])
3208 help_(u, 'shortlist')
3219 help_(u, 'shortlist')
3209 except AmbiguousCommand, inst:
3220 except AmbiguousCommand, inst:
3210 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3221 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3211 (inst.args[0], " ".join(inst.args[1])))
3222 (inst.args[0], " ".join(inst.args[1])))
3212 except UnknownCommand, inst:
3223 except UnknownCommand, inst:
3213 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3224 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3214 help_(u, 'shortlist')
3225 help_(u, 'shortlist')
3215 except hg.RepoError, inst:
3226 except hg.RepoError, inst:
3216 u.warn(_("abort: %s!\n") % inst)
3227 u.warn(_("abort: %s!\n") % inst)
3217 except lock.LockHeld, inst:
3228 except lock.LockHeld, inst:
3218 if inst.errno == errno.ETIMEDOUT:
3229 if inst.errno == errno.ETIMEDOUT:
3219 reason = _('timed out waiting for lock held by %s') % inst.locker
3230 reason = _('timed out waiting for lock held by %s') % inst.locker
3220 else:
3231 else:
3221 reason = _('lock held by %s') % inst.locker
3232 reason = _('lock held by %s') % inst.locker
3222 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3233 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3223 except lock.LockUnavailable, inst:
3234 except lock.LockUnavailable, inst:
3224 u.warn(_("abort: could not lock %s: %s\n") %
3235 u.warn(_("abort: could not lock %s: %s\n") %
3225 (inst.desc or inst.filename, inst.strerror))
3236 (inst.desc or inst.filename, inst.strerror))
3226 except revlog.RevlogError, inst:
3237 except revlog.RevlogError, inst:
3227 u.warn(_("abort: %s!\n") % inst)
3238 u.warn(_("abort: %s!\n") % inst)
3228 except util.SignalInterrupt:
3239 except util.SignalInterrupt:
3229 u.warn(_("killed!\n"))
3240 u.warn(_("killed!\n"))
3230 except KeyboardInterrupt:
3241 except KeyboardInterrupt:
3231 try:
3242 try:
3232 u.warn(_("interrupted!\n"))
3243 u.warn(_("interrupted!\n"))
3233 except IOError, inst:
3244 except IOError, inst:
3234 if inst.errno == errno.EPIPE:
3245 if inst.errno == errno.EPIPE:
3235 if u.debugflag:
3246 if u.debugflag:
3236 u.warn(_("\nbroken pipe\n"))
3247 u.warn(_("\nbroken pipe\n"))
3237 else:
3248 else:
3238 raise
3249 raise
3239 except socket.error, inst:
3250 except socket.error, inst:
3240 u.warn(_("abort: %s\n") % inst[1])
3251 u.warn(_("abort: %s\n") % inst[1])
3241 except IOError, inst:
3252 except IOError, inst:
3242 if hasattr(inst, "code"):
3253 if hasattr(inst, "code"):
3243 u.warn(_("abort: %s\n") % inst)
3254 u.warn(_("abort: %s\n") % inst)
3244 elif hasattr(inst, "reason"):
3255 elif hasattr(inst, "reason"):
3245 try: # usually it is in the form (errno, strerror)
3256 try: # usually it is in the form (errno, strerror)
3246 reason = inst.reason.args[1]
3257 reason = inst.reason.args[1]
3247 except: # it might be anything, for example a string
3258 except: # it might be anything, for example a string
3248 reason = inst.reason
3259 reason = inst.reason
3249 u.warn(_("abort: error: %s\n") % reason)
3260 u.warn(_("abort: error: %s\n") % reason)
3250 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3261 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3251 if u.debugflag:
3262 if u.debugflag:
3252 u.warn(_("broken pipe\n"))
3263 u.warn(_("broken pipe\n"))
3253 elif getattr(inst, "strerror", None):
3264 elif getattr(inst, "strerror", None):
3254 if getattr(inst, "filename", None):
3265 if getattr(inst, "filename", None):
3255 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3266 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3256 else:
3267 else:
3257 u.warn(_("abort: %s\n") % inst.strerror)
3268 u.warn(_("abort: %s\n") % inst.strerror)
3258 else:
3269 else:
3259 raise
3270 raise
3260 except OSError, inst:
3271 except OSError, inst:
3261 if getattr(inst, "filename", None):
3272 if getattr(inst, "filename", None):
3262 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3273 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3263 else:
3274 else:
3264 u.warn(_("abort: %s\n") % inst.strerror)
3275 u.warn(_("abort: %s\n") % inst.strerror)
3265 except util.UnexpectedOutput, inst:
3276 except util.UnexpectedOutput, inst:
3266 u.warn(_("abort: %s") % inst[0])
3277 u.warn(_("abort: %s") % inst[0])
3267 if not isinstance(inst[1], basestring):
3278 if not isinstance(inst[1], basestring):
3268 u.warn(" %r\n" % (inst[1],))
3279 u.warn(" %r\n" % (inst[1],))
3269 elif not inst[1]:
3280 elif not inst[1]:
3270 u.warn(_(" empty string\n"))
3281 u.warn(_(" empty string\n"))
3271 else:
3282 else:
3272 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3283 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3273 except util.Abort, inst:
3284 except util.Abort, inst:
3274 u.warn(_("abort: %s\n") % inst)
3285 u.warn(_("abort: %s\n") % inst)
3275 except TypeError, inst:
3286 except TypeError, inst:
3276 # was this an argument error?
3287 # was this an argument error?
3277 tb = traceback.extract_tb(sys.exc_info()[2])
3288 tb = traceback.extract_tb(sys.exc_info()[2])
3278 if len(tb) > 2: # no
3289 if len(tb) > 2: # no
3279 raise
3290 raise
3280 u.debug(inst, "\n")
3291 u.debug(inst, "\n")
3281 u.warn(_("%s: invalid arguments\n") % cmd)
3292 u.warn(_("%s: invalid arguments\n") % cmd)
3282 help_(u, cmd)
3293 help_(u, cmd)
3283 except SystemExit, inst:
3294 except SystemExit, inst:
3284 # Commands shouldn't sys.exit directly, but give a return code.
3295 # Commands shouldn't sys.exit directly, but give a return code.
3285 # Just in case, catch this and pass the exit code to the caller.
3296 # Just in case, catch this and pass the exit code to the caller.
3286 return inst.code
3297 return inst.code
3287 except:
3298 except:
3288 u.warn(_("** unknown exception encountered, details follow\n"))
3299 u.warn(_("** unknown exception encountered, details follow\n"))
3289 u.warn(_("** report bug details to "
3300 u.warn(_("** report bug details to "
3290 "http://www.selenic.com/mercurial/bts\n"))
3301 "http://www.selenic.com/mercurial/bts\n"))
3291 u.warn(_("** or mercurial@selenic.com\n"))
3302 u.warn(_("** or mercurial@selenic.com\n"))
3292 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3303 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3293 % version.get_version())
3304 % version.get_version())
3294 raise
3305 raise
3295
3306
3296 return -1
3307 return -1
@@ -1,1885 +1,1903 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34 self.root = os.path.realpath(path)
34 self.root = os.path.realpath(path)
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 os.mkdir(os.path.join(self.path, "store"))
44 os.mkdir(os.path.join(self.path, "store"))
45 requirements = ("revlogv1", "store")
45 requirements = ("revlogv1", "store")
46 reqfile = self.opener("requires", "w")
46 reqfile = self.opener("requires", "w")
47 for r in requirements:
47 for r in requirements:
48 reqfile.write("%s\n" % r)
48 reqfile.write("%s\n" % r)
49 reqfile.close()
49 reqfile.close()
50 # create an invalid changelog
50 # create an invalid changelog
51 self.opener("00changelog.i", "a").write(
51 self.opener("00changelog.i", "a").write(
52 '\0\0\0\2' # represents revlogv2
52 '\0\0\0\2' # represents revlogv2
53 ' dummy changelog to prevent using the old repo layout'
53 ' dummy changelog to prevent using the old repo layout'
54 )
54 )
55 else:
55 else:
56 raise repo.RepoError(_("repository %s not found") % path)
56 raise repo.RepoError(_("repository %s not found") % path)
57 elif create:
57 elif create:
58 raise repo.RepoError(_("repository %s already exists") % path)
58 raise repo.RepoError(_("repository %s already exists") % path)
59 else:
59 else:
60 # find requirements
60 # find requirements
61 try:
61 try:
62 requirements = self.opener("requires").read().splitlines()
62 requirements = self.opener("requires").read().splitlines()
63 except IOError, inst:
63 except IOError, inst:
64 if inst.errno != errno.ENOENT:
64 if inst.errno != errno.ENOENT:
65 raise
65 raise
66 requirements = []
66 requirements = []
67 # check them
67 # check them
68 for r in requirements:
68 for r in requirements:
69 if r not in self.supported:
69 if r not in self.supported:
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
71
71
72 # setup store
72 # setup store
73 if "store" in requirements:
73 if "store" in requirements:
74 self.encodefn = util.encodefilename
74 self.encodefn = util.encodefilename
75 self.decodefn = util.decodefilename
75 self.decodefn = util.decodefilename
76 self.spath = os.path.join(self.path, "store")
76 self.spath = os.path.join(self.path, "store")
77 else:
77 else:
78 self.encodefn = lambda x: x
78 self.encodefn = lambda x: x
79 self.decodefn = lambda x: x
79 self.decodefn = lambda x: x
80 self.spath = self.path
80 self.spath = self.path
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
82
82
83 self.ui = ui.ui(parentui=parentui)
83 self.ui = ui.ui(parentui=parentui)
84 try:
84 try:
85 self.ui.readconfig(self.join("hgrc"), self.root)
85 self.ui.readconfig(self.join("hgrc"), self.root)
86 except IOError:
86 except IOError:
87 pass
87 pass
88
88
89 v = self.ui.configrevlog()
89 v = self.ui.configrevlog()
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
92 fl = v.get('flags', None)
92 fl = v.get('flags', None)
93 flags = 0
93 flags = 0
94 if fl != None:
94 if fl != None:
95 for x in fl.split():
95 for x in fl.split():
96 flags |= revlog.flagstr(x)
96 flags |= revlog.flagstr(x)
97 elif self.revlogv1:
97 elif self.revlogv1:
98 flags = revlog.REVLOG_DEFAULT_FLAGS
98 flags = revlog.REVLOG_DEFAULT_FLAGS
99
99
100 v = self.revlogversion | flags
100 v = self.revlogversion | flags
101 self.manifest = manifest.manifest(self.sopener, v)
101 self.manifest = manifest.manifest(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
103
103
104 fallback = self.ui.config('ui', 'fallbackencoding')
104 fallback = self.ui.config('ui', 'fallbackencoding')
105 if fallback:
105 if fallback:
106 util._fallbackencoding = fallback
106 util._fallbackencoding = fallback
107
107
108 # the changelog might not have the inline index flag
108 # the changelog might not have the inline index flag
109 # on. If the format of the changelog is the same as found in
109 # on. If the format of the changelog is the same as found in
110 # .hgrc, apply any flags found in the .hgrc as well.
110 # .hgrc, apply any flags found in the .hgrc as well.
111 # Otherwise, just use the version from the changelog
111 # Otherwise, just use the version from the changelog
112 v = self.changelog.version
112 v = self.changelog.version
113 if v == self.revlogversion:
113 if v == self.revlogversion:
114 v |= flags
114 v |= flags
115 self.revlogversion = v
115 self.revlogversion = v
116
116
117 self.tagscache = None
117 self.tagscache = None
118 self.branchcache = None
118 self.branchcache = None
119 self.nodetagscache = None
119 self.nodetagscache = None
120 self.filterpats = {}
120 self.filterpats = {}
121 self.transhandle = None
121 self.transhandle = None
122
122
123 self._link = lambda x: False
123 self._link = lambda x: False
124 if util.checklink(self.root):
124 if util.checklink(self.root):
125 r = self.root # avoid circular reference in lambda
125 r = self.root # avoid circular reference in lambda
126 self._link = lambda x: util.is_link(os.path.join(r, x))
126 self._link = lambda x: util.is_link(os.path.join(r, x))
127
127
128 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
128 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
129
129
130 def url(self):
130 def url(self):
131 return 'file:' + self.root
131 return 'file:' + self.root
132
132
133 def hook(self, name, throw=False, **args):
133 def hook(self, name, throw=False, **args):
134 def callhook(hname, funcname):
134 def callhook(hname, funcname):
135 '''call python hook. hook is callable object, looked up as
135 '''call python hook. hook is callable object, looked up as
136 name in python module. if callable returns "true", hook
136 name in python module. if callable returns "true", hook
137 fails, else passes. if hook raises exception, treated as
137 fails, else passes. if hook raises exception, treated as
138 hook failure. exception propagates if throw is "true".
138 hook failure. exception propagates if throw is "true".
139
139
140 reason for "true" meaning "hook failed" is so that
140 reason for "true" meaning "hook failed" is so that
141 unmodified commands (e.g. mercurial.commands.update) can
141 unmodified commands (e.g. mercurial.commands.update) can
142 be run as hooks without wrappers to convert return values.'''
142 be run as hooks without wrappers to convert return values.'''
143
143
144 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
144 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
145 d = funcname.rfind('.')
145 d = funcname.rfind('.')
146 if d == -1:
146 if d == -1:
147 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
147 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
148 % (hname, funcname))
148 % (hname, funcname))
149 modname = funcname[:d]
149 modname = funcname[:d]
150 try:
150 try:
151 obj = __import__(modname)
151 obj = __import__(modname)
152 except ImportError:
152 except ImportError:
153 try:
153 try:
154 # extensions are loaded with hgext_ prefix
154 # extensions are loaded with hgext_ prefix
155 obj = __import__("hgext_%s" % modname)
155 obj = __import__("hgext_%s" % modname)
156 except ImportError:
156 except ImportError:
157 raise util.Abort(_('%s hook is invalid '
157 raise util.Abort(_('%s hook is invalid '
158 '(import of "%s" failed)') %
158 '(import of "%s" failed)') %
159 (hname, modname))
159 (hname, modname))
160 try:
160 try:
161 for p in funcname.split('.')[1:]:
161 for p in funcname.split('.')[1:]:
162 obj = getattr(obj, p)
162 obj = getattr(obj, p)
163 except AttributeError, err:
163 except AttributeError, err:
164 raise util.Abort(_('%s hook is invalid '
164 raise util.Abort(_('%s hook is invalid '
165 '("%s" is not defined)') %
165 '("%s" is not defined)') %
166 (hname, funcname))
166 (hname, funcname))
167 if not callable(obj):
167 if not callable(obj):
168 raise util.Abort(_('%s hook is invalid '
168 raise util.Abort(_('%s hook is invalid '
169 '("%s" is not callable)') %
169 '("%s" is not callable)') %
170 (hname, funcname))
170 (hname, funcname))
171 try:
171 try:
172 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
172 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
173 except (KeyboardInterrupt, util.SignalInterrupt):
173 except (KeyboardInterrupt, util.SignalInterrupt):
174 raise
174 raise
175 except Exception, exc:
175 except Exception, exc:
176 if isinstance(exc, util.Abort):
176 if isinstance(exc, util.Abort):
177 self.ui.warn(_('error: %s hook failed: %s\n') %
177 self.ui.warn(_('error: %s hook failed: %s\n') %
178 (hname, exc.args[0]))
178 (hname, exc.args[0]))
179 else:
179 else:
180 self.ui.warn(_('error: %s hook raised an exception: '
180 self.ui.warn(_('error: %s hook raised an exception: '
181 '%s\n') % (hname, exc))
181 '%s\n') % (hname, exc))
182 if throw:
182 if throw:
183 raise
183 raise
184 self.ui.print_exc()
184 self.ui.print_exc()
185 return True
185 return True
186 if r:
186 if r:
187 if throw:
187 if throw:
188 raise util.Abort(_('%s hook failed') % hname)
188 raise util.Abort(_('%s hook failed') % hname)
189 self.ui.warn(_('warning: %s hook failed\n') % hname)
189 self.ui.warn(_('warning: %s hook failed\n') % hname)
190 return r
190 return r
191
191
192 def runhook(name, cmd):
192 def runhook(name, cmd):
193 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
193 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
194 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
194 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
195 r = util.system(cmd, environ=env, cwd=self.root)
195 r = util.system(cmd, environ=env, cwd=self.root)
196 if r:
196 if r:
197 desc, r = util.explain_exit(r)
197 desc, r = util.explain_exit(r)
198 if throw:
198 if throw:
199 raise util.Abort(_('%s hook %s') % (name, desc))
199 raise util.Abort(_('%s hook %s') % (name, desc))
200 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
200 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
201 return r
201 return r
202
202
203 r = False
203 r = False
204 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
204 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
205 if hname.split(".", 1)[0] == name and cmd]
205 if hname.split(".", 1)[0] == name and cmd]
206 hooks.sort()
206 hooks.sort()
207 for hname, cmd in hooks:
207 for hname, cmd in hooks:
208 if cmd.startswith('python:'):
208 if cmd.startswith('python:'):
209 r = callhook(hname, cmd[7:].strip()) or r
209 r = callhook(hname, cmd[7:].strip()) or r
210 else:
210 else:
211 r = runhook(hname, cmd) or r
211 r = runhook(hname, cmd) or r
212 return r
212 return r
213
213
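The hook names iterated above come from the [hooks] section of hgrc: plain entries are run as shell commands via runhook(), with the keyword arguments exported as HG_* environment variables, while entries prefixed with 'python:' are resolved to a module.function by callhook(). A hypothetical configuration (the module name and paths are invented):

[hooks]
commit = echo "committed $HG_NODE" >> /tmp/commits.log
pretag.check = python:myhooks.checktag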
214 tag_disallowed = ':\r\n'
214 tag_disallowed = ':\r\n'
215
215
216 def tag(self, name, node, message, local, user, date):
216 def tag(self, name, node, message, local, user, date):
217 '''tag a revision with a symbolic name.
217 '''tag a revision with a symbolic name.
218
218
219 if local is True, the tag is stored in a per-repository file.
219 if local is True, the tag is stored in a per-repository file.
220 otherwise, it is stored in the .hgtags file, and a new
220 otherwise, it is stored in the .hgtags file, and a new
221 changeset is committed with the change.
221 changeset is committed with the change.
222
222
223 keyword arguments:
223 keyword arguments:
224
224
225 local: whether to store tag in non-version-controlled file
225 local: whether to store tag in non-version-controlled file
226 (default False)
226 (default False)
227
227
228 message: commit message to use if committing
228 message: commit message to use if committing
229
229
230 user: name of user to use if committing
230 user: name of user to use if committing
231
231
232 date: date tuple to use if committing'''
232 date: date tuple to use if committing'''
233
233
234 for c in self.tag_disallowed:
234 for c in self.tag_disallowed:
235 if c in name:
235 if c in name:
236 raise util.Abort(_('%r cannot be used in a tag name') % c)
236 raise util.Abort(_('%r cannot be used in a tag name') % c)
237
237
238 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
238 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
239
239
240 if local:
240 if local:
241 # local tags are stored in the current charset
241 # local tags are stored in the current charset
242 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
242 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
243 self.hook('tag', node=hex(node), tag=name, local=local)
243 self.hook('tag', node=hex(node), tag=name, local=local)
244 return
244 return
245
245
246 for x in self.status()[:5]:
246 for x in self.status()[:5]:
247 if '.hgtags' in x:
247 if '.hgtags' in x:
248 raise util.Abort(_('working copy of .hgtags is changed '
248 raise util.Abort(_('working copy of .hgtags is changed '
249 '(please commit .hgtags manually)'))
249 '(please commit .hgtags manually)'))
250
250
251 # committed tags are stored in UTF-8
251 # committed tags are stored in UTF-8
252 line = '%s %s\n' % (hex(node), util.fromlocal(name))
252 line = '%s %s\n' % (hex(node), util.fromlocal(name))
253 self.wfile('.hgtags', 'ab').write(line)
253 self.wfile('.hgtags', 'ab').write(line)
254 if self.dirstate.state('.hgtags') == '?':
254 if self.dirstate.state('.hgtags') == '?':
255 self.add(['.hgtags'])
255 self.add(['.hgtags'])
256
256
257 self.commit(['.hgtags'], message, user, date)
257 self.commit(['.hgtags'], message, user, date)
258 self.hook('tag', node=hex(node), tag=name, local=local)
258 self.hook('tag', node=hex(node), tag=name, local=local)
259
259
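Both tag stores written by this method use the same one-line-per-tag format, "hexnode name": .hgtags is version-controlled and kept in UTF-8, while .hg/localtags stays in the local charset and is never committed. An illustrative line (the node is invented):

0123456789abcdef0123456789abcdef01234567 release-1.0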
260 def tags(self):
260 def tags(self):
261 '''return a mapping of tag to node'''
261 '''return a mapping of tag to node'''
262 if not self.tagscache:
262 if not self.tagscache:
263 self.tagscache = {}
263 self.tagscache = {}
264
264
265 def parsetag(line, context):
265 def parsetag(line, context):
266 if not line:
266 if not line:
267 return
267 return
268 s = line.split(" ", 1)
268 s = line.split(" ", 1)
269 if len(s) != 2:
269 if len(s) != 2:
270 self.ui.warn(_("%s: cannot parse entry\n") % context)
270 self.ui.warn(_("%s: cannot parse entry\n") % context)
271 return
271 return
272 node, key = s
272 node, key = s
273 key = util.tolocal(key.strip()) # stored in UTF-8
273 key = util.tolocal(key.strip()) # stored in UTF-8
274 try:
274 try:
275 bin_n = bin(node)
275 bin_n = bin(node)
276 except TypeError:
276 except TypeError:
277 self.ui.warn(_("%s: node '%s' is not well formed\n") %
277 self.ui.warn(_("%s: node '%s' is not well formed\n") %
278 (context, node))
278 (context, node))
279 return
279 return
280 if bin_n not in self.changelog.nodemap:
280 if bin_n not in self.changelog.nodemap:
281 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
281 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
282 (context, key))
282 (context, key))
283 return
283 return
284 self.tagscache[key] = bin_n
284 self.tagscache[key] = bin_n
285
285
286 # read the tags file from each head, ending with the tip,
286 # read the tags file from each head, ending with the tip,
287 # and add each tag found to the map, with "newer" ones
287 # and add each tag found to the map, with "newer" ones
288 # taking precedence
288 # taking precedence
289 f = None
289 f = None
290 for rev, node, fnode in self._hgtagsnodes():
290 for rev, node, fnode in self._hgtagsnodes():
291 f = (f and f.filectx(fnode) or
291 f = (f and f.filectx(fnode) or
292 self.filectx('.hgtags', fileid=fnode))
292 self.filectx('.hgtags', fileid=fnode))
293 count = 0
293 count = 0
294 for l in f.data().splitlines():
294 for l in f.data().splitlines():
295 count += 1
295 count += 1
296 parsetag(l, _("%s, line %d") % (str(f), count))
296 parsetag(l, _("%s, line %d") % (str(f), count))
297
297
298 try:
298 try:
299 f = self.opener("localtags")
299 f = self.opener("localtags")
300 count = 0
300 count = 0
301 for l in f:
301 for l in f:
302 # localtags are stored in the local character set
302 # localtags are stored in the local character set
303 # while the internal tag table is stored in UTF-8
303 # while the internal tag table is stored in UTF-8
304 l = util.fromlocal(l)
304 l = util.fromlocal(l)
305 count += 1
305 count += 1
306 parsetag(l, _("localtags, line %d") % count)
306 parsetag(l, _("localtags, line %d") % count)
307 except IOError:
307 except IOError:
308 pass
308 pass
309
309
310 self.tagscache['tip'] = self.changelog.tip()
310 self.tagscache['tip'] = self.changelog.tip()
311
311
312 return self.tagscache
312 return self.tagscache
313
313
314 def _hgtagsnodes(self):
314 def _hgtagsnodes(self):
315 heads = self.heads()
315 heads = self.heads()
316 heads.reverse()
316 heads.reverse()
317 last = {}
317 last = {}
318 ret = []
318 ret = []
319 for node in heads:
319 for node in heads:
320 c = self.changectx(node)
320 c = self.changectx(node)
321 rev = c.rev()
321 rev = c.rev()
322 try:
322 try:
323 fnode = c.filenode('.hgtags')
323 fnode = c.filenode('.hgtags')
324 except revlog.LookupError:
324 except revlog.LookupError:
325 continue
325 continue
326 ret.append((rev, node, fnode))
326 ret.append((rev, node, fnode))
327 if fnode in last:
327 if fnode in last:
328 ret[last[fnode]] = None
328 ret[last[fnode]] = None
329 last[fnode] = len(ret) - 1
329 last[fnode] = len(ret) - 1
330 return [item for item in ret if item]
330 return [item for item in ret if item]
331
331
332 def tagslist(self):
332 def tagslist(self):
333 '''return a list of tags ordered by revision'''
333 '''return a list of tags ordered by revision'''
334 l = []
334 l = []
335 for t, n in self.tags().items():
335 for t, n in self.tags().items():
336 try:
336 try:
337 r = self.changelog.rev(n)
337 r = self.changelog.rev(n)
338 except:
338 except:
339 r = -2 # sort to the beginning of the list if unknown
339 r = -2 # sort to the beginning of the list if unknown
340 l.append((r, t, n))
340 l.append((r, t, n))
341 l.sort()
341 l.sort()
342 return [(t, n) for r, t, n in l]
342 return [(t, n) for r, t, n in l]
343
343
344 def nodetags(self, node):
344 def nodetags(self, node):
345 '''return the tags associated with a node'''
345 '''return the tags associated with a node'''
346 if not self.nodetagscache:
346 if not self.nodetagscache:
347 self.nodetagscache = {}
347 self.nodetagscache = {}
348 for t, n in self.tags().items():
348 for t, n in self.tags().items():
349 self.nodetagscache.setdefault(n, []).append(t)
349 self.nodetagscache.setdefault(n, []).append(t)
350 return self.nodetagscache.get(node, [])
350 return self.nodetagscache.get(node, [])
351
351
352 def _branchtags(self):
352 def _branchtags(self):
353 partial, last, lrev = self._readbranchcache()
353 partial, last, lrev = self._readbranchcache()
354
354
355 tiprev = self.changelog.count() - 1
355 tiprev = self.changelog.count() - 1
356 if lrev != tiprev:
356 if lrev != tiprev:
357 self._updatebranchcache(partial, lrev+1, tiprev+1)
357 self._updatebranchcache(partial, lrev+1, tiprev+1)
358 self._writebranchcache(partial, self.changelog.tip(), tiprev)
358 self._writebranchcache(partial, self.changelog.tip(), tiprev)
359
359
360 return partial
360 return partial
361
361
362 def branchtags(self):
362 def branchtags(self):
363 if self.branchcache is not None:
363 if self.branchcache is not None:
364 return self.branchcache
364 return self.branchcache
365
365
366 self.branchcache = {} # avoid recursion in changectx
366 self.branchcache = {} # avoid recursion in changectx
367 partial = self._branchtags()
367 partial = self._branchtags()
368
368
369 # the branch cache is stored on disk as UTF-8, but in the local
369 # the branch cache is stored on disk as UTF-8, but in the local
370 # charset internally
370 # charset internally
371 for k, v in partial.items():
371 for k, v in partial.items():
372 self.branchcache[util.tolocal(k)] = v
372 self.branchcache[util.tolocal(k)] = v
373 return self.branchcache
373 return self.branchcache
374
374
375 def _readbranchcache(self):
375 def _readbranchcache(self):
376 partial = {}
376 partial = {}
377 try:
377 try:
378 f = self.opener("branches.cache")
378 f = self.opener("branches.cache")
379 lines = f.read().split('\n')
379 lines = f.read().split('\n')
380 f.close()
380 f.close()
381 last, lrev = lines.pop(0).rstrip().split(" ", 1)
381 last, lrev = lines.pop(0).rstrip().split(" ", 1)
382 last, lrev = bin(last), int(lrev)
382 last, lrev = bin(last), int(lrev)
383 if not (lrev < self.changelog.count() and
383 if not (lrev < self.changelog.count() and
384 self.changelog.node(lrev) == last): # sanity check
384 self.changelog.node(lrev) == last): # sanity check
385 # invalidate the cache
385 # invalidate the cache
386 raise ValueError('Invalid branch cache: unknown tip')
386 raise ValueError('Invalid branch cache: unknown tip')
387 for l in lines:
387 for l in lines:
388 if not l: continue
388 if not l: continue
389 node, label = l.rstrip().split(" ", 1)
389 node, label = l.rstrip().split(" ", 1)
390 partial[label] = bin(node)
390 partial[label] = bin(node)
391 except (KeyboardInterrupt, util.SignalInterrupt):
391 except (KeyboardInterrupt, util.SignalInterrupt):
392 raise
392 raise
393 except Exception, inst:
393 except Exception, inst:
394 if self.ui.debugflag:
394 if self.ui.debugflag:
395 self.ui.warn(str(inst), '\n')
395 self.ui.warn(str(inst), '\n')
396 partial, last, lrev = {}, nullid, nullrev
396 partial, last, lrev = {}, nullid, nullrev
397 return partial, last, lrev
397 return partial, last, lrev
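# A minimal sketch of the "branches.cache" format parsed above, with
# hypothetical 40-character nodes abbreviated for readability:
#
#   <tip-node-hex> <tip-rev>
#   <node-hex> default
#   <node-hex> stable
#
# The first line is only a validity check against the current changelog;
# the remaining "<node> <label>" lines rebuild the partial branch map.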
398
398
399 def _writebranchcache(self, branches, tip, tiprev):
399 def _writebranchcache(self, branches, tip, tiprev):
400 try:
400 try:
401 f = self.opener("branches.cache", "w")
401 f = self.opener("branches.cache", "w")
402 f.write("%s %s\n" % (hex(tip), tiprev))
402 f.write("%s %s\n" % (hex(tip), tiprev))
403 for label, node in branches.iteritems():
403 for label, node in branches.iteritems():
404 f.write("%s %s\n" % (hex(node), label))
404 f.write("%s %s\n" % (hex(node), label))
405 except IOError:
405 except IOError:
406 pass
406 pass
407
407
408 def _updatebranchcache(self, partial, start, end):
408 def _updatebranchcache(self, partial, start, end):
409 for r in xrange(start, end):
409 for r in xrange(start, end):
410 c = self.changectx(r)
410 c = self.changectx(r)
411 b = c.branch()
411 b = c.branch()
412 if b:
412 if b:
413 partial[b] = c.node()
413 partial[b] = c.node()
414
414
415 def lookup(self, key):
415 def lookup(self, key):
416 if key == '.':
416 if key == '.':
417 key = self.dirstate.parents()[0]
417 key = self.dirstate.parents()[0]
418 if key == nullid:
418 if key == nullid:
419 raise repo.RepoError(_("no revision checked out"))
419 raise repo.RepoError(_("no revision checked out"))
420 elif key == 'null':
420 elif key == 'null':
421 return nullid
421 return nullid
422 n = self.changelog._match(key)
422 n = self.changelog._match(key)
423 if n:
423 if n:
424 return n
424 return n
425 if key in self.tags():
425 if key in self.tags():
426 return self.tags()[key]
426 return self.tags()[key]
427 if key in self.branchtags():
427 if key in self.branchtags():
428 return self.branchtags()[key]
428 return self.branchtags()[key]
429 n = self.changelog._partialmatch(key)
429 n = self.changelog._partialmatch(key)
430 if n:
430 if n:
431 return n
431 return n
432 raise repo.RepoError(_("unknown revision '%s'") % key)
432 raise repo.RepoError(_("unknown revision '%s'") % key)
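# Resolution order of lookup(), restated as a usage sketch (the revision
# names below are examples only):
#
#   repo.lookup('.')          # first dirstate parent, or abort if nullid
#   repo.lookup('null')       # the null revision
#   repo.lookup('0')          # rev number or full hex node, via _match
#   repo.lookup('tip')        # tag names, via tags()
#   repo.lookup('default')    # branch names, via branchtags()
#   repo.lookup('a1b2c3d4')   # unique node prefix, via _partialmatch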
433
433
434 def dev(self):
434 def dev(self):
435 return os.lstat(self.path).st_dev
435 return os.lstat(self.path).st_dev
436
436
437 def local(self):
437 def local(self):
438 return True
438 return True
439
439
440 def join(self, f):
440 def join(self, f):
441 return os.path.join(self.path, f)
441 return os.path.join(self.path, f)
442
442
443 def sjoin(self, f):
443 def sjoin(self, f):
444 f = self.encodefn(f)
444 f = self.encodefn(f)
445 return os.path.join(self.spath, f)
445 return os.path.join(self.spath, f)
446
446
447 def wjoin(self, f):
447 def wjoin(self, f):
448 return os.path.join(self.root, f)
448 return os.path.join(self.root, f)
449
449
450 def file(self, f):
450 def file(self, f):
451 if f[0] == '/':
451 if f[0] == '/':
452 f = f[1:]
452 f = f[1:]
453 return filelog.filelog(self.sopener, f, self.revlogversion)
453 return filelog.filelog(self.sopener, f, self.revlogversion)
454
454
455 def changectx(self, changeid=None):
455 def changectx(self, changeid=None):
456 return context.changectx(self, changeid)
456 return context.changectx(self, changeid)
457
457
458 def workingctx(self):
458 def workingctx(self):
459 return context.workingctx(self)
459 return context.workingctx(self)
460
460
461 def parents(self, changeid=None):
461 def parents(self, changeid=None):
462 '''
462 '''
463 get list of changectxs for parents of changeid or working directory
463 get list of changectxs for parents of changeid or working directory
464 '''
464 '''
465 if changeid is None:
465 if changeid is None:
466 pl = self.dirstate.parents()
466 pl = self.dirstate.parents()
467 else:
467 else:
468 n = self.changelog.lookup(changeid)
468 n = self.changelog.lookup(changeid)
469 pl = self.changelog.parents(n)
469 pl = self.changelog.parents(n)
470 if pl[1] == nullid:
470 if pl[1] == nullid:
471 return [self.changectx(pl[0])]
471 return [self.changectx(pl[0])]
472 return [self.changectx(pl[0]), self.changectx(pl[1])]
472 return [self.changectx(pl[0]), self.changectx(pl[1])]
473
473
474 def filectx(self, path, changeid=None, fileid=None):
474 def filectx(self, path, changeid=None, fileid=None):
475 """changeid can be a changeset revision, node, or tag.
475 """changeid can be a changeset revision, node, or tag.
476 fileid can be a file revision or node."""
476 fileid can be a file revision or node."""
477 return context.filectx(self, path, changeid, fileid)
477 return context.filectx(self, path, changeid, fileid)
478
478
479 def getcwd(self):
479 def getcwd(self):
480 return self.dirstate.getcwd()
480 return self.dirstate.getcwd()
481
481
482 def wfile(self, f, mode='r'):
482 def wfile(self, f, mode='r'):
483 return self.wopener(f, mode)
483 return self.wopener(f, mode)
484
484
485 def _filter(self, filter, filename, data):
485 def _filter(self, filter, filename, data):
486 if filter not in self.filterpats:
486 if filter not in self.filterpats:
487 l = []
487 l = []
488 for pat, cmd in self.ui.configitems(filter):
488 for pat, cmd in self.ui.configitems(filter):
489 mf = util.matcher(self.root, "", [pat], [], [])[1]
489 mf = util.matcher(self.root, "", [pat], [], [])[1]
490 l.append((mf, cmd))
490 l.append((mf, cmd))
491 self.filterpats[filter] = l
491 self.filterpats[filter] = l
492
492
493 for mf, cmd in self.filterpats[filter]:
493 for mf, cmd in self.filterpats[filter]:
494 if mf(filename):
494 if mf(filename):
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 data = util.filter(data, cmd)
496 data = util.filter(data, cmd)
497 break
497 break
498
498
499 return data
499 return data
500
500
501 def wread(self, filename):
501 def wread(self, filename):
502 if self._link(filename):
502 if self._link(filename):
503 data = os.readlink(self.wjoin(filename))
503 data = os.readlink(self.wjoin(filename))
504 else:
504 else:
505 data = self.wopener(filename, 'r').read()
505 data = self.wopener(filename, 'r').read()
506 return self._filter("encode", filename, data)
506 return self._filter("encode", filename, data)
507
507
508 def wwrite(self, filename, data, flags):
508 def wwrite(self, filename, data, flags):
509 data = self._filter("decode", filename, data)
509 data = self._filter("decode", filename, data)
510 if "l" in flags:
510 if "l" in flags:
511 try:
511 try:
512 os.unlink(self.wjoin(filename))
512 os.unlink(self.wjoin(filename))
513 except OSError:
513 except OSError:
514 pass
514 pass
515 os.symlink(data, self.wjoin(filename))
515 os.symlink(data, self.wjoin(filename))
516 else:
516 else:
517 try:
517 try:
518 if self._link(filename):
518 if self._link(filename):
519 os.unlink(self.wjoin(filename))
519 os.unlink(self.wjoin(filename))
520 except OSError:
520 except OSError:
521 pass
521 pass
522 self.wopener(filename, 'w').write(data)
522 self.wopener(filename, 'w').write(data)
523 util.set_exec(self.wjoin(filename), "x" in flags)
523 util.set_exec(self.wjoin(filename), "x" in flags)
524
524
525 def wwritedata(self, filename, data):
525 def wwritedata(self, filename, data):
526 return self._filter("decode", filename, data)
526 return self._filter("decode", filename, data)
527
527
528 def transaction(self):
528 def transaction(self):
529 tr = self.transhandle
529 tr = self.transhandle
530 if tr != None and tr.running():
530 if tr != None and tr.running():
531 return tr.nest()
531 return tr.nest()
532
532
533 # save dirstate for rollback
533 # save dirstate for rollback
534 try:
534 try:
535 ds = self.opener("dirstate").read()
535 ds = self.opener("dirstate").read()
536 except IOError:
536 except IOError:
537 ds = ""
537 ds = ""
538 self.opener("journal.dirstate", "w").write(ds)
538 self.opener("journal.dirstate", "w").write(ds)
539
539
540 renames = [(self.sjoin("journal"), self.sjoin("undo")),
540 renames = [(self.sjoin("journal"), self.sjoin("undo")),
541 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
541 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
542 tr = transaction.transaction(self.ui.warn, self.sopener,
542 tr = transaction.transaction(self.ui.warn, self.sopener,
543 self.sjoin("journal"),
543 self.sjoin("journal"),
544 aftertrans(renames))
544 aftertrans(renames))
545 self.transhandle = tr
545 self.transhandle = tr
546 return tr
546 return tr
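# Hedged note: when the transaction above completes, aftertrans() renames
# the journal files into their "undo" counterparts, roughly
#
#   <store>/journal        -> <store>/undo
#   .hg/journal.dirstate   -> .hg/undo.dirstate
#
# which is what rollback() below looks for.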
547
547
548 def recover(self):
548 def recover(self):
549 l = self.lock()
549 l = self.lock()
550 if os.path.exists(self.sjoin("journal")):
550 if os.path.exists(self.sjoin("journal")):
551 self.ui.status(_("rolling back interrupted transaction\n"))
551 self.ui.status(_("rolling back interrupted transaction\n"))
552 transaction.rollback(self.sopener, self.sjoin("journal"))
552 transaction.rollback(self.sopener, self.sjoin("journal"))
553 self.reload()
553 self.reload()
554 return True
554 return True
555 else:
555 else:
556 self.ui.warn(_("no interrupted transaction available\n"))
556 self.ui.warn(_("no interrupted transaction available\n"))
557 return False
557 return False
558
558
559 def rollback(self, wlock=None):
559 def rollback(self, wlock=None):
560 if not wlock:
560 if not wlock:
561 wlock = self.wlock()
561 wlock = self.wlock()
562 l = self.lock()
562 l = self.lock()
563 if os.path.exists(self.sjoin("undo")):
563 if os.path.exists(self.sjoin("undo")):
564 self.ui.status(_("rolling back last transaction\n"))
564 self.ui.status(_("rolling back last transaction\n"))
565 transaction.rollback(self.sopener, self.sjoin("undo"))
565 transaction.rollback(self.sopener, self.sjoin("undo"))
566 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
566 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
567 self.reload()
567 self.reload()
568 self.wreload()
568 self.wreload()
569 else:
569 else:
570 self.ui.warn(_("no rollback information available\n"))
570 self.ui.warn(_("no rollback information available\n"))
571
571
572 def wreload(self):
572 def wreload(self):
573 self.dirstate.read()
573 self.dirstate.read()
574
574
575 def reload(self):
575 def reload(self):
576 self.changelog.load()
576 self.changelog.load()
577 self.manifest.load()
577 self.manifest.load()
578 self.tagscache = None
578 self.tagscache = None
579 self.nodetagscache = None
579 self.nodetagscache = None
580
580
581 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
581 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
582 desc=None):
582 desc=None):
583 try:
583 try:
584 l = lock.lock(lockname, 0, releasefn, desc=desc)
584 l = lock.lock(lockname, 0, releasefn, desc=desc)
585 except lock.LockHeld, inst:
585 except lock.LockHeld, inst:
586 if not wait:
586 if not wait:
587 raise
587 raise
588 self.ui.warn(_("waiting for lock on %s held by %r\n") %
588 self.ui.warn(_("waiting for lock on %s held by %r\n") %
589 (desc, inst.locker))
589 (desc, inst.locker))
590 # default to 600 seconds timeout
590 # default to 600 seconds timeout
591 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
591 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
592 releasefn, desc=desc)
592 releasefn, desc=desc)
593 if acquirefn:
593 if acquirefn:
594 acquirefn()
594 acquirefn()
595 return l
595 return l
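# The retry timeout used above is read from the ui section of the
# configuration; a minimal hgrc sketch (600 seconds is the default):
#
#   [ui]
#   timeout = 600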
596
596
597 def lock(self, wait=1):
597 def lock(self, wait=1):
598 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
598 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
599 desc=_('repository %s') % self.origroot)
599 desc=_('repository %s') % self.origroot)
600
600
601 def wlock(self, wait=1):
601 def wlock(self, wait=1):
602 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
602 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
603 self.wreload,
603 self.wreload,
604 desc=_('working directory of %s') % self.origroot)
604 desc=_('working directory of %s') % self.origroot)
605
605
606 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
606 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
607 """
607 """
608 commit an individual file as part of a larger transaction
608 commit an individual file as part of a larger transaction
609 """
609 """
610
610
611 t = self.wread(fn)
611 t = self.wread(fn)
612 fl = self.file(fn)
612 fl = self.file(fn)
613 fp1 = manifest1.get(fn, nullid)
613 fp1 = manifest1.get(fn, nullid)
614 fp2 = manifest2.get(fn, nullid)
614 fp2 = manifest2.get(fn, nullid)
615
615
616 meta = {}
616 meta = {}
617 cp = self.dirstate.copied(fn)
617 cp = self.dirstate.copied(fn)
618 if cp:
618 if cp:
619 # Mark the new revision of this file as a copy of another
620 # file. This copy data will effectively act as a parent
621 # of this new revision. If this is a merge, the first
622 # parent will be the nullid (meaning "look up the copy data")
623 # and the second one will be the other parent. For example:
624 #
625 # 0 --- 1 --- 3 rev1 changes file foo
626 # \ / rev2 renames foo to bar and changes it
627 # \- 2 -/ rev3 should have bar with all changes and
628 # should record that bar descends from
629 # bar in rev2 and foo in rev1
630 #
631 # this allows this merge to succeed:
632 #
633 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
634 # \ / merging rev3 and rev4 should use bar@rev2
635 # \- 2 --- 4 as the merge base
636 #
619 meta["copy"] = cp
637 meta["copy"] = cp
620 if not manifest2: # not a branch merge
638 if not manifest2: # not a branch merge
621 meta["copyrev"] = hex(manifest1.get(cp, nullid))
639 meta["copyrev"] = hex(manifest1.get(cp, nullid))
622 fp2 = nullid
640 fp2 = nullid
623 elif fp2 != nullid: # copied on remote side
641 elif fp2 != nullid: # copied on remote side
624 meta["copyrev"] = hex(manifest1.get(cp, nullid))
642 meta["copyrev"] = hex(manifest1.get(cp, nullid))
625 elif fp1 != nullid: # copied on local side, reversed
643 elif fp1 != nullid: # copied on local side, reversed
626 meta["copyrev"] = hex(manifest2.get(cp))
644 meta["copyrev"] = hex(manifest2.get(cp))
627 fp2 = nullid
645 fp2 = fp1
628 else: # directory rename
646 else: # directory rename
629 meta["copyrev"] = hex(manifest1.get(cp, nullid))
647 meta["copyrev"] = hex(manifest1.get(cp, nullid))
630 self.ui.debug(_(" %s: copy %s:%s\n") %
648 self.ui.debug(_(" %s: copy %s:%s\n") %
631 (fn, cp, meta["copyrev"]))
649 (fn, cp, meta["copyrev"]))
632 fp1 = nullid
650 fp1 = nullid
633 elif fp2 != nullid:
651 elif fp2 != nullid:
634 # is one parent an ancestor of the other?
652 # is one parent an ancestor of the other?
635 fpa = fl.ancestor(fp1, fp2)
653 fpa = fl.ancestor(fp1, fp2)
636 if fpa == fp1:
654 if fpa == fp1:
637 fp1, fp2 = fp2, nullid
655 fp1, fp2 = fp2, nullid
638 elif fpa == fp2:
656 elif fpa == fp2:
639 fp2 = nullid
657 fp2 = nullid
640
658
641 # is the file unmodified from the parent? report existing entry
659 # is the file unmodified from the parent? report existing entry
642 if fp2 == nullid and not fl.cmp(fp1, t):
660 if fp2 == nullid and not fl.cmp(fp1, t):
643 return fp1
661 return fp1
644
662
645 changelist.append(fn)
663 changelist.append(fn)
646 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
664 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
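# A minimal sketch of the copy metadata recorded above when a renamed file
# is committed (file names are examples; the node comes from manifest1):
#
#   meta = {"copy": "foo",
#           "copyrev": hex(manifest1.get("foo", nullid))}
#
# The copy source then acts as an extra parent of the new file revision,
# as described in the comment block inside filecommit().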
647
665
648 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
666 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
649 if p1 is None:
667 if p1 is None:
650 p1, p2 = self.dirstate.parents()
668 p1, p2 = self.dirstate.parents()
651 return self.commit(files=files, text=text, user=user, date=date,
669 return self.commit(files=files, text=text, user=user, date=date,
652 p1=p1, p2=p2, wlock=wlock, extra=extra)
670 p1=p1, p2=p2, wlock=wlock, extra=extra)
653
671
654 def commit(self, files=None, text="", user=None, date=None,
672 def commit(self, files=None, text="", user=None, date=None,
655 match=util.always, force=False, lock=None, wlock=None,
673 match=util.always, force=False, lock=None, wlock=None,
656 force_editor=False, p1=None, p2=None, extra={}):
674 force_editor=False, p1=None, p2=None, extra={}):
657
675
658 commit = []
676 commit = []
659 remove = []
677 remove = []
660 changed = []
678 changed = []
661 use_dirstate = (p1 is None) # not rawcommit
679 use_dirstate = (p1 is None) # not rawcommit
662 extra = extra.copy()
680 extra = extra.copy()
663
681
664 if use_dirstate:
682 if use_dirstate:
665 if files:
683 if files:
666 for f in files:
684 for f in files:
667 s = self.dirstate.state(f)
685 s = self.dirstate.state(f)
668 if s in 'nmai':
686 if s in 'nmai':
669 commit.append(f)
687 commit.append(f)
670 elif s == 'r':
688 elif s == 'r':
671 remove.append(f)
689 remove.append(f)
672 else:
690 else:
673 self.ui.warn(_("%s not tracked!\n") % f)
691 self.ui.warn(_("%s not tracked!\n") % f)
674 else:
692 else:
675 changes = self.status(match=match)[:5]
693 changes = self.status(match=match)[:5]
676 modified, added, removed, deleted, unknown = changes
694 modified, added, removed, deleted, unknown = changes
677 commit = modified + added
695 commit = modified + added
678 remove = removed
696 remove = removed
679 else:
697 else:
680 commit = files
698 commit = files
681
699
682 if use_dirstate:
700 if use_dirstate:
683 p1, p2 = self.dirstate.parents()
701 p1, p2 = self.dirstate.parents()
684 update_dirstate = True
702 update_dirstate = True
685 else:
703 else:
686 p1, p2 = p1, p2 or nullid
704 p1, p2 = p1, p2 or nullid
687 update_dirstate = (self.dirstate.parents()[0] == p1)
705 update_dirstate = (self.dirstate.parents()[0] == p1)
688
706
689 c1 = self.changelog.read(p1)
707 c1 = self.changelog.read(p1)
690 c2 = self.changelog.read(p2)
708 c2 = self.changelog.read(p2)
691 m1 = self.manifest.read(c1[0]).copy()
709 m1 = self.manifest.read(c1[0]).copy()
692 m2 = self.manifest.read(c2[0])
710 m2 = self.manifest.read(c2[0])
693
711
694 if use_dirstate:
712 if use_dirstate:
695 branchname = self.workingctx().branch()
713 branchname = self.workingctx().branch()
696 try:
714 try:
697 branchname = branchname.decode('UTF-8').encode('UTF-8')
715 branchname = branchname.decode('UTF-8').encode('UTF-8')
698 except UnicodeDecodeError:
716 except UnicodeDecodeError:
699 raise util.Abort(_('branch name not in UTF-8!'))
717 raise util.Abort(_('branch name not in UTF-8!'))
700 else:
718 else:
701 branchname = ""
719 branchname = ""
702
720
703 if use_dirstate:
721 if use_dirstate:
704 oldname = c1[5].get("branch", "") # stored in UTF-8
722 oldname = c1[5].get("branch", "") # stored in UTF-8
705 if not commit and not remove and not force and p2 == nullid and \
723 if not commit and not remove and not force and p2 == nullid and \
706 branchname == oldname:
724 branchname == oldname:
707 self.ui.status(_("nothing changed\n"))
725 self.ui.status(_("nothing changed\n"))
708 return None
726 return None
709
727
710 xp1 = hex(p1)
728 xp1 = hex(p1)
711 if p2 == nullid: xp2 = ''
729 if p2 == nullid: xp2 = ''
712 else: xp2 = hex(p2)
730 else: xp2 = hex(p2)
713
731
714 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
732 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
715
733
716 if not wlock:
734 if not wlock:
717 wlock = self.wlock()
735 wlock = self.wlock()
718 if not lock:
736 if not lock:
719 lock = self.lock()
737 lock = self.lock()
720 tr = self.transaction()
738 tr = self.transaction()
721
739
722 # check in files
740 # check in files
723 new = {}
741 new = {}
724 linkrev = self.changelog.count()
742 linkrev = self.changelog.count()
725 commit.sort()
743 commit.sort()
726 is_exec = util.execfunc(self.root, m1.execf)
744 is_exec = util.execfunc(self.root, m1.execf)
727 is_link = util.linkfunc(self.root, m1.linkf)
745 is_link = util.linkfunc(self.root, m1.linkf)
728 for f in commit:
746 for f in commit:
729 self.ui.note(f + "\n")
747 self.ui.note(f + "\n")
730 try:
748 try:
731 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
749 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
732 m1.set(f, is_exec(f), is_link(f))
750 m1.set(f, is_exec(f), is_link(f))
733 except OSError:
751 except OSError:
734 if use_dirstate:
752 if use_dirstate:
735 self.ui.warn(_("trouble committing %s!\n") % f)
753 self.ui.warn(_("trouble committing %s!\n") % f)
736 raise
754 raise
737 else:
755 else:
738 remove.append(f)
756 remove.append(f)
739
757
740 # update manifest
758 # update manifest
741 m1.update(new)
759 m1.update(new)
742 remove.sort()
760 remove.sort()
743 removed = []
761 removed = []
744
762
745 for f in remove:
763 for f in remove:
746 if f in m1:
764 if f in m1:
747 del m1[f]
765 del m1[f]
748 removed.append(f)
766 removed.append(f)
749 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
767 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
750
768
751 # add changeset
769 # add changeset
752 new = new.keys()
770 new = new.keys()
753 new.sort()
771 new.sort()
754
772
755 user = user or self.ui.username()
773 user = user or self.ui.username()
756 if not text or force_editor:
774 if not text or force_editor:
757 edittext = []
775 edittext = []
758 if text:
776 if text:
759 edittext.append(text)
777 edittext.append(text)
760 edittext.append("")
778 edittext.append("")
761 edittext.append("HG: user: %s" % user)
779 edittext.append("HG: user: %s" % user)
762 if p2 != nullid:
780 if p2 != nullid:
763 edittext.append("HG: branch merge")
781 edittext.append("HG: branch merge")
764 if branchname:
782 if branchname:
765 edittext.append("HG: branch %s" % util.tolocal(branchname))
783 edittext.append("HG: branch %s" % util.tolocal(branchname))
766 edittext.extend(["HG: changed %s" % f for f in changed])
784 edittext.extend(["HG: changed %s" % f for f in changed])
767 edittext.extend(["HG: removed %s" % f for f in removed])
785 edittext.extend(["HG: removed %s" % f for f in removed])
768 if not changed and not remove:
786 if not changed and not remove:
769 edittext.append("HG: no files changed")
787 edittext.append("HG: no files changed")
770 edittext.append("")
788 edittext.append("")
771 # run editor in the repository root
789 # run editor in the repository root
772 olddir = os.getcwd()
790 olddir = os.getcwd()
773 os.chdir(self.root)
791 os.chdir(self.root)
774 text = self.ui.edit("\n".join(edittext), user)
792 text = self.ui.edit("\n".join(edittext), user)
775 os.chdir(olddir)
793 os.chdir(olddir)
776
794
777 lines = [line.rstrip() for line in text.rstrip().splitlines()]
795 lines = [line.rstrip() for line in text.rstrip().splitlines()]
778 while lines and not lines[0]:
796 while lines and not lines[0]:
779 del lines[0]
797 del lines[0]
780 if not lines:
798 if not lines:
781 return None
799 return None
782 text = '\n'.join(lines)
800 text = '\n'.join(lines)
783 if branchname:
801 if branchname:
784 extra["branch"] = branchname
802 extra["branch"] = branchname
785 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
803 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
786 user, date, extra)
804 user, date, extra)
787 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
805 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
788 parent2=xp2)
806 parent2=xp2)
789 tr.close()
807 tr.close()
790
808
791 if self.branchcache and "branch" in extra:
809 if self.branchcache and "branch" in extra:
792 self.branchcache[util.tolocal(extra["branch"])] = n
810 self.branchcache[util.tolocal(extra["branch"])] = n
793
811
794 if use_dirstate or update_dirstate:
812 if use_dirstate or update_dirstate:
795 self.dirstate.setparents(n)
813 self.dirstate.setparents(n)
796 if use_dirstate:
814 if use_dirstate:
797 self.dirstate.update(new, "n")
815 self.dirstate.update(new, "n")
798 self.dirstate.forget(removed)
816 self.dirstate.forget(removed)
799
817
800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
818 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
801 return n
819 return n
802
820
803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
821 def walk(self, node=None, files=[], match=util.always, badmatch=None):
804 '''
822 '''
805 walk recursively through the directory tree or a given
823 walk recursively through the directory tree or a given
806 changeset, finding all files matched by the match
824 changeset, finding all files matched by the match
807 function
825 function
808
826
809 results are yielded in a tuple (src, filename), where src
827 results are yielded in a tuple (src, filename), where src
810 is one of:
828 is one of:
811 'f' the file was found in the directory tree
829 'f' the file was found in the directory tree
812 'm' the file was only in the dirstate and not in the tree
830 'm' the file was only in the dirstate and not in the tree
813 'b' file was not found and matched badmatch
831 'b' file was not found and matched badmatch
814 '''
832 '''
815
833
816 if node:
834 if node:
817 fdict = dict.fromkeys(files)
835 fdict = dict.fromkeys(files)
818 for fn in self.manifest.read(self.changelog.read(node)[0]):
836 for fn in self.manifest.read(self.changelog.read(node)[0]):
819 for ffn in fdict:
837 for ffn in fdict:
820 # match if the file is the exact name or a directory
838 # match if the file is the exact name or a directory
821 if ffn == fn or fn.startswith("%s/" % ffn):
839 if ffn == fn or fn.startswith("%s/" % ffn):
822 del fdict[ffn]
840 del fdict[ffn]
823 break
841 break
824 if match(fn):
842 if match(fn):
825 yield 'm', fn
843 yield 'm', fn
826 for fn in fdict:
844 for fn in fdict:
827 if badmatch and badmatch(fn):
845 if badmatch and badmatch(fn):
828 if match(fn):
846 if match(fn):
829 yield 'b', fn
847 yield 'b', fn
830 else:
848 else:
831 self.ui.warn(_('%s: No such file in rev %s\n') % (
849 self.ui.warn(_('%s: No such file in rev %s\n') % (
832 util.pathto(self.getcwd(), fn), short(node)))
850 util.pathto(self.getcwd(), fn), short(node)))
833 else:
851 else:
834 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
852 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
835 yield src, fn
853 yield src, fn
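# A hedged usage sketch of the (src, fn) pairs yielded above; process() is
# a hypothetical callback:
#
#   for src, fn in repo.walk(files=['foo']):
#       if src == 'b':
#           continue              # only matched badmatch
#       process(fn)               # 'f' (working dir) or 'm' (dirstate only)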
836
854
837 def status(self, node1=None, node2=None, files=[], match=util.always,
855 def status(self, node1=None, node2=None, files=[], match=util.always,
838 wlock=None, list_ignored=False, list_clean=False):
856 wlock=None, list_ignored=False, list_clean=False):
839 """return status of files between two nodes or node and working directory
857 """return status of files between two nodes or node and working directory
840
858
841 If node1 is None, use the first dirstate parent instead.
859 If node1 is None, use the first dirstate parent instead.
842 If node2 is None, compare node1 with working directory.
860 If node2 is None, compare node1 with working directory.
843 """
861 """
844
862
845 def fcmp(fn, mf):
863 def fcmp(fn, mf):
846 t1 = self.wread(fn)
864 t1 = self.wread(fn)
847 return self.file(fn).cmp(mf.get(fn, nullid), t1)
865 return self.file(fn).cmp(mf.get(fn, nullid), t1)
848
866
849 def mfmatches(node):
867 def mfmatches(node):
850 change = self.changelog.read(node)
868 change = self.changelog.read(node)
851 mf = self.manifest.read(change[0]).copy()
869 mf = self.manifest.read(change[0]).copy()
852 for fn in mf.keys():
870 for fn in mf.keys():
853 if not match(fn):
871 if not match(fn):
854 del mf[fn]
872 del mf[fn]
855 return mf
873 return mf
856
874
857 modified, added, removed, deleted, unknown = [], [], [], [], []
875 modified, added, removed, deleted, unknown = [], [], [], [], []
858 ignored, clean = [], []
876 ignored, clean = [], []
859
877
860 compareworking = False
878 compareworking = False
861 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
879 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
862 compareworking = True
880 compareworking = True
863
881
864 if not compareworking:
882 if not compareworking:
865 # read the manifest from node1 before the manifest from node2,
883 # read the manifest from node1 before the manifest from node2,
866 # so that we'll hit the manifest cache if we're going through
884 # so that we'll hit the manifest cache if we're going through
867 # all the revisions in parent->child order.
885 # all the revisions in parent->child order.
868 mf1 = mfmatches(node1)
886 mf1 = mfmatches(node1)
869
887
870 # are we comparing the working directory?
888 # are we comparing the working directory?
871 if not node2:
889 if not node2:
872 if not wlock:
890 if not wlock:
873 try:
891 try:
874 wlock = self.wlock(wait=0)
892 wlock = self.wlock(wait=0)
875 except lock.LockException:
893 except lock.LockException:
876 wlock = None
894 wlock = None
877 (lookup, modified, added, removed, deleted, unknown,
895 (lookup, modified, added, removed, deleted, unknown,
878 ignored, clean) = self.dirstate.status(files, match,
896 ignored, clean) = self.dirstate.status(files, match,
879 list_ignored, list_clean)
897 list_ignored, list_clean)
880
898
881 # are we comparing working dir against its parent?
899 # are we comparing working dir against its parent?
882 if compareworking:
900 if compareworking:
883 if lookup:
901 if lookup:
884 # do a full compare of any files that might have changed
902 # do a full compare of any files that might have changed
885 mf2 = mfmatches(self.dirstate.parents()[0])
903 mf2 = mfmatches(self.dirstate.parents()[0])
886 for f in lookup:
904 for f in lookup:
887 if fcmp(f, mf2):
905 if fcmp(f, mf2):
888 modified.append(f)
906 modified.append(f)
889 else:
907 else:
890 clean.append(f)
908 clean.append(f)
891 if wlock is not None:
909 if wlock is not None:
892 self.dirstate.update([f], "n")
910 self.dirstate.update([f], "n")
893 else:
911 else:
894 # we are comparing working dir against non-parent
912 # we are comparing working dir against non-parent
895 # generate a pseudo-manifest for the working dir
913 # generate a pseudo-manifest for the working dir
896 # XXX: create it in dirstate.py ?
914 # XXX: create it in dirstate.py ?
897 mf2 = mfmatches(self.dirstate.parents()[0])
915 mf2 = mfmatches(self.dirstate.parents()[0])
898 is_exec = util.execfunc(self.root, mf2.execf)
916 is_exec = util.execfunc(self.root, mf2.execf)
899 is_link = util.linkfunc(self.root, mf2.linkf)
917 is_link = util.linkfunc(self.root, mf2.linkf)
900 for f in lookup + modified + added:
918 for f in lookup + modified + added:
901 mf2[f] = ""
919 mf2[f] = ""
902 mf2.set(f, is_exec(f), is_link(f))
920 mf2.set(f, is_exec(f), is_link(f))
903 for f in removed:
921 for f in removed:
904 if f in mf2:
922 if f in mf2:
905 del mf2[f]
923 del mf2[f]
906 else:
924 else:
907 # we are comparing two revisions
925 # we are comparing two revisions
908 mf2 = mfmatches(node2)
926 mf2 = mfmatches(node2)
909
927
910 if not compareworking:
928 if not compareworking:
911 # flush lists from dirstate before comparing manifests
929 # flush lists from dirstate before comparing manifests
912 modified, added, clean = [], [], []
930 modified, added, clean = [], [], []
913
931
914 # make sure to sort the files so we talk to the disk in a
932 # make sure to sort the files so we talk to the disk in a
915 # reasonable order
933 # reasonable order
916 mf2keys = mf2.keys()
934 mf2keys = mf2.keys()
917 mf2keys.sort()
935 mf2keys.sort()
918 for fn in mf2keys:
936 for fn in mf2keys:
919 if mf1.has_key(fn):
937 if mf1.has_key(fn):
920 if mf1.flags(fn) != mf2.flags(fn) or \
938 if mf1.flags(fn) != mf2.flags(fn) or \
921 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
939 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
922 modified.append(fn)
940 modified.append(fn)
923 elif list_clean:
941 elif list_clean:
924 clean.append(fn)
942 clean.append(fn)
925 del mf1[fn]
943 del mf1[fn]
926 else:
944 else:
927 added.append(fn)
945 added.append(fn)
928
946
929 removed = mf1.keys()
947 removed = mf1.keys()
930
948
931 # sort and return results:
949 # sort and return results:
932 for l in modified, added, removed, deleted, unknown, ignored, clean:
950 for l in modified, added, removed, deleted, unknown, ignored, clean:
933 l.sort()
951 l.sort()
934 return (modified, added, removed, deleted, unknown, ignored, clean)
952 return (modified, added, removed, deleted, unknown, ignored, clean)
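# A hedged usage sketch of the seven-element result documented above:
#
#   (modified, added, removed, deleted,
#    unknown, ignored, clean) = repo.status(list_ignored=True,
#                                           list_clean=True)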
935
953
936 def add(self, list, wlock=None):
954 def add(self, list, wlock=None):
937 if not wlock:
955 if not wlock:
938 wlock = self.wlock()
956 wlock = self.wlock()
939 for f in list:
957 for f in list:
940 p = self.wjoin(f)
958 p = self.wjoin(f)
941 islink = os.path.islink(p)
959 islink = os.path.islink(p)
942 if not islink and not os.path.exists(p):
960 if not islink and not os.path.exists(p):
943 self.ui.warn(_("%s does not exist!\n") % f)
961 self.ui.warn(_("%s does not exist!\n") % f)
944 elif not islink and not os.path.isfile(p):
962 elif not islink and not os.path.isfile(p):
945 self.ui.warn(_("%s not added: only files and symlinks "
963 self.ui.warn(_("%s not added: only files and symlinks "
946 "supported currently\n") % f)
964 "supported currently\n") % f)
947 elif self.dirstate.state(f) in 'an':
965 elif self.dirstate.state(f) in 'an':
948 self.ui.warn(_("%s already tracked!\n") % f)
966 self.ui.warn(_("%s already tracked!\n") % f)
949 else:
967 else:
950 self.dirstate.update([f], "a")
968 self.dirstate.update([f], "a")
951
969
952 def forget(self, list, wlock=None):
970 def forget(self, list, wlock=None):
953 if not wlock:
971 if not wlock:
954 wlock = self.wlock()
972 wlock = self.wlock()
955 for f in list:
973 for f in list:
956 if self.dirstate.state(f) not in 'ai':
974 if self.dirstate.state(f) not in 'ai':
957 self.ui.warn(_("%s not added!\n") % f)
975 self.ui.warn(_("%s not added!\n") % f)
958 else:
976 else:
959 self.dirstate.forget([f])
977 self.dirstate.forget([f])
960
978
961 def remove(self, list, unlink=False, wlock=None):
979 def remove(self, list, unlink=False, wlock=None):
962 if unlink:
980 if unlink:
963 for f in list:
981 for f in list:
964 try:
982 try:
965 util.unlink(self.wjoin(f))
983 util.unlink(self.wjoin(f))
966 except OSError, inst:
984 except OSError, inst:
967 if inst.errno != errno.ENOENT:
985 if inst.errno != errno.ENOENT:
968 raise
986 raise
969 if not wlock:
987 if not wlock:
970 wlock = self.wlock()
988 wlock = self.wlock()
971 for f in list:
989 for f in list:
972 p = self.wjoin(f)
990 p = self.wjoin(f)
973 if os.path.exists(p):
991 if os.path.exists(p):
974 self.ui.warn(_("%s still exists!\n") % f)
992 self.ui.warn(_("%s still exists!\n") % f)
975 elif self.dirstate.state(f) == 'a':
993 elif self.dirstate.state(f) == 'a':
976 self.dirstate.forget([f])
994 self.dirstate.forget([f])
977 elif f not in self.dirstate:
995 elif f not in self.dirstate:
978 self.ui.warn(_("%s not tracked!\n") % f)
996 self.ui.warn(_("%s not tracked!\n") % f)
979 else:
997 else:
980 self.dirstate.update([f], "r")
998 self.dirstate.update([f], "r")
981
999
982 def undelete(self, list, wlock=None):
1000 def undelete(self, list, wlock=None):
983 p = self.dirstate.parents()[0]
1001 p = self.dirstate.parents()[0]
984 mn = self.changelog.read(p)[0]
1002 mn = self.changelog.read(p)[0]
985 m = self.manifest.read(mn)
1003 m = self.manifest.read(mn)
986 if not wlock:
1004 if not wlock:
987 wlock = self.wlock()
1005 wlock = self.wlock()
988 for f in list:
1006 for f in list:
989 if self.dirstate.state(f) not in "r":
1007 if self.dirstate.state(f) not in "r":
990 self.ui.warn(_("%s not removed!\n") % f)
1008 self.ui.warn(_("%s not removed!\n") % f)
991 else:
1009 else:
992 t = self.file(f).read(m[f])
1010 t = self.file(f).read(m[f])
993 self.wwrite(f, t, m.flags(f))
1011 self.wwrite(f, t, m.flags(f))
994 self.dirstate.update([f], "n")
1012 self.dirstate.update([f], "n")
995
1013
996 def copy(self, source, dest, wlock=None):
1014 def copy(self, source, dest, wlock=None):
997 p = self.wjoin(dest)
1015 p = self.wjoin(dest)
998 if not os.path.exists(p):
1016 if not os.path.exists(p):
999 self.ui.warn(_("%s does not exist!\n") % dest)
1017 self.ui.warn(_("%s does not exist!\n") % dest)
1000 elif not os.path.isfile(p):
1018 elif not os.path.isfile(p):
1001 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1019 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1002 else:
1020 else:
1003 if not wlock:
1021 if not wlock:
1004 wlock = self.wlock()
1022 wlock = self.wlock()
1005 if self.dirstate.state(dest) == '?':
1023 if self.dirstate.state(dest) == '?':
1006 self.dirstate.update([dest], "a")
1024 self.dirstate.update([dest], "a")
1007 self.dirstate.copy(source, dest)
1025 self.dirstate.copy(source, dest)
1008
1026
1009 def heads(self, start=None):
1027 def heads(self, start=None):
1010 heads = self.changelog.heads(start)
1028 heads = self.changelog.heads(start)
1011 # sort the output in rev descending order
1029 # sort the output in rev descending order
1012 heads = [(-self.changelog.rev(h), h) for h in heads]
1030 heads = [(-self.changelog.rev(h), h) for h in heads]
1013 heads.sort()
1031 heads.sort()
1014 return [n for (r, n) in heads]
1032 return [n for (r, n) in heads]
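# Sketch of the sort trick above: pairing each head with its negated
# revision makes a plain sort yield newest-first order, e.g.
#
#   [(-5, 'n5'), (-3, 'n3')]   # rev 5 sorts ahead of rev 3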
1015
1033
1016 def branches(self, nodes):
1034 def branches(self, nodes):
1017 if not nodes:
1035 if not nodes:
1018 nodes = [self.changelog.tip()]
1036 nodes = [self.changelog.tip()]
1019 b = []
1037 b = []
1020 for n in nodes:
1038 for n in nodes:
1021 t = n
1039 t = n
1022 while 1:
1040 while 1:
1023 p = self.changelog.parents(n)
1041 p = self.changelog.parents(n)
1024 if p[1] != nullid or p[0] == nullid:
1042 if p[1] != nullid or p[0] == nullid:
1025 b.append((t, n, p[0], p[1]))
1043 b.append((t, n, p[0], p[1]))
1026 break
1044 break
1027 n = p[0]
1045 n = p[0]
1028 return b
1046 return b
1029
1047
1030 def between(self, pairs):
1048 def between(self, pairs):
1031 r = []
1049 r = []
1032
1050
1033 for top, bottom in pairs:
1051 for top, bottom in pairs:
1034 n, l, i = top, [], 0
1052 n, l, i = top, [], 0
1035 f = 1
1053 f = 1
1036
1054
1037 while n != bottom:
1055 while n != bottom:
1038 p = self.changelog.parents(n)[0]
1056 p = self.changelog.parents(n)[0]
1039 if i == f:
1057 if i == f:
1040 l.append(n)
1058 l.append(n)
1041 f = f * 2
1059 f = f * 2
1042 n = p
1060 n = p
1043 i += 1
1061 i += 1
1044
1062
1045 r.append(l)
1063 r.append(l)
1046
1064
1047 return r
1065 return r
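# Worked example for the sampling above: walking from top towards bottom,
# ancestors are recorded at exponentially growing distances 1, 2, 4, 8, ...
# so a linear run of n changesets is summarised by O(log n) nodes, which
# findincoming() below then narrows with a binary search.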
1048
1066
1049 def findincoming(self, remote, base=None, heads=None, force=False):
1067 def findincoming(self, remote, base=None, heads=None, force=False):
1050 """Return list of roots of the subsets of missing nodes from remote
1068 """Return list of roots of the subsets of missing nodes from remote
1051
1069
1052 If base dict is specified, assume that these nodes and their parents
1070 If base dict is specified, assume that these nodes and their parents
1053 exist on the remote side and that no child of a node of base exists
1071 exist on the remote side and that no child of a node of base exists
1054 in both remote and self.
1072 in both remote and self.
1055 Furthermore, base will be updated to include the nodes that exist
1073 Furthermore, base will be updated to include the nodes that exist
1056 in self and remote but whose children do not exist in both self and remote.
1074 in self and remote but whose children do not exist in both self and remote.
1057 If a list of heads is specified, return only nodes which are heads
1075 If a list of heads is specified, return only nodes which are heads
1058 or ancestors of these heads.
1076 or ancestors of these heads.
1059
1077
1060 All the ancestors of base are in self and in remote.
1078 All the ancestors of base are in self and in remote.
1061 All the descendants of the list returned are missing in self.
1079 All the descendants of the list returned are missing in self.
1062 (and so we know that the rest of the nodes are missing in remote, see
1080 (and so we know that the rest of the nodes are missing in remote, see
1063 outgoing)
1081 outgoing)
1064 """
1082 """
1065 m = self.changelog.nodemap
1083 m = self.changelog.nodemap
1066 search = []
1084 search = []
1067 fetch = {}
1085 fetch = {}
1068 seen = {}
1086 seen = {}
1069 seenbranch = {}
1087 seenbranch = {}
1070 if base == None:
1088 if base == None:
1071 base = {}
1089 base = {}
1072
1090
1073 if not heads:
1091 if not heads:
1074 heads = remote.heads()
1092 heads = remote.heads()
1075
1093
1076 if self.changelog.tip() == nullid:
1094 if self.changelog.tip() == nullid:
1077 base[nullid] = 1
1095 base[nullid] = 1
1078 if heads != [nullid]:
1096 if heads != [nullid]:
1079 return [nullid]
1097 return [nullid]
1080 return []
1098 return []
1081
1099
1082 # assume we're closer to the tip than the root
1100 # assume we're closer to the tip than the root
1083 # and start by examining the heads
1101 # and start by examining the heads
1084 self.ui.status(_("searching for changes\n"))
1102 self.ui.status(_("searching for changes\n"))
1085
1103
1086 unknown = []
1104 unknown = []
1087 for h in heads:
1105 for h in heads:
1088 if h not in m:
1106 if h not in m:
1089 unknown.append(h)
1107 unknown.append(h)
1090 else:
1108 else:
1091 base[h] = 1
1109 base[h] = 1
1092
1110
1093 if not unknown:
1111 if not unknown:
1094 return []
1112 return []
1095
1113
1096 req = dict.fromkeys(unknown)
1114 req = dict.fromkeys(unknown)
1097 reqcnt = 0
1115 reqcnt = 0
1098
1116
1099 # search through remote branches
1117 # search through remote branches
1100 # a 'branch' here is a linear segment of history, with four parts:
1118 # a 'branch' here is a linear segment of history, with four parts:
1101 # head, root, first parent, second parent
1119 # head, root, first parent, second parent
1102 # (a branch always has two parents (or none) by definition)
1120 # (a branch always has two parents (or none) by definition)
1103 unknown = remote.branches(unknown)
1121 unknown = remote.branches(unknown)
1104 while unknown:
1122 while unknown:
1105 r = []
1123 r = []
1106 while unknown:
1124 while unknown:
1107 n = unknown.pop(0)
1125 n = unknown.pop(0)
1108 if n[0] in seen:
1126 if n[0] in seen:
1109 continue
1127 continue
1110
1128
1111 self.ui.debug(_("examining %s:%s\n")
1129 self.ui.debug(_("examining %s:%s\n")
1112 % (short(n[0]), short(n[1])))
1130 % (short(n[0]), short(n[1])))
1113 if n[0] == nullid: # found the end of the branch
1131 if n[0] == nullid: # found the end of the branch
1114 pass
1132 pass
1115 elif n in seenbranch:
1133 elif n in seenbranch:
1116 self.ui.debug(_("branch already found\n"))
1134 self.ui.debug(_("branch already found\n"))
1117 continue
1135 continue
1118 elif n[1] and n[1] in m: # do we know the base?
1136 elif n[1] and n[1] in m: # do we know the base?
1119 self.ui.debug(_("found incomplete branch %s:%s\n")
1137 self.ui.debug(_("found incomplete branch %s:%s\n")
1120 % (short(n[0]), short(n[1])))
1138 % (short(n[0]), short(n[1])))
1121 search.append(n) # schedule branch range for scanning
1139 search.append(n) # schedule branch range for scanning
1122 seenbranch[n] = 1
1140 seenbranch[n] = 1
1123 else:
1141 else:
1124 if n[1] not in seen and n[1] not in fetch:
1142 if n[1] not in seen and n[1] not in fetch:
1125 if n[2] in m and n[3] in m:
1143 if n[2] in m and n[3] in m:
1126 self.ui.debug(_("found new changeset %s\n") %
1144 self.ui.debug(_("found new changeset %s\n") %
1127 short(n[1]))
1145 short(n[1]))
1128 fetch[n[1]] = 1 # earliest unknown
1146 fetch[n[1]] = 1 # earliest unknown
1129 for p in n[2:4]:
1147 for p in n[2:4]:
1130 if p in m:
1148 if p in m:
1131 base[p] = 1 # latest known
1149 base[p] = 1 # latest known
1132
1150
1133 for p in n[2:4]:
1151 for p in n[2:4]:
1134 if p not in req and p not in m:
1152 if p not in req and p not in m:
1135 r.append(p)
1153 r.append(p)
1136 req[p] = 1
1154 req[p] = 1
1137 seen[n[0]] = 1
1155 seen[n[0]] = 1
1138
1156
1139 if r:
1157 if r:
1140 reqcnt += 1
1158 reqcnt += 1
1141 self.ui.debug(_("request %d: %s\n") %
1159 self.ui.debug(_("request %d: %s\n") %
1142 (reqcnt, " ".join(map(short, r))))
1160 (reqcnt, " ".join(map(short, r))))
1143 for p in xrange(0, len(r), 10):
1161 for p in xrange(0, len(r), 10):
1144 for b in remote.branches(r[p:p+10]):
1162 for b in remote.branches(r[p:p+10]):
1145 self.ui.debug(_("received %s:%s\n") %
1163 self.ui.debug(_("received %s:%s\n") %
1146 (short(b[0]), short(b[1])))
1164 (short(b[0]), short(b[1])))
1147 unknown.append(b)
1165 unknown.append(b)
1148
1166
1149 # do binary search on the branches we found
1167 # do binary search on the branches we found
1150 while search:
1168 while search:
1151 n = search.pop(0)
1169 n = search.pop(0)
1152 reqcnt += 1
1170 reqcnt += 1
1153 l = remote.between([(n[0], n[1])])[0]
1171 l = remote.between([(n[0], n[1])])[0]
1154 l.append(n[1])
1172 l.append(n[1])
1155 p = n[0]
1173 p = n[0]
1156 f = 1
1174 f = 1
1157 for i in l:
1175 for i in l:
1158 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1176 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1159 if i in m:
1177 if i in m:
1160 if f <= 2:
1178 if f <= 2:
1161 self.ui.debug(_("found new branch changeset %s\n") %
1179 self.ui.debug(_("found new branch changeset %s\n") %
1162 short(p))
1180 short(p))
1163 fetch[p] = 1
1181 fetch[p] = 1
1164 base[i] = 1
1182 base[i] = 1
1165 else:
1183 else:
1166 self.ui.debug(_("narrowed branch search to %s:%s\n")
1184 self.ui.debug(_("narrowed branch search to %s:%s\n")
1167 % (short(p), short(i)))
1185 % (short(p), short(i)))
1168 search.append((p, i))
1186 search.append((p, i))
1169 break
1187 break
1170 p, f = i, f * 2
1188 p, f = i, f * 2
1171
1189
1172 # sanity check our fetch list
1190 # sanity check our fetch list
1173 for f in fetch.keys():
1191 for f in fetch.keys():
1174 if f in m:
1192 if f in m:
1175 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1193 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1176
1194
1177 if base.keys() == [nullid]:
1195 if base.keys() == [nullid]:
1178 if force:
1196 if force:
1179 self.ui.warn(_("warning: repository is unrelated\n"))
1197 self.ui.warn(_("warning: repository is unrelated\n"))
1180 else:
1198 else:
1181 raise util.Abort(_("repository is unrelated"))
1199 raise util.Abort(_("repository is unrelated"))
1182
1200
1183 self.ui.debug(_("found new changesets starting at ") +
1201 self.ui.debug(_("found new changesets starting at ") +
1184 " ".join([short(f) for f in fetch]) + "\n")
1202 " ".join([short(f) for f in fetch]) + "\n")
1185
1203
1186 self.ui.debug(_("%d total queries\n") % reqcnt)
1204 self.ui.debug(_("%d total queries\n") % reqcnt)
1187
1205
1188 return fetch.keys()
1206 return fetch.keys()
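# A hedged usage sketch of the contract above: after the call, every node
# in base is known on both sides, and the returned roots (plus all of
# their descendants) are still missing locally.
#
#   base = {}
#   roots = repo.findincoming(remote, base=base)
#   # pull() below feeds these roots to remote.changegroup(roots, 'pull')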
1189
1207
1190 def findoutgoing(self, remote, base=None, heads=None, force=False):
1208 def findoutgoing(self, remote, base=None, heads=None, force=False):
1191 """Return list of nodes that are roots of subsets not in remote
1209 """Return list of nodes that are roots of subsets not in remote
1192
1210
1193 If base dict is specified, assume that these nodes and their parents
1211 If base dict is specified, assume that these nodes and their parents
1194 exist on the remote side.
1212 exist on the remote side.
1195 If a list of heads is specified, return only nodes which are heads
1213 If a list of heads is specified, return only nodes which are heads
1196 or ancestors of these heads, and return a second element which
1214 or ancestors of these heads, and return a second element which
1197 contains all remote heads which get new children.
1215 contains all remote heads which get new children.
1198 """
1216 """
1199 if base == None:
1217 if base == None:
1200 base = {}
1218 base = {}
1201 self.findincoming(remote, base, heads, force=force)
1219 self.findincoming(remote, base, heads, force=force)
1202
1220
1203 self.ui.debug(_("common changesets up to ")
1221 self.ui.debug(_("common changesets up to ")
1204 + " ".join(map(short, base.keys())) + "\n")
1222 + " ".join(map(short, base.keys())) + "\n")
1205
1223
1206 remain = dict.fromkeys(self.changelog.nodemap)
1224 remain = dict.fromkeys(self.changelog.nodemap)
1207
1225
1208 # prune everything remote has from the tree
1226 # prune everything remote has from the tree
1209 del remain[nullid]
1227 del remain[nullid]
1210 remove = base.keys()
1228 remove = base.keys()
1211 while remove:
1229 while remove:
1212 n = remove.pop(0)
1230 n = remove.pop(0)
1213 if n in remain:
1231 if n in remain:
1214 del remain[n]
1232 del remain[n]
1215 for p in self.changelog.parents(n):
1233 for p in self.changelog.parents(n):
1216 remove.append(p)
1234 remove.append(p)
1217
1235
1218 # find every node whose parents have been pruned
1236 # find every node whose parents have been pruned
1219 subset = []
1237 subset = []
1220 # find every remote head that will get new children
1238 # find every remote head that will get new children
1221 updated_heads = {}
1239 updated_heads = {}
1222 for n in remain:
1240 for n in remain:
1223 p1, p2 = self.changelog.parents(n)
1241 p1, p2 = self.changelog.parents(n)
1224 if p1 not in remain and p2 not in remain:
1242 if p1 not in remain and p2 not in remain:
1225 subset.append(n)
1243 subset.append(n)
1226 if heads:
1244 if heads:
1227 if p1 in heads:
1245 if p1 in heads:
1228 updated_heads[p1] = True
1246 updated_heads[p1] = True
1229 if p2 in heads:
1247 if p2 in heads:
1230 updated_heads[p2] = True
1248 updated_heads[p2] = True
1231
1249
1232 # this is the set of all roots we have to push
1250 # this is the set of all roots we have to push
1233 if heads:
1251 if heads:
1234 return subset, updated_heads.keys()
1252 return subset, updated_heads.keys()
1235 else:
1253 else:
1236 return subset
1254 return subset
1237
1255
1238 def pull(self, remote, heads=None, force=False, lock=None):
1256 def pull(self, remote, heads=None, force=False, lock=None):
1239 mylock = False
1257 mylock = False
1240 if not lock:
1258 if not lock:
1241 lock = self.lock()
1259 lock = self.lock()
1242 mylock = True
1260 mylock = True
1243
1261
1244 try:
1262 try:
1245 fetch = self.findincoming(remote, force=force)
1263 fetch = self.findincoming(remote, force=force)
1246 if fetch == [nullid]:
1264 if fetch == [nullid]:
1247 self.ui.status(_("requesting all changes\n"))
1265 self.ui.status(_("requesting all changes\n"))
1248
1266
1249 if not fetch:
1267 if not fetch:
1250 self.ui.status(_("no changes found\n"))
1268 self.ui.status(_("no changes found\n"))
1251 return 0
1269 return 0
1252
1270
1253 if heads is None:
1271 if heads is None:
1254 cg = remote.changegroup(fetch, 'pull')
1272 cg = remote.changegroup(fetch, 'pull')
1255 else:
1273 else:
1256 if 'changegroupsubset' not in remote.capabilities:
1274 if 'changegroupsubset' not in remote.capabilities:
1257 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1275 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1258 cg = remote.changegroupsubset(fetch, heads, 'pull')
1276 cg = remote.changegroupsubset(fetch, heads, 'pull')
1259 return self.addchangegroup(cg, 'pull', remote.url())
1277 return self.addchangegroup(cg, 'pull', remote.url())
1260 finally:
1278 finally:
1261 if mylock:
1279 if mylock:
1262 lock.release()
1280 lock.release()
1263
1281
1264 def push(self, remote, force=False, revs=None):
1282 def push(self, remote, force=False, revs=None):
1265 # there are two ways to push to remote repo:
1283 # there are two ways to push to remote repo:
1266 #
1284 #
1267 # addchangegroup assumes local user can lock remote
1285 # addchangegroup assumes local user can lock remote
1268 # repo (local filesystem, old ssh servers).
1286 # repo (local filesystem, old ssh servers).
1269 #
1287 #
1270 # unbundle assumes local user cannot lock remote repo (new ssh
1288 # unbundle assumes local user cannot lock remote repo (new ssh
1271 # servers, http servers).
1289 # servers, http servers).
1272
1290
1273 if remote.capable('unbundle'):
1291 if remote.capable('unbundle'):
1274 return self.push_unbundle(remote, force, revs)
1292 return self.push_unbundle(remote, force, revs)
1275 return self.push_addchangegroup(remote, force, revs)
1293 return self.push_addchangegroup(remote, force, revs)
1276
1294
1277 def prepush(self, remote, force, revs):
1295 def prepush(self, remote, force, revs):
1278 base = {}
1296 base = {}
1279 remote_heads = remote.heads()
1297 remote_heads = remote.heads()
1280 inc = self.findincoming(remote, base, remote_heads, force=force)
1298 inc = self.findincoming(remote, base, remote_heads, force=force)
1281
1299
1282 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1300 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1283 if revs is not None:
1301 if revs is not None:
1284 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1302 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1285 else:
1303 else:
1286 bases, heads = update, self.changelog.heads()
1304 bases, heads = update, self.changelog.heads()
1287
1305
1288 if not bases:
1306 if not bases:
1289 self.ui.status(_("no changes found\n"))
1307 self.ui.status(_("no changes found\n"))
1290 return None, 1
1308 return None, 1
1291 elif not force:
1309 elif not force:
1292 # check if we're creating new remote heads
1310 # check if we're creating new remote heads
1293 # to be a remote head after push, node must be either
1311 # to be a remote head after push, node must be either
1294 # - unknown locally
1312 # - unknown locally
1295 # - a local outgoing head descended from update
1313 # - a local outgoing head descended from update
1296 # - a remote head that's known locally and not
1314 # - a remote head that's known locally and not
1297 # ancestral to an outgoing head
1315 # ancestral to an outgoing head
1298
1316
1299 warn = 0
1317 warn = 0
1300
1318
1301 if remote_heads == [nullid]:
1319 if remote_heads == [nullid]:
1302 warn = 0
1320 warn = 0
1303 elif not revs and len(heads) > len(remote_heads):
1321 elif not revs and len(heads) > len(remote_heads):
1304 warn = 1
1322 warn = 1
1305 else:
1323 else:
1306 newheads = list(heads)
1324 newheads = list(heads)
1307 for r in remote_heads:
1325 for r in remote_heads:
1308 if r in self.changelog.nodemap:
1326 if r in self.changelog.nodemap:
1309 desc = self.changelog.heads(r, heads)
1327 desc = self.changelog.heads(r, heads)
1310 l = [h for h in heads if h in desc]
1328 l = [h for h in heads if h in desc]
1311 if not l:
1329 if not l:
1312 newheads.append(r)
1330 newheads.append(r)
1313 else:
1331 else:
1314 newheads.append(r)
1332 newheads.append(r)
1315 if len(newheads) > len(remote_heads):
1333 if len(newheads) > len(remote_heads):
1316 warn = 1
1334 warn = 1
1317
1335
1318 if warn:
1336 if warn:
1319 self.ui.warn(_("abort: push creates new remote branches!\n"))
1337 self.ui.warn(_("abort: push creates new remote branches!\n"))
1320 self.ui.status(_("(did you forget to merge?"
1338 self.ui.status(_("(did you forget to merge?"
1321 " use push -f to force)\n"))
1339 " use push -f to force)\n"))
1322 return None, 1
1340 return None, 1
1323 elif inc:
1341 elif inc:
1324 self.ui.warn(_("note: unsynced remote changes!\n"))
1342 self.ui.warn(_("note: unsynced remote changes!\n"))
1325
1343
1326
1344
1327 if revs is None:
1345 if revs is None:
1328 cg = self.changegroup(update, 'push')
1346 cg = self.changegroup(update, 'push')
1329 else:
1347 else:
1330 cg = self.changegroupsubset(update, revs, 'push')
1348 cg = self.changegroupsubset(update, revs, 'push')
1331 return cg, remote_heads
1349 return cg, remote_heads
1332
1350
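# Illustrative sketch (not part of the original file): the head check above
# boils down to comparing the number of prospective remote heads after the
# push with the number of heads the remote has now.  A simplified version
# over plain node sets might look like this; the names are hypothetical and
# all revlog bookkeeping is elided.
def would_create_new_heads(local_heads, remote_heads, known_locally, descendants):
    # descendants(r) is assumed to return the local outgoing heads descended from r
    newheads = list(local_heads)
    for r in remote_heads:
        if r in known_locally:
            if not [h for h in local_heads if h in descendants(r)]:
                # remote head with no outgoing descendant remains a head
                newheads.append(r)
        else:
            # a remote head unknown locally will still be a head after the push
            newheads.append(r)
    return len(newheads) > len(remote_heads)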
1333 def push_addchangegroup(self, remote, force, revs):
1351 def push_addchangegroup(self, remote, force, revs):
1334 lock = remote.lock()
1352 lock = remote.lock()
1335
1353
1336 ret = self.prepush(remote, force, revs)
1354 ret = self.prepush(remote, force, revs)
1337 if ret[0] is not None:
1355 if ret[0] is not None:
1338 cg, remote_heads = ret
1356 cg, remote_heads = ret
1339 return remote.addchangegroup(cg, 'push', self.url())
1357 return remote.addchangegroup(cg, 'push', self.url())
1340 return ret[1]
1358 return ret[1]
1341
1359
1342 def push_unbundle(self, remote, force, revs):
1360 def push_unbundle(self, remote, force, revs):
1343 # local repo finds heads on server, finds out what revs it
1361 # local repo finds heads on server, finds out what revs it
1344 # must push. once revs transferred, if server finds it has
1362 # must push. once revs transferred, if server finds it has
1345 # different heads (someone else won commit/push race), server
1363 # different heads (someone else won commit/push race), server
1346 # aborts.
1364 # aborts.
1347
1365
1348 ret = self.prepush(remote, force, revs)
1366 ret = self.prepush(remote, force, revs)
1349 if ret[0] is not None:
1367 if ret[0] is not None:
1350 cg, remote_heads = ret
1368 cg, remote_heads = ret
1351 if force: remote_heads = ['force']
1369 if force: remote_heads = ['force']
1352 return remote.unbundle(cg, remote_heads, 'push')
1370 return remote.unbundle(cg, remote_heads, 'push')
1353 return ret[1]
1371 return ret[1]
1354
1372
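# Illustrative sketch (not part of the original file): the race check that the
# comment above describes is essentially a compare-and-swap on the remote head
# set.  The client sends the heads it last saw; the server only applies the
# bundle if its heads are still the same, or if the client explicitly forced
# the push.  The names below are hypothetical.
def server_side_unbundle(current_heads, expected_heads, apply_bundle):
    if expected_heads != ['force'] and sorted(current_heads) != sorted(expected_heads):
        raise Exception("push race: remote heads changed, pull and merge first")
    return apply_bundle()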
1355 def changegroupinfo(self, nodes):
1373 def changegroupinfo(self, nodes):
1356 self.ui.note(_("%d changesets found\n") % len(nodes))
1374 self.ui.note(_("%d changesets found\n") % len(nodes))
1357 if self.ui.debugflag:
1375 if self.ui.debugflag:
1358 self.ui.debug(_("List of changesets:\n"))
1376 self.ui.debug(_("List of changesets:\n"))
1359 for node in nodes:
1377 for node in nodes:
1360 self.ui.debug("%s\n" % hex(node))
1378 self.ui.debug("%s\n" % hex(node))
1361
1379
1362 def changegroupsubset(self, bases, heads, source):
1380 def changegroupsubset(self, bases, heads, source):
1363 """This function generates a changegroup consisting of all the nodes
1381 """This function generates a changegroup consisting of all the nodes
1364 that are descendants of any of the bases, and ancestors of any of
1382 that are descendants of any of the bases, and ancestors of any of
1365 the heads.
1383 the heads.
1366
1384
1367 It is fairly complex as determining which filenodes and which
1385 It is fairly complex as determining which filenodes and which
1368 manifest nodes need to be included for the changeset to be complete
1386 manifest nodes need to be included for the changeset to be complete
1369 is non-trivial.
1387 is non-trivial.
1370
1388
1371 Another wrinkle is doing the reverse, figuring out which changeset in
1389 Another wrinkle is doing the reverse, figuring out which changeset in
1372 the changegroup a particular filenode or manifestnode belongs to."""
1390 the changegroup a particular filenode or manifestnode belongs to."""
1373
1391
1374 self.hook('preoutgoing', throw=True, source=source)
1392 self.hook('preoutgoing', throw=True, source=source)
1375
1393
1376 # Set up some initial variables
1394 # Set up some initial variables
1377 # Make it easy to refer to self.changelog
1395 # Make it easy to refer to self.changelog
1378 cl = self.changelog
1396 cl = self.changelog
1379 # msng is short for missing - compute the list of changesets in this
1397 # msng is short for missing - compute the list of changesets in this
1380 # changegroup.
1398 # changegroup.
1381 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1399 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1382 self.changegroupinfo(msng_cl_lst)
1400 self.changegroupinfo(msng_cl_lst)
1383 # Some bases may turn out to be superfluous, and some heads may be
1401 # Some bases may turn out to be superfluous, and some heads may be
1384 # too. nodesbetween will return the minimal set of bases and heads
1402 # too. nodesbetween will return the minimal set of bases and heads
1385 # necessary to re-create the changegroup.
1403 # necessary to re-create the changegroup.
1386
1404
1387 # Known heads are the list of heads that it is assumed the recipient
1405 # Known heads are the list of heads that it is assumed the recipient
1388 # of this changegroup will know about.
1406 # of this changegroup will know about.
1389 knownheads = {}
1407 knownheads = {}
1390 # We assume that all parents of bases are known heads.
1408 # We assume that all parents of bases are known heads.
1391 for n in bases:
1409 for n in bases:
1392 for p in cl.parents(n):
1410 for p in cl.parents(n):
1393 if p != nullid:
1411 if p != nullid:
1394 knownheads[p] = 1
1412 knownheads[p] = 1
1395 knownheads = knownheads.keys()
1413 knownheads = knownheads.keys()
1396 if knownheads:
1414 if knownheads:
1397 # Now that we know what heads are known, we can compute which
1415 # Now that we know what heads are known, we can compute which
1398 # changesets are known. The recipient must know about all
1416 # changesets are known. The recipient must know about all
1399 # changesets required to reach the known heads from the null
1417 # changesets required to reach the known heads from the null
1400 # changeset.
1418 # changeset.
1401 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1419 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1402 junk = None
1420 junk = None
1403 # Transform the list into an ersatz set.
1421 # Transform the list into an ersatz set.
1404 has_cl_set = dict.fromkeys(has_cl_set)
1422 has_cl_set = dict.fromkeys(has_cl_set)
1405 else:
1423 else:
1406 # If there were no known heads, the recipient cannot be assumed to
1424 # If there were no known heads, the recipient cannot be assumed to
1407 # know about any changesets.
1425 # know about any changesets.
1408 has_cl_set = {}
1426 has_cl_set = {}
1409
1427
1410 # Make it easy to refer to self.manifest
1428 # Make it easy to refer to self.manifest
1411 mnfst = self.manifest
1429 mnfst = self.manifest
1412 # We don't know which manifests are missing yet
1430 # We don't know which manifests are missing yet
1413 msng_mnfst_set = {}
1431 msng_mnfst_set = {}
1414 # Nor do we know which filenodes are missing.
1432 # Nor do we know which filenodes are missing.
1415 msng_filenode_set = {}
1433 msng_filenode_set = {}
1416
1434
1417 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1435 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1418 junk = None
1436 junk = None
1419
1437
1420 # A changeset always belongs to itself, so the changenode lookup
1438 # A changeset always belongs to itself, so the changenode lookup
1421 # function for a changenode is identity.
1439 # function for a changenode is identity.
1422 def identity(x):
1440 def identity(x):
1423 return x
1441 return x
1424
1442
1425 # A function generating function. Sets up an environment for the
1443 # A function generating function. Sets up an environment for the
1426 # inner function.
1444 # inner function.
1427 def cmp_by_rev_func(revlog):
1445 def cmp_by_rev_func(revlog):
1428 # Compare two nodes by their revision number in the environment's
1446 # Compare two nodes by their revision number in the environment's
1429 # revision history. Since the revision number both represents the
1447 # revision history. Since the revision number both represents the
1430 # most efficient order to read the nodes in, and represents a
1448 # most efficient order to read the nodes in, and represents a
1431 # topological sorting of the nodes, this function is often useful.
1449 # topological sorting of the nodes, this function is often useful.
1432 def cmp_by_rev(a, b):
1450 def cmp_by_rev(a, b):
1433 return cmp(revlog.rev(a), revlog.rev(b))
1451 return cmp(revlog.rev(a), revlog.rev(b))
1434 return cmp_by_rev
1452 return cmp_by_rev
1435
1453
1436 # If we determine that a particular file or manifest node must be a
1454 # If we determine that a particular file or manifest node must be a
1437 # node that the recipient of the changegroup will already have, we can
1455 # node that the recipient of the changegroup will already have, we can
1438 # also assume the recipient will have all the parents. This function
1456 # also assume the recipient will have all the parents. This function
1439 # prunes them from the set of missing nodes.
1457 # prunes them from the set of missing nodes.
1440 def prune_parents(revlog, hasset, msngset):
1458 def prune_parents(revlog, hasset, msngset):
1441 haslst = hasset.keys()
1459 haslst = hasset.keys()
1442 haslst.sort(cmp_by_rev_func(revlog))
1460 haslst.sort(cmp_by_rev_func(revlog))
1443 for node in haslst:
1461 for node in haslst:
1444 parentlst = [p for p in revlog.parents(node) if p != nullid]
1462 parentlst = [p for p in revlog.parents(node) if p != nullid]
1445 while parentlst:
1463 while parentlst:
1446 n = parentlst.pop()
1464 n = parentlst.pop()
1447 if n not in hasset:
1465 if n not in hasset:
1448 hasset[n] = 1
1466 hasset[n] = 1
1449 p = [p for p in revlog.parents(n) if p != nullid]
1467 p = [p for p in revlog.parents(n) if p != nullid]
1450 parentlst.extend(p)
1468 parentlst.extend(p)
1451 for n in hasset:
1469 for n in hasset:
1452 msngset.pop(n, None)
1470 msngset.pop(n, None)
1453
1471
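# Worked example (not part of the original file): the same pruning expressed
# over a plain parent map.  If the recipient has a node, it necessarily has
# every ancestor of that node, so none of them need to be sent.
def prune_with_parentmap(parents, hasset, msngset):
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p not in hasset:
                hasset[p] = 1
                stack.append(p)
    for n in hasset:
        msngset.pop(n, None)

# With parents = {'c': ['b'], 'b': ['a'], 'a': []} and hasset = {'c': 1},
# the walk adds 'b' and 'a' to hasset, and all three are dropped from msngset.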
1454 # This is a function generating function used to set up an environment
1472 # This is a function generating function used to set up an environment
1455 # for the inner function to execute in.
1473 # for the inner function to execute in.
1456 def manifest_and_file_collector(changedfileset):
1474 def manifest_and_file_collector(changedfileset):
1457 # This is an information gathering function that gathers
1475 # This is an information gathering function that gathers
1458 # information from each changeset node that goes out as part of
1476 # information from each changeset node that goes out as part of
1459 # the changegroup. The information gathered is a list of which
1477 # the changegroup. The information gathered is a list of which
1460 # manifest nodes are potentially required (the recipient may
1478 # manifest nodes are potentially required (the recipient may
1461 # already have them) and a total list of all files which were
1479 # already have them) and a total list of all files which were
1462 # changed in any changeset in the changegroup.
1480 # changed in any changeset in the changegroup.
1463 #
1481 #
1464 # We also remember the first changenode we saw any manifest
1482 # We also remember the first changenode we saw any manifest
1465 # referenced by so we can later determine which changenode 'owns'
1483 # referenced by so we can later determine which changenode 'owns'
1466 # the manifest.
1484 # the manifest.
1467 def collect_manifests_and_files(clnode):
1485 def collect_manifests_and_files(clnode):
1468 c = cl.read(clnode)
1486 c = cl.read(clnode)
1469 for f in c[3]:
1487 for f in c[3]:
1470 # This is to make sure we only have one instance of each
1488 # This is to make sure we only have one instance of each
1471 # filename string for each filename.
1489 # filename string for each filename.
1472 changedfileset.setdefault(f, f)
1490 changedfileset.setdefault(f, f)
1473 msng_mnfst_set.setdefault(c[0], clnode)
1491 msng_mnfst_set.setdefault(c[0], clnode)
1474 return collect_manifests_and_files
1492 return collect_manifests_and_files
1475
1493
1476 # Figure out which manifest nodes (of the ones we think might be part
1494 # Figure out which manifest nodes (of the ones we think might be part
1477 # of the changegroup) the recipient must know about and remove them
1495 # of the changegroup) the recipient must know about and remove them
1478 # from the changegroup.
1496 # from the changegroup.
1479 def prune_manifests():
1497 def prune_manifests():
1480 has_mnfst_set = {}
1498 has_mnfst_set = {}
1481 for n in msng_mnfst_set:
1499 for n in msng_mnfst_set:
1482 # If a 'missing' manifest thinks it belongs to a changenode
1500 # If a 'missing' manifest thinks it belongs to a changenode
1483 # the recipient is assumed to have, obviously the recipient
1501 # the recipient is assumed to have, obviously the recipient
1484 # must have that manifest.
1502 # must have that manifest.
1485 linknode = cl.node(mnfst.linkrev(n))
1503 linknode = cl.node(mnfst.linkrev(n))
1486 if linknode in has_cl_set:
1504 if linknode in has_cl_set:
1487 has_mnfst_set[n] = 1
1505 has_mnfst_set[n] = 1
1488 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1506 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1489
1507
1490 # Use the information collected in collect_manifests_and_files to say
1508 # Use the information collected in collect_manifests_and_files to say
1491 # which changenode any manifestnode belongs to.
1509 # which changenode any manifestnode belongs to.
1492 def lookup_manifest_link(mnfstnode):
1510 def lookup_manifest_link(mnfstnode):
1493 return msng_mnfst_set[mnfstnode]
1511 return msng_mnfst_set[mnfstnode]
1494
1512
1495 # A function generating function that sets up the initial environment
1513 # A function generating function that sets up the initial environment
1496 # for the inner function.
1514 # for the inner function.
1497 def filenode_collector(changedfiles):
1515 def filenode_collector(changedfiles):
1498 next_rev = [0]
1516 next_rev = [0]
1499 # This gathers information from each manifestnode included in the
1517 # This gathers information from each manifestnode included in the
1500 # changegroup about which filenodes the manifest node references
1518 # changegroup about which filenodes the manifest node references
1501 # so we can include those in the changegroup too.
1519 # so we can include those in the changegroup too.
1502 #
1520 #
1503 # It also remembers which changenode each filenode belongs to. It
1521 # It also remembers which changenode each filenode belongs to. It
1504 # does this by assuming that a filenode belongs to the changenode
1522 # does this by assuming that a filenode belongs to the changenode
1505 # the first manifest that references it belongs to.
1523 # the first manifest that references it belongs to.
1506 def collect_msng_filenodes(mnfstnode):
1524 def collect_msng_filenodes(mnfstnode):
1507 r = mnfst.rev(mnfstnode)
1525 r = mnfst.rev(mnfstnode)
1508 if r == next_rev[0]:
1526 if r == next_rev[0]:
1509 # If the last rev we looked at was the one just previous,
1527 # If the last rev we looked at was the one just previous,
1510 # we only need to see a diff.
1528 # we only need to see a diff.
1511 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1529 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1512 # For each line in the delta
1530 # For each line in the delta
1513 for dline in delta.splitlines():
1531 for dline in delta.splitlines():
1514 # get the filename and filenode for that line
1532 # get the filename and filenode for that line
1515 f, fnode = dline.split('\0')
1533 f, fnode = dline.split('\0')
1516 fnode = bin(fnode[:40])
1534 fnode = bin(fnode[:40])
1517 f = changedfiles.get(f, None)
1535 f = changedfiles.get(f, None)
1518 # And if the file is in the list of files we care
1536 # And if the file is in the list of files we care
1519 # about.
1537 # about.
1520 if f is not None:
1538 if f is not None:
1521 # Get the changenode this manifest belongs to
1539 # Get the changenode this manifest belongs to
1522 clnode = msng_mnfst_set[mnfstnode]
1540 clnode = msng_mnfst_set[mnfstnode]
1523 # Create the set of filenodes for the file if
1541 # Create the set of filenodes for the file if
1524 # there isn't one already.
1542 # there isn't one already.
1525 ndset = msng_filenode_set.setdefault(f, {})
1543 ndset = msng_filenode_set.setdefault(f, {})
1526 # And set the filenode's changelog node to the
1544 # And set the filenode's changelog node to the
1527 # manifest's if it hasn't been set already.
1545 # manifest's if it hasn't been set already.
1528 ndset.setdefault(fnode, clnode)
1546 ndset.setdefault(fnode, clnode)
1529 else:
1547 else:
1530 # Otherwise we need a full manifest.
1548 # Otherwise we need a full manifest.
1531 m = mnfst.read(mnfstnode)
1549 m = mnfst.read(mnfstnode)
1532 # For every file we care about.
1550 # For every file we care about.
1533 for f in changedfiles:
1551 for f in changedfiles:
1534 fnode = m.get(f, None)
1552 fnode = m.get(f, None)
1535 # If it's in the manifest
1553 # If it's in the manifest
1536 if fnode is not None:
1554 if fnode is not None:
1537 # See comments above.
1555 # See comments above.
1538 clnode = msng_mnfst_set[mnfstnode]
1556 clnode = msng_mnfst_set[mnfstnode]
1539 ndset = msng_filenode_set.setdefault(f, {})
1557 ndset = msng_filenode_set.setdefault(f, {})
1540 ndset.setdefault(fnode, clnode)
1558 ndset.setdefault(fnode, clnode)
1541 # Remember the revision we hope to see next.
1559 # Remember the revision we hope to see next.
1542 next_rev[0] = r + 1
1560 next_rev[0] = r + 1
1543 return collect_msng_filenodes
1561 return collect_msng_filenodes
1544
1562
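# Note (not part of the original file): each manifest delta line handled above
# has the form "<path>\0<40 hex digits of the filenode>[flags]", which is why
# the code splits on '\0' and takes fnode[:40] before bin().  An illustrative
# parser for a single line:
def parse_manifest_line(dline):
    f, rest = dline.split('\0')
    return f, rest[:40], rest[40:]   # path, hex filenode, optional flag chars

# parse_manifest_line('foo/bar.c\0' + '9e' * 20) == ('foo/bar.c', '9e' * 20, '')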
1545 # We have a list of filenodes we think we need for a file; let's remove
1563 # We have a list of filenodes we think we need for a file; let's remove
1546 # all those we know the recipient must have.
1564 # all those we know the recipient must have.
1547 def prune_filenodes(f, filerevlog):
1565 def prune_filenodes(f, filerevlog):
1548 msngset = msng_filenode_set[f]
1566 msngset = msng_filenode_set[f]
1549 hasset = {}
1567 hasset = {}
1550 # If a 'missing' filenode thinks it belongs to a changenode we
1568 # If a 'missing' filenode thinks it belongs to a changenode we
1551 # assume the recipient must have, then the recipient must have
1569 # assume the recipient must have, then the recipient must have
1552 # that filenode.
1570 # that filenode.
1553 for n in msngset:
1571 for n in msngset:
1554 clnode = cl.node(filerevlog.linkrev(n))
1572 clnode = cl.node(filerevlog.linkrev(n))
1555 if clnode in has_cl_set:
1573 if clnode in has_cl_set:
1556 hasset[n] = 1
1574 hasset[n] = 1
1557 prune_parents(filerevlog, hasset, msngset)
1575 prune_parents(filerevlog, hasset, msngset)
1558
1576
1559 # A function generating function that sets up a context for the
1577 # A function generating function that sets up a context for the
1560 # inner function.
1578 # inner function.
1561 def lookup_filenode_link_func(fname):
1579 def lookup_filenode_link_func(fname):
1562 msngset = msng_filenode_set[fname]
1580 msngset = msng_filenode_set[fname]
1563 # Lookup the changenode the filenode belongs to.
1581 # Lookup the changenode the filenode belongs to.
1564 def lookup_filenode_link(fnode):
1582 def lookup_filenode_link(fnode):
1565 return msngset[fnode]
1583 return msngset[fnode]
1566 return lookup_filenode_link
1584 return lookup_filenode_link
1567
1585
1568 # Now that we have all these utility functions to help out and
1586 # Now that we have all these utility functions to help out and
1569 # logically divide up the task, generate the group.
1587 # logically divide up the task, generate the group.
1570 def gengroup():
1588 def gengroup():
1571 # The set of changed files starts empty.
1589 # The set of changed files starts empty.
1572 changedfiles = {}
1590 changedfiles = {}
1573 # Create a changenode group generator that will call our functions
1591 # Create a changenode group generator that will call our functions
1574 # back to lookup the owning changenode and collect information.
1592 # back to lookup the owning changenode and collect information.
1575 group = cl.group(msng_cl_lst, identity,
1593 group = cl.group(msng_cl_lst, identity,
1576 manifest_and_file_collector(changedfiles))
1594 manifest_and_file_collector(changedfiles))
1577 for chnk in group:
1595 for chnk in group:
1578 yield chnk
1596 yield chnk
1579
1597
1580 # The list of manifests has been collected by the generator
1598 # The list of manifests has been collected by the generator
1581 # calling our functions back.
1599 # calling our functions back.
1582 prune_manifests()
1600 prune_manifests()
1583 msng_mnfst_lst = msng_mnfst_set.keys()
1601 msng_mnfst_lst = msng_mnfst_set.keys()
1584 # Sort the manifestnodes by revision number.
1602 # Sort the manifestnodes by revision number.
1585 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1603 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1586 # Create a generator for the manifestnodes that calls our lookup
1604 # Create a generator for the manifestnodes that calls our lookup
1587 # and data collection functions back.
1605 # and data collection functions back.
1588 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1606 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1589 filenode_collector(changedfiles))
1607 filenode_collector(changedfiles))
1590 for chnk in group:
1608 for chnk in group:
1591 yield chnk
1609 yield chnk
1592
1610
1593 # These are no longer needed, dereference and toss the memory for
1611 # These are no longer needed, dereference and toss the memory for
1594 # them.
1612 # them.
1595 msng_mnfst_lst = None
1613 msng_mnfst_lst = None
1596 msng_mnfst_set.clear()
1614 msng_mnfst_set.clear()
1597
1615
1598 changedfiles = changedfiles.keys()
1616 changedfiles = changedfiles.keys()
1599 changedfiles.sort()
1617 changedfiles.sort()
1600 # Go through all our files in order sorted by name.
1618 # Go through all our files in order sorted by name.
1601 for fname in changedfiles:
1619 for fname in changedfiles:
1602 filerevlog = self.file(fname)
1620 filerevlog = self.file(fname)
1603 # Toss out the filenodes that the recipient isn't really
1621 # Toss out the filenodes that the recipient isn't really
1604 # missing.
1622 # missing.
1605 if msng_filenode_set.has_key(fname):
1623 if msng_filenode_set.has_key(fname):
1606 prune_filenodes(fname, filerevlog)
1624 prune_filenodes(fname, filerevlog)
1607 msng_filenode_lst = msng_filenode_set[fname].keys()
1625 msng_filenode_lst = msng_filenode_set[fname].keys()
1608 else:
1626 else:
1609 msng_filenode_lst = []
1627 msng_filenode_lst = []
1610 # If any filenodes are left, generate the group for them,
1628 # If any filenodes are left, generate the group for them,
1611 # otherwise don't bother.
1629 # otherwise don't bother.
1612 if len(msng_filenode_lst) > 0:
1630 if len(msng_filenode_lst) > 0:
1613 yield changegroup.genchunk(fname)
1631 yield changegroup.genchunk(fname)
1614 # Sort the filenodes by their revision #
1632 # Sort the filenodes by their revision #
1615 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1633 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1616 # Create a group generator and only pass in a changenode
1634 # Create a group generator and only pass in a changenode
1617 # lookup function as we need to collect no information
1635 # lookup function as we need to collect no information
1618 # from filenodes.
1636 # from filenodes.
1619 group = filerevlog.group(msng_filenode_lst,
1637 group = filerevlog.group(msng_filenode_lst,
1620 lookup_filenode_link_func(fname))
1638 lookup_filenode_link_func(fname))
1621 for chnk in group:
1639 for chnk in group:
1622 yield chnk
1640 yield chnk
1623 if msng_filenode_set.has_key(fname):
1641 if msng_filenode_set.has_key(fname):
1624 # Don't need this anymore, toss it to free memory.
1642 # Don't need this anymore, toss it to free memory.
1625 del msng_filenode_set[fname]
1643 del msng_filenode_set[fname]
1626 # Signal that no more groups are left.
1644 # Signal that no more groups are left.
1627 yield changegroup.closechunk()
1645 yield changegroup.closechunk()
1628
1646
1629 if msng_cl_lst:
1647 if msng_cl_lst:
1630 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1648 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1631
1649
1632 return util.chunkbuffer(gengroup())
1650 return util.chunkbuffer(gengroup())
1633
1651
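# Illustrative sketch (not part of the original file): the stream produced by
# gengroup() above (and consumed by addchangegroup() below) has this shape:
#
#   changelog group     - chunks, terminated by an empty chunk
#   manifest group      - chunks, terminated by an empty chunk
#   repeated per file:
#       filename chunk
#       filelog group   - chunks, terminated by an empty chunk
#   empty chunk         - end of the changegroup
#
# A reader skeleton following that layout (helper names are hypothetical):
def read_changegroup_stream(read_group, read_chunk):
    read_group()              # changelog revisions
    read_group()              # manifest revisions
    while True:
        fname = read_chunk()
        if not fname:         # empty chunk: no more files
            break
        read_group()          # revisions of fname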
1634 def changegroup(self, basenodes, source):
1652 def changegroup(self, basenodes, source):
1635 """Generate a changegroup of all nodes that we have that a recipient
1653 """Generate a changegroup of all nodes that we have that a recipient
1636 doesn't.
1654 doesn't.
1637
1655
1638 This is much easier than the previous function as we can assume that
1656 This is much easier than the previous function as we can assume that
1639 the recipient has any changenode we aren't sending them."""
1657 the recipient has any changenode we aren't sending them."""
1640
1658
1641 self.hook('preoutgoing', throw=True, source=source)
1659 self.hook('preoutgoing', throw=True, source=source)
1642
1660
1643 cl = self.changelog
1661 cl = self.changelog
1644 nodes = cl.nodesbetween(basenodes, None)[0]
1662 nodes = cl.nodesbetween(basenodes, None)[0]
1645 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1663 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1646 self.changegroupinfo(nodes)
1664 self.changegroupinfo(nodes)
1647
1665
1648 def identity(x):
1666 def identity(x):
1649 return x
1667 return x
1650
1668
1651 def gennodelst(revlog):
1669 def gennodelst(revlog):
1652 for r in xrange(0, revlog.count()):
1670 for r in xrange(0, revlog.count()):
1653 n = revlog.node(r)
1671 n = revlog.node(r)
1654 if revlog.linkrev(n) in revset:
1672 if revlog.linkrev(n) in revset:
1655 yield n
1673 yield n
1656
1674
1657 def changed_file_collector(changedfileset):
1675 def changed_file_collector(changedfileset):
1658 def collect_changed_files(clnode):
1676 def collect_changed_files(clnode):
1659 c = cl.read(clnode)
1677 c = cl.read(clnode)
1660 for fname in c[3]:
1678 for fname in c[3]:
1661 changedfileset[fname] = 1
1679 changedfileset[fname] = 1
1662 return collect_changed_files
1680 return collect_changed_files
1663
1681
1664 def lookuprevlink_func(revlog):
1682 def lookuprevlink_func(revlog):
1665 def lookuprevlink(n):
1683 def lookuprevlink(n):
1666 return cl.node(revlog.linkrev(n))
1684 return cl.node(revlog.linkrev(n))
1667 return lookuprevlink
1685 return lookuprevlink
1668
1686
1669 def gengroup():
1687 def gengroup():
1670 # construct a list of all changed files
1688 # construct a list of all changed files
1671 changedfiles = {}
1689 changedfiles = {}
1672
1690
1673 for chnk in cl.group(nodes, identity,
1691 for chnk in cl.group(nodes, identity,
1674 changed_file_collector(changedfiles)):
1692 changed_file_collector(changedfiles)):
1675 yield chnk
1693 yield chnk
1676 changedfiles = changedfiles.keys()
1694 changedfiles = changedfiles.keys()
1677 changedfiles.sort()
1695 changedfiles.sort()
1678
1696
1679 mnfst = self.manifest
1697 mnfst = self.manifest
1680 nodeiter = gennodelst(mnfst)
1698 nodeiter = gennodelst(mnfst)
1681 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1699 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1682 yield chnk
1700 yield chnk
1683
1701
1684 for fname in changedfiles:
1702 for fname in changedfiles:
1685 filerevlog = self.file(fname)
1703 filerevlog = self.file(fname)
1686 nodeiter = gennodelst(filerevlog)
1704 nodeiter = gennodelst(filerevlog)
1687 nodeiter = list(nodeiter)
1705 nodeiter = list(nodeiter)
1688 if nodeiter:
1706 if nodeiter:
1689 yield changegroup.genchunk(fname)
1707 yield changegroup.genchunk(fname)
1690 lookup = lookuprevlink_func(filerevlog)
1708 lookup = lookuprevlink_func(filerevlog)
1691 for chnk in filerevlog.group(nodeiter, lookup):
1709 for chnk in filerevlog.group(nodeiter, lookup):
1692 yield chnk
1710 yield chnk
1693
1711
1694 yield changegroup.closechunk()
1712 yield changegroup.closechunk()
1695
1713
1696 if nodes:
1714 if nodes:
1697 self.hook('outgoing', node=hex(nodes[0]), source=source)
1715 self.hook('outgoing', node=hex(nodes[0]), source=source)
1698
1716
1699 return util.chunkbuffer(gengroup())
1717 return util.chunkbuffer(gengroup())
1700
1718
1701 def addchangegroup(self, source, srctype, url):
1719 def addchangegroup(self, source, srctype, url):
1702 """add changegroup to repo.
1720 """add changegroup to repo.
1703
1721
1704 return values:
1722 return values:
1705 - nothing changed or no source: 0
1723 - nothing changed or no source: 0
1706 - more heads than before: 1+added heads (2..n)
1724 - more heads than before: 1+added heads (2..n)
1707 - less heads than before: -1-removed heads (-2..-n)
1725 - less heads than before: -1-removed heads (-2..-n)
1708 - number of heads stays the same: 1
1726 - number of heads stays the same: 1
1709 """
1727 """
1710 def csmap(x):
1728 def csmap(x):
1711 self.ui.debug(_("add changeset %s\n") % short(x))
1729 self.ui.debug(_("add changeset %s\n") % short(x))
1712 return cl.count()
1730 return cl.count()
1713
1731
1714 def revmap(x):
1732 def revmap(x):
1715 return cl.rev(x)
1733 return cl.rev(x)
1716
1734
1717 if not source:
1735 if not source:
1718 return 0
1736 return 0
1719
1737
1720 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1738 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1721
1739
1722 changesets = files = revisions = 0
1740 changesets = files = revisions = 0
1723
1741
1724 tr = self.transaction()
1742 tr = self.transaction()
1725
1743
1726 # write changelog data to temp files so concurrent readers will not see
1744 # write changelog data to temp files so concurrent readers will not see
1727 # an inconsistent view
1745 # an inconsistent view
1728 cl = None
1746 cl = None
1729 try:
1747 try:
1730 cl = appendfile.appendchangelog(self.sopener,
1748 cl = appendfile.appendchangelog(self.sopener,
1731 self.changelog.version)
1749 self.changelog.version)
1732
1750
1733 oldheads = len(cl.heads())
1751 oldheads = len(cl.heads())
1734
1752
1735 # pull off the changeset group
1753 # pull off the changeset group
1736 self.ui.status(_("adding changesets\n"))
1754 self.ui.status(_("adding changesets\n"))
1737 cor = cl.count() - 1
1755 cor = cl.count() - 1
1738 chunkiter = changegroup.chunkiter(source)
1756 chunkiter = changegroup.chunkiter(source)
1739 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1757 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1740 raise util.Abort(_("received changelog group is empty"))
1758 raise util.Abort(_("received changelog group is empty"))
1741 cnr = cl.count() - 1
1759 cnr = cl.count() - 1
1742 changesets = cnr - cor
1760 changesets = cnr - cor
1743
1761
1744 # pull off the manifest group
1762 # pull off the manifest group
1745 self.ui.status(_("adding manifests\n"))
1763 self.ui.status(_("adding manifests\n"))
1746 chunkiter = changegroup.chunkiter(source)
1764 chunkiter = changegroup.chunkiter(source)
1747 # no need to check for empty manifest group here:
1765 # no need to check for empty manifest group here:
1748 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1766 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1749 # no new manifest will be created and the manifest group will
1767 # no new manifest will be created and the manifest group will
1750 # be empty during the pull
1768 # be empty during the pull
1751 self.manifest.addgroup(chunkiter, revmap, tr)
1769 self.manifest.addgroup(chunkiter, revmap, tr)
1752
1770
1753 # process the files
1771 # process the files
1754 self.ui.status(_("adding file changes\n"))
1772 self.ui.status(_("adding file changes\n"))
1755 while 1:
1773 while 1:
1756 f = changegroup.getchunk(source)
1774 f = changegroup.getchunk(source)
1757 if not f:
1775 if not f:
1758 break
1776 break
1759 self.ui.debug(_("adding %s revisions\n") % f)
1777 self.ui.debug(_("adding %s revisions\n") % f)
1760 fl = self.file(f)
1778 fl = self.file(f)
1761 o = fl.count()
1779 o = fl.count()
1762 chunkiter = changegroup.chunkiter(source)
1780 chunkiter = changegroup.chunkiter(source)
1763 if fl.addgroup(chunkiter, revmap, tr) is None:
1781 if fl.addgroup(chunkiter, revmap, tr) is None:
1764 raise util.Abort(_("received file revlog group is empty"))
1782 raise util.Abort(_("received file revlog group is empty"))
1765 revisions += fl.count() - o
1783 revisions += fl.count() - o
1766 files += 1
1784 files += 1
1767
1785
1768 cl.writedata()
1786 cl.writedata()
1769 finally:
1787 finally:
1770 if cl:
1788 if cl:
1771 cl.cleanup()
1789 cl.cleanup()
1772
1790
1773 # make changelog see real files again
1791 # make changelog see real files again
1774 self.changelog = changelog.changelog(self.sopener,
1792 self.changelog = changelog.changelog(self.sopener,
1775 self.changelog.version)
1793 self.changelog.version)
1776 self.changelog.checkinlinesize(tr)
1794 self.changelog.checkinlinesize(tr)
1777
1795
1778 newheads = len(self.changelog.heads())
1796 newheads = len(self.changelog.heads())
1779 heads = ""
1797 heads = ""
1780 if oldheads and newheads != oldheads:
1798 if oldheads and newheads != oldheads:
1781 heads = _(" (%+d heads)") % (newheads - oldheads)
1799 heads = _(" (%+d heads)") % (newheads - oldheads)
1782
1800
1783 self.ui.status(_("added %d changesets"
1801 self.ui.status(_("added %d changesets"
1784 " with %d changes to %d files%s\n")
1802 " with %d changes to %d files%s\n")
1785 % (changesets, revisions, files, heads))
1803 % (changesets, revisions, files, heads))
1786
1804
1787 if changesets > 0:
1805 if changesets > 0:
1788 self.hook('pretxnchangegroup', throw=True,
1806 self.hook('pretxnchangegroup', throw=True,
1789 node=hex(self.changelog.node(cor+1)), source=srctype,
1807 node=hex(self.changelog.node(cor+1)), source=srctype,
1790 url=url)
1808 url=url)
1791
1809
1792 tr.close()
1810 tr.close()
1793
1811
1794 if changesets > 0:
1812 if changesets > 0:
1795 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1813 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1796 source=srctype, url=url)
1814 source=srctype, url=url)
1797
1815
1798 for i in xrange(cor + 1, cnr + 1):
1816 for i in xrange(cor + 1, cnr + 1):
1799 self.hook("incoming", node=hex(self.changelog.node(i)),
1817 self.hook("incoming", node=hex(self.changelog.node(i)),
1800 source=srctype, url=url)
1818 source=srctype, url=url)
1801
1819
1802 # never return 0 here:
1820 # never return 0 here:
1803 if newheads < oldheads:
1821 if newheads < oldheads:
1804 return newheads - oldheads - 1
1822 return newheads - oldheads - 1
1805 else:
1823 else:
1806 return newheads - oldheads + 1
1824 return newheads - oldheads + 1
1807
1825
1808
1826
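# Illustrative helper (not part of the original file): decoding the return
# convention documented in addchangegroup()'s docstring above.
def describe_addchangegroup_result(ret):
    if ret == 0:
        return "nothing changed"
    if ret == 1:
        return "head count unchanged"
    if ret > 1:
        return "%d new heads" % (ret - 1)
    return "%d heads removed" % (-ret - 1)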
1809 def stream_in(self, remote):
1827 def stream_in(self, remote):
1810 fp = remote.stream_out()
1828 fp = remote.stream_out()
1811 l = fp.readline()
1829 l = fp.readline()
1812 try:
1830 try:
1813 resp = int(l)
1831 resp = int(l)
1814 except ValueError:
1832 except ValueError:
1815 raise util.UnexpectedOutput(
1833 raise util.UnexpectedOutput(
1816 _('Unexpected response from remote server:'), l)
1834 _('Unexpected response from remote server:'), l)
1817 if resp == 1:
1835 if resp == 1:
1818 raise util.Abort(_('operation forbidden by server'))
1836 raise util.Abort(_('operation forbidden by server'))
1819 elif resp == 2:
1837 elif resp == 2:
1820 raise util.Abort(_('locking the remote repository failed'))
1838 raise util.Abort(_('locking the remote repository failed'))
1821 elif resp != 0:
1839 elif resp != 0:
1822 raise util.Abort(_('the server sent an unknown error code'))
1840 raise util.Abort(_('the server sent an unknown error code'))
1823 self.ui.status(_('streaming all changes\n'))
1841 self.ui.status(_('streaming all changes\n'))
1824 l = fp.readline()
1842 l = fp.readline()
1825 try:
1843 try:
1826 total_files, total_bytes = map(int, l.split(' ', 1))
1844 total_files, total_bytes = map(int, l.split(' ', 1))
1827 except (ValueError, TypeError):
1845 except (ValueError, TypeError):
1828 raise util.UnexpectedOutput(
1846 raise util.UnexpectedOutput(
1829 _('Unexpected response from remote server:'), l)
1847 _('Unexpected response from remote server:'), l)
1830 self.ui.status(_('%d files to transfer, %s of data\n') %
1848 self.ui.status(_('%d files to transfer, %s of data\n') %
1831 (total_files, util.bytecount(total_bytes)))
1849 (total_files, util.bytecount(total_bytes)))
1832 start = time.time()
1850 start = time.time()
1833 for i in xrange(total_files):
1851 for i in xrange(total_files):
1834 # XXX doesn't support '\n' or '\r' in filenames
1852 # XXX doesn't support '\n' or '\r' in filenames
1835 l = fp.readline()
1853 l = fp.readline()
1836 try:
1854 try:
1837 name, size = l.split('\0', 1)
1855 name, size = l.split('\0', 1)
1838 size = int(size)
1856 size = int(size)
1839 except (ValueError, TypeError):
1857 except (ValueError, TypeError):
1840 raise util.UnexpectedOutput(
1858 raise util.UnexpectedOutput(
1841 _('Unexpected response from remote server:'), l)
1859 _('Unexpected response from remote server:'), l)
1842 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1860 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1843 ofp = self.sopener(name, 'w')
1861 ofp = self.sopener(name, 'w')
1844 for chunk in util.filechunkiter(fp, limit=size):
1862 for chunk in util.filechunkiter(fp, limit=size):
1845 ofp.write(chunk)
1863 ofp.write(chunk)
1846 ofp.close()
1864 ofp.close()
1847 elapsed = time.time() - start
1865 elapsed = time.time() - start
1848 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1866 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1849 (util.bytecount(total_bytes), elapsed,
1867 (util.bytecount(total_bytes), elapsed,
1850 util.bytecount(total_bytes / elapsed)))
1868 util.bytecount(total_bytes / elapsed)))
1851 self.reload()
1869 self.reload()
1852 return len(self.heads()) + 1
1870 return len(self.heads()) + 1
1853
1871
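# Illustrative sketch (not part of the original file): the wire format that
# stream_in() above parses.  A server-side generator producing a compatible
# stream could look like this (the helper and its arguments are hypothetical):
def stream_out_sketch(files):
    # files: list of (name, data) pairs for every store file to send
    yield '0\n'                               # 0 = ok, 1 = forbidden, 2 = lock failed
    total = sum([len(d) for n, d in files])
    yield '%d %d\n' % (len(files), total)     # "<file count> <total bytes>"
    for name, data in files:
        yield '%s\0%d\n' % (name, len(data))  # "<name>\0<size>"
        yield data                            # exactly <size> bytes of raw data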
1854 def clone(self, remote, heads=[], stream=False):
1872 def clone(self, remote, heads=[], stream=False):
1855 '''clone remote repository.
1873 '''clone remote repository.
1856
1874
1857 keyword arguments:
1875 keyword arguments:
1858 heads: list of revs to clone (forces use of pull)
1876 heads: list of revs to clone (forces use of pull)
1859 stream: use streaming clone if possible'''
1877 stream: use streaming clone if possible'''
1860
1878
1861 # now, all clients that can request uncompressed clones can
1879 # now, all clients that can request uncompressed clones can
1862 # read repo formats supported by all servers that can serve
1880 # read repo formats supported by all servers that can serve
1863 # them.
1881 # them.
1864
1882
1865 # if revlog format changes, client will have to check version
1883 # if revlog format changes, client will have to check version
1866 # and format flags on "stream" capability, and use
1884 # and format flags on "stream" capability, and use
1867 # uncompressed only if compatible.
1885 # uncompressed only if compatible.
1868
1886
1869 if stream and not heads and remote.capable('stream'):
1887 if stream and not heads and remote.capable('stream'):
1870 return self.stream_in(remote)
1888 return self.stream_in(remote)
1871 return self.pull(remote, heads)
1889 return self.pull(remote, heads)
1872
1890
1873 # used to avoid circular references so destructors work
1891 # used to avoid circular references so destructors work
1874 def aftertrans(files):
1892 def aftertrans(files):
1875 renamefiles = [tuple(t) for t in files]
1893 renamefiles = [tuple(t) for t in files]
1876 def a():
1894 def a():
1877 for src, dest in renamefiles:
1895 for src, dest in renamefiles:
1878 util.rename(src, dest)
1896 util.rename(src, dest)
1879 return a
1897 return a
1880
1898
1881 def instance(ui, path, create):
1899 def instance(ui, path, create):
1882 return localrepository(ui, util.drop_scheme('file', path), create)
1900 return localrepository(ui, util.drop_scheme('file', path), create)
1883
1901
1884 def islocal(path):
1902 def islocal(path):
1885 return True
1903 return True
@@ -1,1386 +1,1404 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale
17 import os, threading, time, calendar, ConfigParser, locale, glob
18
18
19 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
19 try:
20 or "ascii"
20 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
21 or "ascii"
22 except locale.Error:
23 _encoding = 'ascii'
21 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
24 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
22 _fallbackencoding = 'ISO-8859-1'
25 _fallbackencoding = 'ISO-8859-1'
23
26
24 def tolocal(s):
27 def tolocal(s):
25 """
28 """
26 Convert a string from internal UTF-8 to local encoding
29 Convert a string from internal UTF-8 to local encoding
27
30
28 All internal strings should be UTF-8 but some repos before the
31 All internal strings should be UTF-8 but some repos before the
29 implementation of locale support may contain latin1 or possibly
32 implementation of locale support may contain latin1 or possibly
30 other character sets. We attempt to decode everything strictly
33 other character sets. We attempt to decode everything strictly
31 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
34 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
32 replace unknown characters.
35 replace unknown characters.
33 """
36 """
34 for e in ('UTF-8', _fallbackencoding):
37 for e in ('UTF-8', _fallbackencoding):
35 try:
38 try:
36 u = s.decode(e) # attempt strict decoding
39 u = s.decode(e) # attempt strict decoding
37 return u.encode(_encoding, "replace")
40 return u.encode(_encoding, "replace")
38 except LookupError, k:
41 except LookupError, k:
39 raise Abort(_("%s, please check your locale settings") % k)
42 raise Abort(_("%s, please check your locale settings") % k)
40 except UnicodeDecodeError:
43 except UnicodeDecodeError:
41 pass
44 pass
42 u = s.decode("utf-8", "replace") # last ditch
45 u = s.decode("utf-8", "replace") # last ditch
43 return u.encode(_encoding, "replace")
46 return u.encode(_encoding, "replace")
44
47
45 def fromlocal(s):
48 def fromlocal(s):
46 """
49 """
47 Convert a string from the local character encoding to UTF-8
50 Convert a string from the local character encoding to UTF-8
48
51
49 We attempt to decode strings using the encoding mode set by
52 We attempt to decode strings using the encoding mode set by
50 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
53 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
51 characters will cause an error message. Other modes include
54 characters will cause an error message. Other modes include
52 'replace', which replaces unknown characters with a special
55 'replace', which replaces unknown characters with a special
53 Unicode character, and 'ignore', which drops the character.
56 Unicode character, and 'ignore', which drops the character.
54 """
57 """
55 try:
58 try:
56 return s.decode(_encoding, _encodingmode).encode("utf-8")
59 return s.decode(_encoding, _encodingmode).encode("utf-8")
57 except UnicodeDecodeError, inst:
60 except UnicodeDecodeError, inst:
58 sub = s[max(0, inst.start-10):inst.start+10]
61 sub = s[max(0, inst.start-10):inst.start+10]
59 raise Abort("decoding near '%s': %s!" % (sub, inst))
62 raise Abort("decoding near '%s': %s!" % (sub, inst))
60 except LookupError, k:
63 except LookupError, k:
61 raise Abort(_("%s, please check your locale settings") % k)
64 raise Abort(_("%s, please check your locale settings") % k)
62
65
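# Usage note (not part of the original file), assuming _encoding happens to
# be 'ISO-8859-1' for the sake of the example:
#
#   fromlocal('caf\xe9')   -> 'caf\xc3\xa9'   (latin-1 input re-encoded as UTF-8)
#   tolocal('caf\xc3\xa9') -> 'caf\xe9'       (and back again)
#
# Internal strings (commit messages, usernames) are kept in UTF-8; these two
# helpers translate at the boundary with the local terminal encoding.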
63 def locallen(s):
66 def locallen(s):
64 """Find the length in characters of a local string"""
67 """Find the length in characters of a local string"""
65 return len(s.decode(_encoding, "replace"))
68 return len(s.decode(_encoding, "replace"))
66
69
67 def localsub(s, a, b=None):
70 def localsub(s, a, b=None):
68 try:
71 try:
69 u = s.decode(_encoding, _encodingmode)
72 u = s.decode(_encoding, _encodingmode)
70 if b is not None:
73 if b is not None:
71 u = u[a:b]
74 u = u[a:b]
72 else:
75 else:
73 u = u[:a]
76 u = u[:a]
74 return u.encode(_encoding, _encodingmode)
77 return u.encode(_encoding, _encodingmode)
75 except UnicodeDecodeError, inst:
78 except UnicodeDecodeError, inst:
76 sub = s[max(0, inst.start-10):inst.start+10]
79 sub = s[max(0, inst.start-10):inst.start+10]
77 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
80 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
78
81
79 # used by parsedate
82 # used by parsedate
80 defaultdateformats = (
83 defaultdateformats = (
81 '%Y-%m-%d %H:%M:%S',
84 '%Y-%m-%d %H:%M:%S',
82 '%Y-%m-%d %I:%M:%S%p',
85 '%Y-%m-%d %I:%M:%S%p',
83 '%Y-%m-%d %H:%M',
86 '%Y-%m-%d %H:%M',
84 '%Y-%m-%d %I:%M%p',
87 '%Y-%m-%d %I:%M%p',
85 '%Y-%m-%d',
88 '%Y-%m-%d',
86 '%m-%d',
89 '%m-%d',
87 '%m/%d',
90 '%m/%d',
88 '%m/%d/%y',
91 '%m/%d/%y',
89 '%m/%d/%Y',
92 '%m/%d/%Y',
90 '%a %b %d %H:%M:%S %Y',
93 '%a %b %d %H:%M:%S %Y',
91 '%a %b %d %I:%M:%S%p %Y',
94 '%a %b %d %I:%M:%S%p %Y',
92 '%b %d %H:%M:%S %Y',
95 '%b %d %H:%M:%S %Y',
93 '%b %d %I:%M:%S%p %Y',
96 '%b %d %I:%M:%S%p %Y',
94 '%b %d %H:%M:%S',
97 '%b %d %H:%M:%S',
95 '%b %d %I:%M:%S%p',
98 '%b %d %I:%M:%S%p',
96 '%b %d %H:%M',
99 '%b %d %H:%M',
97 '%b %d %I:%M%p',
100 '%b %d %I:%M%p',
98 '%b %d %Y',
101 '%b %d %Y',
99 '%b %d',
102 '%b %d',
100 '%H:%M:%S',
103 '%H:%M:%S',
101 '%I:%M:%SP',
104 '%I:%M:%SP',
102 '%H:%M',
105 '%H:%M',
103 '%I:%M%p',
106 '%I:%M%p',
104 )
107 )
105
108
106 extendeddateformats = defaultdateformats + (
109 extendeddateformats = defaultdateformats + (
107 "%Y",
110 "%Y",
108 "%Y-%m",
111 "%Y-%m",
109 "%b",
112 "%b",
110 "%b %Y",
113 "%b %Y",
111 )
114 )
112
115
113 class SignalInterrupt(Exception):
116 class SignalInterrupt(Exception):
114 """Exception raised on SIGTERM and SIGHUP."""
117 """Exception raised on SIGTERM and SIGHUP."""
115
118
116 # like SafeConfigParser but with case-sensitive keys
119 # like SafeConfigParser but with case-sensitive keys
117 class configparser(ConfigParser.SafeConfigParser):
120 class configparser(ConfigParser.SafeConfigParser):
118 def optionxform(self, optionstr):
121 def optionxform(self, optionstr):
119 return optionstr
122 return optionstr
120
123
121 def cachefunc(func):
124 def cachefunc(func):
122 '''cache the result of function calls'''
125 '''cache the result of function calls'''
123 # XXX doesn't handle keyword args
126 # XXX doesn't handle keyword args
124 cache = {}
127 cache = {}
125 if func.func_code.co_argcount == 1:
128 if func.func_code.co_argcount == 1:
126 # we gain a small amount of time because
129 # we gain a small amount of time because
127 # we don't need to pack/unpack the list
130 # we don't need to pack/unpack the list
128 def f(arg):
131 def f(arg):
129 if arg not in cache:
132 if arg not in cache:
130 cache[arg] = func(arg)
133 cache[arg] = func(arg)
131 return cache[arg]
134 return cache[arg]
132 else:
135 else:
133 def f(*args):
136 def f(*args):
134 if args not in cache:
137 if args not in cache:
135 cache[args] = func(*args)
138 cache[args] = func(*args)
136 return cache[args]
139 return cache[args]
137
140
138 return f
141 return f
139
142
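# Usage sketch (not part of the original file): cachefunc memoizes by
# positional arguments, so repeated calls with the same argument become a
# dictionary lookup instead of a recomputation.
def _expensive(n):
    return sum(xrange(n))

_cached_expensive = cachefunc(_expensive)
# _cached_expensive(1000000) computes once; an identical second call hits the cache.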
140 def pipefilter(s, cmd):
143 def pipefilter(s, cmd):
141 '''filter string S through command CMD, returning its output'''
144 '''filter string S through command CMD, returning its output'''
142 (pout, pin) = popen2.popen2(cmd, -1, 'b')
145 (pout, pin) = popen2.popen2(cmd, -1, 'b')
143 def writer():
146 def writer():
144 try:
147 try:
145 pin.write(s)
148 pin.write(s)
146 pin.close()
149 pin.close()
147 except IOError, inst:
150 except IOError, inst:
148 if inst.errno != errno.EPIPE:
151 if inst.errno != errno.EPIPE:
149 raise
152 raise
150
153
151 # we should use select instead on UNIX, but this will work on most
154 # we should use select instead on UNIX, but this will work on most
152 # systems, including Windows
155 # systems, including Windows
153 w = threading.Thread(target=writer)
156 w = threading.Thread(target=writer)
154 w.start()
157 w.start()
155 f = pout.read()
158 f = pout.read()
156 pout.close()
159 pout.close()
157 w.join()
160 w.join()
158 return f
161 return f
159
162
160 def tempfilter(s, cmd):
163 def tempfilter(s, cmd):
161 '''filter string S through a pair of temporary files with CMD.
164 '''filter string S through a pair of temporary files with CMD.
162 CMD is used as a template to create the real command to be run,
165 CMD is used as a template to create the real command to be run,
163 with the strings INFILE and OUTFILE replaced by the real names of
166 with the strings INFILE and OUTFILE replaced by the real names of
164 the temporary files generated.'''
167 the temporary files generated.'''
165 inname, outname = None, None
168 inname, outname = None, None
166 try:
169 try:
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
170 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 fp = os.fdopen(infd, 'wb')
171 fp = os.fdopen(infd, 'wb')
169 fp.write(s)
172 fp.write(s)
170 fp.close()
173 fp.close()
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
174 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 os.close(outfd)
175 os.close(outfd)
173 cmd = cmd.replace('INFILE', inname)
176 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('OUTFILE', outname)
177 cmd = cmd.replace('OUTFILE', outname)
175 code = os.system(cmd)
178 code = os.system(cmd)
176 if code: raise Abort(_("command '%s' failed: %s") %
179 if code: raise Abort(_("command '%s' failed: %s") %
177 (cmd, explain_exit(code)))
180 (cmd, explain_exit(code)))
178 return open(outname, 'rb').read()
181 return open(outname, 'rb').read()
179 finally:
182 finally:
180 try:
183 try:
181 if inname: os.unlink(inname)
184 if inname: os.unlink(inname)
182 except: pass
185 except: pass
183 try:
186 try:
184 if outname: os.unlink(outname)
187 if outname: os.unlink(outname)
185 except: pass
188 except: pass
186
189
187 filtertable = {
190 filtertable = {
188 'tempfile:': tempfilter,
191 'tempfile:': tempfilter,
189 'pipe:': pipefilter,
192 'pipe:': pipefilter,
190 }
193 }
191
194
192 def filter(s, cmd):
195 def filter(s, cmd):
193 "filter a string through a command that transforms its input to its output"
196 "filter a string through a command that transforms its input to its output"
194 for name, fn in filtertable.iteritems():
197 for name, fn in filtertable.iteritems():
195 if cmd.startswith(name):
198 if cmd.startswith(name):
196 return fn(s, cmd[len(name):].lstrip())
199 return fn(s, cmd[len(name):].lstrip())
197 return pipefilter(s, cmd)
200 return pipefilter(s, cmd)
198
201
199 def find_in_path(name, path, default=None):
202 def find_in_path(name, path, default=None):
200 '''find name in search path. path can be string (will be split
203 '''find name in search path. path can be string (will be split
201 with os.pathsep), or iterable thing that returns strings. if name
204 with os.pathsep), or iterable thing that returns strings. if name
202 found, return path to name. else return default.'''
205 found, return path to name. else return default.'''
203 if isinstance(path, str):
206 if isinstance(path, str):
204 path = path.split(os.pathsep)
207 path = path.split(os.pathsep)
205 for p in path:
208 for p in path:
206 p_name = os.path.join(p, name)
209 p_name = os.path.join(p, name)
207 if os.path.exists(p_name):
210 if os.path.exists(p_name):
208 return p_name
211 return p_name
209 return default
212 return default
210
213
211 def binary(s):
214 def binary(s):
212 """return true if a string is binary data using diff's heuristic"""
215 """return true if a string is binary data using diff's heuristic"""
213 if s and '\0' in s[:4096]:
216 if s and '\0' in s[:4096]:
214 return True
217 return True
215 return False
218 return False
216
219
217 def unique(g):
220 def unique(g):
218 """return the uniq elements of iterable g"""
221 """return the uniq elements of iterable g"""
219 seen = {}
222 seen = {}
220 l = []
223 l = []
221 for f in g:
224 for f in g:
222 if f not in seen:
225 if f not in seen:
223 seen[f] = 1
226 seen[f] = 1
224 l.append(f)
227 l.append(f)
225 return l
228 return l
226
229
227 class Abort(Exception):
230 class Abort(Exception):
228 """Raised if a command needs to print an error and exit."""
231 """Raised if a command needs to print an error and exit."""
229
232
230 class UnexpectedOutput(Abort):
233 class UnexpectedOutput(Abort):
231 """Raised to print an error with part of output and exit."""
234 """Raised to print an error with part of output and exit."""
232
235
233 def always(fn): return True
236 def always(fn): return True
234 def never(fn): return False
237 def never(fn): return False
235
238
239 def expand_glob(pats):
240 '''On Windows, expand the implicit globs in a list of patterns'''
241 if os.name != 'nt':
242 return list(pats)
243 ret = []
244 for p in pats:
245 kind, name = patkind(p, None)
246 if kind is None:
247 globbed = glob.glob(name)
248 if globbed:
249 ret.extend(globbed)
250 continue
251 # if we couldn't expand the glob, just keep it around
252 ret.append(p)
253 return ret
254
236 def patkind(name, dflt_pat='glob'):
255 def patkind(name, dflt_pat='glob'):
237 """Split a string into an optional pattern kind prefix and the
256 """Split a string into an optional pattern kind prefix and the
238 actual pattern."""
257 actual pattern."""
239 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
258 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
240 if name.startswith(prefix + ':'): return name.split(':', 1)
259 if name.startswith(prefix + ':'): return name.split(':', 1)
241 return dflt_pat, name
260 return dflt_pat, name
242
261
243 def globre(pat, head='^', tail='$'):
262 def globre(pat, head='^', tail='$'):
244 "convert a glob pattern into a regexp"
263 "convert a glob pattern into a regexp"
245 i, n = 0, len(pat)
264 i, n = 0, len(pat)
246 res = ''
265 res = ''
247 group = False
266 group = False
248 def peek(): return i < n and pat[i]
267 def peek(): return i < n and pat[i]
249 while i < n:
268 while i < n:
250 c = pat[i]
269 c = pat[i]
251 i = i+1
270 i = i+1
252 if c == '*':
271 if c == '*':
253 if peek() == '*':
272 if peek() == '*':
254 i += 1
273 i += 1
255 res += '.*'
274 res += '.*'
256 else:
275 else:
257 res += '[^/]*'
276 res += '[^/]*'
258 elif c == '?':
277 elif c == '?':
259 res += '.'
278 res += '.'
260 elif c == '[':
279 elif c == '[':
261 j = i
280 j = i
262 if j < n and pat[j] in '!]':
281 if j < n and pat[j] in '!]':
263 j += 1
282 j += 1
264 while j < n and pat[j] != ']':
283 while j < n and pat[j] != ']':
265 j += 1
284 j += 1
266 if j >= n:
285 if j >= n:
267 res += '\\['
286 res += '\\['
268 else:
287 else:
269 stuff = pat[i:j].replace('\\','\\\\')
288 stuff = pat[i:j].replace('\\','\\\\')
270 i = j + 1
289 i = j + 1
271 if stuff[0] == '!':
290 if stuff[0] == '!':
272 stuff = '^' + stuff[1:]
291 stuff = '^' + stuff[1:]
273 elif stuff[0] == '^':
292 elif stuff[0] == '^':
274 stuff = '\\' + stuff
293 stuff = '\\' + stuff
275 res = '%s[%s]' % (res, stuff)
294 res = '%s[%s]' % (res, stuff)
276 elif c == '{':
295 elif c == '{':
277 group = True
296 group = True
278 res += '(?:'
297 res += '(?:'
279 elif c == '}' and group:
298 elif c == '}' and group:
280 res += ')'
299 res += ')'
281 group = False
300 group = False
282 elif c == ',' and group:
301 elif c == ',' and group:
283 res += '|'
302 res += '|'
284 elif c == '\\':
303 elif c == '\\':
285 p = peek()
304 p = peek()
286 if p:
305 if p:
287 i += 1
306 i += 1
288 res += re.escape(p)
307 res += re.escape(p)
289 else:
308 else:
290 res += re.escape(c)
309 res += re.escape(c)
291 else:
310 else:
292 res += re.escape(c)
311 res += re.escape(c)
293 return head + res + tail
312 return head + res + tail
294
313
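# Examples (not part of the original file) of the translation performed by
# globre() with the default head='^' and tail='$':
#
#   globre('*.py')      matches against  ^[^/]*\.py$     ('*' stops at '/')
#   globre('foo?')      matches against  ^foo.$          ('?' is any one character)
#   globre('foo.{c,h}') matches against  ^foo\.(?:c|h)$  ('{a,b}' becomes an alternation)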
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}

def pathto(n1, n2):
    '''return the relative path from one place to another.
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.
    '''
    if not n1: return localpath(n2)
    a, b = n1.split(os.sep), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b)

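# Illustrative example (not part of the module), on a POSIX system where
# os.sep == '/': the common 'foo' prefix is stripped and one '..' climbs out
# of the remaining component of n1.
#
#   >>> pathto('foo/bar', 'foo/baz/quux')
#   '../baz/quux'
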
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

    raise Abort('%s not under root' % myname)

def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)

def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='',
               src=None, globbed=False):
    if not globbed:
        names = expand_glob(names)
    return _matcher(canonroot, cwd, names, inc, exc, head, 'relpath', src)

def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    head - a regex to prepend to patterns to control whether a match is rooted

    a pattern is one of:
    'glob:<rooted glob>'
    're:<rooted regexp>'
    'path:<rooted path>'
    'relglob:<relative glob>'
    'relpath:<relative path>'
    'relre:<relative regexp>'
    '<rooted path or regexp>'

    returns:
    a 3-tuple containing
    - list of explicit non-pattern names passed in
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in

    todo:
    make head regex a rooted bool
    """

    def contains_glob(name):
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return head + globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return head + re.escape(name) + tail
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return head + globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        matches = []
        for k, p in pats:
            try:
                pat = '(?:%s)' % regex(k, p, tail)
                matches.append(re.compile(pat).match)
            except re.error:
                if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
                else: raise Abort("invalid pattern (%s): %s" % (k, p))

        def buildfn(text):
            for m in matches:
                r = m(text)
                if r:
                    return r

        return buildfn

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split(os.sep):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root)

    pats = []
    files = []
    roots = []
    for kind, name in [patkind(p, dflt_pat) for p in names]:
        if kind in ('glob', 'relpath'):
            name = canonpath(canonroot, cwd, name)
            if name == '':
                kind, name = 'glob', '**'
        if kind in ('glob', 'path', 're'):
            pats.append((kind, name))
            if kind == 'glob':
                root = globprefix(name)
                if root: roots.append(root)
        elif kind == 'relpath':
            files.append((kind, name))
            roots.append(name)

    patmatch = matchfn(pats, '$') or always
    filematch = matchfn(files, '(?:/|$)') or always
    incmatch = always
    if inc:
        inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
        excmatch = matchfn(exckinds, '(?:/|$)')

    return (roots,
            lambda fn: (incmatch(fn) and not excmatch(fn) and
                        (fn.endswith('/') or
                         (not pats and not files) or
                         (pats and patmatch(fn)) or
                         (files and filematch(fn)))),
            (inc or exc or (pats and pats != [('glob', '**')])) and True)

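# Rough usage sketch (illustrative, not part of the module), assuming POSIX
# path separators and a hypothetical repository root '/repo': the 3-tuple
# gives the non-pattern roots to walk, the match predicate, and whether any
# real patterns were supplied.
#
#   roots, match, anypats = matcher('/repo', '', ['glob:src/*.py'], [], [])
#   # roots   -> ['src']
#   # anypats -> True
#   # match('src/foo.py') is truthy, match('doc/foo.py') is not
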
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)

def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err:
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def copyfile(src, dest):
    "copy a file, preserving mode"
    try:
        shutil.copyfile(src, dest)
        shutil.copymode(src, dest)
    except shutil.Error, inst:
        raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name in os.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)

def audit_path(path):
    """Abort if path contains dangerous components"""
    parts = os.path.normcase(path).split(os.sep)
    if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
        or os.pardir in parts):
        raise Abort(_("path contains illegal component: %s\n") % path)

def _makelock_file(info, pathname):
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def _readlock_file(pathname):
    return posixfile(pathname).read()

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

posixfile = file

def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        return os.name == 'nt' and 'command' in os.environ.get('comspec', '')

getuser_fallback = None

def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
    # raised if win32api not available
    raise Abort(_('user name not available - set USERNAME '
                  'environment variable'))

def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
        if uid is None:
            uid = os.getuid()
        try:
            return pwd.getpwuid(uid)[0]
        except KeyError:
            return str(uid)
    except ImportError:
        return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
        if gid is None:
            gid = os.getgid()
        try:
            return grp.getgrgid(gid)[0]
        except KeyError:
            return str(gid)
    except ImportError:
        return None

# File system features

def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """
    fh, fn = tempfile.mkstemp("", "", path)
    os.close(fh)
    m = os.stat(fn).st_mode
    os.chmod(fn, m ^ 0111)
    r = (os.stat(fn).st_mode != m)
    os.unlink(fn)
    return r

def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if checkexec(path):
        return lambda x: is_exec(os.path.join(path, x))
    return fallback

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if checklink(path):
        return lambda x: is_link(os.path.join(path, x))
    return fallback

# Platform specific variants
if os.name == 'nt':
    import msvcrt
    nulldev = 'NUL:'

    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, key):
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                return self.fp.write(s)
            except IOError, inst:
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

    sys.stdout = winstdout(sys.stdout)

    def system_rcpath():
        try:
            return system_rcpath_win32()
        except:
            return [r'c:\mercurial\mercurial.ini']

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = system_rcpath()
        path.append(user_rcpath())
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
        return path

    def user_rcpath():
        '''return os-specific hgrc search path to the user dir'''
        return os.path.join(os.path.expanduser('~'), 'mercurial.ini')

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf

    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        return True

    def set_exec(f, mode):
        pass

    def set_link(f, mode):
        pass

    def set_binary(fd):
        msvcrt.setmode(fd.fileno(), os.O_BINARY)

    def pconvert(path):
        return path.replace("\\", "/")

    def localpath(path):
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    makelock = _makelock_file
    readlock = _readlock_file

    def samestat(s1, s2):
        return False

    def shellquote(s):
        return '"%s"' % s.replace('"', '\\"')

    def explain_exit(code):
        return _("exited with status %d") % code, code

    # if you change this stub into a real check, please try to implement the
    # username and groupname functions above, too.
    def isowner(fp, st=None):
        return True

    try:
        # override functions with win32 versions if possible
        from util_win32 import *
        if not is_win_9x():
            posixfile = posixfile_nt
    except ImportError:
        pass

else:
    nulldev = '/dev/null'
    _umask = os.umask(0)
    os.umask(_umask)

    def rcfiles(path):
        rcs = [os.path.join(path, 'hgrc')]
        rcdir = os.path.join(path, 'hgrc.d')
        try:
            rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
                        if f.endswith(".rc")])
        except OSError:
            pass
        return rcs

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = []
        # old mod_python does not set sys.argv
        if len(getattr(sys, 'argv', [])) > 0:
            path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
                                '/../etc/mercurial'))
        path.extend(rcfiles('/etc/mercurial'))
        path.append(os.path.expanduser('~/.hgrc'))
        path = [os.path.normpath(f) for f in path]
        return path

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf.startswith("'") and pf.endswith("'") and " " in pf:
            pf = pf[1:-1] # Remove the quotes
        return pf

    def is_exec(f):
        """check whether a file is executable"""
        return (os.lstat(f).st_mode & 0100 != 0)

    def set_exec(f, mode):
        s = os.lstat(f).st_mode
        if (s & 0100 != 0) == mode:
            return
        if mode:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
        else:
            os.chmod(f, s & 0666)

    def is_link(f):
        """check whether a file is a symlink"""
        return (os.lstat(f).st_mode & 0120000 == 0120000)

    def set_link(f, mode):
        """make a file a symbolic link/regular file

        if a file is changed to a link, its contents become the link data
        if a link is changed to a file, its link data become its contents
        """

        m = is_link(f)
        if m == bool(mode):
            return

        if mode: # switch file to link
            data = file(f).read()
            os.unlink(f)
            os.symlink(data, f)
        else:
            data = os.readlink(f)
            os.unlink(f)
            file(f, "w").write(data)

    def set_binary(fd):
        pass

    def pconvert(path):
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath
    samestat = os.path.samestat

    def makelock(info, pathname):
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                raise
            else:
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno == errno.EINVAL:
                return _readlock_file(pathname)
            else:
                raise

    def shellquote(s):
        return "'%s'" % s.replace("'", "'\\''")

    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        try:
            os.kill(pid, 0)
            return True
        except OSError, inst:
            return inst.errno != errno.ESRCH

    def explain_exit(code):
        """return a 2-tuple (desc, code) describing a process's status"""
        if os.WIFEXITED(code):
            val = os.WEXITSTATUS(code)
            return _("exited with status %d") % val, val
        elif os.WIFSIGNALED(code):
            val = os.WTERMSIG(code)
            return _("killed by signal %d") % val, val
        elif os.WIFSTOPPED(code):
            val = os.WSTOPSIG(code)
            return _("stopped by signal %d") % val, val
        raise ValueError(_("invalid exit code"))

    def isowner(fp, st=None):
        """Return True if the file object f belongs to the current user.

        The return value of a util.fstat(f) may be passed as the st argument.
        """
        if st is None:
            st = fstat(fp)
        return st.st_uid == os.getuid()

def _buildencodefun():
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()

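# Illustrative behaviour (not part of the module): uppercase characters are
# escaped with '_' so encoded store paths survive case-folding filesystems,
# and decodefilename reverses the mapping.
#
#   >>> encodefilename('data/FOO.i')
#   'data/_f_o_o.i'
#   >>> decodefilename('data/_f_o_o.i')
#   'data/FOO.i'
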
def encodedopener(openerfn, fn):
    def o(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return o

def opener(base, audit=True):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    p = base
    audit_p = audit

    def mktempcopy(name):
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
        os.close(fd)
        ofp = posixfile(temp, "wb")
        try:
            try:
                ifp = posixfile(name, "rb")
            except IOError, inst:
                if not getattr(inst, 'filename', None):
                    inst.filename = name
                raise
            for chunk in filechunkiter(ifp):
                ofp.write(chunk)
            ifp.close()
            ofp.close()
        except:
            try: os.unlink(temp)
            except: pass
            raise
        st = os.lstat(name)
        os.chmod(temp, st.st_mode)
        return temp

    class atomictempfile(posixfile):
        """the file will only be copied when rename is called"""
        def __init__(self, name, mode):
            self.__name = name
            self.temp = mktempcopy(name)
            posixfile.__init__(self, self.temp, mode)
        def rename(self):
            if not self.closed:
                posixfile.close(self)
                rename(self.temp, localpath(self.__name))
        def __del__(self):
            if not self.closed:
                try:
                    os.unlink(self.temp)
                except: pass
                posixfile.close(self)

    class atomicfile(atomictempfile):
        """the file will only be copied on close"""
        def __init__(self, name, mode):
            atomictempfile.__init__(self, name, mode)
        def close(self):
            self.rename()
        def __del__(self):
            self.rename()

    def o(path, mode="r", text=False, atomic=False, atomictemp=False):
        if audit_p:
            audit_path(path)
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if atomic:
                    return atomicfile(f, mode)
                elif atomictemp:
                    return atomictempfile(f, mode)
                if nlink > 1:
                    rename(mktempcopy(f), f)
        return posixfile(f, mode)

    return o

class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

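# Small usage sketch (illustrative, not part of the module): chunkbuffer
# re-chunks an iterator of arbitrarily sized strings into reads of the
# requested length.
#
#   >>> cb = chunkbuffer(['abc', 'def', 'g'])
#   >>> cb.read(4)
#   'abcd'
#
# subsequent reads drain the remaining 'efg'.
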
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    s = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    return s

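# Illustrative values (not part of the module): the offset counts seconds
# west of UTC, so a negative offset is a zone east of UTC.
#
#   >>> datestr((0, 0))
#   'Thu Jan 01 00:00:00 1970 +0000'
#   >>> datestr((0, -3600))
#   'Thu Jan 01 01:00:00 1970 +0100'
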
def strdate(string, format, defaults):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            offset = - 3600 * (tz / 100) - 60 * (tz % 100)
            return offset
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(string, formats=None, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats."""
    if not string:
        return 0, 0
    if not formats:
        formats = defaultdateformats
    string = string.strip()
    try:
        when, offset = map(int, string.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                elif part[0] in "dm":
                    defaults[part] = "1"
                else:
                    defaults[part] = datestr(now, "%" + part[0], False)

        for format in formats:
            try:
                when, offset = strdate(string, format, defaults)
            except ValueError:
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

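# Illustrative example (not part of the module): the "unixtime offset" form
# is accepted directly and returned after the range checks above.
#
#   >>> parsedate('1000000000 0')
#   (1000000000, 0)
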

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
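
# Illustrative usage (editorial sketch, not part of the original source).
# The returned matcher takes a Unix timestamp such as the first element
# of a parsedate() result; the exact accepted date strings depend on
# defaultdateformats/extendeddateformats, which are defined elsewhere:
#   >>> m = matchdate('>2006-01-01')
#   >>> m(parsedate('2006-06-15')[0])
#   True
#   >>> m(parsedate('2005-12-31')[0])
#   False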

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user
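
# Illustrative usage (editorial, not in the original source): everything
# after '@', anything up to '<', and anything after the first space or
# dot is dropped:
#   >>> shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> shortuser('foo@bar.com')
#   'foo'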

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])
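
# Illustrative usage (editorial, not in the original source): a trimmed
# result is exactly maxlength characters long, including the trailing "...":
#   >>> ellipsis('abcdefgh', 5)
#   'ab...'
#   >>> ellipsis('short')
#   'short'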

def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        for d in dirs:
            if d == '.hg':
                yield root
                dirs[:] = []
                break
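
# Illustrative usage (editorial, not in the original source; paths are
# hypothetical): for a tree like /srv/repos/{a,b}/.hg this yields
# '/srv/repos/a' and '/srv/repos/b'; clearing dirs[:] keeps os.walk from
# descending any further once a repository's '.hg' directory is found:
#   >>> list(walkrepos('/srv/repos'))
#   ['/srv/repos/a', '/srv/repos/b']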

_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f in os.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
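
# Illustrative behaviour (editorial, not in the original source; paths are
# hypothetical and assume a POSIX os.pathsep of ':'):
#   HGRCPATH='/etc/mercurial/hgrc.d:/home/user/.hgrc'
#       -> the directory entry is expanded to the '*.rc' files it contains,
#          and the plain file entry is appended as-is;
#   HGRCPATH=''    -> empty search path, so only .hg/hgrc is consulted;
#   HGRCPATH unset -> os_rcpath() supplies the platform default.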

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
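
# Illustrative usage (editorial, not in the original source): the table is
# scanned top-down, so the first threshold that fits picks both the unit
# and the precision:
#   >>> bytecount(500)
#   '500 bytes'
#   >>> bytecount(2048)
#   '2.00 KB'
#   >>> bytecount(200 * (1 << 20))
#   '200 MB'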

def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path
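
# Illustrative usage (editorial, not in the original source):
#   >>> drop_scheme('file', 'file:///tmp/repo')
#   '/tmp/repo'
#   >>> drop_scheme('file', '/tmp/repo')
#   '/tmp/repo'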