Merge with crew
Bryan O'Sullivan
r5676:9ed65758 merge default
@@ -0,0 +1,108 @@
1 #!/bin/sh
2
3 echo "[extensions]" >> $HGRCPATH
4 echo "mq=" >> $HGRCPATH
5 echo "[diff]" >> $HGRCPATH
6 echo "nodates=true" >> $HGRCPATH
7
8
9 catlog() {
10 cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /"
11 hg log --template "{rev}: {desc} - {author}\n"
12 }
13
14
15 echo ==== init
16 hg init a
17 cd a
18 hg qinit
19
20
21 echo ==== qnew -U
22 hg qnew -U 1.patch
23 catlog 1
24
25 echo ==== qref
26 echo "1" >1
27 hg add
28 hg qref
29 catlog 1
30
31 echo ==== qref -u
32 hg qref -u mary
33 catlog 1
34
35 echo ==== qnew
36 hg qnew 2.patch
37 echo "2" >2
38 hg add
39 hg qref
40 catlog 2
41
42 echo ==== qref -u
43 hg qref -u jane
44 catlog 2
45
46
47 echo ==== qnew -U -m
48 hg qnew -U -m "Three" 3.patch
49 catlog 3
50
51 echo ==== qref
52 echo "3" >3
53 hg add
54 hg qref
55 catlog 3
56
57 echo ==== qref -m
58 hg qref -m "Drei"
59 catlog 3
60
61 echo ==== qref -u
62 hg qref -u mary
63 catlog 3
64
65 echo ==== qref -u -m
66 hg qref -u maria -m "Three (again)"
67 catlog 3
68
69 echo ==== qnew -m
70 hg qnew -m "Four" 4.patch
71 echo "4" >4
72 hg add
73 hg qref
74 catlog 4
75
76 echo ==== qref -u
77 hg qref -u jane
78 catlog 4
79
80
81 echo ==== qnew with HG header
82 hg qnew 5.patch
83 hg qpop
84 echo "# HG changeset patch" >>.hg/patches/5.patch
85 echo "# User johndoe" >>.hg/patches/5.patch
86 # Drop patch specific error line
87 hg qpush 2>&1 | grep -v garbage
88 catlog 5
89
90 echo ==== hg qref
91 echo "5" >5
92 hg add
93 hg qref
94 catlog 5
95
96 echo ==== hg qref -U
97 hg qref -U
98 catlog 5
99
100 echo ==== hg qref -u
101 hg qref -u johndeere
102 catlog 5
103
104
105 echo ==== "qpop -a / qpush -a"
106 hg qpop -a
107 hg qpush -a
108 hg log --template "{rev}: {desc} - {author}\n"
@@ -0,0 +1,200 @@
1 ==== init
2 ==== qnew -U
3 From: test
4
5 0: [mq]: 1.patch - test
6 ==== qref
7 adding 1
8 From: test
9
10 diff -r ... 1
11 --- /dev/null
12 +++ b/1
13 @@ -0,0 +1,1 @@
14 +1
15 0: [mq]: 1.patch - test
16 ==== qref -u
17 From: mary
18
19 diff -r ... 1
20 --- /dev/null
21 +++ b/1
22 @@ -0,0 +1,1 @@
23 +1
24 0: [mq]: 1.patch - mary
25 ==== qnew
26 adding 2
27 diff -r ... 2
28 --- /dev/null
29 +++ b/2
30 @@ -0,0 +1,1 @@
31 +2
32 1: [mq]: 2.patch - test
33 0: [mq]: 1.patch - mary
34 ==== qref -u
35 From: jane
36
37
38 diff -r ... 2
39 --- /dev/null
40 +++ b/2
41 @@ -0,0 +1,1 @@
42 +2
43 1: [mq]: 2.patch - jane
44 0: [mq]: 1.patch - mary
45 ==== qnew -U -m
46 From: test
47
48 Three
49 2: Three - test
50 1: [mq]: 2.patch - jane
51 0: [mq]: 1.patch - mary
52 ==== qref
53 adding 3
54 From: test
55
56 Three
57
58 diff -r ... 3
59 --- /dev/null
60 +++ b/3
61 @@ -0,0 +1,1 @@
62 +3
63 2: Three - test
64 1: [mq]: 2.patch - jane
65 0: [mq]: 1.patch - mary
66 ==== qref -m
67 From: test
68
69 Drei
70
71 diff -r ... 3
72 --- /dev/null
73 +++ b/3
74 @@ -0,0 +1,1 @@
75 +3
76 2: Drei - test
77 1: [mq]: 2.patch - jane
78 0: [mq]: 1.patch - mary
79 ==== qref -u
80 From: mary
81
82 Drei
83
84 diff -r ... 3
85 --- /dev/null
86 +++ b/3
87 @@ -0,0 +1,1 @@
88 +3
89 2: Drei - mary
90 1: [mq]: 2.patch - jane
91 0: [mq]: 1.patch - mary
92 ==== qref -u -m
93 From: maria
94
95 Three (again)
96
97 diff -r ... 3
98 --- /dev/null
99 +++ b/3
100 @@ -0,0 +1,1 @@
101 +3
102 2: Three (again) - maria
103 1: [mq]: 2.patch - jane
104 0: [mq]: 1.patch - mary
105 ==== qnew -m
106 adding 4
107 Four
108
109 diff -r ... 4
110 --- /dev/null
111 +++ b/4
112 @@ -0,0 +1,1 @@
113 +4
114 3: Four - test
115 2: Three (again) - maria
116 1: [mq]: 2.patch - jane
117 0: [mq]: 1.patch - mary
118 ==== qref -u
119 From: jane
120
121 Four
122
123 diff -r ... 4
124 --- /dev/null
125 +++ b/4
126 @@ -0,0 +1,1 @@
127 +4
128 3: Four - jane
129 2: Three (again) - maria
130 1: [mq]: 2.patch - jane
131 0: [mq]: 1.patch - mary
132 ==== qnew with HG header
133 Now at: 4.patch
134 applying 5.patch
135 patch failed, unable to continue (try -v)
136 patch 5.patch is empty
137 Now at: 5.patch
138 # HG changeset patch
139 # User johndoe
140 4: imported patch 5.patch - johndoe
141 3: Four - jane
142 2: Three (again) - maria
143 1: [mq]: 2.patch - jane
144 0: [mq]: 1.patch - mary
145 ==== hg qref
146 adding 5
147 # HG changeset patch
148 # User johndoe
149
150 diff -r ... 5
151 --- /dev/null
152 +++ b/5
153 @@ -0,0 +1,1 @@
154 +5
155 4: [mq]: 5.patch - johndoe
156 3: Four - jane
157 2: Three (again) - maria
158 1: [mq]: 2.patch - jane
159 0: [mq]: 1.patch - mary
160 ==== hg qref -U
161 # HG changeset patch
162 # User test
163
164 diff -r ... 5
165 --- /dev/null
166 +++ b/5
167 @@ -0,0 +1,1 @@
168 +5
169 4: [mq]: 5.patch - test
170 3: Four - jane
171 2: Three (again) - maria
172 1: [mq]: 2.patch - jane
173 0: [mq]: 1.patch - mary
174 ==== hg qref -u
175 # HG changeset patch
176 # User johndeere
177
178 diff -r ... 5
179 --- /dev/null
180 +++ b/5
181 @@ -0,0 +1,1 @@
182 +5
183 4: [mq]: 5.patch - johndeere
184 3: Four - jane
185 2: Three (again) - maria
186 1: [mq]: 2.patch - jane
187 0: [mq]: 1.patch - mary
188 ==== qpop -a / qpush -a
189 Patch queue now empty
190 applying 1.patch
191 applying 2.patch
192 applying 3.patch
193 applying 4.patch
194 applying 5.patch
195 Now at: 5.patch
196 4: imported patch 5.patch - johndeere
197 3: Four - jane
198 2: Three (again) - maria
199 1: imported patch 2.patch - jane
200 0: imported patch 1.patch - mary
@@ -1,583 +1,583 @@
1 HGRC(5)
2 =======
3 Bryan O'Sullivan <bos@serpentine.com>
4
5 NAME
6 ----
7 hgrc - configuration files for Mercurial
8
9 SYNOPSIS
10 --------
11
12 The Mercurial system uses a set of configuration files to control
13 aspects of its behaviour.
14
15 FILES
16 -----
17
18 Mercurial reads configuration data from several files, if they exist.
19 The names of these files depend on the system on which Mercurial is
20 installed. On Windows, registry keys contain PATH-like strings; every
21 part must reference a Mercurial.ini file or be a directory where *.rc
22 files will be read.
23
24 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
25 (Unix) <install-root>/etc/mercurial/hgrc::
26 Per-installation configuration files, searched for in the
27 directory where Mercurial is installed. For example, if installed
28 in /shared/tools, Mercurial will look in
29 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
30 all Mercurial commands executed by any user in any directory.
31
32 (Unix) /etc/mercurial/hgrc.d/*.rc::
33 (Unix) /etc/mercurial/hgrc::
34 (Windows) HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial::
35 or::
36 (Windows) C:\Mercurial\Mercurial.ini::
37 Per-system configuration files, for the system on which Mercurial
38 is running. Options in these files apply to all Mercurial
39 commands executed by any user in any directory. Options in these
40 files override per-installation options.
41
42 (Unix) $HOME/.hgrc::
43 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
44 (Windows) $HOME\Mercurial.ini::
45 Per-user configuration file, for the user running Mercurial.
46 Options in this file apply to all Mercurial commands executed by
47 this user in any directory. Options in this file override
48 per-installation and per-system options.
49 On Windows systems, exactly one of these is chosen, depending on
50 whether the HOME environment variable is defined.
51
52 (Unix, Windows) <repo>/.hg/hgrc::
53 Per-repository configuration options that only apply in a
54 particular repository. This file is not version-controlled, and
55 will not get transferred during a "clone" operation. Options in
56 this file override options in all other configuration files.
57 On Unix, most of this file will be ignored if it doesn't belong
58 to a trusted user or to a trusted group. See the documentation
59 for the trusted section below for more details.
60
61 SYNTAX
62 ------
63
64 A configuration file consists of sections, led by a "[section]" header
65 and followed by "name: value" entries; "name=value" is also accepted.
66
67 [spam]
68 eggs=ham
69 green=
70 eggs
71
72 Each line contains one entry. If the lines that follow are indented,
73 they are treated as continuations of that entry.
74
75 Leading whitespace is removed from values. Empty lines are skipped.
76
77 The optional values can contain format strings which refer to other
78 values in the same section, or values in a special DEFAULT section.
79
80 Lines beginning with "#" or ";" are ignored and may be used to provide
81 comments.
82
83 SECTIONS
84 --------
85
86 This section describes the different sections that may appear in a
87 Mercurial "hgrc" file, the purpose of each section, its possible
88 keys, and their possible values.
89
90 decode/encode::
91 Filters for transforming files on checkout/checkin. This would
92 typically be used for newline processing or other
93 localization/canonicalization of files.
94
95 Filters consist of a filter pattern followed by a filter command.
96 Filter patterns are globs by default, rooted at the repository
97 root. For example, to match any file ending in ".txt" in the root
98 directory only, use the pattern "*.txt". To match any file ending
99 in ".c" anywhere in the repository, use the pattern "**.c".
100
101 The filter command can start with a specifier, either "pipe:" or
102 "tempfile:". If no specifier is given, "pipe:" is used by default.
103
104 A "pipe:" command must accept data on stdin and return the
105 transformed data on stdout.
106
107 Pipe example:
108
109 [encode]
110 # uncompress gzip files on checkin to improve delta compression
111 # note: not necessarily a good idea, just an example
112 *.gz = pipe: gunzip
113
114 [decode]
115 # recompress gzip files when writing them to the working dir (we
116 # can safely omit "pipe:", because it's the default)
117 *.gz = gzip
118
119 A "tempfile:" command is a template. The string INFILE is replaced
120 with the name of a temporary file that contains the data to be
121 filtered by the command. The string OUTFILE is replaced with the
122 name of an empty temporary file, where the filtered data must be
123 written by the command.
124
125 NOTE: the tempfile mechanism is recommended for Windows systems,
126 where the standard shell I/O redirection operators often have
127 strange effects. In particular, if you are doing line ending
128 conversion on Windows using the popular dos2unix and unix2dos
129 programs, you *must* use the tempfile mechanism, as using pipes will
130 corrupt the contents of your files.
131
132 Tempfile example:
133
134 [encode]
135 # convert files to unix line ending conventions on checkin
136 **.txt = tempfile: dos2unix -n INFILE OUTFILE
137
138 [decode]
139 # convert files to windows line ending conventions when writing
140 # them to the working dir
141 **.txt = tempfile: unix2dos -n INFILE OUTFILE
142
143 defaults::
144 Use the [defaults] section to define command defaults, i.e. the
145 default options/arguments to pass to the specified commands.
146
147 The following example makes 'hg log' run in verbose mode, and
148 'hg status' show only the modified files, by default.
149
150 [defaults]
151 log = -v
152 status = -m
153
154 The actual commands, instead of their aliases, must be used when
155 defining command defaults. The command defaults will also be
156 applied to the aliases of the commands defined.
157
158 diff::
159 Settings used when displaying diffs. They are all boolean and
160 default to False.
161 git;;
162 Use git extended diff format.
163 nodates;;
164 Don't include dates in diff headers.
165 showfunc;;
166 Show which function each change is in.
167 ignorews;;
168 Ignore white space when comparing lines.
169 ignorewsamount;;
170 Ignore changes in the amount of white space.
171 ignoreblanklines;;
172 Ignore changes whose lines are all blank.
173
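For example, a [diff] section turning several of these options on at once might look like this (an arbitrary illustration, not a recommended default):

    [diff]
    git = True
    nodates = True
    showfunc = True
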
174 email::
175 Settings for extensions that send email messages.
176 from;;
177 Optional. Email address to use in "From" header and SMTP envelope
178 of outgoing messages.
179 to;;
180 Optional. Comma-separated list of recipients' email addresses.
181 cc;;
182 Optional. Comma-separated list of carbon copy recipients'
183 email addresses.
184 bcc;;
185 Optional. Comma-separated list of blind carbon copy
186 recipients' email addresses. Cannot be set interactively.
187 method;;
188 Optional. Method to use to send email messages. If value is
189 "smtp" (default), use SMTP (see section "[smtp]" for
190 configuration). Otherwise, use as name of program to run that
191 acts like sendmail (takes "-f" option for sender, list of
192 recipients on command line, message on stdin). Normally, setting
193 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
194 sendmail to send messages.
195
196 Email example:
197
198 [email]
199 from = Joseph User <joe.user@example.com>
200 method = /usr/sbin/sendmail
201
202 extensions::
203 Mercurial has an extension mechanism for adding new features. To
204 enable an extension, create an entry for it in this section.
205
206 If you know that the extension is already in Python's search path,
207 you can give the name of the module, followed by "=", with nothing
208 after the "=".
209
210 Otherwise, give a name that you choose, followed by "=", followed by
211 the path to the ".py" file (including the file name extension) that
212 defines the extension.
213
214 Example for ~/.hgrc:
215
216 [extensions]
217 # (the mq extension will get loaded from mercurial's path)
218 hgext.mq =
219 # (this extension will get loaded from the file specified)
220 myfeature = ~/.hgext/myfeature.py
221
222 format::
223
224 usestore;;
225 Enable or disable the "store" repository format which improves
226 compatibility with systems that fold case or otherwise mangle
227 filenames. Enabled by default. Disabling this option will allow
228 you to store longer filenames in some situations at the expense of
229 compatibility.
230
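For example, someone who needs the longer filenames described above could disable the store format when creating new repositories (illustrative only; keeping the default is usually preferable):

    [format]
    usestore = False
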
231 hooks::
232 Commands or Python functions that get automatically executed by
233 various actions such as starting or finishing a commit. Multiple
234 hooks can be run for the same action by appending a suffix to the
235 action. Overriding a site-wide hook can be done by changing its
236 value or setting it to an empty string.
237
238 Example .hg/hgrc:
239
240 [hooks]
241 # do not use the site-wide hook
242 incoming =
243 incoming.email = /my/email/hook
244 incoming.autobuild = /my/build/hook
245
246 Most hooks are run with environment variables set that give added
247 useful information. For each hook below, the environment variables
248 it is passed are listed with names of the form "$HG_foo".
249
250 changegroup;;
251 Run after a changegroup has been added via push, pull or
252 unbundle. ID of the first new changeset is in $HG_NODE. URL from
253 which changes came is in $HG_URL.
254 commit;;
255 Run after a changeset has been created in the local repository.
256 ID of the newly created changeset is in $HG_NODE. Parent
257 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
258 incoming;;
259 Run after a changeset has been pulled, pushed, or unbundled into
260 the local repository. The ID of the newly arrived changeset is in
261 $HG_NODE. URL that was the source of changes is in $HG_URL.
262 outgoing;;
263 Run after sending changes from local repository to another. ID of
264 first changeset sent is in $HG_NODE. Source of operation is in
265 $HG_SOURCE; see "preoutgoing" hook for description.
266 post-<command>;;
267 Run after successful invocations of the associated command. The
268 contents of the command line are passed as $HG_ARGS and the result
269 code in $HG_RESULT. Hook failure is ignored.
270 pre-<command>;;
271 Run before executing the associated command. The contents of the
272 command line are passed as $HG_ARGS. If the hook returns failure,
273 the command doesn't execute and Mercurial returns the failure code.
274 prechangegroup;;
275 Run before a changegroup is added via push, pull or unbundle.
276 Exit status 0 allows the changegroup to proceed. Non-zero status
277 will cause the push, pull or unbundle to fail. URL from which
278 changes will come is in $HG_URL.
279 precommit;;
280 Run before starting a local commit. Exit status 0 allows the
281 commit to proceed. Non-zero status will cause the commit to fail.
282 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
283 preoutgoing;;
284 Run before computing changes to send from the local repository to
284 Run before collecting changes to send from the local repository to
285 another. Non-zero status will cause failure. This lets you
286 prevent pull over http or ssh. It also runs for local pull, push
287 (outbound) and bundle commands, but is not effective there, since
288 you could simply copy the files instead. Source of operation is in
289 $HG_SOURCE. If "serve", operation is happening on behalf of
290 remote ssh or http repository. If "push", "pull" or "bundle",
291 operation is happening on behalf of repository on same system.
292 pretag;;
293 Run before creating a tag. Exit status 0 allows the tag to be
294 created. Non-zero status will cause the tag to fail. ID of
295 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
296 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
297 pretxnchangegroup;;
298 Run after a changegroup has been added via push, pull or unbundle,
299 but before the transaction has been committed. Changegroup is
300 visible to hook program. This lets you validate incoming changes
301 before accepting them. Passed the ID of the first new changeset
302 in $HG_NODE. Exit status 0 allows the transaction to commit.
303 Non-zero status will cause the transaction to be rolled back and
304 the push, pull or unbundle will fail. URL that was source of
305 changes is in $HG_URL.
306 pretxncommit;;
307 Run after a changeset has been created but the transaction not yet
308 committed. Changeset is visible to hook program. This lets you
309 validate commit message and changes. Exit status 0 allows the
310 commit to proceed. Non-zero status will cause the transaction to
311 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
312 IDs are in $HG_PARENT1 and $HG_PARENT2.
313 preupdate;;
314 Run before updating the working directory. Exit status 0 allows
315 the update to proceed. Non-zero status will prevent the update.
316 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
317 of second new parent is in $HG_PARENT2.
318 tag;;
319 Run after a tag is created. ID of tagged changeset is in
320 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
321 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
322 update;;
323 Run after updating the working directory. Changeset ID of first
324 new parent is in $HG_PARENT1. If merge, ID of second new parent
325 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
326 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
327
328 Note: it is generally better to use standard hooks rather than the
329 generic pre- and post- command hooks as they are guaranteed to be
330 called in the appropriate contexts for influencing transactions.
331 Also, hooks like "commit" will be called in all contexts that
332 generate a commit (e.g. tag) and not just the commit command.
333
334 Note2: Environment variables with empty values may not be passed to
335 hooks on platforms like Windows. For instance, $HG_PARENT2 will
336 not be available under Windows for non-merge changesets while being
337 set to an empty value under Unix-like systems.
338
339 The syntax for Python hooks is as follows:
340
341 hookname = python:modulename.submodule.callable
342
343 Python hooks are run within the Mercurial process. Each hook is
344 called with at least three keyword arguments: a ui object (keyword
345 "ui"), a repository object (keyword "repo"), and a "hooktype"
346 keyword that tells what kind of hook is used. Arguments listed as
347 environment variables above are passed as keyword arguments, with no
348 "HG_" prefix, and names in lower case.
349
350 If a Python hook returns a "true" value or raises an exception, this
351 is treated as failure of the hook.
352
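As a sketch of this calling convention, a Python precommit hook might look like the following (the module, function and hook suffix names are invented for illustration):

    # myhooks.py -- illustrative module; place it on Python's search path
    def precommitcheck(ui, repo, hooktype, parent1=None, parent2=None, **kwargs):
        # keyword arguments mirror the $HG_* variables listed above,
        # lower-cased and without the "HG_" prefix (here: parent1, parent2)
        ui.note("precommit hook of type %s, parent1=%s\n" % (hooktype, parent1))
        # a false return value means success; returning True or raising an
        # exception would make the commit fail
        return False

and be enabled with:

    [hooks]
    precommit.check = python:myhooks.precommitcheck
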
353 http_proxy::
354 Used to access web-based Mercurial repositories through an HTTP
355 proxy.
356 host;;
357 Host name and (optional) port of the proxy server, for example
358 "myproxy:8000".
359 no;;
360 Optional. Comma-separated list of host names that should bypass
361 the proxy.
362 passwd;;
363 Optional. Password to authenticate with at the proxy server.
364 user;;
365 Optional. User name to authenticate with at the proxy server.
366
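Taken together, an illustrative proxy setup might be (host names and credentials are placeholders):

    [http_proxy]
    host = myproxy:8000
    no = localhost, intranet.example.com
    user = alice
    passwd = secret
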
367 smtp::
368 Configuration for extensions that need to send email messages.
369 host;;
370 Host name of mail server, e.g. "mail.example.com".
371 port;;
372 Optional. Port to connect to on mail server. Default: 25.
373 tls;;
374 Optional. Whether to connect to mail server using TLS. True or
375 False. Default: False.
376 username;;
377 Optional. User name to authenticate to SMTP server with.
378 If username is specified, password must also be specified.
379 Default: none.
380 password;;
381 Optional. Password to authenticate to SMTP server with.
382 If username is specified, password must also be specified.
383 Default: none.
384 local_hostname;;
385 Optional. Hostname that the sender can use to identify itself
386 to the MTA.
387
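An illustrative SMTP setup (server name and credentials are placeholders):

    [smtp]
    host = mail.example.com
    port = 25
    tls = True
    username = joe.user
    password = secret
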
388 paths::
389 Assigns symbolic names to repositories. The left side is the
390 symbolic name, and the right gives the directory or URL that is the
391 location of the repository. Default paths can be declared by
392 setting the following entries.
393 default;;
394 Directory or URL to use when pulling if no source is specified.
395 Default is set to repository from which the current repository
396 was cloned.
397 default-push;;
398 Optional. Directory or URL to use when pushing if no destination
399 is specified.
400
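For example (the URLs and names are placeholders):

    [paths]
    default = http://hg.example.com/main
    default-push = ssh://hg@hg.example.com/main
    staging = /srv/hg/staging
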
401 server::
402 Controls generic server settings.
403 uncompressed;;
404 Whether to allow clients to clone a repo using the uncompressed
405 streaming protocol. This transfers about 40% more data than a
406 regular clone, but uses less memory and CPU on both server and
407 client. Over a LAN (100Mbps or better) or a very fast WAN, an
408 uncompressed streaming clone is a lot faster (~10x) than a regular
409 clone. Over most WAN connections (anything slower than about
410 6Mbps), uncompressed streaming is slower, because of the extra
411 data transfer overhead. Default is False.
412
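For example, a server on a fast local network could enable this (an illustrative choice, not a recommendation):

    [server]
    uncompressed = True
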
413 trusted::
414 For security reasons, Mercurial will not use the settings in
415 the .hg/hgrc file from a repository if it doesn't belong to a
416 trusted user or to a trusted group. The main exception is the
417 web interface, which automatically uses some safe settings, since
418 it's common to serve repositories from different users.
419
420 This section specifies what users and groups are trusted. The
421 current user is always trusted. To trust everybody, list a user
422 or a group with name "*".
423
424 users;;
425 Comma-separated list of trusted users.
426 groups;;
427 Comma-separated list of trusted groups.
428
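For example (user and group names are placeholders):

    [trusted]
    users = alice, bob
    groups = hgusers
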
429 ui::
430 User interface controls.
431 debug;;
432 Print debugging information. True or False. Default is False.
433 editor;;
434 The editor to use during a commit. Default is $EDITOR or "vi".
435 fallbackencoding;;
436 Encoding to try if it's not possible to decode the changelog using
437 UTF-8. Default is ISO-8859-1.
438 ignore;;
439 A file to read per-user ignore patterns from. This file should be in
440 the same format as a repository-wide .hgignore file. This option
441 supports hook syntax, so if you want to specify multiple ignore
442 files, you can do so by setting something like
443 "ignore.other = ~/.hgignore2". For details of the ignore file
444 format, see the hgignore(5) man page.
445 interactive;;
446 Allow prompting the user. True or False. Default is True.
447 logtemplate;;
448 Template string for commands that print changesets.
449 merge;;
450 The conflict resolution program to use during a manual merge.
451 Default is "hgmerge".
452 patch;;
453 command to use to apply patches. Looks for 'gpatch' or 'patch' in
454 PATH if unset.
455 quiet;;
456 Reduce the amount of output printed. True or False. Default is False.
457 remotecmd;;
458 remote command to use for clone/push/pull operations. Default is 'hg'.
459 report_untrusted;;
460 Warn if a .hg/hgrc file is ignored due to not being owned by a
461 trusted user or group. True or False. Default is True.
462 slash;;
463 Display paths using a slash ("/") as the path separator. This only
464 makes a difference on systems where the default path separator is not
465 the slash character (e.g. Windows uses the backslash character ("\")).
466 Default is False.
467 ssh;;
468 command to use for SSH connections. Default is 'ssh'.
469 strict;;
470 Require exact command names, instead of allowing unambiguous
471 abbreviations. True or False. Default is False.
472 style;;
473 Name of style to use for command output.
474 timeout;;
475 The timeout used when a lock is held (in seconds); a negative value
476 means no timeout. Default is 600.
477 username;;
478 The committer of a changeset created when running "commit".
479 Typically a person's name and email address, e.g. "Fred Widget
480 <fred@example.com>". Default is $EMAIL or username@hostname.
481 If the username in hgrc is empty, it has to be specified manually or
482 in a different hgrc file (e.g. $HOME/.hgrc, if the admin set "username ="
483 in the system hgrc).
484 verbose;;
485 Increase the amount of output printed. True or False. Default is False.
486
487
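Pulling a few of these keys together, an illustrative per-user [ui] section might be (name and address are placeholders):

    [ui]
    username = Fred Widget <fred@example.com>
    editor = vi
    merge = hgmerge
    verbose = True
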
488 web::
489 Web interface configuration.
490 accesslog;;
491 Where to output the access log. Default is stdout.
492 address;;
493 Interface address to bind to. Default is all.
494 allow_archive;;
495 List of archive formats (bz2, gz, zip) allowed for downloading.
496 Default is empty.
497 allowbz2;;
498 (DEPRECATED) Whether to allow .tar.bz2 downloading of repo revisions.
499 Default is false.
500 allowgz;;
501 (DEPRECATED) Whether to allow .tar.gz downloading of repo revisions.
502 Default is false.
503 allowpull;;
504 Whether to allow pulling from the repository. Default is true.
505 allow_push;;
506 Whether to allow pushing to the repository. If empty or not set,
507 push is not allowed. If the special value "*", any remote user
508 can push, including unauthenticated users. Otherwise, the remote
509 user must have been authenticated, and the authenticated user name
510 must be present in this list (separated by whitespace or ",").
511 The contents of the allow_push list are examined after the
512 deny_push list.
513 allowzip;;
514 (DEPRECATED) Whether to allow .zip downloading of repo revisions.
515 Default is false. This feature creates temporary files.
516 baseurl;;
517 Base URL to use when publishing URLs in other locations, so
518 third-party tools like email notification hooks can construct URLs.
519 Example: "http://hgserver/repos/"
520 contact;;
521 Name or email address of the person in charge of the repository.
522 Default is "unknown".
523 deny_push;;
524 Whether to deny pushing to the repository. If empty or not set,
525 push is not denied. If the special value "*", all remote users
526 are denied push. Otherwise, unauthenticated users are all denied,
527 and any authenticated user name present in this list (separated by
528 whitespace or ",") is also denied. The contents of the deny_push
529 list are examined before the allow_push list.
530 description;;
531 Textual description of the repository's purpose or contents.
532 Default is "unknown".
533 encoding;;
534 Character encoding name.
535 Example: "UTF-8"
536 errorlog;;
537 Where to output the error log. Default is stderr.
538 hidden;;
539 Whether to hide the repository in the hgwebdir index. Default is false.
540 ipv6;;
541 Whether to use IPv6. Default is false.
542 name;;
543 Repository name to use in the web interface. Default is current
544 working directory.
545 maxchanges;;
546 Maximum number of changes to list on the changelog. Default is 10.
547 maxfiles;;
548 Maximum number of files to list per changeset. Default is 10.
549 port;;
550 Port to listen on. Default is 8000.
551 push_ssl;;
552 Whether to require that inbound pushes be transported over SSL to
553 prevent password sniffing. Default is true.
554 staticurl;;
555 Base URL to use for static files. If unset, static files (e.g.
556 the hgicon.png favicon) will be served by the CGI script itself.
557 Use this setting to serve them directly with the HTTP server.
558 Example: "http://hgserver/static/"
559 stripes;;
560 How many lines a "zebra stripe" should span in multiline output.
561 Default is 1; set to 0 to disable.
562 style;;
563 Which template map style to use.
564 templates;;
565 Where to find the HTML templates. Default is install path.
566
567
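An illustrative combination of these keys for a served repository (all values are placeholders):

    [web]
    contact = Joseph User <joe.user@example.com>
    description = example repository
    allow_archive = bz2, gz, zip
    allow_push = alice
    push_ssl = True
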
568 AUTHOR
569 ------
570 Bryan O'Sullivan <bos@serpentine.com>.
571
572 Mercurial was written by Matt Mackall <mpm@selenic.com>.
573
574 SEE ALSO
575 --------
576 hg(1), hgignore(5)
577
578 COPYING
579 -------
580 This manual page is copyright 2005 Bryan O'Sullivan.
581 Mercurial is copyright 2005-2007 Matt Mackall.
582 Free use of this software is granted under the terms of the GNU General
583 Public License (GPL).
@@ -1,2258 +1,2292 @@
1 # queue.py - patch queues for mercurial
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 '''patch management and development
9
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
13
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
16
17 Common tasks (use "hg help command" for more details):
18
19 prepare repository to work with patches   qinit
20 create new patch                          qnew
21 import existing patch                     qimport
22
23 print patch series                        qseries
24 print applied patches                     qapplied
25 print name of top applied patch           qtop
26
27 add known patch to applied stack          qpush
28 remove patch from applied stack           qpop
29 refresh contents of top applied patch     qrefresh
30 '''
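# Example session using the commands listed above (the patch name is
# illustrative):
#
#   $ hg qinit                 # prepare the repository
#   $ hg qnew fix-typo.patch   # start a new patch
#   ... edit some files ...
#   $ hg qrefresh              # record the edits in the top patch
#   $ hg qpop -a               # unapply all patches
#   $ hg qpush -a              # reapply the whole series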
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone"
37 commands.norepo += " qclone"
38
38
39 # Patch names looks like unix-file names.
39 # Patch names looks like unix-file names.
40 # They must be joinable with queue directory and result in the patch path.
40 # They must be joinable with queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
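# Editor's note on the series file format (illustrative example): a line such
# as
#   foo.patch #+stable #-experimental
# names the patch "foo.patch" and attaches the guards '+stable' and
# '-experimental' (extracted by guard_re), a line whose first character is
# '#' is treated as a comment, and blank lines are ignored.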
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
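# Editor's note (hypothetical example): with active guards ['stable'], a patch
# guarded '#+stable' is pushable, one guarded '#-stable' is skipped by the
# exact negative match, and one guarded only '#+experimental' is skipped
# because none of its positive guards is active; an unguarded patch is always
# pushable.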
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 print >> fp, i
227 print >> fp, i
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
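# Editor's illustration of a header readheaders() accepts (hypothetical patch):
#   From: mary
#
#   Fix the frobnicator
#
#   diff -r 000000000000 frob.c
#   --- a/frob.c
#   +++ b/frob.c
# parses to user 'mary' and message ['Fix the frobnicator']; the trailing diff
# and blank lines are removed by eatdiff()/eatempty(), and the '--- '/'+++ '
# pair sets patchfound.  A leading '# HG changeset patch' line switches to the
# hgpatch format, which reads '# User ' and '# Date ' lines instead.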
309
309
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev):
326 def mergeone(self, repo, mergeq, head, patch, rev):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev)
329 strict=True, merge=rev)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head)
340 hg.clean(repo, head)
341 self.strip(repo, n, update=False, backup='strip')
341 self.strip(repo, n, update=False, backup='strip')
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev)
344 ret = hg.merge(repo, rev)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 if n is None:
348 if n is None:
349 raise util.Abort(_("repo commit failed"))
349 raise util.Abort(_("repo commit failed"))
350 try:
350 try:
351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 except:
352 except:
353 raise util.Abort(_("unable to read %s") % patch)
353 raise util.Abort(_("unable to read %s") % patch)
354
354
355 patchf = self.opener(patch, "w")
355 patchf = self.opener(patch, "w")
356 if comments:
356 if comments:
357 comments = "\n".join(comments) + '\n\n'
357 comments = "\n".join(comments) + '\n\n'
358 patchf.write(comments)
358 patchf.write(comments)
359 self.printdiff(repo, head, n, fp=patchf)
359 self.printdiff(repo, head, n, fp=patchf)
360 patchf.close()
360 patchf.close()
361 self.removeundo(repo)
361 self.removeundo(repo)
362 return (0, n)
362 return (0, n)
363
363
364 def qparents(self, repo, rev=None):
364 def qparents(self, repo, rev=None):
365 if rev is None:
365 if rev is None:
366 (p1, p2) = repo.dirstate.parents()
366 (p1, p2) = repo.dirstate.parents()
367 if p2 == revlog.nullid:
367 if p2 == revlog.nullid:
368 return p1
368 return p1
369 if len(self.applied) == 0:
369 if len(self.applied) == 0:
370 return None
370 return None
371 return revlog.bin(self.applied[-1].rev)
371 return revlog.bin(self.applied[-1].rev)
372 pp = repo.changelog.parents(rev)
372 pp = repo.changelog.parents(rev)
373 if pp[1] != revlog.nullid:
373 if pp[1] != revlog.nullid:
374 arevs = [ x.rev for x in self.applied ]
374 arevs = [ x.rev for x in self.applied ]
375 p0 = revlog.hex(pp[0])
375 p0 = revlog.hex(pp[0])
376 p1 = revlog.hex(pp[1])
376 p1 = revlog.hex(pp[1])
377 if p0 in arevs:
377 if p0 in arevs:
378 return pp[0]
378 return pp[0]
379 if p1 in arevs:
379 if p1 in arevs:
380 return pp[1]
380 return pp[1]
381 return pp[0]
381 return pp[0]
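# Editor's note: for a merge revision, qparents() prefers whichever parent is
# itself an applied mq patch, so qrefresh/strip work against the side of the
# merge that belongs to the queue; with no revision given it falls back to the
# working-directory parent or the topmost applied patch.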
382
382
383 def mergepatch(self, repo, mergeq, series):
383 def mergepatch(self, repo, mergeq, series):
384 if len(self.applied) == 0:
384 if len(self.applied) == 0:
385 # each of the patches merged in will have two parents. This
385 # each of the patches merged in will have two parents. This
386 # can confuse the qrefresh, qdiff, and strip code because it
386 # can confuse the qrefresh, qdiff, and strip code because it
387 # needs to know which parent is actually in the patch queue.
387 # needs to know which parent is actually in the patch queue.
388 # So we insert a merge marker with only one parent. This way
388 # So we insert a merge marker with only one parent. This way
389 # the first patch in the queue is never a merge patch
389 # the first patch in the queue is never a merge patch
390 #
390 #
391 pname = ".hg.patches.merge.marker"
391 pname = ".hg.patches.merge.marker"
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 self.removeundo(repo)
393 self.removeundo(repo)
394 self.applied.append(statusentry(revlog.hex(n), pname))
394 self.applied.append(statusentry(revlog.hex(n), pname))
395 self.applied_dirty = 1
395 self.applied_dirty = 1
396
396
397 head = self.qparents(repo)
397 head = self.qparents(repo)
398
398
399 for patch in series:
399 for patch in series:
400 patch = mergeq.lookup(patch, strict=True)
400 patch = mergeq.lookup(patch, strict=True)
401 if not patch:
401 if not patch:
402 self.ui.warn("patch %s does not exist\n" % patch)
402 self.ui.warn("patch %s does not exist\n" % patch)
403 return (1, None)
403 return (1, None)
404 pushable, reason = self.pushable(patch)
404 pushable, reason = self.pushable(patch)
405 if not pushable:
405 if not pushable:
406 self.explain_pushable(patch, all_patches=True)
406 self.explain_pushable(patch, all_patches=True)
407 continue
407 continue
408 info = mergeq.isapplied(patch)
408 info = mergeq.isapplied(patch)
409 if not info:
409 if not info:
410 self.ui.warn("patch %s is not applied\n" % patch)
410 self.ui.warn("patch %s is not applied\n" % patch)
411 return (1, None)
411 return (1, None)
412 rev = revlog.bin(info[1])
412 rev = revlog.bin(info[1])
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 if head:
414 if head:
415 self.applied.append(statusentry(revlog.hex(head), patch))
415 self.applied.append(statusentry(revlog.hex(head), patch))
416 self.applied_dirty = 1
416 self.applied_dirty = 1
417 if err:
417 if err:
418 return (err, head)
418 return (err, head)
419 self.save_dirty()
419 self.save_dirty()
420 return (0, head)
420 return (0, head)
421
421
422 def patch(self, repo, patchfile):
422 def patch(self, repo, patchfile):
423 '''Apply patchfile to the working directory.
423 '''Apply patchfile to the working directory.
424 patchfile: file name of patch'''
424 patchfile: file name of patch'''
425 files = {}
425 files = {}
426 try:
426 try:
427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 files=files)
428 files=files)
429 except Exception, inst:
429 except Exception, inst:
430 self.ui.note(str(inst) + '\n')
430 self.ui.note(str(inst) + '\n')
431 if not self.ui.verbose:
431 if not self.ui.verbose:
432 self.ui.warn("patch failed, unable to continue (try -v)\n")
432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 return (False, files, False)
433 return (False, files, False)
434
434
435 return (True, files, fuzz)
435 return (True, files, fuzz)
436
436
437 def apply(self, repo, series, list=False, update_status=True,
437 def apply(self, repo, series, list=False, update_status=True,
438 strict=False, patchdir=None, merge=None, all_files={}):
438 strict=False, patchdir=None, merge=None, all_files={}):
439 wlock = lock = tr = None
439 wlock = lock = tr = None
440 try:
440 try:
441 wlock = repo.wlock()
441 wlock = repo.wlock()
442 lock = repo.lock()
442 lock = repo.lock()
443 tr = repo.transaction()
443 tr = repo.transaction()
444 try:
444 try:
445 ret = self._apply(repo, series, list, update_status,
445 ret = self._apply(repo, series, list, update_status,
446 strict, patchdir, merge, all_files=all_files)
446 strict, patchdir, merge, all_files=all_files)
447 tr.close()
447 tr.close()
448 self.save_dirty()
448 self.save_dirty()
449 return ret
449 return ret
450 except:
450 except:
451 try:
451 try:
452 tr.abort()
452 tr.abort()
453 finally:
453 finally:
454 repo.invalidate()
454 repo.invalidate()
455 repo.dirstate.invalidate()
455 repo.dirstate.invalidate()
456 raise
456 raise
457 finally:
457 finally:
458 del tr, lock, wlock
458 del tr, lock, wlock
459 self.removeundo(repo)
459 self.removeundo(repo)
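# Editor's note: apply() is a locking/transaction wrapper around _apply(); if
# any patch in the batch fails, the transaction is aborted and the repository
# and dirstate caches are invalidated, so a failed push does not leave partial
# commits behind.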
460
460
461 def _apply(self, repo, series, list=False, update_status=True,
461 def _apply(self, repo, series, list=False, update_status=True,
462 strict=False, patchdir=None, merge=None, all_files={}):
462 strict=False, patchdir=None, merge=None, all_files={}):
463 # TODO unify with commands.py
463 # TODO unify with commands.py
464 if not patchdir:
464 if not patchdir:
465 patchdir = self.path
465 patchdir = self.path
466 err = 0
466 err = 0
467 n = None
467 n = None
468 for patchname in series:
468 for patchname in series:
469 pushable, reason = self.pushable(patchname)
469 pushable, reason = self.pushable(patchname)
470 if not pushable:
470 if not pushable:
471 self.explain_pushable(patchname, all_patches=True)
471 self.explain_pushable(patchname, all_patches=True)
472 continue
472 continue
473 self.ui.warn("applying %s\n" % patchname)
473 self.ui.warn("applying %s\n" % patchname)
474 pf = os.path.join(patchdir, patchname)
474 pf = os.path.join(patchdir, patchname)
475
475
476 try:
476 try:
477 message, comments, user, date, patchfound = self.readheaders(patchname)
477 message, comments, user, date, patchfound = self.readheaders(patchname)
478 except:
478 except:
479 self.ui.warn("Unable to read %s\n" % patchname)
479 self.ui.warn("Unable to read %s\n" % patchname)
480 err = 1
480 err = 1
481 break
481 break
482
482
483 if not message:
483 if not message:
484 message = "imported patch %s\n" % patchname
484 message = "imported patch %s\n" % patchname
485 else:
485 else:
486 if list:
486 if list:
487 message.append("\nimported patch %s" % patchname)
487 message.append("\nimported patch %s" % patchname)
488 message = '\n'.join(message)
488 message = '\n'.join(message)
489
489
490 (patcherr, files, fuzz) = self.patch(repo, pf)
490 (patcherr, files, fuzz) = self.patch(repo, pf)
491 all_files.update(files)
491 all_files.update(files)
492 patcherr = not patcherr
492 patcherr = not patcherr
493
493
494 if merge and files:
494 if merge and files:
495 # Mark as removed/merged and update dirstate parent info
495 # Mark as removed/merged and update dirstate parent info
496 removed = []
496 removed = []
497 merged = []
497 merged = []
498 for f in files:
498 for f in files:
499 if os.path.exists(repo.wjoin(f)):
499 if os.path.exists(repo.wjoin(f)):
500 merged.append(f)
500 merged.append(f)
501 else:
501 else:
502 removed.append(f)
502 removed.append(f)
503 for f in removed:
503 for f in removed:
504 repo.dirstate.remove(f)
504 repo.dirstate.remove(f)
505 for f in merged:
505 for f in merged:
506 repo.dirstate.merge(f)
506 repo.dirstate.merge(f)
507 p1, p2 = repo.dirstate.parents()
507 p1, p2 = repo.dirstate.parents()
508 repo.dirstate.setparents(p1, merge)
508 repo.dirstate.setparents(p1, merge)
509 files = patch.updatedir(self.ui, repo, files)
509 files = patch.updatedir(self.ui, repo, files)
510 n = repo.commit(files, message, user, date, force=1)
510 n = repo.commit(files, message, user, date, force=1)
511
511
512 if n is None:
512 if n is None:
513 raise util.Abort(_("repo commit failed"))
513 raise util.Abort(_("repo commit failed"))
514
514
515 if update_status:
515 if update_status:
516 self.applied.append(statusentry(revlog.hex(n), patchname))
516 self.applied.append(statusentry(revlog.hex(n), patchname))
517
517
518 if patcherr:
518 if patcherr:
519 if not patchfound:
519 if not patchfound:
520 self.ui.warn("patch %s is empty\n" % patchname)
520 self.ui.warn("patch %s is empty\n" % patchname)
521 err = 0
521 err = 0
522 else:
522 else:
523 self.ui.warn("patch failed, rejects left in working dir\n")
523 self.ui.warn("patch failed, rejects left in working dir\n")
524 err = 1
524 err = 1
525 break
525 break
526
526
527 if fuzz and strict:
527 if fuzz and strict:
528 self.ui.warn("fuzz found when applying patch, stopping\n")
528 self.ui.warn("fuzz found when applying patch, stopping\n")
529 err = 1
529 err = 1
530 break
530 break
531 return (err, n)
531 return (err, n)
532
532
533 def delete(self, repo, patches, opts):
533 def delete(self, repo, patches, opts):
534 if not patches and not opts.get('rev'):
534 if not patches and not opts.get('rev'):
535 raise util.Abort(_('qdelete requires at least one revision or '
535 raise util.Abort(_('qdelete requires at least one revision or '
536 'patch name'))
536 'patch name'))
537
537
538 realpatches = []
538 realpatches = []
539 for patch in patches:
539 for patch in patches:
540 patch = self.lookup(patch, strict=True)
540 patch = self.lookup(patch, strict=True)
541 info = self.isapplied(patch)
541 info = self.isapplied(patch)
542 if info:
542 if info:
543 raise util.Abort(_("cannot delete applied patch %s") % patch)
543 raise util.Abort(_("cannot delete applied patch %s") % patch)
544 if patch not in self.series:
544 if patch not in self.series:
545 raise util.Abort(_("patch %s not in series file") % patch)
545 raise util.Abort(_("patch %s not in series file") % patch)
546 realpatches.append(patch)
546 realpatches.append(patch)
547
547
548 appliedbase = 0
548 appliedbase = 0
549 if opts.get('rev'):
549 if opts.get('rev'):
550 if not self.applied:
550 if not self.applied:
551 raise util.Abort(_('no patches applied'))
551 raise util.Abort(_('no patches applied'))
552 revs = cmdutil.revrange(repo, opts['rev'])
552 revs = cmdutil.revrange(repo, opts['rev'])
553 if len(revs) > 1 and revs[0] > revs[1]:
553 if len(revs) > 1 and revs[0] > revs[1]:
554 revs.reverse()
554 revs.reverse()
555 for rev in revs:
555 for rev in revs:
556 if appliedbase >= len(self.applied):
556 if appliedbase >= len(self.applied):
557 raise util.Abort(_("revision %d is not managed") % rev)
557 raise util.Abort(_("revision %d is not managed") % rev)
558
558
559 base = revlog.bin(self.applied[appliedbase].rev)
559 base = revlog.bin(self.applied[appliedbase].rev)
560 node = repo.changelog.node(rev)
560 node = repo.changelog.node(rev)
561 if node != base:
561 if node != base:
562 raise util.Abort(_("cannot delete revision %d above "
562 raise util.Abort(_("cannot delete revision %d above "
563 "applied patches") % rev)
563 "applied patches") % rev)
564 realpatches.append(self.applied[appliedbase].name)
564 realpatches.append(self.applied[appliedbase].name)
565 appliedbase += 1
565 appliedbase += 1
566
566
567 if not opts.get('keep'):
567 if not opts.get('keep'):
568 r = self.qrepo()
568 r = self.qrepo()
569 if r:
569 if r:
570 r.remove(realpatches, True)
570 r.remove(realpatches, True)
571 else:
571 else:
572 for p in realpatches:
572 for p in realpatches:
573 os.unlink(self.join(p))
573 os.unlink(self.join(p))
574
574
575 if appliedbase:
575 if appliedbase:
576 del self.applied[:appliedbase]
576 del self.applied[:appliedbase]
577 self.applied_dirty = 1
577 self.applied_dirty = 1
578 indices = [self.find_series(p) for p in realpatches]
578 indices = [self.find_series(p) for p in realpatches]
579 indices.sort()
579 indices.sort()
580 for i in indices[-1::-1]:
580 for i in indices[-1::-1]:
581 del self.full_series[i]
581 del self.full_series[i]
582 self.parse_series()
582 self.parse_series()
583 self.series_dirty = 1
583 self.series_dirty = 1
584
584
585 def check_toppatch(self, repo):
585 def check_toppatch(self, repo):
586 if len(self.applied) > 0:
586 if len(self.applied) > 0:
587 top = revlog.bin(self.applied[-1].rev)
587 top = revlog.bin(self.applied[-1].rev)
588 pp = repo.dirstate.parents()
588 pp = repo.dirstate.parents()
589 if top not in pp:
589 if top not in pp:
590 raise util.Abort(_("working directory revision is not qtip"))
590 raise util.Abort(_("working directory revision is not qtip"))
591 return top
591 return top
592 return None
592 return None
593 def check_localchanges(self, repo, force=False, refresh=True):
593 def check_localchanges(self, repo, force=False, refresh=True):
594 m, a, r, d = repo.status()[:4]
594 m, a, r, d = repo.status()[:4]
595 if m or a or r or d:
595 if m or a or r or d:
596 if not force:
596 if not force:
597 if refresh:
597 if refresh:
598 raise util.Abort(_("local changes found, refresh first"))
598 raise util.Abort(_("local changes found, refresh first"))
599 else:
599 else:
600 raise util.Abort(_("local changes found"))
600 raise util.Abort(_("local changes found"))
601 return m, a, r, d
601 return m, a, r, d
602
602
603 def new(self, repo, patch, *pats, **opts):
603 def new(self, repo, patch, *pats, **opts):
604 msg = opts.get('msg')
604 msg = opts.get('msg')
605 force = opts.get('force')
605 force = opts.get('force')
606 user = opts.get('user')
606 if os.path.exists(self.join(patch)):
607 if os.path.exists(self.join(patch)):
607 raise util.Abort(_('patch "%s" already exists') % patch)
608 raise util.Abort(_('patch "%s" already exists') % patch)
608 if opts.get('include') or opts.get('exclude') or pats:
609 if opts.get('include') or opts.get('exclude') or pats:
609 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
610 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
610 m, a, r, d = repo.status(files=fns, match=match)[:4]
611 m, a, r, d = repo.status(files=fns, match=match)[:4]
611 else:
612 else:
612 m, a, r, d = self.check_localchanges(repo, force)
613 m, a, r, d = self.check_localchanges(repo, force)
613 fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
614 fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
614 commitfiles = m + a + r
615 commitfiles = m + a + r
615 self.check_toppatch(repo)
616 self.check_toppatch(repo)
616 wlock = repo.wlock()
617 wlock = repo.wlock()
617 try:
618 try:
618 insert = self.full_series_end()
619 insert = self.full_series_end()
619 commitmsg = msg and msg or ("[mq]: %s" % patch)
620 commitmsg = msg and msg or ("[mq]: %s" % patch)
620 n = repo.commit(commitfiles, commitmsg, match=match, force=True)
621 n = repo.commit(commitfiles, commitmsg, user, match=match, force=True)
621 if n is None:
622 if n is None:
622 raise util.Abort(_("repo commit failed"))
623 raise util.Abort(_("repo commit failed"))
623 self.full_series[insert:insert] = [patch]
624 self.full_series[insert:insert] = [patch]
624 self.applied.append(statusentry(revlog.hex(n), patch))
625 self.applied.append(statusentry(revlog.hex(n), patch))
625 self.parse_series()
626 self.parse_series()
626 self.series_dirty = 1
627 self.series_dirty = 1
627 self.applied_dirty = 1
628 self.applied_dirty = 1
628 p = self.opener(patch, "w")
629 p = self.opener(patch, "w")
630 if user:
631 p.write("From: " + user + "\n\n")
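# Editor's note: a patch created with an explicit user therefore starts with a
# mail-style header, e.g. (hypothetical)
#   From: mary
#
#   <optional message>
# and the same user was passed to repo.commit() above, so the patch file and
# the temporary mq changeset agree on authorship.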
629 if msg:
632 if msg:
630 msg = msg + "\n"
633 msg = msg + "\n"
631 p.write(msg)
634 p.write(msg)
632 p.close()
635 p.close()
633 wlock = None
636 wlock = None
634 r = self.qrepo()
637 r = self.qrepo()
635 if r: r.add([patch])
638 if r: r.add([patch])
636 if commitfiles:
639 if commitfiles:
637 self.refresh(repo, short=True, git=opts.get('git'))
640 self.refresh(repo, short=True, git=opts.get('git'))
638 self.removeundo(repo)
641 self.removeundo(repo)
639 finally:
642 finally:
640 del wlock
643 del wlock
641
644
642 def strip(self, repo, rev, update=True, backup="all"):
645 def strip(self, repo, rev, update=True, backup="all"):
643 wlock = lock = None
646 wlock = lock = None
644 try:
647 try:
645 wlock = repo.wlock()
648 wlock = repo.wlock()
646 lock = repo.lock()
649 lock = repo.lock()
647
650
648 if update:
651 if update:
649 self.check_localchanges(repo, refresh=False)
652 self.check_localchanges(repo, refresh=False)
650 urev = self.qparents(repo, rev)
653 urev = self.qparents(repo, rev)
651 hg.clean(repo, urev)
654 hg.clean(repo, urev)
652 repo.dirstate.write()
655 repo.dirstate.write()
653
656
654 self.removeundo(repo)
657 self.removeundo(repo)
655 repair.strip(self.ui, repo, rev, backup)
658 repair.strip(self.ui, repo, rev, backup)
656 # strip may have unbundled a set of backed up revisions after
659 # strip may have unbundled a set of backed up revisions after
657 # the actual strip
660 # the actual strip
658 self.removeundo(repo)
661 self.removeundo(repo)
659 finally:
662 finally:
660 del lock, wlock
663 del lock, wlock
661
664
662 def isapplied(self, patch):
665 def isapplied(self, patch):
663 """returns (index, rev, patch)"""
666 """returns (index, rev, patch)"""
664 for i in xrange(len(self.applied)):
667 for i in xrange(len(self.applied)):
665 a = self.applied[i]
668 a = self.applied[i]
666 if a.name == patch:
669 if a.name == patch:
667 return (i, a.rev, a.name)
670 return (i, a.rev, a.name)
668 return None
671 return None
669
672
670 # if the exact patch name does not exist, we try a few
673 # if the exact patch name does not exist, we try a few
671 # variations. If strict is passed, we try only #1
674 # variations. If strict is passed, we try only #1
672 #
675 #
673 # 1) a number to indicate an offset in the series file
676 # 1) a number to indicate an offset in the series file
674 # 2) a unique substring of the patch name was given
677 # 2) a unique substring of the patch name was given
675 # 3) patchname[-+]num to indicate an offset in the series file
678 # 3) patchname[-+]num to indicate an offset in the series file
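#
# Editor's examples (hypothetical series ['a.patch', 'b.patch', 'c.patch']):
#   lookup('1')         -> 'b.patch'  (offset into the series)
#   lookup('b')         -> 'b.patch'  (unique substring)
#   lookup('c.patch-1') -> 'b.patch'  (one position before c.patch)
#   lookup('a.patch+2') -> 'c.patch'  (two positions after a.patch)
#   'qtip' names the topmost applied patch, 'qbase' the first in the series.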
676 def lookup(self, patch, strict=False):
679 def lookup(self, patch, strict=False):
677 patch = patch and str(patch)
680 patch = patch and str(patch)
678
681
679 def partial_name(s):
682 def partial_name(s):
680 if s in self.series:
683 if s in self.series:
681 return s
684 return s
682 matches = [x for x in self.series if s in x]
685 matches = [x for x in self.series if s in x]
683 if len(matches) > 1:
686 if len(matches) > 1:
684 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
687 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
685 for m in matches:
688 for m in matches:
686 self.ui.warn(' %s\n' % m)
689 self.ui.warn(' %s\n' % m)
687 return None
690 return None
688 if matches:
691 if matches:
689 return matches[0]
692 return matches[0]
690 if len(self.series) > 0 and len(self.applied) > 0:
693 if len(self.series) > 0 and len(self.applied) > 0:
691 if s == 'qtip':
694 if s == 'qtip':
692 return self.series[self.series_end(True)-1]
695 return self.series[self.series_end(True)-1]
693 if s == 'qbase':
696 if s == 'qbase':
694 return self.series[0]
697 return self.series[0]
695 return None
698 return None
696 if patch is None:
699 if patch is None:
697 return None
700 return None
698
701
699 # we don't want to return a partial match until we make
702 # we don't want to return a partial match until we make
700 # sure the file name passed in does not exist (checked below)
703 # sure the file name passed in does not exist (checked below)
701 res = partial_name(patch)
704 res = partial_name(patch)
702 if res and res == patch:
705 if res and res == patch:
703 return res
706 return res
704
707
705 if not os.path.isfile(self.join(patch)):
708 if not os.path.isfile(self.join(patch)):
706 try:
709 try:
707 sno = int(patch)
710 sno = int(patch)
708 except(ValueError, OverflowError):
711 except(ValueError, OverflowError):
709 pass
712 pass
710 else:
713 else:
711 if sno < len(self.series):
714 if sno < len(self.series):
712 return self.series[sno]
715 return self.series[sno]
713 if not strict:
716 if not strict:
714 # return any partial match made above
717 # return any partial match made above
715 if res:
718 if res:
716 return res
719 return res
717 minus = patch.rfind('-')
720 minus = patch.rfind('-')
718 if minus >= 0:
721 if minus >= 0:
719 res = partial_name(patch[:minus])
722 res = partial_name(patch[:minus])
720 if res:
723 if res:
721 i = self.series.index(res)
724 i = self.series.index(res)
722 try:
725 try:
723 off = int(patch[minus+1:] or 1)
726 off = int(patch[minus+1:] or 1)
724 except(ValueError, OverflowError):
727 except(ValueError, OverflowError):
725 pass
728 pass
726 else:
729 else:
727 if i - off >= 0:
730 if i - off >= 0:
728 return self.series[i - off]
731 return self.series[i - off]
729 plus = patch.rfind('+')
732 plus = patch.rfind('+')
730 if plus >= 0:
733 if plus >= 0:
731 res = partial_name(patch[:plus])
734 res = partial_name(patch[:plus])
732 if res:
735 if res:
733 i = self.series.index(res)
736 i = self.series.index(res)
734 try:
737 try:
735 off = int(patch[plus+1:] or 1)
738 off = int(patch[plus+1:] or 1)
736 except(ValueError, OverflowError):
739 except(ValueError, OverflowError):
737 pass
740 pass
738 else:
741 else:
739 if i + off < len(self.series):
742 if i + off < len(self.series):
740 return self.series[i + off]
743 return self.series[i + off]
741 raise util.Abort(_("patch %s not in series") % patch)
744 raise util.Abort(_("patch %s not in series") % patch)
742
745
743 def push(self, repo, patch=None, force=False, list=False,
746 def push(self, repo, patch=None, force=False, list=False,
744 mergeq=None):
747 mergeq=None):
745 wlock = repo.wlock()
748 wlock = repo.wlock()
746 try:
749 try:
747 patch = self.lookup(patch)
750 patch = self.lookup(patch)
748 # Suppose our series file is: A B C and the current 'top'
751 # Suppose our series file is: A B C and the current 'top'
749 # patch is B. qpush C should be performed (moving forward),
752 # patch is B. qpush C should be performed (moving forward),
750 # qpush B is a no-op (no change), and qpush A is an error (can't
753 # qpush B is a no-op (no change), and qpush A is an error (can't
751 # go backwards with qpush).
754 # go backwards with qpush).
752 if patch:
755 if patch:
753 info = self.isapplied(patch)
756 info = self.isapplied(patch)
754 if info:
757 if info:
755 if info[0] < len(self.applied) - 1:
758 if info[0] < len(self.applied) - 1:
756 raise util.Abort(
759 raise util.Abort(
757 _("cannot push to a previous patch: %s") % patch)
760 _("cannot push to a previous patch: %s") % patch)
758 if info[0] < len(self.series) - 1:
761 if info[0] < len(self.series) - 1:
759 self.ui.warn(
762 self.ui.warn(
760 _('qpush: %s is already at the top\n') % patch)
763 _('qpush: %s is already at the top\n') % patch)
761 else:
764 else:
762 self.ui.warn(_('all patches are currently applied\n'))
765 self.ui.warn(_('all patches are currently applied\n'))
763 return
766 return
764
767
765 # Following the above example, starting at 'top' of B:
768 # Following the above example, starting at 'top' of B:
766 # qpush should be performed (pushes C), but a subsequent
769 # qpush should be performed (pushes C), but a subsequent
767 # qpush without an argument is an error (nothing to
770 # qpush without an argument is an error (nothing to
768 # apply). This allows a loop of "...while hg qpush..." to
771 # apply). This allows a loop of "...while hg qpush..." to
769 # work as it detects an error when done
772 # work as it detects an error when done
770 if self.series_end() == len(self.series):
773 if self.series_end() == len(self.series):
771 self.ui.warn(_('patch series already fully applied\n'))
774 self.ui.warn(_('patch series already fully applied\n'))
772 return 1
775 return 1
773 if not force:
776 if not force:
774 self.check_localchanges(repo)
777 self.check_localchanges(repo)
775
778
776 self.applied_dirty = 1
779 self.applied_dirty = 1
777 start = self.series_end()
780 start = self.series_end()
778 if start > 0:
781 if start > 0:
779 self.check_toppatch(repo)
782 self.check_toppatch(repo)
780 if not patch:
783 if not patch:
781 patch = self.series[start]
784 patch = self.series[start]
782 end = start + 1
785 end = start + 1
783 else:
786 else:
784 end = self.series.index(patch, start) + 1
787 end = self.series.index(patch, start) + 1
785 s = self.series[start:end]
788 s = self.series[start:end]
786 all_files = {}
789 all_files = {}
787 try:
790 try:
788 if mergeq:
791 if mergeq:
789 ret = self.mergepatch(repo, mergeq, s)
792 ret = self.mergepatch(repo, mergeq, s)
790 else:
793 else:
791 ret = self.apply(repo, s, list, all_files=all_files)
794 ret = self.apply(repo, s, list, all_files=all_files)
792 except:
795 except:
793 self.ui.warn(_('cleaning up working directory...'))
796 self.ui.warn(_('cleaning up working directory...'))
794 node = repo.dirstate.parents()[0]
797 node = repo.dirstate.parents()[0]
795 hg.revert(repo, node, None)
798 hg.revert(repo, node, None)
796 unknown = repo.status()[4]
799 unknown = repo.status()[4]
797 # only remove unknown files that we know we touched or
800 # only remove unknown files that we know we touched or
798 # created while patching
801 # created while patching
799 for f in unknown:
802 for f in unknown:
800 if f in all_files:
803 if f in all_files:
801 util.unlink(repo.wjoin(f))
804 util.unlink(repo.wjoin(f))
802 self.ui.warn(_('done\n'))
805 self.ui.warn(_('done\n'))
803 raise
806 raise
804 top = self.applied[-1].name
807 top = self.applied[-1].name
805 if ret[0]:
808 if ret[0]:
806 self.ui.write(
809 self.ui.write(
807 "Errors during apply, please fix and refresh %s\n" % top)
810 "Errors during apply, please fix and refresh %s\n" % top)
808 else:
811 else:
809 self.ui.write("Now at: %s\n" % top)
812 self.ui.write("Now at: %s\n" % top)
810 return ret[0]
813 return ret[0]
811 finally:
814 finally:
812 del wlock
815 del wlock
813
816
814 def pop(self, repo, patch=None, force=False, update=True, all=False):
817 def pop(self, repo, patch=None, force=False, update=True, all=False):
815 def getfile(f, rev, flags):
818 def getfile(f, rev, flags):
816 t = repo.file(f).read(rev)
819 t = repo.file(f).read(rev)
817 repo.wwrite(f, t, flags)
820 repo.wwrite(f, t, flags)
818
821
819 wlock = repo.wlock()
822 wlock = repo.wlock()
820 try:
823 try:
821 if patch:
824 if patch:
822 # index, rev, patch
825 # index, rev, patch
823 info = self.isapplied(patch)
826 info = self.isapplied(patch)
824 if not info:
827 if not info:
825 patch = self.lookup(patch)
828 patch = self.lookup(patch)
826 info = self.isapplied(patch)
829 info = self.isapplied(patch)
827 if not info:
830 if not info:
828 raise util.Abort(_("patch %s is not applied") % patch)
831 raise util.Abort(_("patch %s is not applied") % patch)
829
832
830 if len(self.applied) == 0:
833 if len(self.applied) == 0:
831 # Allow qpop -a to work repeatedly,
834 # Allow qpop -a to work repeatedly,
832 # but not qpop without an argument
835 # but not qpop without an argument
833 self.ui.warn(_("no patches applied\n"))
836 self.ui.warn(_("no patches applied\n"))
834 return not all
837 return not all
835
838
836 if not update:
839 if not update:
837 parents = repo.dirstate.parents()
840 parents = repo.dirstate.parents()
838 rr = [ revlog.bin(x.rev) for x in self.applied ]
841 rr = [ revlog.bin(x.rev) for x in self.applied ]
839 for p in parents:
842 for p in parents:
840 if p in rr:
843 if p in rr:
841 self.ui.warn("qpop: forcing dirstate update\n")
844 self.ui.warn("qpop: forcing dirstate update\n")
842 update = True
845 update = True
843
846
844 if not force and update:
847 if not force and update:
845 self.check_localchanges(repo)
848 self.check_localchanges(repo)
846
849
847 self.applied_dirty = 1
850 self.applied_dirty = 1
848 end = len(self.applied)
851 end = len(self.applied)
849 if not patch:
852 if not patch:
850 if all:
853 if all:
851 popi = 0
854 popi = 0
852 else:
855 else:
853 popi = len(self.applied) - 1
856 popi = len(self.applied) - 1
854 else:
857 else:
855 popi = info[0] + 1
858 popi = info[0] + 1
856 if popi >= end:
859 if popi >= end:
857 self.ui.warn("qpop: %s is already at the top\n" % patch)
860 self.ui.warn("qpop: %s is already at the top\n" % patch)
858 return
861 return
859 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
862 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
860
863
861 start = info[0]
864 start = info[0]
862 rev = revlog.bin(info[1])
865 rev = revlog.bin(info[1])
863
866
864 # we know there are no local changes, so we can make a simplified
867 # we know there are no local changes, so we can make a simplified
865 # form of hg.update.
868 # form of hg.update.
866 if update:
869 if update:
867 top = self.check_toppatch(repo)
870 top = self.check_toppatch(repo)
868 qp = self.qparents(repo, rev)
871 qp = self.qparents(repo, rev)
869 changes = repo.changelog.read(qp)
872 changes = repo.changelog.read(qp)
870 mmap = repo.manifest.read(changes[0])
873 mmap = repo.manifest.read(changes[0])
871 m, a, r, d, u = repo.status(qp, top)[:5]
874 m, a, r, d, u = repo.status(qp, top)[:5]
872 if d:
875 if d:
873 raise util.Abort("deletions found between repo revs")
876 raise util.Abort("deletions found between repo revs")
874 for f in m:
877 for f in m:
875 getfile(f, mmap[f], mmap.flags(f))
878 getfile(f, mmap[f], mmap.flags(f))
876 for f in r:
879 for f in r:
877 getfile(f, mmap[f], mmap.flags(f))
880 getfile(f, mmap[f], mmap.flags(f))
878 for f in m + r:
881 for f in m + r:
879 repo.dirstate.normal(f)
882 repo.dirstate.normal(f)
880 for f in a:
883 for f in a:
881 try:
884 try:
882 os.unlink(repo.wjoin(f))
885 os.unlink(repo.wjoin(f))
883 except OSError, e:
886 except OSError, e:
884 if e.errno != errno.ENOENT:
887 if e.errno != errno.ENOENT:
885 raise
888 raise
886 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
889 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
887 except: pass
890 except: pass
888 repo.dirstate.forget(f)
891 repo.dirstate.forget(f)
889 repo.dirstate.setparents(qp, revlog.nullid)
892 repo.dirstate.setparents(qp, revlog.nullid)
890 self.strip(repo, rev, update=False, backup='strip')
893 self.strip(repo, rev, update=False, backup='strip')
891 del self.applied[start:end]
894 del self.applied[start:end]
892 if len(self.applied):
895 if len(self.applied):
893 self.ui.write("Now at: %s\n" % self.applied[-1].name)
896 self.ui.write("Now at: %s\n" % self.applied[-1].name)
894 else:
897 else:
895 self.ui.write("Patch queue now empty\n")
898 self.ui.write("Patch queue now empty\n")
896 finally:
899 finally:
897 del wlock
900 del wlock
898
901
899 def diff(self, repo, pats, opts):
902 def diff(self, repo, pats, opts):
900 top = self.check_toppatch(repo)
903 top = self.check_toppatch(repo)
901 if not top:
904 if not top:
902 self.ui.write("No patches applied\n")
905 self.ui.write("No patches applied\n")
903 return
906 return
904 qp = self.qparents(repo, top)
907 qp = self.qparents(repo, top)
905 if opts.get('git'):
908 if opts.get('git'):
906 self.diffopts().git = True
909 self.diffopts().git = True
907 self.printdiff(repo, qp, files=pats, opts=opts)
910 self.printdiff(repo, qp, files=pats, opts=opts)
908
911
909 def refresh(self, repo, pats=None, **opts):
912 def refresh(self, repo, pats=None, **opts):
910 if len(self.applied) == 0:
913 if len(self.applied) == 0:
911 self.ui.write("No patches applied\n")
914 self.ui.write("No patches applied\n")
912 return 1
915 return 1
913 wlock = repo.wlock()
916 wlock = repo.wlock()
914 try:
917 try:
915 self.check_toppatch(repo)
918 self.check_toppatch(repo)
916 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
919 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
917 top = revlog.bin(top)
920 top = revlog.bin(top)
918 cparents = repo.changelog.parents(top)
921 cparents = repo.changelog.parents(top)
919 patchparent = self.qparents(repo, top)
922 patchparent = self.qparents(repo, top)
920 message, comments, user, date, patchfound = self.readheaders(patchfn)
923 message, comments, user, date, patchfound = self.readheaders(patchfn)
921
924
922 patchf = self.opener(patchfn, 'r+')
925 patchf = self.opener(patchfn, 'r+')
923
926
924 # if the patch was a git patch, refresh it as a git patch
927 # if the patch was a git patch, refresh it as a git patch
925 for line in patchf:
928 for line in patchf:
926 if line.startswith('diff --git'):
929 if line.startswith('diff --git'):
927 self.diffopts().git = True
930 self.diffopts().git = True
928 break
931 break
929
932
930 msg = opts.get('msg', '').rstrip()
933 msg = opts.get('msg', '').rstrip()
931 if msg and comments:
934 if msg and comments:
932 # Remove existing message, keeping the rest of the comments
935 # Remove existing message, keeping the rest of the comments
933 # fields.
936 # fields.
934 # If comments contains 'subject: ', message will prepend
937 # If comments contains 'subject: ', message will prepend
935 # the field and a blank line.
938 # the field and a blank line.
936 if message:
939 if message:
937 subj = 'subject: ' + message[0].lower()
940 subj = 'subject: ' + message[0].lower()
938 for i in xrange(len(comments)):
941 for i in xrange(len(comments)):
939 if subj == comments[i].lower():
942 if subj == comments[i].lower():
940 del comments[i]
943 del comments[i]
941 message = message[2:]
944 message = message[2:]
942 break
945 break
943 ci = 0
946 ci = 0
944 for mi in xrange(len(message)):
947 for mi in xrange(len(message)):
945 while message[mi] != comments[ci]:
948 while message[mi] != comments[ci]:
946 ci += 1
949 ci += 1
947 del comments[ci]
950 del comments[ci]
951
952 newuser = opts.get('user')
953 if newuser:
954 # Update all references to a user in the patch header.
955 # If none found, add "From: " header.
956 needfrom = True
957 for prefix in ['# User ', 'From: ']:
958 for i in xrange(len(comments)):
959 if comments[i].startswith(prefix):
960 comments[i] = prefix + newuser
961 needfrom = False
962 break
963 if needfrom:
964 comments = ['From: ' + newuser, ''] + comments
965 user = newuser
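# Editor's note (hypothetical example): refreshing with a new user rewrites an
# existing 'From: ' or '# User ' header in place, so a patch that began with
# 'From: mary' ends up with a single 'From: jane' line; if the patch carried
# no user header at all, a 'From: <newuser>' line plus a blank line are
# prepended to the comments.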
966
948 if msg:
967 if msg:
949 comments.append(msg)
968 comments.append(msg)
950
969
951 patchf.seek(0)
970 patchf.seek(0)
952 patchf.truncate()
971 patchf.truncate()
953
972
954 if comments:
973 if comments:
955 comments = "\n".join(comments) + '\n\n'
974 comments = "\n".join(comments) + '\n\n'
956 patchf.write(comments)
975 patchf.write(comments)
957
976
958 if opts.get('git'):
977 if opts.get('git'):
959 self.diffopts().git = True
978 self.diffopts().git = True
960 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
979 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
961 tip = repo.changelog.tip()
980 tip = repo.changelog.tip()
962 if top == tip:
981 if top == tip:
963 # if the top of our patch queue is also the tip, there is an
982 # if the top of our patch queue is also the tip, there is an
964 # optimization here. We update the dirstate in place and strip
983 # optimization here. We update the dirstate in place and strip
965 # off the tip commit. Then just commit the current directory
984 # off the tip commit. Then just commit the current directory
966 # tree. We can also send repo.commit the list of files
985 # tree. We can also send repo.commit the list of files
967 # changed to speed up the diff
986 # changed to speed up the diff
968 #
987 #
969 # in short mode, we only diff the files included in the
988 # in short mode, we only diff the files included in the
970 # patch already
989 # patch already
971 #
990 #
972 # this should really read:
991 # this should really read:
973 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
992 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
974 # but we do it backwards to take advantage of manifest/chlog
993 # but we do it backwards to take advantage of manifest/chlog
975 # caching against the next repo.status call
994 # caching against the next repo.status call
976 #
995 #
977 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
996 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
978 changes = repo.changelog.read(tip)
997 changes = repo.changelog.read(tip)
979 man = repo.manifest.read(changes[0])
998 man = repo.manifest.read(changes[0])
980 aaa = aa[:]
999 aaa = aa[:]
981 if opts.get('short'):
1000 if opts.get('short'):
982 filelist = mm + aa + dd
1001 filelist = mm + aa + dd
983 match = dict.fromkeys(filelist).__contains__
1002 match = dict.fromkeys(filelist).__contains__
984 else:
1003 else:
985 filelist = None
1004 filelist = None
986 match = util.always
1005 match = util.always
987 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
1006 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
988
1007
989 # we might end up with files that were added between
1008 # we might end up with files that were added between
990 # tip and the dirstate parent, but then changed in the
1009 # tip and the dirstate parent, but then changed in the
991 # local dirstate. in this case, we want them to only
1010 # local dirstate. in this case, we want them to only
992 # show up in the added section
1011 # show up in the added section
993 for x in m:
1012 for x in m:
994 if x not in aa:
1013 if x not in aa:
995 mm.append(x)
1014 mm.append(x)
996 # we might end up with files added by the local dirstate that
1015 # we might end up with files added by the local dirstate that
997 # were deleted by the patch. In this case, they should only
1016 # were deleted by the patch. In this case, they should only
998 # show up in the changed section.
1017 # show up in the changed section.
999 for x in a:
1018 for x in a:
1000 if x in dd:
1019 if x in dd:
1001 del dd[dd.index(x)]
1020 del dd[dd.index(x)]
1002 mm.append(x)
1021 mm.append(x)
1003 else:
1022 else:
1004 aa.append(x)
1023 aa.append(x)
1005 # make sure any files deleted in the local dirstate
1024 # make sure any files deleted in the local dirstate
1006 # are not in the add or change column of the patch
1025 # are not in the add or change column of the patch
1007 forget = []
1026 forget = []
1008 for x in d + r:
1027 for x in d + r:
1009 if x in aa:
1028 if x in aa:
1010 del aa[aa.index(x)]
1029 del aa[aa.index(x)]
1011 forget.append(x)
1030 forget.append(x)
1012 continue
1031 continue
1013 elif x in mm:
1032 elif x in mm:
1014 del mm[mm.index(x)]
1033 del mm[mm.index(x)]
1015 dd.append(x)
1034 dd.append(x)
1016
1035
1017 m = util.unique(mm)
1036 m = util.unique(mm)
1018 r = util.unique(dd)
1037 r = util.unique(dd)
1019 a = util.unique(aa)
1038 a = util.unique(aa)
1020 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1039 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1021 filelist = util.unique(c[0] + c[1] + c[2])
1040 filelist = util.unique(c[0] + c[1] + c[2])
1022 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1041 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1023 fp=patchf, changes=c, opts=self.diffopts())
1042 fp=patchf, changes=c, opts=self.diffopts())
1024 patchf.close()
1043 patchf.close()
1025
1044
1026 repo.dirstate.setparents(*cparents)
1045 repo.dirstate.setparents(*cparents)
1027 copies = {}
1046 copies = {}
1028 for dst in a:
1047 for dst in a:
1029 src = repo.dirstate.copied(dst)
1048 src = repo.dirstate.copied(dst)
1030 if src is not None:
1049 if src is not None:
1031 copies.setdefault(src, []).append(dst)
1050 copies.setdefault(src, []).append(dst)
1032 repo.dirstate.add(dst)
1051 repo.dirstate.add(dst)
1033 # remember the copies between patchparent and tip
1052 # remember the copies between patchparent and tip
1034 # this may be slow, so don't do it if we're not tracking copies
1053 # this may be slow, so don't do it if we're not tracking copies
1035 if self.diffopts().git:
1054 if self.diffopts().git:
1036 for dst in aaa:
1055 for dst in aaa:
1037 f = repo.file(dst)
1056 f = repo.file(dst)
1038 src = f.renamed(man[dst])
1057 src = f.renamed(man[dst])
1039 if src:
1058 if src:
1040 copies[src[0]] = copies.get(dst, [])
1059 copies[src[0]] = copies.get(dst, [])
1041 if dst in a:
1060 if dst in a:
1042 copies[src[0]].append(dst)
1061 copies[src[0]].append(dst)
1043 # we can't copy a file created by the patch itself
1062 # we can't copy a file created by the patch itself
1044 if dst in copies:
1063 if dst in copies:
1045 del copies[dst]
1064 del copies[dst]
1046 for src, dsts in copies.iteritems():
1065 for src, dsts in copies.iteritems():
1047 for dst in dsts:
1066 for dst in dsts:
1048 repo.dirstate.copy(src, dst)
1067 repo.dirstate.copy(src, dst)
1049 for f in r:
1068 for f in r:
1050 repo.dirstate.remove(f)
1069 repo.dirstate.remove(f)
1051 # if the patch excludes a modified file, mark that
1070 # if the patch excludes a modified file, mark that
1052 # file with mtime=0 so status can see it.
1071 # file with mtime=0 so status can see it.
1053 mm = []
1072 mm = []
1054 for i in xrange(len(m)-1, -1, -1):
1073 for i in xrange(len(m)-1, -1, -1):
1055 if not matchfn(m[i]):
1074 if not matchfn(m[i]):
1056 mm.append(m[i])
1075 mm.append(m[i])
1057 del m[i]
1076 del m[i]
1058 for f in m:
1077 for f in m:
1059 repo.dirstate.normal(f)
1078 repo.dirstate.normal(f)
1060 for f in mm:
1079 for f in mm:
1061 repo.dirstate.normallookup(f)
1080 repo.dirstate.normallookup(f)
1062 for f in forget:
1081 for f in forget:
1063 repo.dirstate.forget(f)
1082 repo.dirstate.forget(f)
1064
1083
1065 if not msg:
1084 if not msg:
1066 if not message:
1085 if not message:
1067 message = "[mq]: %s\n" % patchfn
1086 message = "[mq]: %s\n" % patchfn
1068 else:
1087 else:
1069 message = "\n".join(message)
1088 message = "\n".join(message)
1070 else:
1089 else:
1071 message = msg
1090 message = msg
1072
1091
1092 if not user:
1093 user = changes[1]
1094
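This new hunk, together with the commit call just below that now passes user instead of changes[1], lets qrefresh honour an explicit -u/--user while still defaulting to the author already recorded in the changeset being refreshed. A minimal standalone sketch of that fallback (the function and argument names are mine, not mq's):

    def pick_user(explicit_user, changeset_user):
        # qrefresh semantics after this change: an explicit -u value wins,
        # otherwise fall back to the user recorded in the refreshed changeset.
        return explicit_user or changeset_user

    assert pick_user(None, 'mary') == 'mary'    # no -u: keep original author
    assert pick_user('jane', 'mary') == 'jane'  # -u jane: override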
1073 self.strip(repo, top, update=False,
1095 self.strip(repo, top, update=False,
1074 backup='strip')
1096 backup='strip')
1075 n = repo.commit(filelist, message, changes[1], match=matchfn,
1097 n = repo.commit(filelist, message, user, match=matchfn,
1076 force=1)
1098 force=1)
1077 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1099 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1078 self.applied_dirty = 1
1100 self.applied_dirty = 1
1079 self.removeundo(repo)
1101 self.removeundo(repo)
1080 else:
1102 else:
1081 self.printdiff(repo, patchparent, fp=patchf)
1103 self.printdiff(repo, patchparent, fp=patchf)
1082 patchf.close()
1104 patchf.close()
1083 added = repo.status()[1]
1105 added = repo.status()[1]
1084 for a in added:
1106 for a in added:
1085 f = repo.wjoin(a)
1107 f = repo.wjoin(a)
1086 try:
1108 try:
1087 os.unlink(f)
1109 os.unlink(f)
1088 except OSError, e:
1110 except OSError, e:
1089 if e.errno != errno.ENOENT:
1111 if e.errno != errno.ENOENT:
1090 raise
1112 raise
1091 try: os.removedirs(os.path.dirname(f))
1113 try: os.removedirs(os.path.dirname(f))
1092 except: pass
1114 except: pass
1093 # forget the file copies in the dirstate
1115 # forget the file copies in the dirstate
1094 # push should readd the files later on
1116 # push should readd the files later on
1095 repo.dirstate.forget(a)
1117 repo.dirstate.forget(a)
1096 self.pop(repo, force=True)
1118 self.pop(repo, force=True)
1097 self.push(repo, force=True)
1119 self.push(repo, force=True)
1098 finally:
1120 finally:
1099 del wlock
1121 del wlock
1100
1122
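The comment blocks inside refresh above spell out how the patch's own file lists (status between the patch parent and tip) are merged with the working-directory status so that each file lands in exactly one of the modified/added/removed buckets of the regenerated diff. Here is that bookkeeping as a standalone sketch over plain lists; the function name and the simplified inputs are illustrative, not mq's API:

    def reconcile(mm, aa, dd, m, a, d, r):
        """Merge working-directory status (m, a, d, r) into the patch's
        modified/added/deleted lists (mm, aa, dd), following the rules
        described in the comments of queue.refresh() above."""
        mm, aa, dd = list(mm), list(aa), list(dd)
        forget = []
        # locally modified files stay "modified" unless the patch added them
        for x in m:
            if x not in aa:
                mm.append(x)
        # locally added files that the patch deleted become plain changes
        for x in a:
            if x in dd:
                dd.remove(x)
                mm.append(x)
            else:
                aa.append(x)
        # locally deleted/removed files must not stay in the add/change lists
        for x in d + r:
            if x in aa:
                aa.remove(x)
                forget.append(x)
            elif x in mm:
                mm.remove(x)
                dd.append(x)
        return sorted(set(mm)), sorted(set(aa)), sorted(set(dd)), forget

    # a file added by the patch (aa) and then deleted locally (d) is forgotten
    mm2, aa2, dd2, forget = reconcile([], ['new.txt'], [], [], [], ['new.txt'], [])
    assert forget == ['new.txt'] and aa2 == []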
1101 def init(self, repo, create=False):
1123 def init(self, repo, create=False):
1102 if not create and os.path.isdir(self.path):
1124 if not create and os.path.isdir(self.path):
1103 raise util.Abort(_("patch queue directory already exists"))
1125 raise util.Abort(_("patch queue directory already exists"))
1104 try:
1126 try:
1105 os.mkdir(self.path)
1127 os.mkdir(self.path)
1106 except OSError, inst:
1128 except OSError, inst:
1107 if inst.errno != errno.EEXIST or not create:
1129 if inst.errno != errno.EEXIST or not create:
1108 raise
1130 raise
1109 if create:
1131 if create:
1110 return self.qrepo(create=True)
1132 return self.qrepo(create=True)
1111
1133
1112 def unapplied(self, repo, patch=None):
1134 def unapplied(self, repo, patch=None):
1113 if patch and patch not in self.series:
1135 if patch and patch not in self.series:
1114 raise util.Abort(_("patch %s is not in series file") % patch)
1136 raise util.Abort(_("patch %s is not in series file") % patch)
1115 if not patch:
1137 if not patch:
1116 start = self.series_end()
1138 start = self.series_end()
1117 else:
1139 else:
1118 start = self.series.index(patch) + 1
1140 start = self.series.index(patch) + 1
1119 unapplied = []
1141 unapplied = []
1120 for i in xrange(start, len(self.series)):
1142 for i in xrange(start, len(self.series)):
1121 pushable, reason = self.pushable(i)
1143 pushable, reason = self.pushable(i)
1122 if pushable:
1144 if pushable:
1123 unapplied.append((i, self.series[i]))
1145 unapplied.append((i, self.series[i]))
1124 self.explain_pushable(i)
1146 self.explain_pushable(i)
1125 return unapplied
1147 return unapplied
1126
1148
1127 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1149 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1128 summary=False):
1150 summary=False):
1129 def displayname(patchname):
1151 def displayname(patchname):
1130 if summary:
1152 if summary:
1131 msg = self.readheaders(patchname)[0]
1153 msg = self.readheaders(patchname)[0]
1132 msg = msg and ': ' + msg[0] or ': '
1154 msg = msg and ': ' + msg[0] or ': '
1133 else:
1155 else:
1134 msg = ''
1156 msg = ''
1135 return '%s%s' % (patchname, msg)
1157 return '%s%s' % (patchname, msg)
1136
1158
1137 applied = dict.fromkeys([p.name for p in self.applied])
1159 applied = dict.fromkeys([p.name for p in self.applied])
1138 if length is None:
1160 if length is None:
1139 length = len(self.series) - start
1161 length = len(self.series) - start
1140 if not missing:
1162 if not missing:
1141 for i in xrange(start, start+length):
1163 for i in xrange(start, start+length):
1142 patch = self.series[i]
1164 patch = self.series[i]
1143 if patch in applied:
1165 if patch in applied:
1144 stat = 'A'
1166 stat = 'A'
1145 elif self.pushable(i)[0]:
1167 elif self.pushable(i)[0]:
1146 stat = 'U'
1168 stat = 'U'
1147 else:
1169 else:
1148 stat = 'G'
1170 stat = 'G'
1149 pfx = ''
1171 pfx = ''
1150 if self.ui.verbose:
1172 if self.ui.verbose:
1151 pfx = '%d %s ' % (i, stat)
1173 pfx = '%d %s ' % (i, stat)
1152 elif status and status != stat:
1174 elif status and status != stat:
1153 continue
1175 continue
1154 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1176 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1155 else:
1177 else:
1156 msng_list = []
1178 msng_list = []
1157 for root, dirs, files in os.walk(self.path):
1179 for root, dirs, files in os.walk(self.path):
1158 d = root[len(self.path) + 1:]
1180 d = root[len(self.path) + 1:]
1159 for f in files:
1181 for f in files:
1160 fl = os.path.join(d, f)
1182 fl = os.path.join(d, f)
1161 if (fl not in self.series and
1183 if (fl not in self.series and
1162 fl not in (self.status_path, self.series_path,
1184 fl not in (self.status_path, self.series_path,
1163 self.guards_path)
1185 self.guards_path)
1164 and not fl.startswith('.')):
1186 and not fl.startswith('.')):
1165 msng_list.append(fl)
1187 msng_list.append(fl)
1166 msng_list.sort()
1188 msng_list.sort()
1167 for x in msng_list:
1189 for x in msng_list:
1168 pfx = self.ui.verbose and ('D ') or ''
1190 pfx = self.ui.verbose and ('D ') or ''
1169 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1191 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1170
1192
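qseries labels each entry with 'A' (applied), 'U' (unapplied but pushable) or 'G' (guarded, i.e. skipped under the current qselect choice), as the branch above shows. A toy classification over plain data, with applied and pushable supplied by the caller (all names here are illustrative):

    def series_status(series, applied, pushable):
        """Return (letter, name) pairs the way qseries -v labels patches."""
        out = []
        for i, name in enumerate(series):
            if name in applied:
                stat = 'A'          # applied
            elif pushable(i):
                stat = 'U'          # unapplied, would be pushed
            else:
                stat = 'G'          # guarded out by qselect
            out.append((stat, name))
        return out

    statuses = series_status(['1.patch', '2.patch', '3.patch'],
                             applied={'1.patch'},
                             pushable=lambda i: i != 2)
    assert statuses == [('A', '1.patch'), ('U', '2.patch'), ('G', '3.patch')]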
1171 def issaveline(self, l):
1193 def issaveline(self, l):
1172 if l.name == '.hg.patches.save.line':
1194 if l.name == '.hg.patches.save.line':
1173 return True
1195 return True
1174
1196
1175 def qrepo(self, create=False):
1197 def qrepo(self, create=False):
1176 if create or os.path.isdir(self.join(".hg")):
1198 if create or os.path.isdir(self.join(".hg")):
1177 return hg.repository(self.ui, path=self.path, create=create)
1199 return hg.repository(self.ui, path=self.path, create=create)
1178
1200
1179 def restore(self, repo, rev, delete=None, qupdate=None):
1201 def restore(self, repo, rev, delete=None, qupdate=None):
1180 c = repo.changelog.read(rev)
1202 c = repo.changelog.read(rev)
1181 desc = c[4].strip()
1203 desc = c[4].strip()
1182 lines = desc.splitlines()
1204 lines = desc.splitlines()
1183 i = 0
1205 i = 0
1184 datastart = None
1206 datastart = None
1185 series = []
1207 series = []
1186 applied = []
1208 applied = []
1187 qpp = None
1209 qpp = None
1188 for i in xrange(0, len(lines)):
1210 for i in xrange(0, len(lines)):
1189 if lines[i] == 'Patch Data:':
1211 if lines[i] == 'Patch Data:':
1190 datastart = i + 1
1212 datastart = i + 1
1191 elif lines[i].startswith('Dirstate:'):
1213 elif lines[i].startswith('Dirstate:'):
1192 l = lines[i].rstrip()
1214 l = lines[i].rstrip()
1193 l = l[10:].split(' ')
1215 l = l[10:].split(' ')
1194 qpp = [ hg.bin(x) for x in l ]
1216 qpp = [ hg.bin(x) for x in l ]
1195 elif datastart != None:
1217 elif datastart != None:
1196 l = lines[i].rstrip()
1218 l = lines[i].rstrip()
1197 se = statusentry(l)
1219 se = statusentry(l)
1198 file_ = se.name
1220 file_ = se.name
1199 if se.rev:
1221 if se.rev:
1200 applied.append(se)
1222 applied.append(se)
1201 else:
1223 else:
1202 series.append(file_)
1224 series.append(file_)
1203 if datastart == None:
1225 if datastart == None:
1204 self.ui.warn("No saved patch data found\n")
1226 self.ui.warn("No saved patch data found\n")
1205 return 1
1227 return 1
1206 self.ui.warn("restoring status: %s\n" % lines[0])
1228 self.ui.warn("restoring status: %s\n" % lines[0])
1207 self.full_series = series
1229 self.full_series = series
1208 self.applied = applied
1230 self.applied = applied
1209 self.parse_series()
1231 self.parse_series()
1210 self.series_dirty = 1
1232 self.series_dirty = 1
1211 self.applied_dirty = 1
1233 self.applied_dirty = 1
1212 heads = repo.changelog.heads()
1234 heads = repo.changelog.heads()
1213 if delete:
1235 if delete:
1214 if rev not in heads:
1236 if rev not in heads:
1215 self.ui.warn("save entry has children, leaving it alone\n")
1237 self.ui.warn("save entry has children, leaving it alone\n")
1216 else:
1238 else:
1217 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1239 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1218 pp = repo.dirstate.parents()
1240 pp = repo.dirstate.parents()
1219 if rev in pp:
1241 if rev in pp:
1220 update = True
1242 update = True
1221 else:
1243 else:
1222 update = False
1244 update = False
1223 self.strip(repo, rev, update=update, backup='strip')
1245 self.strip(repo, rev, update=update, backup='strip')
1224 if qpp:
1246 if qpp:
1225 self.ui.warn("saved queue repository parents: %s %s\n" %
1247 self.ui.warn("saved queue repository parents: %s %s\n" %
1226 (hg.short(qpp[0]), hg.short(qpp[1])))
1248 (hg.short(qpp[0]), hg.short(qpp[1])))
1227 if qupdate:
1249 if qupdate:
1228 print "queue directory updating"
1250 print "queue directory updating"
1229 r = self.qrepo()
1251 r = self.qrepo()
1230 if not r:
1252 if not r:
1231 self.ui.warn("Unable to load queue repository\n")
1253 self.ui.warn("Unable to load queue repository\n")
1232 return 1
1254 return 1
1233 hg.clean(r, qpp[0])
1255 hg.clean(r, qpp[0])
1234
1256
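restore() reverses what save() writes into a changeset description: an optional 'Dirstate: <node> <node>' line naming the queue repository parents, then a 'Patch Data:' section whose lines are either '<rev>:<name>' for applied patches or ':<name>' for the rest of the series. A standalone parser for that text layout, sketched from the code above (it stands in for, and is not, the real statusentry handling; the node values in the example are invented):

    def parse_save_description(desc):
        """Split a qsave changeset description into (dirstate_parents,
        applied, series) following the layout written by queue.save()."""
        dirstate = None
        applied, series = [], []
        datastart = None
        for i, line in enumerate(desc.splitlines()):
            if line == 'Patch Data:':
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                dirstate = line[len('Dirstate:'):].split()
            elif datastart is not None:
                rev, _, name = line.rstrip().partition(':')
                if rev:
                    applied.append((rev, name))
                else:
                    series.append(name)
        return dirstate, applied, series

    desc = ('hg patches saved state\n'
            'Dirstate: 1111 2222\n'
            '\n'
            'Patch Data:\n'
            'abcdef:1.patch\n'
            ':2.patch\n')
    assert parse_save_description(desc) == (
        ['1111', '2222'], [('abcdef', '1.patch')], ['2.patch'])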
1235 def save(self, repo, msg=None):
1257 def save(self, repo, msg=None):
1236 if len(self.applied) == 0:
1258 if len(self.applied) == 0:
1237 self.ui.warn("save: no patches applied, exiting\n")
1259 self.ui.warn("save: no patches applied, exiting\n")
1238 return 1
1260 return 1
1239 if self.issaveline(self.applied[-1]):
1261 if self.issaveline(self.applied[-1]):
1240 self.ui.warn("status is already saved\n")
1262 self.ui.warn("status is already saved\n")
1241 return 1
1263 return 1
1242
1264
1243 ar = [ ':' + x for x in self.full_series ]
1265 ar = [ ':' + x for x in self.full_series ]
1244 if not msg:
1266 if not msg:
1245 msg = "hg patches saved state"
1267 msg = "hg patches saved state"
1246 else:
1268 else:
1247 msg = "hg patches: " + msg.rstrip('\r\n')
1269 msg = "hg patches: " + msg.rstrip('\r\n')
1248 r = self.qrepo()
1270 r = self.qrepo()
1249 if r:
1271 if r:
1250 pp = r.dirstate.parents()
1272 pp = r.dirstate.parents()
1251 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1273 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1252 msg += "\n\nPatch Data:\n"
1274 msg += "\n\nPatch Data:\n"
1253 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1275 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1254 "\n".join(ar) + '\n' or "")
1276 "\n".join(ar) + '\n' or "")
1255 n = repo.commit(None, text, user=None, force=1)
1277 n = repo.commit(None, text, user=None, force=1)
1256 if not n:
1278 if not n:
1257 self.ui.warn("repo commit failed\n")
1279 self.ui.warn("repo commit failed\n")
1258 return 1
1280 return 1
1259 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1281 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1260 self.applied_dirty = 1
1282 self.applied_dirty = 1
1261 self.removeundo(repo)
1283 self.removeundo(repo)
1262
1284
1263 def full_series_end(self):
1285 def full_series_end(self):
1264 if len(self.applied) > 0:
1286 if len(self.applied) > 0:
1265 p = self.applied[-1].name
1287 p = self.applied[-1].name
1266 end = self.find_series(p)
1288 end = self.find_series(p)
1267 if end == None:
1289 if end == None:
1268 return len(self.full_series)
1290 return len(self.full_series)
1269 return end + 1
1291 return end + 1
1270 return 0
1292 return 0
1271
1293
1272 def series_end(self, all_patches=False):
1294 def series_end(self, all_patches=False):
1273 """If all_patches is False, return the index of the next pushable patch
1295 """If all_patches is False, return the index of the next pushable patch
1274 in the series, or the series length. If all_patches is True, return the
1296 in the series, or the series length. If all_patches is True, return the
1275 index of the first patch past the last applied one.
1297 index of the first patch past the last applied one.
1276 """
1298 """
1277 end = 0
1299 end = 0
1278 def next(start):
1300 def next(start):
1279 if all_patches:
1301 if all_patches:
1280 return start
1302 return start
1281 i = start
1303 i = start
1282 while i < len(self.series):
1304 while i < len(self.series):
1283 p, reason = self.pushable(i)
1305 p, reason = self.pushable(i)
1284 if p:
1306 if p:
1285 break
1307 break
1286 self.explain_pushable(i)
1308 self.explain_pushable(i)
1287 i += 1
1309 i += 1
1288 return i
1310 return i
1289 if len(self.applied) > 0:
1311 if len(self.applied) > 0:
1290 p = self.applied[-1].name
1312 p = self.applied[-1].name
1291 try:
1313 try:
1292 end = self.series.index(p)
1314 end = self.series.index(p)
1293 except ValueError:
1315 except ValueError:
1294 return 0
1316 return 0
1295 return next(end + 1)
1317 return next(end + 1)
1296 return next(end)
1318 return next(end)
1297
1319
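series_end, per its docstring, starts just past the last applied patch and, unless all_patches is set, keeps skipping entries that are guarded out until it reaches the next pushable one. The same walk over plain lists, with pushable passed in as a predicate (names are illustrative):

    def series_end(series, applied, pushable, all_patches=False):
        """Index of the next patch qpush would apply, or len(series)."""
        if applied:
            try:
                start = series.index(applied[-1]) + 1
            except ValueError:
                return 0
        else:
            start = 0
        if all_patches:
            return start
        i = start
        while i < len(series) and not pushable(i):
            i += 1          # skip guarded-out patches
        return i

    series = ['a.patch', 'b.patch', 'c.patch']
    # with a.patch applied and b.patch guarded out, the next push is c.patch
    assert series_end(series, ['a.patch'], lambda i: i != 1) == 2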
1298 def appliedname(self, index):
1320 def appliedname(self, index):
1299 pname = self.applied[index].name
1321 pname = self.applied[index].name
1300 if not self.ui.verbose:
1322 if not self.ui.verbose:
1301 p = pname
1323 p = pname
1302 else:
1324 else:
1303 p = str(self.series.index(pname)) + " " + pname
1325 p = str(self.series.index(pname)) + " " + pname
1304 return p
1326 return p
1305
1327
1306 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1328 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1307 force=None, git=False):
1329 force=None, git=False):
1308 def checkseries(patchname):
1330 def checkseries(patchname):
1309 if patchname in self.series:
1331 if patchname in self.series:
1310 raise util.Abort(_('patch %s is already in the series file')
1332 raise util.Abort(_('patch %s is already in the series file')
1311 % patchname)
1333 % patchname)
1312 def checkfile(patchname):
1334 def checkfile(patchname):
1313 if not force and os.path.exists(self.join(patchname)):
1335 if not force and os.path.exists(self.join(patchname)):
1314 raise util.Abort(_('patch "%s" already exists')
1336 raise util.Abort(_('patch "%s" already exists')
1315 % patchname)
1337 % patchname)
1316
1338
1317 if rev:
1339 if rev:
1318 if files:
1340 if files:
1319 raise util.Abort(_('option "-r" not valid when importing '
1341 raise util.Abort(_('option "-r" not valid when importing '
1320 'files'))
1342 'files'))
1321 rev = cmdutil.revrange(repo, rev)
1343 rev = cmdutil.revrange(repo, rev)
1322 rev.sort(lambda x, y: cmp(y, x))
1344 rev.sort(lambda x, y: cmp(y, x))
1323 if (len(files) > 1 or len(rev) > 1) and patchname:
1345 if (len(files) > 1 or len(rev) > 1) and patchname:
1324 raise util.Abort(_('option "-n" not valid when importing multiple '
1346 raise util.Abort(_('option "-n" not valid when importing multiple '
1325 'patches'))
1347 'patches'))
1326 i = 0
1348 i = 0
1327 added = []
1349 added = []
1328 if rev:
1350 if rev:
1329 # If mq patches are applied, we can only import revisions
1351 # If mq patches are applied, we can only import revisions
1330 # that form a linear path to qbase.
1352 # that form a linear path to qbase.
1331 # Otherwise, they should form a linear path to a head.
1353 # Otherwise, they should form a linear path to a head.
1332 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1354 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1333 if len(heads) > 1:
1355 if len(heads) > 1:
1334 raise util.Abort(_('revision %d is the root of more than one '
1356 raise util.Abort(_('revision %d is the root of more than one '
1335 'branch') % rev[-1])
1357 'branch') % rev[-1])
1336 if self.applied:
1358 if self.applied:
1337 base = revlog.hex(repo.changelog.node(rev[0]))
1359 base = revlog.hex(repo.changelog.node(rev[0]))
1338 if base in [n.rev for n in self.applied]:
1360 if base in [n.rev for n in self.applied]:
1339 raise util.Abort(_('revision %d is already managed')
1361 raise util.Abort(_('revision %d is already managed')
1340 % rev[0])
1362 % rev[0])
1341 if heads != [revlog.bin(self.applied[-1].rev)]:
1363 if heads != [revlog.bin(self.applied[-1].rev)]:
1342 raise util.Abort(_('revision %d is not the parent of '
1364 raise util.Abort(_('revision %d is not the parent of '
1343 'the queue') % rev[0])
1365 'the queue') % rev[0])
1344 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1366 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1345 lastparent = repo.changelog.parentrevs(base)[0]
1367 lastparent = repo.changelog.parentrevs(base)[0]
1346 else:
1368 else:
1347 if heads != [repo.changelog.node(rev[0])]:
1369 if heads != [repo.changelog.node(rev[0])]:
1348 raise util.Abort(_('revision %d has unmanaged children')
1370 raise util.Abort(_('revision %d has unmanaged children')
1349 % rev[0])
1371 % rev[0])
1350 lastparent = None
1372 lastparent = None
1351
1373
1352 if git:
1374 if git:
1353 self.diffopts().git = True
1375 self.diffopts().git = True
1354
1376
1355 for r in rev:
1377 for r in rev:
1356 p1, p2 = repo.changelog.parentrevs(r)
1378 p1, p2 = repo.changelog.parentrevs(r)
1357 n = repo.changelog.node(r)
1379 n = repo.changelog.node(r)
1358 if p2 != revlog.nullrev:
1380 if p2 != revlog.nullrev:
1359 raise util.Abort(_('cannot import merge revision %d') % r)
1381 raise util.Abort(_('cannot import merge revision %d') % r)
1360 if lastparent and lastparent != r:
1382 if lastparent and lastparent != r:
1361 raise util.Abort(_('revision %d is not the parent of %d')
1383 raise util.Abort(_('revision %d is not the parent of %d')
1362 % (r, lastparent))
1384 % (r, lastparent))
1363 lastparent = p1
1385 lastparent = p1
1364
1386
1365 if not patchname:
1387 if not patchname:
1366 patchname = normname('%d.diff' % r)
1388 patchname = normname('%d.diff' % r)
1367 checkseries(patchname)
1389 checkseries(patchname)
1368 checkfile(patchname)
1390 checkfile(patchname)
1369 self.full_series.insert(0, patchname)
1391 self.full_series.insert(0, patchname)
1370
1392
1371 patchf = self.opener(patchname, "w")
1393 patchf = self.opener(patchname, "w")
1372 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1394 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1373 patchf.close()
1395 patchf.close()
1374
1396
1375 se = statusentry(revlog.hex(n), patchname)
1397 se = statusentry(revlog.hex(n), patchname)
1376 self.applied.insert(0, se)
1398 self.applied.insert(0, se)
1377
1399
1378 added.append(patchname)
1400 added.append(patchname)
1379 patchname = None
1401 patchname = None
1380 self.parse_series()
1402 self.parse_series()
1381 self.applied_dirty = 1
1403 self.applied_dirty = 1
1382
1404
1383 for filename in files:
1405 for filename in files:
1384 if existing:
1406 if existing:
1385 if filename == '-':
1407 if filename == '-':
1386 raise util.Abort(_('-e is incompatible with import from -'))
1408 raise util.Abort(_('-e is incompatible with import from -'))
1387 if not patchname:
1409 if not patchname:
1388 patchname = normname(filename)
1410 patchname = normname(filename)
1389 if not os.path.isfile(self.join(patchname)):
1411 if not os.path.isfile(self.join(patchname)):
1390 raise util.Abort(_("patch %s does not exist") % patchname)
1412 raise util.Abort(_("patch %s does not exist") % patchname)
1391 else:
1413 else:
1392 try:
1414 try:
1393 if filename == '-':
1415 if filename == '-':
1394 if not patchname:
1416 if not patchname:
1395 raise util.Abort(_('need --name to import a patch from -'))
1417 raise util.Abort(_('need --name to import a patch from -'))
1396 text = sys.stdin.read()
1418 text = sys.stdin.read()
1397 else:
1419 else:
1398 text = file(filename).read()
1420 text = file(filename).read()
1399 except IOError:
1421 except IOError:
1400 raise util.Abort(_("unable to read %s") % patchname)
1422 raise util.Abort(_("unable to read %s") % patchname)
1401 if not patchname:
1423 if not patchname:
1402 patchname = normname(os.path.basename(filename))
1424 patchname = normname(os.path.basename(filename))
1403 checkfile(patchname)
1425 checkfile(patchname)
1404 patchf = self.opener(patchname, "w")
1426 patchf = self.opener(patchname, "w")
1405 patchf.write(text)
1427 patchf.write(text)
1406 checkseries(patchname)
1428 checkseries(patchname)
1407 index = self.full_series_end() + i
1429 index = self.full_series_end() + i
1408 self.full_series[index:index] = [patchname]
1430 self.full_series[index:index] = [patchname]
1409 self.parse_series()
1431 self.parse_series()
1410 self.ui.warn("adding %s to series file\n" % patchname)
1432 self.ui.warn("adding %s to series file\n" % patchname)
1411 i += 1
1433 i += 1
1412 added.append(patchname)
1434 added.append(patchname)
1413 patchname = None
1435 patchname = None
1414 self.series_dirty = 1
1436 self.series_dirty = 1
1415 qrepo = self.qrepo()
1437 qrepo = self.qrepo()
1416 if qrepo:
1438 if qrepo:
1417 qrepo.add(added)
1439 qrepo.add(added)
1418
1440
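The comments in qimport above require revisions imported with --rev to form a linear chain: they are processed newest-first, merges are rejected, and each revision must be the first parent of the one imported before it. A small standalone check of that property against a toy parent table (the revision numbers and the parents dict are invented for illustration):

    NULLREV = -1

    def check_linear(revs, parents):
        """revs: newest-first list of revision numbers.
        parents: rev -> (p1, p2).  Raise ValueError on merges or gaps,
        mirroring the aborts in queue.qimport()."""
        lastparent = None
        for r in revs:
            p1, p2 = parents[r]
            if p2 != NULLREV:
                raise ValueError('cannot import merge revision %d' % r)
            if lastparent is not None and lastparent != r:
                raise ValueError('revision %d is not the parent of %d'
                                 % (r, lastparent))
            lastparent = p1
        return True

    # 5 -> 4 -> 3 is a linear chain; importing [5, 4, 3] is accepted
    parents = {3: (2, NULLREV), 4: (3, NULLREV), 5: (4, NULLREV)}
    assert check_linear([5, 4, 3], parents)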
1419 def delete(ui, repo, *patches, **opts):
1441 def delete(ui, repo, *patches, **opts):
1420 """remove patches from queue
1442 """remove patches from queue
1421
1443
1422 The patches must not be applied, unless they are arguments to
1444 The patches must not be applied, unless they are arguments to
1423 the --rev parameter. At least one patch or revision is required.
1445 the --rev parameter. At least one patch or revision is required.
1424
1446
1425 With --rev, mq will stop managing the named revisions (converting
1447 With --rev, mq will stop managing the named revisions (converting
1426 them to regular mercurial changesets). The patches must be applied
1448 them to regular mercurial changesets). The patches must be applied
1427 and at the base of the stack. This option is useful when the patches
1449 and at the base of the stack. This option is useful when the patches
1428 have been applied upstream.
1450 have been applied upstream.
1429
1451
1430 With --keep, the patch files are preserved in the patch directory."""
1452 With --keep, the patch files are preserved in the patch directory."""
1431 q = repo.mq
1453 q = repo.mq
1432 q.delete(repo, patches, opts)
1454 q.delete(repo, patches, opts)
1433 q.save_dirty()
1455 q.save_dirty()
1434 return 0
1456 return 0
1435
1457
1436 def applied(ui, repo, patch=None, **opts):
1458 def applied(ui, repo, patch=None, **opts):
1437 """print the patches already applied"""
1459 """print the patches already applied"""
1438 q = repo.mq
1460 q = repo.mq
1439 if patch:
1461 if patch:
1440 if patch not in q.series:
1462 if patch not in q.series:
1441 raise util.Abort(_("patch %s is not in series file") % patch)
1463 raise util.Abort(_("patch %s is not in series file") % patch)
1442 end = q.series.index(patch) + 1
1464 end = q.series.index(patch) + 1
1443 else:
1465 else:
1444 end = q.series_end(True)
1466 end = q.series_end(True)
1445 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1467 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1446
1468
1447 def unapplied(ui, repo, patch=None, **opts):
1469 def unapplied(ui, repo, patch=None, **opts):
1448 """print the patches not yet applied"""
1470 """print the patches not yet applied"""
1449 q = repo.mq
1471 q = repo.mq
1450 if patch:
1472 if patch:
1451 if patch not in q.series:
1473 if patch not in q.series:
1452 raise util.Abort(_("patch %s is not in series file") % patch)
1474 raise util.Abort(_("patch %s is not in series file") % patch)
1453 start = q.series.index(patch) + 1
1475 start = q.series.index(patch) + 1
1454 else:
1476 else:
1455 start = q.series_end(True)
1477 start = q.series_end(True)
1456 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1478 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1457
1479
1458 def qimport(ui, repo, *filename, **opts):
1480 def qimport(ui, repo, *filename, **opts):
1459 """import a patch
1481 """import a patch
1460
1482
1461 The patch will have the same name as its source file unless you
1483 The patch will have the same name as its source file unless you
1462 give it a new one with --name.
1484 give it a new one with --name.
1463
1485
1464 You can register an existing patch inside the patch directory
1486 You can register an existing patch inside the patch directory
1465 with the --existing flag.
1487 with the --existing flag.
1466
1488
1467 With --force, an existing patch of the same name will be overwritten.
1489 With --force, an existing patch of the same name will be overwritten.
1468
1490
1469 An existing changeset may be placed under mq control with --rev
1491 An existing changeset may be placed under mq control with --rev
1470 (e.g. qimport --rev tip -n patch will place tip under mq control).
1492 (e.g. qimport --rev tip -n patch will place tip under mq control).
1471 With --git, patches imported with --rev will use the git diff
1493 With --git, patches imported with --rev will use the git diff
1472 format.
1494 format.
1473 """
1495 """
1474 q = repo.mq
1496 q = repo.mq
1475 q.qimport(repo, filename, patchname=opts['name'],
1497 q.qimport(repo, filename, patchname=opts['name'],
1476 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1498 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1477 git=opts['git'])
1499 git=opts['git'])
1478 q.save_dirty()
1500 q.save_dirty()
1479 return 0
1501 return 0
1480
1502
1481 def init(ui, repo, **opts):
1503 def init(ui, repo, **opts):
1482 """init a new queue repository
1504 """init a new queue repository
1483
1505
1484 The queue repository is unversioned by default. If -c is
1506 The queue repository is unversioned by default. If -c is
1485 specified, qinit will create a separate nested repository
1507 specified, qinit will create a separate nested repository
1486 for patches (qinit -c may also be run later to convert
1508 for patches (qinit -c may also be run later to convert
1487 an unversioned patch repository into a versioned one).
1509 an unversioned patch repository into a versioned one).
1488 You can use qcommit to commit changes to this queue repository."""
1510 You can use qcommit to commit changes to this queue repository."""
1489 q = repo.mq
1511 q = repo.mq
1490 r = q.init(repo, create=opts['create_repo'])
1512 r = q.init(repo, create=opts['create_repo'])
1491 q.save_dirty()
1513 q.save_dirty()
1492 if r:
1514 if r:
1493 if not os.path.exists(r.wjoin('.hgignore')):
1515 if not os.path.exists(r.wjoin('.hgignore')):
1494 fp = r.wopener('.hgignore', 'w')
1516 fp = r.wopener('.hgignore', 'w')
1495 fp.write('syntax: glob\n')
1517 fp.write('syntax: glob\n')
1496 fp.write('status\n')
1518 fp.write('status\n')
1497 fp.write('guards\n')
1519 fp.write('guards\n')
1498 fp.close()
1520 fp.close()
1499 if not os.path.exists(r.wjoin('series')):
1521 if not os.path.exists(r.wjoin('series')):
1500 r.wopener('series', 'w').close()
1522 r.wopener('series', 'w').close()
1501 r.add(['.hgignore', 'series'])
1523 r.add(['.hgignore', 'series'])
1502 commands.add(ui, r)
1524 commands.add(ui, r)
1503 return 0
1525 return 0
1504
1526
1505 def clone(ui, source, dest=None, **opts):
1527 def clone(ui, source, dest=None, **opts):
1506 '''clone main and patch repository at same time
1528 '''clone main and patch repository at same time
1507
1529
1508 If source is local, destination will have no patches applied. If
1530 If source is local, destination will have no patches applied. If
1509 source is remote, this command cannot check whether patches are
1531 source is remote, this command cannot check whether patches are
1510 applied in source, so it cannot guarantee that patches are not
1532 applied in source, so it cannot guarantee that patches are not
1511 applied in destination. If you clone a remote repository, make
1533 applied in destination. If you clone a remote repository, make
1512 sure it has no patches applied before you clone it.
1534 sure it has no patches applied before you clone it.
1513
1535
1514 The source patch repository is looked for in <src>/.hg/patches by
1536 The source patch repository is looked for in <src>/.hg/patches by
1515 default. Use -p <url> to change it.
1537 default. Use -p <url> to change it.
1516
1538
1517 The patch directory must be a nested mercurial repository, as
1539 The patch directory must be a nested mercurial repository, as
1518 would be created by qinit -c.
1540 would be created by qinit -c.
1519 '''
1541 '''
1520 def patchdir(repo):
1542 def patchdir(repo):
1521 url = repo.url()
1543 url = repo.url()
1522 if url.endswith('/'):
1544 if url.endswith('/'):
1523 url = url[:-1]
1545 url = url[:-1]
1524 return url + '/.hg/patches'
1546 return url + '/.hg/patches'
1525 cmdutil.setremoteconfig(ui, opts)
1547 cmdutil.setremoteconfig(ui, opts)
1526 if dest is None:
1548 if dest is None:
1527 dest = hg.defaultdest(source)
1549 dest = hg.defaultdest(source)
1528 sr = hg.repository(ui, ui.expandpath(source))
1550 sr = hg.repository(ui, ui.expandpath(source))
1529 patchespath = opts['patches'] or patchdir(sr)
1551 patchespath = opts['patches'] or patchdir(sr)
1530 try:
1552 try:
1531 pr = hg.repository(ui, patchespath)
1553 pr = hg.repository(ui, patchespath)
1532 except hg.RepoError:
1554 except hg.RepoError:
1533 raise util.Abort(_('versioned patch repository not found'
1555 raise util.Abort(_('versioned patch repository not found'
1534 ' (see qinit -c)'))
1556 ' (see qinit -c)'))
1535 qbase, destrev = None, None
1557 qbase, destrev = None, None
1536 if sr.local():
1558 if sr.local():
1537 if sr.mq.applied:
1559 if sr.mq.applied:
1538 qbase = revlog.bin(sr.mq.applied[0].rev)
1560 qbase = revlog.bin(sr.mq.applied[0].rev)
1539 if not hg.islocal(dest):
1561 if not hg.islocal(dest):
1540 heads = dict.fromkeys(sr.heads())
1562 heads = dict.fromkeys(sr.heads())
1541 for h in sr.heads(qbase):
1563 for h in sr.heads(qbase):
1542 del heads[h]
1564 del heads[h]
1543 destrev = heads.keys()
1565 destrev = heads.keys()
1544 destrev.append(sr.changelog.parents(qbase)[0])
1566 destrev.append(sr.changelog.parents(qbase)[0])
1545 ui.note(_('cloning main repo\n'))
1567 ui.note(_('cloning main repo\n'))
1546 sr, dr = hg.clone(ui, sr.url(), dest,
1568 sr, dr = hg.clone(ui, sr.url(), dest,
1547 pull=opts['pull'],
1569 pull=opts['pull'],
1548 rev=destrev,
1570 rev=destrev,
1549 update=False,
1571 update=False,
1550 stream=opts['uncompressed'])
1572 stream=opts['uncompressed'])
1551 ui.note(_('cloning patch repo\n'))
1573 ui.note(_('cloning patch repo\n'))
1552 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1574 spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1553 pull=opts['pull'], update=not opts['noupdate'],
1575 pull=opts['pull'], update=not opts['noupdate'],
1554 stream=opts['uncompressed'])
1576 stream=opts['uncompressed'])
1555 if dr.local():
1577 if dr.local():
1556 if qbase:
1578 if qbase:
1557 ui.note(_('stripping applied patches from destination repo\n'))
1579 ui.note(_('stripping applied patches from destination repo\n'))
1558 dr.mq.strip(dr, qbase, update=False, backup=None)
1580 dr.mq.strip(dr, qbase, update=False, backup=None)
1559 if not opts['noupdate']:
1581 if not opts['noupdate']:
1560 ui.note(_('updating destination repo\n'))
1582 ui.note(_('updating destination repo\n'))
1561 hg.update(dr, dr.changelog.tip())
1583 hg.update(dr, dr.changelog.tip())
1562
1584
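qclone's patchdir() helper derives the default patch-repository location by dropping a trailing slash from the main repository URL and appending /.hg/patches. The same derivation as a tiny standalone function (the example URL is made up):

    def patch_repo_url(repo_url):
        # mirror qclone's patchdir(): drop a single trailing slash, then
        # append the conventional nested patch repository path
        if repo_url.endswith('/'):
            repo_url = repo_url[:-1]
        return repo_url + '/.hg/patches'

    assert patch_repo_url('http://example.com/repo/') == 'http://example.com/repo/.hg/patches'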
1563 def commit(ui, repo, *pats, **opts):
1585 def commit(ui, repo, *pats, **opts):
1564 """commit changes in the queue repository"""
1586 """commit changes in the queue repository"""
1565 q = repo.mq
1587 q = repo.mq
1566 r = q.qrepo()
1588 r = q.qrepo()
1567 if not r: raise util.Abort('no queue repository')
1589 if not r: raise util.Abort('no queue repository')
1568 commands.commit(r.ui, r, *pats, **opts)
1590 commands.commit(r.ui, r, *pats, **opts)
1569
1591
1570 def series(ui, repo, **opts):
1592 def series(ui, repo, **opts):
1571 """print the entire series file"""
1593 """print the entire series file"""
1572 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1594 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1573 return 0
1595 return 0
1574
1596
1575 def top(ui, repo, **opts):
1597 def top(ui, repo, **opts):
1576 """print the name of the current patch"""
1598 """print the name of the current patch"""
1577 q = repo.mq
1599 q = repo.mq
1578 t = q.applied and q.series_end(True) or 0
1600 t = q.applied and q.series_end(True) or 0
1579 if t:
1601 if t:
1580 return q.qseries(repo, start=t-1, length=1, status='A',
1602 return q.qseries(repo, start=t-1, length=1, status='A',
1581 summary=opts.get('summary'))
1603 summary=opts.get('summary'))
1582 else:
1604 else:
1583 ui.write("No patches applied\n")
1605 ui.write("No patches applied\n")
1584 return 1
1606 return 1
1585
1607
1586 def next(ui, repo, **opts):
1608 def next(ui, repo, **opts):
1587 """print the name of the next patch"""
1609 """print the name of the next patch"""
1588 q = repo.mq
1610 q = repo.mq
1589 end = q.series_end()
1611 end = q.series_end()
1590 if end == len(q.series):
1612 if end == len(q.series):
1591 ui.write("All patches applied\n")
1613 ui.write("All patches applied\n")
1592 return 1
1614 return 1
1593 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1615 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1594
1616
1595 def prev(ui, repo, **opts):
1617 def prev(ui, repo, **opts):
1596 """print the name of the previous patch"""
1618 """print the name of the previous patch"""
1597 q = repo.mq
1619 q = repo.mq
1598 l = len(q.applied)
1620 l = len(q.applied)
1599 if l == 1:
1621 if l == 1:
1600 ui.write("Only one patch applied\n")
1622 ui.write("Only one patch applied\n")
1601 return 1
1623 return 1
1602 if not l:
1624 if not l:
1603 ui.write("No patches applied\n")
1625 ui.write("No patches applied\n")
1604 return 1
1626 return 1
1605 return q.qseries(repo, start=l-2, length=1, status='A',
1627 return q.qseries(repo, start=l-2, length=1, status='A',
1606 summary=opts.get('summary'))
1628 summary=opts.get('summary'))
1607
1629
1630 def setupheaderopts(ui, opts):
1631 def do(opt,val):
1632 if not opts[opt] and opts['current' + opt]:
1633 opts[opt] = val
1634 do('user', ui.username())
1635
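setupheaderopts is the new helper behind the -U/--currentuser flag: when no explicit --user was given but --currentuser was, it copies the configured username into opts['user'], which qnew and qrefresh then turn into a 'From:' header. The same mapping without a ui object (the opts dict and the username value are examples only):

    def setup_header_opts(opts, username):
        """Sketch of the setupheaderopts idea, minus the ui object:
        --currentuser promotes the configured username into opts['user']."""
        if not opts.get('user') and opts.get('currentuser'):
            opts['user'] = username
        return opts

    opts = {'user': None, 'currentuser': True}
    assert setup_header_opts(opts, 'test')['user'] == 'test'
    # an explicit -u always wins over -U
    assert setup_header_opts({'user': 'mary', 'currentuser': True},
                             'test')['user'] == 'mary'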
1608 def new(ui, repo, patch, *args, **opts):
1636 def new(ui, repo, patch, *args, **opts):
1609 """create a new patch
1637 """create a new patch
1610
1638
1611 qnew creates a new patch on top of the currently-applied patch
1639 qnew creates a new patch on top of the currently-applied patch
1612 (if any). It will refuse to run if there are any outstanding
1640 (if any). It will refuse to run if there are any outstanding
1613 changes unless -f is specified, in which case the patch will
1641 changes unless -f is specified, in which case the patch will
1614 be initialised with them. You may also use -I, -X, and/or a list of
1642 be initialised with them. You may also use -I, -X, and/or a list of
1615 files after the patch name to add only changes to matching files
1643 files after the patch name to add only changes to matching files
1616 to the new patch, leaving the rest as uncommitted modifications.
1644 to the new patch, leaving the rest as uncommitted modifications.
1617
1645
1618 -e, -m or -l set the patch header as well as the commit message.
1646 -e, -m or -l set the patch header as well as the commit message.
1619 If none is specified, the patch header is empty and the
1647 If none is specified, the patch header is empty and the
1620 commit message is '[mq]: PATCH'"""
1648 commit message is '[mq]: PATCH'"""
1621 q = repo.mq
1649 q = repo.mq
1622 message = cmdutil.logmessage(opts)
1650 message = cmdutil.logmessage(opts)
1623 if opts['edit']:
1651 if opts['edit']:
1624 message = ui.edit(message, ui.username())
1652 message = ui.edit(message, ui.username())
1625 opts['msg'] = message
1653 opts['msg'] = message
1654 setupheaderopts(ui, opts)
1626 q.new(repo, patch, *args, **opts)
1655 q.new(repo, patch, *args, **opts)
1627 q.save_dirty()
1656 q.save_dirty()
1628 return 0
1657 return 0
1629
1658
1630 def refresh(ui, repo, *pats, **opts):
1659 def refresh(ui, repo, *pats, **opts):
1631 """update the current patch
1660 """update the current patch
1632
1661
1633 If any file patterns are provided, the refreshed patch will contain only
1662 If any file patterns are provided, the refreshed patch will contain only
1634 the modifications that match those patterns; the remaining modifications
1663 the modifications that match those patterns; the remaining modifications
1635 will remain in the working directory.
1664 will remain in the working directory.
1636
1665
1637 hg add/remove/copy/rename work as usual, though you might want to use
1666 hg add/remove/copy/rename work as usual, though you might want to use
1638 git-style patches (--git or [diff] git=1) to track copies and renames.
1667 git-style patches (--git or [diff] git=1) to track copies and renames.
1639 """
1668 """
1640 q = repo.mq
1669 q = repo.mq
1641 message = cmdutil.logmessage(opts)
1670 message = cmdutil.logmessage(opts)
1642 if opts['edit']:
1671 if opts['edit']:
1643 if not q.applied:
1672 if not q.applied:
1644 ui.write(_("No patches applied\n"))
1673 ui.write(_("No patches applied\n"))
1645 return 1
1674 return 1
1646 if message:
1675 if message:
1647 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1676 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1648 patch = q.applied[-1].name
1677 patch = q.applied[-1].name
1649 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1678 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1650 message = ui.edit('\n'.join(message), user or ui.username())
1679 message = ui.edit('\n'.join(message), user or ui.username())
1680 setupheaderopts(ui, opts)
1651 ret = q.refresh(repo, pats, msg=message, **opts)
1681 ret = q.refresh(repo, pats, msg=message, **opts)
1652 q.save_dirty()
1682 q.save_dirty()
1653 return ret
1683 return ret
1654
1684
1655 def diff(ui, repo, *pats, **opts):
1685 def diff(ui, repo, *pats, **opts):
1656 """diff of the current patch"""
1686 """diff of the current patch"""
1657 repo.mq.diff(repo, pats, opts)
1687 repo.mq.diff(repo, pats, opts)
1658 return 0
1688 return 0
1659
1689
1660 def fold(ui, repo, *files, **opts):
1690 def fold(ui, repo, *files, **opts):
1661 """fold the named patches into the current patch
1691 """fold the named patches into the current patch
1662
1692
1663 Patches must not yet be applied. Each patch will be successively
1693 Patches must not yet be applied. Each patch will be successively
1664 applied to the current patch in the order given. If all the
1694 applied to the current patch in the order given. If all the
1665 patches apply successfully, the current patch will be refreshed
1695 patches apply successfully, the current patch will be refreshed
1666 with the new cumulative patch, and the folded patches will
1696 with the new cumulative patch, and the folded patches will
1667 be deleted. With -k/--keep, the folded patch files will not
1697 be deleted. With -k/--keep, the folded patch files will not
1668 be removed afterwards.
1698 be removed afterwards.
1669
1699
1670 The header for each folded patch will be concatenated with
1700 The header for each folded patch will be concatenated with
1671 the current patch header, separated by a line of '* * *'."""
1701 the current patch header, separated by a line of '* * *'."""
1672
1702
1673 q = repo.mq
1703 q = repo.mq
1674
1704
1675 if not files:
1705 if not files:
1676 raise util.Abort(_('qfold requires at least one patch name'))
1706 raise util.Abort(_('qfold requires at least one patch name'))
1677 if not q.check_toppatch(repo):
1707 if not q.check_toppatch(repo):
1678 raise util.Abort(_('No patches applied'))
1708 raise util.Abort(_('No patches applied'))
1679
1709
1680 message = cmdutil.logmessage(opts)
1710 message = cmdutil.logmessage(opts)
1681 if opts['edit']:
1711 if opts['edit']:
1682 if message:
1712 if message:
1683 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1713 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1684
1714
1685 parent = q.lookup('qtip')
1715 parent = q.lookup('qtip')
1686 patches = []
1716 patches = []
1687 messages = []
1717 messages = []
1688 for f in files:
1718 for f in files:
1689 p = q.lookup(f)
1719 p = q.lookup(f)
1690 if p in patches or p == parent:
1720 if p in patches or p == parent:
1691 ui.warn(_('Skipping already folded patch %s') % p)
1721 ui.warn(_('Skipping already folded patch %s') % p)
1692 if q.isapplied(p):
1722 if q.isapplied(p):
1693 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1723 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1694 patches.append(p)
1724 patches.append(p)
1695
1725
1696 for p in patches:
1726 for p in patches:
1697 if not message:
1727 if not message:
1698 messages.append(q.readheaders(p)[0])
1728 messages.append(q.readheaders(p)[0])
1699 pf = q.join(p)
1729 pf = q.join(p)
1700 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1730 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1701 if not patchsuccess:
1731 if not patchsuccess:
1702 raise util.Abort(_('Error folding patch %s') % p)
1732 raise util.Abort(_('Error folding patch %s') % p)
1703 patch.updatedir(ui, repo, files)
1733 patch.updatedir(ui, repo, files)
1704
1734
1705 if not message:
1735 if not message:
1706 message, comments, user = q.readheaders(parent)[0:3]
1736 message, comments, user = q.readheaders(parent)[0:3]
1707 for msg in messages:
1737 for msg in messages:
1708 message.append('* * *')
1738 message.append('* * *')
1709 message.extend(msg)
1739 message.extend(msg)
1710 message = '\n'.join(message)
1740 message = '\n'.join(message)
1711
1741
1712 if opts['edit']:
1742 if opts['edit']:
1713 message = ui.edit(message, user or ui.username())
1743 message = ui.edit(message, user or ui.username())
1714
1744
1715 q.refresh(repo, msg=message)
1745 q.refresh(repo, msg=message)
1716 q.delete(repo, patches, opts)
1746 q.delete(repo, patches, opts)
1717 q.save_dirty()
1747 q.save_dirty()
1718
1748
1719 def goto(ui, repo, patch, **opts):
1749 def goto(ui, repo, patch, **opts):
1720 '''push or pop patches until named patch is at top of stack'''
1750 '''push or pop patches until named patch is at top of stack'''
1721 q = repo.mq
1751 q = repo.mq
1722 patch = q.lookup(patch)
1752 patch = q.lookup(patch)
1723 if q.isapplied(patch):
1753 if q.isapplied(patch):
1724 ret = q.pop(repo, patch, force=opts['force'])
1754 ret = q.pop(repo, patch, force=opts['force'])
1725 else:
1755 else:
1726 ret = q.push(repo, patch, force=opts['force'])
1756 ret = q.push(repo, patch, force=opts['force'])
1727 q.save_dirty()
1757 q.save_dirty()
1728 return ret
1758 return ret
1729
1759
1730 def guard(ui, repo, *args, **opts):
1760 def guard(ui, repo, *args, **opts):
1731 '''set or print guards for a patch
1761 '''set or print guards for a patch
1732
1762
1733 Guards control whether a patch can be pushed. A patch with no
1763 Guards control whether a patch can be pushed. A patch with no
1734 guards is always pushed. A patch with a positive guard ("+foo") is
1764 guards is always pushed. A patch with a positive guard ("+foo") is
1735 pushed only if the qselect command has activated it. A patch with
1765 pushed only if the qselect command has activated it. A patch with
1736 a negative guard ("-foo") is never pushed if the qselect command
1766 a negative guard ("-foo") is never pushed if the qselect command
1737 has activated it.
1767 has activated it.
1738
1768
1739 With no arguments, print the currently active guards.
1769 With no arguments, print the currently active guards.
1740 With arguments, set guards for the named patch.
1770 With arguments, set guards for the named patch.
1741
1771
1742 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1772 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1743 hg will not interpret "-foo" as an option):
1773 hg will not interpret "-foo" as an option):
1744 hg qguard -- -foo
1774 hg qguard -- -foo
1745
1775
1746 To set guards on another patch:
1776 To set guards on another patch:
1747 hg qguard other.patch +2.6.17 -stable
1777 hg qguard other.patch +2.6.17 -stable
1748 '''
1778 '''
1749 def status(idx):
1779 def status(idx):
1750 guards = q.series_guards[idx] or ['unguarded']
1780 guards = q.series_guards[idx] or ['unguarded']
1751 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1781 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1752 q = repo.mq
1782 q = repo.mq
1753 patch = None
1783 patch = None
1754 args = list(args)
1784 args = list(args)
1755 if opts['list']:
1785 if opts['list']:
1756 if args or opts['none']:
1786 if args or opts['none']:
1757 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1787 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1758 for i in xrange(len(q.series)):
1788 for i in xrange(len(q.series)):
1759 status(i)
1789 status(i)
1760 return
1790 return
1761 if not args or args[0][0:1] in '-+':
1791 if not args or args[0][0:1] in '-+':
1762 if not q.applied:
1792 if not q.applied:
1763 raise util.Abort(_('no patches applied'))
1793 raise util.Abort(_('no patches applied'))
1764 patch = q.applied[-1].name
1794 patch = q.applied[-1].name
1765 if patch is None and args[0][0:1] not in '-+':
1795 if patch is None and args[0][0:1] not in '-+':
1766 patch = args.pop(0)
1796 patch = args.pop(0)
1767 if patch is None:
1797 if patch is None:
1768 raise util.Abort(_('no patch to work with'))
1798 raise util.Abort(_('no patch to work with'))
1769 if args or opts['none']:
1799 if args or opts['none']:
1770 idx = q.find_series(patch)
1800 idx = q.find_series(patch)
1771 if idx is None:
1801 if idx is None:
1772 raise util.Abort(_('no patch named %s') % patch)
1802 raise util.Abort(_('no patch named %s') % patch)
1773 q.set_guards(idx, args)
1803 q.set_guards(idx, args)
1774 q.save_dirty()
1804 q.save_dirty()
1775 else:
1805 else:
1776 status(q.series.index(q.lookup(patch)))
1806 status(q.series.index(q.lookup(patch)))
1777
1807
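The qguard docstring above (and the qselect one further down) state the push rule: an unguarded patch is always pushed, a positive guard ("+foo") requires foo to be selected, and a negative guard ("-foo") blocks the push when foo is selected. That rule in isolation, as a sketch rather than mq's actual pushable() implementation:

    def pushable(guards, selected):
        """Return True if a patch with these guards may be pushed under
        the currently selected guard names, per the qguard/qselect rules."""
        if not guards:
            return True                       # unguarded: always pushed
        for g in guards:
            if g.startswith('-') and g[1:] in selected:
                return False                  # a negative guard matched
        pos = [g[1:] for g in guards if g.startswith('+')]
        # with positive guards present, at least one must be selected
        return not pos or bool(set(pos) & selected)

    assert pushable([], {'stable'})                  # no guards at all
    assert not pushable(['-stable'], {'stable'})     # qguard foo.patch -stable
    assert pushable(['+stable'], {'stable'})         # qguard bar.patch +stable
    assert not pushable(['+stable'], set())          # guard not selected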
1778 def header(ui, repo, patch=None):
1808 def header(ui, repo, patch=None):
1779 """Print the header of the topmost or specified patch"""
1809 """Print the header of the topmost or specified patch"""
1780 q = repo.mq
1810 q = repo.mq
1781
1811
1782 if patch:
1812 if patch:
1783 patch = q.lookup(patch)
1813 patch = q.lookup(patch)
1784 else:
1814 else:
1785 if not q.applied:
1815 if not q.applied:
1786 ui.write('No patches applied\n')
1816 ui.write('No patches applied\n')
1787 return 1
1817 return 1
1788 patch = q.lookup('qtip')
1818 patch = q.lookup('qtip')
1789 message = repo.mq.readheaders(patch)[0]
1819 message = repo.mq.readheaders(patch)[0]
1790
1820
1791 ui.write('\n'.join(message) + '\n')
1821 ui.write('\n'.join(message) + '\n')
1792
1822
1793 def lastsavename(path):
1823 def lastsavename(path):
1794 (directory, base) = os.path.split(path)
1824 (directory, base) = os.path.split(path)
1795 names = os.listdir(directory)
1825 names = os.listdir(directory)
1796 namere = re.compile("%s.([0-9]+)" % base)
1826 namere = re.compile("%s.([0-9]+)" % base)
1797 maxindex = None
1827 maxindex = None
1798 maxname = None
1828 maxname = None
1799 for f in names:
1829 for f in names:
1800 m = namere.match(f)
1830 m = namere.match(f)
1801 if m:
1831 if m:
1802 index = int(m.group(1))
1832 index = int(m.group(1))
1803 if maxindex == None or index > maxindex:
1833 if maxindex == None or index > maxindex:
1804 maxindex = index
1834 maxindex = index
1805 maxname = f
1835 maxname = f
1806 if maxname:
1836 if maxname:
1807 return (os.path.join(directory, maxname), maxindex)
1837 return (os.path.join(directory, maxname), maxindex)
1808 return (None, None)
1838 return (None, None)
1809
1839
1810 def savename(path):
1840 def savename(path):
1811 (last, index) = lastsavename(path)
1841 (last, index) = lastsavename(path)
1812 if last is None:
1842 if last is None:
1813 index = 0
1843 index = 0
1814 newpath = path + ".%d" % (index + 1)
1844 newpath = path + ".%d" % (index + 1)
1815 return newpath
1845 return newpath
1816
1846
1817 def push(ui, repo, patch=None, **opts):
1847 def push(ui, repo, patch=None, **opts):
1818 """push the next patch onto the stack"""
1848 """push the next patch onto the stack"""
1819 q = repo.mq
1849 q = repo.mq
1820 mergeq = None
1850 mergeq = None
1821
1851
1822 if opts['all']:
1852 if opts['all']:
1823 if not q.series:
1853 if not q.series:
1824 ui.warn(_('no patches in series\n'))
1854 ui.warn(_('no patches in series\n'))
1825 return 0
1855 return 0
1826 patch = q.series[-1]
1856 patch = q.series[-1]
1827 if opts['merge']:
1857 if opts['merge']:
1828 if opts['name']:
1858 if opts['name']:
1829 newpath = opts['name']
1859 newpath = opts['name']
1830 else:
1860 else:
1831 newpath, i = lastsavename(q.path)
1861 newpath, i = lastsavename(q.path)
1832 if not newpath:
1862 if not newpath:
1833 ui.warn("no saved queues found, please use -n\n")
1863 ui.warn("no saved queues found, please use -n\n")
1834 return 1
1864 return 1
1835 mergeq = queue(ui, repo.join(""), newpath)
1865 mergeq = queue(ui, repo.join(""), newpath)
1836 ui.warn("merging with queue at: %s\n" % mergeq.path)
1866 ui.warn("merging with queue at: %s\n" % mergeq.path)
1837 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1867 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1838 mergeq=mergeq)
1868 mergeq=mergeq)
1839 return ret
1869 return ret
1840
1870
1841 def pop(ui, repo, patch=None, **opts):
1871 def pop(ui, repo, patch=None, **opts):
1842 """pop the current patch off the stack"""
1872 """pop the current patch off the stack"""
1843 localupdate = True
1873 localupdate = True
1844 if opts['name']:
1874 if opts['name']:
1845 q = queue(ui, repo.join(""), repo.join(opts['name']))
1875 q = queue(ui, repo.join(""), repo.join(opts['name']))
1846 ui.warn('using patch queue: %s\n' % q.path)
1876 ui.warn('using patch queue: %s\n' % q.path)
1847 localupdate = False
1877 localupdate = False
1848 else:
1878 else:
1849 q = repo.mq
1879 q = repo.mq
1850 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1880 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1851 all=opts['all'])
1881 all=opts['all'])
1852 q.save_dirty()
1882 q.save_dirty()
1853 return ret
1883 return ret
1854
1884
1855 def rename(ui, repo, patch, name=None, **opts):
1885 def rename(ui, repo, patch, name=None, **opts):
1856 """rename a patch
1886 """rename a patch
1857
1887
1858 With one argument, renames the current patch to PATCH1.
1888 With one argument, renames the current patch to PATCH1.
1859 With two arguments, renames PATCH1 to PATCH2."""
1889 With two arguments, renames PATCH1 to PATCH2."""
1860
1890
1861 q = repo.mq
1891 q = repo.mq
1862
1892
1863 if not name:
1893 if not name:
1864 name = patch
1894 name = patch
1865 patch = None
1895 patch = None
1866
1896
1867 if patch:
1897 if patch:
1868 patch = q.lookup(patch)
1898 patch = q.lookup(patch)
1869 else:
1899 else:
1870 if not q.applied:
1900 if not q.applied:
1871 ui.write(_('No patches applied\n'))
1901 ui.write(_('No patches applied\n'))
1872 return
1902 return
1873 patch = q.lookup('qtip')
1903 patch = q.lookup('qtip')
1874 absdest = q.join(name)
1904 absdest = q.join(name)
1875 if os.path.isdir(absdest):
1905 if os.path.isdir(absdest):
1876 name = normname(os.path.join(name, os.path.basename(patch)))
1906 name = normname(os.path.join(name, os.path.basename(patch)))
1877 absdest = q.join(name)
1907 absdest = q.join(name)
1878 if os.path.exists(absdest):
1908 if os.path.exists(absdest):
1879 raise util.Abort(_('%s already exists') % absdest)
1909 raise util.Abort(_('%s already exists') % absdest)
1880
1910
1881 if name in q.series:
1911 if name in q.series:
1882 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1912 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1883
1913
1884 if ui.verbose:
1914 if ui.verbose:
1885 ui.write('Renaming %s to %s\n' % (patch, name))
1915 ui.write('Renaming %s to %s\n' % (patch, name))
1886 i = q.find_series(patch)
1916 i = q.find_series(patch)
1887 guards = q.guard_re.findall(q.full_series[i])
1917 guards = q.guard_re.findall(q.full_series[i])
1888 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1918 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1889 q.parse_series()
1919 q.parse_series()
1890 q.series_dirty = 1
1920 q.series_dirty = 1
1891
1921
1892 info = q.isapplied(patch)
1922 info = q.isapplied(patch)
1893 if info:
1923 if info:
1894 q.applied[info[0]] = statusentry(info[1], name)
1924 q.applied[info[0]] = statusentry(info[1], name)
1895 q.applied_dirty = 1
1925 q.applied_dirty = 1
1896
1926
1897 util.rename(q.join(patch), absdest)
1927 util.rename(q.join(patch), absdest)
1898 r = q.qrepo()
1928 r = q.qrepo()
1899 if r:
1929 if r:
1900 wlock = r.wlock()
1930 wlock = r.wlock()
1901 try:
1931 try:
1902 if r.dirstate[name] == 'r':
1932 if r.dirstate[name] == 'r':
1903 r.undelete([name])
1933 r.undelete([name])
1904 r.copy(patch, name)
1934 r.copy(patch, name)
1905 r.remove([patch], False)
1935 r.remove([patch], False)
1906 finally:
1936 finally:
1907 del wlock
1937 del wlock
1908
1938
1909 q.save_dirty()
1939 q.save_dirty()
1910
1940
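qrename rewrites the renamed patch's line in the series file while carrying over any '#guard' annotations, which is what the guard_re.findall / join pair above does. A self-contained sketch of that rewrite; the regular expression below only approximates mq's guard_re and is not taken from it:

    import re

    # approximate pattern for guard annotations such as "#+stable" or "#-foo"
    GUARD_RE = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def rename_series_line(line, newname):
        """Rebuild a series-file line under a new patch name, carrying the
        existing guards over, as queue.rename() does."""
        guards = GUARD_RE.findall(line)
        return newname + ''.join([' #' + g for g in guards])

    assert rename_series_line('old.patch #+stable #-broken', 'new.patch') == \
        'new.patch #+stable #-broken'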
1911 def restore(ui, repo, rev, **opts):
1941 def restore(ui, repo, rev, **opts):
1912 """restore the queue state saved by a rev"""
1942 """restore the queue state saved by a rev"""
1913 rev = repo.lookup(rev)
1943 rev = repo.lookup(rev)
1914 q = repo.mq
1944 q = repo.mq
1915 q.restore(repo, rev, delete=opts['delete'],
1945 q.restore(repo, rev, delete=opts['delete'],
1916 qupdate=opts['update'])
1946 qupdate=opts['update'])
1917 q.save_dirty()
1947 q.save_dirty()
1918 return 0
1948 return 0
1919
1949
1920 def save(ui, repo, **opts):
1950 def save(ui, repo, **opts):
1921 """save current queue state"""
1951 """save current queue state"""
1922 q = repo.mq
1952 q = repo.mq
1923 message = cmdutil.logmessage(opts)
1953 message = cmdutil.logmessage(opts)
1924 ret = q.save(repo, msg=message)
1954 ret = q.save(repo, msg=message)
1925 if ret:
1955 if ret:
1926 return ret
1956 return ret
1927 q.save_dirty()
1957 q.save_dirty()
1928 if opts['copy']:
1958 if opts['copy']:
1929 path = q.path
1959 path = q.path
1930 if opts['name']:
1960 if opts['name']:
1931 newpath = os.path.join(q.basepath, opts['name'])
1961 newpath = os.path.join(q.basepath, opts['name'])
1932 if os.path.exists(newpath):
1962 if os.path.exists(newpath):
1933 if not os.path.isdir(newpath):
1963 if not os.path.isdir(newpath):
1934 raise util.Abort(_('destination %s exists and is not '
1964 raise util.Abort(_('destination %s exists and is not '
1935 'a directory') % newpath)
1965 'a directory') % newpath)
1936 if not opts['force']:
1966 if not opts['force']:
1937 raise util.Abort(_('destination %s exists, '
1967 raise util.Abort(_('destination %s exists, '
1938 'use -f to force') % newpath)
1968 'use -f to force') % newpath)
1939 else:
1969 else:
1940 newpath = savename(path)
1970 newpath = savename(path)
1941 ui.warn("copy %s to %s\n" % (path, newpath))
1971 ui.warn("copy %s to %s\n" % (path, newpath))
1942 util.copyfiles(path, newpath)
1972 util.copyfiles(path, newpath)
1943 if opts['empty']:
1973 if opts['empty']:
1944 try:
1974 try:
1945 os.unlink(q.join(q.status_path))
1975 os.unlink(q.join(q.status_path))
1946 except:
1976 except:
1947 pass
1977 pass
1948 return 0
1978 return 0
1949
1979
1950 def strip(ui, repo, rev, **opts):
1980 def strip(ui, repo, rev, **opts):
1951 """strip a revision and all later revs on the same branch"""
1981 """strip a revision and all later revs on the same branch"""
1952 rev = repo.lookup(rev)
1982 rev = repo.lookup(rev)
1953 backup = 'all'
1983 backup = 'all'
1954 if opts['backup']:
1984 if opts['backup']:
1955 backup = 'strip'
1985 backup = 'strip'
1956 elif opts['nobackup']:
1986 elif opts['nobackup']:
1957 backup = 'none'
1987 backup = 'none'
1958 update = repo.dirstate.parents()[0] != revlog.nullid
1988 update = repo.dirstate.parents()[0] != revlog.nullid
1959 repo.mq.strip(repo, rev, backup=backup, update=update)
1989 repo.mq.strip(repo, rev, backup=backup, update=update)
1960 return 0
1990 return 0
1961
1991
1962 def select(ui, repo, *args, **opts):
1992 def select(ui, repo, *args, **opts):
1963 '''set or print guarded patches to push
1993 '''set or print guarded patches to push
1964
1994
1965 Use the qguard command to set or print guards on a patch, then use
1995 Use the qguard command to set or print guards on a patch, then use
1966 qselect to tell mq which guards to use. A patch will be pushed if it
1996 qselect to tell mq which guards to use. A patch will be pushed if it
1967 has no guards or any positive guards match the currently selected guard,
1997 has no guards or any positive guards match the currently selected guard,
1968 but will not be pushed if any negative guards match the current guard.
1998 but will not be pushed if any negative guards match the current guard.
1969 For example:
1999 For example:
1970
2000
1971 qguard foo.patch -stable (negative guard)
2001 qguard foo.patch -stable (negative guard)
1972 qguard bar.patch +stable (positive guard)
2002 qguard bar.patch +stable (positive guard)
1973 qselect stable
2003 qselect stable
1974
2004
1975 This activates the "stable" guard. mq will skip foo.patch (because
2005 This activates the "stable" guard. mq will skip foo.patch (because
1976 it has a negative match) but push bar.patch (because it
2006 it has a negative match) but push bar.patch (because it
1977 has a positive match).
2007 has a positive match).
1978
2008
1979 With no arguments, prints the currently active guards.
2009 With no arguments, prints the currently active guards.
1980 With one argument, sets the active guard.
2010 With one argument, sets the active guard.
1981
2011
1982 Use -n/--none to deactivate guards (no other arguments needed).
2012 Use -n/--none to deactivate guards (no other arguments needed).
1983 When no guards are active, patches with positive guards are skipped
2013 When no guards are active, patches with positive guards are skipped
1984 and patches with negative guards are pushed.
2014 and patches with negative guards are pushed.
1985
2015
1986 qselect can change the guards on applied patches. It does not pop
2016 qselect can change the guards on applied patches. It does not pop
1987 guarded patches by default. Use --pop to pop back to the last applied
2017 guarded patches by default. Use --pop to pop back to the last applied
1988 patch that is not guarded. Use --reapply (which implies --pop) to push
2018 patch that is not guarded. Use --reapply (which implies --pop) to push
1989 back to the current patch afterwards, but skip guarded patches.
2019 back to the current patch afterwards, but skip guarded patches.
1990
2020
1991 Use -s/--series to print a list of all guards in the series file (no
2021 Use -s/--series to print a list of all guards in the series file (no
1992 other arguments needed). Use -v for more information.'''
2022 other arguments needed). Use -v for more information.'''
1993
2023
1994 q = repo.mq
2024 q = repo.mq
1995 guards = q.active()
2025 guards = q.active()
1996 if args or opts['none']:
2026 if args or opts['none']:
1997 old_unapplied = q.unapplied(repo)
2027 old_unapplied = q.unapplied(repo)
1998 old_guarded = [i for i in xrange(len(q.applied)) if
2028 old_guarded = [i for i in xrange(len(q.applied)) if
1999 not q.pushable(i)[0]]
2029 not q.pushable(i)[0]]
2000 q.set_active(args)
2030 q.set_active(args)
2001 q.save_dirty()
2031 q.save_dirty()
2002 if not args:
2032 if not args:
2003 ui.status(_('guards deactivated\n'))
2033 ui.status(_('guards deactivated\n'))
2004 if not opts['pop'] and not opts['reapply']:
2034 if not opts['pop'] and not opts['reapply']:
2005 unapplied = q.unapplied(repo)
2035 unapplied = q.unapplied(repo)
2006 guarded = [i for i in xrange(len(q.applied))
2036 guarded = [i for i in xrange(len(q.applied))
2007 if not q.pushable(i)[0]]
2037 if not q.pushable(i)[0]]
2008 if len(unapplied) != len(old_unapplied):
2038 if len(unapplied) != len(old_unapplied):
2009 ui.status(_('number of unguarded, unapplied patches has '
2039 ui.status(_('number of unguarded, unapplied patches has '
2010 'changed from %d to %d\n') %
2040 'changed from %d to %d\n') %
2011 (len(old_unapplied), len(unapplied)))
2041 (len(old_unapplied), len(unapplied)))
2012 if len(guarded) != len(old_guarded):
2042 if len(guarded) != len(old_guarded):
2013 ui.status(_('number of guarded, applied patches has changed '
2043 ui.status(_('number of guarded, applied patches has changed '
2014 'from %d to %d\n') %
2044 'from %d to %d\n') %
2015 (len(old_guarded), len(guarded)))
2045 (len(old_guarded), len(guarded)))
2016 elif opts['series']:
2046 elif opts['series']:
2017 guards = {}
2047 guards = {}
2018 noguards = 0
2048 noguards = 0
2019 for gs in q.series_guards:
2049 for gs in q.series_guards:
2020 if not gs:
2050 if not gs:
2021 noguards += 1
2051 noguards += 1
2022 for g in gs:
2052 for g in gs:
2023 guards.setdefault(g, 0)
2053 guards.setdefault(g, 0)
2024 guards[g] += 1
2054 guards[g] += 1
2025 if ui.verbose:
2055 if ui.verbose:
2026 guards['NONE'] = noguards
2056 guards['NONE'] = noguards
2027 guards = guards.items()
2057 guards = guards.items()
2028 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2058 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2029 if guards:
2059 if guards:
2030 ui.note(_('guards in series file:\n'))
2060 ui.note(_('guards in series file:\n'))
2031 for guard, count in guards:
2061 for guard, count in guards:
2032 ui.note('%2d ' % count)
2062 ui.note('%2d ' % count)
2033 ui.write(guard, '\n')
2063 ui.write(guard, '\n')
2034 else:
2064 else:
2035 ui.note(_('no guards in series file\n'))
2065 ui.note(_('no guards in series file\n'))
2036 else:
2066 else:
2037 if guards:
2067 if guards:
2038 ui.note(_('active guards:\n'))
2068 ui.note(_('active guards:\n'))
2039 for g in guards:
2069 for g in guards:
2040 ui.write(g, '\n')
2070 ui.write(g, '\n')
2041 else:
2071 else:
2042 ui.write(_('no active guards\n'))
2072 ui.write(_('no active guards\n'))
2043 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2073 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2044 popped = False
2074 popped = False
2045 if opts['pop'] or opts['reapply']:
2075 if opts['pop'] or opts['reapply']:
2046 for i in xrange(len(q.applied)):
2076 for i in xrange(len(q.applied)):
2047 pushable, reason = q.pushable(i)
2077 pushable, reason = q.pushable(i)
2048 if not pushable:
2078 if not pushable:
2049 ui.status(_('popping guarded patches\n'))
2079 ui.status(_('popping guarded patches\n'))
2050 popped = True
2080 popped = True
2051 if i == 0:
2081 if i == 0:
2052 q.pop(repo, all=True)
2082 q.pop(repo, all=True)
2053 else:
2083 else:
2054 q.pop(repo, i-1)
2084 q.pop(repo, i-1)
2055 break
2085 break
2056 if popped:
2086 if popped:
2057 try:
2087 try:
2058 if reapply:
2088 if reapply:
2059 ui.status(_('reapplying unguarded patches\n'))
2089 ui.status(_('reapplying unguarded patches\n'))
2060 q.push(repo, reapply)
2090 q.push(repo, reapply)
2061 finally:
2091 finally:
2062 q.save_dirty()
2092 q.save_dirty()
2063
2093
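
A minimal sketch of the guard workflow described in the qselect help above; the patch names, the "stable" guard, and the use of "--" to keep the negative guard from being read as an option are illustrative assumptions, not part of this changeset:

    hg qguard foo.patch -- -stable   # negative guard: skip foo.patch while "stable" is selected
    hg qguard bar.patch +stable      # positive guard: push bar.patch only while "stable" is selected
    hg qselect stable                # activate the "stable" guard
    hg qpush -a                      # applies bar.patch, skips foo.patch
    hg qselect -n                    # deactivate all guards again
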
2064 def reposetup(ui, repo):
2094 def reposetup(ui, repo):
2065 class mqrepo(repo.__class__):
2095 class mqrepo(repo.__class__):
2066 def abort_if_wdir_patched(self, errmsg, force=False):
2096 def abort_if_wdir_patched(self, errmsg, force=False):
2067 if self.mq.applied and not force:
2097 if self.mq.applied and not force:
2068 parent = revlog.hex(self.dirstate.parents()[0])
2098 parent = revlog.hex(self.dirstate.parents()[0])
2069 if parent in [s.rev for s in self.mq.applied]:
2099 if parent in [s.rev for s in self.mq.applied]:
2070 raise util.Abort(errmsg)
2100 raise util.Abort(errmsg)
2071
2101
2072 def commit(self, *args, **opts):
2102 def commit(self, *args, **opts):
2073 if len(args) >= 6:
2103 if len(args) >= 6:
2074 force = args[5]
2104 force = args[5]
2075 else:
2105 else:
2076 force = opts.get('force')
2106 force = opts.get('force')
2077 self.abort_if_wdir_patched(
2107 self.abort_if_wdir_patched(
2078 _('cannot commit over an applied mq patch'),
2108 _('cannot commit over an applied mq patch'),
2079 force)
2109 force)
2080
2110
2081 return super(mqrepo, self).commit(*args, **opts)
2111 return super(mqrepo, self).commit(*args, **opts)
2082
2112
2083 def push(self, remote, force=False, revs=None):
2113 def push(self, remote, force=False, revs=None):
2084 if self.mq.applied and not force and not revs:
2114 if self.mq.applied and not force and not revs:
2085 raise util.Abort(_('source has mq patches applied'))
2115 raise util.Abort(_('source has mq patches applied'))
2086 return super(mqrepo, self).push(remote, force, revs)
2116 return super(mqrepo, self).push(remote, force, revs)
2087
2117
2088 def tags(self):
2118 def tags(self):
2089 if self.tagscache:
2119 if self.tagscache:
2090 return self.tagscache
2120 return self.tagscache
2091
2121
2092 tagscache = super(mqrepo, self).tags()
2122 tagscache = super(mqrepo, self).tags()
2093
2123
2094 q = self.mq
2124 q = self.mq
2095 if not q.applied:
2125 if not q.applied:
2096 return tagscache
2126 return tagscache
2097
2127
2098 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2128 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2099 mqtags.append((mqtags[-1][0], 'qtip'))
2129 mqtags.append((mqtags[-1][0], 'qtip'))
2100 mqtags.append((mqtags[0][0], 'qbase'))
2130 mqtags.append((mqtags[0][0], 'qbase'))
2101 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2131 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2102 for patch in mqtags:
2132 for patch in mqtags:
2103 if patch[1] in tagscache:
2133 if patch[1] in tagscache:
2104 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2134 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2105 else:
2135 else:
2106 tagscache[patch[1]] = patch[0]
2136 tagscache[patch[1]] = patch[0]
2107
2137
2108 return tagscache
2138 return tagscache
2109
2139
2110 def _branchtags(self):
2140 def _branchtags(self):
2111 q = self.mq
2141 q = self.mq
2112 if not q.applied:
2142 if not q.applied:
2113 return super(mqrepo, self)._branchtags()
2143 return super(mqrepo, self)._branchtags()
2114
2144
2115 self.branchcache = {} # avoid recursion in changectx
2145 self.branchcache = {} # avoid recursion in changectx
2116 cl = self.changelog
2146 cl = self.changelog
2117 partial, last, lrev = self._readbranchcache()
2147 partial, last, lrev = self._readbranchcache()
2118
2148
2119 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2149 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2120 start = lrev + 1
2150 start = lrev + 1
2121 if start < qbase:
2151 if start < qbase:
2122 # update the cache (excluding the patches) and save it
2152 # update the cache (excluding the patches) and save it
2123 self._updatebranchcache(partial, lrev+1, qbase)
2153 self._updatebranchcache(partial, lrev+1, qbase)
2124 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2154 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2125 start = qbase
2155 start = qbase
2126 # if start = qbase, the cache is as updated as it should be.
2156 # if start = qbase, the cache is as updated as it should be.
2127 # if start > qbase, the cache includes (part of) the patches.
2157 # if start > qbase, the cache includes (part of) the patches.
2128 # we might as well use it, but we won't save it.
2158 # we might as well use it, but we won't save it.
2129
2159
2130 # update the cache up to the tip
2160 # update the cache up to the tip
2131 self._updatebranchcache(partial, start, cl.count())
2161 self._updatebranchcache(partial, start, cl.count())
2132
2162
2133 return partial
2163 return partial
2134
2164
2135 if repo.local():
2165 if repo.local():
2136 repo.__class__ = mqrepo
2166 repo.__class__ = mqrepo
2137 repo.mq = queue(ui, repo.join(""))
2167 repo.mq = queue(ui, repo.join(""))
2138
2168
2139 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2169 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2140
2170
2171 headeropts = [
2172 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2173 ('u', 'user', '', _('add "From: <given user>" to patch'))]
2174
2141 cmdtable = {
2175 cmdtable = {
2142 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2176 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2143 "qclone":
2177 "qclone":
2144 (clone,
2178 (clone,
2145 [('', 'pull', None, _('use pull protocol to copy metadata')),
2179 [('', 'pull', None, _('use pull protocol to copy metadata')),
2146 ('U', 'noupdate', None, _('do not update the new working directories')),
2180 ('U', 'noupdate', None, _('do not update the new working directories')),
2147 ('', 'uncompressed', None,
2181 ('', 'uncompressed', None,
2148 _('use uncompressed transfer (fast over LAN)')),
2182 _('use uncompressed transfer (fast over LAN)')),
2149 ('p', 'patches', '', _('location of source patch repo')),
2183 ('p', 'patches', '', _('location of source patch repo')),
2150 ] + commands.remoteopts,
2184 ] + commands.remoteopts,
2151 _('hg qclone [OPTION]... SOURCE [DEST]')),
2185 _('hg qclone [OPTION]... SOURCE [DEST]')),
2152 "qcommit|qci":
2186 "qcommit|qci":
2153 (commit,
2187 (commit,
2154 commands.table["^commit|ci"][1],
2188 commands.table["^commit|ci"][1],
2155 _('hg qcommit [OPTION]... [FILE]...')),
2189 _('hg qcommit [OPTION]... [FILE]...')),
2156 "^qdiff":
2190 "^qdiff":
2157 (diff,
2191 (diff,
2158 [('g', 'git', None, _('use git extended diff format')),
2192 [('g', 'git', None, _('use git extended diff format')),
2159 ] + commands.walkopts,
2193 ] + commands.walkopts,
2160 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2194 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2161 "qdelete|qremove|qrm":
2195 "qdelete|qremove|qrm":
2162 (delete,
2196 (delete,
2163 [('k', 'keep', None, _('keep patch file')),
2197 [('k', 'keep', None, _('keep patch file')),
2164 ('r', 'rev', [], _('stop managing a revision'))],
2198 ('r', 'rev', [], _('stop managing a revision'))],
2165 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2199 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2166 'qfold':
2200 'qfold':
2167 (fold,
2201 (fold,
2168 [('e', 'edit', None, _('edit patch header')),
2202 [('e', 'edit', None, _('edit patch header')),
2169 ('k', 'keep', None, _('keep folded patch files')),
2203 ('k', 'keep', None, _('keep folded patch files')),
2170 ] + commands.commitopts,
2204 ] + commands.commitopts,
2171 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2205 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2172 'qgoto':
2206 'qgoto':
2173 (goto,
2207 (goto,
2174 [('f', 'force', None, _('overwrite any local changes'))],
2208 [('f', 'force', None, _('overwrite any local changes'))],
2175 _('hg qgoto [OPTION]... PATCH')),
2209 _('hg qgoto [OPTION]... PATCH')),
2176 'qguard':
2210 'qguard':
2177 (guard,
2211 (guard,
2178 [('l', 'list', None, _('list all patches and guards')),
2212 [('l', 'list', None, _('list all patches and guards')),
2179 ('n', 'none', None, _('drop all guards'))],
2213 ('n', 'none', None, _('drop all guards'))],
2180 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2214 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2181 'qheader': (header, [], _('hg qheader [PATCH]')),
2215 'qheader': (header, [], _('hg qheader [PATCH]')),
2182 "^qimport":
2216 "^qimport":
2183 (qimport,
2217 (qimport,
2184 [('e', 'existing', None, 'import file in patch dir'),
2218 [('e', 'existing', None, 'import file in patch dir'),
2185 ('n', 'name', '', 'patch file name'),
2219 ('n', 'name', '', 'patch file name'),
2186 ('f', 'force', None, 'overwrite existing files'),
2220 ('f', 'force', None, 'overwrite existing files'),
2187 ('r', 'rev', [], 'place existing revisions under mq control'),
2221 ('r', 'rev', [], 'place existing revisions under mq control'),
2188 ('g', 'git', None, _('use git extended diff format'))],
2222 ('g', 'git', None, _('use git extended diff format'))],
2189 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2223 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2190 "^qinit":
2224 "^qinit":
2191 (init,
2225 (init,
2192 [('c', 'create-repo', None, 'create queue repository')],
2226 [('c', 'create-repo', None, 'create queue repository')],
2193 _('hg qinit [-c]')),
2227 _('hg qinit [-c]')),
2194 "qnew":
2228 "qnew":
2195 (new,
2229 (new,
2196 [('e', 'edit', None, _('edit commit message')),
2230 [('e', 'edit', None, _('edit commit message')),
2197 ('f', 'force', None, _('import uncommitted changes into patch')),
2231 ('f', 'force', None, _('import uncommitted changes into patch')),
2198 ('g', 'git', None, _('use git extended diff format')),
2232 ('g', 'git', None, _('use git extended diff format')),
2199 ] + commands.walkopts + commands.commitopts,
2233 ] + commands.walkopts + commands.commitopts + headeropts,
2200 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2234 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2201 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2235 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2202 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2236 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2203 "^qpop":
2237 "^qpop":
2204 (pop,
2238 (pop,
2205 [('a', 'all', None, _('pop all patches')),
2239 [('a', 'all', None, _('pop all patches')),
2206 ('n', 'name', '', _('queue name to pop')),
2240 ('n', 'name', '', _('queue name to pop')),
2207 ('f', 'force', None, _('forget any local changes'))],
2241 ('f', 'force', None, _('forget any local changes'))],
2208 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2242 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2209 "^qpush":
2243 "^qpush":
2210 (push,
2244 (push,
2211 [('f', 'force', None, _('apply if the patch has rejects')),
2245 [('f', 'force', None, _('apply if the patch has rejects')),
2212 ('l', 'list', None, _('list patch name in commit text')),
2246 ('l', 'list', None, _('list patch name in commit text')),
2213 ('a', 'all', None, _('apply all patches')),
2247 ('a', 'all', None, _('apply all patches')),
2214 ('m', 'merge', None, _('merge from another queue')),
2248 ('m', 'merge', None, _('merge from another queue')),
2215 ('n', 'name', '', _('merge queue name'))],
2249 ('n', 'name', '', _('merge queue name'))],
2216 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2250 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2217 "^qrefresh":
2251 "^qrefresh":
2218 (refresh,
2252 (refresh,
2219 [('e', 'edit', None, _('edit commit message')),
2253 [('e', 'edit', None, _('edit commit message')),
2220 ('g', 'git', None, _('use git extended diff format')),
2254 ('g', 'git', None, _('use git extended diff format')),
2221 ('s', 'short', None, _('refresh only files already in the patch')),
2255 ('s', 'short', None, _('refresh only files already in the patch')),
2222 ] + commands.walkopts + commands.commitopts,
2256 ] + commands.walkopts + commands.commitopts + headeropts,
2223 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2257 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2224 'qrename|qmv':
2258 'qrename|qmv':
2225 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2259 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2226 "qrestore":
2260 "qrestore":
2227 (restore,
2261 (restore,
2228 [('d', 'delete', None, _('delete save entry')),
2262 [('d', 'delete', None, _('delete save entry')),
2229 ('u', 'update', None, _('update queue working dir'))],
2263 ('u', 'update', None, _('update queue working dir'))],
2230 _('hg qrestore [-d] [-u] REV')),
2264 _('hg qrestore [-d] [-u] REV')),
2231 "qsave":
2265 "qsave":
2232 (save,
2266 (save,
2233 [('c', 'copy', None, _('copy patch directory')),
2267 [('c', 'copy', None, _('copy patch directory')),
2234 ('n', 'name', '', _('copy directory name')),
2268 ('n', 'name', '', _('copy directory name')),
2235 ('e', 'empty', None, _('clear queue status file')),
2269 ('e', 'empty', None, _('clear queue status file')),
2236 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2270 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2237 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2271 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2238 "qselect":
2272 "qselect":
2239 (select,
2273 (select,
2240 [('n', 'none', None, _('disable all guards')),
2274 [('n', 'none', None, _('disable all guards')),
2241 ('s', 'series', None, _('list all guards in series file')),
2275 ('s', 'series', None, _('list all guards in series file')),
2242 ('', 'pop', None, _('pop to before first guarded applied patch')),
2276 ('', 'pop', None, _('pop to before first guarded applied patch')),
2243 ('', 'reapply', None, _('pop, then reapply patches'))],
2277 ('', 'reapply', None, _('pop, then reapply patches'))],
2244 _('hg qselect [OPTION]... [GUARD]...')),
2278 _('hg qselect [OPTION]... [GUARD]...')),
2245 "qseries":
2279 "qseries":
2246 (series,
2280 (series,
2247 [('m', 'missing', None, _('print patches not in series')),
2281 [('m', 'missing', None, _('print patches not in series')),
2248 ] + seriesopts,
2282 ] + seriesopts,
2249 _('hg qseries [-ms]')),
2283 _('hg qseries [-ms]')),
2250 "^strip":
2284 "^strip":
2251 (strip,
2285 (strip,
2252 [('f', 'force', None, _('force multi-head removal')),
2286 [('f', 'force', None, _('force multi-head removal')),
2253 ('b', 'backup', None, _('bundle unrelated changesets')),
2287 ('b', 'backup', None, _('bundle unrelated changesets')),
2254 ('n', 'nobackup', None, _('no backups'))],
2288 ('n', 'nobackup', None, _('no backups'))],
2255 _('hg strip [-f] [-b] [-n] REV')),
2289 _('hg strip [-f] [-b] [-n] REV')),
2256 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2290 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2257 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2291 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2258 }
2292 }
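
Since headeropts wires -U/--currentuser and -u/--user into both qnew and qrefresh above, a typical session might look like the following sketch (the patch name and user string are hypothetical):

    hg qnew -U fix-encoding.patch                  # new patch carrying "From: <current user>"
    hg qrefresh -u "Jane Doe <jane@example.com>"   # rewrite the From: line of the applied patch
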
@@ -1,2965 +1,2966 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, re, sys, urllib
10 import os, re, sys, urllib
11 import hg, util, revlog, bundlerepo, extensions
11 import hg, util, revlog, bundlerepo, extensions
12 import difflib, patch, time, help, mdiff, tempfile
12 import difflib, patch, time, help, mdiff, tempfile
13 import errno, version, socket
13 import errno, version, socket
14 import archival, changegroup, cmdutil, hgweb.server, sshserver
14 import archival, changegroup, cmdutil, hgweb.server, sshserver
15
15
16 # Commands start here, listed alphabetically
16 # Commands start here, listed alphabetically
17
17
18 def add(ui, repo, *pats, **opts):
18 def add(ui, repo, *pats, **opts):
19 """add the specified files on the next commit
19 """add the specified files on the next commit
20
20
21 Schedule files to be version controlled and added to the repository.
21 Schedule files to be version controlled and added to the repository.
22
22
23 The files will be added to the repository at the next commit. To
23 The files will be added to the repository at the next commit. To
24 undo an add before that, see hg revert.
24 undo an add before that, see hg revert.
25
25
26 If no names are given, add all files in the repository.
26 If no names are given, add all files in the repository.
27 """
27 """
28
28
29 names = []
29 names = []
30 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
30 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 if exact:
31 if exact:
32 if ui.verbose:
32 if ui.verbose:
33 ui.status(_('adding %s\n') % rel)
33 ui.status(_('adding %s\n') % rel)
34 names.append(abs)
34 names.append(abs)
35 elif abs not in repo.dirstate:
35 elif abs not in repo.dirstate:
36 ui.status(_('adding %s\n') % rel)
36 ui.status(_('adding %s\n') % rel)
37 names.append(abs)
37 names.append(abs)
38 if not opts.get('dry_run'):
38 if not opts.get('dry_run'):
39 repo.add(names)
39 repo.add(names)
40
40
41 def addremove(ui, repo, *pats, **opts):
41 def addremove(ui, repo, *pats, **opts):
42 """add all new files, delete all missing files
42 """add all new files, delete all missing files
43
43
44 Add all new files and remove all missing files from the repository.
44 Add all new files and remove all missing files from the repository.
45
45
46 New files are ignored if they match any of the patterns in .hgignore. As
46 New files are ignored if they match any of the patterns in .hgignore. As
47 with add, these changes take effect at the next commit.
47 with add, these changes take effect at the next commit.
48
48
49 Use the -s option to detect renamed files. With a parameter > 0,
49 Use the -s option to detect renamed files. With a parameter > 0,
50 this compares every removed file with every added file and records
50 this compares every removed file with every added file and records
51 those similar enough as renames. This option takes a percentage
51 those similar enough as renames. This option takes a percentage
52 between 0 (disabled) and 100 (files must be identical) as its
52 between 0 (disabled) and 100 (files must be identical) as its
53 parameter. Detecting renamed files this way can be expensive.
53 parameter. Detecting renamed files this way can be expensive.
54 """
54 """
55 try:
55 try:
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 except ValueError:
57 except ValueError:
58 raise util.Abort(_('similarity must be a number'))
58 raise util.Abort(_('similarity must be a number'))
59 if sim < 0 or sim > 100:
59 if sim < 0 or sim > 100:
60 raise util.Abort(_('similarity must be between 0 and 100'))
60 raise util.Abort(_('similarity must be between 0 and 100'))
61 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
61 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
62
62
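
For example, a rename-detection threshold of 75% (an arbitrary value, chosen only for illustration) would be requested as:

    hg addremove -s 75
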
63 def annotate(ui, repo, *pats, **opts):
63 def annotate(ui, repo, *pats, **opts):
64 """show changeset information per file line
64 """show changeset information per file line
65
65
66 List changes in files, showing the revision id responsible for each line
66 List changes in files, showing the revision id responsible for each line
67
67
68 This command is useful to discover who did a change or when a change took
68 This command is useful to discover who did a change or when a change took
69 place.
69 place.
70
70
71 Without the -a option, annotate will avoid processing files it
71 Without the -a option, annotate will avoid processing files it
72 detects as binary. With -a, annotate will generate an annotation
72 detects as binary. With -a, annotate will generate an annotation
73 anyway, probably with undesirable results.
73 anyway, probably with undesirable results.
74 """
74 """
75 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
75 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
76
76
77 if not pats:
77 if not pats:
78 raise util.Abort(_('at least one file name or pattern required'))
78 raise util.Abort(_('at least one file name or pattern required'))
79
79
80 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
80 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
81 ('number', lambda x: str(x[0].rev())),
81 ('number', lambda x: str(x[0].rev())),
82 ('changeset', lambda x: short(x[0].node())),
82 ('changeset', lambda x: short(x[0].node())),
83 ('date', getdate),
83 ('date', getdate),
84 ('follow', lambda x: x[0].path()),
84 ('follow', lambda x: x[0].path()),
85 ]
85 ]
86
86
87 if (not opts['user'] and not opts['changeset'] and not opts['date']
87 if (not opts['user'] and not opts['changeset'] and not opts['date']
88 and not opts['follow']):
88 and not opts['follow']):
89 opts['number'] = 1
89 opts['number'] = 1
90
90
91 linenumber = opts.get('line_number') is not None
91 linenumber = opts.get('line_number') is not None
92 if (linenumber and (not opts['changeset']) and (not opts['number'])):
92 if (linenumber and (not opts['changeset']) and (not opts['number'])):
93 raise util.Abort(_('at least one of -n/-c is required for -l'))
93 raise util.Abort(_('at least one of -n/-c is required for -l'))
94
94
95 funcmap = [func for op, func in opmap if opts.get(op)]
95 funcmap = [func for op, func in opmap if opts.get(op)]
96 if linenumber:
96 if linenumber:
97 lastfunc = funcmap[-1]
97 lastfunc = funcmap[-1]
98 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
98 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
99
99
100 ctx = repo.changectx(opts['rev'])
100 ctx = repo.changectx(opts['rev'])
101
101
102 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
102 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
103 node=ctx.node()):
103 node=ctx.node()):
104 fctx = ctx.filectx(abs)
104 fctx = ctx.filectx(abs)
105 if not opts['text'] and util.binary(fctx.data()):
105 if not opts['text'] and util.binary(fctx.data()):
106 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
106 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
107 continue
107 continue
108
108
109 lines = fctx.annotate(follow=opts.get('follow'),
109 lines = fctx.annotate(follow=opts.get('follow'),
110 linenumber=linenumber)
110 linenumber=linenumber)
111 pieces = []
111 pieces = []
112
112
113 for f in funcmap:
113 for f in funcmap:
114 l = [f(n) for n, dummy in lines]
114 l = [f(n) for n, dummy in lines]
115 if l:
115 if l:
116 m = max(map(len, l))
116 m = max(map(len, l))
117 pieces.append(["%*s" % (m, x) for x in l])
117 pieces.append(["%*s" % (m, x) for x in l])
118
118
119 if pieces:
119 if pieces:
120 for p, l in zip(zip(*pieces), lines):
120 for p, l in zip(zip(*pieces), lines):
121 ui.write("%s: %s" % (" ".join(p), l[1]))
121 ui.write("%s: %s" % (" ".join(p), l[1]))
122
122
123 def archive(ui, repo, dest, **opts):
123 def archive(ui, repo, dest, **opts):
124 '''create unversioned archive of a repository revision
124 '''create unversioned archive of a repository revision
125
125
126 By default, the revision used is the parent of the working
126 By default, the revision used is the parent of the working
127 directory; use "-r" to specify a different revision.
127 directory; use "-r" to specify a different revision.
128
128
129 To specify the type of archive to create, use "-t". Valid
129 To specify the type of archive to create, use "-t". Valid
130 types are:
130 types are:
131
131
132 "files" (default): a directory full of files
132 "files" (default): a directory full of files
133 "tar": tar archive, uncompressed
133 "tar": tar archive, uncompressed
134 "tbz2": tar archive, compressed using bzip2
134 "tbz2": tar archive, compressed using bzip2
135 "tgz": tar archive, compressed using gzip
135 "tgz": tar archive, compressed using gzip
136 "uzip": zip archive, uncompressed
136 "uzip": zip archive, uncompressed
137 "zip": zip archive, compressed using deflate
137 "zip": zip archive, compressed using deflate
138
138
139 The exact name of the destination archive or directory is given
139 The exact name of the destination archive or directory is given
140 using a format string; see "hg help export" for details.
140 using a format string; see "hg help export" for details.
141
141
142 Each member added to an archive file has a directory prefix
142 Each member added to an archive file has a directory prefix
143 prepended. Use "-p" to specify a format string for the prefix.
143 prepended. Use "-p" to specify a format string for the prefix.
144 The default is the basename of the archive, with suffixes removed.
144 The default is the basename of the archive, with suffixes removed.
145 '''
145 '''
146
146
147 ctx = repo.changectx(opts['rev'])
147 ctx = repo.changectx(opts['rev'])
148 if not ctx:
148 if not ctx:
149 raise util.Abort(_('repository has no revisions'))
149 raise util.Abort(_('repository has no revisions'))
150 node = ctx.node()
150 node = ctx.node()
151 dest = cmdutil.make_filename(repo, dest, node)
151 dest = cmdutil.make_filename(repo, dest, node)
152 if os.path.realpath(dest) == repo.root:
152 if os.path.realpath(dest) == repo.root:
153 raise util.Abort(_('repository root cannot be destination'))
153 raise util.Abort(_('repository root cannot be destination'))
154 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
154 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
155 kind = opts.get('type') or 'files'
155 kind = opts.get('type') or 'files'
156 prefix = opts['prefix']
156 prefix = opts['prefix']
157 if dest == '-':
157 if dest == '-':
158 if kind == 'files':
158 if kind == 'files':
159 raise util.Abort(_('cannot archive plain files to stdout'))
159 raise util.Abort(_('cannot archive plain files to stdout'))
160 dest = sys.stdout
160 dest = sys.stdout
161 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
161 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
162 prefix = cmdutil.make_filename(repo, prefix, node)
162 prefix = cmdutil.make_filename(repo, prefix, node)
163 archival.archive(repo, dest, node, kind, not opts['no_decode'],
163 archival.archive(repo, dest, node, kind, not opts['no_decode'],
164 matchfn, prefix)
164 matchfn, prefix)
165
165
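
A short usage sketch of the archive options described above; the revision name, prefix, and output file are made up:

    hg archive -t tgz -r 1.0 -p project-1.0 project-1.0.tar.gz
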
166 def backout(ui, repo, node=None, rev=None, **opts):
166 def backout(ui, repo, node=None, rev=None, **opts):
167 '''reverse effect of earlier changeset
167 '''reverse effect of earlier changeset
168
168
169 Commit the backed out changes as a new changeset. The new
169 Commit the backed out changes as a new changeset. The new
170 changeset is a child of the backed out changeset.
170 changeset is a child of the backed out changeset.
171
171
172 If you back out a changeset other than the tip, a new head is
172 If you back out a changeset other than the tip, a new head is
173 created. This head is the parent of the working directory. If
173 created. This head is the parent of the working directory. If
174 you back out an old changeset, your working directory will appear
174 you back out an old changeset, your working directory will appear
175 old after the backout. You should merge the backout changeset
175 old after the backout. You should merge the backout changeset
176 with another head.
176 with another head.
177
177
178 The --merge option remembers the parent of the working directory
178 The --merge option remembers the parent of the working directory
179 before starting the backout, then merges the new head with that
179 before starting the backout, then merges the new head with that
180 changeset afterwards. This saves you from doing the merge by
180 changeset afterwards. This saves you from doing the merge by
181 hand. The result of this merge is not committed, as for a normal
181 hand. The result of this merge is not committed, as for a normal
182 merge.'''
182 merge.'''
183 if rev and node:
183 if rev and node:
184 raise util.Abort(_("please specify just one revision"))
184 raise util.Abort(_("please specify just one revision"))
185
185
186 if not rev:
186 if not rev:
187 rev = node
187 rev = node
188
188
189 if not rev:
189 if not rev:
190 raise util.Abort(_("please specify a revision to backout"))
190 raise util.Abort(_("please specify a revision to backout"))
191
191
192 cmdutil.bail_if_changed(repo)
192 cmdutil.bail_if_changed(repo)
193 op1, op2 = repo.dirstate.parents()
193 op1, op2 = repo.dirstate.parents()
194 if op2 != nullid:
194 if op2 != nullid:
195 raise util.Abort(_('outstanding uncommitted merge'))
195 raise util.Abort(_('outstanding uncommitted merge'))
196 node = repo.lookup(rev)
196 node = repo.lookup(rev)
197
197
198 a = repo.changelog.ancestor(op1, node)
198 a = repo.changelog.ancestor(op1, node)
199 if a != node:
199 if a != node:
200 raise util.Abort(_('cannot back out change on a different branch'))
200 raise util.Abort(_('cannot back out change on a different branch'))
201
201
202 p1, p2 = repo.changelog.parents(node)
202 p1, p2 = repo.changelog.parents(node)
203 if p1 == nullid:
203 if p1 == nullid:
204 raise util.Abort(_('cannot back out a change with no parents'))
204 raise util.Abort(_('cannot back out a change with no parents'))
205 if p2 != nullid:
205 if p2 != nullid:
206 if not opts['parent']:
206 if not opts['parent']:
207 raise util.Abort(_('cannot back out a merge changeset without '
207 raise util.Abort(_('cannot back out a merge changeset without '
208 '--parent'))
208 '--parent'))
209 p = repo.lookup(opts['parent'])
209 p = repo.lookup(opts['parent'])
210 if p not in (p1, p2):
210 if p not in (p1, p2):
211 raise util.Abort(_('%s is not a parent of %s') %
211 raise util.Abort(_('%s is not a parent of %s') %
212 (short(p), short(node)))
212 (short(p), short(node)))
213 parent = p
213 parent = p
214 else:
214 else:
215 if opts['parent']:
215 if opts['parent']:
216 raise util.Abort(_('cannot use --parent on non-merge changeset'))
216 raise util.Abort(_('cannot use --parent on non-merge changeset'))
217 parent = p1
217 parent = p1
218
218
219 hg.clean(repo, node, show_stats=False)
219 hg.clean(repo, node, show_stats=False)
220 revert_opts = opts.copy()
220 revert_opts = opts.copy()
221 revert_opts['date'] = None
221 revert_opts['date'] = None
222 revert_opts['all'] = True
222 revert_opts['all'] = True
223 revert_opts['rev'] = hex(parent)
223 revert_opts['rev'] = hex(parent)
224 revert(ui, repo, **revert_opts)
224 revert(ui, repo, **revert_opts)
225 commit_opts = opts.copy()
225 commit_opts = opts.copy()
226 commit_opts['addremove'] = False
226 commit_opts['addremove'] = False
227 if not commit_opts['message'] and not commit_opts['logfile']:
227 if not commit_opts['message'] and not commit_opts['logfile']:
228 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
228 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
229 commit_opts['force_editor'] = True
229 commit_opts['force_editor'] = True
230 commit(ui, repo, **commit_opts)
230 commit(ui, repo, **commit_opts)
231 def nice(node):
231 def nice(node):
232 return '%d:%s' % (repo.changelog.rev(node), short(node))
232 return '%d:%s' % (repo.changelog.rev(node), short(node))
233 ui.status(_('changeset %s backs out changeset %s\n') %
233 ui.status(_('changeset %s backs out changeset %s\n') %
234 (nice(repo.changelog.tip()), nice(node)))
234 (nice(repo.changelog.tip()), nice(node)))
235 if op1 != node:
235 if op1 != node:
236 if opts['merge']:
236 if opts['merge']:
237 ui.status(_('merging with changeset %s\n') % nice(op1))
237 ui.status(_('merging with changeset %s\n') % nice(op1))
238 hg.merge(repo, hex(op1))
238 hg.merge(repo, hex(op1))
239 else:
239 else:
240 ui.status(_('the backout changeset is a new head - '
240 ui.status(_('the backout changeset is a new head - '
241 'do not forget to merge\n'))
241 'do not forget to merge\n'))
242 ui.status(_('(use "backout --merge" '
242 ui.status(_('(use "backout --merge" '
243 'if you want to auto-merge)\n'))
243 'if you want to auto-merge)\n'))
244
244
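
A sketch of the --merge workflow from the backout help; REV is a placeholder for the changeset being backed out:

    hg backout --merge REV          # commit the backout, then merge with the old working dir parent
    hg commit -m "merged backout"   # commit the resulting merge
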
245 def branch(ui, repo, label=None, **opts):
245 def branch(ui, repo, label=None, **opts):
246 """set or show the current branch name
246 """set or show the current branch name
247
247
248 With no argument, show the current branch name. With one argument,
248 With no argument, show the current branch name. With one argument,
249 set the working directory branch name (the branch does not exist in
249 set the working directory branch name (the branch does not exist in
250 the repository until the next commit).
250 the repository until the next commit).
251
251
252 Unless --force is specified, branch will not let you set a
252 Unless --force is specified, branch will not let you set a
253 branch name that shadows an existing branch.
253 branch name that shadows an existing branch.
254 """
254 """
255
255
256 if label:
256 if label:
257 if not opts.get('force') and label in repo.branchtags():
257 if not opts.get('force') and label in repo.branchtags():
258 if label not in [p.branch() for p in repo.workingctx().parents()]:
258 if label not in [p.branch() for p in repo.workingctx().parents()]:
259 raise util.Abort(_('a branch of the same name already exists'
259 raise util.Abort(_('a branch of the same name already exists'
260 ' (use --force to override)'))
260 ' (use --force to override)'))
261 repo.dirstate.setbranch(util.fromlocal(label))
261 repo.dirstate.setbranch(util.fromlocal(label))
262 ui.status(_('marked working directory as branch %s\n') % label)
262 ui.status(_('marked working directory as branch %s\n') % label)
263 else:
263 else:
264 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
264 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
265
265
266 def branches(ui, repo, active=False):
266 def branches(ui, repo, active=False):
267 """list repository named branches
267 """list repository named branches
268
268
269 List the repository's named branches, indicating which ones are
269 List the repository's named branches, indicating which ones are
270 inactive. If active is specified, only show active branches.
270 inactive. If active is specified, only show active branches.
271
271
272 A branch is considered active if it contains unmerged heads.
272 A branch is considered active if it contains unmerged heads.
273 """
273 """
274 b = repo.branchtags()
274 b = repo.branchtags()
275 heads = dict.fromkeys(repo.heads(), 1)
275 heads = dict.fromkeys(repo.heads(), 1)
276 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
276 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
277 l.sort()
277 l.sort()
278 l.reverse()
278 l.reverse()
279 for ishead, r, n, t in l:
279 for ishead, r, n, t in l:
280 if active and not ishead:
280 if active and not ishead:
281 # If we're only displaying active branches, abort the loop on
281 # If we're only displaying active branches, abort the loop on
282 # encountering the first inactive head
282 # encountering the first inactive head
283 break
283 break
284 else:
284 else:
285 hexfunc = ui.debugflag and hex or short
285 hexfunc = ui.debugflag and hex or short
286 if ui.quiet:
286 if ui.quiet:
287 ui.write("%s\n" % t)
287 ui.write("%s\n" % t)
288 else:
288 else:
289 spaces = " " * (30 - util.locallen(t))
289 spaces = " " * (30 - util.locallen(t))
290 # The code only gets here if inactive branches are being
290 # The code only gets here if inactive branches are being
291 # displayed or the branch is active.
291 # displayed or the branch is active.
292 isinactive = ((not ishead) and " (inactive)") or ''
292 isinactive = ((not ishead) and " (inactive)") or ''
293 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
293 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
294
294
295 def bundle(ui, repo, fname, dest=None, **opts):
295 def bundle(ui, repo, fname, dest=None, **opts):
296 """create a changegroup file
296 """create a changegroup file
297
297
298 Generate a compressed changegroup file collecting changesets not
298 Generate a compressed changegroup file collecting changesets not
299 found in the other repository.
299 found in the other repository.
300
300
301 If no destination repository is specified, the destination is assumed
301 If no destination repository is specified, the destination is assumed
302 to have all the nodes specified by one or more --base parameters.
302 to have all the nodes specified by one or more --base parameters.
303
303
304 The bundle file can then be transferred using conventional means and
304 The bundle file can then be transferred using conventional means and
305 applied to another repository with the unbundle or pull command.
305 applied to another repository with the unbundle or pull command.
306 This is useful when direct push and pull are not available or when
306 This is useful when direct push and pull are not available or when
307 exporting an entire repository is undesirable.
307 exporting an entire repository is undesirable.
308
308
309 Applying bundles preserves all changeset contents including
309 Applying bundles preserves all changeset contents including
310 permissions, copy/rename information, and revision history.
310 permissions, copy/rename information, and revision history.
311 """
311 """
312 revs = opts.get('rev') or None
312 revs = opts.get('rev') or None
313 if revs:
313 if revs:
314 revs = [repo.lookup(rev) for rev in revs]
314 revs = [repo.lookup(rev) for rev in revs]
315 base = opts.get('base')
315 base = opts.get('base')
316 if base:
316 if base:
317 if dest:
317 if dest:
318 raise util.Abort(_("--base is incompatible with specifiying "
318 raise util.Abort(_("--base is incompatible with specifiying "
319 "a destination"))
319 "a destination"))
320 base = [repo.lookup(rev) for rev in base]
320 base = [repo.lookup(rev) for rev in base]
321 # create the right base
321 # create the right base
322 # XXX: nodesbetween / changegroup* should be "fixed" instead
322 # XXX: nodesbetween / changegroup* should be "fixed" instead
323 o = []
323 o = []
324 has = {nullid: None}
324 has = {nullid: None}
325 for n in base:
325 for n in base:
326 has.update(repo.changelog.reachable(n))
326 has.update(repo.changelog.reachable(n))
327 if revs:
327 if revs:
328 visit = list(revs)
328 visit = list(revs)
329 else:
329 else:
330 visit = repo.changelog.heads()
330 visit = repo.changelog.heads()
331 seen = {}
331 seen = {}
332 while visit:
332 while visit:
333 n = visit.pop(0)
333 n = visit.pop(0)
334 parents = [p for p in repo.changelog.parents(n) if p not in has]
334 parents = [p for p in repo.changelog.parents(n) if p not in has]
335 if len(parents) == 0:
335 if len(parents) == 0:
336 o.insert(0, n)
336 o.insert(0, n)
337 else:
337 else:
338 for p in parents:
338 for p in parents:
339 if p not in seen:
339 if p not in seen:
340 seen[p] = 1
340 seen[p] = 1
341 visit.append(p)
341 visit.append(p)
342 else:
342 else:
343 cmdutil.setremoteconfig(ui, opts)
343 cmdutil.setremoteconfig(ui, opts)
344 dest, revs, checkout = hg.parseurl(
344 dest, revs, checkout = hg.parseurl(
345 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
345 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
346 other = hg.repository(ui, dest)
346 other = hg.repository(ui, dest)
347 o = repo.findoutgoing(other, force=opts['force'])
347 o = repo.findoutgoing(other, force=opts['force'])
348
348
349 if revs:
349 if revs:
350 cg = repo.changegroupsubset(o, revs, 'bundle')
350 cg = repo.changegroupsubset(o, revs, 'bundle')
351 else:
351 else:
352 cg = repo.changegroup(o, 'bundle')
352 cg = repo.changegroup(o, 'bundle')
353 changegroup.writebundle(cg, fname, "HG10BZ")
353 changegroup.writebundle(cg, fname, "HG10BZ")
354
354
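
For example, to ship changesets that a receiving repository (assumed here to already contain revision BASE) does not yet have:

    hg bundle --base BASE changes.hg   # in the source repository
    hg unbundle changes.hg             # in the receiving repository
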
355 def cat(ui, repo, file1, *pats, **opts):
355 def cat(ui, repo, file1, *pats, **opts):
356 """output the current or given revision of files
356 """output the current or given revision of files
357
357
358 Print the specified files as they were at the given revision.
358 Print the specified files as they were at the given revision.
359 If no revision is given, the parent of the working directory is used,
359 If no revision is given, the parent of the working directory is used,
360 or tip if no revision is checked out.
360 or tip if no revision is checked out.
361
361
362 Output may be to a file, in which case the name of the file is
362 Output may be to a file, in which case the name of the file is
363 given using a format string. The formatting rules are the same as
363 given using a format string. The formatting rules are the same as
364 for the export command, with the following additions:
364 for the export command, with the following additions:
365
365
366 %s basename of file being printed
366 %s basename of file being printed
367 %d dirname of file being printed, or '.' if in repo root
367 %d dirname of file being printed, or '.' if in repo root
368 %p root-relative path name of file being printed
368 %p root-relative path name of file being printed
369 """
369 """
370 ctx = repo.changectx(opts['rev'])
370 ctx = repo.changectx(opts['rev'])
371 err = 1
371 err = 1
372 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
372 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
373 ctx.node()):
373 ctx.node()):
374 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
374 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
375 fp.write(ctx.filectx(abs).data())
375 fp.write(ctx.filectx(abs).data())
376 err = 0
376 err = 0
377 return err
377 return err
378
378
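
A small sketch of the format-string output described in the cat help; the revision and file name are hypothetical:

    hg cat -r 1.0 -o '%s.orig' hgext/mq.py   # writes the old contents to mq.py.orig
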
379 def clone(ui, source, dest=None, **opts):
379 def clone(ui, source, dest=None, **opts):
380 """make a copy of an existing repository
380 """make a copy of an existing repository
381
381
382 Create a copy of an existing repository in a new directory.
382 Create a copy of an existing repository in a new directory.
383
383
384 If no destination directory name is specified, it defaults to the
384 If no destination directory name is specified, it defaults to the
385 basename of the source.
385 basename of the source.
386
386
387 The location of the source is added to the new repository's
387 The location of the source is added to the new repository's
388 .hg/hgrc file, as the default to be used for future pulls.
388 .hg/hgrc file, as the default to be used for future pulls.
389
389
390 For efficiency, hardlinks are used for cloning whenever the source
390 For efficiency, hardlinks are used for cloning whenever the source
391 and destination are on the same filesystem (note this applies only
391 and destination are on the same filesystem (note this applies only
392 to the repository data, not to the checked out files). Some
392 to the repository data, not to the checked out files). Some
393 filesystems, such as AFS, implement hardlinking incorrectly, but
393 filesystems, such as AFS, implement hardlinking incorrectly, but
394 do not report errors. In these cases, use the --pull option to
394 do not report errors. In these cases, use the --pull option to
395 avoid hardlinking.
395 avoid hardlinking.
396
396
397 You can safely clone repositories and checked out files using full
397 You can safely clone repositories and checked out files using full
398 hardlinks with
398 hardlinks with
399
399
400 $ cp -al REPO REPOCLONE
400 $ cp -al REPO REPOCLONE
401
401
402 which is the fastest way to clone. However, the operation is not
402 which is the fastest way to clone. However, the operation is not
403 atomic (making sure REPO is not modified during the operation is
403 atomic (making sure REPO is not modified during the operation is
404 up to you) and you have to make sure your editor breaks hardlinks
404 up to you) and you have to make sure your editor breaks hardlinks
405 (Emacs and most Linux Kernel tools do so).
405 (Emacs and most Linux Kernel tools do so).
406
406
407 If you use the -r option to clone up to a specific revision, no
407 If you use the -r option to clone up to a specific revision, no
408 subsequent revisions will be present in the cloned repository.
408 subsequent revisions will be present in the cloned repository.
409 This option implies --pull, even on local repositories.
410
411 See pull for valid source format details.
412
413 It is possible to specify an ssh:// URL as the destination, but no
414 .hg/hgrc and working directory will be created on the remote side.
415 Look at the help text for the pull command for important details
416 about ssh:// URLs.
417 """
418 cmdutil.setremoteconfig(ui, opts)
419 hg.clone(ui, source, dest,
420 pull=opts['pull'],
421 stream=opts['uncompressed'],
422 rev=opts['rev'],
423 update=not opts['noupdate'])
424
425 def commit(ui, repo, *pats, **opts):
426 """commit the specified files or all outstanding changes
427
428 Commit changes to the given files into the repository.
429
430 If a list of files is omitted, all changes reported by "hg status"
431 will be committed.
432
433 If no commit message is specified, the editor configured in your hgrc
434 or in the EDITOR environment variable is started to enter a message.
435 """
436 def commitfunc(ui, repo, files, message, match, opts):
437 return repo.commit(files, message, opts['user'], opts['date'], match,
438 force_editor=opts.get('force_editor'))
439 cmdutil.commit(ui, repo, commitfunc, pats, opts)
440
441 def copy(ui, repo, *pats, **opts):
442 """mark files as copied for the next commit
443
444 Mark dest as having copies of source files. If dest is a
445 directory, copies are put in that directory. If dest is a file,
446 there can only be one source.
447
448 By default, this command copies the contents of files as they
449 stand in the working directory. If invoked with --after, the
450 operation is recorded, but no copying is performed.
451
452 This command takes effect in the next commit. To undo a copy
453 before that, see hg revert.
454 """
455 wlock = repo.wlock(False)
456 try:
457 return cmdutil.copy(ui, repo, pats, opts)
458 finally:
459 del wlock
460
461 def debugancestor(ui, index, rev1, rev2):
462 """find the ancestor revision of two revisions in a given index"""
463 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
464 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
465 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
466
467 def debugcomplete(ui, cmd='', **opts):
468 """returns the completion list associated with the given command"""
469
470 if opts['options']:
471 options = []
472 otables = [globalopts]
473 if cmd:
474 aliases, entry = cmdutil.findcmd(ui, cmd, table)
475 otables.append(entry[1])
476 for t in otables:
477 for o in t:
478 if o[0]:
479 options.append('-%s' % o[0])
480 options.append('--%s' % o[1])
481 ui.write("%s\n" % "\n".join(options))
482 return
483
484 clist = cmdutil.findpossible(ui, cmd, table).keys()
485 clist.sort()
486 ui.write("%s\n" % "\n".join(clist))
487
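# Illustrative, self-contained sketch (not part of this changeset): the same
# option-flattening idea debugcomplete uses above, applied to a hypothetical
# table of (shortopt, longopt, default, description) tuples.
def _complete_opts_sketch(optable):
    options = []
    for shortopt, longopt, default, desc in optable:
        if shortopt:
            options.append('-%s' % shortopt)
        options.append('--%s' % longopt)
    return "\n".join(options)

# _complete_opts_sketch([('r', 'rev', [], 'revision'), ('', 'all', None, 'all files')])
#   -> '-r\n--rev\n--all'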
488 def debugrebuildstate(ui, repo, rev=""):
489 """rebuild the dirstate as it would look like for the given revision"""
490 if rev == "":
491 rev = repo.changelog.tip()
492 ctx = repo.changectx(rev)
493 files = ctx.manifest()
494 wlock = repo.wlock()
495 try:
496 repo.dirstate.rebuild(rev, files)
497 finally:
498 del wlock
499
500 def debugcheckstate(ui, repo):
501 """validate the correctness of the current dirstate"""
502 parent1, parent2 = repo.dirstate.parents()
503 m1 = repo.changectx(parent1).manifest()
504 m2 = repo.changectx(parent2).manifest()
505 errors = 0
506 for f in repo.dirstate:
507 state = repo.dirstate[f]
508 if state in "nr" and f not in m1:
509 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
510 errors += 1
511 if state in "a" and f in m1:
512 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
513 errors += 1
514 if state in "m" and f not in m1 and f not in m2:
515 ui.warn(_("%s in state %s, but not in either manifest\n") %
516 (f, state))
517 errors += 1
518 for f in m1:
519 state = repo.dirstate[f]
520 if state not in "nrm":
521 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
522 errors += 1
523 if errors:
524 error = _(".hg/dirstate inconsistent with current parent's manifest")
525 raise util.Abort(error)
526
527 def showconfig(ui, repo, *values, **opts):
528 """show combined config settings from all hgrc files
529
530 With no args, print names and values of all config items.
531
532 With one arg of the form section.name, print just the value of
533 that config item.
534
535 With multiple args, print names and values of all config items
536 with matching section names."""
537
538 untrusted = bool(opts.get('untrusted'))
539 if values:
540 if len([v for v in values if '.' in v]) > 1:
541 raise util.Abort(_('only one config item permitted'))
542 for section, name, value in ui.walkconfig(untrusted=untrusted):
543 sectname = section + '.' + name
544 if values:
545 for v in values:
546 if v == section:
547 ui.write('%s=%s\n' % (sectname, value))
548 elif v == sectname:
549 ui.write(value, '\n')
550 else:
551 ui.write('%s=%s\n' % (sectname, value))
552
553 def debugsetparents(ui, repo, rev1, rev2=None):
554 """manually set the parents of the current working directory
555
556 This is useful for writing repository conversion tools, but should
557 be used with care.
558 """
559
560 if not rev2:
561 rev2 = hex(nullid)
562
563 wlock = repo.wlock()
564 try:
565 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
566 finally:
567 del wlock
568
569 def debugstate(ui, repo):
570 """show the contents of the current dirstate"""
571 k = repo.dirstate._map.items()
572 k.sort()
573 for file_, ent in k:
574 if ent[3] == -1:
575 # Pad or slice to locale representation
576 locale_len = len(time.strftime("%x %X", time.localtime(0)))
577 timestr = 'unset'
578 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
579 else:
580 timestr = time.strftime("%x %X", time.localtime(ent[3]))
581 if ent[1] & 020000:
582 mode = 'lnk'
583 else:
584 mode = '%3o' % (ent[1] & 0777)
585 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
586 for f in repo.dirstate.copies():
587 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
588
589 def debugdata(ui, file_, rev):
590 """dump the contents of a data file revision"""
591 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
592 try:
593 ui.write(r.revision(r.lookup(rev)))
594 except KeyError:
595 raise util.Abort(_('invalid revision identifier %s') % rev)
596
597 def debugdate(ui, date, range=None, **opts):
598 """parse and display a date"""
599 if opts["extended"]:
600 d = util.parsedate(date, util.extendeddateformats)
601 else:
602 d = util.parsedate(date)
603 ui.write("internal: %s %s\n" % d)
604 ui.write("standard: %s\n" % util.datestr(d))
605 if range:
606 m = util.matchdate(range)
607 ui.write("match: %s\n" % m(d[0]))
608
609 def debugindex(ui, file_):
610 """dump the contents of an index file"""
611 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
612 ui.write(" rev offset length base linkrev" +
613 " nodeid p1 p2\n")
614 for i in xrange(r.count()):
615 node = r.node(i)
616 try:
617 pp = r.parents(node)
618 except:
619 pp = [nullid, nullid]
620 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
621 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
622 short(node), short(pp[0]), short(pp[1])))
623
624 def debugindexdot(ui, file_):
625 """dump an index DAG as a .dot file"""
626 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
627 ui.write("digraph G {\n")
628 for i in xrange(r.count()):
629 node = r.node(i)
630 pp = r.parents(node)
631 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
632 if pp[1] != nullid:
633 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
634 ui.write("}\n")
635
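# Illustrative sketch (not part of this changeset): the dot output debugindexdot
# produces above, recreated for a tiny hand-written DAG.  Revision -1 stands in
# for the null parent, mirroring what r.rev(nullid) returns for a root revision.
def _dag_to_dot_sketch(parents):
    # parents: list indexed by revision number, each entry a (p1, p2) pair
    lines = ["digraph G {"]
    for rev, (p1, p2) in enumerate(parents):
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# _dag_to_dot_sketch([(-1, -1), (0, -1), (0, -1), (1, 2)]) describes a small
# merge: revisions 1 and 2 both descend from 0, and revision 3 merges them.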
636 def debuginstall(ui):
637 '''test Mercurial installation'''
638
639 def writetemp(contents):
640 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
641 f = os.fdopen(fd, "wb")
642 f.write(contents)
643 f.close()
644 return name
645
646 problems = 0
647
648 # encoding
649 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
650 try:
651 util.fromlocal("test")
652 except util.Abort, inst:
653 ui.write(" %s\n" % inst)
654 ui.write(_(" (check that your locale is properly set)\n"))
655 problems += 1
656
657 # compiled modules
658 ui.status(_("Checking extensions...\n"))
659 try:
660 import bdiff, mpatch, base85
661 except Exception, inst:
662 ui.write(" %s\n" % inst)
663 ui.write(_(" One or more extensions could not be found"))
664 ui.write(_(" (check that you compiled the extensions)\n"))
665 problems += 1
666
667 # templates
668 ui.status(_("Checking templates...\n"))
669 try:
670 import templater
671 t = templater.templater(templater.templatepath("map-cmdline.default"))
672 except Exception, inst:
673 ui.write(" %s\n" % inst)
674 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
675 problems += 1
676
677 # patch
678 ui.status(_("Checking patch...\n"))
679 patchproblems = 0
680 a = "1\n2\n3\n4\n"
681 b = "1\n2\n3\ninsert\n4\n"
682 fa = writetemp(a)
683 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
684 os.path.basename(fa))
685 fd = writetemp(d)
686
687 files = {}
688 try:
689 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
690 except util.Abort, e:
691 ui.write(_(" patch call failed:\n"))
692 ui.write(" " + str(e) + "\n")
693 patchproblems += 1
694 else:
695 if list(files) != [os.path.basename(fa)]:
696 ui.write(_(" unexpected patch output!\n"))
697 patchproblems += 1
698 a = file(fa).read()
699 if a != b:
700 ui.write(_(" patch test failed!\n"))
701 patchproblems += 1
702
703 if patchproblems:
704 if ui.config('ui', 'patch'):
705 ui.write(_(" (Current patch tool may be incompatible with patch,"
706 " or misconfigured. Please check your .hgrc file)\n"))
707 else:
708 ui.write(_(" Internal patcher failure, please report this error"
709 " to http://www.selenic.com/mercurial/bts\n"))
710 problems += patchproblems
711
712 os.unlink(fa)
713 os.unlink(fd)
714
715 # merge helper
716 ui.status(_("Checking merge helper...\n"))
717 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
718 or "hgmerge")
719 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
720 if not cmdpath:
721 if cmd == 'hgmerge':
722 ui.write(_(" No merge helper set and can't find default"
723 " hgmerge script in PATH\n"))
724 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
725 else:
726 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
727 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
728 problems += 1
729 else:
730 # actually attempt a patch here
731 fa = writetemp("1\n2\n3\n4\n")
732 fl = writetemp("1\n2\n3\ninsert\n4\n")
733 fr = writetemp("begin\n1\n2\n3\n4\n")
734 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
735 if r:
736 ui.write(_(" Got unexpected merge error %d!\n") % r)
737 problems += 1
738 m = file(fl).read()
739 if m != "begin\n1\n2\n3\ninsert\n4\n":
740 ui.write(_(" Got unexpected merge results!\n"))
741 ui.write(_(" (your merge helper may have the"
742 " wrong argument order)\n"))
743 ui.write(_(" Result: %r\n") % m)
744 problems += 1
745 os.unlink(fa)
746 os.unlink(fl)
747 os.unlink(fr)
748
749 # editor
750 ui.status(_("Checking commit editor...\n"))
751 editor = (os.environ.get("HGEDITOR") or
752 ui.config("ui", "editor") or
753 os.environ.get("EDITOR", "vi"))
754 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
755 if not cmdpath:
756 if editor == 'vi':
757 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
758 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
759 else:
760 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
761 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
762 problems += 1
763
764 # check username
765 ui.status(_("Checking username...\n"))
766 user = os.environ.get("HGUSER")
767 if user is None:
768 user = ui.config("ui", "username")
769 if user is None:
770 user = os.environ.get("EMAIL")
771 if not user:
772 ui.warn(" ")
773 ui.username()
774 ui.write(_(" (specify a username in your .hgrc file)\n"))
775
776 if not problems:
777 ui.status(_("No problems detected\n"))
778 else:
779 ui.write(_("%s problems detected,"
780 " please check your install!\n") % problems)
781
782 return problems
783
784 def debugrename(ui, repo, file1, *pats, **opts):
785 """dump rename information"""
786
787 ctx = repo.changectx(opts.get('rev', 'tip'))
788 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
789 ctx.node()):
790 m = ctx.filectx(abs).renamed()
791 if m:
792 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
793 else:
794 ui.write(_("%s not renamed\n") % rel)
795
796 def debugwalk(ui, repo, *pats, **opts):
797 """show how files match on given patterns"""
798 items = list(cmdutil.walk(repo, pats, opts))
799 if not items:
800 return
801 fmt = '%%s %%-%ds %%-%ds %%s' % (
802 max([len(abs) for (src, abs, rel, exact) in items]),
803 max([len(rel) for (src, abs, rel, exact) in items]))
804 for src, abs, rel, exact in items:
805 line = fmt % (src, abs, rel, exact and 'exact' or '')
806 ui.write("%s\n" % line.rstrip())
807
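# Illustrative sketch (not part of this changeset): how the '%%-%ds' trick in
# debugwalk above builds a column-aligned format string from the widest
# entries, shown with plain tuples instead of cmdutil.walk() results.
def _walk_fmt_sketch(items):
    # items: list of (src, abs, rel, exact) tuples
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    return [(fmt % (src, abs, rel, exact and 'exact' or '')).rstrip()
            for (src, abs, rel, exact) in items]

# _walk_fmt_sketch([('f', 'dir/a', 'a', True)]) -> ['f dir/a a exact']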
808 def diff(ui, repo, *pats, **opts):
809 """diff repository (or selected files)
810
811 Show differences between revisions for the specified files.
812
813 Differences between files are shown using the unified diff format.
814
815 NOTE: diff may generate unexpected results for merges, as it will
816 default to comparing against the working directory's first parent
817 changeset if no revisions are specified.
818
819 When two revision arguments are given, then changes are shown
820 between those revisions. If only one revision is specified then
821 that revision is compared to the working directory, and, when no
822 revisions are specified, the working directory files are compared
823 to its parent.
824
825 Without the -a option, diff will avoid generating diffs of files
826 it detects as binary. With -a, diff will generate a diff anyway,
827 probably with undesirable results.
828 """
829 node1, node2 = cmdutil.revpair(repo, opts['rev'])
830
831 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
832
833 patch.diff(repo, node1, node2, fns, match=matchfn,
834 opts=patch.diffopts(ui, opts))
835
836 def export(ui, repo, *changesets, **opts):
837 """dump the header and diffs for one or more changesets
838
839 Print the changeset header and diffs for one or more revisions.
840
841 The information shown in the changeset header is: author,
842 changeset hash, parent(s) and commit comment.
843
844 NOTE: export may generate unexpected diff output for merge changesets,
845 as it will compare the merge changeset against its first parent only.
846
847 Output may be to a file, in which case the name of the file is
848 given using a format string. The formatting rules are as follows:
849
850 %% literal "%" character
851 %H changeset hash (40 bytes of hexadecimal)
852 %N number of patches being generated
853 %R changeset revision number
854 %b basename of the exporting repository
855 %h short-form changeset hash (12 bytes of hexadecimal)
856 %n zero-padded sequence number, starting at 1
857 %r zero-padded changeset revision number
858
859 Without the -a option, export will avoid generating diffs of files
860 it detects as binary. With -a, export will generate a diff anyway,
861 probably with undesirable results.
862
863 With the --switch-parent option, the diff will be against the second
864 parent. It can be useful to review a merge.
865 """
866 if not changesets:
867 raise util.Abort(_("export requires at least one changeset"))
868 revs = cmdutil.revrange(repo, changesets)
869 if len(revs) > 1:
870 ui.note(_('exporting patches:\n'))
871 else:
872 ui.note(_('exporting patch:\n'))
873 patch.export(repo, revs, template=opts['output'],
874 switch_parent=opts['switch_parent'],
875 opts=patch.diffopts(ui, opts))
876
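# Illustrative sketch (not part of this changeset): one way the %-escapes listed
# in the export docstring could be expanded into an output file name.  The real
# expansion happens inside patch.export(); the helper and its padding rule here
# are assumptions for demonstration only.
def _expand_export_name_sketch(template, rev, node, seqno, total):
    width = len(str(total))
    expansions = {'%': '%',
                  'H': node,                      # full 40-digit hash
                  'h': node[:12],                 # short hash
                  'R': str(rev),                  # revision number
                  'r': str(rev).zfill(width),
                  'n': str(seqno).zfill(width),
                  'N': str(total)}
    out, i = [], 0
    while i < len(template):
        if template[i] == '%' and i + 1 < len(template):
            out.append(expansions.get(template[i + 1], template[i + 1]))
            i += 2
        else:
            out.append(template[i])
            i += 1
    return ''.join(out)

# _expand_export_name_sketch('%n-of-%N.patch', 5, 'a' * 40, 2, 12) -> '02-of-12.patch'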
877 def grep(ui, repo, pattern, *pats, **opts):
878 """search for a pattern in specified files and revisions
879
880 Search revisions of files for a regular expression.
881
882 This command behaves differently than Unix grep. It only accepts
883 Python/Perl regexps. It searches repository history, not the
884 working directory. It always prints the revision number in which
885 a match appears.
886
887 By default, grep only prints output for the first revision of a
888 file in which it finds a match. To get it to print every revision
889 that contains a change in match status ("-" for a match that
890 becomes a non-match, or "+" for a non-match that becomes a match),
891 use the --all flag.
892 """
893 reflags = 0
894 if opts['ignore_case']:
895 reflags |= re.I
896 try:
897 regexp = re.compile(pattern, reflags)
898 except Exception, inst:
899 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
900 return None
901 sep, eol = ':', '\n'
902 if opts['print0']:
903 sep = eol = '\0'
904
905 fcache = {}
906 def getfile(fn):
907 if fn not in fcache:
908 fcache[fn] = repo.file(fn)
909 return fcache[fn]
910
911 def matchlines(body):
912 begin = 0
913 linenum = 0
914 while True:
915 match = regexp.search(body, begin)
916 if not match:
917 break
918 mstart, mend = match.span()
919 linenum += body.count('\n', begin, mstart) + 1
920 lstart = body.rfind('\n', begin, mstart) + 1 or begin
921 lend = body.find('\n', mend)
922 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
923 begin = lend + 1
924
925 class linestate(object):
926 def __init__(self, line, linenum, colstart, colend):
927 self.line = line
928 self.linenum = linenum
929 self.colstart = colstart
930 self.colend = colend
931
932 def __eq__(self, other):
933 return self.line == other.line
934
935 matches = {}
936 copies = {}
937 def grepbody(fn, rev, body):
938 matches[rev].setdefault(fn, [])
939 m = matches[rev][fn]
940 for lnum, cstart, cend, line in matchlines(body):
941 s = linestate(line, lnum, cstart, cend)
942 m.append(s)
943
944 def difflinestates(a, b):
945 sm = difflib.SequenceMatcher(None, a, b)
946 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
947 if tag == 'insert':
948 for i in xrange(blo, bhi):
949 yield ('+', b[i])
950 elif tag == 'delete':
951 for i in xrange(alo, ahi):
952 yield ('-', a[i])
953 elif tag == 'replace':
954 for i in xrange(alo, ahi):
955 yield ('-', a[i])
956 for i in xrange(blo, bhi):
957 yield ('+', b[i])
958
959 prev = {}
960 def display(fn, rev, states, prevstates):
961 found = False
962 filerevmatches = {}
963 r = prev.get(fn, -1)
964 if opts['all']:
965 iter = difflinestates(states, prevstates)
966 else:
967 iter = [('', l) for l in prevstates]
968 for change, l in iter:
969 cols = [fn, str(r)]
970 if opts['line_number']:
971 cols.append(str(l.linenum))
972 if opts['all']:
973 cols.append(change)
974 if opts['user']:
975 cols.append(ui.shortuser(get(r)[1]))
976 if opts['files_with_matches']:
977 c = (fn, r)
978 if c in filerevmatches:
979 continue
980 filerevmatches[c] = 1
981 else:
982 cols.append(l.line)
983 ui.write(sep.join(cols), eol)
984 found = True
985 return found
986
987 fstate = {}
988 skip = {}
989 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
990 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
991 found = False
992 follow = opts.get('follow')
993 for st, rev, fns in changeiter:
994 if st == 'window':
995 matches.clear()
996 elif st == 'add':
997 mf = repo.changectx(rev).manifest()
998 matches[rev] = {}
999 for fn in fns:
1000 if fn in skip:
1001 continue
1002 try:
1003 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1004 fstate.setdefault(fn, [])
1005 if follow:
1006 copied = getfile(fn).renamed(mf[fn])
1007 if copied:
1008 copies.setdefault(rev, {})[fn] = copied[0]
1009 except KeyError:
1010 pass
1011 elif st == 'iter':
1012 states = matches[rev].items()
1013 states.sort()
1014 for fn, m in states:
1015 copy = copies.get(rev, {}).get(fn)
1016 if fn in skip:
1017 if copy:
1018 skip[copy] = True
1019 continue
1020 if fn in prev or fstate[fn]:
1021 r = display(fn, rev, m, fstate[fn])
1022 found = found or r
1023 if r and not opts['all']:
1024 skip[fn] = True
1025 if copy:
1026 skip[copy] = True
1027 fstate[fn] = m
1028 if copy:
1029 fstate[copy] = m
1030 prev[fn] = rev
1031
1032 fstate = fstate.items()
1033 fstate.sort()
1034 for fn, state in fstate:
1035 if fn in skip:
1036 continue
1037 if fn not in copies.get(prev[fn], {}):
1038 found = display(fn, rev, {}, state) or found
1039 return (not found and 1) or 0
1040
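# Illustrative, self-contained sketch (not part of this changeset): the same
# SequenceMatcher technique difflinestates() uses inside grep above, applied to
# plain strings so the "+"/"-" states printed by --all are easy to see.
import difflib

def _diff_states_sketch(old, new):
    out = []
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            out.extend([('+', l) for l in new[blo:bhi]])
        elif tag == 'delete':
            out.extend([('-', l) for l in old[alo:ahi]])
        elif tag == 'replace':
            out.extend([('-', l) for l in old[alo:ahi]])
            out.extend([('+', l) for l in new[blo:bhi]])
    return out

# _diff_states_sketch(['foo()', 'bar()'], ['bar()', 'foo(1)'])
#   -> [('-', 'foo()'), ('+', 'foo(1)')]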
1041 def heads(ui, repo, *branchrevs, **opts):
1042 """show current repository heads or show branch heads
1043
1044 With no arguments, show all repository head changesets.
1045
1046 If branch or revision names are given this will show the heads of
1047 the specified branches or the branches those revisions are tagged
1048 with.
1049
1050 Repository "heads" are changesets that don't have child
1051 changesets. They are where development generally takes place and
1052 are the usual targets for update and merge operations.
1053
1054 Branch heads are changesets that have a given branch tag, but have
1055 no child changesets with that tag. They are usually where
1056 development on the given branch takes place.
1057 """
1058 if opts['rev']:
1059 start = repo.lookup(opts['rev'])
1060 else:
1061 start = None
1062 if not branchrevs:
1063 # Assume we're looking for repo-wide heads if no revs were specified.
1064 heads = repo.heads(start)
1065 else:
1066 heads = []
1067 visitedset = util.set()
1068 for branchrev in branchrevs:
1069 branch = repo.changectx(branchrev).branch()
1070 if branch in visitedset:
1071 continue
1072 visitedset.add(branch)
1073 bheads = repo.branchheads(branch, start)
1074 if not bheads:
1075 if branch != branchrev:
1076 ui.warn(_("no changes on branch %s containing %s are "
1077 "reachable from %s\n")
1078 % (branch, branchrev, opts['rev']))
1079 else:
1080 ui.warn(_("no changes on branch %s are reachable from %s\n")
1081 % (branch, opts['rev']))
1082 heads.extend(bheads)
1083 if not heads:
1084 return 1
1085 displayer = cmdutil.show_changeset(ui, repo, opts)
1086 for n in heads:
1087 displayer.show(changenode=n)
1088
1089 def help_(ui, name=None, with_version=False):
1090 """show help for a command, extension, or list of commands
1091
1092 With no arguments, print a list of commands and short help.
1093
1094 Given a command name, print help for that command.
1095
1096 Given an extension name, print help for that extension, and the
1097 commands it provides."""
1098 option_lists = []
1099
1100 def addglobalopts(aliases):
1101 if ui.verbose:
1102 option_lists.append((_("global options:"), globalopts))
1103 if name == 'shortlist':
1104 option_lists.append((_('use "hg help" for the full list '
1105 'of commands'), ()))
1106 else:
1107 if name == 'shortlist':
1108 msg = _('use "hg help" for the full list of commands '
1109 'or "hg -v" for details')
1110 elif aliases:
1111 msg = _('use "hg -v help%s" to show aliases and '
1112 'global options') % (name and " " + name or "")
1113 else:
1114 msg = _('use "hg -v help %s" to show global options') % name
1115 option_lists.append((msg, ()))
1116
1117 def helpcmd(name):
1118 if with_version:
1119 version_(ui)
1120 ui.write('\n')
1121 aliases, i = cmdutil.findcmd(ui, name, table)
1122 # synopsis
1123 ui.write("%s\n\n" % i[2])
1124
1125 # description
1126 doc = i[0].__doc__
1127 if not doc:
1128 doc = _("(No help text available)")
1129 if ui.quiet:
1130 doc = doc.splitlines(0)[0]
1131 ui.write("%s\n" % doc.rstrip())
1132
1133 if not ui.quiet:
1134 # aliases
1135 if len(aliases) > 1:
1136 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1137
1138 # options
1139 if i[1]:
1140 option_lists.append((_("options:\n"), i[1]))
1141
1142 addglobalopts(False)
1143
1144 def helplist(header, select=None):
1145 h = {}
1146 cmds = {}
1147 for c, e in table.items():
1148 f = c.split("|", 1)[0]
1149 if select and not select(f):
1150 continue
1151 if name == "shortlist" and not f.startswith("^"):
1152 continue
1153 f = f.lstrip("^")
1154 if not ui.debugflag and f.startswith("debug"):
1155 continue
1156 doc = e[0].__doc__
1157 if not doc:
1158 doc = _("(No help text available)")
1159 h[f] = doc.splitlines(0)[0].rstrip()
1160 cmds[f] = c.lstrip("^")
1161
1162 if not h:
1163 ui.status(_('no commands defined\n'))
1164 return
1165
1166 ui.status(header)
1167 fns = h.keys()
1168 fns.sort()
1169 m = max(map(len, fns))
1170 for f in fns:
1171 if ui.verbose:
1172 commands = cmds[f].replace("|",", ")
1173 ui.write(" %s:\n %s\n"%(commands, h[f]))
1174 else:
1175 ui.write(' %-*s %s\n' % (m, f, h[f]))
1176
1177 if not ui.quiet:
1178 addglobalopts(True)
1179
1180 def helptopic(name):
1181 v = None
1182 for i in help.helptable:
1183 l = i.split('|')
1184 if name in l:
1185 v = i
1186 header = l[-1]
1187 if not v:
1188 raise cmdutil.UnknownCommand(name)
1189
1190 # description
1191 doc = help.helptable[v]
1192 if not doc:
1193 doc = _("(No help text available)")
1194 if callable(doc):
1195 doc = doc()
1196
1197 ui.write("%s\n" % header)
1198 ui.write("%s\n" % doc.rstrip())
1199
1200 def helpext(name):
1201 try:
1202 mod = extensions.find(name)
1203 except KeyError:
1204 raise cmdutil.UnknownCommand(name)
1205
1206 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1207 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1208 for d in doc[1:]:
1209 ui.write(d, '\n')
1210
1211 ui.status('\n')
1212
1213 try:
1214 ct = mod.cmdtable
1215 except AttributeError:
1216 ct = {}
1217
1218 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1219 helplist(_('list of commands:\n\n'), modcmds.has_key)
1220
1221 if name and name != 'shortlist':
1222 i = None
1223 for f in (helpcmd, helptopic, helpext):
1224 try:
1225 f(name)
1226 i = None
1227 break
1228 except cmdutil.UnknownCommand, inst:
1229 i = inst
1230 if i:
1231 raise i
1232
1233 else:
1234 # program name
1235 if ui.verbose or with_version:
1236 version_(ui)
1237 else:
1238 ui.status(_("Mercurial Distributed SCM\n"))
1239 ui.status('\n')
1240
1241 # list of commands
1242 if name == "shortlist":
1243 header = _('basic commands:\n\n')
1244 else:
1245 header = _('list of commands:\n\n')
1246
1247 helplist(header)
1248
1249 # list all option lists
1250 opt_output = []
1251 for title, options in option_lists:
1252 opt_output.append(("\n%s" % title, None))
1253 for shortopt, longopt, default, desc in options:
1254 if "DEPRECATED" in desc and not ui.verbose: continue
1255 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1256 longopt and " --%s" % longopt),
1257 "%s%s" % (desc,
1258 default
1259 and _(" (default: %s)") % default
1260 or "")))
1261
1262 if opt_output:
1263 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1264 for first, second in opt_output:
1265 if second:
1266 ui.write(" %-*s %s\n" % (opts_len, first, second))
1267 else:
1268 ui.write("%s\n" % first)
1269
1270 def identify(ui, repo, source=None,
1271 rev=None, num=None, id=None, branch=None, tags=None):
1272 """identify the working copy or specified revision
1273
1274 With no revision, print a summary of the current state of the repo.
1275
1276 With a path, do a lookup in another repository.
1277
1278 This summary identifies the repository state using one or two parent
1279 hash identifiers, followed by a "+" if there are uncommitted changes
1280 in the working directory, a list of tags for this revision and a branch
1281 name for non-default branches.
1282 """
1283
1284 if not repo and not source:
1285 raise util.Abort(_("There is no Mercurial repository here "
1286 "(.hg not found)"))
1287
1288 hexfunc = ui.debugflag and hex or short
1289 default = not (num or id or branch or tags)
1290 output = []
1291
1292 if source:
1293 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1294 srepo = hg.repository(ui, source)
1295 if not rev and revs:
1296 rev = revs[0]
1297 if not rev:
1298 rev = "tip"
1299 if num or branch or tags:
1300 raise util.Abort(
1301 "can't query remote revision number, branch, or tags")
1302 output = [hexfunc(srepo.lookup(rev))]
1303 elif not rev:
1304 ctx = repo.workingctx()
1305 parents = ctx.parents()
1306 changed = False
1307 if default or id or num:
1308 changed = ctx.files() + ctx.deleted()
1309 if default or id:
1310 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1311 (changed) and "+" or "")]
1312 if num:
1313 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1314 (changed) and "+" or ""))
1315 else:
1316 ctx = repo.changectx(rev)
1317 if default or id:
1318 output = [hexfunc(ctx.node())]
1319 if num:
1320 output.append(str(ctx.rev()))
1321
1322 if not source and default and not ui.quiet:
1323 b = util.tolocal(ctx.branch())
1324 if b != 'default':
1325 output.append("(%s)" % b)
1326
1327 # multiple tags for a single parent separated by '/'
1328 t = "/".join(ctx.tags())
1329 if t:
1330 output.append(t)
1331
1332 if branch:
1333 output.append(util.tolocal(ctx.branch()))
1334
1335 if tags:
1336 output.extend(ctx.tags())
1337
1338 ui.write("%s\n" % ' '.join(output))
1339
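# Illustrative sketch (not part of this changeset): the "cond and a or b" idiom
# used above for hexfunc and the "+" dirty marker predates Python 2.5's
# conditional expression and only works when the "a" branch is truthy.
def _and_or_sketch(debugflag, node, dirty):
    # stand-ins for hex (40 digits) and short (12 digits) from mercurial.node
    hexfunc = debugflag and (lambda n: n) or (lambda n: n[:12])
    return "%s%s" % (hexfunc(node), dirty and "+" or "")

# _and_or_sketch(False, 'a' * 40, True) -> 'aaaaaaaaaaaa+'
# _and_or_sketch(True, 'a' * 40, False) -> the full 40-character id, no "+"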
1340 def import_(ui, repo, patch1, *patches, **opts):
1341 """import an ordered set of patches
1342
1343 Import a list of patches and commit them individually.
1344
1345 If there are outstanding changes in the working directory, import
1346 will abort unless given the -f flag.
1347
1348 You can import a patch straight from a mail message. Even patches
1349 as attachments work (body part must be type text/plain or
1350 text/x-patch to be used). From and Subject headers of email
1351 message are used as default committer and commit message. All
1352 text/plain body parts before first diff are added to commit
1353 message.
1354
1355 If the imported patch was generated by hg export, user and description
1356 from patch override values from message headers and body. Values
1357 given on command line with -m and -u override these.
1358
1359 If --exact is specified, import will set the working directory
1360 to the parent of each patch before applying it, and will abort
1361 if the resulting changeset has a different ID than the one
1362 recorded in the patch. This may happen due to character set
1363 problems or other deficiencies in the text patch format.
1364
1365 To read a patch from standard input, use patch name "-".
1366 """
1367 patches = (patch1,) + patches
1368
1369 if opts.get('exact') or not opts['force']:
1370 cmdutil.bail_if_changed(repo)
1371
1372 d = opts["base"]
1373 strip = opts["strip"]
1374 wlock = lock = None
1375 try:
1376 wlock = repo.wlock()
1377 lock = repo.lock()
1378 for p in patches:
1379 pf = os.path.join(d, p)
1380
1381 if pf == '-':
1382 ui.status(_("applying patch from stdin\n"))
1383 data = patch.extract(ui, sys.stdin)
1384 else:
1385 ui.status(_("applying %s\n") % p)
1386 if os.path.exists(pf):
1387 data = patch.extract(ui, file(pf, 'rb'))
1388 else:
1389 data = patch.extract(ui, urllib.urlopen(pf))
1390 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1391
1392 if tmpname is None:
1393 raise util.Abort(_('no diffs found'))
1394
1395 try:
1396 cmdline_message = cmdutil.logmessage(opts)
1397 if cmdline_message:
1398 # pickup the cmdline msg
1399 message = cmdline_message
1400 elif message:
1401 # pickup the patch msg
1402 message = message.strip()
1403 else:
1404 # launch the editor
1405 message = None
1406 ui.debug(_('message:\n%s\n') % message)
1407
1408 wp = repo.workingctx().parents()
1409 if opts.get('exact'):
1410 if not nodeid or not p1:
1411 raise util.Abort(_('not a mercurial patch'))
1412 p1 = repo.lookup(p1)
1413 p2 = repo.lookup(p2 or hex(nullid))
1414
1415 if p1 != wp[0].node():
1416 hg.clean(repo, p1)
1417 repo.dirstate.setparents(p1, p2)
1418 elif p2:
1419 try:
1420 p1 = repo.lookup(p1)
1421 p2 = repo.lookup(p2)
1422 if p1 == wp[0].node():
1423 repo.dirstate.setparents(p1, p2)
1424 except hg.RepoError:
1425 pass
1426 if opts.get('exact') or opts.get('import_branch'):
1427 repo.dirstate.setbranch(branch or 'default')
1428
1429 files = {}
1430 try:
1431 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1432 files=files)
1433 finally:
1434 files = patch.updatedir(ui, repo, files)
1435 n = repo.commit(files, message, user, date)
1435 n = repo.commit(files, message, user, date)
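# With --exact, the freshly created changeset must reproduce the node ID
# recorded in the patch header; otherwise the commit is rolled back below.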
1436 if opts.get('exact'):
1436 if opts.get('exact'):
1437 if hex(n) != nodeid:
1437 if hex(n) != nodeid:
1438 repo.rollback()
1438 repo.rollback()
1439 raise util.Abort(_('patch is damaged'
1439 raise util.Abort(_('patch is damaged'
1440 ' or loses information'))
1440 ' or loses information'))
1441 finally:
1441 finally:
1442 os.unlink(tmpname)
1442 os.unlink(tmpname)
1443 finally:
1443 finally:
1444 del lock, wlock
1444 del lock, wlock
1445
1445
1446 def incoming(ui, repo, source="default", **opts):
1446 def incoming(ui, repo, source="default", **opts):
1447 """show new changesets found in source
1447 """show new changesets found in source
1448
1448
1449 Show new changesets found in the specified path/URL or the default
1449 Show new changesets found in the specified path/URL or the default
1450 pull location. These are the changesets that would be pulled if a pull
1450 pull location. These are the changesets that would be pulled if a pull
1451 was requested.
1451 was requested.
1452
1452
1453 For remote repository, using --bundle avoids downloading the changesets
1453 For remote repository, using --bundle avoids downloading the changesets
1454 twice if the incoming is followed by a pull.
1454 twice if the incoming is followed by a pull.
1455
1455
1456 See pull for valid source format details.
1456 See pull for valid source format details.
1457 """
1457 """
1458 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1458 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1459 cmdutil.setremoteconfig(ui, opts)
1459 cmdutil.setremoteconfig(ui, opts)
1460
1460
1461 other = hg.repository(ui, source)
1461 other = hg.repository(ui, source)
1462 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1462 ui.status(_('comparing with %s\n') % util.hidepassword(source))
1463 if revs:
1463 if revs:
1464 revs = [other.lookup(rev) for rev in revs]
1464 revs = [other.lookup(rev) for rev in revs]
1465 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1465 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1466 if not incoming:
1466 if not incoming:
1467 try:
1467 try:
1468 os.unlink(opts["bundle"])
1468 os.unlink(opts["bundle"])
1469 except:
1469 except:
1470 pass
1470 pass
1471 ui.status(_("no changes found\n"))
1471 ui.status(_("no changes found\n"))
1472 return 1
1472 return 1
1473
1473
1474 cleanup = None
1474 cleanup = None
1475 try:
1475 try:
1476 fname = opts["bundle"]
1476 fname = opts["bundle"]
1477 if fname or not other.local():
1477 if fname or not other.local():
1478 # create a bundle (uncompressed if other repo is not local)
1478 # create a bundle (uncompressed if other repo is not local)
1479 if revs is None:
1479 if revs is None:
1480 cg = other.changegroup(incoming, "incoming")
1480 cg = other.changegroup(incoming, "incoming")
1481 else:
1481 else:
1482 cg = other.changegroupsubset(incoming, revs, 'incoming')
1482 cg = other.changegroupsubset(incoming, revs, 'incoming')
1483 bundletype = other.local() and "HG10BZ" or "HG10UN"
1483 bundletype = other.local() and "HG10BZ" or "HG10UN"
1484 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1484 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1485 # keep written bundle?
1485 # keep written bundle?
1486 if opts["bundle"]:
1486 if opts["bundle"]:
1487 cleanup = None
1487 cleanup = None
1488 if not other.local():
1488 if not other.local():
1489 # use the created uncompressed bundlerepo
1489 # use the created uncompressed bundlerepo
1490 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1490 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1491
1491
1492 o = other.changelog.nodesbetween(incoming, revs)[0]
1492 o = other.changelog.nodesbetween(incoming, revs)[0]
1493 if opts['newest_first']:
1493 if opts['newest_first']:
1494 o.reverse()
1494 o.reverse()
1495 displayer = cmdutil.show_changeset(ui, other, opts)
1495 displayer = cmdutil.show_changeset(ui, other, opts)
1496 for n in o:
1496 for n in o:
1497 parents = [p for p in other.changelog.parents(n) if p != nullid]
1497 parents = [p for p in other.changelog.parents(n) if p != nullid]
1498 if opts['no_merges'] and len(parents) == 2:
1498 if opts['no_merges'] and len(parents) == 2:
1499 continue
1499 continue
1500 displayer.show(changenode=n)
1500 displayer.show(changenode=n)
1501 finally:
1501 finally:
1502 if hasattr(other, 'close'):
1502 if hasattr(other, 'close'):
1503 other.close()
1503 other.close()
1504 if cleanup:
1504 if cleanup:
1505 os.unlink(cleanup)
1505 os.unlink(cleanup)
1506
1506
1507 def init(ui, dest=".", **opts):
1507 def init(ui, dest=".", **opts):
1508 """create a new repository in the given directory
1508 """create a new repository in the given directory
1509
1509
1510 Initialize a new repository in the given directory. If the given
1510 Initialize a new repository in the given directory. If the given
1511 directory does not exist, it is created.
1511 directory does not exist, it is created.
1512
1512
1513 If no directory is given, the current directory is used.
1513 If no directory is given, the current directory is used.
1514
1514
1515 It is possible to specify an ssh:// URL as the destination.
1515 It is possible to specify an ssh:// URL as the destination.
1516 Look at the help text for the pull command for important details
1516 Look at the help text for the pull command for important details
1517 about ssh:// URLs.
1517 about ssh:// URLs.
1518 """
1518 """
1519 cmdutil.setremoteconfig(ui, opts)
1519 cmdutil.setremoteconfig(ui, opts)
1520 hg.repository(ui, dest, create=1)
1520 hg.repository(ui, dest, create=1)
1521
1521
1522 def locate(ui, repo, *pats, **opts):
1522 def locate(ui, repo, *pats, **opts):
1523 """locate files matching specific patterns
1523 """locate files matching specific patterns
1524
1524
1525 Print all files under Mercurial control whose names match the
1525 Print all files under Mercurial control whose names match the
1526 given patterns.
1526 given patterns.
1527
1527
1528 This command searches the entire repository by default. To search
1528 This command searches the entire repository by default. To search
1529 just the current directory and its subdirectories, use
1529 just the current directory and its subdirectories, use
1530 "--include .".
1530 "--include .".
1531
1531
1532 If no patterns are given to match, this command prints all file
1532 If no patterns are given to match, this command prints all file
1533 names.
1533 names.
1534
1534
1535 If you want to feed the output of this command into the "xargs"
1535 If you want to feed the output of this command into the "xargs"
1536 command, use the "-0" option to both this command and "xargs".
1536 command, use the "-0" option to both this command and "xargs".
1537 This will avoid the problem of "xargs" treating single filenames
1537 This will avoid the problem of "xargs" treating single filenames
1538 that contain white space as multiple filenames.
1538 that contain white space as multiple filenames.
1539 """
1539 """
1540 end = opts['print0'] and '\0' or '\n'
1540 end = opts['print0'] and '\0' or '\n'
1541 rev = opts['rev']
1541 rev = opts['rev']
1542 if rev:
1542 if rev:
1543 node = repo.lookup(rev)
1543 node = repo.lookup(rev)
1544 else:
1544 else:
1545 node = None
1545 node = None
1546
1546
1547 ret = 1
1547 ret = 1
1548 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1548 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1549 badmatch=util.always,
1549 badmatch=util.always,
1550 default='relglob'):
1550 default='relglob'):
1551 if src == 'b':
1551 if src == 'b':
1552 continue
1552 continue
1553 if not node and abs not in repo.dirstate:
1553 if not node and abs not in repo.dirstate:
1554 continue
1554 continue
1555 if opts['fullpath']:
1555 if opts['fullpath']:
1556 ui.write(os.path.join(repo.root, abs), end)
1556 ui.write(os.path.join(repo.root, abs), end)
1557 else:
1557 else:
1558 ui.write(((pats and rel) or abs), end)
1558 ui.write(((pats and rel) or abs), end)
1559 ret = 0
1559 ret = 0
1560
1560
1561 return ret
1561 return ret
1562
1562
1563 def log(ui, repo, *pats, **opts):
1563 def log(ui, repo, *pats, **opts):
1564 """show revision history of entire repository or files
1564 """show revision history of entire repository or files
1565
1565
1566 Print the revision history of the specified files or the entire
1566 Print the revision history of the specified files or the entire
1567 project.
1567 project.
1568
1568
1569 File history is shown without following rename or copy history of
1569 File history is shown without following rename or copy history of
1570 files. Use -f/--follow with a file name to follow history across
1570 files. Use -f/--follow with a file name to follow history across
1571 renames and copies. --follow without a file name will only show
1571 renames and copies. --follow without a file name will only show
1572 ancestors or descendants of the starting revision. --follow-first
1572 ancestors or descendants of the starting revision. --follow-first
1573 only follows the first parent of merge revisions.
1573 only follows the first parent of merge revisions.
1574
1574
1575 If no revision range is specified, the default is tip:0 unless
1575 If no revision range is specified, the default is tip:0 unless
1576 --follow is set, in which case the working directory parent is
1576 --follow is set, in which case the working directory parent is
1577 used as the starting revision.
1577 used as the starting revision.
1578
1578
1579 By default this command prints the changeset id and hash, tags,
1580 non-trivial parents, user, date and time, and a summary for each
1581 commit. When the -v/--verbose switch is used, the list of changed
1582 files and the full commit message are shown.
1583
1583
1584 NOTE: log -p may generate unexpected diff output for merge
1584 NOTE: log -p may generate unexpected diff output for merge
1585 changesets, as it will compare the merge changeset against its
1585 changesets, as it will compare the merge changeset against its
1586 first parent only. Also, the files: list will only reflect files
1586 first parent only. Also, the files: list will only reflect files
1587 that are different from BOTH parents.
1587 that are different from BOTH parents.
1588
1588
1589 """
1589 """
1590
1590
1591 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1591 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1592 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1592 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1593
1593
1594 if opts['limit']:
1594 if opts['limit']:
1595 try:
1595 try:
1596 limit = int(opts['limit'])
1596 limit = int(opts['limit'])
1597 except ValueError:
1597 except ValueError:
1598 raise util.Abort(_('limit must be a positive integer'))
1598 raise util.Abort(_('limit must be a positive integer'))
1599 if limit <= 0: raise util.Abort(_('limit must be positive'))
1599 if limit <= 0: raise util.Abort(_('limit must be positive'))
1600 else:
1600 else:
1601 limit = sys.maxint
1601 limit = sys.maxint
1602 count = 0
1602 count = 0
1603
1603
1604 if opts['copies'] and opts['rev']:
1604 if opts['copies'] and opts['rev']:
1605 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1605 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1606 else:
1606 else:
1607 endrev = repo.changelog.count()
1607 endrev = repo.changelog.count()
1608 rcache = {}
1608 rcache = {}
1609 ncache = {}
1609 ncache = {}
1610 dcache = []
1610 dcache = []
1611 def getrenamed(fn, rev, man):
1611 def getrenamed(fn, rev, man):
1612 '''looks up all renames for a file (up to endrev) the first
1612 '''looks up all renames for a file (up to endrev) the first
1613 time the file is given. It indexes on the changerev and only
1613 time the file is given. It indexes on the changerev and only
1614 parses the manifest if linkrev != changerev.
1614 parses the manifest if linkrev != changerev.
1615 Returns rename info for fn at changerev rev.'''
1615 Returns rename info for fn at changerev rev.'''
1616 if fn not in rcache:
1616 if fn not in rcache:
1617 rcache[fn] = {}
1617 rcache[fn] = {}
1618 ncache[fn] = {}
1618 ncache[fn] = {}
1619 fl = repo.file(fn)
1619 fl = repo.file(fn)
1620 for i in xrange(fl.count()):
1620 for i in xrange(fl.count()):
1621 node = fl.node(i)
1621 node = fl.node(i)
1622 lr = fl.linkrev(node)
1622 lr = fl.linkrev(node)
1623 renamed = fl.renamed(node)
1623 renamed = fl.renamed(node)
1624 rcache[fn][lr] = renamed
1624 rcache[fn][lr] = renamed
1625 if renamed:
1625 if renamed:
1626 ncache[fn][node] = renamed
1626 ncache[fn][node] = renamed
1627 if lr >= endrev:
1627 if lr >= endrev:
1628 break
1628 break
1629 if rev in rcache[fn]:
1629 if rev in rcache[fn]:
1630 return rcache[fn][rev]
1630 return rcache[fn][rev]
1631 mr = repo.manifest.rev(man)
1631 mr = repo.manifest.rev(man)
1632 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1632 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1633 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1633 return ncache[fn].get(repo.manifest.find(man, fn)[0])
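# dcache holds the delta of the most recently read manifest, so repeated
# lookups against the same changeset avoid re-reading it; this fast path is
# only taken when the manifest's parent is the immediately preceding
# revision (checked above).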
1634 if not dcache or dcache[0] != man:
1634 if not dcache or dcache[0] != man:
1635 dcache[:] = [man, repo.manifest.readdelta(man)]
1635 dcache[:] = [man, repo.manifest.readdelta(man)]
1636 if fn in dcache[1]:
1636 if fn in dcache[1]:
1637 return ncache[fn].get(dcache[1][fn])
1637 return ncache[fn].get(dcache[1][fn])
1638 return None
1638 return None
1639
1639
1640 df = False
1640 df = False
1641 if opts["date"]:
1641 if opts["date"]:
1642 df = util.matchdate(opts["date"])
1642 df = util.matchdate(opts["date"])
1643
1643
1644 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1644 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1645 for st, rev, fns in changeiter:
1645 for st, rev, fns in changeiter:
1646 if st == 'add':
1646 if st == 'add':
1647 changenode = repo.changelog.node(rev)
1647 changenode = repo.changelog.node(rev)
1648 parents = [p for p in repo.changelog.parentrevs(rev)
1648 parents = [p for p in repo.changelog.parentrevs(rev)
1649 if p != nullrev]
1649 if p != nullrev]
1650 if opts['no_merges'] and len(parents) == 2:
1650 if opts['no_merges'] and len(parents) == 2:
1651 continue
1651 continue
1652 if opts['only_merges'] and len(parents) != 2:
1652 if opts['only_merges'] and len(parents) != 2:
1653 continue
1653 continue
1654
1654
1655 if df:
1655 if df:
1656 changes = get(rev)
1656 changes = get(rev)
1657 if not df(changes[2][0]):
1657 if not df(changes[2][0]):
1658 continue
1658 continue
1659
1659
1660 if opts['keyword']:
1660 if opts['keyword']:
1661 changes = get(rev)
1661 changes = get(rev)
1662 miss = 0
1662 miss = 0
1663 for k in [kw.lower() for kw in opts['keyword']]:
1663 for k in [kw.lower() for kw in opts['keyword']]:
1664 if not (k in changes[1].lower() or
1664 if not (k in changes[1].lower() or
1665 k in changes[4].lower() or
1665 k in changes[4].lower() or
1666 k in " ".join(changes[3]).lower()):
1666 k in " ".join(changes[3]).lower()):
1667 miss = 1
1667 miss = 1
1668 break
1668 break
1669 if miss:
1669 if miss:
1670 continue
1670 continue
1671
1671
1672 copies = []
1672 copies = []
1673 if opts.get('copies') and rev:
1673 if opts.get('copies') and rev:
1674 mf = get(rev)[0]
1674 mf = get(rev)[0]
1675 for fn in get(rev)[3]:
1675 for fn in get(rev)[3]:
1676 rename = getrenamed(fn, rev, mf)
1676 rename = getrenamed(fn, rev, mf)
1677 if rename:
1677 if rename:
1678 copies.append((fn, rename[0]))
1678 copies.append((fn, rename[0]))
1679 displayer.show(rev, changenode, copies=copies)
1679 displayer.show(rev, changenode, copies=copies)
1680 elif st == 'iter':
1680 elif st == 'iter':
1681 if count == limit: break
1681 if count == limit: break
1682 if displayer.flush(rev):
1682 if displayer.flush(rev):
1683 count += 1
1683 count += 1
1684
1684
1685 def manifest(ui, repo, node=None, rev=None):
1685 def manifest(ui, repo, node=None, rev=None):
1686 """output the current or given revision of the project manifest
1686 """output the current or given revision of the project manifest
1687
1687
1688 Print a list of version controlled files for the given revision.
1689 If no revision is given, the first parent of the working directory is
1690 used, or tip if no revision is checked out.
1691
1692 The manifest is the list of files being version controlled at that
1693 revision.
1694
1695 With the -v flag, print file permissions, symlink and executable bits.
1696 With the --debug flag, print file revision hashes.
1697 """
1697 """
1698
1698
1699 if rev and node:
1699 if rev and node:
1700 raise util.Abort(_("please specify just one revision"))
1700 raise util.Abort(_("please specify just one revision"))
1701
1701
1702 if not node:
1702 if not node:
1703 node = rev
1703 node = rev
1704
1704
1705 m = repo.changectx(node).manifest()
1705 m = repo.changectx(node).manifest()
1706 files = m.keys()
1706 files = m.keys()
1707 files.sort()
1707 files.sort()
1708
1708
1709 for f in files:
1709 for f in files:
1710 if ui.debugflag:
1710 if ui.debugflag:
1711 ui.write("%40s " % hex(m[f]))
1711 ui.write("%40s " % hex(m[f]))
1712 if ui.verbose:
1712 if ui.verbose:
1713 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1713 type = m.execf(f) and "*" or m.linkf(f) and "@" or " "
1714 perm = m.execf(f) and "755" or "644"
1714 perm = m.execf(f) and "755" or "644"
1715 ui.write("%3s %1s " % (perm, type))
1715 ui.write("%3s %1s " % (perm, type))
1716 ui.write("%s\n" % f)
1716 ui.write("%s\n" % f)
1717
1717
1718 def merge(ui, repo, node=None, force=None, rev=None):
1718 def merge(ui, repo, node=None, force=None, rev=None):
1719 """merge working directory with another revision
1719 """merge working directory with another revision
1720
1720
1721 Merge the contents of the current working directory and the
1721 Merge the contents of the current working directory and the
1722 requested revision. Files that changed between either parent are
1722 requested revision. Files that changed between either parent are
1723 marked as changed for the next commit and a commit must be
1723 marked as changed for the next commit and a commit must be
1724 performed before any further updates are allowed.
1724 performed before any further updates are allowed.
1725
1725
1726 If no revision is specified, the working directory's parent is a
1726 If no revision is specified, the working directory's parent is a
1727 head revision, and the repository contains exactly one other head,
1727 head revision, and the repository contains exactly one other head,
1728 the other head is merged with by default. Otherwise, an explicit
1728 the other head is merged with by default. Otherwise, an explicit
1729 revision to merge with must be provided.
1729 revision to merge with must be provided.
1730 """
1730 """
1731
1731
1732 if rev and node:
1732 if rev and node:
1733 raise util.Abort(_("please specify just one revision"))
1733 raise util.Abort(_("please specify just one revision"))
1734 if not node:
1734 if not node:
1735 node = rev
1735 node = rev
1736
1736
1737 if not node:
1737 if not node:
1738 heads = repo.heads()
1738 heads = repo.heads()
1739 if len(heads) > 2:
1739 if len(heads) > 2:
1740 raise util.Abort(_('repo has %d heads - '
1740 raise util.Abort(_('repo has %d heads - '
1741 'please merge with an explicit rev') %
1741 'please merge with an explicit rev') %
1742 len(heads))
1742 len(heads))
1743 parent = repo.dirstate.parents()[0]
1743 parent = repo.dirstate.parents()[0]
1744 if len(heads) == 1:
1744 if len(heads) == 1:
1745 msg = _('there is nothing to merge')
1745 msg = _('there is nothing to merge')
1746 if parent != repo.lookup(repo.workingctx().branch()):
1746 if parent != repo.lookup(repo.workingctx().branch()):
1747 msg = _('%s - use "hg update" instead') % msg
1747 msg = _('%s - use "hg update" instead') % msg
1748 raise util.Abort(msg)
1748 raise util.Abort(msg)
1749
1749
1750 if parent not in heads:
1750 if parent not in heads:
1751 raise util.Abort(_('working dir not at a head rev - '
1751 raise util.Abort(_('working dir not at a head rev - '
1752 'use "hg update" or merge with an explicit rev'))
1752 'use "hg update" or merge with an explicit rev'))
1753 node = parent == heads[0] and heads[-1] or heads[0]
1753 node = parent == heads[0] and heads[-1] or heads[0]
1754 return hg.merge(repo, node, force=force)
1754 return hg.merge(repo, node, force=force)
1755
1755
1756 def outgoing(ui, repo, dest=None, **opts):
1756 def outgoing(ui, repo, dest=None, **opts):
1757 """show changesets not found in destination
1757 """show changesets not found in destination
1758
1758
1759 Show changesets not found in the specified destination repository or
1759 Show changesets not found in the specified destination repository or
1760 the default push location. These are the changesets that would be pushed
1760 the default push location. These are the changesets that would be pushed
1761 if a push was requested.
1761 if a push was requested.
1762
1762
1763 See pull for valid destination format details.
1763 See pull for valid destination format details.
1764 """
1764 """
1765 dest, revs, checkout = hg.parseurl(
1765 dest, revs, checkout = hg.parseurl(
1766 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1766 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1767 cmdutil.setremoteconfig(ui, opts)
1767 cmdutil.setremoteconfig(ui, opts)
1768 if revs:
1768 if revs:
1769 revs = [repo.lookup(rev) for rev in revs]
1769 revs = [repo.lookup(rev) for rev in revs]
1770
1770
1771 other = hg.repository(ui, dest)
1771 other = hg.repository(ui, dest)
1772 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1772 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
1773 o = repo.findoutgoing(other, force=opts['force'])
1773 o = repo.findoutgoing(other, force=opts['force'])
1774 if not o:
1774 if not o:
1775 ui.status(_("no changes found\n"))
1775 ui.status(_("no changes found\n"))
1776 return 1
1776 return 1
1777 o = repo.changelog.nodesbetween(o, revs)[0]
1777 o = repo.changelog.nodesbetween(o, revs)[0]
1778 if opts['newest_first']:
1778 if opts['newest_first']:
1779 o.reverse()
1779 o.reverse()
1780 displayer = cmdutil.show_changeset(ui, repo, opts)
1780 displayer = cmdutil.show_changeset(ui, repo, opts)
1781 for n in o:
1781 for n in o:
1782 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1782 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1783 if opts['no_merges'] and len(parents) == 2:
1783 if opts['no_merges'] and len(parents) == 2:
1784 continue
1784 continue
1785 displayer.show(changenode=n)
1785 displayer.show(changenode=n)
1786
1786
1787 def parents(ui, repo, file_=None, **opts):
1787 def parents(ui, repo, file_=None, **opts):
1788 """show the parents of the working dir or revision
1788 """show the parents of the working dir or revision
1789
1789
1790 Print the working directory's parent revisions. If a
1791 revision is given via --rev, the parents of that revision
1792 will be printed. If a file argument is given, the revision in
1793 which the file was last changed (before the working directory
1794 revision or the argument to --rev if given) is printed.
1795 """
1795 """
1796 rev = opts.get('rev')
1796 rev = opts.get('rev')
1797 if rev:
1797 if rev:
1798 ctx = repo.changectx(rev)
1798 ctx = repo.changectx(rev)
1799 else:
1799 else:
1800 ctx = repo.workingctx()
1800 ctx = repo.workingctx()
1801
1801
1802 if file_:
1802 if file_:
1803 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1803 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1804 if anypats or len(files) != 1:
1804 if anypats or len(files) != 1:
1805 raise util.Abort(_('can only specify an explicit file name'))
1805 raise util.Abort(_('can only specify an explicit file name'))
1806 file_ = files[0]
1806 file_ = files[0]
1807 filenodes = []
1807 filenodes = []
1808 for cp in ctx.parents():
1808 for cp in ctx.parents():
1809 if not cp:
1809 if not cp:
1810 continue
1810 continue
1811 try:
1811 try:
1812 filenodes.append(cp.filenode(file_))
1812 filenodes.append(cp.filenode(file_))
1813 except revlog.LookupError:
1813 except revlog.LookupError:
1814 pass
1814 pass
1815 if not filenodes:
1815 if not filenodes:
1816 raise util.Abort(_("'%s' not found in manifest!") % file_)
1816 raise util.Abort(_("'%s' not found in manifest!") % file_)
1817 fl = repo.file(file_)
1817 fl = repo.file(file_)
1818 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1818 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1819 else:
1819 else:
1820 p = [cp.node() for cp in ctx.parents()]
1820 p = [cp.node() for cp in ctx.parents()]
1821
1821
1822 displayer = cmdutil.show_changeset(ui, repo, opts)
1822 displayer = cmdutil.show_changeset(ui, repo, opts)
1823 for n in p:
1823 for n in p:
1824 if n != nullid:
1824 if n != nullid:
1825 displayer.show(changenode=n)
1825 displayer.show(changenode=n)
1826
1826
1827 def paths(ui, repo, search=None):
1827 def paths(ui, repo, search=None):
1828 """show definition of symbolic path names
1828 """show definition of symbolic path names
1829
1829
1830 Show the definition of the symbolic path name NAME. If no name is
1831 given, show the definitions of all available names.
1832
1832
1833 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1833 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1834 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1834 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1835 """
1835 """
1836 if search:
1836 if search:
1837 for name, path in ui.configitems("paths"):
1837 for name, path in ui.configitems("paths"):
1838 if name == search:
1838 if name == search:
1839 ui.write("%s\n" % path)
1839 ui.write("%s\n" % path)
1840 return
1840 return
1841 ui.warn(_("not found!\n"))
1841 ui.warn(_("not found!\n"))
1842 return 1
1842 return 1
1843 else:
1843 else:
1844 for name, path in ui.configitems("paths"):
1844 for name, path in ui.configitems("paths"):
1845 ui.write("%s = %s\n" % (name, path))
1845 ui.write("%s = %s\n" % (name, path))
1846
1846
1847 def postincoming(ui, repo, modheads, optupdate, checkout):
1847 def postincoming(ui, repo, modheads, optupdate, checkout):
1848 if modheads == 0:
1848 if modheads == 0:
1849 return
1849 return
1850 if optupdate:
1850 if optupdate:
1851 if modheads <= 1 or checkout:
1851 if modheads <= 1 or checkout:
1852 return hg.update(repo, checkout)
1852 return hg.update(repo, checkout)
1853 else:
1853 else:
1854 ui.status(_("not updating, since new heads added\n"))
1854 ui.status(_("not updating, since new heads added\n"))
1855 if modheads > 1:
1855 if modheads > 1:
1856 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1856 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1857 else:
1857 else:
1858 ui.status(_("(run 'hg update' to get a working copy)\n"))
1858 ui.status(_("(run 'hg update' to get a working copy)\n"))
1859
1859
1860 def pull(ui, repo, source="default", **opts):
1860 def pull(ui, repo, source="default", **opts):
1861 """pull changes from the specified source
1861 """pull changes from the specified source
1862
1862
1863 Pull changes from a remote repository to a local one.
1863 Pull changes from a remote repository to a local one.
1864
1864
1865 This finds all changes from the repository at the specified path
1865 This finds all changes from the repository at the specified path
1866 or URL and adds them to the local repository. By default, this
1866 or URL and adds them to the local repository. By default, this
1867 does not update the copy of the project in the working directory.
1867 does not update the copy of the project in the working directory.
1868
1868
1869 Valid URLs are of the form:
1869 Valid URLs are of the form:
1870
1870
1871 local/filesystem/path (or file://local/filesystem/path)
1871 local/filesystem/path (or file://local/filesystem/path)
1872 http://[user@]host[:port]/[path]
1872 http://[user@]host[:port]/[path]
1873 https://[user@]host[:port]/[path]
1873 https://[user@]host[:port]/[path]
1874 ssh://[user@]host[:port]/[path]
1874 ssh://[user@]host[:port]/[path]
1875 static-http://host[:port]/[path]
1875 static-http://host[:port]/[path]
1876
1876
1877 Paths in the local filesystem can either point to Mercurial
1877 Paths in the local filesystem can either point to Mercurial
1878 repositories or to bundle files (as created by 'hg bundle' or
1878 repositories or to bundle files (as created by 'hg bundle' or
1879 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1879 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1880 allows access to a Mercurial repository where you simply use a web
1880 allows access to a Mercurial repository where you simply use a web
1881 server to publish the .hg directory as static content.
1881 server to publish the .hg directory as static content.
1882
1882
1883 An optional identifier after # indicates a particular branch, tag,
1883 An optional identifier after # indicates a particular branch, tag,
1884 or changeset to pull.
1884 or changeset to pull.
1885
1885
1886 Some notes about using SSH with Mercurial:
1886 Some notes about using SSH with Mercurial:
1887 - SSH requires an accessible shell account on the destination machine
1888 and a copy of hg in the remote path, or one specified with remotecmd.
1889 - path is relative to the remote user's home directory by default.
1889 - path is relative to the remote user's home directory by default.
1890 Use an extra slash at the start of a path to specify an absolute path:
1890 Use an extra slash at the start of a path to specify an absolute path:
1891 ssh://example.com//tmp/repository
1891 ssh://example.com//tmp/repository
1892 - Mercurial doesn't use its own compression via SSH; the right thing
1892 - Mercurial doesn't use its own compression via SSH; the right thing
1893 to do is to configure it in your ~/.ssh/config, e.g.:
1893 to do is to configure it in your ~/.ssh/config, e.g.:
1894 Host *.mylocalnetwork.example.com
1894 Host *.mylocalnetwork.example.com
1895 Compression no
1895 Compression no
1896 Host *
1896 Host *
1897 Compression yes
1897 Compression yes
1898 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1898 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1899 with the --ssh command line option.
1899 with the --ssh command line option.
1900 """
1900 """
1901 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1901 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1902 cmdutil.setremoteconfig(ui, opts)
1902 cmdutil.setremoteconfig(ui, opts)
1903
1903
1904 other = hg.repository(ui, source)
1904 other = hg.repository(ui, source)
1905 ui.status(_('pulling from %s\n') % util.hidepassword(source))
1905 ui.status(_('pulling from %s\n') % util.hidepassword(source))
1906 if revs:
1906 if revs:
1907 try:
1907 try:
1908 revs = [other.lookup(rev) for rev in revs]
1908 revs = [other.lookup(rev) for rev in revs]
1909 except repo.NoCapability:
1909 except repo.NoCapability:
1910 error = _("Other repository doesn't support revision lookup, "
1910 error = _("Other repository doesn't support revision lookup, "
1911 "so a rev cannot be specified.")
1911 "so a rev cannot be specified.")
1912 raise util.Abort(error)
1912 raise util.Abort(error)
1913
1913
1914 modheads = repo.pull(other, heads=revs, force=opts['force'])
1914 modheads = repo.pull(other, heads=revs, force=opts['force'])
1915 return postincoming(ui, repo, modheads, opts['update'], checkout)
1915 return postincoming(ui, repo, modheads, opts['update'], checkout)
1916
1916
1917 def push(ui, repo, dest=None, **opts):
1917 def push(ui, repo, dest=None, **opts):
1918 """push changes to the specified destination
1918 """push changes to the specified destination
1919
1919
1920 Push changes from the local repository to the given destination.
1920 Push changes from the local repository to the given destination.
1921
1921
1922 This is the symmetrical operation for pull. It helps to move
1923 changes from the current repository to a different one. If the
1924 destination is local, this is identical to a pull in that directory
1925 from the current one.
1926
1927 By default, push will refuse to run if it detects the result would
1928 increase the number of remote heads. This generally indicates that
1929 the client has forgotten to sync and merge before pushing.
1930
1930
1931 Valid URLs are of the form:
1931 Valid URLs are of the form:
1932
1932
1933 local/filesystem/path (or file://local/filesystem/path)
1933 local/filesystem/path (or file://local/filesystem/path)
1934 ssh://[user@]host[:port]/[path]
1934 ssh://[user@]host[:port]/[path]
1935 http://[user@]host[:port]/[path]
1935 http://[user@]host[:port]/[path]
1936 https://[user@]host[:port]/[path]
1936 https://[user@]host[:port]/[path]
1937
1937
1938 An optional identifier after # indicates a particular branch, tag,
1938 An optional identifier after # indicates a particular branch, tag,
1939 or changeset to push.
1939 or changeset to push.
1940
1940
1941 Look at the help text for the pull command for important details
1941 Look at the help text for the pull command for important details
1942 about ssh:// URLs.
1942 about ssh:// URLs.
1943
1943
1944 Pushing to http:// and https:// URLs is only possible if this
1945 feature is explicitly enabled on the remote Mercurial server.
1946 """
1946 """
1947 dest, revs, checkout = hg.parseurl(
1947 dest, revs, checkout = hg.parseurl(
1948 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1948 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1949 cmdutil.setremoteconfig(ui, opts)
1949 cmdutil.setremoteconfig(ui, opts)
1950
1950
1951 other = hg.repository(ui, dest)
1951 other = hg.repository(ui, dest)
1952 ui.status('pushing to %s\n' % util.hidepassword(dest))
1952 ui.status('pushing to %s\n' % util.hidepassword(dest))
1953 if revs:
1953 if revs:
1954 revs = [repo.lookup(rev) for rev in revs]
1954 revs = [repo.lookup(rev) for rev in revs]
1955 r = repo.push(other, opts['force'], revs=revs)
1955 r = repo.push(other, opts['force'], revs=revs)
1956 return r == 0
1956 return r == 0
1957
1957
1958 def rawcommit(ui, repo, *pats, **opts):
1958 def rawcommit(ui, repo, *pats, **opts):
1959 """raw commit interface (DEPRECATED)
1959 """raw commit interface (DEPRECATED)
1960
1960
1961 (DEPRECATED)
1961 (DEPRECATED)
1962 Low-level commit, for use in helper scripts.
1963
1963
1964 This command is not intended to be used by normal users, as it is
1964 This command is not intended to be used by normal users, as it is
1965 primarily useful for importing from other SCMs.
1965 primarily useful for importing from other SCMs.
1966
1966
1967 This command is now deprecated and will be removed in a future
1968 release; please use debugsetparents and commit instead.
1969 """
1969 """
1970
1970
1971 ui.warn(_("(the rawcommit command is deprecated)\n"))
1971 ui.warn(_("(the rawcommit command is deprecated)\n"))
1972
1972
1973 message = cmdutil.logmessage(opts)
1973 message = cmdutil.logmessage(opts)
1974
1974
1975 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1975 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1976 if opts['files']:
1976 if opts['files']:
1977 files += open(opts['files']).read().splitlines()
1977 files += open(opts['files']).read().splitlines()
1978
1978
1979 parents = [repo.lookup(p) for p in opts['parent']]
1979 parents = [repo.lookup(p) for p in opts['parent']]
1980
1980
1981 try:
1981 try:
1982 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1982 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1983 except ValueError, inst:
1983 except ValueError, inst:
1984 raise util.Abort(str(inst))
1984 raise util.Abort(str(inst))
1985
1985
1986 def recover(ui, repo):
1986 def recover(ui, repo):
1987 """roll back an interrupted transaction
1987 """roll back an interrupted transaction
1988
1988
1989 Recover from an interrupted commit or pull.
1989 Recover from an interrupted commit or pull.
1990
1990
1991 This command tries to fix the repository status after an interrupted
1991 This command tries to fix the repository status after an interrupted
1992 operation. It should only be necessary when Mercurial suggests it.
1992 operation. It should only be necessary when Mercurial suggests it.
1993 """
1993 """
1994 if repo.recover():
1994 if repo.recover():
1995 return hg.verify(repo)
1995 return hg.verify(repo)
1996 return 1
1996 return 1
1997
1997
1998 def remove(ui, repo, *pats, **opts):
1998 def remove(ui, repo, *pats, **opts):
1999 """remove the specified files on the next commit
1999 """remove the specified files on the next commit
2000
2000
2001 Schedule the indicated files for removal from the repository.
2001 Schedule the indicated files for removal from the repository.
2002
2002
2003 This only removes files from the current branch, not from the
2003 This only removes files from the current branch, not from the
2004 entire project history. If the files still exist in the working
2004 entire project history. If the files still exist in the working
2005 directory, they will be deleted from it. If invoked with --after,
2005 directory, they will be deleted from it. If invoked with --after,
2006 files are marked as removed, but not actually unlinked unless --force
2006 files are marked as removed, but not actually unlinked unless --force
2007 is also given. Without exact file names, --after will only mark
2007 is also given. Without exact file names, --after will only mark
2008 files as removed if they are no longer in the working directory.
2008 files as removed if they are no longer in the working directory.
2009
2009
2010 This command schedules the files to be removed at the next commit.
2010 This command schedules the files to be removed at the next commit.
2011 To undo a remove before that, see hg revert.
2011 To undo a remove before that, see hg revert.
2012
2012
2013 Modified files and added files are not removed by default. To
2013 Modified files and added files are not removed by default. To
2014 remove them, use the -f/--force option.
2014 remove them, use the -f/--force option.
2015 """
2015 """
2016 if not opts['after'] and not pats:
2016 if not opts['after'] and not pats:
2017 raise util.Abort(_('no files specified'))
2017 raise util.Abort(_('no files specified'))
2018 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2018 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2019 exact = dict.fromkeys(files)
2019 exact = dict.fromkeys(files)
2020 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2020 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2021 modified, added, removed, deleted, unknown = mardu
2021 modified, added, removed, deleted, unknown = mardu
2022 remove, forget = [], []
2022 remove, forget = [], []
2023 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2023 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2024 reason = None
2024 reason = None
2025 if abs in modified and not opts['force']:
2025 if abs in modified and not opts['force']:
2026 reason = _('is modified (use -f to force removal)')
2026 reason = _('is modified (use -f to force removal)')
2027 elif abs in added:
2027 elif abs in added:
2028 if opts['force']:
2028 if opts['force']:
2029 forget.append(abs)
2029 forget.append(abs)
2030 continue
2030 continue
2031 reason = _('has been marked for add (use -f to force removal)')
2031 reason = _('has been marked for add (use -f to force removal)')
2032 exact = 1 # force the message
2032 elif abs not in repo.dirstate:
2033 elif abs not in repo.dirstate:
2033 reason = _('is not managed')
2034 reason = _('is not managed')
2034 elif opts['after'] and not exact and abs not in deleted:
2035 elif opts['after'] and not exact and abs not in deleted:
2035 continue
2036 continue
2036 elif abs in removed:
2037 elif abs in removed:
2037 continue
2038 continue
2038 if reason:
2039 if reason:
2039 if exact:
2040 if exact:
2040 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2041 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2041 else:
2042 else:
2042 if ui.verbose or not exact:
2043 if ui.verbose or not exact:
2043 ui.status(_('removing %s\n') % rel)
2044 ui.status(_('removing %s\n') % rel)
2044 remove.append(abs)
2045 remove.append(abs)
2045 repo.forget(forget)
2046 repo.forget(forget)
2046 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2047 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2047
2048
2048 def rename(ui, repo, *pats, **opts):
2049 def rename(ui, repo, *pats, **opts):
2049 """rename files; equivalent of copy + remove
2050 """rename files; equivalent of copy + remove
2050
2051
2051 Mark dest as copies of sources; mark sources for deletion. If
2052 Mark dest as copies of sources; mark sources for deletion. If
2052 dest is a directory, copies are put in that directory. If dest is
2053 dest is a directory, copies are put in that directory. If dest is
2053 a file, there can only be one source.
2054 a file, there can only be one source.
2054
2055
2055 By default, this command copies the contents of files as they
2056 By default, this command copies the contents of files as they
2056 stand in the working directory. If invoked with --after, the
2057 stand in the working directory. If invoked with --after, the
2057 operation is recorded, but no copying is performed.
2058 operation is recorded, but no copying is performed.
2058
2059
2059 This command takes effect in the next commit. To undo a rename
2060 This command takes effect in the next commit. To undo a rename
2060 before that, see hg revert.
2061 before that, see hg revert.
2061 """
2062 """
2062 wlock = repo.wlock(False)
2063 wlock = repo.wlock(False)
2063 try:
2064 try:
2064 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2065 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2065 finally:
2066 finally:
2066 del wlock
2067 del wlock
2067
2068
2068 def revert(ui, repo, *pats, **opts):
2069 def revert(ui, repo, *pats, **opts):
2069 """restore individual files or dirs to an earlier state
2070 """restore individual files or dirs to an earlier state
2070
2071
2071 (use update -r to check out earlier revisions; revert does not
2072 change the working dir parents)
2073
2074
2074 With no revision specified, revert the named files or directories
2075 With no revision specified, revert the named files or directories
2075 to the contents they had in the parent of the working directory.
2076 to the contents they had in the parent of the working directory.
2076 This restores the contents of the affected files to an unmodified
2077 This restores the contents of the affected files to an unmodified
2077 state and unschedules adds, removes, copies, and renames. If the
2078 state and unschedules adds, removes, copies, and renames. If the
2078 working directory has two parents, you must explicitly specify the
2079 working directory has two parents, you must explicitly specify the
2079 revision to revert to.
2080 revision to revert to.
2080
2081
2081 Using the -r option, revert the given files or directories to their
2082 Using the -r option, revert the given files or directories to their
2082 contents as of a specific revision. This can be helpful to "roll
2083 contents as of a specific revision. This can be helpful to "roll
2083 back" some or all of an earlier change.
2084 back" some or all of an earlier change.
2084
2085
2085 Revert modifies the working directory. It does not commit any
2086 Revert modifies the working directory. It does not commit any
2086 changes, or change the parent of the working directory. If you
2087 changes, or change the parent of the working directory. If you
2087 revert to a revision other than the parent of the working
2088 revert to a revision other than the parent of the working
2088 directory, the reverted files will thus appear modified
2089 directory, the reverted files will thus appear modified
2089 afterwards.
2090 afterwards.
2090
2091
2091 If a file has been deleted, it is restored. If the executable
2092 If a file has been deleted, it is restored. If the executable
2092 mode of a file was changed, it is reset.
2093 mode of a file was changed, it is reset.
2093
2094
2094 If names are given, all files matching the names are reverted.
2095 If names are given, all files matching the names are reverted.
2095
2096
2096 If no arguments are given, no files are reverted.
2097 If no arguments are given, no files are reverted.
2097
2098
2098 Modified files are saved with a .orig suffix before reverting.
2099 Modified files are saved with a .orig suffix before reverting.
2099 To disable these backups, use --no-backup.
2100 To disable these backups, use --no-backup.
2100 """
2101 """
2101
2102
2102 if opts["date"]:
2103 if opts["date"]:
2103 if opts["rev"]:
2104 if opts["rev"]:
2104 raise util.Abort(_("you can't specify a revision and a date"))
2105 raise util.Abort(_("you can't specify a revision and a date"))
2105 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2106 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2106
2107
2107 if not pats and not opts['all']:
2108 if not pats and not opts['all']:
2108 raise util.Abort(_('no files or directories specified; '
2109 raise util.Abort(_('no files or directories specified; '
2109 'use --all to revert the whole repo'))
2110 'use --all to revert the whole repo'))
2110
2111
2111 parent, p2 = repo.dirstate.parents()
2112 parent, p2 = repo.dirstate.parents()
2112 if not opts['rev'] and p2 != nullid:
2113 if not opts['rev'] and p2 != nullid:
2113 raise util.Abort(_('uncommitted merge - please provide a '
2114 raise util.Abort(_('uncommitted merge - please provide a '
2114 'specific revision'))
2115 'specific revision'))
2115 ctx = repo.changectx(opts['rev'])
2116 ctx = repo.changectx(opts['rev'])
2116 node = ctx.node()
2117 node = ctx.node()
2117 mf = ctx.manifest()
2118 mf = ctx.manifest()
2118 if node == parent:
2119 if node == parent:
2119 pmf = mf
2120 pmf = mf
2120 else:
2121 else:
2121 pmf = None
2122 pmf = None
2122
2123
2123 # need all matching names in dirstate and manifest of target rev,
2124 # need all matching names in dirstate and manifest of target rev,
2124 # so have to walk both. do not print errors if files exist in one
2125 # so have to walk both. do not print errors if files exist in one
2125 # but not other.
2126 # but not other.
2126
2127
2127 names = {}
2128 names = {}
2128 target_only = {}
2129 target_only = {}
2129
2130
2130 wlock = repo.wlock()
2131 wlock = repo.wlock()
2131 try:
2132 try:
2132 # walk dirstate.
2133 # walk dirstate.
2133 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2134 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2134 badmatch=mf.has_key):
2135 badmatch=mf.has_key):
2135 names[abs] = (rel, exact)
2136 names[abs] = (rel, exact)
2136 if src == 'b':
2137 if src == 'b':
2137 target_only[abs] = True
2138 target_only[abs] = True
2138
2139
2139 # walk target manifest.
2140 # walk target manifest.
2140
2141
2141 def badmatch(path):
2142 def badmatch(path):
2142 if path in names:
2143 if path in names:
2143 return True
2144 return True
2144 path_ = path + '/'
2145 path_ = path + '/'
2145 for f in names:
2146 for f in names:
2146 if f.startswith(path_):
2147 if f.startswith(path_):
2147 return True
2148 return True
2148 return False
2149 return False
2149
2150
2150 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2151 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2151 badmatch=badmatch):
2152 badmatch=badmatch):
2152 if abs in names or src == 'b':
2153 if abs in names or src == 'b':
2153 continue
2154 continue
2154 names[abs] = (rel, exact)
2155 names[abs] = (rel, exact)
2155 target_only[abs] = True
2156 target_only[abs] = True
2156
2157
2157 changes = repo.status(match=names.has_key)[:5]
2158 changes = repo.status(match=names.has_key)[:5]
2158 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2159 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2159
2160
2160 # if f is a rename, also revert the source
2161 # if f is a rename, also revert the source
2161 cwd = repo.getcwd()
2162 cwd = repo.getcwd()
2162 for f in added:
2163 for f in added:
2163 src = repo.dirstate.copied(f)
2164 src = repo.dirstate.copied(f)
2164 if src and src not in names and repo.dirstate[src] == 'r':
2165 if src and src not in names and repo.dirstate[src] == 'r':
2165 removed[src] = None
2166 removed[src] = None
2166 names[src] = (repo.pathto(src, cwd), True)
2167 names[src] = (repo.pathto(src, cwd), True)
2167
2168
2168 revert = ([], _('reverting %s\n'))
2169 revert = ([], _('reverting %s\n'))
2169 add = ([], _('adding %s\n'))
2170 add = ([], _('adding %s\n'))
2170 remove = ([], _('removing %s\n'))
2171 remove = ([], _('removing %s\n'))
2171 forget = ([], _('forgetting %s\n'))
2172 forget = ([], _('forgetting %s\n'))
2172 undelete = ([], _('undeleting %s\n'))
2173 undelete = ([], _('undeleting %s\n'))
2173 update = {}
2174 update = {}
2174
2175
2175 disptable = (
2176 disptable = (
2176 # dispatch table:
2177 # dispatch table:
2177 # file state
2178 # file state
2178 # action if in target manifest
2179 # action if in target manifest
2179 # action if not in target manifest
2180 # action if not in target manifest
2180 # make backup if in target manifest
2181 # make backup if in target manifest
2181 # make backup if not in target manifest
2182 # make backup if not in target manifest
2182 (modified, revert, remove, True, True),
2183 (modified, revert, remove, True, True),
2183 (added, revert, forget, True, False),
2184 (added, revert, forget, True, False),
2184 (removed, undelete, None, False, False),
2185 (removed, undelete, None, False, False),
2185 (deleted, revert, remove, False, False),
2186 (deleted, revert, remove, False, False),
2186 (unknown, add, None, True, False),
2187 (unknown, add, None, True, False),
2187 (target_only, add, None, False, False),
2188 (target_only, add, None, False, False),
2188 )
2189 )
2189
2190
2190 entries = names.items()
2191 entries = names.items()
2191 entries.sort()
2192 entries.sort()
2192
2193
2193 for abs, (rel, exact) in entries:
2194 for abs, (rel, exact) in entries:
2194 mfentry = mf.get(abs)
2195 mfentry = mf.get(abs)
2195 target = repo.wjoin(abs)
2196 target = repo.wjoin(abs)
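# handle() files one path into the chosen action list and, when backups are
# requested, saves the current version as <name>.orig before it is touched.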
2196 def handle(xlist, dobackup):
2197 def handle(xlist, dobackup):
2197 xlist[0].append(abs)
2198 xlist[0].append(abs)
2198 update[abs] = 1
2199 update[abs] = 1
2199 if dobackup and not opts['no_backup'] and util.lexists(target):
2200 if dobackup and not opts['no_backup'] and util.lexists(target):
2200 bakname = "%s.orig" % rel
2201 bakname = "%s.orig" % rel
2201 ui.note(_('saving current version of %s as %s\n') %
2202 ui.note(_('saving current version of %s as %s\n') %
2202 (rel, bakname))
2203 (rel, bakname))
2203 if not opts.get('dry_run'):
2204 if not opts.get('dry_run'):
2204 util.copyfile(target, bakname)
2205 util.copyfile(target, bakname)
2205 if ui.verbose or not exact:
2206 if ui.verbose or not exact:
2206 ui.status(xlist[1] % rel)
2207 ui.status(xlist[1] % rel)
2207 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2208 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2208 if abs not in table: continue
2209 if abs not in table: continue
2209 # file has changed in dirstate
2210 # file has changed in dirstate
2210 if mfentry:
2211 if mfentry:
2211 handle(hitlist, backuphit)
2212 handle(hitlist, backuphit)
2212 elif misslist is not None:
2213 elif misslist is not None:
2213 handle(misslist, backupmiss)
2214 handle(misslist, backupmiss)
2214 else:
2215 else:
2215 if exact: ui.warn(_('file not managed: %s\n') % rel)
2216 if exact: ui.warn(_('file not managed: %s\n') % rel)
2216 break
2217 break
2217 else:
2218 else:
2218 # file has not changed in dirstate
2219 # file has not changed in dirstate
2219 if node == parent:
2220 if node == parent:
2220 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2221 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2221 continue
2222 continue
2222 if pmf is None:
2223 if pmf is None:
2223 # only need parent manifest in this unlikely case,
2224 # only need parent manifest in this unlikely case,
2224 # so do not read by default
2225 # so do not read by default
2225 pmf = repo.changectx(parent).manifest()
2226 pmf = repo.changectx(parent).manifest()
2226 if abs in pmf:
2227 if abs in pmf:
2227 if mfentry:
2228 if mfentry:
2228 # if version of file is same in parent and target
2229 # if version of file is same in parent and target
2229 # manifests, do nothing
2230 # manifests, do nothing
2230 if pmf[abs] != mfentry:
2231 if pmf[abs] != mfentry:
2231 handle(revert, False)
2232 handle(revert, False)
2232 else:
2233 else:
2233 handle(remove, False)
2234 handle(remove, False)
2234
2235
2235 if not opts.get('dry_run'):
2236 if not opts.get('dry_run'):
2236 for f in forget[0]:
2237 for f in forget[0]:
2237 repo.dirstate.forget(f)
2238 repo.dirstate.forget(f)
2238 r = hg.revert(repo, node, update.has_key)
2239 r = hg.revert(repo, node, update.has_key)
2239 for f in add[0]:
2240 for f in add[0]:
2240 repo.dirstate.add(f)
2241 repo.dirstate.add(f)
2241 for f in undelete[0]:
2242 for f in undelete[0]:
2242 repo.dirstate.normal(f)
2243 repo.dirstate.normal(f)
2243 for f in remove[0]:
2244 for f in remove[0]:
2244 repo.dirstate.remove(f)
2245 repo.dirstate.remove(f)
2245 return r
2246 return r
2246 finally:
2247 finally:
2247 del wlock
2248 del wlock
2248
2249
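The dispatch table and handle() logic above are what `hg revert` runs for each file state. A minimal command-line sketch (file names are hypothetical):

  $ hg revert --no-backup foo.c     # revert one file without keeping foo.c.orig
  $ hg revert -r 42 foo.c           # revert foo.c to its contents in revision 42
  $ hg revert -a                    # revert everything in the working directory

By default the modified copy is first saved with an .orig suffix, matching the bakname logic above.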
2250 def rollback(ui, repo):
2251 """roll back the last transaction
2252
2253 This command should be used with care. There is only one level of
2254 rollback, and there is no way to undo a rollback. It will also
2255 restore the dirstate at the time of the last transaction, losing
2256 any dirstate changes since that time.
2257
2258 Transactions are used to encapsulate the effects of all commands
2259 that create new changesets or propagate existing changesets into a
2260 repository. For example, the following commands are transactional,
2261 and their effects can be rolled back:
2262
2263 commit
2264 import
2265 pull
2266 push (with this repository as destination)
2267 unbundle
2268
2269 This command is not intended for use on public repositories. Once
2270 changes are visible for pull by other users, rolling a transaction
2271 back locally is ineffective (someone else may already have pulled
2272 the changes). Furthermore, a race is possible with readers of the
2273 repository; for example an in-progress pull from the repository
2274 may fail if a rollback is performed.
2275 """
2276 repo.rollback()
2277
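A minimal sketch of the behaviour described in this docstring, in a throwaway repository:

  $ hg commit -m 'commit made by mistake'
  $ hg rollback                     # undoes the commit and restores the dirstate
  $ hg rollback                     # nothing left to undo; only one level is kept

Only the transactional commands listed above (commit, import, pull, push into this repository, unbundle) can be undone this way, and only the most recent one.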
2278 def root(ui, repo):
2279 """print the root (top) of the current working dir
2280
2281 Print the root directory of the current repository.
2282 """
2283 ui.write(repo.root + "\n")
2284
2285 def serve(ui, repo, **opts):
2286 """export the repository via HTTP
2287
2288 Start a local HTTP repository browser and pull server.
2289
2290 By default, the server logs accesses to stdout and errors to
2291 stderr. Use the "-A" and "-E" options to log to files.
2292 """
2293
2294 if opts["stdio"]:
2295 if repo is None:
2296 raise hg.RepoError(_("There is no Mercurial repository here"
2297 " (.hg not found)"))
2298 s = sshserver.sshserver(ui, repo)
2299 s.serve_forever()
2300
2301 parentui = ui.parentui or ui
2302 optlist = ("name templates style address port ipv6"
2303 " accesslog errorlog webdir_conf certificate")
2304 for o in optlist.split():
2305 if opts[o]:
2306 parentui.setconfig("web", o, str(opts[o]))
2307 if (repo is not None) and (repo.ui != parentui):
2308 repo.ui.setconfig("web", o, str(opts[o]))
2309
2310 if repo is None and not ui.config("web", "webdir_conf"):
2311 raise hg.RepoError(_("There is no Mercurial repository here"
2312 " (.hg not found)"))
2313
2314 class service:
2315 def init(self):
2316 util.set_signal_handler()
2317 try:
2318 self.httpd = hgweb.server.create_server(parentui, repo)
2319 except socket.error, inst:
2320 raise util.Abort(_('cannot start server: ') + inst.args[1])
2321
2322 if not ui.verbose: return
2323
2324 if self.httpd.port != 80:
2325 ui.status(_('listening at http://%s:%d/\n') %
2326 (self.httpd.addr, self.httpd.port))
2327 else:
2328 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2329
2330 def run(self):
2331 self.httpd.serve_forever()
2332
2333 service = service()
2334
2335 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2336
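For reference, the options wired into the "web" config section above correspond to invocations like the following (paths and names are hypothetical):

  $ hg serve -p 8000 -n myproject -A access.log -E error.log
  $ hg serve --webdir-conf /etc/mercurial/webdir.conf    # serve several repositories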
2337 def status(ui, repo, *pats, **opts):
2338 """show changed files in the working directory
2339
2340 Show status of files in the repository. If names are given, only
2341 files that match are shown. Files that are clean or ignored are
2342 not listed unless -c (clean), -i (ignored) or -A is given.
2343
2344 NOTE: status may appear to disagree with diff if permissions have
2345 changed or a merge has occurred. The standard diff format does not
2346 report permission changes and diff only reports changes relative
2347 to one merge parent.
2348
2349 If one revision is given, it is used as the base revision.
2350 If two revisions are given, the difference between them is shown.
2351
2352 The codes used to show the status of files are:
2353 M = modified
2354 A = added
2355 R = removed
2356 C = clean
2357 ! = deleted, but still tracked
2358 ? = not tracked
2359 I = ignored (not shown by default)
2360 = the previous added file was copied from here
2361 """
2362
2363 all = opts['all']
2364 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2365
2366 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2367 cwd = (pats and repo.getcwd()) or ''
2368 modified, added, removed, deleted, unknown, ignored, clean = [
2369 n for n in repo.status(node1=node1, node2=node2, files=files,
2370 match=matchfn,
2371 list_ignored=all or opts['ignored'],
2372 list_clean=all or opts['clean'])]
2373
2374 changetypes = (('modified', 'M', modified),
2375 ('added', 'A', added),
2376 ('removed', 'R', removed),
2377 ('deleted', '!', deleted),
2378 ('unknown', '?', unknown),
2379 ('ignored', 'I', ignored))
2380
2381 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2382
2383 end = opts['print0'] and '\0' or '\n'
2384
2385 for opt, char, changes in ([ct for ct in explicit_changetypes
2386 if all or opts[ct[0]]]
2387 or changetypes):
2388 if opts['no_status']:
2389 format = "%%s%s" % end
2390 else:
2391 format = "%s %%s%s" % (char, end)
2392
2393 for f in changes:
2394 ui.write(format % repo.pathto(f, cwd))
2395 if ((all or opts.get('copies')) and not opts.get('no_status')):
2396 copied = repo.dirstate.copied(f)
2397 if copied:
2398 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2399
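A short sketch of how the codes and options above look in practice (file names and output are hypothetical):

  $ hg status
  M mercurial/commands.py
  A tests/test-mq-qrefresh
  ? scratch.txt
  $ hg status -an0 | xargs -0 ls -l   # only added files, NUL-terminated for xargs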
2400 def tag(ui, repo, name, rev_=None, **opts):
2401 """add a tag for the current or given revision
2402
2403 Name a particular revision using <name>.
2404
2405 Tags are used to name particular revisions of the repository and are
2406 very useful to compare different revisions, to go back to significant
2407 earlier versions, or to mark branch points as releases, etc.
2408
2409 If no revision is given, the parent of the working directory is used,
2410 or tip if no revision is checked out.
2411
2412 To facilitate version control, distribution, and merging of tags,
2413 they are stored as a file named ".hgtags" which is managed
2414 similarly to other project files and can be hand-edited if
2415 necessary. The file '.hg/localtags' is used for local tags (not
2416 shared among repositories).
2417 """
2418 if name in ['tip', '.', 'null']:
2419 raise util.Abort(_("the name '%s' is reserved") % name)
2420 if rev_ is not None:
2421 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2422 "please use 'hg tag [-r REV] NAME' instead\n"))
2423 if opts['rev']:
2424 raise util.Abort(_("use only one form to specify the revision"))
2425 if opts['rev'] and opts['remove']:
2426 raise util.Abort(_("--rev and --remove are incompatible"))
2427 if opts['rev']:
2428 rev_ = opts['rev']
2429 message = opts['message']
2430 if opts['remove']:
2431 if not name in repo.tags():
2432 raise util.Abort(_('tag %s does not exist') % name)
2433 rev_ = nullid
2434 if not message:
2435 message = _('Removed tag %s') % name
2436 elif name in repo.tags() and not opts['force']:
2437 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2438 % name)
2439 if not rev_ and repo.dirstate.parents()[1] != nullid:
2440 raise util.Abort(_('uncommitted merge - please provide a '
2441 'specific revision'))
2442 r = repo.changectx(rev_).node()
2443
2444 if not message:
2445 message = _('Added tag %s for changeset %s') % (name, short(r))
2446
2447 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2448
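A small sketch of the accepted forms (tag names and revisions are hypothetical):

  $ hg tag v1.0                      # tag the working directory's parent revision
  $ hg tag -r 1000 -m 'Tagging 1.1' v1.1
  $ hg tag -l wip                    # local tag, recorded in .hg/localtags
  $ hg tag --remove v1.1             # commits a removal of the tag

As the warning above notes, 'hg tag NAME [REV]' is deprecated in favour of 'hg tag -r REV NAME'.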
2449 def tags(ui, repo):
2450 """list repository tags
2451
2452 List the repository tags.
2453
2454 This lists both regular and local tags.
2455 """
2456
2457 l = repo.tagslist()
2458 l.reverse()
2459 hexfunc = ui.debugflag and hex or short
2460 for t, n in l:
2461 try:
2462 hn = hexfunc(n)
2463 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2464 except revlog.LookupError:
2465 r = " ?:%s" % hn
2466 if ui.quiet:
2467 ui.write("%s\n" % t)
2468 else:
2469 spaces = " " * (30 - util.locallen(t))
2470 ui.write("%s%s %s\n" % (t, spaces, r))
2471
2472 def tip(ui, repo, **opts):
2473 """show the tip revision
2474
2475 Show the tip revision.
2476 """
2477 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2478
2479 def unbundle(ui, repo, fname1, *fnames, **opts):
2480 """apply one or more changegroup files
2481
2482 Apply one or more compressed changegroup files generated by the
2483 bundle command.
2484 """
2485 fnames = (fname1,) + fnames
2486 for fname in fnames:
2487 if os.path.exists(fname):
2488 f = open(fname, "rb")
2489 else:
2490 f = urllib.urlopen(fname)
2491 gen = changegroup.readbundle(f, fname)
2492 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2493
2494 return postincoming(ui, repo, modheads, opts['update'], None)
2495
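The files consumed here are produced by the bundle command; a round-trip sketch (paths are hypothetical):

  $ hg bundle changes.hg ../upstream   # bundle changesets missing from ../upstream
  $ hg unbundle -u changes.hg          # apply them elsewhere and update to the new tip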
2496 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2497 """update working directory
2498
2499 Update the working directory to the specified revision, or the
2500 tip of the current branch if none is specified.
2501
2502 If there are no outstanding changes in the working directory and
2503 there is a linear relationship between the current version and the
2504 requested version, the result is the requested version.
2505
2506 To merge the working directory with another revision, use the
2507 merge command.
2508
2509 By default, update will refuse to run if doing so would require
2510 discarding local changes.
2511 """
2512 if rev and node:
2513 raise util.Abort(_("please specify just one revision"))
2514
2515 if not rev:
2516 rev = node
2517
2518 if date:
2519 if rev:
2520 raise util.Abort(_("you can't specify a revision and a date"))
2521 rev = cmdutil.finddate(ui, repo, date)
2522
2523 if clean:
2524 return hg.clean(repo, rev)
2525 else:
2526 return hg.update(repo, rev)
2527
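The clean/rev/date branches above correspond to these command-line forms (revisions and dates are hypothetical):

  $ hg update -r 1.0                 # update to a tag, branch, or revision
  $ hg update -d '2007-06-01'        # tipmost revision matching the date
  $ hg update -C                     # discard local changes (the hg.clean path)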
2528 def verify(ui, repo):
2529 """verify the integrity of the repository
2530
2531 Verify the integrity of the current repository.
2532
2533 This will perform an extensive check of the repository's
2534 integrity, validating the hashes and checksums of each entry in
2535 the changelog, manifest, and tracked files, as well as the
2536 integrity of their crosslinks and indices.
2537 """
2538 return hg.verify(repo)
2539
2540 def version_(ui):
2541 """output version and copyright information"""
2542 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2543 % version.get_version())
2544 ui.status(_(
2545 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2546 "This is free software; see the source for copying conditions. "
2547 "There is NO\nwarranty; "
2548 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2549 ))
2550
2551 # Command options and aliases are listed here, alphabetically
2552
2553 globalopts = [
2554 ('R', 'repository', '',
2555 _('repository root directory or symbolic path name')),
2556 ('', 'cwd', '', _('change working directory')),
2557 ('y', 'noninteractive', None,
2558 _('do not prompt, assume \'yes\' for any required answers')),
2559 ('q', 'quiet', None, _('suppress output')),
2560 ('v', 'verbose', None, _('enable additional output')),
2561 ('', 'config', [], _('set/override config option')),
2562 ('', 'debug', None, _('enable debugging output')),
2563 ('', 'debugger', None, _('start debugger')),
2564 ('', 'encoding', util._encoding, _('set the charset encoding')),
2565 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2566 ('', 'lsprof', None, _('print improved command execution profile')),
2567 ('', 'traceback', None, _('print traceback on exception')),
2568 ('', 'time', None, _('time how long the command takes')),
2569 ('', 'profile', None, _('print command execution profile')),
2570 ('', 'version', None, _('output version information and exit')),
2571 ('h', 'help', None, _('display help and exit')),
2572 ]
2573
2574 dryrunopts = [('n', 'dry-run', None,
2575 _('do not perform actions, just print output'))]
2576
2577 remoteopts = [
2578 ('e', 'ssh', '', _('specify ssh command to use')),
2579 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2580 ]
2581
2582 walkopts = [
2583 ('I', 'include', [], _('include names matching the given patterns')),
2584 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2585 ]
2586
2587 commitopts = [
2588 ('m', 'message', '', _('use <text> as commit message')),
2589 ('l', 'logfile', '', _('read commit message from <file>')),
2590 ]
2591
2592 commitopts2 = [
2593 ('d', 'date', '', _('record datecode as commit date')),
2594 ('u', 'user', '', _('record user as committer')),
2595 ]
2596
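These global options can be combined with any command in the table below, for example (repository path and user name are hypothetical):

  $ hg -R ~/src/project --config ui.username=alice -v status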
2596 table = {
2597 table = {
2597 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2598 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2598 "addremove":
2599 "addremove":
2599 (addremove,
2600 (addremove,
2600 [('s', 'similarity', '',
2601 [('s', 'similarity', '',
2601 _('guess renamed files by similarity (0<=s<=100)')),
2602 _('guess renamed files by similarity (0<=s<=100)')),
2602 ] + walkopts + dryrunopts,
2603 ] + walkopts + dryrunopts,
2603 _('hg addremove [OPTION]... [FILE]...')),
2604 _('hg addremove [OPTION]... [FILE]...')),
2604 "^annotate":
2605 "^annotate":
2605 (annotate,
2606 (annotate,
2606 [('r', 'rev', '', _('annotate the specified revision')),
2607 [('r', 'rev', '', _('annotate the specified revision')),
2607 ('f', 'follow', None, _('follow file copies and renames')),
2608 ('f', 'follow', None, _('follow file copies and renames')),
2608 ('a', 'text', None, _('treat all files as text')),
2609 ('a', 'text', None, _('treat all files as text')),
2609 ('u', 'user', None, _('list the author')),
2610 ('u', 'user', None, _('list the author')),
2610 ('d', 'date', None, _('list the date')),
2611 ('d', 'date', None, _('list the date')),
2611 ('n', 'number', None, _('list the revision number (default)')),
2612 ('n', 'number', None, _('list the revision number (default)')),
2612 ('c', 'changeset', None, _('list the changeset')),
2613 ('c', 'changeset', None, _('list the changeset')),
2613 ('l', 'line-number', None,
2614 ('l', 'line-number', None,
2614 _('show line number at the first appearance'))
2615 _('show line number at the first appearance'))
2615 ] + walkopts,
2616 ] + walkopts,
2616 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2617 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2617 "archive":
2618 "archive":
2618 (archive,
2619 (archive,
2619 [('', 'no-decode', None, _('do not pass files through decoders')),
2620 [('', 'no-decode', None, _('do not pass files through decoders')),
2620 ('p', 'prefix', '', _('directory prefix for files in archive')),
2621 ('p', 'prefix', '', _('directory prefix for files in archive')),
2621 ('r', 'rev', '', _('revision to distribute')),
2622 ('r', 'rev', '', _('revision to distribute')),
2622 ('t', 'type', '', _('type of distribution to create')),
2623 ('t', 'type', '', _('type of distribution to create')),
2623 ] + walkopts,
2624 ] + walkopts,
2624 _('hg archive [OPTION]... DEST')),
2625 _('hg archive [OPTION]... DEST')),
2625 "backout":
2626 "backout":
2626 (backout,
2627 (backout,
2627 [('', 'merge', None,
2628 [('', 'merge', None,
2628 _('merge with old dirstate parent after backout')),
2629 _('merge with old dirstate parent after backout')),
2629 ('', 'parent', '', _('parent to choose when backing out merge')),
2630 ('', 'parent', '', _('parent to choose when backing out merge')),
2630 ('r', 'rev', '', _('revision to backout')),
2631 ('r', 'rev', '', _('revision to backout')),
2631 ] + walkopts + commitopts + commitopts2,
2632 ] + walkopts + commitopts + commitopts2,
2632 _('hg backout [OPTION]... [-r] REV')),
2633 _('hg backout [OPTION]... [-r] REV')),
2633 "branch":
2634 "branch":
2634 (branch,
2635 (branch,
2635 [('f', 'force', None,
2636 [('f', 'force', None,
2636 _('set branch name even if it shadows an existing branch'))],
2637 _('set branch name even if it shadows an existing branch'))],
2637 _('hg branch [NAME]')),
2638 _('hg branch [NAME]')),
2638 "branches":
2639 "branches":
2639 (branches,
2640 (branches,
2640 [('a', 'active', False,
2641 [('a', 'active', False,
2641 _('show only branches that have unmerged heads'))],
2642 _('show only branches that have unmerged heads'))],
2642 _('hg branches [-a]')),
2643 _('hg branches [-a]')),
2643 "bundle":
2644 "bundle":
2644 (bundle,
2645 (bundle,
2645 [('f', 'force', None,
2646 [('f', 'force', None,
2646 _('run even when remote repository is unrelated')),
2647 _('run even when remote repository is unrelated')),
2647 ('r', 'rev', [],
2648 ('r', 'rev', [],
2648 _('a changeset you would like to bundle')),
2649 _('a changeset you would like to bundle')),
2649 ('', 'base', [],
2650 ('', 'base', [],
2650 _('a base changeset to specify instead of a destination')),
2651 _('a base changeset to specify instead of a destination')),
2651 ] + remoteopts,
2652 ] + remoteopts,
2652 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2653 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2653 "cat":
2654 "cat":
2654 (cat,
2655 (cat,
2655 [('o', 'output', '', _('print output to file with formatted name')),
2656 [('o', 'output', '', _('print output to file with formatted name')),
2656 ('r', 'rev', '', _('print the given revision')),
2657 ('r', 'rev', '', _('print the given revision')),
2657 ] + walkopts,
2658 ] + walkopts,
2658 _('hg cat [OPTION]... FILE...')),
2659 _('hg cat [OPTION]... FILE...')),
2659 "^clone":
2660 "^clone":
2660 (clone,
2661 (clone,
2661 [('U', 'noupdate', None, _('do not update the new working directory')),
2662 [('U', 'noupdate', None, _('do not update the new working directory')),
2662 ('r', 'rev', [],
2663 ('r', 'rev', [],
2663 _('a changeset you would like to have after cloning')),
2664 _('a changeset you would like to have after cloning')),
2664 ('', 'pull', None, _('use pull protocol to copy metadata')),
2665 ('', 'pull', None, _('use pull protocol to copy metadata')),
2665 ('', 'uncompressed', None,
2666 ('', 'uncompressed', None,
2666 _('use uncompressed transfer (fast over LAN)')),
2667 _('use uncompressed transfer (fast over LAN)')),
2667 ] + remoteopts,
2668 ] + remoteopts,
2668 _('hg clone [OPTION]... SOURCE [DEST]')),
2669 _('hg clone [OPTION]... SOURCE [DEST]')),
2669 "^commit|ci":
2670 "^commit|ci":
2670 (commit,
2671 (commit,
2671 [('A', 'addremove', None,
2672 [('A', 'addremove', None,
2672 _('mark new/missing files as added/removed before committing')),
2673 _('mark new/missing files as added/removed before committing')),
2673 ] + walkopts + commitopts + commitopts2,
2674 ] + walkopts + commitopts + commitopts2,
2674 _('hg commit [OPTION]... [FILE]...')),
2675 _('hg commit [OPTION]... [FILE]...')),
2675 "copy|cp":
2676 "copy|cp":
2676 (copy,
2677 (copy,
2677 [('A', 'after', None, _('record a copy that has already occurred')),
2678 [('A', 'after', None, _('record a copy that has already occurred')),
2678 ('f', 'force', None,
2679 ('f', 'force', None,
2679 _('forcibly copy over an existing managed file')),
2680 _('forcibly copy over an existing managed file')),
2680 ] + walkopts + dryrunopts,
2681 ] + walkopts + dryrunopts,
2681 _('hg copy [OPTION]... [SOURCE]... DEST')),
2682 _('hg copy [OPTION]... [SOURCE]... DEST')),
2682 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2683 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2683 "debugcomplete":
2684 "debugcomplete":
2684 (debugcomplete,
2685 (debugcomplete,
2685 [('o', 'options', None, _('show the command options'))],
2686 [('o', 'options', None, _('show the command options'))],
2686 _('debugcomplete [-o] CMD')),
2687 _('debugcomplete [-o] CMD')),
2687 "debuginstall": (debuginstall, [], _('debuginstall')),
2688 "debuginstall": (debuginstall, [], _('debuginstall')),
2688 "debugrebuildstate":
2689 "debugrebuildstate":
2689 (debugrebuildstate,
2690 (debugrebuildstate,
2690 [('r', 'rev', '', _('revision to rebuild to'))],
2691 [('r', 'rev', '', _('revision to rebuild to'))],
2691 _('debugrebuildstate [-r REV] [REV]')),
2692 _('debugrebuildstate [-r REV] [REV]')),
2692 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2693 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2693 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2694 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2694 "debugstate": (debugstate, [], _('debugstate')),
2695 "debugstate": (debugstate, [], _('debugstate')),
2695 "debugdate":
2696 "debugdate":
2696 (debugdate,
2697 (debugdate,
2697 [('e', 'extended', None, _('try extended date formats'))],
2698 [('e', 'extended', None, _('try extended date formats'))],
2698 _('debugdate [-e] DATE [RANGE]')),
2699 _('debugdate [-e] DATE [RANGE]')),
2699 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2700 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2700 "debugindex": (debugindex, [], _('debugindex FILE')),
2701 "debugindex": (debugindex, [], _('debugindex FILE')),
2701 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2702 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2702 "debugrename":
2703 "debugrename":
2703 (debugrename,
2704 (debugrename,
2704 [('r', 'rev', '', _('revision to debug'))],
2705 [('r', 'rev', '', _('revision to debug'))],
2705 _('debugrename [-r REV] FILE')),
2706 _('debugrename [-r REV] FILE')),
2706 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2707 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2707 "^diff":
2708 "^diff":
2708 (diff,
2709 (diff,
2709 [('r', 'rev', [], _('revision')),
2710 [('r', 'rev', [], _('revision')),
2710 ('a', 'text', None, _('treat all files as text')),
2711 ('a', 'text', None, _('treat all files as text')),
2711 ('p', 'show-function', None,
2712 ('p', 'show-function', None,
2712 _('show which function each change is in')),
2713 _('show which function each change is in')),
2713 ('g', 'git', None, _('use git extended diff format')),
2714 ('g', 'git', None, _('use git extended diff format')),
2714 ('', 'nodates', None, _("don't include dates in diff headers")),
2715 ('', 'nodates', None, _("don't include dates in diff headers")),
2715 ('w', 'ignore-all-space', None,
2716 ('w', 'ignore-all-space', None,
2716 _('ignore white space when comparing lines')),
2717 _('ignore white space when comparing lines')),
2717 ('b', 'ignore-space-change', None,
2718 ('b', 'ignore-space-change', None,
2718 _('ignore changes in the amount of white space')),
2719 _('ignore changes in the amount of white space')),
2719 ('B', 'ignore-blank-lines', None,
2720 ('B', 'ignore-blank-lines', None,
2720 _('ignore changes whose lines are all blank')),
2721 _('ignore changes whose lines are all blank')),
2721 ] + walkopts,
2722 ] + walkopts,
2722 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2723 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2723 "^export":
2724 "^export":
2724 (export,
2725 (export,
2725 [('o', 'output', '', _('print output to file with formatted name')),
2726 [('o', 'output', '', _('print output to file with formatted name')),
2726 ('a', 'text', None, _('treat all files as text')),
2727 ('a', 'text', None, _('treat all files as text')),
2727 ('g', 'git', None, _('use git extended diff format')),
2728 ('g', 'git', None, _('use git extended diff format')),
2728 ('', 'nodates', None, _("don't include dates in diff headers")),
2729 ('', 'nodates', None, _("don't include dates in diff headers")),
2729 ('', 'switch-parent', None, _('diff against the second parent'))],
2730 ('', 'switch-parent', None, _('diff against the second parent'))],
2730 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2731 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2731 "grep":
2732 "grep":
2732 (grep,
2733 (grep,
2733 [('0', 'print0', None, _('end fields with NUL')),
2734 [('0', 'print0', None, _('end fields with NUL')),
2734 ('', 'all', None, _('print all revisions that match')),
2735 ('', 'all', None, _('print all revisions that match')),
2735 ('f', 'follow', None,
2736 ('f', 'follow', None,
2736 _('follow changeset history, or file history across copies and renames')),
2737 _('follow changeset history, or file history across copies and renames')),
2737 ('i', 'ignore-case', None, _('ignore case when matching')),
2738 ('i', 'ignore-case', None, _('ignore case when matching')),
2738 ('l', 'files-with-matches', None,
2739 ('l', 'files-with-matches', None,
2739 _('print only filenames and revs that match')),
2740 _('print only filenames and revs that match')),
2740 ('n', 'line-number', None, _('print matching line numbers')),
2741 ('n', 'line-number', None, _('print matching line numbers')),
2741 ('r', 'rev', [], _('search in given revision range')),
2742 ('r', 'rev', [], _('search in given revision range')),
2742 ('u', 'user', None, _('print user who committed change')),
2743 ('u', 'user', None, _('print user who committed change')),
2743 ] + walkopts,
2744 ] + walkopts,
2744 _('hg grep [OPTION]... PATTERN [FILE]...')),
2745 _('hg grep [OPTION]... PATTERN [FILE]...')),
2745 "heads":
2746 "heads":
2746 (heads,
2747 (heads,
2747 [('', 'style', '', _('display using template map file')),
2748 [('', 'style', '', _('display using template map file')),
2748 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2749 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2749 ('', 'template', '', _('display with template'))],
2750 ('', 'template', '', _('display with template'))],
2750 _('hg heads [-r REV] [REV]...')),
2751 _('hg heads [-r REV] [REV]...')),
2751 "help": (help_, [], _('hg help [COMMAND]')),
2752 "help": (help_, [], _('hg help [COMMAND]')),
2752 "identify|id":
2753 "identify|id":
2753 (identify,
2754 (identify,
2754 [('r', 'rev', '', _('identify the specified rev')),
2755 [('r', 'rev', '', _('identify the specified rev')),
2755 ('n', 'num', None, _('show local revision number')),
2756 ('n', 'num', None, _('show local revision number')),
2756 ('i', 'id', None, _('show global revision id')),
2757 ('i', 'id', None, _('show global revision id')),
2757 ('b', 'branch', None, _('show branch')),
2758 ('b', 'branch', None, _('show branch')),
2758 ('t', 'tags', None, _('show tags'))],
2759 ('t', 'tags', None, _('show tags'))],
2759 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2760 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2760 "import|patch":
2761 "import|patch":
2761 (import_,
2762 (import_,
2762 [('p', 'strip', 1,
2763 [('p', 'strip', 1,
2763 _('directory strip option for patch. This has the same\n'
2764 _('directory strip option for patch. This has the same\n'
2764 'meaning as the corresponding patch option')),
2765 'meaning as the corresponding patch option')),
2765 ('b', 'base', '', _('base path')),
2766 ('b', 'base', '', _('base path')),
2766 ('f', 'force', None,
2767 ('f', 'force', None,
2767 _('skip check for outstanding uncommitted changes')),
2768 _('skip check for outstanding uncommitted changes')),
2768 ('', 'exact', None,
2769 ('', 'exact', None,
2769 _('apply patch to the nodes from which it was generated')),
2770 _('apply patch to the nodes from which it was generated')),
2770 ('', 'import-branch', None,
2771 ('', 'import-branch', None,
2771 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2772 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2772 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2773 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2773 "incoming|in": (incoming,
2774 "incoming|in": (incoming,
2774 [('M', 'no-merges', None, _('do not show merges')),
2775 [('M', 'no-merges', None, _('do not show merges')),
2775 ('f', 'force', None,
2776 ('f', 'force', None,
2776 _('run even when remote repository is unrelated')),
2777 _('run even when remote repository is unrelated')),
2777 ('', 'style', '', _('display using template map file')),
2778 ('', 'style', '', _('display using template map file')),
2778 ('n', 'newest-first', None, _('show newest record first')),
2779 ('n', 'newest-first', None, _('show newest record first')),
2779 ('', 'bundle', '', _('file to store the bundles into')),
2780 ('', 'bundle', '', _('file to store the bundles into')),
2780 ('p', 'patch', None, _('show patch')),
2781 ('p', 'patch', None, _('show patch')),
2781 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2782 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2782 ('', 'template', '', _('display with template')),
2783 ('', 'template', '', _('display with template')),
2783 ] + remoteopts,
2784 ] + remoteopts,
2784 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2785 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2785 ' [--bundle FILENAME] [SOURCE]')),
2786 ' [--bundle FILENAME] [SOURCE]')),
2786 "^init":
2787 "^init":
2787 (init,
2788 (init,
2788 remoteopts,
2789 remoteopts,
2789 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2790 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2790 "locate":
2791 "locate":
2791 (locate,
2792 (locate,
2792 [('r', 'rev', '', _('search the repository as it stood at rev')),
2793 [('r', 'rev', '', _('search the repository as it stood at rev')),
2793 ('0', 'print0', None,
2794 ('0', 'print0', None,
2794 _('end filenames with NUL, for use with xargs')),
2795 _('end filenames with NUL, for use with xargs')),
2795 ('f', 'fullpath', None,
2796 ('f', 'fullpath', None,
2796 _('print complete paths from the filesystem root')),
2797 _('print complete paths from the filesystem root')),
2797 ] + walkopts,
2798 ] + walkopts,
2798 _('hg locate [OPTION]... [PATTERN]...')),
2799 _('hg locate [OPTION]... [PATTERN]...')),
2799 "^log|history":
2800 "^log|history":
2800 (log,
2801 (log,
2801 [('f', 'follow', None,
2802 [('f', 'follow', None,
2802 _('follow changeset history, or file history across copies and renames')),
2803 _('follow changeset history, or file history across copies and renames')),
2803 ('', 'follow-first', None,
2804 ('', 'follow-first', None,
2804 _('only follow the first parent of merge changesets')),
2805 _('only follow the first parent of merge changesets')),
2805 ('d', 'date', '', _('show revs matching date spec')),
2806 ('d', 'date', '', _('show revs matching date spec')),
2806 ('C', 'copies', None, _('show copied files')),
2807 ('C', 'copies', None, _('show copied files')),
2807 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2808 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2808 ('l', 'limit', '', _('limit number of changes displayed')),
2809 ('l', 'limit', '', _('limit number of changes displayed')),
2809 ('r', 'rev', [], _('show the specified revision or range')),
2810 ('r', 'rev', [], _('show the specified revision or range')),
2810 ('', 'removed', None, _('include revs where files were removed')),
2811 ('', 'removed', None, _('include revs where files were removed')),
2811 ('M', 'no-merges', None, _('do not show merges')),
2812 ('M', 'no-merges', None, _('do not show merges')),
2812 ('', 'style', '', _('display using template map file')),
2813 ('', 'style', '', _('display using template map file')),
2813 ('m', 'only-merges', None, _('show only merges')),
2814 ('m', 'only-merges', None, _('show only merges')),
2814 ('p', 'patch', None, _('show patch')),
2815 ('p', 'patch', None, _('show patch')),
2815 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2816 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2816 ('', 'template', '', _('display with template')),
2817 ('', 'template', '', _('display with template')),
2817 ] + walkopts,
2818 ] + walkopts,
2818 _('hg log [OPTION]... [FILE]')),
2819 _('hg log [OPTION]... [FILE]')),
2819 "manifest": (manifest, [('r', 'rev', '', _('revision to display'))],
2820 "manifest": (manifest, [('r', 'rev', '', _('revision to display'))],
2820 _('hg manifest [-r REV]')),
2821 _('hg manifest [-r REV]')),
2821 "^merge":
2822 "^merge":
2822 (merge,
2823 (merge,
2823 [('f', 'force', None, _('force a merge with outstanding changes')),
2824 [('f', 'force', None, _('force a merge with outstanding changes')),
2824 ('r', 'rev', '', _('revision to merge')),
2825 ('r', 'rev', '', _('revision to merge')),
2825 ],
2826 ],
2826 _('hg merge [-f] [[-r] REV]')),
2827 _('hg merge [-f] [[-r] REV]')),
2827 "outgoing|out": (outgoing,
2828 "outgoing|out": (outgoing,
2828 [('M', 'no-merges', None, _('do not show merges')),
2829 [('M', 'no-merges', None, _('do not show merges')),
2829 ('f', 'force', None,
2830 ('f', 'force', None,
2830 _('run even when remote repository is unrelated')),
2831 _('run even when remote repository is unrelated')),
2831 ('p', 'patch', None, _('show patch')),
2832 ('p', 'patch', None, _('show patch')),
2832 ('', 'style', '', _('display using template map file')),
2833 ('', 'style', '', _('display using template map file')),
2833 ('r', 'rev', [], _('a specific revision you would like to push')),
2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2834 ('n', 'newest-first', None, _('show newest record first')),
2835 ('n', 'newest-first', None, _('show newest record first')),
2835 ('', 'template', '', _('display with template')),
2836 ('', 'template', '', _('display with template')),
2836 ] + remoteopts,
2837 ] + remoteopts,
2837 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2838 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2838 "^parents":
2839 "^parents":
2839 (parents,
2840 (parents,
2840 [('r', 'rev', '', _('show parents from the specified rev')),
2841 [('r', 'rev', '', _('show parents from the specified rev')),
2841 ('', 'style', '', _('display using template map file')),
2842 ('', 'style', '', _('display using template map file')),
2842 ('', 'template', '', _('display with template'))],
2843 ('', 'template', '', _('display with template'))],
2843 _('hg parents [-r REV] [FILE]')),
2844 _('hg parents [-r REV] [FILE]')),
2844 "paths": (paths, [], _('hg paths [NAME]')),
2845 "paths": (paths, [], _('hg paths [NAME]')),
2845 "^pull":
2846 "^pull":
2846 (pull,
2847 (pull,
2847 [('u', 'update', None,
2848 [('u', 'update', None,
2848 _('update to new tip if changesets were pulled')),
2849 _('update to new tip if changesets were pulled')),
2849 ('f', 'force', None,
2850 ('f', 'force', None,
2850 _('run even when remote repository is unrelated')),
2851 _('run even when remote repository is unrelated')),
2851 ('r', 'rev', [],
2852 ('r', 'rev', [],
2852 _('a specific revision up to which you would like to pull')),
2853 _('a specific revision up to which you would like to pull')),
2853 ] + remoteopts,
2854 ] + remoteopts,
2854 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2855 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2855 "^push":
2856 "^push":
2856 (push,
2857 (push,
2857 [('f', 'force', None, _('force push')),
2858 [('f', 'force', None, _('force push')),
2858 ('r', 'rev', [], _('a specific revision you would like to push')),
2859 ('r', 'rev', [], _('a specific revision you would like to push')),
2859 ] + remoteopts,
2860 ] + remoteopts,
2860 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2861 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2861 "debugrawcommit|rawcommit":
2862 "debugrawcommit|rawcommit":
2862 (rawcommit,
2863 (rawcommit,
2863 [('p', 'parent', [], _('parent')),
2864 [('p', 'parent', [], _('parent')),
2864 ('F', 'files', '', _('file list'))
2865 ('F', 'files', '', _('file list'))
2865 ] + commitopts + commitopts2,
2866 ] + commitopts + commitopts2,
2866 _('hg debugrawcommit [OPTION]... [FILE]...')),
2867 _('hg debugrawcommit [OPTION]... [FILE]...')),
2867 "recover": (recover, [], _('hg recover')),
2868 "recover": (recover, [], _('hg recover')),
2868 "^remove|rm":
2869 "^remove|rm":
2869 (remove,
2870 (remove,
2870 [('A', 'after', None, _('record remove without deleting')),
2871 [('A', 'after', None, _('record remove without deleting')),
2871 ('f', 'force', None, _('remove file even if modified')),
2872 ('f', 'force', None, _('remove file even if modified')),
2872 ] + walkopts,
2873 ] + walkopts,
2873 _('hg remove [OPTION]... FILE...')),
2874 _('hg remove [OPTION]... FILE...')),
2874 "rename|mv":
2875 "rename|mv":
2875 (rename,
2876 (rename,
2876 [('A', 'after', None, _('record a rename that has already occurred')),
2877 [('A', 'after', None, _('record a rename that has already occurred')),
2877 ('f', 'force', None,
2878 ('f', 'force', None,
2878 _('forcibly copy over an existing managed file')),
2879 _('forcibly copy over an existing managed file')),
2879 ] + walkopts + dryrunopts,
2880 ] + walkopts + dryrunopts,
2880 _('hg rename [OPTION]... SOURCE... DEST')),
2881 _('hg rename [OPTION]... SOURCE... DEST')),
2881 "revert":
2882 "revert":
2882 (revert,
2883 (revert,
2883 [('a', 'all', None, _('revert all changes when no arguments given')),
2884 [('a', 'all', None, _('revert all changes when no arguments given')),
2884 ('d', 'date', '', _('tipmost revision matching date')),
2885 ('d', 'date', '', _('tipmost revision matching date')),
2885 ('r', 'rev', '', _('revision to revert to')),
2886 ('r', 'rev', '', _('revision to revert to')),
2886 ('', 'no-backup', None, _('do not save backup copies of files')),
2887 ('', 'no-backup', None, _('do not save backup copies of files')),
2887 ] + walkopts + dryrunopts,
2888 ] + walkopts + dryrunopts,
2888 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2889 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2889 "rollback": (rollback, [], _('hg rollback')),
2890 "rollback": (rollback, [], _('hg rollback')),
2890 "root": (root, [], _('hg root')),
2891 "root": (root, [], _('hg root')),
2891 "showconfig|debugconfig":
2892 "showconfig|debugconfig":
2892 (showconfig,
2893 (showconfig,
2893 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2894 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2894 _('showconfig [-u] [NAME]...')),
2895 _('showconfig [-u] [NAME]...')),
2895 "^serve":
2896 "^serve":
2896 (serve,
2897 (serve,
2897 [('A', 'accesslog', '', _('name of access log file to write to')),
2898 [('A', 'accesslog', '', _('name of access log file to write to')),
2898 ('d', 'daemon', None, _('run server in background')),
2899 ('d', 'daemon', None, _('run server in background')),
2899 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2900 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2900 ('E', 'errorlog', '', _('name of error log file to write to')),
2901 ('E', 'errorlog', '', _('name of error log file to write to')),
2901 ('p', 'port', 0, _('port to use (default: 8000)')),
2902 ('p', 'port', 0, _('port to use (default: 8000)')),
2902 ('a', 'address', '', _('address to use')),
2903 ('a', 'address', '', _('address to use')),
2903 ('n', 'name', '',
2904 ('n', 'name', '',
2904 _('name to show in web pages (default: working dir)')),
2905 _('name to show in web pages (default: working dir)')),
2905 ('', 'webdir-conf', '', _('name of the webdir config file'
2906 ('', 'webdir-conf', '', _('name of the webdir config file'
2906 ' (serve more than one repo)')),
2907 ' (serve more than one repo)')),
2907 ('', 'pid-file', '', _('name of file to write process ID to')),
2908 ('', 'pid-file', '', _('name of file to write process ID to')),
2908 ('', 'stdio', None, _('for remote clients')),
2909 ('', 'stdio', None, _('for remote clients')),
2909 ('t', 'templates', '', _('web templates to use')),
2910 ('t', 'templates', '', _('web templates to use')),
2910 ('', 'style', '', _('template style to use')),
2911 ('', 'style', '', _('template style to use')),
2911 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
2912 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
2912 ('', 'certificate', '', _('SSL certificate file'))],
2913 ('', 'certificate', '', _('SSL certificate file'))],
2913 _('hg serve [OPTION]...')),
2914 _('hg serve [OPTION]...')),
2914 "^status|st":
2915 "^status|st":
2915 (status,
2916 (status,
2916 [('A', 'all', None, _('show status of all files')),
2917 [('A', 'all', None, _('show status of all files')),
2917 ('m', 'modified', None, _('show only modified files')),
2918 ('m', 'modified', None, _('show only modified files')),
2918 ('a', 'added', None, _('show only added files')),
2919 ('a', 'added', None, _('show only added files')),
2919 ('r', 'removed', None, _('show only removed files')),
2920 ('r', 'removed', None, _('show only removed files')),
2920 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2921 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2921 ('c', 'clean', None, _('show only files without changes')),
2922 ('c', 'clean', None, _('show only files without changes')),
2922 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2923 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2923 ('i', 'ignored', None, _('show only ignored files')),
2924 ('i', 'ignored', None, _('show only ignored files')),
2924 ('n', 'no-status', None, _('hide status prefix')),
2925 ('n', 'no-status', None, _('hide status prefix')),
2925 ('C', 'copies', None, _('show source of copied files')),
2926 ('C', 'copies', None, _('show source of copied files')),
2926 ('0', 'print0', None,
2927 ('0', 'print0', None,
2927 _('end filenames with NUL, for use with xargs')),
2928 _('end filenames with NUL, for use with xargs')),
2928 ('', 'rev', [], _('show difference from revision')),
2929 ('', 'rev', [], _('show difference from revision')),
2929 ] + walkopts,
2930 ] + walkopts,
2930 _('hg status [OPTION]... [FILE]...')),
2931 _('hg status [OPTION]... [FILE]...')),
2931 "tag":
2932 "tag":
2932 (tag,
2933 (tag,
2933 [('f', 'force', None, _('replace existing tag')),
2934 [('f', 'force', None, _('replace existing tag')),
2934 ('l', 'local', None, _('make the tag local')),
2935 ('l', 'local', None, _('make the tag local')),
2935 ('r', 'rev', '', _('revision to tag')),
2936 ('r', 'rev', '', _('revision to tag')),
2936 ('', 'remove', None, _('remove a tag')),
2937 ('', 'remove', None, _('remove a tag')),
2937 # -l/--local is already there, commitopts cannot be used
2938 # -l/--local is already there, commitopts cannot be used
2938 ('m', 'message', '', _('use <text> as commit message')),
2939 ('m', 'message', '', _('use <text> as commit message')),
2939 ] + commitopts2,
2940 ] + commitopts2,
2940 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2941 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2941 "tags": (tags, [], _('hg tags')),
2942 "tags": (tags, [], _('hg tags')),
2942 "tip":
2943 "tip":
2943 (tip,
2944 (tip,
2944 [('', 'style', '', _('display using template map file')),
2945 [('', 'style', '', _('display using template map file')),
2945 ('p', 'patch', None, _('show patch')),
2946 ('p', 'patch', None, _('show patch')),
2946 ('', 'template', '', _('display with template'))],
2947 ('', 'template', '', _('display with template'))],
2947 _('hg tip [-p]')),
2948 _('hg tip [-p]')),
2948 "unbundle":
2949 "unbundle":
2949 (unbundle,
2950 (unbundle,
2950 [('u', 'update', None,
2951 [('u', 'update', None,
2951 _('update to new tip if changesets were unbundled'))],
2952 _('update to new tip if changesets were unbundled'))],
2952 _('hg unbundle [-u] FILE...')),
2953 _('hg unbundle [-u] FILE...')),
2953 "^update|up|checkout|co":
2954 "^update|up|checkout|co":
2954 (update,
2955 (update,
2955 [('C', 'clean', None, _('overwrite locally modified files')),
2956 [('C', 'clean', None, _('overwrite locally modified files')),
2956 ('d', 'date', '', _('tipmost revision matching date')),
2957 ('d', 'date', '', _('tipmost revision matching date')),
2957 ('r', 'rev', '', _('revision'))],
2958 ('r', 'rev', '', _('revision'))],
2958 _('hg update [-C] [-d DATE] [[-r] REV]')),
2959 _('hg update [-C] [-d DATE] [[-r] REV]')),
2959 "verify": (verify, [], _('hg verify')),
2960 "verify": (verify, [], _('hg verify')),
2960 "version": (version_, [], _('hg version')),
2961 "version": (version_, [], _('hg version')),
2961 }
2962 }
2962
2963
2963 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2964 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2964 " debugindex debugindexdot debugdate debuginstall")
2965 " debugindex debugindexdot debugdate debuginstall")
2965 optionalrepo = ("identify paths serve showconfig")
2966 optionalrepo = ("identify paths serve showconfig")
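The table above maps each command name to a (function, options, synopsis) tuple; a leading "^" puts the command on the short help list and "|" separates aliases, and extensions feed the same shape through their own cmdtable (see the extension loading in dispatch.py below). A minimal sketch of an entry in that format, using a hypothetical "hello" command that is not part of Mercurial:

def hello(ui, repo, name=None, **opts):
    # hypothetical command function: dispatch passes ui and repo, then
    # positional args and the parsed options as keyword arguments
    ui.write("%s %s\n" % (opts.get('greeting', 'Hello'), name or "world"))

cmdtable = {
    "^hello|hi":
        (hello,
         [('g', 'greeting', 'Hello', 'greeting to use')],
         'hg hello [-g TEXT] [NAME]'),
}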
@@ -1,131 +1,131 b''
1 # demandimport.py - global demand-loading of modules for Mercurial
1 # demandimport.py - global demand-loading of modules for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''
8 '''
9 demandimport - automatic demandloading of modules
9 demandimport - automatic demandloading of modules
10
10
11 To enable this module, do:
11 To enable this module, do:
12
12
13 import demandimport; demandimport.enable()
13 import demandimport; demandimport.enable()
14
14
15 Imports of the following forms will be demand-loaded:
15 Imports of the following forms will be demand-loaded:
16
16
17 import a, b.c
17 import a, b.c
18 import a.b as c
18 import a.b as c
19 from a import b,c # a will be loaded immediately
19 from a import b,c # a will be loaded immediately
20
20
21 These imports will not be delayed:
21 These imports will not be delayed:
22
22
23 from a import *
23 from a import *
24 b = __import__(a)
24 b = __import__(a)
25 '''
25 '''
26
26
27 _origimport = __import__
27 _origimport = __import__
28
28
29 class _demandmod(object):
29 class _demandmod(object):
30 """module demand-loader and proxy"""
30 """module demand-loader and proxy"""
31 def __init__(self, name, globals, locals):
31 def __init__(self, name, globals, locals):
32 if '.' in name:
32 if '.' in name:
33 head, rest = name.split('.', 1)
33 head, rest = name.split('.', 1)
34 after = [rest]
34 after = [rest]
35 else:
35 else:
36 head = name
36 head = name
37 after = []
37 after = []
38 object.__setattr__(self, "_data", (head, globals, locals, after))
38 object.__setattr__(self, "_data", (head, globals, locals, after))
39 object.__setattr__(self, "_module", None)
39 object.__setattr__(self, "_module", None)
40 def _extend(self, name):
40 def _extend(self, name):
41 """add to the list of submodules to load"""
41 """add to the list of submodules to load"""
42 self._data[3].append(name)
42 self._data[3].append(name)
43 def _load(self):
43 def _load(self):
44 if not self._module:
44 if not self._module:
45 head, globals, locals, after = self._data
45 head, globals, locals, after = self._data
46 mod = _origimport(head, globals, locals)
46 mod = _origimport(head, globals, locals)
47 # load submodules
47 # load submodules
48 def subload(mod, p):
48 def subload(mod, p):
49 h, t = p, None
49 h, t = p, None
50 if '.' in p:
50 if '.' in p:
51 h, t = p.split('.', 1)
51 h, t = p.split('.', 1)
52 if not hasattr(mod, h):
52 if not hasattr(mod, h):
53 setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
53 setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
54 elif t:
54 elif t:
55 subload(getattr(mod, h), t)
55 subload(getattr(mod, h), t)
56
56
57 for x in after:
57 for x in after:
58 subload(mod, x)
58 subload(mod, x)
59
59
60 # are we in the locals dictionary still?
60 # are we in the locals dictionary still?
61 if locals and locals.get(head) == self:
61 if locals and locals.get(head) == self:
62 locals[head] = mod
62 locals[head] = mod
63 object.__setattr__(self, "_module", mod)
63 object.__setattr__(self, "_module", mod)
64
64
65 def __repr__(self):
65 def __repr__(self):
66 if self._module:
66 if self._module:
67 return "<proxied module '%s'>" % self._data[0]
67 return "<proxied module '%s'>" % self._data[0]
68 return "<unloaded module '%s'>" % self._data[0]
68 return "<unloaded module '%s'>" % self._data[0]
69 def __call__(self, *args, **kwargs):
69 def __call__(self, *args, **kwargs):
70 raise TypeError("'unloaded module' object is not callable")
70 raise TypeError("%s object is not callable" % repr(self))
71 def __getattribute__(self, attr):
71 def __getattribute__(self, attr):
72 if attr in ('_data', '_extend', '_load', '_module'):
72 if attr in ('_data', '_extend', '_load', '_module'):
73 return object.__getattribute__(self, attr)
73 return object.__getattribute__(self, attr)
74 self._load()
74 self._load()
75 return getattr(self._module, attr)
75 return getattr(self._module, attr)
76 def __setattr__(self, attr, val):
76 def __setattr__(self, attr, val):
77 self._load()
77 self._load()
78 setattr(self._module, attr, val)
78 setattr(self._module, attr, val)
79
79
80 def _demandimport(name, globals=None, locals=None, fromlist=None):
80 def _demandimport(name, globals=None, locals=None, fromlist=None):
81 if not locals or name in ignore or fromlist == ('*',):
81 if not locals or name in ignore or fromlist == ('*',):
82 # these cases we can't really delay
82 # these cases we can't really delay
83 return _origimport(name, globals, locals, fromlist)
83 return _origimport(name, globals, locals, fromlist)
84 elif not fromlist:
84 elif not fromlist:
85 # import a [as b]
85 # import a [as b]
86 if '.' in name: # a.b
86 if '.' in name: # a.b
87 base, rest = name.split('.', 1)
87 base, rest = name.split('.', 1)
88 # email.__init__ loading email.mime
88 # email.__init__ loading email.mime
89 if globals and globals.get('__name__', None) == base:
89 if globals and globals.get('__name__', None) == base:
90 return _origimport(name, globals, locals, fromlist)
90 return _origimport(name, globals, locals, fromlist)
91 # if a is already demand-loaded, add b to its submodule list
91 # if a is already demand-loaded, add b to its submodule list
92 if base in locals:
92 if base in locals:
93 if isinstance(locals[base], _demandmod):
93 if isinstance(locals[base], _demandmod):
94 locals[base]._extend(rest)
94 locals[base]._extend(rest)
95 return locals[base]
95 return locals[base]
96 return _demandmod(name, globals, locals)
96 return _demandmod(name, globals, locals)
97 else:
97 else:
98 # from a import b,c,d
98 # from a import b,c,d
99 mod = _origimport(name, globals, locals)
99 mod = _origimport(name, globals, locals)
100 # recurse down the module chain
100 # recurse down the module chain
101 for comp in name.split('.')[1:]:
101 for comp in name.split('.')[1:]:
102 if not hasattr(mod, comp):
102 if not hasattr(mod, comp):
103 setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
103 setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
104 mod = getattr(mod, comp)
104 mod = getattr(mod, comp)
105 for x in fromlist:
105 for x in fromlist:
106 # set requested submodules for demand load
106 # set requested submodules for demand load
107 if not(hasattr(mod, x)):
107 if not(hasattr(mod, x)):
108 setattr(mod, x, _demandmod(x, mod.__dict__, locals))
108 setattr(mod, x, _demandmod(x, mod.__dict__, locals))
109 return mod
109 return mod
110
110
111 ignore = [
111 ignore = [
112 '_hashlib',
112 '_hashlib',
113 '_xmlplus',
113 '_xmlplus',
114 'fcntl',
114 'fcntl',
115 'win32com.gen_py',
115 'win32com.gen_py',
116 # imported by tarfile, not available under Windows
116 # imported by tarfile, not available under Windows
117 'pwd',
117 'pwd',
118 'grp',
118 'grp',
119 # imported by profile, itself imported by hotshot.stats,
119 # imported by profile, itself imported by hotshot.stats,
120 # not available under Windows
120 # not available under Windows
121 'resource',
121 'resource',
122 ]
122 ]
123
123
124 def enable():
124 def enable():
125 "enable global demand-loading of modules"
125 "enable global demand-loading of modules"
126 __builtins__["__import__"] = _demandimport
126 __builtins__["__import__"] = _demandimport
127
127
128 def disable():
128 def disable():
129 "disable global demand-loading of modules"
129 "disable global demand-loading of modules"
130 __builtins__["__import__"] = _origimport
130 __builtins__["__import__"] = _origimport
131
131
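A minimal usage sketch for the demandimport module above (Python 2, as in this changeset), assuming demandimport.py is importable from the current path; the imported module name is only an example:

import demandimport
demandimport.enable()

import smtplib                 # bound to a _demandmod proxy, not loaded yet
print repr(smtplib)            # <unloaded module 'smtplib'>
smtplib.SMTP                   # first attribute access triggers the real import
print repr(smtplib)            # the name now refers to the loaded module

demandimport.disable()         # restore the original __import__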
@@ -1,401 +1,403 b''
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex, time
11 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
11 import util, commands, hg, lock, fancyopts, revlog, version, extensions, hook
12 import cmdutil
12 import cmdutil
13 import ui as _ui
13 import ui as _ui
14
14
15 class ParseError(Exception):
15 class ParseError(Exception):
16 """Exception raised on errors in parsing the command line."""
16 """Exception raised on errors in parsing the command line."""
17
17
18 def run():
18 def run():
19 "run the command in sys.argv"
19 "run the command in sys.argv"
20 sys.exit(dispatch(sys.argv[1:]))
20 sys.exit(dispatch(sys.argv[1:]))
21
21
22 def dispatch(args):
22 def dispatch(args):
23 "run the command specified in args"
23 "run the command specified in args"
24 try:
24 try:
25 u = _ui.ui(traceback='--traceback' in args)
25 u = _ui.ui(traceback='--traceback' in args)
26 except util.Abort, inst:
26 except util.Abort, inst:
27 sys.stderr.write(_("abort: %s\n") % inst)
27 sys.stderr.write(_("abort: %s\n") % inst)
28 return -1
28 return -1
29 return _runcatch(u, args)
29 return _runcatch(u, args)
30
30
31 def _runcatch(ui, args):
31 def _runcatch(ui, args):
32 def catchterm(*args):
32 def catchterm(*args):
33 raise util.SignalInterrupt
33 raise util.SignalInterrupt
34
34
35 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
35 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 num = getattr(signal, name, None)
36 num = getattr(signal, name, None)
37 if num: signal.signal(num, catchterm)
37 if num: signal.signal(num, catchterm)
38
38
39 try:
39 try:
40 try:
40 try:
41 # enter the debugger before command execution
41 # enter the debugger before command execution
42 if '--debugger' in args:
42 if '--debugger' in args:
43 pdb.set_trace()
43 pdb.set_trace()
44 try:
44 try:
45 return _dispatch(ui, args)
45 return _dispatch(ui, args)
46 finally:
46 finally:
47 ui.flush()
47 ui.flush()
48 except:
48 except:
49 # enter the debugger when we hit an exception
49 # enter the debugger when we hit an exception
50 if '--debugger' in args:
50 if '--debugger' in args:
51 pdb.post_mortem(sys.exc_info()[2])
51 pdb.post_mortem(sys.exc_info()[2])
52 ui.print_exc()
52 ui.print_exc()
53 raise
53 raise
54
54
55 except ParseError, inst:
55 except ParseError, inst:
56 if inst.args[0]:
56 if inst.args[0]:
57 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
57 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
58 commands.help_(ui, inst.args[0])
58 commands.help_(ui, inst.args[0])
59 else:
59 else:
60 ui.warn(_("hg: %s\n") % inst.args[1])
60 ui.warn(_("hg: %s\n") % inst.args[1])
61 commands.help_(ui, 'shortlist')
61 commands.help_(ui, 'shortlist')
62 except cmdutil.AmbiguousCommand, inst:
62 except cmdutil.AmbiguousCommand, inst:
63 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
63 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
64 (inst.args[0], " ".join(inst.args[1])))
64 (inst.args[0], " ".join(inst.args[1])))
65 except cmdutil.UnknownCommand, inst:
65 except cmdutil.UnknownCommand, inst:
66 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
66 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
67 commands.help_(ui, 'shortlist')
67 commands.help_(ui, 'shortlist')
68 except hg.RepoError, inst:
68 except hg.RepoError, inst:
69 ui.warn(_("abort: %s!\n") % inst)
69 ui.warn(_("abort: %s!\n") % inst)
70 except lock.LockHeld, inst:
70 except lock.LockHeld, inst:
71 if inst.errno == errno.ETIMEDOUT:
71 if inst.errno == errno.ETIMEDOUT:
72 reason = _('timed out waiting for lock held by %s') % inst.locker
72 reason = _('timed out waiting for lock held by %s') % inst.locker
73 else:
73 else:
74 reason = _('lock held by %s') % inst.locker
74 reason = _('lock held by %s') % inst.locker
75 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
75 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
76 except lock.LockUnavailable, inst:
76 except lock.LockUnavailable, inst:
77 ui.warn(_("abort: could not lock %s: %s\n") %
77 ui.warn(_("abort: could not lock %s: %s\n") %
78 (inst.desc or inst.filename, inst.strerror))
78 (inst.desc or inst.filename, inst.strerror))
79 except revlog.RevlogError, inst:
79 except revlog.RevlogError, inst:
80 ui.warn(_("abort: %s!\n") % inst)
80 ui.warn(_("abort: %s!\n") % inst)
81 except util.SignalInterrupt:
81 except util.SignalInterrupt:
82 ui.warn(_("killed!\n"))
82 ui.warn(_("killed!\n"))
83 except KeyboardInterrupt:
83 except KeyboardInterrupt:
84 try:
84 try:
85 ui.warn(_("interrupted!\n"))
85 ui.warn(_("interrupted!\n"))
86 except IOError, inst:
86 except IOError, inst:
87 if inst.errno == errno.EPIPE:
87 if inst.errno == errno.EPIPE:
88 if ui.debugflag:
88 if ui.debugflag:
89 ui.warn(_("\nbroken pipe\n"))
89 ui.warn(_("\nbroken pipe\n"))
90 else:
90 else:
91 raise
91 raise
92 except socket.error, inst:
92 except socket.error, inst:
93 ui.warn(_("abort: %s\n") % inst[1])
93 ui.warn(_("abort: %s\n") % inst[1])
94 except IOError, inst:
94 except IOError, inst:
95 if hasattr(inst, "code"):
95 if hasattr(inst, "code"):
96 ui.warn(_("abort: %s\n") % inst)
96 ui.warn(_("abort: %s\n") % inst)
97 elif hasattr(inst, "reason"):
97 elif hasattr(inst, "reason"):
98 try: # usually it is in the form (errno, strerror)
98 try: # usually it is in the form (errno, strerror)
99 reason = inst.reason.args[1]
99 reason = inst.reason.args[1]
100 except: # it might be anything, for example a string
100 except: # it might be anything, for example a string
101 reason = inst.reason
101 reason = inst.reason
102 ui.warn(_("abort: error: %s\n") % reason)
102 ui.warn(_("abort: error: %s\n") % reason)
103 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
103 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
104 if ui.debugflag:
104 if ui.debugflag:
105 ui.warn(_("broken pipe\n"))
105 ui.warn(_("broken pipe\n"))
106 elif getattr(inst, "strerror", None):
106 elif getattr(inst, "strerror", None):
107 if getattr(inst, "filename", None):
107 if getattr(inst, "filename", None):
108 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
109 else:
109 else:
110 ui.warn(_("abort: %s\n") % inst.strerror)
110 ui.warn(_("abort: %s\n") % inst.strerror)
111 else:
111 else:
112 raise
112 raise
113 except OSError, inst:
113 except OSError, inst:
114 if getattr(inst, "filename", None):
114 if getattr(inst, "filename", None):
115 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
115 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
116 else:
116 else:
117 ui.warn(_("abort: %s\n") % inst.strerror)
117 ui.warn(_("abort: %s\n") % inst.strerror)
118 except util.UnexpectedOutput, inst:
118 except util.UnexpectedOutput, inst:
119 ui.warn(_("abort: %s") % inst[0])
119 ui.warn(_("abort: %s") % inst[0])
120 if not isinstance(inst[1], basestring):
120 if not isinstance(inst[1], basestring):
121 ui.warn(" %r\n" % (inst[1],))
121 ui.warn(" %r\n" % (inst[1],))
122 elif not inst[1]:
122 elif not inst[1]:
123 ui.warn(_(" empty string\n"))
123 ui.warn(_(" empty string\n"))
124 else:
124 else:
125 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
125 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
126 except ImportError, inst:
126 except ImportError, inst:
127 m = str(inst).split()[-1]
127 m = str(inst).split()[-1]
128 ui.warn(_("abort: could not import module %s!\n") % m)
128 ui.warn(_("abort: could not import module %s!\n") % m)
129 if m in "mpatch bdiff".split():
129 if m in "mpatch bdiff".split():
130 ui.warn(_("(did you forget to compile extensions?)\n"))
130 ui.warn(_("(did you forget to compile extensions?)\n"))
131 elif m in "zlib".split():
131 elif m in "zlib".split():
132 ui.warn(_("(is your Python install correct?)\n"))
132 ui.warn(_("(is your Python install correct?)\n"))
133
133
134 except util.Abort, inst:
134 except util.Abort, inst:
135 ui.warn(_("abort: %s\n") % inst)
135 ui.warn(_("abort: %s\n") % inst)
136 except MemoryError:
137 ui.warn(_("abort: out of memory\n"))
136 except SystemExit, inst:
138 except SystemExit, inst:
137 # Commands shouldn't sys.exit directly, but give a return code.
139 # Commands shouldn't sys.exit directly, but give a return code.
138 # Just in case, catch this and pass the exit code to the caller.
140 # Just in case, catch this and pass the exit code to the caller.
139 return inst.code
141 return inst.code
140 except:
142 except:
141 ui.warn(_("** unknown exception encountered, details follow\n"))
143 ui.warn(_("** unknown exception encountered, details follow\n"))
142 ui.warn(_("** report bug details to "
144 ui.warn(_("** report bug details to "
143 "http://www.selenic.com/mercurial/bts\n"))
145 "http://www.selenic.com/mercurial/bts\n"))
144 ui.warn(_("** or mercurial@selenic.com\n"))
146 ui.warn(_("** or mercurial@selenic.com\n"))
145 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
147 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
146 % version.get_version())
148 % version.get_version())
147 raise
149 raise
148
150
149 return -1
151 return -1
150
152
151 def _findrepo():
153 def _findrepo():
152 p = os.getcwd()
154 p = os.getcwd()
153 while not os.path.isdir(os.path.join(p, ".hg")):
155 while not os.path.isdir(os.path.join(p, ".hg")):
154 oldp, p = p, os.path.dirname(p)
156 oldp, p = p, os.path.dirname(p)
155 if p == oldp:
157 if p == oldp:
156 return None
158 return None
157
159
158 return p
160 return p
159
161
160 def _parse(ui, args):
162 def _parse(ui, args):
161 options = {}
163 options = {}
162 cmdoptions = {}
164 cmdoptions = {}
163
165
164 try:
166 try:
165 args = fancyopts.fancyopts(args, commands.globalopts, options)
167 args = fancyopts.fancyopts(args, commands.globalopts, options)
166 except fancyopts.getopt.GetoptError, inst:
168 except fancyopts.getopt.GetoptError, inst:
167 raise ParseError(None, inst)
169 raise ParseError(None, inst)
168
170
169 if args:
171 if args:
170 cmd, args = args[0], args[1:]
172 cmd, args = args[0], args[1:]
171 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
173 aliases, i = cmdutil.findcmd(ui, cmd, commands.table)
172 cmd = aliases[0]
174 cmd = aliases[0]
173 defaults = ui.config("defaults", cmd)
175 defaults = ui.config("defaults", cmd)
174 if defaults:
176 if defaults:
175 args = shlex.split(defaults) + args
177 args = shlex.split(defaults) + args
176 c = list(i[1])
178 c = list(i[1])
177 else:
179 else:
178 cmd = None
180 cmd = None
179 c = []
181 c = []
180
182
181 # combine global options into local
183 # combine global options into local
182 for o in commands.globalopts:
184 for o in commands.globalopts:
183 c.append((o[0], o[1], options[o[1]], o[3]))
185 c.append((o[0], o[1], options[o[1]], o[3]))
184
186
185 try:
187 try:
186 args = fancyopts.fancyopts(args, c, cmdoptions)
188 args = fancyopts.fancyopts(args, c, cmdoptions)
187 except fancyopts.getopt.GetoptError, inst:
189 except fancyopts.getopt.GetoptError, inst:
188 raise ParseError(cmd, inst)
190 raise ParseError(cmd, inst)
189
191
190 # separate global options back out
192 # separate global options back out
191 for o in commands.globalopts:
193 for o in commands.globalopts:
192 n = o[1]
194 n = o[1]
193 options[n] = cmdoptions[n]
195 options[n] = cmdoptions[n]
194 del cmdoptions[n]
196 del cmdoptions[n]
195
197
196 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
198 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
197
199
198 def _parseconfig(config):
200 def _parseconfig(config):
199 """parse the --config options from the command line"""
201 """parse the --config options from the command line"""
200 parsed = []
202 parsed = []
201 for cfg in config:
203 for cfg in config:
202 try:
204 try:
203 name, value = cfg.split('=', 1)
205 name, value = cfg.split('=', 1)
204 section, name = name.split('.', 1)
206 section, name = name.split('.', 1)
205 if not section or not name:
207 if not section or not name:
206 raise IndexError
208 raise IndexError
207 parsed.append((section, name, value))
209 parsed.append((section, name, value))
208 except (IndexError, ValueError):
210 except (IndexError, ValueError):
209 raise util.Abort(_('malformed --config option: %s') % cfg)
211 raise util.Abort(_('malformed --config option: %s') % cfg)
210 return parsed
212 return parsed
211
213
212 def _earlygetopt(aliases, args):
214 def _earlygetopt(aliases, args):
213 """Return list of values for an option (or aliases).
215 """Return list of values for an option (or aliases).
214
216
215 The values are listed in the order they appear in args.
217 The values are listed in the order they appear in args.
216 The options and values are removed from args.
218 The options and values are removed from args.
217 """
219 """
218 try:
220 try:
219 argcount = args.index("--")
221 argcount = args.index("--")
220 except ValueError:
222 except ValueError:
221 argcount = len(args)
223 argcount = len(args)
222 shortopts = [opt for opt in aliases if len(opt) == 2]
224 shortopts = [opt for opt in aliases if len(opt) == 2]
223 values = []
225 values = []
224 pos = 0
226 pos = 0
225 while pos < argcount:
227 while pos < argcount:
226 if args[pos] in aliases:
228 if args[pos] in aliases:
227 if pos + 1 >= argcount:
229 if pos + 1 >= argcount:
228 # ignore and let getopt report an error if there is no value
230 # ignore and let getopt report an error if there is no value
229 break
231 break
230 del args[pos]
232 del args[pos]
231 values.append(args.pop(pos))
233 values.append(args.pop(pos))
232 argcount -= 2
234 argcount -= 2
233 elif args[pos][:2] in shortopts:
235 elif args[pos][:2] in shortopts:
234 # short option can have no following space, e.g. hg log -Rfoo
236 # short option can have no following space, e.g. hg log -Rfoo
235 values.append(args.pop(pos)[2:])
237 values.append(args.pop(pos)[2:])
236 argcount -= 1
238 argcount -= 1
237 else:
239 else:
238 pos += 1
240 pos += 1
239 return values
241 return values
240
242
241 _loaded = {}
243 _loaded = {}
242 def _dispatch(ui, args):
244 def _dispatch(ui, args):
243 # read --config before doing anything else
245 # read --config before doing anything else
244 # (e.g. to change trust settings for reading .hg/hgrc)
246 # (e.g. to change trust settings for reading .hg/hgrc)
245 config = _earlygetopt(['--config'], args)
247 config = _earlygetopt(['--config'], args)
246 if config:
248 if config:
247 ui.updateopts(config=_parseconfig(config))
249 ui.updateopts(config=_parseconfig(config))
248
250
249 # check for cwd
251 # check for cwd
250 cwd = _earlygetopt(['--cwd'], args)
252 cwd = _earlygetopt(['--cwd'], args)
251 if cwd:
253 if cwd:
252 os.chdir(cwd[-1])
254 os.chdir(cwd[-1])
253
255
254 # read the local repository .hgrc into a local ui object
256 # read the local repository .hgrc into a local ui object
255 path = _findrepo() or ""
257 path = _findrepo() or ""
256 if not path:
258 if not path:
257 lui = ui
259 lui = ui
258 if path:
260 if path:
259 try:
261 try:
260 lui = _ui.ui(parentui=ui)
262 lui = _ui.ui(parentui=ui)
261 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
263 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
262 except IOError:
264 except IOError:
263 pass
265 pass
264
266
265 # now we can expand paths, even ones in .hg/hgrc
267 # now we can expand paths, even ones in .hg/hgrc
266 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
268 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
267 if rpath:
269 if rpath:
268 path = lui.expandpath(rpath[-1])
270 path = lui.expandpath(rpath[-1])
269 lui = _ui.ui(parentui=ui)
271 lui = _ui.ui(parentui=ui)
270 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
272 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
271
273
272 extensions.loadall(lui)
274 extensions.loadall(lui)
273 for name, module in extensions.extensions():
275 for name, module in extensions.extensions():
274 if name in _loaded:
276 if name in _loaded:
275 continue
277 continue
276 cmdtable = getattr(module, 'cmdtable', {})
278 cmdtable = getattr(module, 'cmdtable', {})
277 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
279 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
278 if overrides:
280 if overrides:
279 ui.warn(_("extension '%s' overrides commands: %s\n")
281 ui.warn(_("extension '%s' overrides commands: %s\n")
280 % (name, " ".join(overrides)))
282 % (name, " ".join(overrides)))
281 commands.table.update(cmdtable)
283 commands.table.update(cmdtable)
282 _loaded[name] = 1
284 _loaded[name] = 1
283 # check for fallback encoding
285 # check for fallback encoding
284 fallback = lui.config('ui', 'fallbackencoding')
286 fallback = lui.config('ui', 'fallbackencoding')
285 if fallback:
287 if fallback:
286 util._fallbackencoding = fallback
288 util._fallbackencoding = fallback
287
289
288 fullargs = args
290 fullargs = args
289 cmd, func, args, options, cmdoptions = _parse(lui, args)
291 cmd, func, args, options, cmdoptions = _parse(lui, args)
290
292
291 if options["config"]:
293 if options["config"]:
292 raise util.Abort(_("Option --config may not be abbreviated!"))
294 raise util.Abort(_("Option --config may not be abbreviated!"))
293 if options["cwd"]:
295 if options["cwd"]:
294 raise util.Abort(_("Option --cwd may not be abbreviated!"))
296 raise util.Abort(_("Option --cwd may not be abbreviated!"))
295 if options["repository"]:
297 if options["repository"]:
296 raise util.Abort(_(
298 raise util.Abort(_(
297 "Option -R has to be separated from other options (i.e. not -qR) "
299 "Option -R has to be separated from other options (i.e. not -qR) "
298 "and --repository may only be abbreviated as --repo!"))
300 "and --repository may only be abbreviated as --repo!"))
299
301
300 if options["encoding"]:
302 if options["encoding"]:
301 util._encoding = options["encoding"]
303 util._encoding = options["encoding"]
302 if options["encodingmode"]:
304 if options["encodingmode"]:
303 util._encodingmode = options["encodingmode"]
305 util._encodingmode = options["encodingmode"]
304 if options["time"]:
306 if options["time"]:
305 def get_times():
307 def get_times():
306 t = os.times()
308 t = os.times()
307 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
309 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
308 t = (t[0], t[1], t[2], t[3], time.clock())
310 t = (t[0], t[1], t[2], t[3], time.clock())
309 return t
311 return t
310 s = get_times()
312 s = get_times()
311 def print_time():
313 def print_time():
312 t = get_times()
314 t = get_times()
313 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
315 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
314 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
316 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
315 atexit.register(print_time)
317 atexit.register(print_time)
316
318
317 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
319 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
318 not options["noninteractive"], options["traceback"])
320 not options["noninteractive"], options["traceback"])
319
321
320 if options['help']:
322 if options['help']:
321 return commands.help_(ui, cmd, options['version'])
323 return commands.help_(ui, cmd, options['version'])
322 elif options['version']:
324 elif options['version']:
323 return commands.version_(ui)
325 return commands.version_(ui)
324 elif not cmd:
326 elif not cmd:
325 return commands.help_(ui, 'shortlist')
327 return commands.help_(ui, 'shortlist')
326
328
327 repo = None
329 repo = None
328 if cmd not in commands.norepo.split():
330 if cmd not in commands.norepo.split():
329 try:
331 try:
330 repo = hg.repository(ui, path=path)
332 repo = hg.repository(ui, path=path)
331 ui = repo.ui
333 ui = repo.ui
332 if not repo.local():
334 if not repo.local():
333 raise util.Abort(_("repository '%s' is not local") % path)
335 raise util.Abort(_("repository '%s' is not local") % path)
334 except hg.RepoError:
336 except hg.RepoError:
335 if cmd not in commands.optionalrepo.split():
337 if cmd not in commands.optionalrepo.split():
336 if not path:
338 if not path:
337 raise hg.RepoError(_("There is no Mercurial repository here"
339 raise hg.RepoError(_("There is no Mercurial repository here"
338 " (.hg not found)"))
340 " (.hg not found)"))
339 raise
341 raise
340 d = lambda: func(ui, repo, *args, **cmdoptions)
342 d = lambda: func(ui, repo, *args, **cmdoptions)
341 else:
343 else:
342 d = lambda: func(ui, *args, **cmdoptions)
344 d = lambda: func(ui, *args, **cmdoptions)
343
345
344 # run pre-hook, and abort if it fails
346 # run pre-hook, and abort if it fails
345 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
347 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
346 if ret:
348 if ret:
347 return ret
349 return ret
348 ret = _runcommand(ui, options, cmd, d)
350 ret = _runcommand(ui, options, cmd, d)
349 # run post-hook, passing command result
351 # run post-hook, passing command result
350 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
352 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
351 result = ret)
353 result = ret)
352 return ret
354 return ret
353
355
354 def _runcommand(ui, options, cmd, cmdfunc):
356 def _runcommand(ui, options, cmd, cmdfunc):
355 def checkargs():
357 def checkargs():
356 try:
358 try:
357 return cmdfunc()
359 return cmdfunc()
358 except TypeError, inst:
360 except TypeError, inst:
359 # was this an argument error?
361 # was this an argument error?
360 tb = traceback.extract_tb(sys.exc_info()[2])
362 tb = traceback.extract_tb(sys.exc_info()[2])
361 if len(tb) != 2: # no
363 if len(tb) != 2: # no
362 raise
364 raise
363 raise ParseError(cmd, _("invalid arguments"))
365 raise ParseError(cmd, _("invalid arguments"))
364
366
365 if options['profile']:
367 if options['profile']:
366 import hotshot, hotshot.stats
368 import hotshot, hotshot.stats
367 prof = hotshot.Profile("hg.prof")
369 prof = hotshot.Profile("hg.prof")
368 try:
370 try:
369 try:
371 try:
370 return prof.runcall(checkargs)
372 return prof.runcall(checkargs)
371 except:
373 except:
372 try:
374 try:
373 ui.warn(_('exception raised - generating '
375 ui.warn(_('exception raised - generating '
374 'profile anyway\n'))
376 'profile anyway\n'))
375 except:
377 except:
376 pass
378 pass
377 raise
379 raise
378 finally:
380 finally:
379 prof.close()
381 prof.close()
380 stats = hotshot.stats.load("hg.prof")
382 stats = hotshot.stats.load("hg.prof")
381 stats.strip_dirs()
383 stats.strip_dirs()
382 stats.sort_stats('time', 'calls')
384 stats.sort_stats('time', 'calls')
383 stats.print_stats(40)
385 stats.print_stats(40)
384 elif options['lsprof']:
386 elif options['lsprof']:
385 try:
387 try:
386 from mercurial import lsprof
388 from mercurial import lsprof
387 except ImportError:
389 except ImportError:
388 raise util.Abort(_(
390 raise util.Abort(_(
389 'lsprof not available - install from '
391 'lsprof not available - install from '
390 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
392 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
391 p = lsprof.Profiler()
393 p = lsprof.Profiler()
392 p.enable(subcalls=True)
394 p.enable(subcalls=True)
393 try:
395 try:
394 return checkargs()
396 return checkargs()
395 finally:
397 finally:
396 p.disable()
398 p.disable()
397 stats = lsprof.Stats(p.getstats())
399 stats = lsprof.Stats(p.getstats())
398 stats.sort()
400 stats.sort()
399 stats.pprint(top=10, file=sys.stderr, climit=5)
401 stats.pprint(top=10, file=sys.stderr, climit=5)
400 else:
402 else:
401 return checkargs()
403 return checkargs()
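The --config handling in _parseconfig above splits each "section.name=value" argument into a (section, name, value) triple. A standalone sketch of the same splitting logic, not the dispatch module itself:

def parseconfig(config):
    parsed = []
    for cfg in config:
        try:
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            raise ValueError('malformed --config option: %s' % cfg)
    return parsed

print parseconfig(['ui.username=alice', 'extensions.mq='])
# [('ui', 'username', 'alice'), ('extensions', 'mq', '')]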
@@ -1,35 +1,74 b''
1 import getopt
1 import getopt
2
2
3 def fancyopts(args, options, state):
3 def fancyopts(args, options, state):
4 long = []
4 """
5 short = ''
5 read args, parse options, and store options in state
6 map = {}
6
7 dt = {}
7 each option is a tuple of:
8
9 short option or ''
10 long option
11 default value
12 description
13
14 option types include:
15
16 boolean or none - option sets variable in state to true
17 string - parameter string is stored in state
18 list - parameter string is added to a list
19 integer - parameter string is stored as int
20 function - call function with parameter
8
21
9 for s, l, d, c in options:
22 non-option args are returned
10 pl = l.replace('-', '_')
23 """
11 map['-'+s] = map['--'+l] = pl
24 namelist = []
12 if isinstance(d, list):
25 shortlist = ''
13 state[pl] = d[:]
26 argmap = {}
27 defmap = {}
28
29 for short, name, default, comment in options:
30 # convert opts to getopt format
31 oname = name
32 name = name.replace('-', '_')
33
34 argmap['-' + short] = argmap['--' + oname] = name
35 defmap[name] = default
36
37 # copy defaults to state
38 if isinstance(default, list):
39 state[name] = default[:]
40 elif callable(default):
41 print "whoa", name, default
42 state[name] = None
14 else:
43 else:
15 state[pl] = d
44 state[name] = default
16 dt[pl] = type(d)
17 if (d is not None and d is not True and d is not False and
18 not callable(d)):
19 if s: s += ':'
20 if l: l += '='
21 if s: short = short + s
22 if l: long.append(l)
23
45
24 opts, args = getopt.getopt(args, short, long)
46 # does it take a parameter?
47 if not (default is None or default is True or default is False):
48 if short: short += ':'
49 if oname: oname += '='
50 if short:
51 shortlist += short
52 if name:
53 namelist.append(oname)
54
55 # parse arguments
56 opts, args = getopt.getopt(args, shortlist, namelist)
25
57
26 for opt, arg in opts:
58 # transfer result to state
27 if dt[map[opt]] is type(fancyopts): state[map[opt]](state, map[opt], arg)
59 for opt, val in opts:
28 elif dt[map[opt]] is type(1): state[map[opt]] = int(arg)
60 name = argmap[opt]
29 elif dt[map[opt]] is type(''): state[map[opt]] = arg
61 t = type(defmap[name])
30 elif dt[map[opt]] is type([]): state[map[opt]].append(arg)
62 if t is type(fancyopts):
31 elif dt[map[opt]] is type(None): state[map[opt]] = True
63 state[name] = defmap[name](val)
32 elif dt[map[opt]] is type(False): state[map[opt]] = True
64 elif t is type(1):
65 state[name] = int(val)
66 elif t is type(''):
67 state[name] = val
68 elif t is type([]):
69 state[name].append(val)
70 elif t is type(None) or t is type(False):
71 state[name] = True
33
72
73 # return unparsed args
34 return args
74 return args
35
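The rewritten fancyopts() above can be exercised directly; a minimal sketch assuming the module is importable as fancyopts, with a made-up option table in the (short, long, default, description) format described by its docstring:

import fancyopts

table = [
    ('v', 'verbose', None, 'enable additional output'),   # boolean flag
    ('r', 'rev', [], 'revisions'),                         # list option
    ('p', 'port', 8000, 'port to use'),                    # integer option
    ('n', 'name', '', 'name to show'),                     # string option
]
state = {}
rest = fancyopts.fancyopts(['-v', '--rev', 'tip', '-p', '8080', 'file1'],
                           table, state)
print state['verbose'], state['rev'], state['port']        # True ['tip'] 8080
print rest                                                 # ['file1']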
@@ -1,312 +1,312 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from i18n import _
11 from i18n import _
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util, extensions
13 import errno, lock, os, shutil, util, extensions
14 import merge as _merge
14 import merge as _merge
15 import verify as _verify
15 import verify as _verify
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 def parseurl(url, revs):
21 def parseurl(url, revs):
22 '''parse url#branch, returning url, branch + revs'''
22 '''parse url#branch, returning url, branch + revs'''
23
23
24 if '#' not in url:
24 if '#' not in url:
25 return url, (revs or None), None
25 return url, (revs or None), None
26
26
27 url, rev = url.split('#', 1)
27 url, rev = url.split('#', 1)
28 return url, revs + [rev], rev
28 return url, revs + [rev], rev
29
29
30 schemes = {
30 schemes = {
31 'bundle': bundlerepo,
31 'bundle': bundlerepo,
32 'file': _local,
32 'file': _local,
33 'http': httprepo,
33 'http': httprepo,
34 'https': httprepo,
34 'https': httprepo,
35 'ssh': sshrepo,
35 'ssh': sshrepo,
36 'static-http': statichttprepo,
36 'static-http': statichttprepo,
37 }
37 }
38
38
39 def _lookup(path):
39 def _lookup(path):
40 scheme = 'file'
40 scheme = 'file'
41 if path:
41 if path:
42 c = path.find(':')
42 c = path.find(':')
43 if c > 0:
43 if c > 0:
44 scheme = path[:c]
44 scheme = path[:c]
45 thing = schemes.get(scheme) or schemes['file']
45 thing = schemes.get(scheme) or schemes['file']
46 try:
46 try:
47 return thing(path)
47 return thing(path)
48 except TypeError:
48 except TypeError:
49 return thing
49 return thing
50
50
51 def islocal(repo):
51 def islocal(repo):
52 '''return true if repo or path is local'''
52 '''return true if repo or path is local'''
53 if isinstance(repo, str):
53 if isinstance(repo, str):
54 try:
54 try:
55 return _lookup(repo).islocal(repo)
55 return _lookup(repo).islocal(repo)
56 except AttributeError:
56 except AttributeError:
57 return False
57 return False
58 return repo.local()
58 return repo.local()
59
59
60 def repository(ui, path='', create=False):
60 def repository(ui, path='', create=False):
61 """return a repository object for the specified path"""
61 """return a repository object for the specified path"""
62 repo = _lookup(path).instance(ui, path, create)
62 repo = _lookup(path).instance(ui, path, create)
63 ui = getattr(repo, "ui", ui)
63 ui = getattr(repo, "ui", ui)
64 for name, module in extensions.extensions():
64 for name, module in extensions.extensions():
65 hook = getattr(module, 'reposetup', None)
65 hook = getattr(module, 'reposetup', None)
66 if hook:
66 if hook:
67 hook(ui, repo)
67 hook(ui, repo)
68 return repo
68 return repo
69
69
70 def defaultdest(source):
70 def defaultdest(source):
71 '''return default destination of clone if none is given'''
71 '''return default destination of clone if none is given'''
72 return os.path.basename(os.path.normpath(source))
72 return os.path.basename(os.path.normpath(source))
73
73
74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
74 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
75 stream=False):
75 stream=False):
76 """Make a copy of an existing repository.
76 """Make a copy of an existing repository.
77
77
78 Create a copy of an existing repository in a new directory. The
78 Create a copy of an existing repository in a new directory. The
79 source and destination are URLs, as passed to the repository
79 source and destination are URLs, as passed to the repository
80 function. Returns a pair of repository objects, the source and
80 function. Returns a pair of repository objects, the source and
81 newly created destination.
81 newly created destination.
82
82
83 The location of the source is added to the new repository's
83 The location of the source is added to the new repository's
84 .hg/hgrc file, as the default to be used for future pulls and
84 .hg/hgrc file, as the default to be used for future pulls and
85 pushes.
85 pushes.
86
86
87 If an exception is raised, the partly cloned/updated destination
87 If an exception is raised, the partly cloned/updated destination
88 repository will be deleted.
88 repository will be deleted.
89
89
90 Arguments:
90 Arguments:
91
91
92 source: repository object or URL
92 source: repository object or URL
93
93
94 dest: URL of destination repository to create (defaults to base
94 dest: URL of destination repository to create (defaults to base
95 name of source repository)
95 name of source repository)
96
96
97 pull: always pull from source repository, even in local case
97 pull: always pull from source repository, even in local case
98
98
99 stream: stream raw data uncompressed from repository (fast over
99 stream: stream raw data uncompressed from repository (fast over
100 LAN, slow over WAN)
100 LAN, slow over WAN)
101
101
102 rev: revision to clone up to (implies pull=True)
102 rev: revision to clone up to (implies pull=True)
103
103
104 update: update working directory after clone completes, if
104 update: update working directory after clone completes, if
105 destination is local repository
105 destination is local repository
106 """
106 """
107
107
108 origsource = source
108 origsource = source
109 source, rev, checkout = parseurl(ui.expandpath(source), rev)
109 source, rev, checkout = parseurl(ui.expandpath(source), rev)
110
110
111 if isinstance(source, str):
111 if isinstance(source, str):
112 src_repo = repository(ui, source)
112 src_repo = repository(ui, source)
113 else:
113 else:
114 src_repo = source
114 src_repo = source
115 source = src_repo.url()
115 source = src_repo.url()
116
116
117 if dest is None:
117 if dest is None:
118 dest = defaultdest(source)
118 dest = defaultdest(source)
119 ui.status(_("destination directory: %s\n") % dest)
119 ui.status(_("destination directory: %s\n") % dest)
120
120
121 def localpath(path):
121 def localpath(path):
122 if path.startswith('file://'):
122 if path.startswith('file://'):
123 return path[7:]
123 return path[7:]
124 if path.startswith('file:'):
124 if path.startswith('file:'):
125 return path[5:]
125 return path[5:]
126 return path
126 return path
127
127
128 dest = localpath(dest)
128 dest = localpath(dest)
129 source = localpath(source)
129 source = localpath(source)
130
130
131 if os.path.exists(dest):
131 if os.path.exists(dest):
132 raise util.Abort(_("destination '%s' already exists") % dest)
132 raise util.Abort(_("destination '%s' already exists") % dest)
133
133
134 class DirCleanup(object):
134 class DirCleanup(object):
135 def __init__(self, dir_):
135 def __init__(self, dir_):
136 self.rmtree = shutil.rmtree
136 self.rmtree = shutil.rmtree
137 self.dir_ = dir_
137 self.dir_ = dir_
138 def close(self):
138 def close(self):
139 self.dir_ = None
139 self.dir_ = None
140 def __del__(self):
140 def __del__(self):
141 if self.dir_:
141 if self.dir_:
142 self.rmtree(self.dir_, True)
142 self.rmtree(self.dir_, True)
143
143
144 src_lock = dest_lock = dir_cleanup = None
144 src_lock = dest_lock = dir_cleanup = None
145 try:
145 try:
146 if islocal(dest):
146 if islocal(dest):
147 dir_cleanup = DirCleanup(dest)
147 dir_cleanup = DirCleanup(dest)
148
148
149 abspath = origsource
149 abspath = origsource
150 copy = False
150 copy = False
151 if src_repo.local() and islocal(dest):
151 if src_repo.local() and islocal(dest):
152 abspath = os.path.abspath(util.drop_scheme('file', origsource))
152 abspath = os.path.abspath(util.drop_scheme('file', origsource))
153 copy = not pull and not rev
153 copy = not pull and not rev
154
154
155 if copy:
155 if copy:
156 try:
156 try:
157 # we use a lock here because if we race with commit, we
157 # we use a lock here because if we race with commit, we
158 # can end up with extra data in the cloned revlogs that's
158 # can end up with extra data in the cloned revlogs that's
159 # not pointed to by changesets, thus causing verify to
159 # not pointed to by changesets, thus causing verify to
160 # fail
160 # fail
161 src_lock = src_repo.lock()
161 src_lock = src_repo.lock()
162 except lock.LockException:
162 except lock.LockException:
163 copy = False
163 copy = False
164
164
165 if copy:
165 if copy:
166 def force_copy(src, dst):
166 def force_copy(src, dst):
167 try:
167 try:
168 util.copyfiles(src, dst)
168 util.copyfiles(src, dst)
169 except OSError, inst:
169 except OSError, inst:
170 if inst.errno != errno.ENOENT:
170 if inst.errno != errno.ENOENT:
171 raise
171 raise
172
172
173 src_store = os.path.realpath(src_repo.spath)
173 src_store = os.path.realpath(src_repo.spath)
174 if not os.path.exists(dest):
174 if not os.path.exists(dest):
175 os.mkdir(dest)
175 os.mkdir(dest)
176 try:
176 try:
177 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
177 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
178 os.mkdir(dest_path)
178 os.mkdir(dest_path)
179 except OSError, inst:
179 except OSError, inst:
180 if inst.errno == errno.EEXIST:
180 if inst.errno == errno.EEXIST:
181 dir_cleanup.close()
181 dir_cleanup.close()
182 raise util.Abort(_("destination '%s' already exists")
182 raise util.Abort(_("destination '%s' already exists")
183 % dest)
183 % dest)
184 raise
184 raise
185 if src_repo.spath != src_repo.path:
185 if src_repo.spath != src_repo.path:
186 # XXX racy
186 # XXX racy
187 dummy_changelog = os.path.join(dest_path, "00changelog.i")
187 dummy_changelog = os.path.join(dest_path, "00changelog.i")
188 # copy the dummy changelog
188 # copy the dummy changelog
189 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
189 force_copy(src_repo.join("00changelog.i"), dummy_changelog)
190 dest_store = os.path.join(dest_path, "store")
190 dest_store = os.path.join(dest_path, "store")
191 os.mkdir(dest_store)
191 os.mkdir(dest_store)
192 else:
192 else:
193 dest_store = dest_path
193 dest_store = dest_path
194 # copy the requires file
194 # copy the requires file
195 force_copy(src_repo.join("requires"),
195 force_copy(src_repo.join("requires"),
196 os.path.join(dest_path, "requires"))
196 os.path.join(dest_path, "requires"))
197 # we lock here to avoid premature writing to the target
197 # we lock here to avoid premature writing to the target
198 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
198 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
199
199
200 files = ("data",
200 files = ("data",
201 "00manifest.d", "00manifest.i",
201 "00manifest.d", "00manifest.i",
202 "00changelog.d", "00changelog.i")
202 "00changelog.d", "00changelog.i")
203 for f in files:
203 for f in files:
204 src = os.path.join(src_store, f)
204 src = os.path.join(src_store, f)
205 dst = os.path.join(dest_store, f)
205 dst = os.path.join(dest_store, f)
206 force_copy(src, dst)
206 force_copy(src, dst)
207
207
208 # we need to re-init the repo after manually copying the data
208 # we need to re-init the repo after manually copying the data
209 # into it
209 # into it
210 dest_repo = repository(ui, dest)
210 dest_repo = repository(ui, dest)
211
211
212 else:
212 else:
213 try:
213 try:
214 dest_repo = repository(ui, dest, create=True)
214 dest_repo = repository(ui, dest, create=True)
215 except OSError, inst:
215 except OSError, inst:
216 if inst.errno == errno.EEXIST:
216 if inst.errno == errno.EEXIST:
217 dir_cleanup.close()
217 dir_cleanup.close()
218 raise util.Abort(_("destination '%s' already exists")
218 raise util.Abort(_("destination '%s' already exists")
219 % dest)
219 % dest)
220 raise
220 raise
221
221
222 revs = None
222 revs = None
223 if rev:
223 if rev:
224 if 'lookup' not in src_repo.capabilities:
224 if 'lookup' not in src_repo.capabilities:
225 raise util.Abort(_("src repository does not support revision "
225 raise util.Abort(_("src repository does not support revision "
226 "lookup and so doesn't support clone by "
226 "lookup and so doesn't support clone by "
227 "revision"))
227 "revision"))
228 revs = [src_repo.lookup(r) for r in rev]
228 revs = [src_repo.lookup(r) for r in rev]
229
229
230 if dest_repo.local():
230 if dest_repo.local():
231 dest_repo.clone(src_repo, heads=revs, stream=stream)
231 dest_repo.clone(src_repo, heads=revs, stream=stream)
232 elif src_repo.local():
232 elif src_repo.local():
233 src_repo.push(dest_repo, revs=revs)
233 src_repo.push(dest_repo, revs=revs)
234 else:
234 else:
235 raise util.Abort(_("clone from remote to remote not supported"))
235 raise util.Abort(_("clone from remote to remote not supported"))
236
236
237 if dir_cleanup:
237 if dir_cleanup:
238 dir_cleanup.close()
238 dir_cleanup.close()
239
239
240 if dest_repo.local():
240 if dest_repo.local():
241 fp = dest_repo.opener("hgrc", "w", text=True)
241 fp = dest_repo.opener("hgrc", "w", text=True)
242 fp.write("[paths]\n")
242 fp.write("[paths]\n")
243 fp.write("default = %s\n" % abspath)
243 fp.write("default = %s\n" % abspath)
244 fp.close()
244 fp.close()
245
245
246 if update:
246 if update:
247 if not checkout:
247 if not checkout:
248 try:
248 try:
249 checkout = dest_repo.lookup("default")
249 checkout = dest_repo.lookup("default")
250 except:
250 except:
251 checkout = dest_repo.changelog.tip()
251 checkout = dest_repo.changelog.tip()
252 _update(dest_repo, checkout)
252 _update(dest_repo, checkout)
253
253
254 return src_repo, dest_repo
254 return src_repo, dest_repo
255 finally:
255 finally:
256 del src_lock, dest_lock, dir_cleanup
256 del src_lock, dest_lock, dir_cleanup
257
257
258 def _showstats(repo, stats):
258 def _showstats(repo, stats):
259 stats = ((stats[0], _("updated")),
259 stats = ((stats[0], _("updated")),
260 (stats[1], _("merged")),
260 (stats[1], _("merged")),
261 (stats[2], _("removed")),
261 (stats[2], _("removed")),
262 (stats[3], _("unresolved")))
262 (stats[3], _("unresolved")))
263 note = ", ".join([_("%d files %s") % s for s in stats])
263 note = ", ".join([_("%d files %s") % s for s in stats])
264 repo.ui.status("%s\n" % note)
264 repo.ui.status("%s\n" % note)
265
265
266 def _update(repo, node): return update(repo, node)
266 def _update(repo, node): return update(repo, node)
267
267
268 def update(repo, node):
268 def update(repo, node):
269 """update the working directory to node, merging linear changes"""
269 """update the working directory to node, merging linear changes"""
270 pl = repo.parents()
270 pl = repo.parents()
271 stats = _merge.update(repo, node, False, False, None)
271 stats = _merge.update(repo, node, False, False, None)
272 _showstats(repo, stats)
272 _showstats(repo, stats)
273 if stats[3]:
273 if stats[3]:
274 repo.ui.status(_("There are unresolved merges with"
274 repo.ui.status(_("There are unresolved merges with"
275 " locally modified files.\n"))
275 " locally modified files.\n"))
276 if stats[1]:
276 if stats[1]:
277 repo.ui.status(_("You can finish the partial merge using:\n"))
277 repo.ui.status(_("You can finish the partial merge using:\n"))
278 else:
278 else:
279 repo.ui.status(_("You can redo the full merge using:\n"))
279 repo.ui.status(_("You can redo the full merge using:\n"))
280 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
280 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
281 repo.ui.status(_(" hg update %s\n hg update %s\n")
281 repo.ui.status(_(" hg update %s\n hg update %s\n")
282 % (pl[0].rev(), repo.changectx(node).rev()))
282 % (pl[0].rev(), repo.changectx(node).rev()))
283 return stats[3]
283 return stats[3] > 0
284
284
285 def clean(repo, node, show_stats=True):
285 def clean(repo, node, show_stats=True):
286 """forcibly switch the working directory to node, clobbering changes"""
286 """forcibly switch the working directory to node, clobbering changes"""
287 stats = _merge.update(repo, node, False, True, None)
287 stats = _merge.update(repo, node, False, True, None)
288 if show_stats: _showstats(repo, stats)
288 if show_stats: _showstats(repo, stats)
289 return stats[3]
289 return stats[3] > 0
290
290
291 def merge(repo, node, force=None, remind=True):
291 def merge(repo, node, force=None, remind=True):
292 """branch merge with node, resolving changes"""
292 """branch merge with node, resolving changes"""
293 stats = _merge.update(repo, node, True, force, False)
293 stats = _merge.update(repo, node, True, force, False)
294 _showstats(repo, stats)
294 _showstats(repo, stats)
295 if stats[3]:
295 if stats[3]:
296 pl = repo.parents()
296 pl = repo.parents()
297 repo.ui.status(_("There are unresolved merges,"
297 repo.ui.status(_("There are unresolved merges,"
298 " you can redo the full merge using:\n"
298 " you can redo the full merge using:\n"
299 " hg update -C %s\n"
299 " hg update -C %s\n"
300 " hg merge %s\n")
300 " hg merge %s\n")
301 % (pl[0].rev(), pl[1].rev()))
301 % (pl[0].rev(), pl[1].rev()))
302 elif remind:
302 elif remind:
303 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
303 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
304 return stats[3]
304 return stats[3] > 0
305
305
306 def revert(repo, node, choose):
306 def revert(repo, node, choose):
307 """revert changes to revision in node without updating dirstate"""
307 """revert changes to revision in node without updating dirstate"""
308 return _merge.update(repo, node, False, True, choose)[3]
308 return _merge.update(repo, node, False, True, choose)[3] > 0
309
309
310 def verify(repo):
310 def verify(repo):
311 """verify the consistency of a repository"""
311 """verify the consistency of a repository"""
312 return _verify.verify(repo)
312 return _verify.verify(repo)
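The url#branch handling done by parseurl() near the top of hg.py is easy to check in isolation; a standalone restatement of the same logic, with illustrative URLs only:

def parseurl(url, revs):
    '''parse url#branch, returning url, revs + [branch], branch'''
    if '#' not in url:
        return url, (revs or None), None
    url, rev = url.split('#', 1)
    return url, revs + [rev], rev

print parseurl('http://example.com/repo#stable', [])
# ('http://example.com/repo', ['stable'], 'stable')
print parseurl('http://example.com/repo', [])
# ('http://example.com/repo', None, None)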
@@ -1,87 +1,90 b''
1 # ignore.py - ignored file handling for mercurial
1 # ignore.py - ignored file handling for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import util
9 import util, re
10
11 _commentre = None
10
12
11 def _parselines(fp):
13 def _parselines(fp):
12 for line in fp:
14 for line in fp:
13 if not line.endswith('\n'):
15 if "#" in line:
14 line += '\n'
16 global _commentre
15 escape = False
17 if not _commentre:
16 for i in xrange(len(line)):
18 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
17 if escape: escape = False
19 # remove comments prefixed by an even number of escapes
18 elif line[i] == '\\': escape = True
20 line = _commentre.sub(r'\1', line)
19 elif line[i] == '#': break
21 # fixup properly escaped comments that survived the above
20 line = line[:i].rstrip()
22 line = line.replace("\\#", "#")
23 line = line.rstrip()
21 if line:
24 if line:
22 yield line
25 yield line
23
26
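The rewritten _parselines() above swaps the character-by-character escape scanner for a precompiled regular expression. A self-contained sketch of the same comment-stripping behaviour, reusing the pattern from the patch (the sample lines are invented):

import re

# same pattern as in the patch: a '#' preceded by an even number of backslashes
_commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')

def strip_comment(line):
    if "#" in line:
        # remove comments prefixed by an even number of escapes
        line = _commentre.sub(r'\1', line)
        # turn properly escaped '\#' sequences back into literal '#'
        line = line.replace("\\#", "#")
    return line.rstrip()

print(strip_comment("*.o            # object files"))          # -> '*.o'
print(strip_comment(r"emacs\#backup\# # autosave files"))      # -> 'emacs#backup#'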
24 def ignore(root, files, warn):
27 def ignore(root, files, warn):
25 '''return the contents of .hgignore files as a list of patterns.
28 '''return the contents of .hgignore files as a list of patterns.
26
29
27 the files parsed for patterns include:
30 the files parsed for patterns include:
28 .hgignore in the repository root
31 .hgignore in the repository root
29 any additional files specified in the [ui] section of ~/.hgrc
32 any additional files specified in the [ui] section of ~/.hgrc
30
33
31 trailing white space is dropped.
34 trailing white space is dropped.
32 the escape character is backslash.
35 the escape character is backslash.
33 comments start with #.
36 comments start with #.
34 empty lines are skipped.
37 empty lines are skipped.
35
38
36 lines can be of the following formats:
39 lines can be of the following formats:
37
40
38 syntax: regexp # defaults following lines to non-rooted regexps
41 syntax: regexp # defaults following lines to non-rooted regexps
39 syntax: glob # defaults following lines to non-rooted globs
42 syntax: glob # defaults following lines to non-rooted globs
40 re:pattern # non-rooted regular expression
43 re:pattern # non-rooted regular expression
41 glob:pattern # non-rooted glob
44 glob:pattern # non-rooted glob
42 pattern # pattern of the current default type'''
45 pattern # pattern of the current default type'''
43
46
44 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
47 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
45 pats = {}
48 pats = {}
46 for f in files:
49 for f in files:
47 try:
50 try:
48 pats[f] = []
51 pats[f] = []
49 fp = open(f)
52 fp = open(f)
50 syntax = 'relre:'
53 syntax = 'relre:'
51 for line in _parselines(fp):
54 for line in _parselines(fp):
52 if line.startswith('syntax:'):
55 if line.startswith('syntax:'):
53 s = line[7:].strip()
56 s = line[7:].strip()
54 try:
57 try:
55 syntax = syntaxes[s]
58 syntax = syntaxes[s]
56 except KeyError:
59 except KeyError:
57 warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
60 warn(_("%s: ignoring invalid syntax '%s'\n") % (f, s))
58 continue
61 continue
59 pat = syntax + line
62 pat = syntax + line
60 for s, rels in syntaxes.items():
63 for s, rels in syntaxes.items():
61 if line.startswith(rels):
64 if line.startswith(rels):
62 pat = line
65 pat = line
63 break
66 break
64 elif line.startswith(s+':'):
67 elif line.startswith(s+':'):
65 pat = rels + line[len(s)+1:]
68 pat = rels + line[len(s)+1:]
66 break
69 break
67 pats[f].append(pat)
70 pats[f].append(pat)
68 except IOError, inst:
71 except IOError, inst:
69 if f != files[0]:
72 if f != files[0]:
70 warn(_("skipping unreadable ignore file '%s': %s\n") %
73 warn(_("skipping unreadable ignore file '%s': %s\n") %
71 (f, inst.strerror))
74 (f, inst.strerror))
72
75
73 allpats = []
76 allpats = []
74 [allpats.extend(patlist) for patlist in pats.values()]
77 [allpats.extend(patlist) for patlist in pats.values()]
75 if not allpats:
78 if not allpats:
76 return util.never
79 return util.never
77
80
78 try:
81 try:
79 files, ignorefunc, anypats = (
82 files, ignorefunc, anypats = (
80 util.matcher(root, inc=allpats, src='.hgignore'))
83 util.matcher(root, inc=allpats, src='.hgignore'))
81 except util.Abort:
84 except util.Abort:
82 # Re-raise an exception where the src is the right file
85 # Re-raise an exception where the src is the right file
83 for f, patlist in pats.items():
86 for f, patlist in pats.items():
84 files, ignorefunc, anypats = (
87 files, ignorefunc, anypats = (
85 util.matcher(root, inc=patlist, src=f))
88 util.matcher(root, inc=patlist, src=f))
86
89
87 return ignorefunc
90 return ignorefunc
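Putting the parsing and pattern handling together, here is a hedged sample .hgignore in the format described by the ignore() docstring above (the patterns are invented; note that after comment stripping, the escaped hashes on the third glob line reach the matcher as the literal pattern '#*#'):

syntax: glob
*.o              # comments start with '#'
*.pyc
\#*\#            # escaped hashes stand for literal '#' characters

syntax: regexp
\.orig$
\.rej$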
@@ -1,1996 +1,2001 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import re, lock, transaction, tempfile, stat, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = util.set(('lookup', 'changegroupsubset'))
16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.root = os.path.realpath(path)
21 self.root = os.path.realpath(path)
22 self.path = os.path.join(self.root, ".hg")
22 self.path = os.path.join(self.root, ".hg")
23 self.origroot = path
23 self.origroot = path
24 self.opener = util.opener(self.path)
24 self.opener = util.opener(self.path)
25 self.wopener = util.opener(self.root)
25 self.wopener = util.opener(self.root)
26
26
27 if not os.path.isdir(self.path):
27 if not os.path.isdir(self.path):
28 if create:
28 if create:
29 if not os.path.exists(path):
29 if not os.path.exists(path):
30 os.mkdir(path)
30 os.mkdir(path)
31 os.mkdir(self.path)
31 os.mkdir(self.path)
32 requirements = ["revlogv1"]
32 requirements = ["revlogv1"]
33 if parentui.configbool('format', 'usestore', True):
33 if parentui.configbool('format', 'usestore', True):
34 os.mkdir(os.path.join(self.path, "store"))
34 os.mkdir(os.path.join(self.path, "store"))
35 requirements.append("store")
35 requirements.append("store")
36 # create an invalid changelog
36 # create an invalid changelog
37 self.opener("00changelog.i", "a").write(
37 self.opener("00changelog.i", "a").write(
38 '\0\0\0\2' # represents revlogv2
38 '\0\0\0\2' # represents revlogv2
39 ' dummy changelog to prevent using the old repo layout'
39 ' dummy changelog to prevent using the old repo layout'
40 )
40 )
41 reqfile = self.opener("requires", "w")
41 reqfile = self.opener("requires", "w")
42 for r in requirements:
42 for r in requirements:
43 reqfile.write("%s\n" % r)
43 reqfile.write("%s\n" % r)
44 reqfile.close()
44 reqfile.close()
45 else:
45 else:
46 raise repo.RepoError(_("repository %s not found") % path)
46 raise repo.RepoError(_("repository %s not found") % path)
47 elif create:
47 elif create:
48 raise repo.RepoError(_("repository %s already exists") % path)
48 raise repo.RepoError(_("repository %s already exists") % path)
49 else:
49 else:
50 # find requirements
50 # find requirements
51 try:
51 try:
52 requirements = self.opener("requires").read().splitlines()
52 requirements = self.opener("requires").read().splitlines()
53 except IOError, inst:
53 except IOError, inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 requirements = []
56 requirements = []
57 # check them
57 # check them
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61
61
62 # setup store
62 # setup store
63 if "store" in requirements:
63 if "store" in requirements:
64 self.encodefn = util.encodefilename
64 self.encodefn = util.encodefilename
65 self.decodefn = util.decodefilename
65 self.decodefn = util.decodefilename
66 self.spath = os.path.join(self.path, "store")
66 self.spath = os.path.join(self.path, "store")
67 else:
67 else:
68 self.encodefn = lambda x: x
68 self.encodefn = lambda x: x
69 self.decodefn = lambda x: x
69 self.decodefn = lambda x: x
70 self.spath = self.path
70 self.spath = self.path
71 self.sopener = util.encodedopener(util.opener(self.spath),
71 self.sopener = util.encodedopener(util.opener(self.spath),
72 self.encodefn)
72 self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.branchcache = None
82 self.branchcache = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.filterpats = {}
84 self.filterpats = {}
85 self._transref = self._lockref = self._wlockref = None
85 self._transref = self._lockref = self._wlockref = None
86
86
87 def __getattr__(self, name):
87 def __getattr__(self, name):
88 if name == 'changelog':
88 if name == 'changelog':
89 self.changelog = changelog.changelog(self.sopener)
89 self.changelog = changelog.changelog(self.sopener)
90 self.sopener.defversion = self.changelog.version
90 self.sopener.defversion = self.changelog.version
91 return self.changelog
91 return self.changelog
92 if name == 'manifest':
92 if name == 'manifest':
93 self.changelog
93 self.changelog
94 self.manifest = manifest.manifest(self.sopener)
94 self.manifest = manifest.manifest(self.sopener)
95 return self.manifest
95 return self.manifest
96 if name == 'dirstate':
96 if name == 'dirstate':
97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 return self.dirstate
98 return self.dirstate
99 else:
99 else:
100 raise AttributeError, name
100 raise AttributeError, name
101
101
102 def url(self):
102 def url(self):
103 return 'file:' + self.root
103 return 'file:' + self.root
104
104
105 def hook(self, name, throw=False, **args):
105 def hook(self, name, throw=False, **args):
106 return hook.hook(self.ui, self, name, throw, **args)
106 return hook.hook(self.ui, self, name, throw, **args)
107
107
108 tag_disallowed = ':\r\n'
108 tag_disallowed = ':\r\n'
109
109
110 def _tag(self, name, node, message, local, user, date, parent=None,
110 def _tag(self, name, node, message, local, user, date, parent=None,
111 extra={}):
111 extra={}):
112 use_dirstate = parent is None
112 use_dirstate = parent is None
113
113
114 for c in self.tag_disallowed:
114 for c in self.tag_disallowed:
115 if c in name:
115 if c in name:
116 raise util.Abort(_('%r cannot be used in a tag name') % c)
116 raise util.Abort(_('%r cannot be used in a tag name') % c)
117
117
118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119
119
120 def writetag(fp, name, munge, prevtags):
120 def writetag(fp, name, munge, prevtags):
121 if prevtags and prevtags[-1] != '\n':
121 if prevtags and prevtags[-1] != '\n':
122 fp.write('\n')
122 fp.write('\n')
123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 fp.close()
124 fp.close()
125 self.hook('tag', node=hex(node), tag=name, local=local)
125 self.hook('tag', node=hex(node), tag=name, local=local)
126
126
127 prevtags = ''
127 prevtags = ''
128 if local:
128 if local:
129 try:
129 try:
130 fp = self.opener('localtags', 'r+')
130 fp = self.opener('localtags', 'r+')
131 except IOError, err:
131 except IOError, err:
132 fp = self.opener('localtags', 'a')
132 fp = self.opener('localtags', 'a')
133 else:
133 else:
134 prevtags = fp.read()
134 prevtags = fp.read()
135
135
136 # local tags are stored in the current charset
136 # local tags are stored in the current charset
137 writetag(fp, name, None, prevtags)
137 writetag(fp, name, None, prevtags)
138 return
138 return
139
139
140 if use_dirstate:
140 if use_dirstate:
141 try:
141 try:
142 fp = self.wfile('.hgtags', 'rb+')
142 fp = self.wfile('.hgtags', 'rb+')
143 except IOError, err:
143 except IOError, err:
144 fp = self.wfile('.hgtags', 'ab')
144 fp = self.wfile('.hgtags', 'ab')
145 else:
145 else:
146 prevtags = fp.read()
146 prevtags = fp.read()
147 else:
147 else:
148 try:
148 try:
149 prevtags = self.filectx('.hgtags', parent).data()
149 prevtags = self.filectx('.hgtags', parent).data()
150 except revlog.LookupError:
150 except revlog.LookupError:
151 pass
151 pass
152 fp = self.wfile('.hgtags', 'wb')
152 fp = self.wfile('.hgtags', 'wb')
153 if prevtags:
153 if prevtags:
154 fp.write(prevtags)
154 fp.write(prevtags)
155
155
156 # committed tags are stored in UTF-8
156 # committed tags are stored in UTF-8
157 writetag(fp, name, util.fromlocal, prevtags)
157 writetag(fp, name, util.fromlocal, prevtags)
158
158
159 if use_dirstate and '.hgtags' not in self.dirstate:
159 if use_dirstate and '.hgtags' not in self.dirstate:
160 self.add(['.hgtags'])
160 self.add(['.hgtags'])
161
161
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 extra=extra)
163 extra=extra)
164
164
165 self.hook('tag', node=hex(node), tag=name, local=local)
165 self.hook('tag', node=hex(node), tag=name, local=local)
166
166
167 return tagnode
167 return tagnode
168
168
169 def tag(self, name, node, message, local, user, date):
169 def tag(self, name, node, message, local, user, date):
170 '''tag a revision with a symbolic name.
170 '''tag a revision with a symbolic name.
171
171
172 if local is True, the tag is stored in a per-repository file.
172 if local is True, the tag is stored in a per-repository file.
173 otherwise, it is stored in the .hgtags file, and a new
173 otherwise, it is stored in the .hgtags file, and a new
174 changeset is committed with the change.
174 changeset is committed with the change.
175
175
176 keyword arguments:
176 keyword arguments:
177
177
178 local: whether to store tag in non-version-controlled file
178 local: whether to store tag in non-version-controlled file
179 (default False)
179 (default False)
180
180
181 message: commit message to use if committing
181 message: commit message to use if committing
182
182
183 user: name of user to use if committing
183 user: name of user to use if committing
184
184
185 date: date tuple to use if committing'''
185 date: date tuple to use if committing'''
186
186
187 for x in self.status()[:5]:
187 for x in self.status()[:5]:
188 if '.hgtags' in x:
188 if '.hgtags' in x:
189 raise util.Abort(_('working copy of .hgtags is changed '
189 raise util.Abort(_('working copy of .hgtags is changed '
190 '(please commit .hgtags manually)'))
190 '(please commit .hgtags manually)'))
191
191
192
192
193 self._tag(name, node, message, local, user, date)
193 self._tag(name, node, message, local, user, date)
194
194
195 def tags(self):
195 def tags(self):
196 '''return a mapping of tag to node'''
196 '''return a mapping of tag to node'''
197 if self.tagscache:
197 if self.tagscache:
198 return self.tagscache
198 return self.tagscache
199
199
200 globaltags = {}
200 globaltags = {}
201
201
202 def readtags(lines, fn):
202 def readtags(lines, fn):
203 filetags = {}
203 filetags = {}
204 count = 0
204 count = 0
205
205
206 def warn(msg):
206 def warn(msg):
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208
208
209 for l in lines:
209 for l in lines:
210 count += 1
210 count += 1
211 if not l:
211 if not l:
212 continue
212 continue
213 s = l.split(" ", 1)
213 s = l.split(" ", 1)
214 if len(s) != 2:
214 if len(s) != 2:
215 warn(_("cannot parse entry"))
215 warn(_("cannot parse entry"))
216 continue
216 continue
217 node, key = s
217 node, key = s
218 key = util.tolocal(key.strip()) # stored in UTF-8
218 key = util.tolocal(key.strip()) # stored in UTF-8
219 try:
219 try:
220 bin_n = bin(node)
220 bin_n = bin(node)
221 except TypeError:
221 except TypeError:
222 warn(_("node '%s' is not well formed") % node)
222 warn(_("node '%s' is not well formed") % node)
223 continue
223 continue
224 if bin_n not in self.changelog.nodemap:
224 if bin_n not in self.changelog.nodemap:
225 warn(_("tag '%s' refers to unknown node") % key)
225 warn(_("tag '%s' refers to unknown node") % key)
226 continue
226 continue
227
227
228 h = []
228 h = []
229 if key in filetags:
229 if key in filetags:
230 n, h = filetags[key]
230 n, h = filetags[key]
231 h.append(n)
231 h.append(n)
232 filetags[key] = (bin_n, h)
232 filetags[key] = (bin_n, h)
233
233
234 for k, nh in filetags.items():
234 for k, nh in filetags.items():
235 if k not in globaltags:
235 if k not in globaltags:
236 globaltags[k] = nh
236 globaltags[k] = nh
237 continue
237 continue
238 # we prefer the global tag if:
238 # we prefer the global tag if:
239 # it supersedes us OR
239 # it supersedes us OR
240 # mutual supersedes and it has a higher rank
240 # mutual supersedes and it has a higher rank
241 # otherwise we win because we're tip-most
241 # otherwise we win because we're tip-most
242 an, ah = nh
242 an, ah = nh
243 bn, bh = globaltags[k]
243 bn, bh = globaltags[k]
244 if (bn != an and an in bh and
244 if (bn != an and an in bh and
245 (bn not in ah or len(bh) > len(ah))):
245 (bn not in ah or len(bh) > len(ah))):
246 an = bn
246 an = bn
247 ah.extend([n for n in bh if n not in ah])
247 ah.extend([n for n in bh if n not in ah])
248 globaltags[k] = an, ah
248 globaltags[k] = an, ah
249
249
250 # read the tags file from each head, ending with the tip
250 # read the tags file from each head, ending with the tip
251 f = None
251 f = None
252 for rev, node, fnode in self._hgtagsnodes():
252 for rev, node, fnode in self._hgtagsnodes():
253 f = (f and f.filectx(fnode) or
253 f = (f and f.filectx(fnode) or
254 self.filectx('.hgtags', fileid=fnode))
254 self.filectx('.hgtags', fileid=fnode))
255 readtags(f.data().splitlines(), f)
255 readtags(f.data().splitlines(), f)
256
256
257 try:
257 try:
258 data = util.fromlocal(self.opener("localtags").read())
258 data = util.fromlocal(self.opener("localtags").read())
259 # localtags are stored in the local character set
259 # localtags are stored in the local character set
260 # while the internal tag table is stored in UTF-8
260 # while the internal tag table is stored in UTF-8
261 readtags(data.splitlines(), "localtags")
261 readtags(data.splitlines(), "localtags")
262 except IOError:
262 except IOError:
263 pass
263 pass
264
264
265 self.tagscache = {}
265 self.tagscache = {}
266 for k,nh in globaltags.items():
266 for k,nh in globaltags.items():
267 n = nh[0]
267 n = nh[0]
268 if n != nullid:
268 if n != nullid:
269 self.tagscache[k] = n
269 self.tagscache[k] = n
270 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
271
271
272 return self.tagscache
272 return self.tagscache
273
273
274 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
275 heads = self.heads()
275 heads = self.heads()
276 heads.reverse()
276 heads.reverse()
277 last = {}
277 last = {}
278 ret = []
278 ret = []
279 for node in heads:
279 for node in heads:
280 c = self.changectx(node)
280 c = self.changectx(node)
281 rev = c.rev()
281 rev = c.rev()
282 try:
282 try:
283 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
284 except revlog.LookupError:
284 except revlog.LookupError:
285 continue
285 continue
286 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
287 if fnode in last:
287 if fnode in last:
288 ret[last[fnode]] = None
288 ret[last[fnode]] = None
289 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
290 return [item for item in ret if item]
290 return [item for item in ret if item]
291
291
292 def tagslist(self):
292 def tagslist(self):
293 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
294 l = []
294 l = []
295 for t, n in self.tags().items():
295 for t, n in self.tags().items():
296 try:
296 try:
297 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
298 except:
298 except:
299 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
300 l.append((r, t, n))
300 l.append((r, t, n))
301 l.sort()
301 l.sort()
302 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
303
303
304 def nodetags(self, node):
304 def nodetags(self, node):
305 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
306 if not self.nodetagscache:
306 if not self.nodetagscache:
307 self.nodetagscache = {}
307 self.nodetagscache = {}
308 for t, n in self.tags().items():
308 for t, n in self.tags().items():
309 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
310 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
311
311
312 def _branchtags(self):
312 def _branchtags(self):
313 partial, last, lrev = self._readbranchcache()
313 partial, last, lrev = self._readbranchcache()
314
314
315 tiprev = self.changelog.count() - 1
315 tiprev = self.changelog.count() - 1
316 if lrev != tiprev:
316 if lrev != tiprev:
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319
319
320 return partial
320 return partial
321
321
322 def branchtags(self):
322 def branchtags(self):
323 if self.branchcache is not None:
323 if self.branchcache is not None:
324 return self.branchcache
324 return self.branchcache
325
325
326 self.branchcache = {} # avoid recursion in changectx
326 self.branchcache = {} # avoid recursion in changectx
327 partial = self._branchtags()
327 partial = self._branchtags()
328
328
329 # the branch cache is stored on disk as UTF-8, but in the local
329 # the branch cache is stored on disk as UTF-8, but in the local
330 # charset internally
330 # charset internally
331 for k, v in partial.items():
331 for k, v in partial.items():
332 self.branchcache[util.tolocal(k)] = v
332 self.branchcache[util.tolocal(k)] = v
333 return self.branchcache
333 return self.branchcache
334
334
335 def _readbranchcache(self):
335 def _readbranchcache(self):
336 partial = {}
336 partial = {}
337 try:
337 try:
338 f = self.opener("branch.cache")
338 f = self.opener("branch.cache")
339 lines = f.read().split('\n')
339 lines = f.read().split('\n')
340 f.close()
340 f.close()
341 except (IOError, OSError):
341 except (IOError, OSError):
342 return {}, nullid, nullrev
342 return {}, nullid, nullrev
343
343
344 try:
344 try:
345 last, lrev = lines.pop(0).split(" ", 1)
345 last, lrev = lines.pop(0).split(" ", 1)
346 last, lrev = bin(last), int(lrev)
346 last, lrev = bin(last), int(lrev)
347 if not (lrev < self.changelog.count() and
347 if not (lrev < self.changelog.count() and
348 self.changelog.node(lrev) == last): # sanity check
348 self.changelog.node(lrev) == last): # sanity check
349 # invalidate the cache
349 # invalidate the cache
350 raise ValueError('Invalid branch cache: unknown tip')
350 raise ValueError('Invalid branch cache: unknown tip')
351 for l in lines:
351 for l in lines:
352 if not l: continue
352 if not l: continue
353 node, label = l.split(" ", 1)
353 node, label = l.split(" ", 1)
354 partial[label.strip()] = bin(node)
354 partial[label.strip()] = bin(node)
355 except (KeyboardInterrupt, util.SignalInterrupt):
355 except (KeyboardInterrupt, util.SignalInterrupt):
356 raise
356 raise
357 except Exception, inst:
357 except Exception, inst:
358 if self.ui.debugflag:
358 if self.ui.debugflag:
359 self.ui.warn(str(inst), '\n')
359 self.ui.warn(str(inst), '\n')
360 partial, last, lrev = {}, nullid, nullrev
360 partial, last, lrev = {}, nullid, nullrev
361 return partial, last, lrev
361 return partial, last, lrev
362
362
363 def _writebranchcache(self, branches, tip, tiprev):
363 def _writebranchcache(self, branches, tip, tiprev):
364 try:
364 try:
365 f = self.opener("branch.cache", "w", atomictemp=True)
365 f = self.opener("branch.cache", "w", atomictemp=True)
366 f.write("%s %s\n" % (hex(tip), tiprev))
366 f.write("%s %s\n" % (hex(tip), tiprev))
367 for label, node in branches.iteritems():
367 for label, node in branches.iteritems():
368 f.write("%s %s\n" % (hex(node), label))
368 f.write("%s %s\n" % (hex(node), label))
369 f.rename()
369 f.rename()
370 except (IOError, OSError):
370 except (IOError, OSError):
371 pass
371 pass
372
372
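For orientation, _readbranchcache() and _writebranchcache() above agree on a simple format for .hg/branch.cache: the first line holds the cached tip node and revision (the sanity check that invalidates the cache), and each following line maps a branch head node to its branch name. A hedged illustration with made-up hashes and names:

d8f0b0e9c2e3a41b7c65d90f12ab34cd56ef7890 42
f6f8a6b7efae271495db50b1fd2aae3ee9d25f25 default
1c28f494dae69a2f8fc815059d257eccf3fcfe75 stable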
373 def _updatebranchcache(self, partial, start, end):
373 def _updatebranchcache(self, partial, start, end):
374 for r in xrange(start, end):
374 for r in xrange(start, end):
375 c = self.changectx(r)
375 c = self.changectx(r)
376 b = c.branch()
376 b = c.branch()
377 partial[b] = c.node()
377 partial[b] = c.node()
378
378
379 def lookup(self, key):
379 def lookup(self, key):
380 if key == '.':
380 if key == '.':
381 key, second = self.dirstate.parents()
381 key, second = self.dirstate.parents()
382 if key == nullid:
382 if key == nullid:
383 raise repo.RepoError(_("no revision checked out"))
383 raise repo.RepoError(_("no revision checked out"))
384 if second != nullid:
384 if second != nullid:
385 self.ui.warn(_("warning: working directory has two parents, "
385 self.ui.warn(_("warning: working directory has two parents, "
386 "tag '.' uses the first\n"))
386 "tag '.' uses the first\n"))
387 elif key == 'null':
387 elif key == 'null':
388 return nullid
388 return nullid
389 n = self.changelog._match(key)
389 n = self.changelog._match(key)
390 if n:
390 if n:
391 return n
391 return n
392 if key in self.tags():
392 if key in self.tags():
393 return self.tags()[key]
393 return self.tags()[key]
394 if key in self.branchtags():
394 if key in self.branchtags():
395 return self.branchtags()[key]
395 return self.branchtags()[key]
396 n = self.changelog._partialmatch(key)
396 n = self.changelog._partialmatch(key)
397 if n:
397 if n:
398 return n
398 return n
399 try:
399 try:
400 if len(key) == 20:
400 if len(key) == 20:
401 key = hex(key)
401 key = hex(key)
402 except:
402 except:
403 pass
403 pass
404 raise repo.RepoError(_("unknown revision '%s'") % key)
404 raise repo.RepoError(_("unknown revision '%s'") % key)
405
405
406 def dev(self):
406 def dev(self):
407 return os.lstat(self.path).st_dev
407 return os.lstat(self.path).st_dev
408
408
409 def local(self):
409 def local(self):
410 return True
410 return True
411
411
412 def join(self, f):
412 def join(self, f):
413 return os.path.join(self.path, f)
413 return os.path.join(self.path, f)
414
414
415 def sjoin(self, f):
415 def sjoin(self, f):
416 f = self.encodefn(f)
416 f = self.encodefn(f)
417 return os.path.join(self.spath, f)
417 return os.path.join(self.spath, f)
418
418
419 def wjoin(self, f):
419 def wjoin(self, f):
420 return os.path.join(self.root, f)
420 return os.path.join(self.root, f)
421
421
422 def file(self, f):
422 def file(self, f):
423 if f[0] == '/':
423 if f[0] == '/':
424 f = f[1:]
424 f = f[1:]
425 return filelog.filelog(self.sopener, f)
425 return filelog.filelog(self.sopener, f)
426
426
427 def changectx(self, changeid=None):
427 def changectx(self, changeid=None):
428 return context.changectx(self, changeid)
428 return context.changectx(self, changeid)
429
429
430 def workingctx(self):
430 def workingctx(self):
431 return context.workingctx(self)
431 return context.workingctx(self)
432
432
433 def parents(self, changeid=None):
433 def parents(self, changeid=None):
434 '''
434 '''
435 get list of changectxs for parents of changeid or working directory
435 get list of changectxs for parents of changeid or working directory
436 '''
436 '''
437 if changeid is None:
437 if changeid is None:
438 pl = self.dirstate.parents()
438 pl = self.dirstate.parents()
439 else:
439 else:
440 n = self.changelog.lookup(changeid)
440 n = self.changelog.lookup(changeid)
441 pl = self.changelog.parents(n)
441 pl = self.changelog.parents(n)
442 if pl[1] == nullid:
442 if pl[1] == nullid:
443 return [self.changectx(pl[0])]
443 return [self.changectx(pl[0])]
444 return [self.changectx(pl[0]), self.changectx(pl[1])]
444 return [self.changectx(pl[0]), self.changectx(pl[1])]
445
445
446 def filectx(self, path, changeid=None, fileid=None):
446 def filectx(self, path, changeid=None, fileid=None):
447 """changeid can be a changeset revision, node, or tag.
447 """changeid can be a changeset revision, node, or tag.
448 fileid can be a file revision or node."""
448 fileid can be a file revision or node."""
449 return context.filectx(self, path, changeid, fileid)
449 return context.filectx(self, path, changeid, fileid)
450
450
451 def getcwd(self):
451 def getcwd(self):
452 return self.dirstate.getcwd()
452 return self.dirstate.getcwd()
453
453
454 def pathto(self, f, cwd=None):
454 def pathto(self, f, cwd=None):
455 return self.dirstate.pathto(f, cwd)
455 return self.dirstate.pathto(f, cwd)
456
456
457 def wfile(self, f, mode='r'):
457 def wfile(self, f, mode='r'):
458 return self.wopener(f, mode)
458 return self.wopener(f, mode)
459
459
460 def _link(self, f):
460 def _link(self, f):
461 return os.path.islink(self.wjoin(f))
461 return os.path.islink(self.wjoin(f))
462
462
463 def _filter(self, filter, filename, data):
463 def _filter(self, filter, filename, data):
464 if filter not in self.filterpats:
464 if filter not in self.filterpats:
465 l = []
465 l = []
466 for pat, cmd in self.ui.configitems(filter):
466 for pat, cmd in self.ui.configitems(filter):
467 mf = util.matcher(self.root, "", [pat], [], [])[1]
467 mf = util.matcher(self.root, "", [pat], [], [])[1]
468 l.append((mf, cmd))
468 l.append((mf, cmd))
469 self.filterpats[filter] = l
469 self.filterpats[filter] = l
470
470
471 for mf, cmd in self.filterpats[filter]:
471 for mf, cmd in self.filterpats[filter]:
472 if mf(filename):
472 if mf(filename):
473 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
473 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
474 data = util.filter(data, cmd)
474 data = util.filter(data, cmd)
475 break
475 break
476
476
477 return data
477 return data
478
478
479 def wread(self, filename):
479 def wread(self, filename):
480 if self._link(filename):
480 if self._link(filename):
481 data = os.readlink(self.wjoin(filename))
481 data = os.readlink(self.wjoin(filename))
482 else:
482 else:
483 data = self.wopener(filename, 'r').read()
483 data = self.wopener(filename, 'r').read()
484 return self._filter("encode", filename, data)
484 return self._filter("encode", filename, data)
485
485
486 def wwrite(self, filename, data, flags):
486 def wwrite(self, filename, data, flags):
487 data = self._filter("decode", filename, data)
487 data = self._filter("decode", filename, data)
488 if "l" in flags:
488 if "l" in flags:
489 self.wopener.symlink(data, filename)
489 self.wopener.symlink(data, filename)
490 else:
490 else:
491 try:
491 try:
492 if self._link(filename):
492 if self._link(filename):
493 os.unlink(self.wjoin(filename))
493 os.unlink(self.wjoin(filename))
494 except OSError:
494 except OSError:
495 pass
495 pass
496 self.wopener(filename, 'w').write(data)
496 self.wopener(filename, 'w').write(data)
497 util.set_exec(self.wjoin(filename), "x" in flags)
497 util.set_exec(self.wjoin(filename), "x" in flags)
498
498
499 def wwritedata(self, filename, data):
499 def wwritedata(self, filename, data):
500 return self._filter("decode", filename, data)
500 return self._filter("decode", filename, data)
501
501
502 def transaction(self):
502 def transaction(self):
503 if self._transref and self._transref():
503 if self._transref and self._transref():
504 return self._transref().nest()
504 return self._transref().nest()
505
505
506 # save dirstate for rollback
506 # save dirstate for rollback
507 try:
507 try:
508 ds = self.opener("dirstate").read()
508 ds = self.opener("dirstate").read()
509 except IOError:
509 except IOError:
510 ds = ""
510 ds = ""
511 self.opener("journal.dirstate", "w").write(ds)
511 self.opener("journal.dirstate", "w").write(ds)
512
512
513 renames = [(self.sjoin("journal"), self.sjoin("undo")),
513 renames = [(self.sjoin("journal"), self.sjoin("undo")),
514 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
514 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
515 tr = transaction.transaction(self.ui.warn, self.sopener,
515 tr = transaction.transaction(self.ui.warn, self.sopener,
516 self.sjoin("journal"),
516 self.sjoin("journal"),
517 aftertrans(renames))
517 aftertrans(renames))
518 self._transref = weakref.ref(tr)
518 self._transref = weakref.ref(tr)
519 return tr
519 return tr
520
520
521 def recover(self):
521 def recover(self):
522 l = self.lock()
522 l = self.lock()
523 try:
523 try:
524 if os.path.exists(self.sjoin("journal")):
524 if os.path.exists(self.sjoin("journal")):
525 self.ui.status(_("rolling back interrupted transaction\n"))
525 self.ui.status(_("rolling back interrupted transaction\n"))
526 transaction.rollback(self.sopener, self.sjoin("journal"))
526 transaction.rollback(self.sopener, self.sjoin("journal"))
527 self.invalidate()
527 self.invalidate()
528 return True
528 return True
529 else:
529 else:
530 self.ui.warn(_("no interrupted transaction available\n"))
530 self.ui.warn(_("no interrupted transaction available\n"))
531 return False
531 return False
532 finally:
532 finally:
533 del l
533 del l
534
534
535 def rollback(self):
535 def rollback(self):
536 wlock = lock = None
536 wlock = lock = None
537 try:
537 try:
538 wlock = self.wlock()
538 wlock = self.wlock()
539 lock = self.lock()
539 lock = self.lock()
540 if os.path.exists(self.sjoin("undo")):
540 if os.path.exists(self.sjoin("undo")):
541 self.ui.status(_("rolling back last transaction\n"))
541 self.ui.status(_("rolling back last transaction\n"))
542 transaction.rollback(self.sopener, self.sjoin("undo"))
542 transaction.rollback(self.sopener, self.sjoin("undo"))
543 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
543 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
544 self.invalidate()
544 self.invalidate()
545 self.dirstate.invalidate()
545 self.dirstate.invalidate()
546 else:
546 else:
547 self.ui.warn(_("no rollback information available\n"))
547 self.ui.warn(_("no rollback information available\n"))
548 finally:
548 finally:
549 del lock, wlock
549 del lock, wlock
550
550
551 def invalidate(self):
551 def invalidate(self):
552 for a in "changelog manifest".split():
552 for a in "changelog manifest".split():
553 if hasattr(self, a):
553 if hasattr(self, a):
554 self.__delattr__(a)
554 self.__delattr__(a)
555 self.tagscache = None
555 self.tagscache = None
556 self.nodetagscache = None
556 self.nodetagscache = None
557
557
558 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
558 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
559 try:
559 try:
560 l = lock.lock(lockname, 0, releasefn, desc=desc)
560 l = lock.lock(lockname, 0, releasefn, desc=desc)
561 except lock.LockHeld, inst:
561 except lock.LockHeld, inst:
562 if not wait:
562 if not wait:
563 raise
563 raise
564 self.ui.warn(_("waiting for lock on %s held by %r\n") %
564 self.ui.warn(_("waiting for lock on %s held by %r\n") %
565 (desc, inst.locker))
565 (desc, inst.locker))
566 # default to 600 seconds timeout
566 # default to 600 seconds timeout
567 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
567 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
568 releasefn, desc=desc)
568 releasefn, desc=desc)
569 if acquirefn:
569 if acquirefn:
570 acquirefn()
570 acquirefn()
571 return l
571 return l
572
572
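Note that _lock() above falls back to ui.config("ui", "timeout", "600"), so the lock wait can be tuned from an hgrc file; a minimal example (the value chosen here is arbitrary):

[ui]
timeout = 120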
573 def lock(self, wait=True):
573 def lock(self, wait=True):
574 if self._lockref and self._lockref():
574 if self._lockref and self._lockref():
575 return self._lockref()
575 return self._lockref()
576
576
577 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
577 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
578 _('repository %s') % self.origroot)
578 _('repository %s') % self.origroot)
579 self._lockref = weakref.ref(l)
579 self._lockref = weakref.ref(l)
580 return l
580 return l
581
581
582 def wlock(self, wait=True):
582 def wlock(self, wait=True):
583 if self._wlockref and self._wlockref():
583 if self._wlockref and self._wlockref():
584 return self._wlockref()
584 return self._wlockref()
585
585
586 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
586 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
587 self.dirstate.invalidate, _('working directory of %s') %
587 self.dirstate.invalidate, _('working directory of %s') %
588 self.origroot)
588 self.origroot)
589 self._wlockref = weakref.ref(l)
589 self._wlockref = weakref.ref(l)
590 return l
590 return l
591
591
592 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
592 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
593 """
593 """
594 commit an individual file as part of a larger transaction
594 commit an individual file as part of a larger transaction
595 """
595 """
596
596
597 t = self.wread(fn)
597 t = self.wread(fn)
598 fl = self.file(fn)
598 fl = self.file(fn)
599 fp1 = manifest1.get(fn, nullid)
599 fp1 = manifest1.get(fn, nullid)
600 fp2 = manifest2.get(fn, nullid)
600 fp2 = manifest2.get(fn, nullid)
601
601
602 meta = {}
602 meta = {}
603 cp = self.dirstate.copied(fn)
603 cp = self.dirstate.copied(fn)
604 if cp:
604 if cp:
605 # Mark the new revision of this file as a copy of another
605 # Mark the new revision of this file as a copy of another
606 # file. This copy data will effectively act as a parent
606 # file. This copy data will effectively act as a parent
607 # of this new revision. If this is a merge, the first
607 # of this new revision. If this is a merge, the first
608 # parent will be the nullid (meaning "look up the copy data")
608 # parent will be the nullid (meaning "look up the copy data")
609 # and the second one will be the other parent. For example:
609 # and the second one will be the other parent. For example:
610 #
610 #
611 # 0 --- 1 --- 3 rev1 changes file foo
611 # 0 --- 1 --- 3 rev1 changes file foo
612 # \ / rev2 renames foo to bar and changes it
612 # \ / rev2 renames foo to bar and changes it
613 # \- 2 -/ rev3 should have bar with all changes and
613 # \- 2 -/ rev3 should have bar with all changes and
614 # should record that bar descends from
614 # should record that bar descends from
615 # bar in rev2 and foo in rev1
615 # bar in rev2 and foo in rev1
616 #
616 #
617 # this allows this merge to succeed:
617 # this allows this merge to succeed:
618 #
618 #
619 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
619 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
620 # \ / merging rev3 and rev4 should use bar@rev2
620 # \ / merging rev3 and rev4 should use bar@rev2
621 # \- 2 --- 4 as the merge base
621 # \- 2 --- 4 as the merge base
622 #
622 #
623 meta["copy"] = cp
623 meta["copy"] = cp
624 if not manifest2: # not a branch merge
624 if not manifest2: # not a branch merge
625 meta["copyrev"] = hex(manifest1.get(cp, nullid))
625 meta["copyrev"] = hex(manifest1.get(cp, nullid))
626 fp2 = nullid
626 fp2 = nullid
627 elif fp2 != nullid: # copied on remote side
627 elif fp2 != nullid: # copied on remote side
628 meta["copyrev"] = hex(manifest1.get(cp, nullid))
628 meta["copyrev"] = hex(manifest1.get(cp, nullid))
629 elif fp1 != nullid: # copied on local side, reversed
629 elif fp1 != nullid: # copied on local side, reversed
630 meta["copyrev"] = hex(manifest2.get(cp))
630 meta["copyrev"] = hex(manifest2.get(cp))
631 fp2 = fp1
631 fp2 = fp1
632 elif cp in manifest2: # directory rename on local side
632 elif cp in manifest2: # directory rename on local side
633 meta["copyrev"] = hex(manifest2[cp])
633 meta["copyrev"] = hex(manifest2[cp])
634 else: # directory rename on remote side
634 else: # directory rename on remote side
635 meta["copyrev"] = hex(manifest1.get(cp, nullid))
635 meta["copyrev"] = hex(manifest1.get(cp, nullid))
636 self.ui.debug(_(" %s: copy %s:%s\n") %
636 self.ui.debug(_(" %s: copy %s:%s\n") %
637 (fn, cp, meta["copyrev"]))
637 (fn, cp, meta["copyrev"]))
638 fp1 = nullid
638 fp1 = nullid
639 elif fp2 != nullid:
639 elif fp2 != nullid:
640 # is one parent an ancestor of the other?
640 # is one parent an ancestor of the other?
641 fpa = fl.ancestor(fp1, fp2)
641 fpa = fl.ancestor(fp1, fp2)
642 if fpa == fp1:
642 if fpa == fp1:
643 fp1, fp2 = fp2, nullid
643 fp1, fp2 = fp2, nullid
644 elif fpa == fp2:
644 elif fpa == fp2:
645 fp2 = nullid
645 fp2 = nullid
646
646
647 # is the file unmodified from the parent? report existing entry
647 # is the file unmodified from the parent? report existing entry
648 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
648 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
649 return fp1
649 return fp1
650
650
651 changelist.append(fn)
651 changelist.append(fn)
652 return fl.add(t, meta, tr, linkrev, fp1, fp2)
652 return fl.add(t, meta, tr, linkrev, fp1, fp2)
653
653
654 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
654 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
655 if p1 is None:
655 if p1 is None:
656 p1, p2 = self.dirstate.parents()
656 p1, p2 = self.dirstate.parents()
657 return self.commit(files=files, text=text, user=user, date=date,
657 return self.commit(files=files, text=text, user=user, date=date,
658 p1=p1, p2=p2, extra=extra, empty_ok=True)
658 p1=p1, p2=p2, extra=extra, empty_ok=True)
659
659
660 def commit(self, files=None, text="", user=None, date=None,
660 def commit(self, files=None, text="", user=None, date=None,
661 match=util.always, force=False, force_editor=False,
661 match=util.always, force=False, force_editor=False,
662 p1=None, p2=None, extra={}, empty_ok=False):
662 p1=None, p2=None, extra={}, empty_ok=False):
663 wlock = lock = tr = None
663 wlock = lock = tr = None
664 valid = 0 # don't save the dirstate if this isn't set
664 try:
665 try:
665 commit = []
666 commit = []
666 remove = []
667 remove = []
667 changed = []
668 changed = []
668 use_dirstate = (p1 is None) # not rawcommit
669 use_dirstate = (p1 is None) # not rawcommit
669 extra = extra.copy()
670 extra = extra.copy()
670
671
671 if use_dirstate:
672 if use_dirstate:
672 if files:
673 if files:
673 for f in files:
674 for f in files:
674 s = self.dirstate[f]
675 s = self.dirstate[f]
675 if s in 'nma':
676 if s in 'nma':
676 commit.append(f)
677 commit.append(f)
677 elif s == 'r':
678 elif s == 'r':
678 remove.append(f)
679 remove.append(f)
679 else:
680 else:
680 self.ui.warn(_("%s not tracked!\n") % f)
681 self.ui.warn(_("%s not tracked!\n") % f)
681 else:
682 else:
682 changes = self.status(match=match)[:5]
683 changes = self.status(match=match)[:5]
683 modified, added, removed, deleted, unknown = changes
684 modified, added, removed, deleted, unknown = changes
684 commit = modified + added
685 commit = modified + added
685 remove = removed
686 remove = removed
686 else:
687 else:
687 commit = files
688 commit = files
688
689
689 if use_dirstate:
690 if use_dirstate:
690 p1, p2 = self.dirstate.parents()
691 p1, p2 = self.dirstate.parents()
691 update_dirstate = True
692 update_dirstate = True
692 else:
693 else:
693 p1, p2 = p1, p2 or nullid
694 p1, p2 = p1, p2 or nullid
694 update_dirstate = (self.dirstate.parents()[0] == p1)
695 update_dirstate = (self.dirstate.parents()[0] == p1)
695
696
696 c1 = self.changelog.read(p1)
697 c1 = self.changelog.read(p1)
697 c2 = self.changelog.read(p2)
698 c2 = self.changelog.read(p2)
698 m1 = self.manifest.read(c1[0]).copy()
699 m1 = self.manifest.read(c1[0]).copy()
699 m2 = self.manifest.read(c2[0])
700 m2 = self.manifest.read(c2[0])
700
701
701 if use_dirstate:
702 if use_dirstate:
702 branchname = self.workingctx().branch()
703 branchname = self.workingctx().branch()
703 try:
704 try:
704 branchname = branchname.decode('UTF-8').encode('UTF-8')
705 branchname = branchname.decode('UTF-8').encode('UTF-8')
705 except UnicodeDecodeError:
706 except UnicodeDecodeError:
706 raise util.Abort(_('branch name not in UTF-8!'))
707 raise util.Abort(_('branch name not in UTF-8!'))
707 else:
708 else:
708 branchname = ""
709 branchname = ""
709
710
710 if use_dirstate:
711 if use_dirstate:
711 oldname = c1[5].get("branch") # stored in UTF-8
712 oldname = c1[5].get("branch") # stored in UTF-8
712 if (not commit and not remove and not force and p2 == nullid
713 if (not commit and not remove and not force and p2 == nullid
713 and branchname == oldname):
714 and branchname == oldname):
714 self.ui.status(_("nothing changed\n"))
715 self.ui.status(_("nothing changed\n"))
715 return None
716 return None
716
717
717 xp1 = hex(p1)
718 xp1 = hex(p1)
718 if p2 == nullid: xp2 = ''
719 if p2 == nullid: xp2 = ''
719 else: xp2 = hex(p2)
720 else: xp2 = hex(p2)
720
721
721 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
722 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
722
723
723 wlock = self.wlock()
724 wlock = self.wlock()
724 lock = self.lock()
725 lock = self.lock()
725 tr = self.transaction()
726 tr = self.transaction()
726 trp = weakref.proxy(tr)
727 trp = weakref.proxy(tr)
727
728
728 # check in files
729 # check in files
729 new = {}
730 new = {}
730 linkrev = self.changelog.count()
731 linkrev = self.changelog.count()
731 commit.sort()
732 commit.sort()
732 is_exec = util.execfunc(self.root, m1.execf)
733 is_exec = util.execfunc(self.root, m1.execf)
733 is_link = util.linkfunc(self.root, m1.linkf)
734 is_link = util.linkfunc(self.root, m1.linkf)
734 for f in commit:
735 for f in commit:
735 self.ui.note(f + "\n")
736 self.ui.note(f + "\n")
736 try:
737 try:
737 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
738 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
738 new_exec = is_exec(f)
739 new_exec = is_exec(f)
739 new_link = is_link(f)
740 new_link = is_link(f)
740 if ((not changed or changed[-1] != f) and
741 if ((not changed or changed[-1] != f) and
741 m2.get(f) != new[f]):
742 m2.get(f) != new[f]):
742 # mention the file in the changelog if some
743 # mention the file in the changelog if some
743 # flag changed, even if there was no content
744 # flag changed, even if there was no content
744 # change.
745 # change.
745 old_exec = m1.execf(f)
746 old_exec = m1.execf(f)
746 old_link = m1.linkf(f)
747 old_link = m1.linkf(f)
747 if old_exec != new_exec or old_link != new_link:
748 if old_exec != new_exec or old_link != new_link:
748 changed.append(f)
749 changed.append(f)
749 m1.set(f, new_exec, new_link)
750 m1.set(f, new_exec, new_link)
751 if use_dirstate:
752 self.dirstate.normal(f)
753
750 except (OSError, IOError):
754 except (OSError, IOError):
751 if use_dirstate:
755 if use_dirstate:
752 self.ui.warn(_("trouble committing %s!\n") % f)
756 self.ui.warn(_("trouble committing %s!\n") % f)
753 raise
757 raise
754 else:
758 else:
755 remove.append(f)
759 remove.append(f)
756
760
757 # update manifest
761 # update manifest
758 m1.update(new)
762 m1.update(new)
759 remove.sort()
763 remove.sort()
760 removed = []
764 removed = []
761
765
762 for f in remove:
766 for f in remove:
763 if f in m1:
767 if f in m1:
764 del m1[f]
768 del m1[f]
765 removed.append(f)
769 removed.append(f)
766 elif f in m2:
770 elif f in m2:
767 removed.append(f)
771 removed.append(f)
768 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
772 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
769 (new, removed))
773 (new, removed))
770
774
771 # add changeset
775 # add changeset
772 new = new.keys()
776 new = new.keys()
773 new.sort()
777 new.sort()
774
778
775 user = user or self.ui.username()
779 user = user or self.ui.username()
776 if (not empty_ok and not text) or force_editor:
780 if (not empty_ok and not text) or force_editor:
777 edittext = []
781 edittext = []
778 if text:
782 if text:
779 edittext.append(text)
783 edittext.append(text)
780 edittext.append("")
784 edittext.append("")
781 edittext.append("HG: user: %s" % user)
785 edittext.append("HG: user: %s" % user)
782 if p2 != nullid:
786 if p2 != nullid:
783 edittext.append("HG: branch merge")
787 edittext.append("HG: branch merge")
784 if branchname:
788 if branchname:
785 edittext.append("HG: branch %s" % util.tolocal(branchname))
789 edittext.append("HG: branch %s" % util.tolocal(branchname))
786 edittext.extend(["HG: changed %s" % f for f in changed])
790 edittext.extend(["HG: changed %s" % f for f in changed])
787 edittext.extend(["HG: removed %s" % f for f in removed])
791 edittext.extend(["HG: removed %s" % f for f in removed])
788 if not changed and not remove:
792 if not changed and not remove:
789 edittext.append("HG: no files changed")
793 edittext.append("HG: no files changed")
790 edittext.append("")
794 edittext.append("")
791 # run editor in the repository root
795 # run editor in the repository root
792 olddir = os.getcwd()
796 olddir = os.getcwd()
793 os.chdir(self.root)
797 os.chdir(self.root)
794 text = self.ui.edit("\n".join(edittext), user)
798 text = self.ui.edit("\n".join(edittext), user)
795 os.chdir(olddir)
799 os.chdir(olddir)
796
800
797 if branchname:
801 if branchname:
798 extra["branch"] = branchname
802 extra["branch"] = branchname
799
803
800 if use_dirstate:
804 if use_dirstate:
801 lines = [line.rstrip() for line in text.rstrip().splitlines()]
805 lines = [line.rstrip() for line in text.rstrip().splitlines()]
802 while lines and not lines[0]:
806 while lines and not lines[0]:
803 del lines[0]
807 del lines[0]
804 if not lines:
808 if not lines:
805 return None
809 return None
806 text = '\n'.join(lines)
810 text = '\n'.join(lines)
807
811
808 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
812 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
809 user, date, extra)
813 user, date, extra)
810 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
814 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
811 parent2=xp2)
815 parent2=xp2)
812 tr.close()
816 tr.close()
813
817
814 if self.branchcache and "branch" in extra:
818 if self.branchcache and "branch" in extra:
815 self.branchcache[util.tolocal(extra["branch"])] = n
819 self.branchcache[util.tolocal(extra["branch"])] = n
816
820
817 if use_dirstate or update_dirstate:
821 if use_dirstate or update_dirstate:
818 self.dirstate.setparents(n)
822 self.dirstate.setparents(n)
819 if use_dirstate:
823 if use_dirstate:
820 for f in new:
821 self.dirstate.normal(f)
822 for f in removed:
824 for f in removed:
823 self.dirstate.forget(f)
825 self.dirstate.forget(f)
826 valid = 1 # our dirstate updates are complete
824
827
825 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
828 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
826 return n
829 return n
827 finally:
830 finally:
831 if not valid: # don't save our updated dirstate
832 self.dirstate.invalidate()
828 del tr, lock, wlock
833 del tr, lock, wlock
829
834
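The commit() changes above introduce a `valid` guard: per-file dirstate updates now happen inside the commit loop, and the finally block throws the dirstate away unless the flag was set after all updates completed. A simplified, self-contained sketch of that pattern (ToyDirstate and toy_commit are invented stand-ins, not the real classes):

class ToyDirstate(object):
    # stand-in for the real dirstate, just enough to show the guard-flag pattern
    def __init__(self):
        self.pending = {}
    def normal(self, f):
        self.pending[f] = 'n'        # mark file as clean, in memory only
    def invalidate(self):
        self.pending.clear()         # forget everything not yet written out

def toy_commit(dirstate, files, fail_on=None):
    valid = False                    # don't save the dirstate if this isn't set
    try:
        for f in files:
            if f == fail_on:
                raise IOError("trouble committing %s!" % f)
            dirstate.normal(f)       # updated as each file is committed
        valid = True                 # our dirstate updates are complete
        return "new-changeset-node"
    finally:
        if not valid:
            dirstate.invalidate()    # an error escaped: discard partial updates

ds = ToyDirstate()
try:
    toy_commit(ds, ["a", "b", "c"], fail_on="b")
except IOError:
    pass
print(ds.pending)                    # -> {} ; nothing leaked from the failed commit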
830 def walk(self, node=None, files=[], match=util.always, badmatch=None):
835 def walk(self, node=None, files=[], match=util.always, badmatch=None):
831 '''
836 '''
832 walk recursively through the directory tree or a given
837 walk recursively through the directory tree or a given
833 changeset, finding all files matched by the match
838 changeset, finding all files matched by the match
834 function
839 function
835
840
836 results are yielded in a tuple (src, filename), where src
841 results are yielded in a tuple (src, filename), where src
837 is one of:
842 is one of:
838 'f' the file was found in the directory tree
843 'f' the file was found in the directory tree
839 'm' the file was only in the dirstate and not in the tree
844 'm' the file was only in the dirstate and not in the tree
840 'b' file was not found and matched badmatch
845 'b' file was not found and matched badmatch
841 '''
846 '''
842
847
843 if node:
848 if node:
844 fdict = dict.fromkeys(files)
849 fdict = dict.fromkeys(files)
845 # for dirstate.walk, files=['.'] means "walk the whole tree".
850 # for dirstate.walk, files=['.'] means "walk the whole tree".
846 # follow that here, too
851 # follow that here, too
847 fdict.pop('.', None)
852 fdict.pop('.', None)
848 mdict = self.manifest.read(self.changelog.read(node)[0])
853 mdict = self.manifest.read(self.changelog.read(node)[0])
849 mfiles = mdict.keys()
854 mfiles = mdict.keys()
850 mfiles.sort()
855 mfiles.sort()
851 for fn in mfiles:
856 for fn in mfiles:
852 for ffn in fdict:
857 for ffn in fdict:
853 # match if the file is the exact name or a directory
858 # match if the file is the exact name or a directory
854 if ffn == fn or fn.startswith("%s/" % ffn):
859 if ffn == fn or fn.startswith("%s/" % ffn):
855 del fdict[ffn]
860 del fdict[ffn]
856 break
861 break
857 if match(fn):
862 if match(fn):
858 yield 'm', fn
863 yield 'm', fn
859 ffiles = fdict.keys()
864 ffiles = fdict.keys()
860 ffiles.sort()
865 ffiles.sort()
861 for fn in ffiles:
866 for fn in ffiles:
862 if badmatch and badmatch(fn):
867 if badmatch and badmatch(fn):
863 if match(fn):
868 if match(fn):
864 yield 'b', fn
869 yield 'b', fn
865 else:
870 else:
866 self.ui.warn(_('%s: No such file in rev %s\n')
871 self.ui.warn(_('%s: No such file in rev %s\n')
867 % (self.pathto(fn), short(node)))
872 % (self.pathto(fn), short(node)))
868 else:
873 else:
869 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
874 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
870 yield src, fn
875 yield src, fn
871
876
872 def status(self, node1=None, node2=None, files=[], match=util.always,
877 def status(self, node1=None, node2=None, files=[], match=util.always,
873 list_ignored=False, list_clean=False):
878 list_ignored=False, list_clean=False):
874 """return status of files between two nodes or node and working directory
879 """return status of files between two nodes or node and working directory
875
880
876 If node1 is None, use the first dirstate parent instead.
881 If node1 is None, use the first dirstate parent instead.
877 If node2 is None, compare node1 with working directory.
882 If node2 is None, compare node1 with working directory.
878 """
883 """
879
884
880 def fcmp(fn, getnode):
885 def fcmp(fn, getnode):
881 t1 = self.wread(fn)
886 t1 = self.wread(fn)
882 return self.file(fn).cmp(getnode(fn), t1)
887 return self.file(fn).cmp(getnode(fn), t1)
883
888
884 def mfmatches(node):
889 def mfmatches(node):
885 change = self.changelog.read(node)
890 change = self.changelog.read(node)
886 mf = self.manifest.read(change[0]).copy()
891 mf = self.manifest.read(change[0]).copy()
887 for fn in mf.keys():
892 for fn in mf.keys():
888 if not match(fn):
893 if not match(fn):
889 del mf[fn]
894 del mf[fn]
890 return mf
895 return mf
891
896
892 modified, added, removed, deleted, unknown = [], [], [], [], []
897 modified, added, removed, deleted, unknown = [], [], [], [], []
893 ignored, clean = [], []
898 ignored, clean = [], []
894
899
895 compareworking = False
900 compareworking = False
896 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
901 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
897 compareworking = True
902 compareworking = True
898
903
899 if not compareworking:
904 if not compareworking:
900 # read the manifest from node1 before the manifest from node2,
905 # read the manifest from node1 before the manifest from node2,
901 # so that we'll hit the manifest cache if we're going through
906 # so that we'll hit the manifest cache if we're going through
902 # all the revisions in parent->child order.
907 # all the revisions in parent->child order.
903 mf1 = mfmatches(node1)
908 mf1 = mfmatches(node1)
904
909
905 # are we comparing the working directory?
910 # are we comparing the working directory?
906 if not node2:
911 if not node2:
907 (lookup, modified, added, removed, deleted, unknown,
912 (lookup, modified, added, removed, deleted, unknown,
908 ignored, clean) = self.dirstate.status(files, match,
913 ignored, clean) = self.dirstate.status(files, match,
909 list_ignored, list_clean)
914 list_ignored, list_clean)
910
915
911 # are we comparing working dir against its parent?
916 # are we comparing working dir against its parent?
912 if compareworking:
917 if compareworking:
913 if lookup:
918 if lookup:
914 fixup = []
919 fixup = []
915 # do a full compare of any files that might have changed
920 # do a full compare of any files that might have changed
916 ctx = self.changectx()
921 ctx = self.changectx()
917 for f in lookup:
922 for f in lookup:
918 if f not in ctx or ctx[f].cmp(self.wread(f)):
923 if f not in ctx or ctx[f].cmp(self.wread(f)):
919 modified.append(f)
924 modified.append(f)
920 else:
925 else:
921 fixup.append(f)
926 fixup.append(f)
922 if list_clean:
927 if list_clean:
923 clean.append(f)
928 clean.append(f)
924
929
925 # update dirstate for files that are actually clean
930 # update dirstate for files that are actually clean
926 if fixup:
931 if fixup:
927 wlock = None
932 wlock = None
928 try:
933 try:
929 try:
934 try:
930 wlock = self.wlock(False)
935 wlock = self.wlock(False)
931 except lock.LockException:
936 except lock.LockException:
932 pass
937 pass
933 if wlock:
938 if wlock:
934 for f in fixup:
939 for f in fixup:
935 self.dirstate.normal(f)
940 self.dirstate.normal(f)
936 finally:
941 finally:
937 del wlock
942 del wlock
938 else:
943 else:
939 # we are comparing working dir against non-parent
944 # we are comparing working dir against non-parent
940 # generate a pseudo-manifest for the working dir
945 # generate a pseudo-manifest for the working dir
941 # XXX: create it in dirstate.py ?
946 # XXX: create it in dirstate.py ?
942 mf2 = mfmatches(self.dirstate.parents()[0])
947 mf2 = mfmatches(self.dirstate.parents()[0])
943 is_exec = util.execfunc(self.root, mf2.execf)
948 is_exec = util.execfunc(self.root, mf2.execf)
944 is_link = util.linkfunc(self.root, mf2.linkf)
949 is_link = util.linkfunc(self.root, mf2.linkf)
945 for f in lookup + modified + added:
950 for f in lookup + modified + added:
946 mf2[f] = ""
951 mf2[f] = ""
947 mf2.set(f, is_exec(f), is_link(f))
952 mf2.set(f, is_exec(f), is_link(f))
948 for f in removed:
953 for f in removed:
949 if f in mf2:
954 if f in mf2:
950 del mf2[f]
955 del mf2[f]
951
956
952 else:
957 else:
953 # we are comparing two revisions
958 # we are comparing two revisions
954 mf2 = mfmatches(node2)
959 mf2 = mfmatches(node2)
955
960
956 if not compareworking:
961 if not compareworking:
957 # flush lists from dirstate before comparing manifests
962 # flush lists from dirstate before comparing manifests
958 modified, added, clean = [], [], []
963 modified, added, clean = [], [], []
959
964
960 # make sure to sort the files so we talk to the disk in a
965 # make sure to sort the files so we talk to the disk in a
961 # reasonable order
966 # reasonable order
962 mf2keys = mf2.keys()
967 mf2keys = mf2.keys()
963 mf2keys.sort()
968 mf2keys.sort()
964 getnode = lambda fn: mf1.get(fn, nullid)
969 getnode = lambda fn: mf1.get(fn, nullid)
965 for fn in mf2keys:
970 for fn in mf2keys:
966 if mf1.has_key(fn):
971 if mf1.has_key(fn):
967 if (mf1.flags(fn) != mf2.flags(fn) or
972 if (mf1.flags(fn) != mf2.flags(fn) or
968 (mf1[fn] != mf2[fn] and
973 (mf1[fn] != mf2[fn] and
969 (mf2[fn] != "" or fcmp(fn, getnode)))):
974 (mf2[fn] != "" or fcmp(fn, getnode)))):
970 modified.append(fn)
975 modified.append(fn)
971 elif list_clean:
976 elif list_clean:
972 clean.append(fn)
977 clean.append(fn)
973 del mf1[fn]
978 del mf1[fn]
974 else:
979 else:
975 added.append(fn)
980 added.append(fn)
976
981
977 removed = mf1.keys()
982 removed = mf1.keys()
978
983
979 # sort and return results:
984 # sort and return results:
980 for l in modified, added, removed, deleted, unknown, ignored, clean:
985 for l in modified, added, removed, deleted, unknown, ignored, clean:
981 l.sort()
986 l.sort()
982 return (modified, added, removed, deleted, unknown, ignored, clean)
987 return (modified, added, removed, deleted, unknown, ignored, clean)
983
988
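# A minimal standalone sketch of the manifest comparison performed above,
# assuming plain dicts mapping filename -> node in place of the real manifest
# objects (flags and the fcmp() content check are ignored here).
def _classify_sketch(mf1, mf2):
    mf1 = dict(mf1)
    modified, added = [], []
    for fn in sorted(mf2):
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                modified.append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = sorted(mf1)   # whatever node1 had that node2 lacks
    return modified, added, removed

# _classify_sketch({'a': 'x', 'b': 'y'}, {'a': 'z', 'c': 'w'})
# -> (['a'], ['c'], ['b'])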
984 def add(self, list):
989 def add(self, list):
985 wlock = self.wlock()
990 wlock = self.wlock()
986 try:
991 try:
987 for f in list:
992 for f in list:
988 p = self.wjoin(f)
993 p = self.wjoin(f)
989 try:
994 try:
990 st = os.lstat(p)
995 st = os.lstat(p)
991 except:
996 except:
992 self.ui.warn(_("%s does not exist!\n") % f)
997 self.ui.warn(_("%s does not exist!\n") % f)
993 continue
998 continue
994 if st.st_size > 10000000:
999 if st.st_size > 10000000:
995 self.ui.warn(_("%s: files over 10MB may cause memory and"
1000 self.ui.warn(_("%s: files over 10MB may cause memory and"
996 " performance problems\n"
1001 " performance problems\n"
997 "(use 'hg revert %s' to unadd the file)\n")
1002 "(use 'hg revert %s' to unadd the file)\n")
998 % (f, f))
1003 % (f, f))
999 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1004 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1000 self.ui.warn(_("%s not added: only files and symlinks "
1005 self.ui.warn(_("%s not added: only files and symlinks "
1001 "supported currently\n") % f)
1006 "supported currently\n") % f)
1002 elif self.dirstate[f] in 'amn':
1007 elif self.dirstate[f] in 'amn':
1003 self.ui.warn(_("%s already tracked!\n") % f)
1008 self.ui.warn(_("%s already tracked!\n") % f)
1004 elif self.dirstate[f] == 'r':
1009 elif self.dirstate[f] == 'r':
1005 self.dirstate.normallookup(f)
1010 self.dirstate.normallookup(f)
1006 else:
1011 else:
1007 self.dirstate.add(f)
1012 self.dirstate.add(f)
1008 finally:
1013 finally:
1009 del wlock
1014 del wlock
1010
1015
1011 def forget(self, list):
1016 def forget(self, list):
1012 wlock = self.wlock()
1017 wlock = self.wlock()
1013 try:
1018 try:
1014 for f in list:
1019 for f in list:
1015 if self.dirstate[f] != 'a':
1020 if self.dirstate[f] != 'a':
1016 self.ui.warn(_("%s not added!\n") % f)
1021 self.ui.warn(_("%s not added!\n") % f)
1017 else:
1022 else:
1018 self.dirstate.forget(f)
1023 self.dirstate.forget(f)
1019 finally:
1024 finally:
1020 del wlock
1025 del wlock
1021
1026
1022 def remove(self, list, unlink=False):
1027 def remove(self, list, unlink=False):
1023 wlock = None
1028 wlock = None
1024 try:
1029 try:
1025 if unlink:
1030 if unlink:
1026 for f in list:
1031 for f in list:
1027 try:
1032 try:
1028 util.unlink(self.wjoin(f))
1033 util.unlink(self.wjoin(f))
1029 except OSError, inst:
1034 except OSError, inst:
1030 if inst.errno != errno.ENOENT:
1035 if inst.errno != errno.ENOENT:
1031 raise
1036 raise
1032 wlock = self.wlock()
1037 wlock = self.wlock()
1033 for f in list:
1038 for f in list:
1034 if unlink and os.path.exists(self.wjoin(f)):
1039 if unlink and os.path.exists(self.wjoin(f)):
1035 self.ui.warn(_("%s still exists!\n") % f)
1040 self.ui.warn(_("%s still exists!\n") % f)
1036 elif self.dirstate[f] == 'a':
1041 elif self.dirstate[f] == 'a':
1037 self.dirstate.forget(f)
1042 self.dirstate.forget(f)
1038 elif f not in self.dirstate:
1043 elif f not in self.dirstate:
1039 self.ui.warn(_("%s not tracked!\n") % f)
1044 self.ui.warn(_("%s not tracked!\n") % f)
1040 else:
1045 else:
1041 self.dirstate.remove(f)
1046 self.dirstate.remove(f)
1042 finally:
1047 finally:
1043 del wlock
1048 del wlock
1044
1049
1045 def undelete(self, list):
1050 def undelete(self, list):
1046 wlock = None
1051 wlock = None
1047 try:
1052 try:
1048 manifests = [self.manifest.read(self.changelog.read(p)[0])
1053 manifests = [self.manifest.read(self.changelog.read(p)[0])
1049 for p in self.dirstate.parents() if p != nullid]
1054 for p in self.dirstate.parents() if p != nullid]
1050 wlock = self.wlock()
1055 wlock = self.wlock()
1051 for f in list:
1056 for f in list:
1052 if self.dirstate[f] != 'r':
1057 if self.dirstate[f] != 'r':
1053 self.ui.warn("%s not removed!\n" % f)
1058 self.ui.warn("%s not removed!\n" % f)
1054 else:
1059 else:
1055 m = f in manifests[0] and manifests[0] or manifests[1]
1060 m = f in manifests[0] and manifests[0] or manifests[1]
1056 t = self.file(f).read(m[f])
1061 t = self.file(f).read(m[f])
1057 self.wwrite(f, t, m.flags(f))
1062 self.wwrite(f, t, m.flags(f))
1058 self.dirstate.normal(f)
1063 self.dirstate.normal(f)
1059 finally:
1064 finally:
1060 del wlock
1065 del wlock
1061
1066
1062 def copy(self, source, dest):
1067 def copy(self, source, dest):
1063 wlock = None
1068 wlock = None
1064 try:
1069 try:
1065 p = self.wjoin(dest)
1070 p = self.wjoin(dest)
1066 if not (os.path.exists(p) or os.path.islink(p)):
1071 if not (os.path.exists(p) or os.path.islink(p)):
1067 self.ui.warn(_("%s does not exist!\n") % dest)
1072 self.ui.warn(_("%s does not exist!\n") % dest)
1068 elif not (os.path.isfile(p) or os.path.islink(p)):
1073 elif not (os.path.isfile(p) or os.path.islink(p)):
1069 self.ui.warn(_("copy failed: %s is not a file or a "
1074 self.ui.warn(_("copy failed: %s is not a file or a "
1070 "symbolic link\n") % dest)
1075 "symbolic link\n") % dest)
1071 else:
1076 else:
1072 wlock = self.wlock()
1077 wlock = self.wlock()
1073 if dest not in self.dirstate:
1078 if dest not in self.dirstate:
1074 self.dirstate.add(dest)
1079 self.dirstate.add(dest)
1075 self.dirstate.copy(source, dest)
1080 self.dirstate.copy(source, dest)
1076 finally:
1081 finally:
1077 del wlock
1082 del wlock
1078
1083
1079 def heads(self, start=None):
1084 def heads(self, start=None):
1080 heads = self.changelog.heads(start)
1085 heads = self.changelog.heads(start)
1081 # sort the output in rev descending order
1086 # sort the output in rev descending order
1082 heads = [(-self.changelog.rev(h), h) for h in heads]
1087 heads = [(-self.changelog.rev(h), h) for h in heads]
1083 heads.sort()
1088 heads.sort()
1084 return [n for (r, n) in heads]
1089 return [n for (r, n) in heads]
1085
1090
1086 def branchheads(self, branch, start=None):
1091 def branchheads(self, branch, start=None):
1087 branches = self.branchtags()
1092 branches = self.branchtags()
1088 if branch not in branches:
1093 if branch not in branches:
1089 return []
1094 return []
1090 # The basic algorithm is this:
1095 # The basic algorithm is this:
1091 #
1096 #
1092 # Start from the branch tip since there are no later revisions that can
1097 # Start from the branch tip since there are no later revisions that can
1093 # possibly be in this branch, and the tip is a guaranteed head.
1098 # possibly be in this branch, and the tip is a guaranteed head.
1094 #
1099 #
1095 # Remember the tip's parents as the first ancestors, since these by
1100 # Remember the tip's parents as the first ancestors, since these by
1096 # definition are not heads.
1101 # definition are not heads.
1097 #
1102 #
1098 # Step backwards from the branch tip through all the revisions. We are
1103 # Step backwards from the branch tip through all the revisions. We are
1099 # guaranteed by the rules of Mercurial that we will now be visiting the
1104 # guaranteed by the rules of Mercurial that we will now be visiting the
1100 # nodes in reverse topological order (children before parents).
1105 # nodes in reverse topological order (children before parents).
1101 #
1106 #
1102 # If a revision is one of the ancestors of a head then we can toss it
1107 # If a revision is one of the ancestors of a head then we can toss it
1103 # out of the ancestors set (we've already found it and won't be
1108 # out of the ancestors set (we've already found it and won't be
1104 # visiting it again) and put its parents in the ancestors set.
1109 # visiting it again) and put its parents in the ancestors set.
1105 #
1110 #
1106 # Otherwise, if a revision is in the branch it's another head, since it
1111 # Otherwise, if a revision is in the branch it's another head, since it
1107 # wasn't in the ancestor list of an existing head. So add it to the
1112 # wasn't in the ancestor list of an existing head. So add it to the
1108 # head list, and add its parents to the ancestor list.
1113 # head list, and add its parents to the ancestor list.
1109 #
1114 #
1110 # If it is not in the branch ignore it.
1115 # If it is not in the branch ignore it.
1111 #
1116 #
1112 # Once we have a list of heads, use nodesbetween to filter out all the
1117 # Once we have a list of heads, use nodesbetween to filter out all the
1113 # heads that cannot be reached from startrev. There may be a more
1118 # heads that cannot be reached from startrev. There may be a more
1114 # efficient way to do this as part of the previous algorithm.
1119 # efficient way to do this as part of the previous algorithm.
1115
1120
1116 set = util.set
1121 set = util.set
1117 heads = [self.changelog.rev(branches[branch])]
1122 heads = [self.changelog.rev(branches[branch])]
1118 # Don't care if ancestors contains nullrev or not.
1123 # Don't care if ancestors contains nullrev or not.
1119 ancestors = set(self.changelog.parentrevs(heads[0]))
1124 ancestors = set(self.changelog.parentrevs(heads[0]))
1120 for rev in xrange(heads[0] - 1, nullrev, -1):
1125 for rev in xrange(heads[0] - 1, nullrev, -1):
1121 if rev in ancestors:
1126 if rev in ancestors:
1122 ancestors.update(self.changelog.parentrevs(rev))
1127 ancestors.update(self.changelog.parentrevs(rev))
1123 ancestors.remove(rev)
1128 ancestors.remove(rev)
1124 elif self.changectx(rev).branch() == branch:
1129 elif self.changectx(rev).branch() == branch:
1125 heads.append(rev)
1130 heads.append(rev)
1126 ancestors.update(self.changelog.parentrevs(rev))
1131 ancestors.update(self.changelog.parentrevs(rev))
1127 heads = [self.changelog.node(rev) for rev in heads]
1132 heads = [self.changelog.node(rev) for rev in heads]
1128 if start is not None:
1133 if start is not None:
1129 heads = self.changelog.nodesbetween([start], heads)[2]
1134 heads = self.changelog.nodesbetween([start], heads)[2]
1130 return heads
1135 return heads
1131
1136
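# A minimal standalone sketch of the walk used by branchheads() above:
# revisions are plain ints, parents() stands in for changelog.parentrevs and
# branch_of() for changectx(rev).branch().  Returned heads are revisions, not
# nodes.
def _branchheads_sketch(tiprev, branch, parents, branch_of, nullrev=-1):
    heads = [tiprev]
    ancestors = set(parents(tiprev))
    for rev in range(tiprev - 1, nullrev, -1):
        if rev in ancestors:
            # already below some known head, so not a head itself
            ancestors.update(parents(rev))
            ancestors.remove(rev)
        elif branch_of(rev) == branch:
            # in the branch and not below any known head: another head
            heads.append(rev)
            ancestors.update(parents(rev))
    return heads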
1132 def branches(self, nodes):
1137 def branches(self, nodes):
1133 if not nodes:
1138 if not nodes:
1134 nodes = [self.changelog.tip()]
1139 nodes = [self.changelog.tip()]
1135 b = []
1140 b = []
1136 for n in nodes:
1141 for n in nodes:
1137 t = n
1142 t = n
1138 while 1:
1143 while 1:
1139 p = self.changelog.parents(n)
1144 p = self.changelog.parents(n)
1140 if p[1] != nullid or p[0] == nullid:
1145 if p[1] != nullid or p[0] == nullid:
1141 b.append((t, n, p[0], p[1]))
1146 b.append((t, n, p[0], p[1]))
1142 break
1147 break
1143 n = p[0]
1148 n = p[0]
1144 return b
1149 return b
1145
1150
1146 def between(self, pairs):
1151 def between(self, pairs):
1147 r = []
1152 r = []
1148
1153
1149 for top, bottom in pairs:
1154 for top, bottom in pairs:
1150 n, l, i = top, [], 0
1155 n, l, i = top, [], 0
1151 f = 1
1156 f = 1
1152
1157
1153 while n != bottom:
1158 while n != bottom:
1154 p = self.changelog.parents(n)[0]
1159 p = self.changelog.parents(n)[0]
1155 if i == f:
1160 if i == f:
1156 l.append(n)
1161 l.append(n)
1157 f = f * 2
1162 f = f * 2
1158 n = p
1163 n = p
1159 i += 1
1164 i += 1
1160
1165
1161 r.append(l)
1166 r.append(l)
1162
1167
1163 return r
1168 return r
1164
1169
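# The loop above records a node only when its distance i from `top` reaches
# f = 1, 2, 4, 8, ..., i.e. it samples the first-parent chain at exponentially
# spaced points.  A tiny sketch over plain indices:
def _between_sample(length):
    picked, f, i = [], 1, 0
    while i < length:
        if i == f:
            picked.append(i)
            f *= 2
        i += 1
    return picked

# _between_sample(20) -> [1, 2, 4, 8, 16]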
1165 def findincoming(self, remote, base=None, heads=None, force=False):
1170 def findincoming(self, remote, base=None, heads=None, force=False):
1166 """Return list of roots of the subsets of missing nodes from remote
1171 """Return list of roots of the subsets of missing nodes from remote
1167
1172
1168 If base dict is specified, assume that these nodes and their parents
1173 If base dict is specified, assume that these nodes and their parents
1169 exist on the remote side and that no child of a node of base exists
1174 exist on the remote side and that no child of a node of base exists
1170 in both remote and self.
1175 in both remote and self.
1171 Furthermore, base will be updated to include the nodes that exist
1176 Furthermore, base will be updated to include the nodes that exist
1172 in both self and remote but none of whose children exist in both.
1177 in both self and remote but none of whose children exist in both.
1173 If a list of heads is specified, return only nodes which are heads
1178 If a list of heads is specified, return only nodes which are heads
1174 or ancestors of these heads.
1179 or ancestors of these heads.
1175
1180
1176 All the ancestors of base are in self and in remote.
1181 All the ancestors of base are in self and in remote.
1177 All the descendants of the list returned are missing in self.
1182 All the descendants of the list returned are missing in self.
1178 (and so we know that the rest of the nodes are missing in remote, see
1183 (and so we know that the rest of the nodes are missing in remote, see
1179 outgoing)
1184 outgoing)
1180 """
1185 """
1181 m = self.changelog.nodemap
1186 m = self.changelog.nodemap
1182 search = []
1187 search = []
1183 fetch = {}
1188 fetch = {}
1184 seen = {}
1189 seen = {}
1185 seenbranch = {}
1190 seenbranch = {}
1186 if base == None:
1191 if base == None:
1187 base = {}
1192 base = {}
1188
1193
1189 if not heads:
1194 if not heads:
1190 heads = remote.heads()
1195 heads = remote.heads()
1191
1196
1192 if self.changelog.tip() == nullid:
1197 if self.changelog.tip() == nullid:
1193 base[nullid] = 1
1198 base[nullid] = 1
1194 if heads != [nullid]:
1199 if heads != [nullid]:
1195 return [nullid]
1200 return [nullid]
1196 return []
1201 return []
1197
1202
1198 # assume we're closer to the tip than the root
1203 # assume we're closer to the tip than the root
1199 # and start by examining the heads
1204 # and start by examining the heads
1200 self.ui.status(_("searching for changes\n"))
1205 self.ui.status(_("searching for changes\n"))
1201
1206
1202 unknown = []
1207 unknown = []
1203 for h in heads:
1208 for h in heads:
1204 if h not in m:
1209 if h not in m:
1205 unknown.append(h)
1210 unknown.append(h)
1206 else:
1211 else:
1207 base[h] = 1
1212 base[h] = 1
1208
1213
1209 if not unknown:
1214 if not unknown:
1210 return []
1215 return []
1211
1216
1212 req = dict.fromkeys(unknown)
1217 req = dict.fromkeys(unknown)
1213 reqcnt = 0
1218 reqcnt = 0
1214
1219
1215 # search through remote branches
1220 # search through remote branches
1216 # a 'branch' here is a linear segment of history, with four parts:
1221 # a 'branch' here is a linear segment of history, with four parts:
1217 # head, root, first parent, second parent
1222 # head, root, first parent, second parent
1218 # (a branch always has two parents (or none) by definition)
1223 # (a branch always has two parents (or none) by definition)
1219 unknown = remote.branches(unknown)
1224 unknown = remote.branches(unknown)
1220 while unknown:
1225 while unknown:
1221 r = []
1226 r = []
1222 while unknown:
1227 while unknown:
1223 n = unknown.pop(0)
1228 n = unknown.pop(0)
1224 if n[0] in seen:
1229 if n[0] in seen:
1225 continue
1230 continue
1226
1231
1227 self.ui.debug(_("examining %s:%s\n")
1232 self.ui.debug(_("examining %s:%s\n")
1228 % (short(n[0]), short(n[1])))
1233 % (short(n[0]), short(n[1])))
1229 if n[0] == nullid: # found the end of the branch
1234 if n[0] == nullid: # found the end of the branch
1230 pass
1235 pass
1231 elif n in seenbranch:
1236 elif n in seenbranch:
1232 self.ui.debug(_("branch already found\n"))
1237 self.ui.debug(_("branch already found\n"))
1233 continue
1238 continue
1234 elif n[1] and n[1] in m: # do we know the base?
1239 elif n[1] and n[1] in m: # do we know the base?
1235 self.ui.debug(_("found incomplete branch %s:%s\n")
1240 self.ui.debug(_("found incomplete branch %s:%s\n")
1236 % (short(n[0]), short(n[1])))
1241 % (short(n[0]), short(n[1])))
1237 search.append(n) # schedule branch range for scanning
1242 search.append(n) # schedule branch range for scanning
1238 seenbranch[n] = 1
1243 seenbranch[n] = 1
1239 else:
1244 else:
1240 if n[1] not in seen and n[1] not in fetch:
1245 if n[1] not in seen and n[1] not in fetch:
1241 if n[2] in m and n[3] in m:
1246 if n[2] in m and n[3] in m:
1242 self.ui.debug(_("found new changeset %s\n") %
1247 self.ui.debug(_("found new changeset %s\n") %
1243 short(n[1]))
1248 short(n[1]))
1244 fetch[n[1]] = 1 # earliest unknown
1249 fetch[n[1]] = 1 # earliest unknown
1245 for p in n[2:4]:
1250 for p in n[2:4]:
1246 if p in m:
1251 if p in m:
1247 base[p] = 1 # latest known
1252 base[p] = 1 # latest known
1248
1253
1249 for p in n[2:4]:
1254 for p in n[2:4]:
1250 if p not in req and p not in m:
1255 if p not in req and p not in m:
1251 r.append(p)
1256 r.append(p)
1252 req[p] = 1
1257 req[p] = 1
1253 seen[n[0]] = 1
1258 seen[n[0]] = 1
1254
1259
1255 if r:
1260 if r:
1256 reqcnt += 1
1261 reqcnt += 1
1257 self.ui.debug(_("request %d: %s\n") %
1262 self.ui.debug(_("request %d: %s\n") %
1258 (reqcnt, " ".join(map(short, r))))
1263 (reqcnt, " ".join(map(short, r))))
1259 for p in xrange(0, len(r), 10):
1264 for p in xrange(0, len(r), 10):
1260 for b in remote.branches(r[p:p+10]):
1265 for b in remote.branches(r[p:p+10]):
1261 self.ui.debug(_("received %s:%s\n") %
1266 self.ui.debug(_("received %s:%s\n") %
1262 (short(b[0]), short(b[1])))
1267 (short(b[0]), short(b[1])))
1263 unknown.append(b)
1268 unknown.append(b)
1264
1269
1265 # do binary search on the branches we found
1270 # do binary search on the branches we found
1266 while search:
1271 while search:
1267 n = search.pop(0)
1272 n = search.pop(0)
1268 reqcnt += 1
1273 reqcnt += 1
1269 l = remote.between([(n[0], n[1])])[0]
1274 l = remote.between([(n[0], n[1])])[0]
1270 l.append(n[1])
1275 l.append(n[1])
1271 p = n[0]
1276 p = n[0]
1272 f = 1
1277 f = 1
1273 for i in l:
1278 for i in l:
1274 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1279 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1275 if i in m:
1280 if i in m:
1276 if f <= 2:
1281 if f <= 2:
1277 self.ui.debug(_("found new branch changeset %s\n") %
1282 self.ui.debug(_("found new branch changeset %s\n") %
1278 short(p))
1283 short(p))
1279 fetch[p] = 1
1284 fetch[p] = 1
1280 base[i] = 1
1285 base[i] = 1
1281 else:
1286 else:
1282 self.ui.debug(_("narrowed branch search to %s:%s\n")
1287 self.ui.debug(_("narrowed branch search to %s:%s\n")
1283 % (short(p), short(i)))
1288 % (short(p), short(i)))
1284 search.append((p, i))
1289 search.append((p, i))
1285 break
1290 break
1286 p, f = i, f * 2
1291 p, f = i, f * 2
1287
1292
1288 # sanity check our fetch list
1293 # sanity check our fetch list
1289 for f in fetch.keys():
1294 for f in fetch.keys():
1290 if f in m:
1295 if f in m:
1291 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1296 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1292
1297
1293 if base.keys() == [nullid]:
1298 if base.keys() == [nullid]:
1294 if force:
1299 if force:
1295 self.ui.warn(_("warning: repository is unrelated\n"))
1300 self.ui.warn(_("warning: repository is unrelated\n"))
1296 else:
1301 else:
1297 raise util.Abort(_("repository is unrelated"))
1302 raise util.Abort(_("repository is unrelated"))
1298
1303
1299 self.ui.debug(_("found new changesets starting at ") +
1304 self.ui.debug(_("found new changesets starting at ") +
1300 " ".join([short(f) for f in fetch]) + "\n")
1305 " ".join([short(f) for f in fetch]) + "\n")
1301
1306
1302 self.ui.debug(_("%d total queries\n") % reqcnt)
1307 self.ui.debug(_("%d total queries\n") % reqcnt)
1303
1308
1304 return fetch.keys()
1309 return fetch.keys()
1305
1310
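# A rough standalone sketch of the narrowing loop above, assuming `samples`
# is the list returned by remote.between() for one (head, root) pair with the
# known root appended, and `known` stands in for membership in
# self.changelog.nodemap.
def _narrow_sketch(head, samples, known):
    p, f = head, 1
    for i in samples:
        if known(i):
            if f <= 2:
                return ('fetch', p)     # p is the earliest unknown node
            return ('search', (p, i))   # gap still wide: narrow (p, i) again
        p, f = i, f * 2
    return (None, None)                 # not reached: the root is always known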
1306 def findoutgoing(self, remote, base=None, heads=None, force=False):
1311 def findoutgoing(self, remote, base=None, heads=None, force=False):
1307 """Return list of nodes that are roots of subsets not in remote
1312 """Return list of nodes that are roots of subsets not in remote
1308
1313
1309 If base dict is specified, assume that these nodes and their parents
1314 If base dict is specified, assume that these nodes and their parents
1310 exist on the remote side.
1315 exist on the remote side.
1311 If a list of heads is specified, return only nodes which are heads
1316 If a list of heads is specified, return only nodes which are heads
1312 or ancestors of these heads, and return a second element which
1317 or ancestors of these heads, and return a second element which
1313 contains all remote heads which get new children.
1318 contains all remote heads which get new children.
1314 """
1319 """
1315 if base == None:
1320 if base == None:
1316 base = {}
1321 base = {}
1317 self.findincoming(remote, base, heads, force=force)
1322 self.findincoming(remote, base, heads, force=force)
1318
1323
1319 self.ui.debug(_("common changesets up to ")
1324 self.ui.debug(_("common changesets up to ")
1320 + " ".join(map(short, base.keys())) + "\n")
1325 + " ".join(map(short, base.keys())) + "\n")
1321
1326
1322 remain = dict.fromkeys(self.changelog.nodemap)
1327 remain = dict.fromkeys(self.changelog.nodemap)
1323
1328
1324 # prune everything remote has from the tree
1329 # prune everything remote has from the tree
1325 del remain[nullid]
1330 del remain[nullid]
1326 remove = base.keys()
1331 remove = base.keys()
1327 while remove:
1332 while remove:
1328 n = remove.pop(0)
1333 n = remove.pop(0)
1329 if n in remain:
1334 if n in remain:
1330 del remain[n]
1335 del remain[n]
1331 for p in self.changelog.parents(n):
1336 for p in self.changelog.parents(n):
1332 remove.append(p)
1337 remove.append(p)
1333
1338
1334 # find every node whose parents have been pruned
1339 # find every node whose parents have been pruned
1335 subset = []
1340 subset = []
1336 # find every remote head that will get new children
1341 # find every remote head that will get new children
1337 updated_heads = {}
1342 updated_heads = {}
1338 for n in remain:
1343 for n in remain:
1339 p1, p2 = self.changelog.parents(n)
1344 p1, p2 = self.changelog.parents(n)
1340 if p1 not in remain and p2 not in remain:
1345 if p1 not in remain and p2 not in remain:
1341 subset.append(n)
1346 subset.append(n)
1342 if heads:
1347 if heads:
1343 if p1 in heads:
1348 if p1 in heads:
1344 updated_heads[p1] = True
1349 updated_heads[p1] = True
1345 if p2 in heads:
1350 if p2 in heads:
1346 updated_heads[p2] = True
1351 updated_heads[p2] = True
1347
1352
1348 # this is the set of all roots we have to push
1353 # this is the set of all roots we have to push
1349 if heads:
1354 if heads:
1350 return subset, updated_heads.keys()
1355 return subset, updated_heads.keys()
1351 else:
1356 else:
1352 return subset
1357 return subset
1353
1358
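# A standalone sketch of the pruning above: drop every node reachable from
# `base` (nodes the remote is known to have) and return the roots of what is
# left.  `parents` stands in for changelog.parents; nodes are plain hashables
# and nullid is excluded up front.
def _outgoing_roots(all_nodes, base, parents, nullid=None):
    remain = set(all_nodes) - set([nullid])
    remove = list(base)
    while remove:
        n = remove.pop(0)
        if n in remain:
            remain.remove(n)
            remove.extend(parents(n))
    # roots: nodes none of whose parents survived the pruning
    return [n for n in remain
            if not any(p in remain for p in parents(n))]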
1354 def pull(self, remote, heads=None, force=False):
1359 def pull(self, remote, heads=None, force=False):
1355 lock = self.lock()
1360 lock = self.lock()
1356 try:
1361 try:
1357 fetch = self.findincoming(remote, heads=heads, force=force)
1362 fetch = self.findincoming(remote, heads=heads, force=force)
1358 if fetch == [nullid]:
1363 if fetch == [nullid]:
1359 self.ui.status(_("requesting all changes\n"))
1364 self.ui.status(_("requesting all changes\n"))
1360
1365
1361 if not fetch:
1366 if not fetch:
1362 self.ui.status(_("no changes found\n"))
1367 self.ui.status(_("no changes found\n"))
1363 return 0
1368 return 0
1364
1369
1365 if heads is None:
1370 if heads is None:
1366 cg = remote.changegroup(fetch, 'pull')
1371 cg = remote.changegroup(fetch, 'pull')
1367 else:
1372 else:
1368 if 'changegroupsubset' not in remote.capabilities:
1373 if 'changegroupsubset' not in remote.capabilities:
1369 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1374 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1370 cg = remote.changegroupsubset(fetch, heads, 'pull')
1375 cg = remote.changegroupsubset(fetch, heads, 'pull')
1371 return self.addchangegroup(cg, 'pull', remote.url())
1376 return self.addchangegroup(cg, 'pull', remote.url())
1372 finally:
1377 finally:
1373 del lock
1378 del lock
1374
1379
1375 def push(self, remote, force=False, revs=None):
1380 def push(self, remote, force=False, revs=None):
1376 # there are two ways to push to remote repo:
1381 # there are two ways to push to remote repo:
1377 #
1382 #
1378 # addchangegroup assumes local user can lock remote
1383 # addchangegroup assumes local user can lock remote
1379 # repo (local filesystem, old ssh servers).
1384 # repo (local filesystem, old ssh servers).
1380 #
1385 #
1381 # unbundle assumes local user cannot lock remote repo (new ssh
1386 # unbundle assumes local user cannot lock remote repo (new ssh
1382 # servers, http servers).
1387 # servers, http servers).
1383
1388
1384 if remote.capable('unbundle'):
1389 if remote.capable('unbundle'):
1385 return self.push_unbundle(remote, force, revs)
1390 return self.push_unbundle(remote, force, revs)
1386 return self.push_addchangegroup(remote, force, revs)
1391 return self.push_addchangegroup(remote, force, revs)
1387
1392
1388 def prepush(self, remote, force, revs):
1393 def prepush(self, remote, force, revs):
1389 base = {}
1394 base = {}
1390 remote_heads = remote.heads()
1395 remote_heads = remote.heads()
1391 inc = self.findincoming(remote, base, remote_heads, force=force)
1396 inc = self.findincoming(remote, base, remote_heads, force=force)
1392
1397
1393 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1398 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1394 if revs is not None:
1399 if revs is not None:
1395 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1400 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1396 else:
1401 else:
1397 bases, heads = update, self.changelog.heads()
1402 bases, heads = update, self.changelog.heads()
1398
1403
1399 if not bases:
1404 if not bases:
1400 self.ui.status(_("no changes found\n"))
1405 self.ui.status(_("no changes found\n"))
1401 return None, 1
1406 return None, 1
1402 elif not force:
1407 elif not force:
1403 # check if we're creating new remote heads
1408 # check if we're creating new remote heads
1404 # to be a remote head after push, node must be either
1409 # to be a remote head after push, node must be either
1405 # - unknown locally
1410 # - unknown locally
1406 # - a local outgoing head descended from update
1411 # - a local outgoing head descended from update
1407 # - a remote head that's known locally and not
1412 # - a remote head that's known locally and not
1408 # ancestral to an outgoing head
1413 # ancestral to an outgoing head
1409
1414
1410 warn = 0
1415 warn = 0
1411
1416
1412 if remote_heads == [nullid]:
1417 if remote_heads == [nullid]:
1413 warn = 0
1418 warn = 0
1414 elif not revs and len(heads) > len(remote_heads):
1419 elif not revs and len(heads) > len(remote_heads):
1415 warn = 1
1420 warn = 1
1416 else:
1421 else:
1417 newheads = list(heads)
1422 newheads = list(heads)
1418 for r in remote_heads:
1423 for r in remote_heads:
1419 if r in self.changelog.nodemap:
1424 if r in self.changelog.nodemap:
1420 desc = self.changelog.heads(r, heads)
1425 desc = self.changelog.heads(r, heads)
1421 l = [h for h in heads if h in desc]
1426 l = [h for h in heads if h in desc]
1422 if not l:
1427 if not l:
1423 newheads.append(r)
1428 newheads.append(r)
1424 else:
1429 else:
1425 newheads.append(r)
1430 newheads.append(r)
1426 if len(newheads) > len(remote_heads):
1431 if len(newheads) > len(remote_heads):
1427 warn = 1
1432 warn = 1
1428
1433
1429 if warn:
1434 if warn:
1430 self.ui.warn(_("abort: push creates new remote branches!\n"))
1435 self.ui.warn(_("abort: push creates new remote branches!\n"))
1431 self.ui.status(_("(did you forget to merge?"
1436 self.ui.status(_("(did you forget to merge?"
1432 " use push -f to force)\n"))
1437 " use push -f to force)\n"))
1433 return None, 1
1438 return None, 1
1434 elif inc:
1439 elif inc:
1435 self.ui.warn(_("note: unsynced remote changes!\n"))
1440 self.ui.warn(_("note: unsynced remote changes!\n"))
1436
1441
1437
1442
1438 if revs is None:
1443 if revs is None:
1439 cg = self.changegroup(update, 'push')
1444 cg = self.changegroup(update, 'push')
1440 else:
1445 else:
1441 cg = self.changegroupsubset(update, revs, 'push')
1446 cg = self.changegroupsubset(update, revs, 'push')
1442 return cg, remote_heads
1447 return cg, remote_heads
1443
1448
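# A rough sketch of the head check in prepush() above: the push is flagged
# when, after accounting for our outgoing heads, the remote would end up with
# more heads than it started with.  `known_locally` and `descends_from` are
# stand-ins for the nodemap lookup and the changelog.heads() reachability test
# used by the real code.
def _creates_new_heads(local_heads, remote_heads, known_locally, descends_from):
    newheads = list(local_heads)
    for r in remote_heads:
        if not known_locally(r):
            # unknown remote head: it will still be a head after the push
            newheads.append(r)
        elif not any(descends_from(h, r) for h in local_heads):
            # none of our outgoing heads sits on top of r, so r stays a head
            newheads.append(r)
    return len(newheads) > len(remote_heads)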
1444 def push_addchangegroup(self, remote, force, revs):
1449 def push_addchangegroup(self, remote, force, revs):
1445 lock = remote.lock()
1450 lock = remote.lock()
1446 try:
1451 try:
1447 ret = self.prepush(remote, force, revs)
1452 ret = self.prepush(remote, force, revs)
1448 if ret[0] is not None:
1453 if ret[0] is not None:
1449 cg, remote_heads = ret
1454 cg, remote_heads = ret
1450 return remote.addchangegroup(cg, 'push', self.url())
1455 return remote.addchangegroup(cg, 'push', self.url())
1451 return ret[1]
1456 return ret[1]
1452 finally:
1457 finally:
1453 del lock
1458 del lock
1454
1459
1455 def push_unbundle(self, remote, force, revs):
1460 def push_unbundle(self, remote, force, revs):
1456 # local repo finds heads on server, finds out what revs it
1461 # local repo finds heads on server, finds out what revs it
1457 # must push. once revs transferred, if server finds it has
1462 # must push. once revs transferred, if server finds it has
1458 # different heads (someone else won commit/push race), server
1463 # different heads (someone else won commit/push race), server
1459 # aborts.
1464 # aborts.
1460
1465
1461 ret = self.prepush(remote, force, revs)
1466 ret = self.prepush(remote, force, revs)
1462 if ret[0] is not None:
1467 if ret[0] is not None:
1463 cg, remote_heads = ret
1468 cg, remote_heads = ret
1464 if force: remote_heads = ['force']
1469 if force: remote_heads = ['force']
1465 return remote.unbundle(cg, remote_heads, 'push')
1470 return remote.unbundle(cg, remote_heads, 'push')
1466 return ret[1]
1471 return ret[1]
1467
1472
1468 def changegroupinfo(self, nodes):
1473 def changegroupinfo(self, nodes):
1469 self.ui.note(_("%d changesets found\n") % len(nodes))
1474 self.ui.note(_("%d changesets found\n") % len(nodes))
1470 if self.ui.debugflag:
1475 if self.ui.debugflag:
1471 self.ui.debug(_("List of changesets:\n"))
1476 self.ui.debug(_("List of changesets:\n"))
1472 for node in nodes:
1477 for node in nodes:
1473 self.ui.debug("%s\n" % hex(node))
1478 self.ui.debug("%s\n" % hex(node))
1474
1479
1475 def changegroupsubset(self, bases, heads, source):
1480 def changegroupsubset(self, bases, heads, source):
1476 """This function generates a changegroup consisting of all the nodes
1481 """This function generates a changegroup consisting of all the nodes
1477 that are descendants of any of the bases, and ancestors of any of
1482 that are descendants of any of the bases, and ancestors of any of
1478 the heads.
1483 the heads.
1479
1484
1480 It is fairly complex as determining which filenodes and which
1485 It is fairly complex as determining which filenodes and which
1481 manifest nodes need to be included for the changeset to be complete
1486 manifest nodes need to be included for the changeset to be complete
1482 is non-trivial.
1487 is non-trivial.
1483
1488
1484 Another wrinkle is doing the reverse, figuring out which changeset in
1489 Another wrinkle is doing the reverse, figuring out which changeset in
1485 the changegroup a particular filenode or manifestnode belongs to."""
1490 the changegroup a particular filenode or manifestnode belongs to."""
1486
1491
1487 self.hook('preoutgoing', throw=True, source=source)
1492 self.hook('preoutgoing', throw=True, source=source)
1488
1493
1489 # Set up some initial variables
1494 # Set up some initial variables
1490 # Make it easy to refer to self.changelog
1495 # Make it easy to refer to self.changelog
1491 cl = self.changelog
1496 cl = self.changelog
1492 # msng is short for missing - compute the list of changesets in this
1497 # msng is short for missing - compute the list of changesets in this
1493 # changegroup.
1498 # changegroup.
1494 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1499 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1495 self.changegroupinfo(msng_cl_lst)
1500 self.changegroupinfo(msng_cl_lst)
1496 # Some bases may turn out to be superfluous, and some heads may be
1501 # Some bases may turn out to be superfluous, and some heads may be
1497 # too. nodesbetween will return the minimal set of bases and heads
1502 # too. nodesbetween will return the minimal set of bases and heads
1498 # necessary to re-create the changegroup.
1503 # necessary to re-create the changegroup.
1499
1504
1500 # Known heads are the list of heads that it is assumed the recipient
1505 # Known heads are the list of heads that it is assumed the recipient
1501 # of this changegroup will know about.
1506 # of this changegroup will know about.
1502 knownheads = {}
1507 knownheads = {}
1503 # We assume that all parents of bases are known heads.
1508 # We assume that all parents of bases are known heads.
1504 for n in bases:
1509 for n in bases:
1505 for p in cl.parents(n):
1510 for p in cl.parents(n):
1506 if p != nullid:
1511 if p != nullid:
1507 knownheads[p] = 1
1512 knownheads[p] = 1
1508 knownheads = knownheads.keys()
1513 knownheads = knownheads.keys()
1509 if knownheads:
1514 if knownheads:
1510 # Now that we know what heads are known, we can compute which
1515 # Now that we know what heads are known, we can compute which
1511 # changesets are known. The recipient must know about all
1516 # changesets are known. The recipient must know about all
1512 # changesets required to reach the known heads from the null
1517 # changesets required to reach the known heads from the null
1513 # changeset.
1518 # changeset.
1514 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1519 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1515 junk = None
1520 junk = None
1516 # Transform the list into an ersatz set.
1521 # Transform the list into an ersatz set.
1517 has_cl_set = dict.fromkeys(has_cl_set)
1522 has_cl_set = dict.fromkeys(has_cl_set)
1518 else:
1523 else:
1519 # If there were no known heads, the recipient cannot be assumed to
1524 # If there were no known heads, the recipient cannot be assumed to
1520 # know about any changesets.
1525 # know about any changesets.
1521 has_cl_set = {}
1526 has_cl_set = {}
1522
1527
1523 # Make it easy to refer to self.manifest
1528 # Make it easy to refer to self.manifest
1524 mnfst = self.manifest
1529 mnfst = self.manifest
1525 # We don't know which manifests are missing yet
1530 # We don't know which manifests are missing yet
1526 msng_mnfst_set = {}
1531 msng_mnfst_set = {}
1527 # Nor do we know which filenodes are missing.
1532 # Nor do we know which filenodes are missing.
1528 msng_filenode_set = {}
1533 msng_filenode_set = {}
1529
1534
1530 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1535 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1531 junk = None
1536 junk = None
1532
1537
1533 # A changeset always belongs to itself, so the changenode lookup
1538 # A changeset always belongs to itself, so the changenode lookup
1534 # function for a changenode is identity.
1539 # function for a changenode is identity.
1535 def identity(x):
1540 def identity(x):
1536 return x
1541 return x
1537
1542
1538 # A function generating function. Sets up an environment for the
1543 # A function generating function. Sets up an environment for the
1539 # inner function.
1544 # inner function.
1540 def cmp_by_rev_func(revlog):
1545 def cmp_by_rev_func(revlog):
1541 # Compare two nodes by their revision number in the environment's
1546 # Compare two nodes by their revision number in the environment's
1542 # revision history. Since the revision number both represents the
1547 # revision history. Since the revision number both represents the
1543 # most efficient order to read the nodes in, and represents a
1548 # most efficient order to read the nodes in, and represents a
1544 # topological sorting of the nodes, this function is often useful.
1549 # topological sorting of the nodes, this function is often useful.
1545 def cmp_by_rev(a, b):
1550 def cmp_by_rev(a, b):
1546 return cmp(revlog.rev(a), revlog.rev(b))
1551 return cmp(revlog.rev(a), revlog.rev(b))
1547 return cmp_by_rev
1552 return cmp_by_rev
1548
1553
1549 # If we determine that a particular file or manifest node must be a
1554 # If we determine that a particular file or manifest node must be a
1550 # node that the recipient of the changegroup will already have, we can
1555 # node that the recipient of the changegroup will already have, we can
1551 # also assume the recipient will have all the parents. This function
1556 # also assume the recipient will have all the parents. This function
1552 # prunes them from the set of missing nodes.
1557 # prunes them from the set of missing nodes.
1553 def prune_parents(revlog, hasset, msngset):
1558 def prune_parents(revlog, hasset, msngset):
1554 haslst = hasset.keys()
1559 haslst = hasset.keys()
1555 haslst.sort(cmp_by_rev_func(revlog))
1560 haslst.sort(cmp_by_rev_func(revlog))
1556 for node in haslst:
1561 for node in haslst:
1557 parentlst = [p for p in revlog.parents(node) if p != nullid]
1562 parentlst = [p for p in revlog.parents(node) if p != nullid]
1558 while parentlst:
1563 while parentlst:
1559 n = parentlst.pop()
1564 n = parentlst.pop()
1560 if n not in hasset:
1565 if n not in hasset:
1561 hasset[n] = 1
1566 hasset[n] = 1
1562 p = [p for p in revlog.parents(n) if p != nullid]
1567 p = [p for p in revlog.parents(n) if p != nullid]
1563 parentlst.extend(p)
1568 parentlst.extend(p)
1564 for n in hasset:
1569 for n in hasset:
1565 msngset.pop(n, None)
1570 msngset.pop(n, None)
1566
1571
1567 # This is a function generating function used to set up an environment
1572 # This is a function generating function used to set up an environment
1568 # for the inner function to execute in.
1573 # for the inner function to execute in.
1569 def manifest_and_file_collector(changedfileset):
1574 def manifest_and_file_collector(changedfileset):
1570 # This is an information gathering function that gathers
1575 # This is an information gathering function that gathers
1571 # information from each changeset node that goes out as part of
1576 # information from each changeset node that goes out as part of
1572 # the changegroup. The information gathered is a list of which
1577 # the changegroup. The information gathered is a list of which
1573 # manifest nodes are potentially required (the recipient may
1578 # manifest nodes are potentially required (the recipient may
1574 # already have them) and the total list of all files which were
1579 # already have them) and the total list of all files which were
1575 # changed in any changeset in the changegroup.
1580 # changed in any changeset in the changegroup.
1576 #
1581 #
1577 # We also remember the first changenode each manifest was
1582 # We also remember the first changenode each manifest was
1578 # referenced by, so we can later determine which changenode 'owns'
1583 # referenced by, so we can later determine which changenode 'owns'
1579 # the manifest.
1584 # the manifest.
1580 def collect_manifests_and_files(clnode):
1585 def collect_manifests_and_files(clnode):
1581 c = cl.read(clnode)
1586 c = cl.read(clnode)
1582 for f in c[3]:
1587 for f in c[3]:
1583 # This is to make sure we only have one instance of each
1588 # This is to make sure we only have one instance of each
1584 # filename string for each filename.
1589 # filename string for each filename.
1585 changedfileset.setdefault(f, f)
1590 changedfileset.setdefault(f, f)
1586 msng_mnfst_set.setdefault(c[0], clnode)
1591 msng_mnfst_set.setdefault(c[0], clnode)
1587 return collect_manifests_and_files
1592 return collect_manifests_and_files
1588
1593
1589 # Figure out which manifest nodes (of the ones we think might be part
1594 # Figure out which manifest nodes (of the ones we think might be part
1590 # of the changegroup) the recipient must know about and remove them
1595 # of the changegroup) the recipient must know about and remove them
1591 # from the changegroup.
1596 # from the changegroup.
1592 def prune_manifests():
1597 def prune_manifests():
1593 has_mnfst_set = {}
1598 has_mnfst_set = {}
1594 for n in msng_mnfst_set:
1599 for n in msng_mnfst_set:
1595 # If a 'missing' manifest thinks it belongs to a changenode
1600 # If a 'missing' manifest thinks it belongs to a changenode
1596 # the recipient is assumed to have, obviously the recipient
1601 # the recipient is assumed to have, obviously the recipient
1597 # must have that manifest.
1602 # must have that manifest.
1598 linknode = cl.node(mnfst.linkrev(n))
1603 linknode = cl.node(mnfst.linkrev(n))
1599 if linknode in has_cl_set:
1604 if linknode in has_cl_set:
1600 has_mnfst_set[n] = 1
1605 has_mnfst_set[n] = 1
1601 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1606 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1602
1607
1603 # Use the information collected in collect_manifests_and_files to say
1608 # Use the information collected in collect_manifests_and_files to say
1604 # which changenode any manifestnode belongs to.
1609 # which changenode any manifestnode belongs to.
1605 def lookup_manifest_link(mnfstnode):
1610 def lookup_manifest_link(mnfstnode):
1606 return msng_mnfst_set[mnfstnode]
1611 return msng_mnfst_set[mnfstnode]
1607
1612
1608 # A function generating function that sets up the initial environment
1613 # A function generating function that sets up the initial environment
1609 # for the inner function.
1614 # for the inner function.
1610 def filenode_collector(changedfiles):
1615 def filenode_collector(changedfiles):
1611 next_rev = [0]
1616 next_rev = [0]
1612 # This gathers information from each manifestnode included in the
1617 # This gathers information from each manifestnode included in the
1613 # changegroup about which filenodes the manifest node references
1618 # changegroup about which filenodes the manifest node references
1614 # so we can include those in the changegroup too.
1619 # so we can include those in the changegroup too.
1615 #
1620 #
1616 # It also remembers which changenode each filenode belongs to. It
1621 # It also remembers which changenode each filenode belongs to. It
1617 # does this by assuming a filenode belongs to the changenode
1622 # does this by assuming a filenode belongs to the changenode
1618 # the first manifest that references it belongs to.
1623 # the first manifest that references it belongs to.
1619 def collect_msng_filenodes(mnfstnode):
1624 def collect_msng_filenodes(mnfstnode):
1620 r = mnfst.rev(mnfstnode)
1625 r = mnfst.rev(mnfstnode)
1621 if r == next_rev[0]:
1626 if r == next_rev[0]:
1622 # If the last rev we looked at was the one just previous,
1627 # If the last rev we looked at was the one just previous,
1623 # we only need to see a diff.
1628 # we only need to see a diff.
1624 deltamf = mnfst.readdelta(mnfstnode)
1629 deltamf = mnfst.readdelta(mnfstnode)
1625 # For each line in the delta
1630 # For each line in the delta
1626 for f, fnode in deltamf.items():
1631 for f, fnode in deltamf.items():
1627 f = changedfiles.get(f, None)
1632 f = changedfiles.get(f, None)
1628 # And if the file is in the list of files we care
1633 # And if the file is in the list of files we care
1629 # about.
1634 # about.
1630 if f is not None:
1635 if f is not None:
1631 # Get the changenode this manifest belongs to
1636 # Get the changenode this manifest belongs to
1632 clnode = msng_mnfst_set[mnfstnode]
1637 clnode = msng_mnfst_set[mnfstnode]
1633 # Create the set of filenodes for the file if
1638 # Create the set of filenodes for the file if
1634 # there isn't one already.
1639 # there isn't one already.
1635 ndset = msng_filenode_set.setdefault(f, {})
1640 ndset = msng_filenode_set.setdefault(f, {})
1636 # And set the filenode's changelog node to the
1641 # And set the filenode's changelog node to the
1637 # manifest's if it hasn't been set already.
1642 # manifest's if it hasn't been set already.
1638 ndset.setdefault(fnode, clnode)
1643 ndset.setdefault(fnode, clnode)
1639 else:
1644 else:
1640 # Otherwise we need a full manifest.
1645 # Otherwise we need a full manifest.
1641 m = mnfst.read(mnfstnode)
1646 m = mnfst.read(mnfstnode)
1642 # For every file we care about.
1647 # For every file we care about.
1643 for f in changedfiles:
1648 for f in changedfiles:
1644 fnode = m.get(f, None)
1649 fnode = m.get(f, None)
1645 # If it's in the manifest
1650 # If it's in the manifest
1646 if fnode is not None:
1651 if fnode is not None:
1647 # See comments above.
1652 # See comments above.
1648 clnode = msng_mnfst_set[mnfstnode]
1653 clnode = msng_mnfst_set[mnfstnode]
1649 ndset = msng_filenode_set.setdefault(f, {})
1654 ndset = msng_filenode_set.setdefault(f, {})
1650 ndset.setdefault(fnode, clnode)
1655 ndset.setdefault(fnode, clnode)
1651 # Remember the revision we hope to see next.
1656 # Remember the revision we hope to see next.
1652 next_rev[0] = r + 1
1657 next_rev[0] = r + 1
1653 return collect_msng_filenodes
1658 return collect_msng_filenodes
1654
1659
1655 # We have a list of filenodes we think we need for a file; let's remove
1660 # We have a list of filenodes we think we need for a file; let's remove
1656 # all those we know the recipient must have.
1661 # all those we know the recipient must have.
1657 def prune_filenodes(f, filerevlog):
1662 def prune_filenodes(f, filerevlog):
1658 msngset = msng_filenode_set[f]
1663 msngset = msng_filenode_set[f]
1659 hasset = {}
1664 hasset = {}
1660 # If a 'missing' filenode thinks it belongs to a changenode we
1665 # If a 'missing' filenode thinks it belongs to a changenode we
1661 # assume the recipient must have, then the recipient must have
1666 # assume the recipient must have, then the recipient must have
1662 # that filenode.
1667 # that filenode.
1663 for n in msngset:
1668 for n in msngset:
1664 clnode = cl.node(filerevlog.linkrev(n))
1669 clnode = cl.node(filerevlog.linkrev(n))
1665 if clnode in has_cl_set:
1670 if clnode in has_cl_set:
1666 hasset[n] = 1
1671 hasset[n] = 1
1667 prune_parents(filerevlog, hasset, msngset)
1672 prune_parents(filerevlog, hasset, msngset)
1668
1673
1669 # A function generating function that sets up a context for the
1674 # A function generating function that sets up a context for the
1670 # inner function.
1675 # inner function.
1671 def lookup_filenode_link_func(fname):
1676 def lookup_filenode_link_func(fname):
1672 msngset = msng_filenode_set[fname]
1677 msngset = msng_filenode_set[fname]
1673 # Lookup the changenode the filenode belongs to.
1678 # Lookup the changenode the filenode belongs to.
1674 def lookup_filenode_link(fnode):
1679 def lookup_filenode_link(fnode):
1675 return msngset[fnode]
1680 return msngset[fnode]
1676 return lookup_filenode_link
1681 return lookup_filenode_link
1677
1682
1678 # Now that we have all these utility functions to help out and
1683 # Now that we have all these utility functions to help out and
1679 # logically divide up the task, generate the group.
1684 # logically divide up the task, generate the group.
1680 def gengroup():
1685 def gengroup():
1681 # The set of changed files starts empty.
1686 # The set of changed files starts empty.
1682 changedfiles = {}
1687 changedfiles = {}
1683 # Create a changenode group generator that will call our functions
1688 # Create a changenode group generator that will call our functions
1684 # back to lookup the owning changenode and collect information.
1689 # back to lookup the owning changenode and collect information.
1685 group = cl.group(msng_cl_lst, identity,
1690 group = cl.group(msng_cl_lst, identity,
1686 manifest_and_file_collector(changedfiles))
1691 manifest_and_file_collector(changedfiles))
1687 for chnk in group:
1692 for chnk in group:
1688 yield chnk
1693 yield chnk
1689
1694
1690 # The list of manifests has been collected by the generator
1695 # The list of manifests has been collected by the generator
1691 # calling our functions back.
1696 # calling our functions back.
1692 prune_manifests()
1697 prune_manifests()
1693 msng_mnfst_lst = msng_mnfst_set.keys()
1698 msng_mnfst_lst = msng_mnfst_set.keys()
1694 # Sort the manifestnodes by revision number.
1699 # Sort the manifestnodes by revision number.
1695 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1700 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1696 # Create a generator for the manifestnodes that calls our lookup
1701 # Create a generator for the manifestnodes that calls our lookup
1697 # and data collection functions back.
1702 # and data collection functions back.
1698 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1703 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1699 filenode_collector(changedfiles))
1704 filenode_collector(changedfiles))
1700 for chnk in group:
1705 for chnk in group:
1701 yield chnk
1706 yield chnk
1702
1707
1703 # These are no longer needed, dereference and toss the memory for
1708 # These are no longer needed, dereference and toss the memory for
1704 # them.
1709 # them.
1705 msng_mnfst_lst = None
1710 msng_mnfst_lst = None
1706 msng_mnfst_set.clear()
1711 msng_mnfst_set.clear()
1707
1712
1708 changedfiles = changedfiles.keys()
1713 changedfiles = changedfiles.keys()
1709 changedfiles.sort()
1714 changedfiles.sort()
1710 # Go through all our files in order sorted by name.
1715 # Go through all our files in order sorted by name.
1711 for fname in changedfiles:
1716 for fname in changedfiles:
1712 filerevlog = self.file(fname)
1717 filerevlog = self.file(fname)
1713 if filerevlog.count() == 0:
1718 if filerevlog.count() == 0:
1714 raise util.Abort(_("empty or missing revlog for %s") % fname)
1719 raise util.Abort(_("empty or missing revlog for %s") % fname)
1715 # Toss out the filenodes that the recipient isn't really
1720 # Toss out the filenodes that the recipient isn't really
1716 # missing.
1721 # missing.
1717 if msng_filenode_set.has_key(fname):
1722 if msng_filenode_set.has_key(fname):
1718 prune_filenodes(fname, filerevlog)
1723 prune_filenodes(fname, filerevlog)
1719 msng_filenode_lst = msng_filenode_set[fname].keys()
1724 msng_filenode_lst = msng_filenode_set[fname].keys()
1720 else:
1725 else:
1721 msng_filenode_lst = []
1726 msng_filenode_lst = []
1722 # If any filenodes are left, generate the group for them,
1727 # If any filenodes are left, generate the group for them,
1723 # otherwise don't bother.
1728 # otherwise don't bother.
1724 if len(msng_filenode_lst) > 0:
1729 if len(msng_filenode_lst) > 0:
1725 yield changegroup.chunkheader(len(fname))
1730 yield changegroup.chunkheader(len(fname))
1726 yield fname
1731 yield fname
1727 # Sort the filenodes by their revision #
1732 # Sort the filenodes by their revision #
1728 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1733 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1729 # Create a group generator and only pass in a changenode
1734 # Create a group generator and only pass in a changenode
1730 # lookup function as we need to collect no information
1735 # lookup function as we need to collect no information
1731 # from filenodes.
1736 # from filenodes.
1732 group = filerevlog.group(msng_filenode_lst,
1737 group = filerevlog.group(msng_filenode_lst,
1733 lookup_filenode_link_func(fname))
1738 lookup_filenode_link_func(fname))
1734 for chnk in group:
1739 for chnk in group:
1735 yield chnk
1740 yield chnk
1736 if msng_filenode_set.has_key(fname):
1741 if msng_filenode_set.has_key(fname):
1737 # Don't need this anymore, toss it to free memory.
1742 # Don't need this anymore, toss it to free memory.
1738 del msng_filenode_set[fname]
1743 del msng_filenode_set[fname]
1739 # Signal that no more groups are left.
1744 # Signal that no more groups are left.
1740 yield changegroup.closechunk()
1745 yield changegroup.closechunk()
1741
1746
1742 if msng_cl_lst:
1747 if msng_cl_lst:
1743 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1748 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1744
1749
1745 return util.chunkbuffer(gengroup())
1750 return util.chunkbuffer(gengroup())
1746
1751
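# A standalone sketch of the prune_parents() idea used above: once a node is
# known to be present on the recipient, all of its ancestors are too, so they
# can be dropped from the "missing" map.  `parents` stands in for
# revlog.parents and is assumed to already omit nullid; the real helper also
# walks in revision order for efficiency.
def _prune_parents_sketch(has, missing, parents):
    stack = list(has)
    while stack:
        n = stack.pop()
        for p in parents(n):
            if p not in has:
                has.add(p)
                stack.append(p)
    for n in has:
        missing.pop(n, None)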
1747 def changegroup(self, basenodes, source):
1752 def changegroup(self, basenodes, source):
1748 """Generate a changegroup of all nodes that we have that a recipient
1753 """Generate a changegroup of all nodes that we have that a recipient
1749 doesn't.
1754 doesn't.
1750
1755
1751 This is much easier than the previous function as we can assume that
1756 This is much easier than the previous function as we can assume that
1752 the recipient has any changenode we aren't sending them."""
1757 the recipient has any changenode we aren't sending them."""
1753
1758
1754 self.hook('preoutgoing', throw=True, source=source)
1759 self.hook('preoutgoing', throw=True, source=source)
1755
1760
1756 cl = self.changelog
1761 cl = self.changelog
1757 nodes = cl.nodesbetween(basenodes, None)[0]
1762 nodes = cl.nodesbetween(basenodes, None)[0]
1758 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1763 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1759 self.changegroupinfo(nodes)
1764 self.changegroupinfo(nodes)
1760
1765
1761 def identity(x):
1766 def identity(x):
1762 return x
1767 return x
1763
1768
1764 def gennodelst(revlog):
1769 def gennodelst(revlog):
1765 for r in xrange(0, revlog.count()):
1770 for r in xrange(0, revlog.count()):
1766 n = revlog.node(r)
1771 n = revlog.node(r)
1767 if revlog.linkrev(n) in revset:
1772 if revlog.linkrev(n) in revset:
1768 yield n
1773 yield n
1769
1774
1770 def changed_file_collector(changedfileset):
1775 def changed_file_collector(changedfileset):
1771 def collect_changed_files(clnode):
1776 def collect_changed_files(clnode):
1772 c = cl.read(clnode)
1777 c = cl.read(clnode)
1773 for fname in c[3]:
1778 for fname in c[3]:
1774 changedfileset[fname] = 1
1779 changedfileset[fname] = 1
1775 return collect_changed_files
1780 return collect_changed_files
1776
1781
1777 def lookuprevlink_func(revlog):
1782 def lookuprevlink_func(revlog):
1778 def lookuprevlink(n):
1783 def lookuprevlink(n):
1779 return cl.node(revlog.linkrev(n))
1784 return cl.node(revlog.linkrev(n))
1780 return lookuprevlink
1785 return lookuprevlink
1781
1786
1782 def gengroup():
1787 def gengroup():
1783 # construct a list of all changed files
1788 # construct a list of all changed files
1784 changedfiles = {}
1789 changedfiles = {}
1785
1790
1786 for chnk in cl.group(nodes, identity,
1791 for chnk in cl.group(nodes, identity,
1787 changed_file_collector(changedfiles)):
1792 changed_file_collector(changedfiles)):
1788 yield chnk
1793 yield chnk
1789 changedfiles = changedfiles.keys()
1794 changedfiles = changedfiles.keys()
1790 changedfiles.sort()
1795 changedfiles.sort()
1791
1796
1792 mnfst = self.manifest
1797 mnfst = self.manifest
1793 nodeiter = gennodelst(mnfst)
1798 nodeiter = gennodelst(mnfst)
1794 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1799 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1795 yield chnk
1800 yield chnk
1796
1801
1797 for fname in changedfiles:
1802 for fname in changedfiles:
1798 filerevlog = self.file(fname)
1803 filerevlog = self.file(fname)
1799 if filerevlog.count() == 0:
1804 if filerevlog.count() == 0:
1800 raise util.Abort(_("empty or missing revlog for %s") % fname)
1805 raise util.Abort(_("empty or missing revlog for %s") % fname)
1801 nodeiter = gennodelst(filerevlog)
1806 nodeiter = gennodelst(filerevlog)
1802 nodeiter = list(nodeiter)
1807 nodeiter = list(nodeiter)
1803 if nodeiter:
1808 if nodeiter:
1804 yield changegroup.chunkheader(len(fname))
1809 yield changegroup.chunkheader(len(fname))
1805 yield fname
1810 yield fname
1806 lookup = lookuprevlink_func(filerevlog)
1811 lookup = lookuprevlink_func(filerevlog)
1807 for chnk in filerevlog.group(nodeiter, lookup):
1812 for chnk in filerevlog.group(nodeiter, lookup):
1808 yield chnk
1813 yield chnk
1809
1814
1810 yield changegroup.closechunk()
1815 yield changegroup.closechunk()
1811
1816
1812 if nodes:
1817 if nodes:
1813 self.hook('outgoing', node=hex(nodes[0]), source=source)
1818 self.hook('outgoing', node=hex(nodes[0]), source=source)
1814
1819
1815 return util.chunkbuffer(gengroup())
1820 return util.chunkbuffer(gengroup())
1816
1821
1817 def addchangegroup(self, source, srctype, url):
1822 def addchangegroup(self, source, srctype, url):
1818 """add changegroup to repo.
1823 """add changegroup to repo.
1819
1824
1820 return values:
1825 return values:
1821 - nothing changed or no source: 0
1826 - nothing changed or no source: 0
1822 - more heads than before: 1+added heads (2..n)
1827 - more heads than before: 1+added heads (2..n)
1823 - fewer heads than before: -1-removed heads (-2..-n)
1828 - fewer heads than before: -1-removed heads (-2..-n)
1824 - number of heads stays the same: 1
1829 - number of heads stays the same: 1
1825 """
1830 """
1826 def csmap(x):
1831 def csmap(x):
1827 self.ui.debug(_("add changeset %s\n") % short(x))
1832 self.ui.debug(_("add changeset %s\n") % short(x))
1828 return cl.count()
1833 return cl.count()
1829
1834
1830 def revmap(x):
1835 def revmap(x):
1831 return cl.rev(x)
1836 return cl.rev(x)
1832
1837
1833 if not source:
1838 if not source:
1834 return 0
1839 return 0
1835
1840
1836 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1841 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1837
1842
1838 changesets = files = revisions = 0
1843 changesets = files = revisions = 0
1839
1844
1840 # write changelog data to temp files so concurrent readers will not see
1845 # write changelog data to temp files so concurrent readers will not see
1841 # an inconsistent view
1846 # an inconsistent view
1842 cl = self.changelog
1847 cl = self.changelog
1843 cl.delayupdate()
1848 cl.delayupdate()
1844 oldheads = len(cl.heads())
1849 oldheads = len(cl.heads())
1845
1850
1846 tr = self.transaction()
1851 tr = self.transaction()
1847 try:
1852 try:
1848 trp = weakref.proxy(tr)
1853 trp = weakref.proxy(tr)
1849 # pull off the changeset group
1854 # pull off the changeset group
1850 self.ui.status(_("adding changesets\n"))
1855 self.ui.status(_("adding changesets\n"))
1851 cor = cl.count() - 1
1856 cor = cl.count() - 1
1852 chunkiter = changegroup.chunkiter(source)
1857 chunkiter = changegroup.chunkiter(source)
1853 if cl.addgroup(chunkiter, csmap, trp, 1) is None:
1858 if cl.addgroup(chunkiter, csmap, trp, 1) is None:
1854 raise util.Abort(_("received changelog group is empty"))
1859 raise util.Abort(_("received changelog group is empty"))
1855 cnr = cl.count() - 1
1860 cnr = cl.count() - 1
1856 changesets = cnr - cor
1861 changesets = cnr - cor
1857
1862
1858 # pull off the manifest group
1863 # pull off the manifest group
1859 self.ui.status(_("adding manifests\n"))
1864 self.ui.status(_("adding manifests\n"))
1860 chunkiter = changegroup.chunkiter(source)
1865 chunkiter = changegroup.chunkiter(source)
1861 # no need to check for empty manifest group here:
1866 # no need to check for empty manifest group here:
1862 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1867 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1863 # no new manifest will be created and the manifest group will
1868 # no new manifest will be created and the manifest group will
1864 # be empty during the pull
1869 # be empty during the pull
1865 self.manifest.addgroup(chunkiter, revmap, trp)
1870 self.manifest.addgroup(chunkiter, revmap, trp)
1866
1871
1867 # process the files
1872 # process the files
1868 self.ui.status(_("adding file changes\n"))
1873 self.ui.status(_("adding file changes\n"))
1869 while 1:
1874 while 1:
1870 f = changegroup.getchunk(source)
1875 f = changegroup.getchunk(source)
1871 if not f:
1876 if not f:
1872 break
1877 break
1873 self.ui.debug(_("adding %s revisions\n") % f)
1878 self.ui.debug(_("adding %s revisions\n") % f)
1874 fl = self.file(f)
1879 fl = self.file(f)
1875 o = fl.count()
1880 o = fl.count()
1876 chunkiter = changegroup.chunkiter(source)
1881 chunkiter = changegroup.chunkiter(source)
1877 if fl.addgroup(chunkiter, revmap, trp) is None:
1882 if fl.addgroup(chunkiter, revmap, trp) is None:
1878 raise util.Abort(_("received file revlog group is empty"))
1883 raise util.Abort(_("received file revlog group is empty"))
1879 revisions += fl.count() - o
1884 revisions += fl.count() - o
1880 files += 1
1885 files += 1
1881
1886
1882 # make changelog see real files again
1887 # make changelog see real files again
1883 cl.finalize(trp)
1888 cl.finalize(trp)
1884
1889
1885 newheads = len(self.changelog.heads())
1890 newheads = len(self.changelog.heads())
1886 heads = ""
1891 heads = ""
1887 if oldheads and newheads != oldheads:
1892 if oldheads and newheads != oldheads:
1888 heads = _(" (%+d heads)") % (newheads - oldheads)
1893 heads = _(" (%+d heads)") % (newheads - oldheads)
1889
1894
1890 self.ui.status(_("added %d changesets"
1895 self.ui.status(_("added %d changesets"
1891 " with %d changes to %d files%s\n")
1896 " with %d changes to %d files%s\n")
1892 % (changesets, revisions, files, heads))
1897 % (changesets, revisions, files, heads))
1893
1898
1894 if changesets > 0:
1899 if changesets > 0:
1895 self.hook('pretxnchangegroup', throw=True,
1900 self.hook('pretxnchangegroup', throw=True,
1896 node=hex(self.changelog.node(cor+1)), source=srctype,
1901 node=hex(self.changelog.node(cor+1)), source=srctype,
1897 url=url)
1902 url=url)
1898
1903
1899 tr.close()
1904 tr.close()
1900 finally:
1905 finally:
1901 del tr
1906 del tr
1902
1907
1903 if changesets > 0:
1908 if changesets > 0:
1904 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1909 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1905 source=srctype, url=url)
1910 source=srctype, url=url)
1906
1911
1907 for i in xrange(cor + 1, cnr + 1):
1912 for i in xrange(cor + 1, cnr + 1):
1908 self.hook("incoming", node=hex(self.changelog.node(i)),
1913 self.hook("incoming", node=hex(self.changelog.node(i)),
1909 source=srctype, url=url)
1914 source=srctype, url=url)
1910
1915
1911 # never return 0 here:
1916 # never return 0 here:
1912 if newheads < oldheads:
1917 if newheads < oldheads:
1913 return newheads - oldheads - 1
1918 return newheads - oldheads - 1
1914 else:
1919 else:
1915 return newheads - oldheads + 1
1920 return newheads - oldheads + 1
1916
1921
1917
1922
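
For illustration (not part of this changeset), the return-value convention documented in addchangegroup above can be shown as a small standalone sketch; the helper name encode_heads_delta is invented here purely for the example.

    def encode_heads_delta(oldheads, newheads):
        # mirrors the final lines of addchangegroup: never return 0 once
        # changesets were added, so callers can tell "nothing pulled" (0)
        # from "pulled with no net head change" (1)
        if newheads < oldheads:
            return newheads - oldheads - 1   # -2..-n: heads were removed
        return newheads - oldheads + 1       # 1: unchanged, 2..n: heads added

    # e.g. 2 heads before, 2 after -> 1; 2 before, 4 after -> 3; 3 before, 2 after -> -2
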
1918 def stream_in(self, remote):
1923 def stream_in(self, remote):
1919 fp = remote.stream_out()
1924 fp = remote.stream_out()
1920 l = fp.readline()
1925 l = fp.readline()
1921 try:
1926 try:
1922 resp = int(l)
1927 resp = int(l)
1923 except ValueError:
1928 except ValueError:
1924 raise util.UnexpectedOutput(
1929 raise util.UnexpectedOutput(
1925 _('Unexpected response from remote server:'), l)
1930 _('Unexpected response from remote server:'), l)
1926 if resp == 1:
1931 if resp == 1:
1927 raise util.Abort(_('operation forbidden by server'))
1932 raise util.Abort(_('operation forbidden by server'))
1928 elif resp == 2:
1933 elif resp == 2:
1929 raise util.Abort(_('locking the remote repository failed'))
1934 raise util.Abort(_('locking the remote repository failed'))
1930 elif resp != 0:
1935 elif resp != 0:
1931 raise util.Abort(_('the server sent an unknown error code'))
1936 raise util.Abort(_('the server sent an unknown error code'))
1932 self.ui.status(_('streaming all changes\n'))
1937 self.ui.status(_('streaming all changes\n'))
1933 l = fp.readline()
1938 l = fp.readline()
1934 try:
1939 try:
1935 total_files, total_bytes = map(int, l.split(' ', 1))
1940 total_files, total_bytes = map(int, l.split(' ', 1))
1936 except (ValueError, TypeError):
1941 except (ValueError, TypeError):
1937 raise util.UnexpectedOutput(
1942 raise util.UnexpectedOutput(
1938 _('Unexpected response from remote server:'), l)
1943 _('Unexpected response from remote server:'), l)
1939 self.ui.status(_('%d files to transfer, %s of data\n') %
1944 self.ui.status(_('%d files to transfer, %s of data\n') %
1940 (total_files, util.bytecount(total_bytes)))
1945 (total_files, util.bytecount(total_bytes)))
1941 start = time.time()
1946 start = time.time()
1942 for i in xrange(total_files):
1947 for i in xrange(total_files):
1943 # XXX doesn't support '\n' or '\r' in filenames
1948 # XXX doesn't support '\n' or '\r' in filenames
1944 l = fp.readline()
1949 l = fp.readline()
1945 try:
1950 try:
1946 name, size = l.split('\0', 1)
1951 name, size = l.split('\0', 1)
1947 size = int(size)
1952 size = int(size)
1948 except (ValueError, TypeError):
1953 except (ValueError, TypeError):
1949 raise util.UnexpectedOutput(
1954 raise util.UnexpectedOutput(
1950 _('Unexpected response from remote server:'), l)
1955 _('Unexpected response from remote server:'), l)
1951 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1956 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1952 ofp = self.sopener(name, 'w')
1957 ofp = self.sopener(name, 'w')
1953 for chunk in util.filechunkiter(fp, limit=size):
1958 for chunk in util.filechunkiter(fp, limit=size):
1954 ofp.write(chunk)
1959 ofp.write(chunk)
1955 ofp.close()
1960 ofp.close()
1956 elapsed = time.time() - start
1961 elapsed = time.time() - start
1957 if elapsed <= 0:
1962 if elapsed <= 0:
1958 elapsed = 0.001
1963 elapsed = 0.001
1959 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1964 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1960 (util.bytecount(total_bytes), elapsed,
1965 (util.bytecount(total_bytes), elapsed,
1961 util.bytecount(total_bytes / elapsed)))
1966 util.bytecount(total_bytes / elapsed)))
1962 self.invalidate()
1967 self.invalidate()
1963 return len(self.heads()) + 1
1968 return len(self.heads()) + 1
1964
1969
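
For reference, a minimal sketch of the stream layout that stream_in above consumes: a status line, a "files bytes" line, then for each file a "name\0size" header followed by exactly size raw bytes. The writer below is illustrative only; entries is assumed to be a list of (name, data) pairs.

    def write_stream(out, entries):
        # layout matches what stream_in reads on the client side
        out.write('0\n')                                  # 0 = OK, 1/2 = error codes
        total = sum(len(data) for name, data in entries)
        out.write('%d %d\n' % (len(entries), total))      # "<file count> <byte count>"
        for name, data in entries:
            out.write('%s\0%d\n' % (name, len(data)))     # "<name>\0<size>"
            out.write(data)                               # raw file contents
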
1965 def clone(self, remote, heads=[], stream=False):
1970 def clone(self, remote, heads=[], stream=False):
1966 '''clone remote repository.
1971 '''clone remote repository.
1967
1972
1968 keyword arguments:
1973 keyword arguments:
1969 heads: list of revs to clone (forces use of pull)
1974 heads: list of revs to clone (forces use of pull)
1970 stream: use streaming clone if possible'''
1975 stream: use streaming clone if possible'''
1971
1976
1972 # now, all clients that can request uncompressed clones can
1977 # now, all clients that can request uncompressed clones can
1973 # read repo formats supported by all servers that can serve
1978 # read repo formats supported by all servers that can serve
1974 # them.
1979 # them.
1975
1980
1976 # if revlog format changes, client will have to check version
1981 # if revlog format changes, client will have to check version
1977 # and format flags on "stream" capability, and use
1982 # and format flags on "stream" capability, and use
1978 # uncompressed only if compatible.
1983 # uncompressed only if compatible.
1979
1984
1980 if stream and not heads and remote.capable('stream'):
1985 if stream and not heads and remote.capable('stream'):
1981 return self.stream_in(remote)
1986 return self.stream_in(remote)
1982 return self.pull(remote, heads)
1987 return self.pull(remote, heads)
1983
1988
1984 # used to avoid circular references so destructors work
1989 # used to avoid circular references so destructors work
1985 def aftertrans(files):
1990 def aftertrans(files):
1986 renamefiles = [tuple(t) for t in files]
1991 renamefiles = [tuple(t) for t in files]
1987 def a():
1992 def a():
1988 for src, dest in renamefiles:
1993 for src, dest in renamefiles:
1989 util.rename(src, dest)
1994 util.rename(src, dest)
1990 return a
1995 return a
1991
1996
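
aftertrans returns a plain callable rather than a bound method so the transaction can hold it without a reference cycle back to the repository. A hedged usage sketch (the journal/undo file names are illustrative):

    # build the callback up front, run it once the transaction is done
    cb = aftertrans([('journal', 'undo'), ('journal.dirstate', 'undo.dirstate')])
    # ... transaction completes ...
    cb()   # renames journal -> undo, journal.dirstate -> undo.dirstate
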
1992 def instance(ui, path, create):
1997 def instance(ui, path, create):
1993 return localrepository(ui, util.drop_scheme('file', path), create)
1998 return localrepository(ui, util.drop_scheme('file', path), create)
1994
1999
1995 def islocal(path):
2000 def islocal(path):
1996 return True
2001 return True
@@ -1,1359 +1,1384 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import *
10 from node import *
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 import cStringIO, email.Parser, os, popen2, re, sha, errno
12 import cStringIO, email.Parser, os, popen2, re, sha, errno
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 class PatchError(Exception):
15 class PatchError(Exception):
16 pass
16 pass
17
17
18 class NoHunks(PatchError):
18 class NoHunks(PatchError):
19 pass
19 pass
20
20
21 # helper functions
21 # helper functions
22
22
23 def copyfile(src, dst, basedir=None):
23 def copyfile(src, dst, basedir=None):
24 if not basedir:
24 if not basedir:
25 basedir = os.getcwd()
25 basedir = os.getcwd()
26
26
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 if os.path.exists(absdst):
28 if os.path.exists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 targetdir = os.path.dirname(absdst)
32 targetdir = os.path.dirname(absdst)
33 if not os.path.isdir(targetdir):
33 if not os.path.isdir(targetdir):
34 os.makedirs(targetdir)
34 os.makedirs(targetdir)
35
35
36 util.copyfile(abssrc, absdst)
36 util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
40 def extract(ui, fileobj):
40 def extract(ui, fileobj):
41 '''extract patch from data read from fileobj.
41 '''extract patch from data read from fileobj.
42
42
43 patch can be a normal patch or contained in an email message.
43 patch can be a normal patch or contained in an email message.
44
44
45 return tuple (filename, message, user, date, branch, node, p1, p2).
45 return tuple (filename, message, user, date, branch, node, p1, p2).
46 Any item in the returned tuple can be None. If filename is None,
46 Any item in the returned tuple can be None. If filename is None,
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48
48
49 # attempt to detect the start of a patch
49 # attempt to detect the start of a patch
50 # (this heuristic is borrowed from quilt)
50 # (this heuristic is borrowed from quilt)
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54
54
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 tmpfp = os.fdopen(fd, 'w')
56 tmpfp = os.fdopen(fd, 'w')
57 try:
57 try:
58 msg = email.Parser.Parser().parse(fileobj)
58 msg = email.Parser.Parser().parse(fileobj)
59
59
60 subject = msg['Subject']
60 subject = msg['Subject']
61 user = msg['From']
61 user = msg['From']
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
63 # should try to parse msg['Date']
63 # should try to parse msg['Date']
64 date = None
64 date = None
65 nodeid = None
65 nodeid = None
66 branch = None
66 branch = None
67 parents = []
67 parents = []
68
68
69 if subject:
69 if subject:
70 if subject.startswith('[PATCH'):
70 if subject.startswith('[PATCH'):
71 pend = subject.find(']')
71 pend = subject.find(']')
72 if pend >= 0:
72 if pend >= 0:
73 subject = subject[pend+1:].lstrip()
73 subject = subject[pend+1:].lstrip()
74 subject = subject.replace('\n\t', ' ')
74 subject = subject.replace('\n\t', ' ')
75 ui.debug('Subject: %s\n' % subject)
75 ui.debug('Subject: %s\n' % subject)
76 if user:
76 if user:
77 ui.debug('From: %s\n' % user)
77 ui.debug('From: %s\n' % user)
78 diffs_seen = 0
78 diffs_seen = 0
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
80 message = ''
80 message = ''
81 for part in msg.walk():
81 for part in msg.walk():
82 content_type = part.get_content_type()
82 content_type = part.get_content_type()
83 ui.debug('Content-Type: %s\n' % content_type)
83 ui.debug('Content-Type: %s\n' % content_type)
84 if content_type not in ok_types:
84 if content_type not in ok_types:
85 continue
85 continue
86 payload = part.get_payload(decode=True)
86 payload = part.get_payload(decode=True)
87 m = diffre.search(payload)
87 m = diffre.search(payload)
88 if m:
88 if m:
89 hgpatch = False
89 hgpatch = False
90 ignoretext = False
90 ignoretext = False
91
91
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
93 diffs_seen += 1
93 diffs_seen += 1
94 cfp = cStringIO.StringIO()
94 cfp = cStringIO.StringIO()
95 for line in payload[:m.start(0)].splitlines():
95 for line in payload[:m.start(0)].splitlines():
96 if line.startswith('# HG changeset patch'):
96 if line.startswith('# HG changeset patch'):
97 ui.debug(_('patch generated by hg export\n'))
97 ui.debug(_('patch generated by hg export\n'))
98 hgpatch = True
98 hgpatch = True
99 # drop earlier commit message content
99 # drop earlier commit message content
100 cfp.seek(0)
100 cfp.seek(0)
101 cfp.truncate()
101 cfp.truncate()
102 subject = None
102 subject = None
103 elif hgpatch:
103 elif hgpatch:
104 if line.startswith('# User '):
104 if line.startswith('# User '):
105 user = line[7:]
105 user = line[7:]
106 ui.debug('From: %s\n' % user)
106 ui.debug('From: %s\n' % user)
107 elif line.startswith("# Date "):
107 elif line.startswith("# Date "):
108 date = line[7:]
108 date = line[7:]
109 elif line.startswith("# Branch "):
109 elif line.startswith("# Branch "):
110 branch = line[9:]
110 branch = line[9:]
111 elif line.startswith("# Node ID "):
111 elif line.startswith("# Node ID "):
112 nodeid = line[10:]
112 nodeid = line[10:]
113 elif line.startswith("# Parent "):
113 elif line.startswith("# Parent "):
114 parents.append(line[10:])
114 parents.append(line[10:])
115 elif line == '---' and gitsendmail:
115 elif line == '---' and gitsendmail:
116 ignoretext = True
116 ignoretext = True
117 if not line.startswith('# ') and not ignoretext:
117 if not line.startswith('# ') and not ignoretext:
118 cfp.write(line)
118 cfp.write(line)
119 cfp.write('\n')
119 cfp.write('\n')
120 message = cfp.getvalue()
120 message = cfp.getvalue()
121 if tmpfp:
121 if tmpfp:
122 tmpfp.write(payload)
122 tmpfp.write(payload)
123 if not payload.endswith('\n'):
123 if not payload.endswith('\n'):
124 tmpfp.write('\n')
124 tmpfp.write('\n')
125 elif not diffs_seen and message and content_type == 'text/plain':
125 elif not diffs_seen and message and content_type == 'text/plain':
126 message += '\n' + payload
126 message += '\n' + payload
127 except:
127 except:
128 tmpfp.close()
128 tmpfp.close()
129 os.unlink(tmpname)
129 os.unlink(tmpname)
130 raise
130 raise
131
131
132 if subject and not message.startswith(subject):
132 if subject and not message.startswith(subject):
133 message = '%s\n%s' % (subject, message)
133 message = '%s\n%s' % (subject, message)
134 tmpfp.close()
134 tmpfp.close()
135 if not diffs_seen:
135 if not diffs_seen:
136 os.unlink(tmpname)
136 os.unlink(tmpname)
137 return None, message, user, date, branch, None, None, None
137 return None, message, user, date, branch, None, None, None
138 p1 = parents and parents.pop(0) or None
138 p1 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
140 return tmpname, message, user, date, branch, nodeid, p1, p2
140 return tmpname, message, user, date, branch, nodeid, p1, p2
141
141
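
The subject handling in extract strips a leading "[PATCH ...]" tag and unfolds continuation lines; a standalone sketch of just that step, with a made-up subject string for illustration:

    subject = '[PATCH 2/7] patch.py: handle missing files\n\tin git patches'
    if subject.startswith('[PATCH'):
        pend = subject.find(']')
        if pend >= 0:
            subject = subject[pend+1:].lstrip()
    subject = subject.replace('\n\t', ' ')
    # -> 'patch.py: handle missing files in git patches'
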
142 GP_PATCH = 1 << 0 # we have to run patch
142 GP_PATCH = 1 << 0 # we have to run patch
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 GP_BINARY = 1 << 2 # there's a binary patch
144 GP_BINARY = 1 << 2 # there's a binary patch
145
145
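
The GP_* constants are independent bits OR-ed into the dopatch value returned by readgitpatch, so callers can test each condition separately; for example:

    dopatch = GP_PATCH | GP_BINARY      # plain hunks plus a binary patch
    if dopatch & GP_FILTER:
        pass                            # copies/renames need extra filtering
    if dopatch & GP_BINARY:
        pass                            # at least one 'GIT binary patch' section
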
146 def readgitpatch(fp, firstline=None):
146 def readgitpatch(fp, firstline=None):
147 """extract git-style metadata about patches from <patchname>"""
147 """extract git-style metadata about patches from <patchname>"""
148 class gitpatch:
148 class gitpatch:
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
150 def __init__(self, path):
150 def __init__(self, path):
151 self.path = path
151 self.path = path
152 self.oldpath = None
152 self.oldpath = None
153 self.mode = None
153 self.mode = None
154 self.op = 'MODIFY'
154 self.op = 'MODIFY'
155 self.lineno = 0
155 self.lineno = 0
156 self.binary = False
156 self.binary = False
157
157
158 def reader(fp, firstline):
158 def reader(fp, firstline):
159 if firstline is not None:
159 if firstline is not None:
160 yield firstline
160 yield firstline
161 for line in fp:
161 for line in fp:
162 yield line
162 yield line
163
163
164 # Filter patch for git information
164 # Filter patch for git information
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
166 gp = None
166 gp = None
167 gitpatches = []
167 gitpatches = []
168 # Can have a git patch with only metadata, causing patch to complain
168 # Can have a git patch with only metadata, causing patch to complain
169 dopatch = 0
169 dopatch = 0
170
170
171 lineno = 0
171 lineno = 0
172 for line in reader(fp, firstline):
172 for line in reader(fp, firstline):
173 lineno += 1
173 lineno += 1
174 if line.startswith('diff --git'):
174 if line.startswith('diff --git'):
175 m = gitre.match(line)
175 m = gitre.match(line)
176 if m:
176 if m:
177 if gp:
177 if gp:
178 gitpatches.append(gp)
178 gitpatches.append(gp)
179 src, dst = m.group(1, 2)
179 src, dst = m.group(1, 2)
180 gp = gitpatch(dst)
180 gp = gitpatch(dst)
181 gp.lineno = lineno
181 gp.lineno = lineno
182 elif gp:
182 elif gp:
183 if line.startswith('--- '):
183 if line.startswith('--- '):
184 if gp.op in ('COPY', 'RENAME'):
184 if gp.op in ('COPY', 'RENAME'):
185 dopatch |= GP_FILTER
185 dopatch |= GP_FILTER
186 gitpatches.append(gp)
186 gitpatches.append(gp)
187 gp = None
187 gp = None
188 dopatch |= GP_PATCH
188 dopatch |= GP_PATCH
189 continue
189 continue
190 if line.startswith('rename from '):
190 if line.startswith('rename from '):
191 gp.op = 'RENAME'
191 gp.op = 'RENAME'
192 gp.oldpath = line[12:].rstrip()
192 gp.oldpath = line[12:].rstrip()
193 elif line.startswith('rename to '):
193 elif line.startswith('rename to '):
194 gp.path = line[10:].rstrip()
194 gp.path = line[10:].rstrip()
195 elif line.startswith('copy from '):
195 elif line.startswith('copy from '):
196 gp.op = 'COPY'
196 gp.op = 'COPY'
197 gp.oldpath = line[10:].rstrip()
197 gp.oldpath = line[10:].rstrip()
198 elif line.startswith('copy to '):
198 elif line.startswith('copy to '):
199 gp.path = line[8:].rstrip()
199 gp.path = line[8:].rstrip()
200 elif line.startswith('deleted file'):
200 elif line.startswith('deleted file'):
201 gp.op = 'DELETE'
201 gp.op = 'DELETE'
202 elif line.startswith('new file mode '):
202 elif line.startswith('new file mode '):
203 gp.op = 'ADD'
203 gp.op = 'ADD'
204 gp.mode = int(line.rstrip()[-6:], 8)
204 gp.mode = int(line.rstrip()[-6:], 8)
205 elif line.startswith('new mode '):
205 elif line.startswith('new mode '):
206 gp.mode = int(line.rstrip()[-6:], 8)
206 gp.mode = int(line.rstrip()[-6:], 8)
207 elif line.startswith('GIT binary patch'):
207 elif line.startswith('GIT binary patch'):
208 dopatch |= GP_BINARY
208 dopatch |= GP_BINARY
209 gp.binary = True
209 gp.binary = True
210 if gp:
210 if gp:
211 gitpatches.append(gp)
211 gitpatches.append(gp)
212
212
213 if not gitpatches:
213 if not gitpatches:
214 dopatch = GP_PATCH
214 dopatch = GP_PATCH
215
215
216 return (dopatch, gitpatches)
216 return (dopatch, gitpatches)
217
217
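
A hedged usage sketch of readgitpatch on a small rename-only patch (the input text is made up for illustration; cStringIO is already imported at the top of this module):

    import cStringIO

    text = ('diff --git a/old.txt b/new.txt\n'
            'rename from old.txt\n'
            'rename to new.txt\n')
    dopatch, gitpatches = readgitpatch(cStringIO.StringIO(text))
    # gitpatches[0].op == 'RENAME', .oldpath == 'old.txt', .path == 'new.txt'
    # dopatch == 0: metadata only, no '--- ' line, so there is nothing for the
    # hunk applier to do (the "git patch with only metadata" case noted above)
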
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug('no valid hunks found; trying with %r instead\n' %
232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 """use <patcher> to apply <patchname> to the working directory.
246 """use <patcher> to apply <patchname> to the working directory.
247 returns whether patch was applied with fuzz factor."""
247 returns whether patch was applied with fuzz factor."""
248
248
249 fuzz = False
249 fuzz = False
250 if cwd:
250 if cwd:
251 args.append('-d %s' % util.shellquote(cwd))
251 args.append('-d %s' % util.shellquote(cwd))
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 util.shellquote(patchname)))
253 util.shellquote(patchname)))
254
254
255 for line in fp:
255 for line in fp:
256 line = line.rstrip()
256 line = line.rstrip()
257 ui.note(line + '\n')
257 ui.note(line + '\n')
258 if line.startswith('patching file '):
258 if line.startswith('patching file '):
259 pf = util.parse_patch_output(line)
259 pf = util.parse_patch_output(line)
260 printed_file = False
260 printed_file = False
261 files.setdefault(pf, (None, None))
261 files.setdefault(pf, (None, None))
262 elif line.find('with fuzz') >= 0:
262 elif line.find('with fuzz') >= 0:
263 fuzz = True
263 fuzz = True
264 if not printed_file:
264 if not printed_file:
265 ui.warn(pf + '\n')
265 ui.warn(pf + '\n')
266 printed_file = True
266 printed_file = True
267 ui.warn(line + '\n')
267 ui.warn(line + '\n')
268 elif line.find('saving rejects to file') >= 0:
268 elif line.find('saving rejects to file') >= 0:
269 ui.warn(line + '\n')
269 ui.warn(line + '\n')
270 elif line.find('FAILED') >= 0:
270 elif line.find('FAILED') >= 0:
271 if not printed_file:
271 if not printed_file:
272 ui.warn(pf + '\n')
272 ui.warn(pf + '\n')
273 printed_file = True
273 printed_file = True
274 ui.warn(line + '\n')
274 ui.warn(line + '\n')
275 code = fp.close()
275 code = fp.close()
276 if code:
276 if code:
277 raise PatchError(_("patch command failed: %s") %
277 raise PatchError(_("patch command failed: %s") %
278 util.explain_exit(code)[0])
278 util.explain_exit(code)[0])
279 return fuzz
279 return fuzz
280
280
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
282 """use builtin patch to apply <patchobj> to the working directory.
282 """use builtin patch to apply <patchobj> to the working directory.
283 returns whether patch was applied with fuzz factor."""
283 returns whether patch was applied with fuzz factor."""
284 try:
284 try:
285 fp = file(patchobj, 'rb')
285 fp = file(patchobj, 'rb')
286 except TypeError:
286 except TypeError:
287 fp = patchobj
287 fp = patchobj
288 if cwd:
288 if cwd:
289 curdir = os.getcwd()
289 curdir = os.getcwd()
290 os.chdir(cwd)
290 os.chdir(cwd)
291 try:
291 try:
292 ret = applydiff(ui, fp, files, strip=strip)
292 ret = applydiff(ui, fp, files, strip=strip)
293 finally:
293 finally:
294 if cwd:
294 if cwd:
295 os.chdir(curdir)
295 os.chdir(curdir)
296 if ret < 0:
296 if ret < 0:
297 raise PatchError
297 raise PatchError
298 return ret > 0
298 return ret > 0
299
299
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303
303
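
For reference, what the unidesc pattern yields on a typical unified hunk header; the values shown are what read_unified_hunk unpacks into starta/lena/startb/lenb:

    m = unidesc.match('@@ -1,5 +2,6 @@')
    # m.groups() == ('1', ',5', '5', '2', ',6', '6')
    # starta=1, lena=5, startb=2, lenb=6; a missing ',len' part means a length of 1
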
304 class patchfile:
304 class patchfile:
305 def __init__(self, ui, fname):
305 def __init__(self, ui, fname, missing=False):
306 self.fname = fname
306 self.fname = fname
307 self.ui = ui
307 self.ui = ui
308 try:
308 self.lines = []
309 fp = file(fname, 'rb')
309 self.exists = False
310 self.lines = fp.readlines()
310 self.missing = missing
311 self.exists = True
311 if not missing:
312 except IOError:
312 try:
313 fp = file(fname, 'rb')
314 self.lines = fp.readlines()
315 self.exists = True
316 except IOError:
317 pass
318 else:
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
320
321 if not self.exists:
313 dirname = os.path.dirname(fname)
322 dirname = os.path.dirname(fname)
314 if dirname and not os.path.isdir(dirname):
323 if dirname and not os.path.isdir(dirname):
315 dirs = dirname.split(os.path.sep)
324 os.makedirs(dirname)
316 d = ""
317 for x in dirs:
318 d = os.path.join(d, x)
319 if not os.path.isdir(d):
320 os.mkdir(d)
321 self.lines = []
322 self.exists = False
323
325
324 self.hash = {}
326 self.hash = {}
325 self.dirty = 0
327 self.dirty = 0
326 self.offset = 0
328 self.offset = 0
327 self.rej = []
329 self.rej = []
328 self.fileprinted = False
330 self.fileprinted = False
329 self.printfile(False)
331 self.printfile(False)
330 self.hunks = 0
332 self.hunks = 0
331
333
332 def printfile(self, warn):
334 def printfile(self, warn):
333 if self.fileprinted:
335 if self.fileprinted:
334 return
336 return
335 if warn or self.ui.verbose:
337 if warn or self.ui.verbose:
336 self.fileprinted = True
338 self.fileprinted = True
337 s = _("patching file %s\n") % self.fname
339 s = _("patching file %s\n") % self.fname
338 if warn:
340 if warn:
339 self.ui.warn(s)
341 self.ui.warn(s)
340 else:
342 else:
341 self.ui.note(s)
343 self.ui.note(s)
342
344
343
345
344 def findlines(self, l, linenum):
346 def findlines(self, l, linenum):
345 # looks through the hash and finds candidate lines. The
347 # looks through the hash and finds candidate lines. The
346 # result is a list of line numbers sorted based on distance
348 # result is a list of line numbers sorted based on distance
347 # from linenum
349 # from linenum
348 def sorter(a, b):
350 def sorter(a, b):
349 vala = abs(a - linenum)
351 vala = abs(a - linenum)
350 valb = abs(b - linenum)
352 valb = abs(b - linenum)
351 return cmp(vala, valb)
353 return cmp(vala, valb)
352
354
353 try:
355 try:
354 cand = self.hash[l]
356 cand = self.hash[l]
355 except:
357 except:
356 return []
358 return []
357
359
358 if len(cand) > 1:
360 if len(cand) > 1:
359 # resort our list of potentials forward then back.
361 # resort our list of potentials forward then back.
360 cand.sort(sorter)
362 cand.sort(sorter)
361 return cand
363 return cand
362
364
363 def hashlines(self):
365 def hashlines(self):
364 self.hash = {}
366 self.hash = {}
365 for x in xrange(len(self.lines)):
367 for x in xrange(len(self.lines)):
366 s = self.lines[x]
368 s = self.lines[x]
367 self.hash.setdefault(s, []).append(x)
369 self.hash.setdefault(s, []).append(x)
368
370
369 def write_rej(self):
371 def write_rej(self):
370 # our rejects are a little different from patch(1). This always
372 # our rejects are a little different from patch(1). This always
371 # creates rejects in the same form as the original patch. A file
373 # creates rejects in the same form as the original patch. A file
372 # header is inserted so that you can run the reject through patch again
374 # header is inserted so that you can run the reject through patch again
373 # without having to type the filename.
375 # without having to type the filename.
374
376
375 if not self.rej:
377 if not self.rej:
376 return
378 return
377 if self.hunks != 1:
379 if self.hunks != 1:
378 hunkstr = "s"
380 hunkstr = "s"
379 else:
381 else:
380 hunkstr = ""
382 hunkstr = ""
381
383
382 fname = self.fname + ".rej"
384 fname = self.fname + ".rej"
383 self.ui.warn(
385 self.ui.warn(
384 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
386 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
385 (len(self.rej), self.hunks, hunkstr, fname))
387 (len(self.rej), self.hunks, hunkstr, fname))
386 try: os.unlink(fname)
388 try: os.unlink(fname)
387 except:
389 except:
388 pass
390 pass
389 fp = file(fname, 'wb')
391 fp = file(fname, 'wb')
390 base = os.path.basename(self.fname)
392 base = os.path.basename(self.fname)
391 fp.write("--- %s\n+++ %s\n" % (base, base))
393 fp.write("--- %s\n+++ %s\n" % (base, base))
392 for x in self.rej:
394 for x in self.rej:
393 for l in x.hunk:
395 for l in x.hunk:
394 fp.write(l)
396 fp.write(l)
395 if l[-1] != '\n':
397 if l[-1] != '\n':
396 fp.write("\n\ No newline at end of file\n")
398 fp.write("\n\ No newline at end of file\n")
397
399
398 def write(self, dest=None):
400 def write(self, dest=None):
399 if self.dirty:
401 if self.dirty:
400 if not dest:
402 if not dest:
401 dest = self.fname
403 dest = self.fname
402 st = None
404 st = None
403 try:
405 try:
404 st = os.lstat(dest)
406 st = os.lstat(dest)
405 except OSError, inst:
407 except OSError, inst:
406 if inst.errno != errno.ENOENT:
408 if inst.errno != errno.ENOENT:
407 raise
409 raise
408 if st and st.st_nlink > 1:
410 if st and st.st_nlink > 1:
409 os.unlink(dest)
411 os.unlink(dest)
410 fp = file(dest, 'wb')
412 fp = file(dest, 'wb')
411 if st and st.st_nlink > 1:
413 if st and st.st_nlink > 1:
412 os.chmod(dest, st.st_mode)
414 os.chmod(dest, st.st_mode)
413 fp.writelines(self.lines)
415 fp.writelines(self.lines)
414 fp.close()
416 fp.close()
415
417
416 def close(self):
418 def close(self):
417 self.write()
419 self.write()
418 self.write_rej()
420 self.write_rej()
419
421
420 def apply(self, h, reverse):
422 def apply(self, h, reverse):
421 if not h.complete():
423 if not h.complete():
422 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
424 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
423 (h.number, h.desc, len(h.a), h.lena, len(h.b),
425 (h.number, h.desc, len(h.a), h.lena, len(h.b),
424 h.lenb))
426 h.lenb))
425
427
426 self.hunks += 1
428 self.hunks += 1
427 if reverse:
429 if reverse:
428 h.reverse()
430 h.reverse()
429
431
432 if self.missing:
433 self.rej.append(h)
434 return -1
435
430 if self.exists and h.createfile():
436 if self.exists and h.createfile():
431 self.ui.warn(_("file %s already exists\n") % self.fname)
437 self.ui.warn(_("file %s already exists\n") % self.fname)
432 self.rej.append(h)
438 self.rej.append(h)
433 return -1
439 return -1
434
440
435 if isinstance(h, binhunk):
441 if isinstance(h, binhunk):
436 if h.rmfile():
442 if h.rmfile():
437 os.unlink(self.fname)
443 os.unlink(self.fname)
438 else:
444 else:
439 self.lines[:] = h.new()
445 self.lines[:] = h.new()
440 self.offset += len(h.new())
446 self.offset += len(h.new())
441 self.dirty = 1
447 self.dirty = 1
442 return 0
448 return 0
443
449
444 # fast case first, no offsets, no fuzz
450 # fast case first, no offsets, no fuzz
445 old = h.old()
451 old = h.old()
446 # patch starts counting at 1 unless we are adding the file
452 # patch starts counting at 1 unless we are adding the file
447 if h.starta == 0:
453 if h.starta == 0:
448 start = 0
454 start = 0
449 else:
455 else:
450 start = h.starta + self.offset - 1
456 start = h.starta + self.offset - 1
451 orig_start = start
457 orig_start = start
452 if diffhelpers.testhunk(old, self.lines, start) == 0:
458 if diffhelpers.testhunk(old, self.lines, start) == 0:
453 if h.rmfile():
459 if h.rmfile():
454 os.unlink(self.fname)
460 os.unlink(self.fname)
455 else:
461 else:
456 self.lines[start : start + h.lena] = h.new()
462 self.lines[start : start + h.lena] = h.new()
457 self.offset += h.lenb - h.lena
463 self.offset += h.lenb - h.lena
458 self.dirty = 1
464 self.dirty = 1
459 return 0
465 return 0
460
466
461 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
467 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
462 self.hashlines()
468 self.hashlines()
463 if h.hunk[-1][0] != ' ':
469 if h.hunk[-1][0] != ' ':
464 # if the hunk tried to put something at the bottom of the file
470 # if the hunk tried to put something at the bottom of the file
465 # override the start line and use eof here
471 # override the start line and use eof here
466 search_start = len(self.lines)
472 search_start = len(self.lines)
467 else:
473 else:
468 search_start = orig_start
474 search_start = orig_start
469
475
470 for fuzzlen in xrange(3):
476 for fuzzlen in xrange(3):
471 for toponly in [ True, False ]:
477 for toponly in [ True, False ]:
472 old = h.old(fuzzlen, toponly)
478 old = h.old(fuzzlen, toponly)
473
479
474 cand = self.findlines(old[0][1:], search_start)
480 cand = self.findlines(old[0][1:], search_start)
475 for l in cand:
481 for l in cand:
476 if diffhelpers.testhunk(old, self.lines, l) == 0:
482 if diffhelpers.testhunk(old, self.lines, l) == 0:
477 newlines = h.new(fuzzlen, toponly)
483 newlines = h.new(fuzzlen, toponly)
478 self.lines[l : l + len(old)] = newlines
484 self.lines[l : l + len(old)] = newlines
479 self.offset += len(newlines) - len(old)
485 self.offset += len(newlines) - len(old)
480 self.dirty = 1
486 self.dirty = 1
481 if fuzzlen:
487 if fuzzlen:
482 fuzzstr = "with fuzz %d " % fuzzlen
488 fuzzstr = "with fuzz %d " % fuzzlen
483 f = self.ui.warn
489 f = self.ui.warn
484 self.printfile(True)
490 self.printfile(True)
485 else:
491 else:
486 fuzzstr = ""
492 fuzzstr = ""
487 f = self.ui.note
493 f = self.ui.note
488 offset = l - orig_start - fuzzlen
494 offset = l - orig_start - fuzzlen
489 if offset == 1:
495 if offset == 1:
490 linestr = "line"
496 linestr = "line"
491 else:
497 else:
492 linestr = "lines"
498 linestr = "lines"
493 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
499 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
494 (h.number, l+1, fuzzstr, offset, linestr))
500 (h.number, l+1, fuzzstr, offset, linestr))
495 return fuzzlen
501 return fuzzlen
496 self.printfile(True)
502 self.printfile(True)
497 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
503 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
498 self.rej.append(h)
504 self.rej.append(h)
499 return -1
505 return -1
500
506
501 class hunk:
507 class hunk:
502 def __init__(self, desc, num, lr, context):
508 def __init__(self, desc, num, lr, context):
503 self.number = num
509 self.number = num
504 self.desc = desc
510 self.desc = desc
505 self.hunk = [ desc ]
511 self.hunk = [ desc ]
506 self.a = []
512 self.a = []
507 self.b = []
513 self.b = []
508 if context:
514 if context:
509 self.read_context_hunk(lr)
515 self.read_context_hunk(lr)
510 else:
516 else:
511 self.read_unified_hunk(lr)
517 self.read_unified_hunk(lr)
512
518
513 def read_unified_hunk(self, lr):
519 def read_unified_hunk(self, lr):
514 m = unidesc.match(self.desc)
520 m = unidesc.match(self.desc)
515 if not m:
521 if not m:
516 raise PatchError(_("bad hunk #%d") % self.number)
522 raise PatchError(_("bad hunk #%d") % self.number)
517 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
523 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
518 if self.lena == None:
524 if self.lena == None:
519 self.lena = 1
525 self.lena = 1
520 else:
526 else:
521 self.lena = int(self.lena)
527 self.lena = int(self.lena)
522 if self.lenb == None:
528 if self.lenb == None:
523 self.lenb = 1
529 self.lenb = 1
524 else:
530 else:
525 self.lenb = int(self.lenb)
531 self.lenb = int(self.lenb)
526 self.starta = int(self.starta)
532 self.starta = int(self.starta)
527 self.startb = int(self.startb)
533 self.startb = int(self.startb)
528 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
534 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
529 # if we hit eof before finishing out the hunk, the last line will
535 # if we hit eof before finishing out the hunk, the last line will
530 # be zero length. Let's try to fix it up.
536 # be zero length. Let's try to fix it up.
531 while len(self.hunk[-1]) == 0:
537 while len(self.hunk[-1]) == 0:
532 del self.hunk[-1]
538 del self.hunk[-1]
533 del self.a[-1]
539 del self.a[-1]
534 del self.b[-1]
540 del self.b[-1]
535 self.lena -= 1
541 self.lena -= 1
536 self.lenb -= 1
542 self.lenb -= 1
537
543
538 def read_context_hunk(self, lr):
544 def read_context_hunk(self, lr):
539 self.desc = lr.readline()
545 self.desc = lr.readline()
540 m = contextdesc.match(self.desc)
546 m = contextdesc.match(self.desc)
541 if not m:
547 if not m:
542 raise PatchError(_("bad hunk #%d") % self.number)
548 raise PatchError(_("bad hunk #%d") % self.number)
543 foo, self.starta, foo2, aend, foo3 = m.groups()
549 foo, self.starta, foo2, aend, foo3 = m.groups()
544 self.starta = int(self.starta)
550 self.starta = int(self.starta)
545 if aend == None:
551 if aend == None:
546 aend = self.starta
552 aend = self.starta
547 self.lena = int(aend) - self.starta
553 self.lena = int(aend) - self.starta
548 if self.starta:
554 if self.starta:
549 self.lena += 1
555 self.lena += 1
550 for x in xrange(self.lena):
556 for x in xrange(self.lena):
551 l = lr.readline()
557 l = lr.readline()
552 if l.startswith('---'):
558 if l.startswith('---'):
553 lr.push(l)
559 lr.push(l)
554 break
560 break
555 s = l[2:]
561 s = l[2:]
556 if l.startswith('- ') or l.startswith('! '):
562 if l.startswith('- ') or l.startswith('! '):
557 u = '-' + s
563 u = '-' + s
558 elif l.startswith(' '):
564 elif l.startswith(' '):
559 u = ' ' + s
565 u = ' ' + s
560 else:
566 else:
561 raise PatchError(_("bad hunk #%d old text line %d") %
567 raise PatchError(_("bad hunk #%d old text line %d") %
562 (self.number, x))
568 (self.number, x))
563 self.a.append(u)
569 self.a.append(u)
564 self.hunk.append(u)
570 self.hunk.append(u)
565
571
566 l = lr.readline()
572 l = lr.readline()
567 if l.startswith('\ '):
573 if l.startswith('\ '):
568 s = self.a[-1][:-1]
574 s = self.a[-1][:-1]
569 self.a[-1] = s
575 self.a[-1] = s
570 self.hunk[-1] = s
576 self.hunk[-1] = s
571 l = lr.readline()
577 l = lr.readline()
572 m = contextdesc.match(l)
578 m = contextdesc.match(l)
573 if not m:
579 if not m:
574 raise PatchError(_("bad hunk #%d") % self.number)
580 raise PatchError(_("bad hunk #%d") % self.number)
575 foo, self.startb, foo2, bend, foo3 = m.groups()
581 foo, self.startb, foo2, bend, foo3 = m.groups()
576 self.startb = int(self.startb)
582 self.startb = int(self.startb)
577 if bend == None:
583 if bend == None:
578 bend = self.startb
584 bend = self.startb
579 self.lenb = int(bend) - self.startb
585 self.lenb = int(bend) - self.startb
580 if self.startb:
586 if self.startb:
581 self.lenb += 1
587 self.lenb += 1
582 hunki = 1
588 hunki = 1
583 for x in xrange(self.lenb):
589 for x in xrange(self.lenb):
584 l = lr.readline()
590 l = lr.readline()
585 if l.startswith('\ '):
591 if l.startswith('\ '):
586 s = self.b[-1][:-1]
592 s = self.b[-1][:-1]
587 self.b[-1] = s
593 self.b[-1] = s
588 self.hunk[hunki-1] = s
594 self.hunk[hunki-1] = s
589 continue
595 continue
590 if not l:
596 if not l:
591 lr.push(l)
597 lr.push(l)
592 break
598 break
593 s = l[2:]
599 s = l[2:]
594 if l.startswith('+ ') or l.startswith('! '):
600 if l.startswith('+ ') or l.startswith('! '):
595 u = '+' + s
601 u = '+' + s
596 elif l.startswith(' '):
602 elif l.startswith(' '):
597 u = ' ' + s
603 u = ' ' + s
598 elif len(self.b) == 0:
604 elif len(self.b) == 0:
599 # this can happen when the hunk does not add any lines
605 # this can happen when the hunk does not add any lines
600 lr.push(l)
606 lr.push(l)
601 break
607 break
602 else:
608 else:
603 raise PatchError(_("bad hunk #%d old text line %d") %
609 raise PatchError(_("bad hunk #%d old text line %d") %
604 (self.number, x))
610 (self.number, x))
605 self.b.append(s)
611 self.b.append(s)
606 while True:
612 while True:
607 if hunki >= len(self.hunk):
613 if hunki >= len(self.hunk):
608 h = ""
614 h = ""
609 else:
615 else:
610 h = self.hunk[hunki]
616 h = self.hunk[hunki]
611 hunki += 1
617 hunki += 1
612 if h == u:
618 if h == u:
613 break
619 break
614 elif h.startswith('-'):
620 elif h.startswith('-'):
615 continue
621 continue
616 else:
622 else:
617 self.hunk.insert(hunki-1, u)
623 self.hunk.insert(hunki-1, u)
618 break
624 break
619
625
620 if not self.a:
626 if not self.a:
621 # this happens when lines were only added to the hunk
627 # this happens when lines were only added to the hunk
622 for x in self.hunk:
628 for x in self.hunk:
623 if x.startswith('-') or x.startswith(' '):
629 if x.startswith('-') or x.startswith(' '):
624 self.a.append(x)
630 self.a.append(x)
625 if not self.b:
631 if not self.b:
626 # this happens when lines were only deleted from the hunk
632 # this happens when lines were only deleted from the hunk
627 for x in self.hunk:
633 for x in self.hunk:
628 if x.startswith('+') or x.startswith(' '):
634 if x.startswith('+') or x.startswith(' '):
629 self.b.append(x[1:])
635 self.b.append(x[1:])
630 # @@ -start,len +start,len @@
636 # @@ -start,len +start,len @@
631 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
637 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
632 self.startb, self.lenb)
638 self.startb, self.lenb)
633 self.hunk[0] = self.desc
639 self.hunk[0] = self.desc
634
640
635 def reverse(self):
641 def reverse(self):
636 origlena = self.lena
642 origlena = self.lena
637 origstarta = self.starta
643 origstarta = self.starta
638 self.lena = self.lenb
644 self.lena = self.lenb
639 self.starta = self.startb
645 self.starta = self.startb
640 self.lenb = origlena
646 self.lenb = origlena
641 self.startb = origstarta
647 self.startb = origstarta
642 self.a = []
648 self.a = []
643 self.b = []
649 self.b = []
644 # self.hunk[0] is the @@ description
650 # self.hunk[0] is the @@ description
645 for x in xrange(1, len(self.hunk)):
651 for x in xrange(1, len(self.hunk)):
646 o = self.hunk[x]
652 o = self.hunk[x]
647 if o.startswith('-'):
653 if o.startswith('-'):
648 n = '+' + o[1:]
654 n = '+' + o[1:]
649 self.b.append(o[1:])
655 self.b.append(o[1:])
650 elif o.startswith('+'):
656 elif o.startswith('+'):
651 n = '-' + o[1:]
657 n = '-' + o[1:]
652 self.a.append(n)
658 self.a.append(n)
653 else:
659 else:
654 n = o
660 n = o
655 self.b.append(o[1:])
661 self.b.append(o[1:])
656 self.a.append(o)
662 self.a.append(o)
657 self.hunk[x] = o
663 self.hunk[x] = o
658
664
659 def fix_newline(self):
665 def fix_newline(self):
660 diffhelpers.fix_newline(self.hunk, self.a, self.b)
666 diffhelpers.fix_newline(self.hunk, self.a, self.b)
661
667
662 def complete(self):
668 def complete(self):
663 return len(self.a) == self.lena and len(self.b) == self.lenb
669 return len(self.a) == self.lena and len(self.b) == self.lenb
664
670
665 def createfile(self):
671 def createfile(self):
666 return self.starta == 0 and self.lena == 0
672 return self.starta == 0 and self.lena == 0
667
673
668 def rmfile(self):
674 def rmfile(self):
669 return self.startb == 0 and self.lenb == 0
675 return self.startb == 0 and self.lenb == 0
670
676
671 def fuzzit(self, l, fuzz, toponly):
677 def fuzzit(self, l, fuzz, toponly):
672 # this removes context lines from the top and bottom of list 'l'. It
678 # this removes context lines from the top and bottom of list 'l'. It
673 # checks the hunk to make sure only context lines are removed, and then
679 # checks the hunk to make sure only context lines are removed, and then
674 # returns a new shortened list of lines.
680 # returns a new shortened list of lines.
675 fuzz = min(fuzz, len(l)-1)
681 fuzz = min(fuzz, len(l)-1)
676 if fuzz:
682 if fuzz:
677 top = 0
683 top = 0
678 bot = 0
684 bot = 0
679 hlen = len(self.hunk)
685 hlen = len(self.hunk)
680 for x in xrange(hlen-1):
686 for x in xrange(hlen-1):
681 # the hunk starts with the @@ line, so use x+1
687 # the hunk starts with the @@ line, so use x+1
682 if self.hunk[x+1][0] == ' ':
688 if self.hunk[x+1][0] == ' ':
683 top += 1
689 top += 1
684 else:
690 else:
685 break
691 break
686 if not toponly:
692 if not toponly:
687 for x in xrange(hlen-1):
693 for x in xrange(hlen-1):
688 if self.hunk[hlen-bot-1][0] == ' ':
694 if self.hunk[hlen-bot-1][0] == ' ':
689 bot += 1
695 bot += 1
690 else:
696 else:
691 break
697 break
692
698
693 # top and bot now count context in the hunk
699 # top and bot now count context in the hunk
694 # adjust them if either one is short
700 # adjust them if either one is short
695 context = max(top, bot, 3)
701 context = max(top, bot, 3)
696 if bot < context:
702 if bot < context:
697 bot = max(0, fuzz - (context - bot))
703 bot = max(0, fuzz - (context - bot))
698 else:
704 else:
699 bot = min(fuzz, bot)
705 bot = min(fuzz, bot)
700 if top < context:
706 if top < context:
701 top = max(0, fuzz - (context - top))
707 top = max(0, fuzz - (context - top))
702 else:
708 else:
703 top = min(fuzz, top)
709 top = min(fuzz, top)
704
710
705 return l[top:len(l)-bot]
711 return l[top:len(l)-bot]
706 return l
712 return l
707
713
708 def old(self, fuzz=0, toponly=False):
714 def old(self, fuzz=0, toponly=False):
709 return self.fuzzit(self.a, fuzz, toponly)
715 return self.fuzzit(self.a, fuzz, toponly)
710
716
711 def newctrl(self):
717 def newctrl(self):
712 res = []
718 res = []
713 for x in self.hunk:
719 for x in self.hunk:
714 c = x[0]
720 c = x[0]
715 if c == ' ' or c == '+':
721 if c == ' ' or c == '+':
716 res.append(x)
722 res.append(x)
717 return res
723 return res
718
724
719 def new(self, fuzz=0, toponly=False):
725 def new(self, fuzz=0, toponly=False):
720 return self.fuzzit(self.b, fuzz, toponly)
726 return self.fuzzit(self.b, fuzz, toponly)
721
727
722 class binhunk:
728 class binhunk:
723 'A binary patch file. Only understands literals so far.'
729 'A binary patch file. Only understands literals so far.'
724 def __init__(self, gitpatch):
730 def __init__(self, gitpatch):
725 self.gitpatch = gitpatch
731 self.gitpatch = gitpatch
726 self.text = None
732 self.text = None
727 self.hunk = ['GIT binary patch\n']
733 self.hunk = ['GIT binary patch\n']
728
734
729 def createfile(self):
735 def createfile(self):
730 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
736 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
731
737
732 def rmfile(self):
738 def rmfile(self):
733 return self.gitpatch.op == 'DELETE'
739 return self.gitpatch.op == 'DELETE'
734
740
735 def complete(self):
741 def complete(self):
736 return self.text is not None
742 return self.text is not None
737
743
738 def new(self):
744 def new(self):
739 return [self.text]
745 return [self.text]
740
746
741 def extract(self, fp):
747 def extract(self, fp):
742 line = fp.readline()
748 line = fp.readline()
743 self.hunk.append(line)
749 self.hunk.append(line)
744 while line and not line.startswith('literal '):
750 while line and not line.startswith('literal '):
745 line = fp.readline()
751 line = fp.readline()
746 self.hunk.append(line)
752 self.hunk.append(line)
747 if not line:
753 if not line:
748 raise PatchError(_('could not extract binary patch'))
754 raise PatchError(_('could not extract binary patch'))
749 size = int(line[8:].rstrip())
755 size = int(line[8:].rstrip())
750 dec = []
756 dec = []
751 line = fp.readline()
757 line = fp.readline()
752 self.hunk.append(line)
758 self.hunk.append(line)
753 while len(line) > 1:
759 while len(line) > 1:
754 l = line[0]
760 l = line[0]
755 if l <= 'Z' and l >= 'A':
761 if l <= 'Z' and l >= 'A':
756 l = ord(l) - ord('A') + 1
762 l = ord(l) - ord('A') + 1
757 else:
763 else:
758 l = ord(l) - ord('a') + 27
764 l = ord(l) - ord('a') + 27
759 dec.append(base85.b85decode(line[1:-1])[:l])
765 dec.append(base85.b85decode(line[1:-1])[:l])
760 line = fp.readline()
766 line = fp.readline()
761 self.hunk.append(line)
767 self.hunk.append(line)
762 text = zlib.decompress(''.join(dec))
768 text = zlib.decompress(''.join(dec))
763 if len(text) != size:
769 if len(text) != size:
764 raise PatchError(_('binary patch is %d bytes, not %d') %
770 raise PatchError(_('binary patch is %d bytes, not %d') %
765 (len(text), size))
771 (len(text), size))
766 self.text = text
772 self.text = text
767
773
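
The per-line length prefix decoded in binhunk.extract follows the git binary patch convention ('A'..'Z' encode 1..26 bytes, 'a'..'z' encode 27..52); isolated as a tiny helper for illustration:

    def linelen(ch):
        # length of the decoded payload carried by one base85 line
        if 'A' <= ch <= 'Z':
            return ord(ch) - ord('A') + 1    # 1..26
        return ord(ch) - ord('a') + 27       # 27..52

    # linelen('A') == 1, linelen('Z') == 26, linelen('a') == 27, linelen('z') == 52
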
768 def parsefilename(str):
774 def parsefilename(str):
769 # --- filename \t|space stuff
775 # --- filename \t|space stuff
770 s = str[4:]
776 s = str[4:]
771 i = s.find('\t')
777 i = s.find('\t')
772 if i < 0:
778 if i < 0:
773 i = s.find(' ')
779 i = s.find(' ')
774 if i < 0:
780 if i < 0:
775 return s
781 return s
776 return s[:i]
782 return s[:i]
777
783
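
parsefilename keeps only the path part of a '---'/'+++' header line, cutting at the first tab or space; for example:

    parsefilename('--- a/patch.py\tMon Sep 17 00:00:00 2007')  # -> 'a/patch.py'
    parsefilename('+++ b/patch.py')                            # -> 'b/patch.py'
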
778 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
784 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
779 def pathstrip(path, count=1):
785 def pathstrip(path, count=1):
780 pathlen = len(path)
786 pathlen = len(path)
781 i = 0
787 i = 0
782 if count == 0:
788 if count == 0:
783 return path.rstrip()
789 return path.rstrip()
784 while count > 0:
790 while count > 0:
785 i = path.find('/', i)
791 i = path.find('/', i)
786 if i == -1:
792 if i == -1:
787 raise PatchError(_("unable to strip away %d dirs from %s") %
793 raise PatchError(_("unable to strip away %d dirs from %s") %
788 (count, path))
794 (count, path))
789 i += 1
795 i += 1
790 # consume '//' in the path
796 # consume '//' in the path
791 while i < pathlen - 1 and path[i] == '/':
797 while i < pathlen - 1 and path[i] == '/':
792 i += 1
798 i += 1
793 count -= 1
799 count -= 1
794 return path[i:].rstrip()
800 return path[i:].rstrip()
795
801
796 nulla = afile_orig == "/dev/null"
802 nulla = afile_orig == "/dev/null"
797 nullb = bfile_orig == "/dev/null"
803 nullb = bfile_orig == "/dev/null"
798 afile = pathstrip(afile_orig, strip)
804 afile = pathstrip(afile_orig, strip)
799 gooda = os.path.exists(afile) and not nulla
805 gooda = not nulla and os.path.exists(afile)
800 bfile = pathstrip(bfile_orig, strip)
806 bfile = pathstrip(bfile_orig, strip)
801 if afile == bfile:
807 if afile == bfile:
802 goodb = gooda
808 goodb = gooda
803 else:
809 else:
804 goodb = os.path.exists(bfile) and not nullb
810 goodb = not nullb and os.path.exists(bfile)
805 createfunc = hunk.createfile
811 createfunc = hunk.createfile
806 if reverse:
812 if reverse:
807 createfunc = hunk.rmfile
813 createfunc = hunk.rmfile
808 if not goodb and not gooda and not createfunc():
814 missing = not goodb and not gooda and not createfunc()
809 raise PatchError(_("unable to find %s or %s for patching") %
815 fname = None
810 (afile, bfile))
816 if not missing:
811 if gooda and goodb:
817 if gooda and goodb:
812 fname = bfile
818 fname = (afile in bfile) and afile or bfile
813 if afile in bfile:
819 elif gooda:
814 fname = afile
820 fname = afile
815 elif gooda:
821
816 fname = afile
822 if not fname:
817 elif not nullb:
823 if not nullb:
818 fname = bfile
824 fname = (afile in bfile) and afile or bfile
819 if afile in bfile:
825 elif not nulla:
820 fname = afile
826 fname = afile
821 elif not nulla:
827 else:
822 fname = afile
828 raise PatchError(_("undefined source and destination files"))
823 return fname
829
830 return fname, missing
824
831
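# Editor's sketch: the pathstrip() helper inside selectfile() behaves like the
# -p option of patch(1), dropping `count` leading directory components and
# consuming '//'. This simplified mirror assumes the usual relative a/ b/
# paths seen in diffs and omits the PatchError raised on too-short paths.
def _pathstrip_sketch(path, count=1):
    if count == 0:
        return path.rstrip()
    parts = [p for p in path.split('/') if p]
    return '/'.join(parts[count:]).rstrip()

assert _pathstrip_sketch('a/b/c.txt', 1) == 'b/c.txt'
assert _pathstrip_sketch('a//b/c.txt', 2) == 'c.txt'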
825 class linereader:
832 class linereader:
826 # simple class to allow pushing lines back into the input stream
833 # simple class to allow pushing lines back into the input stream
827 def __init__(self, fp):
834 def __init__(self, fp):
828 self.fp = fp
835 self.fp = fp
829 self.buf = []
836 self.buf = []
830
837
831 def push(self, line):
838 def push(self, line):
832 self.buf.append(line)
839 self.buf.append(line)
833
840
834 def readline(self):
841 def readline(self):
835 if self.buf:
842 if self.buf:
836 l = self.buf[0]
843 l = self.buf[0]
837 del self.buf[0]
844 del self.buf[0]
838 return l
845 return l
839 return self.fp.readline()
846 return self.fp.readline()
840
847
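# Editor's illustration of linereader: a pushed-back line is returned before
# anything else is read from the wrapped file object.
import cStringIO
_lr = linereader(cStringIO.StringIO('one\ntwo\n'))
_first = _lr.readline()
_lr.push(_first)
assert _lr.readline() == 'one\n'
assert _lr.readline() == 'two\n'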
841 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
848 def iterhunks(ui, fp, sourcefile=None):
842 rejmerge=None, updatedir=None):
849 """Read a patch and yield the following events:
843 """reads a patch from fp and tries to apply it. The dict 'changed' is
850 - ("file", afile, bfile, firsthunk): select a new target file.
844 filled in with all of the filenames changed by the patch. Returns 0
851 - ("hunk", hunk): a new hunk is ready to be applied, follows a
845 for a clean patch, -1 if any rejects were found and 1 if there was
852 "file" event.
846 any fuzz."""
853 - ("git", gitchanges): current diff is in git format, gitchanges
854 maps filenames to gitpatch records. Unique event.
855 """
847
856
848 def scangitpatch(fp, firstline, cwd=None):
857 def scangitpatch(fp, firstline):
849 '''git patches can modify a file, then copy that file to
858 '''git patches can modify a file, then copy that file to
850 a new file, but expect the source to be the unmodified form.
859 a new file, but expect the source to be the unmodified form.
851 So we scan the patch looking for that case so we can do
860 So we scan the patch looking for that case so we can do
852 the copies ahead of time.'''
861 the copies ahead of time.'''
853
862
854 pos = 0
863 pos = 0
855 try:
864 try:
856 pos = fp.tell()
865 pos = fp.tell()
857 except IOError:
866 except IOError:
858 fp = cStringIO.StringIO(fp.read())
867 fp = cStringIO.StringIO(fp.read())
859
868
860 (dopatch, gitpatches) = readgitpatch(fp, firstline)
869 (dopatch, gitpatches) = readgitpatch(fp, firstline)
861 for gp in gitpatches:
862 if gp.op in ('COPY', 'RENAME'):
863 copyfile(gp.oldpath, gp.path, basedir=cwd)
864
865 fp.seek(pos)
870 fp.seek(pos)
866
871
867 return fp, dopatch, gitpatches
872 return fp, dopatch, gitpatches
868
873
874 changed = {}
869 current_hunk = None
875 current_hunk = None
870 current_file = None
871 afile = ""
876 afile = ""
872 bfile = ""
877 bfile = ""
873 state = None
878 state = None
874 hunknum = 0
879 hunknum = 0
875 rejects = 0
880 emitfile = False
876
881
877 git = False
882 git = False
878 gitre = re.compile('diff --git (a/.*) (b/.*)')
883 gitre = re.compile('diff --git (a/.*) (b/.*)')
879
884
880 # our states
885 # our states
881 BFILE = 1
886 BFILE = 1
882 err = 0
883 context = None
887 context = None
884 lr = linereader(fp)
888 lr = linereader(fp)
885 dopatch = True
889 dopatch = True
886 gitworkdone = False
890 gitworkdone = False
887
891
888 def getpatchfile(afile, bfile, hunk):
889 try:
890 if sourcefile:
891 targetfile = patchfile(ui, sourcefile)
892 else:
893 targetfile = selectfile(afile, bfile, hunk,
894 strip, reverse)
895 targetfile = patchfile(ui, targetfile)
896 return targetfile
897 except PatchError, err:
898 ui.warn(str(err) + '\n')
899 return None
900
901 while True:
892 while True:
902 newfile = False
893 newfile = False
903 x = lr.readline()
894 x = lr.readline()
904 if not x:
895 if not x:
905 break
896 break
906 if current_hunk:
897 if current_hunk:
907 if x.startswith('\ '):
898 if x.startswith('\ '):
908 current_hunk.fix_newline()
899 current_hunk.fix_newline()
909 ret = current_file.apply(current_hunk, reverse)
900 yield 'hunk', current_hunk
910 if ret >= 0:
911 changed.setdefault(current_file.fname, (None, None))
912 if ret > 0:
913 err = 1
914 current_hunk = None
901 current_hunk = None
915 gitworkdone = False
902 gitworkdone = False
916 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
903 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
917 ((context or context == None) and x.startswith('***************')))):
904 ((context or context == None) and x.startswith('***************')))):
918 try:
905 try:
919 if context == None and x.startswith('***************'):
906 if context == None and x.startswith('***************'):
920 context = True
907 context = True
921 current_hunk = hunk(x, hunknum + 1, lr, context)
908 current_hunk = hunk(x, hunknum + 1, lr, context)
922 except PatchError, err:
909 except PatchError, err:
923 ui.debug(err)
910 ui.debug(err)
924 current_hunk = None
911 current_hunk = None
925 continue
912 continue
926 hunknum += 1
913 hunknum += 1
927 if not current_file:
914 if emitfile:
928 current_file = getpatchfile(afile, bfile, current_hunk)
915 emitfile = False
929 if not current_file:
916 yield 'file', (afile, bfile, current_hunk)
930 current_file, current_hunk = None, None
931 rejects += 1
932 continue
933 elif state == BFILE and x.startswith('GIT binary patch'):
917 elif state == BFILE and x.startswith('GIT binary patch'):
934 current_hunk = binhunk(changed[bfile[2:]][1])
918 current_hunk = binhunk(changed[bfile[2:]][1])
935 hunknum += 1
919 hunknum += 1
936 if not current_file:
920 if emitfile:
937 current_file = getpatchfile(afile, bfile, current_hunk)
921 emitfile = False
938 if not current_file:
922 yield 'file', (afile, bfile, current_hunk)
939 current_file, current_hunk = None, None
940 rejects += 1
941 continue
942 current_hunk.extract(fp)
923 current_hunk.extract(fp)
943 elif x.startswith('diff --git'):
924 elif x.startswith('diff --git'):
944 # check for git diff, scanning the whole patch file if needed
925 # check for git diff, scanning the whole patch file if needed
945 m = gitre.match(x)
926 m = gitre.match(x)
946 if m:
927 if m:
947 afile, bfile = m.group(1, 2)
928 afile, bfile = m.group(1, 2)
948 if not git:
929 if not git:
949 git = True
930 git = True
950 fp, dopatch, gitpatches = scangitpatch(fp, x)
931 fp, dopatch, gitpatches = scangitpatch(fp, x)
932 yield 'git', gitpatches
951 for gp in gitpatches:
933 for gp in gitpatches:
952 changed[gp.path] = (gp.op, gp)
934 changed[gp.path] = (gp.op, gp)
953 # else error?
935 # else error?
954 # copy/rename + modify should modify target, not source
936 # copy/rename + modify should modify target, not source
955 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
937 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
956 'RENAME'):
938 'RENAME'):
957 afile = bfile
939 afile = bfile
958 gitworkdone = True
940 gitworkdone = True
959 newfile = True
941 newfile = True
960 elif x.startswith('---'):
942 elif x.startswith('---'):
961 # check for a unified diff
943 # check for a unified diff
962 l2 = lr.readline()
944 l2 = lr.readline()
963 if not l2.startswith('+++'):
945 if not l2.startswith('+++'):
964 lr.push(l2)
946 lr.push(l2)
965 continue
947 continue
966 newfile = True
948 newfile = True
967 context = False
949 context = False
968 afile = parsefilename(x)
950 afile = parsefilename(x)
969 bfile = parsefilename(l2)
951 bfile = parsefilename(l2)
970 elif x.startswith('***'):
952 elif x.startswith('***'):
971 # check for a context diff
953 # check for a context diff
972 l2 = lr.readline()
954 l2 = lr.readline()
973 if not l2.startswith('---'):
955 if not l2.startswith('---'):
974 lr.push(l2)
956 lr.push(l2)
975 continue
957 continue
976 l3 = lr.readline()
958 l3 = lr.readline()
977 lr.push(l3)
959 lr.push(l3)
978 if not l3.startswith("***************"):
960 if not l3.startswith("***************"):
979 lr.push(l2)
961 lr.push(l2)
980 continue
962 continue
981 newfile = True
963 newfile = True
982 context = True
964 context = True
983 afile = parsefilename(x)
965 afile = parsefilename(x)
984 bfile = parsefilename(l2)
966 bfile = parsefilename(l2)
985
967
986 if newfile:
968 if newfile:
987 if current_file:
969 emitfile = True
988 current_file.close()
989 if rejmerge:
990 rejmerge(current_file)
991 rejects += len(current_file.rej)
992 state = BFILE
970 state = BFILE
993 current_file = None
994 hunknum = 0
971 hunknum = 0
995 if current_hunk:
972 if current_hunk:
996 if current_hunk.complete():
973 if current_hunk.complete():
974 yield 'hunk', current_hunk
975 else:
976 raise PatchError(_("malformed patch %s %s") % (afile,
977 current_hunk.desc))
978
979 if hunknum == 0 and dopatch and not gitworkdone:
980 raise NoHunks
981
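# Editor's sketch of consuming the iterhunks() event stream; applydiff()
# below is the real consumer, this just tallies what a patch contains.
def _count_patch_events(ui, fp):
    files = hunks = 0
    for state, values in iterhunks(ui, fp):
        if state == 'file':
            files += 1
        elif state == 'hunk':
            hunks += 1
        # a 'git' event would carry the gitpatch records for copies/renames
    return files, hunks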
982 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
983 rejmerge=None, updatedir=None):
984 """reads a patch from fp and tries to apply it. The dict 'changed' is
985 filled in with all of the filenames changed by the patch. Returns 0
986 for a clean patch, -1 if any rejects were found and 1 if there was
987 any fuzz."""
988
989 rejects = 0
990 err = 0
991 current_file = None
992 gitpatches = None
993
994 def closefile():
995 if not current_file:
996 return 0
997 current_file.close()
998 if rejmerge:
999 rejmerge(current_file)
1000 return len(current_file.rej)
1001
1002 for state, values in iterhunks(ui, fp, sourcefile):
1003 if state == 'hunk':
1004 if not current_file:
1005 continue
1006 current_hunk = values
997 ret = current_file.apply(current_hunk, reverse)
1007 ret = current_file.apply(current_hunk, reverse)
998 if ret >= 0:
1008 if ret >= 0:
999 changed.setdefault(current_file.fname, (None, None))
1009 changed.setdefault(current_file.fname, (None, None))
1000 if ret > 0:
1010 if ret > 0:
1001 err = 1
1011 err = 1
1012 elif state == 'file':
1013 rejects += closefile()
1014 afile, bfile, first_hunk = values
1015 try:
1016 if sourcefile:
1017 current_file = patchfile(ui, sourcefile)
1018 else:
1019 current_file, missing = selectfile(afile, bfile, first_hunk,
1020 strip, reverse)
1021 current_file = patchfile(ui, current_file, missing)
1022 except PatchError, err:
1023 ui.warn(str(err) + '\n')
1024 current_file, current_hunk = None, None
1025 rejects += 1
1026 continue
1027 elif state == 'git':
1028 gitpatches = values
1029 for gp in gitpatches:
1030 if gp.op in ('COPY', 'RENAME'):
1031 copyfile(gp.oldpath, gp.path)
1032 changed[gp.path] = (gp.op, gp)
1002 else:
1033 else:
1003 fname = current_file and current_file.fname or None
1034 raise util.Abort(_('unsupported parser state: %s') % state)
1004 raise PatchError(_("malformed patch %s %s") % (fname,
1035
1005 current_hunk.desc))
1036 rejects += closefile()
1006 if current_file:
1037
1007 current_file.close()
1038 if updatedir and gitpatches:
1008 if rejmerge:
1009 rejmerge(current_file)
1010 rejects += len(current_file.rej)
1011 if updatedir and git:
1012 updatedir(gitpatches)
1039 updatedir(gitpatches)
1013 if rejects:
1040 if rejects:
1014 return -1
1041 return -1
1015 if hunknum == 0 and dopatch and not gitworkdone:
1016 raise NoHunks
1017 return err
1042 return err
1018
1043
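# Editor's sketch of applydiff()'s return contract as documented above:
# 0 = clean, 1 = applied with fuzz, -1 = rejects. The patch name and strip
# level here are illustrative only.
def _apply_and_report(ui, patchname, strip=1):
    changed = {}
    ret = applydiff(ui, file(patchname), changed, strip=strip)
    if ret < 0:
        ui.warn(_('patch had rejected hunks\n'))
    elif ret > 0:
        ui.status(_('patch applied with fuzz\n'))
    return changed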
1019 def diffopts(ui, opts={}, untrusted=False):
1044 def diffopts(ui, opts={}, untrusted=False):
1020 def get(key, name=None):
1045 def get(key, name=None):
1021 return (opts.get(key) or
1046 return (opts.get(key) or
1022 ui.configbool('diff', name or key, None, untrusted=untrusted))
1047 ui.configbool('diff', name or key, None, untrusted=untrusted))
1023 return mdiff.diffopts(
1048 return mdiff.diffopts(
1024 text=opts.get('text'),
1049 text=opts.get('text'),
1025 git=get('git'),
1050 git=get('git'),
1026 nodates=get('nodates'),
1051 nodates=get('nodates'),
1027 showfunc=get('show_function', 'showfunc'),
1052 showfunc=get('show_function', 'showfunc'),
1028 ignorews=get('ignore_all_space', 'ignorews'),
1053 ignorews=get('ignore_all_space', 'ignorews'),
1029 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1054 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1030 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1055 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1031
1056
1032 def updatedir(ui, repo, patches):
1057 def updatedir(ui, repo, patches):
1033 '''Update dirstate after patch application according to metadata'''
1058 '''Update dirstate after patch application according to metadata'''
1034 if not patches:
1059 if not patches:
1035 return
1060 return
1036 copies = []
1061 copies = []
1037 removes = {}
1062 removes = {}
1038 cfiles = patches.keys()
1063 cfiles = patches.keys()
1039 cwd = repo.getcwd()
1064 cwd = repo.getcwd()
1040 if cwd:
1065 if cwd:
1041 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1066 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1042 for f in patches:
1067 for f in patches:
1043 ctype, gp = patches[f]
1068 ctype, gp = patches[f]
1044 if ctype == 'RENAME':
1069 if ctype == 'RENAME':
1045 copies.append((gp.oldpath, gp.path))
1070 copies.append((gp.oldpath, gp.path))
1046 removes[gp.oldpath] = 1
1071 removes[gp.oldpath] = 1
1047 elif ctype == 'COPY':
1072 elif ctype == 'COPY':
1048 copies.append((gp.oldpath, gp.path))
1073 copies.append((gp.oldpath, gp.path))
1049 elif ctype == 'DELETE':
1074 elif ctype == 'DELETE':
1050 removes[gp.path] = 1
1075 removes[gp.path] = 1
1051 for src, dst in copies:
1076 for src, dst in copies:
1052 repo.copy(src, dst)
1077 repo.copy(src, dst)
1053 removes = removes.keys()
1078 removes = removes.keys()
1054 if removes:
1079 if removes:
1055 removes.sort()
1080 removes.sort()
1056 repo.remove(removes, True)
1081 repo.remove(removes, True)
1057 for f in patches:
1082 for f in patches:
1058 ctype, gp = patches[f]
1083 ctype, gp = patches[f]
1059 if gp and gp.mode:
1084 if gp and gp.mode:
1060 x = gp.mode & 0100 != 0
1085 x = gp.mode & 0100 != 0
1061 l = gp.mode & 020000 != 0
1086 l = gp.mode & 020000 != 0
1062 dst = os.path.join(repo.root, gp.path)
1087 dst = os.path.join(repo.root, gp.path)
1063 # patch won't create empty files
1088 # patch won't create empty files
1064 if ctype == 'ADD' and not os.path.exists(dst):
1089 if ctype == 'ADD' and not os.path.exists(dst):
1065 repo.wwrite(gp.path, '', x and 'x' or '')
1090 repo.wwrite(gp.path, '', x and 'x' or '')
1066 else:
1091 else:
1067 util.set_link(dst, l)
1092 util.set_link(dst, l)
1068 if not l:
1093 if not l:
1069 util.set_exec(dst, x)
1094 util.set_exec(dst, x)
1070 cmdutil.addremove(repo, cfiles)
1095 cmdutil.addremove(repo, cfiles)
1071 files = patches.keys()
1096 files = patches.keys()
1072 files.extend([r for r in removes if r not in files])
1097 files.extend([r for r in removes if r not in files])
1073 files.sort()
1098 files.sort()
1074
1099
1075 return files
1100 return files
1076
1101
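# Editor's note on the mode tests in updatedir() above: gp.mode holds a
# git-style mode, where the 0100 bit marks owner-execute and the 020000 bit
# marks a symlink (git's 120000 entries).
assert 0100644 & 0100 == 0
assert 0100755 & 0100 != 0
assert 0120000 & 020000 != 0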
1077 def b85diff(to, tn):
1102 def b85diff(to, tn):
1078 '''print base85-encoded binary diff'''
1103 '''print base85-encoded binary diff'''
1079 def gitindex(text):
1104 def gitindex(text):
1080 if not text:
1105 if not text:
1081 return '0' * 40
1106 return '0' * 40
1082 l = len(text)
1107 l = len(text)
1083 s = sha.new('blob %d\0' % l)
1108 s = sha.new('blob %d\0' % l)
1084 s.update(text)
1109 s.update(text)
1085 return s.hexdigest()
1110 return s.hexdigest()
1086
1111
1087 def fmtline(line):
1112 def fmtline(line):
1088 l = len(line)
1113 l = len(line)
1089 if l <= 26:
1114 if l <= 26:
1090 l = chr(ord('A') + l - 1)
1115 l = chr(ord('A') + l - 1)
1091 else:
1116 else:
1092 l = chr(l - 26 + ord('a') - 1)
1117 l = chr(l - 26 + ord('a') - 1)
1093 return '%c%s\n' % (l, base85.b85encode(line, True))
1118 return '%c%s\n' % (l, base85.b85encode(line, True))
1094
1119
1095 def chunk(text, csize=52):
1120 def chunk(text, csize=52):
1096 l = len(text)
1121 l = len(text)
1097 i = 0
1122 i = 0
1098 while i < l:
1123 while i < l:
1099 yield text[i:i+csize]
1124 yield text[i:i+csize]
1100 i += csize
1125 i += csize
1101
1126
1102 tohash = gitindex(to)
1127 tohash = gitindex(to)
1103 tnhash = gitindex(tn)
1128 tnhash = gitindex(tn)
1104 if tohash == tnhash:
1129 if tohash == tnhash:
1105 return ""
1130 return ""
1106
1131
1107 # TODO: deltas
1132 # TODO: deltas
1108 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1133 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1109 (tohash, tnhash, len(tn))]
1134 (tohash, tnhash, len(tn))]
1110 for l in chunk(zlib.compress(tn)):
1135 for l in chunk(zlib.compress(tn)):
1111 ret.append(fmtline(l))
1136 ret.append(fmtline(l))
1112 ret.append('\n')
1137 ret.append('\n')
1113 return ''.join(ret)
1138 return ''.join(ret)
1114
1139
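# Editor's illustration of the fmtline() length prefix in b85diff() above;
# it is the encoder matched by the binhunk decoder earlier in this file:
# chunk lengths 1..26 map to 'A'..'Z' and 27..52 to 'a'..'z' (csize=52).
def _b85_lenchar(n):
    if n <= 26:
        return chr(ord('A') + n - 1)
    return chr(n - 26 + ord('a') - 1)

assert _b85_lenchar(1) == 'A' and _b85_lenchar(26) == 'Z'
assert _b85_lenchar(27) == 'a' and _b85_lenchar(52) == 'z'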
1115 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1140 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1116 fp=None, changes=None, opts=None):
1141 fp=None, changes=None, opts=None):
1117 '''print diff of changes to files between two nodes, or node and
1142 '''print diff of changes to files between two nodes, or node and
1118 working directory.
1143 working directory.
1119
1144
1120 if node1 is None, use first dirstate parent instead.
1145 if node1 is None, use first dirstate parent instead.
1121 if node2 is None, compare node1 with working directory.'''
1146 if node2 is None, compare node1 with working directory.'''
1122
1147
1123 if opts is None:
1148 if opts is None:
1124 opts = mdiff.defaultopts
1149 opts = mdiff.defaultopts
1125 if fp is None:
1150 if fp is None:
1126 fp = repo.ui
1151 fp = repo.ui
1127
1152
1128 if not node1:
1153 if not node1:
1129 node1 = repo.dirstate.parents()[0]
1154 node1 = repo.dirstate.parents()[0]
1130
1155
1131 ccache = {}
1156 ccache = {}
1132 def getctx(r):
1157 def getctx(r):
1133 if r not in ccache:
1158 if r not in ccache:
1134 ccache[r] = context.changectx(repo, r)
1159 ccache[r] = context.changectx(repo, r)
1135 return ccache[r]
1160 return ccache[r]
1136
1161
1137 flcache = {}
1162 flcache = {}
1138 def getfilectx(f, ctx):
1163 def getfilectx(f, ctx):
1139 flctx = ctx.filectx(f, filelog=flcache.get(f))
1164 flctx = ctx.filectx(f, filelog=flcache.get(f))
1140 if f not in flcache:
1165 if f not in flcache:
1141 flcache[f] = flctx._filelog
1166 flcache[f] = flctx._filelog
1142 return flctx
1167 return flctx
1143
1168
1144 # reading the data for node1 early allows it to play nicely
1169 # reading the data for node1 early allows it to play nicely
1145 # with repo.status and the revlog cache.
1170 # with repo.status and the revlog cache.
1146 ctx1 = context.changectx(repo, node1)
1171 ctx1 = context.changectx(repo, node1)
1147 # force manifest reading
1172 # force manifest reading
1148 man1 = ctx1.manifest()
1173 man1 = ctx1.manifest()
1149 date1 = util.datestr(ctx1.date())
1174 date1 = util.datestr(ctx1.date())
1150
1175
1151 if not changes:
1176 if not changes:
1152 changes = repo.status(node1, node2, files, match=match)[:5]
1177 changes = repo.status(node1, node2, files, match=match)[:5]
1153 modified, added, removed, deleted, unknown = changes
1178 modified, added, removed, deleted, unknown = changes
1154
1179
1155 if not modified and not added and not removed:
1180 if not modified and not added and not removed:
1156 return
1181 return
1157
1182
1158 if node2:
1183 if node2:
1159 ctx2 = context.changectx(repo, node2)
1184 ctx2 = context.changectx(repo, node2)
1160 execf2 = ctx2.manifest().execf
1185 execf2 = ctx2.manifest().execf
1161 linkf2 = ctx2.manifest().linkf
1186 linkf2 = ctx2.manifest().linkf
1162 else:
1187 else:
1163 ctx2 = context.workingctx(repo)
1188 ctx2 = context.workingctx(repo)
1164 execf2 = util.execfunc(repo.root, None)
1189 execf2 = util.execfunc(repo.root, None)
1165 linkf2 = util.linkfunc(repo.root, None)
1190 linkf2 = util.linkfunc(repo.root, None)
1166 if execf2 is None:
1191 if execf2 is None:
1167 mc = ctx2.parents()[0].manifest().copy()
1192 mc = ctx2.parents()[0].manifest().copy()
1168 execf2 = mc.execf
1193 execf2 = mc.execf
1169 linkf2 = mc.linkf
1194 linkf2 = mc.linkf
1170
1195
1171 # returns False if there was no rename between ctx1 and ctx2
1196 # returns False if there was no rename between ctx1 and ctx2
1172 # returns None if the file was created between ctx1 and ctx2
1197 # returns None if the file was created between ctx1 and ctx2
1173 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1198 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1174 # This will only really work if c1 is the Nth 1st parent of c2.
1199 # This will only really work if c1 is the Nth 1st parent of c2.
1175 def renamed(c1, c2, man, f):
1200 def renamed(c1, c2, man, f):
1176 startrev = c1.rev()
1201 startrev = c1.rev()
1177 c = c2
1202 c = c2
1178 crev = c.rev()
1203 crev = c.rev()
1179 if crev is None:
1204 if crev is None:
1180 crev = repo.changelog.count()
1205 crev = repo.changelog.count()
1181 orig = f
1206 orig = f
1182 files = (f,)
1207 files = (f,)
1183 while crev > startrev:
1208 while crev > startrev:
1184 if f in files:
1209 if f in files:
1185 try:
1210 try:
1186 src = getfilectx(f, c).renamed()
1211 src = getfilectx(f, c).renamed()
1187 except revlog.LookupError:
1212 except revlog.LookupError:
1188 return None
1213 return None
1189 if src:
1214 if src:
1190 f = src[0]
1215 f = src[0]
1191 crev = c.parents()[0].rev()
1216 crev = c.parents()[0].rev()
1192 # try to reuse
1217 # try to reuse
1193 c = getctx(crev)
1218 c = getctx(crev)
1194 files = c.files()
1219 files = c.files()
1195 if f not in man:
1220 if f not in man:
1196 return None
1221 return None
1197 if f == orig:
1222 if f == orig:
1198 return False
1223 return False
1199 return f
1224 return f
1200
1225
1201 if repo.ui.quiet:
1226 if repo.ui.quiet:
1202 r = None
1227 r = None
1203 else:
1228 else:
1204 hexfunc = repo.ui.debugflag and hex or short
1229 hexfunc = repo.ui.debugflag and hex or short
1205 r = [hexfunc(node) for node in [node1, node2] if node]
1230 r = [hexfunc(node) for node in [node1, node2] if node]
1206
1231
1207 if opts.git:
1232 if opts.git:
1208 copied = {}
1233 copied = {}
1209 c1, c2 = ctx1, ctx2
1234 c1, c2 = ctx1, ctx2
1210 files = added
1235 files = added
1211 man = man1
1236 man = man1
1212 if node2 and ctx1.rev() >= ctx2.rev():
1237 if node2 and ctx1.rev() >= ctx2.rev():
1213 # renamed() starts at c2 and walks back in history until c1.
1238 # renamed() starts at c2 and walks back in history until c1.
1214 # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
1239 # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
1215 # detect (inverted) copies.
1240 # detect (inverted) copies.
1216 c1, c2 = ctx2, ctx1
1241 c1, c2 = ctx2, ctx1
1217 files = removed
1242 files = removed
1218 man = ctx2.manifest()
1243 man = ctx2.manifest()
1219 for f in files:
1244 for f in files:
1220 src = renamed(c1, c2, man, f)
1245 src = renamed(c1, c2, man, f)
1221 if src:
1246 if src:
1222 copied[f] = src
1247 copied[f] = src
1223 if ctx1 == c2:
1248 if ctx1 == c2:
1224 # invert the copied dict
1249 # invert the copied dict
1225 copied = dict([(v, k) for (k, v) in copied.iteritems()])
1250 copied = dict([(v, k) for (k, v) in copied.iteritems()])
1226 # If we've renamed file foo to bar (copied['bar'] = 'foo'),
1251 # If we've renamed file foo to bar (copied['bar'] = 'foo'),
1227 # avoid showing a diff for foo if we're going to show
1252 # avoid showing a diff for foo if we're going to show
1228 # the rename to bar.
1253 # the rename to bar.
1229 srcs = [x[1] for x in copied.iteritems() if x[0] in added]
1254 srcs = [x[1] for x in copied.iteritems() if x[0] in added]
1230
1255
1231 all = modified + added + removed
1256 all = modified + added + removed
1232 all.sort()
1257 all.sort()
1233 gone = {}
1258 gone = {}
1234
1259
1235 for f in all:
1260 for f in all:
1236 to = None
1261 to = None
1237 tn = None
1262 tn = None
1238 dodiff = True
1263 dodiff = True
1239 header = []
1264 header = []
1240 if f in man1:
1265 if f in man1:
1241 to = getfilectx(f, ctx1).data()
1266 to = getfilectx(f, ctx1).data()
1242 if f not in removed:
1267 if f not in removed:
1243 tn = getfilectx(f, ctx2).data()
1268 tn = getfilectx(f, ctx2).data()
1244 a, b = f, f
1269 a, b = f, f
1245 if opts.git:
1270 if opts.git:
1246 def gitmode(x, l):
1271 def gitmode(x, l):
1247 return l and '120000' or (x and '100755' or '100644')
1272 return l and '120000' or (x and '100755' or '100644')
1248 def addmodehdr(header, omode, nmode):
1273 def addmodehdr(header, omode, nmode):
1249 if omode != nmode:
1274 if omode != nmode:
1250 header.append('old mode %s\n' % omode)
1275 header.append('old mode %s\n' % omode)
1251 header.append('new mode %s\n' % nmode)
1276 header.append('new mode %s\n' % nmode)
1252
1277
1253 if f in added:
1278 if f in added:
1254 mode = gitmode(execf2(f), linkf2(f))
1279 mode = gitmode(execf2(f), linkf2(f))
1255 if f in copied:
1280 if f in copied:
1256 a = copied[f]
1281 a = copied[f]
1257 omode = gitmode(man1.execf(a), man1.linkf(a))
1282 omode = gitmode(man1.execf(a), man1.linkf(a))
1258 addmodehdr(header, omode, mode)
1283 addmodehdr(header, omode, mode)
1259 if a in removed and a not in gone:
1284 if a in removed and a not in gone:
1260 op = 'rename'
1285 op = 'rename'
1261 gone[a] = 1
1286 gone[a] = 1
1262 else:
1287 else:
1263 op = 'copy'
1288 op = 'copy'
1264 header.append('%s from %s\n' % (op, a))
1289 header.append('%s from %s\n' % (op, a))
1265 header.append('%s to %s\n' % (op, f))
1290 header.append('%s to %s\n' % (op, f))
1266 to = getfilectx(a, ctx1).data()
1291 to = getfilectx(a, ctx1).data()
1267 else:
1292 else:
1268 header.append('new file mode %s\n' % mode)
1293 header.append('new file mode %s\n' % mode)
1269 if util.binary(tn):
1294 if util.binary(tn):
1270 dodiff = 'binary'
1295 dodiff = 'binary'
1271 elif f in removed:
1296 elif f in removed:
1272 if f in srcs:
1297 if f in srcs:
1273 dodiff = False
1298 dodiff = False
1274 else:
1299 else:
1275 mode = gitmode(man1.execf(f), man1.linkf(f))
1300 mode = gitmode(man1.execf(f), man1.linkf(f))
1276 header.append('deleted file mode %s\n' % mode)
1301 header.append('deleted file mode %s\n' % mode)
1277 else:
1302 else:
1278 omode = gitmode(man1.execf(f), man1.linkf(f))
1303 omode = gitmode(man1.execf(f), man1.linkf(f))
1279 nmode = gitmode(execf2(f), linkf2(f))
1304 nmode = gitmode(execf2(f), linkf2(f))
1280 addmodehdr(header, omode, nmode)
1305 addmodehdr(header, omode, nmode)
1281 if util.binary(to) or util.binary(tn):
1306 if util.binary(to) or util.binary(tn):
1282 dodiff = 'binary'
1307 dodiff = 'binary'
1283 r = None
1308 r = None
1284 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1309 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1285 if dodiff:
1310 if dodiff:
1286 if dodiff == 'binary':
1311 if dodiff == 'binary':
1287 text = b85diff(to, tn)
1312 text = b85diff(to, tn)
1288 else:
1313 else:
1289 text = mdiff.unidiff(to, date1,
1314 text = mdiff.unidiff(to, date1,
1290 # ctx2 date may be dynamic
1315 # ctx2 date may be dynamic
1291 tn, util.datestr(ctx2.date()),
1316 tn, util.datestr(ctx2.date()),
1292 a, b, r, opts=opts)
1317 a, b, r, opts=opts)
1293 if text or len(header) > 1:
1318 if text or len(header) > 1:
1294 fp.write(''.join(header))
1319 fp.write(''.join(header))
1295 fp.write(text)
1320 fp.write(text)
1296
1321
1297 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1322 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1298 opts=None):
1323 opts=None):
1299 '''export changesets as hg patches.'''
1324 '''export changesets as hg patches.'''
1300
1325
1301 total = len(revs)
1326 total = len(revs)
1302 revwidth = max([len(str(rev)) for rev in revs])
1327 revwidth = max([len(str(rev)) for rev in revs])
1303
1328
1304 def single(rev, seqno, fp):
1329 def single(rev, seqno, fp):
1305 ctx = repo.changectx(rev)
1330 ctx = repo.changectx(rev)
1306 node = ctx.node()
1331 node = ctx.node()
1307 parents = [p.node() for p in ctx.parents() if p]
1332 parents = [p.node() for p in ctx.parents() if p]
1308 branch = ctx.branch()
1333 branch = ctx.branch()
1309 if switch_parent:
1334 if switch_parent:
1310 parents.reverse()
1335 parents.reverse()
1311 prev = (parents and parents[0]) or nullid
1336 prev = (parents and parents[0]) or nullid
1312
1337
1313 if not fp:
1338 if not fp:
1314 fp = cmdutil.make_file(repo, template, node, total=total,
1339 fp = cmdutil.make_file(repo, template, node, total=total,
1315 seqno=seqno, revwidth=revwidth)
1340 seqno=seqno, revwidth=revwidth)
1316 if fp != sys.stdout and hasattr(fp, 'name'):
1341 if fp != sys.stdout and hasattr(fp, 'name'):
1317 repo.ui.note("%s\n" % fp.name)
1342 repo.ui.note("%s\n" % fp.name)
1318
1343
1319 fp.write("# HG changeset patch\n")
1344 fp.write("# HG changeset patch\n")
1320 fp.write("# User %s\n" % ctx.user())
1345 fp.write("# User %s\n" % ctx.user())
1321 fp.write("# Date %d %d\n" % ctx.date())
1346 fp.write("# Date %d %d\n" % ctx.date())
1322 if branch and (branch != 'default'):
1347 if branch and (branch != 'default'):
1323 fp.write("# Branch %s\n" % branch)
1348 fp.write("# Branch %s\n" % branch)
1324 fp.write("# Node ID %s\n" % hex(node))
1349 fp.write("# Node ID %s\n" % hex(node))
1325 fp.write("# Parent %s\n" % hex(prev))
1350 fp.write("# Parent %s\n" % hex(prev))
1326 if len(parents) > 1:
1351 if len(parents) > 1:
1327 fp.write("# Parent %s\n" % hex(parents[1]))
1352 fp.write("# Parent %s\n" % hex(parents[1]))
1328 fp.write(ctx.description().rstrip())
1353 fp.write(ctx.description().rstrip())
1329 fp.write("\n\n")
1354 fp.write("\n\n")
1330
1355
1331 diff(repo, prev, node, fp=fp, opts=opts)
1356 diff(repo, prev, node, fp=fp, opts=opts)
1332 if fp not in (sys.stdout, repo.ui):
1357 if fp not in (sys.stdout, repo.ui):
1333 fp.close()
1358 fp.close()
1334
1359
1335 for seqno, rev in enumerate(revs):
1360 for seqno, rev in enumerate(revs):
1336 single(rev, seqno+1, fp)
1361 single(rev, seqno+1, fp)
1337
1362
1338 def diffstat(patchlines):
1363 def diffstat(patchlines):
1339 if not util.find_exe('diffstat'):
1364 if not util.find_exe('diffstat'):
1340 return
1365 return
1341 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1366 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1342 try:
1367 try:
1343 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1368 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1344 try:
1369 try:
1345 for line in patchlines: print >> p.tochild, line
1370 for line in patchlines: print >> p.tochild, line
1346 p.tochild.close()
1371 p.tochild.close()
1347 if p.wait(): return
1372 if p.wait(): return
1348 fp = os.fdopen(fd, 'r')
1373 fp = os.fdopen(fd, 'r')
1349 stat = []
1374 stat = []
1350 for line in fp: stat.append(line.lstrip())
1375 for line in fp: stat.append(line.lstrip())
1351 last = stat.pop()
1376 last = stat.pop()
1352 stat.insert(0, last)
1377 stat.insert(0, last)
1353 stat = ''.join(stat)
1378 stat = ''.join(stat)
1354 if stat.startswith('0 files'): raise ValueError
1379 if stat.startswith('0 files'): raise ValueError
1355 return stat
1380 return stat
1356 except: raise
1381 except: raise
1357 finally:
1382 finally:
1358 try: os.unlink(name)
1383 try: os.unlink(name)
1359 except: pass
1384 except: pass
@@ -1,1742 +1,1750 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 import re, urlparse
18 import re, urlparse
19
19
20 try:
20 try:
21 set = set
21 set = set
22 frozenset = frozenset
22 frozenset = frozenset
23 except NameError:
23 except NameError:
24 from sets import Set as set, ImmutableSet as frozenset
24 from sets import Set as set, ImmutableSet as frozenset
25
25
26 try:
26 try:
27 _encoding = os.environ.get("HGENCODING")
27 _encoding = os.environ.get("HGENCODING")
28 if sys.platform == 'darwin' and not _encoding:
28 if sys.platform == 'darwin' and not _encoding:
29 # On darwin, getpreferredencoding ignores the locale environment and
29 # On darwin, getpreferredencoding ignores the locale environment and
30 # always returns mac-roman. We override this if the environment is
30 # always returns mac-roman. We override this if the environment is
31 # not C (has been customized by the user).
31 # not C (has been customized by the user).
32 locale.setlocale(locale.LC_CTYPE, '')
32 locale.setlocale(locale.LC_CTYPE, '')
33 _encoding = locale.getlocale()[1]
33 _encoding = locale.getlocale()[1]
34 if not _encoding:
34 if not _encoding:
35 _encoding = locale.getpreferredencoding() or 'ascii'
35 _encoding = locale.getpreferredencoding() or 'ascii'
36 except locale.Error:
36 except locale.Error:
37 _encoding = 'ascii'
37 _encoding = 'ascii'
38 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
38 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
39 _fallbackencoding = 'ISO-8859-1'
39 _fallbackencoding = 'ISO-8859-1'
40
40
41 def tolocal(s):
41 def tolocal(s):
42 """
42 """
43 Convert a string from internal UTF-8 to local encoding
43 Convert a string from internal UTF-8 to local encoding
44
44
45 All internal strings should be UTF-8 but some repos before the
45 All internal strings should be UTF-8 but some repos before the
46 implementation of locale support may contain latin1 or possibly
46 implementation of locale support may contain latin1 or possibly
47 other character sets. We attempt to decode everything strictly
47 other character sets. We attempt to decode everything strictly
48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
49 replace unknown characters.
49 replace unknown characters.
50 """
50 """
51 for e in ('UTF-8', _fallbackencoding):
51 for e in ('UTF-8', _fallbackencoding):
52 try:
52 try:
53 u = s.decode(e) # attempt strict decoding
53 u = s.decode(e) # attempt strict decoding
54 return u.encode(_encoding, "replace")
54 return u.encode(_encoding, "replace")
55 except LookupError, k:
55 except LookupError, k:
56 raise Abort(_("%s, please check your locale settings") % k)
56 raise Abort(_("%s, please check your locale settings") % k)
57 except UnicodeDecodeError:
57 except UnicodeDecodeError:
58 pass
58 pass
59 u = s.decode("utf-8", "replace") # last ditch
59 u = s.decode("utf-8", "replace") # last ditch
60 return u.encode(_encoding, "replace")
60 return u.encode(_encoding, "replace")
61
61
62 def fromlocal(s):
62 def fromlocal(s):
63 """
63 """
64 Convert a string from the local character encoding to UTF-8
64 Convert a string from the local character encoding to UTF-8
65
65
66 We attempt to decode strings using the encoding mode set by
66 We attempt to decode strings using the encoding mode set by
67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
68 characters will cause an error message. Other modes include
68 characters will cause an error message. Other modes include
69 'replace', which replaces unknown characters with a special
69 'replace', which replaces unknown characters with a special
70 Unicode character, and 'ignore', which drops the character.
70 Unicode character, and 'ignore', which drops the character.
71 """
71 """
72 try:
72 try:
73 return s.decode(_encoding, _encodingmode).encode("utf-8")
73 return s.decode(_encoding, _encodingmode).encode("utf-8")
74 except UnicodeDecodeError, inst:
74 except UnicodeDecodeError, inst:
75 sub = s[max(0, inst.start-10):inst.start+10]
75 sub = s[max(0, inst.start-10):inst.start+10]
76 raise Abort("decoding near '%s': %s!" % (sub, inst))
76 raise Abort("decoding near '%s': %s!" % (sub, inst))
77 except LookupError, k:
77 except LookupError, k:
78 raise Abort(_("%s, please check your locale settings") % k)
78 raise Abort(_("%s, please check your locale settings") % k)
79
79
80 def locallen(s):
80 def locallen(s):
81 """Find the length in characters of a local string"""
81 """Find the length in characters of a local string"""
82 return len(s.decode(_encoding, "replace"))
82 return len(s.decode(_encoding, "replace"))
83
83
84 def localsub(s, a, b=None):
84 def localsub(s, a, b=None):
85 try:
85 try:
86 u = s.decode(_encoding, _encodingmode)
86 u = s.decode(_encoding, _encodingmode)
87 if b is not None:
87 if b is not None:
88 u = u[a:b]
88 u = u[a:b]
89 else:
89 else:
90 u = u[:a]
90 u = u[:a]
91 return u.encode(_encoding, _encodingmode)
91 return u.encode(_encoding, _encodingmode)
92 except UnicodeDecodeError, inst:
92 except UnicodeDecodeError, inst:
93 sub = s[max(0, inst.start-10):inst.start+10]
93 sub = s[max(0, inst.start-10):inst.start+10]
94 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
94 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
95
95
96 # used by parsedate
96 # used by parsedate
97 defaultdateformats = (
97 defaultdateformats = (
98 '%Y-%m-%d %H:%M:%S',
98 '%Y-%m-%d %H:%M:%S',
99 '%Y-%m-%d %I:%M:%S%p',
99 '%Y-%m-%d %I:%M:%S%p',
100 '%Y-%m-%d %H:%M',
100 '%Y-%m-%d %H:%M',
101 '%Y-%m-%d %I:%M%p',
101 '%Y-%m-%d %I:%M%p',
102 '%Y-%m-%d',
102 '%Y-%m-%d',
103 '%m-%d',
103 '%m-%d',
104 '%m/%d',
104 '%m/%d',
105 '%m/%d/%y',
105 '%m/%d/%y',
106 '%m/%d/%Y',
106 '%m/%d/%Y',
107 '%a %b %d %H:%M:%S %Y',
107 '%a %b %d %H:%M:%S %Y',
108 '%a %b %d %I:%M:%S%p %Y',
108 '%a %b %d %I:%M:%S%p %Y',
109 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
109 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
110 '%b %d %H:%M:%S %Y',
110 '%b %d %H:%M:%S %Y',
111 '%b %d %I:%M:%S%p %Y',
111 '%b %d %I:%M:%S%p %Y',
112 '%b %d %H:%M:%S',
112 '%b %d %H:%M:%S',
113 '%b %d %I:%M:%S%p',
113 '%b %d %I:%M:%S%p',
114 '%b %d %H:%M',
114 '%b %d %H:%M',
115 '%b %d %I:%M%p',
115 '%b %d %I:%M%p',
116 '%b %d %Y',
116 '%b %d %Y',
117 '%b %d',
117 '%b %d',
118 '%H:%M:%S',
118 '%H:%M:%S',
119 '%I:%M:%S%p',
119 '%I:%M:%S%p',
120 '%H:%M',
120 '%H:%M',
121 '%I:%M%p',
121 '%I:%M%p',
122 )
122 )
123
123
124 extendeddateformats = defaultdateformats + (
124 extendeddateformats = defaultdateformats + (
125 "%Y",
125 "%Y",
126 "%Y-%m",
126 "%Y-%m",
127 "%b",
127 "%b",
128 "%b %Y",
128 "%b %Y",
129 )
129 )
130
130
131 class SignalInterrupt(Exception):
131 class SignalInterrupt(Exception):
132 """Exception raised on SIGTERM and SIGHUP."""
132 """Exception raised on SIGTERM and SIGHUP."""
133
133
134 # differences from SafeConfigParser:
134 # differences from SafeConfigParser:
135 # - case-sensitive keys
135 # - case-sensitive keys
136 # - allows values that are not strings (this means that you may not
136 # - allows values that are not strings (this means that you may not
137 # be able to save the configuration to a file)
137 # be able to save the configuration to a file)
138 class configparser(ConfigParser.SafeConfigParser):
138 class configparser(ConfigParser.SafeConfigParser):
139 def optionxform(self, optionstr):
139 def optionxform(self, optionstr):
140 return optionstr
140 return optionstr
141
141
142 def set(self, section, option, value):
142 def set(self, section, option, value):
143 return ConfigParser.ConfigParser.set(self, section, option, value)
143 return ConfigParser.ConfigParser.set(self, section, option, value)
144
144
145 def _interpolate(self, section, option, rawval, vars):
145 def _interpolate(self, section, option, rawval, vars):
146 if not isinstance(rawval, basestring):
146 if not isinstance(rawval, basestring):
147 return rawval
147 return rawval
148 return ConfigParser.SafeConfigParser._interpolate(self, section,
148 return ConfigParser.SafeConfigParser._interpolate(self, section,
149 option, rawval, vars)
149 option, rawval, vars)
150
150
151 def cachefunc(func):
151 def cachefunc(func):
152 '''cache the result of function calls'''
152 '''cache the result of function calls'''
153 # XXX doesn't handle keyword args
153 # XXX doesn't handle keyword args
154 cache = {}
154 cache = {}
155 if func.func_code.co_argcount == 1:
155 if func.func_code.co_argcount == 1:
156 # we gain a small amount of time because
156 # we gain a small amount of time because
157 # we don't need to pack/unpack the list
157 # we don't need to pack/unpack the list
158 def f(arg):
158 def f(arg):
159 if arg not in cache:
159 if arg not in cache:
160 cache[arg] = func(arg)
160 cache[arg] = func(arg)
161 return cache[arg]
161 return cache[arg]
162 else:
162 else:
163 def f(*args):
163 def f(*args):
164 if args not in cache:
164 if args not in cache:
165 cache[args] = func(*args)
165 cache[args] = func(*args)
166 return cache[args]
166 return cache[args]
167
167
168 return f
168 return f
169
169
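# Editor's illustration of cachefunc(): repeated calls with the same argument
# are served from the cache, so the wrapped function runs only once.
_calls = []
def _slow_double(x):
    _calls.append(x)
    return x * 2
_fast_double = cachefunc(_slow_double)
assert _fast_double(3) == 6 and _fast_double(3) == 6
assert _calls == [3]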
170 def pipefilter(s, cmd):
170 def pipefilter(s, cmd):
171 '''filter string S through command CMD, returning its output'''
171 '''filter string S through command CMD, returning its output'''
172 (pin, pout) = os.popen2(cmd, 'b')
172 (pin, pout) = os.popen2(cmd, 'b')
173 def writer():
173 def writer():
174 try:
174 try:
175 pin.write(s)
175 pin.write(s)
176 pin.close()
176 pin.close()
177 except IOError, inst:
177 except IOError, inst:
178 if inst.errno != errno.EPIPE:
178 if inst.errno != errno.EPIPE:
179 raise
179 raise
180
180
181 # we should use select instead on UNIX, but this will work on most
181 # we should use select instead on UNIX, but this will work on most
182 # systems, including Windows
182 # systems, including Windows
183 w = threading.Thread(target=writer)
183 w = threading.Thread(target=writer)
184 w.start()
184 w.start()
185 f = pout.read()
185 f = pout.read()
186 pout.close()
186 pout.close()
187 w.join()
187 w.join()
188 return f
188 return f
189
189
190 def tempfilter(s, cmd):
190 def tempfilter(s, cmd):
191 '''filter string S through a pair of temporary files with CMD.
191 '''filter string S through a pair of temporary files with CMD.
192 CMD is used as a template to create the real command to be run,
192 CMD is used as a template to create the real command to be run,
193 with the strings INFILE and OUTFILE replaced by the real names of
193 with the strings INFILE and OUTFILE replaced by the real names of
194 the temporary files generated.'''
194 the temporary files generated.'''
195 inname, outname = None, None
195 inname, outname = None, None
196 try:
196 try:
197 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
197 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
198 fp = os.fdopen(infd, 'wb')
198 fp = os.fdopen(infd, 'wb')
199 fp.write(s)
199 fp.write(s)
200 fp.close()
200 fp.close()
201 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
201 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
202 os.close(outfd)
202 os.close(outfd)
203 cmd = cmd.replace('INFILE', inname)
203 cmd = cmd.replace('INFILE', inname)
204 cmd = cmd.replace('OUTFILE', outname)
204 cmd = cmd.replace('OUTFILE', outname)
205 code = os.system(cmd)
205 code = os.system(cmd)
206 if sys.platform == 'OpenVMS' and code & 1:
206 if sys.platform == 'OpenVMS' and code & 1:
207 code = 0
207 code = 0
208 if code: raise Abort(_("command '%s' failed: %s") %
208 if code: raise Abort(_("command '%s' failed: %s") %
209 (cmd, explain_exit(code)))
209 (cmd, explain_exit(code)))
210 return open(outname, 'rb').read()
210 return open(outname, 'rb').read()
211 finally:
211 finally:
212 try:
212 try:
213 if inname: os.unlink(inname)
213 if inname: os.unlink(inname)
214 except: pass
214 except: pass
215 try:
215 try:
216 if outname: os.unlink(outname)
216 if outname: os.unlink(outname)
217 except: pass
217 except: pass
218
218
219 filtertable = {
219 filtertable = {
220 'tempfile:': tempfilter,
220 'tempfile:': tempfilter,
221 'pipe:': pipefilter,
221 'pipe:': pipefilter,
222 }
222 }
223
223
224 def filter(s, cmd):
224 def filter(s, cmd):
225 "filter a string through a command that transforms its input to its output"
225 "filter a string through a command that transforms its input to its output"
226 for name, fn in filtertable.iteritems():
226 for name, fn in filtertable.iteritems():
227 if cmd.startswith(name):
227 if cmd.startswith(name):
228 return fn(s, cmd[len(name):].lstrip())
228 return fn(s, cmd[len(name):].lstrip())
229 return pipefilter(s, cmd)
229 return pipefilter(s, cmd)
230
230
231 def binary(s):
231 def binary(s):
232 """return true if a string is binary data using diff's heuristic"""
232 """return true if a string is binary data using diff's heuristic"""
233 if s and '\0' in s[:4096]:
233 if s and '\0' in s[:4096]:
234 return True
234 return True
235 return False
235 return False
236
236
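# Editor's illustration of binary(): a NUL byte in the first 4k of data is
# what flags it as binary, mirroring diff's heuristic.
assert not binary('plain text\n')
assert binary('PK\x03\x04\x00 not really a zip, just example bytes')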
237 def unique(g):
237 def unique(g):
238 """return the uniq elements of iterable g"""
238 """return the uniq elements of iterable g"""
239 seen = {}
239 seen = {}
240 l = []
240 l = []
241 for f in g:
241 for f in g:
242 if f not in seen:
242 if f not in seen:
243 seen[f] = 1
243 seen[f] = 1
244 l.append(f)
244 l.append(f)
245 return l
245 return l
246
246
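# Editor's illustration: unique() keeps the first occurrence of each element
# and preserves the original order.
assert unique([3, 1, 3, 2, 1]) == [3, 1, 2]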
247 class Abort(Exception):
247 class Abort(Exception):
248 """Raised if a command needs to print an error and exit."""
248 """Raised if a command needs to print an error and exit."""
249
249
250 class UnexpectedOutput(Abort):
250 class UnexpectedOutput(Abort):
251 """Raised to print an error with part of output and exit."""
251 """Raised to print an error with part of output and exit."""
252
252
253 def always(fn): return True
253 def always(fn): return True
254 def never(fn): return False
254 def never(fn): return False
255
255
256 def expand_glob(pats):
256 def expand_glob(pats):
257 '''On Windows, expand the implicit globs in a list of patterns'''
257 '''On Windows, expand the implicit globs in a list of patterns'''
258 if os.name != 'nt':
258 if os.name != 'nt':
259 return list(pats)
259 return list(pats)
260 ret = []
260 ret = []
261 for p in pats:
261 for p in pats:
262 kind, name = patkind(p, None)
262 kind, name = patkind(p, None)
263 if kind is None:
263 if kind is None:
264 globbed = glob.glob(name)
264 globbed = glob.glob(name)
265 if globbed:
265 if globbed:
266 ret.extend(globbed)
266 ret.extend(globbed)
267 continue
267 continue
268 # if we couldn't expand the glob, just keep it around
268 # if we couldn't expand the glob, just keep it around
269 ret.append(p)
269 ret.append(p)
270 return ret
270 return ret
271
271
272 def patkind(name, dflt_pat='glob'):
272 def patkind(name, dflt_pat='glob'):
273 """Split a string into an optional pattern kind prefix and the
273 """Split a string into an optional pattern kind prefix and the
274 actual pattern."""
274 actual pattern."""
275 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
275 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
276 if name.startswith(prefix + ':'): return name.split(':', 1)
276 if name.startswith(prefix + ':'): return name.split(':', 1)
277 return dflt_pat, name
277 return dflt_pat, name
278
278
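# Editor's examples of patkind(); note the prefixed form returns the result
# of str.split() (a list), while the default form returns a tuple.
assert patkind('re:.*\\.py$') == ['re', '.*\\.py$']
assert patkind('path:foo/bar') == ['path', 'foo/bar']
assert patkind('foo/bar') == ('glob', 'foo/bar')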
279 def globre(pat, head='^', tail='$'):
279 def globre(pat, head='^', tail='$'):
280 "convert a glob pattern into a regexp"
280 "convert a glob pattern into a regexp"
281 i, n = 0, len(pat)
281 i, n = 0, len(pat)
282 res = ''
282 res = ''
283 group = False
283 group = False
284 def peek(): return i < n and pat[i]
284 def peek(): return i < n and pat[i]
285 while i < n:
285 while i < n:
286 c = pat[i]
286 c = pat[i]
287 i = i+1
287 i = i+1
288 if c == '*':
288 if c == '*':
289 if peek() == '*':
289 if peek() == '*':
290 i += 1
290 i += 1
291 res += '.*'
291 res += '.*'
292 else:
292 else:
293 res += '[^/]*'
293 res += '[^/]*'
294 elif c == '?':
294 elif c == '?':
295 res += '.'
295 res += '.'
296 elif c == '[':
296 elif c == '[':
297 j = i
297 j = i
298 if j < n and pat[j] in '!]':
298 if j < n and pat[j] in '!]':
299 j += 1
299 j += 1
300 while j < n and pat[j] != ']':
300 while j < n and pat[j] != ']':
301 j += 1
301 j += 1
302 if j >= n:
302 if j >= n:
303 res += '\\['
303 res += '\\['
304 else:
304 else:
305 stuff = pat[i:j].replace('\\','\\\\')
305 stuff = pat[i:j].replace('\\','\\\\')
306 i = j + 1
306 i = j + 1
307 if stuff[0] == '!':
307 if stuff[0] == '!':
308 stuff = '^' + stuff[1:]
308 stuff = '^' + stuff[1:]
309 elif stuff[0] == '^':
309 elif stuff[0] == '^':
310 stuff = '\\' + stuff
310 stuff = '\\' + stuff
311 res = '%s[%s]' % (res, stuff)
311 res = '%s[%s]' % (res, stuff)
312 elif c == '{':
312 elif c == '{':
313 group = True
313 group = True
314 res += '(?:'
314 res += '(?:'
315 elif c == '}' and group:
315 elif c == '}' and group:
316 res += ')'
316 res += ')'
317 group = False
317 group = False
318 elif c == ',' and group:
318 elif c == ',' and group:
319 res += '|'
319 res += '|'
320 elif c == '\\':
320 elif c == '\\':
321 p = peek()
321 p = peek()
322 if p:
322 if p:
323 i += 1
323 i += 1
324 res += re.escape(p)
324 res += re.escape(p)
325 else:
325 else:
326 res += re.escape(c)
326 res += re.escape(c)
327 else:
327 else:
328 res += re.escape(c)
328 res += re.escape(c)
329 return head + res + tail
329 return head + res + tail
330
330
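# Editor's examples of globre() output (patterns picked for illustration):
# '*' matches within one path component, '?' matches a single character,
# and a bracket class passes through as a regexp class.
assert globre('*.py') == '^[^/]*\\.py$'
assert globre('foo?', head='', tail='') == 'foo.'
assert globre('[abc]x', head='', tail='') == '[abc]x'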
331 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
331 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
332
332
333 def pathto(root, n1, n2):
333 def pathto(root, n1, n2):
334 '''return the relative path from one place to another.
334 '''return the relative path from one place to another.
335 root should use os.sep to separate directories
335 root should use os.sep to separate directories
336 n1 should use os.sep to separate directories
336 n1 should use os.sep to separate directories
337 n2 should use "/" to separate directories
337 n2 should use "/" to separate directories
338 returns an os.sep-separated path.
338 returns an os.sep-separated path.
339
339
340 If n1 is a relative path, it's assumed it's
340 If n1 is a relative path, it's assumed it's
341 relative to root.
341 relative to root.
342 n2 should always be relative to root.
342 n2 should always be relative to root.
343 '''
343 '''
344 if not n1: return localpath(n2)
344 if not n1: return localpath(n2)
345 if os.path.isabs(n1):
345 if os.path.isabs(n1):
346 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
346 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
347 return os.path.join(root, localpath(n2))
347 return os.path.join(root, localpath(n2))
348 n2 = '/'.join((pconvert(root), n2))
348 n2 = '/'.join((pconvert(root), n2))
349 a, b = n1.split(os.sep), n2.split('/')
349 a, b = n1.split(os.sep), n2.split('/')
350 a.reverse()
350 a.reverse()
351 b.reverse()
351 b.reverse()
352 while a and b and a[-1] == b[-1]:
352 while a and b and a[-1] == b[-1]:
353 a.pop()
353 a.pop()
354 b.pop()
354 b.pop()
355 b.reverse()
355 b.reverse()
356 return os.sep.join((['..'] * len(a)) + b)
356 return os.sep.join((['..'] * len(a)) + b)
357
357
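# Editor's example of pathto() with a relative n1 (POSIX separators assumed,
# hence the os.sep guard): the walk from directory a/b back up to a/c.txt.
if os.sep == '/':
    assert pathto('/repo', 'a/b', 'a/c.txt') == '../c.txt'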
358 def canonpath(root, cwd, myname):
358 def canonpath(root, cwd, myname):
359 """return the canonical path of myname, given cwd and root"""
359 """return the canonical path of myname, given cwd and root"""
360 if root == os.sep:
360 if root == os.sep:
361 rootsep = os.sep
361 rootsep = os.sep
362 elif root.endswith(os.sep):
362 elif root.endswith(os.sep):
363 rootsep = root
363 rootsep = root
364 else:
364 else:
365 rootsep = root + os.sep
365 rootsep = root + os.sep
366 name = myname
366 name = myname
367 if not os.path.isabs(name):
367 if not os.path.isabs(name):
368 name = os.path.join(root, cwd, name)
368 name = os.path.join(root, cwd, name)
369 name = os.path.normpath(name)
369 name = os.path.normpath(name)
370 audit_path = path_auditor(root)
370 audit_path = path_auditor(root)
371 if name != rootsep and name.startswith(rootsep):
371 if name != rootsep and name.startswith(rootsep):
372 name = name[len(rootsep):]
372 name = name[len(rootsep):]
373 audit_path(name)
373 audit_path(name)
374 return pconvert(name)
374 return pconvert(name)
375 elif name == root:
375 elif name == root:
376 return ''
376 return ''
377 else:
377 else:
378 # Determine whether `name' is in the hierarchy at or beneath `root',
378 # Determine whether `name' is in the hierarchy at or beneath `root',
379 # by iterating name=dirname(name) until that causes no change (can't
379 # by iterating name=dirname(name) until that causes no change (can't
380 # check name == '/', because that doesn't work on windows). For each
380 # check name == '/', because that doesn't work on windows). For each
381 # `name', compare dev/inode numbers. If they match, the list `rel'
381 # `name', compare dev/inode numbers. If they match, the list `rel'
382 # holds the reversed list of components making up the relative file
382 # holds the reversed list of components making up the relative file
383 # name we want.
383 # name we want.
384 root_st = os.stat(root)
384 root_st = os.stat(root)
385 rel = []
385 rel = []
386 while True:
386 while True:
387 try:
387 try:
388 name_st = os.stat(name)
388 name_st = os.stat(name)
389 except OSError:
389 except OSError:
390 break
390 break
391 if samestat(name_st, root_st):
391 if samestat(name_st, root_st):
392 if not rel:
392 if not rel:
393 # name was actually the same as root (maybe a symlink)
393 # name was actually the same as root (maybe a symlink)
394 return ''
394 return ''
395 rel.reverse()
395 rel.reverse()
396 name = os.path.join(*rel)
396 name = os.path.join(*rel)
397 audit_path(name)
397 audit_path(name)
398 return pconvert(name)
398 return pconvert(name)
399 dirname, basename = os.path.split(name)
399 dirname, basename = os.path.split(name)
400 rel.append(basename)
400 rel.append(basename)
401 if dirname == name:
401 if dirname == name:
402 break
402 break
403 name = dirname
403 name = dirname
404
404
405 raise Abort('%s not under root' % myname)
405 raise Abort('%s not under root' % myname)
406
406
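# Editor's usage sketch for canonpath() above: it maps a cwd-relative name to
# a root-relative, '/'-separated path and refuses names that escape the root.
# The paths are hypothetical; illustrative only.
if False:
    canonpath('/repo', 'src', 'foo.c')       # -> 'src/foo.c'
    canonpath('/repo', 'src', '../README')   # -> 'README'
    canonpath('/repo', '', '/etc/passwd')    # raises Abort: not under root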
407 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
407 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
408 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
408 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
409
409
410 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
410 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
411 globbed=False, default=None):
411 globbed=False, default=None):
412 default = default or 'relpath'
412 default = default or 'relpath'
413 if default == 'relpath' and not globbed:
413 if default == 'relpath' and not globbed:
414 names = expand_glob(names)
414 names = expand_glob(names)
415 return _matcher(canonroot, cwd, names, inc, exc, default, src)
415 return _matcher(canonroot, cwd, names, inc, exc, default, src)
416
416
417 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
417 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
418 """build a function to match a set of file patterns
418 """build a function to match a set of file patterns
419
419
420 arguments:
420 arguments:
421 canonroot - the canonical root of the tree you're matching against
421 canonroot - the canonical root of the tree you're matching against
422 cwd - the current working directory, if relevant
422 cwd - the current working directory, if relevant
423 names - patterns to find
423 names - patterns to find
424 inc - patterns to include
424 inc - patterns to include
425 exc - patterns to exclude
425 exc - patterns to exclude
426 dflt_pat - if a pattern in names has no explicit type, assume this one
426 dflt_pat - if a pattern in names has no explicit type, assume this one
427 src - where these patterns came from (e.g. .hgignore)
427 src - where these patterns came from (e.g. .hgignore)
428
428
429 a pattern is one of:
429 a pattern is one of:
430 'glob:<glob>' - a glob relative to cwd
430 'glob:<glob>' - a glob relative to cwd
431 're:<regexp>' - a regular expression
431 're:<regexp>' - a regular expression
432 'path:<path>' - a path relative to canonroot
432 'path:<path>' - a path relative to canonroot
433 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
433 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
434 'relpath:<path>' - a path relative to cwd
434 'relpath:<path>' - a path relative to cwd
435 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
435 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
436 '<something>' - one of the cases above, selected by the dflt_pat argument
436 '<something>' - one of the cases above, selected by the dflt_pat argument
437
437
438 returns:
438 returns:
439 a 3-tuple containing
439 a 3-tuple containing
440 - list of roots (places where one should start a recursive walk of the fs);
440 - list of roots (places where one should start a recursive walk of the fs);
441 this often matches the explicit non-pattern names passed in, but also
441 this often matches the explicit non-pattern names passed in, but also
442 includes the initial part of glob: patterns that has no glob characters
442 includes the initial part of glob: patterns that has no glob characters
443 - a bool match(filename) function
443 - a bool match(filename) function
444 - a bool indicating if any patterns were passed in
444 - a bool indicating if any patterns were passed in
445 """
445 """
446
446
447 # a common case: no patterns at all
447 # a common case: no patterns at all
448 if not names and not inc and not exc:
448 if not names and not inc and not exc:
449 return [], always, False
449 return [], always, False
450
450
451 def contains_glob(name):
451 def contains_glob(name):
452 for c in name:
452 for c in name:
453 if c in _globchars: return True
453 if c in _globchars: return True
454 return False
454 return False
455
455
456 def regex(kind, name, tail):
456 def regex(kind, name, tail):
457 '''convert a pattern into a regular expression'''
457 '''convert a pattern into a regular expression'''
458 if not name:
458 if not name:
459 return ''
459 return ''
460 if kind == 're':
460 if kind == 're':
461 return name
461 return name
462 elif kind == 'path':
462 elif kind == 'path':
463 return '^' + re.escape(name) + '(?:/|$)'
463 return '^' + re.escape(name) + '(?:/|$)'
464 elif kind == 'relglob':
464 elif kind == 'relglob':
465 return globre(name, '(?:|.*/)', tail)
465 return globre(name, '(?:|.*/)', tail)
466 elif kind == 'relpath':
466 elif kind == 'relpath':
467 return re.escape(name) + '(?:/|$)'
467 return re.escape(name) + '(?:/|$)'
468 elif kind == 'relre':
468 elif kind == 'relre':
469 if name.startswith('^'):
469 if name.startswith('^'):
470 return name
470 return name
471 return '.*' + name
471 return '.*' + name
472 return globre(name, '', tail)
472 return globre(name, '', tail)
473
473
474 def matchfn(pats, tail):
474 def matchfn(pats, tail):
475 """build a matching function from a set of patterns"""
475 """build a matching function from a set of patterns"""
476 if not pats:
476 if not pats:
477 return
477 return
478 try:
478 try:
479 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
479 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
480 return re.compile(pat).match
480 return re.compile(pat).match
481 except OverflowError:
481 except OverflowError:
482 # We're using a Python with a tiny regex engine and we
482 # We're using a Python with a tiny regex engine and we
483 # made it explode, so we'll divide the pattern list in two
483 # made it explode, so we'll divide the pattern list in two
484 # until it works
484 # until it works
485 l = len(pats)
485 l = len(pats)
486 if l < 2:
486 if l < 2:
487 raise
487 raise
488 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
488 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
489 return lambda s: a(s) or b(s)
489 return lambda s: a(s) or b(s)
490 except re.error:
490 except re.error:
491 for k, p in pats:
491 for k, p in pats:
492 try:
492 try:
493 re.compile('(?:%s)' % regex(k, p, tail))
493 re.compile('(?:%s)' % regex(k, p, tail))
494 except re.error:
494 except re.error:
495 if src:
495 if src:
496 raise Abort("%s: invalid pattern (%s): %s" %
496 raise Abort("%s: invalid pattern (%s): %s" %
497 (src, k, p))
497 (src, k, p))
498 else:
498 else:
499 raise Abort("invalid pattern (%s): %s" % (k, p))
499 raise Abort("invalid pattern (%s): %s" % (k, p))
500 raise Abort("invalid pattern")
500 raise Abort("invalid pattern")
501
501
502 def globprefix(pat):
502 def globprefix(pat):
503 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
503 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
504 root = []
504 root = []
505 for p in pat.split('/'):
505 for p in pat.split('/'):
506 if contains_glob(p): break
506 if contains_glob(p): break
507 root.append(p)
507 root.append(p)
508 return '/'.join(root) or '.'
508 return '/'.join(root) or '.'
509
509
510 def normalizepats(names, default):
510 def normalizepats(names, default):
511 pats = []
511 pats = []
512 roots = []
512 roots = []
513 anypats = False
513 anypats = False
514 for kind, name in [patkind(p, default) for p in names]:
514 for kind, name in [patkind(p, default) for p in names]:
515 if kind in ('glob', 'relpath'):
515 if kind in ('glob', 'relpath'):
516 name = canonpath(canonroot, cwd, name)
516 name = canonpath(canonroot, cwd, name)
517 elif kind in ('relglob', 'path'):
517 elif kind in ('relglob', 'path'):
518 name = normpath(name)
518 name = normpath(name)
519
519
520 pats.append((kind, name))
520 pats.append((kind, name))
521
521
522 if kind in ('glob', 're', 'relglob', 'relre'):
522 if kind in ('glob', 're', 'relglob', 'relre'):
523 anypats = True
523 anypats = True
524
524
525 if kind == 'glob':
525 if kind == 'glob':
526 root = globprefix(name)
526 root = globprefix(name)
527 roots.append(root)
527 roots.append(root)
528 elif kind in ('relpath', 'path'):
528 elif kind in ('relpath', 'path'):
529 roots.append(name or '.')
529 roots.append(name or '.')
530 elif kind == 'relglob':
530 elif kind == 'relglob':
531 roots.append('.')
531 roots.append('.')
532 return roots, pats, anypats
532 return roots, pats, anypats
533
533
534 roots, pats, anypats = normalizepats(names, dflt_pat)
534 roots, pats, anypats = normalizepats(names, dflt_pat)
535
535
536 patmatch = matchfn(pats, '$') or always
536 patmatch = matchfn(pats, '$') or always
537 incmatch = always
537 incmatch = always
538 if inc:
538 if inc:
539 dummy, inckinds, dummy = normalizepats(inc, 'glob')
539 dummy, inckinds, dummy = normalizepats(inc, 'glob')
540 incmatch = matchfn(inckinds, '(?:/|$)')
540 incmatch = matchfn(inckinds, '(?:/|$)')
541 excmatch = lambda fn: False
541 excmatch = lambda fn: False
542 if exc:
542 if exc:
543 dummy, exckinds, dummy = normalizepats(exc, 'glob')
543 dummy, exckinds, dummy = normalizepats(exc, 'glob')
544 excmatch = matchfn(exckinds, '(?:/|$)')
544 excmatch = matchfn(exckinds, '(?:/|$)')
545
545
546 if not names and inc and not exc:
546 if not names and inc and not exc:
547 # common case: hgignore patterns
547 # common case: hgignore patterns
548 match = incmatch
548 match = incmatch
549 else:
549 else:
550 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
550 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
551
551
552 return (roots, match, (inc or exc or anypats) and True)
552 return (roots, match, (inc or exc or anypats) and True)
553
553
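# Editor's usage sketch: matcher() above is _matcher() with the 'glob'
# default. Given the pattern grammar in _matcher's docstring, a call with a
# hypothetical repo root is expected to behave roughly like this:
if False:
    roots, match, anypats = matcher('/repo', '', ['glob:src/*.py'], [], [])
    # roots   -> ['src']   (the glob-free prefix, per globprefix())
    # anypats -> True      (a glob pattern was supplied)
    # match('src/a.py') is truthy, match('doc/a.txt') is falsy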
554 _hgexecutable = None
554 _hgexecutable = None
555
555
556 def hgexecutable():
556 def hgexecutable():
557 """return location of the 'hg' executable.
557 """return location of the 'hg' executable.
558
558
559 Defaults to $HG or 'hg' in the search path.
559 Defaults to $HG or 'hg' in the search path.
560 """
560 """
561 if _hgexecutable is None:
561 if _hgexecutable is None:
562 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
562 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
563 return _hgexecutable
563 return _hgexecutable
564
564
565 def set_hgexecutable(path):
565 def set_hgexecutable(path):
566 """set location of the 'hg' executable"""
566 """set location of the 'hg' executable"""
567 global _hgexecutable
567 global _hgexecutable
568 _hgexecutable = path
568 _hgexecutable = path
569
569
570 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
570 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
571 '''enhanced shell command execution.
571 '''enhanced shell command execution.
572 run with environment maybe modified, maybe in different dir.
572 run with environment maybe modified, maybe in different dir.
573
573
574 if command fails and onerr is None, return status. if onerr is a ui
574 if command fails and onerr is None, return status. if onerr is a ui
575 object, print error message and return status, else raise onerr as an
575 object, print error message and return status, else raise onerr as an
576 exception.'''
576 exception.'''
577 def py2shell(val):
577 def py2shell(val):
578 'convert python object into string that is useful to shell'
578 'convert python object into string that is useful to shell'
579 if val in (None, False):
579 if val in (None, False):
580 return '0'
580 return '0'
581 if val == True:
581 if val == True:
582 return '1'
582 return '1'
583 return str(val)
583 return str(val)
584 oldenv = {}
584 oldenv = {}
585 for k in environ:
585 for k in environ:
586 oldenv[k] = os.environ.get(k)
586 oldenv[k] = os.environ.get(k)
587 if cwd is not None:
587 if cwd is not None:
588 oldcwd = os.getcwd()
588 oldcwd = os.getcwd()
589 origcmd = cmd
589 origcmd = cmd
590 if os.name == 'nt':
590 if os.name == 'nt':
591 cmd = '"%s"' % cmd
591 cmd = '"%s"' % cmd
592 try:
592 try:
593 for k, v in environ.iteritems():
593 for k, v in environ.iteritems():
594 os.environ[k] = py2shell(v)
594 os.environ[k] = py2shell(v)
595 os.environ['HG'] = hgexecutable()
595 os.environ['HG'] = hgexecutable()
596 if cwd is not None and oldcwd != cwd:
596 if cwd is not None and oldcwd != cwd:
597 os.chdir(cwd)
597 os.chdir(cwd)
598 rc = os.system(cmd)
598 rc = os.system(cmd)
599 if sys.platform == 'OpenVMS' and rc & 1:
599 if sys.platform == 'OpenVMS' and rc & 1:
600 rc = 0
600 rc = 0
601 if rc and onerr:
601 if rc and onerr:
602 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
602 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
603 explain_exit(rc)[0])
603 explain_exit(rc)[0])
604 if errprefix:
604 if errprefix:
605 errmsg = '%s: %s' % (errprefix, errmsg)
605 errmsg = '%s: %s' % (errprefix, errmsg)
606 try:
606 try:
607 onerr.warn(errmsg + '\n')
607 onerr.warn(errmsg + '\n')
608 except AttributeError:
608 except AttributeError:
609 raise onerr(errmsg)
609 raise onerr(errmsg)
610 return rc
610 return rc
611 finally:
611 finally:
612 for k, v in oldenv.iteritems():
612 for k, v in oldenv.iteritems():
613 if v is None:
613 if v is None:
614 del os.environ[k]
614 del os.environ[k]
615 else:
615 else:
616 os.environ[k] = v
616 os.environ[k] = v
617 if cwd is not None and oldcwd != cwd:
617 if cwd is not None and oldcwd != cwd:
618 os.chdir(oldcwd)
618 os.chdir(oldcwd)
619
619
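# Editor's usage sketch for system() above (hypothetical command and paths):
# run in another directory with extra environment variables; with onerr set
# to an exception class such as Abort, a non-zero exit raises instead of
# merely returning the status.
if False:
    rc = system('make all', environ={'HGUSER': 'test'}, cwd='/tmp/build',
                onerr=Abort, errprefix='build failed')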
620 # os.path.lexists is not available on python2.3
620 # os.path.lexists is not available on python2.3
621 def lexists(filename):
621 def lexists(filename):
622 "test whether a file with this name exists. does not follow symlinks"
622 "test whether a file with this name exists. does not follow symlinks"
623 try:
623 try:
624 os.lstat(filename)
624 os.lstat(filename)
625 except:
625 except:
626 return False
626 return False
627 return True
627 return True
628
628
629 def rename(src, dst):
629 def rename(src, dst):
630 """forcibly rename a file"""
630 """forcibly rename a file"""
631 try:
631 try:
632 os.rename(src, dst)
632 os.rename(src, dst)
633 except OSError, err: # FIXME: check err (EEXIST ?)
633 except OSError, err: # FIXME: check err (EEXIST ?)
634 # on windows, rename to existing file is not allowed, so we
634 # on windows, rename to existing file is not allowed, so we
635 # must delete destination first. but if file is open, unlink
635 # must delete destination first. but if file is open, unlink
636 # schedules it for delete but does not delete it. rename
636 # schedules it for delete but does not delete it. rename
637 # happens immediately even for open files, so we create
637 # happens immediately even for open files, so we create
638 # temporary file, delete it, rename destination to that name,
638 # temporary file, delete it, rename destination to that name,
639 # then delete that. then rename is safe to do.
639 # then delete that. then rename is safe to do.
640 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
640 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
641 os.close(fd)
641 os.close(fd)
642 os.unlink(temp)
642 os.unlink(temp)
643 os.rename(dst, temp)
643 os.rename(dst, temp)
644 os.unlink(temp)
644 os.unlink(temp)
645 os.rename(src, dst)
645 os.rename(src, dst)
646
646
647 def unlink(f):
647 def unlink(f):
648 """unlink and remove the directory if it is empty"""
648 """unlink and remove the directory if it is empty"""
649 os.unlink(f)
649 os.unlink(f)
650 # try removing directories that might now be empty
650 # try removing directories that might now be empty
651 try:
651 try:
652 os.removedirs(os.path.dirname(f))
652 os.removedirs(os.path.dirname(f))
653 except OSError:
653 except OSError:
654 pass
654 pass
655
655
656 def copyfile(src, dest):
656 def copyfile(src, dest):
657 "copy a file, preserving mode"
657 "copy a file, preserving mode"
658 if os.path.islink(src):
658 if os.path.islink(src):
659 try:
659 try:
660 os.unlink(dest)
660 os.unlink(dest)
661 except:
661 except:
662 pass
662 pass
663 os.symlink(os.readlink(src), dest)
663 os.symlink(os.readlink(src), dest)
664 else:
664 else:
665 try:
665 try:
666 shutil.copyfile(src, dest)
666 shutil.copyfile(src, dest)
667 shutil.copymode(src, dest)
667 shutil.copymode(src, dest)
668 except shutil.Error, inst:
668 except shutil.Error, inst:
669 raise Abort(str(inst))
669 raise Abort(str(inst))
670
670
671 def copyfiles(src, dst, hardlink=None):
671 def copyfiles(src, dst, hardlink=None):
672 """Copy a directory tree using hardlinks if possible"""
672 """Copy a directory tree using hardlinks if possible"""
673
673
674 if hardlink is None:
674 if hardlink is None:
675 hardlink = (os.stat(src).st_dev ==
675 hardlink = (os.stat(src).st_dev ==
676 os.stat(os.path.dirname(dst)).st_dev)
676 os.stat(os.path.dirname(dst)).st_dev)
677
677
678 if os.path.isdir(src):
678 if os.path.isdir(src):
679 os.mkdir(dst)
679 os.mkdir(dst)
680 for name, kind in osutil.listdir(src):
680 for name, kind in osutil.listdir(src):
681 srcname = os.path.join(src, name)
681 srcname = os.path.join(src, name)
682 dstname = os.path.join(dst, name)
682 dstname = os.path.join(dst, name)
683 copyfiles(srcname, dstname, hardlink)
683 copyfiles(srcname, dstname, hardlink)
684 else:
684 else:
685 if hardlink:
685 if hardlink:
686 try:
686 try:
687 os_link(src, dst)
687 os_link(src, dst)
688 except (IOError, OSError):
688 except (IOError, OSError):
689 hardlink = False
689 hardlink = False
690 shutil.copy(src, dst)
690 shutil.copy(src, dst)
691 else:
691 else:
692 shutil.copy(src, dst)
692 shutil.copy(src, dst)
693
693
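# Editor's note on copyfiles() above: when hardlink is not forced, it is
# enabled only if the source and the destination's parent directory share a
# device, and it is switched off for the rest of the tree after the first
# failed link attempt. Hypothetical paths; illustrative only.
if False:
    copyfiles('/repo/.hg/store', '/backup/repo-store')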
694 class path_auditor(object):
694 class path_auditor(object):
695 '''ensure that a filesystem path contains no banned components.
695 '''ensure that a filesystem path contains no banned components.
696 the following properties of a path are checked:
696 the following properties of a path are checked:
697
697
698 - under top-level .hg
698 - under top-level .hg
699 - starts at the root of a windows drive
699 - starts at the root of a windows drive
700 - contains ".."
700 - contains ".."
701 - traverses a symlink (e.g. a/symlink_here/b)
701 - traverses a symlink (e.g. a/symlink_here/b)
702 - inside a nested repository'''
702 - inside a nested repository'''
703
703
704 def __init__(self, root):
704 def __init__(self, root):
705 self.audited = set()
705 self.audited = set()
706 self.auditeddir = set()
706 self.auditeddir = set()
707 self.root = root
707 self.root = root
708
708
709 def __call__(self, path):
709 def __call__(self, path):
710 if path in self.audited:
710 if path in self.audited:
711 return
711 return
712 normpath = os.path.normcase(path)
712 normpath = os.path.normcase(path)
713 parts = normpath.split(os.sep)
713 parts = normpath.split(os.sep)
714 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
714 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
715 or os.pardir in parts):
715 or os.pardir in parts):
716 raise Abort(_("path contains illegal component: %s") % path)
716 raise Abort(_("path contains illegal component: %s") % path)
717 def check(prefix):
717 def check(prefix):
718 curpath = os.path.join(self.root, prefix)
718 curpath = os.path.join(self.root, prefix)
719 try:
719 try:
720 st = os.lstat(curpath)
720 st = os.lstat(curpath)
721 except OSError, err:
721 except OSError, err:
722 # EINVAL can be raised as invalid path syntax under win32.
722 # EINVAL can be raised as invalid path syntax under win32.
723 # These errors must be ignored so that patterns can be checked too.
723 # These errors must be ignored so that patterns can be checked too.
724 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
724 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
725 raise
725 raise
726 else:
726 else:
727 if stat.S_ISLNK(st.st_mode):
727 if stat.S_ISLNK(st.st_mode):
728 raise Abort(_('path %r traverses symbolic link %r') %
728 raise Abort(_('path %r traverses symbolic link %r') %
729 (path, prefix))
729 (path, prefix))
730 elif (stat.S_ISDIR(st.st_mode) and
730 elif (stat.S_ISDIR(st.st_mode) and
731 os.path.isdir(os.path.join(curpath, '.hg'))):
731 os.path.isdir(os.path.join(curpath, '.hg'))):
732 raise Abort(_('path %r is inside repo %r') %
732 raise Abort(_('path %r is inside repo %r') %
733 (path, prefix))
733 (path, prefix))
734
734
735 prefixes = []
735 prefixes = []
736 for c in strutil.rfindall(normpath, os.sep):
736 for c in strutil.rfindall(normpath, os.sep):
737 prefix = normpath[:c]
737 prefix = normpath[:c]
738 if prefix in self.auditeddir:
738 if prefix in self.auditeddir:
739 break
739 break
740 check(prefix)
740 check(prefix)
741 prefixes.append(prefix)
741 prefixes.append(prefix)
742
742
743 self.audited.add(path)
743 self.audited.add(path)
744 # only add prefixes to the cache after checking everything: we don't
744 # only add prefixes to the cache after checking everything: we don't
745 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
745 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
746 self.auditeddir.update(prefixes)
746 self.auditeddir.update(prefixes)
747
747
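# Editor's usage sketch for path_auditor above (hypothetical repo root):
if False:
    audit = path_auditor('/repo')
    audit('src/foo.c')    # accepted (and cached in self.audited)
    audit('.hg/hgrc')     # raises Abort: under the top-level .hg
    audit('../escape')    # raises Abort: contains '..'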
748 def _makelock_file(info, pathname):
748 def _makelock_file(info, pathname):
749 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
749 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
750 os.write(ld, info)
750 os.write(ld, info)
751 os.close(ld)
751 os.close(ld)
752
752
753 def _readlock_file(pathname):
753 def _readlock_file(pathname):
754 return posixfile(pathname).read()
754 return posixfile(pathname).read()
755
755
756 def nlinks(pathname):
756 def nlinks(pathname):
757 """Return number of hardlinks for the given file."""
757 """Return number of hardlinks for the given file."""
758 return os.lstat(pathname).st_nlink
758 return os.lstat(pathname).st_nlink
759
759
760 if hasattr(os, 'link'):
760 if hasattr(os, 'link'):
761 os_link = os.link
761 os_link = os.link
762 else:
762 else:
763 def os_link(src, dst):
763 def os_link(src, dst):
764 raise OSError(0, _("Hardlinks not supported"))
764 raise OSError(0, _("Hardlinks not supported"))
765
765
766 def fstat(fp):
766 def fstat(fp):
767 '''stat file object that may not have fileno method.'''
767 '''stat file object that may not have fileno method.'''
768 try:
768 try:
769 return os.fstat(fp.fileno())
769 return os.fstat(fp.fileno())
770 except AttributeError:
770 except AttributeError:
771 return os.stat(fp.name)
771 return os.stat(fp.name)
772
772
773 posixfile = file
773 posixfile = file
774
774
775 def is_win_9x():
775 def is_win_9x():
776 '''return true if run on windows 95, 98 or me.'''
776 '''return true if run on windows 95, 98 or me.'''
777 try:
777 try:
778 return sys.getwindowsversion()[3] == 1
778 return sys.getwindowsversion()[3] == 1
779 except AttributeError:
779 except AttributeError:
780 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
780 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
781
781
782 getuser_fallback = None
782 getuser_fallback = None
783
783
784 def getuser():
784 def getuser():
785 '''return name of current user'''
785 '''return name of current user'''
786 try:
786 try:
787 return getpass.getuser()
787 return getpass.getuser()
788 except ImportError:
788 except ImportError:
789 # import of pwd will fail on windows - try fallback
789 # import of pwd will fail on windows - try fallback
790 if getuser_fallback:
790 if getuser_fallback:
791 return getuser_fallback()
791 return getuser_fallback()
792 # raised if win32api not available
792 # raised if win32api not available
793 raise Abort(_('user name not available - set USERNAME '
793 raise Abort(_('user name not available - set USERNAME '
794 'environment variable'))
794 'environment variable'))
795
795
796 def username(uid=None):
796 def username(uid=None):
797 """Return the name of the user with the given uid.
797 """Return the name of the user with the given uid.
798
798
799 If uid is None, return the name of the current user."""
799 If uid is None, return the name of the current user."""
800 try:
800 try:
801 import pwd
801 import pwd
802 if uid is None:
802 if uid is None:
803 uid = os.getuid()
803 uid = os.getuid()
804 try:
804 try:
805 return pwd.getpwuid(uid)[0]
805 return pwd.getpwuid(uid)[0]
806 except KeyError:
806 except KeyError:
807 return str(uid)
807 return str(uid)
808 except ImportError:
808 except ImportError:
809 return None
809 return None
810
810
811 def groupname(gid=None):
811 def groupname(gid=None):
812 """Return the name of the group with the given gid.
812 """Return the name of the group with the given gid.
813
813
814 If gid is None, return the name of the current group."""
814 If gid is None, return the name of the current group."""
815 try:
815 try:
816 import grp
816 import grp
817 if gid is None:
817 if gid is None:
818 gid = os.getgid()
818 gid = os.getgid()
819 try:
819 try:
820 return grp.getgrgid(gid)[0]
820 return grp.getgrgid(gid)[0]
821 except KeyError:
821 except KeyError:
822 return str(gid)
822 return str(gid)
823 except ImportError:
823 except ImportError:
824 return None
824 return None
825
825
826 # File system features
826 # File system features
827
827
828 def checkfolding(path):
828 def checkfolding(path):
829 """
829 """
830 Check whether the given path is on a case-sensitive filesystem
830 Check whether the given path is on a case-sensitive filesystem
831
831
832 Requires a path (like /foo/.hg) ending with a foldable final
832 Requires a path (like /foo/.hg) ending with a foldable final
833 directory component.
833 directory component.
834 """
834 """
835 s1 = os.stat(path)
835 s1 = os.stat(path)
836 d, b = os.path.split(path)
836 d, b = os.path.split(path)
837 p2 = os.path.join(d, b.upper())
837 p2 = os.path.join(d, b.upper())
838 if path == p2:
838 if path == p2:
839 p2 = os.path.join(d, b.lower())
839 p2 = os.path.join(d, b.lower())
840 try:
840 try:
841 s2 = os.stat(p2)
841 s2 = os.stat(p2)
842 if s2 == s1:
842 if s2 == s1:
843 return False
843 return False
844 return True
844 return True
845 except:
845 except:
846 return True
846 return True
847
847
848 def checkexec(path):
848 def checkexec(path):
849 """
849 """
850 Check whether the given path is on a filesystem with UNIX-like exec flags
850 Check whether the given path is on a filesystem with UNIX-like exec flags
851
851
852 Requires a directory (like /foo/.hg)
852 Requires a directory (like /foo/.hg)
853 """
853 """
854 try:
854 try:
855 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
855 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
856 fh, fn = tempfile.mkstemp("", "", path)
856 fh, fn = tempfile.mkstemp("", "", path)
857 os.close(fh)
857 os.close(fh)
858 m = os.stat(fn).st_mode
858 m = os.stat(fn).st_mode
859 # VFAT on Linux can flip mode, but the change doesn't persist across a FS remount.
859 # VFAT on Linux can flip mode, but the change doesn't persist across a FS remount.
860 # frequently we can detect it because new files are created with the exec bit on.
860 # frequently we can detect it because new files are created with the exec bit on.
861 new_file_has_exec = m & EXECFLAGS
861 new_file_has_exec = m & EXECFLAGS
862 os.chmod(fn, m ^ EXECFLAGS)
862 os.chmod(fn, m ^ EXECFLAGS)
863 exec_flags_cannot_flip = (os.stat(fn).st_mode == m)
863 exec_flags_cannot_flip = (os.stat(fn).st_mode == m)
864 os.unlink(fn)
864 os.unlink(fn)
865 except (IOError,OSError):
865 except (IOError,OSError):
866 # we don't care, the user probably won't be able to commit anyway
866 # we don't care, the user probably won't be able to commit anyway
867 return False
867 return False
868 return not (new_file_has_exec or exec_flags_cannot_flip)
868 return not (new_file_has_exec or exec_flags_cannot_flip)
869
869
870 def execfunc(path, fallback):
870 def execfunc(path, fallback):
871 '''return an is_exec() function with default to fallback'''
871 '''return an is_exec() function with default to fallback'''
872 if checkexec(path):
872 if checkexec(path):
873 return lambda x: is_exec(os.path.join(path, x))
873 return lambda x: is_exec(os.path.join(path, x))
874 return fallback
874 return fallback
875
875
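# Editor's usage sketch: execfunc() above only consults the filesystem when
# checkexec() says the exec bit is honoured there; otherwise every lookup
# goes to the caller-supplied fallback. Hypothetical root; illustrative only.
if False:
    is_exec_fn = execfunc('/repo', lambda name: False)
    is_exec_fn('bin/script.sh')   # filesystem answer, or False on e.g. VFAT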
876 def checklink(path):
876 def checklink(path):
877 """check whether the given path is on a symlink-capable filesystem"""
877 """check whether the given path is on a symlink-capable filesystem"""
878 # mktemp is not racy because symlink creation will fail if the
878 # mktemp is not racy because symlink creation will fail if the
879 # file already exists
879 # file already exists
880 name = tempfile.mktemp(dir=path)
880 name = tempfile.mktemp(dir=path)
881 try:
881 try:
882 os.symlink(".", name)
882 os.symlink(".", name)
883 os.unlink(name)
883 os.unlink(name)
884 return True
884 return True
885 except (OSError, AttributeError):
885 except (OSError, AttributeError):
886 return False
886 return False
887
887
888 def linkfunc(path, fallback):
888 def linkfunc(path, fallback):
889 '''return an is_link() function with default to fallback'''
889 '''return an is_link() function with default to fallback'''
890 if checklink(path):
890 if checklink(path):
891 return lambda x: os.path.islink(os.path.join(path, x))
891 return lambda x: os.path.islink(os.path.join(path, x))
892 return fallback
892 return fallback
893
893
894 _umask = os.umask(0)
894 _umask = os.umask(0)
895 os.umask(_umask)
895 os.umask(_umask)
896
896
897 def needbinarypatch():
897 def needbinarypatch():
898 """return True if patches should be applied in binary mode by default."""
898 """return True if patches should be applied in binary mode by default."""
899 return os.name == 'nt'
899 return os.name == 'nt'
900
900
901 # Platform specific variants
901 # Platform specific variants
902 if os.name == 'nt':
902 if os.name == 'nt':
903 import msvcrt
903 import msvcrt
904 nulldev = 'NUL:'
904 nulldev = 'NUL:'
905
905
906 class winstdout:
906 class winstdout:
907 '''stdout on windows misbehaves if sent through a pipe'''
907 '''stdout on windows misbehaves if sent through a pipe'''
908
908
909 def __init__(self, fp):
909 def __init__(self, fp):
910 self.fp = fp
910 self.fp = fp
911
911
912 def __getattr__(self, key):
912 def __getattr__(self, key):
913 return getattr(self.fp, key)
913 return getattr(self.fp, key)
914
914
915 def close(self):
915 def close(self):
916 try:
916 try:
917 self.fp.close()
917 self.fp.close()
918 except: pass
918 except: pass
919
919
920 def write(self, s):
920 def write(self, s):
921 try:
921 try:
922 return self.fp.write(s)
922 # This is a workaround for the "Not enough space" error raised when
923 # writing a large amount of data to the console.
924 limit = 16000
925 l = len(s)
926 start = 0
927 while start < l:
928 end = start + limit
929 self.fp.write(s[start:end])
930 start = end
923 except IOError, inst:
931 except IOError, inst:
924 if inst.errno != 0: raise
932 if inst.errno != 0: raise
925 self.close()
933 self.close()
926 raise IOError(errno.EPIPE, 'Broken pipe')
934 raise IOError(errno.EPIPE, 'Broken pipe')
927
935
928 def flush(self):
936 def flush(self):
929 try:
937 try:
930 return self.fp.flush()
938 return self.fp.flush()
931 except IOError, inst:
939 except IOError, inst:
932 if inst.errno != errno.EINVAL: raise
940 if inst.errno != errno.EINVAL: raise
933 self.close()
941 self.close()
934 raise IOError(errno.EPIPE, 'Broken pipe')
942 raise IOError(errno.EPIPE, 'Broken pipe')
935
943
936 sys.stdout = winstdout(sys.stdout)
944 sys.stdout = winstdout(sys.stdout)
937
945
938 def system_rcpath():
946 def system_rcpath():
939 try:
947 try:
940 return system_rcpath_win32()
948 return system_rcpath_win32()
941 except:
949 except:
942 return [r'c:\mercurial\mercurial.ini']
950 return [r'c:\mercurial\mercurial.ini']
943
951
944 def user_rcpath():
952 def user_rcpath():
945 '''return os-specific hgrc search path to the user dir'''
953 '''return os-specific hgrc search path to the user dir'''
946 try:
954 try:
947 userrc = user_rcpath_win32()
955 userrc = user_rcpath_win32()
948 except:
956 except:
949 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
957 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
950 path = [userrc]
958 path = [userrc]
951 userprofile = os.environ.get('USERPROFILE')
959 userprofile = os.environ.get('USERPROFILE')
952 if userprofile:
960 if userprofile:
953 path.append(os.path.join(userprofile, 'mercurial.ini'))
961 path.append(os.path.join(userprofile, 'mercurial.ini'))
954 return path
962 return path
955
963
956 def parse_patch_output(output_line):
964 def parse_patch_output(output_line):
957 """parses the output produced by patch and returns the file name"""
965 """parses the output produced by patch and returns the file name"""
958 pf = output_line[14:]
966 pf = output_line[14:]
959 if pf[0] == '`':
967 if pf[0] == '`':
960 pf = pf[1:-1] # Remove the quotes
968 pf = pf[1:-1] # Remove the quotes
961 return pf
969 return pf
962
970
963 def sshargs(sshcmd, host, user, port):
971 def sshargs(sshcmd, host, user, port):
964 '''Build argument list for ssh or Plink'''
972 '''Build argument list for ssh or Plink'''
965 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
973 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
966 args = user and ("%s@%s" % (user, host)) or host
974 args = user and ("%s@%s" % (user, host)) or host
967 return port and ("%s %s %s" % (args, pflag, port)) or args
975 return port and ("%s %s %s" % (args, pflag, port)) or args
968
976
969 def testpid(pid):
977 def testpid(pid):
970 '''return False if pid dead, True if running or not known'''
978 '''return False if pid dead, True if running or not known'''
971 return True
979 return True
972
980
973 def set_exec(f, mode):
981 def set_exec(f, mode):
974 pass
982 pass
975
983
976 def set_link(f, mode):
984 def set_link(f, mode):
977 pass
985 pass
978
986
979 def set_binary(fd):
987 def set_binary(fd):
980 msvcrt.setmode(fd.fileno(), os.O_BINARY)
988 msvcrt.setmode(fd.fileno(), os.O_BINARY)
981
989
982 def pconvert(path):
990 def pconvert(path):
983 return path.replace("\\", "/")
991 return path.replace("\\", "/")
984
992
985 def localpath(path):
993 def localpath(path):
986 return path.replace('/', '\\')
994 return path.replace('/', '\\')
987
995
988 def normpath(path):
996 def normpath(path):
989 return pconvert(os.path.normpath(path))
997 return pconvert(os.path.normpath(path))
990
998
991 makelock = _makelock_file
999 makelock = _makelock_file
992 readlock = _readlock_file
1000 readlock = _readlock_file
993
1001
994 def samestat(s1, s2):
1002 def samestat(s1, s2):
995 return False
1003 return False
996
1004
997 # A sequence of backslashes is special iff it precedes a double quote:
1005 # A sequence of backslashes is special iff it precedes a double quote:
998 # - if there's an even number of backslashes, the double quote is not
1006 # - if there's an even number of backslashes, the double quote is not
999 # quoted (i.e. it ends the quoted region)
1007 # quoted (i.e. it ends the quoted region)
1000 # - if there's an odd number of backslashes, the double quote is quoted
1008 # - if there's an odd number of backslashes, the double quote is quoted
1001 # - in both cases, every pair of backslashes is unquoted into a single
1009 # - in both cases, every pair of backslashes is unquoted into a single
1002 # backslash
1010 # backslash
1003 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1011 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1004 # So, to quote a string, we must surround it in double quotes, double
1012 # So, to quote a string, we must surround it in double quotes, double
1005 # the number of backslashes that precede double quotes and add another
1013 # the number of backslashes that precede double quotes and add another
1006 # backslash before every double quote (being careful with the double
1014 # backslash before every double quote (being careful with the double
1007 # quote we've appended to the end)
1015 # quote we've appended to the end)
1008 _quotere = None
1016 _quotere = None
1009 def shellquote(s):
1017 def shellquote(s):
1010 global _quotere
1018 global _quotere
1011 if _quotere is None:
1019 if _quotere is None:
1012 _quotere = re.compile(r'(\\*)("|\\$)')
1020 _quotere = re.compile(r'(\\*)("|\\$)')
1013 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1021 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1014
1022
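# Editor's sketch of the quoting rules described above: backslashes that
# precede a double quote are doubled and the quote itself gets one more
# backslash. Results are shown as they would print; illustrative only.
if False:
    print(shellquote('say "hi"'))   # "say \"hi\""
    print(shellquote('a\\"b'))      # "a\\\"b"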
1015 def quotecommand(cmd):
1023 def quotecommand(cmd):
1016 """Build a command string suitable for os.popen* calls."""
1024 """Build a command string suitable for os.popen* calls."""
1017 # The extra quotes are needed because popen* runs the command
1025 # The extra quotes are needed because popen* runs the command
1018 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1026 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1019 return '"' + cmd + '"'
1027 return '"' + cmd + '"'
1020
1028
1021 def popen(command):
1029 def popen(command):
1022 # Work around "popen spawned process may not write to stdout
1030 # Work around "popen spawned process may not write to stdout
1023 # under windows"
1031 # under windows"
1024 # http://bugs.python.org/issue1366
1032 # http://bugs.python.org/issue1366
1025 command += " 2> %s" % nulldev
1033 command += " 2> %s" % nulldev
1026 return os.popen(quotecommand(command))
1034 return os.popen(quotecommand(command))
1027
1035
1028 def explain_exit(code):
1036 def explain_exit(code):
1029 return _("exited with status %d") % code, code
1037 return _("exited with status %d") % code, code
1030
1038
1031 # if you change this stub into a real check, please try to implement the
1039 # if you change this stub into a real check, please try to implement the
1032 # username and groupname functions above, too.
1040 # username and groupname functions above, too.
1033 def isowner(fp, st=None):
1041 def isowner(fp, st=None):
1034 return True
1042 return True
1035
1043
1036 def find_in_path(name, path, default=None):
1044 def find_in_path(name, path, default=None):
1037 '''find name in search path. path can be string (will be split
1045 '''find name in search path. path can be string (will be split
1038 with os.pathsep), or iterable thing that returns strings. if name
1046 with os.pathsep), or iterable thing that returns strings. if name
1039 found, return path to name. else return default. name is looked up
1047 found, return path to name. else return default. name is looked up
1040 using cmd.exe rules, using PATHEXT.'''
1048 using cmd.exe rules, using PATHEXT.'''
1041 if isinstance(path, str):
1049 if isinstance(path, str):
1042 path = path.split(os.pathsep)
1050 path = path.split(os.pathsep)
1043
1051
1044 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1052 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1045 pathext = pathext.lower().split(os.pathsep)
1053 pathext = pathext.lower().split(os.pathsep)
1046 isexec = os.path.splitext(name)[1].lower() in pathext
1054 isexec = os.path.splitext(name)[1].lower() in pathext
1047
1055
1048 for p in path:
1056 for p in path:
1049 p_name = os.path.join(p, name)
1057 p_name = os.path.join(p, name)
1050
1058
1051 if isexec and os.path.exists(p_name):
1059 if isexec and os.path.exists(p_name):
1052 return p_name
1060 return p_name
1053
1061
1054 for ext in pathext:
1062 for ext in pathext:
1055 p_name_ext = p_name + ext
1063 p_name_ext = p_name + ext
1056 if os.path.exists(p_name_ext):
1064 if os.path.exists(p_name_ext):
1057 return p_name_ext
1065 return p_name_ext
1058 return default
1066 return default
1059
1067
1060 def set_signal_handler():
1068 def set_signal_handler():
1061 try:
1069 try:
1062 set_signal_handler_win32()
1070 set_signal_handler_win32()
1063 except NameError:
1071 except NameError:
1064 pass
1072 pass
1065
1073
1066 try:
1074 try:
1067 # override functions with win32 versions if possible
1075 # override functions with win32 versions if possible
1068 from util_win32 import *
1076 from util_win32 import *
1069 if not is_win_9x():
1077 if not is_win_9x():
1070 posixfile = posixfile_nt
1078 posixfile = posixfile_nt
1071 except ImportError:
1079 except ImportError:
1072 pass
1080 pass
1073
1081
1074 else:
1082 else:
1075 nulldev = '/dev/null'
1083 nulldev = '/dev/null'
1076
1084
1077 def rcfiles(path):
1085 def rcfiles(path):
1078 rcs = [os.path.join(path, 'hgrc')]
1086 rcs = [os.path.join(path, 'hgrc')]
1079 rcdir = os.path.join(path, 'hgrc.d')
1087 rcdir = os.path.join(path, 'hgrc.d')
1080 try:
1088 try:
1081 rcs.extend([os.path.join(rcdir, f)
1089 rcs.extend([os.path.join(rcdir, f)
1082 for f, kind in osutil.listdir(rcdir)
1090 for f, kind in osutil.listdir(rcdir)
1083 if f.endswith(".rc")])
1091 if f.endswith(".rc")])
1084 except OSError:
1092 except OSError:
1085 pass
1093 pass
1086 return rcs
1094 return rcs
1087
1095
1088 def system_rcpath():
1096 def system_rcpath():
1089 path = []
1097 path = []
1090 # old mod_python does not set sys.argv
1098 # old mod_python does not set sys.argv
1091 if len(getattr(sys, 'argv', [])) > 0:
1099 if len(getattr(sys, 'argv', [])) > 0:
1092 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1100 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1093 '/../etc/mercurial'))
1101 '/../etc/mercurial'))
1094 path.extend(rcfiles('/etc/mercurial'))
1102 path.extend(rcfiles('/etc/mercurial'))
1095 return path
1103 return path
1096
1104
1097 def user_rcpath():
1105 def user_rcpath():
1098 return [os.path.expanduser('~/.hgrc')]
1106 return [os.path.expanduser('~/.hgrc')]
1099
1107
1100 def parse_patch_output(output_line):
1108 def parse_patch_output(output_line):
1101 """parses the output produced by patch and returns the file name"""
1109 """parses the output produced by patch and returns the file name"""
1102 pf = output_line[14:]
1110 pf = output_line[14:]
1103 if os.sys.platform == 'OpenVMS':
1111 if os.sys.platform == 'OpenVMS':
1104 if pf[0] == '`':
1112 if pf[0] == '`':
1105 pf = pf[1:-1] # Remove the quotes
1113 pf = pf[1:-1] # Remove the quotes
1106 else:
1114 else:
1107 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1115 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1108 pf = pf[1:-1] # Remove the quotes
1116 pf = pf[1:-1] # Remove the quotes
1109 return pf
1117 return pf
1110
1118
1111 def sshargs(sshcmd, host, user, port):
1119 def sshargs(sshcmd, host, user, port):
1112 '''Build argument list for ssh'''
1120 '''Build argument list for ssh'''
1113 args = user and ("%s@%s" % (user, host)) or host
1121 args = user and ("%s@%s" % (user, host)) or host
1114 return port and ("%s -p %s" % (args, port)) or args
1122 return port and ("%s -p %s" % (args, port)) or args
1115
1123
1116 def is_exec(f):
1124 def is_exec(f):
1117 """check whether a file is executable"""
1125 """check whether a file is executable"""
1118 return (os.lstat(f).st_mode & 0100 != 0)
1126 return (os.lstat(f).st_mode & 0100 != 0)
1119
1127
1120 def set_exec(f, mode):
1128 def set_exec(f, mode):
1121 s = os.lstat(f).st_mode
1129 s = os.lstat(f).st_mode
1122 if stat.S_ISLNK(s) or (s & 0100 != 0) == mode:
1130 if stat.S_ISLNK(s) or (s & 0100 != 0) == mode:
1123 return
1131 return
1124 if mode:
1132 if mode:
1125 # Turn on +x for every +r bit when making a file executable
1133 # Turn on +x for every +r bit when making a file executable
1126 # and obey umask.
1134 # and obey umask.
1127 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1135 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1128 else:
1136 else:
1129 os.chmod(f, s & 0666)
1137 os.chmod(f, s & 0666)
1130
1138
1131 def set_link(f, mode):
1139 def set_link(f, mode):
1132 """make a file a symbolic link/regular file
1140 """make a file a symbolic link/regular file
1133
1141
1134 if a file is changed to a link, its contents become the link data
1142 if a file is changed to a link, its contents become the link data
1135 if a link is changed to a file, its link data becomes its contents
1143 if a link is changed to a file, its link data becomes its contents
1136 """
1144 """
1137
1145
1138 m = os.path.islink(f)
1146 m = os.path.islink(f)
1139 if m == bool(mode):
1147 if m == bool(mode):
1140 return
1148 return
1141
1149
1142 if mode: # switch file to link
1150 if mode: # switch file to link
1143 data = file(f).read()
1151 data = file(f).read()
1144 os.unlink(f)
1152 os.unlink(f)
1145 os.symlink(data, f)
1153 os.symlink(data, f)
1146 else:
1154 else:
1147 data = os.readlink(f)
1155 data = os.readlink(f)
1148 os.unlink(f)
1156 os.unlink(f)
1149 file(f, "w").write(data)
1157 file(f, "w").write(data)
1150
1158
1151 def set_binary(fd):
1159 def set_binary(fd):
1152 pass
1160 pass
1153
1161
1154 def pconvert(path):
1162 def pconvert(path):
1155 return path
1163 return path
1156
1164
1157 def localpath(path):
1165 def localpath(path):
1158 return path
1166 return path
1159
1167
1160 normpath = os.path.normpath
1168 normpath = os.path.normpath
1161 samestat = os.path.samestat
1169 samestat = os.path.samestat
1162
1170
1163 def makelock(info, pathname):
1171 def makelock(info, pathname):
1164 try:
1172 try:
1165 os.symlink(info, pathname)
1173 os.symlink(info, pathname)
1166 except OSError, why:
1174 except OSError, why:
1167 if why.errno == errno.EEXIST:
1175 if why.errno == errno.EEXIST:
1168 raise
1176 raise
1169 else:
1177 else:
1170 _makelock_file(info, pathname)
1178 _makelock_file(info, pathname)
1171
1179
1172 def readlock(pathname):
1180 def readlock(pathname):
1173 try:
1181 try:
1174 return os.readlink(pathname)
1182 return os.readlink(pathname)
1175 except OSError, why:
1183 except OSError, why:
1176 if why.errno in (errno.EINVAL, errno.ENOSYS):
1184 if why.errno in (errno.EINVAL, errno.ENOSYS):
1177 return _readlock_file(pathname)
1185 return _readlock_file(pathname)
1178 else:
1186 else:
1179 raise
1187 raise
1180
1188
1181 def shellquote(s):
1189 def shellquote(s):
1182 if os.sys.platform == 'OpenVMS':
1190 if os.sys.platform == 'OpenVMS':
1183 return '"%s"' % s
1191 return '"%s"' % s
1184 else:
1192 else:
1185 return "'%s'" % s.replace("'", "'\\''")
1193 return "'%s'" % s.replace("'", "'\\''")
1186
1194
1187 def quotecommand(cmd):
1195 def quotecommand(cmd):
1188 return cmd
1196 return cmd
1189
1197
1190 def popen(command):
1198 def popen(command):
1191 return os.popen(command)
1199 return os.popen(command)
1192
1200
1193 def testpid(pid):
1201 def testpid(pid):
1194 '''return False if pid dead, True if running or not sure'''
1202 '''return False if pid dead, True if running or not sure'''
1195 if os.sys.platform == 'OpenVMS':
1203 if os.sys.platform == 'OpenVMS':
1196 return True
1204 return True
1197 try:
1205 try:
1198 os.kill(pid, 0)
1206 os.kill(pid, 0)
1199 return True
1207 return True
1200 except OSError, inst:
1208 except OSError, inst:
1201 return inst.errno != errno.ESRCH
1209 return inst.errno != errno.ESRCH
1202
1210
1203 def explain_exit(code):
1211 def explain_exit(code):
1204 """return a 2-tuple (desc, code) describing a process's status"""
1212 """return a 2-tuple (desc, code) describing a process's status"""
1205 if os.WIFEXITED(code):
1213 if os.WIFEXITED(code):
1206 val = os.WEXITSTATUS(code)
1214 val = os.WEXITSTATUS(code)
1207 return _("exited with status %d") % val, val
1215 return _("exited with status %d") % val, val
1208 elif os.WIFSIGNALED(code):
1216 elif os.WIFSIGNALED(code):
1209 val = os.WTERMSIG(code)
1217 val = os.WTERMSIG(code)
1210 return _("killed by signal %d") % val, val
1218 return _("killed by signal %d") % val, val
1211 elif os.WIFSTOPPED(code):
1219 elif os.WIFSTOPPED(code):
1212 val = os.WSTOPSIG(code)
1220 val = os.WSTOPSIG(code)
1213 return _("stopped by signal %d") % val, val
1221 return _("stopped by signal %d") % val, val
1214 raise ValueError(_("invalid exit code"))
1222 raise ValueError(_("invalid exit code"))
1215
1223
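# Editor's sketch: explain_exit() above decodes a raw wait()-style status as
# returned by os.system(); on POSIX the exit code sits in the high byte.
assert explain_exit(0) == (_("exited with status %d") % 0, 0)
assert explain_exit(1 << 8) == (_("exited with status %d") % 1, 1)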
1216 def isowner(fp, st=None):
1224 def isowner(fp, st=None):
1217 """Return True if the file object f belongs to the current user.
1225 """Return True if the file object f belongs to the current user.
1218
1226
1219 The return value of a util.fstat(f) may be passed as the st argument.
1227 The return value of a util.fstat(f) may be passed as the st argument.
1220 """
1228 """
1221 if st is None:
1229 if st is None:
1222 st = fstat(fp)
1230 st = fstat(fp)
1223 return st.st_uid == os.getuid()
1231 return st.st_uid == os.getuid()
1224
1232
1225 def find_in_path(name, path, default=None):
1233 def find_in_path(name, path, default=None):
1226 '''find name in search path. path can be string (will be split
1234 '''find name in search path. path can be string (will be split
1227 with os.pathsep), or iterable thing that returns strings. if name
1235 with os.pathsep), or iterable thing that returns strings. if name
1228 found, return path to name. else return default.'''
1236 found, return path to name. else return default.'''
1229 if isinstance(path, str):
1237 if isinstance(path, str):
1230 path = path.split(os.pathsep)
1238 path = path.split(os.pathsep)
1231 for p in path:
1239 for p in path:
1232 p_name = os.path.join(p, name)
1240 p_name = os.path.join(p, name)
1233 if os.path.exists(p_name):
1241 if os.path.exists(p_name):
1234 return p_name
1242 return p_name
1235 return default
1243 return default
1236
1244
1237 def set_signal_handler():
1245 def set_signal_handler():
1238 pass
1246 pass
1239
1247
1240 def find_exe(name, default=None):
1248 def find_exe(name, default=None):
1241 '''find path of an executable.
1249 '''find path of an executable.
1242 if name contains a path component, return it as is. otherwise,
1250 if name contains a path component, return it as is. otherwise,
1243 use normal executable search path.'''
1251 use normal executable search path.'''
1244
1252
1245 if os.sep in name or sys.platform == 'OpenVMS':
1253 if os.sep in name or sys.platform == 'OpenVMS':
1246 # don't check the executable bit. if the file isn't
1254 # don't check the executable bit. if the file isn't
1247 # executable, whoever tries to actually run it will give a
1255 # executable, whoever tries to actually run it will give a
1248 # much more useful error message.
1256 # much more useful error message.
1249 return name
1257 return name
1250 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1258 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1251
1259
1252 def _buildencodefun():
1260 def _buildencodefun():
1253 e = '_'
1261 e = '_'
1254 win_reserved = [ord(x) for x in '\\:*?"<>|']
1262 win_reserved = [ord(x) for x in '\\:*?"<>|']
1255 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1263 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1256 for x in (range(32) + range(126, 256) + win_reserved):
1264 for x in (range(32) + range(126, 256) + win_reserved):
1257 cmap[chr(x)] = "~%02x" % x
1265 cmap[chr(x)] = "~%02x" % x
1258 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1266 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1259 cmap[chr(x)] = e + chr(x).lower()
1267 cmap[chr(x)] = e + chr(x).lower()
1260 dmap = {}
1268 dmap = {}
1261 for k, v in cmap.iteritems():
1269 for k, v in cmap.iteritems():
1262 dmap[v] = k
1270 dmap[v] = k
1263 def decode(s):
1271 def decode(s):
1264 i = 0
1272 i = 0
1265 while i < len(s):
1273 while i < len(s):
1266 for l in xrange(1, 4):
1274 for l in xrange(1, 4):
1267 try:
1275 try:
1268 yield dmap[s[i:i+l]]
1276 yield dmap[s[i:i+l]]
1269 i += l
1277 i += l
1270 break
1278 break
1271 except KeyError:
1279 except KeyError:
1272 pass
1280 pass
1273 else:
1281 else:
1274 raise KeyError
1282 raise KeyError
1275 return (lambda s: "".join([cmap[c] for c in s]),
1283 return (lambda s: "".join([cmap[c] for c in s]),
1276 lambda s: "".join(list(decode(s))))
1284 lambda s: "".join(list(decode(s))))
1277
1285
1278 encodefilename, decodefilename = _buildencodefun()
1286 encodefilename, decodefilename = _buildencodefun()
1279
1287
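# Editor's sketch of the encoding built above: uppercase letters and '_' are
# escaped with '_' (and lowercased), while control, 8-bit and Windows
# reserved bytes become '~XX' hex escapes; decodefilename() inverts it.
assert encodefilename('FOO:BAR') == '_f_o_o~3a_b_a_r'
assert encodefilename('x_y') == 'x__y'
assert decodefilename('_f_o_o~3a_b_a_r') == 'FOO:BAR'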
1280 def encodedopener(openerfn, fn):
1288 def encodedopener(openerfn, fn):
1281 def o(path, *args, **kw):
1289 def o(path, *args, **kw):
1282 return openerfn(fn(path), *args, **kw)
1290 return openerfn(fn(path), *args, **kw)
1283 return o
1291 return o
1284
1292
1285 def mktempcopy(name, emptyok=False):
1293 def mktempcopy(name, emptyok=False):
1286 """Create a temporary file with the same contents from name
1294 """Create a temporary file with the same contents from name
1287
1295
1288 The permission bits are copied from the original file.
1296 The permission bits are copied from the original file.
1289
1297
1290 If the temporary file is going to be truncated immediately, you
1298 If the temporary file is going to be truncated immediately, you
1291 can use emptyok=True as an optimization.
1299 can use emptyok=True as an optimization.
1292
1300
1293 Returns the name of the temporary file.
1301 Returns the name of the temporary file.
1294 """
1302 """
1295 d, fn = os.path.split(name)
1303 d, fn = os.path.split(name)
1296 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1304 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1297 os.close(fd)
1305 os.close(fd)
1298 # Temporary files are created with mode 0600, which is usually not
1306 # Temporary files are created with mode 0600, which is usually not
1299 # what we want. If the original file already exists, just copy
1307 # what we want. If the original file already exists, just copy
1300 # its mode. Otherwise, manually obey umask.
1308 # its mode. Otherwise, manually obey umask.
1301 try:
1309 try:
1302 st_mode = os.lstat(name).st_mode
1310 st_mode = os.lstat(name).st_mode
1303 except OSError, inst:
1311 except OSError, inst:
1304 if inst.errno != errno.ENOENT:
1312 if inst.errno != errno.ENOENT:
1305 raise
1313 raise
1306 st_mode = 0666 & ~_umask
1314 st_mode = 0666 & ~_umask
1307 os.chmod(temp, st_mode)
1315 os.chmod(temp, st_mode)
1308 if emptyok:
1316 if emptyok:
1309 return temp
1317 return temp
1310 try:
1318 try:
1311 try:
1319 try:
1312 ifp = posixfile(name, "rb")
1320 ifp = posixfile(name, "rb")
1313 except IOError, inst:
1321 except IOError, inst:
1314 if inst.errno == errno.ENOENT:
1322 if inst.errno == errno.ENOENT:
1315 return temp
1323 return temp
1316 if not getattr(inst, 'filename', None):
1324 if not getattr(inst, 'filename', None):
1317 inst.filename = name
1325 inst.filename = name
1318 raise
1326 raise
1319 ofp = posixfile(temp, "wb")
1327 ofp = posixfile(temp, "wb")
1320 for chunk in filechunkiter(ifp):
1328 for chunk in filechunkiter(ifp):
1321 ofp.write(chunk)
1329 ofp.write(chunk)
1322 ifp.close()
1330 ifp.close()
1323 ofp.close()
1331 ofp.close()
1324 except:
1332 except:
1325 try: os.unlink(temp)
1333 try: os.unlink(temp)
1326 except: pass
1334 except: pass
1327 raise
1335 raise
1328 return temp
1336 return temp
1329
1337
1330 class atomictempfile(posixfile):
1338 class atomictempfile(posixfile):
1331 """file-like object that atomically updates a file
1339 """file-like object that atomically updates a file
1332
1340
1333 All writes will be redirected to a temporary copy of the original
1341 All writes will be redirected to a temporary copy of the original
1334 file. When rename is called, the copy is renamed to the original
1342 file. When rename is called, the copy is renamed to the original
1335 name, making the changes visible.
1343 name, making the changes visible.
1336 """
1344 """
1337 def __init__(self, name, mode):
1345 def __init__(self, name, mode):
1338 self.__name = name
1346 self.__name = name
1339 self.temp = mktempcopy(name, emptyok=('w' in mode))
1347 self.temp = mktempcopy(name, emptyok=('w' in mode))
1340 posixfile.__init__(self, self.temp, mode)
1348 posixfile.__init__(self, self.temp, mode)
1341
1349
1342 def rename(self):
1350 def rename(self):
1343 if not self.closed:
1351 if not self.closed:
1344 posixfile.close(self)
1352 posixfile.close(self)
1345 rename(self.temp, localpath(self.__name))
1353 rename(self.temp, localpath(self.__name))
1346
1354
1347 def __del__(self):
1355 def __del__(self):
1348 if not self.closed:
1356 if not self.closed:
1349 try:
1357 try:
1350 os.unlink(self.temp)
1358 os.unlink(self.temp)
1351 except: pass
1359 except: pass
1352 posixfile.close(self)
1360 posixfile.close(self)
1353
1361
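# Editor's usage sketch for atomictempfile above: writes go to a mktempcopy()
# of the target and only replace it when rename() is called; discarding the
# object without rename() throws the temp copy away. Hypothetical path.
if False:
    f = atomictempfile('data/somefile', 'wb')
    f.write('new contents')
    f.rename()    # atomically moves the temp file over the original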
1362 class opener(object):
1363     """Open files relative to a base directory
1364
1365     This class is used to hide the details of COW semantics and
1366     remote file access from higher level code.
1367     """
1368     def __init__(self, base, audit=True):
1369         self.base = base
1370         if audit:
1371             self.audit_path = path_auditor(base)
1372         else:
1373             self.audit_path = always
1374
1375     def __getattr__(self, name):
1376         if name == '_can_symlink':
1377             self._can_symlink = checklink(self.base)
1378             return self._can_symlink
1379         raise AttributeError(name)
1380
1381     def __call__(self, path, mode="r", text=False, atomictemp=False):
1382         self.audit_path(path)
1383         f = os.path.join(self.base, path)
1384
1385         if not text and "b" not in mode:
1386             mode += "b" # for that other OS
1387
1388         if mode[0] != "r":
1389             try:
1390                 nlink = nlinks(f)
1391             except OSError:
1392                 nlink = 0
1393             d = os.path.dirname(f)
1394             if not os.path.isdir(d):
1395                 os.makedirs(d)
1396             if atomictemp:
1397                 return atomictempfile(f, mode)
1398             if nlink > 1:
1399                 rename(mktempcopy(f), f)
1400         return posixfile(f, mode)
1401
1402     def symlink(self, src, dst):
1403         self.audit_path(dst)
1404         linkname = os.path.join(self.base, dst)
1405         try:
1406             os.unlink(linkname)
1407         except OSError:
1408             pass
1409
1410         dirname = os.path.dirname(linkname)
1411         if not os.path.exists(dirname):
1412             os.makedirs(dirname)
1413
1414         if self._can_symlink:
1415             try:
1416                 os.symlink(src, linkname)
1417             except OSError, err:
1418                 raise OSError(err.errno, _('could not symlink to %r: %s') %
1419                               (src, err.strerror), linkname)
1420         else:
1421             f = self(dst, "w")
1422             f.write(src)
1423             f.close()
1424
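A rough sketch of how the opener is meant to be used, with an illustrative base directory and relative path (not taken from this file):

    op = opener('/path/to/repo/.hg')              # audits paths under the base
    f = op('store/data.i', 'w', atomictemp=True)  # returns an atomictempfile
    f.write('...')
    f.rename()                                    # commits the write
    data = op('store/data.i').read()              # plain read via posixfile
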
1425 class chunkbuffer(object):
1426     """Allow arbitrary sized chunks of data to be efficiently read from an
1427     iterator over chunks of arbitrary size."""
1428
1429     def __init__(self, in_iter):
1430         """in_iter is the iterator that's iterating over the input chunks.
1431         targetsize is how big a buffer to try to maintain."""
1432         self.iter = iter(in_iter)
1433         self.buf = ''
1434         self.targetsize = 2**16
1435
1436     def read(self, l):
1437         """Read L bytes of data from the iterator of chunks of data.
1438         Returns less than L bytes if the iterator runs dry."""
1439         if l > len(self.buf) and self.iter:
1440             # Clamp to a multiple of self.targetsize
1441             targetsize = max(l, self.targetsize)
1442             collector = cStringIO.StringIO()
1443             collector.write(self.buf)
1444             collected = len(self.buf)
1445             for chunk in self.iter:
1446                 collector.write(chunk)
1447                 collected += len(chunk)
1448                 if collected >= targetsize:
1449                     break
1450             if collected < targetsize:
1451                 self.iter = False
1452             self.buf = collector.getvalue()
1453         if len(self.buf) == l:
1454             s, self.buf = str(self.buf), ''
1455         else:
1456             s, self.buf = self.buf[:l], buffer(self.buf, l)
1457         return s
1458
1459 def filechunkiter(f, size=65536, limit=None):
1460     """Create a generator that produces the data in the file size
1461     (default 65536) bytes at a time, up to optional limit (default is
1462     to read all data). Chunks may be less than size bytes if the
1463     chunk is the last chunk in the file, or the file is a socket or
1464     some other type of file that sometimes reads less data than is
1465     requested."""
1466     assert size >= 0
1467     assert limit is None or limit >= 0
1468     while True:
1469         if limit is None: nbytes = size
1470         else: nbytes = min(limit, size)
1471         s = nbytes and f.read(nbytes)
1472         if not s: break
1473         if limit: limit -= len(s)
1474         yield s
1475
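filechunkiter and chunkbuffer are complementary: one slices a file into bounded chunks, the other lets a caller read arbitrary byte counts back out of any chunk iterator. A small sketch (the file name is illustrative):

    f = open('some.bundle', 'rb')
    cb = chunkbuffer(filechunkiter(f, size=4096))
    header = cb.read(16)          # may return fewer bytes at end of input
    while True:
        block = cb.read(65536)
        if not block:
            break
        # ... process block ...
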
1476 def makedate():
1477     lt = time.localtime()
1478     if lt[8] == 1 and time.daylight:
1479         tz = time.altzone
1480     else:
1481         tz = time.timezone
1482     return time.mktime(lt), tz
1483
1484 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
1485     """represent a (unixtime, offset) tuple as a localized time.
1486     unixtime is seconds since the epoch, and offset is the time zone's
1487     number of seconds away from UTC. if timezone is false, do not
1488     append time zone to string."""
1489     t, tz = date or makedate()
1490     s = time.strftime(format, time.gmtime(float(t) - tz))
1491     if timezone:
1492         s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
1493     return s
1494
1495 def strdate(string, format, defaults=[]):
1496     """parse a localized time string and return a (unixtime, offset) tuple.
1497     if the string cannot be parsed, ValueError is raised."""
1498     def timezone(string):
1499         tz = string.split()[-1]
1500         if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1501             tz = int(tz)
1502             offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1503             return offset
1504         if tz == "GMT" or tz == "UTC":
1505             return 0
1506         return None
1507
1508     # NOTE: unixtime = localunixtime + offset
1509     offset, date = timezone(string), string
1510     if offset != None:
1511         date = " ".join(string.split()[:-1])
1512
1513     # add missing elements from defaults
1514     for part in defaults:
1515         found = [True for p in part if ("%"+p) in format]
1516         if not found:
1517             date += "@" + defaults[part]
1518             format += "@%" + part[0]
1519
1520     timetuple = time.strptime(date, format)
1521     localunixtime = int(calendar.timegm(timetuple))
1522     if offset is None:
1523         # local timezone
1524         unixtime = int(time.mktime(timetuple))
1525         offset = unixtime - localunixtime
1526     else:
1527         unixtime = localunixtime + offset
1528     return unixtime, offset
1529
1530 def parsedate(string, formats=None, defaults=None):
1531     """parse a localized time string and return a (unixtime, offset) tuple.
1532     The date may be a "unixtime offset" string or in one of the specified
1533     formats."""
1534     if not string:
1535         return 0, 0
1536     if not formats:
1537         formats = defaultdateformats
1538     string = string.strip()
1539     try:
1540         when, offset = map(int, string.split(' '))
1541     except ValueError:
1542         # fill out defaults
1543         if not defaults:
1544             defaults = {}
1545         now = makedate()
1546         for part in "d mb yY HI M S".split():
1547             if part not in defaults:
1548                 if part[0] in "HMS":
1549                     defaults[part] = "00"
1550                 elif part[0] in "dm":
1551                     defaults[part] = "1"
1552                 else:
1553                     defaults[part] = datestr(now, "%" + part[0], False)
1554
1555         for format in formats:
1556             try:
1557                 when, offset = strdate(string, format, defaults)
1558             except ValueError:
1559                 pass
1560             else:
1561                 break
1562         else:
1563             raise Abort(_('invalid date: %r ') % string)
1564     # validate explicit (probably user-specified) date and
1565     # time zone offset. values must fit in signed 32 bits for
1566     # current 32-bit linux runtimes. timezones go from UTC-12
1567     # to UTC+14
1568     if abs(when) > 0x7fffffff:
1569         raise Abort(_('date exceeds 32 bits: %d') % when)
1570     if offset < -50400 or offset > 43200:
1571         raise Abort(_('impossible time zone offset: %d') % offset)
1572     return when, offset
1573
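As the branches above show, a plain '<unixtime> <offset>' pair is accepted directly; anything else is tried against the format list with missing fields filled from defaults. Illustrative calls (the second line assumes a '%Y-%m-%d %H:%M' entry in defaultdateformats):

    parsedate('1000000000 0')       # -> (1000000000, 0)
    parsedate('2007-10-11 12:13')   # seconds default to '00', offset taken
                                    # from the local time zone via strdate
    parsedate('')                   # -> (0, 0)
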
1574 def matchdate(date):
1575     """Return a function that matches a given date match specifier
1576
1577     Formats include:
1578
1579     '{date}' match a given date to the accuracy provided
1580
1581     '<{date}' on or before a given date
1582
1583     '>{date}' on or after a given date
1584
1585     """
1586
1587     def lower(date):
1588         return parsedate(date, extendeddateformats)[0]
1589
1590     def upper(date):
1591         d = dict(mb="12", HI="23", M="59", S="59")
1592         for days in "31 30 29".split():
1593             try:
1594                 d["d"] = days
1595                 return parsedate(date, extendeddateformats, d)[0]
1596             except:
1597                 pass
1598         d["d"] = "28"
1599         return parsedate(date, extendeddateformats, d)[0]
1600
1601     if date[0] == "<":
1602         when = upper(date[1:])
1603         return lambda x: x <= when
1604     elif date[0] == ">":
1605         when = lower(date[1:])
1606         return lambda x: x >= when
1607     elif date[0] == "-":
1608         try:
1609             days = int(date[1:])
1610         except ValueError:
1611             raise Abort(_("invalid day spec: %s") % date[1:])
1612         when = makedate()[0] - days * 3600 * 24
1613         return lambda x: x >= when
1614     elif " to " in date:
1615         a, b = date.split(" to ")
1616         start, stop = lower(a), upper(b)
1617         return lambda x: x >= start and x <= stop
1618     else:
1619         start, stop = lower(date), upper(date)
1620         return lambda x: x >= start and x <= stop
1621
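A sketch of the predicates this returns, with made-up dates and assuming extendeddateformats can parse them (some_unixtime is a placeholder timestamp):

    m = matchdate('>2007-01-01')             # on or after Jan 1 2007
    m(some_unixtime)                          # -> True or False
    matchdate('-7')                           # last seven days
    matchdate('2007-01-01 to 2007-06-30')     # inclusive range
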
1622 def shortuser(user):
1623     """Return a short representation of a user name or email address."""
1624     f = user.find('@')
1625     if f >= 0:
1626         user = user[:f]
1627     f = user.find('<')
1628     if f >= 0:
1629         user = user[f+1:]
1630     f = user.find(' ')
1631     if f >= 0:
1632         user = user[:f]
1633     f = user.find('.')
1634     if f >= 0:
1635         user = user[:f]
1636     return user
1637
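For example, the successive trims above reduce a full address to its first name component (inputs are made up):

    shortuser('John Doe <john.doe@example.com>')   # -> 'john'
    shortuser('jane@example.org')                  # -> 'jane'
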
1638 def ellipsis(text, maxlength=400):
1639     """Trim string to at most maxlength (default: 400) characters."""
1640     if len(text) <= maxlength:
1641         return text
1642     else:
1643         return "%s..." % (text[:maxlength-3])
1644
1645 def walkrepos(path):
1646     '''yield every hg repository under path, recursively.'''
1647     def errhandler(err):
1648         if err.filename == path:
1649             raise err
1650
1651     for root, dirs, files in os.walk(path, onerror=errhandler):
1652         for d in dirs:
1653             if d == '.hg':
1654                 yield root
1655                 dirs[:] = []
1656                 break
1657
1658 _rcpath = None
1659
1660 def os_rcpath():
1661     '''return default os-specific hgrc search path'''
1662     path = system_rcpath()
1663     path.extend(user_rcpath())
1664     path = [os.path.normpath(f) for f in path]
1665     return path
1666
1667 def rcpath():
1668     '''return hgrc search path. if env var HGRCPATH is set, use it.
1669     for each item in path, if directory, use files ending in .rc,
1670     else use item.
1671     make HGRCPATH empty to only look in .hg/hgrc of current repo.
1672     if no HGRCPATH, use default os-specific path.'''
1673     global _rcpath
1674     if _rcpath is None:
1675         if 'HGRCPATH' in os.environ:
1676             _rcpath = []
1677             for p in os.environ['HGRCPATH'].split(os.pathsep):
1678                 if not p: continue
1679                 if os.path.isdir(p):
1680                     for f, kind in osutil.listdir(p):
1681                         if f.endswith('.rc'):
1682                             _rcpath.append(os.path.join(p, f))
1683                 else:
1684                     _rcpath.append(p)
1685         else:
1686             _rcpath = os_rcpath()
1687     return _rcpath
1688
1689 def bytecount(nbytes):
1690     '''return byte count formatted as readable string, with units'''
1691
1692     units = (
1693         (100, 1<<30, _('%.0f GB')),
1694         (10, 1<<30, _('%.1f GB')),
1695         (1, 1<<30, _('%.2f GB')),
1696         (100, 1<<20, _('%.0f MB')),
1697         (10, 1<<20, _('%.1f MB')),
1698         (1, 1<<20, _('%.2f MB')),
1699         (100, 1<<10, _('%.0f KB')),
1700         (10, 1<<10, _('%.1f KB')),
1701         (1, 1<<10, _('%.2f KB')),
1702         (1, 1, _('%.0f bytes')),
1703         )
1704
1705     for multiplier, divisor, format in units:
1706         if nbytes >= divisor * multiplier:
1707             return format % (nbytes / float(divisor))
1708     return units[-1][2] % nbytes
1709
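The table is scanned top-down, so the first threshold that fits decides the precision. Worked examples (values follow from the rules above):

    bytecount(500)              # -> '500 bytes'
    bytecount(1234567)          # -> '1.18 MB'
    bytecount(15 * (1 << 30))   # -> '15.0 GB'
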
1710 def drop_scheme(scheme, path):
1711     sc = scheme + ':'
1712     if path.startswith(sc):
1713         path = path[len(sc):]
1714         if path.startswith('//'):
1715             path = path[2:]
1716     return path
1717
1718 def uirepr(s):
1719     # Avoid double backslash in Windows path repr()
1720     return repr(s).replace('\\\\', '\\')
1721
1722 def hidepassword(url):
1723     '''replaces the password in the url string by three asterisks (***)
1724
1725     >>> hidepassword('http://www.example.com/some/path#fragment')
1726     'http://www.example.com/some/path#fragment'
1727     >>> hidepassword('http://me@www.example.com/some/path#fragment')
1728     'http://me@www.example.com/some/path#fragment'
1729     >>> hidepassword('http://me:simplepw@www.example.com/path#frag')
1730     'http://me:***@www.example.com/path#frag'
1731     >>> hidepassword('http://me:complex:pw@www.example.com/path#frag')
1732     'http://me:***@www.example.com/path#frag'
1733     >>> hidepassword('/path/to/repo')
1734     '/path/to/repo'
1735     >>> hidepassword('relative/path/to/repo')
1736     'relative/path/to/repo'
1737     >>> hidepassword('c:\\\\path\\\\to\\\\repo')
1738     'c:\\\\path\\\\to\\\\repo'
1739     >>> hidepassword('c:/path/to/repo')
1740     'c:/path/to/repo'
1741     >>> hidepassword('bundle://path/to/bundle')
1742     'bundle://path/to/bundle'
1743     '''
1744     url_parts = list(urlparse.urlparse(url))
1745     host_with_pw_pattern = re.compile('^([^:]*):([^@]*)@(.*)$')
1746     if host_with_pw_pattern.match(url_parts[1]):
1747         url_parts[1] = re.sub(host_with_pw_pattern, r'\1:***@\3',
1748                               url_parts[1])
1749     return urlparse.urlunparse(url_parts)
1750
@@ -1,69 +1,73 b''
1 #!/bin/sh
2
3 # Test issue835:
4 # qpush fails immediately when patching a missing file, but
5 # remaining added files are still created empty which will
6 # trick a future qrefresh.
7
8 cat > writelines.py <<EOF
9 import sys
10 path = sys.argv[1]
11 args = sys.argv[2:]
12 assert (len(args) % 2) == 0
13
14 f = file(path, 'wb')
15 for i in xrange(len(args)/2):
16     count, s = args[2*i:2*i+2]
17     count = int(count)
18     s = s.decode('string_escape')
19     f.write(s*count)
20 f.close()
21
22 EOF
23
24 echo "[extensions]" >> $HGRCPATH
25 echo "mq=" >> $HGRCPATH
26
27 hg init normal
28 cd normal
29 python ../writelines.py b 10 'a\n'
30 hg ci -Am addb
31 echo a > a
32 python ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
33 echo c > c
34 hg add a c
35 hg qnew -f changeb
36 hg qpop
37 hg rm b
38 hg ci -Am rmb
39 echo % push patch with missing target
40 hg qpush
41 echo % display added files
42 cat a
43 cat c
44 echo % display rejections
45 cat b.rej
46 cd ..
47
48
49 echo "[diff]" >> $HGRCPATH
50 echo "git=1" >> $HGRCPATH
51
52 hg init git
53 cd git
54 python ../writelines.py b 1 '\x00'
55 hg ci -Am addb
56 echo a > a
57 python ../writelines.py b 1 '\x01' 1 '\x00'
58 echo c > c
59 hg add a c
60 hg qnew -f changeb
61 hg qpop
62 hg rm b
63 hg ci -Am rmb
64 echo % push git patch with missing target
65 hg qpush 2>&1 | sed -e 's/b:.*/b: No such file or directory/'
66 hg st
67 echo % display added files
68 cat a
69 cat c
70 echo % display rejections
71 cat b.rej
72 cd ..
73
@@ -1,25 +1,49 b''
1 adding b
2 Patch queue now empty
3 % push patch with missing target
4 applying changeb
5 unable to find b or b for patching
5 unable to find 'b' for patching
6 unable to find b or b for patching
6 2 out of 2 hunks FAILED -- saving rejects to file b.rej
7 patch failed, unable to continue (try -v)
8 patch failed, rejects left in working dir
9 Errors during apply, please fix and refresh changeb
10 % display added files
11 a
12 c
13 % display rejections
14 --- b
15 +++ b
16 @@ -1,3 +1,5 @@ a
17 +b
18 +b
19 a
20 a
21 a
22 @@ -8,3 +10,5 @@ a
23 a
24 a
25 a
26 +c
27 +c
28 adding b
29 Patch queue now empty
30 % push git patch with missing target
31 applying changeb
17 unable to find b or b for patching
32 unable to find 'b' for patching
33 1 out of 1 hunk FAILED -- saving rejects to file b.rej
34 patch failed, unable to continue (try -v)
35 b: No such file or directory
36 b not tracked!
37 patch failed, rejects left in working dir
38 Errors during apply, please fix and refresh changeb
39 ? b.rej
40 % display added files
41 a
42 c
43 % display rejections
44 --- b
45 +++ b
46 GIT binary patch
47 literal 2
48 Jc${No0000400IC2
49