##// END OF EJS Templates
wireproto: add getbundle() function...
Peter Arrenbrecht -
r13741:b51bf961 default
parent child Browse files
Show More
@@ -0,0 +1,253 b''
1
2 = Test the getbundle() protocol function =
3
4 Enable graphlog extension:
5
6 $ echo "[extensions]" >> $HGRCPATH
7 $ echo "graphlog=" >> $HGRCPATH
8
9 Create a test repository:
10
11 $ hg init repo
12 $ cd repo
13 $ hg debugbuilddag -n -m '+2 :fork +5 :p1 *fork +6 :p2 /p1 :m1 +3' > /dev/null
14 $ hg glog --template '{node}\n'
15 @ 2bba2f40f321484159b395a43f20101d4bb7ead0
16 |
17 o d9e5488323c782fe684573f3043369d199038b6f
18 |
19 o 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
20 |
21 o 733bf0910832b26b768a09172f325f995b5476e1
22 |\
23 | o b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
24 | |
25 | o 6b57ee934bb2996050540f84cdfc8dcad1e7267d
26 | |
27 | o 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
28 | |
29 | o c1818a9f5977dd4139a48f93f5425c67d44a9368
30 | |
31 | o 6c725a58ad10aea441540bfd06c507f63e8b9cdd
32 | |
33 | o 18063366a155bd56b5618229ae2ac3e91849aa5e
34 | |
35 | o a21d913c992197a2eb60b298521ec0f045a04799
36 | |
37 o | b6b2b682253df2ffedc10e9415e4114202b303c5
38 | |
39 o | 2114148793524fd045998f71a45b0aaf139f752b
40 | |
41 o | 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
42 | |
43 o | ea919464b16e003894c48b6cb68df3cd9411b544
44 | |
45 o | 0f82d97ec2778746743fbc996740d409558fda22
46 |/
47 o 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
48 |
49 o 10e64d654571f11577745b4d8372e859d9e4df63
50
51 $ cd ..
52
53
54 = Test locally =
55
56 Get everything:
57
58 $ hg debuggetbundle repo bundle
59 $ hg debugbundle bundle
60 10e64d654571f11577745b4d8372e859d9e4df63
61 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
62 0f82d97ec2778746743fbc996740d409558fda22
63 ea919464b16e003894c48b6cb68df3cd9411b544
64 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
65 2114148793524fd045998f71a45b0aaf139f752b
66 b6b2b682253df2ffedc10e9415e4114202b303c5
67 a21d913c992197a2eb60b298521ec0f045a04799
68 18063366a155bd56b5618229ae2ac3e91849aa5e
69 6c725a58ad10aea441540bfd06c507f63e8b9cdd
70 c1818a9f5977dd4139a48f93f5425c67d44a9368
71 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
72 6b57ee934bb2996050540f84cdfc8dcad1e7267d
73 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
74 733bf0910832b26b768a09172f325f995b5476e1
75 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
76 d9e5488323c782fe684573f3043369d199038b6f
77 2bba2f40f321484159b395a43f20101d4bb7ead0
78
79 Get part of linear run:
80
81 $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 733bf0910832b26b768a09172f325f995b5476e1
82 $ hg debugbundle bundle
83 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
84 d9e5488323c782fe684573f3043369d199038b6f
85
86 Get missing branch and merge:
87
88 $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 6b57ee934bb2996050540f84cdfc8dcad1e7267d
89 $ hg debugbundle bundle
90 0f82d97ec2778746743fbc996740d409558fda22
91 ea919464b16e003894c48b6cb68df3cd9411b544
92 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
93 2114148793524fd045998f71a45b0aaf139f752b
94 b6b2b682253df2ffedc10e9415e4114202b303c5
95 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
96 733bf0910832b26b768a09172f325f995b5476e1
97 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
98 d9e5488323c782fe684573f3043369d199038b6f
99
100 Get from only one head:
101
102 $ hg debuggetbundle repo bundle -H 6c725a58ad10aea441540bfd06c507f63e8b9cdd -C 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
103 $ hg debugbundle bundle
104 a21d913c992197a2eb60b298521ec0f045a04799
105 18063366a155bd56b5618229ae2ac3e91849aa5e
106 6c725a58ad10aea441540bfd06c507f63e8b9cdd
107
108 Get parts of two branches:
109
110 $ hg debuggetbundle repo bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 -H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544
111 $ hg debugbundle bundle
112 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
113 2114148793524fd045998f71a45b0aaf139f752b
114 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
115 6b57ee934bb2996050540f84cdfc8dcad1e7267d
116
117 Check that we get all needed file changes:
118
119 $ hg debugbundle bundle --all
120 format: id, p1, p2, cset, len(delta)
121
122 changelog
123 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99
124 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99
125 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102
126 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102
127
128 manifest
129 dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113
130 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113
131 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295
132 b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114
133
134 mf
135 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17
136 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18
137 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149
138 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19
139
140 nf11
141 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16
142
143 nf12
144 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16
145
146 nf4
147 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15
148
149 nf5
150 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15
151
152 Get branch and merge:
153
154 $ hg debuggetbundle repo bundle -C 10e64d654571f11577745b4d8372e859d9e4df63 -H 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
155 $ hg debugbundle bundle
156 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
157 0f82d97ec2778746743fbc996740d409558fda22
158 ea919464b16e003894c48b6cb68df3cd9411b544
159 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
160 2114148793524fd045998f71a45b0aaf139f752b
161 b6b2b682253df2ffedc10e9415e4114202b303c5
162 a21d913c992197a2eb60b298521ec0f045a04799
163 18063366a155bd56b5618229ae2ac3e91849aa5e
164 6c725a58ad10aea441540bfd06c507f63e8b9cdd
165 c1818a9f5977dd4139a48f93f5425c67d44a9368
166 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
167 6b57ee934bb2996050540f84cdfc8dcad1e7267d
168 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
169 733bf0910832b26b768a09172f325f995b5476e1
170 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
171
172
173 = Test via HTTP =
174
175 Get everything:
176
177 $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log
178 $ cat hg.pid >> $DAEMON_PIDS
179 $ hg debuggetbundle http://localhost:$HGPORT/ bundle
180 $ hg debugbundle bundle
181 10e64d654571f11577745b4d8372e859d9e4df63
182 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
183 0f82d97ec2778746743fbc996740d409558fda22
184 ea919464b16e003894c48b6cb68df3cd9411b544
185 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
186 2114148793524fd045998f71a45b0aaf139f752b
187 b6b2b682253df2ffedc10e9415e4114202b303c5
188 a21d913c992197a2eb60b298521ec0f045a04799
189 18063366a155bd56b5618229ae2ac3e91849aa5e
190 6c725a58ad10aea441540bfd06c507f63e8b9cdd
191 c1818a9f5977dd4139a48f93f5425c67d44a9368
192 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
193 6b57ee934bb2996050540f84cdfc8dcad1e7267d
194 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
195 733bf0910832b26b768a09172f325f995b5476e1
196 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
197 d9e5488323c782fe684573f3043369d199038b6f
198 2bba2f40f321484159b395a43f20101d4bb7ead0
199
200 Get parts of two branches:
201
202 $ hg debuggetbundle http://localhost:$HGPORT/ bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 -H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544
203 $ hg debugbundle bundle
204 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
205 2114148793524fd045998f71a45b0aaf139f752b
206 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
207 6b57ee934bb2996050540f84cdfc8dcad1e7267d
208
209 Check that we get all needed file changes:
210
211 $ hg debugbundle bundle --all
212 format: id, p1, p2, cset, len(delta)
213
214 changelog
215 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99
216 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99
217 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102
218 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102
219
220 manifest
221 dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113
222 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113
223 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295
224 b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114
225
226 mf
227 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17
228 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18
229 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149
230 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19
231
232 nf11
233 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16
234
235 nf12
236 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16
237
238 nf4
239 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15
240
241 nf5
242 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15
243
244 Verify we hit the HTTP server:
245
246 $ cat access.log
247 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
248 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - (glob)
249 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
250 * - - [*] "GET /?cmd=getbundle&common=c1818a9f5977dd4139a48f93f5425c67d44a9368+ea919464b16e003894c48b6cb68df3cd9411b544&heads=6b57ee934bb2996050540f84cdfc8dcad1e7267d+2114148793524fd045998f71a45b0aaf139f752b HTTP/1.1" 200 - (glob)
251
252 $ cat error.log
253
@@ -1,4874 +1,4904 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, util, revlog, extensions, copies, error, bookmarks
12 import hg, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, mdiff, url, encoding, templatekw, discovery
13 import patch, help, mdiff, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 import merge as mergemod
15 import merge as mergemod
16 import minirst, revset, templatefilters
16 import minirst, revset, templatefilters
17 import dagparser
17 import dagparser
18
18
19 # Commands start here, listed alphabetically
19 # Commands start here, listed alphabetically
20
20
21 def add(ui, repo, *pats, **opts):
21 def add(ui, repo, *pats, **opts):
22 """add the specified files on the next commit
22 """add the specified files on the next commit
23
23
24 Schedule files to be version controlled and added to the
24 Schedule files to be version controlled and added to the
25 repository.
25 repository.
26
26
27 The files will be added to the repository at the next commit. To
27 The files will be added to the repository at the next commit. To
28 undo an add before that, see :hg:`forget`.
28 undo an add before that, see :hg:`forget`.
29
29
30 If no names are given, add all files to the repository.
30 If no names are given, add all files to the repository.
31
31
32 .. container:: verbose
32 .. container:: verbose
33
33
34 An example showing how new (unknown) files are added
34 An example showing how new (unknown) files are added
35 automatically by :hg:`add`::
35 automatically by :hg:`add`::
36
36
37 $ ls
37 $ ls
38 foo.c
38 foo.c
39 $ hg status
39 $ hg status
40 ? foo.c
40 ? foo.c
41 $ hg add
41 $ hg add
42 adding foo.c
42 adding foo.c
43 $ hg status
43 $ hg status
44 A foo.c
44 A foo.c
45
45
46 Returns 0 if all files are successfully added.
46 Returns 0 if all files are successfully added.
47 """
47 """
48
48
49 m = cmdutil.match(repo, pats, opts)
49 m = cmdutil.match(repo, pats, opts)
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 opts.get('subrepos'), prefix="")
51 opts.get('subrepos'), prefix="")
52 return rejected and 1 or 0
52 return rejected and 1 or 0
53
53
54 def addremove(ui, repo, *pats, **opts):
54 def addremove(ui, repo, *pats, **opts):
55 """add all new files, delete all missing files
55 """add all new files, delete all missing files
56
56
57 Add all new files and remove all missing files from the
57 Add all new files and remove all missing files from the
58 repository.
58 repository.
59
59
60 New files are ignored if they match any of the patterns in
60 New files are ignored if they match any of the patterns in
61 ``.hgignore``. As with add, these changes take effect at the next
61 ``.hgignore``. As with add, these changes take effect at the next
62 commit.
62 commit.
63
63
64 Use the -s/--similarity option to detect renamed files. With a
64 Use the -s/--similarity option to detect renamed files. With a
65 parameter greater than 0, this compares every removed file with
65 parameter greater than 0, this compares every removed file with
66 every added file and records those similar enough as renames. This
66 every added file and records those similar enough as renames. This
67 option takes a percentage between 0 (disabled) and 100 (files must
67 option takes a percentage between 0 (disabled) and 100 (files must
68 be identical) as its parameter. Detecting renamed files this way
68 be identical) as its parameter. Detecting renamed files this way
69 can be expensive. After using this option, :hg:`status -C` can be
69 can be expensive. After using this option, :hg:`status -C` can be
70 used to check which files were identified as moved or renamed.
70 used to check which files were identified as moved or renamed.
71
71
72 Returns 0 if all files are successfully added.
72 Returns 0 if all files are successfully added.
73 """
73 """
74 try:
74 try:
75 sim = float(opts.get('similarity') or 100)
75 sim = float(opts.get('similarity') or 100)
76 except ValueError:
76 except ValueError:
77 raise util.Abort(_('similarity must be a number'))
77 raise util.Abort(_('similarity must be a number'))
78 if sim < 0 or sim > 100:
78 if sim < 0 or sim > 100:
79 raise util.Abort(_('similarity must be between 0 and 100'))
79 raise util.Abort(_('similarity must be between 0 and 100'))
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81
81
82 def annotate(ui, repo, *pats, **opts):
82 def annotate(ui, repo, *pats, **opts):
83 """show changeset information by line for each file
83 """show changeset information by line for each file
84
84
85 List changes in files, showing the revision id responsible for
85 List changes in files, showing the revision id responsible for
86 each line
86 each line
87
87
88 This command is useful for discovering when a change was made and
88 This command is useful for discovering when a change was made and
89 by whom.
89 by whom.
90
90
91 Without the -a/--text option, annotate will avoid processing files
91 Without the -a/--text option, annotate will avoid processing files
92 it detects as binary. With -a, annotate will annotate the file
92 it detects as binary. With -a, annotate will annotate the file
93 anyway, although the results will probably be neither useful
93 anyway, although the results will probably be neither useful
94 nor desirable.
94 nor desirable.
95
95
96 Returns 0 on success.
96 Returns 0 on success.
97 """
97 """
98 if opts.get('follow'):
98 if opts.get('follow'):
99 # --follow is deprecated and now just an alias for -f/--file
99 # --follow is deprecated and now just an alias for -f/--file
100 # to mimic the behavior of Mercurial before version 1.5
100 # to mimic the behavior of Mercurial before version 1.5
101 opts['file'] = 1
101 opts['file'] = 1
102
102
103 datefunc = ui.quiet and util.shortdate or util.datestr
103 datefunc = ui.quiet and util.shortdate or util.datestr
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105
105
106 if not pats:
106 if not pats:
107 raise util.Abort(_('at least one filename or pattern is required'))
107 raise util.Abort(_('at least one filename or pattern is required'))
108
108
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 ('number', lambda x: str(x[0].rev())),
110 ('number', lambda x: str(x[0].rev())),
111 ('changeset', lambda x: short(x[0].node())),
111 ('changeset', lambda x: short(x[0].node())),
112 ('date', getdate),
112 ('date', getdate),
113 ('file', lambda x: x[0].path()),
113 ('file', lambda x: x[0].path()),
114 ]
114 ]
115
115
116 if (not opts.get('user') and not opts.get('changeset')
116 if (not opts.get('user') and not opts.get('changeset')
117 and not opts.get('date') and not opts.get('file')):
117 and not opts.get('date') and not opts.get('file')):
118 opts['number'] = 1
118 opts['number'] = 1
119
119
120 linenumber = opts.get('line_number') is not None
120 linenumber = opts.get('line_number') is not None
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
123
123
124 funcmap = [func for op, func in opmap if opts.get(op)]
124 funcmap = [func for op, func in opmap if opts.get(op)]
125 if linenumber:
125 if linenumber:
126 lastfunc = funcmap[-1]
126 lastfunc = funcmap[-1]
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128
128
129 def bad(x, y):
129 def bad(x, y):
130 raise util.Abort("%s: %s" % (x, y))
130 raise util.Abort("%s: %s" % (x, y))
131
131
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 m = cmdutil.match(repo, pats, opts)
133 m = cmdutil.match(repo, pats, opts)
134 m.bad = bad
134 m.bad = bad
135 follow = not opts.get('no_follow')
135 follow = not opts.get('no_follow')
136 for abs in ctx.walk(m):
136 for abs in ctx.walk(m):
137 fctx = ctx[abs]
137 fctx = ctx[abs]
138 if not opts.get('text') and util.binary(fctx.data()):
138 if not opts.get('text') and util.binary(fctx.data()):
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 continue
140 continue
141
141
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 pieces = []
143 pieces = []
144
144
145 for f in funcmap:
145 for f in funcmap:
146 l = [f(n) for n, dummy in lines]
146 l = [f(n) for n, dummy in lines]
147 if l:
147 if l:
148 sized = [(x, encoding.colwidth(x)) for x in l]
148 sized = [(x, encoding.colwidth(x)) for x in l]
149 ml = max([w for x, w in sized])
149 ml = max([w for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151
151
152 if pieces:
152 if pieces:
153 for p, l in zip(zip(*pieces), lines):
153 for p, l in zip(zip(*pieces), lines):
154 ui.write("%s: %s" % (" ".join(p), l[1]))
154 ui.write("%s: %s" % (" ".join(p), l[1]))
155
155
156 def archive(ui, repo, dest, **opts):
156 def archive(ui, repo, dest, **opts):
157 '''create an unversioned archive of a repository revision
157 '''create an unversioned archive of a repository revision
158
158
159 By default, the revision used is the parent of the working
159 By default, the revision used is the parent of the working
160 directory; use -r/--rev to specify a different revision.
160 directory; use -r/--rev to specify a different revision.
161
161
162 The archive type is automatically detected based on file
162 The archive type is automatically detected based on file
163 extension (or override using -t/--type).
163 extension (or override using -t/--type).
164
164
165 Valid types are:
165 Valid types are:
166
166
167 :``files``: a directory full of files (default)
167 :``files``: a directory full of files (default)
168 :``tar``: tar archive, uncompressed
168 :``tar``: tar archive, uncompressed
169 :``tbz2``: tar archive, compressed using bzip2
169 :``tbz2``: tar archive, compressed using bzip2
170 :``tgz``: tar archive, compressed using gzip
170 :``tgz``: tar archive, compressed using gzip
171 :``uzip``: zip archive, uncompressed
171 :``uzip``: zip archive, uncompressed
172 :``zip``: zip archive, compressed using deflate
172 :``zip``: zip archive, compressed using deflate
173
173
174 The exact name of the destination archive or directory is given
174 The exact name of the destination archive or directory is given
175 using a format string; see :hg:`help export` for details.
175 using a format string; see :hg:`help export` for details.
176
176
177 Each member added to an archive file has a directory prefix
177 Each member added to an archive file has a directory prefix
178 prepended. Use -p/--prefix to specify a format string for the
178 prepended. Use -p/--prefix to specify a format string for the
179 prefix. The default is the basename of the archive, with suffixes
179 prefix. The default is the basename of the archive, with suffixes
180 removed.
180 removed.
181
181
182 Returns 0 on success.
182 Returns 0 on success.
183 '''
183 '''
184
184
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 if not ctx:
186 if not ctx:
187 raise util.Abort(_('no working directory: please specify a revision'))
187 raise util.Abort(_('no working directory: please specify a revision'))
188 node = ctx.node()
188 node = ctx.node()
189 dest = cmdutil.make_filename(repo, dest, node)
189 dest = cmdutil.make_filename(repo, dest, node)
190 if os.path.realpath(dest) == repo.root:
190 if os.path.realpath(dest) == repo.root:
191 raise util.Abort(_('repository root cannot be destination'))
191 raise util.Abort(_('repository root cannot be destination'))
192
192
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 prefix = opts.get('prefix')
194 prefix = opts.get('prefix')
195
195
196 if dest == '-':
196 if dest == '-':
197 if kind == 'files':
197 if kind == 'files':
198 raise util.Abort(_('cannot archive plain files to stdout'))
198 raise util.Abort(_('cannot archive plain files to stdout'))
199 dest = sys.stdout
199 dest = sys.stdout
200 if not prefix:
200 if not prefix:
201 prefix = os.path.basename(repo.root) + '-%h'
201 prefix = os.path.basename(repo.root) + '-%h'
202
202
203 prefix = cmdutil.make_filename(repo, prefix, node)
203 prefix = cmdutil.make_filename(repo, prefix, node)
204 matchfn = cmdutil.match(repo, [], opts)
204 matchfn = cmdutil.match(repo, [], opts)
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 matchfn, prefix, subrepos=opts.get('subrepos'))
206 matchfn, prefix, subrepos=opts.get('subrepos'))
207
207
208 def backout(ui, repo, node=None, rev=None, **opts):
208 def backout(ui, repo, node=None, rev=None, **opts):
209 '''reverse effect of earlier changeset
209 '''reverse effect of earlier changeset
210
210
211 Prepare a new changeset with the effect of REV undone in the
211 Prepare a new changeset with the effect of REV undone in the
212 current working directory.
212 current working directory.
213
213
214 If REV is the parent of the working directory, then this new changeset
214 If REV is the parent of the working directory, then this new changeset
215 is committed automatically. Otherwise, hg needs to merge the
215 is committed automatically. Otherwise, hg needs to merge the
216 changes and the merged result is left uncommitted.
216 changes and the merged result is left uncommitted.
217
217
218 By default, the pending changeset will have one parent,
218 By default, the pending changeset will have one parent,
219 maintaining a linear history. With --merge, the pending changeset
219 maintaining a linear history. With --merge, the pending changeset
220 will instead have two parents: the old parent of the working
220 will instead have two parents: the old parent of the working
221 directory and a new child of REV that simply undoes REV.
221 directory and a new child of REV that simply undoes REV.
222
222
223 Before version 1.7, the behavior without --merge was equivalent to
223 Before version 1.7, the behavior without --merge was equivalent to
224 specifying --merge followed by :hg:`update --clean .` to cancel
224 specifying --merge followed by :hg:`update --clean .` to cancel
225 the merge and leave the child of REV as a head to be merged
225 the merge and leave the child of REV as a head to be merged
226 separately.
226 separately.
227
227
228 See :hg:`help dates` for a list of formats valid for -d/--date.
228 See :hg:`help dates` for a list of formats valid for -d/--date.
229
229
230 Returns 0 on success.
230 Returns 0 on success.
231 '''
231 '''
232 if rev and node:
232 if rev and node:
233 raise util.Abort(_("please specify just one revision"))
233 raise util.Abort(_("please specify just one revision"))
234
234
235 if not rev:
235 if not rev:
236 rev = node
236 rev = node
237
237
238 if not rev:
238 if not rev:
239 raise util.Abort(_("please specify a revision to backout"))
239 raise util.Abort(_("please specify a revision to backout"))
240
240
241 date = opts.get('date')
241 date = opts.get('date')
242 if date:
242 if date:
243 opts['date'] = util.parsedate(date)
243 opts['date'] = util.parsedate(date)
244
244
245 cmdutil.bail_if_changed(repo)
245 cmdutil.bail_if_changed(repo)
246 node = cmdutil.revsingle(repo, rev).node()
246 node = cmdutil.revsingle(repo, rev).node()
247
247
248 op1, op2 = repo.dirstate.parents()
248 op1, op2 = repo.dirstate.parents()
249 a = repo.changelog.ancestor(op1, node)
249 a = repo.changelog.ancestor(op1, node)
250 if a != node:
250 if a != node:
251 raise util.Abort(_('cannot backout change on a different branch'))
251 raise util.Abort(_('cannot backout change on a different branch'))
252
252
253 p1, p2 = repo.changelog.parents(node)
253 p1, p2 = repo.changelog.parents(node)
254 if p1 == nullid:
254 if p1 == nullid:
255 raise util.Abort(_('cannot backout a change with no parents'))
255 raise util.Abort(_('cannot backout a change with no parents'))
256 if p2 != nullid:
256 if p2 != nullid:
257 if not opts.get('parent'):
257 if not opts.get('parent'):
258 raise util.Abort(_('cannot backout a merge changeset without '
258 raise util.Abort(_('cannot backout a merge changeset without '
259 '--parent'))
259 '--parent'))
260 p = repo.lookup(opts['parent'])
260 p = repo.lookup(opts['parent'])
261 if p not in (p1, p2):
261 if p not in (p1, p2):
262 raise util.Abort(_('%s is not a parent of %s') %
262 raise util.Abort(_('%s is not a parent of %s') %
263 (short(p), short(node)))
263 (short(p), short(node)))
264 parent = p
264 parent = p
265 else:
265 else:
266 if opts.get('parent'):
266 if opts.get('parent'):
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 parent = p1
268 parent = p1
269
269
270 # the backout should appear on the same branch
270 # the backout should appear on the same branch
271 branch = repo.dirstate.branch()
271 branch = repo.dirstate.branch()
272 hg.clean(repo, node, show_stats=False)
272 hg.clean(repo, node, show_stats=False)
273 repo.dirstate.setbranch(branch)
273 repo.dirstate.setbranch(branch)
274 revert_opts = opts.copy()
274 revert_opts = opts.copy()
275 revert_opts['date'] = None
275 revert_opts['date'] = None
276 revert_opts['all'] = True
276 revert_opts['all'] = True
277 revert_opts['rev'] = hex(parent)
277 revert_opts['rev'] = hex(parent)
278 revert_opts['no_backup'] = None
278 revert_opts['no_backup'] = None
279 revert(ui, repo, **revert_opts)
279 revert(ui, repo, **revert_opts)
280 if not opts.get('merge') and op1 != node:
280 if not opts.get('merge') and op1 != node:
281 try:
281 try:
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 return hg.update(repo, op1)
283 return hg.update(repo, op1)
284 finally:
284 finally:
285 ui.setconfig('ui', 'forcemerge', '')
285 ui.setconfig('ui', 'forcemerge', '')
286
286
287 commit_opts = opts.copy()
287 commit_opts = opts.copy()
288 commit_opts['addremove'] = False
288 commit_opts['addremove'] = False
289 if not commit_opts['message'] and not commit_opts['logfile']:
289 if not commit_opts['message'] and not commit_opts['logfile']:
290 # we don't translate commit messages
290 # we don't translate commit messages
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 commit_opts['force_editor'] = True
292 commit_opts['force_editor'] = True
293 commit(ui, repo, **commit_opts)
293 commit(ui, repo, **commit_opts)
294 def nice(node):
294 def nice(node):
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 ui.status(_('changeset %s backs out changeset %s\n') %
296 ui.status(_('changeset %s backs out changeset %s\n') %
297 (nice(repo.changelog.tip()), nice(node)))
297 (nice(repo.changelog.tip()), nice(node)))
298 if opts.get('merge') and op1 != node:
298 if opts.get('merge') and op1 != node:
299 hg.clean(repo, op1, show_stats=False)
299 hg.clean(repo, op1, show_stats=False)
300 ui.status(_('merging with changeset %s\n')
300 ui.status(_('merging with changeset %s\n')
301 % nice(repo.changelog.tip()))
301 % nice(repo.changelog.tip()))
302 try:
302 try:
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 return hg.merge(repo, hex(repo.changelog.tip()))
304 return hg.merge(repo, hex(repo.changelog.tip()))
305 finally:
305 finally:
306 ui.setconfig('ui', 'forcemerge', '')
306 ui.setconfig('ui', 'forcemerge', '')
307 return 0
307 return 0
308
308
309 def bisect(ui, repo, rev=None, extra=None, command=None,
309 def bisect(ui, repo, rev=None, extra=None, command=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
311 noupdate=None):
311 noupdate=None):
312 """subdivision search of changesets
312 """subdivision search of changesets
313
313
314 This command helps to find changesets which introduce problems. To
314 This command helps to find changesets which introduce problems. To
315 use, mark the earliest changeset you know exhibits the problem as
315 use, mark the earliest changeset you know exhibits the problem as
316 bad, then mark the latest changeset which is free from the problem
316 bad, then mark the latest changeset which is free from the problem
317 as good. Bisect will update your working directory to a revision
317 as good. Bisect will update your working directory to a revision
318 for testing (unless the -U/--noupdate option is specified). Once
318 for testing (unless the -U/--noupdate option is specified). Once
319 you have performed tests, mark the working directory as good or
319 you have performed tests, mark the working directory as good or
320 bad, and bisect will either update to another candidate changeset
320 bad, and bisect will either update to another candidate changeset
321 or announce that it has found the bad revision.
321 or announce that it has found the bad revision.
322
322
323 As a shortcut, you can also use the revision argument to mark a
323 As a shortcut, you can also use the revision argument to mark a
324 revision as good or bad without checking it out first.
324 revision as good or bad without checking it out first.
325
325
326 If you supply a command, it will be used for automatic bisection.
326 If you supply a command, it will be used for automatic bisection.
327 Its exit status will be used to mark revisions as good or bad:
327 Its exit status will be used to mark revisions as good or bad:
328 status 0 means good, 125 means to skip the revision, 127
328 status 0 means good, 125 means to skip the revision, 127
329 (command not found) will abort the bisection, and any other
329 (command not found) will abort the bisection, and any other
330 non-zero exit status means the revision is bad.
330 non-zero exit status means the revision is bad.
331
331
332 Returns 0 on success.
332 Returns 0 on success.
333 """
333 """
334 def extendbisectrange(nodes, good):
334 def extendbisectrange(nodes, good):
335 # bisect is incomplete when it ends on a merge node and
335 # bisect is incomplete when it ends on a merge node and
336 # one of the parent was not checked.
336 # one of the parent was not checked.
337 parents = repo[nodes[0]].parents()
337 parents = repo[nodes[0]].parents()
338 if len(parents) > 1:
338 if len(parents) > 1:
339 side = good and state['bad'] or state['good']
339 side = good and state['bad'] or state['good']
340 num = len(set(i.node() for i in parents) & set(side))
340 num = len(set(i.node() for i in parents) & set(side))
341 if num == 1:
341 if num == 1:
342 return parents[0].ancestor(parents[1])
342 return parents[0].ancestor(parents[1])
343 return None
343 return None
344
344
345 def print_result(nodes, good):
345 def print_result(nodes, good):
346 displayer = cmdutil.show_changeset(ui, repo, {})
346 displayer = cmdutil.show_changeset(ui, repo, {})
347 if len(nodes) == 1:
347 if len(nodes) == 1:
348 # narrowed it down to a single revision
348 # narrowed it down to a single revision
349 if good:
349 if good:
350 ui.write(_("The first good revision is:\n"))
350 ui.write(_("The first good revision is:\n"))
351 else:
351 else:
352 ui.write(_("The first bad revision is:\n"))
352 ui.write(_("The first bad revision is:\n"))
353 displayer.show(repo[nodes[0]])
353 displayer.show(repo[nodes[0]])
354 parents = repo[nodes[0]].parents()
354 parents = repo[nodes[0]].parents()
355 extendnode = extendbisectrange(nodes, good)
355 extendnode = extendbisectrange(nodes, good)
356 if extendnode is not None:
356 if extendnode is not None:
357 ui.write(_('Not all ancestors of this changeset have been'
357 ui.write(_('Not all ancestors of this changeset have been'
358 ' checked.\nUse bisect --extend to continue the '
358 ' checked.\nUse bisect --extend to continue the '
359 'bisection from\nthe common ancestor, %s.\n')
359 'bisection from\nthe common ancestor, %s.\n')
360 % short(extendnode.node()))
360 % short(extendnode.node()))
361 else:
361 else:
362 # multiple possible revisions
362 # multiple possible revisions
363 if good:
363 if good:
364 ui.write(_("Due to skipped revisions, the first "
364 ui.write(_("Due to skipped revisions, the first "
365 "good revision could be any of:\n"))
365 "good revision could be any of:\n"))
366 else:
366 else:
367 ui.write(_("Due to skipped revisions, the first "
367 ui.write(_("Due to skipped revisions, the first "
368 "bad revision could be any of:\n"))
368 "bad revision could be any of:\n"))
369 for n in nodes:
369 for n in nodes:
370 displayer.show(repo[n])
370 displayer.show(repo[n])
371 displayer.close()
371 displayer.close()
372
372
373 def check_state(state, interactive=True):
373 def check_state(state, interactive=True):
374 if not state['good'] or not state['bad']:
374 if not state['good'] or not state['bad']:
375 if (good or bad or skip or reset) and interactive:
375 if (good or bad or skip or reset) and interactive:
376 return
376 return
377 if not state['good']:
377 if not state['good']:
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
379 else:
379 else:
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
381 return True
381 return True
382
382
383 # backward compatibility
383 # backward compatibility
384 if rev in "good bad reset init".split():
384 if rev in "good bad reset init".split():
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
386 cmd, rev, extra = rev, extra, None
386 cmd, rev, extra = rev, extra, None
387 if cmd == "good":
387 if cmd == "good":
388 good = True
388 good = True
389 elif cmd == "bad":
389 elif cmd == "bad":
390 bad = True
390 bad = True
391 else:
391 else:
392 reset = True
392 reset = True
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
394 raise util.Abort(_('incompatible arguments'))
394 raise util.Abort(_('incompatible arguments'))
395
395
396 if reset:
396 if reset:
397 p = repo.join("bisect.state")
397 p = repo.join("bisect.state")
398 if os.path.exists(p):
398 if os.path.exists(p):
399 os.unlink(p)
399 os.unlink(p)
400 return
400 return
401
401
402 state = hbisect.load_state(repo)
402 state = hbisect.load_state(repo)
403
403
404 if command:
404 if command:
405 changesets = 1
405 changesets = 1
406 try:
406 try:
407 while changesets:
407 while changesets:
408 # update state
408 # update state
409 status = util.system(command)
409 status = util.system(command)
410 if status == 125:
410 if status == 125:
411 transition = "skip"
411 transition = "skip"
412 elif status == 0:
412 elif status == 0:
413 transition = "good"
413 transition = "good"
414 # status < 0 means process was killed
414 # status < 0 means process was killed
415 elif status == 127:
415 elif status == 127:
416 raise util.Abort(_("failed to execute %s") % command)
416 raise util.Abort(_("failed to execute %s") % command)
417 elif status < 0:
417 elif status < 0:
418 raise util.Abort(_("%s killed") % command)
418 raise util.Abort(_("%s killed") % command)
419 else:
419 else:
420 transition = "bad"
420 transition = "bad"
421 ctx = cmdutil.revsingle(repo, rev)
421 ctx = cmdutil.revsingle(repo, rev)
422 rev = None # clear for future iterations
422 rev = None # clear for future iterations
423 state[transition].append(ctx.node())
423 state[transition].append(ctx.node())
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
425 check_state(state, interactive=False)
425 check_state(state, interactive=False)
426 # bisect
426 # bisect
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 # update to next check
428 # update to next check
429 cmdutil.bail_if_changed(repo)
429 cmdutil.bail_if_changed(repo)
430 hg.clean(repo, nodes[0], show_stats=False)
430 hg.clean(repo, nodes[0], show_stats=False)
431 finally:
431 finally:
432 hbisect.save_state(repo, state)
432 hbisect.save_state(repo, state)
433 print_result(nodes, good)
433 print_result(nodes, good)
434 return
434 return
435
435
436 # update state
436 # update state
437
437
438 if rev:
438 if rev:
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
440 else:
440 else:
441 nodes = [repo.lookup('.')]
441 nodes = [repo.lookup('.')]
442
442
443 if good or bad or skip:
443 if good or bad or skip:
444 if good:
444 if good:
445 state['good'] += nodes
445 state['good'] += nodes
446 elif bad:
446 elif bad:
447 state['bad'] += nodes
447 state['bad'] += nodes
448 elif skip:
448 elif skip:
449 state['skip'] += nodes
449 state['skip'] += nodes
450 hbisect.save_state(repo, state)
450 hbisect.save_state(repo, state)
451
451
452 if not check_state(state):
452 if not check_state(state):
453 return
453 return
454
454
455 # actually bisect
455 # actually bisect
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
457 if extend:
457 if extend:
458 if not changesets:
458 if not changesets:
459 extendnode = extendbisectrange(nodes, good)
459 extendnode = extendbisectrange(nodes, good)
460 if extendnode is not None:
460 if extendnode is not None:
461 ui.write(_("Extending search to changeset %d:%s\n"
461 ui.write(_("Extending search to changeset %d:%s\n"
462 % (extendnode.rev(), short(extendnode.node()))))
462 % (extendnode.rev(), short(extendnode.node()))))
463 if noupdate:
463 if noupdate:
464 return
464 return
465 cmdutil.bail_if_changed(repo)
465 cmdutil.bail_if_changed(repo)
466 return hg.clean(repo, extendnode.node())
466 return hg.clean(repo, extendnode.node())
467 raise util.Abort(_("nothing to extend"))
467 raise util.Abort(_("nothing to extend"))
468
468
469 if changesets == 0:
469 if changesets == 0:
470 print_result(nodes, good)
470 print_result(nodes, good)
471 else:
471 else:
472 assert len(nodes) == 1 # only a single node can be tested next
472 assert len(nodes) == 1 # only a single node can be tested next
473 node = nodes[0]
473 node = nodes[0]
474 # compute the approximate number of remaining tests
474 # compute the approximate number of remaining tests
475 tests, size = 0, 2
475 tests, size = 0, 2
476 while size <= changesets:
476 while size <= changesets:
477 tests, size = tests + 1, size * 2
477 tests, size = tests + 1, size * 2
478 rev = repo.changelog.rev(node)
478 rev = repo.changelog.rev(node)
479 ui.write(_("Testing changeset %d:%s "
479 ui.write(_("Testing changeset %d:%s "
480 "(%d changesets remaining, ~%d tests)\n")
480 "(%d changesets remaining, ~%d tests)\n")
481 % (rev, short(node), changesets, tests))
481 % (rev, short(node), changesets, tests))
482 if not noupdate:
482 if not noupdate:
483 cmdutil.bail_if_changed(repo)
483 cmdutil.bail_if_changed(repo)
484 return hg.clean(repo, node)
484 return hg.clean(repo, node)
485
485
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
487 '''track a line of development with movable markers
487 '''track a line of development with movable markers
488
488
489 Bookmarks are pointers to certain commits that move when
489 Bookmarks are pointers to certain commits that move when
490 committing. Bookmarks are local. They can be renamed, copied and
490 committing. Bookmarks are local. They can be renamed, copied and
491 deleted. It is possible to use bookmark names in :hg:`merge` and
491 deleted. It is possible to use bookmark names in :hg:`merge` and
492 :hg:`update` to merge and update respectively to a given bookmark.
492 :hg:`update` to merge and update respectively to a given bookmark.
493
493
494 You can use :hg:`bookmark NAME` to set a bookmark on the working
494 You can use :hg:`bookmark NAME` to set a bookmark on the working
495 directory's parent revision with the given name. If you specify
495 directory's parent revision with the given name. If you specify
496 a revision using -r REV (where REV may be an existing bookmark),
496 a revision using -r REV (where REV may be an existing bookmark),
497 the bookmark is assigned to that revision.
497 the bookmark is assigned to that revision.
498
498
499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
500 push` and :hg:`help pull`). This requires both the local and remote
500 push` and :hg:`help pull`). This requires both the local and remote
501 repositories to support bookmarks. For versions prior to 1.8, this means
501 repositories to support bookmarks. For versions prior to 1.8, this means
502 the bookmarks extension must be enabled.
502 the bookmarks extension must be enabled.
503 '''
503 '''
504 hexfn = ui.debugflag and hex or short
504 hexfn = ui.debugflag and hex or short
505 marks = repo._bookmarks
505 marks = repo._bookmarks
506 cur = repo.changectx('.').node()
506 cur = repo.changectx('.').node()
507
507
508 if rename:
508 if rename:
509 if rename not in marks:
509 if rename not in marks:
510 raise util.Abort(_("a bookmark of this name does not exist"))
510 raise util.Abort(_("a bookmark of this name does not exist"))
511 if mark in marks and not force:
511 if mark in marks and not force:
512 raise util.Abort(_("a bookmark of the same name already exists"))
512 raise util.Abort(_("a bookmark of the same name already exists"))
513 if mark is None:
513 if mark is None:
514 raise util.Abort(_("new bookmark name required"))
514 raise util.Abort(_("new bookmark name required"))
515 marks[mark] = marks[rename]
515 marks[mark] = marks[rename]
516 if repo._bookmarkcurrent == rename:
516 if repo._bookmarkcurrent == rename:
517 bookmarks.setcurrent(repo, mark)
517 bookmarks.setcurrent(repo, mark)
518 del marks[rename]
518 del marks[rename]
519 bookmarks.write(repo)
519 bookmarks.write(repo)
520 return
520 return
521
521
522 if delete:
522 if delete:
523 if mark is None:
523 if mark is None:
524 raise util.Abort(_("bookmark name required"))
524 raise util.Abort(_("bookmark name required"))
525 if mark not in marks:
525 if mark not in marks:
526 raise util.Abort(_("a bookmark of this name does not exist"))
526 raise util.Abort(_("a bookmark of this name does not exist"))
527 if mark == repo._bookmarkcurrent:
527 if mark == repo._bookmarkcurrent:
528 bookmarks.setcurrent(repo, None)
528 bookmarks.setcurrent(repo, None)
529 del marks[mark]
529 del marks[mark]
530 bookmarks.write(repo)
530 bookmarks.write(repo)
531 return
531 return
532
532
533 if mark is not None:
533 if mark is not None:
534 if "\n" in mark:
534 if "\n" in mark:
535 raise util.Abort(_("bookmark name cannot contain newlines"))
535 raise util.Abort(_("bookmark name cannot contain newlines"))
536 mark = mark.strip()
536 mark = mark.strip()
537 if not mark:
537 if not mark:
538 raise util.Abort(_("bookmark names cannot consist entirely of "
538 raise util.Abort(_("bookmark names cannot consist entirely of "
539 "whitespace"))
539 "whitespace"))
540 if mark in marks and not force:
540 if mark in marks and not force:
541 raise util.Abort(_("a bookmark of the same name already exists"))
541 raise util.Abort(_("a bookmark of the same name already exists"))
542 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
542 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
543 and not force):
543 and not force):
544 raise util.Abort(
544 raise util.Abort(
545 _("a bookmark cannot have the name of an existing branch"))
545 _("a bookmark cannot have the name of an existing branch"))
546 if rev:
546 if rev:
547 marks[mark] = repo.lookup(rev)
547 marks[mark] = repo.lookup(rev)
548 else:
548 else:
549 marks[mark] = repo.changectx('.').node()
549 marks[mark] = repo.changectx('.').node()
550 if repo.changectx('.').node() == marks[mark]:
550 if repo.changectx('.').node() == marks[mark]:
551 bookmarks.setcurrent(repo, mark)
551 bookmarks.setcurrent(repo, mark)
552 bookmarks.write(repo)
552 bookmarks.write(repo)
553 return
553 return
554
554
555 if mark is None:
555 if mark is None:
556 if rev:
556 if rev:
557 raise util.Abort(_("bookmark name required"))
557 raise util.Abort(_("bookmark name required"))
558 if len(marks) == 0:
558 if len(marks) == 0:
559 ui.status(_("no bookmarks set\n"))
559 ui.status(_("no bookmarks set\n"))
560 else:
560 else:
561 for bmark, n in sorted(marks.iteritems()):
561 for bmark, n in sorted(marks.iteritems()):
562 current = repo._bookmarkcurrent
562 current = repo._bookmarkcurrent
563 if bmark == current and n == cur:
563 if bmark == current and n == cur:
564 prefix, label = '*', 'bookmarks.current'
564 prefix, label = '*', 'bookmarks.current'
565 else:
565 else:
566 prefix, label = ' ', ''
566 prefix, label = ' ', ''
567
567
568 if ui.quiet:
568 if ui.quiet:
569 ui.write("%s\n" % bmark, label=label)
569 ui.write("%s\n" % bmark, label=label)
570 else:
570 else:
571 ui.write(" %s %-25s %d:%s\n" % (
571 ui.write(" %s %-25s %d:%s\n" % (
572 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
572 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
573 label=label)
573 label=label)
574 return
574 return
575
575
576 def branch(ui, repo, label=None, **opts):
576 def branch(ui, repo, label=None, **opts):
577 """set or show the current branch name
577 """set or show the current branch name
578
578
579 With no argument, show the current branch name. With one argument,
579 With no argument, show the current branch name. With one argument,
580 set the working directory branch name (the branch will not exist
580 set the working directory branch name (the branch will not exist
581 in the repository until the next commit). Standard practice
581 in the repository until the next commit). Standard practice
582 recommends that primary development take place on the 'default'
582 recommends that primary development take place on the 'default'
583 branch.
583 branch.
584
584
585 Unless -f/--force is specified, branch will not let you set a
585 Unless -f/--force is specified, branch will not let you set a
586 branch name that already exists, even if it's inactive.
586 branch name that already exists, even if it's inactive.
587
587
588 Use -C/--clean to reset the working directory branch to that of
588 Use -C/--clean to reset the working directory branch to that of
589 the parent of the working directory, negating a previous branch
589 the parent of the working directory, negating a previous branch
590 change.
590 change.
591
591
592 Use the command :hg:`update` to switch to an existing branch. Use
592 Use the command :hg:`update` to switch to an existing branch. Use
593 :hg:`commit --close-branch` to mark this branch as closed.
593 :hg:`commit --close-branch` to mark this branch as closed.
594
594
595 Returns 0 on success.
595 Returns 0 on success.
596 """
596 """
597
597
598 if opts.get('clean'):
598 if opts.get('clean'):
599 label = repo[None].parents()[0].branch()
599 label = repo[None].parents()[0].branch()
600 repo.dirstate.setbranch(label)
600 repo.dirstate.setbranch(label)
601 ui.status(_('reset working directory to branch %s\n') % label)
601 ui.status(_('reset working directory to branch %s\n') % label)
602 elif label:
602 elif label:
603 if not opts.get('force') and label in repo.branchtags():
603 if not opts.get('force') and label in repo.branchtags():
604 if label not in [p.branch() for p in repo.parents()]:
604 if label not in [p.branch() for p in repo.parents()]:
605 raise util.Abort(_('a branch of the same name already exists'
605 raise util.Abort(_('a branch of the same name already exists'
606 " (use 'hg update' to switch to it)"))
606 " (use 'hg update' to switch to it)"))
607 repo.dirstate.setbranch(label)
607 repo.dirstate.setbranch(label)
608 ui.status(_('marked working directory as branch %s\n') % label)
608 ui.status(_('marked working directory as branch %s\n') % label)
609 else:
609 else:
610 ui.write("%s\n" % repo.dirstate.branch())
610 ui.write("%s\n" % repo.dirstate.branch())
611
611
612 def branches(ui, repo, active=False, closed=False):
612 def branches(ui, repo, active=False, closed=False):
613 """list repository named branches
613 """list repository named branches
614
614
615 List the repository's named branches, indicating which ones are
615 List the repository's named branches, indicating which ones are
616 inactive. If -c/--closed is specified, also list branches which have
616 inactive. If -c/--closed is specified, also list branches which have
617 been marked closed (see :hg:`commit --close-branch`).
617 been marked closed (see :hg:`commit --close-branch`).
618
618
619 If -a/--active is specified, only show active branches. A branch
619 If -a/--active is specified, only show active branches. A branch
620 is considered active if it contains repository heads.
620 is considered active if it contains repository heads.
621
621
622 Use the command :hg:`update` to switch to an existing branch.
622 Use the command :hg:`update` to switch to an existing branch.
623
623
624 Returns 0.
624 Returns 0.
625 """
625 """
626
626
627 hexfunc = ui.debugflag and hex or short
627 hexfunc = ui.debugflag and hex or short
628 activebranches = [repo[n].branch() for n in repo.heads()]
628 activebranches = [repo[n].branch() for n in repo.heads()]
629 def testactive(tag, node):
629 def testactive(tag, node):
630 realhead = tag in activebranches
630 realhead = tag in activebranches
631 open = node in repo.branchheads(tag, closed=False)
631 open = node in repo.branchheads(tag, closed=False)
632 return realhead and open
632 return realhead and open
633 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
633 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
634 for tag, node in repo.branchtags().items()],
634 for tag, node in repo.branchtags().items()],
635 reverse=True)
635 reverse=True)
636
636
637 for isactive, node, tag in branches:
637 for isactive, node, tag in branches:
638 if (not active) or isactive:
638 if (not active) or isactive:
639 if ui.quiet:
639 if ui.quiet:
640 ui.write("%s\n" % tag)
640 ui.write("%s\n" % tag)
641 else:
641 else:
642 hn = repo.lookup(node)
642 hn = repo.lookup(node)
643 if isactive:
643 if isactive:
644 label = 'branches.active'
644 label = 'branches.active'
645 notice = ''
645 notice = ''
646 elif hn not in repo.branchheads(tag, closed=False):
646 elif hn not in repo.branchheads(tag, closed=False):
647 if not closed:
647 if not closed:
648 continue
648 continue
649 label = 'branches.closed'
649 label = 'branches.closed'
650 notice = _(' (closed)')
650 notice = _(' (closed)')
651 else:
651 else:
652 label = 'branches.inactive'
652 label = 'branches.inactive'
653 notice = _(' (inactive)')
653 notice = _(' (inactive)')
654 if tag == repo.dirstate.branch():
654 if tag == repo.dirstate.branch():
655 label = 'branches.current'
655 label = 'branches.current'
656 rev = str(node).rjust(31 - encoding.colwidth(tag))
656 rev = str(node).rjust(31 - encoding.colwidth(tag))
657 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
657 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
658 tag = ui.label(tag, label)
658 tag = ui.label(tag, label)
659 ui.write("%s %s%s\n" % (tag, rev, notice))
659 ui.write("%s %s%s\n" % (tag, rev, notice))
660
660
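# --- Illustrative sketch (not part of commands.py): how the classification in
# branches() above falls out for a single branch name. repo.heads(),
# repo.branchheads() and repo[n].branch() are the real APIs used above; the
# helper name and the three return strings are made up for this example.
def _classify_branch_example(repo, name, node):
    """Return 'active', 'closed' or 'inactive' for the tip node of a branch."""
    headbranches = [repo[n].branch() for n in repo.heads()]
    openheads = repo.branchheads(name, closed=False)
    if name in headbranches and node in openheads:
        return 'active'    # the branch still owns a repository head
    if node not in openheads:
        return 'closed'    # its heads were closed with 'commit --close-branch'
    return 'inactive'      # merged away: no repository head left on it
# e.g. _classify_branch_example(repo, 'default', repo.branchtags()['default'])
# ---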
661 def bundle(ui, repo, fname, dest=None, **opts):
661 def bundle(ui, repo, fname, dest=None, **opts):
662 """create a changegroup file
662 """create a changegroup file
663
663
664 Generate a compressed changegroup file collecting changesets not
664 Generate a compressed changegroup file collecting changesets not
665 known to be in another repository.
665 known to be in another repository.
666
666
667 If you omit the destination repository, then hg assumes the
667 If you omit the destination repository, then hg assumes the
668 destination will have all the nodes you specify with --base
668 destination will have all the nodes you specify with --base
669 parameters. To create a bundle containing all changesets, use
669 parameters. To create a bundle containing all changesets, use
670 -a/--all (or --base null).
670 -a/--all (or --base null).
671
671
672 You can change compression method with the -t/--type option.
672 You can change compression method with the -t/--type option.
673 The available compression methods are: none, bzip2, and
673 The available compression methods are: none, bzip2, and
674 gzip (by default, bundles are compressed using bzip2).
674 gzip (by default, bundles are compressed using bzip2).
675
675
676 The bundle file can then be transferred using conventional means
676 The bundle file can then be transferred using conventional means
677 and applied to another repository with the unbundle or pull
677 and applied to another repository with the unbundle or pull
678 command. This is useful when direct push and pull are not
678 command. This is useful when direct push and pull are not
679 available or when exporting an entire repository is undesirable.
679 available or when exporting an entire repository is undesirable.
680
680
681 Applying bundles preserves all changeset contents including
681 Applying bundles preserves all changeset contents including
682 permissions, copy/rename information, and revision history.
682 permissions, copy/rename information, and revision history.
683
683
684 Returns 0 on success, 1 if no changes found.
684 Returns 0 on success, 1 if no changes found.
685 """
685 """
686 revs = None
686 revs = None
687 if 'rev' in opts:
687 if 'rev' in opts:
688 revs = cmdutil.revrange(repo, opts['rev'])
688 revs = cmdutil.revrange(repo, opts['rev'])
689
689
690 if opts.get('all'):
690 if opts.get('all'):
691 base = ['null']
691 base = ['null']
692 else:
692 else:
693 base = cmdutil.revrange(repo, opts.get('base'))
693 base = cmdutil.revrange(repo, opts.get('base'))
694 if base:
694 if base:
695 if dest:
695 if dest:
696 raise util.Abort(_("--base is incompatible with specifying "
696 raise util.Abort(_("--base is incompatible with specifying "
697 "a destination"))
697 "a destination"))
698 base = [repo.lookup(rev) for rev in base]
698 base = [repo.lookup(rev) for rev in base]
699 # create the right base
699 # create the right base
700 # XXX: nodesbetween / changegroup* should be "fixed" instead
700 # XXX: nodesbetween / changegroup* should be "fixed" instead
701 o = []
701 o = []
702 has = set((nullid,))
702 has = set((nullid,))
703 for n in base:
703 for n in base:
704 has.update(repo.changelog.reachable(n))
704 has.update(repo.changelog.reachable(n))
705 if revs:
705 if revs:
706 revs = [repo.lookup(rev) for rev in revs]
706 revs = [repo.lookup(rev) for rev in revs]
707 visit = revs[:]
707 visit = revs[:]
708 has.difference_update(visit)
708 has.difference_update(visit)
709 else:
709 else:
710 visit = repo.changelog.heads()
710 visit = repo.changelog.heads()
711 seen = {}
711 seen = {}
712 while visit:
712 while visit:
713 n = visit.pop(0)
713 n = visit.pop(0)
714 parents = [p for p in repo.changelog.parents(n) if p not in has]
714 parents = [p for p in repo.changelog.parents(n) if p not in has]
715 if len(parents) == 0:
715 if len(parents) == 0:
716 if n not in has:
716 if n not in has:
717 o.append(n)
717 o.append(n)
718 else:
718 else:
719 for p in parents:
719 for p in parents:
720 if p not in seen:
720 if p not in seen:
721 seen[p] = 1
721 seen[p] = 1
722 visit.append(p)
722 visit.append(p)
723 else:
723 else:
724 dest = ui.expandpath(dest or 'default-push', dest or 'default')
724 dest = ui.expandpath(dest or 'default-push', dest or 'default')
725 dest, branches = hg.parseurl(dest, opts.get('branch'))
725 dest, branches = hg.parseurl(dest, opts.get('branch'))
726 other = hg.repository(hg.remoteui(repo, opts), dest)
726 other = hg.repository(hg.remoteui(repo, opts), dest)
727 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
727 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
728 if revs:
728 if revs:
729 revs = [repo.lookup(rev) for rev in revs]
729 revs = [repo.lookup(rev) for rev in revs]
730 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
730 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
731
731
732 if not o:
732 if not o:
733 ui.status(_("no changes found\n"))
733 ui.status(_("no changes found\n"))
734 return 1
734 return 1
735
735
736 if revs:
736 if revs:
737 cg = repo.changegroupsubset(o, revs, 'bundle')
737 cg = repo.changegroupsubset(o, revs, 'bundle')
738 else:
738 else:
739 cg = repo.changegroup(o, 'bundle')
739 cg = repo.changegroup(o, 'bundle')
740
740
741 bundletype = opts.get('type', 'bzip2').lower()
741 bundletype = opts.get('type', 'bzip2').lower()
742 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
742 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
743 bundletype = btypes.get(bundletype)
743 bundletype = btypes.get(bundletype)
744 if bundletype not in changegroup.bundletypes:
744 if bundletype not in changegroup.bundletypes:
745 raise util.Abort(_('unknown bundle type specified with --type'))
745 raise util.Abort(_('unknown bundle type specified with --type'))
746
746
747 changegroup.writebundle(cg, fname, bundletype)
747 changegroup.writebundle(cg, fname, bundletype)
748
748
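# --- Illustrative sketch (not part of commands.py): the --type handling above
# in one helper. The btypes table and changegroup.bundletypes come straight
# from the code above; the helper name is made up and it relies on the
# module-level util/changegroup imports.
def _bundletype_example(name):
    """Map a --type value ('none', 'bzip2', 'gzip') to an on-disk header."""
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    header = btypes.get(name.lower())
    if header not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    return header
# _bundletype_example('gzip') -> 'HG10GZ'; writebundle() uses that header to
# choose the compressor, bzip2 being the default above.
# ---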
749 def cat(ui, repo, file1, *pats, **opts):
749 def cat(ui, repo, file1, *pats, **opts):
750 """output the current or given revision of files
750 """output the current or given revision of files
751
751
752 Print the specified files as they were at the given revision. If
752 Print the specified files as they were at the given revision. If
753 no revision is given, the parent of the working directory is used,
753 no revision is given, the parent of the working directory is used,
754 or tip if no revision is checked out.
754 or tip if no revision is checked out.
755
755
756 Output may be to a file, in which case the name of the file is
756 Output may be to a file, in which case the name of the file is
757 given using a format string. The formatting rules are the same as
757 given using a format string. The formatting rules are the same as
758 for the export command, with the following additions:
758 for the export command, with the following additions:
759
759
760 :``%s``: basename of file being printed
760 :``%s``: basename of file being printed
761 :``%d``: dirname of file being printed, or '.' if in repository root
761 :``%d``: dirname of file being printed, or '.' if in repository root
762 :``%p``: root-relative path name of file being printed
762 :``%p``: root-relative path name of file being printed
763
763
764 Returns 0 on success.
764 Returns 0 on success.
765 """
765 """
766 ctx = cmdutil.revsingle(repo, opts.get('rev'))
766 ctx = cmdutil.revsingle(repo, opts.get('rev'))
767 err = 1
767 err = 1
768 m = cmdutil.match(repo, (file1,) + pats, opts)
768 m = cmdutil.match(repo, (file1,) + pats, opts)
769 for abs in ctx.walk(m):
769 for abs in ctx.walk(m):
770 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
770 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
771 data = ctx[abs].data()
771 data = ctx[abs].data()
772 if opts.get('decode'):
772 if opts.get('decode'):
773 data = repo.wwritedata(abs, data)
773 data = repo.wwritedata(abs, data)
774 fp.write(data)
774 fp.write(data)
775 fp.close()
775 fp.close()
776 err = 0
776 err = 0
777 return err
777 return err
778
778
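# --- Illustrative sketch (not part of commands.py): what the %s/%d/%p keys in
# the cat docstring expand to for one repository-relative path. The real
# expansion is done by cmdutil.make_file() above; this helper only mirrors the
# documented behaviour and its name is made up.
def _cat_format_example(fmt, relpath):
    """Expand the documented %s, %d and %p keys for one file path."""
    dirname = os.path.dirname(relpath) or '.'
    return (fmt.replace('%p', relpath)
               .replace('%d', dirname)
               .replace('%s', os.path.basename(relpath)))
# _cat_format_example('out/%d/%s.orig', 'mercurial/commands.py')
#   -> 'out/mercurial/commands.py.orig'
# ---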
779 def clone(ui, source, dest=None, **opts):
779 def clone(ui, source, dest=None, **opts):
780 """make a copy of an existing repository
780 """make a copy of an existing repository
781
781
782 Create a copy of an existing repository in a new directory.
782 Create a copy of an existing repository in a new directory.
783
783
784 If no destination directory name is specified, it defaults to the
784 If no destination directory name is specified, it defaults to the
785 basename of the source.
785 basename of the source.
786
786
787 The location of the source is added to the new repository's
787 The location of the source is added to the new repository's
788 ``.hg/hgrc`` file, as the default to be used for future pulls.
788 ``.hg/hgrc`` file, as the default to be used for future pulls.
789
789
790 See :hg:`help urls` for valid source format details.
790 See :hg:`help urls` for valid source format details.
791
791
792 It is possible to specify an ``ssh://`` URL as the destination, but no
792 It is possible to specify an ``ssh://`` URL as the destination, but no
793 ``.hg/hgrc`` and working directory will be created on the remote side.
793 ``.hg/hgrc`` and working directory will be created on the remote side.
794 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
794 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
795
795
796 A set of changesets (tags, or branch names) to pull may be specified
796 A set of changesets (tags, or branch names) to pull may be specified
797 by listing each changeset (tag, or branch name) with -r/--rev.
797 by listing each changeset (tag, or branch name) with -r/--rev.
798 If -r/--rev is used, the cloned repository will contain only a subset
798 If -r/--rev is used, the cloned repository will contain only a subset
799 of the changesets of the source repository. Only the set of changesets
799 of the changesets of the source repository. Only the set of changesets
800 defined by all -r/--rev options (including all their ancestors)
800 defined by all -r/--rev options (including all their ancestors)
801 will be pulled into the destination repository.
801 will be pulled into the destination repository.
802 No subsequent changesets (including subsequent tags) will be present
802 No subsequent changesets (including subsequent tags) will be present
803 in the destination.
803 in the destination.
804
804
805 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
805 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
806 local source repositories.
806 local source repositories.
807
807
808 For efficiency, hardlinks are used for cloning whenever the source
808 For efficiency, hardlinks are used for cloning whenever the source
809 and destination are on the same filesystem (note this applies only
809 and destination are on the same filesystem (note this applies only
810 to the repository data, not to the working directory). Some
810 to the repository data, not to the working directory). Some
811 filesystems, such as AFS, implement hardlinking incorrectly, but
811 filesystems, such as AFS, implement hardlinking incorrectly, but
812 do not report errors. In these cases, use the --pull option to
812 do not report errors. In these cases, use the --pull option to
813 avoid hardlinking.
813 avoid hardlinking.
814
814
815 In some cases, you can clone repositories and the working directory
815 In some cases, you can clone repositories and the working directory
816 using full hardlinks with ::
816 using full hardlinks with ::
817
817
818 $ cp -al REPO REPOCLONE
818 $ cp -al REPO REPOCLONE
819
819
820 This is the fastest way to clone, but it is not always safe. The
820 This is the fastest way to clone, but it is not always safe. The
821 operation is not atomic (making sure REPO is not modified during
821 operation is not atomic (making sure REPO is not modified during
822 the operation is up to you) and you have to make sure your editor
822 the operation is up to you) and you have to make sure your editor
823 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
823 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
824 this is not compatible with certain extensions that place their
824 this is not compatible with certain extensions that place their
825 metadata under the .hg directory, such as mq.
825 metadata under the .hg directory, such as mq.
826
826
827 Mercurial will update the working directory to the first applicable
827 Mercurial will update the working directory to the first applicable
828 revision from this list:
828 revision from this list:
829
829
830 a) null if -U or the source repository has no changesets
830 a) null if -U or the source repository has no changesets
831 b) if -u . and the source repository is local, the first parent of
831 b) if -u . and the source repository is local, the first parent of
832 the source repository's working directory
832 the source repository's working directory
833 c) the changeset specified with -u (if a branch name, this means the
833 c) the changeset specified with -u (if a branch name, this means the
834 latest head of that branch)
834 latest head of that branch)
835 d) the changeset specified with -r
835 d) the changeset specified with -r
836 e) the tipmost head specified with -b
836 e) the tipmost head specified with -b
837 f) the tipmost head specified with the url#branch source syntax
837 f) the tipmost head specified with the url#branch source syntax
838 g) the tipmost head of the default branch
838 g) the tipmost head of the default branch
839 h) tip
839 h) tip
840
840
841 Returns 0 on success.
841 Returns 0 on success.
842 """
842 """
843 if opts.get('noupdate') and opts.get('updaterev'):
843 if opts.get('noupdate') and opts.get('updaterev'):
844 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
844 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
845
845
846 r = hg.clone(hg.remoteui(ui, opts), source, dest,
846 r = hg.clone(hg.remoteui(ui, opts), source, dest,
847 pull=opts.get('pull'),
847 pull=opts.get('pull'),
848 stream=opts.get('uncompressed'),
848 stream=opts.get('uncompressed'),
849 rev=opts.get('rev'),
849 rev=opts.get('rev'),
850 update=opts.get('updaterev') or not opts.get('noupdate'),
850 update=opts.get('updaterev') or not opts.get('noupdate'),
851 branch=opts.get('branch'))
851 branch=opts.get('branch'))
852
852
853 return r is None
853 return r is None
854
854
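# --- Illustrative sketch (not part of commands.py): the a)-h) checkout rules
# from the clone docstring written as a plain fallback chain. Every parameter
# name here is a stand-in; the real decision is made inside hg.clone().
def _clone_checkout_example(noupdate, updaterev, revs, branch, hashbranch,
                            source_is_local, srcwd_parent, has_changesets):
    """Return the revision the fresh working directory would update to."""
    if noupdate or not has_changesets:
        return 'null'                        # a)
    if updaterev == '.' and source_is_local:
        return srcwd_parent                  # b) parent of source's wdir
    if updaterev:
        return updaterev                     # c) -u REV (branch -> its head)
    if revs:
        return revs[-1]                      # d) a changeset given with -r
    if branch:
        return branch[-1]                    # e) tipmost head of -b branch
    if hashbranch:
        return hashbranch                    # f) url#branch source syntax
    return 'default'                         # g) default branch tip, else h) tip
# ---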
855 def commit(ui, repo, *pats, **opts):
855 def commit(ui, repo, *pats, **opts):
856 """commit the specified files or all outstanding changes
856 """commit the specified files or all outstanding changes
857
857
858 Commit changes to the given files into the repository. Unlike a
858 Commit changes to the given files into the repository. Unlike a
859 centralized SCM, this operation is a local operation. See
859 centralized SCM, this operation is a local operation. See
860 :hg:`push` for a way to actively distribute your changes.
860 :hg:`push` for a way to actively distribute your changes.
861
861
862 If a list of files is omitted, all changes reported by :hg:`status`
862 If a list of files is omitted, all changes reported by :hg:`status`
863 will be committed.
863 will be committed.
864
864
865 If you are committing the result of a merge, do not provide any
865 If you are committing the result of a merge, do not provide any
866 filenames or -I/-X filters.
866 filenames or -I/-X filters.
867
867
868 If no commit message is specified, Mercurial starts your
868 If no commit message is specified, Mercurial starts your
869 configured editor where you can enter a message. In case your
869 configured editor where you can enter a message. In case your
870 commit fails, you will find a backup of your message in
870 commit fails, you will find a backup of your message in
871 ``.hg/last-message.txt``.
871 ``.hg/last-message.txt``.
872
872
873 See :hg:`help dates` for a list of formats valid for -d/--date.
873 See :hg:`help dates` for a list of formats valid for -d/--date.
874
874
875 Returns 0 on success, 1 if nothing changed.
875 Returns 0 on success, 1 if nothing changed.
876 """
876 """
877 extra = {}
877 extra = {}
878 if opts.get('close_branch'):
878 if opts.get('close_branch'):
879 if repo['.'].node() not in repo.branchheads():
879 if repo['.'].node() not in repo.branchheads():
880 # The topo heads set is included in the branch heads set of the
880 # The topo heads set is included in the branch heads set of the
881 # current branch, so it's sufficient to test branchheads
881 # current branch, so it's sufficient to test branchheads
882 raise util.Abort(_('can only close branch heads'))
882 raise util.Abort(_('can only close branch heads'))
883 extra['close'] = 1
883 extra['close'] = 1
884 e = cmdutil.commiteditor
884 e = cmdutil.commiteditor
885 if opts.get('force_editor'):
885 if opts.get('force_editor'):
886 e = cmdutil.commitforceeditor
886 e = cmdutil.commitforceeditor
887
887
888 def commitfunc(ui, repo, message, match, opts):
888 def commitfunc(ui, repo, message, match, opts):
889 return repo.commit(message, opts.get('user'), opts.get('date'), match,
889 return repo.commit(message, opts.get('user'), opts.get('date'), match,
890 editor=e, extra=extra)
890 editor=e, extra=extra)
891
891
892 branch = repo[None].branch()
892 branch = repo[None].branch()
893 bheads = repo.branchheads(branch)
893 bheads = repo.branchheads(branch)
894
894
895 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
895 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
896 if not node:
896 if not node:
897 ui.status(_("nothing changed\n"))
897 ui.status(_("nothing changed\n"))
898 return 1
898 return 1
899
899
900 ctx = repo[node]
900 ctx = repo[node]
901 parents = ctx.parents()
901 parents = ctx.parents()
902
902
903 if bheads and not [x for x in parents
903 if bheads and not [x for x in parents
904 if x.node() in bheads and x.branch() == branch]:
904 if x.node() in bheads and x.branch() == branch]:
905 ui.status(_('created new head\n'))
905 ui.status(_('created new head\n'))
906 # The message is not printed for initial roots. For the other
906 # The message is not printed for initial roots. For the other
907 # changesets, it is printed in the following situations:
907 # changesets, it is printed in the following situations:
908 #
908 #
909 # Par column: for the 2 parents with ...
909 # Par column: for the 2 parents with ...
910 # N: null or no parent
910 # N: null or no parent
911 # B: parent is on another named branch
911 # B: parent is on another named branch
912 # C: parent is a regular non head changeset
912 # C: parent is a regular non head changeset
913 # H: parent was a branch head of the current branch
913 # H: parent was a branch head of the current branch
914 # Msg column: whether we print "created new head" message
914 # Msg column: whether we print "created new head" message
915 # In the following, it is assumed that there already exists some
915 # In the following, it is assumed that there already exists some
916 # initial branch heads of the current branch, otherwise nothing is
916 # initial branch heads of the current branch, otherwise nothing is
917 # printed anyway.
917 # printed anyway.
918 #
918 #
919 # Par Msg Comment
919 # Par Msg Comment
920 # NN y additional topo root
920 # NN y additional topo root
921 #
921 #
922 # BN y additional branch root
922 # BN y additional branch root
923 # CN y additional topo head
923 # CN y additional topo head
924 # HN n usual case
924 # HN n usual case
925 #
925 #
926 # BB y weird additional branch root
926 # BB y weird additional branch root
927 # CB y branch merge
927 # CB y branch merge
928 # HB n merge with named branch
928 # HB n merge with named branch
929 #
929 #
930 # CC y additional head from merge
930 # CC y additional head from merge
931 # CH n merge with a head
931 # CH n merge with a head
932 #
932 #
933 # HH n head merge: head count decreases
933 # HH n head merge: head count decreases
934
934
935 if not opts.get('close_branch'):
935 if not opts.get('close_branch'):
936 for r in parents:
936 for r in parents:
937 if r.extra().get('close') and r.branch() == branch:
937 if r.extra().get('close') and r.branch() == branch:
938 ui.status(_('reopening closed branch head %d\n') % r)
938 ui.status(_('reopening closed branch head %d\n') % r)
939
939
940 if ui.debugflag:
940 if ui.debugflag:
941 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
941 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
942 elif ui.verbose:
942 elif ui.verbose:
943 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
943 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
944
944
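# --- Illustrative sketch (not part of commands.py): the test guarding the
# 'created new head' message, pulled out on its own. parents/bheads/branch
# mean the same as in commit() above; the helper name is made up.
def _creates_new_head_example(parents, bheads, branch):
    """True when no parent of the new changeset was a head of its branch."""
    return bool(bheads) and not [p for p in parents
                                 if p.node() in bheads and p.branch() == branch]
# Matches the table above: an H parent (old branch head) suppresses the
# message, while N/B/C parents (null, other branch, non-head) do not.
# ---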
945 def copy(ui, repo, *pats, **opts):
945 def copy(ui, repo, *pats, **opts):
946 """mark files as copied for the next commit
946 """mark files as copied for the next commit
947
947
948 Mark dest as having copies of source files. If dest is a
948 Mark dest as having copies of source files. If dest is a
949 directory, copies are put in that directory. If dest is a file,
949 directory, copies are put in that directory. If dest is a file,
950 the source must be a single file.
950 the source must be a single file.
951
951
952 By default, this command copies the contents of files as they
952 By default, this command copies the contents of files as they
953 exist in the working directory. If invoked with -A/--after, the
953 exist in the working directory. If invoked with -A/--after, the
954 operation is recorded, but no copying is performed.
954 operation is recorded, but no copying is performed.
955
955
956 This command takes effect with the next commit. To undo a copy
956 This command takes effect with the next commit. To undo a copy
957 before that, see :hg:`revert`.
957 before that, see :hg:`revert`.
958
958
959 Returns 0 on success, 1 if errors are encountered.
959 Returns 0 on success, 1 if errors are encountered.
960 """
960 """
961 wlock = repo.wlock(False)
961 wlock = repo.wlock(False)
962 try:
962 try:
963 return cmdutil.copy(ui, repo, pats, opts)
963 return cmdutil.copy(ui, repo, pats, opts)
964 finally:
964 finally:
965 wlock.release()
965 wlock.release()
966
966
967 def debugancestor(ui, repo, *args):
967 def debugancestor(ui, repo, *args):
968 """find the ancestor revision of two revisions in a given index"""
968 """find the ancestor revision of two revisions in a given index"""
969 if len(args) == 3:
969 if len(args) == 3:
970 index, rev1, rev2 = args
970 index, rev1, rev2 = args
971 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
971 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
972 lookup = r.lookup
972 lookup = r.lookup
973 elif len(args) == 2:
973 elif len(args) == 2:
974 if not repo:
974 if not repo:
975 raise util.Abort(_("there is no Mercurial repository here "
975 raise util.Abort(_("there is no Mercurial repository here "
976 "(.hg not found)"))
976 "(.hg not found)"))
977 rev1, rev2 = args
977 rev1, rev2 = args
978 r = repo.changelog
978 r = repo.changelog
979 lookup = repo.lookup
979 lookup = repo.lookup
980 else:
980 else:
981 raise util.Abort(_('either two or three arguments required'))
981 raise util.Abort(_('either two or three arguments required'))
982 a = r.ancestor(lookup(rev1), lookup(rev2))
982 a = r.ancestor(lookup(rev1), lookup(rev2))
983 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
983 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
984
984
985 def debugbuilddag(ui, repo, text,
985 def debugbuilddag(ui, repo, text,
986 mergeable_file=False,
986 mergeable_file=False,
987 appended_file=False,
987 appended_file=False,
988 overwritten_file=False,
988 overwritten_file=False,
989 new_file=False):
989 new_file=False):
990 """builds a repo with a given dag from scratch in the current empty repo
990 """builds a repo with a given dag from scratch in the current empty repo
991
991
992 Elements:
992 Elements:
993
993
994 - "+n" is a linear run of n nodes based on the current default parent
994 - "+n" is a linear run of n nodes based on the current default parent
995 - "." is a single node based on the current default parent
995 - "." is a single node based on the current default parent
996 - "$" resets the default parent to null (implied at the start);
996 - "$" resets the default parent to null (implied at the start);
997 otherwise the default parent is always the last node created
997 otherwise the default parent is always the last node created
998 - "<p" sets the default parent to the backref p
998 - "<p" sets the default parent to the backref p
999 - "*p" is a fork at parent p, which is a backref
999 - "*p" is a fork at parent p, which is a backref
1000 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1000 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1001 - "/p2" is a merge of the preceding node and p2
1001 - "/p2" is a merge of the preceding node and p2
1002 - ":tag" defines a local tag for the preceding node
1002 - ":tag" defines a local tag for the preceding node
1003 - "@branch" sets the named branch for subsequent nodes
1003 - "@branch" sets the named branch for subsequent nodes
1004 - "!command" runs the command using your shell
1004 - "!command" runs the command using your shell
1005 - "!!my command\\n" is like "!", but to the end of the line
1005 - "!!my command\\n" is like "!", but to the end of the line
1006 - "#...\\n" is a comment up to the end of the line
1006 - "#...\\n" is a comment up to the end of the line
1007
1007
1008 Whitespace between the above elements is ignored.
1008 Whitespace between the above elements is ignored.
1009
1009
1010 A backref is either
1010 A backref is either
1011
1011
1012 - a number n, which references the node curr-n, where curr is the current
1012 - a number n, which references the node curr-n, where curr is the current
1013 node, or
1013 node, or
1014 - the name of a local tag you placed earlier using ":tag", or
1014 - the name of a local tag you placed earlier using ":tag", or
1015 - empty to denote the default parent.
1015 - empty to denote the default parent.
1016
1016
1017 All string-valued elements are either strictly alphanumeric, or must
1017 All string-valued elements are either strictly alphanumeric, or must
1018 be enclosed in double quotes ("..."), with "\\" as escape character.
1018 be enclosed in double quotes ("..."), with "\\" as escape character.
1019
1019
1020 Note that the --overwritten-file and --appended-file options imply the
1020 Note that the --overwritten-file and --appended-file options imply the
1021 use of "HGMERGE=internal:local" during DAG buildup.
1021 use of "HGMERGE=internal:local" during DAG buildup.
1022 """
1022 """
1023
1023
1024 if not (mergeable_file or appended_file or overwritten_file or new_file):
1024 if not (mergeable_file or appended_file or overwritten_file or new_file):
1025 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1025 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1026
1026
1027 if len(repo.changelog) > 0:
1027 if len(repo.changelog) > 0:
1028 raise util.Abort(_('repository is not empty'))
1028 raise util.Abort(_('repository is not empty'))
1029
1029
1030 if overwritten_file or appended_file:
1030 if overwritten_file or appended_file:
1031 # we don't want to fail in merges during buildup
1031 # we don't want to fail in merges during buildup
1032 os.environ['HGMERGE'] = 'internal:local'
1032 os.environ['HGMERGE'] = 'internal:local'
1033
1033
1034 def writefile(fname, text, fmode="wb"):
1034 def writefile(fname, text, fmode="wb"):
1035 f = open(fname, fmode)
1035 f = open(fname, fmode)
1036 try:
1036 try:
1037 f.write(text)
1037 f.write(text)
1038 finally:
1038 finally:
1039 f.close()
1039 f.close()
1040
1040
1041 if mergeable_file:
1041 if mergeable_file:
1042 linesperrev = 2
1042 linesperrev = 2
1043 # determine number of revs in DAG
1043 # determine number of revs in DAG
1044 n = 0
1044 n = 0
1045 for type, data in dagparser.parsedag(text):
1045 for type, data in dagparser.parsedag(text):
1046 if type == 'n':
1046 if type == 'n':
1047 n += 1
1047 n += 1
1048 # make a file with k lines per rev
1048 # make a file with k lines per rev
1049 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1049 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1050 + "\n")
1050 + "\n")
1051
1051
1052 at = -1
1052 at = -1
1053 atbranch = 'default'
1053 atbranch = 'default'
1054 for type, data in dagparser.parsedag(text):
1054 for type, data in dagparser.parsedag(text):
1055 if type == 'n':
1055 if type == 'n':
1056 ui.status('node %s\n' % str(data))
1056 ui.status('node %s\n' % str(data))
1057 id, ps = data
1057 id, ps = data
1058 p1 = ps[0]
1058 p1 = ps[0]
1059 if p1 != at:
1059 if p1 != at:
1060 update(ui, repo, node=str(p1), clean=True)
1060 update(ui, repo, node=str(p1), clean=True)
1061 at = p1
1061 at = p1
1062 if repo.dirstate.branch() != atbranch:
1062 if repo.dirstate.branch() != atbranch:
1063 branch(ui, repo, atbranch, force=True)
1063 branch(ui, repo, atbranch, force=True)
1064 if len(ps) > 1:
1064 if len(ps) > 1:
1065 p2 = ps[1]
1065 p2 = ps[1]
1066 merge(ui, repo, node=p2)
1066 merge(ui, repo, node=p2)
1067
1067
1068 if mergeable_file:
1068 if mergeable_file:
1069 f = open("mf", "rb+")
1069 f = open("mf", "rb+")
1070 try:
1070 try:
1071 lines = f.read().split("\n")
1071 lines = f.read().split("\n")
1072 lines[id * linesperrev] += " r%i" % id
1072 lines[id * linesperrev] += " r%i" % id
1073 f.seek(0)
1073 f.seek(0)
1074 f.write("\n".join(lines))
1074 f.write("\n".join(lines))
1075 finally:
1075 finally:
1076 f.close()
1076 f.close()
1077
1077
1078 if appended_file:
1078 if appended_file:
1079 writefile("af", "r%i\n" % id, "ab")
1079 writefile("af", "r%i\n" % id, "ab")
1080
1080
1081 if overwritten_file:
1081 if overwritten_file:
1082 writefile("of", "r%i\n" % id)
1082 writefile("of", "r%i\n" % id)
1083
1083
1084 if new_file:
1084 if new_file:
1085 writefile("nf%i" % id, "r%i\n" % id)
1085 writefile("nf%i" % id, "r%i\n" % id)
1086
1086
1087 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1087 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1088 at = id
1088 at = id
1089 elif type == 'l':
1089 elif type == 'l':
1090 id, name = data
1090 id, name = data
1091 ui.status('tag %s\n' % name)
1091 ui.status('tag %s\n' % name)
1092 tag(ui, repo, name, local=True)
1092 tag(ui, repo, name, local=True)
1093 elif type == 'a':
1093 elif type == 'a':
1094 ui.status('branch %s\n' % data)
1094 ui.status('branch %s\n' % data)
1095 atbranch = data
1095 atbranch = data
1096 elif type in 'cC':
1096 elif type in 'cC':
1097 r = util.system(data, cwd=repo.root)
1097 r = util.system(data, cwd=repo.root)
1098 if r:
1098 if r:
1099 desc, r = util.explain_exit(r)
1099 desc, r = util.explain_exit(r)
1100 raise util.Abort(_('%s command %s') % (data, desc))
1100 raise util.Abort(_('%s command %s') % (data, desc))
1101
1101
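# --- Illustrative sketch (not part of commands.py): the event stream that
# dagparser.parsedag() feeds into the loop above, for a small made-up DAG
# text. parsedag() and the 'n'/'l'/'a' event types are the real interface
# used above; the DAG text and helper name are examples only.
def _parsedag_example():
    """Print the events for two linear runs joined by a merge."""
    for type, data in dagparser.parsedag('+3 :a $ +2 /a :m'):
        print type, data
# 'n' events carry (id, [parent ids]); the '$' starts a second root and the
# '/a' node shows up with two parents; 'l' events carry (id, tagname).
# ---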
1102 def debugcommands(ui, cmd='', *args):
1102 def debugcommands(ui, cmd='', *args):
1103 """list all available commands and options"""
1103 """list all available commands and options"""
1104 for cmd, vals in sorted(table.iteritems()):
1104 for cmd, vals in sorted(table.iteritems()):
1105 cmd = cmd.split('|')[0].strip('^')
1105 cmd = cmd.split('|')[0].strip('^')
1106 opts = ', '.join([i[1] for i in vals[1]])
1106 opts = ', '.join([i[1] for i in vals[1]])
1107 ui.write('%s: %s\n' % (cmd, opts))
1107 ui.write('%s: %s\n' % (cmd, opts))
1108
1108
1109 def debugcomplete(ui, cmd='', **opts):
1109 def debugcomplete(ui, cmd='', **opts):
1110 """returns the completion list associated with the given command"""
1110 """returns the completion list associated with the given command"""
1111
1111
1112 if opts.get('options'):
1112 if opts.get('options'):
1113 options = []
1113 options = []
1114 otables = [globalopts]
1114 otables = [globalopts]
1115 if cmd:
1115 if cmd:
1116 aliases, entry = cmdutil.findcmd(cmd, table, False)
1116 aliases, entry = cmdutil.findcmd(cmd, table, False)
1117 otables.append(entry[1])
1117 otables.append(entry[1])
1118 for t in otables:
1118 for t in otables:
1119 for o in t:
1119 for o in t:
1120 if "(DEPRECATED)" in o[3]:
1120 if "(DEPRECATED)" in o[3]:
1121 continue
1121 continue
1122 if o[0]:
1122 if o[0]:
1123 options.append('-%s' % o[0])
1123 options.append('-%s' % o[0])
1124 options.append('--%s' % o[1])
1124 options.append('--%s' % o[1])
1125 ui.write("%s\n" % "\n".join(options))
1125 ui.write("%s\n" % "\n".join(options))
1126 return
1126 return
1127
1127
1128 cmdlist = cmdutil.findpossible(cmd, table)
1128 cmdlist = cmdutil.findpossible(cmd, table)
1129 if ui.verbose:
1129 if ui.verbose:
1130 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1130 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1131 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1131 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1132
1132
1133 def debugfsinfo(ui, path = "."):
1133 def debugfsinfo(ui, path = "."):
1134 """show information detected about current filesystem"""
1134 """show information detected about current filesystem"""
1135 open('.debugfsinfo', 'w').write('')
1135 open('.debugfsinfo', 'w').write('')
1136 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1136 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1137 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1137 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1138 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1138 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1139 and 'yes' or 'no'))
1139 and 'yes' or 'no'))
1140 os.unlink('.debugfsinfo')
1140 os.unlink('.debugfsinfo')
1141
1141
1142 def debugrebuildstate(ui, repo, rev="tip"):
1142 def debugrebuildstate(ui, repo, rev="tip"):
1143 """rebuild the dirstate as it would look like for the given revision"""
1143 """rebuild the dirstate as it would look like for the given revision"""
1144 ctx = cmdutil.revsingle(repo, rev)
1144 ctx = cmdutil.revsingle(repo, rev)
1145 wlock = repo.wlock()
1145 wlock = repo.wlock()
1146 try:
1146 try:
1147 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1147 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1148 finally:
1148 finally:
1149 wlock.release()
1149 wlock.release()
1150
1150
1151 def debugcheckstate(ui, repo):
1151 def debugcheckstate(ui, repo):
1152 """validate the correctness of the current dirstate"""
1152 """validate the correctness of the current dirstate"""
1153 parent1, parent2 = repo.dirstate.parents()
1153 parent1, parent2 = repo.dirstate.parents()
1154 m1 = repo[parent1].manifest()
1154 m1 = repo[parent1].manifest()
1155 m2 = repo[parent2].manifest()
1155 m2 = repo[parent2].manifest()
1156 errors = 0
1156 errors = 0
1157 for f in repo.dirstate:
1157 for f in repo.dirstate:
1158 state = repo.dirstate[f]
1158 state = repo.dirstate[f]
1159 if state in "nr" and f not in m1:
1159 if state in "nr" and f not in m1:
1160 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1160 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1161 errors += 1
1161 errors += 1
1162 if state in "a" and f in m1:
1162 if state in "a" and f in m1:
1163 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1163 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1164 errors += 1
1164 errors += 1
1165 if state in "m" and f not in m1 and f not in m2:
1165 if state in "m" and f not in m1 and f not in m2:
1166 ui.warn(_("%s in state %s, but not in either manifest\n") %
1166 ui.warn(_("%s in state %s, but not in either manifest\n") %
1167 (f, state))
1167 (f, state))
1168 errors += 1
1168 errors += 1
1169 for f in m1:
1169 for f in m1:
1170 state = repo.dirstate[f]
1170 state = repo.dirstate[f]
1171 if state not in "nrm":
1171 if state not in "nrm":
1172 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1172 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1173 errors += 1
1173 errors += 1
1174 if errors:
1174 if errors:
1175 error = _(".hg/dirstate inconsistent with current parent's manifest")
1175 error = _(".hg/dirstate inconsistent with current parent's manifest")
1176 raise util.Abort(error)
1176 raise util.Abort(error)
1177
1177
1178 def showconfig(ui, repo, *values, **opts):
1178 def showconfig(ui, repo, *values, **opts):
1179 """show combined config settings from all hgrc files
1179 """show combined config settings from all hgrc files
1180
1180
1181 With no arguments, print names and values of all config items.
1181 With no arguments, print names and values of all config items.
1182
1182
1183 With one argument of the form section.name, print just the value
1183 With one argument of the form section.name, print just the value
1184 of that config item.
1184 of that config item.
1185
1185
1186 With multiple arguments, print names and values of all config
1186 With multiple arguments, print names and values of all config
1187 items with matching section names.
1187 items with matching section names.
1188
1188
1189 With --debug, the source (filename and line number) is printed
1189 With --debug, the source (filename and line number) is printed
1190 for each config item.
1190 for each config item.
1191
1191
1192 Returns 0 on success.
1192 Returns 0 on success.
1193 """
1193 """
1194
1194
1195 for f in util.rcpath():
1195 for f in util.rcpath():
1196 ui.debug(_('read config from: %s\n') % f)
1196 ui.debug(_('read config from: %s\n') % f)
1197 untrusted = bool(opts.get('untrusted'))
1197 untrusted = bool(opts.get('untrusted'))
1198 if values:
1198 if values:
1199 sections = [v for v in values if '.' not in v]
1199 sections = [v for v in values if '.' not in v]
1200 items = [v for v in values if '.' in v]
1200 items = [v for v in values if '.' in v]
1201 if len(items) > 1 or items and sections:
1201 if len(items) > 1 or items and sections:
1202 raise util.Abort(_('only one config item permitted'))
1202 raise util.Abort(_('only one config item permitted'))
1203 for section, name, value in ui.walkconfig(untrusted=untrusted):
1203 for section, name, value in ui.walkconfig(untrusted=untrusted):
1204 value = str(value).replace('\n', '\\n')
1204 value = str(value).replace('\n', '\\n')
1205 sectname = section + '.' + name
1205 sectname = section + '.' + name
1206 if values:
1206 if values:
1207 for v in values:
1207 for v in values:
1208 if v == section:
1208 if v == section:
1209 ui.debug('%s: ' %
1209 ui.debug('%s: ' %
1210 ui.configsource(section, name, untrusted))
1210 ui.configsource(section, name, untrusted))
1211 ui.write('%s=%s\n' % (sectname, value))
1211 ui.write('%s=%s\n' % (sectname, value))
1212 elif v == sectname:
1212 elif v == sectname:
1213 ui.debug('%s: ' %
1213 ui.debug('%s: ' %
1214 ui.configsource(section, name, untrusted))
1214 ui.configsource(section, name, untrusted))
1215 ui.write(value, '\n')
1215 ui.write(value, '\n')
1216 else:
1216 else:
1217 ui.debug('%s: ' %
1217 ui.debug('%s: ' %
1218 ui.configsource(section, name, untrusted))
1218 ui.configsource(section, name, untrusted))
1219 ui.write('%s=%s\n' % (sectname, value))
1219 ui.write('%s=%s\n' % (sectname, value))
1220
1220
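# --- Illustrative sketch (not part of commands.py): how showconfig splits its
# arguments into whole sections and single items, mirroring the checks above.
# The helper name is made up; util.Abort and _ come from the module imports.
def _split_config_args_example(values):
    """Return (sections, items): 'ui' names a section, 'ui.username' an item."""
    sections = [v for v in values if '.' not in v]
    items = [v for v in values if '.' in v]
    if len(items) > 1 or (items and sections):
        raise util.Abort(_('only one config item permitted'))
    return sections, items
# _split_config_args_example(['ui', 'paths']) -> (['ui', 'paths'], [])
# _split_config_args_example(['ui.username']) -> ([], ['ui.username'])
# ---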
1221 def debugknown(ui, repopath, *ids, **opts):
1221 def debugknown(ui, repopath, *ids, **opts):
1222 """test whether node ids are known to a repo
1222 """test whether node ids are known to a repo
1223
1223
1224 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1224 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1225 indicating unknown/known.
1225 indicating unknown/known.
1226 """
1226 """
1227 repo = hg.repository(ui, repopath)
1227 repo = hg.repository(ui, repopath)
1228 if not repo.capable('known'):
1228 if not repo.capable('known'):
1229 raise util.Abort("known() not supported by target repository")
1229 raise util.Abort("known() not supported by target repository")
1230 flags = repo.known([bin(s) for s in ids])
1230 flags = repo.known([bin(s) for s in ids])
1231 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1231 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1232
1232
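# --- Illustrative sketch (not part of commands.py): driving the known() wire
# call the way debugknown() above does. repo.known() and bin() are the real
# APIs used above; the helper name is made up and 'ids' is assumed to hold
# full 40-character hex node ids.
def _known_example(repo, ids):
    """Return a 0/1 string, one flag per id, 1 meaning the repo has it."""
    flags = repo.known([bin(s) for s in ids])
    return "".join([f and "1" or "0" for f in flags])
# ---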
1233 def debugbundle(ui, bundlepath, all=None, **opts):
1233 def debugbundle(ui, bundlepath, all=None, **opts):
1234 """lists the contents of a bundle"""
1234 """lists the contents of a bundle"""
1235 f = url.open(ui, bundlepath)
1235 f = url.open(ui, bundlepath)
1236 try:
1236 try:
1237 gen = changegroup.readbundle(f, bundlepath)
1237 gen = changegroup.readbundle(f, bundlepath)
1238 if all:
1238 if all:
1239 ui.write("format: id, p1, p2, cset, len(delta)\n")
1239 ui.write("format: id, p1, p2, cset, len(delta)\n")
1240
1240
1241 def showchunks(named):
1241 def showchunks(named):
1242 ui.write("\n%s\n" % named)
1242 ui.write("\n%s\n" % named)
1243 while 1:
1243 while 1:
1244 chunkdata = gen.parsechunk()
1244 chunkdata = gen.parsechunk()
1245 if not chunkdata:
1245 if not chunkdata:
1246 break
1246 break
1247 node = chunkdata['node']
1247 node = chunkdata['node']
1248 p1 = chunkdata['p1']
1248 p1 = chunkdata['p1']
1249 p2 = chunkdata['p2']
1249 p2 = chunkdata['p2']
1250 cs = chunkdata['cs']
1250 cs = chunkdata['cs']
1251 delta = chunkdata['data']
1251 delta = chunkdata['data']
1252 ui.write("%s %s %s %s %s\n" %
1252 ui.write("%s %s %s %s %s\n" %
1253 (hex(node), hex(p1), hex(p2),
1253 (hex(node), hex(p1), hex(p2),
1254 hex(cs), len(delta)))
1254 hex(cs), len(delta)))
1255
1255
1256 showchunks("changelog")
1256 showchunks("changelog")
1257 showchunks("manifest")
1257 showchunks("manifest")
1258 while 1:
1258 while 1:
1259 fname = gen.chunk()
1259 fname = gen.chunk()
1260 if not fname:
1260 if not fname:
1261 break
1261 break
1262 showchunks(fname)
1262 showchunks(fname)
1263 else:
1263 else:
1264 while 1:
1264 while 1:
1265 chunkdata = gen.parsechunk()
1265 chunkdata = gen.parsechunk()
1266 if not chunkdata:
1266 if not chunkdata:
1267 break
1267 break
1268 node = chunkdata['node']
1268 node = chunkdata['node']
1269 ui.write("%s\n" % hex(node))
1269 ui.write("%s\n" % hex(node))
1270 finally:
1270 finally:
1271 f.close()
1271 f.close()
1272
1272
1273 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1274 """retrieves a bundle from a repo
1275
1276 Every ID must be a full-length hex node id string. Saves the bundle to the
1277 given file.
1278 """
1279 repo = hg.repository(ui, repopath)
1280 if not repo.capable('getbundle'):
1281 raise util.Abort("getbundle() not supported by target repository")
1282 args = {}
1283 if common:
1284 args['common'] = [bin(s) for s in common]
1285 if head:
1286 args['heads'] = [bin(s) for s in head]
1287 bundle = repo.getbundle('debug', **args)
1288
1289 bundletype = opts.get('type', 'bzip2').lower()
1290 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1291 bundletype = btypes.get(bundletype)
1292 if bundletype not in changegroup.bundletypes:
1293 raise util.Abort(_('unknown bundle type specified with --type'))
1294 changegroup.writebundle(bundle, bundlepath, bundletype)
1295
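# --- Illustrative sketch (not part of commands.py): the same getbundle()
# round trip as debuggetbundle() above, callable from a small script. The
# repository path and the heads/common ids are placeholders; hg.repository(),
# repo.getbundle() and changegroup.writebundle() are the real APIs used above.
def _getbundle_example(ui, repopath, bundlepath, heads, common):
    """Fetch the changesets between 'common' and 'heads' and bundle them."""
    repo = hg.repository(ui, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    cg = repo.getbundle('debug',
                        heads=[bin(h) for h in heads],
                        common=[bin(c) for c in common])
    changegroup.writebundle(cg, bundlepath, 'HG10BZ')
# Roughly what 'hg debuggetbundle REPO FILE -H <head> -C <common>' does with
# the default bzip2 bundle type.
# ---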
1273 def debugpushkey(ui, repopath, namespace, *keyinfo):
1296 def debugpushkey(ui, repopath, namespace, *keyinfo):
1274 '''access the pushkey key/value protocol
1297 '''access the pushkey key/value protocol
1275
1298
1276 With two args, list the keys in the given namespace.
1299 With two args, list the keys in the given namespace.
1277
1300
1278 With five args, set a key to new if it currently is set to old.
1301 With five args, set a key to new if it currently is set to old.
1279 Reports success or failure.
1302 Reports success or failure.
1280 '''
1303 '''
1281
1304
1282 target = hg.repository(ui, repopath)
1305 target = hg.repository(ui, repopath)
1283 if keyinfo:
1306 if keyinfo:
1284 key, old, new = keyinfo
1307 key, old, new = keyinfo
1285 r = target.pushkey(namespace, key, old, new)
1308 r = target.pushkey(namespace, key, old, new)
1286 ui.status(str(r) + '\n')
1309 ui.status(str(r) + '\n')
1287 return not r
1310 return not r
1288 else:
1311 else:
1289 for k, v in target.listkeys(namespace).iteritems():
1312 for k, v in target.listkeys(namespace).iteritems():
1290 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1313 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1291 v.encode('string-escape')))
1314 v.encode('string-escape')))
1292
1315
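# --- Illustrative sketch (not part of commands.py): the two pushkey modes the
# docstring above describes. target.listkeys() and target.pushkey() are the
# real APIs used above; the 'bookmarks' namespace and key name are assumptions
# made for the example.
def _pushkey_example(target):
    """List the 'bookmarks' namespace, then try a compare-and-set on one key."""
    current = target.listkeys('bookmarks')                  # two-arg form: list
    old = current.get('stable', '')
    return target.pushkey('bookmarks', 'stable', old, old)  # five-arg form
# pushkey() sets the key to the new value only if it currently holds 'old',
# as the docstring above notes.
# ---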
1293 def debugrevspec(ui, repo, expr):
1316 def debugrevspec(ui, repo, expr):
1294 '''parse and apply a revision specification'''
1317 '''parse and apply a revision specification'''
1295 if ui.verbose:
1318 if ui.verbose:
1296 tree = revset.parse(expr)[0]
1319 tree = revset.parse(expr)[0]
1297 ui.note(tree, "\n")
1320 ui.note(tree, "\n")
1298 func = revset.match(expr)
1321 func = revset.match(expr)
1299 for c in func(repo, range(len(repo))):
1322 for c in func(repo, range(len(repo))):
1300 ui.write("%s\n" % c)
1323 ui.write("%s\n" % c)
1301
1324
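# --- Illustrative sketch (not part of commands.py): evaluating a revset the
# way debugrevspec() above does. revset.match() returning a function over
# (repo, subset) is the real interface used above; the "0:tip" expression is
# just an assumed example.
def _revspec_example(repo, expr="0:tip"):
    """Return the revision numbers matched by a revset expression."""
    func = revset.match(expr)
    return [r for r in func(repo, range(len(repo)))]
# ---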
1302 def debugsetparents(ui, repo, rev1, rev2=None):
1325 def debugsetparents(ui, repo, rev1, rev2=None):
1303 """manually set the parents of the current working directory
1326 """manually set the parents of the current working directory
1304
1327
1305 This is useful for writing repository conversion tools, but should
1328 This is useful for writing repository conversion tools, but should
1306 be used with care.
1329 be used with care.
1307
1330
1308 Returns 0 on success.
1331 Returns 0 on success.
1309 """
1332 """
1310
1333
1311 r1 = cmdutil.revsingle(repo, rev1).node()
1334 r1 = cmdutil.revsingle(repo, rev1).node()
1312 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1335 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1313
1336
1314 wlock = repo.wlock()
1337 wlock = repo.wlock()
1315 try:
1338 try:
1316 repo.dirstate.setparents(r1, r2)
1339 repo.dirstate.setparents(r1, r2)
1317 finally:
1340 finally:
1318 wlock.release()
1341 wlock.release()
1319
1342
1320 def debugstate(ui, repo, nodates=None):
1343 def debugstate(ui, repo, nodates=None):
1321 """show the contents of the current dirstate"""
1344 """show the contents of the current dirstate"""
1322 timestr = ""
1345 timestr = ""
1323 showdate = not nodates
1346 showdate = not nodates
1324 for file_, ent in sorted(repo.dirstate._map.iteritems()):
1347 for file_, ent in sorted(repo.dirstate._map.iteritems()):
1325 if showdate:
1348 if showdate:
1326 if ent[3] == -1:
1349 if ent[3] == -1:
1327 # Pad or slice to locale representation
1350 # Pad or slice to locale representation
1328 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1351 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1329 time.localtime(0)))
1352 time.localtime(0)))
1330 timestr = 'unset'
1353 timestr = 'unset'
1331 timestr = (timestr[:locale_len] +
1354 timestr = (timestr[:locale_len] +
1332 ' ' * (locale_len - len(timestr)))
1355 ' ' * (locale_len - len(timestr)))
1333 else:
1356 else:
1334 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1357 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1335 time.localtime(ent[3]))
1358 time.localtime(ent[3]))
1336 if ent[1] & 020000:
1359 if ent[1] & 020000:
1337 mode = 'lnk'
1360 mode = 'lnk'
1338 else:
1361 else:
1339 mode = '%3o' % (ent[1] & 0777)
1362 mode = '%3o' % (ent[1] & 0777)
1340 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1363 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1341 for f in repo.dirstate.copies():
1364 for f in repo.dirstate.copies():
1342 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1365 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1343
1366
1344 def debugsub(ui, repo, rev=None):
1367 def debugsub(ui, repo, rev=None):
1345 ctx = cmdutil.revsingle(repo, rev, None)
1368 ctx = cmdutil.revsingle(repo, rev, None)
1346 for k, v in sorted(ctx.substate.items()):
1369 for k, v in sorted(ctx.substate.items()):
1347 ui.write('path %s\n' % k)
1370 ui.write('path %s\n' % k)
1348 ui.write(' source %s\n' % v[0])
1371 ui.write(' source %s\n' % v[0])
1349 ui.write(' revision %s\n' % v[1])
1372 ui.write(' revision %s\n' % v[1])
1350
1373
1351 def debugdag(ui, repo, file_=None, *revs, **opts):
1374 def debugdag(ui, repo, file_=None, *revs, **opts):
1352 """format the changelog or an index DAG as a concise textual description
1375 """format the changelog or an index DAG as a concise textual description
1353
1376
1354 If you pass a revlog index, the revlog's DAG is emitted. If you list
1377 If you pass a revlog index, the revlog's DAG is emitted. If you list
1355 revision numbers, they get labelled in the output as rN.
1378 revision numbers, they get labelled in the output as rN.
1356
1379
1357 Otherwise, the changelog DAG of the current repo is emitted.
1380 Otherwise, the changelog DAG of the current repo is emitted.
1358 """
1381 """
1359 spaces = opts.get('spaces')
1382 spaces = opts.get('spaces')
1360 dots = opts.get('dots')
1383 dots = opts.get('dots')
1361 if file_:
1384 if file_:
1362 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1385 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1363 revs = set((int(r) for r in revs))
1386 revs = set((int(r) for r in revs))
1364 def events():
1387 def events():
1365 for r in rlog:
1388 for r in rlog:
1366 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1389 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1367 if r in revs:
1390 if r in revs:
1368 yield 'l', (r, "r%i" % r)
1391 yield 'l', (r, "r%i" % r)
1369 elif repo:
1392 elif repo:
1370 cl = repo.changelog
1393 cl = repo.changelog
1371 tags = opts.get('tags')
1394 tags = opts.get('tags')
1372 branches = opts.get('branches')
1395 branches = opts.get('branches')
1373 if tags:
1396 if tags:
1374 labels = {}
1397 labels = {}
1375 for l, n in repo.tags().items():
1398 for l, n in repo.tags().items():
1376 labels.setdefault(cl.rev(n), []).append(l)
1399 labels.setdefault(cl.rev(n), []).append(l)
1377 def events():
1400 def events():
1378 b = "default"
1401 b = "default"
1379 for r in cl:
1402 for r in cl:
1380 if branches:
1403 if branches:
1381 newb = cl.read(cl.node(r))[5]['branch']
1404 newb = cl.read(cl.node(r))[5]['branch']
1382 if newb != b:
1405 if newb != b:
1383 yield 'a', newb
1406 yield 'a', newb
1384 b = newb
1407 b = newb
1385 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1408 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1386 if tags:
1409 if tags:
1387 ls = labels.get(r)
1410 ls = labels.get(r)
1388 if ls:
1411 if ls:
1389 for l in ls:
1412 for l in ls:
1390 yield 'l', (r, l)
1413 yield 'l', (r, l)
1391 else:
1414 else:
1392 raise util.Abort(_('need repo for changelog dag'))
1415 raise util.Abort(_('need repo for changelog dag'))
1393
1416
1394 for line in dagparser.dagtextlines(events(),
1417 for line in dagparser.dagtextlines(events(),
1395 addspaces=spaces,
1418 addspaces=spaces,
1396 wraplabels=True,
1419 wraplabels=True,
1397 wrapannotations=True,
1420 wrapannotations=True,
1398 wrapnonlinear=dots,
1421 wrapnonlinear=dots,
1399 usedots=dots,
1422 usedots=dots,
1400 maxlinewidth=70):
1423 maxlinewidth=70):
1401 ui.write(line)
1424 ui.write(line)
1402 ui.write("\n")
1425 ui.write("\n")
1403
1426
1404 def debugdata(ui, repo, file_, rev):
1427 def debugdata(ui, repo, file_, rev):
1405 """dump the contents of a data file revision"""
1428 """dump the contents of a data file revision"""
1406 r = None
1429 r = None
1407 if repo:
1430 if repo:
1408 filelog = repo.file(file_)
1431 filelog = repo.file(file_)
1409 if len(filelog):
1432 if len(filelog):
1410 r = filelog
1433 r = filelog
1411 if not r:
1434 if not r:
1412 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1435 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1413 try:
1436 try:
1414 ui.write(r.revision(r.lookup(rev)))
1437 ui.write(r.revision(r.lookup(rev)))
1415 except KeyError:
1438 except KeyError:
1416 raise util.Abort(_('invalid revision identifier %s') % rev)
1439 raise util.Abort(_('invalid revision identifier %s') % rev)
1417
1440
1418 def debugdate(ui, date, range=None, **opts):
1441 def debugdate(ui, date, range=None, **opts):
1419 """parse and display a date"""
1442 """parse and display a date"""
1420 if opts["extended"]:
1443 if opts["extended"]:
1421 d = util.parsedate(date, util.extendeddateformats)
1444 d = util.parsedate(date, util.extendeddateformats)
1422 else:
1445 else:
1423 d = util.parsedate(date)
1446 d = util.parsedate(date)
1424 ui.write("internal: %s %s\n" % d)
1447 ui.write("internal: %s %s\n" % d)
1425 ui.write("standard: %s\n" % util.datestr(d))
1448 ui.write("standard: %s\n" % util.datestr(d))
1426 if range:
1449 if range:
1427 m = util.matchdate(range)
1450 m = util.matchdate(range)
1428 ui.write("match: %s\n" % m(d[0]))
1451 ui.write("match: %s\n" % m(d[0]))
1429
1452
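# --- Illustrative sketch (not part of commands.py): what debugdate() above
# works with. util.parsedate() returning a (unixtime, offset) pair,
# util.datestr() and util.matchdate() are the real APIs used above; the
# literal date strings are assumptions for the example.
def _parsedate_example():
    when = util.parsedate('2011-03-21 14:00')   # -> (timestamp, tz offset)
    pretty = util.datestr(when)                 # the "standard:" line above
    after2011 = util.matchdate('>2011-01-01')   # predicate over timestamps
    return when, pretty, after2011(when[0])
# ---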
1430 def debugignore(ui, repo, *values, **opts):
1453 def debugignore(ui, repo, *values, **opts):
1431 """display the combined ignore pattern"""
1454 """display the combined ignore pattern"""
1432 ignore = repo.dirstate._ignore
1455 ignore = repo.dirstate._ignore
1433 if hasattr(ignore, 'includepat'):
1456 if hasattr(ignore, 'includepat'):
1434 ui.write("%s\n" % ignore.includepat)
1457 ui.write("%s\n" % ignore.includepat)
1435 else:
1458 else:
1436 raise util.Abort(_("no ignore patterns found"))
1459 raise util.Abort(_("no ignore patterns found"))
1437
1460
def debugindex(ui, repo, file_, **opts):
    """dump the contents of an index file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog

    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    if not r:
        r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)

    if format == 0:
        ui.write(" rev offset length base linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size base link p1 p2 nodeid\n")

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))

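# Illustrative usage sketch (not part of the original source; the command-line
# spelling of the format option is assumed, only the opts key 'format' is
# visible above):
#   $ hg debugindex .hg/store/data/somefile.i
# dumps one row per revision using the format-0 layout, while format 1 adds
# the flag and rawsize columns printed by the second ui.write() branch.
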
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

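# Illustrative usage sketch (not part of the original source): the dot text
# emitted above can be fed to Graphviz to render the revlog DAG, e.g.
#   $ hg debugindexdot .hg/store/00changelog.i | dot -Tpng > dag.png
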
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = open(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://mercurial.selenic.com/wiki/BugTracker\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
            problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

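# Illustrative usage sketch (not part of the original source): the problem
# count returned above is surfaced as the command's result, so a clean
# installation reports "No problems detected" and returns 0.
#   $ hg debuginstall
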
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = cmdutil.revsingle(repo, opts.get('rev'))
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.repository(hg.remoteui(ui, opts), repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

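# Illustrative usage sketch (not part of the original source): the command is
# pointed at a peer path and echoes the round-tripped positional arguments;
# the second, identical call only produces a warning if its result differs.
#   $ hg debugwireargs http://example.com/repo one two
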
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::
       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = cmdutil.revsingle(repo, change, None).node()
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    m = cmdutil.match(repo, pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'))

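# Illustrative usage sketches (not part of the original source), matching the
# option handling above; REV stands for any revision identifier:
#   $ hg diff                    # working directory vs. its first parent
#   $ hg diff -r REV1 -r REV2    # two revisions via cmdutil.revpair()
#   $ hg diff -c REV             # one changeset vs. its first parent
#   $ hg diff --stat             # diffstat summary through diffordiffstat()
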
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::
       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))

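# Illustrative usage sketch (not part of the original source; the command-line
# spelling of the output option is assumed, only the opts key 'output' is
# visible above). Using the format keys from the docstring:
#   $ hg export -o "%b-r%R.patch" REV1 REV2
# would write one file per revision, named from the repository basename and
# revision number, with cmdutil.export() expanding the template per changeset.
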
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    errs = 0

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))
            errs = 1

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo[None].remove(forget, unlink=False)
    return errs

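# Illustrative usage sketch (not part of the original source):
#   $ hg forget build/artifact.txt
# stops tracking the file at the next commit but leaves it in the working
# directory, mirroring the repo[None].remove(..., unlink=False) call above.
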
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(rev)]
            before, match, after = None, None, None
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(ctx.user()))
            if opts.get('date'):
                cols.append(datefunc(ctx.date()))
            if opts.get('files_with_matches'):
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                before = l.line[:l.colstart]
                match = l.line[l.colstart:l.colend]
                after = l.line[l.colend:]
            ui.write(sep.join(cols))
            if before is not None:
                ui.write(sep + before)
                ui.write(match, label='grep.match')
                ui.write(after)
            ui.write(eol)
            found = True
        return found

    skip = {}
    revfiles = {}
    matchfn = cmdutil.match(repo, pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.parents()[0]
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.parents()[0].rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

    return not found

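# Illustrative usage sketches (not part of the original source; command-line
# spellings other than --all are assumed, only the opts keys 'ignore_case',
# 'line_number', 'print0' and 'all' are visible above):
#   $ hg grep 'socket error' src/net.py   # first revision of each file that matches
#   $ hg grep --all 'timeout'             # every change in match status
# With --all, each line carries the '+' or '-' column emitted by display().
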
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository branch heads.

    Repository "heads" are changesets with no child changesets. They are
    where development generally takes place and are the usual targets
    for update and merge operations. Branch heads are changesets that have
    no child changeset on the same branch.

    If one or more REVs are given, only branch heads on the branches
    associated with the specified changesets are shown.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    changesets without children will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = cmdutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for b, ls in repo.branchmap().iteritems():
            if start is None:
                heads += [repo[h] for h in ls]
                continue
            startrev = repo.changelog.rev(start)
            descendants = set(repo.changelog.descendants(startrev))
            descendants.add(startrev)
            rev = repo.changelog.rev
            heads += [repo[h] for h in ls if rev(h) in descendants]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if not opts.get('closed'):
        heads = [h for h in heads if not h.extra().get('close')]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)' % opts['rev'])
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

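# Illustrative usage sketches (not part of the original source), matching the
# options documented in the docstring above:
#   $ hg heads             # open branch heads from repo.branchmap()
#   $ hg heads --topo      # pure DAG heads, ignoring named branches
#   $ hg heads --closed    # also keep heads whose 'close' extra is set
# The exit status is 1 when the filtered heads list ends up empty, as returned
# above.
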
def help_(ui, name=None, with_version=False, unknowncmd=False):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """
    option_lists = []
    textwidth = min(ui.termwidth(), 80) - 2

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show builtin aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(_('list of commands:\n\n'), select)
            return

        # check if it's an invalid alias and display its error if it is
        if getattr(entry[0], 'badalias', False):
            if not unknowncmd:
                entry[0](ui)
            return

        # synopsis
        if len(entry) > 2:
            if entry[2].startswith('hg'):
                ui.write("%s\n" % entry[2])
            else:
                ui.write('hg %s %s\n' % (aliases[0], entry[2]))
        else:
            ui.write('hg %s\n' % aliases[0])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = gettext(entry[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if hasattr(entry[0], 'definition'): # aliased command
            if entry[0].definition.startswith('!'): # shell alias
                doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
            else:
                doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
        if ui.quiet:
            doc = doc.splitlines()[0]
        keep = ui.verbose and ['verbose'] or []
        formatted, pruned = minirst.format(doc, textwidth, keep=keep)
        ui.write("\n%s\n" % formatted)
        if pruned:
            ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)

        if not ui.quiet:
            # options
            if entry[1]:
                option_lists.append((_("options:\n"), entry[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write('%s\n' % (util.wrap(h[f], textwidth,
                                             initindent=' %-*s ' % (m, f),
                                             hangindent=' ' * (m + 4))))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if hasattr(doc, '__call__'):
            doc = doc()

        ui.write("%s\n\n" % header)
        ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))

    def helpext(name):
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or _('no help text available')
        except KeyError:
            mod = None
            doc = extensions.disabledext(name)
            if not doc:
                raise error.UnknownCommand(name)

        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
        if tail:
            ui.write(minirst.format(tail, textwidth))
            ui.status('\n\n')

        if mod:
            try:
                ct = mod.cmdtable
            except AttributeError:
                ct = {}
            modcmds = set([c.split('|', 1)[0] for c in ct])
            helplist(_('list of commands:\n\n'), modcmds.__contains__)
        else:
            ui.write(_('use "hg help extensions" for information on enabling '
                       'extensions\n'))

    def helpextcmd(name):
        cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
        doc = gettext(mod.__doc__).splitlines()[0]

        msg = help.listexts(_("'%s' is provided by the following "
                              "extension:") % cmd, {ext: doc}, len(ext),
                            indent=4)
        ui.write(minirst.format(msg, textwidth))
        ui.write('\n\n')
        ui.write(_('use "hg help extensions" for information on enabling '
                   'extensions\n'))

    help.addtopichook('revsets', revset.makedoc)
    help.addtopichook('templates', templatekw.makedoc)
    help.addtopichook('templates', templatefilters.makedoc)

    if name and name != 'shortlist':
        i = None
        if unknowncmd:
            queries = (helpextcmd,)
        else:
            queries = (helptopic, helpcmd, helpext, helpextcmd)
        for f in queries:
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)
        if name != 'shortlist':
            exts, maxlength = extensions.enabled()
            text = help.listexts(_('enabled extensions:'), exts, maxlength)
            if text:
                ui.write("\n%s\n" % minirst.format(text, textwidth))

    # list all option lists
    opt_output = []
    multioccur = False
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for option in options:
            if len(option) == 5:
                shortopt, longopt, default, desc, optlabel = option
            else:
                shortopt, longopt, default, desc = option
                optlabel = _("VALUE") # default label

            if _("DEPRECATED") in desc and not ui.verbose:
                continue
            if isinstance(default, list):
                numqualifier = " %s [+]" % optlabel
                multioccur = True
            elif (default is not None) and not isinstance(default, bool):
                numqualifier = " %s" % optlabel
            else:
                numqualifier = ""
            opt_output.append(("%2s%s" %
                               (shortopt and "-%s" % shortopt,
                                longopt and " --%s%s" %
                                (longopt, numqualifier)),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))
    if multioccur:
        msg = _("\n[+] marked option can be specified multiple times")
        if ui.verbose and name != 'shortlist':
            opt_output.append((msg, None))
        else:
            opt_output.insert(-1, (msg, None))

    if not name:
        ui.write(_("\nadditional help topics:\n\n"))
        topics = []
        for names, header, doc in help.helptable:
            topics.append((sorted(names, key=len, reverse=True)[0], header))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        colwidth = encoding.colwidth
        # normalize: (opt or message, desc or None, width of opt)
        entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
                   for opt, desc in opt_output]
        hanging = max([e[2] for e in entries])
        for opt, desc, width in entries:
            if desc:
                initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
                hangindent = ' ' * (hanging + 3)
                ui.write('%s\n' % (util.wrap(desc, textwidth,
                                             initindent=initindent,
                                             hangindent=hangindent)))
            else:
                ui.write("%s\n" % opt)

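# Illustrative note (not part of the original source): for "hg help NAME" the
# lookup order above is helptopic, helpcmd, helpext, helpextcmd, so e.g.
#   $ hg help revsets    # resolved as a help topic before any command lookup
#   $ hg help diff       # falls through to helpcmd() and prints the docstring
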
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    This summary identifies the repository state using one or two
    parent hash identifiers, followed by a "+" if there are
    uncommitted changes in the working directory, a list of tags for
    this revision and a branch name for non-default branches.

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags or bookmarks)
    output = []

    revs = []
    bms = []
    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        repo = hg.repository(ui, source)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)

    if not repo.local():
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))

        remoterev = repo.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        if 'bookmarks' in repo.listkeys('namespaces'):
            hexremoterev = hex(remoterev)
            bms = [bm for bm, bmrev in repo.listkeys('bookmarks').iteritems()
                   if bmrev == hexremoterev]

    elif not rev:
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = util.any(repo.status())
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = cmdutil.revsingle(repo, rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if repo.local():
        bms = ctx.bookmarks()

    if repo.local() and default and not ui.quiet:
        b = ctx.branch()
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if default and not ui.quiet:
        # multiple bookmarks for a single parent separated by '/'
        bm = '/'.join(bms)
        if bm:
            output.append(bm)

    if branch:
        output.append(ctx.branch())

    if tags:
        output.extend(ctx.tags())

    if bookmarks:
        output.extend(bms)

    ui.write("%s\n" % ' '.join(output))

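# Minimal usage sketch (not part of commands.py; assumes hg 1.8-era in-process
# APIs and that the current directory is a working copy): identify() can be
# driven directly with a ui and a repository object, mirroring "hg identify".
def _example_identify():
    from mercurial import ui as uimod, hg
    u = uimod.ui()
    repo = hg.repository(u, '.')   # open the local repository
    identify(u, repo)              # writes e.g. "1a2b3c4d5e6f tip"
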
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    If there are outstanding changes in the working directory, import
    will abort unless given the -f/--force flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). The From and Subject headers of the
    email message are used as the default committer and commit
    message. All text/plain body parts before the first diff are added
    to the commit message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as 'addremove'.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))

    if opts.get('exact') or not opts.get('force'):
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    msgs = []

    def tryone(ui, hunk):
        tmpname, message, user, date, branch, nodeid, p1, p2 = \
            patch.extract(ui, hunk)

        if not tmpname:
            return None
        commitid = _('to working directory')

        try:
            cmdline_message = cmdutil.logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug('message:\n%s\n' % message)

            wp = repo.parents()
            if opts.get('exact'):
                if not nodeid or not p1:
                    raise util.Abort(_('not a Mercurial patch'))
                p1 = repo.lookup(p1)
                p2 = repo.lookup(p2 or hex(nullid))

                if p1 != wp[0].node():
                    hg.clean(repo, p1)
                repo.dirstate.setparents(p1, p2)
            elif p2:
                try:
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2)
                    if p1 == wp[0].node():
                        repo.dirstate.setparents(p1, p2)
                except error.RepoError:
                    pass
            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            files = {}
            try:
                patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                            files=files, eolmode=None)
            finally:
                files = cmdutil.updatedir(ui, repo, files,
                                          similarity=sim / 100.0)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact'):
                    m = None
                else:
                    m = cmdutil.matchfiles(repo, files or [])
                n = repo.commit(message, opts.get('user') or user,
                                opts.get('date') or date, match=m,
                                editor=cmdutil.commiteditor)
                if opts.get('exact'):
                    if hex(n) != nodeid:
                        repo.rollback()
                        raise util.Abort(_('patch is damaged'
                                           ' or loses information'))
                # Force a dirstate write so that the next transaction
                # backs up an up-to-date file.
                repo.dirstate.write()
                if n:
                    commitid = short(n)

            return commitid
        finally:
            os.unlink(tmpname)

    try:
        wlock = repo.wlock()
        lock = repo.lock()
        lastcommit = None
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                pf = sys.stdin
            else:
                ui.status(_("applying %s\n") % p)
                pf = url.open(ui, pf)

            haspatch = False
            for hunk in patch.split(pf):
                commitid = tryone(ui, hunk)
                if commitid:
                    haspatch = True
                    if lastcommit:
                        ui.status(_('applied %s\n') % lastcommit)
                    lastcommit = commitid

            if not haspatch:
                raise util.Abort(_('no diffs found'))

        if msgs:
            repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
    finally:
        release(lock, wlock)

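# Usage sketch (not part of commands.py; the file name is hypothetical, and
# this assumes defaults normally filled in by the command-line parser): when
# calling import_() directly, 'base' and 'strip' must be supplied because the
# body reads them with opts["base"] and opts["strip"].
def _example_import(u, repo):
    import_(u, repo, 'fix-typo.patch', base='', strip=1)
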
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull was requested at the time you issued this command.

    For a remote repository, using --bundle avoids downloading the
    changesets twice if the incoming command is followed by a pull.

    See pull for valid source format details.

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.repository(hg.remoteui(repo, opts), source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % url.hidepassword(source))
        return bookmarks.diff(ui, repo, other)

    ret = hg.incoming(ui, repo, source, opts)
    return ret

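# Command-line sketch (hypothetical URL, not part of commands.py): previewing
# with --bundle and then pulling from the saved bundle avoids transferring
# the changesets twice, as the docstring above notes.
#
#   $ hg incoming --bundle incoming.hg https://hg.example.com/repo
#   $ hg pull incoming.hg
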
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)

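# Usage sketch (not part of commands.py; the directory name is hypothetical):
# init() only needs a ui and a destination, so an in-process call is a single
# line, equivalent to "hg init newrepo".
def _example_init():
    from mercurial import ui as uimod
    init(uimod.ui(), dest='newrepo')
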
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    Returns 0 if a match is found, 1 otherwise.
    """
    end = opts.get('print0') and '\0' or '\n'
    rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()

    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
    m.bad = lambda x, y: False
    for abs in repo[rev].walk(m):
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret

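# Usage sketch (not part of commands.py): all of locate()'s options are read
# with opts.get(), so a bare in-process call lists every tracked file in the
# working directory, like "hg locate" with no patterns.
def _example_locate(u, repo):
    return locate(u, repo)        # returns 0 if anything matched, else 1
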
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision. You can specify a revision set for
    log; see :hg:`help revsets` for more information.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    .. note::
       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    Returns 0 on success.
    """

    matchfn = cmdutil.match(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    endrev = None
    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    branches = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in branches]

    displayer = cmdutil.show_changeset(ui, repo, opts, True)
    def prep(ctx, fns):
        rev = ctx.rev()
        parents = [p for p in repo.changelog.parentrevs(rev)
                   if p != nullrev]
        if opts.get('no_merges') and len(parents) == 2:
            return
        if opts.get('only_merges') and len(parents) != 2:
            return
        if opts.get('branch') and ctx.branch() not in opts['branch']:
            return
        if df and not df(ctx.date()[0]):
            return
        if opts['user'] and not [k for k in opts['user']
                                 if k.lower() in ctx.user().lower()]:
            return
        if opts.get('keyword'):
            for k in [kw.lower() for kw in opts['keyword']]:
                if (k in ctx.user().lower() or
                    k in ctx.description().lower() or
                    k in " ".join(ctx.files()).lower()):
                    break
            else:
                return

        copies = None
        if opts.get('copies') and rev:
            copies = []
            getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))

        revmatchfn = None
        if opts.get('patch') or opts.get('stat'):
            if opts.get('follow') or opts.get('follow_first'):
                # note: this might be wrong when following through merges
                revmatchfn = cmdutil.match(repo, fns, default='path')
            else:
                revmatchfn = matchfn

        displayer.show(ctx, copies=copies, matchfn=revmatchfn)

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        if count == limit:
            break
        if displayer.flush(ctx.rev()):
            count += 1
    displayer.close()

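# Illustrative sketch (not part of commands.py): the keyword filter in prep()
# leans on Python's for/else -- the else clause runs only when the loop ends
# without a break, i.e. when no keyword matched and the revision is skipped.
def _example_keyword_match(keywords, haystack):
    for k in keywords:
        if k in haystack:
            break
    else:
        return False    # no keyword matched
    return True
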
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    Returns 0 on success.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    decor = {'l':'644 @ ', 'x':'755 * ', '':'644   '}
    ctx = cmdutil.revsingle(repo, node)
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)

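# Illustrative sketch (not part of commands.py): what the 'decor' prefixes
# above mean in verbose output -- a symlink is shown as 644 with "@", an
# executable file as 755 with "*", and a regular file as plain 644.
_EXAMPLE_MANIFEST_DECOR = {
    'l': 'symlink',
    'x': 'executable',
    '':  'regular file',
}
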
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_(
                'branch \'%s\' has %d heads - '
                'please merge with an explicit rev\n'
                '(run \'hg heads .\' to see heads)')
                % (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_(
                    'branch \'%s\' has one head - '
                    'please merge with an explicit rev\n'
                    '(run \'hg heads\' to see all heads)')
                    % branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    else:
        node = cmdutil.revsingle(repo, node).node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        ui.setconfig('ui', 'forcemerge', '')

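# Illustrative sketch (not part of commands.py): the "parent == bheads[0] and
# bheads[-1] or bheads[0]" expression above is the and/or spelling of a
# conditional expression, kept for compatibility with older Pythons; it simply
# picks whichever branch head is not the working directory parent.
def _example_other_head(parent, bheads):
    return bheads[-1] if parent == bheads[0] else bheads[0]
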
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """

    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.repository(hg.remoteui(repo, opts), dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % url.hidepassword(dest))
        return bookmarks.diff(ui, other, repo)

    ret = hg.outgoing(ui, repo, dest, opts)
    return ret

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    Returns 0 on success.
    """

    ctx = cmdutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % url.hidepassword(path))
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, url.hidepassword(path)))

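# Configuration sketch (hypothetical aliases and URLs, not part of
# commands.py): the entries reported by this command come from the [paths]
# section of an hgrc, e.g.
#
#   [paths]
#   default      = https://hg.example.com/project
#   default-push = ssh://hg.example.com//srv/hg/project
#
# "hg paths default" then prints just the first URL; with no argument both
# lines are listed as "name = url".
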
def postincoming(ui, repo, modheads, optupdate, checkout):
    if modheads == 0:
        return
    if optupdate:
        if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
            return hg.update(repo, checkout)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.repository(hg.remoteui(repo, opts), source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))

    if opts.get('bookmark'):
        if not revs:
            revs = []
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            if b not in rb:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            revs.append(rb[b])

    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    bookmarks.updatefromremote(ui, repo, other)
    if checkout:
        checkout = str(repo.changelog.rev(other.lookup(checkout)))
    repo._subtoppath = source
    try:
        ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

    finally:
        del repo._subtoppath

    # update specified bookmarks
    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            repo._bookmarks[b] = repo[rb[b]].node()
        bookmarks.write(repo)

    return ret

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    Use -f/--force to override the default behavior and push all
    changesets on all branches.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.repository(hg.remoteui(repo, opts), dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if not c.sub(s).push(opts.get('force')):
                return False
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    result = (result == 0)

    if opts.get('bookmark'):
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            # explicit push overrides remote bookmark if any
            if b in repo._bookmarks:
                ui.status(_("exporting bookmark %s\n") % b)
                new = repo[b].hex()
            elif b in rb:
                ui.status(_("deleting remote bookmark %s\n") % b)
                new = '' # delete
            else:
                ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % b)
                return 2
            old = rb.get(b, '')
            r = other.pushkey('bookmarks', b, old, new)
            if not r:
                ui.warn(_('updating bookmark %s failed!\n') % b)
                if not result:
                    result = 2

    return result

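# Illustrative sketch (not part of commands.py): how -B/--bookmark is folded
# into -r/--rev at the top of push() -- a bookmark that still exists locally
# pushes its changeset, while a locally deleted one is translated to "null"
# so the remote bookmark can still be removed.
def _example_bookmarks_to_revs(bookmarks, local_bookmarks):
    revs = []
    for b in bookmarks:
        revs.append(b if b in local_bookmarks else "null")
    return revs
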
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. -A/--after can be used to remove only
    files that have already been deleted, -f/--force can be used to
    force deletion, and -Af can be used to remove files from the next
    revision without deleting them from the working directory.

    The following table details the behavior of remove for different
    file states (columns) and option combinations (rows). The file
    states are Added [A], Clean [C], Modified [M] and Missing [!] (as
    reported by :hg:`status`). The actions are Warn, Remove (from
    branch) and Delete (from disk)::

               A  C  M  !
        none   W  RD W  R
        -f     R  RD RD R
        -A     W  W  W  R
        -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`.

    Returns 0 on success, 1 if any warnings encountered.
    """

    ret = 0
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
            ret = 1

    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
    else:
        remove, forget = deleted + clean, []
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo[None].forget(forget)
    repo[None].remove(remove, unlink=not after)
    return ret

3242 def rename(ui, repo, *pats, **opts):
3265 def rename(ui, repo, *pats, **opts):
3243 """rename files; equivalent of copy + remove
3266 """rename files; equivalent of copy + remove
3244
3267
3245 Mark dest as copies of sources; mark sources for deletion. If dest
3268 Mark dest as copies of sources; mark sources for deletion. If dest
3246 is a directory, copies are put in that directory. If dest is a
3269 is a directory, copies are put in that directory. If dest is a
3247 file, there can only be one source.
3270 file, there can only be one source.
3248
3271
3249 By default, this command copies the contents of files as they
3272 By default, this command copies the contents of files as they
3250 exist in the working directory. If invoked with -A/--after, the
3273 exist in the working directory. If invoked with -A/--after, the
3251 operation is recorded, but no copying is performed.
3274 operation is recorded, but no copying is performed.
3252
3275
3253 This command takes effect at the next commit. To undo a rename
3276 This command takes effect at the next commit. To undo a rename
3254 before that, see :hg:`revert`.
3277 before that, see :hg:`revert`.
3255
3278
3256 Returns 0 on success, 1 if errors are encountered.
3279 Returns 0 on success, 1 if errors are encountered.
3257 """
3280 """
3258 wlock = repo.wlock(False)
3281 wlock = repo.wlock(False)
3259 try:
3282 try:
3260 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3283 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3261 finally:
3284 finally:
3262 wlock.release()
3285 wlock.release()
3263
3286
3264 def resolve(ui, repo, *pats, **opts):
3287 def resolve(ui, repo, *pats, **opts):
3265 """redo merges or set/view the merge status of files
3288 """redo merges or set/view the merge status of files
3266
3289
3267 Merges with unresolved conflicts are often the result of
3290 Merges with unresolved conflicts are often the result of
3268 non-interactive merging using the ``internal:merge`` configuration
3291 non-interactive merging using the ``internal:merge`` configuration
3269 setting, or a command-line merge tool like ``diff3``. The resolve
3292 setting, or a command-line merge tool like ``diff3``. The resolve
3270 command is used to manage the files involved in a merge, after
3293 command is used to manage the files involved in a merge, after
3271 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3294 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3272 working directory must have two parents).
3295 working directory must have two parents).
3273
3296
3274 The resolve command can be used in the following ways:
3297 The resolve command can be used in the following ways:
3275
3298
3276 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3299 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3277 files, discarding any previous merge attempts. Re-merging is not
3300 files, discarding any previous merge attempts. Re-merging is not
3278 performed for files already marked as resolved. Use ``--all/-a``
3301 performed for files already marked as resolved. Use ``--all/-a``
3279 to selects all unresolved files. ``--tool`` can be used to specify
3302 to selects all unresolved files. ``--tool`` can be used to specify
3280 the merge tool used for the given files. It overrides the HGMERGE
3303 the merge tool used for the given files. It overrides the HGMERGE
3281 environment variable and your configuration files.
3304 environment variable and your configuration files.
3282
3305
3283 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3306 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3284 (e.g. after having manually fixed-up the files). The default is
3307 (e.g. after having manually fixed-up the files). The default is
3285 to mark all unresolved files.
3308 to mark all unresolved files.
3286
3309
3287 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3310 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3288 default is to mark all resolved files.
3311 default is to mark all resolved files.
3289
3312
3290 - :hg:`resolve -l`: list files which had or still have conflicts.
3313 - :hg:`resolve -l`: list files which had or still have conflicts.
3291 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3314 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3292
3315
3293 Note that Mercurial will not let you commit files with unresolved
3316 Note that Mercurial will not let you commit files with unresolved
3294 merge conflicts. You must use :hg:`resolve -m ...` before you can
3317 merge conflicts. You must use :hg:`resolve -m ...` before you can
3295 commit after a conflicting merge.
3318 commit after a conflicting merge.
3296
3319
3297 Returns 0 on success, 1 if any files fail a resolve attempt.
3320 Returns 0 on success, 1 if any files fail a resolve attempt.
3298 """
3321 """
3299
3322
3300 all, mark, unmark, show, nostatus = \
3323 all, mark, unmark, show, nostatus = \
3301 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3324 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3302
3325
3303 if (show and (mark or unmark)) or (mark and unmark):
3326 if (show and (mark or unmark)) or (mark and unmark):
3304 raise util.Abort(_("too many options specified"))
3327 raise util.Abort(_("too many options specified"))
3305 if pats and all:
3328 if pats and all:
3306 raise util.Abort(_("can't specify --all and patterns"))
3329 raise util.Abort(_("can't specify --all and patterns"))
3307 if not (all or pats or show or mark or unmark):
3330 if not (all or pats or show or mark or unmark):
3308 raise util.Abort(_('no files or directories specified; '
3331 raise util.Abort(_('no files or directories specified; '
3309 'use --all to remerge all files'))
3332 'use --all to remerge all files'))
3310
3333
3311 ms = mergemod.mergestate(repo)
3334 ms = mergemod.mergestate(repo)
3312 m = cmdutil.match(repo, pats, opts)
3335 m = cmdutil.match(repo, pats, opts)
3313 ret = 0
3336 ret = 0
3314
3337
3315 for f in ms:
3338 for f in ms:
3316 if m(f):
3339 if m(f):
3317 if show:
3340 if show:
3318 if nostatus:
3341 if nostatus:
3319 ui.write("%s\n" % f)
3342 ui.write("%s\n" % f)
3320 else:
3343 else:
3321 ui.write("%s %s\n" % (ms[f].upper(), f),
3344 ui.write("%s %s\n" % (ms[f].upper(), f),
3322 label='resolve.' +
3345 label='resolve.' +
3323 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3346 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3324 elif mark:
3347 elif mark:
3325 ms.mark(f, "r")
3348 ms.mark(f, "r")
3326 elif unmark:
3349 elif unmark:
3327 ms.mark(f, "u")
3350 ms.mark(f, "u")
3328 else:
3351 else:
3329 wctx = repo[None]
3352 wctx = repo[None]
3330 mctx = wctx.parents()[-1]
3353 mctx = wctx.parents()[-1]
3331
3354
3332 # backup pre-resolve (merge uses .orig for its own purposes)
3355 # backup pre-resolve (merge uses .orig for its own purposes)
3333 a = repo.wjoin(f)
3356 a = repo.wjoin(f)
3334 util.copyfile(a, a + ".resolve")
3357 util.copyfile(a, a + ".resolve")
3335
3358
3336 try:
3359 try:
3337 # resolve file
3360 # resolve file
3338 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3361 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3339 if ms.resolve(f, wctx, mctx):
3362 if ms.resolve(f, wctx, mctx):
3340 ret = 1
3363 ret = 1
3341 finally:
3364 finally:
3342 ui.setconfig('ui', 'forcemerge', '')
3365 ui.setconfig('ui', 'forcemerge', '')
3343
3366
3344 # replace filemerge's .orig file with our resolve file
3367 # replace filemerge's .orig file with our resolve file
3345 util.rename(a + ".resolve", a + ".orig")
3368 util.rename(a + ".resolve", a + ".orig")
3346
3369
3347 ms.commit()
3370 ms.commit()
3348 return ret
3371 return ret
3349
3372
3350 def revert(ui, repo, *pats, **opts):
3373 def revert(ui, repo, *pats, **opts):
3351 """restore individual files or directories to an earlier state
3374 """restore individual files or directories to an earlier state
3352
3375
3353 .. note::
3376 .. note::
3354 This command is most likely not what you are looking for.
3377 This command is most likely not what you are looking for.
3355 Revert will partially overwrite content in the working
3378 Revert will partially overwrite content in the working
3356 directory without changing the working directory parents. Use
3379 directory without changing the working directory parents. Use
3357 :hg:`update -r rev` to check out earlier revisions, or
3380 :hg:`update -r rev` to check out earlier revisions, or
3358 :hg:`update --clean .` to undo a merge which has added another
3381 :hg:`update --clean .` to undo a merge which has added another
3359 parent.
3382 parent.
3360
3383
3361 With no revision specified, revert the named files or directories
3384 With no revision specified, revert the named files or directories
3362 to the contents they had in the parent of the working directory.
3385 to the contents they had in the parent of the working directory.
3363 This restores the contents of the affected files to an unmodified
3386 This restores the contents of the affected files to an unmodified
3364 state and unschedules adds, removes, copies, and renames. If the
3387 state and unschedules adds, removes, copies, and renames. If the
3365 working directory has two parents, you must explicitly specify a
3388 working directory has two parents, you must explicitly specify a
3366 revision.
3389 revision.
3367
3390
3368 Using the -r/--rev option, revert the given files or directories
3391 Using the -r/--rev option, revert the given files or directories
3369 to their contents as of a specific revision. This can be helpful
3392 to their contents as of a specific revision. This can be helpful
3370 to "roll back" some or all of an earlier change. See :hg:`help
3393 to "roll back" some or all of an earlier change. See :hg:`help
3371 dates` for a list of formats valid for -d/--date.
3394 dates` for a list of formats valid for -d/--date.
3372
3395
3373 Revert modifies the working directory. It does not commit any
3396 Revert modifies the working directory. It does not commit any
3374 changes, or change the parent of the working directory. If you
3397 changes, or change the parent of the working directory. If you
3375 revert to a revision other than the parent of the working
3398 revert to a revision other than the parent of the working
3376 directory, the reverted files will thus appear modified
3399 directory, the reverted files will thus appear modified
3377 afterwards.
3400 afterwards.
3378
3401
3379 If a file has been deleted, it is restored. If the executable mode
3402 If a file has been deleted, it is restored. If the executable mode
3380 of a file was changed, it is reset.
3403 of a file was changed, it is reset.
3381
3404
3382 If names are given, all files matching the names are reverted.
3405 If names are given, all files matching the names are reverted.
3383 If no arguments are given, no files are reverted.
3406 If no arguments are given, no files are reverted.
3384
3407
3385 Modified files are saved with a .orig suffix before reverting.
3408 Modified files are saved with a .orig suffix before reverting.
3386 To disable these backups, use --no-backup.
3409 To disable these backups, use --no-backup.
3387
3410
3388 Returns 0 on success.
3411 Returns 0 on success.
3389 """
3412 """
3390
3413
3391 if opts.get("date"):
3414 if opts.get("date"):
3392 if opts.get("rev"):
3415 if opts.get("rev"):
3393 raise util.Abort(_("you can't specify a revision and a date"))
3416 raise util.Abort(_("you can't specify a revision and a date"))
3394 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3417 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3395
3418
3396 parent, p2 = repo.dirstate.parents()
3419 parent, p2 = repo.dirstate.parents()
3397 if not opts.get('rev') and p2 != nullid:
3420 if not opts.get('rev') and p2 != nullid:
3398 raise util.Abort(_('uncommitted merge - '
3421 raise util.Abort(_('uncommitted merge - '
3399 'use "hg update", see "hg help revert"'))
3422 'use "hg update", see "hg help revert"'))
3400
3423
3401 if not pats and not opts.get('all'):
3424 if not pats and not opts.get('all'):
3402 raise util.Abort(_('no files or directories specified; '
3425 raise util.Abort(_('no files or directories specified; '
3403 'use --all to revert the whole repo'))
3426 'use --all to revert the whole repo'))
3404
3427
3405 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3428 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3406 node = ctx.node()
3429 node = ctx.node()
3407 mf = ctx.manifest()
3430 mf = ctx.manifest()
3408 if node == parent:
3431 if node == parent:
3409 pmf = mf
3432 pmf = mf
3410 else:
3433 else:
3411 pmf = None
3434 pmf = None
3412
3435
3413 # need all matching names in dirstate and manifest of target rev,
3436 # need all matching names in dirstate and manifest of target rev,
3414 # so have to walk both. do not print errors if files exist in one
3437 # so have to walk both. do not print errors if files exist in one
3415 # but not other.
3438 # but not other.
3416
3439
3417 names = {}
3440 names = {}
3418
3441
3419 wlock = repo.wlock()
3442 wlock = repo.wlock()
3420 try:
3443 try:
3421 # walk dirstate.
3444 # walk dirstate.
3422
3445
3423 m = cmdutil.match(repo, pats, opts)
3446 m = cmdutil.match(repo, pats, opts)
3424 m.bad = lambda x, y: False
3447 m.bad = lambda x, y: False
3425 for abs in repo.walk(m):
3448 for abs in repo.walk(m):
3426 names[abs] = m.rel(abs), m.exact(abs)
3449 names[abs] = m.rel(abs), m.exact(abs)
3427
3450
3428 # walk target manifest.
3451 # walk target manifest.
3429
3452
3430 def badfn(path, msg):
3453 def badfn(path, msg):
3431 if path in names:
3454 if path in names:
3432 return
3455 return
3433 path_ = path + '/'
3456 path_ = path + '/'
3434 for f in names:
3457 for f in names:
3435 if f.startswith(path_):
3458 if f.startswith(path_):
3436 return
3459 return
3437 ui.warn("%s: %s\n" % (m.rel(path), msg))
3460 ui.warn("%s: %s\n" % (m.rel(path), msg))
3438
3461
3439 m = cmdutil.match(repo, pats, opts)
3462 m = cmdutil.match(repo, pats, opts)
3440 m.bad = badfn
3463 m.bad = badfn
3441 for abs in repo[node].walk(m):
3464 for abs in repo[node].walk(m):
3442 if abs not in names:
3465 if abs not in names:
3443 names[abs] = m.rel(abs), m.exact(abs)
3466 names[abs] = m.rel(abs), m.exact(abs)
3444
3467
3445 m = cmdutil.matchfiles(repo, names)
3468 m = cmdutil.matchfiles(repo, names)
3446 changes = repo.status(match=m)[:4]
3469 changes = repo.status(match=m)[:4]
3447 modified, added, removed, deleted = map(set, changes)
3470 modified, added, removed, deleted = map(set, changes)
3448
3471
3449 # if f is a rename, also revert the source
3472 # if f is a rename, also revert the source
3450 cwd = repo.getcwd()
3473 cwd = repo.getcwd()
3451 for f in added:
3474 for f in added:
3452 src = repo.dirstate.copied(f)
3475 src = repo.dirstate.copied(f)
3453 if src and src not in names and repo.dirstate[src] == 'r':
3476 if src and src not in names and repo.dirstate[src] == 'r':
3454 removed.add(src)
3477 removed.add(src)
3455 names[src] = (repo.pathto(src, cwd), True)
3478 names[src] = (repo.pathto(src, cwd), True)
3456
3479
3457 def removeforget(abs):
3480 def removeforget(abs):
3458 if repo.dirstate[abs] == 'a':
3481 if repo.dirstate[abs] == 'a':
3459 return _('forgetting %s\n')
3482 return _('forgetting %s\n')
3460 return _('removing %s\n')
3483 return _('removing %s\n')
3461
3484
3462 revert = ([], _('reverting %s\n'))
3485 revert = ([], _('reverting %s\n'))
3463 add = ([], _('adding %s\n'))
3486 add = ([], _('adding %s\n'))
3464 remove = ([], removeforget)
3487 remove = ([], removeforget)
3465 undelete = ([], _('undeleting %s\n'))
3488 undelete = ([], _('undeleting %s\n'))
3466
3489
3467 disptable = (
3490 disptable = (
3468 # dispatch table:
3491 # dispatch table:
3469 # file state
3492 # file state
3470 # action if in target manifest
3493 # action if in target manifest
3471 # action if not in target manifest
3494 # action if not in target manifest
3472 # make backup if in target manifest
3495 # make backup if in target manifest
3473 # make backup if not in target manifest
3496 # make backup if not in target manifest
3474 (modified, revert, remove, True, True),
3497 (modified, revert, remove, True, True),
3475 (added, revert, remove, True, False),
3498 (added, revert, remove, True, False),
3476 (removed, undelete, None, False, False),
3499 (removed, undelete, None, False, False),
3477 (deleted, revert, remove, False, False),
3500 (deleted, revert, remove, False, False),
3478 )
3501 )
3479
3502
3480 for abs, (rel, exact) in sorted(names.items()):
3503 for abs, (rel, exact) in sorted(names.items()):
3481 mfentry = mf.get(abs)
3504 mfentry = mf.get(abs)
3482 target = repo.wjoin(abs)
3505 target = repo.wjoin(abs)
3483 def handle(xlist, dobackup):
3506 def handle(xlist, dobackup):
3484 xlist[0].append(abs)
3507 xlist[0].append(abs)
3485 if (dobackup and not opts.get('no_backup') and
3508 if (dobackup and not opts.get('no_backup') and
3486 os.path.lexists(target)):
3509 os.path.lexists(target)):
3487 bakname = "%s.orig" % rel
3510 bakname = "%s.orig" % rel
3488 ui.note(_('saving current version of %s as %s\n') %
3511 ui.note(_('saving current version of %s as %s\n') %
3489 (rel, bakname))
3512 (rel, bakname))
3490 if not opts.get('dry_run'):
3513 if not opts.get('dry_run'):
3491 util.rename(target, bakname)
3514 util.rename(target, bakname)
3492 if ui.verbose or not exact:
3515 if ui.verbose or not exact:
3493 msg = xlist[1]
3516 msg = xlist[1]
3494 if not isinstance(msg, basestring):
3517 if not isinstance(msg, basestring):
3495 msg = msg(abs)
3518 msg = msg(abs)
3496 ui.status(msg % rel)
3519 ui.status(msg % rel)
3497 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3520 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3498 if abs not in table:
3521 if abs not in table:
3499 continue
3522 continue
3500 # file has changed in dirstate
3523 # file has changed in dirstate
3501 if mfentry:
3524 if mfentry:
3502 handle(hitlist, backuphit)
3525 handle(hitlist, backuphit)
3503 elif misslist is not None:
3526 elif misslist is not None:
3504 handle(misslist, backupmiss)
3527 handle(misslist, backupmiss)
3505 break
3528 break
3506 else:
3529 else:
3507 if abs not in repo.dirstate:
3530 if abs not in repo.dirstate:
3508 if mfentry:
3531 if mfentry:
3509 handle(add, True)
3532 handle(add, True)
3510 elif exact:
3533 elif exact:
3511 ui.warn(_('file not managed: %s\n') % rel)
3534 ui.warn(_('file not managed: %s\n') % rel)
3512 continue
3535 continue
3513 # file has not changed in dirstate
3536 # file has not changed in dirstate
3514 if node == parent:
3537 if node == parent:
3515 if exact:
3538 if exact:
3516 ui.warn(_('no changes needed to %s\n') % rel)
3539 ui.warn(_('no changes needed to %s\n') % rel)
3517 continue
3540 continue
3518 if pmf is None:
3541 if pmf is None:
3519 # only need parent manifest in this unlikely case,
3542 # only need parent manifest in this unlikely case,
3520 # so do not read by default
3543 # so do not read by default
3521 pmf = repo[parent].manifest()
3544 pmf = repo[parent].manifest()
3522 if abs in pmf:
3545 if abs in pmf:
3523 if mfentry:
3546 if mfentry:
3524 # if version of file is same in parent and target
3547 # if version of file is same in parent and target
3525 # manifests, do nothing
3548 # manifests, do nothing
3526 if (pmf[abs] != mfentry or
3549 if (pmf[abs] != mfentry or
3527 pmf.flags(abs) != mf.flags(abs)):
3550 pmf.flags(abs) != mf.flags(abs)):
3528 handle(revert, False)
3551 handle(revert, False)
3529 else:
3552 else:
3530 handle(remove, False)
3553 handle(remove, False)
3531
3554
3532 if not opts.get('dry_run'):
3555 if not opts.get('dry_run'):
3533 def checkout(f):
3556 def checkout(f):
3534 fc = ctx[f]
3557 fc = ctx[f]
3535 repo.wwrite(f, fc.data(), fc.flags())
3558 repo.wwrite(f, fc.data(), fc.flags())
3536
3559
3537 audit_path = util.path_auditor(repo.root)
3560 audit_path = util.path_auditor(repo.root)
3538 for f in remove[0]:
3561 for f in remove[0]:
3539 if repo.dirstate[f] == 'a':
3562 if repo.dirstate[f] == 'a':
3540 repo.dirstate.forget(f)
3563 repo.dirstate.forget(f)
3541 continue
3564 continue
3542 audit_path(f)
3565 audit_path(f)
3543 try:
3566 try:
3544 util.unlinkpath(repo.wjoin(f))
3567 util.unlinkpath(repo.wjoin(f))
3545 except OSError:
3568 except OSError:
3546 pass
3569 pass
3547 repo.dirstate.remove(f)
3570 repo.dirstate.remove(f)
3548
3571
3549 normal = None
3572 normal = None
3550 if node == parent:
3573 if node == parent:
3551 # We're reverting to our parent. If possible, we'd like status
3574 # We're reverting to our parent. If possible, we'd like status
3552 # to report the file as clean. We have to use normallookup for
3575 # to report the file as clean. We have to use normallookup for
3553 # merges to avoid losing information about merged/dirty files.
3576 # merges to avoid losing information about merged/dirty files.
3554 if p2 != nullid:
3577 if p2 != nullid:
3555 normal = repo.dirstate.normallookup
3578 normal = repo.dirstate.normallookup
3556 else:
3579 else:
3557 normal = repo.dirstate.normal
3580 normal = repo.dirstate.normal
3558 for f in revert[0]:
3581 for f in revert[0]:
3559 checkout(f)
3582 checkout(f)
3560 if normal:
3583 if normal:
3561 normal(f)
3584 normal(f)
3562
3585
3563 for f in add[0]:
3586 for f in add[0]:
3564 checkout(f)
3587 checkout(f)
3565 repo.dirstate.add(f)
3588 repo.dirstate.add(f)
3566
3589
3567 normal = repo.dirstate.normallookup
3590 normal = repo.dirstate.normallookup
3568 if node == parent and p2 == nullid:
3591 if node == parent and p2 == nullid:
3569 normal = repo.dirstate.normal
3592 normal = repo.dirstate.normal
3570 for f in undelete[0]:
3593 for f in undelete[0]:
3571 checkout(f)
3594 checkout(f)
3572 normal(f)
3595 normal(f)
3573
3596
3574 finally:
3597 finally:
3575 wlock.release()
3598 wlock.release()
3576
3599
3577 def rollback(ui, repo, **opts):
3600 def rollback(ui, repo, **opts):
3578 """roll back the last transaction (dangerous)
3601 """roll back the last transaction (dangerous)
3579
3602
3580 This command should be used with care. There is only one level of
3603 This command should be used with care. There is only one level of
3581 rollback, and there is no way to undo a rollback. It will also
3604 rollback, and there is no way to undo a rollback. It will also
3582 restore the dirstate at the time of the last transaction, losing
3605 restore the dirstate at the time of the last transaction, losing
3583 any dirstate changes since that time. This command does not alter
3606 any dirstate changes since that time. This command does not alter
3584 the working directory.
3607 the working directory.
3585
3608
3586 Transactions are used to encapsulate the effects of all commands
3609 Transactions are used to encapsulate the effects of all commands
3587 that create new changesets or propagate existing changesets into a
3610 that create new changesets or propagate existing changesets into a
3588 repository. For example, the following commands are transactional,
3611 repository. For example, the following commands are transactional,
3589 and their effects can be rolled back:
3612 and their effects can be rolled back:
3590
3613
3591 - commit
3614 - commit
3592 - import
3615 - import
3593 - pull
3616 - pull
3594 - push (with this repository as the destination)
3617 - push (with this repository as the destination)
3595 - unbundle
3618 - unbundle
3596
3619
3597 This command is not intended for use on public repositories. Once
3620 This command is not intended for use on public repositories. Once
3598 changes are visible for pull by other users, rolling a transaction
3621 changes are visible for pull by other users, rolling a transaction
3599 back locally is ineffective (someone else may already have pulled
3622 back locally is ineffective (someone else may already have pulled
3600 the changes). Furthermore, a race is possible with readers of the
3623 the changes). Furthermore, a race is possible with readers of the
3601 repository; for example an in-progress pull from the repository
3624 repository; for example an in-progress pull from the repository
3602 may fail if a rollback is performed.
3625 may fail if a rollback is performed.
3603
3626
3604 Returns 0 on success, 1 if no rollback data is available.
3627 Returns 0 on success, 1 if no rollback data is available.
3605 """
3628 """
3606 return repo.rollback(opts.get('dry_run'))
3629 return repo.rollback(opts.get('dry_run'))
3607
3630
3608 def root(ui, repo):
3631 def root(ui, repo):
3609 """print the root (top) of the current working directory
3632 """print the root (top) of the current working directory
3610
3633
3611 Print the root directory of the current repository.
3634 Print the root directory of the current repository.
3612
3635
3613 Returns 0 on success.
3636 Returns 0 on success.
3614 """
3637 """
3615 ui.write(repo.root + "\n")
3638 ui.write(repo.root + "\n")
3616
3639
3617 def serve(ui, repo, **opts):
3640 def serve(ui, repo, **opts):
3618 """start stand-alone webserver
3641 """start stand-alone webserver
3619
3642
3620 Start a local HTTP repository browser and pull server. You can use
3643 Start a local HTTP repository browser and pull server. You can use
3621 this for ad-hoc sharing and browsing of repositories. It is
3644 this for ad-hoc sharing and browsing of repositories. It is
3622 recommended to use a real web server to serve a repository for
3645 recommended to use a real web server to serve a repository for
3623 longer periods of time.
3646 longer periods of time.
3624
3647
3625 Please note that the server does not implement access control.
3648 Please note that the server does not implement access control.
3626 This means that, by default, anybody can read from the server and
3649 This means that, by default, anybody can read from the server and
3627 nobody can write to it by default. Set the ``web.allow_push``
3650 nobody can write to it by default. Set the ``web.allow_push``
3628 option to ``*`` to allow everybody to push to the server. You
3651 option to ``*`` to allow everybody to push to the server. You
3629 should use a real web server if you need to authenticate users.
3652 should use a real web server if you need to authenticate users.
3630
3653
3631 By default, the server logs accesses to stdout and errors to
3654 By default, the server logs accesses to stdout and errors to
3632 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3655 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3633 files.
3656 files.
3634
3657
3635 To have the server choose a free port number to listen on, specify
3658 To have the server choose a free port number to listen on, specify
3636 a port number of 0; in this case, the server will print the port
3659 a port number of 0; in this case, the server will print the port
3637 number it uses.
3660 number it uses.
3638
3661
3639 Returns 0 on success.
3662 Returns 0 on success.
3640 """
3663 """
3641
3664
3642 if opts["stdio"]:
3665 if opts["stdio"]:
3643 if repo is None:
3666 if repo is None:
3644 raise error.RepoError(_("There is no Mercurial repository here"
3667 raise error.RepoError(_("There is no Mercurial repository here"
3645 " (.hg not found)"))
3668 " (.hg not found)"))
3646 s = sshserver.sshserver(ui, repo)
3669 s = sshserver.sshserver(ui, repo)
3647 s.serve_forever()
3670 s.serve_forever()
3648
3671
3649 # this way we can check if something was given in the command-line
3672 # this way we can check if something was given in the command-line
3650 if opts.get('port'):
3673 if opts.get('port'):
3651 opts['port'] = util.getport(opts.get('port'))
3674 opts['port'] = util.getport(opts.get('port'))
3652
3675
3653 baseui = repo and repo.baseui or ui
3676 baseui = repo and repo.baseui or ui
3654 optlist = ("name templates style address port prefix ipv6"
3677 optlist = ("name templates style address port prefix ipv6"
3655 " accesslog errorlog certificate encoding")
3678 " accesslog errorlog certificate encoding")
3656 for o in optlist.split():
3679 for o in optlist.split():
3657 val = opts.get(o, '')
3680 val = opts.get(o, '')
3658 if val in (None, ''): # should check against default options instead
3681 if val in (None, ''): # should check against default options instead
3659 continue
3682 continue
3660 baseui.setconfig("web", o, val)
3683 baseui.setconfig("web", o, val)
3661 if repo and repo.ui != baseui:
3684 if repo and repo.ui != baseui:
3662 repo.ui.setconfig("web", o, val)
3685 repo.ui.setconfig("web", o, val)
3663
3686
3664 o = opts.get('web_conf') or opts.get('webdir_conf')
3687 o = opts.get('web_conf') or opts.get('webdir_conf')
3665 if not o:
3688 if not o:
3666 if not repo:
3689 if not repo:
3667 raise error.RepoError(_("There is no Mercurial repository"
3690 raise error.RepoError(_("There is no Mercurial repository"
3668 " here (.hg not found)"))
3691 " here (.hg not found)"))
3669 o = repo.root
3692 o = repo.root
3670
3693
3671 app = hgweb.hgweb(o, baseui=ui)
3694 app = hgweb.hgweb(o, baseui=ui)
3672
3695
3673 class service(object):
3696 class service(object):
3674 def init(self):
3697 def init(self):
3675 util.set_signal_handler()
3698 util.set_signal_handler()
3676 self.httpd = hgweb.server.create_server(ui, app)
3699 self.httpd = hgweb.server.create_server(ui, app)
3677
3700
3678 if opts['port'] and not ui.verbose:
3701 if opts['port'] and not ui.verbose:
3679 return
3702 return
3680
3703
3681 if self.httpd.prefix:
3704 if self.httpd.prefix:
3682 prefix = self.httpd.prefix.strip('/') + '/'
3705 prefix = self.httpd.prefix.strip('/') + '/'
3683 else:
3706 else:
3684 prefix = ''
3707 prefix = ''
3685
3708
3686 port = ':%d' % self.httpd.port
3709 port = ':%d' % self.httpd.port
3687 if port == ':80':
3710 if port == ':80':
3688 port = ''
3711 port = ''
3689
3712
3690 bindaddr = self.httpd.addr
3713 bindaddr = self.httpd.addr
3691 if bindaddr == '0.0.0.0':
3714 if bindaddr == '0.0.0.0':
3692 bindaddr = '*'
3715 bindaddr = '*'
3693 elif ':' in bindaddr: # IPv6
3716 elif ':' in bindaddr: # IPv6
3694 bindaddr = '[%s]' % bindaddr
3717 bindaddr = '[%s]' % bindaddr
3695
3718
3696 fqaddr = self.httpd.fqaddr
3719 fqaddr = self.httpd.fqaddr
3697 if ':' in fqaddr:
3720 if ':' in fqaddr:
3698 fqaddr = '[%s]' % fqaddr
3721 fqaddr = '[%s]' % fqaddr
3699 if opts['port']:
3722 if opts['port']:
3700 write = ui.status
3723 write = ui.status
3701 else:
3724 else:
3702 write = ui.write
3725 write = ui.write
3703 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3726 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3704 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3727 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3705
3728
3706 def run(self):
3729 def run(self):
3707 self.httpd.serve_forever()
3730 self.httpd.serve_forever()
3708
3731
3709 service = service()
3732 service = service()
3710
3733
3711 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3734 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3712
3735
3713 def status(ui, repo, *pats, **opts):
3736 def status(ui, repo, *pats, **opts):
3714 """show changed files in the working directory
3737 """show changed files in the working directory
3715
3738
3716 Show status of files in the repository. If names are given, only
3739 Show status of files in the repository. If names are given, only
3717 files that match are shown. Files that are clean or ignored or
3740 files that match are shown. Files that are clean or ignored or
3718 the source of a copy/move operation, are not listed unless
3741 the source of a copy/move operation, are not listed unless
3719 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3742 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3720 Unless options described with "show only ..." are given, the
3743 Unless options described with "show only ..." are given, the
3721 options -mardu are used.
3744 options -mardu are used.
3722
3745
3723 Option -q/--quiet hides untracked (unknown and ignored) files
3746 Option -q/--quiet hides untracked (unknown and ignored) files
3724 unless explicitly requested with -u/--unknown or -i/--ignored.
3747 unless explicitly requested with -u/--unknown or -i/--ignored.
3725
3748
3726 .. note::
3749 .. note::
3727 status may appear to disagree with diff if permissions have
3750 status may appear to disagree with diff if permissions have
3728 changed or a merge has occurred. The standard diff format does
3751 changed or a merge has occurred. The standard diff format does
3729 not report permission changes and diff only reports changes
3752 not report permission changes and diff only reports changes
3730 relative to one merge parent.
3753 relative to one merge parent.
3731
3754
3732 If one revision is given, it is used as the base revision.
3755 If one revision is given, it is used as the base revision.
3733 If two revisions are given, the differences between them are
3756 If two revisions are given, the differences between them are
3734 shown. The --change option can also be used as a shortcut to list
3757 shown. The --change option can also be used as a shortcut to list
3735 the changed files of a revision from its first parent.
3758 the changed files of a revision from its first parent.
3736
3759
3737 The codes used to show the status of files are::
3760 The codes used to show the status of files are::
3738
3761
3739 M = modified
3762 M = modified
3740 A = added
3763 A = added
3741 R = removed
3764 R = removed
3742 C = clean
3765 C = clean
3743 ! = missing (deleted by non-hg command, but still tracked)
3766 ! = missing (deleted by non-hg command, but still tracked)
3744 ? = not tracked
3767 ? = not tracked
3745 I = ignored
3768 I = ignored
3746 = origin of the previous file listed as A (added)
3769 = origin of the previous file listed as A (added)
3747
3770
3748 Returns 0 on success.
3771 Returns 0 on success.
3749 """
3772 """
3750
3773
3751 revs = opts.get('rev')
3774 revs = opts.get('rev')
3752 change = opts.get('change')
3775 change = opts.get('change')
3753
3776
3754 if revs and change:
3777 if revs and change:
3755 msg = _('cannot specify --rev and --change at the same time')
3778 msg = _('cannot specify --rev and --change at the same time')
3756 raise util.Abort(msg)
3779 raise util.Abort(msg)
3757 elif change:
3780 elif change:
3758 node2 = repo.lookup(change)
3781 node2 = repo.lookup(change)
3759 node1 = repo[node2].parents()[0].node()
3782 node1 = repo[node2].parents()[0].node()
3760 else:
3783 else:
3761 node1, node2 = cmdutil.revpair(repo, revs)
3784 node1, node2 = cmdutil.revpair(repo, revs)
3762
3785
3763 cwd = (pats and repo.getcwd()) or ''
3786 cwd = (pats and repo.getcwd()) or ''
3764 end = opts.get('print0') and '\0' or '\n'
3787 end = opts.get('print0') and '\0' or '\n'
3765 copy = {}
3788 copy = {}
3766 states = 'modified added removed deleted unknown ignored clean'.split()
3789 states = 'modified added removed deleted unknown ignored clean'.split()
3767 show = [k for k in states if opts.get(k)]
3790 show = [k for k in states if opts.get(k)]
3768 if opts.get('all'):
3791 if opts.get('all'):
3769 show += ui.quiet and (states[:4] + ['clean']) or states
3792 show += ui.quiet and (states[:4] + ['clean']) or states
3770 if not show:
3793 if not show:
3771 show = ui.quiet and states[:4] or states[:5]
3794 show = ui.quiet and states[:4] or states[:5]
3772
3795
3773 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3796 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3774 'ignored' in show, 'clean' in show, 'unknown' in show,
3797 'ignored' in show, 'clean' in show, 'unknown' in show,
3775 opts.get('subrepos'))
3798 opts.get('subrepos'))
3776 changestates = zip(states, 'MAR!?IC', stat)
3799 changestates = zip(states, 'MAR!?IC', stat)
3777
3800
3778 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3801 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3779 ctxn = repo[nullid]
3802 ctxn = repo[nullid]
3780 ctx1 = repo[node1]
3803 ctx1 = repo[node1]
3781 ctx2 = repo[node2]
3804 ctx2 = repo[node2]
3782 added = stat[1]
3805 added = stat[1]
3783 if node2 is None:
3806 if node2 is None:
3784 added = stat[0] + stat[1] # merged?
3807 added = stat[0] + stat[1] # merged?
3785
3808
3786 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3809 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3787 if k in added:
3810 if k in added:
3788 copy[k] = v
3811 copy[k] = v
3789 elif v in added:
3812 elif v in added:
3790 copy[v] = k
3813 copy[v] = k
3791
3814
3792 for state, char, files in changestates:
3815 for state, char, files in changestates:
3793 if state in show:
3816 if state in show:
3794 format = "%s %%s%s" % (char, end)
3817 format = "%s %%s%s" % (char, end)
3795 if opts.get('no_status'):
3818 if opts.get('no_status'):
3796 format = "%%s%s" % end
3819 format = "%%s%s" % end
3797
3820
3798 for f in files:
3821 for f in files:
3799 ui.write(format % repo.pathto(f, cwd),
3822 ui.write(format % repo.pathto(f, cwd),
3800 label='status.' + state)
3823 label='status.' + state)
3801 if f in copy:
3824 if f in copy:
3802 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3825 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3803 label='status.copied')
3826 label='status.copied')
3804
3827
3805 def summary(ui, repo, **opts):
3828 def summary(ui, repo, **opts):
3806 """summarize working directory state
3829 """summarize working directory state
3807
3830
3808 This generates a brief summary of the working directory state,
3831 This generates a brief summary of the working directory state,
3809 including parents, branch, commit status, and available updates.
3832 including parents, branch, commit status, and available updates.
3810
3833
3811 With the --remote option, this will check the default paths for
3834 With the --remote option, this will check the default paths for
3812 incoming and outgoing changes. This can be time-consuming.
3835 incoming and outgoing changes. This can be time-consuming.
3813
3836
3814 Returns 0 on success.
3837 Returns 0 on success.
3815 """
3838 """
3816
3839
3817 ctx = repo[None]
3840 ctx = repo[None]
3818 parents = ctx.parents()
3841 parents = ctx.parents()
3819 pnode = parents[0].node()
3842 pnode = parents[0].node()
3820
3843
3821 for p in parents:
3844 for p in parents:
3822 # label with log.changeset (instead of log.parent) since this
3845 # label with log.changeset (instead of log.parent) since this
3823 # shows a working directory parent *changeset*:
3846 # shows a working directory parent *changeset*:
3824 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3847 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3825 label='log.changeset')
3848 label='log.changeset')
3826 ui.write(' '.join(p.tags()), label='log.tag')
3849 ui.write(' '.join(p.tags()), label='log.tag')
3827 if p.bookmarks():
3850 if p.bookmarks():
3828 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3851 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3829 if p.rev() == -1:
3852 if p.rev() == -1:
3830 if not len(repo):
3853 if not len(repo):
3831 ui.write(_(' (empty repository)'))
3854 ui.write(_(' (empty repository)'))
3832 else:
3855 else:
3833 ui.write(_(' (no revision checked out)'))
3856 ui.write(_(' (no revision checked out)'))
3834 ui.write('\n')
3857 ui.write('\n')
3835 if p.description():
3858 if p.description():
3836 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3859 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3837 label='log.summary')
3860 label='log.summary')
3838
3861
3839 branch = ctx.branch()
3862 branch = ctx.branch()
3840 bheads = repo.branchheads(branch)
3863 bheads = repo.branchheads(branch)
3841 m = _('branch: %s\n') % branch
3864 m = _('branch: %s\n') % branch
3842 if branch != 'default':
3865 if branch != 'default':
3843 ui.write(m, label='log.branch')
3866 ui.write(m, label='log.branch')
3844 else:
3867 else:
3845 ui.status(m, label='log.branch')
3868 ui.status(m, label='log.branch')
3846
3869
3847 st = list(repo.status(unknown=True))[:6]
3870 st = list(repo.status(unknown=True))[:6]
3848
3871
3849 c = repo.dirstate.copies()
3872 c = repo.dirstate.copies()
3850 copied, renamed = [], []
3873 copied, renamed = [], []
3851 for d, s in c.iteritems():
3874 for d, s in c.iteritems():
3852 if s in st[2]:
3875 if s in st[2]:
3853 st[2].remove(s)
3876 st[2].remove(s)
3854 renamed.append(d)
3877 renamed.append(d)
3855 else:
3878 else:
3856 copied.append(d)
3879 copied.append(d)
3857 if d in st[1]:
3880 if d in st[1]:
3858 st[1].remove(d)
3881 st[1].remove(d)
3859 st.insert(3, renamed)
3882 st.insert(3, renamed)
3860 st.insert(4, copied)
3883 st.insert(4, copied)
3861
3884
3862 ms = mergemod.mergestate(repo)
3885 ms = mergemod.mergestate(repo)
3863 st.append([f for f in ms if ms[f] == 'u'])
3886 st.append([f for f in ms if ms[f] == 'u'])
3864
3887
3865 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3888 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3866 st.append(subs)
3889 st.append(subs)
3867
3890
3868 labels = [ui.label(_('%d modified'), 'status.modified'),
3891 labels = [ui.label(_('%d modified'), 'status.modified'),
3869 ui.label(_('%d added'), 'status.added'),
3892 ui.label(_('%d added'), 'status.added'),
3870 ui.label(_('%d removed'), 'status.removed'),
3893 ui.label(_('%d removed'), 'status.removed'),
3871 ui.label(_('%d renamed'), 'status.copied'),
3894 ui.label(_('%d renamed'), 'status.copied'),
3872 ui.label(_('%d copied'), 'status.copied'),
3895 ui.label(_('%d copied'), 'status.copied'),
3873 ui.label(_('%d deleted'), 'status.deleted'),
3896 ui.label(_('%d deleted'), 'status.deleted'),
3874 ui.label(_('%d unknown'), 'status.unknown'),
3897 ui.label(_('%d unknown'), 'status.unknown'),
3875 ui.label(_('%d ignored'), 'status.ignored'),
3898 ui.label(_('%d ignored'), 'status.ignored'),
3876 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3899 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3877 ui.label(_('%d subrepos'), 'status.modified')]
3900 ui.label(_('%d subrepos'), 'status.modified')]
3878 t = []
3901 t = []
3879 for s, l in zip(st, labels):
3902 for s, l in zip(st, labels):
3880 if s:
3903 if s:
3881 t.append(l % len(s))
3904 t.append(l % len(s))
3882
3905
3883 t = ', '.join(t)
3906 t = ', '.join(t)
3884 cleanworkdir = False
3907 cleanworkdir = False
3885
3908
3886 if len(parents) > 1:
3909 if len(parents) > 1:
3887 t += _(' (merge)')
3910 t += _(' (merge)')
3888 elif branch != parents[0].branch():
3911 elif branch != parents[0].branch():
3889 t += _(' (new branch)')
3912 t += _(' (new branch)')
3890 elif (parents[0].extra().get('close') and
3913 elif (parents[0].extra().get('close') and
3891 pnode in repo.branchheads(branch, closed=True)):
3914 pnode in repo.branchheads(branch, closed=True)):
3892 t += _(' (head closed)')
3915 t += _(' (head closed)')
3893 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3916 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3894 t += _(' (clean)')
3917 t += _(' (clean)')
3895 cleanworkdir = True
3918 cleanworkdir = True
3896 elif pnode not in bheads:
3919 elif pnode not in bheads:
3897 t += _(' (new branch head)')
3920 t += _(' (new branch head)')
3898
3921
3899 if cleanworkdir:
3922 if cleanworkdir:
3900 ui.status(_('commit: %s\n') % t.strip())
3923 ui.status(_('commit: %s\n') % t.strip())
3901 else:
3924 else:
3902 ui.write(_('commit: %s\n') % t.strip())
3925 ui.write(_('commit: %s\n') % t.strip())
3903
3926
3904 # all ancestors of branch heads - all ancestors of parent = new csets
3927 # all ancestors of branch heads - all ancestors of parent = new csets
3905 new = [0] * len(repo)
3928 new = [0] * len(repo)
3906 cl = repo.changelog
3929 cl = repo.changelog
3907 for a in [cl.rev(n) for n in bheads]:
3930 for a in [cl.rev(n) for n in bheads]:
3908 new[a] = 1
3931 new[a] = 1
3909 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3932 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3910 new[a] = 1
3933 new[a] = 1
3911 for a in [p.rev() for p in parents]:
3934 for a in [p.rev() for p in parents]:
3912 if a >= 0:
3935 if a >= 0:
3913 new[a] = 0
3936 new[a] = 0
3914 for a in cl.ancestors(*[p.rev() for p in parents]):
3937 for a in cl.ancestors(*[p.rev() for p in parents]):
3915 new[a] = 0
3938 new[a] = 0
3916 new = sum(new)
3939 new = sum(new)
3917
3940
3918 if new == 0:
3941 if new == 0:
3919 ui.status(_('update: (current)\n'))
3942 ui.status(_('update: (current)\n'))
3920 elif pnode not in bheads:
3943 elif pnode not in bheads:
3921 ui.write(_('update: %d new changesets (update)\n') % new)
3944 ui.write(_('update: %d new changesets (update)\n') % new)
3922 else:
3945 else:
3923 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3946 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3924 (new, len(bheads)))
3947 (new, len(bheads)))
3925
3948
3926 if opts.get('remote'):
3949 if opts.get('remote'):
3927 t = []
3950 t = []
3928 source, branches = hg.parseurl(ui.expandpath('default'))
3951 source, branches = hg.parseurl(ui.expandpath('default'))
3929 other = hg.repository(hg.remoteui(repo, {}), source)
3952 other = hg.repository(hg.remoteui(repo, {}), source)
3930 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3953 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3931 ui.debug('comparing with %s\n' % url.hidepassword(source))
3954 ui.debug('comparing with %s\n' % url.hidepassword(source))
3932 repo.ui.pushbuffer()
3955 repo.ui.pushbuffer()
3933 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3956 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3934 repo.ui.popbuffer()
3957 repo.ui.popbuffer()
3935 if incoming:
3958 if incoming:
3936 t.append(_('1 or more incoming'))
3959 t.append(_('1 or more incoming'))
3937
3960
3938 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3961 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3939 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3962 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3940 other = hg.repository(hg.remoteui(repo, {}), dest)
3963 other = hg.repository(hg.remoteui(repo, {}), dest)
3941 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3964 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3942 repo.ui.pushbuffer()
3965 repo.ui.pushbuffer()
3943 o = discovery.findoutgoing(repo, other)
3966 o = discovery.findoutgoing(repo, other)
3944 repo.ui.popbuffer()
3967 repo.ui.popbuffer()
3945 o = repo.changelog.nodesbetween(o, None)[0]
3968 o = repo.changelog.nodesbetween(o, None)[0]
3946 if o:
3969 if o:
3947 t.append(_('%d outgoing') % len(o))
3970 t.append(_('%d outgoing') % len(o))
3948 if 'bookmarks' in other.listkeys('namespaces'):
3971 if 'bookmarks' in other.listkeys('namespaces'):
3949 lmarks = repo.listkeys('bookmarks')
3972 lmarks = repo.listkeys('bookmarks')
3950 rmarks = other.listkeys('bookmarks')
3973 rmarks = other.listkeys('bookmarks')
3951 diff = set(rmarks) - set(lmarks)
3974 diff = set(rmarks) - set(lmarks)
3952 if len(diff) > 0:
3975 if len(diff) > 0:
3953 t.append(_('%d incoming bookmarks') % len(diff))
3976 t.append(_('%d incoming bookmarks') % len(diff))
3954 diff = set(lmarks) - set(rmarks)
3977 diff = set(lmarks) - set(rmarks)
3955 if len(diff) > 0:
3978 if len(diff) > 0:
3956 t.append(_('%d outgoing bookmarks') % len(diff))
3979 t.append(_('%d outgoing bookmarks') % len(diff))
3957
3980
3958 if t:
3981 if t:
3959 ui.write(_('remote: %s\n') % (', '.join(t)))
3982 ui.write(_('remote: %s\n') % (', '.join(t)))
3960 else:
3983 else:
3961 ui.status(_('remote: (synced)\n'))
3984 ui.status(_('remote: (synced)\n'))
3962
3985
3963 def tag(ui, repo, name1, *names, **opts):
3986 def tag(ui, repo, name1, *names, **opts):
3964 """add one or more tags for the current or given revision
3987 """add one or more tags for the current or given revision
3965
3988
3966 Name a particular revision using <name>.
3989 Name a particular revision using <name>.
3967
3990
3968 Tags are used to name particular revisions of the repository and are
3991 Tags are used to name particular revisions of the repository and are
3969 very useful to compare different revisions, to go back to significant
3992 very useful to compare different revisions, to go back to significant
3970 earlier versions or to mark branch points as releases, etc. Changing
3993 earlier versions or to mark branch points as releases, etc. Changing
3971 an existing tag is normally disallowed; use -f/--force to override.
3994 an existing tag is normally disallowed; use -f/--force to override.
3972
3995
3973 If no revision is given, the parent of the working directory is
3996 If no revision is given, the parent of the working directory is
3974 used, or tip if no revision is checked out.
3997 used, or tip if no revision is checked out.
3975
3998
3976 To facilitate version control, distribution, and merging of tags,
3999 To facilitate version control, distribution, and merging of tags,
3977 they are stored as a file named ".hgtags" which is managed similarly
4000 they are stored as a file named ".hgtags" which is managed similarly
3978 to other project files and can be hand-edited if necessary. This
4001 to other project files and can be hand-edited if necessary. This
3979 also means that tagging creates a new commit. The file
4002 also means that tagging creates a new commit. The file
3980 ".hg/localtags" is used for local tags (not shared among
4003 ".hg/localtags" is used for local tags (not shared among
3981 repositories).
4004 repositories).
3982
4005
3983 Tag commits are usually made at the head of a branch. If the parent
4006 Tag commits are usually made at the head of a branch. If the parent
3984 of the working directory is not a branch head, :hg:`tag` aborts; use
4007 of the working directory is not a branch head, :hg:`tag` aborts; use
3985 -f/--force to force the tag commit to be based on a non-head
4008 -f/--force to force the tag commit to be based on a non-head
3986 changeset.
4009 changeset.
3987
4010
3988 See :hg:`help dates` for a list of formats valid for -d/--date.
4011 See :hg:`help dates` for a list of formats valid for -d/--date.
3989
4012
3990 Since tag names have priority over branch names during revision
4013 Since tag names have priority over branch names during revision
3991 lookup, using an existing branch name as a tag name is discouraged.
4014 lookup, using an existing branch name as a tag name is discouraged.
3992
4015
3993 Returns 0 on success.
4016 Returns 0 on success.
3994 """
4017 """
3995
4018
3996 rev_ = "."
4019 rev_ = "."
3997 names = [t.strip() for t in (name1,) + names]
4020 names = [t.strip() for t in (name1,) + names]
3998 if len(names) != len(set(names)):
4021 if len(names) != len(set(names)):
3999 raise util.Abort(_('tag names must be unique'))
4022 raise util.Abort(_('tag names must be unique'))
4000 for n in names:
4023 for n in names:
4001 if n in ['tip', '.', 'null']:
4024 if n in ['tip', '.', 'null']:
4002 raise util.Abort(_('the name \'%s\' is reserved') % n)
4025 raise util.Abort(_('the name \'%s\' is reserved') % n)
4003 if not n:
4026 if not n:
4027 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4028 if opts.get('rev') and opts.get('remove'):
4029 raise util.Abort(_("--rev and --remove are incompatible"))
4030 if opts.get('rev'):
4031 rev_ = opts['rev']
4032 message = opts.get('message')
4033 if opts.get('remove'):
4034 expectedtype = opts.get('local') and 'local' or 'global'
4035 for n in names:
4036 if not repo.tagtype(n):
4037 raise util.Abort(_('tag \'%s\' does not exist') % n)
4038 if repo.tagtype(n) != expectedtype:
4039 if expectedtype == 'global':
4040 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4041 else:
4042 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4043 rev_ = nullid
4044 if not message:
4045 # we don't translate commit messages
4046 message = 'Removed tag %s' % ', '.join(names)
4047 elif not opts.get('force'):
4048 for n in names:
4049 if n in repo.tags():
4050 raise util.Abort(_('tag \'%s\' already exists '
4051 '(use -f to force)') % n)
4052 if not opts.get('local'):
4053 p1, p2 = repo.dirstate.parents()
4054 if p2 != nullid:
4055 raise util.Abort(_('uncommitted merge'))
4056 bheads = repo.branchheads()
4057 if not opts.get('force') and bheads and p1 not in bheads:
4058 raise util.Abort(_('not at a branch head (use -f to force)'))
4059 r = cmdutil.revsingle(repo, rev_).node()
4060
4061 if not message:
4062 # we don't translate commit messages
4063 message = ('Added tag %s for changeset %s' %
4064 (', '.join(names), short(r)))
4065
4066 date = opts.get('date')
4067 if date:
4068 date = util.parsedate(date)
4069
4070 if opts.get('edit'):
4071 message = ui.edit(message, ui.username())
4072
4073 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4074
4075 def tags(ui, repo):
4076 """list repository tags
4077
4078 This lists both regular and local tags. When the -v/--verbose
4079 switch is used, a third column "local" is printed for local tags.
4080
4081 Returns 0 on success.
4082 """
4083
4084 hexfunc = ui.debugflag and hex or short
4085 tagtype = ""
4086
4087 for t, n in reversed(repo.tagslist()):
4088 if ui.quiet:
4089 ui.write("%s\n" % t)
4090 continue
4091
4092 try:
4093 hn = hexfunc(n)
4094 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4095 except error.LookupError:
4096 r = " ?:%s" % hn
4097 else:
4098 spaces = " " * (30 - encoding.colwidth(t))
4099 if ui.verbose:
4100 if repo.tagtype(t) == 'local':
4101 tagtype = " local"
4102 else:
4103 tagtype = ""
4104 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4105
4106 def tip(ui, repo, **opts):
4107 """show the tip revision
4108
4109 The tip revision (usually just called the tip) is the changeset
4110 most recently added to the repository (and therefore the most
4111 recently changed head).
4112
4113 If you have just made a commit, that commit will be the tip. If
4114 you have just pulled changes from another repository, the tip of
4115 that repository becomes the current tip. The "tip" tag is special
4116 and cannot be renamed or assigned to a different changeset.
4117
4118 Returns 0 on success.
4119 """
4120 displayer = cmdutil.show_changeset(ui, repo, opts)
4121 displayer.show(repo[len(repo) - 1])
4122 displayer.close()
4123
4124 def unbundle(ui, repo, fname1, *fnames, **opts):
4125 """apply one or more changegroup files
4126
4127 Apply one or more compressed changegroup files generated by the
4128 bundle command.
4129
4130 Returns 0 on success, 1 if an update has unresolved files.
4131 """
4132 fnames = (fname1,) + fnames
4133
4134 lock = repo.lock()
4135 wc = repo['.']
4136 try:
4137 for fname in fnames:
4138 f = url.open(ui, fname)
4139 gen = changegroup.readbundle(f, fname)
4140 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4141 lock=lock)
4142 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4143 finally:
4144 lock.release()
4145 return postincoming(ui, repo, modheads, opts.get('update'), None)
4146
4147 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4148 """update working directory (or switch revisions)
4149
4150 Update the repository's working directory to the specified
4151 changeset. If no changeset is specified, update to the tip of the
4152 current named branch.
4153
4154 If the changeset is not a descendant of the working directory's
4155 parent, the update is aborted. With the -c/--check option, the
4156 working directory is checked for uncommitted changes; if none are
4157 found, the working directory is updated to the specified
4158 changeset.
4159
4160 The following rules apply when the working directory contains
4161 uncommitted changes:
4162
4163 1. If neither -c/--check nor -C/--clean is specified, and if
4164 the requested changeset is an ancestor or descendant of
4165 the working directory's parent, the uncommitted changes
4166 are merged into the requested changeset and the merged
4167 result is left uncommitted. If the requested changeset is
4168 not an ancestor or descendant (that is, it is on another
4169 branch), the update is aborted and the uncommitted changes
4170 are preserved.
4171
4172 2. With the -c/--check option, the update is aborted and the
4173 uncommitted changes are preserved.
4174
4175 3. With the -C/--clean option, uncommitted changes are discarded and
4176 the working directory is updated to the requested changeset.
4177
4178 Use null as the changeset to remove the working directory (like
4179 :hg:`clone -U`).
4180
4181 If you want to update just one file to an older changeset, use
4182 :hg:`revert`.
4183
4184 See :hg:`help dates` for a list of formats valid for -d/--date.
4185
4186 Returns 0 on success, 1 if there are unresolved files.
4187 """
4188 if rev and node:
4189 raise util.Abort(_("please specify just one revision"))
4190
4191 if rev is None or rev == '':
4192 rev = node
4193
4194 # if we defined a bookmark, we have to remember the original bookmark name
4195 brev = rev
4196 rev = cmdutil.revsingle(repo, rev, rev).rev()
4197
4198 if check and clean:
4199 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4200
4201 if check:
4202 # we could use dirty() but we can ignore merge and branch trivia
4203 c = repo[None]
4204 if c.modified() or c.added() or c.removed():
4205 raise util.Abort(_("uncommitted local changes"))
4206
4207 if date:
4208 if rev:
4209 raise util.Abort(_("you can't specify a revision and a date"))
4210 rev = cmdutil.finddate(ui, repo, date)
4211
4212 if clean or check:
4213 ret = hg.clean(repo, rev)
4214 else:
4215 ret = hg.update(repo, rev)
4216
4217 if brev in repo._bookmarks:
4218 bookmarks.setcurrent(repo, brev)
4219
4220 return ret
4221
4222 def verify(ui, repo):
4223 """verify the integrity of the repository
4224
4225 Verify the integrity of the current repository.
4226
4227 This will perform an extensive check of the repository's
4228 integrity, validating the hashes and checksums of each entry in
4229 the changelog, manifest, and tracked files, as well as the
4230 integrity of their crosslinks and indices.
4231
4232 Returns 0 on success, 1 if errors are encountered.
4233 """
4234 return hg.verify(repo)
4235
4236 def version_(ui):
4237 """output version and copyright information"""
4238 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4239 % util.version())
4240 ui.status(_(
4241 "(see http://mercurial.selenic.com for more information)\n"
4242 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4243 "This is free software; see the source for copying conditions. "
4244 "There is NO\nwarranty; "
4245 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4246 ))
4247
4248 # Command options and aliases are listed here, alphabetically
4249
4250 globalopts = [
4251 ('R', 'repository', '',
4252 _('repository root directory or name of overlay bundle file'),
4253 _('REPO')),
4254 ('', 'cwd', '',
4255 _('change working directory'), _('DIR')),
4256 ('y', 'noninteractive', None,
4257 _('do not prompt, assume \'yes\' for any required answers')),
4258 ('q', 'quiet', None, _('suppress output')),
4259 ('v', 'verbose', None, _('enable additional output')),
4260 ('', 'config', [],
4261 _('set/override config option (use \'section.name=value\')'),
4262 _('CONFIG')),
4263 ('', 'debug', None, _('enable debugging output')),
4264 ('', 'debugger', None, _('start debugger')),
4265 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4266 _('ENCODE')),
4267 ('', 'encodingmode', encoding.encodingmode,
4268 _('set the charset encoding mode'), _('MODE')),
4269 ('', 'traceback', None, _('always print a traceback on exception')),
4270 ('', 'time', None, _('time how long the command takes')),
4271 ('', 'profile', None, _('print command execution profile')),
4272 ('', 'version', None, _('output version information and exit')),
4273 ('h', 'help', None, _('display help and exit')),
4274 ]
4275
4276 dryrunopts = [('n', 'dry-run', None,
4277 _('do not perform actions, just print output'))]
4278
4279 remoteopts = [
4280 ('e', 'ssh', '',
4281 _('specify ssh command to use'), _('CMD')),
4282 ('', 'remotecmd', '',
4283 _('specify hg command to run on the remote side'), _('CMD')),
4284 ('', 'insecure', None,
4285 _('do not verify server certificate (ignoring web.cacerts config)')),
4286 ]
4287
4288 walkopts = [
4289 ('I', 'include', [],
4290 _('include names matching the given patterns'), _('PATTERN')),
4291 ('X', 'exclude', [],
4292 _('exclude names matching the given patterns'), _('PATTERN')),
4293 ]
4294
4295 commitopts = [
4296 ('m', 'message', '',
4297 _('use text as commit message'), _('TEXT')),
4298 ('l', 'logfile', '',
4299 _('read commit message from file'), _('FILE')),
4300 ]
4301
4302 commitopts2 = [
4303 ('d', 'date', '',
4304 _('record datecode as commit date'), _('DATE')),
4305 ('u', 'user', '',
4306 _('record the specified user as committer'), _('USER')),
4307 ]
4308
4309 templateopts = [
4310 ('', 'style', '',
4311 _('display using template map file'), _('STYLE')),
4312 ('', 'template', '',
4313 _('display with template'), _('TEMPLATE')),
4314 ]
4315
4316 logopts = [
4317 ('p', 'patch', None, _('show patch')),
4318 ('g', 'git', None, _('use git extended diff format')),
4319 ('l', 'limit', '',
4320 _('limit number of changes displayed'), _('NUM')),
4321 ('M', 'no-merges', None, _('do not show merges')),
4322 ('', 'stat', None, _('output diffstat-style summary of changes')),
4323 ] + templateopts
4324
4325 diffopts = [
4326 ('a', 'text', None, _('treat all files as text')),
4327 ('g', 'git', None, _('use git extended diff format')),
4328 ('', 'nodates', None, _('omit dates from diff headers'))
4329 ]
4330
4331 diffopts2 = [
4332 ('p', 'show-function', None, _('show which function each change is in')),
4333 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4334 ('w', 'ignore-all-space', None,
4335 _('ignore white space when comparing lines')),
4336 ('b', 'ignore-space-change', None,
4337 _('ignore changes in the amount of white space')),
4338 ('B', 'ignore-blank-lines', None,
4339 _('ignore changes whose lines are all blank')),
4340 ('U', 'unified', '',
4341 _('number of lines of context to show'), _('NUM')),
4342 ('', 'stat', None, _('output diffstat-style summary of changes')),
4343 ]
4344
4345 similarityopts = [
4346 ('s', 'similarity', '',
4347 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4348 ]
4349
4350 subrepoopts = [
4351 ('S', 'subrepos', None,
4352 _('recurse into subrepositories'))
4353 ]
4354
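The option lists above (walkopts, remoteopts, commitopts, ...) are shared building blocks: each entry in the command table below concatenates the option tuples it needs, so a flag such as -I/--include is declared once and reused by many commands. A minimal standalone sketch of that pattern follows; the demo lists and the defaults() helper are illustrative assumptions, not Mercurial code.

    # Option tuples mirror the shape used above:
    # (short, long, default, help text[, value label]).
    walkopts_demo = [
        ('I', 'include', [], 'include names matching the given patterns', 'PATTERN'),
        ('X', 'exclude', [], 'exclude names matching the given patterns', 'PATTERN'),
    ]
    dryrunopts_demo = [
        ('n', 'dry-run', None, 'do not perform actions, just print output'),
    ]

    # A command's full option list is plain list concatenation, as in the table below.
    add_opts = walkopts_demo + dryrunopts_demo

    def defaults(options):
        """Build the initial opts dict from the default in each option tuple."""
        return dict((name.replace('-', '_'), default)
                    for _short, name, default in (opt[:3] for opt in options))

    print(defaults(add_opts))  # expected: {'include': [], 'exclude': [], 'dry_run': None}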
4355 table = {
4356 "^add": (add, walkopts + subrepoopts + dryrunopts,
4357 _('[OPTION]... [FILE]...')),
4358 "addremove":
4359 (addremove, similarityopts + walkopts + dryrunopts,
4360 _('[OPTION]... [FILE]...')),
4361 "^annotate|blame":
4362 (annotate,
4363 [('r', 'rev', '',
4364 _('annotate the specified revision'), _('REV')),
4365 ('', 'follow', None,
4366 _('follow copies/renames and list the filename (DEPRECATED)')),
4367 ('', 'no-follow', None, _("don't follow copies and renames")),
4368 ('a', 'text', None, _('treat all files as text')),
4369 ('u', 'user', None, _('list the author (long with -v)')),
4370 ('f', 'file', None, _('list the filename')),
4371 ('d', 'date', None, _('list the date (short with -q)')),
4372 ('n', 'number', None, _('list the revision number (default)')),
4373 ('c', 'changeset', None, _('list the changeset')),
4374 ('l', 'line-number', None,
4375 _('show line number at the first appearance'))
4376 ] + walkopts,
4377 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4378 "archive":
4379 (archive,
4380 [('', 'no-decode', None, _('do not pass files through decoders')),
4381 ('p', 'prefix', '',
4382 _('directory prefix for files in archive'), _('PREFIX')),
4383 ('r', 'rev', '',
4384 _('revision to distribute'), _('REV')),
4385 ('t', 'type', '',
4386 _('type of distribution to create'), _('TYPE')),
4387 ] + subrepoopts + walkopts,
4388 _('[OPTION]... DEST')),
4389 "backout":
4390 (backout,
4391 [('', 'merge', None,
4392 _('merge with old dirstate parent after backout')),
4393 ('', 'parent', '',
4394 _('parent to choose when backing out merge'), _('REV')),
4395 ('t', 'tool', '',
4396 _('specify merge tool')),
4397 ('r', 'rev', '',
4398 _('revision to backout'), _('REV')),
4399 ] + walkopts + commitopts + commitopts2,
4400 _('[OPTION]... [-r] REV')),
4401 "bisect":
4402 (bisect,
4403 [('r', 'reset', False, _('reset bisect state')),
4404 ('g', 'good', False, _('mark changeset good')),
4405 ('b', 'bad', False, _('mark changeset bad')),
4406 ('s', 'skip', False, _('skip testing changeset')),
4407 ('e', 'extend', False, _('extend the bisect range')),
4408 ('c', 'command', '',
4409 _('use command to check changeset state'), _('CMD')),
4410 ('U', 'noupdate', False, _('do not update to target'))],
4411 _("[-gbsr] [-U] [-c CMD] [REV]")),
4412 "bookmarks":
4413 (bookmark,
4414 [('f', 'force', False, _('force')),
4415 ('r', 'rev', '', _('revision'), _('REV')),
4416 ('d', 'delete', False, _('delete a given bookmark')),
4417 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4418 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4419 "branch":
4420 (branch,
4421 [('f', 'force', None,
4422 _('set branch name even if it shadows an existing branch')),
4423 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4424 _('[-fC] [NAME]')),
4425 "branches":
4426 (branches,
4427 [('a', 'active', False,
4428 _('show only branches that have unmerged heads')),
4429 ('c', 'closed', False,
4430 _('show normal and closed branches'))],
4431 _('[-ac]')),
4432 "bundle":
4433 (bundle,
4434 [('f', 'force', None,
4435 _('run even when the destination is unrelated')),
4436 ('r', 'rev', [],
4437 _('a changeset intended to be added to the destination'),
4438 _('REV')),
4439 ('b', 'branch', [],
4440 _('a specific branch you would like to bundle'),
4441 _('BRANCH')),
4442 ('', 'base', [],
4443 _('a base changeset assumed to be available at the destination'),
4444 _('REV')),
4445 ('a', 'all', None, _('bundle all changesets in the repository')),
4446 ('t', 'type', 'bzip2',
4447 _('bundle compression type to use'), _('TYPE')),
4448 ] + remoteopts,
4449 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4450 "cat":
4451 (cat,
4452 [('o', 'output', '',
4453 _('print output to file with formatted name'), _('FORMAT')),
4454 ('r', 'rev', '',
4455 _('print the given revision'), _('REV')),
4456 ('', 'decode', None, _('apply any matching decode filter')),
4457 ] + walkopts,
4458 _('[OPTION]... FILE...')),
4459 "^clone":
4460 (clone,
4461 [('U', 'noupdate', None,
4462 _('the clone will include an empty working copy (only a repository)')),
4463 ('u', 'updaterev', '',
4464 _('revision, tag or branch to check out'), _('REV')),
4465 ('r', 'rev', [],
4466 _('include the specified changeset'), _('REV')),
4467 ('b', 'branch', [],
4468 _('clone only the specified branch'), _('BRANCH')),
4469 ('', 'pull', None, _('use pull protocol to copy metadata')),
4470 ('', 'uncompressed', None,
4471 _('use uncompressed transfer (fast over LAN)')),
4472 ] + remoteopts,
4473 _('[OPTION]... SOURCE [DEST]')),
4474 "^commit|ci":
4475 (commit,
4476 [('A', 'addremove', None,
4477 _('mark new/missing files as added/removed before committing')),
4478 ('', 'close-branch', None,
4479 _('mark a branch as closed, hiding it from the branch list')),
4480 ] + walkopts + commitopts + commitopts2,
4481 _('[OPTION]... [FILE]...')),
4482 "copy|cp":
4483 (copy,
4484 [('A', 'after', None, _('record a copy that has already occurred')),
4485 ('f', 'force', None,
4486 _('forcibly copy over an existing managed file')),
4487 ] + walkopts + dryrunopts,
4488 _('[OPTION]... [SOURCE]... DEST')),
4489 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4490 "debugbuilddag":
4491 (debugbuilddag,
4492 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4493 ('a', 'appended-file', None, _('add single file all revs append to')),
4494 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4495 ('n', 'new-file', None, _('add new file at each rev')),
4496 ],
4497 _('[OPTION]... TEXT')),
4498 "debugbundle":
4499 (debugbundle,
4500 [('a', 'all', None, _('show all details')),
4501 ],
4502 _('FILE')),
4503 "debugcheckstate": (debugcheckstate, [], ''),
4504 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4505 "debugcomplete":
4506 (debugcomplete,
4507 [('o', 'options', None, _('show the command options'))],
4508 _('[-o] CMD')),
4509 "debugdag":
4510 (debugdag,
4511 [('t', 'tags', None, _('use tags as labels')),
4512 ('b', 'branches', None, _('annotate with branch names')),
4513 ('', 'dots', None, _('use dots for runs')),
4514 ('s', 'spaces', None, _('separate elements by spaces')),
4515 ],
4516 _('[OPTION]... [FILE [REV]...]')),
4517 "debugdate":
4518 (debugdate,
4519 [('e', 'extended', None, _('try extended date formats'))],
4520 _('[-e] DATE [RANGE]')),
4521 "debugdata": (debugdata, [], _('FILE REV')),
4522 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4523 "debuggetbundle":
4524 (debuggetbundle,
4525 [('H', 'head', [], _('id of head node'), _('ID')),
4526 ('C', 'common', [], _('id of common node'), _('ID')),
4527 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4528 ],
4529 _('REPO FILE [-H|-C ID]...')),
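The debuggetbundle entry just added follows the same (function, options, synopsis) shape as the rest of the table; because -H/--head and -C/--common default to [], the flags may be repeated and the command function receives them as lists of node ids. A rough standalone sketch of that accumulation is below; the toy collect() parser and the ids used are illustrative assumptions, not Mercurial's option parser.

    def collect(args, flags):
        """Repeatable flags append to a list, mirroring a [] default."""
        opts = dict((key, []) for key in flags.values())
        rest = []
        it = iter(args)
        for arg in it:
            if arg in flags:
                opts[flags[arg]].append(next(it))  # assumes a value follows each flag
            else:
                rest.append(arg)
        return rest, opts

    rest, opts = collect(['repo', 'bundle', '-H', 'aaa111', '-C', 'bbb222', '-C', 'ccc333'],
                         {'-H': 'head', '-C': 'common'})
    # rest == ['repo', 'bundle']
    # opts == {'head': ['aaa111'], 'common': ['bbb222', 'ccc333']}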
4500 "debugignore": (debugignore, [], ''),
4530 "debugignore": (debugignore, [], ''),
4501 "debugindex": (debugindex,
4531 "debugindex": (debugindex,
4502 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4532 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4503 _('FILE')),
4533 _('FILE')),
4504 "debugindexdot": (debugindexdot, [], _('FILE')),
4534 "debugindexdot": (debugindexdot, [], _('FILE')),
4505 "debuginstall": (debuginstall, [], ''),
4535 "debuginstall": (debuginstall, [], ''),
4506 "debugknown": (debugknown, [], _('REPO ID...')),
4536 "debugknown": (debugknown, [], _('REPO ID...')),
4507 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4537 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4508 "debugrebuildstate":
4538 "debugrebuildstate":
4509 (debugrebuildstate,
4539 (debugrebuildstate,
4510 [('r', 'rev', '',
4540 [('r', 'rev', '',
4511 _('revision to rebuild to'), _('REV'))],
4541 _('revision to rebuild to'), _('REV'))],
4512 _('[-r REV] [REV]')),
4542 _('[-r REV] [REV]')),
4513 "debugrename":
4543 "debugrename":
4514 (debugrename,
4544 (debugrename,
4515 [('r', 'rev', '',
4545 [('r', 'rev', '',
4516 _('revision to debug'), _('REV'))],
4546 _('revision to debug'), _('REV'))],
4517 _('[-r REV] FILE')),
4547 _('[-r REV] FILE')),
4518 "debugrevspec":
4548 "debugrevspec":
4519 (debugrevspec, [], ('REVSPEC')),
4549 (debugrevspec, [], ('REVSPEC')),
4520 "debugsetparents":
4550 "debugsetparents":
4521 (debugsetparents, [], _('REV1 [REV2]')),
4551 (debugsetparents, [], _('REV1 [REV2]')),
4522 "debugstate":
4552 "debugstate":
4523 (debugstate,
4553 (debugstate,
4524 [('', 'nodates', None, _('do not display the saved mtime'))],
4554 [('', 'nodates', None, _('do not display the saved mtime'))],
4525 _('[OPTION]...')),
4555 _('[OPTION]...')),
4526 "debugsub":
4556 "debugsub":
4527 (debugsub,
4557 (debugsub,
4528 [('r', 'rev', '',
4558 [('r', 'rev', '',
4529 _('revision to check'), _('REV'))],
4559 _('revision to check'), _('REV'))],
4530 _('[-r REV] [REV]')),
4560 _('[-r REV] [REV]')),
4531 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4561 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4532 "debugwireargs":
4562 "debugwireargs":
4533 (debugwireargs,
4563 (debugwireargs,
4534 [('', 'three', '', 'three'),
4564 [('', 'three', '', 'three'),
4535 ('', 'four', '', 'four'),
4565 ('', 'four', '', 'four'),
4536 ] + remoteopts,
4566 ] + remoteopts,
4537 _('REPO [OPTIONS]... [ONE [TWO]]')),
4567 _('REPO [OPTIONS]... [ONE [TWO]]')),
4538 "^diff":
4568 "^diff":
4539 (diff,
4569 (diff,
4540 [('r', 'rev', [],
4570 [('r', 'rev', [],
4541 _('revision'), _('REV')),
4571 _('revision'), _('REV')),
4542 ('c', 'change', '',
4572 ('c', 'change', '',
4543 _('change made by revision'), _('REV'))
4573 _('change made by revision'), _('REV'))
4544 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4574 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4545 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4575 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4546 "^export":
4576 "^export":
4547 (export,
4577 (export,
4548 [('o', 'output', '',
4578 [('o', 'output', '',
4549 _('print output to file with formatted name'), _('FORMAT')),
4579 _('print output to file with formatted name'), _('FORMAT')),
4550 ('', 'switch-parent', None, _('diff against the second parent')),
4580 ('', 'switch-parent', None, _('diff against the second parent')),
4551 ('r', 'rev', [],
4581 ('r', 'rev', [],
4552 _('revisions to export'), _('REV')),
4582 _('revisions to export'), _('REV')),
4553 ] + diffopts,
4583 ] + diffopts,
4554 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4584 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4555 "^forget":
4585 "^forget":
4556 (forget,
4586 (forget,
4557 [] + walkopts,
4587 [] + walkopts,
4558 _('[OPTION]... FILE...')),
4588 _('[OPTION]... FILE...')),
4559 "grep":
4589 "grep":
4560 (grep,
4590 (grep,
4561 [('0', 'print0', None, _('end fields with NUL')),
4591 [('0', 'print0', None, _('end fields with NUL')),
4562 ('', 'all', None, _('print all revisions that match')),
4592 ('', 'all', None, _('print all revisions that match')),
4563 ('f', 'follow', None,
4593 ('f', 'follow', None,
4564 _('follow changeset history,'
4594 _('follow changeset history,'
4565 ' or file history across copies and renames')),
4595 ' or file history across copies and renames')),
4566 ('i', 'ignore-case', None, _('ignore case when matching')),
4596 ('i', 'ignore-case', None, _('ignore case when matching')),
4567 ('l', 'files-with-matches', None,
4597 ('l', 'files-with-matches', None,
4568 _('print only filenames and revisions that match')),
4598 _('print only filenames and revisions that match')),
4569 ('n', 'line-number', None, _('print matching line numbers')),
4599 ('n', 'line-number', None, _('print matching line numbers')),
4570 ('r', 'rev', [],
4600 ('r', 'rev', [],
4571 _('only search files changed within revision range'), _('REV')),
4601 _('only search files changed within revision range'), _('REV')),
4572 ('u', 'user', None, _('list the author (long with -v)')),
4602 ('u', 'user', None, _('list the author (long with -v)')),
4573 ('d', 'date', None, _('list the date (short with -q)')),
4603 ('d', 'date', None, _('list the date (short with -q)')),
4574 ] + walkopts,
4604 ] + walkopts,
4575 _('[OPTION]... PATTERN [FILE]...')),
4605 _('[OPTION]... PATTERN [FILE]...')),
4576 "heads":
4606 "heads":
4577 (heads,
4607 (heads,
4578 [('r', 'rev', '',
4608 [('r', 'rev', '',
4579 _('show only heads which are descendants of STARTREV'),
4609 _('show only heads which are descendants of STARTREV'),
4580 _('STARTREV')),
4610 _('STARTREV')),
4581 ('t', 'topo', False, _('show topological heads only')),
4611 ('t', 'topo', False, _('show topological heads only')),
4582 ('a', 'active', False,
4612 ('a', 'active', False,
4583 _('show active branchheads only (DEPRECATED)')),
4613 _('show active branchheads only (DEPRECATED)')),
4584 ('c', 'closed', False,
4614 ('c', 'closed', False,
4585 _('show normal and closed branch heads')),
4615 _('show normal and closed branch heads')),
4586 ] + templateopts,
4616 ] + templateopts,
4587 _('[-ac] [-r STARTREV] [REV]...')),
4617 _('[-ac] [-r STARTREV] [REV]...')),
4588 "help": (help_, [], _('[TOPIC]')),
4618 "help": (help_, [], _('[TOPIC]')),
4589 "identify|id":
4619 "identify|id":
4590 (identify,
4620 (identify,
4591 [('r', 'rev', '',
4621 [('r', 'rev', '',
4592 _('identify the specified revision'), _('REV')),
4622 _('identify the specified revision'), _('REV')),
4593 ('n', 'num', None, _('show local revision number')),
4623 ('n', 'num', None, _('show local revision number')),
4594 ('i', 'id', None, _('show global revision id')),
4624 ('i', 'id', None, _('show global revision id')),
4595 ('b', 'branch', None, _('show branch')),
4625 ('b', 'branch', None, _('show branch')),
4596 ('t', 'tags', None, _('show tags')),
4626 ('t', 'tags', None, _('show tags')),
4597 ('B', 'bookmarks', None, _('show bookmarks'))],
4627 ('B', 'bookmarks', None, _('show bookmarks'))],
4598 _('[-nibtB] [-r REV] [SOURCE]')),
4628 _('[-nibtB] [-r REV] [SOURCE]')),
4599 "import|patch":
4629 "import|patch":
4600 (import_,
4630 (import_,
4601 [('p', 'strip', 1,
4631 [('p', 'strip', 1,
4602 _('directory strip option for patch. This has the same '
4632 _('directory strip option for patch. This has the same '
4603 'meaning as the corresponding patch option'),
4633 'meaning as the corresponding patch option'),
4604 _('NUM')),
4634 _('NUM')),
4605 ('b', 'base', '',
4635 ('b', 'base', '',
4606 _('base path'), _('PATH')),
4636 _('base path'), _('PATH')),
4607 ('f', 'force', None,
4637 ('f', 'force', None,
4608 _('skip check for outstanding uncommitted changes')),
4638 _('skip check for outstanding uncommitted changes')),
4609 ('', 'no-commit', None,
4639 ('', 'no-commit', None,
4610 _("don't commit, just update the working directory")),
4640 _("don't commit, just update the working directory")),
4611 ('', 'exact', None,
4641 ('', 'exact', None,
4612 _('apply patch to the nodes from which it was generated')),
4642 _('apply patch to the nodes from which it was generated')),
4613 ('', 'import-branch', None,
4643 ('', 'import-branch', None,
4614 _('use any branch information in patch (implied by --exact)'))] +
4644 _('use any branch information in patch (implied by --exact)'))] +
4615 commitopts + commitopts2 + similarityopts,
4645 commitopts + commitopts2 + similarityopts,
4616 _('[OPTION]... PATCH...')),
4646 _('[OPTION]... PATCH...')),
4617 "incoming|in":
4647 "incoming|in":
4618 (incoming,
4648 (incoming,
4619 [('f', 'force', None,
4649 [('f', 'force', None,
4620 _('run even if remote repository is unrelated')),
4650 _('run even if remote repository is unrelated')),
4621 ('n', 'newest-first', None, _('show newest record first')),
4651 ('n', 'newest-first', None, _('show newest record first')),
4622 ('', 'bundle', '',
4652 ('', 'bundle', '',
4623 _('file to store the bundles into'), _('FILE')),
4653 _('file to store the bundles into'), _('FILE')),
4624 ('r', 'rev', [],
4654 ('r', 'rev', [],
4625 _('a remote changeset intended to be added'), _('REV')),
4655 _('a remote changeset intended to be added'), _('REV')),
4626 ('B', 'bookmarks', False, _("compare bookmarks")),
4656 ('B', 'bookmarks', False, _("compare bookmarks")),
4627 ('b', 'branch', [],
4657 ('b', 'branch', [],
4628 _('a specific branch you would like to pull'), _('BRANCH')),
4658 _('a specific branch you would like to pull'), _('BRANCH')),
4629 ] + logopts + remoteopts + subrepoopts,
4659 ] + logopts + remoteopts + subrepoopts,
4660 _('[-p] [-n] [-M] [-f] [-r REV]...'
4661 ' [--bundle FILENAME] [SOURCE]')),
4662 "^init":
4663 (init,
4664 remoteopts,
4665 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4666 "locate":
4667 (locate,
4668 [('r', 'rev', '',
4669 _('search the repository as it is in REV'), _('REV')),
4670 ('0', 'print0', None,
4671 _('end filenames with NUL, for use with xargs')),
4672 ('f', 'fullpath', None,
4673 _('print complete paths from the filesystem root')),
4674 ] + walkopts,
4675 _('[OPTION]... [PATTERN]...')),
4676 "^log|history":
4677 (log,
4678 [('f', 'follow', None,
4679 _('follow changeset history,'
4680 ' or file history across copies and renames')),
4681 ('', 'follow-first', None,
4682 _('only follow the first parent of merge changesets')),
4683 ('d', 'date', '',
4684 _('show revisions matching date spec'), _('DATE')),
4685 ('C', 'copies', None, _('show copied files')),
4686 ('k', 'keyword', [],
4687 _('do case-insensitive search for a given text'), _('TEXT')),
4688 ('r', 'rev', [],
4689 _('show the specified revision or range'), _('REV')),
4690 ('', 'removed', None, _('include revisions where files were removed')),
4691 ('m', 'only-merges', None, _('show only merges')),
4692 ('u', 'user', [],
4693 _('revisions committed by user'), _('USER')),
4694 ('', 'only-branch', [],
4695 _('show only changesets within the given named branch (DEPRECATED)'),
4696 _('BRANCH')),
4697 ('b', 'branch', [],
4698 _('show changesets within the given named branch'), _('BRANCH')),
4699 ('P', 'prune', [],
4700 _('do not display revision or any of its ancestors'), _('REV')),
4701 ] + logopts + walkopts,
4702 _('[OPTION]... [FILE]')),
4703 "manifest":
4704 (manifest,
4705 [('r', 'rev', '',
4706 _('revision to display'), _('REV'))],
4707 _('[-r REV]')),
4708 "^merge":
4709 (merge,
4710 [('f', 'force', None, _('force a merge with outstanding changes')),
4711 ('t', 'tool', '', _('specify merge tool')),
4712 ('r', 'rev', '',
4713 _('revision to merge'), _('REV')),
4714 ('P', 'preview', None,
4715 _('review revisions to merge (no merge is performed)'))],
4716 _('[-P] [-f] [[-r] REV]')),
4717 "outgoing|out":
4718 (outgoing,
4719 [('f', 'force', None,
4720 _('run even when the destination is unrelated')),
4721 ('r', 'rev', [],
4722 _('a changeset intended to be included in the destination'),
4723 _('REV')),
4724 ('n', 'newest-first', None, _('show newest record first')),
4725 ('B', 'bookmarks', False, _("compare bookmarks")),
4726 ('b', 'branch', [],
4727 _('a specific branch you would like to push'), _('BRANCH')),
4728 ] + logopts + remoteopts + subrepoopts,
4729 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4730 "parents":
4731 (parents,
4732 [('r', 'rev', '',
4733 _('show parents of the specified revision'), _('REV')),
4734 ] + templateopts,
4735 _('[-r REV] [FILE]')),
4736 "paths": (paths, [], _('[NAME]')),
4737 "^pull":
4738 (pull,
4739 [('u', 'update', None,
4740 _('update to new branch head if changesets were pulled')),
4741 ('f', 'force', None,
4742 _('run even when remote repository is unrelated')),
4743 ('r', 'rev', [],
4744 _('a remote changeset intended to be added'), _('REV')),
4745 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4746 ('b', 'branch', [],
4747 _('a specific branch you would like to pull'), _('BRANCH')),
4748 ] + remoteopts,
4749 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4750 "^push":
4751 (push,
4752 [('f', 'force', None, _('force push')),
4753 ('r', 'rev', [],
4754 _('a changeset intended to be included in the destination'),
4755 _('REV')),
4756 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4757 ('b', 'branch', [],
4758 _('a specific branch you would like to push'), _('BRANCH')),
4759 ('', 'new-branch', False, _('allow pushing a new branch')),
4760 ] + remoteopts,
4761 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4762 "recover": (recover, []),
4763 "^remove|rm":
4764 (remove,
4765 [('A', 'after', None, _('record delete for missing files')),
4766 ('f', 'force', None,
4767 _('remove (and delete) file even if added or modified')),
4768 ] + walkopts,
4769 _('[OPTION]... FILE...')),
4770 "rename|move|mv":
4771 (rename,
4772 [('A', 'after', None, _('record a rename that has already occurred')),
4773 ('f', 'force', None,
4774 _('forcibly copy over an existing managed file')),
4775 ] + walkopts + dryrunopts,
4776 _('[OPTION]... SOURCE... DEST')),
4777 "resolve":
4778 (resolve,
4779 [('a', 'all', None, _('select all unresolved files')),
4780 ('l', 'list', None, _('list state of files needing merge')),
4781 ('m', 'mark', None, _('mark files as resolved')),
4782 ('u', 'unmark', None, _('mark files as unresolved')),
4783 ('t', 'tool', '', _('specify merge tool')),
4784 ('n', 'no-status', None, _('hide status prefix'))]
4785 + walkopts,
4786 _('[OPTION]... [FILE]...')),
4787 "revert":
4788 (revert,
4789 [('a', 'all', None, _('revert all changes when no arguments given')),
4790 ('d', 'date', '',
4791 _('tipmost revision matching date'), _('DATE')),
4792 ('r', 'rev', '',
4793 _('revert to the specified revision'), _('REV')),
4794 ('', 'no-backup', None, _('do not save backup copies of files')),
4795 ] + walkopts + dryrunopts,
4796 _('[OPTION]... [-r REV] [NAME]...')),
4797 "rollback": (rollback, dryrunopts),
4798 "root": (root, []),
4799 "^serve":
4800 (serve,
4801 [('A', 'accesslog', '',
4802 _('name of access log file to write to'), _('FILE')),
4803 ('d', 'daemon', None, _('run server in background')),
4804 ('', 'daemon-pipefds', '',
4805 _('used internally by daemon mode'), _('NUM')),
4806 ('E', 'errorlog', '',
4807 _('name of error log file to write to'), _('FILE')),
4808 # use string type, then we can check if something was passed
4809 ('p', 'port', '',
4810 _('port to listen on (default: 8000)'), _('PORT')),
4811 ('a', 'address', '',
4812 _('address to listen on (default: all interfaces)'), _('ADDR')),
4813 ('', 'prefix', '',
4814 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4815 ('n', 'name', '',
4816 _('name to show in web pages (default: working directory)'),
4817 _('NAME')),
4818 ('', 'web-conf', '',
4819 _('name of the hgweb config file (see "hg help hgweb")'),
4820 _('FILE')),
4821 ('', 'webdir-conf', '',
4822 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4823 ('', 'pid-file', '',
4824 _('name of file to write process ID to'), _('FILE')),
4825 ('', 'stdio', None, _('for remote clients')),
4826 ('t', 'templates', '',
4827 _('web templates to use'), _('TEMPLATE')),
4828 ('', 'style', '',
4829 _('template style to use'), _('STYLE')),
4830 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4831 ('', 'certificate', '',
4832 _('SSL certificate file'), _('FILE'))],
4833 _('[OPTION]...')),
4834 "showconfig|debugconfig":
4835 (showconfig,
4836 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4837 _('[-u] [NAME]...')),
4838 "^summary|sum":
4839 (summary,
4840 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4841 "^status|st":
4842 (status,
4843 [('A', 'all', None, _('show status of all files')),
4844 ('m', 'modified', None, _('show only modified files')),
4845 ('a', 'added', None, _('show only added files')),
4846 ('r', 'removed', None, _('show only removed files')),
4847 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4848 ('c', 'clean', None, _('show only files without changes')),
4849 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4850 ('i', 'ignored', None, _('show only ignored files')),
4851 ('n', 'no-status', None, _('hide status prefix')),
4852 ('C', 'copies', None, _('show source of copied files')),
4853 ('0', 'print0', None,
4854 _('end filenames with NUL, for use with xargs')),
4855 ('', 'rev', [],
4856 _('show difference from revision'), _('REV')),
4857 ('', 'change', '',
4858 _('list the changed files of a revision'), _('REV')),
4859 ] + walkopts + subrepoopts,
4860 _('[OPTION]... [FILE]...')),
4861 "tag":
4862 (tag,
4863 [('f', 'force', None, _('force tag')),
4864 ('l', 'local', None, _('make the tag local')),
4865 ('r', 'rev', '',
4866 _('revision to tag'), _('REV')),
4867 ('', 'remove', None, _('remove a tag')),
4868 # -l/--local is already there, commitopts cannot be used
4869 ('e', 'edit', None, _('edit commit message')),
4870 ('m', 'message', '',
4871 _('use <text> as commit message'), _('TEXT')),
4872 ] + commitopts2,
4873 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4874 "tags": (tags, [], ''),
4875 "tip":
4876 (tip,
4877 [('p', 'patch', None, _('show patch')),
4878 ('g', 'git', None, _('use git extended diff format')),
4879 ] + templateopts,
4880 _('[-p] [-g]')),
4881 "unbundle":
4882 (unbundle,
4883 [('u', 'update', None,
4884 _('update to new branch head if changesets were unbundled'))],
4885 _('[-u] FILE...')),
4886 "^update|up|checkout|co":
4887 (update,
4888 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4889 ('c', 'check', None,
4890 _('update across branches if no uncommitted changes')),
4891 ('d', 'date', '',
4892 _('tipmost revision matching date'), _('DATE')),
4893 ('r', 'rev', '',
4894 _('revision'), _('REV'))],
4895 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4896 "verify": (verify, []),
4867 "version": (version_, []),
4897 "version": (version_, []),
4868 }
4898 }
4869
4899
4870 norepo = ("clone init version help debugcommands debugcomplete"
4900 norepo = ("clone init version help debugcommands debugcomplete"
4871 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4901 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4872 " debugknown debugbundle")
4902 " debugknown debuggetbundle debugbundle")
4873 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4903 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4874 " debugdata debugindex debugindexdot")
4904 " debugdata debugindex debugindexdot")
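
Note on the commands.py hunk above: it ends by adding the new debuggetbundle command to the norepo list, so the command does not require being run inside a local repository. Conceptually, getbundle() answers one question: given the nodes the client already has ("common") and the heads it wants, which changesets must travel over the wire? The sketch below is illustrative only, not the actual Mercurial implementation; the helper names used (repo.heads(), changelog.findmissing()) are assumptions about the revlog/repo API of this era.

    # Illustrative sketch of the getbundle() contract, not the real code.
    # 'common' and 'heads' are lists of 20-byte binary node ids.
    from mercurial.node import nullid

    def getbundle_sketch(repo, common=None, heads=None):
        common = common or [nullid]      # nothing shared: start from the null revision
        heads = heads or repo.heads()    # default to everything the server has
        # Ancestors of 'heads' that are not ancestors of 'common' are
        # exactly the changesets the client is missing.
        missing = repo.changelog.findmissing(common, heads)
        return missing                   # the real method wraps these in a changegroup
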
@@ -1,294 +1,295
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os
9 import os
10 from mercurial import ui, hg, hook, error, encoding, templater
10 from mercurial import ui, hg, hook, error, encoding, templater
11 from common import get_mtime, ErrorResponse, permhooks, caching
11 from common import get_mtime, ErrorResponse, permhooks, caching
12 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
12 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
13 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
13 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 from request import wsgirequest
14 from request import wsgirequest
15 import webcommands, protocol, webutil
15 import webcommands, protocol, webutil
16
16
17 perms = {
17 perms = {
18 'changegroup': 'pull',
18 'changegroup': 'pull',
19 'changegroupsubset': 'pull',
19 'changegroupsubset': 'pull',
20 'getbundle': 'pull',
20 'stream_out': 'pull',
21 'stream_out': 'pull',
21 'listkeys': 'pull',
22 'listkeys': 'pull',
22 'unbundle': 'push',
23 'unbundle': 'push',
23 'pushkey': 'push',
24 'pushkey': 'push',
24 }
25 }
25
26
26 class hgweb(object):
27 class hgweb(object):
27 def __init__(self, repo, name=None, baseui=None):
28 def __init__(self, repo, name=None, baseui=None):
28 if isinstance(repo, str):
29 if isinstance(repo, str):
29 if baseui:
30 if baseui:
30 u = baseui.copy()
31 u = baseui.copy()
31 else:
32 else:
32 u = ui.ui()
33 u = ui.ui()
33 self.repo = hg.repository(u, repo)
34 self.repo = hg.repository(u, repo)
34 else:
35 else:
35 self.repo = repo
36 self.repo = repo
36
37
37 self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
38 self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
38 self.repo.ui.setconfig('ui', 'interactive', 'off')
39 self.repo.ui.setconfig('ui', 'interactive', 'off')
39 hook.redirect(True)
40 hook.redirect(True)
40 self.mtime = -1
41 self.mtime = -1
41 self.reponame = name
42 self.reponame = name
42 self.archives = 'zip', 'gz', 'bz2'
43 self.archives = 'zip', 'gz', 'bz2'
43 self.stripecount = 1
44 self.stripecount = 1
44 # a repo owner may set web.templates in .hg/hgrc to get any file
45 # a repo owner may set web.templates in .hg/hgrc to get any file
45 # readable by the user running the CGI script
46 # readable by the user running the CGI script
46 self.templatepath = self.config('web', 'templates')
47 self.templatepath = self.config('web', 'templates')
47
48
48 # The CGI scripts are often run by a user different from the repo owner.
49 # The CGI scripts are often run by a user different from the repo owner.
49 # Trust the settings from the .hg/hgrc files by default.
50 # Trust the settings from the .hg/hgrc files by default.
50 def config(self, section, name, default=None, untrusted=True):
51 def config(self, section, name, default=None, untrusted=True):
51 return self.repo.ui.config(section, name, default,
52 return self.repo.ui.config(section, name, default,
52 untrusted=untrusted)
53 untrusted=untrusted)
53
54
54 def configbool(self, section, name, default=False, untrusted=True):
55 def configbool(self, section, name, default=False, untrusted=True):
55 return self.repo.ui.configbool(section, name, default,
56 return self.repo.ui.configbool(section, name, default,
56 untrusted=untrusted)
57 untrusted=untrusted)
57
58
58 def configlist(self, section, name, default=None, untrusted=True):
59 def configlist(self, section, name, default=None, untrusted=True):
59 return self.repo.ui.configlist(section, name, default,
60 return self.repo.ui.configlist(section, name, default,
60 untrusted=untrusted)
61 untrusted=untrusted)
61
62
62 def refresh(self, request=None):
63 def refresh(self, request=None):
63 if request:
64 if request:
64 self.repo.ui.environ = request.env
65 self.repo.ui.environ = request.env
65 mtime = get_mtime(self.repo.spath)
66 mtime = get_mtime(self.repo.spath)
66 if mtime != self.mtime:
67 if mtime != self.mtime:
67 self.mtime = mtime
68 self.mtime = mtime
68 self.repo = hg.repository(self.repo.ui, self.repo.root)
69 self.repo = hg.repository(self.repo.ui, self.repo.root)
69 self.maxchanges = int(self.config("web", "maxchanges", 10))
70 self.maxchanges = int(self.config("web", "maxchanges", 10))
70 self.stripecount = int(self.config("web", "stripes", 1))
71 self.stripecount = int(self.config("web", "stripes", 1))
71 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
72 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
72 self.maxfiles = int(self.config("web", "maxfiles", 10))
73 self.maxfiles = int(self.config("web", "maxfiles", 10))
73 self.allowpull = self.configbool("web", "allowpull", True)
74 self.allowpull = self.configbool("web", "allowpull", True)
74 encoding.encoding = self.config("web", "encoding",
75 encoding.encoding = self.config("web", "encoding",
75 encoding.encoding)
76 encoding.encoding)
76
77
77 def run(self):
78 def run(self):
78 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
79 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
79 raise RuntimeError("This function is only intended to be "
80 raise RuntimeError("This function is only intended to be "
80 "called while running as a CGI script.")
81 "called while running as a CGI script.")
81 import mercurial.hgweb.wsgicgi as wsgicgi
82 import mercurial.hgweb.wsgicgi as wsgicgi
82 wsgicgi.launch(self)
83 wsgicgi.launch(self)
83
84
84 def __call__(self, env, respond):
85 def __call__(self, env, respond):
85 req = wsgirequest(env, respond)
86 req = wsgirequest(env, respond)
86 return self.run_wsgi(req)
87 return self.run_wsgi(req)
87
88
88 def run_wsgi(self, req):
89 def run_wsgi(self, req):
89
90
90 self.refresh(req)
91 self.refresh(req)
91
92
92 # work with CGI variables to create coherent structure
93 # work with CGI variables to create coherent structure
93 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
94 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
94
95
95 req.url = req.env['SCRIPT_NAME']
96 req.url = req.env['SCRIPT_NAME']
96 if not req.url.endswith('/'):
97 if not req.url.endswith('/'):
97 req.url += '/'
98 req.url += '/'
98 if 'REPO_NAME' in req.env:
99 if 'REPO_NAME' in req.env:
99 req.url += req.env['REPO_NAME'] + '/'
100 req.url += req.env['REPO_NAME'] + '/'
100
101
101 if 'PATH_INFO' in req.env:
102 if 'PATH_INFO' in req.env:
102 parts = req.env['PATH_INFO'].strip('/').split('/')
103 parts = req.env['PATH_INFO'].strip('/').split('/')
103 repo_parts = req.env.get('REPO_NAME', '').split('/')
104 repo_parts = req.env.get('REPO_NAME', '').split('/')
104 if parts[:len(repo_parts)] == repo_parts:
105 if parts[:len(repo_parts)] == repo_parts:
105 parts = parts[len(repo_parts):]
106 parts = parts[len(repo_parts):]
106 query = '/'.join(parts)
107 query = '/'.join(parts)
107 else:
108 else:
108 query = req.env['QUERY_STRING'].split('&', 1)[0]
109 query = req.env['QUERY_STRING'].split('&', 1)[0]
109 query = query.split(';', 1)[0]
110 query = query.split(';', 1)[0]
110
111
111 # process this if it's a protocol request
112 # process this if it's a protocol request
112 # protocol bits don't need to create any URLs
113 # protocol bits don't need to create any URLs
113 # and the clients always use the old URL structure
114 # and the clients always use the old URL structure
114
115
115 cmd = req.form.get('cmd', [''])[0]
116 cmd = req.form.get('cmd', [''])[0]
116 if protocol.iscmd(cmd):
117 if protocol.iscmd(cmd):
117 try:
118 try:
118 if query:
119 if query:
119 raise ErrorResponse(HTTP_NOT_FOUND)
120 raise ErrorResponse(HTTP_NOT_FOUND)
120 if cmd in perms:
121 if cmd in perms:
121 self.check_perm(req, perms[cmd])
122 self.check_perm(req, perms[cmd])
122 return protocol.call(self.repo, req, cmd)
123 return protocol.call(self.repo, req, cmd)
123 except ErrorResponse, inst:
124 except ErrorResponse, inst:
124 # A client that sends unbundle without 100-continue will
125 # A client that sends unbundle without 100-continue will
125 # break if we respond early.
126 # break if we respond early.
126 if (cmd == 'unbundle' and
127 if (cmd == 'unbundle' and
127 req.env.get('HTTP_EXPECT',
128 req.env.get('HTTP_EXPECT',
128 '').lower() != '100-continue'):
129 '').lower() != '100-continue'):
129 req.drain()
130 req.drain()
130 req.respond(inst, protocol.HGTYPE)
131 req.respond(inst, protocol.HGTYPE)
131 return '0\n%s\n' % inst.message
132 return '0\n%s\n' % inst.message
132
133
133 # translate user-visible url structure to internal structure
134 # translate user-visible url structure to internal structure
134
135
135 args = query.split('/', 2)
136 args = query.split('/', 2)
136 if 'cmd' not in req.form and args and args[0]:
137 if 'cmd' not in req.form and args and args[0]:
137
138
138 cmd = args.pop(0)
139 cmd = args.pop(0)
139 style = cmd.rfind('-')
140 style = cmd.rfind('-')
140 if style != -1:
141 if style != -1:
141 req.form['style'] = [cmd[:style]]
142 req.form['style'] = [cmd[:style]]
142 cmd = cmd[style + 1:]
143 cmd = cmd[style + 1:]
143
144
144 # avoid accepting e.g. style parameter as command
145 # avoid accepting e.g. style parameter as command
145 if hasattr(webcommands, cmd):
146 if hasattr(webcommands, cmd):
146 req.form['cmd'] = [cmd]
147 req.form['cmd'] = [cmd]
147 else:
148 else:
148 cmd = ''
149 cmd = ''
149
150
150 if cmd == 'static':
151 if cmd == 'static':
151 req.form['file'] = ['/'.join(args)]
152 req.form['file'] = ['/'.join(args)]
152 else:
153 else:
153 if args and args[0]:
154 if args and args[0]:
154 node = args.pop(0)
155 node = args.pop(0)
155 req.form['node'] = [node]
156 req.form['node'] = [node]
156 if args:
157 if args:
157 req.form['file'] = args
158 req.form['file'] = args
158
159
159 ua = req.env.get('HTTP_USER_AGENT', '')
160 ua = req.env.get('HTTP_USER_AGENT', '')
160 if cmd == 'rev' and 'mercurial' in ua:
161 if cmd == 'rev' and 'mercurial' in ua:
161 req.form['style'] = ['raw']
162 req.form['style'] = ['raw']
162
163
163 if cmd == 'archive':
164 if cmd == 'archive':
164 fn = req.form['node'][0]
165 fn = req.form['node'][0]
165 for type_, spec in self.archive_specs.iteritems():
166 for type_, spec in self.archive_specs.iteritems():
166 ext = spec[2]
167 ext = spec[2]
167 if fn.endswith(ext):
168 if fn.endswith(ext):
168 req.form['node'] = [fn[:-len(ext)]]
169 req.form['node'] = [fn[:-len(ext)]]
169 req.form['type'] = [type_]
170 req.form['type'] = [type_]
170
171
171 # process the web interface request
172 # process the web interface request
172
173
173 try:
174 try:
174 tmpl = self.templater(req)
175 tmpl = self.templater(req)
175 ctype = tmpl('mimetype', encoding=encoding.encoding)
176 ctype = tmpl('mimetype', encoding=encoding.encoding)
176 ctype = templater.stringify(ctype)
177 ctype = templater.stringify(ctype)
177
178
178 # check read permissions non-static content
179 # check read permissions non-static content
179 if cmd != 'static':
180 if cmd != 'static':
180 self.check_perm(req, None)
181 self.check_perm(req, None)
181
182
182 if cmd == '':
183 if cmd == '':
183 req.form['cmd'] = [tmpl.cache['default']]
184 req.form['cmd'] = [tmpl.cache['default']]
184 cmd = req.form['cmd'][0]
185 cmd = req.form['cmd'][0]
185
186
186 caching(self, req) # sets ETag header or raises NOT_MODIFIED
187 caching(self, req) # sets ETag header or raises NOT_MODIFIED
187 if cmd not in webcommands.__all__:
188 if cmd not in webcommands.__all__:
188 msg = 'no such method: %s' % cmd
189 msg = 'no such method: %s' % cmd
189 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
190 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
190 elif cmd == 'file' and 'raw' in req.form.get('style', []):
191 elif cmd == 'file' and 'raw' in req.form.get('style', []):
191 self.ctype = ctype
192 self.ctype = ctype
192 content = webcommands.rawfile(self, req, tmpl)
193 content = webcommands.rawfile(self, req, tmpl)
193 else:
194 else:
194 content = getattr(webcommands, cmd)(self, req, tmpl)
195 content = getattr(webcommands, cmd)(self, req, tmpl)
195 req.respond(HTTP_OK, ctype)
196 req.respond(HTTP_OK, ctype)
196
197
197 return content
198 return content
198
199
199 except error.LookupError, err:
200 except error.LookupError, err:
200 req.respond(HTTP_NOT_FOUND, ctype)
201 req.respond(HTTP_NOT_FOUND, ctype)
201 msg = str(err)
202 msg = str(err)
202 if 'manifest' not in msg:
203 if 'manifest' not in msg:
203 msg = 'revision not found: %s' % err.name
204 msg = 'revision not found: %s' % err.name
204 return tmpl('error', error=msg)
205 return tmpl('error', error=msg)
205 except (error.RepoError, error.RevlogError), inst:
206 except (error.RepoError, error.RevlogError), inst:
206 req.respond(HTTP_SERVER_ERROR, ctype)
207 req.respond(HTTP_SERVER_ERROR, ctype)
207 return tmpl('error', error=str(inst))
208 return tmpl('error', error=str(inst))
208 except ErrorResponse, inst:
209 except ErrorResponse, inst:
209 req.respond(inst, ctype)
210 req.respond(inst, ctype)
210 if inst.code == HTTP_NOT_MODIFIED:
211 if inst.code == HTTP_NOT_MODIFIED:
211 # Not allowed to return a body on a 304
212 # Not allowed to return a body on a 304
212 return ['']
213 return ['']
213 return tmpl('error', error=inst.message)
214 return tmpl('error', error=inst.message)
214
215
215 def templater(self, req):
216 def templater(self, req):
216
217
217 # determine scheme, port and server name
218 # determine scheme, port and server name
218 # this is needed to create absolute urls
219 # this is needed to create absolute urls
219
220
220 proto = req.env.get('wsgi.url_scheme')
221 proto = req.env.get('wsgi.url_scheme')
221 if proto == 'https':
222 if proto == 'https':
222 proto = 'https'
223 proto = 'https'
223 default_port = "443"
224 default_port = "443"
224 else:
225 else:
225 proto = 'http'
226 proto = 'http'
226 default_port = "80"
227 default_port = "80"
227
228
228 port = req.env["SERVER_PORT"]
229 port = req.env["SERVER_PORT"]
229 port = port != default_port and (":" + port) or ""
230 port = port != default_port and (":" + port) or ""
230 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
231 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
231 staticurl = self.config("web", "staticurl") or req.url + 'static/'
232 staticurl = self.config("web", "staticurl") or req.url + 'static/'
232 if not staticurl.endswith('/'):
233 if not staticurl.endswith('/'):
233 staticurl += '/'
234 staticurl += '/'
234
235
235 # some functions for the templater
236 # some functions for the templater
236
237
237 def header(**map):
238 def header(**map):
238 yield tmpl('header', encoding=encoding.encoding, **map)
239 yield tmpl('header', encoding=encoding.encoding, **map)
239
240
240 def footer(**map):
241 def footer(**map):
241 yield tmpl("footer", **map)
242 yield tmpl("footer", **map)
242
243
243 def motd(**map):
244 def motd(**map):
244 yield self.config("web", "motd", "")
245 yield self.config("web", "motd", "")
245
246
246 # figure out which style to use
247 # figure out which style to use
247
248
248 vars = {}
249 vars = {}
249 styles = (
250 styles = (
250 req.form.get('style', [None])[0],
251 req.form.get('style', [None])[0],
251 self.config('web', 'style'),
252 self.config('web', 'style'),
252 'paper',
253 'paper',
253 )
254 )
254 style, mapfile = templater.stylemap(styles, self.templatepath)
255 style, mapfile = templater.stylemap(styles, self.templatepath)
255 if style == styles[0]:
256 if style == styles[0]:
256 vars['style'] = style
257 vars['style'] = style
257
258
258 start = req.url[-1] == '?' and '&' or '?'
259 start = req.url[-1] == '?' and '&' or '?'
259 sessionvars = webutil.sessionvars(vars, start)
260 sessionvars = webutil.sessionvars(vars, start)
260
261
261 if not self.reponame:
262 if not self.reponame:
262 self.reponame = (self.config("web", "name")
263 self.reponame = (self.config("web", "name")
263 or req.env.get('REPO_NAME')
264 or req.env.get('REPO_NAME')
264 or req.url.strip('/') or self.repo.root)
265 or req.url.strip('/') or self.repo.root)
265
266
266 # create the templater
267 # create the templater
267
268
268 tmpl = templater.templater(mapfile,
269 tmpl = templater.templater(mapfile,
269 defaults={"url": req.url,
270 defaults={"url": req.url,
270 "staticurl": staticurl,
271 "staticurl": staticurl,
271 "urlbase": urlbase,
272 "urlbase": urlbase,
272 "repo": self.reponame,
273 "repo": self.reponame,
273 "header": header,
274 "header": header,
274 "footer": footer,
275 "footer": footer,
275 "motd": motd,
276 "motd": motd,
276 "sessionvars": sessionvars
277 "sessionvars": sessionvars
277 })
278 })
278 return tmpl
279 return tmpl
279
280
280 def archivelist(self, nodeid):
281 def archivelist(self, nodeid):
281 allowed = self.configlist("web", "allow_archive")
282 allowed = self.configlist("web", "allow_archive")
282 for i, spec in self.archive_specs.iteritems():
283 for i, spec in self.archive_specs.iteritems():
283 if i in allowed or self.configbool("web", "allow" + i):
284 if i in allowed or self.configbool("web", "allow" + i):
284 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
285 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
285
286
286 archive_specs = {
287 archive_specs = {
287 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
288 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
288 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
289 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
289 'zip': ('application/zip', 'zip', '.zip', None),
290 'zip': ('application/zip', 'zip', '.zip', None),
290 }
291 }
291
292
292 def check_perm(self, req, op):
293 def check_perm(self, req, op):
293 for hook in permhooks:
294 for hook in permhooks:
294 hook(self, req, op)
295 hook(self, req, op)
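
Note on the hgweb_mod.py hunk above: the only functional change is the new 'getbundle': 'pull' entry in the perms table, which routes the new wire command through the same check_perm() gate as the other read-only protocol commands before protocol.call() dispatches it. For illustration, an extension exposing its own read-only wire command could hook in the same way; the command name below is made up, and only the perms mapping itself comes from the code above.

    # Hypothetical example: register a permission level for an extra wire
    # command so hgweb's check_perm() treats it as a read ('pull') operation.
    from mercurial.hgweb import hgweb_mod

    hgweb_mod.perms['mycommand'] = 'pull'   # 'mycommand' is illustrative only
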
@@ -1,1929 +1,1951
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 import url as urlmod
17 import url as urlmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect
19 import weakref, errno, os, time, inspect
20 propertycache = util.propertycache
20 propertycache = util.propertycache
21
21
22 class localrepository(repo.repository):
22 class localrepository(repo.repository):
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 'known'))
24 'known', 'getbundle'))
25 supportedformats = set(('revlogv1', 'parentdelta'))
25 supportedformats = set(('revlogv1', 'parentdelta'))
26 supported = supportedformats | set(('store', 'fncache', 'shared',
26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 'dotencode'))
27 'dotencode'))
28
28
29 def __init__(self, baseui, path=None, create=0):
29 def __init__(self, baseui, path=None, create=0):
30 repo.repository.__init__(self)
30 repo.repository.__init__(self)
31 self.root = os.path.realpath(util.expandpath(path))
31 self.root = os.path.realpath(util.expandpath(path))
32 self.path = os.path.join(self.root, ".hg")
32 self.path = os.path.join(self.root, ".hg")
33 self.origroot = path
33 self.origroot = path
34 self.auditor = util.path_auditor(self.root, self._checknested)
34 self.auditor = util.path_auditor(self.root, self._checknested)
35 self.opener = util.opener(self.path)
35 self.opener = util.opener(self.path)
36 self.wopener = util.opener(self.root)
36 self.wopener = util.opener(self.root)
37 self.baseui = baseui
37 self.baseui = baseui
38 self.ui = baseui.copy()
38 self.ui = baseui.copy()
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 extensions.loadall(self.ui)
42 extensions.loadall(self.ui)
43 except IOError:
43 except IOError:
44 pass
44 pass
45
45
46 if not os.path.isdir(self.path):
46 if not os.path.isdir(self.path):
47 if create:
47 if create:
48 if not os.path.exists(path):
48 if not os.path.exists(path):
49 util.makedirs(path)
49 util.makedirs(path)
50 os.mkdir(self.path)
50 os.mkdir(self.path)
51 requirements = ["revlogv1"]
51 requirements = ["revlogv1"]
52 if self.ui.configbool('format', 'usestore', True):
52 if self.ui.configbool('format', 'usestore', True):
53 os.mkdir(os.path.join(self.path, "store"))
53 os.mkdir(os.path.join(self.path, "store"))
54 requirements.append("store")
54 requirements.append("store")
55 if self.ui.configbool('format', 'usefncache', True):
55 if self.ui.configbool('format', 'usefncache', True):
56 requirements.append("fncache")
56 requirements.append("fncache")
57 if self.ui.configbool('format', 'dotencode', True):
57 if self.ui.configbool('format', 'dotencode', True):
58 requirements.append('dotencode')
58 requirements.append('dotencode')
59 # create an invalid changelog
59 # create an invalid changelog
60 self.opener("00changelog.i", "a").write(
60 self.opener("00changelog.i", "a").write(
61 '\0\0\0\2' # represents revlogv2
61 '\0\0\0\2' # represents revlogv2
62 ' dummy changelog to prevent using the old repo layout'
62 ' dummy changelog to prevent using the old repo layout'
63 )
63 )
64 if self.ui.configbool('format', 'parentdelta', False):
64 if self.ui.configbool('format', 'parentdelta', False):
65 requirements.append("parentdelta")
65 requirements.append("parentdelta")
66 else:
66 else:
67 raise error.RepoError(_("repository %s not found") % path)
67 raise error.RepoError(_("repository %s not found") % path)
68 elif create:
68 elif create:
69 raise error.RepoError(_("repository %s already exists") % path)
69 raise error.RepoError(_("repository %s already exists") % path)
70 else:
70 else:
71 # find requirements
71 # find requirements
72 requirements = set()
72 requirements = set()
73 try:
73 try:
74 requirements = set(self.opener("requires").read().splitlines())
74 requirements = set(self.opener("requires").read().splitlines())
75 except IOError, inst:
75 except IOError, inst:
76 if inst.errno != errno.ENOENT:
76 if inst.errno != errno.ENOENT:
77 raise
77 raise
78 for r in requirements - self.supported:
78 for r in requirements - self.supported:
79 raise error.RequirementError(
79 raise error.RequirementError(
80 _("requirement '%s' not supported") % r)
80 _("requirement '%s' not supported") % r)
81
81
82 self.sharedpath = self.path
82 self.sharedpath = self.path
83 try:
83 try:
84 s = os.path.realpath(self.opener("sharedpath").read())
84 s = os.path.realpath(self.opener("sharedpath").read())
85 if not os.path.exists(s):
85 if not os.path.exists(s):
86 raise error.RepoError(
86 raise error.RepoError(
87 _('.hg/sharedpath points to nonexistent directory %s') % s)
87 _('.hg/sharedpath points to nonexistent directory %s') % s)
88 self.sharedpath = s
88 self.sharedpath = s
89 except IOError, inst:
89 except IOError, inst:
90 if inst.errno != errno.ENOENT:
90 if inst.errno != errno.ENOENT:
91 raise
91 raise
92
92
93 self.store = store.store(requirements, self.sharedpath, util.opener)
93 self.store = store.store(requirements, self.sharedpath, util.opener)
94 self.spath = self.store.path
94 self.spath = self.store.path
95 self.sopener = self.store.opener
95 self.sopener = self.store.opener
96 self.sjoin = self.store.join
96 self.sjoin = self.store.join
97 self.opener.createmode = self.store.createmode
97 self.opener.createmode = self.store.createmode
98 self._applyrequirements(requirements)
98 self._applyrequirements(requirements)
99 if create:
99 if create:
100 self._writerequirements()
100 self._writerequirements()
101
101
102 # These two define the set of tags for this repository. _tags
102 # These two define the set of tags for this repository. _tags
103 # maps tag name to node; _tagtypes maps tag name to 'global' or
103 # maps tag name to node; _tagtypes maps tag name to 'global' or
104 # 'local'. (Global tags are defined by .hgtags across all
104 # 'local'. (Global tags are defined by .hgtags across all
105 # heads, and local tags are defined in .hg/localtags.) They
105 # heads, and local tags are defined in .hg/localtags.) They
106 # constitute the in-memory cache of tags.
106 # constitute the in-memory cache of tags.
107 self._tags = None
107 self._tags = None
108 self._tagtypes = None
108 self._tagtypes = None
109
109
110 self._branchcache = None
110 self._branchcache = None
111 self._branchcachetip = None
111 self._branchcachetip = None
112 self.nodetagscache = None
112 self.nodetagscache = None
113 self.filterpats = {}
113 self.filterpats = {}
114 self._datafilters = {}
114 self._datafilters = {}
115 self._transref = self._lockref = self._wlockref = None
115 self._transref = self._lockref = self._wlockref = None
116
116
117 def _applyrequirements(self, requirements):
117 def _applyrequirements(self, requirements):
118 self.requirements = requirements
118 self.requirements = requirements
119 self.sopener.options = {}
119 self.sopener.options = {}
120 if 'parentdelta' in requirements:
120 if 'parentdelta' in requirements:
121 self.sopener.options['parentdelta'] = 1
121 self.sopener.options['parentdelta'] = 1
122
122
123 def _writerequirements(self):
123 def _writerequirements(self):
124 reqfile = self.opener("requires", "w")
124 reqfile = self.opener("requires", "w")
125 for r in self.requirements:
125 for r in self.requirements:
126 reqfile.write("%s\n" % r)
126 reqfile.write("%s\n" % r)
127 reqfile.close()
127 reqfile.close()
128
128
129 def _checknested(self, path):
129 def _checknested(self, path):
130 """Determine if path is a legal nested repository."""
130 """Determine if path is a legal nested repository."""
131 if not path.startswith(self.root):
131 if not path.startswith(self.root):
132 return False
132 return False
133 subpath = path[len(self.root) + 1:]
133 subpath = path[len(self.root) + 1:]
134
134
135 # XXX: Checking against the current working copy is wrong in
135 # XXX: Checking against the current working copy is wrong in
136 # the sense that it can reject things like
136 # the sense that it can reject things like
137 #
137 #
138 # $ hg cat -r 10 sub/x.txt
138 # $ hg cat -r 10 sub/x.txt
139 #
139 #
140 # if sub/ is no longer a subrepository in the working copy
140 # if sub/ is no longer a subrepository in the working copy
141 # parent revision.
141 # parent revision.
142 #
142 #
143 # However, it can of course also allow things that would have
143 # However, it can of course also allow things that would have
144 # been rejected before, such as the above cat command if sub/
144 # been rejected before, such as the above cat command if sub/
145 # is a subrepository now, but was a normal directory before.
145 # is a subrepository now, but was a normal directory before.
146 # The old path auditor would have rejected by mistake since it
146 # The old path auditor would have rejected by mistake since it
147 # panics when it sees sub/.hg/.
147 # panics when it sees sub/.hg/.
148 #
148 #
149 # All in all, checking against the working copy seems sensible
149 # All in all, checking against the working copy seems sensible
150 # since we want to prevent access to nested repositories on
150 # since we want to prevent access to nested repositories on
151 # the filesystem *now*.
151 # the filesystem *now*.
152 ctx = self[None]
152 ctx = self[None]
153 parts = util.splitpath(subpath)
153 parts = util.splitpath(subpath)
154 while parts:
154 while parts:
155 prefix = os.sep.join(parts)
155 prefix = os.sep.join(parts)
156 if prefix in ctx.substate:
156 if prefix in ctx.substate:
157 if prefix == subpath:
157 if prefix == subpath:
158 return True
158 return True
159 else:
159 else:
160 sub = ctx.sub(prefix)
160 sub = ctx.sub(prefix)
161 return sub.checknested(subpath[len(prefix) + 1:])
161 return sub.checknested(subpath[len(prefix) + 1:])
162 else:
162 else:
163 parts.pop()
163 parts.pop()
164 return False
164 return False
165
165
166 @util.propertycache
166 @util.propertycache
167 def _bookmarks(self):
167 def _bookmarks(self):
168 return bookmarks.read(self)
168 return bookmarks.read(self)
169
169
170 @util.propertycache
170 @util.propertycache
171 def _bookmarkcurrent(self):
171 def _bookmarkcurrent(self):
172 return bookmarks.readcurrent(self)
172 return bookmarks.readcurrent(self)
173
173
174 @propertycache
174 @propertycache
175 def changelog(self):
175 def changelog(self):
176 c = changelog.changelog(self.sopener)
176 c = changelog.changelog(self.sopener)
177 if 'HG_PENDING' in os.environ:
177 if 'HG_PENDING' in os.environ:
178 p = os.environ['HG_PENDING']
178 p = os.environ['HG_PENDING']
179 if p.startswith(self.root):
179 if p.startswith(self.root):
180 c.readpending('00changelog.i.a')
180 c.readpending('00changelog.i.a')
181 self.sopener.options['defversion'] = c.version
181 self.sopener.options['defversion'] = c.version
182 return c
182 return c
183
183
184 @propertycache
184 @propertycache
185 def manifest(self):
185 def manifest(self):
186 return manifest.manifest(self.sopener)
186 return manifest.manifest(self.sopener)
187
187
188 @propertycache
188 @propertycache
189 def dirstate(self):
189 def dirstate(self):
190 warned = [0]
190 warned = [0]
191 def validate(node):
191 def validate(node):
192 try:
192 try:
193 r = self.changelog.rev(node)
193 r = self.changelog.rev(node)
194 return node
194 return node
195 except error.LookupError:
195 except error.LookupError:
196 if not warned[0]:
196 if not warned[0]:
197 warned[0] = True
197 warned[0] = True
198 self.ui.warn(_("warning: ignoring unknown"
198 self.ui.warn(_("warning: ignoring unknown"
199 " working parent %s!\n") % short(node))
199 " working parent %s!\n") % short(node))
200 return nullid
200 return nullid
201
201
202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
203
203
204 def __getitem__(self, changeid):
204 def __getitem__(self, changeid):
205 if changeid is None:
205 if changeid is None:
206 return context.workingctx(self)
206 return context.workingctx(self)
207 return context.changectx(self, changeid)
207 return context.changectx(self, changeid)
208
208
209 def __contains__(self, changeid):
209 def __contains__(self, changeid):
210 try:
210 try:
211 return bool(self.lookup(changeid))
211 return bool(self.lookup(changeid))
212 except error.RepoLookupError:
212 except error.RepoLookupError:
213 return False
213 return False
214
214
215 def __nonzero__(self):
215 def __nonzero__(self):
216 return True
216 return True
217
217
218 def __len__(self):
218 def __len__(self):
219 return len(self.changelog)
219 return len(self.changelog)
220
220
221 def __iter__(self):
221 def __iter__(self):
222 for i in xrange(len(self)):
222 for i in xrange(len(self)):
223 yield i
223 yield i
224
224
225 def url(self):
225 def url(self):
226 return 'file:' + self.root
226 return 'file:' + self.root
227
227
228 def hook(self, name, throw=False, **args):
228 def hook(self, name, throw=False, **args):
229 return hook.hook(self.ui, self, name, throw, **args)
229 return hook.hook(self.ui, self, name, throw, **args)
230
230
231 tag_disallowed = ':\r\n'
231 tag_disallowed = ':\r\n'
232
232
233 def _tag(self, names, node, message, local, user, date, extra={}):
233 def _tag(self, names, node, message, local, user, date, extra={}):
234 if isinstance(names, str):
234 if isinstance(names, str):
235 allchars = names
235 allchars = names
236 names = (names,)
236 names = (names,)
237 else:
237 else:
238 allchars = ''.join(names)
238 allchars = ''.join(names)
239 for c in self.tag_disallowed:
239 for c in self.tag_disallowed:
240 if c in allchars:
240 if c in allchars:
241 raise util.Abort(_('%r cannot be used in a tag name') % c)
241 raise util.Abort(_('%r cannot be used in a tag name') % c)
242
242
243 branches = self.branchmap()
243 branches = self.branchmap()
244 for name in names:
244 for name in names:
245 self.hook('pretag', throw=True, node=hex(node), tag=name,
245 self.hook('pretag', throw=True, node=hex(node), tag=name,
246 local=local)
246 local=local)
247 if name in branches:
247 if name in branches:
248 self.ui.warn(_("warning: tag %s conflicts with existing"
248 self.ui.warn(_("warning: tag %s conflicts with existing"
249 " branch name\n") % name)
249 " branch name\n") % name)
250
250
251 def writetags(fp, names, munge, prevtags):
251 def writetags(fp, names, munge, prevtags):
252 fp.seek(0, 2)
252 fp.seek(0, 2)
253 if prevtags and prevtags[-1] != '\n':
253 if prevtags and prevtags[-1] != '\n':
254 fp.write('\n')
254 fp.write('\n')
255 for name in names:
255 for name in names:
256 m = munge and munge(name) or name
256 m = munge and munge(name) or name
257 if self._tagtypes and name in self._tagtypes:
257 if self._tagtypes and name in self._tagtypes:
258 old = self._tags.get(name, nullid)
258 old = self._tags.get(name, nullid)
259 fp.write('%s %s\n' % (hex(old), m))
259 fp.write('%s %s\n' % (hex(old), m))
260 fp.write('%s %s\n' % (hex(node), m))
260 fp.write('%s %s\n' % (hex(node), m))
261 fp.close()
261 fp.close()
262
262
263 prevtags = ''
263 prevtags = ''
264 if local:
264 if local:
265 try:
265 try:
266 fp = self.opener('localtags', 'r+')
266 fp = self.opener('localtags', 'r+')
267 except IOError:
267 except IOError:
268 fp = self.opener('localtags', 'a')
268 fp = self.opener('localtags', 'a')
269 else:
269 else:
270 prevtags = fp.read()
270 prevtags = fp.read()
271
271
272 # local tags are stored in the current charset
272 # local tags are stored in the current charset
273 writetags(fp, names, None, prevtags)
273 writetags(fp, names, None, prevtags)
274 for name in names:
274 for name in names:
275 self.hook('tag', node=hex(node), tag=name, local=local)
275 self.hook('tag', node=hex(node), tag=name, local=local)
276 return
276 return
277
277
278 try:
278 try:
279 fp = self.wfile('.hgtags', 'rb+')
279 fp = self.wfile('.hgtags', 'rb+')
280 except IOError:
280 except IOError:
281 fp = self.wfile('.hgtags', 'ab')
281 fp = self.wfile('.hgtags', 'ab')
282 else:
282 else:
283 prevtags = fp.read()
283 prevtags = fp.read()
284
284
285 # committed tags are stored in UTF-8
285 # committed tags are stored in UTF-8
286 writetags(fp, names, encoding.fromlocal, prevtags)
286 writetags(fp, names, encoding.fromlocal, prevtags)
287
287
288 fp.close()
288 fp.close()
289
289
290 if '.hgtags' not in self.dirstate:
290 if '.hgtags' not in self.dirstate:
291 self[None].add(['.hgtags'])
291 self[None].add(['.hgtags'])
292
292
293 m = matchmod.exact(self.root, '', ['.hgtags'])
293 m = matchmod.exact(self.root, '', ['.hgtags'])
294 tagnode = self.commit(message, user, date, extra=extra, match=m)
294 tagnode = self.commit(message, user, date, extra=extra, match=m)
295
295
296 for name in names:
296 for name in names:
297 self.hook('tag', node=hex(node), tag=name, local=local)
297 self.hook('tag', node=hex(node), tag=name, local=local)
298
298
299 return tagnode
299 return tagnode
300
300
301 def tag(self, names, node, message, local, user, date):
301 def tag(self, names, node, message, local, user, date):
302 '''tag a revision with one or more symbolic names.
302 '''tag a revision with one or more symbolic names.
303
303
304 names is a list of strings or, when adding a single tag, names may be a
304 names is a list of strings or, when adding a single tag, names may be a
305 string.
305 string.
306
306
307 if local is True, the tags are stored in a per-repository file.
307 if local is True, the tags are stored in a per-repository file.
308 otherwise, they are stored in the .hgtags file, and a new
308 otherwise, they are stored in the .hgtags file, and a new
309 changeset is committed with the change.
309 changeset is committed with the change.
310
310
311 keyword arguments:
311 keyword arguments:
312
312
313 local: whether to store tags in non-version-controlled file
313 local: whether to store tags in non-version-controlled file
314 (default False)
314 (default False)
315
315
316 message: commit message to use if committing
316 message: commit message to use if committing
317
317
318 user: name of user to use if committing
318 user: name of user to use if committing
319
319
320 date: date tuple to use if committing'''
320 date: date tuple to use if committing'''
321
321
322 if not local:
322 if not local:
323 for x in self.status()[:5]:
323 for x in self.status()[:5]:
324 if '.hgtags' in x:
324 if '.hgtags' in x:
325 raise util.Abort(_('working copy of .hgtags is changed '
325 raise util.Abort(_('working copy of .hgtags is changed '
326 '(please commit .hgtags manually)'))
326 '(please commit .hgtags manually)'))
327
327
328 self.tags() # instantiate the cache
328 self.tags() # instantiate the cache
329 self._tag(names, node, message, local, user, date)
329 self._tag(names, node, message, local, user, date)
330
330
331 def tags(self):
331 def tags(self):
332 '''return a mapping of tag to node'''
332 '''return a mapping of tag to node'''
333 if self._tags is None:
333 if self._tags is None:
334 (self._tags, self._tagtypes) = self._findtags()
334 (self._tags, self._tagtypes) = self._findtags()
335
335
336 return self._tags
336 return self._tags
337
337
338 def _findtags(self):
338 def _findtags(self):
339 '''Do the hard work of finding tags. Return a pair of dicts
339 '''Do the hard work of finding tags. Return a pair of dicts
340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
341 maps tag name to a string like \'global\' or \'local\'.
341 maps tag name to a string like \'global\' or \'local\'.
342 Subclasses or extensions are free to add their own tags, but
342 Subclasses or extensions are free to add their own tags, but
343 should be aware that the returned dicts will be retained for the
343 should be aware that the returned dicts will be retained for the
344 duration of the localrepo object.'''
344 duration of the localrepo object.'''
345
345
346 # XXX what tagtype should subclasses/extensions use? Currently
346 # XXX what tagtype should subclasses/extensions use? Currently
347 # mq and bookmarks add tags, but do not set the tagtype at all.
347 # mq and bookmarks add tags, but do not set the tagtype at all.
348 # Should each extension invent its own tag type? Should there
348 # Should each extension invent its own tag type? Should there
349 # be one tagtype for all such "virtual" tags? Or is the status
349 # be one tagtype for all such "virtual" tags? Or is the status
350 # quo fine?
350 # quo fine?
351
351
352 alltags = {} # map tag name to (node, hist)
352 alltags = {} # map tag name to (node, hist)
353 tagtypes = {}
353 tagtypes = {}
354
354
355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
357
357
358 # Build the return dicts. Have to re-encode tag names because
358 # Build the return dicts. Have to re-encode tag names because
359 # the tags module always uses UTF-8 (in order not to lose info
359 # the tags module always uses UTF-8 (in order not to lose info
360 # writing to the cache), but the rest of Mercurial wants them in
360 # writing to the cache), but the rest of Mercurial wants them in
361 # local encoding.
361 # local encoding.
362 tags = {}
362 tags = {}
363 for (name, (node, hist)) in alltags.iteritems():
363 for (name, (node, hist)) in alltags.iteritems():
364 if node != nullid:
364 if node != nullid:
365 tags[encoding.tolocal(name)] = node
365 tags[encoding.tolocal(name)] = node
366 tags['tip'] = self.changelog.tip()
366 tags['tip'] = self.changelog.tip()
367 tagtypes = dict([(encoding.tolocal(name), value)
367 tagtypes = dict([(encoding.tolocal(name), value)
368 for (name, value) in tagtypes.iteritems()])
368 for (name, value) in tagtypes.iteritems()])
369 return (tags, tagtypes)
369 return (tags, tagtypes)
370
370
371 def tagtype(self, tagname):
371 def tagtype(self, tagname):
372 '''
372 '''
373 return the type of the given tag. result can be:
373 return the type of the given tag. result can be:
374
374
375 'local' : a local tag
375 'local' : a local tag
376 'global' : a global tag
376 'global' : a global tag
377 None : tag does not exist
377 None : tag does not exist
378 '''
378 '''
379
379
380 self.tags()
380 self.tags()
381
381
382 return self._tagtypes.get(tagname)
382 return self._tagtypes.get(tagname)
383
383
384 def tagslist(self):
384 def tagslist(self):
385 '''return a list of tags ordered by revision'''
385 '''return a list of tags ordered by revision'''
386 l = []
386 l = []
387 for t, n in self.tags().iteritems():
387 for t, n in self.tags().iteritems():
388 try:
388 try:
389 r = self.changelog.rev(n)
389 r = self.changelog.rev(n)
390 except:
390 except:
391 r = -2 # sort to the beginning of the list if unknown
391 r = -2 # sort to the beginning of the list if unknown
392 l.append((r, t, n))
392 l.append((r, t, n))
393 return [(t, n) for r, t, n in sorted(l)]
393 return [(t, n) for r, t, n in sorted(l)]
394
394
395 def nodetags(self, node):
395 def nodetags(self, node):
396 '''return the tags associated with a node'''
396 '''return the tags associated with a node'''
397 if not self.nodetagscache:
397 if not self.nodetagscache:
398 self.nodetagscache = {}
398 self.nodetagscache = {}
399 for t, n in self.tags().iteritems():
399 for t, n in self.tags().iteritems():
400 self.nodetagscache.setdefault(n, []).append(t)
400 self.nodetagscache.setdefault(n, []).append(t)
401 for tags in self.nodetagscache.itervalues():
401 for tags in self.nodetagscache.itervalues():
402 tags.sort()
402 tags.sort()
403 return self.nodetagscache.get(node, [])
403 return self.nodetagscache.get(node, [])
404
404
405 def nodebookmarks(self, node):
405 def nodebookmarks(self, node):
406 marks = []
406 marks = []
407 for bookmark, n in self._bookmarks.iteritems():
407 for bookmark, n in self._bookmarks.iteritems():
408 if n == node:
408 if n == node:
409 marks.append(bookmark)
409 marks.append(bookmark)
410 return sorted(marks)
410 return sorted(marks)
411
411
412 def _branchtags(self, partial, lrev):
412 def _branchtags(self, partial, lrev):
413 # TODO: rename this function?
413 # TODO: rename this function?
414 tiprev = len(self) - 1
414 tiprev = len(self) - 1
415 if lrev != tiprev:
415 if lrev != tiprev:
416 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
416 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
417 self._updatebranchcache(partial, ctxgen)
417 self._updatebranchcache(partial, ctxgen)
418 self._writebranchcache(partial, self.changelog.tip(), tiprev)
418 self._writebranchcache(partial, self.changelog.tip(), tiprev)
419
419
420 return partial
420 return partial
421
421
422 def updatebranchcache(self):
422 def updatebranchcache(self):
423 tip = self.changelog.tip()
423 tip = self.changelog.tip()
424 if self._branchcache is not None and self._branchcachetip == tip:
424 if self._branchcache is not None and self._branchcachetip == tip:
425 return self._branchcache
425 return self._branchcache
426
426
427 oldtip = self._branchcachetip
427 oldtip = self._branchcachetip
428 self._branchcachetip = tip
428 self._branchcachetip = tip
429 if oldtip is None or oldtip not in self.changelog.nodemap:
429 if oldtip is None or oldtip not in self.changelog.nodemap:
430 partial, last, lrev = self._readbranchcache()
430 partial, last, lrev = self._readbranchcache()
431 else:
431 else:
432 lrev = self.changelog.rev(oldtip)
432 lrev = self.changelog.rev(oldtip)
433 partial = self._branchcache
433 partial = self._branchcache
434
434
435 self._branchtags(partial, lrev)
435 self._branchtags(partial, lrev)
436 # this private cache holds all heads (not just tips)
436 # this private cache holds all heads (not just tips)
437 self._branchcache = partial
437 self._branchcache = partial
438
438
439 def branchmap(self):
439 def branchmap(self):
440 '''returns a dictionary {branch: [branchheads]}'''
440 '''returns a dictionary {branch: [branchheads]}'''
441 self.updatebranchcache()
441 self.updatebranchcache()
442 return self._branchcache
442 return self._branchcache
443
443
444 def branchtags(self):
444 def branchtags(self):
445 '''return a dict where branch names map to the tipmost head of
445 '''return a dict where branch names map to the tipmost head of
446 the branch, open heads come before closed'''
446 the branch, open heads come before closed'''
447 bt = {}
447 bt = {}
448 for bn, heads in self.branchmap().iteritems():
448 for bn, heads in self.branchmap().iteritems():
449 tip = heads[-1]
449 tip = heads[-1]
450 for h in reversed(heads):
450 for h in reversed(heads):
451 if 'close' not in self.changelog.read(h)[5]:
451 if 'close' not in self.changelog.read(h)[5]:
452 tip = h
452 tip = h
453 break
453 break
454 bt[bn] = tip
454 bt[bn] = tip
455 return bt
455 return bt
456
456
457 def _readbranchcache(self):
457 def _readbranchcache(self):
458 partial = {}
458 partial = {}
459 try:
459 try:
460 f = self.opener("cache/branchheads")
460 f = self.opener("cache/branchheads")
461 lines = f.read().split('\n')
461 lines = f.read().split('\n')
462 f.close()
462 f.close()
463 except (IOError, OSError):
463 except (IOError, OSError):
464 return {}, nullid, nullrev
464 return {}, nullid, nullrev
465
465
466 try:
466 try:
467 last, lrev = lines.pop(0).split(" ", 1)
467 last, lrev = lines.pop(0).split(" ", 1)
468 last, lrev = bin(last), int(lrev)
468 last, lrev = bin(last), int(lrev)
469 if lrev >= len(self) or self[lrev].node() != last:
469 if lrev >= len(self) or self[lrev].node() != last:
470 # invalidate the cache
470 # invalidate the cache
471 raise ValueError('invalidating branch cache (tip differs)')
471 raise ValueError('invalidating branch cache (tip differs)')
472 for l in lines:
472 for l in lines:
473 if not l:
473 if not l:
474 continue
474 continue
475 node, label = l.split(" ", 1)
475 node, label = l.split(" ", 1)
476 label = encoding.tolocal(label.strip())
476 label = encoding.tolocal(label.strip())
477 partial.setdefault(label, []).append(bin(node))
477 partial.setdefault(label, []).append(bin(node))
478 except KeyboardInterrupt:
478 except KeyboardInterrupt:
479 raise
479 raise
480 except Exception, inst:
480 except Exception, inst:
481 if self.ui.debugflag:
481 if self.ui.debugflag:
482 self.ui.warn(str(inst), '\n')
482 self.ui.warn(str(inst), '\n')
483 partial, last, lrev = {}, nullid, nullrev
483 partial, last, lrev = {}, nullid, nullrev
484 return partial, last, lrev
484 return partial, last, lrev
485
485
486 def _writebranchcache(self, branches, tip, tiprev):
486 def _writebranchcache(self, branches, tip, tiprev):
487 try:
487 try:
488 f = self.opener("cache/branchheads", "w", atomictemp=True)
488 f = self.opener("cache/branchheads", "w", atomictemp=True)
489 f.write("%s %s\n" % (hex(tip), tiprev))
489 f.write("%s %s\n" % (hex(tip), tiprev))
490 for label, nodes in branches.iteritems():
490 for label, nodes in branches.iteritems():
491 for node in nodes:
491 for node in nodes:
492 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
492 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
493 f.rename()
493 f.rename()
494 except (IOError, OSError):
494 except (IOError, OSError):
495 pass
495 pass
496
496
497 def _updatebranchcache(self, partial, ctxgen):
497 def _updatebranchcache(self, partial, ctxgen):
498 # collect new branch entries
498 # collect new branch entries
499 newbranches = {}
499 newbranches = {}
500 for c in ctxgen:
500 for c in ctxgen:
501 newbranches.setdefault(c.branch(), []).append(c.node())
501 newbranches.setdefault(c.branch(), []).append(c.node())
502 # if older branchheads are reachable from new ones, they aren't
502 # if older branchheads are reachable from new ones, they aren't
503 # really branchheads. Note checking parents is insufficient:
503 # really branchheads. Note checking parents is insufficient:
504 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
504 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
505 for branch, newnodes in newbranches.iteritems():
505 for branch, newnodes in newbranches.iteritems():
506 bheads = partial.setdefault(branch, [])
506 bheads = partial.setdefault(branch, [])
507 bheads.extend(newnodes)
507 bheads.extend(newnodes)
508 if len(bheads) <= 1:
508 if len(bheads) <= 1:
509 continue
509 continue
510 # starting from tip means fewer passes over reachable
510 # starting from tip means fewer passes over reachable
511 while newnodes:
511 while newnodes:
512 latest = newnodes.pop()
512 latest = newnodes.pop()
513 if latest not in bheads:
513 if latest not in bheads:
514 continue
514 continue
515 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
515 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
516 reachable = self.changelog.reachable(latest, minbhrev)
516 reachable = self.changelog.reachable(latest, minbhrev)
517 reachable.remove(latest)
517 reachable.remove(latest)
518 bheads = [b for b in bheads if b not in reachable]
518 bheads = [b for b in bheads if b not in reachable]
519 partial[branch] = bheads
519 partial[branch] = bheads
520
520
521 def lookup(self, key):
521 def lookup(self, key):
522 if isinstance(key, int):
522 if isinstance(key, int):
523 return self.changelog.node(key)
523 return self.changelog.node(key)
524 elif key == '.':
524 elif key == '.':
525 return self.dirstate.parents()[0]
525 return self.dirstate.parents()[0]
526 elif key == 'null':
526 elif key == 'null':
527 return nullid
527 return nullid
528 elif key == 'tip':
528 elif key == 'tip':
529 return self.changelog.tip()
529 return self.changelog.tip()
530 n = self.changelog._match(key)
530 n = self.changelog._match(key)
531 if n:
531 if n:
532 return n
532 return n
533 if key in self._bookmarks:
533 if key in self._bookmarks:
534 return self._bookmarks[key]
534 return self._bookmarks[key]
535 if key in self.tags():
535 if key in self.tags():
536 return self.tags()[key]
536 return self.tags()[key]
537 if key in self.branchtags():
537 if key in self.branchtags():
538 return self.branchtags()[key]
538 return self.branchtags()[key]
539 n = self.changelog._partialmatch(key)
539 n = self.changelog._partialmatch(key)
540 if n:
540 if n:
541 return n
541 return n
542
542
543 # can't find key, check if it might have come from damaged dirstate
543 # can't find key, check if it might have come from damaged dirstate
544 if key in self.dirstate.parents():
544 if key in self.dirstate.parents():
545 raise error.Abort(_("working directory has unknown parent '%s'!")
545 raise error.Abort(_("working directory has unknown parent '%s'!")
546 % short(key))
546 % short(key))
547 try:
547 try:
548 if len(key) == 20:
548 if len(key) == 20:
549 key = hex(key)
549 key = hex(key)
550 except:
550 except:
551 pass
551 pass
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
553
553
554 def lookupbranch(self, key, remote=None):
554 def lookupbranch(self, key, remote=None):
555 repo = remote or self
555 repo = remote or self
556 if key in repo.branchmap():
556 if key in repo.branchmap():
557 return key
557 return key
558
558
559 repo = (remote and remote.local()) and remote or self
559 repo = (remote and remote.local()) and remote or self
560 return repo[key].branch()
560 return repo[key].branch()
561
561
562 def known(self, nodes):
562 def known(self, nodes):
563 nm = self.changelog.nodemap
563 nm = self.changelog.nodemap
564 return [(n in nm) for n in nodes]
564 return [(n in nm) for n in nodes]
565
565
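# known() answers node-membership queries without raising on unknown ids,
# which is what the new discovery code needs. A minimal sketch, assuming a
# repository at the hypothetical path 'repo' and a made-up 20-byte node:
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), 'repo')
bogus = '\x01' * 20                       # a node id the repo does not have
print repo.known(repo.heads() + [bogus])  # e.g. [True, ..., False]
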
566 def local(self):
566 def local(self):
567 return True
567 return True
568
568
569 def join(self, f):
569 def join(self, f):
570 return os.path.join(self.path, f)
570 return os.path.join(self.path, f)
571
571
572 def wjoin(self, f):
572 def wjoin(self, f):
573 return os.path.join(self.root, f)
573 return os.path.join(self.root, f)
574
574
575 def file(self, f):
575 def file(self, f):
576 if f[0] == '/':
576 if f[0] == '/':
577 f = f[1:]
577 f = f[1:]
578 return filelog.filelog(self.sopener, f)
578 return filelog.filelog(self.sopener, f)
579
579
580 def changectx(self, changeid):
580 def changectx(self, changeid):
581 return self[changeid]
581 return self[changeid]
582
582
583 def parents(self, changeid=None):
583 def parents(self, changeid=None):
584 '''get list of changectxs for parents of changeid'''
584 '''get list of changectxs for parents of changeid'''
585 return self[changeid].parents()
585 return self[changeid].parents()
586
586
587 def filectx(self, path, changeid=None, fileid=None):
587 def filectx(self, path, changeid=None, fileid=None):
588 """changeid can be a changeset revision, node, or tag.
588 """changeid can be a changeset revision, node, or tag.
589 fileid can be a file revision or node."""
589 fileid can be a file revision or node."""
590 return context.filectx(self, path, changeid, fileid)
590 return context.filectx(self, path, changeid, fileid)
591
591
592 def getcwd(self):
592 def getcwd(self):
593 return self.dirstate.getcwd()
593 return self.dirstate.getcwd()
594
594
595 def pathto(self, f, cwd=None):
595 def pathto(self, f, cwd=None):
596 return self.dirstate.pathto(f, cwd)
596 return self.dirstate.pathto(f, cwd)
597
597
598 def wfile(self, f, mode='r'):
598 def wfile(self, f, mode='r'):
599 return self.wopener(f, mode)
599 return self.wopener(f, mode)
600
600
601 def _link(self, f):
601 def _link(self, f):
602 return os.path.islink(self.wjoin(f))
602 return os.path.islink(self.wjoin(f))
603
603
604 def _loadfilter(self, filter):
604 def _loadfilter(self, filter):
605 if filter not in self.filterpats:
605 if filter not in self.filterpats:
606 l = []
606 l = []
607 for pat, cmd in self.ui.configitems(filter):
607 for pat, cmd in self.ui.configitems(filter):
608 if cmd == '!':
608 if cmd == '!':
609 continue
609 continue
610 mf = matchmod.match(self.root, '', [pat])
610 mf = matchmod.match(self.root, '', [pat])
611 fn = None
611 fn = None
612 params = cmd
612 params = cmd
613 for name, filterfn in self._datafilters.iteritems():
613 for name, filterfn in self._datafilters.iteritems():
614 if cmd.startswith(name):
614 if cmd.startswith(name):
615 fn = filterfn
615 fn = filterfn
616 params = cmd[len(name):].lstrip()
616 params = cmd[len(name):].lstrip()
617 break
617 break
618 if not fn:
618 if not fn:
619 fn = lambda s, c, **kwargs: util.filter(s, c)
619 fn = lambda s, c, **kwargs: util.filter(s, c)
620 # Wrap old filters not supporting keyword arguments
620 # Wrap old filters not supporting keyword arguments
621 if not inspect.getargspec(fn)[2]:
621 if not inspect.getargspec(fn)[2]:
622 oldfn = fn
622 oldfn = fn
623 fn = lambda s, c, **kwargs: oldfn(s, c)
623 fn = lambda s, c, **kwargs: oldfn(s, c)
624 l.append((mf, fn, params))
624 l.append((mf, fn, params))
625 self.filterpats[filter] = l
625 self.filterpats[filter] = l
626 return self.filterpats[filter]
626 return self.filterpats[filter]
627
627
628 def _filter(self, filterpats, filename, data):
628 def _filter(self, filterpats, filename, data):
629 for mf, fn, cmd in filterpats:
629 for mf, fn, cmd in filterpats:
630 if mf(filename):
630 if mf(filename):
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
633 break
633 break
634
634
635 return data
635 return data
636
636
637 @propertycache
637 @propertycache
638 def _encodefilterpats(self):
638 def _encodefilterpats(self):
639 return self._loadfilter('encode')
639 return self._loadfilter('encode')
640
640
641 @propertycache
641 @propertycache
642 def _decodefilterpats(self):
642 def _decodefilterpats(self):
643 return self._loadfilter('decode')
643 return self._loadfilter('decode')
644
644
645 def adddatafilter(self, name, filter):
645 def adddatafilter(self, name, filter):
646 self._datafilters[name] = filter
646 self._datafilters[name] = filter
647
647
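# How adddatafilter() and _loadfilter() fit together: an [encode]/[decode]
# configuration entry maps a file pattern to a command, and a command whose
# prefix matches a registered data filter is dispatched to that function.
# A minimal sketch with a hypothetical 'upper:' filter and file name:
from mercurial import hg, ui as uimod

def upperfilter(s, cmd, **kwargs):
    # receives the file data and the part of the command after the prefix
    return s.upper()

repo = hg.repository(uimod.ui(), 'repo')
repo.adddatafilter('upper:', upperfilter)
repo.ui.setconfig('encode', '**.txt', 'upper:')
print repo.wread('a.txt')    # data passes through upperfilter on read
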
648 def wread(self, filename):
648 def wread(self, filename):
649 if self._link(filename):
649 if self._link(filename):
650 data = os.readlink(self.wjoin(filename))
650 data = os.readlink(self.wjoin(filename))
651 else:
651 else:
652 data = self.wopener(filename, 'r').read()
652 data = self.wopener(filename, 'r').read()
653 return self._filter(self._encodefilterpats, filename, data)
653 return self._filter(self._encodefilterpats, filename, data)
654
654
655 def wwrite(self, filename, data, flags):
655 def wwrite(self, filename, data, flags):
656 data = self._filter(self._decodefilterpats, filename, data)
656 data = self._filter(self._decodefilterpats, filename, data)
657 if 'l' in flags:
657 if 'l' in flags:
658 self.wopener.symlink(data, filename)
658 self.wopener.symlink(data, filename)
659 else:
659 else:
660 self.wopener(filename, 'w').write(data)
660 self.wopener(filename, 'w').write(data)
661 if 'x' in flags:
661 if 'x' in flags:
662 util.set_flags(self.wjoin(filename), False, True)
662 util.set_flags(self.wjoin(filename), False, True)
663
663
664 def wwritedata(self, filename, data):
664 def wwritedata(self, filename, data):
665 return self._filter(self._decodefilterpats, filename, data)
665 return self._filter(self._decodefilterpats, filename, data)
666
666
667 def transaction(self, desc):
667 def transaction(self, desc):
668 tr = self._transref and self._transref() or None
668 tr = self._transref and self._transref() or None
669 if tr and tr.running():
669 if tr and tr.running():
670 return tr.nest()
670 return tr.nest()
671
671
672 # abort here if the journal already exists
672 # abort here if the journal already exists
673 if os.path.exists(self.sjoin("journal")):
673 if os.path.exists(self.sjoin("journal")):
674 raise error.RepoError(
674 raise error.RepoError(
675 _("abandoned transaction found - run hg recover"))
675 _("abandoned transaction found - run hg recover"))
676
676
677 # save dirstate for rollback
677 # save dirstate for rollback
678 try:
678 try:
679 ds = self.opener("dirstate").read()
679 ds = self.opener("dirstate").read()
680 except IOError:
680 except IOError:
681 ds = ""
681 ds = ""
682 self.opener("journal.dirstate", "w").write(ds)
682 self.opener("journal.dirstate", "w").write(ds)
683 self.opener("journal.branch", "w").write(
683 self.opener("journal.branch", "w").write(
684 encoding.fromlocal(self.dirstate.branch()))
684 encoding.fromlocal(self.dirstate.branch()))
685 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
685 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
686
686
687 renames = [(self.sjoin("journal"), self.sjoin("undo")),
687 renames = [(self.sjoin("journal"), self.sjoin("undo")),
688 (self.join("journal.dirstate"), self.join("undo.dirstate")),
688 (self.join("journal.dirstate"), self.join("undo.dirstate")),
689 (self.join("journal.branch"), self.join("undo.branch")),
689 (self.join("journal.branch"), self.join("undo.branch")),
690 (self.join("journal.desc"), self.join("undo.desc"))]
690 (self.join("journal.desc"), self.join("undo.desc"))]
691 tr = transaction.transaction(self.ui.warn, self.sopener,
691 tr = transaction.transaction(self.ui.warn, self.sopener,
692 self.sjoin("journal"),
692 self.sjoin("journal"),
693 aftertrans(renames),
693 aftertrans(renames),
694 self.store.createmode)
694 self.store.createmode)
695 self._transref = weakref.ref(tr)
695 self._transref = weakref.ref(tr)
696 return tr
696 return tr
697
697
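# transaction() pairs with the journal/undo files written above: close()
# makes the changes permanent, release() rolls back anything not closed.
# A minimal sketch, assuming a repository at the hypothetical path 'repo'
# and that the caller holds the store lock, as commitctx() below does:
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), 'repo')
lock = repo.lock()
try:
    tr = repo.transaction('example')
    try:
        # append to revlogs through the transaction here
        tr.close()       # rename journal -> undo, keeping the changes
    finally:
        tr.release()     # abort and replay the journal if close() was not reached
finally:
    lock.release()
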
698 def recover(self):
698 def recover(self):
699 lock = self.lock()
699 lock = self.lock()
700 try:
700 try:
701 if os.path.exists(self.sjoin("journal")):
701 if os.path.exists(self.sjoin("journal")):
702 self.ui.status(_("rolling back interrupted transaction\n"))
702 self.ui.status(_("rolling back interrupted transaction\n"))
703 transaction.rollback(self.sopener, self.sjoin("journal"),
703 transaction.rollback(self.sopener, self.sjoin("journal"),
704 self.ui.warn)
704 self.ui.warn)
705 self.invalidate()
705 self.invalidate()
706 return True
706 return True
707 else:
707 else:
708 self.ui.warn(_("no interrupted transaction available\n"))
708 self.ui.warn(_("no interrupted transaction available\n"))
709 return False
709 return False
710 finally:
710 finally:
711 lock.release()
711 lock.release()
712
712
713 def rollback(self, dryrun=False):
713 def rollback(self, dryrun=False):
714 wlock = lock = None
714 wlock = lock = None
715 try:
715 try:
716 wlock = self.wlock()
716 wlock = self.wlock()
717 lock = self.lock()
717 lock = self.lock()
718 if os.path.exists(self.sjoin("undo")):
718 if os.path.exists(self.sjoin("undo")):
719 try:
719 try:
720 args = self.opener("undo.desc", "r").read().splitlines()
720 args = self.opener("undo.desc", "r").read().splitlines()
721 if len(args) >= 3 and self.ui.verbose:
721 if len(args) >= 3 and self.ui.verbose:
722 desc = _("repository tip rolled back to revision %s"
722 desc = _("repository tip rolled back to revision %s"
723 " (undo %s: %s)\n") % (
723 " (undo %s: %s)\n") % (
724 int(args[0]) - 1, args[1], args[2])
724 int(args[0]) - 1, args[1], args[2])
725 elif len(args) >= 2:
725 elif len(args) >= 2:
726 desc = _("repository tip rolled back to revision %s"
726 desc = _("repository tip rolled back to revision %s"
727 " (undo %s)\n") % (
727 " (undo %s)\n") % (
728 int(args[0]) - 1, args[1])
728 int(args[0]) - 1, args[1])
729 except IOError:
729 except IOError:
730 desc = _("rolling back unknown transaction\n")
730 desc = _("rolling back unknown transaction\n")
731 self.ui.status(desc)
731 self.ui.status(desc)
732 if dryrun:
732 if dryrun:
733 return
733 return
734 transaction.rollback(self.sopener, self.sjoin("undo"),
734 transaction.rollback(self.sopener, self.sjoin("undo"),
735 self.ui.warn)
735 self.ui.warn)
736 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
736 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
737 if os.path.exists(self.join('undo.bookmarks')):
737 if os.path.exists(self.join('undo.bookmarks')):
738 util.rename(self.join('undo.bookmarks'),
738 util.rename(self.join('undo.bookmarks'),
739 self.join('bookmarks'))
739 self.join('bookmarks'))
740 try:
740 try:
741 branch = self.opener("undo.branch").read()
741 branch = self.opener("undo.branch").read()
742 self.dirstate.setbranch(branch)
742 self.dirstate.setbranch(branch)
743 except IOError:
743 except IOError:
744 self.ui.warn(_("Named branch could not be reset, "
744 self.ui.warn(_("Named branch could not be reset, "
745 "current branch still is: %s\n")
745 "current branch still is: %s\n")
746 % self.dirstate.branch())
746 % self.dirstate.branch())
747 self.invalidate()
747 self.invalidate()
748 self.dirstate.invalidate()
748 self.dirstate.invalidate()
749 self.destroyed()
749 self.destroyed()
750 parents = tuple([p.rev() for p in self.parents()])
750 parents = tuple([p.rev() for p in self.parents()])
751 if len(parents) > 1:
751 if len(parents) > 1:
752 self.ui.status(_("working directory now based on "
752 self.ui.status(_("working directory now based on "
753 "revisions %d and %d\n") % parents)
753 "revisions %d and %d\n") % parents)
754 else:
754 else:
755 self.ui.status(_("working directory now based on "
755 self.ui.status(_("working directory now based on "
756 "revision %d\n") % parents)
756 "revision %d\n") % parents)
757 else:
757 else:
758 self.ui.warn(_("no rollback information available\n"))
758 self.ui.warn(_("no rollback information available\n"))
759 return 1
759 return 1
760 finally:
760 finally:
761 release(lock, wlock)
761 release(lock, wlock)
762
762
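# recover() replays an interrupted journal; rollback() undoes the last
# transaction recorded in the undo files. A minimal sketch, assuming a
# repository at the hypothetical path 'repo'; dryrun only reports:
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), 'repo')
if repo.rollback(dryrun=True):
    # returns 1 when no rollback information is available
    repo.ui.status('nothing to roll back\n')
else:
    repo.rollback()      # actually undo the last transaction
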
763 def invalidatecaches(self):
763 def invalidatecaches(self):
764 self._tags = None
764 self._tags = None
765 self._tagtypes = None
765 self._tagtypes = None
766 self.nodetagscache = None
766 self.nodetagscache = None
767 self._branchcache = None # in UTF-8
767 self._branchcache = None # in UTF-8
768 self._branchcachetip = None
768 self._branchcachetip = None
769
769
770 def invalidate(self):
770 def invalidate(self):
771 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
771 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
772 if a in self.__dict__:
772 if a in self.__dict__:
773 delattr(self, a)
773 delattr(self, a)
774 self.invalidatecaches()
774 self.invalidatecaches()
775
775
776 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
776 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
777 try:
777 try:
778 l = lock.lock(lockname, 0, releasefn, desc=desc)
778 l = lock.lock(lockname, 0, releasefn, desc=desc)
779 except error.LockHeld, inst:
779 except error.LockHeld, inst:
780 if not wait:
780 if not wait:
781 raise
781 raise
782 self.ui.warn(_("waiting for lock on %s held by %r\n") %
782 self.ui.warn(_("waiting for lock on %s held by %r\n") %
783 (desc, inst.locker))
783 (desc, inst.locker))
784 # default to 600 seconds timeout
784 # default to 600 seconds timeout
785 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
785 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
786 releasefn, desc=desc)
786 releasefn, desc=desc)
787 if acquirefn:
787 if acquirefn:
788 acquirefn()
788 acquirefn()
789 return l
789 return l
790
790
791 def lock(self, wait=True):
792 '''Lock the repository store (.hg/store) and return a weak reference
793 to the lock. Use this before modifying the store (e.g. committing or
794 stripping). If you are opening a transaction, get a lock as well.'''
795 l = self._lockref and self._lockref()
796 if l is not None and l.held:
797 l.lock()
798 return l
799
800 l = self._lock(self.sjoin("lock"), wait, self.store.write,
801 self.invalidate, _('repository %s') % self.origroot)
802 self._lockref = weakref.ref(l)
803 return l
804
805 def wlock(self, wait=True):
806 '''Lock the non-store parts of the repository (everything under
807 .hg except .hg/store) and return a weak reference to the lock.
808 Use this before modifying files in .hg.'''
809 l = self._wlockref and self._wlockref()
810 if l is not None and l.held:
811 l.lock()
812 return l
813
814 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
815 self.dirstate.invalidate, _('working directory of %s') %
816 self.origroot)
817 self._wlockref = weakref.ref(l)
818 return l
819
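# Lock ordering matters: take wlock() before lock(), as commit() and
# commitctx() below do, and release in the opposite order. A minimal
# sketch, assuming a repository at the hypothetical path 'repo':
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), 'repo')
wlock = repo.wlock()
try:
    lock = repo.lock()
    try:
        pass             # modify the store and the working state here
    finally:
        lock.release()
finally:
    wlock.release()
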
820 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
820 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
821 """
821 """
822 commit an individual file as part of a larger transaction
822 commit an individual file as part of a larger transaction
823 """
823 """
824
824
825 fname = fctx.path()
825 fname = fctx.path()
826 text = fctx.data()
826 text = fctx.data()
827 flog = self.file(fname)
827 flog = self.file(fname)
828 fparent1 = manifest1.get(fname, nullid)
828 fparent1 = manifest1.get(fname, nullid)
829 fparent2 = fparent2o = manifest2.get(fname, nullid)
829 fparent2 = fparent2o = manifest2.get(fname, nullid)
830
830
831 meta = {}
831 meta = {}
832 copy = fctx.renamed()
832 copy = fctx.renamed()
833 if copy and copy[0] != fname:
833 if copy and copy[0] != fname:
834 # Mark the new revision of this file as a copy of another
834 # Mark the new revision of this file as a copy of another
835 # file. This copy data will effectively act as a parent
835 # file. This copy data will effectively act as a parent
836 # of this new revision. If this is a merge, the first
836 # of this new revision. If this is a merge, the first
837 # parent will be the nullid (meaning "look up the copy data")
837 # parent will be the nullid (meaning "look up the copy data")
838 # and the second one will be the other parent. For example:
838 # and the second one will be the other parent. For example:
839 #
839 #
840 # 0 --- 1 --- 3 rev1 changes file foo
840 # 0 --- 1 --- 3 rev1 changes file foo
841 # \ / rev2 renames foo to bar and changes it
841 # \ / rev2 renames foo to bar and changes it
842 # \- 2 -/ rev3 should have bar with all changes and
842 # \- 2 -/ rev3 should have bar with all changes and
843 # should record that bar descends from
843 # should record that bar descends from
844 # bar in rev2 and foo in rev1
844 # bar in rev2 and foo in rev1
845 #
845 #
846 # this allows this merge to succeed:
846 # this allows this merge to succeed:
847 #
847 #
848 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
848 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
849 # \ / merging rev3 and rev4 should use bar@rev2
849 # \ / merging rev3 and rev4 should use bar@rev2
850 # \- 2 --- 4 as the merge base
850 # \- 2 --- 4 as the merge base
851 #
851 #
852
852
853 cfname = copy[0]
853 cfname = copy[0]
854 crev = manifest1.get(cfname)
854 crev = manifest1.get(cfname)
855 newfparent = fparent2
855 newfparent = fparent2
856
856
857 if manifest2: # branch merge
857 if manifest2: # branch merge
858 if fparent2 == nullid or crev is None: # copied on remote side
858 if fparent2 == nullid or crev is None: # copied on remote side
859 if cfname in manifest2:
859 if cfname in manifest2:
860 crev = manifest2[cfname]
860 crev = manifest2[cfname]
861 newfparent = fparent1
861 newfparent = fparent1
862
862
863 # find source in nearest ancestor if we've lost track
863 # find source in nearest ancestor if we've lost track
864 if not crev:
864 if not crev:
865 self.ui.debug(" %s: searching for copy revision for %s\n" %
865 self.ui.debug(" %s: searching for copy revision for %s\n" %
866 (fname, cfname))
866 (fname, cfname))
867 for ancestor in self[None].ancestors():
867 for ancestor in self[None].ancestors():
868 if cfname in ancestor:
868 if cfname in ancestor:
869 crev = ancestor[cfname].filenode()
869 crev = ancestor[cfname].filenode()
870 break
870 break
871
871
872 if crev:
872 if crev:
873 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
873 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
874 meta["copy"] = cfname
874 meta["copy"] = cfname
875 meta["copyrev"] = hex(crev)
875 meta["copyrev"] = hex(crev)
876 fparent1, fparent2 = nullid, newfparent
876 fparent1, fparent2 = nullid, newfparent
877 else:
877 else:
878 self.ui.warn(_("warning: can't find ancestor for '%s' "
878 self.ui.warn(_("warning: can't find ancestor for '%s' "
879 "copied from '%s'!\n") % (fname, cfname))
879 "copied from '%s'!\n") % (fname, cfname))
880
880
881 elif fparent2 != nullid:
881 elif fparent2 != nullid:
882 # is one parent an ancestor of the other?
882 # is one parent an ancestor of the other?
883 fparentancestor = flog.ancestor(fparent1, fparent2)
883 fparentancestor = flog.ancestor(fparent1, fparent2)
884 if fparentancestor == fparent1:
884 if fparentancestor == fparent1:
885 fparent1, fparent2 = fparent2, nullid
885 fparent1, fparent2 = fparent2, nullid
886 elif fparentancestor == fparent2:
886 elif fparentancestor == fparent2:
887 fparent2 = nullid
887 fparent2 = nullid
888
888
889 # is the file changed?
889 # is the file changed?
890 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
890 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
891 changelist.append(fname)
891 changelist.append(fname)
892 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
892 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
893
893
894 # are just the flags changed during merge?
894 # are just the flags changed during merge?
895 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
895 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
896 changelist.append(fname)
896 changelist.append(fname)
897
897
898 return fparent1
898 return fparent1
899
899
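# The copy metadata described above comes from fctx.renamed(): for a file
# added by 'hg mv foo bar' the working context reports the source path and
# its filenode, which _filecommit() records as the 'copy'/'copyrev' meta
# keys. A minimal sketch with hypothetical file names:
from mercurial import hg, ui as uimod
from mercurial.node import hex

repo = hg.repository(uimod.ui(), 'repo')
wctx = repo[None]
copied = wctx['bar'].renamed()   # e.g. ('foo', <20-byte filenode>) or None
if copied:
    print copied[0], hex(copied[1])
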
900 def commit(self, text="", user=None, date=None, match=None, force=False,
901 editor=False, extra={}):
902 """Add a new revision to the current repository.
903
904 Revision information is gathered from the working directory;
905 match can be used to filter the committed files. If editor is
906 supplied, it is called to get a commit message.
907 """
908
908
909 def fail(f, msg):
909 def fail(f, msg):
910 raise util.Abort('%s: %s' % (f, msg))
910 raise util.Abort('%s: %s' % (f, msg))
911
911
912 if not match:
912 if not match:
913 match = matchmod.always(self.root, '')
913 match = matchmod.always(self.root, '')
914
914
915 if not force:
915 if not force:
916 vdirs = []
916 vdirs = []
917 match.dir = vdirs.append
917 match.dir = vdirs.append
918 match.bad = fail
918 match.bad = fail
919
919
920 wlock = self.wlock()
920 wlock = self.wlock()
921 try:
921 try:
922 wctx = self[None]
922 wctx = self[None]
923 merge = len(wctx.parents()) > 1
923 merge = len(wctx.parents()) > 1
924
924
925 if (not force and merge and match and
925 if (not force and merge and match and
926 (match.files() or match.anypats())):
926 (match.files() or match.anypats())):
927 raise util.Abort(_('cannot partially commit a merge '
927 raise util.Abort(_('cannot partially commit a merge '
928 '(do not specify files or patterns)'))
928 '(do not specify files or patterns)'))
929
929
930 changes = self.status(match=match, clean=force)
930 changes = self.status(match=match, clean=force)
931 if force:
931 if force:
932 changes[0].extend(changes[6]) # mq may commit unchanged files
932 changes[0].extend(changes[6]) # mq may commit unchanged files
933
933
934 # check subrepos
934 # check subrepos
935 subs = []
935 subs = []
936 removedsubs = set()
936 removedsubs = set()
937 for p in wctx.parents():
937 for p in wctx.parents():
938 removedsubs.update(s for s in p.substate if match(s))
938 removedsubs.update(s for s in p.substate if match(s))
939 for s in wctx.substate:
939 for s in wctx.substate:
940 removedsubs.discard(s)
940 removedsubs.discard(s)
941 if match(s) and wctx.sub(s).dirty():
941 if match(s) and wctx.sub(s).dirty():
942 subs.append(s)
942 subs.append(s)
943 if (subs or removedsubs):
943 if (subs or removedsubs):
944 if (not match('.hgsub') and
944 if (not match('.hgsub') and
945 '.hgsub' in (wctx.modified() + wctx.added())):
945 '.hgsub' in (wctx.modified() + wctx.added())):
946 raise util.Abort(_("can't commit subrepos without .hgsub"))
946 raise util.Abort(_("can't commit subrepos without .hgsub"))
947 if '.hgsubstate' not in changes[0]:
947 if '.hgsubstate' not in changes[0]:
948 changes[0].insert(0, '.hgsubstate')
948 changes[0].insert(0, '.hgsubstate')
949
949
950 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
950 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
951 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
951 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
952 if changedsubs:
952 if changedsubs:
953 raise util.Abort(_("uncommitted changes in subrepo %s")
953 raise util.Abort(_("uncommitted changes in subrepo %s")
954 % changedsubs[0])
954 % changedsubs[0])
955
955
956 # make sure all explicit patterns are matched
956 # make sure all explicit patterns are matched
957 if not force and match.files():
957 if not force and match.files():
958 matched = set(changes[0] + changes[1] + changes[2])
958 matched = set(changes[0] + changes[1] + changes[2])
959
959
960 for f in match.files():
960 for f in match.files():
961 if f == '.' or f in matched or f in wctx.substate:
961 if f == '.' or f in matched or f in wctx.substate:
962 continue
962 continue
963 if f in changes[3]: # missing
963 if f in changes[3]: # missing
964 fail(f, _('file not found!'))
964 fail(f, _('file not found!'))
965 if f in vdirs: # visited directory
965 if f in vdirs: # visited directory
966 d = f + '/'
966 d = f + '/'
967 for mf in matched:
967 for mf in matched:
968 if mf.startswith(d):
968 if mf.startswith(d):
969 break
969 break
970 else:
970 else:
971 fail(f, _("no match under directory!"))
971 fail(f, _("no match under directory!"))
972 elif f not in self.dirstate:
972 elif f not in self.dirstate:
973 fail(f, _("file not tracked!"))
973 fail(f, _("file not tracked!"))
974
974
975 if (not force and not extra.get("close") and not merge
975 if (not force and not extra.get("close") and not merge
976 and not (changes[0] or changes[1] or changes[2])
976 and not (changes[0] or changes[1] or changes[2])
977 and wctx.branch() == wctx.p1().branch()):
977 and wctx.branch() == wctx.p1().branch()):
978 return None
978 return None
979
979
980 ms = mergemod.mergestate(self)
980 ms = mergemod.mergestate(self)
981 for f in changes[0]:
981 for f in changes[0]:
982 if f in ms and ms[f] == 'u':
982 if f in ms and ms[f] == 'u':
983 raise util.Abort(_("unresolved merge conflicts "
983 raise util.Abort(_("unresolved merge conflicts "
984 "(see hg help resolve)"))
984 "(see hg help resolve)"))
985
985
986 cctx = context.workingctx(self, text, user, date, extra, changes)
986 cctx = context.workingctx(self, text, user, date, extra, changes)
987 if editor:
987 if editor:
988 cctx._text = editor(self, cctx, subs)
988 cctx._text = editor(self, cctx, subs)
989 edited = (text != cctx._text)
989 edited = (text != cctx._text)
990
990
991 # commit subs
991 # commit subs
992 if subs or removedsubs:
992 if subs or removedsubs:
993 state = wctx.substate.copy()
993 state = wctx.substate.copy()
994 for s in sorted(subs):
994 for s in sorted(subs):
995 sub = wctx.sub(s)
995 sub = wctx.sub(s)
996 self.ui.status(_('committing subrepository %s\n') %
996 self.ui.status(_('committing subrepository %s\n') %
997 subrepo.subrelpath(sub))
997 subrepo.subrelpath(sub))
998 sr = sub.commit(cctx._text, user, date)
998 sr = sub.commit(cctx._text, user, date)
999 state[s] = (state[s][0], sr)
999 state[s] = (state[s][0], sr)
1000 subrepo.writestate(self, state)
1000 subrepo.writestate(self, state)
1001
1001
1002 # Save commit message in case this transaction gets rolled back
1002 # Save commit message in case this transaction gets rolled back
1003 # (e.g. by a pretxncommit hook). Leave the content alone on
1003 # (e.g. by a pretxncommit hook). Leave the content alone on
1004 # the assumption that the user will use the same editor again.
1004 # the assumption that the user will use the same editor again.
1005 msgfile = self.opener('last-message.txt', 'wb')
1005 msgfile = self.opener('last-message.txt', 'wb')
1006 msgfile.write(cctx._text)
1006 msgfile.write(cctx._text)
1007 msgfile.close()
1007 msgfile.close()
1008
1008
1009 p1, p2 = self.dirstate.parents()
1009 p1, p2 = self.dirstate.parents()
1010 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1010 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1011 try:
1011 try:
1012 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1012 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1013 ret = self.commitctx(cctx, True)
1013 ret = self.commitctx(cctx, True)
1014 except:
1014 except:
1015 if edited:
1015 if edited:
1016 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1016 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1017 self.ui.write(
1017 self.ui.write(
1018 _('note: commit message saved in %s\n') % msgfn)
1018 _('note: commit message saved in %s\n') % msgfn)
1019 raise
1019 raise
1020
1020
1021 # update bookmarks, dirstate and mergestate
1021 # update bookmarks, dirstate and mergestate
1022 parents = (p1, p2)
1022 parents = (p1, p2)
1023 if p2 == nullid:
1023 if p2 == nullid:
1024 parents = (p1,)
1024 parents = (p1,)
1025 bookmarks.update(self, parents, ret)
1025 bookmarks.update(self, parents, ret)
1026 for f in changes[0] + changes[1]:
1026 for f in changes[0] + changes[1]:
1027 self.dirstate.normal(f)
1027 self.dirstate.normal(f)
1028 for f in changes[2]:
1028 for f in changes[2]:
1029 self.dirstate.forget(f)
1029 self.dirstate.forget(f)
1030 self.dirstate.setparents(ret)
1030 self.dirstate.setparents(ret)
1031 ms.reset()
1031 ms.reset()
1032 finally:
1032 finally:
1033 wlock.release()
1033 wlock.release()
1034
1034
1035 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1035 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1036 return ret
1036 return ret
1037
1037
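# Driving commit() programmatically: a minimal sketch that commits a single
# file through an explicit match, assuming a repository at the hypothetical
# path 'repo' and a hypothetical file name and user:
from mercurial import hg, ui as uimod, match as matchmod
from mercurial.node import hex

repo = hg.repository(uimod.ui(), 'repo')
m = matchmod.match(repo.root, '', ['a.txt'])
node = repo.commit(text='example commit', user='someone@example.com', match=m)
if node is None:
    repo.ui.status('nothing changed\n')
else:
    repo.ui.status('committed %s\n' % hex(node))
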
1038 def commitctx(self, ctx, error=False):
1039 """Add a new revision to the current repository.
1040 Revision information is passed via the context argument.
1041 """
1042
1042
1043 tr = lock = None
1043 tr = lock = None
1044 removed = list(ctx.removed())
1044 removed = list(ctx.removed())
1045 p1, p2 = ctx.p1(), ctx.p2()
1045 p1, p2 = ctx.p1(), ctx.p2()
1046 m1 = p1.manifest().copy()
1046 m1 = p1.manifest().copy()
1047 m2 = p2.manifest()
1047 m2 = p2.manifest()
1048 user = ctx.user()
1048 user = ctx.user()
1049
1049
1050 lock = self.lock()
1050 lock = self.lock()
1051 try:
1051 try:
1052 tr = self.transaction("commit")
1052 tr = self.transaction("commit")
1053 trp = weakref.proxy(tr)
1053 trp = weakref.proxy(tr)
1054
1054
1055 # check in files
1055 # check in files
1056 new = {}
1056 new = {}
1057 changed = []
1057 changed = []
1058 linkrev = len(self)
1058 linkrev = len(self)
1059 for f in sorted(ctx.modified() + ctx.added()):
1059 for f in sorted(ctx.modified() + ctx.added()):
1060 self.ui.note(f + "\n")
1060 self.ui.note(f + "\n")
1061 try:
1061 try:
1062 fctx = ctx[f]
1062 fctx = ctx[f]
1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1064 changed)
1064 changed)
1065 m1.set(f, fctx.flags())
1065 m1.set(f, fctx.flags())
1066 except OSError, inst:
1066 except OSError, inst:
1067 self.ui.warn(_("trouble committing %s!\n") % f)
1067 self.ui.warn(_("trouble committing %s!\n") % f)
1068 raise
1068 raise
1069 except IOError, inst:
1069 except IOError, inst:
1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1071 if error or errcode and errcode != errno.ENOENT:
1071 if error or errcode and errcode != errno.ENOENT:
1072 self.ui.warn(_("trouble committing %s!\n") % f)
1072 self.ui.warn(_("trouble committing %s!\n") % f)
1073 raise
1073 raise
1074 else:
1074 else:
1075 removed.append(f)
1075 removed.append(f)
1076
1076
1077 # update manifest
1077 # update manifest
1078 m1.update(new)
1078 m1.update(new)
1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1080 drop = [f for f in removed if f in m1]
1080 drop = [f for f in removed if f in m1]
1081 for f in drop:
1081 for f in drop:
1082 del m1[f]
1082 del m1[f]
1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1084 p2.manifestnode(), (new, drop))
1084 p2.manifestnode(), (new, drop))
1085
1085
1086 # update changelog
1086 # update changelog
1087 self.changelog.delayupdate()
1087 self.changelog.delayupdate()
1088 n = self.changelog.add(mn, changed + removed, ctx.description(),
1088 n = self.changelog.add(mn, changed + removed, ctx.description(),
1089 trp, p1.node(), p2.node(),
1089 trp, p1.node(), p2.node(),
1090 user, ctx.date(), ctx.extra().copy())
1090 user, ctx.date(), ctx.extra().copy())
1091 p = lambda: self.changelog.writepending() and self.root or ""
1091 p = lambda: self.changelog.writepending() and self.root or ""
1092 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1092 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1093 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1093 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1094 parent2=xp2, pending=p)
1094 parent2=xp2, pending=p)
1095 self.changelog.finalize(trp)
1095 self.changelog.finalize(trp)
1096 tr.close()
1096 tr.close()
1097
1097
1098 if self._branchcache:
1098 if self._branchcache:
1099 self.updatebranchcache()
1099 self.updatebranchcache()
1100 return n
1100 return n
1101 finally:
1101 finally:
1102 if tr:
1102 if tr:
1103 tr.release()
1103 tr.release()
1104 lock.release()
1104 lock.release()
1105
1105
1106 def destroyed(self):
1106 def destroyed(self):
1107 '''Inform the repository that nodes have been destroyed.
1107 '''Inform the repository that nodes have been destroyed.
1108 Intended for use by strip and rollback, so there's a common
1108 Intended for use by strip and rollback, so there's a common
1109 place for anything that has to be done after destroying history.'''
1109 place for anything that has to be done after destroying history.'''
1110 # XXX it might be nice if we could take the list of destroyed
1110 # XXX it might be nice if we could take the list of destroyed
1111 # nodes, but I don't see an easy way for rollback() to do that
1111 # nodes, but I don't see an easy way for rollback() to do that
1112
1112
1113 # Ensure the persistent tag cache is updated. Doing it now
1113 # Ensure the persistent tag cache is updated. Doing it now
1114 # means that the tag cache only has to worry about destroyed
1114 # means that the tag cache only has to worry about destroyed
1115 # heads immediately after a strip/rollback. That in turn
1115 # heads immediately after a strip/rollback. That in turn
1116 # guarantees that "cachetip == currenttip" (comparing both rev
1116 # guarantees that "cachetip == currenttip" (comparing both rev
1117 # and node) always means no nodes have been added or destroyed.
1117 # and node) always means no nodes have been added or destroyed.
1118
1118
1119 # XXX this is suboptimal when qrefresh'ing: we strip the current
1119 # XXX this is suboptimal when qrefresh'ing: we strip the current
1120 # head, refresh the tag cache, then immediately add a new head.
1120 # head, refresh the tag cache, then immediately add a new head.
1121 # But I think doing it this way is necessary for the "instant
1121 # But I think doing it this way is necessary for the "instant
1122 # tag cache retrieval" case to work.
1122 # tag cache retrieval" case to work.
1123 self.invalidatecaches()
1123 self.invalidatecaches()
1124
1124
1125 def walk(self, match, node=None):
1125 def walk(self, match, node=None):
1126 '''
1126 '''
1127 walk recursively through the directory tree or a given
1127 walk recursively through the directory tree or a given
1128 changeset, finding all files matched by the match
1128 changeset, finding all files matched by the match
1129 function
1129 function
1130 '''
1130 '''
1131 return self[node].walk(match)
1131 return self[node].walk(match)
1132
1132
1133 def status(self, node1='.', node2=None, match=None,
1134 ignored=False, clean=False, unknown=False,
1135 listsubrepos=False):
1136 """return status of files between two nodes or between a node and the working directory
1137
1138 If node1 is None, use the first dirstate parent instead.
1139 If node2 is None, compare node1 with the working directory.
1140 """
1141
1141
1142 def mfmatches(ctx):
1142 def mfmatches(ctx):
1143 mf = ctx.manifest().copy()
1143 mf = ctx.manifest().copy()
1144 for fn in mf.keys():
1144 for fn in mf.keys():
1145 if not match(fn):
1145 if not match(fn):
1146 del mf[fn]
1146 del mf[fn]
1147 return mf
1147 return mf
1148
1148
1149 if isinstance(node1, context.changectx):
1149 if isinstance(node1, context.changectx):
1150 ctx1 = node1
1150 ctx1 = node1
1151 else:
1151 else:
1152 ctx1 = self[node1]
1152 ctx1 = self[node1]
1153 if isinstance(node2, context.changectx):
1153 if isinstance(node2, context.changectx):
1154 ctx2 = node2
1154 ctx2 = node2
1155 else:
1155 else:
1156 ctx2 = self[node2]
1156 ctx2 = self[node2]
1157
1157
1158 working = ctx2.rev() is None
1158 working = ctx2.rev() is None
1159 parentworking = working and ctx1 == self['.']
1159 parentworking = working and ctx1 == self['.']
1160 match = match or matchmod.always(self.root, self.getcwd())
1160 match = match or matchmod.always(self.root, self.getcwd())
1161 listignored, listclean, listunknown = ignored, clean, unknown
1161 listignored, listclean, listunknown = ignored, clean, unknown
1162
1162
1163 # load earliest manifest first for caching reasons
1163 # load earliest manifest first for caching reasons
1164 if not working and ctx2.rev() < ctx1.rev():
1164 if not working and ctx2.rev() < ctx1.rev():
1165 ctx2.manifest()
1165 ctx2.manifest()
1166
1166
1167 if not parentworking:
1167 if not parentworking:
1168 def bad(f, msg):
1168 def bad(f, msg):
1169 if f not in ctx1:
1169 if f not in ctx1:
1170 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1170 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1171 match.bad = bad
1171 match.bad = bad
1172
1172
1173 if working: # we need to scan the working dir
1173 if working: # we need to scan the working dir
1174 subrepos = []
1174 subrepos = []
1175 if '.hgsub' in self.dirstate:
1175 if '.hgsub' in self.dirstate:
1176 subrepos = ctx1.substate.keys()
1176 subrepos = ctx1.substate.keys()
1177 s = self.dirstate.status(match, subrepos, listignored,
1177 s = self.dirstate.status(match, subrepos, listignored,
1178 listclean, listunknown)
1178 listclean, listunknown)
1179 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1179 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1180
1180
1181 # check for any possibly clean files
1181 # check for any possibly clean files
1182 if parentworking and cmp:
1182 if parentworking and cmp:
1183 fixup = []
1183 fixup = []
1184 # do a full compare of any files that might have changed
1184 # do a full compare of any files that might have changed
1185 for f in sorted(cmp):
1185 for f in sorted(cmp):
1186 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1186 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1187 or ctx1[f].cmp(ctx2[f])):
1187 or ctx1[f].cmp(ctx2[f])):
1188 modified.append(f)
1188 modified.append(f)
1189 else:
1189 else:
1190 fixup.append(f)
1190 fixup.append(f)
1191
1191
1192 # update dirstate for files that are actually clean
1192 # update dirstate for files that are actually clean
1193 if fixup:
1193 if fixup:
1194 if listclean:
1194 if listclean:
1195 clean += fixup
1195 clean += fixup
1196
1196
1197 try:
1197 try:
1198 # updating the dirstate is optional
1198 # updating the dirstate is optional
1199 # so we don't wait on the lock
1199 # so we don't wait on the lock
1200 wlock = self.wlock(False)
1200 wlock = self.wlock(False)
1201 try:
1201 try:
1202 for f in fixup:
1202 for f in fixup:
1203 self.dirstate.normal(f)
1203 self.dirstate.normal(f)
1204 finally:
1204 finally:
1205 wlock.release()
1205 wlock.release()
1206 except error.LockError:
1206 except error.LockError:
1207 pass
1207 pass
1208
1208
1209 if not parentworking:
1209 if not parentworking:
1210 mf1 = mfmatches(ctx1)
1210 mf1 = mfmatches(ctx1)
1211 if working:
1211 if working:
1212 # we are comparing working dir against non-parent
1212 # we are comparing working dir against non-parent
1213 # generate a pseudo-manifest for the working dir
1213 # generate a pseudo-manifest for the working dir
1214 mf2 = mfmatches(self['.'])
1214 mf2 = mfmatches(self['.'])
1215 for f in cmp + modified + added:
1215 for f in cmp + modified + added:
1216 mf2[f] = None
1216 mf2[f] = None
1217 mf2.set(f, ctx2.flags(f))
1217 mf2.set(f, ctx2.flags(f))
1218 for f in removed:
1218 for f in removed:
1219 if f in mf2:
1219 if f in mf2:
1220 del mf2[f]
1220 del mf2[f]
1221 else:
1221 else:
1222 # we are comparing two revisions
1222 # we are comparing two revisions
1223 deleted, unknown, ignored = [], [], []
1223 deleted, unknown, ignored = [], [], []
1224 mf2 = mfmatches(ctx2)
1224 mf2 = mfmatches(ctx2)
1225
1225
1226 modified, added, clean = [], [], []
1226 modified, added, clean = [], [], []
1227 for fn in mf2:
1227 for fn in mf2:
1228 if fn in mf1:
1228 if fn in mf1:
1229 if (mf1.flags(fn) != mf2.flags(fn) or
1229 if (mf1.flags(fn) != mf2.flags(fn) or
1230 (mf1[fn] != mf2[fn] and
1230 (mf1[fn] != mf2[fn] and
1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn])))):
1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn])))):
1232 modified.append(fn)
1232 modified.append(fn)
1233 elif listclean:
1233 elif listclean:
1234 clean.append(fn)
1234 clean.append(fn)
1235 del mf1[fn]
1235 del mf1[fn]
1236 else:
1236 else:
1237 added.append(fn)
1237 added.append(fn)
1238 removed = mf1.keys()
1238 removed = mf1.keys()
1239
1239
1240 r = modified, added, removed, deleted, unknown, ignored, clean
1240 r = modified, added, removed, deleted, unknown, ignored, clean
1241
1241
1242 if listsubrepos:
1242 if listsubrepos:
1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1244 if working:
1244 if working:
1245 rev2 = None
1245 rev2 = None
1246 else:
1246 else:
1247 rev2 = ctx2.substate[subpath][1]
1247 rev2 = ctx2.substate[subpath][1]
1248 try:
1248 try:
1249 submatch = matchmod.narrowmatcher(subpath, match)
1249 submatch = matchmod.narrowmatcher(subpath, match)
1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1251 clean=listclean, unknown=listunknown,
1251 clean=listclean, unknown=listunknown,
1252 listsubrepos=True)
1252 listsubrepos=True)
1253 for rfiles, sfiles in zip(r, s):
1253 for rfiles, sfiles in zip(r, s):
1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1255 except error.LookupError:
1255 except error.LookupError:
1256 self.ui.status(_("skipping missing subrepository: %s\n")
1256 self.ui.status(_("skipping missing subrepository: %s\n")
1257 % subpath)
1257 % subpath)
1258
1258
1259 for l in r:
1259 for l in r:
1260 l.sort()
1260 l.sort()
1261 return r
1261 return r
1262
1262
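# The status() return value is the seven-element tuple assembled above.
# A minimal sketch comparing '.' against the working directory, assuming
# a repository at the hypothetical path 'repo':
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), 'repo')
modified, added, removed, deleted, unknown, ignored, clean = \
    repo.status(unknown=True, ignored=True, clean=True)
print len(modified), 'modified,', len(unknown), 'unknown'
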
1263 def heads(self, start=None):
1263 def heads(self, start=None):
1264 heads = self.changelog.heads(start)
1264 heads = self.changelog.heads(start)
1265 # sort the output in rev descending order
1265 # sort the output in rev descending order
1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1267
1267
1268 def branchheads(self, branch=None, start=None, closed=False):
1268 def branchheads(self, branch=None, start=None, closed=False):
1269 '''return a (possibly filtered) list of heads for the given branch
1269 '''return a (possibly filtered) list of heads for the given branch
1270
1270
1271 Heads are returned in topological order, from newest to oldest.
1271 Heads are returned in topological order, from newest to oldest.
1272 If branch is None, use the dirstate branch.
1272 If branch is None, use the dirstate branch.
1273 If start is not None, return only heads reachable from start.
1273 If start is not None, return only heads reachable from start.
1274 If closed is True, return heads that are marked as closed as well.
1274 If closed is True, return heads that are marked as closed as well.
1275 '''
1275 '''
1276 if branch is None:
1276 if branch is None:
1277 branch = self[None].branch()
1277 branch = self[None].branch()
1278 branches = self.branchmap()
1278 branches = self.branchmap()
1279 if branch not in branches:
1279 if branch not in branches:
1280 return []
1280 return []
1281 # the cache returns heads ordered lowest to highest
1281 # the cache returns heads ordered lowest to highest
1282 bheads = list(reversed(branches[branch]))
1282 bheads = list(reversed(branches[branch]))
1283 if start is not None:
1283 if start is not None:
1284 # filter out the heads that cannot be reached from startrev
1284 # filter out the heads that cannot be reached from startrev
1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1286 bheads = [h for h in bheads if h in fbheads]
1286 bheads = [h for h in bheads if h in fbheads]
1287 if not closed:
1287 if not closed:
1288 bheads = [h for h in bheads if
1288 bheads = [h for h in bheads if
1289 ('close' not in self.changelog.read(h)[5])]
1289 ('close' not in self.changelog.read(h)[5])]
1290 return bheads
1290 return bheads
1291
1291
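# branchheads() reads the branch cache and filters it as the docstring
# describes. A minimal sketch listing open and closed heads of the
# 'default' branch, assuming a repository at the hypothetical path 'repo':
from mercurial import hg, ui as uimod
from mercurial.node import short

repo = hg.repository(uimod.ui(), 'repo')
for h in repo.branchheads('default', closed=True):
    print short(h)
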
1292 def branches(self, nodes):
1292 def branches(self, nodes):
1293 if not nodes:
1293 if not nodes:
1294 nodes = [self.changelog.tip()]
1294 nodes = [self.changelog.tip()]
1295 b = []
1295 b = []
1296 for n in nodes:
1296 for n in nodes:
1297 t = n
1297 t = n
1298 while 1:
1298 while 1:
1299 p = self.changelog.parents(n)
1299 p = self.changelog.parents(n)
1300 if p[1] != nullid or p[0] == nullid:
1300 if p[1] != nullid or p[0] == nullid:
1301 b.append((t, n, p[0], p[1]))
1301 b.append((t, n, p[0], p[1]))
1302 break
1302 break
1303 n = p[0]
1303 n = p[0]
1304 return b
1304 return b
1305
1305
1306 def between(self, pairs):
1306 def between(self, pairs):
1307 r = []
1307 r = []
1308
1308
1309 for top, bottom in pairs:
1309 for top, bottom in pairs:
1310 n, l, i = top, [], 0
1310 n, l, i = top, [], 0
1311 f = 1
1311 f = 1
1312
1312
1313 while n != bottom and n != nullid:
1313 while n != bottom and n != nullid:
1314 p = self.changelog.parents(n)[0]
1314 p = self.changelog.parents(n)[0]
1315 if i == f:
1315 if i == f:
1316 l.append(n)
1316 l.append(n)
1317 f = f * 2
1317 f = f * 2
1318 n = p
1318 n = p
1319 i += 1
1319 i += 1
1320
1320
1321 r.append(l)
1321 r.append(l)
1322
1322
1323 return r
1323 return r
1324
1324
1325 def pull(self, remote, heads=None, force=False):
1325 def pull(self, remote, heads=None, force=False):
1326 lock = self.lock()
1326 lock = self.lock()
1327 try:
1327 try:
1328 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1328 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1329 force=force)
1329 force=force)
1330 common, fetch, rheads = tmp
1330 common, fetch, rheads = tmp
1331 if not fetch:
1331 if not fetch:
1332 self.ui.status(_("no changes found\n"))
1332 self.ui.status(_("no changes found\n"))
1333 result = 0
1333 result = 0
1334 else:
1334 else:
1335 if heads is None and fetch == [nullid]:
1335 if heads is None and fetch == [nullid]:
1336 self.ui.status(_("requesting all changes\n"))
1336 self.ui.status(_("requesting all changes\n"))
1337 elif heads is None and remote.capable('changegroupsubset'):
1337 elif heads is None and remote.capable('changegroupsubset'):
1338 # issue1320, avoid a race if remote changed after discovery
1338 # issue1320, avoid a race if remote changed after discovery
1339 heads = rheads
1339 heads = rheads
1340
1340
1341 if heads is None:
1341 if heads is None:
1342 cg = remote.changegroup(fetch, 'pull')
1342 cg = remote.changegroup(fetch, 'pull')
1343 elif not remote.capable('changegroupsubset'):
1343 elif not remote.capable('changegroupsubset'):
1344 raise util.Abort(_("partial pull cannot be done because "
1344 raise util.Abort(_("partial pull cannot be done because "
1345 "other repository doesn't support "
1345 "other repository doesn't support "
1346 "changegroupsubset."))
1346 "changegroupsubset."))
1347 else:
1347 else:
1348 cg = remote.changegroupsubset(fetch, heads, 'pull')
1348 cg = remote.changegroupsubset(fetch, heads, 'pull')
1349 result = self.addchangegroup(cg, 'pull', remote.url(),
1349 result = self.addchangegroup(cg, 'pull', remote.url(),
1350 lock=lock)
1350 lock=lock)
1351 finally:
1351 finally:
1352 lock.release()
1352 lock.release()
1353
1353
1354 return result
1354 return result
1355
1355
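# pull() runs findcommonincoming() and then applies the resulting
# changegroup with addchangegroup(). A minimal sketch pulling everything
# from another local repository, both paths hypothetical:
from mercurial import hg, ui as uimod

myui = uimod.ui()
repo = hg.repository(myui, 'repo')
other = hg.repository(myui, '../other-repo')
result = repo.pull(other)    # 0 when no changes were found
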
1356 def checkpush(self, force, revs):
1357 """Extensions can override this function if additional checks have
1358 to be performed before pushing, or call it if they override the
1359 push command.
1360 """
1361 pass
1362
1363 def push(self, remote, force=False, revs=None, newbranch=False):
1363 def push(self, remote, force=False, revs=None, newbranch=False):
1364 '''Push outgoing changesets (limited by revs) from the current
1364 '''Push outgoing changesets (limited by revs) from the current
1365 repository to remote. Return an integer:
1365 repository to remote. Return an integer:
1366 - 0 means HTTP error *or* nothing to push
1366 - 0 means HTTP error *or* nothing to push
1367 - 1 means we pushed and remote head count is unchanged *or*
1367 - 1 means we pushed and remote head count is unchanged *or*
1368 we have outgoing changesets but refused to push
1368 we have outgoing changesets but refused to push
1369 - other values as described by addchangegroup()
1369 - other values as described by addchangegroup()
1370 '''
1370 '''
1371 # there are two ways to push to remote repo:
1371 # there are two ways to push to remote repo:
1372 #
1372 #
1373 # addchangegroup assumes local user can lock remote
1373 # addchangegroup assumes local user can lock remote
1374 # repo (local filesystem, old ssh servers).
1374 # repo (local filesystem, old ssh servers).
1375 #
1375 #
1376 # unbundle assumes local user cannot lock remote repo (new ssh
1376 # unbundle assumes local user cannot lock remote repo (new ssh
1377 # servers, http servers).
1377 # servers, http servers).
1378
1378
1379 self.checkpush(force, revs)
1379 self.checkpush(force, revs)
1380 lock = None
1380 lock = None
1381 unbundle = remote.capable('unbundle')
1381 unbundle = remote.capable('unbundle')
1382 if not unbundle:
1382 if not unbundle:
1383 lock = remote.lock()
1383 lock = remote.lock()
1384 try:
1384 try:
1385 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1385 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1386 newbranch)
1386 newbranch)
1387 ret = remote_heads
1387 ret = remote_heads
1388 if cg is not None:
1388 if cg is not None:
1389 if unbundle:
1389 if unbundle:
1390 # local repo finds heads on server, finds out what
1390 # local repo finds heads on server, finds out what
1391 # revs it must push. once revs transferred, if server
1391 # revs it must push. once revs transferred, if server
1392 # finds it has different heads (someone else won
1392 # finds it has different heads (someone else won
1393 # commit/push race), server aborts.
1393 # commit/push race), server aborts.
1394 if force:
1394 if force:
1395 remote_heads = ['force']
1395 remote_heads = ['force']
1396 # ssh: return remote's addchangegroup()
1396 # ssh: return remote's addchangegroup()
1397 # http: return remote's addchangegroup() or 0 for error
1397 # http: return remote's addchangegroup() or 0 for error
1398 ret = remote.unbundle(cg, remote_heads, 'push')
1398 ret = remote.unbundle(cg, remote_heads, 'push')
1399 else:
1399 else:
1400 # we return an integer indicating remote head count change
1400 # we return an integer indicating remote head count change
1401 ret = remote.addchangegroup(cg, 'push', self.url(),
1401 ret = remote.addchangegroup(cg, 'push', self.url(),
1402 lock=lock)
1402 lock=lock)
1403 finally:
1403 finally:
1404 if lock is not None:
1404 if lock is not None:
1405 lock.release()
1405 lock.release()
1406
1406
1407 self.ui.debug("checking for updated bookmarks\n")
1407 self.ui.debug("checking for updated bookmarks\n")
1408 rb = remote.listkeys('bookmarks')
1408 rb = remote.listkeys('bookmarks')
1409 for k in rb.keys():
1409 for k in rb.keys():
1410 if k in self._bookmarks:
1410 if k in self._bookmarks:
1411 nr, nl = rb[k], hex(self._bookmarks[k])
1411 nr, nl = rb[k], hex(self._bookmarks[k])
1412 if nr in self:
1412 if nr in self:
1413 cr = self[nr]
1413 cr = self[nr]
1414 cl = self[nl]
1414 cl = self[nl]
1415 if cl in cr.descendants():
1415 if cl in cr.descendants():
1416 r = remote.pushkey('bookmarks', k, nr, nl)
1416 r = remote.pushkey('bookmarks', k, nr, nl)
1417 if r:
1417 if r:
1418 self.ui.status(_("updating bookmark %s\n") % k)
1418 self.ui.status(_("updating bookmark %s\n") % k)
1419 else:
1419 else:
1420 self.ui.warn(_('updating bookmark %s'
1420 self.ui.warn(_('updating bookmark %s'
1421 ' failed!\n') % k)
1421 ' failed!\n') % k)
1422
1422
1423 return ret
1423 return ret
1424
1424
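# push() return values follow the docstring above. A minimal sketch pushing
# to another local repository and checking the result, both repository
# paths hypothetical:
from mercurial import hg, ui as uimod

myui = uimod.ui()
repo = hg.repository(myui, 'repo')
other = hg.repository(myui, '../other-repo')
ret = repo.push(other, newbranch=True)
if ret == 0:
    myui.warn('push failed or nothing to push\n')
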
1425 def changegroupinfo(self, nodes, source):
1425 def changegroupinfo(self, nodes, source):
1426 if self.ui.verbose or source == 'bundle':
1426 if self.ui.verbose or source == 'bundle':
1427 self.ui.status(_("%d changesets found\n") % len(nodes))
1427 self.ui.status(_("%d changesets found\n") % len(nodes))
1428 if self.ui.debugflag:
1428 if self.ui.debugflag:
1429 self.ui.debug("list of changesets:\n")
1429 self.ui.debug("list of changesets:\n")
1430 for node in nodes:
1430 for node in nodes:
1431 self.ui.debug("%s\n" % hex(node))
1431 self.ui.debug("%s\n" % hex(node))
1432
1432
1433 def changegroupsubset(self, bases, heads, source):
1434 """Compute a changegroup consisting of all the nodes that are
1435 descendants of any of the bases and ancestors of any of the heads.
1436 Return a chunkbuffer object whose read() method will return
1437 successive changegroup chunks.
1438
1439 It is fairly complex, as determining which filenodes and which
1440 manifest nodes need to be included for the changeset to be complete
1441 is non-trivial.
1442
1443 Another wrinkle is doing the reverse, figuring out which changeset in
1444 the changegroup a particular filenode or manifestnode belongs to.
1445 """
1446 cl = self.changelog
1447 if not bases:
1448 bases = [nullid]
1449 csets, bases, heads = cl.nodesbetween(bases, heads)
1450 # We assume that all ancestors of bases are known
1451 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1452 return self._changegroupsubset(common, csets, heads, source)
1453
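# The refactored changegroupsubset() now only derives 'common' (the
# ancestors of the bases) and the changeset list before delegating to
# _changegroupsubset(); getbundle() below computes the same kind of
# inputs from a heads/common pair instead of a bases/heads pair, which
# matches what the wire protocol's discovery hands back.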
1454 def getbundle(self, source, heads=None, common=None):
1455 """Like changegroupsubset, but returns the set difference between the
1456 ancestors of heads and the ancestors of common.
1457
1458 If heads is None, use the local heads. If common is None, use [nullid].
1459
1460 The nodes in common might not all be known locally due to the way the
1461 current discovery protocol works.
1462 """
1463 cl = self.changelog
1464 if common:
1465 nm = cl.nodemap
1466 common = [n for n in common if n in nm]
1467 else:
1468 common = [nullid]
1469 if not heads:
1470 heads = cl.heads()
1471 common, missing = cl.findcommonmissing(common, heads)
1472 return self._changegroupsubset(common, missing, heads, source)
1473
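getbundle() ultimately ships the ancestors of the requested heads minus the ancestors of the common set. The sketch below reproduces that set arithmetic on a toy parent map, independent of the revlog API (ancestors_of, missing_for and the sample graph are illustrative helpers, not part of Mercurial):

def ancestors_of(graph, nodes):
    # Nodes reachable from 'nodes' via parent links, including the nodes
    # themselves; 'graph' maps node -> list of parents.
    seen = set()
    stack = list(nodes)
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(graph.get(n, []))
    return seen

def missing_for(graph, heads, common):
    # Everything the client lacks: ancestors of the requested heads minus
    # everything already known on the client side.
    return ancestors_of(graph, heads) - ancestors_of(graph, common)

# toy linear history: 0 <- 1 <- 2 <- 3
graph = {1: [0], 2: [1], 3: [2]}
assert missing_for(graph, heads=[3], common=[1]) == set([2, 3])
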
1474 def _changegroupsubset(self, commonrevs, csets, heads, source):
1446
1475
1447 cl = self.changelog
1476 cl = self.changelog
1448 mf = self.manifest
1477 mf = self.manifest
1449 mfs = {} # needed manifests
1478 mfs = {} # needed manifests
1450 fnodes = {} # needed file nodes
1479 fnodes = {} # needed file nodes
1451
1480
1452 if not bases:
1453 bases = [nullid]
1454 csets, bases, heads = cl.nodesbetween(bases, heads)
1455
1456 # can we go through the fast path ?
1481 # can we go through the fast path ?
1457 heads.sort()
1482 heads.sort()
1458 if heads == sorted(self.heads()):
1483 if heads == sorted(self.heads()):
1459 return self._changegroup(csets, source)
1484 return self._changegroup(csets, source)
1460
1485
1461 # slow path
1486 # slow path
1462 self.hook('preoutgoing', throw=True, source=source)
1487 self.hook('preoutgoing', throw=True, source=source)
1463 self.changegroupinfo(csets, source)
1488 self.changegroupinfo(csets, source)
1464
1489
1465 # We assume that all ancestors of bases are known
1466 commonrevs = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1467
1468 # A function generating function that sets up the initial environment
1490 # A function generating function that sets up the initial environment
1469 # for the inner function.
1491 # for the inner function.
1470 def filenode_collector(changedfiles):
1492 def filenode_collector(changedfiles):
1471 # This gathers information from each manifestnode included in the
1493 # This gathers information from each manifestnode included in the
1472 # changegroup about which filenodes the manifest node references
1494 # changegroup about which filenodes the manifest node references
1473 # so we can include those in the changegroup too.
1495 # so we can include those in the changegroup too.
1474 #
1496 #
1475 # It also remembers which changenode each filenode belongs to. It
1497 # It also remembers which changenode each filenode belongs to. It
1476 # does this by assuming a filenode belongs to the changenode
1498 # does this by assuming a filenode belongs to the changenode
1477 # the first manifest that references it belongs to.
1499 # the first manifest that references it belongs to.
1478 def collect(mnode):
1500 def collect(mnode):
1479 r = mf.rev(mnode)
1501 r = mf.rev(mnode)
1480 clnode = mfs[mnode]
1502 clnode = mfs[mnode]
1481 mdata = mf.readfast(mnode)
1503 mdata = mf.readfast(mnode)
1482 for f in changedfiles:
1504 for f in changedfiles:
1483 if f in mdata:
1505 if f in mdata:
1484 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1506 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1485
1507
1486 return collect
1508 return collect
1487
1509
1488 # If we determine that a particular file or manifest node must be a
1510 # If we determine that a particular file or manifest node must be a
1489 # node that the recipient of the changegroup will already have, we can
1511 # node that the recipient of the changegroup will already have, we can
1490 # also assume the recipient will have all the parents. This function
1512 # also assume the recipient will have all the parents. This function
1491 # prunes them from the set of missing nodes.
1513 # prunes them from the set of missing nodes.
1492 def prune(revlog, missingnodes):
1514 def prune(revlog, missingnodes):
1493 # drop any nodes that claim to be part of a cset in commonrevs
1515 # drop any nodes that claim to be part of a cset in commonrevs
1494 drop = set()
1516 drop = set()
1495 for n in missingnodes:
1517 for n in missingnodes:
1496 if revlog.linkrev(revlog.rev(n)) in commonrevs:
1518 if revlog.linkrev(revlog.rev(n)) in commonrevs:
1497 drop.add(n)
1519 drop.add(n)
1498 for n in drop:
1520 for n in drop:
1499 missingnodes.pop(n, None)
1521 missingnodes.pop(n, None)
1500
1522
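prune() above discards file and manifest nodes whose linkrev points at a changeset the recipient already has, so their data never enters the bundle. A dict-based sketch of the same filtering (prune_known and the sample linkrev table are made up for illustration):

def prune_known(linkrevs, missingnodes, commonrevs):
    # missingnodes maps node -> owning changeset; drop every entry whose
    # linkrev is already on the common side, mirroring prune() above.
    for n in [n for n in missingnodes if linkrevs[n] in commonrevs]:
        del missingnodes[n]

missing = {'fnodeA': 'clnode5', 'fnodeB': 'clnode7'}
prune_known({'fnodeA': 5, 'fnodeB': 7}, missing, commonrevs=set([5]))
assert missing == {'fnodeB': 'clnode7'}
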
1501 # Now that we have all these utility functions to help out and
1523 # Now that we have all these utility functions to help out and
1502 # logically divide up the task, generate the group.
1524 # logically divide up the task, generate the group.
1503 def gengroup():
1525 def gengroup():
1504 # The set of changed files starts empty.
1526 # The set of changed files starts empty.
1505 changedfiles = set()
1527 changedfiles = set()
1506 collect = changegroup.collector(cl, mfs, changedfiles)
1528 collect = changegroup.collector(cl, mfs, changedfiles)
1507
1529
1508 # Create a changenode group generator that will call our functions
1530 # Create a changenode group generator that will call our functions
1509 # back to lookup the owning changenode and collect information.
1531 # back to lookup the owning changenode and collect information.
1510 group = cl.group(csets, lambda x: x, collect)
1532 group = cl.group(csets, lambda x: x, collect)
1511 for count, chunk in enumerate(group):
1533 for count, chunk in enumerate(group):
1512 yield chunk
1534 yield chunk
1513 # revlog.group yields three entries per node, so
1535 # revlog.group yields three entries per node, so
1514 # dividing by 3 gives an approximation of how many
1536 # dividing by 3 gives an approximation of how many
1515 # nodes have been processed.
1537 # nodes have been processed.
1516 self.ui.progress(_('bundling'), count / 3,
1538 self.ui.progress(_('bundling'), count / 3,
1517 unit=_('changesets'))
1539 unit=_('changesets'))
1518 changecount = count / 3
1540 changecount = count / 3
1519 efiles = len(changedfiles)
1541 efiles = len(changedfiles)
1520 self.ui.progress(_('bundling'), None)
1542 self.ui.progress(_('bundling'), None)
1521
1543
1522 prune(mf, mfs)
1544 prune(mf, mfs)
1523 # Create a generator for the manifestnodes that calls our lookup
1545 # Create a generator for the manifestnodes that calls our lookup
1524 # and data collection functions back.
1546 # and data collection functions back.
1525 group = mf.group(sorted(mfs, key=mf.rev),
1547 group = mf.group(sorted(mfs, key=mf.rev),
1526 lambda mnode: mfs[mnode],
1548 lambda mnode: mfs[mnode],
1527 filenode_collector(changedfiles))
1549 filenode_collector(changedfiles))
1528 for count, chunk in enumerate(group):
1550 for count, chunk in enumerate(group):
1529 yield chunk
1551 yield chunk
1530 # see above comment for why we divide by 3
1552 # see above comment for why we divide by 3
1531 self.ui.progress(_('bundling'), count / 3,
1553 self.ui.progress(_('bundling'), count / 3,
1532 unit=_('manifests'), total=changecount)
1554 unit=_('manifests'), total=changecount)
1533 self.ui.progress(_('bundling'), None)
1555 self.ui.progress(_('bundling'), None)
1534
1556
1535 mfs.clear()
1557 mfs.clear()
1536
1558
1537 # Go through all our files in order sorted by name.
1559 # Go through all our files in order sorted by name.
1538 for idx, fname in enumerate(sorted(changedfiles)):
1560 for idx, fname in enumerate(sorted(changedfiles)):
1539 filerevlog = self.file(fname)
1561 filerevlog = self.file(fname)
1540 if not len(filerevlog):
1562 if not len(filerevlog):
1541 raise util.Abort(_("empty or missing revlog for %s") % fname)
1563 raise util.Abort(_("empty or missing revlog for %s") % fname)
1542 # Toss out the filenodes that the recipient isn't really
1564 # Toss out the filenodes that the recipient isn't really
1543 # missing.
1565 # missing.
1544 missingfnodes = fnodes.pop(fname, {})
1566 missingfnodes = fnodes.pop(fname, {})
1545 prune(filerevlog, missingfnodes)
1567 prune(filerevlog, missingfnodes)
1546 # If any filenodes are left, generate the group for them,
1568 # If any filenodes are left, generate the group for them,
1547 # otherwise don't bother.
1569 # otherwise don't bother.
1548 if missingfnodes:
1570 if missingfnodes:
1549 yield changegroup.chunkheader(len(fname))
1571 yield changegroup.chunkheader(len(fname))
1550 yield fname
1572 yield fname
1551 # Create a group generator and only pass in a changenode
1573 # Create a group generator and only pass in a changenode
1552 # lookup function, as we don't need to collect any information
1574 # lookup function, as we don't need to collect any information
1553 # from filenodes.
1575 # from filenodes.
1554 group = filerevlog.group(
1576 group = filerevlog.group(
1555 sorted(missingfnodes, key=filerevlog.rev),
1577 sorted(missingfnodes, key=filerevlog.rev),
1556 lambda fnode: missingfnodes[fnode])
1578 lambda fnode: missingfnodes[fnode])
1557 for chunk in group:
1579 for chunk in group:
1558 # even though we print the same progress on
1580 # even though we print the same progress on
1559 # most loop iterations, put the progress call
1581 # most loop iterations, put the progress call
1560 # here so that time estimates (if any) can be updated
1582 # here so that time estimates (if any) can be updated
1561 self.ui.progress(
1583 self.ui.progress(
1562 _('bundling'), idx, item=fname,
1584 _('bundling'), idx, item=fname,
1563 unit=_('files'), total=efiles)
1585 unit=_('files'), total=efiles)
1564 yield chunk
1586 yield chunk
1565 # Signal that no more groups are left.
1587 # Signal that no more groups are left.
1566 yield changegroup.closechunk()
1588 yield changegroup.closechunk()
1567 self.ui.progress(_('bundling'), None)
1589 self.ui.progress(_('bundling'), None)
1568
1590
1569 if csets:
1591 if csets:
1570 self.hook('outgoing', node=hex(csets[0]), source=source)
1592 self.hook('outgoing', node=hex(csets[0]), source=source)
1571
1593
1572 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1594 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1573
1595
1574 def changegroup(self, basenodes, source):
1596 def changegroup(self, basenodes, source):
1575 # to avoid a race we use changegroupsubset() (issue1320)
1597 # to avoid a race we use changegroupsubset() (issue1320)
1576 return self.changegroupsubset(basenodes, self.heads(), source)
1598 return self.changegroupsubset(basenodes, self.heads(), source)
1577
1599
1578 def _changegroup(self, nodes, source):
1600 def _changegroup(self, nodes, source):
1579 """Compute the changegroup of all nodes that we have that a recipient
1601 """Compute the changegroup of all nodes that we have that a recipient
1580 doesn't. Return a chunkbuffer object whose read() method will return
1602 doesn't. Return a chunkbuffer object whose read() method will return
1581 successive changegroup chunks.
1603 successive changegroup chunks.
1582
1604
1583 This is much easier than the previous function as we can assume that
1605 This is much easier than the previous function as we can assume that
1584 the recipient already has any changenode we aren't sending.
1606 the recipient already has any changenode we aren't sending.
1585
1607
1586 nodes is the set of nodes to send"""
1608 nodes is the set of nodes to send"""
1587
1609
1588 self.hook('preoutgoing', throw=True, source=source)
1610 self.hook('preoutgoing', throw=True, source=source)
1589
1611
1590 cl = self.changelog
1612 cl = self.changelog
1591 revset = set([cl.rev(n) for n in nodes])
1613 revset = set([cl.rev(n) for n in nodes])
1592 self.changegroupinfo(nodes, source)
1614 self.changegroupinfo(nodes, source)
1593
1615
1594 def gennodelst(log):
1616 def gennodelst(log):
1595 for r in log:
1617 for r in log:
1596 if log.linkrev(r) in revset:
1618 if log.linkrev(r) in revset:
1597 yield log.node(r)
1619 yield log.node(r)
1598
1620
1599 def lookuplinkrev_func(revlog):
1621 def lookuplinkrev_func(revlog):
1600 def lookuplinkrev(n):
1622 def lookuplinkrev(n):
1601 return cl.node(revlog.linkrev(revlog.rev(n)))
1623 return cl.node(revlog.linkrev(revlog.rev(n)))
1602 return lookuplinkrev
1624 return lookuplinkrev
1603
1625
1604 def gengroup():
1626 def gengroup():
1605 '''yield a sequence of changegroup chunks (strings)'''
1627 '''yield a sequence of changegroup chunks (strings)'''
1606 # construct a list of all changed files
1628 # construct a list of all changed files
1607 changedfiles = set()
1629 changedfiles = set()
1608 mmfs = {}
1630 mmfs = {}
1609 collect = changegroup.collector(cl, mmfs, changedfiles)
1631 collect = changegroup.collector(cl, mmfs, changedfiles)
1610
1632
1611 for count, chunk in enumerate(cl.group(nodes, lambda x: x, collect)):
1633 for count, chunk in enumerate(cl.group(nodes, lambda x: x, collect)):
1612 # revlog.group yields three entries per node, so
1634 # revlog.group yields three entries per node, so
1613 # dividing by 3 gives an approximation of how many
1635 # dividing by 3 gives an approximation of how many
1614 # nodes have been processed.
1636 # nodes have been processed.
1615 self.ui.progress(_('bundling'), count / 3, unit=_('changesets'))
1637 self.ui.progress(_('bundling'), count / 3, unit=_('changesets'))
1616 yield chunk
1638 yield chunk
1617 efiles = len(changedfiles)
1639 efiles = len(changedfiles)
1618 changecount = count / 3
1640 changecount = count / 3
1619 self.ui.progress(_('bundling'), None)
1641 self.ui.progress(_('bundling'), None)
1620
1642
1621 mnfst = self.manifest
1643 mnfst = self.manifest
1622 nodeiter = gennodelst(mnfst)
1644 nodeiter = gennodelst(mnfst)
1623 for count, chunk in enumerate(mnfst.group(nodeiter,
1645 for count, chunk in enumerate(mnfst.group(nodeiter,
1624 lookuplinkrev_func(mnfst))):
1646 lookuplinkrev_func(mnfst))):
1625 # see above comment for why we divide by 3
1647 # see above comment for why we divide by 3
1626 self.ui.progress(_('bundling'), count / 3,
1648 self.ui.progress(_('bundling'), count / 3,
1627 unit=_('manifests'), total=changecount)
1649 unit=_('manifests'), total=changecount)
1628 yield chunk
1650 yield chunk
1629 self.ui.progress(_('bundling'), None)
1651 self.ui.progress(_('bundling'), None)
1630
1652
1631 for idx, fname in enumerate(sorted(changedfiles)):
1653 for idx, fname in enumerate(sorted(changedfiles)):
1632 filerevlog = self.file(fname)
1654 filerevlog = self.file(fname)
1633 if not len(filerevlog):
1655 if not len(filerevlog):
1634 raise util.Abort(_("empty or missing revlog for %s") % fname)
1656 raise util.Abort(_("empty or missing revlog for %s") % fname)
1635 nodeiter = gennodelst(filerevlog)
1657 nodeiter = gennodelst(filerevlog)
1636 nodeiter = list(nodeiter)
1658 nodeiter = list(nodeiter)
1637 if nodeiter:
1659 if nodeiter:
1638 yield changegroup.chunkheader(len(fname))
1660 yield changegroup.chunkheader(len(fname))
1639 yield fname
1661 yield fname
1640 lookup = lookuplinkrev_func(filerevlog)
1662 lookup = lookuplinkrev_func(filerevlog)
1641 for chunk in filerevlog.group(nodeiter, lookup):
1663 for chunk in filerevlog.group(nodeiter, lookup):
1642 self.ui.progress(
1664 self.ui.progress(
1643 _('bundling'), idx, item=fname,
1665 _('bundling'), idx, item=fname,
1644 total=efiles, unit=_('files'))
1666 total=efiles, unit=_('files'))
1645 yield chunk
1667 yield chunk
1646 self.ui.progress(_('bundling'), None)
1668 self.ui.progress(_('bundling'), None)
1647
1669
1648 yield changegroup.closechunk()
1670 yield changegroup.closechunk()
1649
1671
1650 if nodes:
1672 if nodes:
1651 self.hook('outgoing', node=hex(nodes[0]), source=source)
1673 self.hook('outgoing', node=hex(nodes[0]), source=source)
1652
1674
1653 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1675 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1654
1676
1655 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1677 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1656 """Add the changegroup returned by source.read() to this repo.
1678 """Add the changegroup returned by source.read() to this repo.
1657 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1679 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1658 the URL of the repo where this changegroup is coming from.
1680 the URL of the repo where this changegroup is coming from.
1659 If lock is not None, the function takes ownership of the lock
1681 If lock is not None, the function takes ownership of the lock
1660 and releases it after the changegroup is added.
1682 and releases it after the changegroup is added.
1661
1683
1662 Return an integer summarizing the change to this repo:
1684 Return an integer summarizing the change to this repo:
1663 - nothing changed or no source: 0
1685 - nothing changed or no source: 0
1664 - more heads than before: 1+added heads (2..n)
1686 - more heads than before: 1+added heads (2..n)
1665 - fewer heads than before: -1-removed heads (-2..-n)
1687 - fewer heads than before: -1-removed heads (-2..-n)
1666 - number of heads stays the same: 1
1688 - number of heads stays the same: 1
1667 """
1689 """
1668 def csmap(x):
1690 def csmap(x):
1669 self.ui.debug("add changeset %s\n" % short(x))
1691 self.ui.debug("add changeset %s\n" % short(x))
1670 return len(cl)
1692 return len(cl)
1671
1693
1672 def revmap(x):
1694 def revmap(x):
1673 return cl.rev(x)
1695 return cl.rev(x)
1674
1696
1675 if not source:
1697 if not source:
1676 return 0
1698 return 0
1677
1699
1678 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1700 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1679
1701
1680 changesets = files = revisions = 0
1702 changesets = files = revisions = 0
1681 efiles = set()
1703 efiles = set()
1682
1704
1683 # write changelog data to temp files so concurrent readers will not see
1705 # write changelog data to temp files so concurrent readers will not see
1684 # inconsistent view
1706 # inconsistent view
1685 cl = self.changelog
1707 cl = self.changelog
1686 cl.delayupdate()
1708 cl.delayupdate()
1687 oldheads = len(cl.heads())
1709 oldheads = len(cl.heads())
1688
1710
1689 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1711 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1690 try:
1712 try:
1691 trp = weakref.proxy(tr)
1713 trp = weakref.proxy(tr)
1692 # pull off the changeset group
1714 # pull off the changeset group
1693 self.ui.status(_("adding changesets\n"))
1715 self.ui.status(_("adding changesets\n"))
1694 clstart = len(cl)
1716 clstart = len(cl)
1695 class prog(object):
1717 class prog(object):
1696 step = _('changesets')
1718 step = _('changesets')
1697 count = 1
1719 count = 1
1698 ui = self.ui
1720 ui = self.ui
1699 total = None
1721 total = None
1700 def __call__(self):
1722 def __call__(self):
1701 self.ui.progress(self.step, self.count, unit=_('chunks'),
1723 self.ui.progress(self.step, self.count, unit=_('chunks'),
1702 total=self.total)
1724 total=self.total)
1703 self.count += 1
1725 self.count += 1
1704 pr = prog()
1726 pr = prog()
1705 source.callback = pr
1727 source.callback = pr
1706
1728
1707 if (cl.addgroup(source, csmap, trp) is None
1729 if (cl.addgroup(source, csmap, trp) is None
1708 and not emptyok):
1730 and not emptyok):
1709 raise util.Abort(_("received changelog group is empty"))
1731 raise util.Abort(_("received changelog group is empty"))
1710 clend = len(cl)
1732 clend = len(cl)
1711 changesets = clend - clstart
1733 changesets = clend - clstart
1712 for c in xrange(clstart, clend):
1734 for c in xrange(clstart, clend):
1713 efiles.update(self[c].files())
1735 efiles.update(self[c].files())
1714 efiles = len(efiles)
1736 efiles = len(efiles)
1715 self.ui.progress(_('changesets'), None)
1737 self.ui.progress(_('changesets'), None)
1716
1738
1717 # pull off the manifest group
1739 # pull off the manifest group
1718 self.ui.status(_("adding manifests\n"))
1740 self.ui.status(_("adding manifests\n"))
1719 pr.step = _('manifests')
1741 pr.step = _('manifests')
1720 pr.count = 1
1742 pr.count = 1
1721 pr.total = changesets # manifests <= changesets
1743 pr.total = changesets # manifests <= changesets
1722 # no need to check for empty manifest group here:
1744 # no need to check for empty manifest group here:
1723 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1745 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1724 # no new manifest will be created and the manifest group will
1746 # no new manifest will be created and the manifest group will
1725 # be empty during the pull
1747 # be empty during the pull
1726 self.manifest.addgroup(source, revmap, trp)
1748 self.manifest.addgroup(source, revmap, trp)
1727 self.ui.progress(_('manifests'), None)
1749 self.ui.progress(_('manifests'), None)
1728
1750
1729 needfiles = {}
1751 needfiles = {}
1730 if self.ui.configbool('server', 'validate', default=False):
1752 if self.ui.configbool('server', 'validate', default=False):
1731 # validate incoming csets have their manifests
1753 # validate incoming csets have their manifests
1732 for cset in xrange(clstart, clend):
1754 for cset in xrange(clstart, clend):
1733 mfest = self.changelog.read(self.changelog.node(cset))[0]
1755 mfest = self.changelog.read(self.changelog.node(cset))[0]
1734 mfest = self.manifest.readdelta(mfest)
1756 mfest = self.manifest.readdelta(mfest)
1735 # store file nodes we must see
1757 # store file nodes we must see
1736 for f, n in mfest.iteritems():
1758 for f, n in mfest.iteritems():
1737 needfiles.setdefault(f, set()).add(n)
1759 needfiles.setdefault(f, set()).add(n)
1738
1760
1739 # process the files
1761 # process the files
1740 self.ui.status(_("adding file changes\n"))
1762 self.ui.status(_("adding file changes\n"))
1741 pr.step = 'files'
1763 pr.step = 'files'
1742 pr.count = 1
1764 pr.count = 1
1743 pr.total = efiles
1765 pr.total = efiles
1744 source.callback = None
1766 source.callback = None
1745
1767
1746 while 1:
1768 while 1:
1747 f = source.chunk()
1769 f = source.chunk()
1748 if not f:
1770 if not f:
1749 break
1771 break
1750 self.ui.debug("adding %s revisions\n" % f)
1772 self.ui.debug("adding %s revisions\n" % f)
1751 pr()
1773 pr()
1752 fl = self.file(f)
1774 fl = self.file(f)
1753 o = len(fl)
1775 o = len(fl)
1754 if fl.addgroup(source, revmap, trp) is None:
1776 if fl.addgroup(source, revmap, trp) is None:
1755 raise util.Abort(_("received file revlog group is empty"))
1777 raise util.Abort(_("received file revlog group is empty"))
1756 revisions += len(fl) - o
1778 revisions += len(fl) - o
1757 files += 1
1779 files += 1
1758 if f in needfiles:
1780 if f in needfiles:
1759 needs = needfiles[f]
1781 needs = needfiles[f]
1760 for new in xrange(o, len(fl)):
1782 for new in xrange(o, len(fl)):
1761 n = fl.node(new)
1783 n = fl.node(new)
1762 if n in needs:
1784 if n in needs:
1763 needs.remove(n)
1785 needs.remove(n)
1764 if not needs:
1786 if not needs:
1765 del needfiles[f]
1787 del needfiles[f]
1766 self.ui.progress(_('files'), None)
1788 self.ui.progress(_('files'), None)
1767
1789
1768 for f, needs in needfiles.iteritems():
1790 for f, needs in needfiles.iteritems():
1769 fl = self.file(f)
1791 fl = self.file(f)
1770 for n in needs:
1792 for n in needs:
1771 try:
1793 try:
1772 fl.rev(n)
1794 fl.rev(n)
1773 except error.LookupError:
1795 except error.LookupError:
1774 raise util.Abort(
1796 raise util.Abort(
1775 _('missing file data for %s:%s - run hg verify') %
1797 _('missing file data for %s:%s - run hg verify') %
1776 (f, hex(n)))
1798 (f, hex(n)))
1777
1799
1778 newheads = len(cl.heads())
1800 newheads = len(cl.heads())
1779 heads = ""
1801 heads = ""
1780 if oldheads and newheads != oldheads:
1802 if oldheads and newheads != oldheads:
1781 heads = _(" (%+d heads)") % (newheads - oldheads)
1803 heads = _(" (%+d heads)") % (newheads - oldheads)
1782
1804
1783 self.ui.status(_("added %d changesets"
1805 self.ui.status(_("added %d changesets"
1784 " with %d changes to %d files%s\n")
1806 " with %d changes to %d files%s\n")
1785 % (changesets, revisions, files, heads))
1807 % (changesets, revisions, files, heads))
1786
1808
1787 if changesets > 0:
1809 if changesets > 0:
1788 p = lambda: cl.writepending() and self.root or ""
1810 p = lambda: cl.writepending() and self.root or ""
1789 self.hook('pretxnchangegroup', throw=True,
1811 self.hook('pretxnchangegroup', throw=True,
1790 node=hex(cl.node(clstart)), source=srctype,
1812 node=hex(cl.node(clstart)), source=srctype,
1791 url=url, pending=p)
1813 url=url, pending=p)
1792
1814
1793 # make changelog see real files again
1815 # make changelog see real files again
1794 cl.finalize(trp)
1816 cl.finalize(trp)
1795
1817
1796 tr.close()
1818 tr.close()
1797 finally:
1819 finally:
1798 tr.release()
1820 tr.release()
1799 if lock:
1821 if lock:
1800 lock.release()
1822 lock.release()
1801
1823
1802 if changesets > 0:
1824 if changesets > 0:
1803 # forcefully update the on-disk branch cache
1825 # forcefully update the on-disk branch cache
1804 self.ui.debug("updating the branch cache\n")
1826 self.ui.debug("updating the branch cache\n")
1805 self.updatebranchcache()
1827 self.updatebranchcache()
1806 self.hook("changegroup", node=hex(cl.node(clstart)),
1828 self.hook("changegroup", node=hex(cl.node(clstart)),
1807 source=srctype, url=url)
1829 source=srctype, url=url)
1808
1830
1809 for i in xrange(clstart, clend):
1831 for i in xrange(clstart, clend):
1810 self.hook("incoming", node=hex(cl.node(i)),
1832 self.hook("incoming", node=hex(cl.node(i)),
1811 source=srctype, url=url)
1833 source=srctype, url=url)
1812
1834
1813 # never return 0 here:
1835 # never return 0 here:
1814 if newheads < oldheads:
1836 if newheads < oldheads:
1815 return newheads - oldheads - 1
1837 return newheads - oldheads - 1
1816 else:
1838 else:
1817 return newheads - oldheads + 1
1839 return newheads - oldheads + 1
1818
1840
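The head-count arithmetic at the end of addchangegroup() is easy to misread from call sites, so here is the documented convention replayed as a stand-alone helper with a few checks (summarize_heads is only an illustration of the return values described in the docstring):

def summarize_heads(oldheads, newheads):
    # never return 0: +1 means "no new heads", larger positive values count
    # added heads, negative values count removed heads (offset by one)
    if newheads < oldheads:
        return newheads - oldheads - 1
    return newheads - oldheads + 1

assert summarize_heads(1, 1) == 1     # head count unchanged
assert summarize_heads(1, 3) == 3     # two heads added   -> 1 + 2
assert summarize_heads(3, 1) == -3    # two heads removed -> -1 - 2
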
1819
1841
1820 def stream_in(self, remote, requirements):
1842 def stream_in(self, remote, requirements):
1821 lock = self.lock()
1843 lock = self.lock()
1822 try:
1844 try:
1823 fp = remote.stream_out()
1845 fp = remote.stream_out()
1824 l = fp.readline()
1846 l = fp.readline()
1825 try:
1847 try:
1826 resp = int(l)
1848 resp = int(l)
1827 except ValueError:
1849 except ValueError:
1828 raise error.ResponseError(
1850 raise error.ResponseError(
1829 _('Unexpected response from remote server:'), l)
1851 _('Unexpected response from remote server:'), l)
1830 if resp == 1:
1852 if resp == 1:
1831 raise util.Abort(_('operation forbidden by server'))
1853 raise util.Abort(_('operation forbidden by server'))
1832 elif resp == 2:
1854 elif resp == 2:
1833 raise util.Abort(_('locking the remote repository failed'))
1855 raise util.Abort(_('locking the remote repository failed'))
1834 elif resp != 0:
1856 elif resp != 0:
1835 raise util.Abort(_('the server sent an unknown error code'))
1857 raise util.Abort(_('the server sent an unknown error code'))
1836 self.ui.status(_('streaming all changes\n'))
1858 self.ui.status(_('streaming all changes\n'))
1837 l = fp.readline()
1859 l = fp.readline()
1838 try:
1860 try:
1839 total_files, total_bytes = map(int, l.split(' ', 1))
1861 total_files, total_bytes = map(int, l.split(' ', 1))
1840 except (ValueError, TypeError):
1862 except (ValueError, TypeError):
1841 raise error.ResponseError(
1863 raise error.ResponseError(
1842 _('Unexpected response from remote server:'), l)
1864 _('Unexpected response from remote server:'), l)
1843 self.ui.status(_('%d files to transfer, %s of data\n') %
1865 self.ui.status(_('%d files to transfer, %s of data\n') %
1844 (total_files, util.bytecount(total_bytes)))
1866 (total_files, util.bytecount(total_bytes)))
1845 start = time.time()
1867 start = time.time()
1846 for i in xrange(total_files):
1868 for i in xrange(total_files):
1847 # XXX doesn't support '\n' or '\r' in filenames
1869 # XXX doesn't support '\n' or '\r' in filenames
1848 l = fp.readline()
1870 l = fp.readline()
1849 try:
1871 try:
1850 name, size = l.split('\0', 1)
1872 name, size = l.split('\0', 1)
1851 size = int(size)
1873 size = int(size)
1852 except (ValueError, TypeError):
1874 except (ValueError, TypeError):
1853 raise error.ResponseError(
1875 raise error.ResponseError(
1854 _('Unexpected response from remote server:'), l)
1876 _('Unexpected response from remote server:'), l)
1855 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1877 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1856 # for backwards compat, name was partially encoded
1878 # for backwards compat, name was partially encoded
1857 ofp = self.sopener(store.decodedir(name), 'w')
1879 ofp = self.sopener(store.decodedir(name), 'w')
1858 for chunk in util.filechunkiter(fp, limit=size):
1880 for chunk in util.filechunkiter(fp, limit=size):
1859 ofp.write(chunk)
1881 ofp.write(chunk)
1860 ofp.close()
1882 ofp.close()
1861 elapsed = time.time() - start
1883 elapsed = time.time() - start
1862 if elapsed <= 0:
1884 if elapsed <= 0:
1863 elapsed = 0.001
1885 elapsed = 0.001
1864 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1886 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1865 (util.bytecount(total_bytes), elapsed,
1887 (util.bytecount(total_bytes), elapsed,
1866 util.bytecount(total_bytes / elapsed)))
1888 util.bytecount(total_bytes / elapsed)))
1867
1889
1868 # new requirements = old non-format requirements + new format-related
1890 # new requirements = old non-format requirements + new format-related
1869 # requirements from the streamed-in repository
1891 # requirements from the streamed-in repository
1870 requirements.update(set(self.requirements) - self.supportedformats)
1892 requirements.update(set(self.requirements) - self.supportedformats)
1871 self._applyrequirements(requirements)
1893 self._applyrequirements(requirements)
1872 self._writerequirements()
1894 self._writerequirements()
1873
1895
1874 self.invalidate()
1896 self.invalidate()
1875 return len(self.heads()) + 1
1897 return len(self.heads()) + 1
1876 finally:
1898 finally:
1877 lock.release()
1899 lock.release()
1878
1900
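stream_in() consumes a very small wire format: a status line, a 'total_files total_bytes' line, then one 'name\0size' header per file followed by that many raw bytes. The sketch below parses the same layout from an in-memory buffer; parse_stream_header() and its payload are invented for illustration and are not the real client code path:

import io

def parse_stream_header(fp):
    resp = int(fp.readline())                     # 0 = ok, 1/2 = server errors
    if resp != 0:
        raise ValueError('server refused streaming: %d' % resp)
    total_files, total_bytes = map(int, fp.readline().split(b' ', 1))
    entries = []
    for _ in range(total_files):
        name, size = fp.readline().split(b'\0', 1)
        entries.append((name, fp.read(int(size))))    # raw revlog bytes
    return total_bytes, entries

buf = io.BytesIO(b'0\n1 4\ndata/a.i\x004\nABCD')
total_bytes, entries = parse_stream_header(buf)
assert total_bytes == 4 and entries == [(b'data/a.i', b'ABCD')]
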
1879 def clone(self, remote, heads=[], stream=False):
1901 def clone(self, remote, heads=[], stream=False):
1880 '''clone remote repository.
1902 '''clone remote repository.
1881
1903
1882 keyword arguments:
1904 keyword arguments:
1883 heads: list of revs to clone (forces use of pull)
1905 heads: list of revs to clone (forces use of pull)
1884 stream: use streaming clone if possible'''
1906 stream: use streaming clone if possible'''
1885
1907
1886 # now, all clients that can request uncompressed clones can
1908 # now, all clients that can request uncompressed clones can
1887 # read repo formats supported by all servers that can serve
1909 # read repo formats supported by all servers that can serve
1888 # them.
1910 # them.
1889
1911
1890 # if revlog format changes, client will have to check version
1912 # if revlog format changes, client will have to check version
1891 # and format flags on "stream" capability, and use
1913 # and format flags on "stream" capability, and use
1892 # uncompressed only if compatible.
1914 # uncompressed only if compatible.
1893
1915
1894 if stream and not heads:
1916 if stream and not heads:
1895 # 'stream' means remote revlog format is revlogv1 only
1917 # 'stream' means remote revlog format is revlogv1 only
1896 if remote.capable('stream'):
1918 if remote.capable('stream'):
1897 return self.stream_in(remote, set(('revlogv1',)))
1919 return self.stream_in(remote, set(('revlogv1',)))
1898 # otherwise, 'streamreqs' contains the remote revlog format
1920 # otherwise, 'streamreqs' contains the remote revlog format
1899 streamreqs = remote.capable('streamreqs')
1921 streamreqs = remote.capable('streamreqs')
1900 if streamreqs:
1922 if streamreqs:
1901 streamreqs = set(streamreqs.split(','))
1923 streamreqs = set(streamreqs.split(','))
1902 # if we support it, stream in and adjust our requirements
1924 # if we support it, stream in and adjust our requirements
1903 if not streamreqs - self.supportedformats:
1925 if not streamreqs - self.supportedformats:
1904 return self.stream_in(remote, streamreqs)
1926 return self.stream_in(remote, streamreqs)
1905 return self.pull(remote, heads)
1927 return self.pull(remote, heads)
1906
1928
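clone() only falls back to streaming when every requirement listed in the server's 'streamreqs' capability is locally supported; the test is a plain set difference over the comma-separated capability string. A small stand-alone version (stream_compatible and the requirement names are illustrative):

def stream_compatible(streamreqs, supportedformats):
    # same test as "not streamreqs - self.supportedformats" above, starting
    # from the comma-separated capability string
    return not (set(streamreqs.split(',')) - set(supportedformats))

assert stream_compatible('revlogv1', ['revlogv1', 'somefutureformat'])
assert not stream_compatible('revlogv1,somefutureformat', ['revlogv1'])
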
1907 def pushkey(self, namespace, key, old, new):
1929 def pushkey(self, namespace, key, old, new):
1908 return pushkey.push(self, namespace, key, old, new)
1930 return pushkey.push(self, namespace, key, old, new)
1909
1931
1910 def listkeys(self, namespace):
1932 def listkeys(self, namespace):
1911 return pushkey.list(self, namespace)
1933 return pushkey.list(self, namespace)
1912
1934
1913 def debugwireargs(self, one, two, three=None, four=None):
1935 def debugwireargs(self, one, two, three=None, four=None):
1914 '''used to test argument passing over the wire'''
1936 '''used to test argument passing over the wire'''
1915 return "%s %s %s %s" % (one, two, three, four)
1937 return "%s %s %s %s" % (one, two, three, four)
1916
1938
1917 # used to avoid circular references so destructors work
1939 # used to avoid circular references so destructors work
1918 def aftertrans(files):
1940 def aftertrans(files):
1919 renamefiles = [tuple(t) for t in files]
1941 renamefiles = [tuple(t) for t in files]
1920 def a():
1942 def a():
1921 for src, dest in renamefiles:
1943 for src, dest in renamefiles:
1922 util.rename(src, dest)
1944 util.rename(src, dest)
1923 return a
1945 return a
1924
1946
1925 def instance(ui, path, create):
1947 def instance(ui, path, create):
1926 return localrepository(ui, util.drop_scheme('file', path), create)
1948 return localrepository(ui, util.drop_scheme('file', path), create)
1927
1949
1928 def islocal(path):
1950 def islocal(path):
1929 return True
1951 return True
@@ -1,1263 +1,1282 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Storage back-end for Mercurial.
8 """Storage back-end for Mercurial.
9
9
10 This provides efficient delta storage with O(1) retrieve and append
10 This provides efficient delta storage with O(1) retrieve and append
11 and O(changes) merge between branches.
11 and O(changes) merge between branches.
12 """
12 """
13
13
14 # import stuff from node for others to import from revlog
14 # import stuff from node for others to import from revlog
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
16 from i18n import _
16 from i18n import _
17 import changegroup, ancestor, mdiff, parsers, error, util
17 import changegroup, ancestor, mdiff, parsers, error, util
18 import struct, zlib, errno
18 import struct, zlib, errno
19
19
20 _pack = struct.pack
20 _pack = struct.pack
21 _unpack = struct.unpack
21 _unpack = struct.unpack
22 _compress = zlib.compress
22 _compress = zlib.compress
23 _decompress = zlib.decompress
23 _decompress = zlib.decompress
24 _sha = util.sha1
24 _sha = util.sha1
25
25
26 # revlog header flags
26 # revlog header flags
27 REVLOGV0 = 0
27 REVLOGV0 = 0
28 REVLOGNG = 1
28 REVLOGNG = 1
29 REVLOGNGINLINEDATA = (1 << 16)
29 REVLOGNGINLINEDATA = (1 << 16)
30 REVLOGSHALLOW = (1 << 17)
30 REVLOGSHALLOW = (1 << 17)
31 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
31 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
32 REVLOG_DEFAULT_FORMAT = REVLOGNG
32 REVLOG_DEFAULT_FORMAT = REVLOGNG
33 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
33 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
34 REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGSHALLOW
34 REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGSHALLOW
35
35
36 # revlog index flags
36 # revlog index flags
37 REVIDX_PARENTDELTA = 1
37 REVIDX_PARENTDELTA = 1
38 REVIDX_PUNCHED_FLAG = 2
38 REVIDX_PUNCHED_FLAG = 2
39 REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA
39 REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA
40
40
41 # max size of revlog with inline data
41 # max size of revlog with inline data
42 _maxinline = 131072
42 _maxinline = 131072
43 _chunksize = 1048576
43 _chunksize = 1048576
44
44
45 RevlogError = error.RevlogError
45 RevlogError = error.RevlogError
46 LookupError = error.LookupError
46 LookupError = error.LookupError
47
47
48 def getoffset(q):
48 def getoffset(q):
49 return int(q >> 16)
49 return int(q >> 16)
50
50
51 def gettype(q):
51 def gettype(q):
52 return int(q & 0xFFFF)
52 return int(q & 0xFFFF)
53
53
54 def offset_type(offset, type):
54 def offset_type(offset, type):
55 return long(long(offset) << 16 | type)
55 return long(long(offset) << 16 | type)
56
56
57 nullhash = _sha(nullid)
57 nullhash = _sha(nullid)
58
58
59 def hash(text, p1, p2):
59 def hash(text, p1, p2):
60 """generate a hash from the given text and its parent hashes
60 """generate a hash from the given text and its parent hashes
61
61
62 This hash combines both the current file contents and its history
62 This hash combines both the current file contents and its history
63 in a manner that makes it easy to distinguish nodes with the same
63 in a manner that makes it easy to distinguish nodes with the same
64 content in the revision graph.
64 content in the revision graph.
65 """
65 """
66 # As of now, if one of the parent nodes is null, p2 is null
66 # As of now, if one of the parent nodes is null, p2 is null
67 if p2 == nullid:
67 if p2 == nullid:
68 # deep copy of a hash is faster than creating one
68 # deep copy of a hash is faster than creating one
69 s = nullhash.copy()
69 s = nullhash.copy()
70 s.update(p1)
70 s.update(p1)
71 else:
71 else:
72 # none of the parent nodes are nullid
72 # none of the parent nodes are nullid
73 l = [p1, p2]
73 l = [p1, p2]
74 l.sort()
74 l.sort()
75 s = _sha(l[0])
75 s = _sha(l[0])
76 s.update(l[1])
76 s.update(l[1])
77 s.update(text)
77 s.update(text)
78 return s.digest()
78 return s.digest()
79
79
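The digest computed by hash() above is SHA-1 over the two parent nodes in sorted order followed by the revision text; the nullid branch is only a speed optimization, since nullid sorts before any other 20-byte node. A stand-alone re-computation using hashlib (nodehash is an illustrative name, not the revlog API):

import hashlib

NULLID = b'\0' * 20

def nodehash(text, p1, p2=NULLID):
    # sorted parents first, then the text -- equivalent to both branches of
    # hash() above
    s = hashlib.sha1()
    for p in sorted([p1, p2]):
        s.update(p)
    s.update(text)
    return s.digest()

assert len(nodehash(b'file contents\n', b'\x01' * 20)) == 20
assert nodehash(b'x', b'\x01' * 20, b'\x02' * 20) == nodehash(b'x', b'\x02' * 20, b'\x01' * 20)
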
80 def compress(text):
80 def compress(text):
81 """ generate a possibly-compressed representation of text """
81 """ generate a possibly-compressed representation of text """
82 if not text:
82 if not text:
83 return ("", text)
83 return ("", text)
84 l = len(text)
84 l = len(text)
85 bin = None
85 bin = None
86 if l < 44:
86 if l < 44:
87 pass
87 pass
88 elif l > 1000000:
88 elif l > 1000000:
89 # zlib makes an internal copy, thus doubling memory usage for
89 # zlib makes an internal copy, thus doubling memory usage for
90 # large files, so let's do this in pieces
90 # large files, so let's do this in pieces
91 z = zlib.compressobj()
91 z = zlib.compressobj()
92 p = []
92 p = []
93 pos = 0
93 pos = 0
94 while pos < l:
94 while pos < l:
95 pos2 = pos + 2**20
95 pos2 = pos + 2**20
96 p.append(z.compress(text[pos:pos2]))
96 p.append(z.compress(text[pos:pos2]))
97 pos = pos2
97 pos = pos2
98 p.append(z.flush())
98 p.append(z.flush())
99 if sum(map(len, p)) < l:
99 if sum(map(len, p)) < l:
100 bin = "".join(p)
100 bin = "".join(p)
101 else:
101 else:
102 bin = _compress(text)
102 bin = _compress(text)
103 if bin is None or len(bin) > l:
103 if bin is None or len(bin) > l:
104 if text[0] == '\0':
104 if text[0] == '\0':
105 return ("", text)
105 return ("", text)
106 return ('u', text)
106 return ('u', text)
107 return ("", bin)
107 return ("", bin)
108
108
109 def decompress(bin):
109 def decompress(bin):
110 """ decompress the given input """
110 """ decompress the given input """
111 if not bin:
111 if not bin:
112 return bin
112 return bin
113 t = bin[0]
113 t = bin[0]
114 if t == '\0':
114 if t == '\0':
115 return bin
115 return bin
116 if t == 'x':
116 if t == 'x':
117 return _decompress(bin)
117 return _decompress(bin)
118 if t == 'u':
118 if t == 'u':
119 return bin[1:]
119 return bin[1:]
120 raise RevlogError(_("unknown compression type %r") % t)
120 raise RevlogError(_("unknown compression type %r") % t)
121
121
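compress() and decompress() agree on a one-byte tag: zlib output (which always starts with 'x'), 'u' plus the raw text when compression does not pay off, or the text itself when it is empty or starts with a NUL byte. The round-trip below exercises the same convention with plain zlib; it is a simplified sketch (no piecewise compression, no 44-byte threshold), not the functions above:

import zlib

def store(text):
    # encode side, mirroring compress() above in simplified form
    if not text:
        return text
    candidate = zlib.compress(text)
    if len(candidate) < len(text):
        return candidate                  # zlib stream, first byte is 'x'
    if text[0:1] == b'\0':
        return text                       # already unambiguous
    return b'u' + text                    # tagged as stored uncompressed

def restore(data):
    # decode side, mirroring decompress() above
    if not data or data[0:1] == b'\0':
        return data
    if data[0:1] == b'x':
        return zlib.decompress(data)
    if data[0:1] == b'u':
        return data[1:]
    raise ValueError('unknown compression tag %r' % data[0:1])

assert restore(store(b'a' * 200)) == b'a' * 200          # compresses well
assert restore(store(b'short text')) == b'short text'    # stored as 'u' + text
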
122 indexformatv0 = ">4l20s20s20s"
122 indexformatv0 = ">4l20s20s20s"
123 v0shaoffset = 56
123 v0shaoffset = 56
124
124
125 class revlogoldio(object):
125 class revlogoldio(object):
126 def __init__(self):
126 def __init__(self):
127 self.size = struct.calcsize(indexformatv0)
127 self.size = struct.calcsize(indexformatv0)
128
128
129 def parseindex(self, data, inline):
129 def parseindex(self, data, inline):
130 s = self.size
130 s = self.size
131 index = []
131 index = []
132 nodemap = {nullid: nullrev}
132 nodemap = {nullid: nullrev}
133 n = off = 0
133 n = off = 0
134 l = len(data)
134 l = len(data)
135 while off + s <= l:
135 while off + s <= l:
136 cur = data[off:off + s]
136 cur = data[off:off + s]
137 off += s
137 off += s
138 e = _unpack(indexformatv0, cur)
138 e = _unpack(indexformatv0, cur)
139 # transform to revlogv1 format
139 # transform to revlogv1 format
140 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
140 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
141 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
141 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
142 index.append(e2)
142 index.append(e2)
143 nodemap[e[6]] = n
143 nodemap[e[6]] = n
144 n += 1
144 n += 1
145
145
146 # add the magic null revision at -1
146 # add the magic null revision at -1
147 index.append((0, 0, 0, -1, -1, -1, -1, nullid))
147 index.append((0, 0, 0, -1, -1, -1, -1, nullid))
148
148
149 return index, nodemap, None
149 return index, nodemap, None
150
150
151 def packentry(self, entry, node, version, rev):
151 def packentry(self, entry, node, version, rev):
152 if gettype(entry[0]):
152 if gettype(entry[0]):
153 raise RevlogError(_("index entry flags need RevlogNG"))
153 raise RevlogError(_("index entry flags need RevlogNG"))
154 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
154 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
155 node(entry[5]), node(entry[6]), entry[7])
155 node(entry[5]), node(entry[6]), entry[7])
156 return _pack(indexformatv0, *e2)
156 return _pack(indexformatv0, *e2)
157
157
158 # index ng:
158 # index ng:
159 # 6 bytes: offset
159 # 6 bytes: offset
160 # 2 bytes: flags
160 # 2 bytes: flags
161 # 4 bytes: compressed length
161 # 4 bytes: compressed length
162 # 4 bytes: uncompressed length
162 # 4 bytes: uncompressed length
163 # 4 bytes: base rev
163 # 4 bytes: base rev
164 # 4 bytes: link rev
164 # 4 bytes: link rev
165 # 4 bytes: parent 1 rev
165 # 4 bytes: parent 1 rev
166 # 4 bytes: parent 2 rev
166 # 4 bytes: parent 2 rev
167 # 32 bytes: nodeid
167 # 32 bytes: nodeid
168 indexformatng = ">Qiiiiii20s12x"
168 indexformatng = ">Qiiiiii20s12x"
169 ngshaoffset = 32
169 ngshaoffset = 32
170 versionformat = ">I"
170 versionformat = ">I"
171
171
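Each RevlogNG index record is the 64-byte struct described in the comment above, with the 48-bit data offset and the 16-bit flags folded into the leading quad exactly as offset_type() does. A quick struct round-trip (pack_entry and the sample values are illustrative):

import struct

INDEXFORMATNG = ">Qiiiiii20s12x"          # 64 bytes, matching indexformatng above

def pack_entry(offset, flags, clen, ulen, baserev, linkrev, p1rev, p2rev, nodeid):
    # leading field = (offset << 16) | flags, as in offset_type()
    return struct.pack(INDEXFORMATNG, (offset << 16) | flags,
                       clen, ulen, baserev, linkrev, p1rev, p2rev, nodeid)

entry = pack_entry(4096, 0, 120, 300, 7, 7, 6, -1, b'\xab' * 20)
assert len(entry) == struct.calcsize(INDEXFORMATNG) == 64
first = struct.unpack(INDEXFORMATNG, entry)[0]
assert first >> 16 == 4096 and first & 0xFFFF == 0   # getoffset()/gettype() views
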
172 class revlogio(object):
172 class revlogio(object):
173 def __init__(self):
173 def __init__(self):
174 self.size = struct.calcsize(indexformatng)
174 self.size = struct.calcsize(indexformatng)
175
175
176 def parseindex(self, data, inline):
176 def parseindex(self, data, inline):
177 # call the C implementation to parse the index data
177 # call the C implementation to parse the index data
178 index, cache = parsers.parse_index2(data, inline)
178 index, cache = parsers.parse_index2(data, inline)
179 return index, None, cache
179 return index, None, cache
180
180
181 def packentry(self, entry, node, version, rev):
181 def packentry(self, entry, node, version, rev):
182 p = _pack(indexformatng, *entry)
182 p = _pack(indexformatng, *entry)
183 if rev == 0:
183 if rev == 0:
184 p = _pack(versionformat, version) + p[4:]
184 p = _pack(versionformat, version) + p[4:]
185 return p
185 return p
186
186
187 class revlog(object):
187 class revlog(object):
188 """
188 """
189 the underlying revision storage object
189 the underlying revision storage object
190
190
191 A revlog consists of two parts, an index and the revision data.
191 A revlog consists of two parts, an index and the revision data.
192
192
193 The index is a file with a fixed record size containing
193 The index is a file with a fixed record size containing
194 information on each revision, including its nodeid (hash), the
194 information on each revision, including its nodeid (hash), the
195 nodeids of its parents, the position and offset of its data within
195 nodeids of its parents, the position and offset of its data within
196 the data file, and the revision it's based on. Finally, each entry
196 the data file, and the revision it's based on. Finally, each entry
197 contains a linkrev entry that can serve as a pointer to external
197 contains a linkrev entry that can serve as a pointer to external
198 data.
198 data.
199
199
200 The revision data itself is a linear collection of data chunks.
200 The revision data itself is a linear collection of data chunks.
201 Each chunk represents a revision and is usually represented as a
201 Each chunk represents a revision and is usually represented as a
202 delta against the previous chunk. To bound lookup time, runs of
202 delta against the previous chunk. To bound lookup time, runs of
203 deltas are limited to about 2 times the length of the original
203 deltas are limited to about 2 times the length of the original
204 version data. This makes retrieval of a version proportional to
204 version data. This makes retrieval of a version proportional to
205 its size, or O(1) relative to the number of revisions.
205 its size, or O(1) relative to the number of revisions.
206
206
207 Both pieces of the revlog are written to in an append-only
207 Both pieces of the revlog are written to in an append-only
208 fashion, which means we never need to rewrite a file to insert or
208 fashion, which means we never need to rewrite a file to insert or
209 remove data, and can use some simple techniques to avoid the need
209 remove data, and can use some simple techniques to avoid the need
210 for locking while reading.
210 for locking while reading.
211 """
211 """
212 def __init__(self, opener, indexfile, shallowroot=None):
212 def __init__(self, opener, indexfile, shallowroot=None):
213 """
213 """
214 create a revlog object
214 create a revlog object
215
215
216 opener is a function that abstracts the file opening operation
216 opener is a function that abstracts the file opening operation
217 and can be used to implement COW semantics or the like.
217 and can be used to implement COW semantics or the like.
218 """
218 """
219 self.indexfile = indexfile
219 self.indexfile = indexfile
220 self.datafile = indexfile[:-2] + ".d"
220 self.datafile = indexfile[:-2] + ".d"
221 self.opener = opener
221 self.opener = opener
222 self._cache = None
222 self._cache = None
223 self._chunkcache = (0, '')
223 self._chunkcache = (0, '')
224 self.index = []
224 self.index = []
225 self._shallowroot = shallowroot
225 self._shallowroot = shallowroot
226 self._parentdelta = 0
226 self._parentdelta = 0
227 self._pcache = {}
227 self._pcache = {}
228 self._nodecache = {nullid: nullrev}
228 self._nodecache = {nullid: nullrev}
229 self._nodepos = None
229 self._nodepos = None
230
230
231 v = REVLOG_DEFAULT_VERSION
231 v = REVLOG_DEFAULT_VERSION
232 if hasattr(opener, 'options') and 'defversion' in opener.options:
232 if hasattr(opener, 'options') and 'defversion' in opener.options:
233 v = opener.options['defversion']
233 v = opener.options['defversion']
234 if v & REVLOGNG:
234 if v & REVLOGNG:
235 v |= REVLOGNGINLINEDATA
235 v |= REVLOGNGINLINEDATA
236 if v & REVLOGNG and 'parentdelta' in opener.options:
236 if v & REVLOGNG and 'parentdelta' in opener.options:
237 self._parentdelta = 1
237 self._parentdelta = 1
238
238
239 if shallowroot:
239 if shallowroot:
240 v |= REVLOGSHALLOW
240 v |= REVLOGSHALLOW
241
241
242 i = ''
242 i = ''
243 try:
243 try:
244 f = self.opener(self.indexfile)
244 f = self.opener(self.indexfile)
245 i = f.read()
245 i = f.read()
246 f.close()
246 f.close()
247 if len(i) > 0:
247 if len(i) > 0:
248 v = struct.unpack(versionformat, i[:4])[0]
248 v = struct.unpack(versionformat, i[:4])[0]
249 except IOError, inst:
249 except IOError, inst:
250 if inst.errno != errno.ENOENT:
250 if inst.errno != errno.ENOENT:
251 raise
251 raise
252
252
253 self.version = v
253 self.version = v
254 self._inline = v & REVLOGNGINLINEDATA
254 self._inline = v & REVLOGNGINLINEDATA
255 self._shallow = v & REVLOGSHALLOW
255 self._shallow = v & REVLOGSHALLOW
256 flags = v & ~0xFFFF
256 flags = v & ~0xFFFF
257 fmt = v & 0xFFFF
257 fmt = v & 0xFFFF
258 if fmt == REVLOGV0 and flags:
258 if fmt == REVLOGV0 and flags:
259 raise RevlogError(_("index %s unknown flags %#04x for format v0")
259 raise RevlogError(_("index %s unknown flags %#04x for format v0")
260 % (self.indexfile, flags >> 16))
260 % (self.indexfile, flags >> 16))
261 elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
261 elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
262 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
262 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
263 % (self.indexfile, flags >> 16))
263 % (self.indexfile, flags >> 16))
264 elif fmt > REVLOGNG:
264 elif fmt > REVLOGNG:
265 raise RevlogError(_("index %s unknown format %d")
265 raise RevlogError(_("index %s unknown format %d")
266 % (self.indexfile, fmt))
266 % (self.indexfile, fmt))
267
267
268 self._io = revlogio()
268 self._io = revlogio()
269 if self.version == REVLOGV0:
269 if self.version == REVLOGV0:
270 self._io = revlogoldio()
270 self._io = revlogoldio()
271 try:
271 try:
272 d = self._io.parseindex(i, self._inline)
272 d = self._io.parseindex(i, self._inline)
273 except (ValueError, IndexError):
273 except (ValueError, IndexError):
274 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
274 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
275 self.index, nodemap, self._chunkcache = d
275 self.index, nodemap, self._chunkcache = d
276 if nodemap is not None:
276 if nodemap is not None:
277 self.nodemap = self._nodecache = nodemap
277 self.nodemap = self._nodecache = nodemap
278 if not self._chunkcache:
278 if not self._chunkcache:
279 self._chunkclear()
279 self._chunkclear()
280
280
281 def tip(self):
281 def tip(self):
282 return self.node(len(self.index) - 2)
282 return self.node(len(self.index) - 2)
283 def __len__(self):
283 def __len__(self):
284 return len(self.index) - 1
284 return len(self.index) - 1
285 def __iter__(self):
285 def __iter__(self):
286 for i in xrange(len(self)):
286 for i in xrange(len(self)):
287 yield i
287 yield i
288
288
289 @util.propertycache
289 @util.propertycache
290 def nodemap(self):
290 def nodemap(self):
291 n = self.rev(self.node(0))
291 n = self.rev(self.node(0))
292 return self._nodecache
292 return self._nodecache
293
293
294 def rev(self, node):
294 def rev(self, node):
295 try:
295 try:
296 return self._nodecache[node]
296 return self._nodecache[node]
297 except KeyError:
297 except KeyError:
298 n = self._nodecache
298 n = self._nodecache
299 i = self.index
299 i = self.index
300 p = self._nodepos
300 p = self._nodepos
301 if p is None:
301 if p is None:
302 p = len(i) - 2
302 p = len(i) - 2
303 for r in xrange(p, -1, -1):
303 for r in xrange(p, -1, -1):
304 v = i[r][7]
304 v = i[r][7]
305 n[v] = r
305 n[v] = r
306 if v == node:
306 if v == node:
307 self._nodepos = r - 1
307 self._nodepos = r - 1
308 return r
308 return r
309 raise LookupError(node, self.indexfile, _('no node'))
309 raise LookupError(node, self.indexfile, _('no node'))
310
310
311 def node(self, rev):
311 def node(self, rev):
312 return self.index[rev][7]
312 return self.index[rev][7]
313 def linkrev(self, rev):
313 def linkrev(self, rev):
314 return self.index[rev][4]
314 return self.index[rev][4]
315 def parents(self, node):
315 def parents(self, node):
316 i = self.index
316 i = self.index
317 d = i[self.rev(node)]
317 d = i[self.rev(node)]
318 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
318 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
319 def parentrevs(self, rev):
319 def parentrevs(self, rev):
320 return self.index[rev][5:7]
320 return self.index[rev][5:7]
321 def start(self, rev):
321 def start(self, rev):
322 return int(self.index[rev][0] >> 16)
322 return int(self.index[rev][0] >> 16)
323 def end(self, rev):
323 def end(self, rev):
324 return self.start(rev) + self.length(rev)
324 return self.start(rev) + self.length(rev)
325 def length(self, rev):
325 def length(self, rev):
326 return self.index[rev][1]
326 return self.index[rev][1]
327 def base(self, rev):
327 def base(self, rev):
328 return self.index[rev][3]
328 return self.index[rev][3]
329 def flags(self, rev):
329 def flags(self, rev):
330 return self.index[rev][0] & 0xFFFF
330 return self.index[rev][0] & 0xFFFF
331 def rawsize(self, rev):
331 def rawsize(self, rev):
332 """return the length of the uncompressed text for a given revision"""
332 """return the length of the uncompressed text for a given revision"""
333 l = self.index[rev][2]
333 l = self.index[rev][2]
334 if l >= 0:
334 if l >= 0:
335 return l
335 return l
336
336
337 t = self.revision(self.node(rev))
337 t = self.revision(self.node(rev))
338 return len(t)
338 return len(t)
339 size = rawsize
339 size = rawsize
340
340
341 def reachable(self, node, stop=None):
341 def reachable(self, node, stop=None):
342 """return the set of all nodes ancestral to a given node, including
342 """return the set of all nodes ancestral to a given node, including
343 the node itself, stopping when stop is matched"""
343 the node itself, stopping when stop is matched"""
344 reachable = set((node,))
344 reachable = set((node,))
345 visit = [node]
345 visit = [node]
346 if stop:
346 if stop:
347 stopn = self.rev(stop)
347 stopn = self.rev(stop)
348 else:
348 else:
349 stopn = 0
349 stopn = 0
350 while visit:
350 while visit:
351 n = visit.pop(0)
351 n = visit.pop(0)
352 if n == stop:
352 if n == stop:
353 continue
353 continue
354 if n == nullid:
354 if n == nullid:
355 continue
355 continue
356 for p in self.parents(n):
356 for p in self.parents(n):
357 if self.rev(p) < stopn:
357 if self.rev(p) < stopn:
358 continue
358 continue
359 if p not in reachable:
359 if p not in reachable:
360 reachable.add(p)
360 reachable.add(p)
361 visit.append(p)
361 visit.append(p)
362 return reachable
362 return reachable
363
363
364 def ancestors(self, *revs):
364 def ancestors(self, *revs):
365 """Generate the ancestors of 'revs' in reverse topological order.
365 """Generate the ancestors of 'revs' in reverse topological order.
366
366
367 Yield a sequence of revision numbers starting with the parents
367 Yield a sequence of revision numbers starting with the parents
368 of each revision in revs, i.e., each revision is *not* considered
368 of each revision in revs, i.e., each revision is *not* considered
369 an ancestor of itself. Results are in breadth-first order:
369 an ancestor of itself. Results are in breadth-first order:
370 parents of each rev in revs, then parents of those, etc. Result
370 parents of each rev in revs, then parents of those, etc. Result
371 does not include the null revision."""
371 does not include the null revision."""
372 visit = list(revs)
372 visit = list(revs)
373 seen = set([nullrev])
373 seen = set([nullrev])
374 while visit:
374 while visit:
375 for parent in self.parentrevs(visit.pop(0)):
375 for parent in self.parentrevs(visit.pop(0)):
376 if parent not in seen:
376 if parent not in seen:
377 visit.append(parent)
377 visit.append(parent)
378 seen.add(parent)
378 seen.add(parent)
379 yield parent
379 yield parent
380
380
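ancestors() is a breadth-first walk over parentrevs() that never yields a rev from the starting set itself. The same traversal over a toy parentrevs table (ancestors_bfs and the table are illustrative, not revlog calls):

def ancestors_bfs(parentrevs, revs):
    # parentrevs maps rev -> (p1rev, p2rev); -1 plays the role of nullrev
    visit = list(revs)
    seen = set([-1])
    order = []
    while visit:
        for parent in parentrevs[visit.pop(0)]:
            if parent not in seen:
                visit.append(parent)
                seen.add(parent)
                order.append(parent)
    return order

# 0 <- 1 <- 3 and 0 <- 2 <- 3 (rev 3 is a merge)
parentrevs = {0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, 2)}
assert ancestors_bfs(parentrevs, [3]) == [1, 2, 0]   # breadth-first, rev 3 excluded
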
381 def descendants(self, *revs):
381 def descendants(self, *revs):
382 """Generate the descendants of 'revs' in revision order.
382 """Generate the descendants of 'revs' in revision order.
383
383
384 Yield a sequence of revision numbers starting with a child of
384 Yield a sequence of revision numbers starting with a child of
385 some rev in revs, i.e., each revision is *not* considered a
385 some rev in revs, i.e., each revision is *not* considered a
386 descendant of itself. Results are ordered by revision number (a
386 descendant of itself. Results are ordered by revision number (a
387 topological sort)."""
387 topological sort)."""
388 first = min(revs)
388 first = min(revs)
389 if first == nullrev:
389 if first == nullrev:
390 for i in self:
390 for i in self:
391 yield i
391 yield i
392 return
392 return
393
393
394 seen = set(revs)
394 seen = set(revs)
395 for i in xrange(first + 1, len(self)):
395 for i in xrange(first + 1, len(self)):
396 for x in self.parentrevs(i):
396 for x in self.parentrevs(i):
397 if x != nullrev and x in seen:
397 if x != nullrev and x in seen:
398 seen.add(i)
398 seen.add(i)
399 yield i
399 yield i
400 break
400 break
401
401
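descendants() relies on revision numbers being a topological order: it scans forward from min(revs) and keeps any rev one of whose parents has already been marked. Replayed on a toy parentrevs table (descendants_of and the history are illustrative):

def descendants_of(parentrevs, nrevs, revs):
    seen = set(revs)
    out = []
    for i in range(min(revs) + 1, nrevs):
        # a rev is a descendant as soon as one parent is already marked
        if any(p != -1 and p in seen for p in parentrevs[i]):
            seen.add(i)
            out.append(i)
    return out

# linear history 0-1-2 plus a branch: rev 3 is a second child of rev 0
parentrevs = {1: (0, -1), 2: (1, -1), 3: (0, -1)}
assert descendants_of(parentrevs, 4, [0]) == [1, 2, 3]
assert descendants_of(parentrevs, 4, [1]) == [2]
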
402 def findmissing(self, common=None, heads=None):
402 def findcommonmissing(self, common=None, heads=None):
403 """Return the ancestors of heads that are not ancestors of common.
403 """Return a tuple of the ancestors of common and the ancestors of heads
404 that are not ancestors of common.
404
405
405 More specifically, return a list of nodes N such that every N
406 More specifically, the second element is a list of nodes N such that
406 satisfies the following constraints:
407 every N satisfies the following constraints:
407
408
408 1. N is an ancestor of some node in 'heads'
409 1. N is an ancestor of some node in 'heads'
409 2. N is not an ancestor of any node in 'common'
410 2. N is not an ancestor of any node in 'common'
410
411
411 The list is sorted by revision number, meaning it is
412 The list is sorted by revision number, meaning it is
412 topologically sorted.
413 topologically sorted.
413
414
414 'heads' and 'common' are both lists of node IDs. If heads is
415 'heads' and 'common' are both lists of node IDs. If heads is
415 not supplied, uses all of the revlog's heads. If common is not
416 not supplied, uses all of the revlog's heads. If common is not
416 supplied, uses nullid."""
417 supplied, uses nullid."""
417 if common is None:
418 if common is None:
418 common = [nullid]
419 common = [nullid]
419 if heads is None:
420 if heads is None:
420 heads = self.heads()
421 heads = self.heads()
421
422
422 common = [self.rev(n) for n in common]
423 common = [self.rev(n) for n in common]
423 heads = [self.rev(n) for n in heads]
424 heads = [self.rev(n) for n in heads]
424
425
425 # we want the ancestors, but inclusive
426 # we want the ancestors, but inclusive
426 has = set(self.ancestors(*common))
427 has = set(self.ancestors(*common))
427 has.add(nullrev)
428 has.add(nullrev)
428 has.update(common)
429 has.update(common)
429
430
430 # take all ancestors from heads that aren't in has
431 # take all ancestors from heads that aren't in has
431 missing = set()
432 missing = set()
432 visit = [r for r in heads if r not in has]
433 visit = [r for r in heads if r not in has]
433 while visit:
434 while visit:
434 r = visit.pop(0)
435 r = visit.pop(0)
435 if r in missing:
436 if r in missing:
436 continue
437 continue
437 else:
438 else:
438 missing.add(r)
439 missing.add(r)
439 for p in self.parentrevs(r):
440 for p in self.parentrevs(r):
440 if p not in has:
441 if p not in has:
441 visit.append(p)
442 visit.append(p)
442 missing = list(missing)
443 missing = list(missing)
443 missing.sort()
444 missing.sort()
444 return [self.node(r) for r in missing]
445 return has, [self.node(r) for r in missing]
446
447 def findmissing(self, common=None, heads=None):
448 """Return the ancestors of heads that are not ancestors of common.
449
450 More specifically, return a list of nodes N such that every N
451 satisfies the following constraints:
452
453 1. N is an ancestor of some node in 'heads'
454 2. N is not an ancestor of any node in 'common'
455
456 The list is sorted by revision number, meaning it is
457 topologically sorted.
458
459 'heads' and 'common' are both lists of node IDs. If heads is
460 not supplied, uses all of the revlog's heads. If common is not
461 supplied, uses nullid."""
462 _common, missing = self.findcommonmissing(common, heads)
463 return missing
445
464
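
In set terms, findcommonmissing() splits the graph into has = inclusive ancestors of 'common' and missing = inclusive ancestors of 'heads' minus has, sorted by revision number; findmissing() simply drops the first half. A toy rendering over revision numbers rather than nodes:

    def ancestorset(parentrevs, rev):
        # inclusive ancestors of one rev
        seen, visit = set(), [rev]
        while visit:
            r = visit.pop()
            if r != -1 and r not in seen:
                seen.add(r)
                visit.extend(parentrevs[r])
        return seen

    dag = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1), 4: (2, 3)}
    has = ancestorset(dag, 2)                     # common = [2]
    missing = sorted(ancestorset(dag, 4) - has)   # heads = [4]
    print(missing)    # [3, 4]: findmissing(common=[2], heads=[4]) in rev form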
446 def nodesbetween(self, roots=None, heads=None):
465 def nodesbetween(self, roots=None, heads=None):
447 """Return a topological path from 'roots' to 'heads'.
466 """Return a topological path from 'roots' to 'heads'.
448
467
449 Return a tuple (nodes, outroots, outheads) where 'nodes' is a
468 Return a tuple (nodes, outroots, outheads) where 'nodes' is a
450 topologically sorted list of all nodes N that satisfy both of
469 topologically sorted list of all nodes N that satisfy both of
451 these constraints:
470 these constraints:
452
471
453 1. N is a descendant of some node in 'roots'
472 1. N is a descendant of some node in 'roots'
454 2. N is an ancestor of some node in 'heads'
473 2. N is an ancestor of some node in 'heads'
455
474
456 Every node is considered to be both a descendant and an ancestor
475 Every node is considered to be both a descendant and an ancestor
457 of itself, so every reachable node in 'roots' and 'heads' will be
476 of itself, so every reachable node in 'roots' and 'heads' will be
458 included in 'nodes'.
477 included in 'nodes'.
459
478
460 'outroots' is the list of reachable nodes in 'roots', i.e., the
479 'outroots' is the list of reachable nodes in 'roots', i.e., the
461 subset of 'roots' that is returned in 'nodes'. Likewise,
480 subset of 'roots' that is returned in 'nodes'. Likewise,
462 'outheads' is the subset of 'heads' that is also in 'nodes'.
481 'outheads' is the subset of 'heads' that is also in 'nodes'.
463
482
464 'roots' and 'heads' are both lists of node IDs. If 'roots' is
483 'roots' and 'heads' are both lists of node IDs. If 'roots' is
465 unspecified, uses nullid as the only root. If 'heads' is
484 unspecified, uses nullid as the only root. If 'heads' is
466 unspecified, uses list of all of the revlog's heads."""
485 unspecified, uses list of all of the revlog's heads."""
467 nonodes = ([], [], [])
486 nonodes = ([], [], [])
468 if roots is not None:
487 if roots is not None:
469 roots = list(roots)
488 roots = list(roots)
470 if not roots:
489 if not roots:
471 return nonodes
490 return nonodes
472 lowestrev = min([self.rev(n) for n in roots])
491 lowestrev = min([self.rev(n) for n in roots])
473 else:
492 else:
474 roots = [nullid] # Everybody's a descendent of nullid
493 roots = [nullid] # Everybody's a descendent of nullid
475 lowestrev = nullrev
494 lowestrev = nullrev
476 if (lowestrev == nullrev) and (heads is None):
495 if (lowestrev == nullrev) and (heads is None):
477 # We want _all_ the nodes!
496 # We want _all_ the nodes!
478 return ([self.node(r) for r in self], [nullid], list(self.heads()))
497 return ([self.node(r) for r in self], [nullid], list(self.heads()))
479 if heads is None:
498 if heads is None:
480 # All nodes are ancestors, so the latest ancestor is the last
499 # All nodes are ancestors, so the latest ancestor is the last
481 # node.
500 # node.
482 highestrev = len(self) - 1
501 highestrev = len(self) - 1
483 # Set ancestors to None to signal that every node is an ancestor.
502 # Set ancestors to None to signal that every node is an ancestor.
484 ancestors = None
503 ancestors = None
485 # Set heads to an empty dictionary for later discovery of heads
504 # Set heads to an empty dictionary for later discovery of heads
486 heads = {}
505 heads = {}
487 else:
506 else:
488 heads = list(heads)
507 heads = list(heads)
489 if not heads:
508 if not heads:
490 return nonodes
509 return nonodes
491 ancestors = set()
510 ancestors = set()
492 # Turn heads into a dictionary so we can remove 'fake' heads.
511 # Turn heads into a dictionary so we can remove 'fake' heads.
493 # Also, later we will be using it to filter out the heads we can't
512 # Also, later we will be using it to filter out the heads we can't
494 # find from roots.
513 # find from roots.
495 heads = dict.fromkeys(heads, 0)
514 heads = dict.fromkeys(heads, 0)
496 # Start at the top and keep marking parents until we're done.
515 # Start at the top and keep marking parents until we're done.
497 nodestotag = set(heads)
516 nodestotag = set(heads)
498 # Remember where the top was so we can use it as a limit later.
517 # Remember where the top was so we can use it as a limit later.
499 highestrev = max([self.rev(n) for n in nodestotag])
518 highestrev = max([self.rev(n) for n in nodestotag])
500 while nodestotag:
519 while nodestotag:
501 # grab a node to tag
520 # grab a node to tag
502 n = nodestotag.pop()
521 n = nodestotag.pop()
503 # Never tag nullid
522 # Never tag nullid
504 if n == nullid:
523 if n == nullid:
505 continue
524 continue
506 # A node's revision number represents its place in a
525 # A node's revision number represents its place in a
507 # topologically sorted list of nodes.
526 # topologically sorted list of nodes.
508 r = self.rev(n)
527 r = self.rev(n)
509 if r >= lowestrev:
528 if r >= lowestrev:
510 if n not in ancestors:
529 if n not in ancestors:
511 # If we are possibly a descendent of one of the roots
530 # If we are possibly a descendent of one of the roots
512 # and we haven't already been marked as an ancestor
531 # and we haven't already been marked as an ancestor
513 ancestors.add(n) # Mark as ancestor
532 ancestors.add(n) # Mark as ancestor
514 # Add non-nullid parents to list of nodes to tag.
533 # Add non-nullid parents to list of nodes to tag.
515 nodestotag.update([p for p in self.parents(n) if
534 nodestotag.update([p for p in self.parents(n) if
516 p != nullid])
535 p != nullid])
517 elif n in heads: # We've seen it before, is it a fake head?
536 elif n in heads: # We've seen it before, is it a fake head?
518 # So it is, real heads should not be the ancestors of
537 # So it is, real heads should not be the ancestors of
519 # any other heads.
538 # any other heads.
520 heads.pop(n)
539 heads.pop(n)
521 if not ancestors:
540 if not ancestors:
522 return nonodes
541 return nonodes
523 # Now that we have our set of ancestors, we want to remove any
542 # Now that we have our set of ancestors, we want to remove any
524 # roots that are not ancestors.
543 # roots that are not ancestors.
525
544
526 # If one of the roots was nullid, everything is included anyway.
545 # If one of the roots was nullid, everything is included anyway.
527 if lowestrev > nullrev:
546 if lowestrev > nullrev:
528 # But, since we weren't, let's recompute the lowest rev to not
547 # But, since we weren't, let's recompute the lowest rev to not
529 # include roots that aren't ancestors.
548 # include roots that aren't ancestors.
530
549
531 # Filter out roots that aren't ancestors of heads
550 # Filter out roots that aren't ancestors of heads
532 roots = [n for n in roots if n in ancestors]
551 roots = [n for n in roots if n in ancestors]
533 # Recompute the lowest revision
552 # Recompute the lowest revision
534 if roots:
553 if roots:
535 lowestrev = min([self.rev(n) for n in roots])
554 lowestrev = min([self.rev(n) for n in roots])
536 else:
555 else:
537 # No more roots? Return empty list
556 # No more roots? Return empty list
538 return nonodes
557 return nonodes
539 else:
558 else:
540 # We are descending from nullid, and don't need to care about
559 # We are descending from nullid, and don't need to care about
541 # any other roots.
560 # any other roots.
542 lowestrev = nullrev
561 lowestrev = nullrev
543 roots = [nullid]
562 roots = [nullid]
544 # Transform our roots list into a set.
563 # Transform our roots list into a set.
545 descendents = set(roots)
564 descendents = set(roots)
546 # Also, keep the original roots so we can filter out roots that aren't
565 # Also, keep the original roots so we can filter out roots that aren't
547 # 'real' roots (i.e. are descended from other roots).
566 # 'real' roots (i.e. are descended from other roots).
548 roots = descendents.copy()
567 roots = descendents.copy()
549 # Our topologically sorted list of output nodes.
568 # Our topologically sorted list of output nodes.
550 orderedout = []
569 orderedout = []
551 # Don't start at nullid since we don't want nullid in our output list,
570 # Don't start at nullid since we don't want nullid in our output list,
552 # and if nullid shows up in descendents, empty parents will look like
571 # and if nullid shows up in descendents, empty parents will look like
553 # they're descendents.
572 # they're descendents.
554 for r in xrange(max(lowestrev, 0), highestrev + 1):
573 for r in xrange(max(lowestrev, 0), highestrev + 1):
555 n = self.node(r)
574 n = self.node(r)
556 isdescendent = False
575 isdescendent = False
557 if lowestrev == nullrev: # Everybody is a descendent of nullid
576 if lowestrev == nullrev: # Everybody is a descendent of nullid
558 isdescendent = True
577 isdescendent = True
559 elif n in descendents:
578 elif n in descendents:
560 # n is already a descendent
579 # n is already a descendent
561 isdescendent = True
580 isdescendent = True
562 # This check only needs to be done here because all the roots
581 # This check only needs to be done here because all the roots
563 # will start being marked as descendents before the loop.
582 # will start being marked as descendents before the loop.
564 if n in roots:
583 if n in roots:
565 # If n was a root, check if it's a 'real' root.
584 # If n was a root, check if it's a 'real' root.
566 p = tuple(self.parents(n))
585 p = tuple(self.parents(n))
567 # If any of its parents are descendents, it's not a root.
586 # If any of its parents are descendents, it's not a root.
568 if (p[0] in descendents) or (p[1] in descendents):
587 if (p[0] in descendents) or (p[1] in descendents):
569 roots.remove(n)
588 roots.remove(n)
570 else:
589 else:
571 p = tuple(self.parents(n))
590 p = tuple(self.parents(n))
572 # A node is a descendent if either of its parents are
591 # A node is a descendent if either of its parents are
573 # descendents. (We seeded the descendents list with the roots
592 # descendents. (We seeded the descendents list with the roots
574 # up there, remember?)
593 # up there, remember?)
575 if (p[0] in descendents) or (p[1] in descendents):
594 if (p[0] in descendents) or (p[1] in descendents):
576 descendents.add(n)
595 descendents.add(n)
577 isdescendent = True
596 isdescendent = True
578 if isdescendent and ((ancestors is None) or (n in ancestors)):
597 if isdescendent and ((ancestors is None) or (n in ancestors)):
579 # Only include nodes that are both descendents and ancestors.
598 # Only include nodes that are both descendents and ancestors.
580 orderedout.append(n)
599 orderedout.append(n)
581 if (ancestors is not None) and (n in heads):
600 if (ancestors is not None) and (n in heads):
582 # We're trying to figure out which heads are reachable
601 # We're trying to figure out which heads are reachable
583 # from roots.
602 # from roots.
584 # Mark this head as having been reached
603 # Mark this head as having been reached
585 heads[n] = 1
604 heads[n] = 1
586 elif ancestors is None:
605 elif ancestors is None:
587 # Otherwise, we're trying to discover the heads.
606 # Otherwise, we're trying to discover the heads.
588 # Assume this is a head because if it isn't, the next step
607 # Assume this is a head because if it isn't, the next step
589 # will eventually remove it.
608 # will eventually remove it.
590 heads[n] = 1
609 heads[n] = 1
591 # But, obviously its parents aren't.
610 # But, obviously its parents aren't.
592 for p in self.parents(n):
611 for p in self.parents(n):
593 heads.pop(p, None)
612 heads.pop(p, None)
594 heads = [n for n in heads.iterkeys() if heads[n] != 0]
613 heads = [n for n in heads.iterkeys() if heads[n] != 0]
595 roots = list(roots)
614 roots = list(roots)
596 assert orderedout
615 assert orderedout
597 assert roots
616 assert roots
598 assert heads
617 assert heads
599 return (orderedout, roots, heads)
618 return (orderedout, roots, heads)
600
619
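
The nodesbetween() contract reduced to sets, same toy conventions as the sketches above: 'nodes' is the intersection of the inclusive descendants of 'roots' with the inclusive ancestors of 'heads'.

    def inclusive_ancestors(parentrevs, revs):
        seen, visit = set(), list(revs)
        while visit:
            r = visit.pop()
            if r != -1 and r not in seen:
                seen.add(r)
                visit.extend(parentrevs[r])
        return seen

    def inclusive_descendants(parentrevs, revs):
        seen = set(revs)
        for i in range(min(revs) + 1, max(parentrevs) + 1):
            if any(p in seen for p in parentrevs[i]):
                seen.add(i)
        return seen

    dag = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1), 4: (2, 3)}
    nodes = sorted(inclusive_descendants(dag, [1]) &
                   inclusive_ancestors(dag, [4]))
    print(nodes)    # [1, 2, 3, 4], i.e. nodesbetween([1], [4]) in rev form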
601 def heads(self, start=None, stop=None):
620 def heads(self, start=None, stop=None):
602 """return the list of all nodes that have no children
621 """return the list of all nodes that have no children
603
622
604 if start is specified, only heads that are descendants of
623 if start is specified, only heads that are descendants of
605 start will be returned
624 start will be returned
606 if stop is specified, it will consider all the revs from stop
625 if stop is specified, it will consider all the revs from stop
607 as if they had no children
626 as if they had no children
608 """
627 """
609 if start is None and stop is None:
628 if start is None and stop is None:
610 count = len(self)
629 count = len(self)
611 if not count:
630 if not count:
612 return [nullid]
631 return [nullid]
613 ishead = [1] * (count + 1)
632 ishead = [1] * (count + 1)
614 index = self.index
633 index = self.index
615 for r in xrange(count):
634 for r in xrange(count):
616 e = index[r]
635 e = index[r]
617 ishead[e[5]] = ishead[e[6]] = 0
636 ishead[e[5]] = ishead[e[6]] = 0
618 return [self.node(r) for r in xrange(count) if ishead[r]]
637 return [self.node(r) for r in xrange(count) if ishead[r]]
619
638
620 if start is None:
639 if start is None:
621 start = nullid
640 start = nullid
622 if stop is None:
641 if stop is None:
623 stop = []
642 stop = []
624 stoprevs = set([self.rev(n) for n in stop])
643 stoprevs = set([self.rev(n) for n in stop])
625 startrev = self.rev(start)
644 startrev = self.rev(start)
626 reachable = set((startrev,))
645 reachable = set((startrev,))
627 heads = set((startrev,))
646 heads = set((startrev,))
628
647
629 parentrevs = self.parentrevs
648 parentrevs = self.parentrevs
630 for r in xrange(startrev + 1, len(self)):
649 for r in xrange(startrev + 1, len(self)):
631 for p in parentrevs(r):
650 for p in parentrevs(r):
632 if p in reachable:
651 if p in reachable:
633 if r not in stoprevs:
652 if r not in stoprevs:
634 reachable.add(r)
653 reachable.add(r)
635 heads.add(r)
654 heads.add(r)
636 if p in heads and p not in stoprevs:
655 if p in heads and p not in stoprevs:
637 heads.remove(p)
656 heads.remove(p)
638
657
639 return [self.node(r) for r in heads]
658 return [self.node(r) for r in heads]
640
659
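
The no-argument fast path of heads() in miniature: every rev starts flagged as a head and loses the flag as soon as it appears as someone's parent (the real loop reads the parents straight out of index entries e[5] and e[6]).

    dag = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    ishead = dict.fromkeys(dag, True)
    for r in dag:
        for p in dag[r]:
            if p != -1:
                ishead[p] = False
    print(sorted(r for r in dag if ishead[r]))    # [2, 3]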
641 def children(self, node):
660 def children(self, node):
642 """find the children of a given node"""
661 """find the children of a given node"""
643 c = []
662 c = []
644 p = self.rev(node)
663 p = self.rev(node)
645 for r in range(p + 1, len(self)):
664 for r in range(p + 1, len(self)):
646 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
665 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
647 if prevs:
666 if prevs:
648 for pr in prevs:
667 for pr in prevs:
649 if pr == p:
668 if pr == p:
650 c.append(self.node(r))
669 c.append(self.node(r))
651 elif p == nullrev:
670 elif p == nullrev:
652 c.append(self.node(r))
671 c.append(self.node(r))
653 return c
672 return c
654
673
655 def descendant(self, start, end):
674 def descendant(self, start, end):
656 if start == nullrev:
675 if start == nullrev:
657 return True
676 return True
658 for i in self.descendants(start):
677 for i in self.descendants(start):
659 if i == end:
678 if i == end:
660 return True
679 return True
661 elif i > end:
680 elif i > end:
662 break
681 break
663 return False
682 return False
664
683
665 def ancestor(self, a, b):
684 def ancestor(self, a, b):
666 """calculate the least common ancestor of nodes a and b"""
685 """calculate the least common ancestor of nodes a and b"""
667
686
668 # fast path, check if it is a descendant
687 # fast path, check if it is a descendant
669 a, b = self.rev(a), self.rev(b)
688 a, b = self.rev(a), self.rev(b)
670 start, end = sorted((a, b))
689 start, end = sorted((a, b))
671 if self.descendant(start, end):
690 if self.descendant(start, end):
672 return self.node(start)
691 return self.node(start)
673
692
674 def parents(rev):
693 def parents(rev):
675 return [p for p in self.parentrevs(rev) if p != nullrev]
694 return [p for p in self.parentrevs(rev) if p != nullrev]
676
695
677 c = ancestor.ancestor(a, b, parents)
696 c = ancestor.ancestor(a, b, parents)
678 if c is None:
697 if c is None:
679 return nullid
698 return nullid
680
699
681 return self.node(c)
700 return self.node(c)
682
701
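
A crude stand-in for the general case that ancestor.ancestor() handles: intersect the two inclusive ancestor sets and take the greatest revision number. Topological ordering guarantees that pick is a maximal common ancestor, though the real algorithm (which weighs depth) may choose a different one when several candidates exist.

    def naive_ancestor(parentrevs, a, b):
        def anc(r):
            seen, visit = set(), [r]
            while visit:
                x = visit.pop()
                if x != -1 and x not in seen:
                    seen.add(x)
                    visit.extend(parentrevs[x])
            return seen
        common = anc(a) & anc(b)
        return max(common) if common else -1

    dag = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    print(naive_ancestor(dag, 2, 3))    # 1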
683 def _match(self, id):
702 def _match(self, id):
684 if isinstance(id, (long, int)):
703 if isinstance(id, (long, int)):
685 # rev
704 # rev
686 return self.node(id)
705 return self.node(id)
687 if len(id) == 20:
706 if len(id) == 20:
688 # possibly a binary node
707 # possibly a binary node
689 # odds of a binary node being all hex in ASCII are 1 in 10**25
708 # odds of a binary node being all hex in ASCII are 1 in 10**25
690 try:
709 try:
691 node = id
710 node = id
692 self.rev(node) # quick search the index
711 self.rev(node) # quick search the index
693 return node
712 return node
694 except LookupError:
713 except LookupError:
695 pass # may be partial hex id
714 pass # may be partial hex id
696 try:
715 try:
697 # str(rev)
716 # str(rev)
698 rev = int(id)
717 rev = int(id)
699 if str(rev) != id:
718 if str(rev) != id:
700 raise ValueError
719 raise ValueError
701 if rev < 0:
720 if rev < 0:
702 rev = len(self) + rev
721 rev = len(self) + rev
703 if rev < 0 or rev >= len(self):
722 if rev < 0 or rev >= len(self):
704 raise ValueError
723 raise ValueError
705 return self.node(rev)
724 return self.node(rev)
706 except (ValueError, OverflowError):
725 except (ValueError, OverflowError):
707 pass
726 pass
708 if len(id) == 40:
727 if len(id) == 40:
709 try:
728 try:
710 # a full hex nodeid?
729 # a full hex nodeid?
711 node = bin(id)
730 node = bin(id)
712 self.rev(node)
731 self.rev(node)
713 return node
732 return node
714 except (TypeError, LookupError):
733 except (TypeError, LookupError):
715 pass
734 pass
716
735
717 def _partialmatch(self, id):
736 def _partialmatch(self, id):
718 if id in self._pcache:
737 if id in self._pcache:
719 return self._pcache[id]
738 return self._pcache[id]
720
739
721 if len(id) < 40:
740 if len(id) < 40:
722 try:
741 try:
723 # hex(node)[:...]
742 # hex(node)[:...]
724 l = len(id) // 2 # grab an even number of digits
743 l = len(id) // 2 # grab an even number of digits
725 prefix = bin(id[:l * 2])
744 prefix = bin(id[:l * 2])
726 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
745 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
727 nl = [n for n in nl if hex(n).startswith(id)]
746 nl = [n for n in nl if hex(n).startswith(id)]
728 if len(nl) > 0:
747 if len(nl) > 0:
729 if len(nl) == 1:
748 if len(nl) == 1:
730 self._pcache[id] = nl[0]
749 self._pcache[id] = nl[0]
731 return nl[0]
750 return nl[0]
732 raise LookupError(id, self.indexfile,
751 raise LookupError(id, self.indexfile,
733 _('ambiguous identifier'))
752 _('ambiguous identifier'))
734 return None
753 return None
735 except TypeError:
754 except TypeError:
736 pass
755 pass
737
756
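
What _partialmatch() does with a short hex prefix, as a self-contained sketch; the two 20-byte nodes are made up and LookupError is used loosely here (the real call also carries the index filename).

    import binascii

    index = [binascii.unhexlify('a1b2' + '00' * 18),
             binascii.unhexlify('a1c3' + '00' * 18)]

    def partialmatch(hexprefix):
        l = len(hexprefix) // 2                 # grab an even number of digits
        prefix = binascii.unhexlify(hexprefix[:l * 2])
        nl = [n for n in index if n.startswith(prefix)]
        nl = [n for n in nl
              if binascii.hexlify(n).decode().startswith(hexprefix)]
        if len(nl) == 1:
            return nl[0]
        if len(nl) > 1:
            raise LookupError('ambiguous identifier')
        return None

    print(binascii.hexlify(partialmatch('a1b')).decode()[:8])   # a1b20000
    try:
        partialmatch('a1')                      # both nodes start with a1
    except LookupError:
        print('ambiguous identifier')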
738 def lookup(self, id):
757 def lookup(self, id):
739 """locate a node based on:
758 """locate a node based on:
740 - revision number or str(revision number)
759 - revision number or str(revision number)
741 - nodeid or subset of hex nodeid
760 - nodeid or subset of hex nodeid
742 """
761 """
743 n = self._match(id)
762 n = self._match(id)
744 if n is not None:
763 if n is not None:
745 return n
764 return n
746 n = self._partialmatch(id)
765 n = self._partialmatch(id)
747 if n:
766 if n:
748 return n
767 return n
749
768
750 raise LookupError(id, self.indexfile, _('no match found'))
769 raise LookupError(id, self.indexfile, _('no match found'))
751
770
752 def cmp(self, node, text):
771 def cmp(self, node, text):
753 """compare text with a given file revision
772 """compare text with a given file revision
754
773
755 returns True if text is different from what is stored.
774 returns True if text is different from what is stored.
756 """
775 """
757 p1, p2 = self.parents(node)
776 p1, p2 = self.parents(node)
758 return hash(text, p1, p2) != node
777 return hash(text, p1, p2) != node
759
778
760 def _addchunk(self, offset, data):
779 def _addchunk(self, offset, data):
761 o, d = self._chunkcache
780 o, d = self._chunkcache
762 # try to add to existing cache
781 # try to add to existing cache
763 if o + len(d) == offset and len(d) + len(data) < _chunksize:
782 if o + len(d) == offset and len(d) + len(data) < _chunksize:
764 self._chunkcache = o, d + data
783 self._chunkcache = o, d + data
765 else:
784 else:
766 self._chunkcache = offset, data
785 self._chunkcache = offset, data
767
786
768 def _loadchunk(self, offset, length):
787 def _loadchunk(self, offset, length):
769 if self._inline:
788 if self._inline:
770 df = self.opener(self.indexfile)
789 df = self.opener(self.indexfile)
771 else:
790 else:
772 df = self.opener(self.datafile)
791 df = self.opener(self.datafile)
773
792
774 readahead = max(65536, length)
793 readahead = max(65536, length)
775 df.seek(offset)
794 df.seek(offset)
776 d = df.read(readahead)
795 d = df.read(readahead)
777 self._addchunk(offset, d)
796 self._addchunk(offset, d)
778 if readahead > length:
797 if readahead > length:
779 return d[:length]
798 return d[:length]
780 return d
799 return d
781
800
782 def _getchunk(self, offset, length):
801 def _getchunk(self, offset, length):
783 o, d = self._chunkcache
802 o, d = self._chunkcache
784 l = len(d)
803 l = len(d)
785
804
786 # is it in the cache?
805 # is it in the cache?
787 cachestart = offset - o
806 cachestart = offset - o
788 cacheend = cachestart + length
807 cacheend = cachestart + length
789 if cachestart >= 0 and cacheend <= l:
808 if cachestart >= 0 and cacheend <= l:
790 if cachestart == 0 and cacheend == l:
809 if cachestart == 0 and cacheend == l:
791 return d # avoid a copy
810 return d # avoid a copy
792 return d[cachestart:cacheend]
811 return d[cachestart:cacheend]
793
812
794 return self._loadchunk(offset, length)
813 return self._loadchunk(offset, length)
795
814
796 def _chunkraw(self, startrev, endrev):
815 def _chunkraw(self, startrev, endrev):
797 start = self.start(startrev)
816 start = self.start(startrev)
798 length = self.end(endrev) - start
817 length = self.end(endrev) - start
799 if self._inline:
818 if self._inline:
800 start += (startrev + 1) * self._io.size
819 start += (startrev + 1) * self._io.size
801 return self._getchunk(start, length)
820 return self._getchunk(start, length)
802
821
803 def _chunk(self, rev):
822 def _chunk(self, rev):
804 return decompress(self._chunkraw(rev, rev))
823 return decompress(self._chunkraw(rev, rev))
805
824
806 def _chunkclear(self):
825 def _chunkclear(self):
807 self._chunkcache = (0, '')
826 self._chunkcache = (0, '')
808
827
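
_addchunk()/_getchunk()/_loadchunk() together maintain a single sliding read-ahead window over the data file. A minimal in-memory sketch of that policy, with a bytes object standing in for the file and the 64k read-ahead kept; the _chunksize bound is assumed to be a module-level constant in the real code.

    class ChunkCache(object):
        def __init__(self, data, chunksize=65536 * 2):
            self._data = data              # stands in for .d/.i file contents
            self._cache = (0, b'')         # (offset, cached bytes)
            self._chunksize = chunksize

        def read(self, offset, length):
            o, d = self._cache
            if o <= offset and offset + length <= o + len(d):
                return d[offset - o:offset - o + length]        # cache hit
            chunk = self._data[offset:offset + max(65536, length)]
            if o + len(d) == offset and len(d) + len(chunk) < self._chunksize:
                self._cache = (o, d + chunk)                    # grow the window
            else:
                self._cache = (offset, chunk)                   # restart it
            return chunk[:length]

    c = ChunkCache(b'x' * 1000)
    print(len(c.read(0, 10)))            # 10; the whole toy file is now cached
    print(c.read(5, 10) == b'x' * 10)    # True; served from the cached window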
809 def deltaparent(self, rev):
828 def deltaparent(self, rev):
810 """return previous revision or parentrev according to flags"""
829 """return previous revision or parentrev according to flags"""
811 if self.flags(rev) & REVIDX_PARENTDELTA:
830 if self.flags(rev) & REVIDX_PARENTDELTA:
812 return self.parentrevs(rev)[0]
831 return self.parentrevs(rev)[0]
813 else:
832 else:
814 return rev - 1
833 return rev - 1
815
834
816 def revdiff(self, rev1, rev2):
835 def revdiff(self, rev1, rev2):
817 """return or calculate a delta between two revisions"""
836 """return or calculate a delta between two revisions"""
818 if self.base(rev2) != rev2 and self.deltaparent(rev2) == rev1:
837 if self.base(rev2) != rev2 and self.deltaparent(rev2) == rev1:
819 return self._chunk(rev2)
838 return self._chunk(rev2)
820
839
821 return mdiff.textdiff(self.revision(self.node(rev1)),
840 return mdiff.textdiff(self.revision(self.node(rev1)),
822 self.revision(self.node(rev2)))
841 self.revision(self.node(rev2)))
823
842
824 def revision(self, node):
843 def revision(self, node):
825 """return an uncompressed revision of a given node"""
844 """return an uncompressed revision of a given node"""
826 cachedrev = None
845 cachedrev = None
827 if node == nullid:
846 if node == nullid:
828 return ""
847 return ""
829 if self._cache:
848 if self._cache:
830 if self._cache[0] == node:
849 if self._cache[0] == node:
831 return self._cache[2]
850 return self._cache[2]
832 cachedrev = self._cache[1]
851 cachedrev = self._cache[1]
833
852
834 # look up what we need to read
853 # look up what we need to read
835 text = None
854 text = None
836 rev = self.rev(node)
855 rev = self.rev(node)
837 base = self.base(rev)
856 base = self.base(rev)
838
857
839 # check rev flags
858 # check rev flags
840 if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
859 if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
841 raise RevlogError(_('incompatible revision flag %x') %
860 raise RevlogError(_('incompatible revision flag %x') %
842 (self.flags(rev) & ~REVIDX_KNOWN_FLAGS))
861 (self.flags(rev) & ~REVIDX_KNOWN_FLAGS))
843
862
844 # build delta chain
863 # build delta chain
845 chain = []
864 chain = []
846 index = self.index # for performance
865 index = self.index # for performance
847 iterrev = rev
866 iterrev = rev
848 e = index[iterrev]
867 e = index[iterrev]
849 while iterrev != base and iterrev != cachedrev:
868 while iterrev != base and iterrev != cachedrev:
850 chain.append(iterrev)
869 chain.append(iterrev)
851 if e[0] & REVIDX_PARENTDELTA:
870 if e[0] & REVIDX_PARENTDELTA:
852 iterrev = e[5]
871 iterrev = e[5]
853 else:
872 else:
854 iterrev -= 1
873 iterrev -= 1
855 e = index[iterrev]
874 e = index[iterrev]
856 chain.reverse()
875 chain.reverse()
857 base = iterrev
876 base = iterrev
858
877
859 if iterrev == cachedrev:
878 if iterrev == cachedrev:
860 # cache hit
879 # cache hit
861 text = self._cache[2]
880 text = self._cache[2]
862
881
863 # drop cache to save memory
882 # drop cache to save memory
864 self._cache = None
883 self._cache = None
865
884
866 self._chunkraw(base, rev)
885 self._chunkraw(base, rev)
867 if text is None:
886 if text is None:
868 text = self._chunk(base)
887 text = self._chunk(base)
869
888
870 bins = [self._chunk(r) for r in chain]
889 bins = [self._chunk(r) for r in chain]
871 text = mdiff.patches(text, bins)
890 text = mdiff.patches(text, bins)
872
891
873 text = self._checkhash(text, node, rev)
892 text = self._checkhash(text, node, rev)
874
893
875 self._cache = (node, rev, text)
894 self._cache = (node, rev, text)
876 return text
895 return text
877
896
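
The chain-building loop at the top of revision(), factored out as a toy: deltaparent is any callable giving the stored delta base of a rev (rev - 1 in the plain case, p1 under parentdelta), and 'base' is the first rev of the chain kept as full text.

    def deltachain(deltaparent, base, rev, cachedrev=None):
        chain = []
        while rev != base and rev != cachedrev:
            chain.append(rev)
            rev = deltaparent(rev)
        chain.reverse()
        return rev, chain   # start here, then apply these deltas in order

    print(deltachain(lambda r: r - 1, 0, 4))                 # (0, [1, 2, 3, 4])
    print(deltachain(lambda r: r - 1, 0, 4, cachedrev=2))    # (2, [3, 4])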
878 def _checkhash(self, text, node, rev):
897 def _checkhash(self, text, node, rev):
879 p1, p2 = self.parents(node)
898 p1, p2 = self.parents(node)
880 if (node != hash(text, p1, p2) and
899 if (node != hash(text, p1, p2) and
881 not (self.flags(rev) & REVIDX_PUNCHED_FLAG)):
900 not (self.flags(rev) & REVIDX_PUNCHED_FLAG)):
882 raise RevlogError(_("integrity check failed on %s:%d")
901 raise RevlogError(_("integrity check failed on %s:%d")
883 % (self.indexfile, rev))
902 % (self.indexfile, rev))
884 return text
903 return text
885
904
886 def checkinlinesize(self, tr, fp=None):
905 def checkinlinesize(self, tr, fp=None):
887 if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
906 if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
888 return
907 return
889
908
890 trinfo = tr.find(self.indexfile)
909 trinfo = tr.find(self.indexfile)
891 if trinfo is None:
910 if trinfo is None:
892 raise RevlogError(_("%s not found in the transaction")
911 raise RevlogError(_("%s not found in the transaction")
893 % self.indexfile)
912 % self.indexfile)
894
913
895 trindex = trinfo[2]
914 trindex = trinfo[2]
896 dataoff = self.start(trindex)
915 dataoff = self.start(trindex)
897
916
898 tr.add(self.datafile, dataoff)
917 tr.add(self.datafile, dataoff)
899
918
900 if fp:
919 if fp:
901 fp.flush()
920 fp.flush()
902 fp.close()
921 fp.close()
903
922
904 df = self.opener(self.datafile, 'w')
923 df = self.opener(self.datafile, 'w')
905 try:
924 try:
906 for r in self:
925 for r in self:
907 df.write(self._chunkraw(r, r))
926 df.write(self._chunkraw(r, r))
908 finally:
927 finally:
909 df.close()
928 df.close()
910
929
911 fp = self.opener(self.indexfile, 'w', atomictemp=True)
930 fp = self.opener(self.indexfile, 'w', atomictemp=True)
912 self.version &= ~(REVLOGNGINLINEDATA)
931 self.version &= ~(REVLOGNGINLINEDATA)
913 self._inline = False
932 self._inline = False
914 for i in self:
933 for i in self:
915 e = self._io.packentry(self.index[i], self.node, self.version, i)
934 e = self._io.packentry(self.index[i], self.node, self.version, i)
916 fp.write(e)
935 fp.write(e)
917
936
918 # if we don't call rename, the temp file will never replace the
937 # if we don't call rename, the temp file will never replace the
919 # real index
938 # real index
920 fp.rename()
939 fp.rename()
921
940
922 tr.replace(self.indexfile, trindex * self._io.size)
941 tr.replace(self.indexfile, trindex * self._io.size)
923 self._chunkclear()
942 self._chunkclear()
924
943
925 def addrevision(self, text, transaction, link, p1, p2, cachedelta=None):
944 def addrevision(self, text, transaction, link, p1, p2, cachedelta=None):
926 """add a revision to the log
945 """add a revision to the log
927
946
928 text - the revision data to add
947 text - the revision data to add
929 transaction - the transaction object used for rollback
948 transaction - the transaction object used for rollback
930 link - the linkrev data to add
949 link - the linkrev data to add
931 p1, p2 - the parent nodeids of the revision
950 p1, p2 - the parent nodeids of the revision
932 cachedelta - an optional precomputed delta
951 cachedelta - an optional precomputed delta
933 """
952 """
934 node = hash(text, p1, p2)
953 node = hash(text, p1, p2)
935 if (node in self.nodemap and
954 if (node in self.nodemap and
936 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
955 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
937 return node
956 return node
938
957
939 dfh = None
958 dfh = None
940 if not self._inline:
959 if not self._inline:
941 dfh = self.opener(self.datafile, "a")
960 dfh = self.opener(self.datafile, "a")
942 ifh = self.opener(self.indexfile, "a+")
961 ifh = self.opener(self.indexfile, "a+")
943 try:
962 try:
944 return self._addrevision(node, text, transaction, link, p1, p2,
963 return self._addrevision(node, text, transaction, link, p1, p2,
945 cachedelta, ifh, dfh)
964 cachedelta, ifh, dfh)
946 finally:
965 finally:
947 if dfh:
966 if dfh:
948 dfh.close()
967 dfh.close()
949 ifh.close()
968 ifh.close()
950
969
951 def _addrevision(self, node, text, transaction, link, p1, p2,
970 def _addrevision(self, node, text, transaction, link, p1, p2,
952 cachedelta, ifh, dfh):
971 cachedelta, ifh, dfh):
953
972
954 btext = [text]
973 btext = [text]
955 def buildtext():
974 def buildtext():
956 if btext[0] is not None:
975 if btext[0] is not None:
957 return btext[0]
976 return btext[0]
958 # flush any pending writes here so we can read it in revision
977 # flush any pending writes here so we can read it in revision
959 if dfh:
978 if dfh:
960 dfh.flush()
979 dfh.flush()
961 ifh.flush()
980 ifh.flush()
962 basetext = self.revision(self.node(cachedelta[0]))
981 basetext = self.revision(self.node(cachedelta[0]))
963 btext[0] = mdiff.patch(basetext, cachedelta[1])
982 btext[0] = mdiff.patch(basetext, cachedelta[1])
964 chk = hash(btext[0], p1, p2)
983 chk = hash(btext[0], p1, p2)
965 if chk != node:
984 if chk != node:
966 raise RevlogError(_("consistency error in delta"))
985 raise RevlogError(_("consistency error in delta"))
967 return btext[0]
986 return btext[0]
968
987
969 def builddelta(rev):
988 def builddelta(rev):
970 # can we use the cached delta?
989 # can we use the cached delta?
971 if cachedelta and cachedelta[0] == rev:
990 if cachedelta and cachedelta[0] == rev:
972 delta = cachedelta[1]
991 delta = cachedelta[1]
973 else:
992 else:
974 t = buildtext()
993 t = buildtext()
975 ptext = self.revision(self.node(rev))
994 ptext = self.revision(self.node(rev))
976 delta = mdiff.textdiff(ptext, t)
995 delta = mdiff.textdiff(ptext, t)
977 data = compress(delta)
996 data = compress(delta)
978 l = len(data[1]) + len(data[0])
997 l = len(data[1]) + len(data[0])
979 base = self.base(rev)
998 base = self.base(rev)
980 dist = l + offset - self.start(base)
999 dist = l + offset - self.start(base)
981 return dist, l, data, base
1000 return dist, l, data, base
982
1001
983 curr = len(self)
1002 curr = len(self)
984 prev = curr - 1
1003 prev = curr - 1
985 base = curr
1004 base = curr
986 offset = self.end(prev)
1005 offset = self.end(prev)
987 flags = 0
1006 flags = 0
988 d = None
1007 d = None
989 p1r, p2r = self.rev(p1), self.rev(p2)
1008 p1r, p2r = self.rev(p1), self.rev(p2)
990
1009
991 # should we try to build a delta?
1010 # should we try to build a delta?
992 if prev != nullrev:
1011 if prev != nullrev:
993 d = builddelta(prev)
1012 d = builddelta(prev)
994 if self._parentdelta and prev != p1r:
1013 if self._parentdelta and prev != p1r:
995 d2 = builddelta(p1r)
1014 d2 = builddelta(p1r)
996 if d2 < d:
1015 if d2 < d:
997 d = d2
1016 d = d2
998 flags = REVIDX_PARENTDELTA
1017 flags = REVIDX_PARENTDELTA
999 dist, l, data, base = d
1018 dist, l, data, base = d
1000
1019
1001 # full versions are inserted when the needed deltas
1020 # full versions are inserted when the needed deltas
1002 # become comparable to the uncompressed text
1021 # become comparable to the uncompressed text
1003 # or the base revision is punched
1022 # or the base revision is punched
1004 if text is None:
1023 if text is None:
1005 textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]),
1024 textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]),
1006 cachedelta[1])
1025 cachedelta[1])
1007 else:
1026 else:
1008 textlen = len(text)
1027 textlen = len(text)
1009 if (d is None or dist > textlen * 2 or
1028 if (d is None or dist > textlen * 2 or
1010 (self.flags(base) & REVIDX_PUNCHED_FLAG)):
1029 (self.flags(base) & REVIDX_PUNCHED_FLAG)):
1011 text = buildtext()
1030 text = buildtext()
1012 data = compress(text)
1031 data = compress(text)
1013 l = len(data[1]) + len(data[0])
1032 l = len(data[1]) + len(data[0])
1014 base = curr
1033 base = curr
1015
1034
1016 e = (offset_type(offset, flags), l, textlen,
1035 e = (offset_type(offset, flags), l, textlen,
1017 base, link, p1r, p2r, node)
1036 base, link, p1r, p2r, node)
1018 self.index.insert(-1, e)
1037 self.index.insert(-1, e)
1019 self.nodemap[node] = curr
1038 self.nodemap[node] = curr
1020
1039
1021 entry = self._io.packentry(e, self.node, self.version, curr)
1040 entry = self._io.packentry(e, self.node, self.version, curr)
1022 if not self._inline:
1041 if not self._inline:
1023 transaction.add(self.datafile, offset)
1042 transaction.add(self.datafile, offset)
1024 transaction.add(self.indexfile, curr * len(entry))
1043 transaction.add(self.indexfile, curr * len(entry))
1025 if data[0]:
1044 if data[0]:
1026 dfh.write(data[0])
1045 dfh.write(data[0])
1027 dfh.write(data[1])
1046 dfh.write(data[1])
1028 dfh.flush()
1047 dfh.flush()
1029 ifh.write(entry)
1048 ifh.write(entry)
1030 else:
1049 else:
1031 offset += curr * self._io.size
1050 offset += curr * self._io.size
1032 transaction.add(self.indexfile, offset, curr)
1051 transaction.add(self.indexfile, offset, curr)
1033 ifh.write(entry)
1052 ifh.write(entry)
1034 ifh.write(data[0])
1053 ifh.write(data[0])
1035 ifh.write(data[1])
1054 ifh.write(data[1])
1036 self.checkinlinesize(transaction, ifh)
1055 self.checkinlinesize(transaction, ifh)
1037
1056
1038 if type(text) == str: # only accept immutable objects
1057 if type(text) == str: # only accept immutable objects
1039 self._cache = (node, curr, text)
1058 self._cache = (node, curr, text)
1040 return node
1059 return node
1041
1060
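
Two easy-to-miss decisions above: builddelta() returns (dist, size, data, base) tuples, so 'if d2 < d' prefers the parent delta whenever its chain distance (then its size) is smaller, and a full text replaces any delta once dist exceeds twice the text length. Illustrative numbers only:

    d_prev = (2500, 40, None, 0)     # delta against the previous rev
    d_p1 = (900, 55, None, 0)        # delta against the first parent
    dist, l, data, base = min(d_prev, d_p1)
    print(dist)                      # 900 -> the parent delta wins

    textlen = 1000
    print('full text' if dist > textlen * 2 else 'delta')    # delta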
1042 def group(self, nodelist, lookup, infocollect=None, fullrev=False):
1061 def group(self, nodelist, lookup, infocollect=None, fullrev=False):
1043 """Calculate a delta group, yielding a sequence of changegroup chunks
1062 """Calculate a delta group, yielding a sequence of changegroup chunks
1044 (strings).
1063 (strings).
1045
1064
1046 Given a list of changeset revs, return a set of deltas and
1065 Given a list of changeset revs, return a set of deltas and
1047 metadata corresponding to nodes. The first delta is
1066 metadata corresponding to nodes. The first delta is
1048 first parent(nodelist[0]) -> nodelist[0], the receiver is
1067 first parent(nodelist[0]) -> nodelist[0], the receiver is
1049 guaranteed to have this parent as it has all history before
1068 guaranteed to have this parent as it has all history before
1050 these changesets. In the case where firstparent is nullrev, the
1069 these changesets. In the case where firstparent is nullrev, the
1051 changegroup starts with a full revision.
1070 changegroup starts with a full revision.
1052 fullrev forces the insertion of the full revision, necessary
1071 fullrev forces the insertion of the full revision, necessary
1053 in the case of shallow clones where the first parent might
1072 in the case of shallow clones where the first parent might
1054 not exist at the receiver.
1073 not exist at the receiver.
1055 """
1074 """
1056
1075
1057 revs = [self.rev(n) for n in nodelist]
1076 revs = [self.rev(n) for n in nodelist]
1058
1077
1059 # if we don't have any revisions touched by these changesets, bail
1078 # if we don't have any revisions touched by these changesets, bail
1060 if not revs:
1079 if not revs:
1061 yield changegroup.closechunk()
1080 yield changegroup.closechunk()
1062 return
1081 return
1063
1082
1064 # add the parent of the first rev
1083 # add the parent of the first rev
1065 p = self.parentrevs(revs[0])[0]
1084 p = self.parentrevs(revs[0])[0]
1066 revs.insert(0, p)
1085 revs.insert(0, p)
1067 if p == nullrev:
1086 if p == nullrev:
1068 fullrev = True
1087 fullrev = True
1069
1088
1070 # build deltas
1089 # build deltas
1071 for d in xrange(len(revs) - 1):
1090 for d in xrange(len(revs) - 1):
1072 a, b = revs[d], revs[d + 1]
1091 a, b = revs[d], revs[d + 1]
1073 nb = self.node(b)
1092 nb = self.node(b)
1074
1093
1075 if infocollect is not None:
1094 if infocollect is not None:
1076 infocollect(nb)
1095 infocollect(nb)
1077
1096
1078 p = self.parents(nb)
1097 p = self.parents(nb)
1079 meta = nb + p[0] + p[1] + lookup(nb)
1098 meta = nb + p[0] + p[1] + lookup(nb)
1080 if fullrev:
1099 if fullrev:
1081 d = self.revision(nb)
1100 d = self.revision(nb)
1082 meta += mdiff.trivialdiffheader(len(d))
1101 meta += mdiff.trivialdiffheader(len(d))
1083 fullrev = False
1102 fullrev = False
1084 else:
1103 else:
1085 d = self.revdiff(a, b)
1104 d = self.revdiff(a, b)
1086 yield changegroup.chunkheader(len(meta) + len(d))
1105 yield changegroup.chunkheader(len(meta) + len(d))
1087 yield meta
1106 yield meta
1088 yield d
1107 yield d
1089
1108
1090 yield changegroup.closechunk()
1109 yield changegroup.closechunk()
1091
1110
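
The chunks yielded by group() are length-prefixed. A sketch, assuming chunkheader() writes a 4-byte big-endian length that counts the header bytes themselves and closechunk() writes a zero length to end the stream (which is how the changegroup module of this era framed chunks):

    import struct

    def chunkheader(length):         # assumed to mirror changegroup.chunkheader
        return struct.pack('>l', length + 4)

    def closechunk():                # assumed to mirror changegroup.closechunk
        return struct.pack('>l', 0)

    meta = b'\x00' * 80              # node + p1 + p2 + cs, 20 bytes each
    delta = b'some delta bytes'
    chunk = chunkheader(len(meta) + len(delta)) + meta + delta
    stream = chunk + closechunk()
    print(len(stream))               # 4 + 80 + 16 + 4 = 104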
1092 def addgroup(self, bundle, linkmapper, transaction):
1111 def addgroup(self, bundle, linkmapper, transaction):
1093 """
1112 """
1094 add a delta group
1113 add a delta group
1095
1114
1096 given a set of deltas, add them to the revision log. the
1115 given a set of deltas, add them to the revision log. the
1097 first delta is against its parent, which should be in our
1116 first delta is against its parent, which should be in our
1098 log, the rest are against the previous delta.
1117 log, the rest are against the previous delta.
1099 """
1118 """
1100
1119
1101 # track the base of the current delta log
1120 # track the base of the current delta log
1102 node = None
1121 node = None
1103
1122
1104 r = len(self)
1123 r = len(self)
1105 end = 0
1124 end = 0
1106 if r:
1125 if r:
1107 end = self.end(r - 1)
1126 end = self.end(r - 1)
1108 ifh = self.opener(self.indexfile, "a+")
1127 ifh = self.opener(self.indexfile, "a+")
1109 isize = r * self._io.size
1128 isize = r * self._io.size
1110 if self._inline:
1129 if self._inline:
1111 transaction.add(self.indexfile, end + isize, r)
1130 transaction.add(self.indexfile, end + isize, r)
1112 dfh = None
1131 dfh = None
1113 else:
1132 else:
1114 transaction.add(self.indexfile, isize, r)
1133 transaction.add(self.indexfile, isize, r)
1115 transaction.add(self.datafile, end)
1134 transaction.add(self.datafile, end)
1116 dfh = self.opener(self.datafile, "a")
1135 dfh = self.opener(self.datafile, "a")
1117
1136
1118 try:
1137 try:
1119 # loop through our set of deltas
1138 # loop through our set of deltas
1120 chain = None
1139 chain = None
1121 while 1:
1140 while 1:
1122 chunkdata = bundle.parsechunk()
1141 chunkdata = bundle.parsechunk()
1123 if not chunkdata:
1142 if not chunkdata:
1124 break
1143 break
1125 node = chunkdata['node']
1144 node = chunkdata['node']
1126 p1 = chunkdata['p1']
1145 p1 = chunkdata['p1']
1127 p2 = chunkdata['p2']
1146 p2 = chunkdata['p2']
1128 cs = chunkdata['cs']
1147 cs = chunkdata['cs']
1129 delta = chunkdata['data']
1148 delta = chunkdata['data']
1130
1149
1131 link = linkmapper(cs)
1150 link = linkmapper(cs)
1132 if (node in self.nodemap and
1151 if (node in self.nodemap and
1133 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
1152 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
1134 # this can happen if two branches make the same change
1153 # this can happen if two branches make the same change
1135 chain = node
1154 chain = node
1136 continue
1155 continue
1137
1156
1138 for p in (p1, p2):
1157 for p in (p1, p2):
1139 if not p in self.nodemap:
1158 if not p in self.nodemap:
1140 if self._shallow:
1159 if self._shallow:
1141 # add null entries for missing parents
1160 # add null entries for missing parents
1142 # XXX FIXME
1161 # XXX FIXME
1143 #if base == nullrev:
1162 #if base == nullrev:
1144 # base = len(self)
1163 # base = len(self)
1145 #e = (offset_type(end, REVIDX_PUNCHED_FLAG),
1164 #e = (offset_type(end, REVIDX_PUNCHED_FLAG),
1146 # 0, 0, base, nullrev, nullrev, nullrev, p)
1165 # 0, 0, base, nullrev, nullrev, nullrev, p)
1147 #self.index.insert(-1, e)
1166 #self.index.insert(-1, e)
1148 #self.nodemap[p] = r
1167 #self.nodemap[p] = r
1149 #entry = self._io.packentry(e, self.node,
1168 #entry = self._io.packentry(e, self.node,
1150 # self.version, r)
1169 # self.version, r)
1151 #ifh.write(entry)
1170 #ifh.write(entry)
1152 #t, r = r, r + 1
1171 #t, r = r, r + 1
1153 raise LookupError(p, self.indexfile,
1172 raise LookupError(p, self.indexfile,
1154 _('unknown parent'))
1173 _('unknown parent'))
1155 else:
1174 else:
1156 raise LookupError(p, self.indexfile,
1175 raise LookupError(p, self.indexfile,
1157 _('unknown parent'))
1176 _('unknown parent'))
1158
1177
1159 if not chain:
1178 if not chain:
1160 # retrieve the parent revision of the delta chain
1179 # retrieve the parent revision of the delta chain
1161 chain = p1
1180 chain = p1
1162 if not chain in self.nodemap:
1181 if not chain in self.nodemap:
1163 raise LookupError(chain, self.indexfile, _('unknown base'))
1182 raise LookupError(chain, self.indexfile, _('unknown base'))
1164
1183
1165 chainrev = self.rev(chain)
1184 chainrev = self.rev(chain)
1166 chain = self._addrevision(node, None, transaction, link,
1185 chain = self._addrevision(node, None, transaction, link,
1167 p1, p2, (chainrev, delta), ifh, dfh)
1186 p1, p2, (chainrev, delta), ifh, dfh)
1168 if not dfh and not self._inline:
1187 if not dfh and not self._inline:
1169 # addrevision switched from inline to conventional
1188 # addrevision switched from inline to conventional
1170 # reopen the index
1189 # reopen the index
1171 ifh.close()
1190 ifh.close()
1172 dfh = self.opener(self.datafile, "a")
1191 dfh = self.opener(self.datafile, "a")
1173 ifh = self.opener(self.indexfile, "a")
1192 ifh = self.opener(self.indexfile, "a")
1174 finally:
1193 finally:
1175 if dfh:
1194 if dfh:
1176 dfh.close()
1195 dfh.close()
1177 ifh.close()
1196 ifh.close()
1178
1197
1179 return node
1198 return node
1180
1199
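
The 'chain' variable in addgroup() is the bookkeeping promised by the docstring: the first delta applies against its own first parent, each later one against whatever node preceded it in the stream. A toy trace with made-up node names:

    chunks = [('B', 'A'), ('D', 'C')]     # (node, p1) pairs from the bundle
    chain = None
    for node, p1 in chunks:
        base = chain if chain else p1
        print('%s applies against %s' % (node, base))   # B -> A, then D -> B
        chain = node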
1181 def strip(self, minlink, transaction):
1200 def strip(self, minlink, transaction):
1182 """truncate the revlog on the first revision with a linkrev >= minlink
1201 """truncate the revlog on the first revision with a linkrev >= minlink
1183
1202
1184 This function is called when we're stripping revision minlink and
1203 This function is called when we're stripping revision minlink and
1185 its descendants from the repository.
1204 its descendants from the repository.
1186
1205
1187 We have to remove all revisions with linkrev >= minlink, because
1206 We have to remove all revisions with linkrev >= minlink, because
1188 the equivalent changelog revisions will be renumbered after the
1207 the equivalent changelog revisions will be renumbered after the
1189 strip.
1208 strip.
1190
1209
1191 So we truncate the revlog on the first of these revisions, and
1210 So we truncate the revlog on the first of these revisions, and
1192 trust that the caller has saved the revisions that shouldn't be
1211 trust that the caller has saved the revisions that shouldn't be
1193 removed and that it'll re-add them after this truncation.
1212 removed and that it'll re-add them after this truncation.
1194 """
1213 """
1195 if len(self) == 0:
1214 if len(self) == 0:
1196 return
1215 return
1197
1216
1198 for rev in self:
1217 for rev in self:
1199 if self.index[rev][4] >= minlink:
1218 if self.index[rev][4] >= minlink:
1200 break
1219 break
1201 else:
1220 else:
1202 return
1221 return
1203
1222
1204 # first truncate the files on disk
1223 # first truncate the files on disk
1205 end = self.start(rev)
1224 end = self.start(rev)
1206 if not self._inline:
1225 if not self._inline:
1207 transaction.add(self.datafile, end)
1226 transaction.add(self.datafile, end)
1208 end = rev * self._io.size
1227 end = rev * self._io.size
1209 else:
1228 else:
1210 end += rev * self._io.size
1229 end += rev * self._io.size
1211
1230
1212 transaction.add(self.indexfile, end)
1231 transaction.add(self.indexfile, end)
1213
1232
1214 # then reset internal state in memory to forget those revisions
1233 # then reset internal state in memory to forget those revisions
1215 self._cache = None
1234 self._cache = None
1216 self._chunkclear()
1235 self._chunkclear()
1217 for x in xrange(rev, len(self)):
1236 for x in xrange(rev, len(self)):
1218 del self.nodemap[self.node(x)]
1237 del self.nodemap[self.node(x)]
1219
1238
1220 del self.index[rev:-1]
1239 del self.index[rev:-1]
1221
1240
1222 def checksize(self):
1241 def checksize(self):
1223 expected = 0
1242 expected = 0
1224 if len(self):
1243 if len(self):
1225 expected = max(0, self.end(len(self) - 1))
1244 expected = max(0, self.end(len(self) - 1))
1226
1245
1227 try:
1246 try:
1228 f = self.opener(self.datafile)
1247 f = self.opener(self.datafile)
1229 f.seek(0, 2)
1248 f.seek(0, 2)
1230 actual = f.tell()
1249 actual = f.tell()
1231 f.close()
1250 f.close()
1232 dd = actual - expected
1251 dd = actual - expected
1233 except IOError, inst:
1252 except IOError, inst:
1234 if inst.errno != errno.ENOENT:
1253 if inst.errno != errno.ENOENT:
1235 raise
1254 raise
1236 dd = 0
1255 dd = 0
1237
1256
1238 try:
1257 try:
1239 f = self.opener(self.indexfile)
1258 f = self.opener(self.indexfile)
1240 f.seek(0, 2)
1259 f.seek(0, 2)
1241 actual = f.tell()
1260 actual = f.tell()
1242 f.close()
1261 f.close()
1243 s = self._io.size
1262 s = self._io.size
1244 i = max(0, actual // s)
1263 i = max(0, actual // s)
1245 di = actual - (i * s)
1264 di = actual - (i * s)
1246 if self._inline:
1265 if self._inline:
1247 databytes = 0
1266 databytes = 0
1248 for r in self:
1267 for r in self:
1249 databytes += max(0, self.length(r))
1268 databytes += max(0, self.length(r))
1250 dd = 0
1269 dd = 0
1251 di = actual - len(self) * s - databytes
1270 di = actual - len(self) * s - databytes
1252 except IOError, inst:
1271 except IOError, inst:
1253 if inst.errno != errno.ENOENT:
1272 if inst.errno != errno.ENOENT:
1254 raise
1273 raise
1255 di = 0
1274 di = 0
1256
1275
1257 return (dd, di)
1276 return (dd, di)
1258
1277
1259 def files(self):
1278 def files(self):
1260 res = [self.indexfile]
1279 res = [self.indexfile]
1261 if not self._inline:
1280 if not self._inline:
1262 res.append(self.datafile)
1281 res.append(self.datafile)
1263 return res
1282 return res
@@ -1,393 +1,411 b''
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import repo, error, encoding, util, store
12 import repo, error, encoding, util, store
13 import pushkey as pushkeymod
13 import pushkey as pushkeymod
14
14
15 # list of nodes encoding / decoding
15 # list of nodes encoding / decoding
16
16
17 def decodelist(l, sep=' '):
17 def decodelist(l, sep=' '):
18 if l:
18 if l:
19 return map(bin, l.split(sep))
19 return map(bin, l.split(sep))
20 return []
20 return []
21
21
22 def encodelist(l, sep=' '):
22 def encodelist(l, sep=' '):
23 return sep.join(map(hex, l))
23 return sep.join(map(hex, l))
24
24
25 # client side
25 # client side
26
26
27 class wirerepository(repo.repository):
27 class wirerepository(repo.repository):
28 def lookup(self, key):
28 def lookup(self, key):
29 self.requirecap('lookup', _('look up remote revision'))
29 self.requirecap('lookup', _('look up remote revision'))
30 d = self._call("lookup", key=encoding.fromlocal(key))
30 d = self._call("lookup", key=encoding.fromlocal(key))
31 success, data = d[:-1].split(" ", 1)
31 success, data = d[:-1].split(" ", 1)
32 if int(success):
32 if int(success):
33 return bin(data)
33 return bin(data)
34 self._abort(error.RepoError(data))
34 self._abort(error.RepoError(data))
35
35
36 def heads(self):
36 def heads(self):
37 d = self._call("heads")
37 d = self._call("heads")
38 try:
38 try:
39 return decodelist(d[:-1])
39 return decodelist(d[:-1])
40 except ValueError:
40 except ValueError:
41 self._abort(error.ResponseError(_("unexpected response:"), d))
41 self._abort(error.ResponseError(_("unexpected response:"), d))
42
42
43 def known(self, nodes):
43 def known(self, nodes):
44 n = encodelist(nodes)
44 n = encodelist(nodes)
45 d = self._call("known", nodes=n)
45 d = self._call("known", nodes=n)
46 try:
46 try:
47 return [bool(int(f)) for f in d]
47 return [bool(int(f)) for f in d]
48 except ValueError:
48 except ValueError:
49 self._abort(error.ResponseError(_("unexpected response:"), d))
49 self._abort(error.ResponseError(_("unexpected response:"), d))
50
50
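
The 'known' reply is a bare string of '0'/'1' flags, one per requested node and in request order, so the decode above amounts to:

    d = '101'
    print([bool(int(f)) for f in d])    # [True, False, True]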
51 def branchmap(self):
51 def branchmap(self):
52 d = self._call("branchmap")
52 d = self._call("branchmap")
53 try:
53 try:
54 branchmap = {}
54 branchmap = {}
55 for branchpart in d.splitlines():
55 for branchpart in d.splitlines():
56 branchname, branchheads = branchpart.split(' ', 1)
56 branchname, branchheads = branchpart.split(' ', 1)
57 branchname = encoding.tolocal(urllib.unquote(branchname))
57 branchname = encoding.tolocal(urllib.unquote(branchname))
58 branchheads = decodelist(branchheads)
58 branchheads = decodelist(branchheads)
59 branchmap[branchname] = branchheads
59 branchmap[branchname] = branchheads
60 return branchmap
60 return branchmap
61 except TypeError:
61 except TypeError:
62 self._abort(error.ResponseError(_("unexpected response:"), d))
62 self._abort(error.ResponseError(_("unexpected response:"), d))
63
63
64 def branches(self, nodes):
64 def branches(self, nodes):
65 n = encodelist(nodes)
65 n = encodelist(nodes)
66 d = self._call("branches", nodes=n)
66 d = self._call("branches", nodes=n)
67 try:
67 try:
68 br = [tuple(decodelist(b)) for b in d.splitlines()]
68 br = [tuple(decodelist(b)) for b in d.splitlines()]
69 return br
69 return br
70 except ValueError:
70 except ValueError:
71 self._abort(error.ResponseError(_("unexpected response:"), d))
71 self._abort(error.ResponseError(_("unexpected response:"), d))
72
72
73 def between(self, pairs):
73 def between(self, pairs):
74 batch = 8 # avoid giant requests
74 batch = 8 # avoid giant requests
75 r = []
75 r = []
76 for i in xrange(0, len(pairs), batch):
76 for i in xrange(0, len(pairs), batch):
77 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
77 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
78 d = self._call("between", pairs=n)
78 d = self._call("between", pairs=n)
79 try:
79 try:
80 r.extend(l and decodelist(l) or [] for l in d.splitlines())
80 r.extend(l and decodelist(l) or [] for l in d.splitlines())
81 except ValueError:
81 except ValueError:
82 self._abort(error.ResponseError(_("unexpected response:"), d))
82 self._abort(error.ResponseError(_("unexpected response:"), d))
83 return r
83 return r
84
84
85 def pushkey(self, namespace, key, old, new):
85 def pushkey(self, namespace, key, old, new):
86 if not self.capable('pushkey'):
86 if not self.capable('pushkey'):
87 return False
87 return False
88 d = self._call("pushkey",
88 d = self._call("pushkey",
89 namespace=encoding.fromlocal(namespace),
89 namespace=encoding.fromlocal(namespace),
90 key=encoding.fromlocal(key),
90 key=encoding.fromlocal(key),
91 old=encoding.fromlocal(old),
91 old=encoding.fromlocal(old),
92 new=encoding.fromlocal(new))
92 new=encoding.fromlocal(new))
93 try:
93 try:
94 d = bool(int(d))
94 d = bool(int(d))
95 except ValueError:
95 except ValueError:
96 raise error.ResponseError(
96 raise error.ResponseError(
97 _('push failed (unexpected response):'), d)
97 _('push failed (unexpected response):'), d)
98 return d
98 return d
99
99
100 def listkeys(self, namespace):
100 def listkeys(self, namespace):
101 if not self.capable('pushkey'):
101 if not self.capable('pushkey'):
102 return {}
102 return {}
103 d = self._call("listkeys", namespace=encoding.fromlocal(namespace))
103 d = self._call("listkeys", namespace=encoding.fromlocal(namespace))
104 r = {}
104 r = {}
105 for l in d.splitlines():
105 for l in d.splitlines():
106 k, v = l.split('\t')
106 k, v = l.split('\t')
107 r[encoding.tolocal(k)] = encoding.tolocal(v)
107 r[encoding.tolocal(k)] = encoding.tolocal(v)
108 return r
108 return r
109
109
110 def stream_out(self):
110 def stream_out(self):
111 return self._callstream('stream_out')
111 return self._callstream('stream_out')
112
112
113 def changegroup(self, nodes, kind):
113 def changegroup(self, nodes, kind):
114 n = encodelist(nodes)
114 n = encodelist(nodes)
115 f = self._callstream("changegroup", roots=n)
115 f = self._callstream("changegroup", roots=n)
116 return changegroupmod.unbundle10(self._decompress(f), 'UN')
116 return changegroupmod.unbundle10(self._decompress(f), 'UN')
117
117
118 def changegroupsubset(self, bases, heads, kind):
118 def changegroupsubset(self, bases, heads, kind):
119 self.requirecap('changegroupsubset', _('look up remote changes'))
119 self.requirecap('changegroupsubset', _('look up remote changes'))
120 bases = encodelist(bases)
120 bases = encodelist(bases)
121 heads = encodelist(heads)
121 heads = encodelist(heads)
122 f = self._callstream("changegroupsubset",
122 f = self._callstream("changegroupsubset",
123 bases=bases, heads=heads)
123 bases=bases, heads=heads)
124 return changegroupmod.unbundle10(self._decompress(f), 'UN')
124 return changegroupmod.unbundle10(self._decompress(f), 'UN')
125
125
126 def getbundle(self, source, heads=None, common=None):
127 self.requirecap('getbundle', _('look up remote changes'))
128 opts = {}
129 if heads is not None:
130 opts['heads'] = encodelist(heads)
131 if common is not None:
132 opts['common'] = encodelist(common)
133 f = self._callstream("getbundle", **opts)
134 return changegroupmod.unbundle10(self._decompress(f), 'UN')
135
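
For readers following the new client method above, here is a minimal standalone sketch (not part of the commit) of how the optional wire arguments behave: heads and common travel as space-separated hex node ids, and an argument left at None is never sent at all. The helper names are invented for the sketch, and binascii's hexlify stands in for Mercurial's own hex()/encodelist helpers.

from binascii import hexlify

def encode_nodes(nodes):
    # space-separated hex ids, mirroring the encodelist() convention
    return ' '.join(hexlify(n).decode('ascii') for n in nodes)

def build_getbundle_args(heads=None, common=None):
    # like getbundle() above: options left at their default are omitted
    opts = {}
    if heads is not None:
        opts['heads'] = encode_nodes(heads)
    if common is not None:
        opts['common'] = encode_nodes(common)
    return opts

# only 'heads' goes on the wire here; 'common' is absent, not empty
print(build_getbundle_args(heads=[b'\x12' * 20]))
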
126 def unbundle(self, cg, heads, source):
136 def unbundle(self, cg, heads, source):
127 '''Send cg (a readable file-like object representing the
137 '''Send cg (a readable file-like object representing the
128 changegroup to push, typically a chunkbuffer object) to the
138 changegroup to push, typically a chunkbuffer object) to the
129 remote server as a bundle. Return an integer indicating the
139 remote server as a bundle. Return an integer indicating the
130 result of the push (see localrepository.addchangegroup()).'''
140 result of the push (see localrepository.addchangegroup()).'''
131
141
132 ret, output = self._callpush("unbundle", cg, heads=encodelist(heads))
142 ret, output = self._callpush("unbundle", cg, heads=encodelist(heads))
133 if ret == "":
143 if ret == "":
134 raise error.ResponseError(
144 raise error.ResponseError(
135 _('push failed:'), output)
145 _('push failed:'), output)
136 try:
146 try:
137 ret = int(ret)
147 ret = int(ret)
138 except ValueError:
148 except ValueError:
139 raise error.ResponseError(
149 raise error.ResponseError(
140 _('push failed (unexpected response):'), ret)
150 _('push failed (unexpected response):'), ret)
141
151
142 for l in output.splitlines(True):
152 for l in output.splitlines(True):
143 self.ui.status(_('remote: '), l)
153 self.ui.status(_('remote: '), l)
144 return ret
154 return ret
145
155
146 def debugwireargs(self, one, two, three=None, four=None):
156 def debugwireargs(self, one, two, three=None, four=None):
147 # don't pass optional arguments left at their default value
157 # don't pass optional arguments left at their default value
148 opts = {}
158 opts = {}
149 if three is not None:
159 if three is not None:
150 opts['three'] = three
160 opts['three'] = three
151 if four is not None:
161 if four is not None:
152 opts['four'] = four
162 opts['four'] = four
153 return self._call('debugwireargs', one=one, two=two, **opts)
163 return self._call('debugwireargs', one=one, two=two, **opts)
154
164
155 # server side
165 # server side
156
166
157 class streamres(object):
167 class streamres(object):
158 def __init__(self, gen):
168 def __init__(self, gen):
159 self.gen = gen
169 self.gen = gen
160
170
161 class pushres(object):
171 class pushres(object):
162 def __init__(self, res):
172 def __init__(self, res):
163 self.res = res
173 self.res = res
164
174
165 class pusherr(object):
175 class pusherr(object):
166 def __init__(self, res):
176 def __init__(self, res):
167 self.res = res
177 self.res = res
168
178
169 def dispatch(repo, proto, command):
179 def dispatch(repo, proto, command):
170 func, spec = commands[command]
180 func, spec = commands[command]
171 args = proto.getargs(spec)
181 args = proto.getargs(spec)
172 return func(repo, proto, *args)
182 return func(repo, proto, *args)
173
183
174 def options(cmd, keys, others):
184 def options(cmd, keys, others):
175 opts = {}
185 opts = {}
176 for k in keys:
186 for k in keys:
177 if k in others:
187 if k in others:
178 opts[k] = others[k]
188 opts[k] = others[k]
179 del others[k]
189 del others[k]
180 if others:
190 if others:
181 sys.stderr.write("abort: %s got unexpected arguments %s\n"
191 sys.stderr.write("abort: %s got unexpected arguments %s\n"
182 % (cmd, ",".join(others)))
192 % (cmd, ",".join(others)))
183 return opts
193 return opts
184
194
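
As a quick illustration of the helper above (a standalone mimic, not the real function, which reports leftovers on stderr): recognized keys are moved out of the caller's dict, and anything unexpected is reported and left behind.

def filter_options(cmd, keys, others):
    # standalone mimic of options(): keep known keys, complain about the rest
    opts = {}
    for k in keys:
        if k in others:
            opts[k] = others.pop(k)
    if others:
        print("abort: %s got unexpected arguments %s" % (cmd, ",".join(others)))
    return opts

args = {'heads': 'aa bb', 'common': 'cc', 'bogus': '1'}
opts = filter_options('getbundle', ['heads', 'common'], args)
# prints: abort: getbundle got unexpected arguments bogus
print(sorted(opts))   # ['common', 'heads']
print(sorted(args))   # ['bogus'] -- the recognized keys were removed in place
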
185 def between(repo, proto, pairs):
195 def between(repo, proto, pairs):
186 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
196 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
187 r = []
197 r = []
188 for b in repo.between(pairs):
198 for b in repo.between(pairs):
189 r.append(encodelist(b) + "\n")
199 r.append(encodelist(b) + "\n")
190 return "".join(r)
200 return "".join(r)
191
201
192 def branchmap(repo, proto):
202 def branchmap(repo, proto):
193 branchmap = repo.branchmap()
203 branchmap = repo.branchmap()
194 heads = []
204 heads = []
195 for branch, nodes in branchmap.iteritems():
205 for branch, nodes in branchmap.iteritems():
196 branchname = urllib.quote(encoding.fromlocal(branch))
206 branchname = urllib.quote(encoding.fromlocal(branch))
197 branchnodes = encodelist(nodes)
207 branchnodes = encodelist(nodes)
198 heads.append('%s %s' % (branchname, branchnodes))
208 heads.append('%s %s' % (branchname, branchnodes))
199 return '\n'.join(heads)
209 return '\n'.join(heads)
200
210
201 def branches(repo, proto, nodes):
211 def branches(repo, proto, nodes):
202 nodes = decodelist(nodes)
212 nodes = decodelist(nodes)
203 r = []
213 r = []
204 for b in repo.branches(nodes):
214 for b in repo.branches(nodes):
205 r.append(encodelist(b) + "\n")
215 r.append(encodelist(b) + "\n")
206 return "".join(r)
216 return "".join(r)
207
217
208 def capabilities(repo, proto):
218 def capabilities(repo, proto):
209 caps = 'lookup changegroupsubset branchmap pushkey known'.split()
219 caps = 'lookup changegroupsubset branchmap pushkey known getbundle'.split()
210 if _allowstream(repo.ui):
220 if _allowstream(repo.ui):
211 requiredformats = repo.requirements & repo.supportedformats
221 requiredformats = repo.requirements & repo.supportedformats
212 # if our local revlogs are just revlogv1, add 'stream' cap
222 # if our local revlogs are just revlogv1, add 'stream' cap
213 if not requiredformats - set(('revlogv1',)):
223 if not requiredformats - set(('revlogv1',)):
214 caps.append('stream')
224 caps.append('stream')
215 # otherwise, add 'streamreqs' detailing our local revlog format
225 # otherwise, add 'streamreqs' detailing our local revlog format
216 else:
226 else:
217 caps.append('streamreqs=%s' % ','.join(requiredformats))
227 caps.append('streamreqs=%s' % ','.join(requiredformats))
218 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
228 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
219 return ' '.join(caps)
229 return ' '.join(caps)
220
230
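
The string returned above is what a client splits to decide which commands it may use; a small sketch of that check follows. The capability line is a hypothetical example (the exact set and the unbundle= value depend on the server), but the token format matches the code above.

caps_line = 'lookup changegroupsubset branchmap pushkey known getbundle unbundle=HG10GZ,HG10BZ,HG10UN'
caps = set()
for token in caps_line.split():
    # a token may carry a value after '=', e.g. unbundle=<accepted bundle types>
    caps.add(token.split('=', 1)[0])
print('getbundle' in caps)   # True: the new command is advertised
print('stream' in caps)      # False: streaming clone was not offered here
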
221 def changegroup(repo, proto, roots):
231 def changegroup(repo, proto, roots):
222 nodes = decodelist(roots)
232 nodes = decodelist(roots)
223 cg = repo.changegroup(nodes, 'serve')
233 cg = repo.changegroup(nodes, 'serve')
224 return streamres(proto.groupchunks(cg))
234 return streamres(proto.groupchunks(cg))
225
235
226 def changegroupsubset(repo, proto, bases, heads):
236 def changegroupsubset(repo, proto, bases, heads):
227 bases = decodelist(bases)
237 bases = decodelist(bases)
228 heads = decodelist(heads)
238 heads = decodelist(heads)
229 cg = repo.changegroupsubset(bases, heads, 'serve')
239 cg = repo.changegroupsubset(bases, heads, 'serve')
230 return streamres(proto.groupchunks(cg))
240 return streamres(proto.groupchunks(cg))
231
241
232 def debugwireargs(repo, proto, one, two, others):
242 def debugwireargs(repo, proto, one, two, others):
233 # only accept optional args from the known set
243 # only accept optional args from the known set
234 opts = options('debugwireargs', ['three', 'four'], others)
244 opts = options('debugwireargs', ['three', 'four'], others)
235 return repo.debugwireargs(one, two, **opts)
245 return repo.debugwireargs(one, two, **opts)
236
246
247 def getbundle(repo, proto, others):
248 opts = options('getbundle', ['heads', 'common'], others)
249 for k, v in opts.iteritems():
250 opts[k] = decodelist(v)
251 cg = repo.getbundle('serve', **opts)
252 return streamres(proto.groupchunks(cg))
253
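
On the server side the same lists arrive as plain strings: the '*' spec (see the command table below) routes every supplied argument into others, options() keeps only heads and common, and decodelist() turns them back into binary node ids before repo.getbundle() builds the changegroup. Below is a hedged standalone sketch of just the decode step, mirroring the encode sketch shown after the client method; unhexlify stands in for decodelist.

from binascii import unhexlify

def decode_nodes(wire):
    # inverse of the space-separated hex encoding: back to 20-byte binary ids
    return [unhexlify(h) for h in wire.split(' ')] if wire else []

wire_opts = {'heads': '12' * 20 + ' ' + 'ab' * 20, 'common': 'cd' * 20}
decoded = dict((k, decode_nodes(v)) for k, v in wire_opts.items())
print([len(n) for n in decoded['heads']])   # [20, 20]
print(len(decoded['common']))               # 1
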
237 def heads(repo, proto):
254 def heads(repo, proto):
238 h = repo.heads()
255 h = repo.heads()
239 return encodelist(h) + "\n"
256 return encodelist(h) + "\n"
240
257
241 def hello(repo, proto):
258 def hello(repo, proto):
242 '''the hello command returns a set of lines describing various
259 '''the hello command returns a set of lines describing various
243 interesting things about the server, in an RFC822-like format.
260 interesting things about the server, in an RFC822-like format.
244 Currently the only one defined is "capabilities", which
261 Currently the only one defined is "capabilities", which
245 consists of a line in the form:
262 consists of a line in the form:
246
263
247 capabilities: space separated list of tokens
264 capabilities: space separated list of tokens
248 '''
265 '''
249 return "capabilities: %s\n" % (capabilities(repo, proto))
266 return "capabilities: %s\n" % (capabilities(repo, proto))
250
267
251 def listkeys(repo, proto, namespace):
268 def listkeys(repo, proto, namespace):
252 d = pushkeymod.list(repo, encoding.tolocal(namespace)).items()
269 d = pushkeymod.list(repo, encoding.tolocal(namespace)).items()
253 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
270 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
254 for k, v in d])
271 for k, v in d])
255 return t
272 return t
256
273
257 def lookup(repo, proto, key):
274 def lookup(repo, proto, key):
258 try:
275 try:
259 r = hex(repo.lookup(encoding.tolocal(key)))
276 r = hex(repo.lookup(encoding.tolocal(key)))
260 success = 1
277 success = 1
261 except Exception, inst:
278 except Exception, inst:
262 r = str(inst)
279 r = str(inst)
263 success = 0
280 success = 0
264 return "%s %s\n" % (success, r)
281 return "%s %s\n" % (success, r)
265
282
266 def known(repo, proto, nodes):
283 def known(repo, proto, nodes):
267 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
284 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
268
285
269 def pushkey(repo, proto, namespace, key, old, new):
286 def pushkey(repo, proto, namespace, key, old, new):
270 # compatibility with pre-1.8 clients which were accidentally
287 # compatibility with pre-1.8 clients which were accidentally
271 # sending raw binary nodes rather than utf-8-encoded hex
288 # sending raw binary nodes rather than utf-8-encoded hex
272 if len(new) == 20 and new.encode('string-escape') != new:
289 if len(new) == 20 and new.encode('string-escape') != new:
273 # looks like it could be a binary node
290 # looks like it could be a binary node
274 try:
291 try:
275 u = new.decode('utf-8')
292 u = new.decode('utf-8')
276 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
293 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
277 except UnicodeDecodeError:
294 except UnicodeDecodeError:
278 pass # binary, leave unmodified
295 pass # binary, leave unmodified
279 else:
296 else:
280 new = encoding.tolocal(new) # normal path
297 new = encoding.tolocal(new) # normal path
281
298
282 r = pushkeymod.push(repo,
299 r = pushkeymod.push(repo,
283 encoding.tolocal(namespace), encoding.tolocal(key),
300 encoding.tolocal(namespace), encoding.tolocal(key),
284 encoding.tolocal(old), new)
301 encoding.tolocal(old), new)
285 return '%s\n' % int(r)
302 return '%s\n' % int(r)
286
303
287 def _allowstream(ui):
304 def _allowstream(ui):
288 return ui.configbool('server', 'uncompressed', True, untrusted=True)
305 return ui.configbool('server', 'uncompressed', True, untrusted=True)
289
306
290 def stream(repo, proto):
307 def stream(repo, proto):
291 '''If the server supports streaming clone, it advertises the "stream"
308 '''If the server supports streaming clone, it advertises the "stream"
292 capability with a value representing the version and flags of the repo
309 capability with a value representing the version and flags of the repo
293 it is serving. The client checks whether it understands the format.
310 it is serving. The client checks whether it understands the format.
294
311
295 The format is simple: the server writes out a line with the number
312 The format is simple: the server writes out a line with the number
296 of files, then the total number of bytes to be transferred (separated
313 of files, then the total number of bytes to be transferred (separated
297 by a space). Then, for each file, the server first writes the filename
314 by a space). Then, for each file, the server first writes the filename
298 and filesize (separated by the null character), then the file contents.
315 and filesize (separated by the null character), then the file contents.
299 '''
316 '''
300
317
301 if not _allowstream(repo.ui):
318 if not _allowstream(repo.ui):
302 return '1\n'
319 return '1\n'
303
320
304 entries = []
321 entries = []
305 total_bytes = 0
322 total_bytes = 0
306 try:
323 try:
307 # get consistent snapshot of repo, lock during scan
324 # get consistent snapshot of repo, lock during scan
308 lock = repo.lock()
325 lock = repo.lock()
309 try:
326 try:
310 repo.ui.debug('scanning\n')
327 repo.ui.debug('scanning\n')
311 for name, ename, size in repo.store.walk():
328 for name, ename, size in repo.store.walk():
312 entries.append((name, size))
329 entries.append((name, size))
313 total_bytes += size
330 total_bytes += size
314 finally:
331 finally:
315 lock.release()
332 lock.release()
316 except error.LockError:
333 except error.LockError:
317 return '2\n' # error: 2
334 return '2\n' # error: 2
318
335
319 def streamer(repo, entries, total):
336 def streamer(repo, entries, total):
320 '''stream out all metadata files in repository.'''
337 '''stream out all metadata files in repository.'''
321 yield '0\n' # success
338 yield '0\n' # success
322 repo.ui.debug('%d files, %d bytes to transfer\n' %
339 repo.ui.debug('%d files, %d bytes to transfer\n' %
323 (len(entries), total_bytes))
340 (len(entries), total_bytes))
324 yield '%d %d\n' % (len(entries), total_bytes)
341 yield '%d %d\n' % (len(entries), total_bytes)
325 for name, size in entries:
342 for name, size in entries:
326 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
343 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
327 # partially encode name over the wire for backwards compat
344 # partially encode name over the wire for backwards compat
328 yield '%s\0%d\n' % (store.encodedir(name), size)
345 yield '%s\0%d\n' % (store.encodedir(name), size)
329 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
346 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
330 yield chunk
347 yield chunk
331
348
332 return streamres(streamer(repo, entries, total_bytes))
349 return streamres(streamer(repo, entries, total_bytes))
333
350
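
To make the payload layout described in the stream_out docstring concrete, the following self-contained sketch builds a tiny two-file payload in the same shape and reads it back. The file names and contents are made up, the real server additionally passes names through store.encodedir(), and a real client consumes this incrementally from the network rather than from an in-memory buffer.

import io

# build a payload shaped like streamer() above: a success line, a counts line,
# then for each file "<name>\0<size>\n" followed by exactly <size> bytes
files = [(b'data/foo.i', b'1234567890'), (b'00manifest.i', b'abcdef')]
total = sum(len(content) for _, content in files)
payload = b'0\n'
payload += ('%d %d\n' % (len(files), total)).encode('ascii')
for name, content in files:
    payload += name + b'\0' + ('%d\n' % len(content)).encode('ascii') + content

# read it back the way a streaming-clone client would
fp = io.BytesIO(payload)
assert fp.readline() == b'0\n'                       # success marker
count, total = [int(x) for x in fp.readline().split()]
for _ in range(count):
    name, size = fp.readline().rstrip(b'\n').split(b'\0')
    data = fp.read(int(size))
    print('%s: %d bytes' % (name.decode('ascii'), len(data)))
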
334 def unbundle(repo, proto, heads):
351 def unbundle(repo, proto, heads):
335 their_heads = decodelist(heads)
352 their_heads = decodelist(heads)
336
353
337 def check_heads():
354 def check_heads():
338 heads = repo.heads()
355 heads = repo.heads()
339 return their_heads == ['force'] or their_heads == heads
356 return their_heads == ['force'] or their_heads == heads
340
357
341 proto.redirect()
358 proto.redirect()
342
359
343 # fail early if possible
360 # fail early if possible
344 if not check_heads():
361 if not check_heads():
345 return pusherr('unsynced changes')
362 return pusherr('unsynced changes')
346
363
347 # write bundle data to temporary file because it can be big
364 # write bundle data to temporary file because it can be big
348 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
365 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
349 fp = os.fdopen(fd, 'wb+')
366 fp = os.fdopen(fd, 'wb+')
350 r = 0
367 r = 0
351 try:
368 try:
352 proto.getfile(fp)
369 proto.getfile(fp)
353 lock = repo.lock()
370 lock = repo.lock()
354 try:
371 try:
355 if not check_heads():
372 if not check_heads():
356 # someone else committed/pushed/unbundled while we
373 # someone else committed/pushed/unbundled while we
357 # were transferring data
374 # were transferring data
358 return pusherr('unsynced changes')
375 return pusherr('unsynced changes')
359
376
360 # push can proceed
377 # push can proceed
361 fp.seek(0)
378 fp.seek(0)
362 gen = changegroupmod.readbundle(fp, None)
379 gen = changegroupmod.readbundle(fp, None)
363
380
364 try:
381 try:
365 r = repo.addchangegroup(gen, 'serve', proto._client(),
382 r = repo.addchangegroup(gen, 'serve', proto._client(),
366 lock=lock)
383 lock=lock)
367 except util.Abort, inst:
384 except util.Abort, inst:
368 sys.stderr.write("abort: %s\n" % inst)
385 sys.stderr.write("abort: %s\n" % inst)
369 finally:
386 finally:
370 lock.release()
387 lock.release()
371 return pushres(r)
388 return pushres(r)
372
389
373 finally:
390 finally:
374 fp.close()
391 fp.close()
375 os.unlink(tempname)
392 os.unlink(tempname)
376
393
377 commands = {
394 commands = {
378 'between': (between, 'pairs'),
395 'between': (between, 'pairs'),
379 'branchmap': (branchmap, ''),
396 'branchmap': (branchmap, ''),
380 'branches': (branches, 'nodes'),
397 'branches': (branches, 'nodes'),
381 'capabilities': (capabilities, ''),
398 'capabilities': (capabilities, ''),
382 'changegroup': (changegroup, 'roots'),
399 'changegroup': (changegroup, 'roots'),
383 'changegroupsubset': (changegroupsubset, 'bases heads'),
400 'changegroupsubset': (changegroupsubset, 'bases heads'),
384 'debugwireargs': (debugwireargs, 'one two *'),
401 'debugwireargs': (debugwireargs, 'one two *'),
402 'getbundle': (getbundle, '*'),
385 'heads': (heads, ''),
403 'heads': (heads, ''),
386 'hello': (hello, ''),
404 'hello': (hello, ''),
387 'known': (known, 'nodes'),
405 'known': (known, 'nodes'),
388 'listkeys': (listkeys, 'namespace'),
406 'listkeys': (listkeys, 'namespace'),
389 'lookup': (lookup, 'key'),
407 'lookup': (lookup, 'key'),
390 'pushkey': (pushkey, 'namespace key old new'),
408 'pushkey': (pushkey, 'namespace key old new'),
391 'stream_out': (stream, ''),
409 'stream_out': (stream, ''),
392 'unbundle': (unbundle, 'heads'),
410 'unbundle': (unbundle, 'heads'),
393 }
411 }
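
Finally, a stripped-down model of how this table is consumed (toy objects invented for illustration, not Mercurial code): dispatch() looks the command up, asks the protocol handler for the arguments named in the spec, and calls the server-side function, with '*' handing every remaining argument over as a single dict, as getbundle and debugwireargs do above.

class toyproto(object):
    # stands in for the real ssh/http protocol handlers' argument handling
    def __init__(self, args):
        self._args = dict(args)
    def getargs(self, spec):
        vals = []
        for name in spec.split():
            if name == '*':
                # everything not consumed yet is passed along as one dict
                vals.append(dict(self._args))
                self._args.clear()
            else:
                vals.append(self._args.pop(name, ''))
        return vals

def toy_hello(repo, proto):
    return 'capabilities: lookup known getbundle\n'

def toy_echo(repo, proto, key, others):
    return '%s %s\n' % (key, sorted(others))

toycommands = {
    'hello': (toy_hello, ''),
    'echo': (toy_echo, 'key *'),
}

def toydispatch(repo, proto, command):
    # same shape as dispatch() above
    func, spec = toycommands[command]
    return func(repo, proto, *proto.getargs(spec))

print(toydispatch(None, toyproto({}), 'hello'))
print(toydispatch(None, toyproto({'key': 'k', 'x': '1', 'y': '2'}), 'echo'))
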
@@ -1,259 +1,261 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 add
3 add
4 addremove
4 addremove
5 annotate
5 annotate
6 archive
6 archive
7 backout
7 backout
8 bisect
8 bisect
9 bookmarks
9 bookmarks
10 branch
10 branch
11 branches
11 branches
12 bundle
12 bundle
13 cat
13 cat
14 clone
14 clone
15 commit
15 commit
16 copy
16 copy
17 diff
17 diff
18 export
18 export
19 forget
19 forget
20 grep
20 grep
21 heads
21 heads
22 help
22 help
23 identify
23 identify
24 import
24 import
25 incoming
25 incoming
26 init
26 init
27 locate
27 locate
28 log
28 log
29 manifest
29 manifest
30 merge
30 merge
31 outgoing
31 outgoing
32 parents
32 parents
33 paths
33 paths
34 pull
34 pull
35 push
35 push
36 recover
36 recover
37 remove
37 remove
38 rename
38 rename
39 resolve
39 resolve
40 revert
40 revert
41 rollback
41 rollback
42 root
42 root
43 serve
43 serve
44 showconfig
44 showconfig
45 status
45 status
46 summary
46 summary
47 tag
47 tag
48 tags
48 tags
49 tip
49 tip
50 unbundle
50 unbundle
51 update
51 update
52 verify
52 verify
53 version
53 version
54
54
55 Show all commands that start with "a"
55 Show all commands that start with "a"
56 $ hg debugcomplete a
56 $ hg debugcomplete a
57 add
57 add
58 addremove
58 addremove
59 annotate
59 annotate
60 archive
60 archive
61
61
62 Do not show debug commands if there are other candidates
62 Do not show debug commands if there are other candidates
63 $ hg debugcomplete d
63 $ hg debugcomplete d
64 diff
64 diff
65
65
66 Show debug commands if there are no other candidates
66 Show debug commands if there are no other candidates
67 $ hg debugcomplete debug
67 $ hg debugcomplete debug
68 debugancestor
68 debugancestor
69 debugbuilddag
69 debugbuilddag
70 debugbundle
70 debugbundle
71 debugcheckstate
71 debugcheckstate
72 debugcommands
72 debugcommands
73 debugcomplete
73 debugcomplete
74 debugconfig
74 debugconfig
75 debugdag
75 debugdag
76 debugdata
76 debugdata
77 debugdate
77 debugdate
78 debugfsinfo
78 debugfsinfo
79 debuggetbundle
79 debugignore
80 debugignore
80 debugindex
81 debugindex
81 debugindexdot
82 debugindexdot
82 debuginstall
83 debuginstall
83 debugknown
84 debugknown
84 debugpushkey
85 debugpushkey
85 debugrebuildstate
86 debugrebuildstate
86 debugrename
87 debugrename
87 debugrevspec
88 debugrevspec
88 debugsetparents
89 debugsetparents
89 debugstate
90 debugstate
90 debugsub
91 debugsub
91 debugwalk
92 debugwalk
92 debugwireargs
93 debugwireargs
93
94
94 Do not show the alias of a debug command if there are other candidates
95 Do not show the alias of a debug command if there are other candidates
95 (this should hide rawcommit)
96 (this should hide rawcommit)
96 $ hg debugcomplete r
97 $ hg debugcomplete r
97 recover
98 recover
98 remove
99 remove
99 rename
100 rename
100 resolve
101 resolve
101 revert
102 revert
102 rollback
103 rollback
103 root
104 root
104 Show the alias of a debug command if there are no other candidates
105 Show the alias of a debug command if there are no other candidates
105 $ hg debugcomplete rawc
106 $ hg debugcomplete rawc
106
107
107
108
108 Show the global options
109 Show the global options
109 $ hg debugcomplete --options | sort
110 $ hg debugcomplete --options | sort
110 --config
111 --config
111 --cwd
112 --cwd
112 --debug
113 --debug
113 --debugger
114 --debugger
114 --encoding
115 --encoding
115 --encodingmode
116 --encodingmode
116 --help
117 --help
117 --noninteractive
118 --noninteractive
118 --profile
119 --profile
119 --quiet
120 --quiet
120 --repository
121 --repository
121 --time
122 --time
122 --traceback
123 --traceback
123 --verbose
124 --verbose
124 --version
125 --version
125 -R
126 -R
126 -h
127 -h
127 -q
128 -q
128 -v
129 -v
129 -y
130 -y
130
131
131 Show the options for the "serve" command
132 Show the options for the "serve" command
132 $ hg debugcomplete --options serve | sort
133 $ hg debugcomplete --options serve | sort
133 --accesslog
134 --accesslog
134 --address
135 --address
135 --certificate
136 --certificate
136 --config
137 --config
137 --cwd
138 --cwd
138 --daemon
139 --daemon
139 --daemon-pipefds
140 --daemon-pipefds
140 --debug
141 --debug
141 --debugger
142 --debugger
142 --encoding
143 --encoding
143 --encodingmode
144 --encodingmode
144 --errorlog
145 --errorlog
145 --help
146 --help
146 --ipv6
147 --ipv6
147 --name
148 --name
148 --noninteractive
149 --noninteractive
149 --pid-file
150 --pid-file
150 --port
151 --port
151 --prefix
152 --prefix
152 --profile
153 --profile
153 --quiet
154 --quiet
154 --repository
155 --repository
155 --stdio
156 --stdio
156 --style
157 --style
157 --templates
158 --templates
158 --time
159 --time
159 --traceback
160 --traceback
160 --verbose
161 --verbose
161 --version
162 --version
162 --web-conf
163 --web-conf
163 -6
164 -6
164 -A
165 -A
165 -E
166 -E
166 -R
167 -R
167 -a
168 -a
168 -d
169 -d
169 -h
170 -h
170 -n
171 -n
171 -p
172 -p
172 -q
173 -q
173 -t
174 -t
174 -v
175 -v
175 -y
176 -y
176
177
177 Show an error if we use --options with an ambiguous abbreviation
178 Show an error if we use --options with an ambiguous abbreviation
178 $ hg debugcomplete --options s
179 $ hg debugcomplete --options s
179 hg: command 's' is ambiguous:
180 hg: command 's' is ambiguous:
180 serve showconfig status summary
181 serve showconfig status summary
181 [255]
182 [255]
182
183
183 Show all commands + options
184 Show all commands + options
184 $ hg debugcommands
185 $ hg debugcommands
185 add: include, exclude, subrepos, dry-run
186 add: include, exclude, subrepos, dry-run
186 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, include, exclude
187 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, include, exclude
187 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
188 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
188 commit: addremove, close-branch, include, exclude, message, logfile, date, user
189 commit: addremove, close-branch, include, exclude, message, logfile, date, user
189 diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
190 diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
190 export: output, switch-parent, rev, text, git, nodates
191 export: output, switch-parent, rev, text, git, nodates
191 forget: include, exclude
192 forget: include, exclude
192 init: ssh, remotecmd, insecure
193 init: ssh, remotecmd, insecure
193 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude
194 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude
194 merge: force, tool, rev, preview
195 merge: force, tool, rev, preview
195 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
196 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
196 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
197 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
197 remove: after, force, include, exclude
198 remove: after, force, include, exclude
198 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
199 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
199 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos
200 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos
200 summary: remote
201 summary: remote
201 update: clean, check, date, rev
202 update: clean, check, date, rev
202 addremove: similarity, include, exclude, dry-run
203 addremove: similarity, include, exclude, dry-run
203 archive: no-decode, prefix, rev, type, subrepos, include, exclude
204 archive: no-decode, prefix, rev, type, subrepos, include, exclude
204 backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user
205 backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user
205 bisect: reset, good, bad, skip, extend, command, noupdate
206 bisect: reset, good, bad, skip, extend, command, noupdate
206 bookmarks: force, rev, delete, rename
207 bookmarks: force, rev, delete, rename
207 branch: force, clean
208 branch: force, clean
208 branches: active, closed
209 branches: active, closed
209 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
210 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
210 cat: output, rev, decode, include, exclude
211 cat: output, rev, decode, include, exclude
211 copy: after, force, include, exclude, dry-run
212 copy: after, force, include, exclude, dry-run
212 debugancestor:
213 debugancestor:
213 debugbuilddag: mergeable-file, appended-file, overwritten-file, new-file
214 debugbuilddag: mergeable-file, appended-file, overwritten-file, new-file
214 debugbundle: all
215 debugbundle: all
215 debugcheckstate:
216 debugcheckstate:
216 debugcommands:
217 debugcommands:
217 debugcomplete: options
218 debugcomplete: options
218 debugdag: tags, branches, dots, spaces
219 debugdag: tags, branches, dots, spaces
219 debugdata:
220 debugdata:
220 debugdate: extended
221 debugdate: extended
221 debugfsinfo:
222 debugfsinfo:
223 debuggetbundle: head, common, type
222 debugignore:
224 debugignore:
223 debugindex: format
225 debugindex: format
224 debugindexdot:
226 debugindexdot:
225 debuginstall:
227 debuginstall:
226 debugknown:
228 debugknown:
227 debugpushkey:
229 debugpushkey:
228 debugrebuildstate: rev
230 debugrebuildstate: rev
229 debugrename: rev
231 debugrename: rev
230 debugrevspec:
232 debugrevspec:
231 debugsetparents:
233 debugsetparents:
232 debugstate: nodates
234 debugstate: nodates
233 debugsub: rev
235 debugsub: rev
234 debugwalk: include, exclude
236 debugwalk: include, exclude
235 debugwireargs: three, four, ssh, remotecmd, insecure
237 debugwireargs: three, four, ssh, remotecmd, insecure
236 grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
238 grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
237 heads: rev, topo, active, closed, style, template
239 heads: rev, topo, active, closed, style, template
238 help:
240 help:
239 identify: rev, num, id, branch, tags, bookmarks
241 identify: rev, num, id, branch, tags, bookmarks
240 import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity
242 import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity
241 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
243 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
242 locate: rev, print0, fullpath, include, exclude
244 locate: rev, print0, fullpath, include, exclude
243 manifest: rev
245 manifest: rev
244 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
246 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
245 parents: rev, style, template
247 parents: rev, style, template
246 paths:
248 paths:
247 recover:
249 recover:
248 rename: after, force, include, exclude, dry-run
250 rename: after, force, include, exclude, dry-run
249 resolve: all, list, mark, unmark, tool, no-status, include, exclude
251 resolve: all, list, mark, unmark, tool, no-status, include, exclude
250 revert: all, date, rev, no-backup, include, exclude, dry-run
252 revert: all, date, rev, no-backup, include, exclude, dry-run
251 rollback: dry-run
253 rollback: dry-run
252 root:
254 root:
253 showconfig: untrusted
255 showconfig: untrusted
254 tag: force, local, rev, remove, edit, message, date, user
256 tag: force, local, rev, remove, edit, message, date, user
255 tags:
257 tags:
256 tip: patch, git, style, template
258 tip: patch, git, style, template
257 unbundle: update
259 unbundle: update
258 verify:
260 verify:
259 version:
261 version:
@@ -1,1081 +1,1081 b''
1 An attempt at more fully testing the hgweb web interface.
1 An attempt at more fully testing the hgweb web interface.
2 The following things are tested elsewhere and are therefore omitted:
2 The following things are tested elsewhere and are therefore omitted:
3 - archive, tested in test-archive
3 - archive, tested in test-archive
4 - unbundle, tested in test-push-http
4 - unbundle, tested in test-push-http
5 - changegroupsubset, tested in test-pull
5 - changegroupsubset, tested in test-pull
6
6
7 Set up the repo
7 Set up the repo
8
8
9 $ hg init test
9 $ hg init test
10 $ cd test
10 $ cd test
11 $ mkdir da
11 $ mkdir da
12 $ echo foo > da/foo
12 $ echo foo > da/foo
13 $ echo foo > foo
13 $ echo foo > foo
14 $ hg ci -Ambase
14 $ hg ci -Ambase
15 adding da/foo
15 adding da/foo
16 adding foo
16 adding foo
17 $ hg tag 1.0
17 $ hg tag 1.0
18 $ hg bookmark something
18 $ hg bookmark something
19 $ echo another > foo
19 $ echo another > foo
20 $ hg branch stable
20 $ hg branch stable
21 marked working directory as branch stable
21 marked working directory as branch stable
22 $ hg ci -Ambranch
22 $ hg ci -Ambranch
23 $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
23 $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
24 $ cat hg.pid >> $DAEMON_PIDS
24 $ cat hg.pid >> $DAEMON_PIDS
25
25
26 Logs and changes
26 Logs and changes
27
27
28 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom'
28 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom'
29 200 Script output follows
29 200 Script output follows
30
30
31 <?xml version="1.0" encoding="ascii"?>
31 <?xml version="1.0" encoding="ascii"?>
32 <feed xmlns="http://www.w3.org/2005/Atom">
32 <feed xmlns="http://www.w3.org/2005/Atom">
33 <!-- Changelog -->
33 <!-- Changelog -->
34 <id>http://*:$HGPORT/</id> (glob)
34 <id>http://*:$HGPORT/</id> (glob)
35 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
35 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
36 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
36 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
37 <title>test Changelog</title>
37 <title>test Changelog</title>
38 <updated>1970-01-01T00:00:00+00:00</updated>
38 <updated>1970-01-01T00:00:00+00:00</updated>
39
39
40 <entry>
40 <entry>
41 <title>branch</title>
41 <title>branch</title>
42 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
42 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
43 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
43 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
44 <author>
44 <author>
45 <name>test</name>
45 <name>test</name>
46 <email>&#116;&#101;&#115;&#116;</email>
46 <email>&#116;&#101;&#115;&#116;</email>
47 </author>
47 </author>
48 <updated>1970-01-01T00:00:00+00:00</updated>
48 <updated>1970-01-01T00:00:00+00:00</updated>
49 <published>1970-01-01T00:00:00+00:00</published>
49 <published>1970-01-01T00:00:00+00:00</published>
50 <content type="xhtml">
50 <content type="xhtml">
51 <div xmlns="http://www.w3.org/1999/xhtml">
51 <div xmlns="http://www.w3.org/1999/xhtml">
52 <pre xml:space="preserve">branch</pre>
52 <pre xml:space="preserve">branch</pre>
53 </div>
53 </div>
54 </content>
54 </content>
55 </entry>
55 </entry>
56 <entry>
56 <entry>
57 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
57 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
58 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
58 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
59 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
59 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
60 <author>
60 <author>
61 <name>test</name>
61 <name>test</name>
62 <email>&#116;&#101;&#115;&#116;</email>
62 <email>&#116;&#101;&#115;&#116;</email>
63 </author>
63 </author>
64 <updated>1970-01-01T00:00:00+00:00</updated>
64 <updated>1970-01-01T00:00:00+00:00</updated>
65 <published>1970-01-01T00:00:00+00:00</published>
65 <published>1970-01-01T00:00:00+00:00</published>
66 <content type="xhtml">
66 <content type="xhtml">
67 <div xmlns="http://www.w3.org/1999/xhtml">
67 <div xmlns="http://www.w3.org/1999/xhtml">
68 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
68 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
69 </div>
69 </div>
70 </content>
70 </content>
71 </entry>
71 </entry>
72 <entry>
72 <entry>
73 <title>base</title>
73 <title>base</title>
74 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
74 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
75 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
75 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
76 <author>
76 <author>
77 <name>test</name>
77 <name>test</name>
78 <email>&#116;&#101;&#115;&#116;</email>
78 <email>&#116;&#101;&#115;&#116;</email>
79 </author>
79 </author>
80 <updated>1970-01-01T00:00:00+00:00</updated>
80 <updated>1970-01-01T00:00:00+00:00</updated>
81 <published>1970-01-01T00:00:00+00:00</published>
81 <published>1970-01-01T00:00:00+00:00</published>
82 <content type="xhtml">
82 <content type="xhtml">
83 <div xmlns="http://www.w3.org/1999/xhtml">
83 <div xmlns="http://www.w3.org/1999/xhtml">
84 <pre xml:space="preserve">base</pre>
84 <pre xml:space="preserve">base</pre>
85 </div>
85 </div>
86 </content>
86 </content>
87 </entry>
87 </entry>
88
88
89 </feed>
89 </feed>
90 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom'
90 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom'
91 200 Script output follows
91 200 Script output follows
92
92
93 <?xml version="1.0" encoding="ascii"?>
93 <?xml version="1.0" encoding="ascii"?>
94 <feed xmlns="http://www.w3.org/2005/Atom">
94 <feed xmlns="http://www.w3.org/2005/Atom">
95 <!-- Changelog -->
95 <!-- Changelog -->
96 <id>http://*:$HGPORT/</id> (glob)
96 <id>http://*:$HGPORT/</id> (glob)
97 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
97 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
98 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
98 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
99 <title>test Changelog</title>
99 <title>test Changelog</title>
100 <updated>1970-01-01T00:00:00+00:00</updated>
100 <updated>1970-01-01T00:00:00+00:00</updated>
101
101
102 <entry>
102 <entry>
103 <title>branch</title>
103 <title>branch</title>
104 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
104 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
105 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
105 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
106 <author>
106 <author>
107 <name>test</name>
107 <name>test</name>
108 <email>&#116;&#101;&#115;&#116;</email>
108 <email>&#116;&#101;&#115;&#116;</email>
109 </author>
109 </author>
110 <updated>1970-01-01T00:00:00+00:00</updated>
110 <updated>1970-01-01T00:00:00+00:00</updated>
111 <published>1970-01-01T00:00:00+00:00</published>
111 <published>1970-01-01T00:00:00+00:00</published>
112 <content type="xhtml">
112 <content type="xhtml">
113 <div xmlns="http://www.w3.org/1999/xhtml">
113 <div xmlns="http://www.w3.org/1999/xhtml">
114 <pre xml:space="preserve">branch</pre>
114 <pre xml:space="preserve">branch</pre>
115 </div>
115 </div>
116 </content>
116 </content>
117 </entry>
117 </entry>
118 <entry>
118 <entry>
119 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
119 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
120 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
120 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
121 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
121 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
122 <author>
122 <author>
123 <name>test</name>
123 <name>test</name>
124 <email>&#116;&#101;&#115;&#116;</email>
124 <email>&#116;&#101;&#115;&#116;</email>
125 </author>
125 </author>
126 <updated>1970-01-01T00:00:00+00:00</updated>
126 <updated>1970-01-01T00:00:00+00:00</updated>
127 <published>1970-01-01T00:00:00+00:00</published>
127 <published>1970-01-01T00:00:00+00:00</published>
128 <content type="xhtml">
128 <content type="xhtml">
129 <div xmlns="http://www.w3.org/1999/xhtml">
129 <div xmlns="http://www.w3.org/1999/xhtml">
130 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
130 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
131 </div>
131 </div>
132 </content>
132 </content>
133 </entry>
133 </entry>
134 <entry>
134 <entry>
135 <title>base</title>
135 <title>base</title>
136 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
136 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
137 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
137 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
138 <author>
138 <author>
139 <name>test</name>
139 <name>test</name>
140 <email>&#116;&#101;&#115;&#116;</email>
140 <email>&#116;&#101;&#115;&#116;</email>
141 </author>
141 </author>
142 <updated>1970-01-01T00:00:00+00:00</updated>
142 <updated>1970-01-01T00:00:00+00:00</updated>
143 <published>1970-01-01T00:00:00+00:00</published>
143 <published>1970-01-01T00:00:00+00:00</published>
144 <content type="xhtml">
144 <content type="xhtml">
145 <div xmlns="http://www.w3.org/1999/xhtml">
145 <div xmlns="http://www.w3.org/1999/xhtml">
146 <pre xml:space="preserve">base</pre>
146 <pre xml:space="preserve">base</pre>
147 </div>
147 </div>
148 </content>
148 </content>
149 </entry>
149 </entry>
150
150
151 </feed>
151 </feed>
152 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom'
152 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom'
153 200 Script output follows
153 200 Script output follows
154
154
155 <?xml version="1.0" encoding="ascii"?>
155 <?xml version="1.0" encoding="ascii"?>
156 <feed xmlns="http://www.w3.org/2005/Atom">
156 <feed xmlns="http://www.w3.org/2005/Atom">
157 <id>http://*:$HGPORT/atom-log/tip/foo</id> (glob)
157 <id>http://*:$HGPORT/atom-log/tip/foo</id> (glob)
158 <link rel="self" href="http://*:$HGPORT/atom-log/tip/foo"/> (glob)
158 <link rel="self" href="http://*:$HGPORT/atom-log/tip/foo"/> (glob)
159 <title>test: foo history</title>
159 <title>test: foo history</title>
160 <updated>1970-01-01T00:00:00+00:00</updated>
160 <updated>1970-01-01T00:00:00+00:00</updated>
161
161
162 <entry>
162 <entry>
163 <title>base</title>
163 <title>base</title>
164 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
164 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
165 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
165 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
166 <author>
166 <author>
167 <name>test</name>
167 <name>test</name>
168 <email>&#116;&#101;&#115;&#116;</email>
168 <email>&#116;&#101;&#115;&#116;</email>
169 </author>
169 </author>
170 <updated>1970-01-01T00:00:00+00:00</updated>
170 <updated>1970-01-01T00:00:00+00:00</updated>
171 <published>1970-01-01T00:00:00+00:00</published>
171 <published>1970-01-01T00:00:00+00:00</published>
172 <content type="xhtml">
172 <content type="xhtml">
173 <div xmlns="http://www.w3.org/1999/xhtml">
173 <div xmlns="http://www.w3.org/1999/xhtml">
174 <pre xml:space="preserve">base</pre>
174 <pre xml:space="preserve">base</pre>
175 </div>
175 </div>
176 </content>
176 </content>
177 </entry>
177 </entry>
178
178
179 </feed>
179 </feed>
180 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/'
180 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/'
181 200 Script output follows
181 200 Script output follows
182
182
183 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
183 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
184 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
184 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
185 <head>
185 <head>
186 <link rel="icon" href="/static/hgicon.png" type="image/png" />
186 <link rel="icon" href="/static/hgicon.png" type="image/png" />
187 <meta name="robots" content="index, nofollow" />
187 <meta name="robots" content="index, nofollow" />
188 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
188 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
189
189
190 <title>test: log</title>
190 <title>test: log</title>
191 <link rel="alternate" type="application/atom+xml"
191 <link rel="alternate" type="application/atom+xml"
192 href="/atom-log" title="Atom feed for test" />
192 href="/atom-log" title="Atom feed for test" />
193 <link rel="alternate" type="application/rss+xml"
193 <link rel="alternate" type="application/rss+xml"
194 href="/rss-log" title="RSS feed for test" />
194 href="/rss-log" title="RSS feed for test" />
195 </head>
195 </head>
196 <body>
196 <body>
197
197
198 <div class="container">
198 <div class="container">
199 <div class="menu">
199 <div class="menu">
200 <div class="logo">
200 <div class="logo">
201 <a href="http://mercurial.selenic.com/">
201 <a href="http://mercurial.selenic.com/">
202 <img src="/static/hglogo.png" alt="mercurial" /></a>
202 <img src="/static/hglogo.png" alt="mercurial" /></a>
203 </div>
203 </div>
204 <ul>
204 <ul>
205 <li class="active">log</li>
205 <li class="active">log</li>
206 <li><a href="/graph/1d22e65f027e">graph</a></li>
206 <li><a href="/graph/1d22e65f027e">graph</a></li>
207 <li><a href="/tags">tags</a></li>
207 <li><a href="/tags">tags</a></li>
208 <li><a href="/bookmarks">bookmarks</a></li>
208 <li><a href="/bookmarks">bookmarks</a></li>
209 <li><a href="/branches">branches</a></li>
209 <li><a href="/branches">branches</a></li>
210 </ul>
210 </ul>
211 <ul>
211 <ul>
212 <li><a href="/rev/1d22e65f027e">changeset</a></li>
212 <li><a href="/rev/1d22e65f027e">changeset</a></li>
213 <li><a href="/file/1d22e65f027e">browse</a></li>
213 <li><a href="/file/1d22e65f027e">browse</a></li>
214 </ul>
214 </ul>
215 <ul>
215 <ul>
216
216
217 </ul>
217 </ul>
218 <ul>
218 <ul>
219 <li><a href="/help">help</a></li>
219 <li><a href="/help">help</a></li>
220 </ul>
220 </ul>
221 </div>
221 </div>
222
222
223 <div class="main">
223 <div class="main">
224 <h2><a href="/">test</a></h2>
224 <h2><a href="/">test</a></h2>
225 <h3>log</h3>
225 <h3>log</h3>
226
226
227 <form class="search" action="/log">
227 <form class="search" action="/log">
228
228
229 <p><input name="rev" id="search1" type="text" size="30" /></p>
229 <p><input name="rev" id="search1" type="text" size="30" /></p>
230 <div id="hint">find changesets by author, revision,
230 <div id="hint">find changesets by author, revision,
231 files, or words in the commit message</div>
231 files, or words in the commit message</div>
232 </form>
232 </form>
233
233
234 <div class="navigate">
234 <div class="navigate">
235 <a href="/shortlog/2?revcount=30">less</a>
235 <a href="/shortlog/2?revcount=30">less</a>
236 <a href="/shortlog/2?revcount=120">more</a>
236 <a href="/shortlog/2?revcount=120">more</a>
237 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
237 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
238 </div>
238 </div>
239
239
240 <table class="bigtable">
240 <table class="bigtable">
241 <tr>
241 <tr>
242 <th class="age">age</th>
242 <th class="age">age</th>
243 <th class="author">author</th>
243 <th class="author">author</th>
244 <th class="description">description</th>
244 <th class="description">description</th>
245 </tr>
245 </tr>
246 <tr class="parity0">
246 <tr class="parity0">
247 <td class="age">1970-01-01</td>
247 <td class="age">1970-01-01</td>
248 <td class="author">test</td>
248 <td class="author">test</td>
249 <td class="description"><a href="/rev/1d22e65f027e">branch</a><span class="branchhead">stable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
249 <td class="description"><a href="/rev/1d22e65f027e">branch</a><span class="branchhead">stable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
250 </tr>
250 </tr>
251 <tr class="parity1">
251 <tr class="parity1">
252 <td class="age">1970-01-01</td>
252 <td class="age">1970-01-01</td>
253 <td class="author">test</td>
253 <td class="author">test</td>
254 <td class="description"><a href="/rev/a4f92ed23982">Added tag 1.0 for changeset 2ef0ac749a14</a><span class="branchhead">default</span> </td>
254 <td class="description"><a href="/rev/a4f92ed23982">Added tag 1.0 for changeset 2ef0ac749a14</a><span class="branchhead">default</span> </td>
255 </tr>
255 </tr>
256 <tr class="parity0">
256 <tr class="parity0">
257 <td class="age">1970-01-01</td>
257 <td class="age">1970-01-01</td>
258 <td class="author">test</td>
258 <td class="author">test</td>
259 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
259 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
260 </tr>
260 </tr>
261
261
262 </table>
262 </table>
263
263
264 <div class="navigate">
264 <div class="navigate">
265 <a href="/shortlog/2?revcount=30">less</a>
265 <a href="/shortlog/2?revcount=30">less</a>
266 <a href="/shortlog/2?revcount=120">more</a>
266 <a href="/shortlog/2?revcount=120">more</a>
267 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
267 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
268 </div>
268 </div>
269
269
270 </div>
270 </div>
271 </div>
271 </div>
272
272
273
273
274
274
275 </body>
275 </body>
276 </html>
276 </html>
277
277
278 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/0/'
278 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/0/'
279 200 Script output follows
279 200 Script output follows
280
280
281 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
281 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
282 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
282 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
283 <head>
283 <head>
284 <link rel="icon" href="/static/hgicon.png" type="image/png" />
284 <link rel="icon" href="/static/hgicon.png" type="image/png" />
285 <meta name="robots" content="index, nofollow" />
285 <meta name="robots" content="index, nofollow" />
286 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
286 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
287
287
288 <title>test: 2ef0ac749a14</title>
288 <title>test: 2ef0ac749a14</title>
289 </head>
289 </head>
290 <body>
290 <body>
291 <div class="container">
291 <div class="container">
292 <div class="menu">
292 <div class="menu">
293 <div class="logo">
293 <div class="logo">
294 <a href="http://mercurial.selenic.com/">
294 <a href="http://mercurial.selenic.com/">
295 <img src="/static/hglogo.png" alt="mercurial" /></a>
295 <img src="/static/hglogo.png" alt="mercurial" /></a>
296 </div>
296 </div>
297 <ul>
297 <ul>
298 <li><a href="/shortlog/2ef0ac749a14">log</a></li>
298 <li><a href="/shortlog/2ef0ac749a14">log</a></li>
299 <li><a href="/graph/2ef0ac749a14">graph</a></li>
299 <li><a href="/graph/2ef0ac749a14">graph</a></li>
300 <li><a href="/tags">tags</a></li>
300 <li><a href="/tags">tags</a></li>
301 <li><a href="/bookmarks">bookmarks</a></li>
301 <li><a href="/bookmarks">bookmarks</a></li>
302 <li><a href="/branches">branches</a></li>
302 <li><a href="/branches">branches</a></li>
303 </ul>
303 </ul>
304 <ul>
304 <ul>
305 <li class="active">changeset</li>
305 <li class="active">changeset</li>
306 <li><a href="/raw-rev/2ef0ac749a14">raw</a></li>
306 <li><a href="/raw-rev/2ef0ac749a14">raw</a></li>
307 <li><a href="/file/2ef0ac749a14">browse</a></li>
307 <li><a href="/file/2ef0ac749a14">browse</a></li>
308 </ul>
308 </ul>
309 <ul>
309 <ul>
310
310
311 </ul>
311 </ul>
312 <ul>
312 <ul>
313 <li><a href="/help">help</a></li>
313 <li><a href="/help">help</a></li>
314 </ul>
314 </ul>
315 </div>
315 </div>
316
316
317 <div class="main">
317 <div class="main">
318
318
319 <h2><a href="/">test</a></h2>
319 <h2><a href="/">test</a></h2>
320 <h3>changeset 0:2ef0ac749a14 <span class="tag">1.0</span> </h3>
320 <h3>changeset 0:2ef0ac749a14 <span class="tag">1.0</span> </h3>
321
321
322 <form class="search" action="/log">
322 <form class="search" action="/log">
323
323
324 <p><input name="rev" id="search1" type="text" size="30" /></p>
324 <p><input name="rev" id="search1" type="text" size="30" /></p>
325 <div id="hint">find changesets by author, revision,
325 <div id="hint">find changesets by author, revision,
326 files, or words in the commit message</div>
326 files, or words in the commit message</div>
327 </form>
327 </form>
328
328
329 <div class="description">base</div>
329 <div class="description">base</div>
330
330
331 <table id="changesetEntry">
331 <table id="changesetEntry">
332 <tr>
332 <tr>
333 <th class="author">author</th>
333 <th class="author">author</th>
334 <td class="author">&#116;&#101;&#115;&#116;</td>
334 <td class="author">&#116;&#101;&#115;&#116;</td>
335 </tr>
335 </tr>
336 <tr>
336 <tr>
337 <th class="date">date</th>
337 <th class="date">date</th>
338 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td></tr>
338 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td></tr>
339 <tr>
339 <tr>
340 <th class="author">parents</th>
340 <th class="author">parents</th>
341 <td class="author"></td>
341 <td class="author"></td>
342 </tr>
342 </tr>
343 <tr>
343 <tr>
344 <th class="author">children</th>
344 <th class="author">children</th>
345 <td class="author"> <a href="/rev/a4f92ed23982">a4f92ed23982</a></td>
345 <td class="author"> <a href="/rev/a4f92ed23982">a4f92ed23982</a></td>
346 </tr>
346 </tr>
347 <tr>
347 <tr>
348 <th class="files">files</th>
348 <th class="files">files</th>
349 <td class="files"><a href="/file/2ef0ac749a14/da/foo">da/foo</a> <a href="/file/2ef0ac749a14/foo">foo</a> </td>
349 <td class="files"><a href="/file/2ef0ac749a14/da/foo">da/foo</a> <a href="/file/2ef0ac749a14/foo">foo</a> </td>
350 </tr>
350 </tr>
351 </table>
351 </table>
352
352
353 <div class="overflow">
353 <div class="overflow">
354 <div class="sourcefirst"> line diff</div>
354 <div class="sourcefirst"> line diff</div>
355
355
356 <div class="source bottomline parity0"><pre><a href="#l1.1" id="l1.1"> 1.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
356 <div class="source bottomline parity0"><pre><a href="#l1.1" id="l1.1"> 1.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
357 </span><a href="#l1.2" id="l1.2"> 1.2</a> <span class="plusline">+++ b/da/foo Thu Jan 01 00:00:00 1970 +0000
357 </span><a href="#l1.2" id="l1.2"> 1.2</a> <span class="plusline">+++ b/da/foo Thu Jan 01 00:00:00 1970 +0000
358 </span><a href="#l1.3" id="l1.3"> 1.3</a> <span class="atline">@@ -0,0 +1,1 @@
358 </span><a href="#l1.3" id="l1.3"> 1.3</a> <span class="atline">@@ -0,0 +1,1 @@
359 </span><a href="#l1.4" id="l1.4"> 1.4</a> <span class="plusline">+foo
359 </span><a href="#l1.4" id="l1.4"> 1.4</a> <span class="plusline">+foo
360 </span></pre></div><div class="source bottomline parity1"><pre><a href="#l2.1" id="l2.1"> 2.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
360 </span></pre></div><div class="source bottomline parity1"><pre><a href="#l2.1" id="l2.1"> 2.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
361 </span><a href="#l2.2" id="l2.2"> 2.2</a> <span class="plusline">+++ b/foo Thu Jan 01 00:00:00 1970 +0000
361 </span><a href="#l2.2" id="l2.2"> 2.2</a> <span class="plusline">+++ b/foo Thu Jan 01 00:00:00 1970 +0000
362 </span><a href="#l2.3" id="l2.3"> 2.3</a> <span class="atline">@@ -0,0 +1,1 @@
362 </span><a href="#l2.3" id="l2.3"> 2.3</a> <span class="atline">@@ -0,0 +1,1 @@
363 </span><a href="#l2.4" id="l2.4"> 2.4</a> <span class="plusline">+foo
363 </span><a href="#l2.4" id="l2.4"> 2.4</a> <span class="plusline">+foo
364 </span></pre></div>
364 </span></pre></div>
365 </div>
365 </div>
366
366
367 </div>
367 </div>
368 </div>
368 </div>
369
369
370
370
371 </body>
371 </body>
372 </html>
372 </html>
373
373
374 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'
374 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'
375 200 Script output follows
375 200 Script output follows
376
376
377
377
378 # HG changeset patch
378 # HG changeset patch
379 # User test
379 # User test
380 # Date 0 0
380 # Date 0 0
381 # Node ID a4f92ed23982be056b9852de5dfe873eaac7f0de
381 # Node ID a4f92ed23982be056b9852de5dfe873eaac7f0de
382 # Parent 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
382 # Parent 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
383 Added tag 1.0 for changeset 2ef0ac749a14
383 Added tag 1.0 for changeset 2ef0ac749a14
384
384
385 diff -r 2ef0ac749a14 -r a4f92ed23982 .hgtags
385 diff -r 2ef0ac749a14 -r a4f92ed23982 .hgtags
386 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
386 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
387 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
387 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
388 @@ -0,0 +1,1 @@
388 @@ -0,0 +1,1 @@
389 +2ef0ac749a14e4f57a5a822464a0902c6f7f448f 1.0
389 +2ef0ac749a14e4f57a5a822464a0902c6f7f448f 1.0
390
390
391 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log?rev=base'
391 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log?rev=base'
392 200 Script output follows
392 200 Script output follows
393
393
394 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
394 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
395 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
395 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
396 <head>
396 <head>
397 <link rel="icon" href="/static/hgicon.png" type="image/png" />
397 <link rel="icon" href="/static/hgicon.png" type="image/png" />
398 <meta name="robots" content="index, nofollow" />
398 <meta name="robots" content="index, nofollow" />
399 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
399 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
400
400
401 <title>test: searching for base</title>
401 <title>test: searching for base</title>
402 </head>
402 </head>
403 <body>
403 <body>
404
404
405 <div class="container">
405 <div class="container">
406 <div class="menu">
406 <div class="menu">
407 <div class="logo">
407 <div class="logo">
408 <a href="http://mercurial.selenic.com/">
408 <a href="http://mercurial.selenic.com/">
409 <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
409 <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
410 </div>
410 </div>
411 <ul>
411 <ul>
412 <li><a href="/shortlog">log</a></li>
412 <li><a href="/shortlog">log</a></li>
413 <li><a href="/graph">graph</a></li>
413 <li><a href="/graph">graph</a></li>
414 <li><a href="/tags">tags</a></li>
414 <li><a href="/tags">tags</a></li>
415 <li><a href="/bookmarks">bookmarks</a></li>
415 <li><a href="/bookmarks">bookmarks</a></li>
416 <li><a href="/branches">branches</a></li>
416 <li><a href="/branches">branches</a></li>
417 <li><a href="/help">help</a></li>
417 <li><a href="/help">help</a></li>
418 </ul>
418 </ul>
419 </div>
419 </div>
420
420
421 <div class="main">
421 <div class="main">
422 <h2><a href="/">test</a></h2>
422 <h2><a href="/">test</a></h2>
423 <h3>searching for 'base'</h3>
423 <h3>searching for 'base'</h3>
424
424
425 <form class="search" action="/log">
425 <form class="search" action="/log">
426
426
427 <p><input name="rev" id="search1" type="text" size="30"></p>
427 <p><input name="rev" id="search1" type="text" size="30"></p>
428 <div id="hint">find changesets by author, revision,
428 <div id="hint">find changesets by author, revision,
429 files, or words in the commit message</div>
429 files, or words in the commit message</div>
430 </form>
430 </form>
431
431
432 <div class="navigate">
432 <div class="navigate">
433 <a href="/search/?rev=base&revcount=5">less</a>
433 <a href="/search/?rev=base&revcount=5">less</a>
434 <a href="/search/?rev=base&revcount=20">more</a>
434 <a href="/search/?rev=base&revcount=20">more</a>
435 </div>
435 </div>
436
436
437 <table class="bigtable">
437 <table class="bigtable">
438 <tr>
438 <tr>
439 <th class="age">age</th>
439 <th class="age">age</th>
440 <th class="author">author</th>
440 <th class="author">author</th>
441 <th class="description">description</th>
441 <th class="description">description</th>
442 </tr>
442 </tr>
443 <tr class="parity0">
443 <tr class="parity0">
444 <td class="age">1970-01-01</td>
444 <td class="age">1970-01-01</td>
445 <td class="author">test</td>
445 <td class="author">test</td>
446 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
446 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
447 </tr>
447 </tr>
448
448
449 </table>
449 </table>
450
450
451 <div class="navigate">
451 <div class="navigate">
452 <a href="/search/?rev=base&revcount=5">less</a>
452 <a href="/search/?rev=base&revcount=5">less</a>
453 <a href="/search/?rev=base&revcount=20">more</a>
453 <a href="/search/?rev=base&revcount=20">more</a>
454 </div>
454 </div>
455
455
456 </div>
456 </div>
457 </div>
457 </div>
458
458
459
459
460
460
461 </body>
461 </body>
462 </html>
462 </html>
463
463
464
464
465 File-related
465 File-related
466
466
467 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
467 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
468 200 Script output follows
468 200 Script output follows
469
469
470 foo
470 foo
471 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
471 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
472 200 Script output follows
472 200 Script output follows
473
473
474
474
475 test@0: foo
475 test@0: foo
476
476
477
477
478
478
479
479
480 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
480 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
481 200 Script output follows
481 200 Script output follows
482
482
483
483
484 drwxr-xr-x da
484 drwxr-xr-x da
485 -rw-r--r-- 45 .hgtags
485 -rw-r--r-- 45 .hgtags
486 -rw-r--r-- 4 foo
486 -rw-r--r-- 4 foo
487
487
488
488
489 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo'
489 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo'
490 200 Script output follows
490 200 Script output follows
491
491
492 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
492 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
493 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
493 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
494 <head>
494 <head>
495 <link rel="icon" href="/static/hgicon.png" type="image/png" />
495 <link rel="icon" href="/static/hgicon.png" type="image/png" />
496 <meta name="robots" content="index, nofollow" />
496 <meta name="robots" content="index, nofollow" />
497 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
497 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
498
498
499 <title>test: a4f92ed23982 foo</title>
499 <title>test: a4f92ed23982 foo</title>
500 </head>
500 </head>
501 <body>
501 <body>
502
502
503 <div class="container">
503 <div class="container">
504 <div class="menu">
504 <div class="menu">
505 <div class="logo">
505 <div class="logo">
506 <a href="http://mercurial.selenic.com/">
506 <a href="http://mercurial.selenic.com/">
507 <img src="/static/hglogo.png" alt="mercurial" /></a>
507 <img src="/static/hglogo.png" alt="mercurial" /></a>
508 </div>
508 </div>
509 <ul>
509 <ul>
510 <li><a href="/shortlog/a4f92ed23982">log</a></li>
510 <li><a href="/shortlog/a4f92ed23982">log</a></li>
511 <li><a href="/graph/a4f92ed23982">graph</a></li>
511 <li><a href="/graph/a4f92ed23982">graph</a></li>
512 <li><a href="/tags">tags</a></li>
512 <li><a href="/tags">tags</a></li>
513 <li><a href="/branches">branches</a></li>
513 <li><a href="/branches">branches</a></li>
514 </ul>
514 </ul>
515 <ul>
515 <ul>
516 <li><a href="/rev/a4f92ed23982">changeset</a></li>
516 <li><a href="/rev/a4f92ed23982">changeset</a></li>
517 <li><a href="/file/a4f92ed23982/">browse</a></li>
517 <li><a href="/file/a4f92ed23982/">browse</a></li>
518 </ul>
518 </ul>
519 <ul>
519 <ul>
520 <li class="active">file</li>
520 <li class="active">file</li>
521 <li><a href="/file/tip/foo">latest</a></li>
521 <li><a href="/file/tip/foo">latest</a></li>
522 <li><a href="/diff/a4f92ed23982/foo">diff</a></li>
522 <li><a href="/diff/a4f92ed23982/foo">diff</a></li>
523 <li><a href="/annotate/a4f92ed23982/foo">annotate</a></li>
523 <li><a href="/annotate/a4f92ed23982/foo">annotate</a></li>
524 <li><a href="/log/a4f92ed23982/foo">file log</a></li>
524 <li><a href="/log/a4f92ed23982/foo">file log</a></li>
525 <li><a href="/raw-file/a4f92ed23982/foo">raw</a></li>
525 <li><a href="/raw-file/a4f92ed23982/foo">raw</a></li>
526 </ul>
526 </ul>
527 <ul>
527 <ul>
528 <li><a href="/help">help</a></li>
528 <li><a href="/help">help</a></li>
529 </ul>
529 </ul>
530 </div>
530 </div>
531
531
532 <div class="main">
532 <div class="main">
533 <h2><a href="/">test</a></h2>
533 <h2><a href="/">test</a></h2>
534 <h3>view foo @ 1:a4f92ed23982</h3>
534 <h3>view foo @ 1:a4f92ed23982</h3>
535
535
536 <form class="search" action="/log">
536 <form class="search" action="/log">
537
537
538 <p><input name="rev" id="search1" type="text" size="30" /></p>
538 <p><input name="rev" id="search1" type="text" size="30" /></p>
539 <div id="hint">find changesets by author, revision,
539 <div id="hint">find changesets by author, revision,
540 files, or words in the commit message</div>
540 files, or words in the commit message</div>
541 </form>
541 </form>
542
542
543 <div class="description">Added tag 1.0 for changeset 2ef0ac749a14</div>
543 <div class="description">Added tag 1.0 for changeset 2ef0ac749a14</div>
544
544
545 <table id="changesetEntry">
545 <table id="changesetEntry">
546 <tr>
546 <tr>
547 <th class="author">author</th>
547 <th class="author">author</th>
548 <td class="author">&#116;&#101;&#115;&#116;</td>
548 <td class="author">&#116;&#101;&#115;&#116;</td>
549 </tr>
549 </tr>
550 <tr>
550 <tr>
551 <th class="date">date</th>
551 <th class="date">date</th>
552 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td>
552 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td>
553 </tr>
553 </tr>
554 <tr>
554 <tr>
555 <th class="author">parents</th>
555 <th class="author">parents</th>
556 <td class="author"></td>
556 <td class="author"></td>
557 </tr>
557 </tr>
558 <tr>
558 <tr>
559 <th class="author">children</th>
559 <th class="author">children</th>
560 <td class="author"><a href="/file/1d22e65f027e/foo">1d22e65f027e</a> </td>
560 <td class="author"><a href="/file/1d22e65f027e/foo">1d22e65f027e</a> </td>
561 </tr>
561 </tr>
562
562
563 </table>
563 </table>
564
564
565 <div class="overflow">
565 <div class="overflow">
566 <div class="sourcefirst"> line source</div>
566 <div class="sourcefirst"> line source</div>
567
567
568 <div class="parity0 source"><a href="#l1" id="l1"> 1</a> foo
568 <div class="parity0 source"><a href="#l1" id="l1"> 1</a> foo
569 </div>
569 </div>
570 <div class="sourcelast"></div>
570 <div class="sourcelast"></div>
571 </div>
571 </div>
572 </div>
572 </div>
573 </div>
573 </div>
574
574
575
575
576
576
577 </body>
577 </body>
578 </html>
578 </html>
579
579
580 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'
580 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'
581 200 Script output follows
581 200 Script output follows
582
582
583
583
584 diff -r 000000000000 -r a4f92ed23982 foo
584 diff -r 000000000000 -r a4f92ed23982 foo
585 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
585 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
586 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
586 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
587 @@ -0,0 +1,1 @@
587 @@ -0,0 +1,1 @@
588 +foo
588 +foo
589
589
590
590
591
591
592
592
593
593
594 Overviews
594 Overviews
595
595
596 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-tags'
596 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-tags'
597 200 Script output follows
597 200 Script output follows
598
598
599 tip 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
599 tip 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
600 1.0 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
600 1.0 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
601 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-branches'
601 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-branches'
602 200 Script output follows
602 200 Script output follows
603
603
604 stable 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe open
604 stable 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe open
605 default a4f92ed23982be056b9852de5dfe873eaac7f0de inactive
605 default a4f92ed23982be056b9852de5dfe873eaac7f0de inactive
606 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb'
606 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb'
607 200 Script output follows
607 200 Script output follows
608
608
609 <?xml version="1.0" encoding="ascii"?>
609 <?xml version="1.0" encoding="ascii"?>
610 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
610 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
611 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
611 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
612 <head>
612 <head>
613 <link rel="icon" href="/static/hgicon.png" type="image/png" />
613 <link rel="icon" href="/static/hgicon.png" type="image/png" />
614 <meta name="robots" content="index, nofollow"/>
614 <meta name="robots" content="index, nofollow"/>
615 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
615 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
616
616
617
617
618 <title>test: Summary</title>
618 <title>test: Summary</title>
619 <link rel="alternate" type="application/atom+xml"
619 <link rel="alternate" type="application/atom+xml"
620 href="/atom-log" title="Atom feed for test"/>
620 href="/atom-log" title="Atom feed for test"/>
621 <link rel="alternate" type="application/rss+xml"
621 <link rel="alternate" type="application/rss+xml"
622 href="/rss-log" title="RSS feed for test"/>
622 href="/rss-log" title="RSS feed for test"/>
623 </head>
623 </head>
624 <body>
624 <body>
625
625
626 <div class="page_header">
626 <div class="page_header">
627 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / summary
627 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / summary
628
628
629 <form action="/log">
629 <form action="/log">
630 <input type="hidden" name="style" value="gitweb" />
630 <input type="hidden" name="style" value="gitweb" />
631 <div class="search">
631 <div class="search">
632 <input type="text" name="rev" />
632 <input type="text" name="rev" />
633 </div>
633 </div>
634 </form>
634 </form>
635 </div>
635 </div>
636
636
637 <div class="page_nav">
637 <div class="page_nav">
638 summary |
638 summary |
639 <a href="/shortlog?style=gitweb">shortlog</a> |
639 <a href="/shortlog?style=gitweb">shortlog</a> |
640 <a href="/log?style=gitweb">changelog</a> |
640 <a href="/log?style=gitweb">changelog</a> |
641 <a href="/graph?style=gitweb">graph</a> |
641 <a href="/graph?style=gitweb">graph</a> |
642 <a href="/tags?style=gitweb">tags</a> |
642 <a href="/tags?style=gitweb">tags</a> |
643 <a href="/branches?style=gitweb">branches</a> |
643 <a href="/branches?style=gitweb">branches</a> |
644 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
644 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
645 <a href="/help?style=gitweb">help</a>
645 <a href="/help?style=gitweb">help</a>
646 <br/>
646 <br/>
647 </div>
647 </div>
648
648
649 <div class="title">&nbsp;</div>
649 <div class="title">&nbsp;</div>
650 <table cellspacing="0">
650 <table cellspacing="0">
651 <tr><td>description</td><td>unknown</td></tr>
651 <tr><td>description</td><td>unknown</td></tr>
652 <tr><td>owner</td><td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td></tr>
652 <tr><td>owner</td><td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td></tr>
653 <tr><td>last change</td><td>Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
653 <tr><td>last change</td><td>Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
654 </table>
654 </table>
655
655
656 <div><a class="title" href="/shortlog?style=gitweb">changes</a></div>
656 <div><a class="title" href="/shortlog?style=gitweb">changes</a></div>
657 <table cellspacing="0">
657 <table cellspacing="0">
658
658
659 <tr class="parity0">
659 <tr class="parity0">
660 <td class="age"><i>1970-01-01</i></td>
660 <td class="age"><i>1970-01-01</i></td>
661 <td><i>test</i></td>
661 <td><i>test</i></td>
662 <td>
662 <td>
663 <a class="list" href="/rev/1d22e65f027e?style=gitweb">
663 <a class="list" href="/rev/1d22e65f027e?style=gitweb">
664 <b>branch</b>
664 <b>branch</b>
665 <span class="logtags"><span class="branchtag" title="stable">stable</span> <span class="tagtag" title="tip">tip</span> </span>
665 <span class="logtags"><span class="branchtag" title="stable">stable</span> <span class="tagtag" title="tip">tip</span> </span>
666 </a>
666 </a>
667 </td>
667 </td>
668 <td class="link" nowrap>
668 <td class="link" nowrap>
669 <a href="/rev/1d22e65f027e?style=gitweb">changeset</a> |
669 <a href="/rev/1d22e65f027e?style=gitweb">changeset</a> |
670 <a href="/file/1d22e65f027e?style=gitweb">files</a>
670 <a href="/file/1d22e65f027e?style=gitweb">files</a>
671 </td>
671 </td>
672 </tr>
672 </tr>
673 <tr class="parity1">
673 <tr class="parity1">
674 <td class="age"><i>1970-01-01</i></td>
674 <td class="age"><i>1970-01-01</i></td>
675 <td><i>test</i></td>
675 <td><i>test</i></td>
676 <td>
676 <td>
677 <a class="list" href="/rev/a4f92ed23982?style=gitweb">
677 <a class="list" href="/rev/a4f92ed23982?style=gitweb">
678 <b>Added tag 1.0 for changeset 2ef0ac749a14</b>
678 <b>Added tag 1.0 for changeset 2ef0ac749a14</b>
679 <span class="logtags"><span class="branchtag" title="default">default</span> </span>
679 <span class="logtags"><span class="branchtag" title="default">default</span> </span>
680 </a>
680 </a>
681 </td>
681 </td>
682 <td class="link" nowrap>
682 <td class="link" nowrap>
683 <a href="/rev/a4f92ed23982?style=gitweb">changeset</a> |
683 <a href="/rev/a4f92ed23982?style=gitweb">changeset</a> |
684 <a href="/file/a4f92ed23982?style=gitweb">files</a>
684 <a href="/file/a4f92ed23982?style=gitweb">files</a>
685 </td>
685 </td>
686 </tr>
686 </tr>
687 <tr class="parity0">
687 <tr class="parity0">
688 <td class="age"><i>1970-01-01</i></td>
688 <td class="age"><i>1970-01-01</i></td>
689 <td><i>test</i></td>
689 <td><i>test</i></td>
690 <td>
690 <td>
691 <a class="list" href="/rev/2ef0ac749a14?style=gitweb">
691 <a class="list" href="/rev/2ef0ac749a14?style=gitweb">
692 <b>base</b>
692 <b>base</b>
693 <span class="logtags"><span class="tagtag" title="1.0">1.0</span> </span>
693 <span class="logtags"><span class="tagtag" title="1.0">1.0</span> </span>
694 </a>
694 </a>
695 </td>
695 </td>
696 <td class="link" nowrap>
696 <td class="link" nowrap>
697 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
697 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
698 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
698 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
699 </td>
699 </td>
700 </tr>
700 </tr>
701 <tr class="light"><td colspan="4"><a class="list" href="/shortlog?style=gitweb">...</a></td></tr>
701 <tr class="light"><td colspan="4"><a class="list" href="/shortlog?style=gitweb">...</a></td></tr>
702 </table>
702 </table>
703
703
704 <div><a class="title" href="/tags?style=gitweb">tags</a></div>
704 <div><a class="title" href="/tags?style=gitweb">tags</a></div>
705 <table cellspacing="0">
705 <table cellspacing="0">
706
706
707 <tr class="parity0">
707 <tr class="parity0">
708 <td class="age"><i>1970-01-01</i></td>
708 <td class="age"><i>1970-01-01</i></td>
709 <td><a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>1.0</b></a></td>
709 <td><a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>1.0</b></a></td>
710 <td class="link">
710 <td class="link">
711 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
711 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
712 <a href="/log/2ef0ac749a14?style=gitweb">changelog</a> |
712 <a href="/log/2ef0ac749a14?style=gitweb">changelog</a> |
713 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
713 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
714 </td>
714 </td>
715 </tr>
715 </tr>
716 <tr class="light"><td colspan="3"><a class="list" href="/tags?style=gitweb">...</a></td></tr>
716 <tr class="light"><td colspan="3"><a class="list" href="/tags?style=gitweb">...</a></td></tr>
717 </table>
717 </table>
718
718
719 <div><a class="title" href="#">branches</a></div>
719 <div><a class="title" href="#">branches</a></div>
720 <table cellspacing="0">
720 <table cellspacing="0">
721
721
722 <tr class="parity0">
722 <tr class="parity0">
723 <td class="age"><i>1970-01-01</i></td>
723 <td class="age"><i>1970-01-01</i></td>
724 <td><a class="list" href="/shortlog/1d22e65f027e?style=gitweb"><b>1d22e65f027e</b></a></td>
724 <td><a class="list" href="/shortlog/1d22e65f027e?style=gitweb"><b>1d22e65f027e</b></a></td>
725 <td class="">stable</td>
725 <td class="">stable</td>
726 <td class="link">
726 <td class="link">
727 <a href="/changeset/1d22e65f027e?style=gitweb">changeset</a> |
727 <a href="/changeset/1d22e65f027e?style=gitweb">changeset</a> |
728 <a href="/log/1d22e65f027e?style=gitweb">changelog</a> |
728 <a href="/log/1d22e65f027e?style=gitweb">changelog</a> |
729 <a href="/file/1d22e65f027e?style=gitweb">files</a>
729 <a href="/file/1d22e65f027e?style=gitweb">files</a>
730 </td>
730 </td>
731 </tr>
731 </tr>
732 <tr class="parity1">
732 <tr class="parity1">
733 <td class="age"><i>1970-01-01</i></td>
733 <td class="age"><i>1970-01-01</i></td>
734 <td><a class="list" href="/shortlog/a4f92ed23982?style=gitweb"><b>a4f92ed23982</b></a></td>
734 <td><a class="list" href="/shortlog/a4f92ed23982?style=gitweb"><b>a4f92ed23982</b></a></td>
735 <td class="">default</td>
735 <td class="">default</td>
736 <td class="link">
736 <td class="link">
737 <a href="/changeset/a4f92ed23982?style=gitweb">changeset</a> |
737 <a href="/changeset/a4f92ed23982?style=gitweb">changeset</a> |
738 <a href="/log/a4f92ed23982?style=gitweb">changelog</a> |
738 <a href="/log/a4f92ed23982?style=gitweb">changelog</a> |
739 <a href="/file/a4f92ed23982?style=gitweb">files</a>
739 <a href="/file/a4f92ed23982?style=gitweb">files</a>
740 </td>
740 </td>
741 </tr>
741 </tr>
742 <tr class="light">
742 <tr class="light">
743 <td colspan="4"><a class="list" href="#">...</a></td>
743 <td colspan="4"><a class="list" href="#">...</a></td>
744 </tr>
744 </tr>
745 </table>
745 </table>
746 <div class="page_footer">
746 <div class="page_footer">
747 <div class="page_footer_text">test</div>
747 <div class="page_footer_text">test</div>
748 <div class="rss_logo">
748 <div class="rss_logo">
749 <a href="/rss-log">RSS</a>
749 <a href="/rss-log">RSS</a>
750 <a href="/atom-log">Atom</a>
750 <a href="/atom-log">Atom</a>
751 </div>
751 </div>
752 <br />
752 <br />
753
753
754 </div>
754 </div>
755 </body>
755 </body>
756 </html>
756 </html>
757
757
758 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/?style=gitweb'
758 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/?style=gitweb'
759 200 Script output follows
759 200 Script output follows
760
760
761 <?xml version="1.0" encoding="ascii"?>
761 <?xml version="1.0" encoding="ascii"?>
762 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
762 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
763 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
763 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
764 <head>
764 <head>
765 <link rel="icon" href="/static/hgicon.png" type="image/png" />
765 <link rel="icon" href="/static/hgicon.png" type="image/png" />
766 <meta name="robots" content="index, nofollow"/>
766 <meta name="robots" content="index, nofollow"/>
767 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
767 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
768
768
769
769
770 <title>test: Graph</title>
770 <title>test: Graph</title>
771 <link rel="alternate" type="application/atom+xml"
771 <link rel="alternate" type="application/atom+xml"
772 href="/atom-log" title="Atom feed for test"/>
772 href="/atom-log" title="Atom feed for test"/>
773 <link rel="alternate" type="application/rss+xml"
773 <link rel="alternate" type="application/rss+xml"
774 href="/rss-log" title="RSS feed for test"/>
774 href="/rss-log" title="RSS feed for test"/>
775 <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
775 <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
776 </head>
776 </head>
777 <body>
777 <body>
778
778
779 <div class="page_header">
779 <div class="page_header">
780 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / graph
780 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / graph
781 </div>
781 </div>
782
782
783 <form action="/log">
783 <form action="/log">
784 <input type="hidden" name="style" value="gitweb" />
784 <input type="hidden" name="style" value="gitweb" />
785 <div class="search">
785 <div class="search">
786 <input type="text" name="rev" />
786 <input type="text" name="rev" />
787 </div>
787 </div>
788 </form>
788 </form>
789 <div class="page_nav">
789 <div class="page_nav">
790 <a href="/summary?style=gitweb">summary</a> |
790 <a href="/summary?style=gitweb">summary</a> |
791 <a href="/shortlog?style=gitweb">shortlog</a> |
791 <a href="/shortlog?style=gitweb">shortlog</a> |
792 <a href="/log/2?style=gitweb">changelog</a> |
792 <a href="/log/2?style=gitweb">changelog</a> |
793 graph |
793 graph |
794 <a href="/tags?style=gitweb">tags</a> |
794 <a href="/tags?style=gitweb">tags</a> |
795 <a href="/branches?style=gitweb">branches</a> |
795 <a href="/branches?style=gitweb">branches</a> |
796 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
796 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
797 <a href="/help?style=gitweb">help</a>
797 <a href="/help?style=gitweb">help</a>
798 <br/>
798 <br/>
799 <a href="/graph/2?style=gitweb&revcount=30">less</a>
799 <a href="/graph/2?style=gitweb&revcount=30">less</a>
800 <a href="/graph/2?style=gitweb&revcount=120">more</a>
800 <a href="/graph/2?style=gitweb&revcount=120">more</a>
801 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
801 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
802 </div>
802 </div>
803
803
804 <div class="title">&nbsp;</div>
804 <div class="title">&nbsp;</div>
805
805
806 <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
806 <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
807
807
808 <div id="wrapper">
808 <div id="wrapper">
809 <ul id="nodebgs"></ul>
809 <ul id="nodebgs"></ul>
810 <canvas id="graph" width="480" height="129"></canvas>
810 <canvas id="graph" width="480" height="129"></canvas>
811 <ul id="graphnodes"></ul>
811 <ul id="graphnodes"></ul>
812 </div>
812 </div>
813
813
814 <script type="text/javascript" src="/static/graph.js"></script>
814 <script type="text/javascript" src="/static/graph.js"></script>
815 <script>
815 <script>
816 <!-- hide script content
816 <!-- hide script content
817
817
818 var data = [["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
818 var data = [["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
819 var graph = new Graph();
819 var graph = new Graph();
820 graph.scale(39);
820 graph.scale(39);
821
821
822 graph.edge = function(x0, y0, x1, y1, color) {
822 graph.edge = function(x0, y0, x1, y1, color) {
823
823
824 this.setColor(color, 0.0, 0.65);
824 this.setColor(color, 0.0, 0.65);
825 this.ctx.beginPath();
825 this.ctx.beginPath();
826 this.ctx.moveTo(x0, y0);
826 this.ctx.moveTo(x0, y0);
827 this.ctx.lineTo(x1, y1);
827 this.ctx.lineTo(x1, y1);
828 this.ctx.stroke();
828 this.ctx.stroke();
829
829
830 }
830 }
831
831
832 var revlink = '<li style="_STYLE"><span class="desc">';
832 var revlink = '<li style="_STYLE"><span class="desc">';
833 revlink += '<a class="list" href="/rev/_NODEID?style=gitweb" title="_NODEID"><b>_DESC</b></a>';
833 revlink += '<a class="list" href="/rev/_NODEID?style=gitweb" title="_NODEID"><b>_DESC</b></a>';
834 revlink += '</span> _TAGS';
834 revlink += '</span> _TAGS';
835 revlink += '<span class="info">_DATE, by _USER</span></li>';
835 revlink += '<span class="info">_DATE, by _USER</span></li>';
836
836
837 graph.vertex = function(x, y, color, parity, cur) {
837 graph.vertex = function(x, y, color, parity, cur) {
838
838
839 this.ctx.beginPath();
839 this.ctx.beginPath();
840 color = this.setColor(color, 0.25, 0.75);
840 color = this.setColor(color, 0.25, 0.75);
841 this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
841 this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
842 this.ctx.fill();
842 this.ctx.fill();
843
843
844 var bg = '<li class="bg parity' + parity + '"></li>';
844 var bg = '<li class="bg parity' + parity + '"></li>';
845 var left = (this.columns + 1) * this.bg_height;
845 var left = (this.columns + 1) * this.bg_height;
846 var nstyle = 'padding-left: ' + left + 'px;';
846 var nstyle = 'padding-left: ' + left + 'px;';
847 var item = revlink.replace(/_STYLE/, nstyle);
847 var item = revlink.replace(/_STYLE/, nstyle);
848 item = item.replace(/_PARITY/, 'parity' + parity);
848 item = item.replace(/_PARITY/, 'parity' + parity);
849 item = item.replace(/_NODEID/, cur[0]);
849 item = item.replace(/_NODEID/, cur[0]);
850 item = item.replace(/_NODEID/, cur[0]);
850 item = item.replace(/_NODEID/, cur[0]);
851 item = item.replace(/_DESC/, cur[3]);
851 item = item.replace(/_DESC/, cur[3]);
852 item = item.replace(/_USER/, cur[4]);
852 item = item.replace(/_USER/, cur[4]);
853 item = item.replace(/_DATE/, cur[5]);
853 item = item.replace(/_DATE/, cur[5]);
854
854
855 var tagspan = '';
855 var tagspan = '';
856 if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) {
856 if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) {
857 tagspan = '<span class="logtags">';
857 tagspan = '<span class="logtags">';
858 if (cur[6][1]) {
858 if (cur[6][1]) {
859 tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
859 tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
860 tagspan += cur[6][0] + '</span> ';
860 tagspan += cur[6][0] + '</span> ';
861 } else if (!cur[6][1] && cur[6][0] != 'default') {
861 } else if (!cur[6][1] && cur[6][0] != 'default') {
862 tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
862 tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
863 tagspan += cur[6][0] + '</span> ';
863 tagspan += cur[6][0] + '</span> ';
864 }
864 }
865 if (cur[7].length) {
865 if (cur[7].length) {
866 for (var t in cur[7]) {
866 for (var t in cur[7]) {
867 var tag = cur[7][t];
867 var tag = cur[7][t];
868 tagspan += '<span class="tagtag">' + tag + '</span> ';
868 tagspan += '<span class="tagtag">' + tag + '</span> ';
869 }
869 }
870 }
870 }
871 tagspan += '</span>';
871 tagspan += '</span>';
872 }
872 }
873
873
874 item = item.replace(/_TAGS/, tagspan);
874 item = item.replace(/_TAGS/, tagspan);
875 return [bg, item];
875 return [bg, item];
876
876
877 }
877 }
878
878
879 graph.render(data);
879 graph.render(data);
880
880
881 // stop hiding script -->
881 // stop hiding script -->
882 </script>
882 </script>
883
883
884 <div class="page_nav">
884 <div class="page_nav">
885 <a href="/graph/2?style=gitweb&revcount=30">less</a>
885 <a href="/graph/2?style=gitweb&revcount=30">less</a>
886 <a href="/graph/2?style=gitweb&revcount=120">more</a>
886 <a href="/graph/2?style=gitweb&revcount=120">more</a>
887 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a>
887 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a>
888 </div>
888 </div>
889
889
890 <div class="page_footer">
890 <div class="page_footer">
891 <div class="page_footer_text">test</div>
891 <div class="page_footer_text">test</div>
892 <div class="rss_logo">
892 <div class="rss_logo">
893 <a href="/rss-log">RSS</a>
893 <a href="/rss-log">RSS</a>
894 <a href="/atom-log">Atom</a>
894 <a href="/atom-log">Atom</a>
895 </div>
895 </div>
896 <br />
896 <br />
897
897
898 </div>
898 </div>
899 </body>
899 </body>
900 </html>
900 </html>
901
901
902
902
903 capabilities
903 capabilities
904
904
905 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
905 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
906 200 Script output follows
906 200 Script output follows
907
907
908 lookup changegroupsubset branchmap pushkey known unbundle=HG10GZ,HG10BZ,HG10UN
908 lookup changegroupsubset branchmap pushkey known getbundle unbundle=HG10GZ,HG10BZ,HG10UN
909
909
910 heads
910 heads
911
911
912 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
912 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
913 200 Script output follows
913 200 Script output follows
914
914
915 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
915 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
916
916
917 branches
917 branches
918
918
919 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=branches&nodes=0000000000000000000000000000000000000000'
919 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=branches&nodes=0000000000000000000000000000000000000000'
920 200 Script output follows
920 200 Script output follows
921
921
922 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
922 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
923
923
924 changegroup
924 changegroup
925
925
926 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000'
926 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000'
927 200 Script output follows
927 200 Script output follows
928
928
929 x\x9c\xbdTMHUA\x14\xbe\xa8\xf9\xec\xda&\x10\x11*\xb8\x88\x81\x99\xbef\xe6\xce\xbdw\xc6\xf2a\x16E\x1b\x11[%\x98\xcc\xaf\x8f\x8c\xf7\xc0\xf7\x82 (esc)
929 x\x9c\xbdTMHUA\x14\xbe\xa8\xf9\xec\xda&\x10\x11*\xb8\x88\x81\x99\xbef\xe6\xce\xbdw\xc6\xf2a\x16E\x1b\x11[%\x98\xcc\xaf\x8f\x8c\xf7\xc0\xf7\x82 (esc)
930 4\x11KP2m\x95\xad*\xabE\x05AP\xd0\xc22Z\x14\xf9\x03\xb9j\xa3\x9b$\xa4MJ\xb4\x90\xc0\x9a\x9bO0\x10\xdf\x13\xa2\x81\x0f\x869g\xe6|\xe7\x9c\xef\x8ceY\xf7\xa2KO\xd2\xb7K\x16~\\n\xe9\xad\x90w\x86\xab\x93W\x8e\xdf\xb0r\\Y\xee6(\xa2)\xf6\x95\xc6\x01\xe4\x1az\x80R\xe8kN\x98\xe7R\xa4\xa9K@\xe0!A\xb4k\xa7U*m\x03\x07\xd8\x92\x1d\xd2\xc9\xa4\x1d\xc2\xe6,\xa5\xcc+\x1f\xef\xafDgi\xef\xab\x1d\x1d\xb7\x9a\xe7[W\xfbc\x8f\xde-\xcd\xe7\xcaz\xb3\xbb\x19\xd3\x81\x10>c>\x08\x00"X\x11\xc2\x84@\xd2\xe7B*L\x00\x01P\x04R\xc3@\xbaB0\xdb8#\x83:\x83\xa2h\xbc=\xcd\xdaS\xe1Y,L\xd3\xa0\xf2\xa8\x94J:\xe6\xd8\x81Q\xe0\xe8d\xa7#\xe2,\xd1\xaeR*\xed \xa5\x01\x13\x01\xa6\x0cb\xe3;\xbe\xaf\xfcK[^wK\xe1N\xaf\xbbk\xe8B\xd1\xf4\xc1\x07\xb3\xab[\x10\xfdkmvwcB\xa6\xa4\xd4G\xc4D\xc2\x141\xad\x91\x10\x00\x08J\x81\xcb}\xee \xee+W\xba\x8a\x80\x90|\xd4\xa0\xd6\xa0\xd4T\xde\xe1\x9d,!\xe2\xb5\xa94\xe3\xe7\xd5\x9f\x06\x18\xcba\x03aP\xb8f\xcd\x04\x1a_\\9\xf1\xed\xe4\x9e\xe5\xa6\xd1\xd2\x9f\x03\xa7o\xae\x90H\xf3\xfb\xef\xffH3\xadk (esc)
930 4\x11KP2m\x95\xad*\xabE\x05AP\xd0\xc22Z\x14\xf9\x03\xb9j\xa3\x9b$\xa4MJ\xb4\x90\xc0\x9a\x9bO0\x10\xdf\x13\xa2\x81\x0f\x869g\xe6|\xe7\x9c\xef\x8ceY\xf7\xa2KO\xd2\xb7K\x16~\\n\xe9\xad\x90w\x86\xab\x93W\x8e\xdf\xb0r\\Y\xee6(\xa2)\xf6\x95\xc6\x01\xe4\x1az\x80R\xe8kN\x98\xe7R\xa4\xa9K@\xe0!A\xb4k\xa7U*m\x03\x07\xd8\x92\x1d\xd2\xc9\xa4\x1d\xc2\xe6,\xa5\xcc+\x1f\xef\xafDgi\xef\xab\x1d\x1d\xb7\x9a\xe7[W\xfbc\x8f\xde-\xcd\xe7\xcaz\xb3\xbb\x19\xd3\x81\x10>c>\x08\x00"X\x11\xc2\x84@\xd2\xe7B*L\x00\x01P\x04R\xc3@\xbaB0\xdb8#\x83:\x83\xa2h\xbc=\xcd\xdaS\xe1Y,L\xd3\xa0\xf2\xa8\x94J:\xe6\xd8\x81Q\xe0\xe8d\xa7#\xe2,\xd1\xaeR*\xed \xa5\x01\x13\x01\xa6\x0cb\xe3;\xbe\xaf\xfcK[^wK\xe1N\xaf\xbbk\xe8B\xd1\xf4\xc1\x07\xb3\xab[\x10\xfdkmvwcB\xa6\xa4\xd4G\xc4D\xc2\x141\xad\x91\x10\x00\x08J\x81\xcb}\xee \xee+W\xba\x8a\x80\x90|\xd4\xa0\xd6\xa0\xd4T\xde\xe1\x9d,!\xe2\xb5\xa94\xe3\xe7\xd5\x9f\x06\x18\xcba\x03aP\xb8f\xcd\x04\x1a_\\9\xf1\xed\xe4\x9e\xe5\xa6\xd1\xd2\x9f\x03\xa7o\xae\x90H\xf3\xfb\xef\xffH3\xadk (esc)
931 \xb0\x90\x92\x88\xb9\x14"\x068\xc2\x1e@\x00\xbb\x8a)\xd3'\x859 (esc)
931 \xb0\x90\x92\x88\xb9\x14"\x068\xc2\x1e@\x00\xbb\x8a)\xd3'\x859 (esc)
932 \xa8\x80\x84S \xa5\xbd-g\x13`\xe4\xdc\xc3H^\xdf\xe2\xc0TM\xc7\xf4BO\xcf\xde\xae\xe5\xae#\x1frM(K\x97`F\x19\x16s\x05GD\xb9\x01\xc1\x00+\x8c|\x9fp\xc11\xf0\x14\x00\x9cJ\x82<\xe0\x12\x9f\xc1\x90\xd0\xf5\xc8\x19>Pr\xaa\xeaW\xf5\xc4\xae\xd1\xfc\x17\xcf'\x13u\xb1\x9e\xcdHnC\x0e\xcc`\xc8\xa0&\xac\x0e\xf1|\x8c\x10$\xc4\x8c\xa2p\x05`\xdc\x08 \x80\xc4\xd7Rr-\x94\x10\x102\xedi;\xf3f\xf1z\x16\x86\xdb\xd8d\xe5\xe7\x8b\xf5\x8d\rzp\xb2\xfe\xac\xf5\xf2\xd3\xfe\xfckws\xedt\x96b\xd5l\x1c\x0b\x85\xb5\x170\x8f\x11\x84\xb0\x8f\x19\xa0\x00 _\x07\x1ac\xa2\xc3\x89Z\xe7\x96\xf9 \xccNFg\xc7F\xaa\x8a+\x9a\x9cc_\x17\x1b\x17\x9e]z38<\x97+\xb5,",\xc8\xc8?\\\x91\xff\x17.~U\x96\x97\xf5%\xdeN<\x8e\xf5\x97%\xe7^\xcfL\xed~\xda\x96k\xdc->\x86\x02\x83"\x96H\xa6\xe3\xaas=-\xeb7\xe5\xda\x8f\xbc (no-eol) (esc)
932 \xa8\x80\x84S \xa5\xbd-g\x13`\xe4\xdc\xc3H^\xdf\xe2\xc0TM\xc7\xf4BO\xcf\xde\xae\xe5\xae#\x1frM(K\x97`F\x19\x16s\x05GD\xb9\x01\xc1\x00+\x8c|\x9fp\xc11\xf0\x14\x00\x9cJ\x82<\xe0\x12\x9f\xc1\x90\xd0\xf5\xc8\x19>Pr\xaa\xeaW\xf5\xc4\xae\xd1\xfc\x17\xcf'\x13u\xb1\x9e\xcdHnC\x0e\xcc`\xc8\xa0&\xac\x0e\xf1|\x8c\x10$\xc4\x8c\xa2p\x05`\xdc\x08 \x80\xc4\xd7Rr-\x94\x10\x102\xedi;\xf3f\xf1z\x16\x86\xdb\xd8d\xe5\xe7\x8b\xf5\x8d\rzp\xb2\xfe\xac\xf5\xf2\xd3\xfe\xfckws\xedt\x96b\xd5l\x1c\x0b\x85\xb5\x170\x8f\x11\x84\xb0\x8f\x19\xa0\x00 _\x07\x1ac\xa2\xc3\x89Z\xe7\x96\xf9 \xccNFg\xc7F\xaa\x8a+\x9a\x9cc_\x17\x1b\x17\x9e]z38<\x97+\xb5,",\xc8\xc8?\\\x91\xff\x17.~U\x96\x97\xf5%\xdeN<\x8e\xf5\x97%\xe7^\xcfL\xed~\xda\x96k\xdc->\x86\x02\x83"\x96H\xa6\xe3\xaas=-\xeb7\xe5\xda\x8f\xbc (no-eol) (esc)
933
933
934 stream_out
934 stream_out
935
935
936 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=stream_out'
936 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=stream_out'
937 200 Script output follows
937 200 Script output follows
938
938
939 1
939 1
940
940
941 failing unbundle, requires POST request
941 failing unbundle, requires POST request
942
942
943 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=unbundle'
943 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=unbundle'
944 405 push requires POST request
944 405 push requires POST request
945
945
946 0
946 0
947 push requires POST request
947 push requires POST request
948 [1]
948 [1]
949
949
950 Static files
950 Static files
951
951
952 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
952 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
953 200 Script output follows
953 200 Script output follows
954
954
955 a { text-decoration:none; }
955 a { text-decoration:none; }
956 .age { white-space:nowrap; }
956 .age { white-space:nowrap; }
957 .date { white-space:nowrap; }
957 .date { white-space:nowrap; }
958 .indexlinks { white-space:nowrap; }
958 .indexlinks { white-space:nowrap; }
959 .parity0 { background-color: #ddd; }
959 .parity0 { background-color: #ddd; }
960 .parity1 { background-color: #eee; }
960 .parity1 { background-color: #eee; }
961 .lineno { width: 60px; color: #aaa; font-size: smaller;
961 .lineno { width: 60px; color: #aaa; font-size: smaller;
962 text-align: right; }
962 text-align: right; }
963 .plusline { color: green; }
963 .plusline { color: green; }
964 .minusline { color: red; }
964 .minusline { color: red; }
965 .atline { color: purple; }
965 .atline { color: purple; }
966 .annotate { font-size: smaller; text-align: right; padding-right: 1em; }
966 .annotate { font-size: smaller; text-align: right; padding-right: 1em; }
967 .buttons a {
967 .buttons a {
968 background-color: #666;
968 background-color: #666;
969 padding: 2pt;
969 padding: 2pt;
970 color: white;
970 color: white;
971 font-family: sans;
971 font-family: sans;
972 font-weight: bold;
972 font-weight: bold;
973 }
973 }
974 .navigate a {
974 .navigate a {
975 background-color: #ccc;
975 background-color: #ccc;
976 padding: 2pt;
976 padding: 2pt;
977 font-family: sans;
977 font-family: sans;
978 color: black;
978 color: black;
979 }
979 }
980
980
981 .metatag {
981 .metatag {
982 background-color: #888;
982 background-color: #888;
983 color: white;
983 color: white;
984 text-align: right;
984 text-align: right;
985 }
985 }
986
986
987 /* Common */
987 /* Common */
988 pre { margin: 0; }
988 pre { margin: 0; }
989
989
990 .logo {
990 .logo {
991 float: right;
991 float: right;
992 clear: right;
992 clear: right;
993 }
993 }
994
994
995 /* Changelog/Filelog entries */
995 /* Changelog/Filelog entries */
996 .logEntry { width: 100%; }
996 .logEntry { width: 100%; }
997 .logEntry .age { width: 15%; }
997 .logEntry .age { width: 15%; }
998 .logEntry th { font-weight: normal; text-align: right; vertical-align: top; }
998 .logEntry th { font-weight: normal; text-align: right; vertical-align: top; }
999 .logEntry th.age, .logEntry th.firstline { font-weight: bold; }
999 .logEntry th.age, .logEntry th.firstline { font-weight: bold; }
1000 .logEntry th.firstline { text-align: left; width: inherit; }
1000 .logEntry th.firstline { text-align: left; width: inherit; }
1001
1001
1002 /* Shortlog entries */
1002 /* Shortlog entries */
1003 .slogEntry { width: 100%; }
1003 .slogEntry { width: 100%; }
1004 .slogEntry .age { width: 8em; }
1004 .slogEntry .age { width: 8em; }
1005 .slogEntry td { font-weight: normal; text-align: left; vertical-align: top; }
1005 .slogEntry td { font-weight: normal; text-align: left; vertical-align: top; }
1006 .slogEntry td.author { width: 15em; }
1006 .slogEntry td.author { width: 15em; }
1007
1007
1008 /* Tag entries */
1008 /* Tag entries */
1009 #tagEntries { list-style: none; margin: 0; padding: 0; }
1009 #tagEntries { list-style: none; margin: 0; padding: 0; }
1010 #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; }
1010 #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; }
1011
1011
1012 /* Changeset entry */
1012 /* Changeset entry */
1013 #changesetEntry { }
1013 #changesetEntry { }
1014 #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1014 #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1015 #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; }
1015 #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; }
1016
1016
1017 /* File diff view */
1017 /* File diff view */
1018 #filediffEntry { }
1018 #filediffEntry { }
1019 #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1019 #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1020
1020
1021 /* Graph */
1021 /* Graph */
1022 div#wrapper {
1022 div#wrapper {
1023 position: relative;
1023 position: relative;
1024 margin: 0;
1024 margin: 0;
1025 padding: 0;
1025 padding: 0;
1026 }
1026 }
1027
1027
1028 canvas {
1028 canvas {
1029 position: absolute;
1029 position: absolute;
1030 z-index: 5;
1030 z-index: 5;
1031 top: -0.6em;
1031 top: -0.6em;
1032 margin: 0;
1032 margin: 0;
1033 }
1033 }
1034
1034
1035 ul#nodebgs {
1035 ul#nodebgs {
1036 list-style: none inside none;
1036 list-style: none inside none;
1037 padding: 0;
1037 padding: 0;
1038 margin: 0;
1038 margin: 0;
1039 top: -0.7em;
1039 top: -0.7em;
1040 }
1040 }
1041
1041
1042 ul#graphnodes li, ul#nodebgs li {
1042 ul#graphnodes li, ul#nodebgs li {
1043 height: 39px;
1043 height: 39px;
1044 }
1044 }
1045
1045
1046 ul#graphnodes {
1046 ul#graphnodes {
1047 position: absolute;
1047 position: absolute;
1048 z-index: 10;
1048 z-index: 10;
1049 top: -0.85em;
1049 top: -0.85em;
1050 list-style: none inside none;
1050 list-style: none inside none;
1051 padding: 0;
1051 padding: 0;
1052 }
1052 }
1053
1053
1054 ul#graphnodes li .info {
1054 ul#graphnodes li .info {
1055 display: block;
1055 display: block;
1056 font-size: 70%;
1056 font-size: 70%;
1057 position: relative;
1057 position: relative;
1058 top: -1px;
1058 top: -1px;
1059 }
1059 }
1060
1060
1061 Stop and restart with HGENCODING=cp932
1061 Stop and restart with HGENCODING=cp932
1062
1062
1063 $ "$TESTDIR/killdaemons.py"
1063 $ "$TESTDIR/killdaemons.py"
1064 $ HGENCODING=cp932 hg serve --config server.uncompressed=False -n test \
1064 $ HGENCODING=cp932 hg serve --config server.uncompressed=False -n test \
1065 > -p $HGPORT -d --pid-file=hg.pid -E errors.log
1065 > -p $HGPORT -d --pid-file=hg.pid -E errors.log
1066 $ cat hg.pid >> $DAEMON_PIDS
1066 $ cat hg.pid >> $DAEMON_PIDS
1067
1067
1068 commit message with Japanese Kanji 'Noh', which ends with '\x5c'
1068 commit message with Japanese Kanji 'Noh', which ends with '\x5c'
1069
1069
1070 $ echo foo >> foo
1070 $ echo foo >> foo
1071 $ HGENCODING=cp932 hg ci -m `python -c 'print("\x94\x5c")'`
1071 $ HGENCODING=cp932 hg ci -m `python -c 'print("\x94\x5c")'`
1072
1072
1073 Graph json escape of multibyte character
1073 Graph json escape of multibyte character
1074
1074
1075 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/' \
1075 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/' \
1076 > | grep '^var data ='
1076 > | grep '^var data ='
1077 var data = [["40b4d6888e92", [0, 1], [[0, 0, 1]], "\u80fd", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", false], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
1077 var data = [["40b4d6888e92", [0, 1], [[0, 0, 1]], "\u80fd", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", false], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
1078
1078
1079 ERRORS ENCOUNTERED
1079 ERRORS ENCOUNTERED
1080
1080
1081 $ cat errors.log
1081 $ cat errors.log
General Comments 0
You need to be logged in to leave comments. Login now