wireproto: add getbundle() function...
Peter Arrenbrecht
r13741:b51bf961 default
@@ -0,0 +1,253 @@
1
2 = Test the getbundle() protocol function =
3
4 Enable graphlog extension:
5
6 $ echo "[extensions]" >> $HGRCPATH
7 $ echo "graphlog=" >> $HGRCPATH
8
9 Create a test repository:
10
11 $ hg init repo
12 $ cd repo
13 $ hg debugbuilddag -n -m '+2 :fork +5 :p1 *fork +6 :p2 /p1 :m1 +3' > /dev/null
14 $ hg glog --template '{node}\n'
15 @ 2bba2f40f321484159b395a43f20101d4bb7ead0
16 |
17 o d9e5488323c782fe684573f3043369d199038b6f
18 |
19 o 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
20 |
21 o 733bf0910832b26b768a09172f325f995b5476e1
22 |\
23 | o b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
24 | |
25 | o 6b57ee934bb2996050540f84cdfc8dcad1e7267d
26 | |
27 | o 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
28 | |
29 | o c1818a9f5977dd4139a48f93f5425c67d44a9368
30 | |
31 | o 6c725a58ad10aea441540bfd06c507f63e8b9cdd
32 | |
33 | o 18063366a155bd56b5618229ae2ac3e91849aa5e
34 | |
35 | o a21d913c992197a2eb60b298521ec0f045a04799
36 | |
37 o | b6b2b682253df2ffedc10e9415e4114202b303c5
38 | |
39 o | 2114148793524fd045998f71a45b0aaf139f752b
40 | |
41 o | 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
42 | |
43 o | ea919464b16e003894c48b6cb68df3cd9411b544
44 | |
45 o | 0f82d97ec2778746743fbc996740d409558fda22
46 |/
47 o 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
48 |
49 o 10e64d654571f11577745b4d8372e859d9e4df63
50
51 $ cd ..
52
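For reference, the debugbuilddag spec used above expands as follows. This is an editorial breakdown based on the element descriptions in the debugbuilddag docstring further down in commands.py; it is illustrative and not part of the test file:

    # Spec passed to `hg debugbuilddag -n -m` above:
    spec = "+2 :fork +5 :p1 *fork +6 :p2 /p1 :m1 +3"
    # +2      two linear changesets on top of null            -> revs 0-1
    # :fork   tag rev 1 (6e23b016) as "fork"
    # +5      five changesets on top of rev 1                 -> revs 2-6
    # :p1     tag rev 6 (b6b2b682) as "p1"
    # *fork   one changeset forked off "fork"                 -> rev 7
    # +6      six changesets on that branch                   -> revs 8-13
    # :p2     tag rev 13 (b5af5d6e) as "p2"
    # /p1     merge of the preceding node (rev 13) with "p1"  -> rev 14 (733bf091)
    # :m1     tag the merge as "m1"
    # +3      three changesets on top of the merge            -> revs 15-17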
53
54 = Test locally =
55
56 Get everything:
57
58 $ hg debuggetbundle repo bundle
59 $ hg debugbundle bundle
60 10e64d654571f11577745b4d8372e859d9e4df63
61 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
62 0f82d97ec2778746743fbc996740d409558fda22
63 ea919464b16e003894c48b6cb68df3cd9411b544
64 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
65 2114148793524fd045998f71a45b0aaf139f752b
66 b6b2b682253df2ffedc10e9415e4114202b303c5
67 a21d913c992197a2eb60b298521ec0f045a04799
68 18063366a155bd56b5618229ae2ac3e91849aa5e
69 6c725a58ad10aea441540bfd06c507f63e8b9cdd
70 c1818a9f5977dd4139a48f93f5425c67d44a9368
71 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
72 6b57ee934bb2996050540f84cdfc8dcad1e7267d
73 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
74 733bf0910832b26b768a09172f325f995b5476e1
75 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
76 d9e5488323c782fe684573f3043369d199038b6f
77 2bba2f40f321484159b395a43f20101d4bb7ead0
78
79 Get part of linear run:
80
81 $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 733bf0910832b26b768a09172f325f995b5476e1
82 $ hg debugbundle bundle
83 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
84 d9e5488323c782fe684573f3043369d199038b6f
85
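The -H/-C options map directly onto the heads and common arguments of getbundle: the reply contains the changesets that are ancestors of the requested heads (inclusive) but not ancestors of the common nodes. Below is a minimal sketch of that set computation for the linear-run case above, using abbreviated node ids; it illustrates the expected result and is not code from the commit:

    # bundle(heads, common) = ancestors(heads) - ancestors(common), inclusive.
    parents = {
        "d9e54883": ["6e9a5adf"],   # abbreviated ids from the graph above
        "6e9a5adf": ["733bf091"],
        "733bf091": [],             # treated as a root for this sketch
    }

    def ancestors(nodes):
        seen, stack = set(), list(nodes)
        while stack:
            n = stack.pop()
            if n not in seen:
                seen.add(n)
                stack.extend(parents.get(n, []))
        return seen

    wanted = ancestors(["d9e54883"]) - ancestors(["733bf091"])
    assert wanted == {"d9e54883", "6e9a5adf"}   # matches the debugbundle output above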
86 Get missing branch and merge:
87
88 $ hg debuggetbundle repo bundle -H d9e5488323c782fe684573f3043369d199038b6f -C 6b57ee934bb2996050540f84cdfc8dcad1e7267d
89 $ hg debugbundle bundle
90 0f82d97ec2778746743fbc996740d409558fda22
91 ea919464b16e003894c48b6cb68df3cd9411b544
92 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
93 2114148793524fd045998f71a45b0aaf139f752b
94 b6b2b682253df2ffedc10e9415e4114202b303c5
95 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
96 733bf0910832b26b768a09172f325f995b5476e1
97 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
98 d9e5488323c782fe684573f3043369d199038b6f
99
100 Get from only one head:
101
102 $ hg debuggetbundle repo bundle -H 6c725a58ad10aea441540bfd06c507f63e8b9cdd -C 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
103 $ hg debugbundle bundle
104 a21d913c992197a2eb60b298521ec0f045a04799
105 18063366a155bd56b5618229ae2ac3e91849aa5e
106 6c725a58ad10aea441540bfd06c507f63e8b9cdd
107
108 Get parts of two branches:
109
110 $ hg debuggetbundle repo bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 -H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544
111 $ hg debugbundle bundle
112 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
113 2114148793524fd045998f71a45b0aaf139f752b
114 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
115 6b57ee934bb2996050540f84cdfc8dcad1e7267d
116
117 Check that we get all needed file changes:
118
119 $ hg debugbundle bundle --all
120 format: id, p1, p2, cset, len(delta)
121
122 changelog
123 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99
124 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99
125 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102
126 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102
127
128 manifest
129 dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113
130 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113
131 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295
132 b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114
133
134 mf
135 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17
136 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18
137 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149
138 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19
139
140 nf11
141 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16
142
143 nf12
144 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16
145
146 nf4
147 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15
148
149 nf5
150 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15
151
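Each row above follows the header "format: id, p1, p2, cset, len(delta)". As an illustrative reading (not test output), the single nf4 filelog entry decodes to:

    # The nf4 row, field by field:
    nf4_row = {
        "id":   "3c1407305701051cbed9f9cb9a68bdfb5997c235",  # filelog node being added
        "p1":   "0" * 40,                                     # null parent: the file is new
        "p2":   "0" * 40,                                     # null parent: not a merge
        "cset": "74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc",   # changeset that introduces it
        "len":  15,                                           # length of the delta in the bundle
    }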
152 Get branch and merge:
153
154 $ hg debuggetbundle repo bundle -C 10e64d654571f11577745b4d8372e859d9e4df63 -H 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
155 $ hg debugbundle bundle
156 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
157 0f82d97ec2778746743fbc996740d409558fda22
158 ea919464b16e003894c48b6cb68df3cd9411b544
159 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
160 2114148793524fd045998f71a45b0aaf139f752b
161 b6b2b682253df2ffedc10e9415e4114202b303c5
162 a21d913c992197a2eb60b298521ec0f045a04799
163 18063366a155bd56b5618229ae2ac3e91849aa5e
164 6c725a58ad10aea441540bfd06c507f63e8b9cdd
165 c1818a9f5977dd4139a48f93f5425c67d44a9368
166 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
167 6b57ee934bb2996050540f84cdfc8dcad1e7267d
168 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
169 733bf0910832b26b768a09172f325f995b5476e1
170 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
171
172
173 = Test via HTTP =
174
175 Get everything:
176
177 $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log
178 $ cat hg.pid >> $DAEMON_PIDS
179 $ hg debuggetbundle http://localhost:$HGPORT/ bundle
180 $ hg debugbundle bundle
181 10e64d654571f11577745b4d8372e859d9e4df63
182 6e23b016bc0f0e79c7bd9dd372ccee07055d7fd4
183 0f82d97ec2778746743fbc996740d409558fda22
184 ea919464b16e003894c48b6cb68df3cd9411b544
185 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
186 2114148793524fd045998f71a45b0aaf139f752b
187 b6b2b682253df2ffedc10e9415e4114202b303c5
188 a21d913c992197a2eb60b298521ec0f045a04799
189 18063366a155bd56b5618229ae2ac3e91849aa5e
190 6c725a58ad10aea441540bfd06c507f63e8b9cdd
191 c1818a9f5977dd4139a48f93f5425c67d44a9368
192 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
193 6b57ee934bb2996050540f84cdfc8dcad1e7267d
194 b5af5d6ea56d73ce24c40bc3cd19a862f74888ac
195 733bf0910832b26b768a09172f325f995b5476e1
196 6e9a5adf5437e49c746288cf95c5ac34fa8f2f72
197 d9e5488323c782fe684573f3043369d199038b6f
198 2bba2f40f321484159b395a43f20101d4bb7ead0
199
200 Get parts of two branches:
201
202 $ hg debuggetbundle http://localhost:$HGPORT/ bundle -H 6b57ee934bb2996050540f84cdfc8dcad1e7267d -C c1818a9f5977dd4139a48f93f5425c67d44a9368 -H 2114148793524fd045998f71a45b0aaf139f752b -C ea919464b16e003894c48b6cb68df3cd9411b544
203 $ hg debugbundle bundle
204 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc
205 2114148793524fd045998f71a45b0aaf139f752b
206 2c0ec49482e8abe888b7bd090b5827acfc22b3d7
207 6b57ee934bb2996050540f84cdfc8dcad1e7267d
208
209 Check that we get all needed file changes:
210
211 $ hg debugbundle bundle --all
212 format: id, p1, p2, cset, len(delta)
213
214 changelog
215 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc ea919464b16e003894c48b6cb68df3cd9411b544 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 99
216 2114148793524fd045998f71a45b0aaf139f752b 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 99
217 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 c1818a9f5977dd4139a48f93f5425c67d44a9368 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 102
218 6b57ee934bb2996050540f84cdfc8dcad1e7267d 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 102
219
220 manifest
221 dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 113
222 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 113
223 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 295
224 b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 114
225
226 mf
227 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 17
228 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 18
229 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 149
230 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 19
231
232 nf11
233 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2c0ec49482e8abe888b7bd090b5827acfc22b3d7 16
234
235 nf12
236 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 6b57ee934bb2996050540f84cdfc8dcad1e7267d 16
237
238 nf4
239 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 74a573f2ae100f1cedfad9aa7b96f8eaab1dabfc 15
240
241 nf5
242 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 2114148793524fd045998f71a45b0aaf139f752b 15
243
244 Verify we hit the HTTP server:
245
246 $ cat access.log
247 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
248 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - (glob)
249 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
250 * - - [*] "GET /?cmd=getbundle&common=c1818a9f5977dd4139a48f93f5425c67d44a9368+ea919464b16e003894c48b6cb68df3cd9411b544&heads=6b57ee934bb2996050540f84cdfc8dcad1e7267d+2114148793524fd045998f71a45b0aaf139f752b HTTP/1.1" 200 - (glob)
251
252 $ cat error.log
253
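The last access.log entry shows how the repeated -H/-C options from the "parts of two branches" case are flattened into one getbundle query: the node lists are space-separated, and URL encoding turns the spaces into '+'. Below is a minimal stand-alone sketch of composing the same request by hand with the Python 3 standard library; the port number is an assumption ($HGPORT is expanded by the test runner), and the sketch omits the extra headers a real Mercurial client sends:

    # Recreate the query logged above (node ids taken from the test).
    import urllib.parse
    import urllib.request

    base = "http://localhost:8000/"        # assumption: $HGPORT expanded to 8000
    heads = ["6b57ee934bb2996050540f84cdfc8dcad1e7267d",
             "2114148793524fd045998f71a45b0aaf139f752b"]
    common = ["c1818a9f5977dd4139a48f93f5425c67d44a9368",
              "ea919464b16e003894c48b6cb68df3cd9411b544"]

    # Wire-protocol node lists are space-separated; urlencode turns the spaces
    # into '+', giving the common=...+...&heads=...+... form seen in the log.
    query = urllib.parse.urlencode({"cmd": "getbundle",
                                    "common": " ".join(common),
                                    "heads": " ".join(heads)})
    with urllib.request.urlopen(base + "?" + query) as resp:
        data = resp.read()                 # raw response body: the requested changegroup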
@@ -1,4874 +1,4904 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, difflib, time, tempfile
12 12 import hg, util, revlog, extensions, copies, error, bookmarks
13 13 import patch, help, mdiff, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 15 import merge as mergemod
16 16 import minirst, revset, templatefilters
17 17 import dagparser
18 18
19 19 # Commands start here, listed alphabetically
20 20
21 21 def add(ui, repo, *pats, **opts):
22 22 """add the specified files on the next commit
23 23
24 24 Schedule files to be version controlled and added to the
25 25 repository.
26 26
27 27 The files will be added to the repository at the next commit. To
28 28 undo an add before that, see :hg:`forget`.
29 29
30 30 If no names are given, add all files to the repository.
31 31
32 32 .. container:: verbose
33 33
34 34 An example showing how new (unknown) files are added
35 35 automatically by :hg:`add`::
36 36
37 37 $ ls
38 38 foo.c
39 39 $ hg status
40 40 ? foo.c
41 41 $ hg add
42 42 adding foo.c
43 43 $ hg status
44 44 A foo.c
45 45
46 46 Returns 0 if all files are successfully added.
47 47 """
48 48
49 49 m = cmdutil.match(repo, pats, opts)
50 50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 51 opts.get('subrepos'), prefix="")
52 52 return rejected and 1 or 0
53 53
54 54 def addremove(ui, repo, *pats, **opts):
55 55 """add all new files, delete all missing files
56 56
57 57 Add all new files and remove all missing files from the
58 58 repository.
59 59
60 60 New files are ignored if they match any of the patterns in
61 61 ``.hgignore``. As with add, these changes take effect at the next
62 62 commit.
63 63
64 64 Use the -s/--similarity option to detect renamed files. With a
65 65 parameter greater than 0, this compares every removed file with
66 66 every added file and records those similar enough as renames. This
67 67 option takes a percentage between 0 (disabled) and 100 (files must
68 68 be identical) as its parameter. Detecting renamed files this way
69 69 can be expensive. After using this option, :hg:`status -C` can be
70 70 used to check which files were identified as moved or renamed.
71 71
72 72 Returns 0 if all files are successfully added.
73 73 """
74 74 try:
75 75 sim = float(opts.get('similarity') or 100)
76 76 except ValueError:
77 77 raise util.Abort(_('similarity must be a number'))
78 78 if sim < 0 or sim > 100:
79 79 raise util.Abort(_('similarity must be between 0 and 100'))
80 80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81 81
82 82 def annotate(ui, repo, *pats, **opts):
83 83 """show changeset information by line for each file
84 84
85 85 List changes in files, showing the revision id responsible for
86 86 each line
87 87
88 88 This command is useful for discovering when a change was made and
89 89 by whom.
90 90
91 91 Without the -a/--text option, annotate will avoid processing files
92 92 it detects as binary. With -a, annotate will annotate the file
93 93 anyway, although the results will probably be neither useful
94 94 nor desirable.
95 95
96 96 Returns 0 on success.
97 97 """
98 98 if opts.get('follow'):
99 99 # --follow is deprecated and now just an alias for -f/--file
100 100 # to mimic the behavior of Mercurial before version 1.5
101 101 opts['file'] = 1
102 102
103 103 datefunc = ui.quiet and util.shortdate or util.datestr
104 104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105 105
106 106 if not pats:
107 107 raise util.Abort(_('at least one filename or pattern is required'))
108 108
109 109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 110 ('number', lambda x: str(x[0].rev())),
111 111 ('changeset', lambda x: short(x[0].node())),
112 112 ('date', getdate),
113 113 ('file', lambda x: x[0].path()),
114 114 ]
115 115
116 116 if (not opts.get('user') and not opts.get('changeset')
117 117 and not opts.get('date') and not opts.get('file')):
118 118 opts['number'] = 1
119 119
120 120 linenumber = opts.get('line_number') is not None
121 121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 122 raise util.Abort(_('at least one of -n/-c is required for -l'))
123 123
124 124 funcmap = [func for op, func in opmap if opts.get(op)]
125 125 if linenumber:
126 126 lastfunc = funcmap[-1]
127 127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128 128
129 129 def bad(x, y):
130 130 raise util.Abort("%s: %s" % (x, y))
131 131
132 132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 133 m = cmdutil.match(repo, pats, opts)
134 134 m.bad = bad
135 135 follow = not opts.get('no_follow')
136 136 for abs in ctx.walk(m):
137 137 fctx = ctx[abs]
138 138 if not opts.get('text') and util.binary(fctx.data()):
139 139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 140 continue
141 141
142 142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 143 pieces = []
144 144
145 145 for f in funcmap:
146 146 l = [f(n) for n, dummy in lines]
147 147 if l:
148 148 sized = [(x, encoding.colwidth(x)) for x in l]
149 149 ml = max([w for x, w in sized])
150 150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151 151
152 152 if pieces:
153 153 for p, l in zip(zip(*pieces), lines):
154 154 ui.write("%s: %s" % (" ".join(p), l[1]))
155 155
156 156 def archive(ui, repo, dest, **opts):
157 157 '''create an unversioned archive of a repository revision
158 158
159 159 By default, the revision used is the parent of the working
160 160 directory; use -r/--rev to specify a different revision.
161 161
162 162 The archive type is automatically detected based on file
163 163 extension (or override using -t/--type).
164 164
165 165 Valid types are:
166 166
167 167 :``files``: a directory full of files (default)
168 168 :``tar``: tar archive, uncompressed
169 169 :``tbz2``: tar archive, compressed using bzip2
170 170 :``tgz``: tar archive, compressed using gzip
171 171 :``uzip``: zip archive, uncompressed
172 172 :``zip``: zip archive, compressed using deflate
173 173
174 174 The exact name of the destination archive or directory is given
175 175 using a format string; see :hg:`help export` for details.
176 176
177 177 Each member added to an archive file has a directory prefix
178 178 prepended. Use -p/--prefix to specify a format string for the
179 179 prefix. The default is the basename of the archive, with suffixes
180 180 removed.
181 181
182 182 Returns 0 on success.
183 183 '''
184 184
185 185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 186 if not ctx:
187 187 raise util.Abort(_('no working directory: please specify a revision'))
188 188 node = ctx.node()
189 189 dest = cmdutil.make_filename(repo, dest, node)
190 190 if os.path.realpath(dest) == repo.root:
191 191 raise util.Abort(_('repository root cannot be destination'))
192 192
193 193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 194 prefix = opts.get('prefix')
195 195
196 196 if dest == '-':
197 197 if kind == 'files':
198 198 raise util.Abort(_('cannot archive plain files to stdout'))
199 199 dest = sys.stdout
200 200 if not prefix:
201 201 prefix = os.path.basename(repo.root) + '-%h'
202 202
203 203 prefix = cmdutil.make_filename(repo, prefix, node)
204 204 matchfn = cmdutil.match(repo, [], opts)
205 205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 206 matchfn, prefix, subrepos=opts.get('subrepos'))
207 207
208 208 def backout(ui, repo, node=None, rev=None, **opts):
209 209 '''reverse effect of earlier changeset
210 210
211 211 Prepare a new changeset with the effect of REV undone in the
212 212 current working directory.
213 213
214 214 If REV is the parent of the working directory, then this new changeset
215 215 is committed automatically. Otherwise, hg needs to merge the
216 216 changes and the merged result is left uncommitted.
217 217
218 218 By default, the pending changeset will have one parent,
219 219 maintaining a linear history. With --merge, the pending changeset
220 220 will instead have two parents: the old parent of the working
221 221 directory and a new child of REV that simply undoes REV.
222 222
223 223 Before version 1.7, the behavior without --merge was equivalent to
224 224 specifying --merge followed by :hg:`update --clean .` to cancel
225 225 the merge and leave the child of REV as a head to be merged
226 226 separately.
227 227
228 228 See :hg:`help dates` for a list of formats valid for -d/--date.
229 229
230 230 Returns 0 on success.
231 231 '''
232 232 if rev and node:
233 233 raise util.Abort(_("please specify just one revision"))
234 234
235 235 if not rev:
236 236 rev = node
237 237
238 238 if not rev:
239 239 raise util.Abort(_("please specify a revision to backout"))
240 240
241 241 date = opts.get('date')
242 242 if date:
243 243 opts['date'] = util.parsedate(date)
244 244
245 245 cmdutil.bail_if_changed(repo)
246 246 node = cmdutil.revsingle(repo, rev).node()
247 247
248 248 op1, op2 = repo.dirstate.parents()
249 249 a = repo.changelog.ancestor(op1, node)
250 250 if a != node:
251 251 raise util.Abort(_('cannot backout change on a different branch'))
252 252
253 253 p1, p2 = repo.changelog.parents(node)
254 254 if p1 == nullid:
255 255 raise util.Abort(_('cannot backout a change with no parents'))
256 256 if p2 != nullid:
257 257 if not opts.get('parent'):
258 258 raise util.Abort(_('cannot backout a merge changeset without '
259 259 '--parent'))
260 260 p = repo.lookup(opts['parent'])
261 261 if p not in (p1, p2):
262 262 raise util.Abort(_('%s is not a parent of %s') %
263 263 (short(p), short(node)))
264 264 parent = p
265 265 else:
266 266 if opts.get('parent'):
267 267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 268 parent = p1
269 269
270 270 # the backout should appear on the same branch
271 271 branch = repo.dirstate.branch()
272 272 hg.clean(repo, node, show_stats=False)
273 273 repo.dirstate.setbranch(branch)
274 274 revert_opts = opts.copy()
275 275 revert_opts['date'] = None
276 276 revert_opts['all'] = True
277 277 revert_opts['rev'] = hex(parent)
278 278 revert_opts['no_backup'] = None
279 279 revert(ui, repo, **revert_opts)
280 280 if not opts.get('merge') and op1 != node:
281 281 try:
282 282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 283 return hg.update(repo, op1)
284 284 finally:
285 285 ui.setconfig('ui', 'forcemerge', '')
286 286
287 287 commit_opts = opts.copy()
288 288 commit_opts['addremove'] = False
289 289 if not commit_opts['message'] and not commit_opts['logfile']:
290 290 # we don't translate commit messages
291 291 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 292 commit_opts['force_editor'] = True
293 293 commit(ui, repo, **commit_opts)
294 294 def nice(node):
295 295 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 296 ui.status(_('changeset %s backs out changeset %s\n') %
297 297 (nice(repo.changelog.tip()), nice(node)))
298 298 if opts.get('merge') and op1 != node:
299 299 hg.clean(repo, op1, show_stats=False)
300 300 ui.status(_('merging with changeset %s\n')
301 301 % nice(repo.changelog.tip()))
302 302 try:
303 303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 304 return hg.merge(repo, hex(repo.changelog.tip()))
305 305 finally:
306 306 ui.setconfig('ui', 'forcemerge', '')
307 307 return 0
308 308
309 309 def bisect(ui, repo, rev=None, extra=None, command=None,
310 310 reset=None, good=None, bad=None, skip=None, extend=None,
311 311 noupdate=None):
312 312 """subdivision search of changesets
313 313
314 314 This command helps to find changesets which introduce problems. To
315 315 use, mark the earliest changeset you know exhibits the problem as
316 316 bad, then mark the latest changeset which is free from the problem
317 317 as good. Bisect will update your working directory to a revision
318 318 for testing (unless the -U/--noupdate option is specified). Once
319 319 you have performed tests, mark the working directory as good or
320 320 bad, and bisect will either update to another candidate changeset
321 321 or announce that it has found the bad revision.
322 322
323 323 As a shortcut, you can also use the revision argument to mark a
324 324 revision as good or bad without checking it out first.
325 325
326 326 If you supply a command, it will be used for automatic bisection.
327 327 Its exit status will be used to mark revisions as good or bad:
328 328 status 0 means good, 125 means to skip the revision, 127
329 329 (command not found) will abort the bisection, and any other
330 330 non-zero exit status means the revision is bad.
331 331
332 332 Returns 0 on success.
333 333 """
334 334 def extendbisectrange(nodes, good):
335 335 # bisect is incomplete when it ends on a merge node and
336 336 # one of the parent was not checked.
337 337 parents = repo[nodes[0]].parents()
338 338 if len(parents) > 1:
339 339 side = good and state['bad'] or state['good']
340 340 num = len(set(i.node() for i in parents) & set(side))
341 341 if num == 1:
342 342 return parents[0].ancestor(parents[1])
343 343 return None
344 344
345 345 def print_result(nodes, good):
346 346 displayer = cmdutil.show_changeset(ui, repo, {})
347 347 if len(nodes) == 1:
348 348 # narrowed it down to a single revision
349 349 if good:
350 350 ui.write(_("The first good revision is:\n"))
351 351 else:
352 352 ui.write(_("The first bad revision is:\n"))
353 353 displayer.show(repo[nodes[0]])
354 354 parents = repo[nodes[0]].parents()
355 355 extendnode = extendbisectrange(nodes, good)
356 356 if extendnode is not None:
357 357 ui.write(_('Not all ancestors of this changeset have been'
358 358 ' checked.\nUse bisect --extend to continue the '
359 359 'bisection from\nthe common ancestor, %s.\n')
360 360 % short(extendnode.node()))
361 361 else:
362 362 # multiple possible revisions
363 363 if good:
364 364 ui.write(_("Due to skipped revisions, the first "
365 365 "good revision could be any of:\n"))
366 366 else:
367 367 ui.write(_("Due to skipped revisions, the first "
368 368 "bad revision could be any of:\n"))
369 369 for n in nodes:
370 370 displayer.show(repo[n])
371 371 displayer.close()
372 372
373 373 def check_state(state, interactive=True):
374 374 if not state['good'] or not state['bad']:
375 375 if (good or bad or skip or reset) and interactive:
376 376 return
377 377 if not state['good']:
378 378 raise util.Abort(_('cannot bisect (no known good revisions)'))
379 379 else:
380 380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
381 381 return True
382 382
383 383 # backward compatibility
384 384 if rev in "good bad reset init".split():
385 385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
386 386 cmd, rev, extra = rev, extra, None
387 387 if cmd == "good":
388 388 good = True
389 389 elif cmd == "bad":
390 390 bad = True
391 391 else:
392 392 reset = True
393 393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
394 394 raise util.Abort(_('incompatible arguments'))
395 395
396 396 if reset:
397 397 p = repo.join("bisect.state")
398 398 if os.path.exists(p):
399 399 os.unlink(p)
400 400 return
401 401
402 402 state = hbisect.load_state(repo)
403 403
404 404 if command:
405 405 changesets = 1
406 406 try:
407 407 while changesets:
408 408 # update state
409 409 status = util.system(command)
410 410 if status == 125:
411 411 transition = "skip"
412 412 elif status == 0:
413 413 transition = "good"
414 414 # status < 0 means process was killed
415 415 elif status == 127:
416 416 raise util.Abort(_("failed to execute %s") % command)
417 417 elif status < 0:
418 418 raise util.Abort(_("%s killed") % command)
419 419 else:
420 420 transition = "bad"
421 421 ctx = cmdutil.revsingle(repo, rev)
422 422 rev = None # clear for future iterations
423 423 state[transition].append(ctx.node())
424 424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
425 425 check_state(state, interactive=False)
426 426 # bisect
427 427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 428 # update to next check
429 429 cmdutil.bail_if_changed(repo)
430 430 hg.clean(repo, nodes[0], show_stats=False)
431 431 finally:
432 432 hbisect.save_state(repo, state)
433 433 print_result(nodes, good)
434 434 return
435 435
436 436 # update state
437 437
438 438 if rev:
439 439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
440 440 else:
441 441 nodes = [repo.lookup('.')]
442 442
443 443 if good or bad or skip:
444 444 if good:
445 445 state['good'] += nodes
446 446 elif bad:
447 447 state['bad'] += nodes
448 448 elif skip:
449 449 state['skip'] += nodes
450 450 hbisect.save_state(repo, state)
451 451
452 452 if not check_state(state):
453 453 return
454 454
455 455 # actually bisect
456 456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
457 457 if extend:
458 458 if not changesets:
459 459 extendnode = extendbisectrange(nodes, good)
460 460 if extendnode is not None:
461 461 ui.write(_("Extending search to changeset %d:%s\n"
462 462 % (extendnode.rev(), short(extendnode.node()))))
463 463 if noupdate:
464 464 return
465 465 cmdutil.bail_if_changed(repo)
466 466 return hg.clean(repo, extendnode.node())
467 467 raise util.Abort(_("nothing to extend"))
468 468
469 469 if changesets == 0:
470 470 print_result(nodes, good)
471 471 else:
472 472 assert len(nodes) == 1 # only a single node can be tested next
473 473 node = nodes[0]
474 474 # compute the approximate number of remaining tests
475 475 tests, size = 0, 2
476 476 while size <= changesets:
477 477 tests, size = tests + 1, size * 2
478 478 rev = repo.changelog.rev(node)
479 479 ui.write(_("Testing changeset %d:%s "
480 480 "(%d changesets remaining, ~%d tests)\n")
481 481 % (rev, short(node), changesets, tests))
482 482 if not noupdate:
483 483 cmdutil.bail_if_changed(repo)
484 484 return hg.clean(repo, node)
485 485
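The exit-status convention described in the bisect docstring (0 = good, 125 = skip, 127 = abort, any other non-zero = bad) is what makes `hg bisect --command` automation possible. Here is a hypothetical check script following that convention; the `make` and `./run-tests` invocations are placeholders for whatever builds and tests the project under bisection:

    # Sketch of a script to pass to `hg bisect --command`.
    import subprocess
    import sys

    build = subprocess.call(["make", "-s"])            # placeholder build step
    if build != 0:
        sys.exit(125)                                  # unbuildable revision: ask bisect to skip it
    tests = subprocess.call(["./run-tests", "--fast"]) # placeholder test step
    sys.exit(0 if tests == 0 else 1)                   # 0 marks the revision good, 1 marks it bad
    # (127 is never returned here; hg reserves it for "command not found" and aborts.)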
486 486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
487 487 '''track a line of development with movable markers
488 488
489 489 Bookmarks are pointers to certain commits that move when
490 490 committing. Bookmarks are local. They can be renamed, copied and
491 491 deleted. It is possible to use bookmark names in :hg:`merge` and
492 492 :hg:`update` to merge and update respectively to a given bookmark.
493 493
494 494 You can use :hg:`bookmark NAME` to set a bookmark on the working
495 495 directory's parent revision with the given name. If you specify
496 496 a revision using -r REV (where REV may be an existing bookmark),
497 497 the bookmark is assigned to that revision.
498 498
499 499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
500 500 push` and :hg:`help pull`). This requires both the local and remote
501 501 repositories to support bookmarks. For versions prior to 1.8, this means
502 502 the bookmarks extension must be enabled.
503 503 '''
504 504 hexfn = ui.debugflag and hex or short
505 505 marks = repo._bookmarks
506 506 cur = repo.changectx('.').node()
507 507
508 508 if rename:
509 509 if rename not in marks:
510 510 raise util.Abort(_("a bookmark of this name does not exist"))
511 511 if mark in marks and not force:
512 512 raise util.Abort(_("a bookmark of the same name already exists"))
513 513 if mark is None:
514 514 raise util.Abort(_("new bookmark name required"))
515 515 marks[mark] = marks[rename]
516 516 if repo._bookmarkcurrent == rename:
517 517 bookmarks.setcurrent(repo, mark)
518 518 del marks[rename]
519 519 bookmarks.write(repo)
520 520 return
521 521
522 522 if delete:
523 523 if mark is None:
524 524 raise util.Abort(_("bookmark name required"))
525 525 if mark not in marks:
526 526 raise util.Abort(_("a bookmark of this name does not exist"))
527 527 if mark == repo._bookmarkcurrent:
528 528 bookmarks.setcurrent(repo, None)
529 529 del marks[mark]
530 530 bookmarks.write(repo)
531 531 return
532 532
533 533 if mark is not None:
534 534 if "\n" in mark:
535 535 raise util.Abort(_("bookmark name cannot contain newlines"))
536 536 mark = mark.strip()
537 537 if not mark:
538 538 raise util.Abort(_("bookmark names cannot consist entirely of "
539 539 "whitespace"))
540 540 if mark in marks and not force:
541 541 raise util.Abort(_("a bookmark of the same name already exists"))
542 542 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
543 543 and not force):
544 544 raise util.Abort(
545 545 _("a bookmark cannot have the name of an existing branch"))
546 546 if rev:
547 547 marks[mark] = repo.lookup(rev)
548 548 else:
549 549 marks[mark] = repo.changectx('.').node()
550 550 if repo.changectx('.').node() == marks[mark]:
551 551 bookmarks.setcurrent(repo, mark)
552 552 bookmarks.write(repo)
553 553 return
554 554
555 555 if mark is None:
556 556 if rev:
557 557 raise util.Abort(_("bookmark name required"))
558 558 if len(marks) == 0:
559 559 ui.status(_("no bookmarks set\n"))
560 560 else:
561 561 for bmark, n in sorted(marks.iteritems()):
562 562 current = repo._bookmarkcurrent
563 563 if bmark == current and n == cur:
564 564 prefix, label = '*', 'bookmarks.current'
565 565 else:
566 566 prefix, label = ' ', ''
567 567
568 568 if ui.quiet:
569 569 ui.write("%s\n" % bmark, label=label)
570 570 else:
571 571 ui.write(" %s %-25s %d:%s\n" % (
572 572 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
573 573 label=label)
574 574 return
575 575
576 576 def branch(ui, repo, label=None, **opts):
577 577 """set or show the current branch name
578 578
579 579 With no argument, show the current branch name. With one argument,
580 580 set the working directory branch name (the branch will not exist
581 581 in the repository until the next commit). Standard practice
582 582 recommends that primary development take place on the 'default'
583 583 branch.
584 584
585 585 Unless -f/--force is specified, branch will not let you set a
586 586 branch name that already exists, even if it's inactive.
587 587
588 588 Use -C/--clean to reset the working directory branch to that of
589 589 the parent of the working directory, negating a previous branch
590 590 change.
591 591
592 592 Use the command :hg:`update` to switch to an existing branch. Use
593 593 :hg:`commit --close-branch` to mark this branch as closed.
594 594
595 595 Returns 0 on success.
596 596 """
597 597
598 598 if opts.get('clean'):
599 599 label = repo[None].parents()[0].branch()
600 600 repo.dirstate.setbranch(label)
601 601 ui.status(_('reset working directory to branch %s\n') % label)
602 602 elif label:
603 603 if not opts.get('force') and label in repo.branchtags():
604 604 if label not in [p.branch() for p in repo.parents()]:
605 605 raise util.Abort(_('a branch of the same name already exists'
606 606 " (use 'hg update' to switch to it)"))
607 607 repo.dirstate.setbranch(label)
608 608 ui.status(_('marked working directory as branch %s\n') % label)
609 609 else:
610 610 ui.write("%s\n" % repo.dirstate.branch())
611 611
612 612 def branches(ui, repo, active=False, closed=False):
613 613 """list repository named branches
614 614
615 615 List the repository's named branches, indicating which ones are
616 616 inactive. If -c/--closed is specified, also list branches which have
617 617 been marked closed (see :hg:`commit --close-branch`).
618 618
619 619 If -a/--active is specified, only show active branches. A branch
620 620 is considered active if it contains repository heads.
621 621
622 622 Use the command :hg:`update` to switch to an existing branch.
623 623
624 624 Returns 0.
625 625 """
626 626
627 627 hexfunc = ui.debugflag and hex or short
628 628 activebranches = [repo[n].branch() for n in repo.heads()]
629 629 def testactive(tag, node):
630 630 realhead = tag in activebranches
631 631 open = node in repo.branchheads(tag, closed=False)
632 632 return realhead and open
633 633 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
634 634 for tag, node in repo.branchtags().items()],
635 635 reverse=True)
636 636
637 637 for isactive, node, tag in branches:
638 638 if (not active) or isactive:
639 639 if ui.quiet:
640 640 ui.write("%s\n" % tag)
641 641 else:
642 642 hn = repo.lookup(node)
643 643 if isactive:
644 644 label = 'branches.active'
645 645 notice = ''
646 646 elif hn not in repo.branchheads(tag, closed=False):
647 647 if not closed:
648 648 continue
649 649 label = 'branches.closed'
650 650 notice = _(' (closed)')
651 651 else:
652 652 label = 'branches.inactive'
653 653 notice = _(' (inactive)')
654 654 if tag == repo.dirstate.branch():
655 655 label = 'branches.current'
656 656 rev = str(node).rjust(31 - encoding.colwidth(tag))
657 657 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
658 658 tag = ui.label(tag, label)
659 659 ui.write("%s %s%s\n" % (tag, rev, notice))
660 660
661 661 def bundle(ui, repo, fname, dest=None, **opts):
662 662 """create a changegroup file
663 663
664 664 Generate a compressed changegroup file collecting changesets not
665 665 known to be in another repository.
666 666
667 667 If you omit the destination repository, then hg assumes the
668 668 destination will have all the nodes you specify with --base
669 669 parameters. To create a bundle containing all changesets, use
670 670 -a/--all (or --base null).
671 671
672 672 You can change compression method with the -t/--type option.
673 673 The available compression methods are: none, bzip2, and
674 674 gzip (by default, bundles are compressed using bzip2).
675 675
676 676 The bundle file can then be transferred using conventional means
677 677 and applied to another repository with the unbundle or pull
678 678 command. This is useful when direct push and pull are not
679 679 available or when exporting an entire repository is undesirable.
680 680
681 681 Applying bundles preserves all changeset contents including
682 682 permissions, copy/rename information, and revision history.
683 683
684 684 Returns 0 on success, 1 if no changes found.
685 685 """
686 686 revs = None
687 687 if 'rev' in opts:
688 688 revs = cmdutil.revrange(repo, opts['rev'])
689 689
690 690 if opts.get('all'):
691 691 base = ['null']
692 692 else:
693 693 base = cmdutil.revrange(repo, opts.get('base'))
694 694 if base:
695 695 if dest:
696 696 raise util.Abort(_("--base is incompatible with specifying "
697 697 "a destination"))
698 698 base = [repo.lookup(rev) for rev in base]
699 699 # create the right base
700 700 # XXX: nodesbetween / changegroup* should be "fixed" instead
701 701 o = []
702 702 has = set((nullid,))
703 703 for n in base:
704 704 has.update(repo.changelog.reachable(n))
705 705 if revs:
706 706 revs = [repo.lookup(rev) for rev in revs]
707 707 visit = revs[:]
708 708 has.difference_update(visit)
709 709 else:
710 710 visit = repo.changelog.heads()
711 711 seen = {}
712 712 while visit:
713 713 n = visit.pop(0)
714 714 parents = [p for p in repo.changelog.parents(n) if p not in has]
715 715 if len(parents) == 0:
716 716 if n not in has:
717 717 o.append(n)
718 718 else:
719 719 for p in parents:
720 720 if p not in seen:
721 721 seen[p] = 1
722 722 visit.append(p)
723 723 else:
724 724 dest = ui.expandpath(dest or 'default-push', dest or 'default')
725 725 dest, branches = hg.parseurl(dest, opts.get('branch'))
726 726 other = hg.repository(hg.remoteui(repo, opts), dest)
727 727 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
728 728 if revs:
729 729 revs = [repo.lookup(rev) for rev in revs]
730 730 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
731 731
732 732 if not o:
733 733 ui.status(_("no changes found\n"))
734 734 return 1
735 735
736 736 if revs:
737 737 cg = repo.changegroupsubset(o, revs, 'bundle')
738 738 else:
739 739 cg = repo.changegroup(o, 'bundle')
740 740
741 741 bundletype = opts.get('type', 'bzip2').lower()
742 742 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
743 743 bundletype = btypes.get(bundletype)
744 744 if bundletype not in changegroup.bundletypes:
745 745 raise util.Abort(_('unknown bundle type specified with --type'))
746 746
747 747 changegroup.writebundle(cg, fname, bundletype)
748 748
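The btypes table above is what ties -t/--type to the on-disk format: bundles written by this command start with the header HG10UN, HG10BZ (the bzip2 default), or HG10GZ. A small stand-alone sketch, not part of this change, that reads that header back from a bundle file whose name is passed on the command line:

    # Print the six-byte header of a bundle file produced by `hg bundle`.
    import sys

    with open(sys.argv[1], "rb") as f:
        print(f.read(6).decode("ascii", "replace"))    # e.g. "HG10BZ" for the default type

For example, running it on a bundle created with `hg bundle --base null changes.hg` (a hypothetical file name) would print HG10BZ.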
749 749 def cat(ui, repo, file1, *pats, **opts):
750 750 """output the current or given revision of files
751 751
752 752 Print the specified files as they were at the given revision. If
753 753 no revision is given, the parent of the working directory is used,
754 754 or tip if no revision is checked out.
755 755
756 756 Output may be to a file, in which case the name of the file is
757 757 given using a format string. The formatting rules are the same as
758 758 for the export command, with the following additions:
759 759
760 760 :``%s``: basename of file being printed
761 761 :``%d``: dirname of file being printed, or '.' if in repository root
762 762 :``%p``: root-relative path name of file being printed
763 763
764 764 Returns 0 on success.
765 765 """
766 766 ctx = cmdutil.revsingle(repo, opts.get('rev'))
767 767 err = 1
768 768 m = cmdutil.match(repo, (file1,) + pats, opts)
769 769 for abs in ctx.walk(m):
770 770 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
771 771 data = ctx[abs].data()
772 772 if opts.get('decode'):
773 773 data = repo.wwritedata(abs, data)
774 774 fp.write(data)
775 775 fp.close()
776 776 err = 0
777 777 return err
778 778
779 779 def clone(ui, source, dest=None, **opts):
780 780 """make a copy of an existing repository
781 781
782 782 Create a copy of an existing repository in a new directory.
783 783
784 784 If no destination directory name is specified, it defaults to the
785 785 basename of the source.
786 786
787 787 The location of the source is added to the new repository's
788 788 ``.hg/hgrc`` file, as the default to be used for future pulls.
789 789
790 790 See :hg:`help urls` for valid source format details.
791 791
792 792 It is possible to specify an ``ssh://`` URL as the destination, but no
793 793 ``.hg/hgrc`` and working directory will be created on the remote side.
794 794 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
795 795
796 796 A set of changesets (tags, or branch names) to pull may be specified
797 797 by listing each changeset (tag, or branch name) with -r/--rev.
798 798 If -r/--rev is used, the cloned repository will contain only a subset
799 799 of the changesets of the source repository. Only the set of changesets
800 800 defined by all -r/--rev options (including all their ancestors)
801 801 will be pulled into the destination repository.
802 802 No subsequent changesets (including subsequent tags) will be present
803 803 in the destination.
804 804
805 805 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
806 806 local source repositories.
807 807
808 808 For efficiency, hardlinks are used for cloning whenever the source
809 809 and destination are on the same filesystem (note this applies only
810 810 to the repository data, not to the working directory). Some
811 811 filesystems, such as AFS, implement hardlinking incorrectly, but
812 812 do not report errors. In these cases, use the --pull option to
813 813 avoid hardlinking.
814 814
815 815 In some cases, you can clone repositories and the working directory
816 816 using full hardlinks with ::
817 817
818 818 $ cp -al REPO REPOCLONE
819 819
820 820 This is the fastest way to clone, but it is not always safe. The
821 821 operation is not atomic (making sure REPO is not modified during
822 822 the operation is up to you) and you have to make sure your editor
823 823 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
824 824 this is not compatible with certain extensions that place their
825 825 metadata under the .hg directory, such as mq.
826 826
827 827 Mercurial will update the working directory to the first applicable
828 828 revision from this list:
829 829
830 830 a) null if -U or the source repository has no changesets
831 831 b) if -u . and the source repository is local, the first parent of
832 832 the source repository's working directory
833 833 c) the changeset specified with -u (if a branch name, this means the
834 834 latest head of that branch)
835 835 d) the changeset specified with -r
836 836 e) the tipmost head specified with -b
837 837 f) the tipmost head specified with the url#branch source syntax
838 838 g) the tipmost head of the default branch
839 839 h) tip
840 840
841 841 Returns 0 on success.
842 842 """
843 843 if opts.get('noupdate') and opts.get('updaterev'):
844 844 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
845 845
846 846 r = hg.clone(hg.remoteui(ui, opts), source, dest,
847 847 pull=opts.get('pull'),
848 848 stream=opts.get('uncompressed'),
849 849 rev=opts.get('rev'),
850 850 update=opts.get('updaterev') or not opts.get('noupdate'),
851 851 branch=opts.get('branch'))
852 852
853 853 return r is None
854 854
855 855 def commit(ui, repo, *pats, **opts):
856 856 """commit the specified files or all outstanding changes
857 857
858 858 Commit changes to the given files into the repository. Unlike a
859 859 centralized SCM, this operation is a local operation. See
860 860 :hg:`push` for a way to actively distribute your changes.
861 861
862 862 If a list of files is omitted, all changes reported by :hg:`status`
863 863 will be committed.
864 864
865 865 If you are committing the result of a merge, do not provide any
866 866 filenames or -I/-X filters.
867 867
868 868 If no commit message is specified, Mercurial starts your
869 869 configured editor where you can enter a message. In case your
870 870 commit fails, you will find a backup of your message in
871 871 ``.hg/last-message.txt``.
872 872
873 873 See :hg:`help dates` for a list of formats valid for -d/--date.
874 874
875 875 Returns 0 on success, 1 if nothing changed.
876 876 """
877 877 extra = {}
878 878 if opts.get('close_branch'):
879 879 if repo['.'].node() not in repo.branchheads():
880 880 # The topo heads set is included in the branch heads set of the
881 881 # current branch, so it's sufficient to test branchheads
882 882 raise util.Abort(_('can only close branch heads'))
883 883 extra['close'] = 1
884 884 e = cmdutil.commiteditor
885 885 if opts.get('force_editor'):
886 886 e = cmdutil.commitforceeditor
887 887
888 888 def commitfunc(ui, repo, message, match, opts):
889 889 return repo.commit(message, opts.get('user'), opts.get('date'), match,
890 890 editor=e, extra=extra)
891 891
892 892 branch = repo[None].branch()
893 893 bheads = repo.branchheads(branch)
894 894
895 895 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
896 896 if not node:
897 897 ui.status(_("nothing changed\n"))
898 898 return 1
899 899
900 900 ctx = repo[node]
901 901 parents = ctx.parents()
902 902
903 903 if bheads and not [x for x in parents
904 904 if x.node() in bheads and x.branch() == branch]:
905 905 ui.status(_('created new head\n'))
906 906 # The message is not printed for initial roots. For the other
907 907 # changesets, it is printed in the following situations:
908 908 #
909 909 # Par column: for the 2 parents with ...
910 910 # N: null or no parent
911 911 # B: parent is on another named branch
912 912 # C: parent is a regular non head changeset
913 913 # H: parent was a branch head of the current branch
914 914 # Msg column: whether we print "created new head" message
915 915 # In the following, it is assumed that there already exists some
916 916 # initial branch heads of the current branch, otherwise nothing is
917 917 # printed anyway.
918 918 #
919 919 # Par Msg Comment
920 920 # NN y additional topo root
921 921 #
922 922 # BN y additional branch root
923 923 # CN y additional topo head
924 924 # HN n usual case
925 925 #
926 926 # BB y weird additional branch root
927 927 # CB y branch merge
928 928 # HB n merge with named branch
929 929 #
930 930 # CC y additional head from merge
931 931 # CH n merge with a head
932 932 #
933 933 # HH n head merge: head count decreases
934 934
935 935 if not opts.get('close_branch'):
936 936 for r in parents:
937 937 if r.extra().get('close') and r.branch() == branch:
938 938 ui.status(_('reopening closed branch head %d\n') % r)
939 939
940 940 if ui.debugflag:
941 941 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
942 942 elif ui.verbose:
943 943 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
944 944
945 945 def copy(ui, repo, *pats, **opts):
946 946 """mark files as copied for the next commit
947 947
948 948 Mark dest as having copies of source files. If dest is a
949 949 directory, copies are put in that directory. If dest is a file,
950 950 the source must be a single file.
951 951
952 952 By default, this command copies the contents of files as they
953 953 exist in the working directory. If invoked with -A/--after, the
954 954 operation is recorded, but no copying is performed.
955 955
956 956 This command takes effect with the next commit. To undo a copy
957 957 before that, see :hg:`revert`.
958 958
959 959 Returns 0 on success, 1 if errors are encountered.
960 960 """
961 961 wlock = repo.wlock(False)
962 962 try:
963 963 return cmdutil.copy(ui, repo, pats, opts)
964 964 finally:
965 965 wlock.release()
966 966
967 967 def debugancestor(ui, repo, *args):
968 968 """find the ancestor revision of two revisions in a given index"""
969 969 if len(args) == 3:
970 970 index, rev1, rev2 = args
971 971 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
972 972 lookup = r.lookup
973 973 elif len(args) == 2:
974 974 if not repo:
975 975 raise util.Abort(_("there is no Mercurial repository here "
976 976 "(.hg not found)"))
977 977 rev1, rev2 = args
978 978 r = repo.changelog
979 979 lookup = repo.lookup
980 980 else:
981 981 raise util.Abort(_('either two or three arguments required'))
982 982 a = r.ancestor(lookup(rev1), lookup(rev2))
983 983 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
984 984
985 985 def debugbuilddag(ui, repo, text,
986 986 mergeable_file=False,
987 987 appended_file=False,
988 988 overwritten_file=False,
989 989 new_file=False):
990 990 """builds a repo with a given dag from scratch in the current empty repo
991 991
992 992 Elements:
993 993
994 994 - "+n" is a linear run of n nodes based on the current default parent
995 995 - "." is a single node based on the current default parent
996 996 - "$" resets the default parent to null (implied at the start);
997 997 otherwise the default parent is always the last node created
998 998 - "<p" sets the default parent to the backref p
999 999 - "*p" is a fork at parent p, which is a backref
1000 1000 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1001 1001 - "/p2" is a merge of the preceding node and p2
1002 1002 - ":tag" defines a local tag for the preceding node
1003 1003 - "@branch" sets the named branch for subsequent nodes
1004 1004 - "!command" runs the command using your shell
1005 1005 - "!!my command\\n" is like "!", but to the end of the line
1006 1006 - "#...\\n" is a comment up to the end of the line
1007 1007
1008 1008 Whitespace between the above elements is ignored.
1009 1009
1010 1010 A backref is either
1011 1011
1012 1012 - a number n, which references the node curr-n, where curr is the current
1013 1013 node, or
1014 1014 - the name of a local tag you placed earlier using ":tag", or
1015 1015 - empty to denote the default parent.
1016 1016
1017 1017 All string valued-elements are either strictly alphanumeric, or must
1018 1018 be enclosed in double quotes ("..."), with "\\" as escape character.
1019 1019
1020 1020 Note that the --overwritten-file and --appended-file options imply the
1021 1021 use of "HGMERGE=internal:local" during DAG buildup.
1022 1022 """
1023 1023
1024 1024 if not (mergeable_file or appended_file or overwritten_file or new_file):
1025 1025 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1026 1026
1027 1027 if len(repo.changelog) > 0:
1028 1028 raise util.Abort(_('repository is not empty'))
1029 1029
1030 1030 if overwritten_file or appended_file:
1031 1031 # we don't want to fail in merges during buildup
1032 1032 os.environ['HGMERGE'] = 'internal:local'
1033 1033
1034 1034 def writefile(fname, text, fmode="wb"):
1035 1035 f = open(fname, fmode)
1036 1036 try:
1037 1037 f.write(text)
1038 1038 finally:
1039 1039 f.close()
1040 1040
1041 1041 if mergeable_file:
1042 1042 linesperrev = 2
1043 1043 # determine number of revs in DAG
1044 1044 n = 0
1045 1045 for type, data in dagparser.parsedag(text):
1046 1046 if type == 'n':
1047 1047 n += 1
1048 1048 # make a file with k lines per rev
1049 1049 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1050 1050 + "\n")
1051 1051
1052 1052 at = -1
1053 1053 atbranch = 'default'
1054 1054 for type, data in dagparser.parsedag(text):
1055 1055 if type == 'n':
1056 1056 ui.status('node %s\n' % str(data))
1057 1057 id, ps = data
1058 1058 p1 = ps[0]
1059 1059 if p1 != at:
1060 1060 update(ui, repo, node=str(p1), clean=True)
1061 1061 at = p1
1062 1062 if repo.dirstate.branch() != atbranch:
1063 1063 branch(ui, repo, atbranch, force=True)
1064 1064 if len(ps) > 1:
1065 1065 p2 = ps[1]
1066 1066 merge(ui, repo, node=p2)
1067 1067
1068 1068 if mergeable_file:
1069 1069 f = open("mf", "rb+")
1070 1070 try:
1071 1071 lines = f.read().split("\n")
1072 1072 lines[id * linesperrev] += " r%i" % id
1073 1073 f.seek(0)
1074 1074 f.write("\n".join(lines))
1075 1075 finally:
1076 1076 f.close()
1077 1077
1078 1078 if appended_file:
1079 1079 writefile("af", "r%i\n" % id, "ab")
1080 1080
1081 1081 if overwritten_file:
1082 1082 writefile("of", "r%i\n" % id)
1083 1083
1084 1084 if new_file:
1085 1085 writefile("nf%i" % id, "r%i\n" % id)
1086 1086
1087 1087 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1088 1088 at = id
1089 1089 elif type == 'l':
1090 1090 id, name = data
1091 1091 ui.status('tag %s\n' % name)
1092 1092 tag(ui, repo, name, local=True)
1093 1093 elif type == 'a':
1094 1094 ui.status('branch %s\n' % data)
1095 1095 atbranch = data
1096 1096 elif type in 'cC':
1097 1097 r = util.system(data, cwd=repo.root)
1098 1098 if r:
1099 1099 desc, r = util.explain_exit(r)
1100 1100 raise util.Abort(_('%s command %s') % (data, desc))
1101 1101
1102 1102 def debugcommands(ui, cmd='', *args):
1103 1103 """list all available commands and options"""
1104 1104 for cmd, vals in sorted(table.iteritems()):
1105 1105 cmd = cmd.split('|')[0].strip('^')
1106 1106 opts = ', '.join([i[1] for i in vals[1]])
1107 1107 ui.write('%s: %s\n' % (cmd, opts))
1108 1108
1109 1109 def debugcomplete(ui, cmd='', **opts):
1110 1110 """returns the completion list associated with the given command"""
1111 1111
1112 1112 if opts.get('options'):
1113 1113 options = []
1114 1114 otables = [globalopts]
1115 1115 if cmd:
1116 1116 aliases, entry = cmdutil.findcmd(cmd, table, False)
1117 1117 otables.append(entry[1])
1118 1118 for t in otables:
1119 1119 for o in t:
1120 1120 if "(DEPRECATED)" in o[3]:
1121 1121 continue
1122 1122 if o[0]:
1123 1123 options.append('-%s' % o[0])
1124 1124 options.append('--%s' % o[1])
1125 1125 ui.write("%s\n" % "\n".join(options))
1126 1126 return
1127 1127
1128 1128 cmdlist = cmdutil.findpossible(cmd, table)
1129 1129 if ui.verbose:
1130 1130 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1131 1131 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1132 1132
1133 1133 def debugfsinfo(ui, path = "."):
1134 1134 """show information detected about current filesystem"""
1135 1135 open('.debugfsinfo', 'w').write('')
1136 1136 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1137 1137 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1138 1138 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1139 1139 and 'yes' or 'no'))
1140 1140 os.unlink('.debugfsinfo')
1141 1141
1142 1142 def debugrebuildstate(ui, repo, rev="tip"):
1143 1143 """rebuild the dirstate as it would look like for the given revision"""
1144 1144 ctx = cmdutil.revsingle(repo, rev)
1145 1145 wlock = repo.wlock()
1146 1146 try:
1147 1147 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1148 1148 finally:
1149 1149 wlock.release()
1150 1150
1151 1151 def debugcheckstate(ui, repo):
1152 1152 """validate the correctness of the current dirstate"""
1153 1153 parent1, parent2 = repo.dirstate.parents()
1154 1154 m1 = repo[parent1].manifest()
1155 1155 m2 = repo[parent2].manifest()
1156 1156 errors = 0
1157 1157 for f in repo.dirstate:
1158 1158 state = repo.dirstate[f]
1159 1159 if state in "nr" and f not in m1:
1160 1160 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1161 1161 errors += 1
1162 1162 if state in "a" and f in m1:
1163 1163 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1164 1164 errors += 1
1165 1165 if state in "m" and f not in m1 and f not in m2:
1166 1166 ui.warn(_("%s in state %s, but not in either manifest\n") %
1167 1167 (f, state))
1168 1168 errors += 1
1169 1169 for f in m1:
1170 1170 state = repo.dirstate[f]
1171 1171 if state not in "nrm":
1172 1172             ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1173 1173 errors += 1
1174 1174 if errors:
1175 1175 error = _(".hg/dirstate inconsistent with current parent's manifest")
1176 1176 raise util.Abort(error)
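# Note on the dirstate states checked above: 'n' is normal/tracked, 'a' is
# added, 'r' is removed (still present in the parent manifest until commit)
# and 'm' is merged from the second parent -- hence 'n'/'r' entries must be
# in manifest1, 'a' entries must not, and 'm' entries must appear in at least
# one of the two parent manifests.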
1177 1177
1178 1178 def showconfig(ui, repo, *values, **opts):
1179 1179 """show combined config settings from all hgrc files
1180 1180
1181 1181 With no arguments, print names and values of all config items.
1182 1182
1183 1183 With one argument of the form section.name, print just the value
1184 1184 of that config item.
1185 1185
1186 1186 With multiple arguments, print names and values of all config
1187 1187 items with matching section names.
1188 1188
1189 1189 With --debug, the source (filename and line number) is printed
1190 1190 for each config item.
1191 1191
1192 1192 Returns 0 on success.
1193 1193 """
1194 1194
1195 1195 for f in util.rcpath():
1196 1196 ui.debug(_('read config from: %s\n') % f)
1197 1197 untrusted = bool(opts.get('untrusted'))
1198 1198 if values:
1199 1199 sections = [v for v in values if '.' not in v]
1200 1200 items = [v for v in values if '.' in v]
1201 1201         if len(items) > 1 or (items and sections):
1202 1202 raise util.Abort(_('only one config item permitted'))
1203 1203 for section, name, value in ui.walkconfig(untrusted=untrusted):
1204 1204 value = str(value).replace('\n', '\\n')
1205 1205 sectname = section + '.' + name
1206 1206 if values:
1207 1207 for v in values:
1208 1208 if v == section:
1209 1209 ui.debug('%s: ' %
1210 1210 ui.configsource(section, name, untrusted))
1211 1211 ui.write('%s=%s\n' % (sectname, value))
1212 1212 elif v == sectname:
1213 1213 ui.debug('%s: ' %
1214 1214 ui.configsource(section, name, untrusted))
1215 1215 ui.write(value, '\n')
1216 1216 else:
1217 1217 ui.debug('%s: ' %
1218 1218 ui.configsource(section, name, untrusted))
1219 1219 ui.write('%s=%s\n' % (sectname, value))
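# Illustrative usage (a sketch; values are hypothetical):
#   hg showconfig ui.username        -> prints just the value of that item
#   hg showconfig ui                 -> prints every "ui.<name>=<value>" pair
#   hg showconfig --debug ui.editor  -> prefixes the defining hgrc file:line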
1220 1220
1221 1221 def debugknown(ui, repopath, *ids, **opts):
1222 1222 """test whether node ids are known to a repo
1223 1223
1224 1224     Every ID must be a full-length hex node id string. Prints a string of 0s
1225 1225     and 1s indicating unknown/known.
1226 1226 """
1227 1227 repo = hg.repository(ui, repopath)
1228 1228 if not repo.capable('known'):
1229 1229 raise util.Abort("known() not supported by target repository")
1230 1230 flags = repo.known([bin(s) for s in ids])
1231 1231 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
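# The flag string written above carries one '0'/'1' per queried id, in query
# order, e.g.:
#   >>> "".join([f and "1" or "0" for f in [True, False, True]])
#   '101'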
1232 1232
1233 1233 def debugbundle(ui, bundlepath, all=None, **opts):
1234 1234 """lists the contents of a bundle"""
1235 1235 f = url.open(ui, bundlepath)
1236 1236 try:
1237 1237 gen = changegroup.readbundle(f, bundlepath)
1238 1238 if all:
1239 1239 ui.write("format: id, p1, p2, cset, len(delta)\n")
1240 1240
1241 1241 def showchunks(named):
1242 1242 ui.write("\n%s\n" % named)
1243 1243 while 1:
1244 1244 chunkdata = gen.parsechunk()
1245 1245 if not chunkdata:
1246 1246 break
1247 1247 node = chunkdata['node']
1248 1248 p1 = chunkdata['p1']
1249 1249 p2 = chunkdata['p2']
1250 1250 cs = chunkdata['cs']
1251 1251 delta = chunkdata['data']
1252 1252 ui.write("%s %s %s %s %s\n" %
1253 1253 (hex(node), hex(p1), hex(p2),
1254 1254 hex(cs), len(delta)))
1255 1255
1256 1256 showchunks("changelog")
1257 1257 showchunks("manifest")
1258 1258 while 1:
1259 1259 fname = gen.chunk()
1260 1260 if not fname:
1261 1261 break
1262 1262 showchunks(fname)
1263 1263 else:
1264 1264 while 1:
1265 1265 chunkdata = gen.parsechunk()
1266 1266 if not chunkdata:
1267 1267 break
1268 1268 node = chunkdata['node']
1269 1269 ui.write("%s\n" % hex(node))
1270 1270 finally:
1271 1271 f.close()
1272 1272
1273 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1274 """retrieves a bundle from a repo
1275
1276 Every ID must be a full-length hex node id string. Saves the bundle to the
1277 given file.
1278 """
1279 repo = hg.repository(ui, repopath)
1280 if not repo.capable('getbundle'):
1281 raise util.Abort("getbundle() not supported by target repository")
1282 args = {}
1283 if common:
1284 args['common'] = [bin(s) for s in common]
1285 if head:
1286 args['heads'] = [bin(s) for s in head]
1287 bundle = repo.getbundle('debug', **args)
1288
1289 bundletype = opts.get('type', 'bzip2').lower()
1290 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1291 bundletype = btypes.get(bundletype)
1292 if bundletype not in changegroup.bundletypes:
1293 raise util.Abort(_('unknown bundle type specified with --type'))
1294 changegroup.writebundle(bundle, bundlepath, bundletype)
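# Semantics sketch: the returned changegroup contains every changeset that is
# an ancestor of (or equal to) a node in 'heads' but not an ancestor of (or
# equal to) any node in 'common'; omitting --head defaults to all repository
# heads, and omitting --common to the null revision, i.e. a full bundle.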
1295
1273 1296 def debugpushkey(ui, repopath, namespace, *keyinfo):
1274 1297 '''access the pushkey key/value protocol
1275 1298
1276 1299 With two args, list the keys in the given namespace.
1277 1300
1278 1301 With five args, set a key to new if it currently is set to old.
1279 1302 Reports success or failure.
1280 1303 '''
1281 1304
1282 1305 target = hg.repository(ui, repopath)
1283 1306 if keyinfo:
1284 1307 key, old, new = keyinfo
1285 1308 r = target.pushkey(namespace, key, old, new)
1286 1309 ui.status(str(r) + '\n')
1287 1310 return not r
1288 1311 else:
1289 1312 for k, v in target.listkeys(namespace).iteritems():
1290 1313 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1291 1314 v.encode('string-escape')))
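# Illustrative usage (a sketch; REPO and the bookmark name are hypothetical):
#   hg debugpushkey REPO bookmarks                       # list keys in a namespace
#   hg debugpushkey REPO bookmarks mark OLDNODE NEWNODE  # set key if it is OLDNODE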
1292 1315
1293 1316 def debugrevspec(ui, repo, expr):
1294 1317 '''parse and apply a revision specification'''
1295 1318 if ui.verbose:
1296 1319 tree = revset.parse(expr)[0]
1297 1320 ui.note(tree, "\n")
1298 1321 func = revset.match(expr)
1299 1322 for c in func(repo, range(len(repo))):
1300 1323 ui.write("%s\n" % c)
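# With -v the parse tree is printed before the matching revisions; for an
# expression such as 'heads(default)' it is a nested tuple along the lines of
# ('func', ('symbol', 'heads'), ('symbol', 'default')) (a sketch -- the exact
# shape depends on the revset grammar).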
1301 1324
1302 1325 def debugsetparents(ui, repo, rev1, rev2=None):
1303 1326 """manually set the parents of the current working directory
1304 1327
1305 1328 This is useful for writing repository conversion tools, but should
1306 1329 be used with care.
1307 1330
1308 1331 Returns 0 on success.
1309 1332 """
1310 1333
1311 1334 r1 = cmdutil.revsingle(repo, rev1).node()
1312 1335 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1313 1336
1314 1337 wlock = repo.wlock()
1315 1338 try:
1316 1339 repo.dirstate.setparents(r1, r2)
1317 1340 finally:
1318 1341 wlock.release()
1319 1342
1320 1343 def debugstate(ui, repo, nodates=None):
1321 1344 """show the contents of the current dirstate"""
1322 1345 timestr = ""
1323 1346 showdate = not nodates
1324 1347 for file_, ent in sorted(repo.dirstate._map.iteritems()):
1325 1348 if showdate:
1326 1349 if ent[3] == -1:
1327 1350 # Pad or slice to locale representation
1328 1351 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1329 1352 time.localtime(0)))
1330 1353 timestr = 'unset'
1331 1354 timestr = (timestr[:locale_len] +
1332 1355 ' ' * (locale_len - len(timestr)))
1333 1356 else:
1334 1357 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1335 1358 time.localtime(ent[3]))
1336 1359 if ent[1] & 020000:
1337 1360 mode = 'lnk'
1338 1361 else:
1339 1362 mode = '%3o' % (ent[1] & 0777)
1340 1363 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1341 1364 for f in repo.dirstate.copies():
1342 1365 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
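# Note: each dirstate entry unpacked above is a (state, mode, size, mtime)
# tuple; the 020000 bit in the mode (set in S_IFLNK but not S_IFREG) flags a
# symlink, and mtime == -1 means the timestamp is unset.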
1343 1366
1344 1367 def debugsub(ui, repo, rev=None):
1345 1368 ctx = cmdutil.revsingle(repo, rev, None)
1346 1369 for k, v in sorted(ctx.substate.items()):
1347 1370 ui.write('path %s\n' % k)
1348 1371 ui.write(' source %s\n' % v[0])
1349 1372 ui.write(' revision %s\n' % v[1])
1350 1373
1351 1374 def debugdag(ui, repo, file_=None, *revs, **opts):
1352 1375 """format the changelog or an index DAG as a concise textual description
1353 1376
1354 1377 If you pass a revlog index, the revlog's DAG is emitted. If you list
1355 1378 revision numbers, they get labelled in the output as rN.
1356 1379
1357 1380 Otherwise, the changelog DAG of the current repo is emitted.
1358 1381 """
1359 1382 spaces = opts.get('spaces')
1360 1383 dots = opts.get('dots')
1361 1384 if file_:
1362 1385 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1363 1386 revs = set((int(r) for r in revs))
1364 1387 def events():
1365 1388 for r in rlog:
1366 1389 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1367 1390 if r in revs:
1368 1391 yield 'l', (r, "r%i" % r)
1369 1392 elif repo:
1370 1393 cl = repo.changelog
1371 1394 tags = opts.get('tags')
1372 1395 branches = opts.get('branches')
1373 1396 if tags:
1374 1397 labels = {}
1375 1398 for l, n in repo.tags().items():
1376 1399 labels.setdefault(cl.rev(n), []).append(l)
1377 1400 def events():
1378 1401 b = "default"
1379 1402 for r in cl:
1380 1403 if branches:
1381 1404 newb = cl.read(cl.node(r))[5]['branch']
1382 1405 if newb != b:
1383 1406 yield 'a', newb
1384 1407 b = newb
1385 1408 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1386 1409 if tags:
1387 1410 ls = labels.get(r)
1388 1411 if ls:
1389 1412 for l in ls:
1390 1413 yield 'l', (r, l)
1391 1414 else:
1392 1415 raise util.Abort(_('need repo for changelog dag'))
1393 1416
1394 1417 for line in dagparser.dagtextlines(events(),
1395 1418 addspaces=spaces,
1396 1419 wraplabels=True,
1397 1420 wrapannotations=True,
1398 1421 wrapnonlinear=dots,
1399 1422 usedots=dots,
1400 1423 maxlinewidth=70):
1401 1424 ui.write(line)
1402 1425 ui.write("\n")
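# The emitted text is the same concise DAG language consumed by
# "hg debugbuilddag"/dagparser: '+n' appends n linear nodes, ':name' labels
# the previous node, '*name' starts a new branch at that label and '/name'
# merges with it (a sketch of the grammar; see dagparser for the full syntax).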
1403 1426
1404 1427 def debugdata(ui, repo, file_, rev):
1405 1428 """dump the contents of a data file revision"""
1406 1429 r = None
1407 1430 if repo:
1408 1431 filelog = repo.file(file_)
1409 1432 if len(filelog):
1410 1433 r = filelog
1411 1434 if not r:
1412 1435 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1413 1436 try:
1414 1437 ui.write(r.revision(r.lookup(rev)))
1415 1438 except KeyError:
1416 1439 raise util.Abort(_('invalid revision identifier %s') % rev)
1417 1440
1418 1441 def debugdate(ui, date, range=None, **opts):
1419 1442 """parse and display a date"""
1420 1443 if opts["extended"]:
1421 1444 d = util.parsedate(date, util.extendeddateformats)
1422 1445 else:
1423 1446 d = util.parsedate(date)
1424 1447 ui.write("internal: %s %s\n" % d)
1425 1448 ui.write("standard: %s\n" % util.datestr(d))
1426 1449 if range:
1427 1450 m = util.matchdate(range)
1428 1451 ui.write("match: %s\n" % m(d[0]))
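# Dates are handled internally as a (unixtime, offset) pair where offset is
# the timezone in seconds west of UTC, so for instance (a sketch, assuming
# mercurial.util as imported at the top of this module):
#   >>> util.parsedate('2011-04-01 10:30 +0200')[1]
#   -7200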
1429 1452
1430 1453 def debugignore(ui, repo, *values, **opts):
1431 1454 """display the combined ignore pattern"""
1432 1455 ignore = repo.dirstate._ignore
1433 1456 if hasattr(ignore, 'includepat'):
1434 1457 ui.write("%s\n" % ignore.includepat)
1435 1458 else:
1436 1459 raise util.Abort(_("no ignore patterns found"))
1437 1460
1438 1461 def debugindex(ui, repo, file_, **opts):
1439 1462 """dump the contents of an index file"""
1440 1463 r = None
1441 1464 if repo:
1442 1465 filelog = repo.file(file_)
1443 1466 if len(filelog):
1444 1467 r = filelog
1445 1468
1446 1469 format = opts.get('format', 0)
1447 1470 if format not in (0, 1):
1448 1471 raise util.Abort(_("unknown format %d") % format)
1449 1472
1450 1473 if not r:
1451 1474 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1452 1475
1453 1476 if format == 0:
1454 1477 ui.write(" rev offset length base linkrev"
1455 1478 " nodeid p1 p2\n")
1456 1479 elif format == 1:
1457 1480 ui.write(" rev flag offset length"
1458 1481 " size base link p1 p2 nodeid\n")
1459 1482
1460 1483 for i in r:
1461 1484 node = r.node(i)
1462 1485 if format == 0:
1463 1486 try:
1464 1487 pp = r.parents(node)
1465 1488 except:
1466 1489 pp = [nullid, nullid]
1467 1490 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1468 1491 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1469 1492 short(node), short(pp[0]), short(pp[1])))
1470 1493 elif format == 1:
1471 1494 pr = r.parentrevs(i)
1472 1495 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1473 1496 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1474 1497 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1475 1498
1476 1499 def debugindexdot(ui, repo, file_):
1477 1500 """dump an index DAG as a graphviz dot file"""
1478 1501 r = None
1479 1502 if repo:
1480 1503 filelog = repo.file(file_)
1481 1504 if len(filelog):
1482 1505 r = filelog
1483 1506 if not r:
1484 1507 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1485 1508 ui.write("digraph G {\n")
1486 1509 for i in r:
1487 1510 node = r.node(i)
1488 1511 pp = r.parents(node)
1489 1512 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1490 1513 if pp[1] != nullid:
1491 1514 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1492 1515 ui.write("}\n")
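# Illustrative usage (a sketch; the filename is hypothetical): the output is
# Graphviz source, so it can be rendered directly, e.g.
#   hg debugindexdot myfile.c | dot -Tpng > dag.png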
1493 1516
1494 1517 def debuginstall(ui):
1495 1518 '''test Mercurial installation
1496 1519
1497 1520 Returns 0 on success.
1498 1521 '''
1499 1522
1500 1523 def writetemp(contents):
1501 1524 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1502 1525 f = os.fdopen(fd, "wb")
1503 1526 f.write(contents)
1504 1527 f.close()
1505 1528 return name
1506 1529
1507 1530 problems = 0
1508 1531
1509 1532 # encoding
1510 1533 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1511 1534 try:
1512 1535 encoding.fromlocal("test")
1513 1536 except util.Abort, inst:
1514 1537 ui.write(" %s\n" % inst)
1515 1538 ui.write(_(" (check that your locale is properly set)\n"))
1516 1539 problems += 1
1517 1540
1518 1541 # compiled modules
1519 1542 ui.status(_("Checking installed modules (%s)...\n")
1520 1543 % os.path.dirname(__file__))
1521 1544 try:
1522 1545 import bdiff, mpatch, base85, osutil
1523 1546 except Exception, inst:
1524 1547 ui.write(" %s\n" % inst)
1525 1548 ui.write(_(" One or more extensions could not be found"))
1526 1549 ui.write(_(" (check that you compiled the extensions)\n"))
1527 1550 problems += 1
1528 1551
1529 1552 # templates
1530 1553 ui.status(_("Checking templates...\n"))
1531 1554 try:
1532 1555 import templater
1533 1556 templater.templater(templater.templatepath("map-cmdline.default"))
1534 1557 except Exception, inst:
1535 1558 ui.write(" %s\n" % inst)
1536 1559 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1537 1560 problems += 1
1538 1561
1539 1562 # patch
1540 1563 ui.status(_("Checking patch...\n"))
1541 1564 patchproblems = 0
1542 1565 a = "1\n2\n3\n4\n"
1543 1566 b = "1\n2\n3\ninsert\n4\n"
1544 1567 fa = writetemp(a)
1545 1568 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1546 1569 os.path.basename(fa))
1547 1570 fd = writetemp(d)
1548 1571
1549 1572 files = {}
1550 1573 try:
1551 1574 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1552 1575 except util.Abort, e:
1553 1576 ui.write(_(" patch call failed:\n"))
1554 1577 ui.write(" " + str(e) + "\n")
1555 1578 patchproblems += 1
1556 1579 else:
1557 1580 if list(files) != [os.path.basename(fa)]:
1558 1581 ui.write(_(" unexpected patch output!\n"))
1559 1582 patchproblems += 1
1560 1583 a = open(fa).read()
1561 1584 if a != b:
1562 1585 ui.write(_(" patch test failed!\n"))
1563 1586 patchproblems += 1
1564 1587
1565 1588 if patchproblems:
1566 1589 if ui.config('ui', 'patch'):
1567 1590 ui.write(_(" (Current patch tool may be incompatible with patch,"
1568 1591 " or misconfigured. Please check your configuration"
1569 1592 " file)\n"))
1570 1593 else:
1571 1594 ui.write(_(" Internal patcher failure, please report this error"
1572 1595 " to http://mercurial.selenic.com/wiki/BugTracker\n"))
1573 1596 problems += patchproblems
1574 1597
1575 1598 os.unlink(fa)
1576 1599 os.unlink(fd)
1577 1600
1578 1601 # editor
1579 1602 ui.status(_("Checking commit editor...\n"))
1580 1603 editor = ui.geteditor()
1581 1604 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1582 1605 if not cmdpath:
1583 1606 if editor == 'vi':
1584 1607 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1585 1608 ui.write(_(" (specify a commit editor in your configuration"
1586 1609 " file)\n"))
1587 1610 else:
1588 1611 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1589 1612 ui.write(_(" (specify a commit editor in your configuration"
1590 1613 " file)\n"))
1591 1614 problems += 1
1592 1615
1593 1616 # check username
1594 1617 ui.status(_("Checking username...\n"))
1595 1618 try:
1596 1619 ui.username()
1597 1620 except util.Abort, e:
1598 1621 ui.write(" %s\n" % e)
1599 1622 ui.write(_(" (specify a username in your configuration file)\n"))
1600 1623 problems += 1
1601 1624
1602 1625 if not problems:
1603 1626 ui.status(_("No problems detected\n"))
1604 1627 else:
1605 1628 ui.write(_("%s problems detected,"
1606 1629 " please check your install!\n") % problems)
1607 1630
1608 1631 return problems
1609 1632
1610 1633 def debugrename(ui, repo, file1, *pats, **opts):
1611 1634 """dump rename information"""
1612 1635
1613 1636 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1614 1637 m = cmdutil.match(repo, (file1,) + pats, opts)
1615 1638 for abs in ctx.walk(m):
1616 1639 fctx = ctx[abs]
1617 1640 o = fctx.filelog().renamed(fctx.filenode())
1618 1641 rel = m.rel(abs)
1619 1642 if o:
1620 1643 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1621 1644 else:
1622 1645 ui.write(_("%s not renamed\n") % rel)
1623 1646
1624 1647 def debugwalk(ui, repo, *pats, **opts):
1625 1648 """show how files match on given patterns"""
1626 1649 m = cmdutil.match(repo, pats, opts)
1627 1650 items = list(repo.walk(m))
1628 1651 if not items:
1629 1652 return
1630 1653 fmt = 'f %%-%ds %%-%ds %%s' % (
1631 1654 max([len(abs) for abs in items]),
1632 1655 max([len(m.rel(abs)) for abs in items]))
1633 1656 for abs in items:
1634 1657 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1635 1658 ui.write("%s\n" % line.rstrip())
1636 1659
1637 1660 def debugwireargs(ui, repopath, *vals, **opts):
1638 1661 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1639 1662 for opt in remoteopts:
1640 1663 del opts[opt[1]]
1641 1664 args = {}
1642 1665 for k, v in opts.iteritems():
1643 1666 if v:
1644 1667 args[k] = v
1645 1668 # run twice to check that we don't mess up the stream for the next command
1646 1669 res1 = repo.debugwireargs(*vals, **args)
1647 1670 res2 = repo.debugwireargs(*vals, **args)
1648 1671 ui.write("%s\n" % res1)
1649 1672 if res1 != res2:
1650 1673 ui.warn("%s\n" % res2)
1651 1674
1652 1675 def diff(ui, repo, *pats, **opts):
1653 1676 """diff repository (or selected files)
1654 1677
1655 1678 Show differences between revisions for the specified files.
1656 1679
1657 1680 Differences between files are shown using the unified diff format.
1658 1681
1659 1682 .. note::
1660 1683 diff may generate unexpected results for merges, as it will
1661 1684 default to comparing against the working directory's first
1662 1685 parent changeset if no revisions are specified.
1663 1686
1664 1687 When two revision arguments are given, then changes are shown
1665 1688 between those revisions. If only one revision is specified then
1666 1689 that revision is compared to the working directory, and, when no
1667 1690 revisions are specified, the working directory files are compared
1668 1691     to its first parent.
1669 1692
1670 1693 Alternatively you can specify -c/--change with a revision to see
1671 1694 the changes in that changeset relative to its first parent.
1672 1695
1673 1696 Without the -a/--text option, diff will avoid generating diffs of
1674 1697 files it detects as binary. With -a, diff will generate a diff
1675 1698 anyway, probably with undesirable results.
1676 1699
1677 1700 Use the -g/--git option to generate diffs in the git extended diff
1678 1701 format. For more information, read :hg:`help diffs`.
1679 1702
1680 1703 Returns 0 on success.
1681 1704 """
1682 1705
1683 1706 revs = opts.get('rev')
1684 1707 change = opts.get('change')
1685 1708 stat = opts.get('stat')
1686 1709 reverse = opts.get('reverse')
1687 1710
1688 1711 if revs and change:
1689 1712 msg = _('cannot specify --rev and --change at the same time')
1690 1713 raise util.Abort(msg)
1691 1714 elif change:
1692 1715 node2 = cmdutil.revsingle(repo, change, None).node()
1693 1716 node1 = repo[node2].parents()[0].node()
1694 1717 else:
1695 1718 node1, node2 = cmdutil.revpair(repo, revs)
1696 1719
1697 1720 if reverse:
1698 1721 node1, node2 = node2, node1
1699 1722
1700 1723 diffopts = patch.diffopts(ui, opts)
1701 1724 m = cmdutil.match(repo, pats, opts)
1702 1725 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1703 1726 listsubrepos=opts.get('subrepos'))
1704 1727
1705 1728 def export(ui, repo, *changesets, **opts):
1706 1729 """dump the header and diffs for one or more changesets
1707 1730
1708 1731 Print the changeset header and diffs for one or more revisions.
1709 1732
1710 1733 The information shown in the changeset header is: author, date,
1711 1734 branch name (if non-default), changeset hash, parent(s) and commit
1712 1735 comment.
1713 1736
1714 1737 .. note::
1715 1738 export may generate unexpected diff output for merge
1716 1739 changesets, as it will compare the merge changeset against its
1717 1740 first parent only.
1718 1741
1719 1742 Output may be to a file, in which case the name of the file is
1720 1743 given using a format string. The formatting rules are as follows:
1721 1744
1722 1745 :``%%``: literal "%" character
1723 1746 :``%H``: changeset hash (40 hexadecimal digits)
1724 1747 :``%N``: number of patches being generated
1725 1748 :``%R``: changeset revision number
1726 1749 :``%b``: basename of the exporting repository
1727 1750 :``%h``: short-form changeset hash (12 hexadecimal digits)
1728 1751 :``%n``: zero-padded sequence number, starting at 1
1729 1752 :``%r``: zero-padded changeset revision number
1730 1753
1731 1754 Without the -a/--text option, export will avoid generating diffs
1732 1755 of files it detects as binary. With -a, export will generate a
1733 1756 diff anyway, probably with undesirable results.
1734 1757
1735 1758 Use the -g/--git option to generate diffs in the git extended diff
1736 1759 format. See :hg:`help diffs` for more information.
1737 1760
1738 1761 With the --switch-parent option, the diff will be against the
1739 1762     second parent. This can be useful for reviewing a merge.
1740 1763
1741 1764 Returns 0 on success.
1742 1765 """
1743 1766 changesets += tuple(opts.get('rev', []))
1744 1767 if not changesets:
1745 1768 raise util.Abort(_("export requires at least one changeset"))
1746 1769 revs = cmdutil.revrange(repo, changesets)
1747 1770 if len(revs) > 1:
1748 1771 ui.note(_('exporting patches:\n'))
1749 1772 else:
1750 1773 ui.note(_('exporting patch:\n'))
1751 1774 cmdutil.export(repo, revs, template=opts.get('output'),
1752 1775 switch_parent=opts.get('switch_parent'),
1753 1776 opts=patch.diffopts(ui, opts))
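# Illustrative usage of the -o format escapes documented above (a sketch;
# the revision numbers are hypothetical):
#   hg export -o "%b-r%R.patch" 120 121
# writes "<repo-basename>-r120.patch" and "<repo-basename>-r121.patch".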
1754 1777
1755 1778 def forget(ui, repo, *pats, **opts):
1756 1779 """forget the specified files on the next commit
1757 1780
1758 1781 Mark the specified files so they will no longer be tracked
1759 1782 after the next commit.
1760 1783
1761 1784 This only removes files from the current branch, not from the
1762 1785 entire project history, and it does not delete them from the
1763 1786 working directory.
1764 1787
1765 1788 To undo a forget before the next commit, see :hg:`add`.
1766 1789
1767 1790 Returns 0 on success.
1768 1791 """
1769 1792
1770 1793 if not pats:
1771 1794 raise util.Abort(_('no files specified'))
1772 1795
1773 1796 m = cmdutil.match(repo, pats, opts)
1774 1797 s = repo.status(match=m, clean=True)
1775 1798 forget = sorted(s[0] + s[1] + s[3] + s[6])
1776 1799 errs = 0
1777 1800
1778 1801 for f in m.files():
1779 1802 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1780 1803 ui.warn(_('not removing %s: file is already untracked\n')
1781 1804 % m.rel(f))
1782 1805 errs = 1
1783 1806
1784 1807 for f in forget:
1785 1808 if ui.verbose or not m.exact(f):
1786 1809 ui.status(_('removing %s\n') % m.rel(f))
1787 1810
1788 1811 repo[None].remove(forget, unlink=False)
1789 1812 return errs
1790 1813
1791 1814 def grep(ui, repo, pattern, *pats, **opts):
1792 1815 """search for a pattern in specified files and revisions
1793 1816
1794 1817 Search revisions of files for a regular expression.
1795 1818
1796 1819 This command behaves differently than Unix grep. It only accepts
1797 1820 Python/Perl regexps. It searches repository history, not the
1798 1821 working directory. It always prints the revision number in which a
1799 1822 match appears.
1800 1823
1801 1824 By default, grep only prints output for the first revision of a
1802 1825 file in which it finds a match. To get it to print every revision
1803 1826 that contains a change in match status ("-" for a match that
1804 1827 becomes a non-match, or "+" for a non-match that becomes a match),
1805 1828 use the --all flag.
1806 1829
1807 1830 Returns 0 if a match is found, 1 otherwise.
1808 1831 """
1809 1832 reflags = 0
1810 1833 if opts.get('ignore_case'):
1811 1834 reflags |= re.I
1812 1835 try:
1813 1836 regexp = re.compile(pattern, reflags)
1814 1837 except re.error, inst:
1815 1838 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1816 1839 return 1
1817 1840 sep, eol = ':', '\n'
1818 1841 if opts.get('print0'):
1819 1842 sep = eol = '\0'
1820 1843
1821 1844 getfile = util.lrucachefunc(repo.file)
1822 1845
1823 1846 def matchlines(body):
1824 1847 begin = 0
1825 1848 linenum = 0
1826 1849 while True:
1827 1850 match = regexp.search(body, begin)
1828 1851 if not match:
1829 1852 break
1830 1853 mstart, mend = match.span()
1831 1854 linenum += body.count('\n', begin, mstart) + 1
1832 1855 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1833 1856 begin = body.find('\n', mend) + 1 or len(body)
1834 1857 lend = begin - 1
1835 1858 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1836 1859
1837 1860 class linestate(object):
1838 1861 def __init__(self, line, linenum, colstart, colend):
1839 1862 self.line = line
1840 1863 self.linenum = linenum
1841 1864 self.colstart = colstart
1842 1865 self.colend = colend
1843 1866
1844 1867 def __hash__(self):
1845 1868 return hash((self.linenum, self.line))
1846 1869
1847 1870 def __eq__(self, other):
1848 1871 return self.line == other.line
1849 1872
1850 1873 matches = {}
1851 1874 copies = {}
1852 1875 def grepbody(fn, rev, body):
1853 1876 matches[rev].setdefault(fn, [])
1854 1877 m = matches[rev][fn]
1855 1878 for lnum, cstart, cend, line in matchlines(body):
1856 1879 s = linestate(line, lnum, cstart, cend)
1857 1880 m.append(s)
1858 1881
1859 1882 def difflinestates(a, b):
1860 1883 sm = difflib.SequenceMatcher(None, a, b)
1861 1884 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1862 1885 if tag == 'insert':
1863 1886 for i in xrange(blo, bhi):
1864 1887 yield ('+', b[i])
1865 1888 elif tag == 'delete':
1866 1889 for i in xrange(alo, ahi):
1867 1890 yield ('-', a[i])
1868 1891 elif tag == 'replace':
1869 1892 for i in xrange(alo, ahi):
1870 1893 yield ('-', a[i])
1871 1894 for i in xrange(blo, bhi):
1872 1895 yield ('+', b[i])
1873 1896
1874 1897 def display(fn, ctx, pstates, states):
1875 1898 rev = ctx.rev()
1876 1899 datefunc = ui.quiet and util.shortdate or util.datestr
1877 1900 found = False
1878 1901 filerevmatches = {}
1879 1902 if opts.get('all'):
1880 1903 iter = difflinestates(pstates, states)
1881 1904 else:
1882 1905 iter = [('', l) for l in states]
1883 1906 for change, l in iter:
1884 1907 cols = [fn, str(rev)]
1885 1908 before, match, after = None, None, None
1886 1909 if opts.get('line_number'):
1887 1910 cols.append(str(l.linenum))
1888 1911 if opts.get('all'):
1889 1912 cols.append(change)
1890 1913 if opts.get('user'):
1891 1914 cols.append(ui.shortuser(ctx.user()))
1892 1915 if opts.get('date'):
1893 1916 cols.append(datefunc(ctx.date()))
1894 1917 if opts.get('files_with_matches'):
1895 1918 c = (fn, rev)
1896 1919 if c in filerevmatches:
1897 1920 continue
1898 1921 filerevmatches[c] = 1
1899 1922 else:
1900 1923 before = l.line[:l.colstart]
1901 1924 match = l.line[l.colstart:l.colend]
1902 1925 after = l.line[l.colend:]
1903 1926 ui.write(sep.join(cols))
1904 1927 if before is not None:
1905 1928 ui.write(sep + before)
1906 1929 ui.write(match, label='grep.match')
1907 1930 ui.write(after)
1908 1931 ui.write(eol)
1909 1932 found = True
1910 1933 return found
1911 1934
1912 1935 skip = {}
1913 1936 revfiles = {}
1914 1937 matchfn = cmdutil.match(repo, pats, opts)
1915 1938 found = False
1916 1939 follow = opts.get('follow')
1917 1940
1918 1941 def prep(ctx, fns):
1919 1942 rev = ctx.rev()
1920 1943 pctx = ctx.parents()[0]
1921 1944 parent = pctx.rev()
1922 1945 matches.setdefault(rev, {})
1923 1946 matches.setdefault(parent, {})
1924 1947 files = revfiles.setdefault(rev, [])
1925 1948 for fn in fns:
1926 1949 flog = getfile(fn)
1927 1950 try:
1928 1951 fnode = ctx.filenode(fn)
1929 1952 except error.LookupError:
1930 1953 continue
1931 1954
1932 1955 copied = flog.renamed(fnode)
1933 1956 copy = follow and copied and copied[0]
1934 1957 if copy:
1935 1958 copies.setdefault(rev, {})[fn] = copy
1936 1959 if fn in skip:
1937 1960 if copy:
1938 1961 skip[copy] = True
1939 1962 continue
1940 1963 files.append(fn)
1941 1964
1942 1965 if fn not in matches[rev]:
1943 1966 grepbody(fn, rev, flog.read(fnode))
1944 1967
1945 1968 pfn = copy or fn
1946 1969 if pfn not in matches[parent]:
1947 1970 try:
1948 1971 fnode = pctx.filenode(pfn)
1949 1972 grepbody(pfn, parent, flog.read(fnode))
1950 1973 except error.LookupError:
1951 1974 pass
1952 1975
1953 1976 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1954 1977 rev = ctx.rev()
1955 1978 parent = ctx.parents()[0].rev()
1956 1979 for fn in sorted(revfiles.get(rev, [])):
1957 1980 states = matches[rev][fn]
1958 1981 copy = copies.get(rev, {}).get(fn)
1959 1982 if fn in skip:
1960 1983 if copy:
1961 1984 skip[copy] = True
1962 1985 continue
1963 1986 pstates = matches.get(parent, {}).get(copy or fn, [])
1964 1987 if pstates or states:
1965 1988 r = display(fn, ctx, pstates, states)
1966 1989 found = found or r
1967 1990 if r and not opts.get('all'):
1968 1991 skip[fn] = True
1969 1992 if copy:
1970 1993 skip[copy] = True
1971 1994 del matches[rev]
1972 1995 del revfiles[rev]
1973 1996
1974 1997 return not found
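# Illustrative output (a sketch; the file, revision and line number are
# hypothetical): with -n and --all each line reads
#   hgext/foo.py:14:3:+:# TODO: frob the widget
# i.e. file:rev:line:change:matched-line, where '+' marks a match appearing
# and '-' a match going away.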
1975 1998
1976 1999 def heads(ui, repo, *branchrevs, **opts):
1977 2000 """show current repository heads or show branch heads
1978 2001
1979 2002 With no arguments, show all repository branch heads.
1980 2003
1981 2004 Repository "heads" are changesets with no child changesets. They are
1982 2005 where development generally takes place and are the usual targets
1983 2006 for update and merge operations. Branch heads are changesets that have
1984 2007 no child changeset on the same branch.
1985 2008
1986 2009 If one or more REVs are given, only branch heads on the branches
1987 2010 associated with the specified changesets are shown.
1988 2011
1989 2012 If -c/--closed is specified, also show branch heads marked closed
1990 2013 (see :hg:`commit --close-branch`).
1991 2014
1992 2015 If STARTREV is specified, only those heads that are descendants of
1993 2016 STARTREV will be displayed.
1994 2017
1995 2018 If -t/--topo is specified, named branch mechanics will be ignored and only
1996 2019 changesets without children will be shown.
1997 2020
1998 2021 Returns 0 if matching heads are found, 1 if not.
1999 2022 """
2000 2023
2001 2024 start = None
2002 2025 if 'rev' in opts:
2003 2026 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2004 2027
2005 2028 if opts.get('topo'):
2006 2029 heads = [repo[h] for h in repo.heads(start)]
2007 2030 else:
2008 2031 heads = []
2009 2032 for b, ls in repo.branchmap().iteritems():
2010 2033 if start is None:
2011 2034 heads += [repo[h] for h in ls]
2012 2035 continue
2013 2036 startrev = repo.changelog.rev(start)
2014 2037 descendants = set(repo.changelog.descendants(startrev))
2015 2038 descendants.add(startrev)
2016 2039 rev = repo.changelog.rev
2017 2040 heads += [repo[h] for h in ls if rev(h) in descendants]
2018 2041
2019 2042 if branchrevs:
2020 2043 branches = set(repo[br].branch() for br in branchrevs)
2021 2044 heads = [h for h in heads if h.branch() in branches]
2022 2045
2023 2046 if not opts.get('closed'):
2024 2047 heads = [h for h in heads if not h.extra().get('close')]
2025 2048
2026 2049 if opts.get('active') and branchrevs:
2027 2050 dagheads = repo.heads(start)
2028 2051 heads = [h for h in heads if h.node() in dagheads]
2029 2052
2030 2053 if branchrevs:
2031 2054 haveheads = set(h.branch() for h in heads)
2032 2055 if branches - haveheads:
2033 2056 headless = ', '.join(b for b in branches - haveheads)
2034 2057 msg = _('no open branch heads found on branches %s')
2035 2058 if opts.get('rev'):
2036 2059                 msg += _(' (started at %s)') % opts['rev']
2037 2060 ui.warn((msg + '\n') % headless)
2038 2061
2039 2062 if not heads:
2040 2063 return 1
2041 2064
2042 2065 heads = sorted(heads, key=lambda x: -x.rev())
2043 2066 displayer = cmdutil.show_changeset(ui, repo, opts)
2044 2067 for ctx in heads:
2045 2068 displayer.show(ctx)
2046 2069 displayer.close()
2047 2070
2048 2071 def help_(ui, name=None, with_version=False, unknowncmd=False):
2049 2072 """show help for a given topic or a help overview
2050 2073
2051 2074 With no arguments, print a list of commands with short help messages.
2052 2075
2053 2076 Given a topic, extension, or command name, print help for that
2054 2077 topic.
2055 2078
2056 2079 Returns 0 if successful.
2057 2080 """
2058 2081 option_lists = []
2059 2082 textwidth = min(ui.termwidth(), 80) - 2
2060 2083
2061 2084 def addglobalopts(aliases):
2062 2085 if ui.verbose:
2063 2086 option_lists.append((_("global options:"), globalopts))
2064 2087 if name == 'shortlist':
2065 2088 option_lists.append((_('use "hg help" for the full list '
2066 2089 'of commands'), ()))
2067 2090 else:
2068 2091 if name == 'shortlist':
2069 2092 msg = _('use "hg help" for the full list of commands '
2070 2093 'or "hg -v" for details')
2071 2094 elif aliases:
2072 2095 msg = _('use "hg -v help%s" to show builtin aliases and '
2073 2096 'global options') % (name and " " + name or "")
2074 2097 else:
2075 2098 msg = _('use "hg -v help %s" to show global options') % name
2076 2099 option_lists.append((msg, ()))
2077 2100
2078 2101 def helpcmd(name):
2079 2102 if with_version:
2080 2103 version_(ui)
2081 2104 ui.write('\n')
2082 2105
2083 2106 try:
2084 2107 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2085 2108 except error.AmbiguousCommand, inst:
2086 2109 # py3k fix: except vars can't be used outside the scope of the
2087 2110 # except block, nor can be used inside a lambda. python issue4617
2088 2111 prefix = inst.args[0]
2089 2112 select = lambda c: c.lstrip('^').startswith(prefix)
2090 2113 helplist(_('list of commands:\n\n'), select)
2091 2114 return
2092 2115
2093 2116 # check if it's an invalid alias and display its error if it is
2094 2117 if getattr(entry[0], 'badalias', False):
2095 2118 if not unknowncmd:
2096 2119 entry[0](ui)
2097 2120 return
2098 2121
2099 2122 # synopsis
2100 2123 if len(entry) > 2:
2101 2124 if entry[2].startswith('hg'):
2102 2125 ui.write("%s\n" % entry[2])
2103 2126 else:
2104 2127 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2105 2128 else:
2106 2129 ui.write('hg %s\n' % aliases[0])
2107 2130
2108 2131 # aliases
2109 2132 if not ui.quiet and len(aliases) > 1:
2110 2133 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2111 2134
2112 2135 # description
2113 2136 doc = gettext(entry[0].__doc__)
2114 2137 if not doc:
2115 2138 doc = _("(no help text available)")
2116 2139 if hasattr(entry[0], 'definition'): # aliased command
2117 2140 if entry[0].definition.startswith('!'): # shell alias
2118 2141 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2119 2142 else:
2120 2143 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2121 2144 if ui.quiet:
2122 2145 doc = doc.splitlines()[0]
2123 2146 keep = ui.verbose and ['verbose'] or []
2124 2147 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2125 2148 ui.write("\n%s\n" % formatted)
2126 2149 if pruned:
2127 2150 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2128 2151
2129 2152 if not ui.quiet:
2130 2153 # options
2131 2154 if entry[1]:
2132 2155 option_lists.append((_("options:\n"), entry[1]))
2133 2156
2134 2157 addglobalopts(False)
2135 2158
2136 2159 def helplist(header, select=None):
2137 2160 h = {}
2138 2161 cmds = {}
2139 2162 for c, e in table.iteritems():
2140 2163 f = c.split("|", 1)[0]
2141 2164 if select and not select(f):
2142 2165 continue
2143 2166 if (not select and name != 'shortlist' and
2144 2167 e[0].__module__ != __name__):
2145 2168 continue
2146 2169 if name == "shortlist" and not f.startswith("^"):
2147 2170 continue
2148 2171 f = f.lstrip("^")
2149 2172 if not ui.debugflag and f.startswith("debug"):
2150 2173 continue
2151 2174 doc = e[0].__doc__
2152 2175 if doc and 'DEPRECATED' in doc and not ui.verbose:
2153 2176 continue
2154 2177 doc = gettext(doc)
2155 2178 if not doc:
2156 2179 doc = _("(no help text available)")
2157 2180 h[f] = doc.splitlines()[0].rstrip()
2158 2181 cmds[f] = c.lstrip("^")
2159 2182
2160 2183 if not h:
2161 2184 ui.status(_('no commands defined\n'))
2162 2185 return
2163 2186
2164 2187 ui.status(header)
2165 2188 fns = sorted(h)
2166 2189 m = max(map(len, fns))
2167 2190 for f in fns:
2168 2191 if ui.verbose:
2169 2192 commands = cmds[f].replace("|",", ")
2170 2193 ui.write(" %s:\n %s\n"%(commands, h[f]))
2171 2194 else:
2172 2195 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2173 2196 initindent=' %-*s ' % (m, f),
2174 2197 hangindent=' ' * (m + 4))))
2175 2198
2176 2199 if not ui.quiet:
2177 2200 addglobalopts(True)
2178 2201
2179 2202 def helptopic(name):
2180 2203 for names, header, doc in help.helptable:
2181 2204 if name in names:
2182 2205 break
2183 2206 else:
2184 2207 raise error.UnknownCommand(name)
2185 2208
2186 2209 # description
2187 2210 if not doc:
2188 2211 doc = _("(no help text available)")
2189 2212 if hasattr(doc, '__call__'):
2190 2213 doc = doc()
2191 2214
2192 2215 ui.write("%s\n\n" % header)
2193 2216 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2194 2217
2195 2218 def helpext(name):
2196 2219 try:
2197 2220 mod = extensions.find(name)
2198 2221 doc = gettext(mod.__doc__) or _('no help text available')
2199 2222 except KeyError:
2200 2223 mod = None
2201 2224 doc = extensions.disabledext(name)
2202 2225 if not doc:
2203 2226 raise error.UnknownCommand(name)
2204 2227
2205 2228 if '\n' not in doc:
2206 2229 head, tail = doc, ""
2207 2230 else:
2208 2231 head, tail = doc.split('\n', 1)
2209 2232 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2210 2233 if tail:
2211 2234 ui.write(minirst.format(tail, textwidth))
2212 2235 ui.status('\n\n')
2213 2236
2214 2237 if mod:
2215 2238 try:
2216 2239 ct = mod.cmdtable
2217 2240 except AttributeError:
2218 2241 ct = {}
2219 2242 modcmds = set([c.split('|', 1)[0] for c in ct])
2220 2243 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2221 2244 else:
2222 2245 ui.write(_('use "hg help extensions" for information on enabling '
2223 2246 'extensions\n'))
2224 2247
2225 2248 def helpextcmd(name):
2226 2249 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2227 2250 doc = gettext(mod.__doc__).splitlines()[0]
2228 2251
2229 2252 msg = help.listexts(_("'%s' is provided by the following "
2230 2253 "extension:") % cmd, {ext: doc}, len(ext),
2231 2254 indent=4)
2232 2255 ui.write(minirst.format(msg, textwidth))
2233 2256 ui.write('\n\n')
2234 2257 ui.write(_('use "hg help extensions" for information on enabling '
2235 2258 'extensions\n'))
2236 2259
2237 2260 help.addtopichook('revsets', revset.makedoc)
2238 2261 help.addtopichook('templates', templatekw.makedoc)
2239 2262 help.addtopichook('templates', templatefilters.makedoc)
2240 2263
2241 2264 if name and name != 'shortlist':
2242 2265 i = None
2243 2266 if unknowncmd:
2244 2267 queries = (helpextcmd,)
2245 2268 else:
2246 2269 queries = (helptopic, helpcmd, helpext, helpextcmd)
2247 2270 for f in queries:
2248 2271 try:
2249 2272 f(name)
2250 2273 i = None
2251 2274 break
2252 2275 except error.UnknownCommand, inst:
2253 2276 i = inst
2254 2277 if i:
2255 2278 raise i
2256 2279
2257 2280 else:
2258 2281 # program name
2259 2282 if ui.verbose or with_version:
2260 2283 version_(ui)
2261 2284 else:
2262 2285 ui.status(_("Mercurial Distributed SCM\n"))
2263 2286 ui.status('\n')
2264 2287
2265 2288 # list of commands
2266 2289 if name == "shortlist":
2267 2290 header = _('basic commands:\n\n')
2268 2291 else:
2269 2292 header = _('list of commands:\n\n')
2270 2293
2271 2294 helplist(header)
2272 2295 if name != 'shortlist':
2273 2296 exts, maxlength = extensions.enabled()
2274 2297 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2275 2298 if text:
2276 2299 ui.write("\n%s\n" % minirst.format(text, textwidth))
2277 2300
2278 2301 # list all option lists
2279 2302 opt_output = []
2280 2303 multioccur = False
2281 2304 for title, options in option_lists:
2282 2305 opt_output.append(("\n%s" % title, None))
2283 2306 for option in options:
2284 2307 if len(option) == 5:
2285 2308 shortopt, longopt, default, desc, optlabel = option
2286 2309 else:
2287 2310 shortopt, longopt, default, desc = option
2288 2311 optlabel = _("VALUE") # default label
2289 2312
2290 2313 if _("DEPRECATED") in desc and not ui.verbose:
2291 2314 continue
2292 2315 if isinstance(default, list):
2293 2316 numqualifier = " %s [+]" % optlabel
2294 2317 multioccur = True
2295 2318 elif (default is not None) and not isinstance(default, bool):
2296 2319 numqualifier = " %s" % optlabel
2297 2320 else:
2298 2321 numqualifier = ""
2299 2322 opt_output.append(("%2s%s" %
2300 2323 (shortopt and "-%s" % shortopt,
2301 2324 longopt and " --%s%s" %
2302 2325 (longopt, numqualifier)),
2303 2326 "%s%s" % (desc,
2304 2327 default
2305 2328 and _(" (default: %s)") % default
2306 2329 or "")))
2307 2330 if multioccur:
2308 2331 msg = _("\n[+] marked option can be specified multiple times")
2309 2332 if ui.verbose and name != 'shortlist':
2310 2333 opt_output.append((msg, None))
2311 2334 else:
2312 2335 opt_output.insert(-1, (msg, None))
2313 2336
2314 2337 if not name:
2315 2338 ui.write(_("\nadditional help topics:\n\n"))
2316 2339 topics = []
2317 2340 for names, header, doc in help.helptable:
2318 2341 topics.append((sorted(names, key=len, reverse=True)[0], header))
2319 2342 topics_len = max([len(s[0]) for s in topics])
2320 2343 for t, desc in topics:
2321 2344 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2322 2345
2323 2346 if opt_output:
2324 2347 colwidth = encoding.colwidth
2325 2348 # normalize: (opt or message, desc or None, width of opt)
2326 2349 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2327 2350 for opt, desc in opt_output]
2328 2351 hanging = max([e[2] for e in entries])
2329 2352 for opt, desc, width in entries:
2330 2353 if desc:
2331 2354 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2332 2355 hangindent = ' ' * (hanging + 3)
2333 2356 ui.write('%s\n' % (util.wrap(desc, textwidth,
2334 2357 initindent=initindent,
2335 2358 hangindent=hangindent)))
2336 2359 else:
2337 2360 ui.write("%s\n" % opt)
2338 2361
2339 2362 def identify(ui, repo, source=None, rev=None,
2340 2363 num=None, id=None, branch=None, tags=None, bookmarks=None):
2341 2364 """identify the working copy or specified revision
2342 2365
2343 2366 With no revision, print a summary of the current state of the
2344 2367 repository.
2345 2368
2346 2369 Specifying a path to a repository root or Mercurial bundle will
2347 2370 cause lookup to operate on that repository/bundle.
2348 2371
2349 2372 This summary identifies the repository state using one or two
2350 2373 parent hash identifiers, followed by a "+" if there are
2351 2374 uncommitted changes in the working directory, a list of tags for
2352 2375 this revision and a branch name for non-default branches.
2353 2376
2354 2377 Returns 0 if successful.
2355 2378 """
2356 2379
2357 2380 if not repo and not source:
2358 2381 raise util.Abort(_("there is no Mercurial repository here "
2359 2382 "(.hg not found)"))
2360 2383
2361 2384 hexfunc = ui.debugflag and hex or short
2362 2385 default = not (num or id or branch or tags or bookmarks)
2363 2386 output = []
2364 2387
2365 2388 revs = []
2366 2389 bms = []
2367 2390 if source:
2368 2391 source, branches = hg.parseurl(ui.expandpath(source))
2369 2392 repo = hg.repository(ui, source)
2370 2393 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2371 2394
2372 2395 if not repo.local():
2373 2396 if not rev and revs:
2374 2397 rev = revs[0]
2375 2398 if not rev:
2376 2399 rev = "tip"
2377 2400 if num or branch or tags:
2378 2401 raise util.Abort(
2379 2402 _("can't query remote revision number, branch, or tags"))
2380 2403
2381 2404 remoterev = repo.lookup(rev)
2382 2405 if default or id:
2383 2406 output = [hexfunc(remoterev)]
2384 2407
2385 2408 if 'bookmarks' in repo.listkeys('namespaces'):
2386 2409 hexremoterev = hex(remoterev)
2387 2410 bms = [bm for bm, bmrev in repo.listkeys('bookmarks').iteritems()
2388 2411 if bmrev == hexremoterev]
2389 2412
2390 2413 elif not rev:
2391 2414 ctx = repo[None]
2392 2415 parents = ctx.parents()
2393 2416 changed = False
2394 2417 if default or id or num:
2395 2418 changed = util.any(repo.status())
2396 2419 if default or id:
2397 2420 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
2398 2421 (changed) and "+" or "")]
2399 2422 if num:
2400 2423 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
2401 2424 (changed) and "+" or ""))
2402 2425 else:
2403 2426 ctx = cmdutil.revsingle(repo, rev)
2404 2427 if default or id:
2405 2428 output = [hexfunc(ctx.node())]
2406 2429 if num:
2407 2430 output.append(str(ctx.rev()))
2408 2431
2409 2432 if repo.local():
2410 2433 bms = ctx.bookmarks()
2411 2434
2412 2435 if repo.local() and default and not ui.quiet:
2413 2436 b = ctx.branch()
2414 2437 if b != 'default':
2415 2438 output.append("(%s)" % b)
2416 2439
2417 2440 # multiple tags for a single parent separated by '/'
2418 2441 t = "/".join(ctx.tags())
2419 2442 if t:
2420 2443 output.append(t)
2421 2444
2422 2445 if default and not ui.quiet:
2423 2446 # multiple bookmarks for a single parent separated by '/'
2424 2447 bm = '/'.join(bms)
2425 2448 if bm:
2426 2449 output.append(bm)
2427 2450
2428 2451 if branch:
2429 2452 output.append(ctx.branch())
2430 2453
2431 2454 if tags:
2432 2455 output.extend(ctx.tags())
2433 2456
2434 2457 if bookmarks:
2435 2458 output.extend(bms)
2436 2459
2437 2460 ui.write("%s\n" % ' '.join(output))
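# Illustrative output (a sketch; the hash, tag and bookmark are hypothetical):
# for a dirty working directory on a non-default branch this prints e.g.
#   8f5c1a2b3c4d+ (stable) tip mybook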
2438 2461
2439 2462 def import_(ui, repo, patch1, *patches, **opts):
2440 2463 """import an ordered set of patches
2441 2464
2442 2465 Import a list of patches and commit them individually (unless
2443 2466 --no-commit is specified).
2444 2467
2445 2468 If there are outstanding changes in the working directory, import
2446 2469 will abort unless given the -f/--force flag.
2447 2470
2448 2471 You can import a patch straight from a mail message. Even patches
2449 2472 as attachments work (to use the body part, it must have type
2450 2473     text/plain or text/x-patch). The From and Subject headers of the
2451 2474     email message are used as the default committer and commit message. All
2452 2475     text/plain body parts before the first diff are added to the commit
2453 2476     message.
2454 2477
2455 2478 If the imported patch was generated by :hg:`export`, user and
2456 2479 description from patch override values from message headers and
2457 2480 body. Values given on command line with -m/--message and -u/--user
2458 2481 override these.
2459 2482
2460 2483 If --exact is specified, import will set the working directory to
2461 2484 the parent of each patch before applying it, and will abort if the
2462 2485 resulting changeset has a different ID than the one recorded in
2463 2486 the patch. This may happen due to character set problems or other
2464 2487 deficiencies in the text patch format.
2465 2488
2466 2489 With -s/--similarity, hg will attempt to discover renames and
2467 2490 copies in the patch in the same way as 'addremove'.
2468 2491
2469 2492 To read a patch from standard input, use "-" as the patch name. If
2470 2493 a URL is specified, the patch will be downloaded from it.
2471 2494 See :hg:`help dates` for a list of formats valid for -d/--date.
2472 2495
2473 2496 Returns 0 on success.
2474 2497 """
2475 2498 patches = (patch1,) + patches
2476 2499
2477 2500 date = opts.get('date')
2478 2501 if date:
2479 2502 opts['date'] = util.parsedate(date)
2480 2503
2481 2504 try:
2482 2505 sim = float(opts.get('similarity') or 0)
2483 2506 except ValueError:
2484 2507 raise util.Abort(_('similarity must be a number'))
2485 2508 if sim < 0 or sim > 100:
2486 2509 raise util.Abort(_('similarity must be between 0 and 100'))
2487 2510
2488 2511 if opts.get('exact') or not opts.get('force'):
2489 2512 cmdutil.bail_if_changed(repo)
2490 2513
2491 2514 d = opts["base"]
2492 2515 strip = opts["strip"]
2493 2516 wlock = lock = None
2494 2517 msgs = []
2495 2518
2496 2519 def tryone(ui, hunk):
2497 2520 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2498 2521 patch.extract(ui, hunk)
2499 2522
2500 2523 if not tmpname:
2501 2524 return None
2502 2525 commitid = _('to working directory')
2503 2526
2504 2527 try:
2505 2528 cmdline_message = cmdutil.logmessage(opts)
2506 2529 if cmdline_message:
2507 2530 # pickup the cmdline msg
2508 2531 message = cmdline_message
2509 2532 elif message:
2510 2533 # pickup the patch msg
2511 2534 message = message.strip()
2512 2535 else:
2513 2536 # launch the editor
2514 2537 message = None
2515 2538 ui.debug('message:\n%s\n' % message)
2516 2539
2517 2540 wp = repo.parents()
2518 2541 if opts.get('exact'):
2519 2542 if not nodeid or not p1:
2520 2543 raise util.Abort(_('not a Mercurial patch'))
2521 2544 p1 = repo.lookup(p1)
2522 2545 p2 = repo.lookup(p2 or hex(nullid))
2523 2546
2524 2547 if p1 != wp[0].node():
2525 2548 hg.clean(repo, p1)
2526 2549 repo.dirstate.setparents(p1, p2)
2527 2550 elif p2:
2528 2551 try:
2529 2552 p1 = repo.lookup(p1)
2530 2553 p2 = repo.lookup(p2)
2531 2554 if p1 == wp[0].node():
2532 2555 repo.dirstate.setparents(p1, p2)
2533 2556 except error.RepoError:
2534 2557 pass
2535 2558 if opts.get('exact') or opts.get('import_branch'):
2536 2559 repo.dirstate.setbranch(branch or 'default')
2537 2560
2538 2561 files = {}
2539 2562 try:
2540 2563 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2541 2564 files=files, eolmode=None)
2542 2565 finally:
2543 2566 files = cmdutil.updatedir(ui, repo, files,
2544 2567 similarity=sim / 100.0)
2545 2568 if opts.get('no_commit'):
2546 2569 if message:
2547 2570 msgs.append(message)
2548 2571 else:
2549 2572 if opts.get('exact'):
2550 2573 m = None
2551 2574 else:
2552 2575 m = cmdutil.matchfiles(repo, files or [])
2553 2576 n = repo.commit(message, opts.get('user') or user,
2554 2577 opts.get('date') or date, match=m,
2555 2578 editor=cmdutil.commiteditor)
2556 2579 if opts.get('exact'):
2557 2580 if hex(n) != nodeid:
2558 2581 repo.rollback()
2559 2582 raise util.Abort(_('patch is damaged'
2560 2583 ' or loses information'))
2561 2584 # Force a dirstate write so that the next transaction
2562 2585                     # backs up an up-to-date dirstate file.
2563 2586 repo.dirstate.write()
2564 2587 if n:
2565 2588 commitid = short(n)
2566 2589
2567 2590 return commitid
2568 2591 finally:
2569 2592 os.unlink(tmpname)
2570 2593
2571 2594 try:
2572 2595 wlock = repo.wlock()
2573 2596 lock = repo.lock()
2574 2597 lastcommit = None
2575 2598 for p in patches:
2576 2599 pf = os.path.join(d, p)
2577 2600
2578 2601 if pf == '-':
2579 2602 ui.status(_("applying patch from stdin\n"))
2580 2603 pf = sys.stdin
2581 2604 else:
2582 2605 ui.status(_("applying %s\n") % p)
2583 2606 pf = url.open(ui, pf)
2584 2607
2585 2608 haspatch = False
2586 2609 for hunk in patch.split(pf):
2587 2610 commitid = tryone(ui, hunk)
2588 2611 if commitid:
2589 2612 haspatch = True
2590 2613 if lastcommit:
2591 2614 ui.status(_('applied %s\n') % lastcommit)
2592 2615 lastcommit = commitid
2593 2616
2594 2617 if not haspatch:
2595 2618 raise util.Abort(_('no diffs found'))
2596 2619
2597 2620 if msgs:
2598 2621 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2599 2622 finally:
2600 2623 release(lock, wlock)
2601 2624
2602 2625 def incoming(ui, repo, source="default", **opts):
2603 2626 """show new changesets found in source
2604 2627
2605 2628 Show new changesets found in the specified path/URL or the default
2606 2629     pull location. These are the changesets that would have been pulled
2607 2630     if a pull had been requested at the time you issued this command.
2608 2631
2609 2632     For remote repositories, using --bundle avoids downloading the
2610 2633     changesets twice if the incoming command is followed by a pull.
2611 2634
2612 2635     See :hg:`pull` for valid source format details.
2613 2636
2614 2637 Returns 0 if there are incoming changes, 1 otherwise.
2615 2638 """
2616 2639 if opts.get('bundle') and opts.get('subrepos'):
2617 2640 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2618 2641
2619 2642 if opts.get('bookmarks'):
2620 2643 source, branches = hg.parseurl(ui.expandpath(source),
2621 2644 opts.get('branch'))
2622 2645 other = hg.repository(hg.remoteui(repo, opts), source)
2623 2646 if 'bookmarks' not in other.listkeys('namespaces'):
2624 2647 ui.warn(_("remote doesn't support bookmarks\n"))
2625 2648 return 0
2626 2649 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2627 2650 return bookmarks.diff(ui, repo, other)
2628 2651
2629 2652 ret = hg.incoming(ui, repo, source, opts)
2630 2653 return ret
2631 2654
2632 2655 def init(ui, dest=".", **opts):
2633 2656 """create a new repository in the given directory
2634 2657
2635 2658 Initialize a new repository in the given directory. If the given
2636 2659 directory does not exist, it will be created.
2637 2660
2638 2661 If no directory is given, the current directory is used.
2639 2662
2640 2663 It is possible to specify an ``ssh://`` URL as the destination.
2641 2664 See :hg:`help urls` for more information.
2642 2665
2643 2666 Returns 0 on success.
2644 2667 """
2645 2668 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2646 2669
2647 2670 def locate(ui, repo, *pats, **opts):
2648 2671 """locate files matching specific patterns
2649 2672
2650 2673 Print files under Mercurial control in the working directory whose
2651 2674 names match the given patterns.
2652 2675
2653 2676 By default, this command searches all directories in the working
2654 2677 directory. To search just the current directory and its
2655 2678 subdirectories, use "--include .".
2656 2679
2657 2680 If no patterns are given to match, this command prints the names
2658 2681 of all files under Mercurial control in the working directory.
2659 2682
2660 2683 If you want to feed the output of this command into the "xargs"
2661 2684 command, use the -0 option to both this command and "xargs". This
2662 2685 will avoid the problem of "xargs" treating single filenames that
2663 2686 contain whitespace as multiple filenames.
2664 2687
2665 2688 Returns 0 if a match is found, 1 otherwise.
2666 2689 """
2667 2690 end = opts.get('print0') and '\0' or '\n'
2668 2691 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2669 2692
2670 2693 ret = 1
2671 2694 m = cmdutil.match(repo, pats, opts, default='relglob')
2672 2695 m.bad = lambda x, y: False
2673 2696 for abs in repo[rev].walk(m):
2674 2697 if not rev and abs not in repo.dirstate:
2675 2698 continue
2676 2699 if opts.get('fullpath'):
2677 2700 ui.write(repo.wjoin(abs), end)
2678 2701 else:
2679 2702 ui.write(((pats and m.rel(abs)) or abs), end)
2680 2703 ret = 0
2681 2704
2682 2705 return ret
2683 2706
2684 2707 def log(ui, repo, *pats, **opts):
2685 2708 """show revision history of entire repository or files
2686 2709
2687 2710 Print the revision history of the specified files or the entire
2688 2711 project.
2689 2712
2690 2713 File history is shown without following rename or copy history of
2691 2714 files. Use -f/--follow with a filename to follow history across
2692 2715 renames and copies. --follow without a filename will only show
2693 2716 ancestors or descendants of the starting revision. --follow-first
2694 2717 only follows the first parent of merge revisions.
2695 2718
2696 2719 If no revision range is specified, the default is ``tip:0`` unless
2697 2720 --follow is set, in which case the working directory parent is
2698 2721 used as the starting revision. You can specify a revision set for
2699 2722 log; see :hg:`help revsets` for more information.
2700 2723
2701 2724 See :hg:`help dates` for a list of formats valid for -d/--date.
2702 2725
2703 2726 By default this command prints revision number and changeset id,
2704 2727 tags, non-trivial parents, user, date and time, and a summary for
2705 2728 each commit. When the -v/--verbose switch is used, the list of
2706 2729 changed files and full commit message are shown.
2707 2730
2708 2731 .. note::
2709 2732 log -p/--patch may generate unexpected diff output for merge
2710 2733 changesets, as it will only compare the merge changeset against
2711 2734 its first parent. Also, only files different from BOTH parents
2712 2735 will appear in files:.
2713 2736
2714 2737 Returns 0 on success.
2715 2738 """
2716 2739
2717 2740 matchfn = cmdutil.match(repo, pats, opts)
2718 2741 limit = cmdutil.loglimit(opts)
2719 2742 count = 0
2720 2743
2721 2744 endrev = None
2722 2745 if opts.get('copies') and opts.get('rev'):
2723 2746 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2724 2747
2725 2748 df = False
2726 2749 if opts["date"]:
2727 2750 df = util.matchdate(opts["date"])
2728 2751
2729 2752 branches = opts.get('branch', []) + opts.get('only_branch', [])
2730 2753 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2731 2754
2732 2755 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2733 2756 def prep(ctx, fns):
2734 2757 rev = ctx.rev()
2735 2758 parents = [p for p in repo.changelog.parentrevs(rev)
2736 2759 if p != nullrev]
2737 2760 if opts.get('no_merges') and len(parents) == 2:
2738 2761 return
2739 2762 if opts.get('only_merges') and len(parents) != 2:
2740 2763 return
2741 2764 if opts.get('branch') and ctx.branch() not in opts['branch']:
2742 2765 return
2743 2766 if df and not df(ctx.date()[0]):
2744 2767 return
2745 2768 if opts['user'] and not [k for k in opts['user']
2746 2769 if k.lower() in ctx.user().lower()]:
2747 2770 return
2748 2771 if opts.get('keyword'):
2749 2772 for k in [kw.lower() for kw in opts['keyword']]:
2750 2773 if (k in ctx.user().lower() or
2751 2774 k in ctx.description().lower() or
2752 2775 k in " ".join(ctx.files()).lower()):
2753 2776 break
2754 2777 else:
2755 2778 return
2756 2779
2757 2780 copies = None
2758 2781 if opts.get('copies') and rev:
2759 2782 copies = []
2760 2783 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2761 2784 for fn in ctx.files():
2762 2785 rename = getrenamed(fn, rev)
2763 2786 if rename:
2764 2787 copies.append((fn, rename[0]))
2765 2788
2766 2789 revmatchfn = None
2767 2790 if opts.get('patch') or opts.get('stat'):
2768 2791 if opts.get('follow') or opts.get('follow_first'):
2769 2792 # note: this might be wrong when following through merges
2770 2793 revmatchfn = cmdutil.match(repo, fns, default='path')
2771 2794 else:
2772 2795 revmatchfn = matchfn
2773 2796
2774 2797 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2775 2798
2776 2799 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2777 2800 if count == limit:
2778 2801 break
2779 2802 if displayer.flush(ctx.rev()):
2780 2803 count += 1
2781 2804 displayer.close()
2782 2805
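# Illustrative sketch (not part of Mercurial): the --keyword filter in prep()
# above keeps a changeset as soon as one of the given keywords appears
# (case-insensitively) in its user, its description, or one of its file names;
# if none match, the changeset is skipped. The sample values are made up.

def keyword_match(keywords, user, description, files):
    fields = [user.lower(), description.lower(), " ".join(files).lower()]
    return any(any(k.lower() in f for f in fields) for k in keywords)

assert keyword_match(['bug'], 'alice', 'fix bug in log', ['commands.py'])
assert not keyword_match(['web'], 'alice', 'fix bug in log', ['commands.py'])
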
2783 2806 def manifest(ui, repo, node=None, rev=None):
2784 2807 """output the current or given revision of the project manifest
2785 2808
2786 2809 Print a list of version controlled files for the given revision.
2787 2810 If no revision is given, the first parent of the working directory
2788 2811 is used, or the null revision if no revision is checked out.
2789 2812
2790 2813 With -v, print file permissions, symlink and executable bits.
2791 2814 With --debug, print file revision hashes.
2792 2815
2793 2816 Returns 0 on success.
2794 2817 """
2795 2818
2796 2819 if rev and node:
2797 2820 raise util.Abort(_("please specify just one revision"))
2798 2821
2799 2822 if not node:
2800 2823 node = rev
2801 2824
2802 2825 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2803 2826 ctx = cmdutil.revsingle(repo, node)
2804 2827 for f in ctx:
2805 2828 if ui.debugflag:
2806 2829 ui.write("%40s " % hex(ctx.manifest()[f]))
2807 2830 if ui.verbose:
2808 2831 ui.write(decor[ctx.flags(f)])
2809 2832 ui.write("%s\n" % f)
2810 2833
2811 2834 def merge(ui, repo, node=None, **opts):
2812 2835 """merge working directory with another revision
2813 2836
2814 2837 The current working directory is updated with all changes made in
2815 2838 the requested revision since the last common predecessor revision.
2816 2839
2817 2840 Files that changed between either parent are marked as changed for
2818 2841 the next commit and a commit must be performed before any further
2819 2842 updates to the repository are allowed. The next commit will have
2820 2843 two parents.
2821 2844
2822 2845 ``--tool`` can be used to specify the merge tool used for file
2823 2846 merges. It overrides the HGMERGE environment variable and your
2824 2847 configuration files.
2825 2848
2826 2849 If no revision is specified, the working directory's parent is a
2827 2850 head revision, and the current branch contains exactly one other
2828 2851 head, the other head is merged with by default. Otherwise, an
2829 2852 explicit revision with which to merge must be provided.
2830 2853
2831 2854 :hg:`resolve` must be used to resolve unresolved files.
2832 2855
2833 2856 To undo an uncommitted merge, use :hg:`update --clean .` which
2834 2857 will check out a clean copy of the original merge parent, losing
2835 2858 all changes.
2836 2859
2837 2860 Returns 0 on success, 1 if there are unresolved files.
2838 2861 """
2839 2862
2840 2863 if opts.get('rev') and node:
2841 2864 raise util.Abort(_("please specify just one revision"))
2842 2865 if not node:
2843 2866 node = opts.get('rev')
2844 2867
2845 2868 if not node:
2846 2869 branch = repo[None].branch()
2847 2870 bheads = repo.branchheads(branch)
2848 2871 if len(bheads) > 2:
2849 2872 raise util.Abort(_(
2850 2873 'branch \'%s\' has %d heads - '
2851 2874 'please merge with an explicit rev\n'
2852 2875 '(run \'hg heads .\' to see heads)')
2853 2876 % (branch, len(bheads)))
2854 2877
2855 2878 parent = repo.dirstate.parents()[0]
2856 2879 if len(bheads) == 1:
2857 2880 if len(repo.heads()) > 1:
2858 2881 raise util.Abort(_(
2859 2882 'branch \'%s\' has one head - '
2860 2883 'please merge with an explicit rev\n'
2861 2884 '(run \'hg heads\' to see all heads)')
2862 2885 % branch)
2863 2886 msg = _('there is nothing to merge')
2864 2887 if parent != repo.lookup(repo[None].branch()):
2865 2888 msg = _('%s - use "hg update" instead') % msg
2866 2889 raise util.Abort(msg)
2867 2890
2868 2891 if parent not in bheads:
2869 2892 raise util.Abort(_('working dir not at a head rev - '
2870 2893 'use "hg update" or merge with an explicit rev'))
2871 2894 node = parent == bheads[0] and bheads[-1] or bheads[0]
2872 2895 else:
2873 2896 node = cmdutil.revsingle(repo, node).node()
2874 2897
2875 2898 if opts.get('preview'):
2876 2899 # find nodes that are ancestors of p2 but not of p1
2877 2900 p1 = repo.lookup('.')
2878 2901 p2 = repo.lookup(node)
2879 2902 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2880 2903
2881 2904 displayer = cmdutil.show_changeset(ui, repo, opts)
2882 2905 for node in nodes:
2883 2906 displayer.show(repo[node])
2884 2907 displayer.close()
2885 2908 return 0
2886 2909
2887 2910 try:
2888 2911 # ui.forcemerge is an internal variable, do not document
2889 2912 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2890 2913 return hg.merge(repo, node, force=opts.get('force'))
2891 2914 finally:
2892 2915 ui.setconfig('ui', 'forcemerge', '')
2893 2916
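# Illustrative sketch (not part of Mercurial): when no revision is given and the
# branch has exactly two heads, merge picks "the other" head relative to the
# working directory parent, mirroring the bheads[0]/bheads[-1] selection above.
# The head names are placeholders.

def other_head(parent, bheads):
    """Return the branch head that is not the current parent (two-head case)."""
    assert len(bheads) == 2 and parent in bheads
    return bheads[-1] if parent == bheads[0] else bheads[0]

assert other_head('p', ['p', 'q']) == 'q'
assert other_head('q', ['p', 'q']) == 'p'
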
2894 2917 def outgoing(ui, repo, dest=None, **opts):
2895 2918 """show changesets not found in the destination
2896 2919
2897 2920 Show changesets not found in the specified destination repository
2898 2921 or the default push location. These are the changesets that would
2899 2922 be pushed if a push was requested.
2900 2923
2901 2924 See pull for details of valid destination formats.
2902 2925
2903 2926 Returns 0 if there are outgoing changes, 1 otherwise.
2904 2927 """
2905 2928
2906 2929 if opts.get('bookmarks'):
2907 2930 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2908 2931 dest, branches = hg.parseurl(dest, opts.get('branch'))
2909 2932 other = hg.repository(hg.remoteui(repo, opts), dest)
2910 2933 if 'bookmarks' not in other.listkeys('namespaces'):
2911 2934 ui.warn(_("remote doesn't support bookmarks\n"))
2912 2935 return 0
2913 2936 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2914 2937 return bookmarks.diff(ui, other, repo)
2915 2938
2916 2939 ret = hg.outgoing(ui, repo, dest, opts)
2917 2940 return ret
2918 2941
2919 2942 def parents(ui, repo, file_=None, **opts):
2920 2943 """show the parents of the working directory or revision
2921 2944
2922 2945 Print the working directory's parent revisions. If a revision is
2923 2946 given via -r/--rev, the parent of that revision will be printed.
2924 2947 If a file argument is given, the revision in which the file was
2925 2948 last changed (before the working directory revision or the
2926 2949 argument to --rev if given) is printed.
2927 2950
2928 2951 Returns 0 on success.
2929 2952 """
2930 2953
2931 2954 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2932 2955
2933 2956 if file_:
2934 2957 m = cmdutil.match(repo, (file_,), opts)
2935 2958 if m.anypats() or len(m.files()) != 1:
2936 2959 raise util.Abort(_('can only specify an explicit filename'))
2937 2960 file_ = m.files()[0]
2938 2961 filenodes = []
2939 2962 for cp in ctx.parents():
2940 2963 if not cp:
2941 2964 continue
2942 2965 try:
2943 2966 filenodes.append(cp.filenode(file_))
2944 2967 except error.LookupError:
2945 2968 pass
2946 2969 if not filenodes:
2947 2970 raise util.Abort(_("'%s' not found in manifest!") % file_)
2948 2971 fl = repo.file(file_)
2949 2972 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2950 2973 else:
2951 2974 p = [cp.node() for cp in ctx.parents()]
2952 2975
2953 2976 displayer = cmdutil.show_changeset(ui, repo, opts)
2954 2977 for n in p:
2955 2978 if n != nullid:
2956 2979 displayer.show(repo[n])
2957 2980 displayer.close()
2958 2981
2959 2982 def paths(ui, repo, search=None):
2960 2983 """show aliases for remote repositories
2961 2984
2962 2985 Show definition of symbolic path name NAME. If no name is given,
2963 2986 show definition of all available names.
2964 2987
2965 2988 Path names are defined in the [paths] section of your
2966 2989 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2967 2990 repository, ``.hg/hgrc`` is used, too.
2968 2991
2969 2992 The path names ``default`` and ``default-push`` have a special
2970 2993 meaning. When performing a push or pull operation, they are used
2971 2994 as fallbacks if no location is specified on the command-line.
2972 2995 When ``default-push`` is set, it will be used for push and
2973 2996 ``default`` will be used for pull; otherwise ``default`` is used
2974 2997 as the fallback for both. When cloning a repository, the clone
2975 2998 source is written as ``default`` in ``.hg/hgrc``. Note that
2976 2999 ``default`` and ``default-push`` apply to all inbound (e.g.
2977 3000 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2978 3001 :hg:`bundle`) operations.
2979 3002
2980 3003 See :hg:`help urls` for more information.
2981 3004
2982 3005 Returns 0 on success.
2983 3006 """
2984 3007 if search:
2985 3008 for name, path in ui.configitems("paths"):
2986 3009 if name == search:
2987 3010 ui.write("%s\n" % url.hidepassword(path))
2988 3011 return
2989 3012 ui.warn(_("not found!\n"))
2990 3013 return 1
2991 3014 else:
2992 3015 for name, path in ui.configitems("paths"):
2993 3016 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2994 3017
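# Illustrative sketch (not part of Mercurial): how ``default`` and
# ``default-push`` act as fallbacks, as described in the docstring above. The
# ``paths`` dict merely stands in for the [paths] section of a configuration
# file, and the example URLs are invented.

def resolve_path(paths, explicit=None, push=False):
    """Pick the location to use for a push or pull."""
    if explicit:
        return paths.get(explicit, explicit)   # a symbolic name or a literal URL
    if push and 'default-push' in paths:
        return paths['default-push']
    return paths.get('default')

cfg = {'default': 'https://example.com/repo',
       'default-push': 'ssh://example.com//srv/repo'}
assert resolve_path(cfg, push=True) == 'ssh://example.com//srv/repo'
assert resolve_path(cfg, push=False) == 'https://example.com/repo'
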
2995 3018 def postincoming(ui, repo, modheads, optupdate, checkout):
2996 3019 if modheads == 0:
2997 3020 return
2998 3021 if optupdate:
2999 3022 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3000 3023 return hg.update(repo, checkout)
3001 3024 else:
3002 3025 ui.status(_("not updating, since new heads added\n"))
3003 3026 if modheads > 1:
3004 3027 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3005 3028 else:
3006 3029 ui.status(_("(run 'hg update' to get a working copy)\n"))
3007 3030
3008 3031 def pull(ui, repo, source="default", **opts):
3009 3032 """pull changes from the specified source
3010 3033
3011 3034 Pull changes from a remote repository to a local one.
3012 3035
3013 3036 This finds all changes from the repository at the specified path
3014 3037 or URL and adds them to a local repository (the current one unless
3015 3038 -R is specified). By default, this does not update the copy of the
3016 3039 project in the working directory.
3017 3040
3018 3041 Use :hg:`incoming` if you want to see what would have been added
3019 3042 by a pull at the time you issued this command. If you then decide
3020 3043 to add those changes to the repository, you should use :hg:`pull
3021 3044 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3022 3045
3023 3046 If SOURCE is omitted, the 'default' path will be used.
3024 3047 See :hg:`help urls` for more information.
3025 3048
3026 3049 Returns 0 on success, 1 if an update had unresolved files.
3027 3050 """
3028 3051 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3029 3052 other = hg.repository(hg.remoteui(repo, opts), source)
3030 3053 ui.status(_('pulling from %s\n') % url.hidepassword(source))
3031 3054 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3032 3055
3033 3056 if opts.get('bookmark'):
3034 3057 if not revs:
3035 3058 revs = []
3036 3059 rb = other.listkeys('bookmarks')
3037 3060 for b in opts['bookmark']:
3038 3061 if b not in rb:
3039 3062 raise util.Abort(_('remote bookmark %s not found!') % b)
3040 3063 revs.append(rb[b])
3041 3064
3042 3065 if revs:
3043 3066 try:
3044 3067 revs = [other.lookup(rev) for rev in revs]
3045 3068 except error.CapabilityError:
3046 3069 err = _("other repository doesn't support revision lookup, "
3047 3070 "so a rev cannot be specified.")
3048 3071 raise util.Abort(err)
3049 3072
3050 3073 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3051 3074 bookmarks.updatefromremote(ui, repo, other)
3052 3075 if checkout:
3053 3076 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3054 3077 repo._subtoppath = source
3055 3078 try:
3056 3079 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3057 3080
3058 3081 finally:
3059 3082 del repo._subtoppath
3060 3083
3061 3084 # update specified bookmarks
3062 3085 if opts.get('bookmark'):
3063 3086 for b in opts['bookmark']:
3064 3087 # explicit pull overrides local bookmark if any
3065 3088 ui.status(_("importing bookmark %s\n") % b)
3066 3089 repo._bookmarks[b] = repo[rb[b]].node()
3067 3090 bookmarks.write(repo)
3068 3091
3069 3092 return ret
3070 3093
3071 3094 def push(ui, repo, dest=None, **opts):
3072 3095 """push changes to the specified destination
3073 3096
3074 3097 Push changesets from the local repository to the specified
3075 3098 destination.
3076 3099
3077 3100 This operation is symmetrical to pull: it is identical to a pull
3078 3101 in the destination repository from the current one.
3079 3102
3080 3103 By default, push will not allow creation of new heads at the
3081 3104 destination, since multiple heads would make it unclear which head
3082 3105 to use. In this situation, it is recommended to pull and merge
3083 3106 before pushing.
3084 3107
3085 3108 Use --new-branch if you want to allow push to create a new named
3086 3109 branch that is not present at the destination. This allows you to
3087 3110 only create a new branch without forcing other changes.
3088 3111
3089 3112 Use -f/--force to override the default behavior and push all
3090 3113 changesets on all branches.
3091 3114
3092 3115 If -r/--rev is used, the specified revision and all its ancestors
3093 3116 will be pushed to the remote repository.
3094 3117
3095 3118 Please see :hg:`help urls` for important details about ``ssh://``
3096 3119 URLs. If DESTINATION is omitted, a default path will be used.
3097 3120
3098 3121 Returns 0 if push was successful, 1 if nothing to push.
3099 3122 """
3100 3123
3101 3124 if opts.get('bookmark'):
3102 3125 for b in opts['bookmark']:
3103 3126 # translate -B options to -r so changesets get pushed
3104 3127 if b in repo._bookmarks:
3105 3128 opts.setdefault('rev', []).append(b)
3106 3129 else:
3107 3130 # if we try to push a deleted bookmark, translate it to null
3108 3131 # this lets simultaneous -r, -b options continue working
3109 3132 opts.setdefault('rev', []).append("null")
3110 3133
3111 3134 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3112 3135 dest, branches = hg.parseurl(dest, opts.get('branch'))
3113 3136 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
3114 3137 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3115 3138 other = hg.repository(hg.remoteui(repo, opts), dest)
3116 3139 if revs:
3117 3140 revs = [repo.lookup(rev) for rev in revs]
3118 3141
3119 3142 repo._subtoppath = dest
3120 3143 try:
3121 3144 # push subrepos depth-first for coherent ordering
3122 3145 c = repo['']
3123 3146 subs = c.substate # only repos that are committed
3124 3147 for s in sorted(subs):
3125 3148 if not c.sub(s).push(opts.get('force')):
3126 3149 return False
3127 3150 finally:
3128 3151 del repo._subtoppath
3129 3152 result = repo.push(other, opts.get('force'), revs=revs,
3130 3153 newbranch=opts.get('new_branch'))
3131 3154
3132 3155 result = (result == 0)
3133 3156
3134 3157 if opts.get('bookmark'):
3135 3158 rb = other.listkeys('bookmarks')
3136 3159 for b in opts['bookmark']:
3137 3160 # explicit push overrides remote bookmark if any
3138 3161 if b in repo._bookmarks:
3139 3162 ui.status(_("exporting bookmark %s\n") % b)
3140 3163 new = repo[b].hex()
3141 3164 elif b in rb:
3142 3165 ui.status(_("deleting remote bookmark %s\n") % b)
3143 3166 new = '' # delete
3144 3167 else:
3145 3168 ui.warn(_('bookmark %s does not exist on the local '
3146 3169 'or remote repository!\n') % b)
3147 3170 return 2
3148 3171 old = rb.get(b, '')
3149 3172 r = other.pushkey('bookmarks', b, old, new)
3150 3173 if not r:
3151 3174 ui.warn(_('updating bookmark %s failed!\n') % b)
3152 3175 if not result:
3153 3176 result = 2
3154 3177
3155 3178 return result
3156 3179
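# Illustrative sketch (not part of Mercurial): the bookmark export above uses
# pushkey as a compare-and-set -- the update only succeeds if the remote still
# holds the old value, and an empty new value deletes the bookmark. The
# in-memory dict below only models that behaviour; it is not the wire protocol.

def pushkey_bookmark(remote_bookmarks, name, old, new):
    """Model of pushkey('bookmarks', ...): returns True on success."""
    if remote_bookmarks.get(name, '') != old:
        return False                      # stale old value: someone moved it
    if new == '':
        remote_bookmarks.pop(name, None)  # deletion
    else:
        remote_bookmarks[name] = new
    return True

remote = {'stable': 'aaa'}
assert pushkey_bookmark(remote, 'stable', 'aaa', 'bbb')      # moved
assert not pushkey_bookmark(remote, 'stable', 'aaa', 'ccc')  # old value mismatch
assert pushkey_bookmark(remote, 'stable', 'bbb', '') and 'stable' not in remote
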
3157 3180 def recover(ui, repo):
3158 3181 """roll back an interrupted transaction
3159 3182
3160 3183 Recover from an interrupted commit or pull.
3161 3184
3162 3185 This command tries to fix the repository status after an
3163 3186 interrupted operation. It should only be necessary when Mercurial
3164 3187 suggests it.
3165 3188
3166 3189 Returns 0 if successful, 1 if nothing to recover or verify fails.
3167 3190 """
3168 3191 if repo.recover():
3169 3192 return hg.verify(repo)
3170 3193 return 1
3171 3194
3172 3195 def remove(ui, repo, *pats, **opts):
3173 3196 """remove the specified files on the next commit
3174 3197
3175 3198 Schedule the indicated files for removal from the repository.
3176 3199
3177 3200 This only removes files from the current branch, not from the
3178 3201 entire project history. -A/--after can be used to remove only
3179 3202 files that have already been deleted, -f/--force can be used to
3180 3203 force deletion, and -Af can be used to remove files from the next
3181 3204 revision without deleting them from the working directory.
3182 3205
3183 3206 The following table details the behavior of remove for different
3184 3207 file states (columns) and option combinations (rows). The file
3185 3208 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3186 3209 reported by :hg:`status`). The actions are Warn, Remove (from
3187 3210 branch) and Delete (from disk)::
3188 3211
3189 3212         A  C  M  !
3190 3213 none    W  RD W  R
3191 3214 -f      R  RD RD R
3192 3215 -A      W  W  W  R
3193 3216 -Af     R  R  R  R
3194 3217
3195 3218 This command schedules the files to be removed at the next commit.
3196 3219 To undo a remove before that, see :hg:`revert`.
3197 3220
3198 3221 Returns 0 on success, 1 if any warnings encountered.
3199 3222 """
3200 3223
3201 3224 ret = 0
3202 3225 after, force = opts.get('after'), opts.get('force')
3203 3226 if not pats and not after:
3204 3227 raise util.Abort(_('no files specified'))
3205 3228
3206 3229 m = cmdutil.match(repo, pats, opts)
3207 3230 s = repo.status(match=m, clean=True)
3208 3231 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3209 3232
3210 3233 for f in m.files():
3211 3234 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3212 3235 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3213 3236 ret = 1
3214 3237
3215 3238 if force:
3216 3239 remove, forget = modified + deleted + clean, added
3217 3240 elif after:
3218 3241 remove, forget = deleted, []
3219 3242 for f in modified + added + clean:
3220 3243 ui.warn(_('not removing %s: file still exists (use -f'
3221 3244 ' to force removal)\n') % m.rel(f))
3222 3245 ret = 1
3223 3246 else:
3224 3247 remove, forget = deleted + clean, []
3225 3248 for f in modified:
3226 3249 ui.warn(_('not removing %s: file is modified (use -f'
3227 3250 ' to force removal)\n') % m.rel(f))
3228 3251 ret = 1
3229 3252 for f in added:
3230 3253 ui.warn(_('not removing %s: file has been marked for add (use -f'
3231 3254 ' to force removal)\n') % m.rel(f))
3232 3255 ret = 1
3233 3256
3234 3257 for f in sorted(remove + forget):
3235 3258 if ui.verbose or not m.exact(f):
3236 3259 ui.status(_('removing %s\n') % m.rel(f))
3237 3260
3238 3261 repo[None].forget(forget)
3239 3262 repo[None].remove(remove, unlink=not after)
3240 3263 return ret
3241 3264
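# Illustrative sketch (not part of Mercurial): the docstring's behaviour table
# for ``hg remove``, written out as a lookup from option combination and file
# state to action -- W(arn), R(emove from branch), D(elete from disk).

ACTIONS = {
    #             Added      Clean       Modified    Missing
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

assert ACTIONS['none']['M'] == 'W'   # modified file: warn unless forced
assert ACTIONS['-f']['M'] == 'RD'    # forced: remove from branch and disk
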
3242 3265 def rename(ui, repo, *pats, **opts):
3243 3266 """rename files; equivalent of copy + remove
3244 3267
3245 3268 Mark dest as copies of sources; mark sources for deletion. If dest
3246 3269 is a directory, copies are put in that directory. If dest is a
3247 3270 file, there can only be one source.
3248 3271
3249 3272 By default, this command copies the contents of files as they
3250 3273 exist in the working directory. If invoked with -A/--after, the
3251 3274 operation is recorded, but no copying is performed.
3252 3275
3253 3276 This command takes effect at the next commit. To undo a rename
3254 3277 before that, see :hg:`revert`.
3255 3278
3256 3279 Returns 0 on success, 1 if errors are encountered.
3257 3280 """
3258 3281 wlock = repo.wlock(False)
3259 3282 try:
3260 3283 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3261 3284 finally:
3262 3285 wlock.release()
3263 3286
3264 3287 def resolve(ui, repo, *pats, **opts):
3265 3288 """redo merges or set/view the merge status of files
3266 3289
3267 3290 Merges with unresolved conflicts are often the result of
3268 3291 non-interactive merging using the ``internal:merge`` configuration
3269 3292 setting, or a command-line merge tool like ``diff3``. The resolve
3270 3293 command is used to manage the files involved in a merge, after
3271 3294 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3272 3295 working directory must have two parents).
3273 3296
3274 3297 The resolve command can be used in the following ways:
3275 3298
3276 3299 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3277 3300 files, discarding any previous merge attempts. Re-merging is not
3278 3301 performed for files already marked as resolved. Use ``--all/-a``
3279 3302 to select all unresolved files. ``--tool`` can be used to specify
3280 3303 the merge tool used for the given files. It overrides the HGMERGE
3281 3304 environment variable and your configuration files.
3282 3305
3283 3306 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3284 3307 (e.g. after having manually fixed-up the files). The default is
3285 3308 to mark all unresolved files.
3286 3309
3287 3310 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3288 3311 default is to mark all resolved files.
3289 3312
3290 3313 - :hg:`resolve -l`: list files which had or still have conflicts.
3291 3314 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3292 3315
3293 3316 Note that Mercurial will not let you commit files with unresolved
3294 3317 merge conflicts. You must use :hg:`resolve -m ...` before you can
3295 3318 commit after a conflicting merge.
3296 3319
3297 3320 Returns 0 on success, 1 if any files fail a resolve attempt.
3298 3321 """
3299 3322
3300 3323 all, mark, unmark, show, nostatus = \
3301 3324 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3302 3325
3303 3326 if (show and (mark or unmark)) or (mark and unmark):
3304 3327 raise util.Abort(_("too many options specified"))
3305 3328 if pats and all:
3306 3329 raise util.Abort(_("can't specify --all and patterns"))
3307 3330 if not (all or pats or show or mark or unmark):
3308 3331 raise util.Abort(_('no files or directories specified; '
3309 3332 'use --all to remerge all files'))
3310 3333
3311 3334 ms = mergemod.mergestate(repo)
3312 3335 m = cmdutil.match(repo, pats, opts)
3313 3336 ret = 0
3314 3337
3315 3338 for f in ms:
3316 3339 if m(f):
3317 3340 if show:
3318 3341 if nostatus:
3319 3342 ui.write("%s\n" % f)
3320 3343 else:
3321 3344 ui.write("%s %s\n" % (ms[f].upper(), f),
3322 3345 label='resolve.' +
3323 3346 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3324 3347 elif mark:
3325 3348 ms.mark(f, "r")
3326 3349 elif unmark:
3327 3350 ms.mark(f, "u")
3328 3351 else:
3329 3352 wctx = repo[None]
3330 3353 mctx = wctx.parents()[-1]
3331 3354
3332 3355 # backup pre-resolve (merge uses .orig for its own purposes)
3333 3356 a = repo.wjoin(f)
3334 3357 util.copyfile(a, a + ".resolve")
3335 3358
3336 3359 try:
3337 3360 # resolve file
3338 3361 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3339 3362 if ms.resolve(f, wctx, mctx):
3340 3363 ret = 1
3341 3364 finally:
3342 3365 ui.setconfig('ui', 'forcemerge', '')
3343 3366
3344 3367 # replace filemerge's .orig file with our resolve file
3345 3368 util.rename(a + ".resolve", a + ".orig")
3346 3369
3347 3370 ms.commit()
3348 3371 return ret
3349 3372
3350 3373 def revert(ui, repo, *pats, **opts):
3351 3374 """restore individual files or directories to an earlier state
3352 3375
3353 3376 .. note::
3354 3377 This command is most likely not what you are looking for.
3355 3378 Revert will partially overwrite content in the working
3356 3379 directory without changing the working directory parents. Use
3357 3380 :hg:`update -r rev` to check out earlier revisions, or
3358 3381 :hg:`update --clean .` to undo a merge which has added another
3359 3382 parent.
3360 3383
3361 3384 With no revision specified, revert the named files or directories
3362 3385 to the contents they had in the parent of the working directory.
3363 3386 This restores the contents of the affected files to an unmodified
3364 3387 state and unschedules adds, removes, copies, and renames. If the
3365 3388 working directory has two parents, you must explicitly specify a
3366 3389 revision.
3367 3390
3368 3391 Using the -r/--rev option, revert the given files or directories
3369 3392 to their contents as of a specific revision. This can be helpful
3370 3393 to "roll back" some or all of an earlier change. See :hg:`help
3371 3394 dates` for a list of formats valid for -d/--date.
3372 3395
3373 3396 Revert modifies the working directory. It does not commit any
3374 3397 changes, or change the parent of the working directory. If you
3375 3398 revert to a revision other than the parent of the working
3376 3399 directory, the reverted files will thus appear modified
3377 3400 afterwards.
3378 3401
3379 3402 If a file has been deleted, it is restored. If the executable mode
3380 3403 of a file was changed, it is reset.
3381 3404
3382 3405 If names are given, all files matching the names are reverted.
3383 3406 If no arguments are given, no files are reverted.
3384 3407
3385 3408 Modified files are saved with a .orig suffix before reverting.
3386 3409 To disable these backups, use --no-backup.
3387 3410
3388 3411 Returns 0 on success.
3389 3412 """
3390 3413
3391 3414 if opts.get("date"):
3392 3415 if opts.get("rev"):
3393 3416 raise util.Abort(_("you can't specify a revision and a date"))
3394 3417 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3395 3418
3396 3419 parent, p2 = repo.dirstate.parents()
3397 3420 if not opts.get('rev') and p2 != nullid:
3398 3421 raise util.Abort(_('uncommitted merge - '
3399 3422 'use "hg update", see "hg help revert"'))
3400 3423
3401 3424 if not pats and not opts.get('all'):
3402 3425 raise util.Abort(_('no files or directories specified; '
3403 3426 'use --all to revert the whole repo'))
3404 3427
3405 3428 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3406 3429 node = ctx.node()
3407 3430 mf = ctx.manifest()
3408 3431 if node == parent:
3409 3432 pmf = mf
3410 3433 else:
3411 3434 pmf = None
3412 3435
3413 3436 # need all matching names in dirstate and manifest of target rev,
3414 3437 # so have to walk both. do not print errors if files exist in one
3415 3438 # but not other.
3416 3439
3417 3440 names = {}
3418 3441
3419 3442 wlock = repo.wlock()
3420 3443 try:
3421 3444 # walk dirstate.
3422 3445
3423 3446 m = cmdutil.match(repo, pats, opts)
3424 3447 m.bad = lambda x, y: False
3425 3448 for abs in repo.walk(m):
3426 3449 names[abs] = m.rel(abs), m.exact(abs)
3427 3450
3428 3451 # walk target manifest.
3429 3452
3430 3453 def badfn(path, msg):
3431 3454 if path in names:
3432 3455 return
3433 3456 path_ = path + '/'
3434 3457 for f in names:
3435 3458 if f.startswith(path_):
3436 3459 return
3437 3460 ui.warn("%s: %s\n" % (m.rel(path), msg))
3438 3461
3439 3462 m = cmdutil.match(repo, pats, opts)
3440 3463 m.bad = badfn
3441 3464 for abs in repo[node].walk(m):
3442 3465 if abs not in names:
3443 3466 names[abs] = m.rel(abs), m.exact(abs)
3444 3467
3445 3468 m = cmdutil.matchfiles(repo, names)
3446 3469 changes = repo.status(match=m)[:4]
3447 3470 modified, added, removed, deleted = map(set, changes)
3448 3471
3449 3472 # if f is a rename, also revert the source
3450 3473 cwd = repo.getcwd()
3451 3474 for f in added:
3452 3475 src = repo.dirstate.copied(f)
3453 3476 if src and src not in names and repo.dirstate[src] == 'r':
3454 3477 removed.add(src)
3455 3478 names[src] = (repo.pathto(src, cwd), True)
3456 3479
3457 3480 def removeforget(abs):
3458 3481 if repo.dirstate[abs] == 'a':
3459 3482 return _('forgetting %s\n')
3460 3483 return _('removing %s\n')
3461 3484
3462 3485 revert = ([], _('reverting %s\n'))
3463 3486 add = ([], _('adding %s\n'))
3464 3487 remove = ([], removeforget)
3465 3488 undelete = ([], _('undeleting %s\n'))
3466 3489
3467 3490 disptable = (
3468 3491 # dispatch table:
3469 3492 # file state
3470 3493 # action if in target manifest
3471 3494 # action if not in target manifest
3472 3495 # make backup if in target manifest
3473 3496 # make backup if not in target manifest
3474 3497 (modified, revert, remove, True, True),
3475 3498 (added, revert, remove, True, False),
3476 3499 (removed, undelete, None, False, False),
3477 3500 (deleted, revert, remove, False, False),
3478 3501 )
3479 3502
3480 3503 for abs, (rel, exact) in sorted(names.items()):
3481 3504 mfentry = mf.get(abs)
3482 3505 target = repo.wjoin(abs)
3483 3506 def handle(xlist, dobackup):
3484 3507 xlist[0].append(abs)
3485 3508 if (dobackup and not opts.get('no_backup') and
3486 3509 os.path.lexists(target)):
3487 3510 bakname = "%s.orig" % rel
3488 3511 ui.note(_('saving current version of %s as %s\n') %
3489 3512 (rel, bakname))
3490 3513 if not opts.get('dry_run'):
3491 3514 util.rename(target, bakname)
3492 3515 if ui.verbose or not exact:
3493 3516 msg = xlist[1]
3494 3517 if not isinstance(msg, basestring):
3495 3518 msg = msg(abs)
3496 3519 ui.status(msg % rel)
3497 3520 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3498 3521 if abs not in table:
3499 3522 continue
3500 3523 # file has changed in dirstate
3501 3524 if mfentry:
3502 3525 handle(hitlist, backuphit)
3503 3526 elif misslist is not None:
3504 3527 handle(misslist, backupmiss)
3505 3528 break
3506 3529 else:
3507 3530 if abs not in repo.dirstate:
3508 3531 if mfentry:
3509 3532 handle(add, True)
3510 3533 elif exact:
3511 3534 ui.warn(_('file not managed: %s\n') % rel)
3512 3535 continue
3513 3536 # file has not changed in dirstate
3514 3537 if node == parent:
3515 3538 if exact:
3516 3539 ui.warn(_('no changes needed to %s\n') % rel)
3517 3540 continue
3518 3541 if pmf is None:
3519 3542 # only need parent manifest in this unlikely case,
3520 3543 # so do not read by default
3521 3544 pmf = repo[parent].manifest()
3522 3545 if abs in pmf:
3523 3546 if mfentry:
3524 3547 # if version of file is same in parent and target
3525 3548 # manifests, do nothing
3526 3549 if (pmf[abs] != mfentry or
3527 3550 pmf.flags(abs) != mf.flags(abs)):
3528 3551 handle(revert, False)
3529 3552 else:
3530 3553 handle(remove, False)
3531 3554
3532 3555 if not opts.get('dry_run'):
3533 3556 def checkout(f):
3534 3557 fc = ctx[f]
3535 3558 repo.wwrite(f, fc.data(), fc.flags())
3536 3559
3537 3560 audit_path = util.path_auditor(repo.root)
3538 3561 for f in remove[0]:
3539 3562 if repo.dirstate[f] == 'a':
3540 3563 repo.dirstate.forget(f)
3541 3564 continue
3542 3565 audit_path(f)
3543 3566 try:
3544 3567 util.unlinkpath(repo.wjoin(f))
3545 3568 except OSError:
3546 3569 pass
3547 3570 repo.dirstate.remove(f)
3548 3571
3549 3572 normal = None
3550 3573 if node == parent:
3551 3574 # We're reverting to our parent. If possible, we'd like status
3552 3575 # to report the file as clean. We have to use normallookup for
3553 3576 # merges to avoid losing information about merged/dirty files.
3554 3577 if p2 != nullid:
3555 3578 normal = repo.dirstate.normallookup
3556 3579 else:
3557 3580 normal = repo.dirstate.normal
3558 3581 for f in revert[0]:
3559 3582 checkout(f)
3560 3583 if normal:
3561 3584 normal(f)
3562 3585
3563 3586 for f in add[0]:
3564 3587 checkout(f)
3565 3588 repo.dirstate.add(f)
3566 3589
3567 3590 normal = repo.dirstate.normallookup
3568 3591 if node == parent and p2 == nullid:
3569 3592 normal = repo.dirstate.normal
3570 3593 for f in undelete[0]:
3571 3594 checkout(f)
3572 3595 normal(f)
3573 3596
3574 3597 finally:
3575 3598 wlock.release()
3576 3599
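# Illustrative sketch (not part of Mercurial): the revert dispatch table above,
# reduced to a plain mapping from (working-copy state, present in the target
# manifest?) to the action taken and whether a .orig backup is made.

DISPATCH = {
    # state         in target             not in target
    'modified': (('revert',   True),  ('remove', True)),
    'added':    (('revert',   True),  ('remove', False)),
    'removed':  (('undelete', False), (None,     False)),
    'deleted':  (('revert',   False), ('remove', False)),
}

def revert_action(state, in_target):
    hit, miss = DISPATCH[state]
    return hit if in_target else miss

assert revert_action('modified', True) == ('revert', True)
assert revert_action('removed', False) == (None, False)   # nothing to do
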
3577 3600 def rollback(ui, repo, **opts):
3578 3601 """roll back the last transaction (dangerous)
3579 3602
3580 3603 This command should be used with care. There is only one level of
3581 3604 rollback, and there is no way to undo a rollback. It will also
3582 3605 restore the dirstate at the time of the last transaction, losing
3583 3606 any dirstate changes since that time. This command does not alter
3584 3607 the working directory.
3585 3608
3586 3609 Transactions are used to encapsulate the effects of all commands
3587 3610 that create new changesets or propagate existing changesets into a
3588 3611 repository. For example, the following commands are transactional,
3589 3612 and their effects can be rolled back:
3590 3613
3591 3614 - commit
3592 3615 - import
3593 3616 - pull
3594 3617 - push (with this repository as the destination)
3595 3618 - unbundle
3596 3619
3597 3620 This command is not intended for use on public repositories. Once
3598 3621 changes are visible for pull by other users, rolling a transaction
3599 3622 back locally is ineffective (someone else may already have pulled
3600 3623 the changes). Furthermore, a race is possible with readers of the
3601 3624 repository; for example an in-progress pull from the repository
3602 3625 may fail if a rollback is performed.
3603 3626
3604 3627 Returns 0 on success, 1 if no rollback data is available.
3605 3628 """
3606 3629 return repo.rollback(opts.get('dry_run'))
3607 3630
3608 3631 def root(ui, repo):
3609 3632 """print the root (top) of the current working directory
3610 3633
3611 3634 Print the root directory of the current repository.
3612 3635
3613 3636 Returns 0 on success.
3614 3637 """
3615 3638 ui.write(repo.root + "\n")
3616 3639
3617 3640 def serve(ui, repo, **opts):
3618 3641 """start stand-alone webserver
3619 3642
3620 3643 Start a local HTTP repository browser and pull server. You can use
3621 3644 this for ad-hoc sharing and browsing of repositories. It is
3622 3645 recommended to use a real web server to serve a repository for
3623 3646 longer periods of time.
3624 3647
3625 3648 Please note that the server does not implement access control.
3626 3649 This means that, by default, anybody can read from the server and
3627 3650 nobody can write to it. Set the ``web.allow_push``
3628 3651 option to ``*`` to allow everybody to push to the server. You
3629 3652 should use a real web server if you need to authenticate users.
3630 3653
3631 3654 By default, the server logs accesses to stdout and errors to
3632 3655 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3633 3656 files.
3634 3657
3635 3658 To have the server choose a free port number to listen on, specify
3636 3659 a port number of 0; in this case, the server will print the port
3637 3660 number it uses.
3638 3661
3639 3662 Returns 0 on success.
3640 3663 """
3641 3664
3642 3665 if opts["stdio"]:
3643 3666 if repo is None:
3644 3667 raise error.RepoError(_("There is no Mercurial repository here"
3645 3668 " (.hg not found)"))
3646 3669 s = sshserver.sshserver(ui, repo)
3647 3670 s.serve_forever()
3648 3671
3649 3672 # this way we can check if something was given in the command-line
3650 3673 if opts.get('port'):
3651 3674 opts['port'] = util.getport(opts.get('port'))
3652 3675
3653 3676 baseui = repo and repo.baseui or ui
3654 3677 optlist = ("name templates style address port prefix ipv6"
3655 3678 " accesslog errorlog certificate encoding")
3656 3679 for o in optlist.split():
3657 3680 val = opts.get(o, '')
3658 3681 if val in (None, ''): # should check against default options instead
3659 3682 continue
3660 3683 baseui.setconfig("web", o, val)
3661 3684 if repo and repo.ui != baseui:
3662 3685 repo.ui.setconfig("web", o, val)
3663 3686
3664 3687 o = opts.get('web_conf') or opts.get('webdir_conf')
3665 3688 if not o:
3666 3689 if not repo:
3667 3690 raise error.RepoError(_("There is no Mercurial repository"
3668 3691 " here (.hg not found)"))
3669 3692 o = repo.root
3670 3693
3671 3694 app = hgweb.hgweb(o, baseui=ui)
3672 3695
3673 3696 class service(object):
3674 3697 def init(self):
3675 3698 util.set_signal_handler()
3676 3699 self.httpd = hgweb.server.create_server(ui, app)
3677 3700
3678 3701 if opts['port'] and not ui.verbose:
3679 3702 return
3680 3703
3681 3704 if self.httpd.prefix:
3682 3705 prefix = self.httpd.prefix.strip('/') + '/'
3683 3706 else:
3684 3707 prefix = ''
3685 3708
3686 3709 port = ':%d' % self.httpd.port
3687 3710 if port == ':80':
3688 3711 port = ''
3689 3712
3690 3713 bindaddr = self.httpd.addr
3691 3714 if bindaddr == '0.0.0.0':
3692 3715 bindaddr = '*'
3693 3716 elif ':' in bindaddr: # IPv6
3694 3717 bindaddr = '[%s]' % bindaddr
3695 3718
3696 3719 fqaddr = self.httpd.fqaddr
3697 3720 if ':' in fqaddr:
3698 3721 fqaddr = '[%s]' % fqaddr
3699 3722 if opts['port']:
3700 3723 write = ui.status
3701 3724 else:
3702 3725 write = ui.write
3703 3726 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3704 3727 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3705 3728
3706 3729 def run(self):
3707 3730 self.httpd.serve_forever()
3708 3731
3709 3732 service = service()
3710 3733
3711 3734 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3712 3735
3713 3736 def status(ui, repo, *pats, **opts):
3714 3737 """show changed files in the working directory
3715 3738
3716 3739 Show status of files in the repository. If names are given, only
3717 3740 files that match are shown. Files that are clean or ignored or
3718 3741 the source of a copy/move operation are not listed unless
3719 3742 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3720 3743 Unless options described with "show only ..." are given, the
3721 3744 options -mardu are used.
3722 3745
3723 3746 Option -q/--quiet hides untracked (unknown and ignored) files
3724 3747 unless explicitly requested with -u/--unknown or -i/--ignored.
3725 3748
3726 3749 .. note::
3727 3750 status may appear to disagree with diff if permissions have
3728 3751 changed or a merge has occurred. The standard diff format does
3729 3752 not report permission changes and diff only reports changes
3730 3753 relative to one merge parent.
3731 3754
3732 3755 If one revision is given, it is used as the base revision.
3733 3756 If two revisions are given, the differences between them are
3734 3757 shown. The --change option can also be used as a shortcut to list
3735 3758 the changed files of a revision from its first parent.
3736 3759
3737 3760 The codes used to show the status of files are::
3738 3761
3739 3762 M = modified
3740 3763 A = added
3741 3764 R = removed
3742 3765 C = clean
3743 3766 ! = missing (deleted by non-hg command, but still tracked)
3744 3767 ? = not tracked
3745 3768 I = ignored
3746 3769   = origin of the previous file listed as A (added)
3747 3770
3748 3771 Returns 0 on success.
3749 3772 """
3750 3773
3751 3774 revs = opts.get('rev')
3752 3775 change = opts.get('change')
3753 3776
3754 3777 if revs and change:
3755 3778 msg = _('cannot specify --rev and --change at the same time')
3756 3779 raise util.Abort(msg)
3757 3780 elif change:
3758 3781 node2 = repo.lookup(change)
3759 3782 node1 = repo[node2].parents()[0].node()
3760 3783 else:
3761 3784 node1, node2 = cmdutil.revpair(repo, revs)
3762 3785
3763 3786 cwd = (pats and repo.getcwd()) or ''
3764 3787 end = opts.get('print0') and '\0' or '\n'
3765 3788 copy = {}
3766 3789 states = 'modified added removed deleted unknown ignored clean'.split()
3767 3790 show = [k for k in states if opts.get(k)]
3768 3791 if opts.get('all'):
3769 3792 show += ui.quiet and (states[:4] + ['clean']) or states
3770 3793 if not show:
3771 3794 show = ui.quiet and states[:4] or states[:5]
3772 3795
3773 3796 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3774 3797 'ignored' in show, 'clean' in show, 'unknown' in show,
3775 3798 opts.get('subrepos'))
3776 3799 changestates = zip(states, 'MAR!?IC', stat)
3777 3800
3778 3801 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3779 3802 ctxn = repo[nullid]
3780 3803 ctx1 = repo[node1]
3781 3804 ctx2 = repo[node2]
3782 3805 added = stat[1]
3783 3806 if node2 is None:
3784 3807 added = stat[0] + stat[1] # merged?
3785 3808
3786 3809 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3787 3810 if k in added:
3788 3811 copy[k] = v
3789 3812 elif v in added:
3790 3813 copy[v] = k
3791 3814
3792 3815 for state, char, files in changestates:
3793 3816 if state in show:
3794 3817 format = "%s %%s%s" % (char, end)
3795 3818 if opts.get('no_status'):
3796 3819 format = "%%s%s" % end
3797 3820
3798 3821 for f in files:
3799 3822 ui.write(format % repo.pathto(f, cwd),
3800 3823 label='status.' + state)
3801 3824 if f in copy:
3802 3825 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3803 3826 label='status.copied')
3804 3827
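# Illustrative sketch (not part of Mercurial): the pairing of status code
# characters with the seven states returned by repo.status(), as done by
# zip(states, 'MAR!?IC', stat) above.

STATES = 'modified added removed deleted unknown ignored clean'.split()
CODES = 'MAR!?IC'

code_for = dict(zip(STATES, CODES))
assert code_for['deleted'] == '!'   # missing: deleted outside of hg
assert code_for['unknown'] == '?'   # not tracked
assert code_for['clean'] == 'C'
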
3805 3828 def summary(ui, repo, **opts):
3806 3829 """summarize working directory state
3807 3830
3808 3831 This generates a brief summary of the working directory state,
3809 3832 including parents, branch, commit status, and available updates.
3810 3833
3811 3834 With the --remote option, this will check the default paths for
3812 3835 incoming and outgoing changes. This can be time-consuming.
3813 3836
3814 3837 Returns 0 on success.
3815 3838 """
3816 3839
3817 3840 ctx = repo[None]
3818 3841 parents = ctx.parents()
3819 3842 pnode = parents[0].node()
3820 3843
3821 3844 for p in parents:
3822 3845 # label with log.changeset (instead of log.parent) since this
3823 3846 # shows a working directory parent *changeset*:
3824 3847 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3825 3848 label='log.changeset')
3826 3849 ui.write(' '.join(p.tags()), label='log.tag')
3827 3850 if p.bookmarks():
3828 3851 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3829 3852 if p.rev() == -1:
3830 3853 if not len(repo):
3831 3854 ui.write(_(' (empty repository)'))
3832 3855 else:
3833 3856 ui.write(_(' (no revision checked out)'))
3834 3857 ui.write('\n')
3835 3858 if p.description():
3836 3859 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3837 3860 label='log.summary')
3838 3861
3839 3862 branch = ctx.branch()
3840 3863 bheads = repo.branchheads(branch)
3841 3864 m = _('branch: %s\n') % branch
3842 3865 if branch != 'default':
3843 3866 ui.write(m, label='log.branch')
3844 3867 else:
3845 3868 ui.status(m, label='log.branch')
3846 3869
3847 3870 st = list(repo.status(unknown=True))[:6]
3848 3871
3849 3872 c = repo.dirstate.copies()
3850 3873 copied, renamed = [], []
3851 3874 for d, s in c.iteritems():
3852 3875 if s in st[2]:
3853 3876 st[2].remove(s)
3854 3877 renamed.append(d)
3855 3878 else:
3856 3879 copied.append(d)
3857 3880 if d in st[1]:
3858 3881 st[1].remove(d)
3859 3882 st.insert(3, renamed)
3860 3883 st.insert(4, copied)
3861 3884
3862 3885 ms = mergemod.mergestate(repo)
3863 3886 st.append([f for f in ms if ms[f] == 'u'])
3864 3887
3865 3888 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3866 3889 st.append(subs)
3867 3890
3868 3891 labels = [ui.label(_('%d modified'), 'status.modified'),
3869 3892 ui.label(_('%d added'), 'status.added'),
3870 3893 ui.label(_('%d removed'), 'status.removed'),
3871 3894 ui.label(_('%d renamed'), 'status.copied'),
3872 3895 ui.label(_('%d copied'), 'status.copied'),
3873 3896 ui.label(_('%d deleted'), 'status.deleted'),
3874 3897 ui.label(_('%d unknown'), 'status.unknown'),
3875 3898 ui.label(_('%d ignored'), 'status.ignored'),
3876 3899 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3877 3900 ui.label(_('%d subrepos'), 'status.modified')]
3878 3901 t = []
3879 3902 for s, l in zip(st, labels):
3880 3903 if s:
3881 3904 t.append(l % len(s))
3882 3905
3883 3906 t = ', '.join(t)
3884 3907 cleanworkdir = False
3885 3908
3886 3909 if len(parents) > 1:
3887 3910 t += _(' (merge)')
3888 3911 elif branch != parents[0].branch():
3889 3912 t += _(' (new branch)')
3890 3913 elif (parents[0].extra().get('close') and
3891 3914 pnode in repo.branchheads(branch, closed=True)):
3892 3915 t += _(' (head closed)')
3893 3916 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3894 3917 t += _(' (clean)')
3895 3918 cleanworkdir = True
3896 3919 elif pnode not in bheads:
3897 3920 t += _(' (new branch head)')
3898 3921
3899 3922 if cleanworkdir:
3900 3923 ui.status(_('commit: %s\n') % t.strip())
3901 3924 else:
3902 3925 ui.write(_('commit: %s\n') % t.strip())
3903 3926
3904 3927 # all ancestors of branch heads - all ancestors of parent = new csets
3905 3928 new = [0] * len(repo)
3906 3929 cl = repo.changelog
3907 3930 for a in [cl.rev(n) for n in bheads]:
3908 3931 new[a] = 1
3909 3932 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3910 3933 new[a] = 1
3911 3934 for a in [p.rev() for p in parents]:
3912 3935 if a >= 0:
3913 3936 new[a] = 0
3914 3937 for a in cl.ancestors(*[p.rev() for p in parents]):
3915 3938 new[a] = 0
3916 3939 new = sum(new)
3917 3940
3918 3941 if new == 0:
3919 3942 ui.status(_('update: (current)\n'))
3920 3943 elif pnode not in bheads:
3921 3944 ui.write(_('update: %d new changesets (update)\n') % new)
3922 3945 else:
3923 3946 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3924 3947 (new, len(bheads)))
3925 3948
3926 3949 if opts.get('remote'):
3927 3950 t = []
3928 3951 source, branches = hg.parseurl(ui.expandpath('default'))
3929 3952 other = hg.repository(hg.remoteui(repo, {}), source)
3930 3953 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3931 3954 ui.debug('comparing with %s\n' % url.hidepassword(source))
3932 3955 repo.ui.pushbuffer()
3933 3956 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3934 3957 repo.ui.popbuffer()
3935 3958 if incoming:
3936 3959 t.append(_('1 or more incoming'))
3937 3960
3938 3961 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3939 3962 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3940 3963 other = hg.repository(hg.remoteui(repo, {}), dest)
3941 3964 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3942 3965 repo.ui.pushbuffer()
3943 3966 o = discovery.findoutgoing(repo, other)
3944 3967 repo.ui.popbuffer()
3945 3968 o = repo.changelog.nodesbetween(o, None)[0]
3946 3969 if o:
3947 3970 t.append(_('%d outgoing') % len(o))
3948 3971 if 'bookmarks' in other.listkeys('namespaces'):
3949 3972 lmarks = repo.listkeys('bookmarks')
3950 3973 rmarks = other.listkeys('bookmarks')
3951 3974 diff = set(rmarks) - set(lmarks)
3952 3975 if len(diff) > 0:
3953 3976 t.append(_('%d incoming bookmarks') % len(diff))
3954 3977 diff = set(lmarks) - set(rmarks)
3955 3978 if len(diff) > 0:
3956 3979 t.append(_('%d outgoing bookmarks') % len(diff))
3957 3980
3958 3981 if t:
3959 3982 ui.write(_('remote: %s\n') % (', '.join(t)))
3960 3983 else:
3961 3984 ui.status(_('remote: (synced)\n'))
3962 3985
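# Illustrative sketch (not part of Mercurial): the "update:" count above is the
# number of changesets reachable from the branch heads but not from the working
# directory parents (ancestors of heads minus ancestors of parents). The toy
# DAG below, mapping child -> parents, is invented for the example.

def ancestors(dag, heads):
    seen, stack = set(), list(heads)
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(dag.get(n, []))
    return seen

dag = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['c'], 'e': ['c']}
bheads, parents = ['d', 'e'], ['c']
new = ancestors(dag, bheads) - ancestors(dag, parents)
assert new == {'d', 'e'}   # two new changesets available for update/merge
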
3963 3986 def tag(ui, repo, name1, *names, **opts):
3964 3987 """add one or more tags for the current or given revision
3965 3988
3966 3989 Name a particular revision using <name>.
3967 3990
3968 3991 Tags are used to name particular revisions of the repository and are
3969 3992 very useful to compare different revisions, to go back to significant
3970 3993 earlier versions or to mark branch points as releases, etc. Changing
3971 3994 an existing tag is normally disallowed; use -f/--force to override.
3972 3995
3973 3996 If no revision is given, the parent of the working directory is
3974 3997 used, or tip if no revision is checked out.
3975 3998
3976 3999 To facilitate version control, distribution, and merging of tags,
3977 4000 they are stored as a file named ".hgtags" which is managed similarly
3978 4001 to other project files and can be hand-edited if necessary. This
3979 4002 also means that tagging creates a new commit. The file
3980 4003 ".hg/localtags" is used for local tags (not shared among
3981 4004 repositories).
3982 4005
3983 4006 Tag commits are usually made at the head of a branch. If the parent
3984 4007 of the working directory is not a branch head, :hg:`tag` aborts; use
3985 4008 -f/--force to force the tag commit to be based on a non-head
3986 4009 changeset.
3987 4010
3988 4011 See :hg:`help dates` for a list of formats valid for -d/--date.
3989 4012
3990 4013 Since tag names have priority over branch names during revision
3991 4014 lookup, using an existing branch name as a tag name is discouraged.
3992 4015
3993 4016 Returns 0 on success.
3994 4017 """
3995 4018
3996 4019 rev_ = "."
3997 4020 names = [t.strip() for t in (name1,) + names]
3998 4021 if len(names) != len(set(names)):
3999 4022 raise util.Abort(_('tag names must be unique'))
4000 4023 for n in names:
4001 4024 if n in ['tip', '.', 'null']:
4002 4025 raise util.Abort(_('the name \'%s\' is reserved') % n)
4003 4026 if not n:
4004 4027 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4005 4028 if opts.get('rev') and opts.get('remove'):
4006 4029 raise util.Abort(_("--rev and --remove are incompatible"))
4007 4030 if opts.get('rev'):
4008 4031 rev_ = opts['rev']
4009 4032 message = opts.get('message')
4010 4033 if opts.get('remove'):
4011 4034 expectedtype = opts.get('local') and 'local' or 'global'
4012 4035 for n in names:
4013 4036 if not repo.tagtype(n):
4014 4037 raise util.Abort(_('tag \'%s\' does not exist') % n)
4015 4038 if repo.tagtype(n) != expectedtype:
4016 4039 if expectedtype == 'global':
4017 4040 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4018 4041 else:
4019 4042 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4020 4043 rev_ = nullid
4021 4044 if not message:
4022 4045 # we don't translate commit messages
4023 4046 message = 'Removed tag %s' % ', '.join(names)
4024 4047 elif not opts.get('force'):
4025 4048 for n in names:
4026 4049 if n in repo.tags():
4027 4050 raise util.Abort(_('tag \'%s\' already exists '
4028 4051 '(use -f to force)') % n)
4029 4052 if not opts.get('local'):
4030 4053 p1, p2 = repo.dirstate.parents()
4031 4054 if p2 != nullid:
4032 4055 raise util.Abort(_('uncommitted merge'))
4033 4056 bheads = repo.branchheads()
4034 4057 if not opts.get('force') and bheads and p1 not in bheads:
4035 4058 raise util.Abort(_('not at a branch head (use -f to force)'))
4036 4059 r = cmdutil.revsingle(repo, rev_).node()
4037 4060
4038 4061 if not message:
4039 4062 # we don't translate commit messages
4040 4063 message = ('Added tag %s for changeset %s' %
4041 4064 (', '.join(names), short(r)))
4042 4065
4043 4066 date = opts.get('date')
4044 4067 if date:
4045 4068 date = util.parsedate(date)
4046 4069
4047 4070 if opts.get('edit'):
4048 4071 message = ui.edit(message, ui.username())
4049 4072
4050 4073 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4051 4074
4052 4075 def tags(ui, repo):
4053 4076 """list repository tags
4054 4077
4055 4078 This lists both regular and local tags. When the -v/--verbose
4056 4079 switch is used, a third column "local" is printed for local tags.
4057 4080
4058 4081 Returns 0 on success.
4059 4082 """
4060 4083
4061 4084 hexfunc = ui.debugflag and hex or short
4062 4085 tagtype = ""
4063 4086
4064 4087 for t, n in reversed(repo.tagslist()):
4065 4088 if ui.quiet:
4066 4089 ui.write("%s\n" % t)
4067 4090 continue
4068 4091
4069 4092 try:
4070 4093 hn = hexfunc(n)
4071 4094 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4072 4095 except error.LookupError:
4073 4096 r = " ?:%s" % hn
4074 4097 else:
4075 4098 spaces = " " * (30 - encoding.colwidth(t))
4076 4099 if ui.verbose:
4077 4100 if repo.tagtype(t) == 'local':
4078 4101 tagtype = " local"
4079 4102 else:
4080 4103 tagtype = ""
4081 4104 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4082 4105
4083 4106 def tip(ui, repo, **opts):
4084 4107 """show the tip revision
4085 4108
4086 4109 The tip revision (usually just called the tip) is the changeset
4087 4110 most recently added to the repository (and therefore the most
4088 4111 recently changed head).
4089 4112
4090 4113 If you have just made a commit, that commit will be the tip. If
4091 4114 you have just pulled changes from another repository, the tip of
4092 4115 that repository becomes the current tip. The "tip" tag is special
4093 4116 and cannot be renamed or assigned to a different changeset.
4094 4117
4095 4118 Returns 0 on success.
4096 4119 """
4097 4120 displayer = cmdutil.show_changeset(ui, repo, opts)
4098 4121 displayer.show(repo[len(repo) - 1])
4099 4122 displayer.close()
4100 4123
4101 4124 def unbundle(ui, repo, fname1, *fnames, **opts):
4102 4125 """apply one or more changegroup files
4103 4126
4104 4127 Apply one or more compressed changegroup files generated by the
4105 4128 bundle command.
4106 4129
4107 4130 Returns 0 on success, 1 if an update has unresolved files.
4108 4131 """
4109 4132 fnames = (fname1,) + fnames
4110 4133
4111 4134 lock = repo.lock()
4112 4135 wc = repo['.']
4113 4136 try:
4114 4137 for fname in fnames:
4115 4138 f = url.open(ui, fname)
4116 4139 gen = changegroup.readbundle(f, fname)
4117 4140 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4118 4141 lock=lock)
4119 4142 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4120 4143 finally:
4121 4144 lock.release()
4122 4145 return postincoming(ui, repo, modheads, opts.get('update'), None)
4123 4146
4124 4147 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4125 4148 """update working directory (or switch revisions)
4126 4149
4127 4150 Update the repository's working directory to the specified
4128 4151 changeset. If no changeset is specified, update to the tip of the
4129 4152 current named branch.
4130 4153
4131 4154 If the changeset is not a descendant of the working directory's
4132 4155 parent, the update is aborted. With the -c/--check option, the
4133 4156 working directory is checked for uncommitted changes; if none are
4134 4157 found, the working directory is updated to the specified
4135 4158 changeset.
4136 4159
4137 4160 The following rules apply when the working directory contains
4138 4161 uncommitted changes:
4139 4162
4140 4163 1. If neither -c/--check nor -C/--clean is specified, and if
4141 4164 the requested changeset is an ancestor or descendant of
4142 4165 the working directory's parent, the uncommitted changes
4143 4166 are merged into the requested changeset and the merged
4144 4167 result is left uncommitted. If the requested changeset is
4145 4168 not an ancestor or descendant (that is, it is on another
4146 4169 branch), the update is aborted and the uncommitted changes
4147 4170 are preserved.
4148 4171
4149 4172 2. With the -c/--check option, the update is aborted and the
4150 4173 uncommitted changes are preserved.
4151 4174
4152 4175 3. With the -C/--clean option, uncommitted changes are discarded and
4153 4176 the working directory is updated to the requested changeset.
4154 4177
4155 4178 Use null as the changeset to remove the working directory (like
4156 4179 :hg:`clone -U`).
4157 4180
4158 4181 If you want to update just one file to an older changeset, use
4159 4182 :hg:`revert`.
4160 4183
4161 4184 See :hg:`help dates` for a list of formats valid for -d/--date.
4162 4185
4163 4186 Returns 0 on success, 1 if there are unresolved files.
4164 4187 """
4165 4188 if rev and node:
4166 4189 raise util.Abort(_("please specify just one revision"))
4167 4190
4168 4191 if rev is None or rev == '':
4169 4192 rev = node
4170 4193
4171 4194 # if we defined a bookmark, we have to remember the original bookmark name
4172 4195 brev = rev
4173 4196 rev = cmdutil.revsingle(repo, rev, rev).rev()
4174 4197
4175 4198 if check and clean:
4176 4199 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4177 4200
4178 4201 if check:
4179 4202 # we could use dirty() but we can ignore merge and branch trivia
4180 4203 c = repo[None]
4181 4204 if c.modified() or c.added() or c.removed():
4182 4205 raise util.Abort(_("uncommitted local changes"))
4183 4206
4184 4207 if date:
4185 4208 if rev:
4186 4209 raise util.Abort(_("you can't specify a revision and a date"))
4187 4210 rev = cmdutil.finddate(ui, repo, date)
4188 4211
4189 4212 if clean or check:
4190 4213 ret = hg.clean(repo, rev)
4191 4214 else:
4192 4215 ret = hg.update(repo, rev)
4193 4216
4194 4217 if brev in repo._bookmarks:
4195 4218 bookmarks.setcurrent(repo, brev)
4196 4219
4197 4220 return ret
4198 4221
4199 4222 def verify(ui, repo):
4200 4223 """verify the integrity of the repository
4201 4224
4202 4225 Verify the integrity of the current repository.
4203 4226
4204 4227 This will perform an extensive check of the repository's
4205 4228 integrity, validating the hashes and checksums of each entry in
4206 4229 the changelog, manifest, and tracked files, as well as the
4207 4230 integrity of their crosslinks and indices.
4208 4231
4209 4232 Returns 0 on success, 1 if errors are encountered.
4210 4233 """
4211 4234 return hg.verify(repo)
4212 4235
4213 4236 def version_(ui):
4214 4237 """output version and copyright information"""
4215 4238 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4216 4239 % util.version())
4217 4240 ui.status(_(
4218 4241 "(see http://mercurial.selenic.com for more information)\n"
4219 4242 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4220 4243 "This is free software; see the source for copying conditions. "
4221 4244 "There is NO\nwarranty; "
4222 4245 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4223 4246 ))
4224 4247
4225 4248 # Command options and aliases are listed here, alphabetically
4226 4249
4227 4250 globalopts = [
4228 4251 ('R', 'repository', '',
4229 4252 _('repository root directory or name of overlay bundle file'),
4230 4253 _('REPO')),
4231 4254 ('', 'cwd', '',
4232 4255 _('change working directory'), _('DIR')),
4233 4256 ('y', 'noninteractive', None,
4234 4257 _('do not prompt, assume \'yes\' for any required answers')),
4235 4258 ('q', 'quiet', None, _('suppress output')),
4236 4259 ('v', 'verbose', None, _('enable additional output')),
4237 4260 ('', 'config', [],
4238 4261 _('set/override config option (use \'section.name=value\')'),
4239 4262 _('CONFIG')),
4240 4263 ('', 'debug', None, _('enable debugging output')),
4241 4264 ('', 'debugger', None, _('start debugger')),
4242 4265 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4243 4266 _('ENCODE')),
4244 4267 ('', 'encodingmode', encoding.encodingmode,
4245 4268 _('set the charset encoding mode'), _('MODE')),
4246 4269 ('', 'traceback', None, _('always print a traceback on exception')),
4247 4270 ('', 'time', None, _('time how long the command takes')),
4248 4271 ('', 'profile', None, _('print command execution profile')),
4249 4272 ('', 'version', None, _('output version information and exit')),
4250 4273 ('h', 'help', None, _('display help and exit')),
4251 4274 ]
4252 4275
4253 4276 dryrunopts = [('n', 'dry-run', None,
4254 4277 _('do not perform actions, just print output'))]
4255 4278
4256 4279 remoteopts = [
4257 4280 ('e', 'ssh', '',
4258 4281 _('specify ssh command to use'), _('CMD')),
4259 4282 ('', 'remotecmd', '',
4260 4283 _('specify hg command to run on the remote side'), _('CMD')),
4261 4284 ('', 'insecure', None,
4262 4285 _('do not verify server certificate (ignoring web.cacerts config)')),
4263 4286 ]
4264 4287
4265 4288 walkopts = [
4266 4289 ('I', 'include', [],
4267 4290 _('include names matching the given patterns'), _('PATTERN')),
4268 4291 ('X', 'exclude', [],
4269 4292 _('exclude names matching the given patterns'), _('PATTERN')),
4270 4293 ]
4271 4294
4272 4295 commitopts = [
4273 4296 ('m', 'message', '',
4274 4297 _('use text as commit message'), _('TEXT')),
4275 4298 ('l', 'logfile', '',
4276 4299 _('read commit message from file'), _('FILE')),
4277 4300 ]
4278 4301
4279 4302 commitopts2 = [
4280 4303 ('d', 'date', '',
4281 4304 _('record datecode as commit date'), _('DATE')),
4282 4305 ('u', 'user', '',
4283 4306 _('record the specified user as committer'), _('USER')),
4284 4307 ]
4285 4308
4286 4309 templateopts = [
4287 4310 ('', 'style', '',
4288 4311 _('display using template map file'), _('STYLE')),
4289 4312 ('', 'template', '',
4290 4313 _('display with template'), _('TEMPLATE')),
4291 4314 ]
4292 4315
4293 4316 logopts = [
4294 4317 ('p', 'patch', None, _('show patch')),
4295 4318 ('g', 'git', None, _('use git extended diff format')),
4296 4319 ('l', 'limit', '',
4297 4320 _('limit number of changes displayed'), _('NUM')),
4298 4321 ('M', 'no-merges', None, _('do not show merges')),
4299 4322 ('', 'stat', None, _('output diffstat-style summary of changes')),
4300 4323 ] + templateopts
4301 4324
4302 4325 diffopts = [
4303 4326 ('a', 'text', None, _('treat all files as text')),
4304 4327 ('g', 'git', None, _('use git extended diff format')),
4305 4328 ('', 'nodates', None, _('omit dates from diff headers'))
4306 4329 ]
4307 4330
4308 4331 diffopts2 = [
4309 4332 ('p', 'show-function', None, _('show which function each change is in')),
4310 4333 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4311 4334 ('w', 'ignore-all-space', None,
4312 4335 _('ignore white space when comparing lines')),
4313 4336 ('b', 'ignore-space-change', None,
4314 4337 _('ignore changes in the amount of white space')),
4315 4338 ('B', 'ignore-blank-lines', None,
4316 4339 _('ignore changes whose lines are all blank')),
4317 4340 ('U', 'unified', '',
4318 4341 _('number of lines of context to show'), _('NUM')),
4319 4342 ('', 'stat', None, _('output diffstat-style summary of changes')),
4320 4343 ]
4321 4344
4322 4345 similarityopts = [
4323 4346 ('s', 'similarity', '',
4324 4347 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4325 4348 ]
4326 4349
4327 4350 subrepoopts = [
4328 4351 ('S', 'subrepos', None,
4329 4352 _('recurse into subrepositories'))
4330 4353 ]
4331 4354
4332 4355 table = {
4333 4356 "^add": (add, walkopts + subrepoopts + dryrunopts,
4334 4357 _('[OPTION]... [FILE]...')),
4335 4358 "addremove":
4336 4359 (addremove, similarityopts + walkopts + dryrunopts,
4337 4360 _('[OPTION]... [FILE]...')),
4338 4361 "^annotate|blame":
4339 4362 (annotate,
4340 4363 [('r', 'rev', '',
4341 4364 _('annotate the specified revision'), _('REV')),
4342 4365 ('', 'follow', None,
4343 4366 _('follow copies/renames and list the filename (DEPRECATED)')),
4344 4367 ('', 'no-follow', None, _("don't follow copies and renames")),
4345 4368 ('a', 'text', None, _('treat all files as text')),
4346 4369 ('u', 'user', None, _('list the author (long with -v)')),
4347 4370 ('f', 'file', None, _('list the filename')),
4348 4371 ('d', 'date', None, _('list the date (short with -q)')),
4349 4372 ('n', 'number', None, _('list the revision number (default)')),
4350 4373 ('c', 'changeset', None, _('list the changeset')),
4351 4374 ('l', 'line-number', None,
4352 4375 _('show line number at the first appearance'))
4353 4376 ] + walkopts,
4354 4377 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4355 4378 "archive":
4356 4379 (archive,
4357 4380 [('', 'no-decode', None, _('do not pass files through decoders')),
4358 4381 ('p', 'prefix', '',
4359 4382 _('directory prefix for files in archive'), _('PREFIX')),
4360 4383 ('r', 'rev', '',
4361 4384 _('revision to distribute'), _('REV')),
4362 4385 ('t', 'type', '',
4363 4386 _('type of distribution to create'), _('TYPE')),
4364 4387 ] + subrepoopts + walkopts,
4365 4388 _('[OPTION]... DEST')),
4366 4389 "backout":
4367 4390 (backout,
4368 4391 [('', 'merge', None,
4369 4392 _('merge with old dirstate parent after backout')),
4370 4393 ('', 'parent', '',
4371 4394 _('parent to choose when backing out merge'), _('REV')),
4372 4395 ('t', 'tool', '',
4373 4396 _('specify merge tool')),
4374 4397 ('r', 'rev', '',
4375 4398 _('revision to backout'), _('REV')),
4376 4399 ] + walkopts + commitopts + commitopts2,
4377 4400 _('[OPTION]... [-r] REV')),
4378 4401 "bisect":
4379 4402 (bisect,
4380 4403 [('r', 'reset', False, _('reset bisect state')),
4381 4404 ('g', 'good', False, _('mark changeset good')),
4382 4405 ('b', 'bad', False, _('mark changeset bad')),
4383 4406 ('s', 'skip', False, _('skip testing changeset')),
4384 4407 ('e', 'extend', False, _('extend the bisect range')),
4385 4408 ('c', 'command', '',
4386 4409 _('use command to check changeset state'), _('CMD')),
4387 4410 ('U', 'noupdate', False, _('do not update to target'))],
4388 4411 _("[-gbsr] [-U] [-c CMD] [REV]")),
4389 4412 "bookmarks":
4390 4413 (bookmark,
4391 4414 [('f', 'force', False, _('force')),
4392 4415 ('r', 'rev', '', _('revision'), _('REV')),
4393 4416 ('d', 'delete', False, _('delete a given bookmark')),
4394 4417 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4395 4418 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4396 4419 "branch":
4397 4420 (branch,
4398 4421 [('f', 'force', None,
4399 4422 _('set branch name even if it shadows an existing branch')),
4400 4423 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4401 4424 _('[-fC] [NAME]')),
4402 4425 "branches":
4403 4426 (branches,
4404 4427 [('a', 'active', False,
4405 4428 _('show only branches that have unmerged heads')),
4406 4429 ('c', 'closed', False,
4407 4430 _('show normal and closed branches'))],
4408 4431 _('[-ac]')),
4409 4432 "bundle":
4410 4433 (bundle,
4411 4434 [('f', 'force', None,
4412 4435 _('run even when the destination is unrelated')),
4413 4436 ('r', 'rev', [],
4414 4437 _('a changeset intended to be added to the destination'),
4415 4438 _('REV')),
4416 4439 ('b', 'branch', [],
4417 4440 _('a specific branch you would like to bundle'),
4418 4441 _('BRANCH')),
4419 4442 ('', 'base', [],
4420 4443 _('a base changeset assumed to be available at the destination'),
4421 4444 _('REV')),
4422 4445 ('a', 'all', None, _('bundle all changesets in the repository')),
4423 4446 ('t', 'type', 'bzip2',
4424 4447 _('bundle compression type to use'), _('TYPE')),
4425 4448 ] + remoteopts,
4426 4449 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4427 4450 "cat":
4428 4451 (cat,
4429 4452 [('o', 'output', '',
4430 4453 _('print output to file with formatted name'), _('FORMAT')),
4431 4454 ('r', 'rev', '',
4432 4455 _('print the given revision'), _('REV')),
4433 4456 ('', 'decode', None, _('apply any matching decode filter')),
4434 4457 ] + walkopts,
4435 4458 _('[OPTION]... FILE...')),
4436 4459 "^clone":
4437 4460 (clone,
4438 4461 [('U', 'noupdate', None,
4439 4462 _('the clone will include an empty working copy (only a repository)')),
4440 4463 ('u', 'updaterev', '',
4441 4464 _('revision, tag or branch to check out'), _('REV')),
4442 4465 ('r', 'rev', [],
4443 4466 _('include the specified changeset'), _('REV')),
4444 4467 ('b', 'branch', [],
4445 4468 _('clone only the specified branch'), _('BRANCH')),
4446 4469 ('', 'pull', None, _('use pull protocol to copy metadata')),
4447 4470 ('', 'uncompressed', None,
4448 4471 _('use uncompressed transfer (fast over LAN)')),
4449 4472 ] + remoteopts,
4450 4473 _('[OPTION]... SOURCE [DEST]')),
4451 4474 "^commit|ci":
4452 4475 (commit,
4453 4476 [('A', 'addremove', None,
4454 4477 _('mark new/missing files as added/removed before committing')),
4455 4478 ('', 'close-branch', None,
4456 4479 _('mark a branch as closed, hiding it from the branch list')),
4457 4480 ] + walkopts + commitopts + commitopts2,
4458 4481 _('[OPTION]... [FILE]...')),
4459 4482 "copy|cp":
4460 4483 (copy,
4461 4484 [('A', 'after', None, _('record a copy that has already occurred')),
4462 4485 ('f', 'force', None,
4463 4486 _('forcibly copy over an existing managed file')),
4464 4487 ] + walkopts + dryrunopts,
4465 4488 _('[OPTION]... [SOURCE]... DEST')),
4466 4489 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4467 4490 "debugbuilddag":
4468 4491 (debugbuilddag,
4469 4492 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4470 4493 ('a', 'appended-file', None, _('add single file all revs append to')),
4471 4494 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4472 4495 ('n', 'new-file', None, _('add new file at each rev')),
4473 4496 ],
4474 4497 _('[OPTION]... TEXT')),
4475 4498 "debugbundle":
4476 4499 (debugbundle,
4477 4500 [('a', 'all', None, _('show all details')),
4478 4501 ],
4479 4502 _('FILE')),
4480 4503 "debugcheckstate": (debugcheckstate, [], ''),
4481 4504 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4482 4505 "debugcomplete":
4483 4506 (debugcomplete,
4484 4507 [('o', 'options', None, _('show the command options'))],
4485 4508 _('[-o] CMD')),
4486 4509 "debugdag":
4487 4510 (debugdag,
4488 4511 [('t', 'tags', None, _('use tags as labels')),
4489 4512 ('b', 'branches', None, _('annotate with branch names')),
4490 4513 ('', 'dots', None, _('use dots for runs')),
4491 4514 ('s', 'spaces', None, _('separate elements by spaces')),
4492 4515 ],
4493 4516 _('[OPTION]... [FILE [REV]...]')),
4494 4517 "debugdate":
4495 4518 (debugdate,
4496 4519 [('e', 'extended', None, _('try extended date formats'))],
4497 4520 _('[-e] DATE [RANGE]')),
4498 4521 "debugdata": (debugdata, [], _('FILE REV')),
4499 4522 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4523 "debuggetbundle":
4524 (debuggetbundle,
4525 [('H', 'head', [], _('id of head node'), _('ID')),
4526 ('C', 'common', [], _('id of common node'), _('ID')),
4527 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4528 ],
4529 _('REPO FILE [-H|-C ID]...')),
4500 4530 "debugignore": (debugignore, [], ''),
4501 4531 "debugindex": (debugindex,
4502 4532 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4503 4533 _('FILE')),
4504 4534 "debugindexdot": (debugindexdot, [], _('FILE')),
4505 4535 "debuginstall": (debuginstall, [], ''),
4506 4536 "debugknown": (debugknown, [], _('REPO ID...')),
4507 4537 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4508 4538 "debugrebuildstate":
4509 4539 (debugrebuildstate,
4510 4540 [('r', 'rev', '',
4511 4541 _('revision to rebuild to'), _('REV'))],
4512 4542 _('[-r REV] [REV]')),
4513 4543 "debugrename":
4514 4544 (debugrename,
4515 4545 [('r', 'rev', '',
4516 4546 _('revision to debug'), _('REV'))],
4517 4547 _('[-r REV] FILE')),
4518 4548 "debugrevspec":
4519 4549 (debugrevspec, [], ('REVSPEC')),
4520 4550 "debugsetparents":
4521 4551 (debugsetparents, [], _('REV1 [REV2]')),
4522 4552 "debugstate":
4523 4553 (debugstate,
4524 4554 [('', 'nodates', None, _('do not display the saved mtime'))],
4525 4555 _('[OPTION]...')),
4526 4556 "debugsub":
4527 4557 (debugsub,
4528 4558 [('r', 'rev', '',
4529 4559 _('revision to check'), _('REV'))],
4530 4560 _('[-r REV] [REV]')),
4531 4561 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4532 4562 "debugwireargs":
4533 4563 (debugwireargs,
4534 4564 [('', 'three', '', 'three'),
4535 4565 ('', 'four', '', 'four'),
4536 4566 ] + remoteopts,
4537 4567 _('REPO [OPTIONS]... [ONE [TWO]]')),
4538 4568 "^diff":
4539 4569 (diff,
4540 4570 [('r', 'rev', [],
4541 4571 _('revision'), _('REV')),
4542 4572 ('c', 'change', '',
4543 4573 _('change made by revision'), _('REV'))
4544 4574 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4545 4575 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4546 4576 "^export":
4547 4577 (export,
4548 4578 [('o', 'output', '',
4549 4579 _('print output to file with formatted name'), _('FORMAT')),
4550 4580 ('', 'switch-parent', None, _('diff against the second parent')),
4551 4581 ('r', 'rev', [],
4552 4582 _('revisions to export'), _('REV')),
4553 4583 ] + diffopts,
4554 4584 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4555 4585 "^forget":
4556 4586 (forget,
4557 4587 [] + walkopts,
4558 4588 _('[OPTION]... FILE...')),
4559 4589 "grep":
4560 4590 (grep,
4561 4591 [('0', 'print0', None, _('end fields with NUL')),
4562 4592 ('', 'all', None, _('print all revisions that match')),
4563 4593 ('f', 'follow', None,
4564 4594 _('follow changeset history,'
4565 4595 ' or file history across copies and renames')),
4566 4596 ('i', 'ignore-case', None, _('ignore case when matching')),
4567 4597 ('l', 'files-with-matches', None,
4568 4598 _('print only filenames and revisions that match')),
4569 4599 ('n', 'line-number', None, _('print matching line numbers')),
4570 4600 ('r', 'rev', [],
4571 4601 _('only search files changed within revision range'), _('REV')),
4572 4602 ('u', 'user', None, _('list the author (long with -v)')),
4573 4603 ('d', 'date', None, _('list the date (short with -q)')),
4574 4604 ] + walkopts,
4575 4605 _('[OPTION]... PATTERN [FILE]...')),
4576 4606 "heads":
4577 4607 (heads,
4578 4608 [('r', 'rev', '',
4579 4609 _('show only heads which are descendants of STARTREV'),
4580 4610 _('STARTREV')),
4581 4611 ('t', 'topo', False, _('show topological heads only')),
4582 4612 ('a', 'active', False,
4583 4613 _('show active branchheads only (DEPRECATED)')),
4584 4614 ('c', 'closed', False,
4585 4615 _('show normal and closed branch heads')),
4586 4616 ] + templateopts,
4587 4617 _('[-ac] [-r STARTREV] [REV]...')),
4588 4618 "help": (help_, [], _('[TOPIC]')),
4589 4619 "identify|id":
4590 4620 (identify,
4591 4621 [('r', 'rev', '',
4592 4622 _('identify the specified revision'), _('REV')),
4593 4623 ('n', 'num', None, _('show local revision number')),
4594 4624 ('i', 'id', None, _('show global revision id')),
4595 4625 ('b', 'branch', None, _('show branch')),
4596 4626 ('t', 'tags', None, _('show tags')),
4597 4627 ('B', 'bookmarks', None, _('show bookmarks'))],
4598 4628 _('[-nibtB] [-r REV] [SOURCE]')),
4599 4629 "import|patch":
4600 4630 (import_,
4601 4631 [('p', 'strip', 1,
4602 4632 _('directory strip option for patch. This has the same '
4603 4633 'meaning as the corresponding patch option'),
4604 4634 _('NUM')),
4605 4635 ('b', 'base', '',
4606 4636 _('base path'), _('PATH')),
4607 4637 ('f', 'force', None,
4608 4638 _('skip check for outstanding uncommitted changes')),
4609 4639 ('', 'no-commit', None,
4610 4640 _("don't commit, just update the working directory")),
4611 4641 ('', 'exact', None,
4612 4642 _('apply patch to the nodes from which it was generated')),
4613 4643 ('', 'import-branch', None,
4614 4644 _('use any branch information in patch (implied by --exact)'))] +
4615 4645 commitopts + commitopts2 + similarityopts,
4616 4646 _('[OPTION]... PATCH...')),
4617 4647 "incoming|in":
4618 4648 (incoming,
4619 4649 [('f', 'force', None,
4620 4650 _('run even if remote repository is unrelated')),
4621 4651 ('n', 'newest-first', None, _('show newest record first')),
4622 4652 ('', 'bundle', '',
4623 4653 _('file to store the bundles into'), _('FILE')),
4624 4654 ('r', 'rev', [],
4625 4655 _('a remote changeset intended to be added'), _('REV')),
4626 4656 ('B', 'bookmarks', False, _("compare bookmarks")),
4627 4657 ('b', 'branch', [],
4628 4658 _('a specific branch you would like to pull'), _('BRANCH')),
4629 4659 ] + logopts + remoteopts + subrepoopts,
4630 4660 _('[-p] [-n] [-M] [-f] [-r REV]...'
4631 4661 ' [--bundle FILENAME] [SOURCE]')),
4632 4662 "^init":
4633 4663 (init,
4634 4664 remoteopts,
4635 4665 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4636 4666 "locate":
4637 4667 (locate,
4638 4668 [('r', 'rev', '',
4639 4669 _('search the repository as it is in REV'), _('REV')),
4640 4670 ('0', 'print0', None,
4641 4671 _('end filenames with NUL, for use with xargs')),
4642 4672 ('f', 'fullpath', None,
4643 4673 _('print complete paths from the filesystem root')),
4644 4674 ] + walkopts,
4645 4675 _('[OPTION]... [PATTERN]...')),
4646 4676 "^log|history":
4647 4677 (log,
4648 4678 [('f', 'follow', None,
4649 4679 _('follow changeset history,'
4650 4680 ' or file history across copies and renames')),
4651 4681 ('', 'follow-first', None,
4652 4682 _('only follow the first parent of merge changesets')),
4653 4683 ('d', 'date', '',
4654 4684 _('show revisions matching date spec'), _('DATE')),
4655 4685 ('C', 'copies', None, _('show copied files')),
4656 4686 ('k', 'keyword', [],
4657 4687 _('do case-insensitive search for a given text'), _('TEXT')),
4658 4688 ('r', 'rev', [],
4659 4689 _('show the specified revision or range'), _('REV')),
4660 4690 ('', 'removed', None, _('include revisions where files were removed')),
4661 4691 ('m', 'only-merges', None, _('show only merges')),
4662 4692 ('u', 'user', [],
4663 4693 _('revisions committed by user'), _('USER')),
4664 4694 ('', 'only-branch', [],
4665 4695 _('show only changesets within the given named branch (DEPRECATED)'),
4666 4696 _('BRANCH')),
4667 4697 ('b', 'branch', [],
4668 4698 _('show changesets within the given named branch'), _('BRANCH')),
4669 4699 ('P', 'prune', [],
4670 4700 _('do not display revision or any of its ancestors'), _('REV')),
4671 4701 ] + logopts + walkopts,
4672 4702 _('[OPTION]... [FILE]')),
4673 4703 "manifest":
4674 4704 (manifest,
4675 4705 [('r', 'rev', '',
4676 4706 _('revision to display'), _('REV'))],
4677 4707 _('[-r REV]')),
4678 4708 "^merge":
4679 4709 (merge,
4680 4710 [('f', 'force', None, _('force a merge with outstanding changes')),
4681 4711 ('t', 'tool', '', _('specify merge tool')),
4682 4712 ('r', 'rev', '',
4683 4713 _('revision to merge'), _('REV')),
4684 4714 ('P', 'preview', None,
4685 4715 _('review revisions to merge (no merge is performed)'))],
4686 4716 _('[-P] [-f] [[-r] REV]')),
4687 4717 "outgoing|out":
4688 4718 (outgoing,
4689 4719 [('f', 'force', None,
4690 4720 _('run even when the destination is unrelated')),
4691 4721 ('r', 'rev', [],
4692 4722 _('a changeset intended to be included in the destination'),
4693 4723 _('REV')),
4694 4724 ('n', 'newest-first', None, _('show newest record first')),
4695 4725 ('B', 'bookmarks', False, _("compare bookmarks")),
4696 4726 ('b', 'branch', [],
4697 4727 _('a specific branch you would like to push'), _('BRANCH')),
4698 4728 ] + logopts + remoteopts + subrepoopts,
4699 4729 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4700 4730 "parents":
4701 4731 (parents,
4702 4732 [('r', 'rev', '',
4703 4733 _('show parents of the specified revision'), _('REV')),
4704 4734 ] + templateopts,
4705 4735 _('[-r REV] [FILE]')),
4706 4736 "paths": (paths, [], _('[NAME]')),
4707 4737 "^pull":
4708 4738 (pull,
4709 4739 [('u', 'update', None,
4710 4740 _('update to new branch head if changesets were pulled')),
4711 4741 ('f', 'force', None,
4712 4742 _('run even when remote repository is unrelated')),
4713 4743 ('r', 'rev', [],
4714 4744 _('a remote changeset intended to be added'), _('REV')),
4715 4745 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4716 4746 ('b', 'branch', [],
4717 4747 _('a specific branch you would like to pull'), _('BRANCH')),
4718 4748 ] + remoteopts,
4719 4749 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4720 4750 "^push":
4721 4751 (push,
4722 4752 [('f', 'force', None, _('force push')),
4723 4753 ('r', 'rev', [],
4724 4754 _('a changeset intended to be included in the destination'),
4725 4755 _('REV')),
4726 4756 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4727 4757 ('b', 'branch', [],
4728 4758 _('a specific branch you would like to push'), _('BRANCH')),
4729 4759 ('', 'new-branch', False, _('allow pushing a new branch')),
4730 4760 ] + remoteopts,
4731 4761 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4732 4762 "recover": (recover, []),
4733 4763 "^remove|rm":
4734 4764 (remove,
4735 4765 [('A', 'after', None, _('record delete for missing files')),
4736 4766 ('f', 'force', None,
4737 4767 _('remove (and delete) file even if added or modified')),
4738 4768 ] + walkopts,
4739 4769 _('[OPTION]... FILE...')),
4740 4770 "rename|move|mv":
4741 4771 (rename,
4742 4772 [('A', 'after', None, _('record a rename that has already occurred')),
4743 4773 ('f', 'force', None,
4744 4774 _('forcibly copy over an existing managed file')),
4745 4775 ] + walkopts + dryrunopts,
4746 4776 _('[OPTION]... SOURCE... DEST')),
4747 4777 "resolve":
4748 4778 (resolve,
4749 4779 [('a', 'all', None, _('select all unresolved files')),
4750 4780 ('l', 'list', None, _('list state of files needing merge')),
4751 4781 ('m', 'mark', None, _('mark files as resolved')),
4752 4782 ('u', 'unmark', None, _('mark files as unresolved')),
4753 4783 ('t', 'tool', '', _('specify merge tool')),
4754 4784 ('n', 'no-status', None, _('hide status prefix'))]
4755 4785 + walkopts,
4756 4786 _('[OPTION]... [FILE]...')),
4757 4787 "revert":
4758 4788 (revert,
4759 4789 [('a', 'all', None, _('revert all changes when no arguments given')),
4760 4790 ('d', 'date', '',
4761 4791 _('tipmost revision matching date'), _('DATE')),
4762 4792 ('r', 'rev', '',
4763 4793 _('revert to the specified revision'), _('REV')),
4764 4794 ('', 'no-backup', None, _('do not save backup copies of files')),
4765 4795 ] + walkopts + dryrunopts,
4766 4796 _('[OPTION]... [-r REV] [NAME]...')),
4767 4797 "rollback": (rollback, dryrunopts),
4768 4798 "root": (root, []),
4769 4799 "^serve":
4770 4800 (serve,
4771 4801 [('A', 'accesslog', '',
4772 4802 _('name of access log file to write to'), _('FILE')),
4773 4803 ('d', 'daemon', None, _('run server in background')),
4774 4804 ('', 'daemon-pipefds', '',
4775 4805 _('used internally by daemon mode'), _('NUM')),
4776 4806 ('E', 'errorlog', '',
4777 4807 _('name of error log file to write to'), _('FILE')),
4778 4808 # use string type, then we can check if something was passed
4779 4809 ('p', 'port', '',
4780 4810 _('port to listen on (default: 8000)'), _('PORT')),
4781 4811 ('a', 'address', '',
4782 4812 _('address to listen on (default: all interfaces)'), _('ADDR')),
4783 4813 ('', 'prefix', '',
4784 4814 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4785 4815 ('n', 'name', '',
4786 4816 _('name to show in web pages (default: working directory)'),
4787 4817 _('NAME')),
4788 4818 ('', 'web-conf', '',
4789 4819 _('name of the hgweb config file (see "hg help hgweb")'),
4790 4820 _('FILE')),
4791 4821 ('', 'webdir-conf', '',
4792 4822 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4793 4823 ('', 'pid-file', '',
4794 4824 _('name of file to write process ID to'), _('FILE')),
4795 4825 ('', 'stdio', None, _('for remote clients')),
4796 4826 ('t', 'templates', '',
4797 4827 _('web templates to use'), _('TEMPLATE')),
4798 4828 ('', 'style', '',
4799 4829 _('template style to use'), _('STYLE')),
4800 4830 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4801 4831 ('', 'certificate', '',
4802 4832 _('SSL certificate file'), _('FILE'))],
4803 4833 _('[OPTION]...')),
4804 4834 "showconfig|debugconfig":
4805 4835 (showconfig,
4806 4836 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4807 4837 _('[-u] [NAME]...')),
4808 4838 "^summary|sum":
4809 4839 (summary,
4810 4840 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4811 4841 "^status|st":
4812 4842 (status,
4813 4843 [('A', 'all', None, _('show status of all files')),
4814 4844 ('m', 'modified', None, _('show only modified files')),
4815 4845 ('a', 'added', None, _('show only added files')),
4816 4846 ('r', 'removed', None, _('show only removed files')),
4817 4847 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4818 4848 ('c', 'clean', None, _('show only files without changes')),
4819 4849 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4820 4850 ('i', 'ignored', None, _('show only ignored files')),
4821 4851 ('n', 'no-status', None, _('hide status prefix')),
4822 4852 ('C', 'copies', None, _('show source of copied files')),
4823 4853 ('0', 'print0', None,
4824 4854 _('end filenames with NUL, for use with xargs')),
4825 4855 ('', 'rev', [],
4826 4856 _('show difference from revision'), _('REV')),
4827 4857 ('', 'change', '',
4828 4858 _('list the changed files of a revision'), _('REV')),
4829 4859 ] + walkopts + subrepoopts,
4830 4860 _('[OPTION]... [FILE]...')),
4831 4861 "tag":
4832 4862 (tag,
4833 4863 [('f', 'force', None, _('force tag')),
4834 4864 ('l', 'local', None, _('make the tag local')),
4835 4865 ('r', 'rev', '',
4836 4866 _('revision to tag'), _('REV')),
4837 4867 ('', 'remove', None, _('remove a tag')),
4838 4868 # -l/--local is already there, commitopts cannot be used
4839 4869 ('e', 'edit', None, _('edit commit message')),
4840 4870 ('m', 'message', '',
4841 4871 _('use <text> as commit message'), _('TEXT')),
4842 4872 ] + commitopts2,
4843 4873 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4844 4874 "tags": (tags, [], ''),
4845 4875 "tip":
4846 4876 (tip,
4847 4877 [('p', 'patch', None, _('show patch')),
4848 4878 ('g', 'git', None, _('use git extended diff format')),
4849 4879 ] + templateopts,
4850 4880 _('[-p] [-g]')),
4851 4881 "unbundle":
4852 4882 (unbundle,
4853 4883 [('u', 'update', None,
4854 4884 _('update to new branch head if changesets were unbundled'))],
4855 4885 _('[-u] FILE...')),
4856 4886 "^update|up|checkout|co":
4857 4887 (update,
4858 4888 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4859 4889 ('c', 'check', None,
4860 4890 _('update across branches if no uncommitted changes')),
4861 4891 ('d', 'date', '',
4862 4892 _('tipmost revision matching date'), _('DATE')),
4863 4893 ('r', 'rev', '',
4864 4894 _('revision'), _('REV'))],
4865 4895 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4866 4896 "verify": (verify, []),
4867 4897 "version": (version_, []),
4868 4898 }
4869 4899
4870 4900 norepo = ("clone init version help debugcommands debugcomplete"
4871 4901 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4872 " debugknown debugbundle")
4902 " debugknown debuggetbundle debugbundle")
4873 4903 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4874 4904 " debugdata debugindex debugindexdot")
@@ -1,294 +1,295 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import os
10 10 from mercurial import ui, hg, hook, error, encoding, templater
11 11 from common import get_mtime, ErrorResponse, permhooks, caching
12 12 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
13 13 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 14 from request import wsgirequest
15 15 import webcommands, protocol, webutil
16 16
17 17 perms = {
18 18 'changegroup': 'pull',
19 19 'changegroupsubset': 'pull',
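    # getbundle only reads from the repository, so, like changegroup and
    # changegroupsubset, it is gated on 'pull' access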
20 'getbundle': 'pull',
20 21 'stream_out': 'pull',
21 22 'listkeys': 'pull',
22 23 'unbundle': 'push',
23 24 'pushkey': 'push',
24 25 }
25 26
26 27 class hgweb(object):
27 28 def __init__(self, repo, name=None, baseui=None):
28 29 if isinstance(repo, str):
29 30 if baseui:
30 31 u = baseui.copy()
31 32 else:
32 33 u = ui.ui()
33 34 self.repo = hg.repository(u, repo)
34 35 else:
35 36 self.repo = repo
36 37
37 38 self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
38 39 self.repo.ui.setconfig('ui', 'interactive', 'off')
39 40 hook.redirect(True)
40 41 self.mtime = -1
41 42 self.reponame = name
42 43 self.archives = 'zip', 'gz', 'bz2'
43 44 self.stripecount = 1
44 45 # a repo owner may set web.templates in .hg/hgrc to get any file
45 46 # readable by the user running the CGI script
46 47 self.templatepath = self.config('web', 'templates')
47 48
48 49 # The CGI scripts are often run by a user different from the repo owner.
49 50 # Trust the settings from the .hg/hgrc files by default.
50 51 def config(self, section, name, default=None, untrusted=True):
51 52 return self.repo.ui.config(section, name, default,
52 53 untrusted=untrusted)
53 54
54 55 def configbool(self, section, name, default=False, untrusted=True):
55 56 return self.repo.ui.configbool(section, name, default,
56 57 untrusted=untrusted)
57 58
58 59 def configlist(self, section, name, default=None, untrusted=True):
59 60 return self.repo.ui.configlist(section, name, default,
60 61 untrusted=untrusted)
61 62
62 63 def refresh(self, request=None):
63 64 if request:
64 65 self.repo.ui.environ = request.env
65 66 mtime = get_mtime(self.repo.spath)
66 67 if mtime != self.mtime:
67 68 self.mtime = mtime
68 69 self.repo = hg.repository(self.repo.ui, self.repo.root)
69 70 self.maxchanges = int(self.config("web", "maxchanges", 10))
70 71 self.stripecount = int(self.config("web", "stripes", 1))
71 72 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
72 73 self.maxfiles = int(self.config("web", "maxfiles", 10))
73 74 self.allowpull = self.configbool("web", "allowpull", True)
74 75 encoding.encoding = self.config("web", "encoding",
75 76 encoding.encoding)
76 77
77 78 def run(self):
78 79 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
79 80 raise RuntimeError("This function is only intended to be "
80 81 "called while running as a CGI script.")
81 82 import mercurial.hgweb.wsgicgi as wsgicgi
82 83 wsgicgi.launch(self)
83 84
84 85 def __call__(self, env, respond):
85 86 req = wsgirequest(env, respond)
86 87 return self.run_wsgi(req)
87 88
88 89 def run_wsgi(self, req):
89 90
90 91 self.refresh(req)
91 92
92 93 # work with CGI variables to create coherent structure
93 94 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
94 95
95 96 req.url = req.env['SCRIPT_NAME']
96 97 if not req.url.endswith('/'):
97 98 req.url += '/'
98 99 if 'REPO_NAME' in req.env:
99 100 req.url += req.env['REPO_NAME'] + '/'
100 101
101 102 if 'PATH_INFO' in req.env:
102 103 parts = req.env['PATH_INFO'].strip('/').split('/')
103 104 repo_parts = req.env.get('REPO_NAME', '').split('/')
104 105 if parts[:len(repo_parts)] == repo_parts:
105 106 parts = parts[len(repo_parts):]
106 107 query = '/'.join(parts)
107 108 else:
108 109 query = req.env['QUERY_STRING'].split('&', 1)[0]
109 110 query = query.split(';', 1)[0]
110 111
111 112 # process this if it's a protocol request
112 113 # protocol bits don't need to create any URLs
113 114 # and the clients always use the old URL structure
114 115
115 116 cmd = req.form.get('cmd', [''])[0]
116 117 if protocol.iscmd(cmd):
117 118 try:
118 119 if query:
119 120 raise ErrorResponse(HTTP_NOT_FOUND)
120 121 if cmd in perms:
121 122 self.check_perm(req, perms[cmd])
122 123 return protocol.call(self.repo, req, cmd)
123 124 except ErrorResponse, inst:
124 125 # A client that sends unbundle without 100-continue will
125 126 # break if we respond early.
126 127 if (cmd == 'unbundle' and
127 128 req.env.get('HTTP_EXPECT',
128 129 '').lower() != '100-continue'):
129 130 req.drain()
130 131 req.respond(inst, protocol.HGTYPE)
131 132 return '0\n%s\n' % inst.message
132 133
133 134 # translate user-visible url structure to internal structure
134 135
135 136 args = query.split('/', 2)
136 137 if 'cmd' not in req.form and args and args[0]:
137 138
138 139 cmd = args.pop(0)
139 140 style = cmd.rfind('-')
140 141 if style != -1:
141 142 req.form['style'] = [cmd[:style]]
142 143 cmd = cmd[style + 1:]
143 144
144 145 # avoid accepting e.g. style parameter as command
145 146 if hasattr(webcommands, cmd):
146 147 req.form['cmd'] = [cmd]
147 148 else:
148 149 cmd = ''
149 150
150 151 if cmd == 'static':
151 152 req.form['file'] = ['/'.join(args)]
152 153 else:
153 154 if args and args[0]:
154 155 node = args.pop(0)
155 156 req.form['node'] = [node]
156 157 if args:
157 158 req.form['file'] = args
158 159
159 160 ua = req.env.get('HTTP_USER_AGENT', '')
160 161 if cmd == 'rev' and 'mercurial' in ua:
161 162 req.form['style'] = ['raw']
162 163
163 164 if cmd == 'archive':
164 165 fn = req.form['node'][0]
165 166 for type_, spec in self.archive_specs.iteritems():
166 167 ext = spec[2]
167 168 if fn.endswith(ext):
168 169 req.form['node'] = [fn[:-len(ext)]]
169 170 req.form['type'] = [type_]
170 171
171 172 # process the web interface request
172 173
173 174 try:
174 175 tmpl = self.templater(req)
175 176 ctype = tmpl('mimetype', encoding=encoding.encoding)
176 177 ctype = templater.stringify(ctype)
177 178
178 179 # check read permissions non-static content
179 180 if cmd != 'static':
180 181 self.check_perm(req, None)
181 182
182 183 if cmd == '':
183 184 req.form['cmd'] = [tmpl.cache['default']]
184 185 cmd = req.form['cmd'][0]
185 186
186 187 caching(self, req) # sets ETag header or raises NOT_MODIFIED
187 188 if cmd not in webcommands.__all__:
188 189 msg = 'no such method: %s' % cmd
189 190 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
190 191 elif cmd == 'file' and 'raw' in req.form.get('style', []):
191 192 self.ctype = ctype
192 193 content = webcommands.rawfile(self, req, tmpl)
193 194 else:
194 195 content = getattr(webcommands, cmd)(self, req, tmpl)
195 196 req.respond(HTTP_OK, ctype)
196 197
197 198 return content
198 199
199 200 except error.LookupError, err:
200 201 req.respond(HTTP_NOT_FOUND, ctype)
201 202 msg = str(err)
202 203 if 'manifest' not in msg:
203 204 msg = 'revision not found: %s' % err.name
204 205 return tmpl('error', error=msg)
205 206 except (error.RepoError, error.RevlogError), inst:
206 207 req.respond(HTTP_SERVER_ERROR, ctype)
207 208 return tmpl('error', error=str(inst))
208 209 except ErrorResponse, inst:
209 210 req.respond(inst, ctype)
210 211 if inst.code == HTTP_NOT_MODIFIED:
211 212 # Not allowed to return a body on a 304
212 213 return ['']
213 214 return tmpl('error', error=inst.message)
214 215
215 216 def templater(self, req):
216 217
217 218 # determine scheme, port and server name
218 219 # this is needed to create absolute urls
219 220
220 221 proto = req.env.get('wsgi.url_scheme')
221 222 if proto == 'https':
222 223 proto = 'https'
223 224 default_port = "443"
224 225 else:
225 226 proto = 'http'
226 227 default_port = "80"
227 228
228 229 port = req.env["SERVER_PORT"]
229 230 port = port != default_port and (":" + port) or ""
230 231 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
231 232 staticurl = self.config("web", "staticurl") or req.url + 'static/'
232 233 if not staticurl.endswith('/'):
233 234 staticurl += '/'
234 235
235 236 # some functions for the templater
236 237
237 238 def header(**map):
238 239 yield tmpl('header', encoding=encoding.encoding, **map)
239 240
240 241 def footer(**map):
241 242 yield tmpl("footer", **map)
242 243
243 244 def motd(**map):
244 245 yield self.config("web", "motd", "")
245 246
246 247 # figure out which style to use
247 248
248 249 vars = {}
249 250 styles = (
250 251 req.form.get('style', [None])[0],
251 252 self.config('web', 'style'),
252 253 'paper',
253 254 )
254 255 style, mapfile = templater.stylemap(styles, self.templatepath)
255 256 if style == styles[0]:
256 257 vars['style'] = style
257 258
258 259 start = req.url[-1] == '?' and '&' or '?'
259 260 sessionvars = webutil.sessionvars(vars, start)
260 261
261 262 if not self.reponame:
262 263 self.reponame = (self.config("web", "name")
263 264 or req.env.get('REPO_NAME')
264 265 or req.url.strip('/') or self.repo.root)
265 266
266 267 # create the templater
267 268
268 269 tmpl = templater.templater(mapfile,
269 270 defaults={"url": req.url,
270 271 "staticurl": staticurl,
271 272 "urlbase": urlbase,
272 273 "repo": self.reponame,
273 274 "header": header,
274 275 "footer": footer,
275 276 "motd": motd,
276 277 "sessionvars": sessionvars
277 278 })
278 279 return tmpl
279 280
280 281 def archivelist(self, nodeid):
281 282 allowed = self.configlist("web", "allow_archive")
282 283 for i, spec in self.archive_specs.iteritems():
283 284 if i in allowed or self.configbool("web", "allow" + i):
284 285 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
285 286
286 287 archive_specs = {
287 288 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
288 289 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
289 290 'zip': ('application/zip', 'zip', '.zip', None),
290 291 }
291 292
292 293 def check_perm(self, req, op):
293 294 for hook in permhooks:
294 295 hook(self, req, op)
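
On the hgweb side, adding getbundle to the perms table is all the permission handling the command needs: run_wsgi() looks the command up in perms and calls check_perm() with the listed requirement before handing the request to protocol.call(). A standalone sketch of that lookup, with a copy of the table and a hypothetical allowed() helper standing in for the permhooks, could read:

# Mirrors the perms table at the top of hgweb_mod.py; 'getbundle' only
# reads data, so it needs the same right as changegroup/changegroupsubset.
perms = {
    'changegroup': 'pull',
    'changegroupsubset': 'pull',
    'getbundle': 'pull',
    'stream_out': 'pull',
    'listkeys': 'pull',
    'unbundle': 'push',
    'pushkey': 'push',
}

def allowed(cmd, granted):
    """Return True if a client holding the rights in ``granted`` (for
    example set(['pull'])) may run wire command ``cmd``.  Commands not
    listed in perms are not gated at this layer, mirroring run_wsgi."""
    need = perms.get(cmd)
    return need is None or need in granted

print allowed('getbundle', set(['pull']))  # True: pull access is enough
print allowed('unbundle', set(['pull']))   # False: pushing needs 'push'
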
@@ -1,1929 +1,1951 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 import url as urlmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 propertycache = util.propertycache
21 21
22 22 class localrepository(repo.repository):
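    # capabilities advertised to clients over the wire protocol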
23 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 'known'))
24 'known', 'getbundle'))
25 25 supportedformats = set(('revlogv1', 'parentdelta'))
26 26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 27 'dotencode'))
28 28
29 29 def __init__(self, baseui, path=None, create=0):
30 30 repo.repository.__init__(self)
31 31 self.root = os.path.realpath(util.expandpath(path))
32 32 self.path = os.path.join(self.root, ".hg")
33 33 self.origroot = path
34 34 self.auditor = util.path_auditor(self.root, self._checknested)
35 35 self.opener = util.opener(self.path)
36 36 self.wopener = util.opener(self.root)
37 37 self.baseui = baseui
38 38 self.ui = baseui.copy()
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 extensions.loadall(self.ui)
43 43 except IOError:
44 44 pass
45 45
46 46 if not os.path.isdir(self.path):
47 47 if create:
48 48 if not os.path.exists(path):
49 49 util.makedirs(path)
50 50 os.mkdir(self.path)
51 51 requirements = ["revlogv1"]
52 52 if self.ui.configbool('format', 'usestore', True):
53 53 os.mkdir(os.path.join(self.path, "store"))
54 54 requirements.append("store")
55 55 if self.ui.configbool('format', 'usefncache', True):
56 56 requirements.append("fncache")
57 57 if self.ui.configbool('format', 'dotencode', True):
58 58 requirements.append('dotencode')
59 59 # create an invalid changelog
60 60 self.opener("00changelog.i", "a").write(
61 61 '\0\0\0\2' # represents revlogv2
62 62 ' dummy changelog to prevent using the old repo layout'
63 63 )
64 64 if self.ui.configbool('format', 'parentdelta', False):
65 65 requirements.append("parentdelta")
66 66 else:
67 67 raise error.RepoError(_("repository %s not found") % path)
68 68 elif create:
69 69 raise error.RepoError(_("repository %s already exists") % path)
70 70 else:
71 71 # find requirements
72 72 requirements = set()
73 73 try:
74 74 requirements = set(self.opener("requires").read().splitlines())
75 75 except IOError, inst:
76 76 if inst.errno != errno.ENOENT:
77 77 raise
78 78 for r in requirements - self.supported:
79 79 raise error.RequirementError(
80 80 _("requirement '%s' not supported") % r)
81 81
82 82 self.sharedpath = self.path
83 83 try:
84 84 s = os.path.realpath(self.opener("sharedpath").read())
85 85 if not os.path.exists(s):
86 86 raise error.RepoError(
87 87 _('.hg/sharedpath points to nonexistent directory %s') % s)
88 88 self.sharedpath = s
89 89 except IOError, inst:
90 90 if inst.errno != errno.ENOENT:
91 91 raise
92 92
93 93 self.store = store.store(requirements, self.sharedpath, util.opener)
94 94 self.spath = self.store.path
95 95 self.sopener = self.store.opener
96 96 self.sjoin = self.store.join
97 97 self.opener.createmode = self.store.createmode
98 98 self._applyrequirements(requirements)
99 99 if create:
100 100 self._writerequirements()
101 101
102 102 # These two define the set of tags for this repository. _tags
103 103 # maps tag name to node; _tagtypes maps tag name to 'global' or
104 104 # 'local'. (Global tags are defined by .hgtags across all
105 105 # heads, and local tags are defined in .hg/localtags.) They
106 106 # constitute the in-memory cache of tags.
107 107 self._tags = None
108 108 self._tagtypes = None
109 109
110 110 self._branchcache = None
111 111 self._branchcachetip = None
112 112 self.nodetagscache = None
113 113 self.filterpats = {}
114 114 self._datafilters = {}
115 115 self._transref = self._lockref = self._wlockref = None
116 116
117 117 def _applyrequirements(self, requirements):
118 118 self.requirements = requirements
119 119 self.sopener.options = {}
120 120 if 'parentdelta' in requirements:
121 121 self.sopener.options['parentdelta'] = 1
122 122
123 123 def _writerequirements(self):
124 124 reqfile = self.opener("requires", "w")
125 125 for r in self.requirements:
126 126 reqfile.write("%s\n" % r)
127 127 reqfile.close()
128 128
129 129 def _checknested(self, path):
130 130 """Determine if path is a legal nested repository."""
131 131 if not path.startswith(self.root):
132 132 return False
133 133 subpath = path[len(self.root) + 1:]
134 134
135 135 # XXX: Checking against the current working copy is wrong in
136 136 # the sense that it can reject things like
137 137 #
138 138 # $ hg cat -r 10 sub/x.txt
139 139 #
140 140 # if sub/ is no longer a subrepository in the working copy
141 141 # parent revision.
142 142 #
143 143 # However, it can of course also allow things that would have
144 144 # been rejected before, such as the above cat command if sub/
145 145 # is a subrepository now, but was a normal directory before.
146 146 # The old path auditor would have rejected by mistake since it
147 147 # panics when it sees sub/.hg/.
148 148 #
149 149 # All in all, checking against the working copy seems sensible
150 150 # since we want to prevent access to nested repositories on
151 151 # the filesystem *now*.
152 152 ctx = self[None]
153 153 parts = util.splitpath(subpath)
154 154 while parts:
155 155 prefix = os.sep.join(parts)
156 156 if prefix in ctx.substate:
157 157 if prefix == subpath:
158 158 return True
159 159 else:
160 160 sub = ctx.sub(prefix)
161 161 return sub.checknested(subpath[len(prefix) + 1:])
162 162 else:
163 163 parts.pop()
164 164 return False
165 165
166 166 @util.propertycache
167 167 def _bookmarks(self):
168 168 return bookmarks.read(self)
169 169
170 170 @util.propertycache
171 171 def _bookmarkcurrent(self):
172 172 return bookmarks.readcurrent(self)
173 173
174 174 @propertycache
175 175 def changelog(self):
176 176 c = changelog.changelog(self.sopener)
177 177 if 'HG_PENDING' in os.environ:
178 178 p = os.environ['HG_PENDING']
179 179 if p.startswith(self.root):
180 180 c.readpending('00changelog.i.a')
181 181 self.sopener.options['defversion'] = c.version
182 182 return c
183 183
184 184 @propertycache
185 185 def manifest(self):
186 186 return manifest.manifest(self.sopener)
187 187
188 188 @propertycache
189 189 def dirstate(self):
190 190 warned = [0]
191 191 def validate(node):
192 192 try:
193 193 r = self.changelog.rev(node)
194 194 return node
195 195 except error.LookupError:
196 196 if not warned[0]:
197 197 warned[0] = True
198 198 self.ui.warn(_("warning: ignoring unknown"
199 199 " working parent %s!\n") % short(node))
200 200 return nullid
201 201
202 202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
203 203
204 204 def __getitem__(self, changeid):
205 205 if changeid is None:
206 206 return context.workingctx(self)
207 207 return context.changectx(self, changeid)
208 208
209 209 def __contains__(self, changeid):
210 210 try:
211 211 return bool(self.lookup(changeid))
212 212 except error.RepoLookupError:
213 213 return False
214 214
215 215 def __nonzero__(self):
216 216 return True
217 217
218 218 def __len__(self):
219 219 return len(self.changelog)
220 220
221 221 def __iter__(self):
222 222 for i in xrange(len(self)):
223 223 yield i
224 224
225 225 def url(self):
226 226 return 'file:' + self.root
227 227
228 228 def hook(self, name, throw=False, **args):
229 229 return hook.hook(self.ui, self, name, throw, **args)
230 230
231 231 tag_disallowed = ':\r\n'
232 232
233 233 def _tag(self, names, node, message, local, user, date, extra={}):
234 234 if isinstance(names, str):
235 235 allchars = names
236 236 names = (names,)
237 237 else:
238 238 allchars = ''.join(names)
239 239 for c in self.tag_disallowed:
240 240 if c in allchars:
241 241 raise util.Abort(_('%r cannot be used in a tag name') % c)
242 242
243 243 branches = self.branchmap()
244 244 for name in names:
245 245 self.hook('pretag', throw=True, node=hex(node), tag=name,
246 246 local=local)
247 247 if name in branches:
248 248 self.ui.warn(_("warning: tag %s conflicts with existing"
249 249 " branch name\n") % name)
250 250
251 251 def writetags(fp, names, munge, prevtags):
252 252 fp.seek(0, 2)
253 253 if prevtags and prevtags[-1] != '\n':
254 254 fp.write('\n')
255 255 for name in names:
256 256 m = munge and munge(name) or name
257 257 if self._tagtypes and name in self._tagtypes:
258 258 old = self._tags.get(name, nullid)
259 259 fp.write('%s %s\n' % (hex(old), m))
260 260 fp.write('%s %s\n' % (hex(node), m))
261 261 fp.close()
262 262
263 263 prevtags = ''
264 264 if local:
265 265 try:
266 266 fp = self.opener('localtags', 'r+')
267 267 except IOError:
268 268 fp = self.opener('localtags', 'a')
269 269 else:
270 270 prevtags = fp.read()
271 271
272 272 # local tags are stored in the current charset
273 273 writetags(fp, names, None, prevtags)
274 274 for name in names:
275 275 self.hook('tag', node=hex(node), tag=name, local=local)
276 276 return
277 277
278 278 try:
279 279 fp = self.wfile('.hgtags', 'rb+')
280 280 except IOError:
281 281 fp = self.wfile('.hgtags', 'ab')
282 282 else:
283 283 prevtags = fp.read()
284 284
285 285 # committed tags are stored in UTF-8
286 286 writetags(fp, names, encoding.fromlocal, prevtags)
287 287
288 288 fp.close()
289 289
290 290 if '.hgtags' not in self.dirstate:
291 291 self[None].add(['.hgtags'])
292 292
293 293 m = matchmod.exact(self.root, '', ['.hgtags'])
294 294 tagnode = self.commit(message, user, date, extra=extra, match=m)
295 295
296 296 for name in names:
297 297 self.hook('tag', node=hex(node), tag=name, local=local)
298 298
299 299 return tagnode
300 300
301 301 def tag(self, names, node, message, local, user, date):
302 302 '''tag a revision with one or more symbolic names.
303 303
304 304 names is a list of strings or, when adding a single tag, names may be a
305 305 string.
306 306
307 307 if local is True, the tags are stored in a per-repository file.
308 308 otherwise, they are stored in the .hgtags file, and a new
309 309 changeset is committed with the change.
310 310
311 311 keyword arguments:
312 312
313 313 local: whether to store tags in non-version-controlled file
314 314 (default False)
315 315
316 316 message: commit message to use if committing
317 317
318 318 user: name of user to use if committing
319 319
320 320 date: date tuple to use if committing'''
321 321
322 322 if not local:
323 323 for x in self.status()[:5]:
324 324 if '.hgtags' in x:
325 325 raise util.Abort(_('working copy of .hgtags is changed '
326 326 '(please commit .hgtags manually)'))
327 327
328 328 self.tags() # instantiate the cache
329 329 self._tag(names, node, message, local, user, date)
330 330
331 331 def tags(self):
332 332 '''return a mapping of tag to node'''
333 333 if self._tags is None:
334 334 (self._tags, self._tagtypes) = self._findtags()
335 335
336 336 return self._tags
337 337
338 338 def _findtags(self):
339 339 '''Do the hard work of finding tags. Return a pair of dicts
340 340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
341 341 maps tag name to a string like \'global\' or \'local\'.
342 342 Subclasses or extensions are free to add their own tags, but
343 343 should be aware that the returned dicts will be retained for the
344 344 duration of the localrepo object.'''
345 345
346 346 # XXX what tagtype should subclasses/extensions use? Currently
347 347 # mq and bookmarks add tags, but do not set the tagtype at all.
348 348 # Should each extension invent its own tag type? Should there
349 349 # be one tagtype for all such "virtual" tags? Or is the status
350 350 # quo fine?
351 351
352 352 alltags = {} # map tag name to (node, hist)
353 353 tagtypes = {}
354 354
355 355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
356 356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
357 357
358 358 # Build the return dicts. Have to re-encode tag names because
359 359 # the tags module always uses UTF-8 (in order not to lose info
360 360 # writing to the cache), but the rest of Mercurial wants them in
361 361 # local encoding.
362 362 tags = {}
363 363 for (name, (node, hist)) in alltags.iteritems():
364 364 if node != nullid:
365 365 tags[encoding.tolocal(name)] = node
366 366 tags['tip'] = self.changelog.tip()
367 367 tagtypes = dict([(encoding.tolocal(name), value)
368 368 for (name, value) in tagtypes.iteritems()])
369 369 return (tags, tagtypes)
370 370
371 371 def tagtype(self, tagname):
372 372 '''
373 373 return the type of the given tag. result can be:
374 374
375 375 'local' : a local tag
376 376 'global' : a global tag
377 377 None : tag does not exist
378 378 '''
379 379
380 380 self.tags()
381 381
382 382 return self._tagtypes.get(tagname)
383 383
384 384 def tagslist(self):
385 385 '''return a list of tags ordered by revision'''
386 386 l = []
387 387 for t, n in self.tags().iteritems():
388 388 try:
389 389 r = self.changelog.rev(n)
390 390 except:
391 391 r = -2 # sort to the beginning of the list if unknown
392 392 l.append((r, t, n))
393 393 return [(t, n) for r, t, n in sorted(l)]
394 394
395 395 def nodetags(self, node):
396 396 '''return the tags associated with a node'''
397 397 if not self.nodetagscache:
398 398 self.nodetagscache = {}
399 399 for t, n in self.tags().iteritems():
400 400 self.nodetagscache.setdefault(n, []).append(t)
401 401 for tags in self.nodetagscache.itervalues():
402 402 tags.sort()
403 403 return self.nodetagscache.get(node, [])
404 404
405 405 def nodebookmarks(self, node):
406 406 marks = []
407 407 for bookmark, n in self._bookmarks.iteritems():
408 408 if n == node:
409 409 marks.append(bookmark)
410 410 return sorted(marks)
411 411
412 412 def _branchtags(self, partial, lrev):
413 413 # TODO: rename this function?
414 414 tiprev = len(self) - 1
415 415 if lrev != tiprev:
416 416 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
417 417 self._updatebranchcache(partial, ctxgen)
418 418 self._writebranchcache(partial, self.changelog.tip(), tiprev)
419 419
420 420 return partial
421 421
422 422 def updatebranchcache(self):
423 423 tip = self.changelog.tip()
424 424 if self._branchcache is not None and self._branchcachetip == tip:
425 425 return self._branchcache
426 426
427 427 oldtip = self._branchcachetip
428 428 self._branchcachetip = tip
429 429 if oldtip is None or oldtip not in self.changelog.nodemap:
430 430 partial, last, lrev = self._readbranchcache()
431 431 else:
432 432 lrev = self.changelog.rev(oldtip)
433 433 partial = self._branchcache
434 434
435 435 self._branchtags(partial, lrev)
436 436 # this private cache holds all heads (not just tips)
437 437 self._branchcache = partial
438 438
439 439 def branchmap(self):
440 440 '''returns a dictionary {branch: [branchheads]}'''
441 441 self.updatebranchcache()
442 442 return self._branchcache
443 443
444 444 def branchtags(self):
445 445 '''return a dict where branch names map to the tipmost head of
446 446 the branch; open heads come before closed'''
447 447 bt = {}
448 448 for bn, heads in self.branchmap().iteritems():
449 449 tip = heads[-1]
450 450 for h in reversed(heads):
451 451 if 'close' not in self.changelog.read(h)[5]:
452 452 tip = h
453 453 break
454 454 bt[bn] = tip
455 455 return bt
456 456
457 457 def _readbranchcache(self):
458 458 partial = {}
459 459 try:
460 460 f = self.opener("cache/branchheads")
461 461 lines = f.read().split('\n')
462 462 f.close()
463 463 except (IOError, OSError):
464 464 return {}, nullid, nullrev
465 465
466 466 try:
467 467 last, lrev = lines.pop(0).split(" ", 1)
468 468 last, lrev = bin(last), int(lrev)
469 469 if lrev >= len(self) or self[lrev].node() != last:
470 470 # invalidate the cache
471 471 raise ValueError('invalidating branch cache (tip differs)')
472 472 for l in lines:
473 473 if not l:
474 474 continue
475 475 node, label = l.split(" ", 1)
476 476 label = encoding.tolocal(label.strip())
477 477 partial.setdefault(label, []).append(bin(node))
478 478 except KeyboardInterrupt:
479 479 raise
480 480 except Exception, inst:
481 481 if self.ui.debugflag:
482 482 self.ui.warn(str(inst), '\n')
483 483 partial, last, lrev = {}, nullid, nullrev
484 484 return partial, last, lrev
485 485
486 486 def _writebranchcache(self, branches, tip, tiprev):
487 487 try:
488 488 f = self.opener("cache/branchheads", "w", atomictemp=True)
489 489 f.write("%s %s\n" % (hex(tip), tiprev))
490 490 for label, nodes in branches.iteritems():
491 491 for node in nodes:
492 492 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
493 493 f.rename()
494 494 except (IOError, OSError):
495 495 pass
496 496
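# Illustrative sketch (not part of this changeset): a standalone parser for
# the cache/branchheads format written above -- one "<tip-hex> <tip-rev>"
# header line followed by "<node-hex> <branch>" lines.  The sample data is
# invented.
def parse_branchheads(text):
    lines = text.split('\n')
    last, lrev = lines.pop(0).split(' ', 1)
    heads = {}
    for l in lines:
        if not l:
            continue
        node, label = l.split(' ', 1)
        heads.setdefault(label.strip(), []).append(node)
    return last, int(lrev), heads

sample = 'ff' * 20 + ' 7\n' + 'aa' * 20 + ' default\n' + 'bb' * 20 + ' stable\n'
assert parse_branchheads(sample)[1] == 7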
497 497 def _updatebranchcache(self, partial, ctxgen):
498 498 # collect new branch entries
499 499 newbranches = {}
500 500 for c in ctxgen:
501 501 newbranches.setdefault(c.branch(), []).append(c.node())
502 502 # if older branchheads are reachable from new ones, they aren't
503 503 # really branchheads. Note checking parents is insufficient:
504 504 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
505 505 for branch, newnodes in newbranches.iteritems():
506 506 bheads = partial.setdefault(branch, [])
507 507 bheads.extend(newnodes)
508 508 if len(bheads) <= 1:
509 509 continue
510 510 # starting from tip means fewer passes over reachable
511 511 while newnodes:
512 512 latest = newnodes.pop()
513 513 if latest not in bheads:
514 514 continue
515 515 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
516 516 reachable = self.changelog.reachable(latest, minbhrev)
517 517 reachable.remove(latest)
518 518 bheads = [b for b in bheads if b not in reachable]
519 519 partial[branch] = bheads
520 520
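# Illustrative toy DAG (invented) for the comment above: with
# 1 (branch a) -> 2 (branch b) -> 3 (branch a), node 1 is an ancestor of the
# new head 3, so it is no longer a head of branch "a" even though 3's direct
# parent sits on another branch.
parents = {1: [], 2: [1], 3: [2]}
branch = {1: 'a', 2: 'b', 3: 'a'}

def reachable(frm):
    seen, stack = set(), list(parents[frm])
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

heads_a = [n for n in sorted(branch) if branch[n] == 'a' and n not in reachable(3)]
assert heads_a == [3]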
521 521 def lookup(self, key):
522 522 if isinstance(key, int):
523 523 return self.changelog.node(key)
524 524 elif key == '.':
525 525 return self.dirstate.parents()[0]
526 526 elif key == 'null':
527 527 return nullid
528 528 elif key == 'tip':
529 529 return self.changelog.tip()
530 530 n = self.changelog._match(key)
531 531 if n:
532 532 return n
533 533 if key in self._bookmarks:
534 534 return self._bookmarks[key]
535 535 if key in self.tags():
536 536 return self.tags()[key]
537 537 if key in self.branchtags():
538 538 return self.branchtags()[key]
539 539 n = self.changelog._partialmatch(key)
540 540 if n:
541 541 return n
542 542
543 543 # can't find key, check if it might have come from damaged dirstate
544 544 if key in self.dirstate.parents():
545 545 raise error.Abort(_("working directory has unknown parent '%s'!")
546 546 % short(key))
547 547 try:
548 548 if len(key) == 20:
549 549 key = hex(key)
550 550 except:
551 551 pass
552 552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
553 553
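# Hedged sketch (not part of this changeset) of the fall-through order that
# lookup() above implements once the special names and exact node matches
# are exhausted: bookmarks win over tags, which win over branch names.  The
# tables and values below are invented stand-ins.
def resolve(key, bookmarks, tags, branchtips):
    for table in (bookmarks, tags, branchtips):
        if key in table:
            return table[key]
    raise KeyError("unknown revision %r" % key)

assert resolve('feature', {'feature': 'n1'}, {'feature': 'n2'}, {}) == 'n1'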
554 554 def lookupbranch(self, key, remote=None):
555 555 repo = remote or self
556 556 if key in repo.branchmap():
557 557 return key
558 558
559 559 repo = (remote and remote.local()) and remote or self
560 560 return repo[key].branch()
561 561
562 562 def known(self, nodes):
563 563 nm = self.changelog.nodemap
564 564 return [(n in nm) for n in nodes]
565 565
566 566 def local(self):
567 567 return True
568 568
569 569 def join(self, f):
570 570 return os.path.join(self.path, f)
571 571
572 572 def wjoin(self, f):
573 573 return os.path.join(self.root, f)
574 574
575 575 def file(self, f):
576 576 if f[0] == '/':
577 577 f = f[1:]
578 578 return filelog.filelog(self.sopener, f)
579 579
580 580 def changectx(self, changeid):
581 581 return self[changeid]
582 582
583 583 def parents(self, changeid=None):
584 584 '''get list of changectxs for parents of changeid'''
585 585 return self[changeid].parents()
586 586
587 587 def filectx(self, path, changeid=None, fileid=None):
588 588 """changeid can be a changeset revision, node, or tag.
589 589 fileid can be a file revision or node."""
590 590 return context.filectx(self, path, changeid, fileid)
591 591
592 592 def getcwd(self):
593 593 return self.dirstate.getcwd()
594 594
595 595 def pathto(self, f, cwd=None):
596 596 return self.dirstate.pathto(f, cwd)
597 597
598 598 def wfile(self, f, mode='r'):
599 599 return self.wopener(f, mode)
600 600
601 601 def _link(self, f):
602 602 return os.path.islink(self.wjoin(f))
603 603
604 604 def _loadfilter(self, filter):
605 605 if filter not in self.filterpats:
606 606 l = []
607 607 for pat, cmd in self.ui.configitems(filter):
608 608 if cmd == '!':
609 609 continue
610 610 mf = matchmod.match(self.root, '', [pat])
611 611 fn = None
612 612 params = cmd
613 613 for name, filterfn in self._datafilters.iteritems():
614 614 if cmd.startswith(name):
615 615 fn = filterfn
616 616 params = cmd[len(name):].lstrip()
617 617 break
618 618 if not fn:
619 619 fn = lambda s, c, **kwargs: util.filter(s, c)
620 620 # Wrap old filters not supporting keyword arguments
621 621 if not inspect.getargspec(fn)[2]:
622 622 oldfn = fn
623 623 fn = lambda s, c, **kwargs: oldfn(s, c)
624 624 l.append((mf, fn, params))
625 625 self.filterpats[filter] = l
626 626 return self.filterpats[filter]
627 627
628 628 def _filter(self, filterpats, filename, data):
629 629 for mf, fn, cmd in filterpats:
630 630 if mf(filename):
631 631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
632 632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
633 633 break
634 634
635 635 return data
636 636
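# Illustrative sketch (not part of this changeset) of the dispatch performed
# by _filter() above: the first matching pattern wins and its function
# transforms the data.  The pattern, command name and CRLF example are
# invented.
filterpats = [
    (lambda fn: fn.endswith('.txt'),
     lambda data, cmd, **kw: data.replace('\r\n', '\n'), 'to-lf'),
]

def run_filters(filterpats, filename, data):
    for match, fn, cmd in filterpats:
        if match(filename):
            return fn(data, cmd)
    return data

assert run_filters(filterpats, 'notes.txt', 'a\r\nb') == 'a\nb'
assert run_filters(filterpats, 'image.png', 'a\r\nb') == 'a\r\nb'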
637 637 @propertycache
638 638 def _encodefilterpats(self):
639 639 return self._loadfilter('encode')
640 640
641 641 @propertycache
642 642 def _decodefilterpats(self):
643 643 return self._loadfilter('decode')
644 644
645 645 def adddatafilter(self, name, filter):
646 646 self._datafilters[name] = filter
647 647
648 648 def wread(self, filename):
649 649 if self._link(filename):
650 650 data = os.readlink(self.wjoin(filename))
651 651 else:
652 652 data = self.wopener(filename, 'r').read()
653 653 return self._filter(self._encodefilterpats, filename, data)
654 654
655 655 def wwrite(self, filename, data, flags):
656 656 data = self._filter(self._decodefilterpats, filename, data)
657 657 if 'l' in flags:
658 658 self.wopener.symlink(data, filename)
659 659 else:
660 660 self.wopener(filename, 'w').write(data)
661 661 if 'x' in flags:
662 662 util.set_flags(self.wjoin(filename), False, True)
663 663
664 664 def wwritedata(self, filename, data):
665 665 return self._filter(self._decodefilterpats, filename, data)
666 666
667 667 def transaction(self, desc):
668 668 tr = self._transref and self._transref() or None
669 669 if tr and tr.running():
670 670 return tr.nest()
671 671
672 672 # abort here if the journal already exists
673 673 if os.path.exists(self.sjoin("journal")):
674 674 raise error.RepoError(
675 675 _("abandoned transaction found - run hg recover"))
676 676
677 677 # save dirstate for rollback
678 678 try:
679 679 ds = self.opener("dirstate").read()
680 680 except IOError:
681 681 ds = ""
682 682 self.opener("journal.dirstate", "w").write(ds)
683 683 self.opener("journal.branch", "w").write(
684 684 encoding.fromlocal(self.dirstate.branch()))
685 685 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
686 686
687 687 renames = [(self.sjoin("journal"), self.sjoin("undo")),
688 688 (self.join("journal.dirstate"), self.join("undo.dirstate")),
689 689 (self.join("journal.branch"), self.join("undo.branch")),
690 690 (self.join("journal.desc"), self.join("undo.desc"))]
691 691 tr = transaction.transaction(self.ui.warn, self.sopener,
692 692 self.sjoin("journal"),
693 693 aftertrans(renames),
694 694 self.store.createmode)
695 695 self._transref = weakref.ref(tr)
696 696 return tr
697 697
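# Hedged usage sketch (not part of this changeset): the pattern that
# commitctx() and addchangegroup() below follow when driving a transaction
# object; `repo` is assumed to be an open localrepository instance.
def run_in_transaction(repo, desc, work):
    tr = repo.transaction(desc)
    try:
        work(tr)       # callers usually hand out a weakref.proxy(tr)
        tr.close()     # success: aftertrans() renames journal.* to undo.*
    finally:
        tr.release()   # rolls back if close() was never reached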
698 698 def recover(self):
699 699 lock = self.lock()
700 700 try:
701 701 if os.path.exists(self.sjoin("journal")):
702 702 self.ui.status(_("rolling back interrupted transaction\n"))
703 703 transaction.rollback(self.sopener, self.sjoin("journal"),
704 704 self.ui.warn)
705 705 self.invalidate()
706 706 return True
707 707 else:
708 708 self.ui.warn(_("no interrupted transaction available\n"))
709 709 return False
710 710 finally:
711 711 lock.release()
712 712
713 713 def rollback(self, dryrun=False):
714 714 wlock = lock = None
715 715 try:
716 716 wlock = self.wlock()
717 717 lock = self.lock()
718 718 if os.path.exists(self.sjoin("undo")):
719 719 try:
720 720 args = self.opener("undo.desc", "r").read().splitlines()
721 721 if len(args) >= 3 and self.ui.verbose:
722 722 desc = _("repository tip rolled back to revision %s"
723 723 " (undo %s: %s)\n") % (
724 724 int(args[0]) - 1, args[1], args[2])
725 725 elif len(args) >= 2:
726 726 desc = _("repository tip rolled back to revision %s"
727 727 " (undo %s)\n") % (
728 728 int(args[0]) - 1, args[1])
729 729 except IOError:
730 730 desc = _("rolling back unknown transaction\n")
731 731 self.ui.status(desc)
732 732 if dryrun:
733 733 return
734 734 transaction.rollback(self.sopener, self.sjoin("undo"),
735 735 self.ui.warn)
736 736 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
737 737 if os.path.exists(self.join('undo.bookmarks')):
738 738 util.rename(self.join('undo.bookmarks'),
739 739 self.join('bookmarks'))
740 740 try:
741 741 branch = self.opener("undo.branch").read()
742 742 self.dirstate.setbranch(branch)
743 743 except IOError:
744 744 self.ui.warn(_("Named branch could not be reset, "
745 745 "current branch still is: %s\n")
746 746 % self.dirstate.branch())
747 747 self.invalidate()
748 748 self.dirstate.invalidate()
749 749 self.destroyed()
750 750 parents = tuple([p.rev() for p in self.parents()])
751 751 if len(parents) > 1:
752 752 self.ui.status(_("working directory now based on "
753 753 "revisions %d and %d\n") % parents)
754 754 else:
755 755 self.ui.status(_("working directory now based on "
756 756 "revision %d\n") % parents)
757 757 else:
758 758 self.ui.warn(_("no rollback information available\n"))
759 759 return 1
760 760 finally:
761 761 release(lock, wlock)
762 762
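# Illustrative sketch (invented sample data): transaction() above writes
# journal.desc as "<len(repo)>\n<description>\n", and addchangegroup() joins
# the source type and URL into the description -- which is exactly what
# rollback() above parses out of undo.desc.
sample = '42\npull\nhttp://example.com/repo\n'
args = sample.splitlines()
assert (int(args[0]) - 1, args[1], args[2]) == (41, 'pull', 'http://example.com/repo')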
763 763 def invalidatecaches(self):
764 764 self._tags = None
765 765 self._tagtypes = None
766 766 self.nodetagscache = None
767 767 self._branchcache = None # in UTF-8
768 768 self._branchcachetip = None
769 769
770 770 def invalidate(self):
771 771 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
772 772 if a in self.__dict__:
773 773 delattr(self, a)
774 774 self.invalidatecaches()
775 775
776 776 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
777 777 try:
778 778 l = lock.lock(lockname, 0, releasefn, desc=desc)
779 779 except error.LockHeld, inst:
780 780 if not wait:
781 781 raise
782 782 self.ui.warn(_("waiting for lock on %s held by %r\n") %
783 783 (desc, inst.locker))
784 784 # default to 600 seconds timeout
785 785 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
786 786 releasefn, desc=desc)
787 787 if acquirefn:
788 788 acquirefn()
789 789 return l
790 790
791 791 def lock(self, wait=True):
792 792 '''Lock the repository store (.hg/store) and return a weak reference
793 793 to the lock. Use this before modifying the store (e.g. committing or
794 794 stripping). If you are opening a transaction, get a lock as well.'''
795 795 l = self._lockref and self._lockref()
796 796 if l is not None and l.held:
797 797 l.lock()
798 798 return l
799 799
800 800 l = self._lock(self.sjoin("lock"), wait, self.store.write,
801 801 self.invalidate, _('repository %s') % self.origroot)
802 802 self._lockref = weakref.ref(l)
803 803 return l
804 804
805 805 def wlock(self, wait=True):
806 806 '''Lock the non-store parts of the repository (everything under
807 807 .hg except .hg/store) and return a weak reference to the lock.
808 808 Use this before modifying files in .hg.'''
809 809 l = self._wlockref and self._wlockref()
810 810 if l is not None and l.held:
811 811 l.lock()
812 812 return l
813 813
814 814 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
815 815 self.dirstate.invalidate, _('working directory of %s') %
816 816 self.origroot)
817 817 self._wlockref = weakref.ref(l)
818 818 return l
819 819
820 820 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
821 821 """
822 822 commit an individual file as part of a larger transaction
823 823 """
824 824
825 825 fname = fctx.path()
826 826 text = fctx.data()
827 827 flog = self.file(fname)
828 828 fparent1 = manifest1.get(fname, nullid)
829 829 fparent2 = fparent2o = manifest2.get(fname, nullid)
830 830
831 831 meta = {}
832 832 copy = fctx.renamed()
833 833 if copy and copy[0] != fname:
834 834 # Mark the new revision of this file as a copy of another
835 835 # file. This copy data will effectively act as a parent
836 836 # of this new revision. If this is a merge, the first
837 837 # parent will be the nullid (meaning "look up the copy data")
838 838 # and the second one will be the other parent. For example:
839 839 #
840 840 # 0 --- 1 --- 3 rev1 changes file foo
841 841 # \ / rev2 renames foo to bar and changes it
842 842 # \- 2 -/ rev3 should have bar with all changes and
843 843 # should record that bar descends from
844 844 # bar in rev2 and foo in rev1
845 845 #
846 846 # this allows this merge to succeed:
847 847 #
848 848 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
849 849 # \ / merging rev3 and rev4 should use bar@rev2
850 850 # \- 2 --- 4 as the merge base
851 851 #
852 852
853 853 cfname = copy[0]
854 854 crev = manifest1.get(cfname)
855 855 newfparent = fparent2
856 856
857 857 if manifest2: # branch merge
858 858 if fparent2 == nullid or crev is None: # copied on remote side
859 859 if cfname in manifest2:
860 860 crev = manifest2[cfname]
861 861 newfparent = fparent1
862 862
863 863 # find source in nearest ancestor if we've lost track
864 864 if not crev:
865 865 self.ui.debug(" %s: searching for copy revision for %s\n" %
866 866 (fname, cfname))
867 867 for ancestor in self[None].ancestors():
868 868 if cfname in ancestor:
869 869 crev = ancestor[cfname].filenode()
870 870 break
871 871
872 872 if crev:
873 873 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
874 874 meta["copy"] = cfname
875 875 meta["copyrev"] = hex(crev)
876 876 fparent1, fparent2 = nullid, newfparent
877 877 else:
878 878 self.ui.warn(_("warning: can't find ancestor for '%s' "
879 879 "copied from '%s'!\n") % (fname, cfname))
880 880
881 881 elif fparent2 != nullid:
882 882 # is one parent an ancestor of the other?
883 883 fparentancestor = flog.ancestor(fparent1, fparent2)
884 884 if fparentancestor == fparent1:
885 885 fparent1, fparent2 = fparent2, nullid
886 886 elif fparentancestor == fparent2:
887 887 fparent2 = nullid
888 888
889 889 # is the file changed?
890 890 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
891 891 changelist.append(fname)
892 892 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
893 893
894 894 # are just the flags changed during merge?
895 895 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
896 896 changelist.append(fname)
897 897
898 898 return fparent1
899 899
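# Illustrative sketch: the copy metadata recorded by _filecommit() above
# when a rename is detected.  The path and the 40-character hex value are
# invented placeholders.
meta = {
    'copy': 'foo',         # path the file was copied/renamed from
    'copyrev': 'a' * 40,   # hex filenode of that source revision
}
# fparent1 is then reset to nullid, which tells readers "look up the copy data".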
900 900 def commit(self, text="", user=None, date=None, match=None, force=False,
901 901 editor=False, extra={}):
902 902 """Add a new revision to current repository.
903 903
904 904 Revision information is gathered from the working directory,
905 905 match can be used to filter the committed files. If editor is
906 906 supplied, it is called to get a commit message.
907 907 """
908 908
909 909 def fail(f, msg):
910 910 raise util.Abort('%s: %s' % (f, msg))
911 911
912 912 if not match:
913 913 match = matchmod.always(self.root, '')
914 914
915 915 if not force:
916 916 vdirs = []
917 917 match.dir = vdirs.append
918 918 match.bad = fail
919 919
920 920 wlock = self.wlock()
921 921 try:
922 922 wctx = self[None]
923 923 merge = len(wctx.parents()) > 1
924 924
925 925 if (not force and merge and match and
926 926 (match.files() or match.anypats())):
927 927 raise util.Abort(_('cannot partially commit a merge '
928 928 '(do not specify files or patterns)'))
929 929
930 930 changes = self.status(match=match, clean=force)
931 931 if force:
932 932 changes[0].extend(changes[6]) # mq may commit unchanged files
933 933
934 934 # check subrepos
935 935 subs = []
936 936 removedsubs = set()
937 937 for p in wctx.parents():
938 938 removedsubs.update(s for s in p.substate if match(s))
939 939 for s in wctx.substate:
940 940 removedsubs.discard(s)
941 941 if match(s) and wctx.sub(s).dirty():
942 942 subs.append(s)
943 943 if (subs or removedsubs):
944 944 if (not match('.hgsub') and
945 945 '.hgsub' in (wctx.modified() + wctx.added())):
946 946 raise util.Abort(_("can't commit subrepos without .hgsub"))
947 947 if '.hgsubstate' not in changes[0]:
948 948 changes[0].insert(0, '.hgsubstate')
949 949
950 950 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
951 951 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
952 952 if changedsubs:
953 953 raise util.Abort(_("uncommitted changes in subrepo %s")
954 954 % changedsubs[0])
955 955
956 956 # make sure all explicit patterns are matched
957 957 if not force and match.files():
958 958 matched = set(changes[0] + changes[1] + changes[2])
959 959
960 960 for f in match.files():
961 961 if f == '.' or f in matched or f in wctx.substate:
962 962 continue
963 963 if f in changes[3]: # missing
964 964 fail(f, _('file not found!'))
965 965 if f in vdirs: # visited directory
966 966 d = f + '/'
967 967 for mf in matched:
968 968 if mf.startswith(d):
969 969 break
970 970 else:
971 971 fail(f, _("no match under directory!"))
972 972 elif f not in self.dirstate:
973 973 fail(f, _("file not tracked!"))
974 974
975 975 if (not force and not extra.get("close") and not merge
976 976 and not (changes[0] or changes[1] or changes[2])
977 977 and wctx.branch() == wctx.p1().branch()):
978 978 return None
979 979
980 980 ms = mergemod.mergestate(self)
981 981 for f in changes[0]:
982 982 if f in ms and ms[f] == 'u':
983 983 raise util.Abort(_("unresolved merge conflicts "
984 984 "(see hg help resolve)"))
985 985
986 986 cctx = context.workingctx(self, text, user, date, extra, changes)
987 987 if editor:
988 988 cctx._text = editor(self, cctx, subs)
989 989 edited = (text != cctx._text)
990 990
991 991 # commit subs
992 992 if subs or removedsubs:
993 993 state = wctx.substate.copy()
994 994 for s in sorted(subs):
995 995 sub = wctx.sub(s)
996 996 self.ui.status(_('committing subrepository %s\n') %
997 997 subrepo.subrelpath(sub))
998 998 sr = sub.commit(cctx._text, user, date)
999 999 state[s] = (state[s][0], sr)
1000 1000 subrepo.writestate(self, state)
1001 1001
1002 1002 # Save commit message in case this transaction gets rolled back
1003 1003 # (e.g. by a pretxncommit hook). Leave the content alone on
1004 1004 # the assumption that the user will use the same editor again.
1005 1005 msgfile = self.opener('last-message.txt', 'wb')
1006 1006 msgfile.write(cctx._text)
1007 1007 msgfile.close()
1008 1008
1009 1009 p1, p2 = self.dirstate.parents()
1010 1010 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1011 1011 try:
1012 1012 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1013 1013 ret = self.commitctx(cctx, True)
1014 1014 except:
1015 1015 if edited:
1016 1016 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1017 1017 self.ui.write(
1018 1018 _('note: commit message saved in %s\n') % msgfn)
1019 1019 raise
1020 1020
1021 1021 # update bookmarks, dirstate and mergestate
1022 1022 parents = (p1, p2)
1023 1023 if p2 == nullid:
1024 1024 parents = (p1,)
1025 1025 bookmarks.update(self, parents, ret)
1026 1026 for f in changes[0] + changes[1]:
1027 1027 self.dirstate.normal(f)
1028 1028 for f in changes[2]:
1029 1029 self.dirstate.forget(f)
1030 1030 self.dirstate.setparents(ret)
1031 1031 ms.reset()
1032 1032 finally:
1033 1033 wlock.release()
1034 1034
1035 1035 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1036 1036 return ret
1037 1037
1038 1038 def commitctx(self, ctx, error=False):
1039 1039 """Add a new revision to current repository.
1040 1040 Revision information is passed via the context argument.
1041 1041 """
1042 1042
1043 1043 tr = lock = None
1044 1044 removed = list(ctx.removed())
1045 1045 p1, p2 = ctx.p1(), ctx.p2()
1046 1046 m1 = p1.manifest().copy()
1047 1047 m2 = p2.manifest()
1048 1048 user = ctx.user()
1049 1049
1050 1050 lock = self.lock()
1051 1051 try:
1052 1052 tr = self.transaction("commit")
1053 1053 trp = weakref.proxy(tr)
1054 1054
1055 1055 # check in files
1056 1056 new = {}
1057 1057 changed = []
1058 1058 linkrev = len(self)
1059 1059 for f in sorted(ctx.modified() + ctx.added()):
1060 1060 self.ui.note(f + "\n")
1061 1061 try:
1062 1062 fctx = ctx[f]
1063 1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1064 1064 changed)
1065 1065 m1.set(f, fctx.flags())
1066 1066 except OSError, inst:
1067 1067 self.ui.warn(_("trouble committing %s!\n") % f)
1068 1068 raise
1069 1069 except IOError, inst:
1070 1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1071 1071 if error or errcode and errcode != errno.ENOENT:
1072 1072 self.ui.warn(_("trouble committing %s!\n") % f)
1073 1073 raise
1074 1074 else:
1075 1075 removed.append(f)
1076 1076
1077 1077 # update manifest
1078 1078 m1.update(new)
1079 1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1080 1080 drop = [f for f in removed if f in m1]
1081 1081 for f in drop:
1082 1082 del m1[f]
1083 1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1084 1084 p2.manifestnode(), (new, drop))
1085 1085
1086 1086 # update changelog
1087 1087 self.changelog.delayupdate()
1088 1088 n = self.changelog.add(mn, changed + removed, ctx.description(),
1089 1089 trp, p1.node(), p2.node(),
1090 1090 user, ctx.date(), ctx.extra().copy())
1091 1091 p = lambda: self.changelog.writepending() and self.root or ""
1092 1092 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1093 1093 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1094 1094 parent2=xp2, pending=p)
1095 1095 self.changelog.finalize(trp)
1096 1096 tr.close()
1097 1097
1098 1098 if self._branchcache:
1099 1099 self.updatebranchcache()
1100 1100 return n
1101 1101 finally:
1102 1102 if tr:
1103 1103 tr.release()
1104 1104 lock.release()
1105 1105
1106 1106 def destroyed(self):
1107 1107 '''Inform the repository that nodes have been destroyed.
1108 1108 Intended for use by strip and rollback, so there's a common
1109 1109 place for anything that has to be done after destroying history.'''
1110 1110 # XXX it might be nice if we could take the list of destroyed
1111 1111 # nodes, but I don't see an easy way for rollback() to do that
1112 1112
1113 1113 # Ensure the persistent tag cache is updated. Doing it now
1114 1114 # means that the tag cache only has to worry about destroyed
1115 1115 # heads immediately after a strip/rollback. That in turn
1116 1116 # guarantees that "cachetip == currenttip" (comparing both rev
1117 1117 # and node) always means no nodes have been added or destroyed.
1118 1118
1119 1119 # XXX this is suboptimal when qrefresh'ing: we strip the current
1120 1120 # head, refresh the tag cache, then immediately add a new head.
1121 1121 # But I think doing it this way is necessary for the "instant
1122 1122 # tag cache retrieval" case to work.
1123 1123 self.invalidatecaches()
1124 1124
1125 1125 def walk(self, match, node=None):
1126 1126 '''
1127 1127 walk recursively through the directory tree or a given
1128 1128 changeset, finding all files matched by the match
1129 1129 function
1130 1130 '''
1131 1131 return self[node].walk(match)
1132 1132
1133 1133 def status(self, node1='.', node2=None, match=None,
1134 1134 ignored=False, clean=False, unknown=False,
1135 1135 listsubrepos=False):
1136 1136 """return status of files between two nodes or node and working directory
1137 1137
1138 1138 If node1 is None, use the first dirstate parent instead.
1139 1139 If node2 is None, compare node1 with working directory.
1140 1140 """
1141 1141
1142 1142 def mfmatches(ctx):
1143 1143 mf = ctx.manifest().copy()
1144 1144 for fn in mf.keys():
1145 1145 if not match(fn):
1146 1146 del mf[fn]
1147 1147 return mf
1148 1148
1149 1149 if isinstance(node1, context.changectx):
1150 1150 ctx1 = node1
1151 1151 else:
1152 1152 ctx1 = self[node1]
1153 1153 if isinstance(node2, context.changectx):
1154 1154 ctx2 = node2
1155 1155 else:
1156 1156 ctx2 = self[node2]
1157 1157
1158 1158 working = ctx2.rev() is None
1159 1159 parentworking = working and ctx1 == self['.']
1160 1160 match = match or matchmod.always(self.root, self.getcwd())
1161 1161 listignored, listclean, listunknown = ignored, clean, unknown
1162 1162
1163 1163 # load earliest manifest first for caching reasons
1164 1164 if not working and ctx2.rev() < ctx1.rev():
1165 1165 ctx2.manifest()
1166 1166
1167 1167 if not parentworking:
1168 1168 def bad(f, msg):
1169 1169 if f not in ctx1:
1170 1170 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1171 1171 match.bad = bad
1172 1172
1173 1173 if working: # we need to scan the working dir
1174 1174 subrepos = []
1175 1175 if '.hgsub' in self.dirstate:
1176 1176 subrepos = ctx1.substate.keys()
1177 1177 s = self.dirstate.status(match, subrepos, listignored,
1178 1178 listclean, listunknown)
1179 1179 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1180 1180
1181 1181 # check for any possibly clean files
1182 1182 if parentworking and cmp:
1183 1183 fixup = []
1184 1184 # do a full compare of any files that might have changed
1185 1185 for f in sorted(cmp):
1186 1186 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1187 1187 or ctx1[f].cmp(ctx2[f])):
1188 1188 modified.append(f)
1189 1189 else:
1190 1190 fixup.append(f)
1191 1191
1192 1192 # update dirstate for files that are actually clean
1193 1193 if fixup:
1194 1194 if listclean:
1195 1195 clean += fixup
1196 1196
1197 1197 try:
1198 1198 # updating the dirstate is optional
1199 1199 # so we don't wait on the lock
1200 1200 wlock = self.wlock(False)
1201 1201 try:
1202 1202 for f in fixup:
1203 1203 self.dirstate.normal(f)
1204 1204 finally:
1205 1205 wlock.release()
1206 1206 except error.LockError:
1207 1207 pass
1208 1208
1209 1209 if not parentworking:
1210 1210 mf1 = mfmatches(ctx1)
1211 1211 if working:
1212 1212 # we are comparing working dir against non-parent
1213 1213 # generate a pseudo-manifest for the working dir
1214 1214 mf2 = mfmatches(self['.'])
1215 1215 for f in cmp + modified + added:
1216 1216 mf2[f] = None
1217 1217 mf2.set(f, ctx2.flags(f))
1218 1218 for f in removed:
1219 1219 if f in mf2:
1220 1220 del mf2[f]
1221 1221 else:
1222 1222 # we are comparing two revisions
1223 1223 deleted, unknown, ignored = [], [], []
1224 1224 mf2 = mfmatches(ctx2)
1225 1225
1226 1226 modified, added, clean = [], [], []
1227 1227 for fn in mf2:
1228 1228 if fn in mf1:
1229 1229 if (mf1.flags(fn) != mf2.flags(fn) or
1230 1230 (mf1[fn] != mf2[fn] and
1231 1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn])))):
1232 1232 modified.append(fn)
1233 1233 elif listclean:
1234 1234 clean.append(fn)
1235 1235 del mf1[fn]
1236 1236 else:
1237 1237 added.append(fn)
1238 1238 removed = mf1.keys()
1239 1239
1240 1240 r = modified, added, removed, deleted, unknown, ignored, clean
1241 1241
1242 1242 if listsubrepos:
1243 1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1244 1244 if working:
1245 1245 rev2 = None
1246 1246 else:
1247 1247 rev2 = ctx2.substate[subpath][1]
1248 1248 try:
1249 1249 submatch = matchmod.narrowmatcher(subpath, match)
1250 1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1251 1251 clean=listclean, unknown=listunknown,
1252 1252 listsubrepos=True)
1253 1253 for rfiles, sfiles in zip(r, s):
1254 1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1255 1255 except error.LookupError:
1256 1256 self.ui.status(_("skipping missing subrepository: %s\n")
1257 1257 % subpath)
1258 1258
1259 1259 for l in r:
1260 1260 l.sort()
1261 1261 return r
1262 1262
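# Hedged usage sketch (not part of this changeset): status() above returns
# seven sorted lists, and the ignored/clean/unknown lists stay empty unless
# explicitly requested.  `repo` is assumed to be an open localrepository.
def summarize_status(repo):
    modified, added, removed, deleted, unknown, ignored, clean = repo.status(
        ignored=True, clean=True, unknown=True)
    return len(modified) + len(added) + len(removed)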
1263 1263 def heads(self, start=None):
1264 1264 heads = self.changelog.heads(start)
1265 1265 # sort the output in rev descending order
1266 1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1267 1267
1268 1268 def branchheads(self, branch=None, start=None, closed=False):
1269 1269 '''return a (possibly filtered) list of heads for the given branch
1270 1270
1271 1271 Heads are returned in topological order, from newest to oldest.
1272 1272 If branch is None, use the dirstate branch.
1273 1273 If start is not None, return only heads reachable from start.
1274 1274 If closed is True, return heads that are marked as closed as well.
1275 1275 '''
1276 1276 if branch is None:
1277 1277 branch = self[None].branch()
1278 1278 branches = self.branchmap()
1279 1279 if branch not in branches:
1280 1280 return []
1281 1281 # the cache returns heads ordered lowest to highest
1282 1282 bheads = list(reversed(branches[branch]))
1283 1283 if start is not None:
1284 1284 # filter out the heads that cannot be reached from startrev
1285 1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1286 1286 bheads = [h for h in bheads if h in fbheads]
1287 1287 if not closed:
1288 1288 bheads = [h for h in bheads if
1289 1289 ('close' not in self.changelog.read(h)[5])]
1290 1290 return bheads
1291 1291
1292 1292 def branches(self, nodes):
1293 1293 if not nodes:
1294 1294 nodes = [self.changelog.tip()]
1295 1295 b = []
1296 1296 for n in nodes:
1297 1297 t = n
1298 1298 while 1:
1299 1299 p = self.changelog.parents(n)
1300 1300 if p[1] != nullid or p[0] == nullid:
1301 1301 b.append((t, n, p[0], p[1]))
1302 1302 break
1303 1303 n = p[0]
1304 1304 return b
1305 1305
1306 1306 def between(self, pairs):
1307 1307 r = []
1308 1308
1309 1309 for top, bottom in pairs:
1310 1310 n, l, i = top, [], 0
1311 1311 f = 1
1312 1312
1313 1313 while n != bottom and n != nullid:
1314 1314 p = self.changelog.parents(n)[0]
1315 1315 if i == f:
1316 1316 l.append(n)
1317 1317 f = f * 2
1318 1318 n = p
1319 1319 i += 1
1320 1320
1321 1321 r.append(l)
1322 1322
1323 1323 return r
1324 1324
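# Illustrative walk (not part of this changeset) of the sampling done by
# between() above: starting from `top` it follows first parents and records
# the nodes at distances 1, 2, 4, 8, ... toward `bottom`.  A linear chain of
# integers stands in for changelog nodes.
def between_one(parents, top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom and n is not None:
        if i == f:
            l.append(n)
            f *= 2
        n = parents.get(n)
        i += 1
    return l

chain = dict((k, k - 1) for k in range(1, 11))    # 10 -> 9 -> ... -> 0
assert between_one(chain, 10, 0) == [9, 8, 6, 2]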
1325 1325 def pull(self, remote, heads=None, force=False):
1326 1326 lock = self.lock()
1327 1327 try:
1328 1328 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1329 1329 force=force)
1330 1330 common, fetch, rheads = tmp
1331 1331 if not fetch:
1332 1332 self.ui.status(_("no changes found\n"))
1333 1333 result = 0
1334 1334 else:
1335 1335 if heads is None and fetch == [nullid]:
1336 1336 self.ui.status(_("requesting all changes\n"))
1337 1337 elif heads is None and remote.capable('changegroupsubset'):
1338 1338 # issue1320, avoid a race if remote changed after discovery
1339 1339 heads = rheads
1340 1340
1341 1341 if heads is None:
1342 1342 cg = remote.changegroup(fetch, 'pull')
1343 1343 elif not remote.capable('changegroupsubset'):
1344 1344 raise util.Abort(_("partial pull cannot be done because "
1345 1345 "other repository doesn't support "
1346 1346 "changegroupsubset."))
1347 1347 else:
1348 1348 cg = remote.changegroupsubset(fetch, heads, 'pull')
1349 1349 result = self.addchangegroup(cg, 'pull', remote.url(),
1350 1350 lock=lock)
1351 1351 finally:
1352 1352 lock.release()
1353 1353
1354 1354 return result
1355 1355
1356 1356 def checkpush(self, force, revs):
1357 1357 """Extensions can override this function if additional checks have
1358 1358 to be performed before pushing, or call it if they override push
1359 1359 command.
1360 1360 """
1361 1361 pass
1362 1362
1363 1363 def push(self, remote, force=False, revs=None, newbranch=False):
1364 1364 '''Push outgoing changesets (limited by revs) from the current
1365 1365 repository to remote. Return an integer:
1366 1366 - 0 means HTTP error *or* nothing to push
1367 1367 - 1 means we pushed and remote head count is unchanged *or*
1368 1368 we have outgoing changesets but refused to push
1369 1369 - other values as described by addchangegroup()
1370 1370 '''
1371 1371 # there are two ways to push to remote repo:
1372 1372 #
1373 1373 # addchangegroup assumes local user can lock remote
1374 1374 # repo (local filesystem, old ssh servers).
1375 1375 #
1376 1376 # unbundle assumes local user cannot lock remote repo (new ssh
1377 1377 # servers, http servers).
1378 1378
1379 1379 self.checkpush(force, revs)
1380 1380 lock = None
1381 1381 unbundle = remote.capable('unbundle')
1382 1382 if not unbundle:
1383 1383 lock = remote.lock()
1384 1384 try:
1385 1385 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1386 1386 newbranch)
1387 1387 ret = remote_heads
1388 1388 if cg is not None:
1389 1389 if unbundle:
1390 1390 # local repo finds heads on server, finds out what
1391 1391 # revs it must push. Once the revs are transferred, if the server
1392 1392 # finds it has different heads (someone else won a
1393 1393 # commit/push race), the server aborts.
1394 1394 if force:
1395 1395 remote_heads = ['force']
1396 1396 # ssh: return remote's addchangegroup()
1397 1397 # http: return remote's addchangegroup() or 0 for error
1398 1398 ret = remote.unbundle(cg, remote_heads, 'push')
1399 1399 else:
1400 1400 # we return an integer indicating remote head count change
1401 1401 ret = remote.addchangegroup(cg, 'push', self.url(),
1402 1402 lock=lock)
1403 1403 finally:
1404 1404 if lock is not None:
1405 1405 lock.release()
1406 1406
1407 1407 self.ui.debug("checking for updated bookmarks\n")
1408 1408 rb = remote.listkeys('bookmarks')
1409 1409 for k in rb.keys():
1410 1410 if k in self._bookmarks:
1411 1411 nr, nl = rb[k], hex(self._bookmarks[k])
1412 1412 if nr in self:
1413 1413 cr = self[nr]
1414 1414 cl = self[nl]
1415 1415 if cl in cr.descendants():
1416 1416 r = remote.pushkey('bookmarks', k, nr, nl)
1417 1417 if r:
1418 1418 self.ui.status(_("updating bookmark %s\n") % k)
1419 1419 else:
1420 1420 self.ui.warn(_('updating bookmark %s'
1421 1421 ' failed!\n') % k)
1422 1422
1423 1423 return ret
1424 1424
1425 1425 def changegroupinfo(self, nodes, source):
1426 1426 if self.ui.verbose or source == 'bundle':
1427 1427 self.ui.status(_("%d changesets found\n") % len(nodes))
1428 1428 if self.ui.debugflag:
1429 1429 self.ui.debug("list of changesets:\n")
1430 1430 for node in nodes:
1431 1431 self.ui.debug("%s\n" % hex(node))
1432 1432
1433 1433 def changegroupsubset(self, bases, heads, source):
1434 1434 """Compute a changegroup consisting of all the nodes that are
1435 1435 descendants of any of the bases and ancestors of any of the heads.
1436 1436 Return a chunkbuffer object whose read() method will return
1437 1437 successive changegroup chunks.
1438 1438
1439 1439 It is fairly complex as determining which filenodes and which
1440 1440 manifest nodes need to be included for the changeset to be complete
1441 1441 is non-trivial.
1442 1442
1443 1443 Another wrinkle is doing the reverse, figuring out which changeset in
1444 1444 the changegroup a particular filenode or manifestnode belongs to.
1445 1445 """
1446 cl = self.changelog
1447 if not bases:
1448 bases = [nullid]
1449 csets, bases, heads = cl.nodesbetween(bases, heads)
1450 # We assume that all ancestors of bases are known
1451 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1452 return self._changegroupsubset(common, csets, heads, source)
1453
1454 def getbundle(self, source, heads=None, common=None):
1455 """Like changegroupsubset, but returns the set difference between the
1456 ancestors of heads and the ancestors of common.
1457
1458 If heads is None, use the local heads. If common is None, use [nullid].
1459
1460 The nodes in common might not all be known locally due to the way the
1461 current discovery protocol works.
1462 """
1463 cl = self.changelog
1464 if common:
1465 nm = cl.nodemap
1466 common = [n for n in common if n in nm]
1467 else:
1468 common = [nullid]
1469 if not heads:
1470 heads = cl.heads()
1471 common, missing = cl.findcommonmissing(common, heads)
1472 return self._changegroupsubset(common, missing, heads, source)
1473
1474 def _changegroupsubset(self, commonrevs, csets, heads, source):
1446 1475
1447 1476 cl = self.changelog
1448 1477 mf = self.manifest
1449 1478 mfs = {} # needed manifests
1450 1479 fnodes = {} # needed file nodes
1451 1480
1452 if not bases:
1453 bases = [nullid]
1454 csets, bases, heads = cl.nodesbetween(bases, heads)
1455
1456 1481 # can we go through the fast path ?
1457 1482 heads.sort()
1458 1483 if heads == sorted(self.heads()):
1459 1484 return self._changegroup(csets, source)
1460 1485
1461 1486 # slow path
1462 1487 self.hook('preoutgoing', throw=True, source=source)
1463 1488 self.changegroupinfo(csets, source)
1464 1489
1465 # We assume that all ancestors of bases are known
1466 commonrevs = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1467
1468 1490 # A function-generating function that sets up the initial environment
1469 1491 # for the inner function.
1470 1492 def filenode_collector(changedfiles):
1471 1493 # This gathers information from each manifestnode included in the
1472 1494 # changegroup about which filenodes the manifest node references
1473 1495 # so we can include those in the changegroup too.
1474 1496 #
1475 1497 # It also remembers which changenode each filenode belongs to. It
1476 1498 # does this by assuming that a filenode belongs to the changenode
1477 1499 # that the first manifest referencing it belongs to.
1478 1500 def collect(mnode):
1479 1501 r = mf.rev(mnode)
1480 1502 clnode = mfs[mnode]
1481 1503 mdata = mf.readfast(mnode)
1482 1504 for f in changedfiles:
1483 1505 if f in mdata:
1484 1506 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1485 1507
1486 1508 return collect
1487 1509
1488 1510 # If we determine that a particular file or manifest node must be a
1489 1511 # node that the recipient of the changegroup will already have, we can
1490 1512 # also assume the recipient will have all the parents. This function
1491 1513 # prunes them from the set of missing nodes.
1492 1514 def prune(revlog, missingnodes):
1493 1515 # drop any nodes that claim to be part of a cset in commonrevs
1494 1516 drop = set()
1495 1517 for n in missingnodes:
1496 1518 if revlog.linkrev(revlog.rev(n)) in commonrevs:
1497 1519 drop.add(n)
1498 1520 for n in drop:
1499 1521 missingnodes.pop(n, None)
1500 1522
1501 1523 # Now that we have all these utility functions to help out and
1502 1524 # logically divide up the task, generate the group.
1503 1525 def gengroup():
1504 1526 # The set of changed files starts empty.
1505 1527 changedfiles = set()
1506 1528 collect = changegroup.collector(cl, mfs, changedfiles)
1507 1529
1508 1530 # Create a changenode group generator that will call our functions
1509 1531 # back to lookup the owning changenode and collect information.
1510 1532 group = cl.group(csets, lambda x: x, collect)
1511 1533 for count, chunk in enumerate(group):
1512 1534 yield chunk
1513 1535 # revlog.group yields three entries per node, so
1514 1536 # dividing by 3 gives an approximation of how many
1515 1537 # nodes have been processed.
1516 1538 self.ui.progress(_('bundling'), count / 3,
1517 1539 unit=_('changesets'))
1518 1540 changecount = count / 3
1519 1541 efiles = len(changedfiles)
1520 1542 self.ui.progress(_('bundling'), None)
1521 1543
1522 1544 prune(mf, mfs)
1523 1545 # Create a generator for the manifestnodes that calls our lookup
1524 1546 # and data collection functions back.
1525 1547 group = mf.group(sorted(mfs, key=mf.rev),
1526 1548 lambda mnode: mfs[mnode],
1527 1549 filenode_collector(changedfiles))
1528 1550 for count, chunk in enumerate(group):
1529 1551 yield chunk
1530 1552 # see above comment for why we divide by 3
1531 1553 self.ui.progress(_('bundling'), count / 3,
1532 1554 unit=_('manifests'), total=changecount)
1533 1555 self.ui.progress(_('bundling'), None)
1534 1556
1535 1557 mfs.clear()
1536 1558
1537 1559 # Go through all our files in order sorted by name.
1538 1560 for idx, fname in enumerate(sorted(changedfiles)):
1539 1561 filerevlog = self.file(fname)
1540 1562 if not len(filerevlog):
1541 1563 raise util.Abort(_("empty or missing revlog for %s") % fname)
1542 1564 # Toss out the filenodes that the recipient isn't really
1543 1565 # missing.
1544 1566 missingfnodes = fnodes.pop(fname, {})
1545 1567 prune(filerevlog, missingfnodes)
1546 1568 # If any filenodes are left, generate the group for them,
1547 1569 # otherwise don't bother.
1548 1570 if missingfnodes:
1549 1571 yield changegroup.chunkheader(len(fname))
1550 1572 yield fname
1551 1573 # Create a group generator and only pass in a changenode
1552 1574 # lookup function as we need to collect no information
1553 1575 # from filenodes.
1554 1576 group = filerevlog.group(
1555 1577 sorted(missingfnodes, key=filerevlog.rev),
1556 1578 lambda fnode: missingfnodes[fnode])
1557 1579 for chunk in group:
1558 1580 # even though we print the same progress on
1559 1581 # most loop iterations, put the progress call
1560 1582 # here so that time estimates (if any) can be updated
1561 1583 self.ui.progress(
1562 1584 _('bundling'), idx, item=fname,
1563 1585 unit=_('files'), total=efiles)
1564 1586 yield chunk
1565 1587 # Signal that no more groups are left.
1566 1588 yield changegroup.closechunk()
1567 1589 self.ui.progress(_('bundling'), None)
1568 1590
1569 1591 if csets:
1570 1592 self.hook('outgoing', node=hex(csets[0]), source=source)
1571 1593
1572 1594 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1573 1595
1574 1596 def changegroup(self, basenodes, source):
1575 1597 # to avoid a race we use changegroupsubset() (issue1320)
1576 1598 return self.changegroupsubset(basenodes, self.heads(), source)
1577 1599
1578 1600 def _changegroup(self, nodes, source):
1579 1601 """Compute the changegroup of all nodes that we have that a recipient
1580 1602 doesn't. Return a chunkbuffer object whose read() method will return
1581 1603 successive changegroup chunks.
1582 1604
1583 1605 This is much easier than the previous function as we can assume that
1584 1606 the recipient has any changenode we aren't sending them.
1585 1607
1586 1608 nodes is the set of nodes to send"""
1587 1609
1588 1610 self.hook('preoutgoing', throw=True, source=source)
1589 1611
1590 1612 cl = self.changelog
1591 1613 revset = set([cl.rev(n) for n in nodes])
1592 1614 self.changegroupinfo(nodes, source)
1593 1615
1594 1616 def gennodelst(log):
1595 1617 for r in log:
1596 1618 if log.linkrev(r) in revset:
1597 1619 yield log.node(r)
1598 1620
1599 1621 def lookuplinkrev_func(revlog):
1600 1622 def lookuplinkrev(n):
1601 1623 return cl.node(revlog.linkrev(revlog.rev(n)))
1602 1624 return lookuplinkrev
1603 1625
1604 1626 def gengroup():
1605 1627 '''yield a sequence of changegroup chunks (strings)'''
1606 1628 # construct a list of all changed files
1607 1629 changedfiles = set()
1608 1630 mmfs = {}
1609 1631 collect = changegroup.collector(cl, mmfs, changedfiles)
1610 1632
1611 1633 for count, chunk in enumerate(cl.group(nodes, lambda x: x, collect)):
1612 1634 # revlog.group yields three entries per node, so
1613 1635 # dividing by 3 gives an approximation of how many
1614 1636 # nodes have been processed.
1615 1637 self.ui.progress(_('bundling'), count / 3, unit=_('changesets'))
1616 1638 yield chunk
1617 1639 efiles = len(changedfiles)
1618 1640 changecount = count / 3
1619 1641 self.ui.progress(_('bundling'), None)
1620 1642
1621 1643 mnfst = self.manifest
1622 1644 nodeiter = gennodelst(mnfst)
1623 1645 for count, chunk in enumerate(mnfst.group(nodeiter,
1624 1646 lookuplinkrev_func(mnfst))):
1625 1647 # see above comment for why we divide by 3
1626 1648 self.ui.progress(_('bundling'), count / 3,
1627 1649 unit=_('manifests'), total=changecount)
1628 1650 yield chunk
1629 1651 self.ui.progress(_('bundling'), None)
1630 1652
1631 1653 for idx, fname in enumerate(sorted(changedfiles)):
1632 1654 filerevlog = self.file(fname)
1633 1655 if not len(filerevlog):
1634 1656 raise util.Abort(_("empty or missing revlog for %s") % fname)
1635 1657 nodeiter = gennodelst(filerevlog)
1636 1658 nodeiter = list(nodeiter)
1637 1659 if nodeiter:
1638 1660 yield changegroup.chunkheader(len(fname))
1639 1661 yield fname
1640 1662 lookup = lookuplinkrev_func(filerevlog)
1641 1663 for chunk in filerevlog.group(nodeiter, lookup):
1642 1664 self.ui.progress(
1643 1665 _('bundling'), idx, item=fname,
1644 1666 total=efiles, unit=_('files'))
1645 1667 yield chunk
1646 1668 self.ui.progress(_('bundling'), None)
1647 1669
1648 1670 yield changegroup.closechunk()
1649 1671
1650 1672 if nodes:
1651 1673 self.hook('outgoing', node=hex(nodes[0]), source=source)
1652 1674
1653 1675 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1654 1676
1655 1677 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1656 1678 """Add the changegroup returned by source.read() to this repo.
1657 1679 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1658 1680 the URL of the repo where this changegroup is coming from.
1659 1681 If lock is not None, the function takes ownership of the lock
1660 1682 and releases it after the changegroup is added.
1661 1683
1662 1684 Return an integer summarizing the change to this repo:
1663 1685 - nothing changed or no source: 0
1664 1686 - more heads than before: 1+added heads (2..n)
1665 1687 - fewer heads than before: -1-removed heads (-2..-n)
1666 1688 - number of heads stays the same: 1
1667 1689 """
1668 1690 def csmap(x):
1669 1691 self.ui.debug("add changeset %s\n" % short(x))
1670 1692 return len(cl)
1671 1693
1672 1694 def revmap(x):
1673 1695 return cl.rev(x)
1674 1696
1675 1697 if not source:
1676 1698 return 0
1677 1699
1678 1700 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1679 1701
1680 1702 changesets = files = revisions = 0
1681 1703 efiles = set()
1682 1704
1683 1705 # write changelog data to temp files so concurrent readers will not see
1684 1706 # an inconsistent view
1685 1707 cl = self.changelog
1686 1708 cl.delayupdate()
1687 1709 oldheads = len(cl.heads())
1688 1710
1689 1711 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1690 1712 try:
1691 1713 trp = weakref.proxy(tr)
1692 1714 # pull off the changeset group
1693 1715 self.ui.status(_("adding changesets\n"))
1694 1716 clstart = len(cl)
1695 1717 class prog(object):
1696 1718 step = _('changesets')
1697 1719 count = 1
1698 1720 ui = self.ui
1699 1721 total = None
1700 1722 def __call__(self):
1701 1723 self.ui.progress(self.step, self.count, unit=_('chunks'),
1702 1724 total=self.total)
1703 1725 self.count += 1
1704 1726 pr = prog()
1705 1727 source.callback = pr
1706 1728
1707 1729 if (cl.addgroup(source, csmap, trp) is None
1708 1730 and not emptyok):
1709 1731 raise util.Abort(_("received changelog group is empty"))
1710 1732 clend = len(cl)
1711 1733 changesets = clend - clstart
1712 1734 for c in xrange(clstart, clend):
1713 1735 efiles.update(self[c].files())
1714 1736 efiles = len(efiles)
1715 1737 self.ui.progress(_('changesets'), None)
1716 1738
1717 1739 # pull off the manifest group
1718 1740 self.ui.status(_("adding manifests\n"))
1719 1741 pr.step = _('manifests')
1720 1742 pr.count = 1
1721 1743 pr.total = changesets # manifests <= changesets
1722 1744 # no need to check for empty manifest group here:
1723 1745 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1724 1746 # no new manifest will be created and the manifest group will
1725 1747 # be empty during the pull
1726 1748 self.manifest.addgroup(source, revmap, trp)
1727 1749 self.ui.progress(_('manifests'), None)
1728 1750
1729 1751 needfiles = {}
1730 1752 if self.ui.configbool('server', 'validate', default=False):
1731 1753 # validate incoming csets have their manifests
1732 1754 for cset in xrange(clstart, clend):
1733 1755 mfest = self.changelog.read(self.changelog.node(cset))[0]
1734 1756 mfest = self.manifest.readdelta(mfest)
1735 1757 # store file nodes we must see
1736 1758 for f, n in mfest.iteritems():
1737 1759 needfiles.setdefault(f, set()).add(n)
1738 1760
1739 1761 # process the files
1740 1762 self.ui.status(_("adding file changes\n"))
1741 1763 pr.step = 'files'
1742 1764 pr.count = 1
1743 1765 pr.total = efiles
1744 1766 source.callback = None
1745 1767
1746 1768 while 1:
1747 1769 f = source.chunk()
1748 1770 if not f:
1749 1771 break
1750 1772 self.ui.debug("adding %s revisions\n" % f)
1751 1773 pr()
1752 1774 fl = self.file(f)
1753 1775 o = len(fl)
1754 1776 if fl.addgroup(source, revmap, trp) is None:
1755 1777 raise util.Abort(_("received file revlog group is empty"))
1756 1778 revisions += len(fl) - o
1757 1779 files += 1
1758 1780 if f in needfiles:
1759 1781 needs = needfiles[f]
1760 1782 for new in xrange(o, len(fl)):
1761 1783 n = fl.node(new)
1762 1784 if n in needs:
1763 1785 needs.remove(n)
1764 1786 if not needs:
1765 1787 del needfiles[f]
1766 1788 self.ui.progress(_('files'), None)
1767 1789
1768 1790 for f, needs in needfiles.iteritems():
1769 1791 fl = self.file(f)
1770 1792 for n in needs:
1771 1793 try:
1772 1794 fl.rev(n)
1773 1795 except error.LookupError:
1774 1796 raise util.Abort(
1775 1797 _('missing file data for %s:%s - run hg verify') %
1776 1798 (f, hex(n)))
1777 1799
1778 1800 newheads = len(cl.heads())
1779 1801 heads = ""
1780 1802 if oldheads and newheads != oldheads:
1781 1803 heads = _(" (%+d heads)") % (newheads - oldheads)
1782 1804
1783 1805 self.ui.status(_("added %d changesets"
1784 1806 " with %d changes to %d files%s\n")
1785 1807 % (changesets, revisions, files, heads))
1786 1808
1787 1809 if changesets > 0:
1788 1810 p = lambda: cl.writepending() and self.root or ""
1789 1811 self.hook('pretxnchangegroup', throw=True,
1790 1812 node=hex(cl.node(clstart)), source=srctype,
1791 1813 url=url, pending=p)
1792 1814
1793 1815 # make changelog see real files again
1794 1816 cl.finalize(trp)
1795 1817
1796 1818 tr.close()
1797 1819 finally:
1798 1820 tr.release()
1799 1821 if lock:
1800 1822 lock.release()
1801 1823
1802 1824 if changesets > 0:
1803 1825 # forcefully update the on-disk branch cache
1804 1826 self.ui.debug("updating the branch cache\n")
1805 1827 self.updatebranchcache()
1806 1828 self.hook("changegroup", node=hex(cl.node(clstart)),
1807 1829 source=srctype, url=url)
1808 1830
1809 1831 for i in xrange(clstart, clend):
1810 1832 self.hook("incoming", node=hex(cl.node(i)),
1811 1833 source=srctype, url=url)
1812 1834
1813 1835 # never return 0 here:
1814 1836 if newheads < oldheads:
1815 1837 return newheads - oldheads - 1
1816 1838 else:
1817 1839 return newheads - oldheads + 1
1818 1840
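# Hedged sketch of the head-count convention documented in the docstring of
# addchangegroup() above; it simply restates the "never return 0" rule at
# the end of the function.
def headcount_delta(oldheads, newheads):
    if newheads < oldheads:
        return newheads - oldheads - 1    # -2..-n: heads were removed
    return newheads - oldheads + 1        # 1..n: same number or more heads

assert headcount_delta(1, 1) == 1     # head count unchanged
assert headcount_delta(1, 3) == 3     # two heads added -> 1 + 2
assert headcount_delta(3, 1) == -3    # two heads removed -> -1 - 2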
1819 1841
1820 1842 def stream_in(self, remote, requirements):
1821 1843 lock = self.lock()
1822 1844 try:
1823 1845 fp = remote.stream_out()
1824 1846 l = fp.readline()
1825 1847 try:
1826 1848 resp = int(l)
1827 1849 except ValueError:
1828 1850 raise error.ResponseError(
1829 1851 _('Unexpected response from remote server:'), l)
1830 1852 if resp == 1:
1831 1853 raise util.Abort(_('operation forbidden by server'))
1832 1854 elif resp == 2:
1833 1855 raise util.Abort(_('locking the remote repository failed'))
1834 1856 elif resp != 0:
1835 1857 raise util.Abort(_('the server sent an unknown error code'))
1836 1858 self.ui.status(_('streaming all changes\n'))
1837 1859 l = fp.readline()
1838 1860 try:
1839 1861 total_files, total_bytes = map(int, l.split(' ', 1))
1840 1862 except (ValueError, TypeError):
1841 1863 raise error.ResponseError(
1842 1864 _('Unexpected response from remote server:'), l)
1843 1865 self.ui.status(_('%d files to transfer, %s of data\n') %
1844 1866 (total_files, util.bytecount(total_bytes)))
1845 1867 start = time.time()
1846 1868 for i in xrange(total_files):
1847 1869 # XXX doesn't support '\n' or '\r' in filenames
1848 1870 l = fp.readline()
1849 1871 try:
1850 1872 name, size = l.split('\0', 1)
1851 1873 size = int(size)
1852 1874 except (ValueError, TypeError):
1853 1875 raise error.ResponseError(
1854 1876 _('Unexpected response from remote server:'), l)
1855 1877 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1856 1878 # for backwards compat, name was partially encoded
1857 1879 ofp = self.sopener(store.decodedir(name), 'w')
1858 1880 for chunk in util.filechunkiter(fp, limit=size):
1859 1881 ofp.write(chunk)
1860 1882 ofp.close()
1861 1883 elapsed = time.time() - start
1862 1884 if elapsed <= 0:
1863 1885 elapsed = 0.001
1864 1886 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1865 1887 (util.bytecount(total_bytes), elapsed,
1866 1888 util.bytecount(total_bytes / elapsed)))
1867 1889
1868 1890 # new requirements = old non-format requirements + new format-related
1869 1891 # requirements from the streamed-in repository
1870 1892 requirements.update(set(self.requirements) - self.supportedformats)
1871 1893 self._applyrequirements(requirements)
1872 1894 self._writerequirements()
1873 1895
1874 1896 self.invalidate()
1875 1897 return len(self.heads()) + 1
1876 1898 finally:
1877 1899 lock.release()
1878 1900
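# Illustrative sketch (invented file name): after the status and
# "<files> <bytes>" lines, stream_in() above reads one "<name>\0<size>"
# header line per file followed by <size> raw bytes.  A minimal standalone
# parser for that header line:
def parse_filespec(line):
    name, size = line.split('\0', 1)
    return name, int(size)

assert parse_filespec('data/foo.i\x00123\n') == ('data/foo.i', 123)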
1879 1901 def clone(self, remote, heads=[], stream=False):
1880 1902 '''clone remote repository.
1881 1903
1882 1904 keyword arguments:
1883 1905 heads: list of revs to clone (forces use of pull)
1884 1906 stream: use streaming clone if possible'''
1885 1907
1886 1908 # now, all clients that can request uncompressed clones can
1887 1909 # read repo formats supported by all servers that can serve
1888 1910 # them.
1889 1911
1890 1912 # if revlog format changes, client will have to check version
1891 1913 # and format flags on "stream" capability, and use
1892 1914 # uncompressed only if compatible.
1893 1915
1894 1916 if stream and not heads:
1895 1917 # 'stream' means remote revlog format is revlogv1 only
1896 1918 if remote.capable('stream'):
1897 1919 return self.stream_in(remote, set(('revlogv1',)))
1898 1920 # otherwise, 'streamreqs' contains the remote revlog format
1899 1921 streamreqs = remote.capable('streamreqs')
1900 1922 if streamreqs:
1901 1923 streamreqs = set(streamreqs.split(','))
1902 1924 # if we support it, stream in and adjust our requirements
1903 1925 if not streamreqs - self.supportedformats:
1904 1926 return self.stream_in(remote, streamreqs)
1905 1927 return self.pull(remote, heads)
1906 1928
1907 1929 def pushkey(self, namespace, key, old, new):
1908 1930 return pushkey.push(self, namespace, key, old, new)
1909 1931
1910 1932 def listkeys(self, namespace):
1911 1933 return pushkey.list(self, namespace)
1912 1934
1913 1935 def debugwireargs(self, one, two, three=None, four=None):
1914 1936 '''used to test argument passing over the wire'''
1915 1937 return "%s %s %s %s" % (one, two, three, four)
1916 1938
1917 1939 # used to avoid circular references so destructors work
1918 1940 def aftertrans(files):
1919 1941 renamefiles = [tuple(t) for t in files]
1920 1942 def a():
1921 1943 for src, dest in renamefiles:
1922 1944 util.rename(src, dest)
1923 1945 return a
1924 1946
1925 1947 def instance(ui, path, create):
1926 1948 return localrepository(ui, util.drop_scheme('file', path), create)
1927 1949
1928 1950 def islocal(path):
1929 1951 return True
@@ -1,1263 +1,1282 b''
1 1 # revlog.py - storage back-end for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Storage back-end for Mercurial.
9 9
10 10 This provides efficient delta storage with O(1) retrieve and append
11 11 and O(changes) merge between branches.
12 12 """
13 13
14 14 # import stuff from node for others to import from revlog
15 15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
16 16 from i18n import _
17 17 import changegroup, ancestor, mdiff, parsers, error, util
18 18 import struct, zlib, errno
19 19
20 20 _pack = struct.pack
21 21 _unpack = struct.unpack
22 22 _compress = zlib.compress
23 23 _decompress = zlib.decompress
24 24 _sha = util.sha1
25 25
26 26 # revlog header flags
27 27 REVLOGV0 = 0
28 28 REVLOGNG = 1
29 29 REVLOGNGINLINEDATA = (1 << 16)
30 30 REVLOGSHALLOW = (1 << 17)
31 31 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
32 32 REVLOG_DEFAULT_FORMAT = REVLOGNG
33 33 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
34 34 REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGSHALLOW
35 35
36 36 # revlog index flags
37 37 REVIDX_PARENTDELTA = 1
38 38 REVIDX_PUNCHED_FLAG = 2
39 39 REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA
40 40
41 41 # max size of revlog with inline data
42 42 _maxinline = 131072
43 43 _chunksize = 1048576
44 44
45 45 RevlogError = error.RevlogError
46 46 LookupError = error.LookupError
47 47
48 48 def getoffset(q):
49 49 return int(q >> 16)
50 50
51 51 def gettype(q):
52 52 return int(q & 0xFFFF)
53 53
54 54 def offset_type(offset, type):
55 55 return long(long(offset) << 16 | type)
56 56
57 57 nullhash = _sha(nullid)
58 58
59 59 def hash(text, p1, p2):
60 60 """generate a hash from the given text and its parent hashes
61 61
62 62 This hash combines both the current file contents and its history
63 63 in a manner that makes it easy to distinguish nodes with the same
64 64 content in the revision graph.
65 65 """
66 66 # As of now, if one of the parent nodes is null, p2 is null
67 67 if p2 == nullid:
68 68 # deep copy of a hash is faster than creating one
69 69 s = nullhash.copy()
70 70 s.update(p1)
71 71 else:
72 72 # none of the parent nodes are nullid
73 73 l = [p1, p2]
74 74 l.sort()
75 75 s = _sha(l[0])
76 76 s.update(l[1])
77 77 s.update(text)
78 78 return s.digest()
79 79
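
A worked sketch of the hashing rule above (my addition, not the patch author's): because the all-zero null id sorts before every other node, the two branches collapse to "SHA-1 of the two parents in sorted order followed by the text".

    import hashlib

    NULLID = '\x00' * 20

    def nodeid(text, p1, p2=NULLID):
        # same ordering rule as hash() above: smaller parent first, then text
        a, b = sorted([p1, p2])
        s = hashlib.sha1(a)
        s.update(b)
        s.update(text)
        return s.digest()

    root = nodeid('contents of the first revision\n')  # both parents null
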
80 80 def compress(text):
81 81 """ generate a possibly-compressed representation of text """
82 82 if not text:
83 83 return ("", text)
84 84 l = len(text)
85 85 bin = None
86 86 if l < 44:
87 87 pass
88 88 elif l > 1000000:
89 89 # zlib makes an internal copy, thus doubling memory usage for
90 90 # large files, so let's do this in pieces
91 91 z = zlib.compressobj()
92 92 p = []
93 93 pos = 0
94 94 while pos < l:
95 95 pos2 = pos + 2**20
96 96 p.append(z.compress(text[pos:pos2]))
97 97 pos = pos2
98 98 p.append(z.flush())
99 99 if sum(map(len, p)) < l:
100 100 bin = "".join(p)
101 101 else:
102 102 bin = _compress(text)
103 103 if bin is None or len(bin) > l:
104 104 if text[0] == '\0':
105 105 return ("", text)
106 106 return ('u', text)
107 107 return ("", bin)
108 108
109 109 def decompress(bin):
110 110 """ decompress the given input """
111 111 if not bin:
112 112 return bin
113 113 t = bin[0]
114 114 if t == '\0':
115 115 return bin
116 116 if t == 'x':
117 117 return _decompress(bin)
118 118 if t == 'u':
119 119 return bin[1:]
120 120 raise RevlogError(_("unknown compression type %r") % t)
121 121
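
A hedged round-trip sketch of the tagging convention used by compress()/decompress() above: zlib output is stored as-is (its first byte is 'x'), a leading NUL passes through untouched, and a 'u' prefix marks text kept uncompressed because compression did not pay off.

    import zlib

    def encode(text):
        # simplified mirror of compress() above (sketch only)
        if not text:
            return text
        comp = zlib.compress(text)
        if len(comp) < len(text):
            return comp                  # begins with zlib's 'x' header byte
        if text[0] == '\0':
            return text                  # NUL-prefixed text is left alone
        return 'u' + text                # tagged as uncompressed

    def decode(stored):
        # mirror of decompress() above
        if not stored or stored[0] == '\0':
            return stored
        if stored[0] == 'x':
            return zlib.decompress(stored)
        if stored[0] == 'u':
            return stored[1:]
        raise ValueError('unknown compression type %r' % stored[0])

    assert decode(encode('a' * 1000)) == 'a' * 1000
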
122 122 indexformatv0 = ">4l20s20s20s"
123 123 v0shaoffset = 56
124 124
125 125 class revlogoldio(object):
126 126 def __init__(self):
127 127 self.size = struct.calcsize(indexformatv0)
128 128
129 129 def parseindex(self, data, inline):
130 130 s = self.size
131 131 index = []
132 132 nodemap = {nullid: nullrev}
133 133 n = off = 0
134 134 l = len(data)
135 135 while off + s <= l:
136 136 cur = data[off:off + s]
137 137 off += s
138 138 e = _unpack(indexformatv0, cur)
139 139 # transform to revlogv1 format
140 140 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
141 141 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
142 142 index.append(e2)
143 143 nodemap[e[6]] = n
144 144 n += 1
145 145
146 146 # add the magic null revision at -1
147 147 index.append((0, 0, 0, -1, -1, -1, -1, nullid))
148 148
149 149 return index, nodemap, None
150 150
151 151 def packentry(self, entry, node, version, rev):
152 152 if gettype(entry[0]):
153 153 raise RevlogError(_("index entry flags need RevlogNG"))
154 154 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
155 155 node(entry[5]), node(entry[6]), entry[7])
156 156 return _pack(indexformatv0, *e2)
157 157
158 158 # index ng:
159 159 # 6 bytes: offset
160 160 # 2 bytes: flags
161 161 # 4 bytes: compressed length
162 162 # 4 bytes: uncompressed length
163 163 # 4 bytes: base rev
164 164 # 4 bytes: link rev
165 165 # 4 bytes: parent 1 rev
166 166 # 4 bytes: parent 2 rev
167 167 # 32 bytes: nodeid
168 168 indexformatng = ">Qiiiiii20s12x"
169 169 ngshaoffset = 32
170 170 versionformat = ">I"
171 171
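
For illustration (values invented), one RevlogNG index record packs into exactly 64 bytes; the offset and the flags share the leading quad word, exactly as offset_type() builds it:

    import struct

    indexformatng = ">Qiiiiii20s12x"
    assert struct.calcsize(indexformatng) == 64    # fixed-size records

    offset, flags = 12345, 0
    entry = (offset << 16 | flags,   # offset in the high 48 bits, flags low 16
             120,                    # compressed length
             300,                    # uncompressed length
             0,                      # base revision of the delta chain
             7,                      # link revision
             5, -1,                  # parent revisions (-1 == nullrev)
             '\x12' * 20)            # node id
    assert len(struct.pack(indexformatng, *entry)) == 64
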
172 172 class revlogio(object):
173 173 def __init__(self):
174 174 self.size = struct.calcsize(indexformatng)
175 175
176 176 def parseindex(self, data, inline):
177 177 # call the C implementation to parse the index data
178 178 index, cache = parsers.parse_index2(data, inline)
179 179 return index, None, cache
180 180
181 181 def packentry(self, entry, node, version, rev):
182 182 p = _pack(indexformatng, *entry)
183 183 if rev == 0:
184 184 p = _pack(versionformat, version) + p[4:]
185 185 return p
186 186
187 187 class revlog(object):
188 188 """
189 189 the underlying revision storage object
190 190
191 191 A revlog consists of two parts, an index and the revision data.
192 192
193 193 The index is a file with a fixed record size containing
194 194 information on each revision, including its nodeid (hash), the
195 195 nodeids of its parents, the position and offset of its data within
196 196 the data file, and the revision it's based on. Finally, each entry
197 197 contains a linkrev entry that can serve as a pointer to external
198 198 data.
199 199
200 200 The revision data itself is a linear collection of data chunks.
201 201 Each chunk represents a revision and is usually represented as a
202 202 delta against the previous chunk. To bound lookup time, runs of
203 203 deltas are limited to about 2 times the length of the original
204 204 version data. This makes retrieval of a version proportional to
205 205 its size, or O(1) relative to the number of revisions.
206 206
207 207 Both pieces of the revlog are written to in an append-only
208 208 fashion, which means we never need to rewrite a file to insert or
209 209 remove data, and can use some simple techniques to avoid the need
210 210 for locking while reading.
211 211 """
212 212 def __init__(self, opener, indexfile, shallowroot=None):
213 213 """
214 214 create a revlog object
215 215
216 216 opener is a function that abstracts the file opening operation
217 217 and can be used to implement COW semantics or the like.
218 218 """
219 219 self.indexfile = indexfile
220 220 self.datafile = indexfile[:-2] + ".d"
221 221 self.opener = opener
222 222 self._cache = None
223 223 self._chunkcache = (0, '')
224 224 self.index = []
225 225 self._shallowroot = shallowroot
226 226 self._parentdelta = 0
227 227 self._pcache = {}
228 228 self._nodecache = {nullid: nullrev}
229 229 self._nodepos = None
230 230
231 231 v = REVLOG_DEFAULT_VERSION
232 232 if hasattr(opener, 'options') and 'defversion' in opener.options:
233 233 v = opener.options['defversion']
234 234 if v & REVLOGNG:
235 235 v |= REVLOGNGINLINEDATA
236 236 if v & REVLOGNG and 'parentdelta' in opener.options:
237 237 self._parentdelta = 1
238 238
239 239 if shallowroot:
240 240 v |= REVLOGSHALLOW
241 241
242 242 i = ''
243 243 try:
244 244 f = self.opener(self.indexfile)
245 245 i = f.read()
246 246 f.close()
247 247 if len(i) > 0:
248 248 v = struct.unpack(versionformat, i[:4])[0]
249 249 except IOError, inst:
250 250 if inst.errno != errno.ENOENT:
251 251 raise
252 252
253 253 self.version = v
254 254 self._inline = v & REVLOGNGINLINEDATA
255 255 self._shallow = v & REVLOGSHALLOW
256 256 flags = v & ~0xFFFF
257 257 fmt = v & 0xFFFF
258 258 if fmt == REVLOGV0 and flags:
259 259 raise RevlogError(_("index %s unknown flags %#04x for format v0")
260 260 % (self.indexfile, flags >> 16))
261 261 elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
262 262 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
263 263 % (self.indexfile, flags >> 16))
264 264 elif fmt > REVLOGNG:
265 265 raise RevlogError(_("index %s unknown format %d")
266 266 % (self.indexfile, fmt))
267 267
268 268 self._io = revlogio()
269 269 if self.version == REVLOGV0:
270 270 self._io = revlogoldio()
271 271 try:
272 272 d = self._io.parseindex(i, self._inline)
273 273 except (ValueError, IndexError):
274 274 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
275 275 self.index, nodemap, self._chunkcache = d
276 276 if nodemap is not None:
277 277 self.nodemap = self._nodecache = nodemap
278 278 if not self._chunkcache:
279 279 self._chunkclear()
280 280
281 281 def tip(self):
282 282 return self.node(len(self.index) - 2)
283 283 def __len__(self):
284 284 return len(self.index) - 1
285 285 def __iter__(self):
286 286 for i in xrange(len(self)):
287 287 yield i
288 288
289 289 @util.propertycache
290 290 def nodemap(self):
291 291 n = self.rev(self.node(0))
292 292 return self._nodecache
293 293
294 294 def rev(self, node):
295 295 try:
296 296 return self._nodecache[node]
297 297 except KeyError:
298 298 n = self._nodecache
299 299 i = self.index
300 300 p = self._nodepos
301 301 if p is None:
302 302 p = len(i) - 2
303 303 for r in xrange(p, -1, -1):
304 304 v = i[r][7]
305 305 n[v] = r
306 306 if v == node:
307 307 self._nodepos = r - 1
308 308 return r
309 309 raise LookupError(node, self.indexfile, _('no node'))
310 310
311 311 def node(self, rev):
312 312 return self.index[rev][7]
313 313 def linkrev(self, rev):
314 314 return self.index[rev][4]
315 315 def parents(self, node):
316 316 i = self.index
317 317 d = i[self.rev(node)]
318 318 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
319 319 def parentrevs(self, rev):
320 320 return self.index[rev][5:7]
321 321 def start(self, rev):
322 322 return int(self.index[rev][0] >> 16)
323 323 def end(self, rev):
324 324 return self.start(rev) + self.length(rev)
325 325 def length(self, rev):
326 326 return self.index[rev][1]
327 327 def base(self, rev):
328 328 return self.index[rev][3]
329 329 def flags(self, rev):
330 330 return self.index[rev][0] & 0xFFFF
331 331 def rawsize(self, rev):
332 332 """return the length of the uncompressed text for a given revision"""
333 333 l = self.index[rev][2]
334 334 if l >= 0:
335 335 return l
336 336
337 337 t = self.revision(self.node(rev))
338 338 return len(t)
339 339 size = rawsize
340 340
341 341 def reachable(self, node, stop=None):
342 342 """return the set of all nodes ancestral to a given node, including
343 343 the node itself, stopping when stop is matched"""
344 344 reachable = set((node,))
345 345 visit = [node]
346 346 if stop:
347 347 stopn = self.rev(stop)
348 348 else:
349 349 stopn = 0
350 350 while visit:
351 351 n = visit.pop(0)
352 352 if n == stop:
353 353 continue
354 354 if n == nullid:
355 355 continue
356 356 for p in self.parents(n):
357 357 if self.rev(p) < stopn:
358 358 continue
359 359 if p not in reachable:
360 360 reachable.add(p)
361 361 visit.append(p)
362 362 return reachable
363 363
364 364 def ancestors(self, *revs):
365 365 """Generate the ancestors of 'revs' in reverse topological order.
366 366
367 367 Yield a sequence of revision numbers starting with the parents
368 368 of each revision in revs, i.e., each revision is *not* considered
369 369 an ancestor of itself. Results are in breadth-first order:
370 370 parents of each rev in revs, then parents of those, etc. Result
371 371 does not include the null revision."""
372 372 visit = list(revs)
373 373 seen = set([nullrev])
374 374 while visit:
375 375 for parent in self.parentrevs(visit.pop(0)):
376 376 if parent not in seen:
377 377 visit.append(parent)
378 378 seen.add(parent)
379 379 yield parent
380 380
381 381 def descendants(self, *revs):
382 382 """Generate the descendants of 'revs' in revision order.
383 383
384 384 Yield a sequence of revision numbers starting with a child of
385 385 some rev in revs, i.e., each revision is *not* considered a
386 386 descendant of itself. Results are ordered by revision number (a
387 387 topological sort)."""
388 388 first = min(revs)
389 389 if first == nullrev:
390 390 for i in self:
391 391 yield i
392 392 return
393 393
394 394 seen = set(revs)
395 395 for i in xrange(first + 1, len(self)):
396 396 for x in self.parentrevs(i):
397 397 if x != nullrev and x in seen:
398 398 seen.add(i)
399 399 yield i
400 400 break
401 401
402 def findmissing(self, common=None, heads=None):
403 """Return the ancestors of heads that are not ancestors of common.
402 def findcommonmissing(self, common=None, heads=None):
403 """Return a tuple of the ancestors of common and the ancestors of heads
404 that are not ancestors of common.
404 405
405 More specifically, return a list of nodes N such that every N
406 satisfies the following constraints:
406 More specifically, the second element is a list of nodes N such that
407 every N satisfies the following constraints:
407 408
408 409 1. N is an ancestor of some node in 'heads'
409 410 2. N is not an ancestor of any node in 'common'
410 411
411 412 The list is sorted by revision number, meaning it is
412 413 topologically sorted.
413 414
414 415 'heads' and 'common' are both lists of node IDs. If heads is
415 416 not supplied, uses all of the revlog's heads. If common is not
416 417 supplied, uses nullid."""
417 418 if common is None:
418 419 common = [nullid]
419 420 if heads is None:
420 421 heads = self.heads()
421 422
422 423 common = [self.rev(n) for n in common]
423 424 heads = [self.rev(n) for n in heads]
424 425
425 426 # we want the ancestors, but inclusive
426 427 has = set(self.ancestors(*common))
427 428 has.add(nullrev)
428 429 has.update(common)
429 430
430 431 # take all ancestors from heads that aren't in has
431 432 missing = set()
432 433 visit = [r for r in heads if r not in has]
433 434 while visit:
434 435 r = visit.pop(0)
435 436 if r in missing:
436 437 continue
437 438 else:
438 439 missing.add(r)
439 440 for p in self.parentrevs(r):
440 441 if p not in has:
441 442 visit.append(p)
442 443 missing = list(missing)
443 444 missing.sort()
444 return [self.node(r) for r in missing]
445 return has, [self.node(r) for r in missing]
446
447 def findmissing(self, common=None, heads=None):
448 """Return the ancestors of heads that are not ancestors of common.
449
450 More specifically, return a list of nodes N such that every N
451 satisfies the following constraints:
452
453 1. N is an ancestor of some node in 'heads'
454 2. N is not an ancestor of any node in 'common'
455
456 The list is sorted by revision number, meaning it is
457 topologically sorted.
458
459 'heads' and 'common' are both lists of node IDs. If heads is
460 not supplied, uses all of the revlog's heads. If common is not
461 supplied, uses nullid."""
462 _common, missing = self.findcommonmissing(common, heads)
463 return missing
445 464
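
A hedged usage sketch of the split introduced above; 'rlog', 'remoteheads' and 'localheads' are placeholders for a revlog instance and two node lists:

    # findcommonmissing() returns both halves of the computation, so callers
    # that also need the 'has' set avoid a second ancestor walk.
    def outgoing(rlog, remoteheads, localheads):
        has, missing = rlog.findcommonmissing(common=remoteheads,
                                              heads=localheads)
        # 'has': revision numbers ancestral to (and including) common
        # 'missing': node ids to ship, already topologically sorted
        return missing

    # findmissing() keeps its old contract and simply discards the first half.
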
446 465 def nodesbetween(self, roots=None, heads=None):
447 466 """Return a topological path from 'roots' to 'heads'.
448 467
449 468 Return a tuple (nodes, outroots, outheads) where 'nodes' is a
450 469 topologically sorted list of all nodes N that satisfy both of
451 470 these constraints:
452 471
453 472 1. N is a descendant of some node in 'roots'
454 473 2. N is an ancestor of some node in 'heads'
455 474
456 475 Every node is considered to be both a descendant and an ancestor
457 476 of itself, so every reachable node in 'roots' and 'heads' will be
458 477 included in 'nodes'.
459 478
460 479 'outroots' is the list of reachable nodes in 'roots', i.e., the
461 480 subset of 'roots' that is returned in 'nodes'. Likewise,
462 481 'outheads' is the subset of 'heads' that is also in 'nodes'.
463 482
464 483 'roots' and 'heads' are both lists of node IDs. If 'roots' is
465 484 unspecified, uses nullid as the only root. If 'heads' is
466 485 unspecified, uses list of all of the revlog's heads."""
467 486 nonodes = ([], [], [])
468 487 if roots is not None:
469 488 roots = list(roots)
470 489 if not roots:
471 490 return nonodes
472 491 lowestrev = min([self.rev(n) for n in roots])
473 492 else:
474 493 roots = [nullid] # Everybody's a descendent of nullid
475 494 lowestrev = nullrev
476 495 if (lowestrev == nullrev) and (heads is None):
477 496 # We want _all_ the nodes!
478 497 return ([self.node(r) for r in self], [nullid], list(self.heads()))
479 498 if heads is None:
480 499 # All nodes are ancestors, so the latest ancestor is the last
481 500 # node.
482 501 highestrev = len(self) - 1
483 502 # Set ancestors to None to signal that every node is an ancestor.
484 503 ancestors = None
485 504 # Set heads to an empty dictionary for later discovery of heads
486 505 heads = {}
487 506 else:
488 507 heads = list(heads)
489 508 if not heads:
490 509 return nonodes
491 510 ancestors = set()
492 511 # Turn heads into a dictionary so we can remove 'fake' heads.
493 512 # Also, later we will be using it to filter out the heads we can't
494 513 # find from roots.
495 514 heads = dict.fromkeys(heads, 0)
496 515 # Start at the top and keep marking parents until we're done.
497 516 nodestotag = set(heads)
498 517 # Remember where the top was so we can use it as a limit later.
499 518 highestrev = max([self.rev(n) for n in nodestotag])
500 519 while nodestotag:
501 520 # grab a node to tag
502 521 n = nodestotag.pop()
503 522 # Never tag nullid
504 523 if n == nullid:
505 524 continue
506 525 # A node's revision number represents its place in a
507 526 # topologically sorted list of nodes.
508 527 r = self.rev(n)
509 528 if r >= lowestrev:
510 529 if n not in ancestors:
511 530 # If we are possibly a descendent of one of the roots
512 531 # and we haven't already been marked as an ancestor
513 532 ancestors.add(n) # Mark as ancestor
514 533 # Add non-nullid parents to list of nodes to tag.
515 534 nodestotag.update([p for p in self.parents(n) if
516 535 p != nullid])
517 536 elif n in heads: # We've seen it before, is it a fake head?
518 537 # So it is, real heads should not be the ancestors of
519 538 # any other heads.
520 539 heads.pop(n)
521 540 if not ancestors:
522 541 return nonodes
523 542 # Now that we have our set of ancestors, we want to remove any
524 543 # roots that are not ancestors.
525 544
526 545 # If one of the roots was nullid, everything is included anyway.
527 546 if lowestrev > nullrev:
528 547 # But, since we weren't, let's recompute the lowest rev to not
529 548 # include roots that aren't ancestors.
530 549
531 550 # Filter out roots that aren't ancestors of heads
532 551 roots = [n for n in roots if n in ancestors]
533 552 # Recompute the lowest revision
534 553 if roots:
535 554 lowestrev = min([self.rev(n) for n in roots])
536 555 else:
537 556 # No more roots? Return empty list
538 557 return nonodes
539 558 else:
540 559 # We are descending from nullid, and don't need to care about
541 560 # any other roots.
542 561 lowestrev = nullrev
543 562 roots = [nullid]
544 563 # Transform our roots list into a set.
545 564 descendents = set(roots)
546 565 # Also, keep the original roots so we can filter out roots that aren't
547 566 # 'real' roots (i.e. are descended from other roots).
548 567 roots = descendents.copy()
549 568 # Our topologically sorted list of output nodes.
550 569 orderedout = []
551 570 # Don't start at nullid since we don't want nullid in our output list,
552 571 # and if nullid shows up in descendents, empty parents will look like
553 572 # they're descendents.
554 573 for r in xrange(max(lowestrev, 0), highestrev + 1):
555 574 n = self.node(r)
556 575 isdescendent = False
557 576 if lowestrev == nullrev: # Everybody is a descendent of nullid
558 577 isdescendent = True
559 578 elif n in descendents:
560 579 # n is already a descendent
561 580 isdescendent = True
562 581 # This check only needs to be done here because all the roots
563 582 # will start being marked as descendents before the loop.
564 583 if n in roots:
565 584 # If n was a root, check if it's a 'real' root.
566 585 p = tuple(self.parents(n))
567 586 # If any of its parents are descendents, it's not a root.
568 587 if (p[0] in descendents) or (p[1] in descendents):
569 588 roots.remove(n)
570 589 else:
571 590 p = tuple(self.parents(n))
572 591 # A node is a descendent if either of its parents are
573 592 # descendents. (We seeded the descendents set with the roots
574 593 # up there, remember?)
575 594 if (p[0] in descendents) or (p[1] in descendents):
576 595 descendents.add(n)
577 596 isdescendent = True
578 597 if isdescendent and ((ancestors is None) or (n in ancestors)):
579 598 # Only include nodes that are both descendents and ancestors.
580 599 orderedout.append(n)
581 600 if (ancestors is not None) and (n in heads):
582 601 # We're trying to figure out which heads are reachable
583 602 # from roots.
584 603 # Mark this head as having been reached
585 604 heads[n] = 1
586 605 elif ancestors is None:
587 606 # Otherwise, we're trying to discover the heads.
588 607 # Assume this is a head because if it isn't, the next step
589 608 # will eventually remove it.
590 609 heads[n] = 1
591 610 # But, obviously its parents aren't.
592 611 for p in self.parents(n):
593 612 heads.pop(p, None)
594 613 heads = [n for n in heads.iterkeys() if heads[n] != 0]
595 614 roots = list(roots)
596 615 assert orderedout
597 616 assert roots
598 617 assert heads
599 618 return (orderedout, roots, heads)
600 619
601 620 def heads(self, start=None, stop=None):
602 621 """return the list of all nodes that have no children
603 622
604 623 if start is specified, only heads that are descendants of
605 624 start will be returned
606 625 if stop is specified, it will consider all the revs from stop
607 626 as if they had no children
608 627 """
609 628 if start is None and stop is None:
610 629 count = len(self)
611 630 if not count:
612 631 return [nullid]
613 632 ishead = [1] * (count + 1)
614 633 index = self.index
615 634 for r in xrange(count):
616 635 e = index[r]
617 636 ishead[e[5]] = ishead[e[6]] = 0
618 637 return [self.node(r) for r in xrange(count) if ishead[r]]
619 638
620 639 if start is None:
621 640 start = nullid
622 641 if stop is None:
623 642 stop = []
624 643 stoprevs = set([self.rev(n) for n in stop])
625 644 startrev = self.rev(start)
626 645 reachable = set((startrev,))
627 646 heads = set((startrev,))
628 647
629 648 parentrevs = self.parentrevs
630 649 for r in xrange(startrev + 1, len(self)):
631 650 for p in parentrevs(r):
632 651 if p in reachable:
633 652 if r not in stoprevs:
634 653 reachable.add(r)
635 654 heads.add(r)
636 655 if p in heads and p not in stoprevs:
637 656 heads.remove(p)
638 657
639 658 return [self.node(r) for r in heads]
640 659
641 660 def children(self, node):
642 661 """find the children of a given node"""
643 662 c = []
644 663 p = self.rev(node)
645 664 for r in range(p + 1, len(self)):
646 665 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
647 666 if prevs:
648 667 for pr in prevs:
649 668 if pr == p:
650 669 c.append(self.node(r))
651 670 elif p == nullrev:
652 671 c.append(self.node(r))
653 672 return c
654 673
655 674 def descendant(self, start, end):
656 675 if start == nullrev:
657 676 return True
658 677 for i in self.descendants(start):
659 678 if i == end:
660 679 return True
661 680 elif i > end:
662 681 break
663 682 return False
664 683
665 684 def ancestor(self, a, b):
666 685 """calculate the least common ancestor of nodes a and b"""
667 686
668 687 # fast path, check if it is a descendant
669 688 a, b = self.rev(a), self.rev(b)
670 689 start, end = sorted((a, b))
671 690 if self.descendant(start, end):
672 691 return self.node(start)
673 692
674 693 def parents(rev):
675 694 return [p for p in self.parentrevs(rev) if p != nullrev]
676 695
677 696 c = ancestor.ancestor(a, b, parents)
678 697 if c is None:
679 698 return nullid
680 699
681 700 return self.node(c)
682 701
683 702 def _match(self, id):
684 703 if isinstance(id, (long, int)):
685 704 # rev
686 705 return self.node(id)
687 706 if len(id) == 20:
688 707 # possibly a binary node
689 708 # odds of a binary node being all hex in ASCII are 1 in 10**25
690 709 try:
691 710 node = id
692 711 self.rev(node) # quick search the index
693 712 return node
694 713 except LookupError:
695 714 pass # may be partial hex id
696 715 try:
697 716 # str(rev)
698 717 rev = int(id)
699 718 if str(rev) != id:
700 719 raise ValueError
701 720 if rev < 0:
702 721 rev = len(self) + rev
703 722 if rev < 0 or rev >= len(self):
704 723 raise ValueError
705 724 return self.node(rev)
706 725 except (ValueError, OverflowError):
707 726 pass
708 727 if len(id) == 40:
709 728 try:
710 729 # a full hex nodeid?
711 730 node = bin(id)
712 731 self.rev(node)
713 732 return node
714 733 except (TypeError, LookupError):
715 734 pass
716 735
717 736 def _partialmatch(self, id):
718 737 if id in self._pcache:
719 738 return self._pcache[id]
720 739
721 740 if len(id) < 40:
722 741 try:
723 742 # hex(node)[:...]
724 743 l = len(id) // 2 # grab an even number of digits
725 744 prefix = bin(id[:l * 2])
726 745 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
727 746 nl = [n for n in nl if hex(n).startswith(id)]
728 747 if len(nl) > 0:
729 748 if len(nl) == 1:
730 749 self._pcache[id] = nl[0]
731 750 return nl[0]
732 751 raise LookupError(id, self.indexfile,
733 752 _('ambiguous identifier'))
734 753 return None
735 754 except TypeError:
736 755 pass
737 756
738 757 def lookup(self, id):
739 758 """locate a node based on:
740 759 - revision number or str(revision number)
741 760 - nodeid or subset of hex nodeid
742 761 """
743 762 n = self._match(id)
744 763 if n is not None:
745 764 return n
746 765 n = self._partialmatch(id)
747 766 if n:
748 767 return n
749 768
750 769 raise LookupError(id, self.indexfile, _('no match found'))
751 770
752 771 def cmp(self, node, text):
753 772 """compare text with a given file revision
754 773
755 774 returns True if text is different than what is stored.
756 775 """
757 776 p1, p2 = self.parents(node)
758 777 return hash(text, p1, p2) != node
759 778
760 779 def _addchunk(self, offset, data):
761 780 o, d = self._chunkcache
762 781 # try to add to existing cache
763 782 if o + len(d) == offset and len(d) + len(data) < _chunksize:
764 783 self._chunkcache = o, d + data
765 784 else:
766 785 self._chunkcache = offset, data
767 786
768 787 def _loadchunk(self, offset, length):
769 788 if self._inline:
770 789 df = self.opener(self.indexfile)
771 790 else:
772 791 df = self.opener(self.datafile)
773 792
774 793 readahead = max(65536, length)
775 794 df.seek(offset)
776 795 d = df.read(readahead)
777 796 self._addchunk(offset, d)
778 797 if readahead > length:
779 798 return d[:length]
780 799 return d
781 800
782 801 def _getchunk(self, offset, length):
783 802 o, d = self._chunkcache
784 803 l = len(d)
785 804
786 805 # is it in the cache?
787 806 cachestart = offset - o
788 807 cacheend = cachestart + length
789 808 if cachestart >= 0 and cacheend <= l:
790 809 if cachestart == 0 and cacheend == l:
791 810 return d # avoid a copy
792 811 return d[cachestart:cacheend]
793 812
794 813 return self._loadchunk(offset, length)
795 814
796 815 def _chunkraw(self, startrev, endrev):
797 816 start = self.start(startrev)
798 817 length = self.end(endrev) - start
799 818 if self._inline:
800 819 start += (startrev + 1) * self._io.size
801 820 return self._getchunk(start, length)
802 821
803 822 def _chunk(self, rev):
804 823 return decompress(self._chunkraw(rev, rev))
805 824
806 825 def _chunkclear(self):
807 826 self._chunkcache = (0, '')
808 827
809 828 def deltaparent(self, rev):
810 829 """return previous revision or parentrev according to flags"""
811 830 if self.flags(rev) & REVIDX_PARENTDELTA:
812 831 return self.parentrevs(rev)[0]
813 832 else:
814 833 return rev - 1
815 834
816 835 def revdiff(self, rev1, rev2):
817 836 """return or calculate a delta between two revisions"""
818 837 if self.base(rev2) != rev2 and self.deltaparent(rev2) == rev1:
819 838 return self._chunk(rev2)
820 839
821 840 return mdiff.textdiff(self.revision(self.node(rev1)),
822 841 self.revision(self.node(rev2)))
823 842
824 843 def revision(self, node):
825 844 """return an uncompressed revision of a given node"""
826 845 cachedrev = None
827 846 if node == nullid:
828 847 return ""
829 848 if self._cache:
830 849 if self._cache[0] == node:
831 850 return self._cache[2]
832 851 cachedrev = self._cache[1]
833 852
834 853 # look up what we need to read
835 854 text = None
836 855 rev = self.rev(node)
837 856 base = self.base(rev)
838 857
839 858 # check rev flags
840 859 if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
841 860 raise RevlogError(_('incompatible revision flag %x') %
842 861 (self.flags(rev) & ~REVIDX_KNOWN_FLAGS))
843 862
844 863 # build delta chain
845 864 chain = []
846 865 index = self.index # for performance
847 866 iterrev = rev
848 867 e = index[iterrev]
849 868 while iterrev != base and iterrev != cachedrev:
850 869 chain.append(iterrev)
851 870 if e[0] & REVIDX_PARENTDELTA:
852 871 iterrev = e[5]
853 872 else:
854 873 iterrev -= 1
855 874 e = index[iterrev]
856 875 chain.reverse()
857 876 base = iterrev
858 877
859 878 if iterrev == cachedrev:
860 879 # cache hit
861 880 text = self._cache[2]
862 881
863 882 # drop cache to save memory
864 883 self._cache = None
865 884
866 885 self._chunkraw(base, rev)
867 886 if text is None:
868 887 text = self._chunk(base)
869 888
870 889 bins = [self._chunk(r) for r in chain]
871 890 text = mdiff.patches(text, bins)
872 891
873 892 text = self._checkhash(text, node, rev)
874 893
875 894 self._cache = (node, rev, text)
876 895 return text
877 896
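
A hedged helper (not in the patch) that lists the delta chain revision() walks for a given revision, oldest first; it mirrors the loop above but ignores the in-memory cache:

    def deltachain(rlog, rev):
        # follow REVIDX_PARENTDELTA / previous-revision links back to the base
        chain = []
        base = rlog.base(rev)
        iterrev = rev
        while iterrev != base:
            chain.append(iterrev)
            iterrev = rlog.deltaparent(iterrev)
        chain.append(base)
        chain.reverse()
        return chain   # revision() applies these deltas in this order
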
878 897 def _checkhash(self, text, node, rev):
879 898 p1, p2 = self.parents(node)
880 899 if (node != hash(text, p1, p2) and
881 900 not (self.flags(rev) & REVIDX_PUNCHED_FLAG)):
882 901 raise RevlogError(_("integrity check failed on %s:%d")
883 902 % (self.indexfile, rev))
884 903 return text
885 904
886 905 def checkinlinesize(self, tr, fp=None):
887 906 if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
888 907 return
889 908
890 909 trinfo = tr.find(self.indexfile)
891 910 if trinfo is None:
892 911 raise RevlogError(_("%s not found in the transaction")
893 912 % self.indexfile)
894 913
895 914 trindex = trinfo[2]
896 915 dataoff = self.start(trindex)
897 916
898 917 tr.add(self.datafile, dataoff)
899 918
900 919 if fp:
901 920 fp.flush()
902 921 fp.close()
903 922
904 923 df = self.opener(self.datafile, 'w')
905 924 try:
906 925 for r in self:
907 926 df.write(self._chunkraw(r, r))
908 927 finally:
909 928 df.close()
910 929
911 930 fp = self.opener(self.indexfile, 'w', atomictemp=True)
912 931 self.version &= ~(REVLOGNGINLINEDATA)
913 932 self._inline = False
914 933 for i in self:
915 934 e = self._io.packentry(self.index[i], self.node, self.version, i)
916 935 fp.write(e)
917 936
918 937 # if we don't call rename, the temp file will never replace the
919 938 # real index
920 939 fp.rename()
921 940
922 941 tr.replace(self.indexfile, trindex * self._io.size)
923 942 self._chunkclear()
924 943
925 944 def addrevision(self, text, transaction, link, p1, p2, cachedelta=None):
926 945 """add a revision to the log
927 946
928 947 text - the revision data to add
929 948 transaction - the transaction object used for rollback
930 949 link - the linkrev data to add
931 950 p1, p2 - the parent nodeids of the revision
932 951 cachedelta - an optional precomputed delta
933 952 """
934 953 node = hash(text, p1, p2)
935 954 if (node in self.nodemap and
936 955 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
937 956 return node
938 957
939 958 dfh = None
940 959 if not self._inline:
941 960 dfh = self.opener(self.datafile, "a")
942 961 ifh = self.opener(self.indexfile, "a+")
943 962 try:
944 963 return self._addrevision(node, text, transaction, link, p1, p2,
945 964 cachedelta, ifh, dfh)
946 965 finally:
947 966 if dfh:
948 967 dfh.close()
949 968 ifh.close()
950 969
951 970 def _addrevision(self, node, text, transaction, link, p1, p2,
952 971 cachedelta, ifh, dfh):
953 972
954 973 btext = [text]
955 974 def buildtext():
956 975 if btext[0] is not None:
957 976 return btext[0]
958 977 # flush any pending writes here so we can read it in revision
959 978 if dfh:
960 979 dfh.flush()
961 980 ifh.flush()
962 981 basetext = self.revision(self.node(cachedelta[0]))
963 982 btext[0] = mdiff.patch(basetext, cachedelta[1])
964 983 chk = hash(btext[0], p1, p2)
965 984 if chk != node:
966 985 raise RevlogError(_("consistency error in delta"))
967 986 return btext[0]
968 987
969 988 def builddelta(rev):
970 989 # can we use the cached delta?
971 990 if cachedelta and cachedelta[0] == rev:
972 991 delta = cachedelta[1]
973 992 else:
974 993 t = buildtext()
975 994 ptext = self.revision(self.node(rev))
976 995 delta = mdiff.textdiff(ptext, t)
977 996 data = compress(delta)
978 997 l = len(data[1]) + len(data[0])
979 998 base = self.base(rev)
980 999 dist = l + offset - self.start(base)
981 1000 return dist, l, data, base
982 1001
983 1002 curr = len(self)
984 1003 prev = curr - 1
985 1004 base = curr
986 1005 offset = self.end(prev)
987 1006 flags = 0
988 1007 d = None
989 1008 p1r, p2r = self.rev(p1), self.rev(p2)
990 1009
991 1010 # should we try to build a delta?
992 1011 if prev != nullrev:
993 1012 d = builddelta(prev)
994 1013 if self._parentdelta and prev != p1r:
995 1014 d2 = builddelta(p1r)
996 1015 if d2 < d:
997 1016 d = d2
998 1017 flags = REVIDX_PARENTDELTA
999 1018 dist, l, data, base = d
1000 1019
1001 1020 # full versions are inserted when the needed deltas
1002 1021 # become comparable to the uncompressed text
1003 1022 # or the base revision is punched
1004 1023 if text is None:
1005 1024 textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]),
1006 1025 cachedelta[1])
1007 1026 else:
1008 1027 textlen = len(text)
1009 1028 if (d is None or dist > textlen * 2 or
1010 1029 (self.flags(base) & REVIDX_PUNCHED_FLAG)):
1011 1030 text = buildtext()
1012 1031 data = compress(text)
1013 1032 l = len(data[1]) + len(data[0])
1014 1033 base = curr
1015 1034
1016 1035 e = (offset_type(offset, flags), l, textlen,
1017 1036 base, link, p1r, p2r, node)
1018 1037 self.index.insert(-1, e)
1019 1038 self.nodemap[node] = curr
1020 1039
1021 1040 entry = self._io.packentry(e, self.node, self.version, curr)
1022 1041 if not self._inline:
1023 1042 transaction.add(self.datafile, offset)
1024 1043 transaction.add(self.indexfile, curr * len(entry))
1025 1044 if data[0]:
1026 1045 dfh.write(data[0])
1027 1046 dfh.write(data[1])
1028 1047 dfh.flush()
1029 1048 ifh.write(entry)
1030 1049 else:
1031 1050 offset += curr * self._io.size
1032 1051 transaction.add(self.indexfile, offset, curr)
1033 1052 ifh.write(entry)
1034 1053 ifh.write(data[0])
1035 1054 ifh.write(data[1])
1036 1055 self.checkinlinesize(transaction, ifh)
1037 1056
1038 1057 if type(text) == str: # only accept immutable objects
1039 1058 self._cache = (node, curr, text)
1040 1059 return node
1041 1060
1042 1061 def group(self, nodelist, lookup, infocollect=None, fullrev=False):
1043 1062 """Calculate a delta group, yielding a sequence of changegroup chunks
1044 1063 (strings).
1045 1064
1046 1065 Given a list of changeset revs, return a set of deltas and
1047 1066 metadata corresponding to nodes. The first delta is
1048 1067 first parent(nodelist[0]) -> nodelist[0], the receiver is
1049 1068 guaranteed to have this parent as it has all history before
1050 1069 these changesets. In the case firstparent is nullrev the
1051 1070 changegroup starts with a full revision.
1052 1071 fullrev forces the insertion of the full revision, necessary
1053 1072 in the case of shallow clones where the first parent might
1054 1073 not exist at the receiver.
1055 1074 """
1056 1075
1057 1076 revs = [self.rev(n) for n in nodelist]
1058 1077
1059 1078 # if we don't have any revisions touched by these changesets, bail
1060 1079 if not revs:
1061 1080 yield changegroup.closechunk()
1062 1081 return
1063 1082
1064 1083 # add the parent of the first rev
1065 1084 p = self.parentrevs(revs[0])[0]
1066 1085 revs.insert(0, p)
1067 1086 if p == nullrev:
1068 1087 fullrev = True
1069 1088
1070 1089 # build deltas
1071 1090 for d in xrange(len(revs) - 1):
1072 1091 a, b = revs[d], revs[d + 1]
1073 1092 nb = self.node(b)
1074 1093
1075 1094 if infocollect is not None:
1076 1095 infocollect(nb)
1077 1096
1078 1097 p = self.parents(nb)
1079 1098 meta = nb + p[0] + p[1] + lookup(nb)
1080 1099 if fullrev:
1081 1100 d = self.revision(nb)
1082 1101 meta += mdiff.trivialdiffheader(len(d))
1083 1102 fullrev = False
1084 1103 else:
1085 1104 d = self.revdiff(a, b)
1086 1105 yield changegroup.chunkheader(len(meta) + len(d))
1087 1106 yield meta
1088 1107 yield d
1089 1108
1090 1109 yield changegroup.closechunk()
1091 1110
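
A hedged sketch of the chunk framing consumed on the receiving side, assuming the classic changegroup encoding in which chunkheader() emits a 4-byte big-endian length that counts itself and closechunk() emits a zero length:

    import struct

    def readchunks(fp):
        # yield chunk payloads until the zero-length terminator
        while True:
            header = fp.read(4)
            length = struct.unpack(">l", header)[0]
            if length <= 4:
                break                 # closechunk() marks the end of the group
            yield fp.read(length - 4)
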
1092 1111 def addgroup(self, bundle, linkmapper, transaction):
1093 1112 """
1094 1113 add a delta group
1095 1114
1096 1115 given a set of deltas, add them to the revision log. The
1097 1116 first delta is against its parent, which should be in our
1098 1117 log; the rest are against the previous delta.
1099 1118 """
1100 1119
1101 1120 # track the base of the current delta log
1102 1121 node = None
1103 1122
1104 1123 r = len(self)
1105 1124 end = 0
1106 1125 if r:
1107 1126 end = self.end(r - 1)
1108 1127 ifh = self.opener(self.indexfile, "a+")
1109 1128 isize = r * self._io.size
1110 1129 if self._inline:
1111 1130 transaction.add(self.indexfile, end + isize, r)
1112 1131 dfh = None
1113 1132 else:
1114 1133 transaction.add(self.indexfile, isize, r)
1115 1134 transaction.add(self.datafile, end)
1116 1135 dfh = self.opener(self.datafile, "a")
1117 1136
1118 1137 try:
1119 1138 # loop through our set of deltas
1120 1139 chain = None
1121 1140 while 1:
1122 1141 chunkdata = bundle.parsechunk()
1123 1142 if not chunkdata:
1124 1143 break
1125 1144 node = chunkdata['node']
1126 1145 p1 = chunkdata['p1']
1127 1146 p2 = chunkdata['p2']
1128 1147 cs = chunkdata['cs']
1129 1148 delta = chunkdata['data']
1130 1149
1131 1150 link = linkmapper(cs)
1132 1151 if (node in self.nodemap and
1133 1152 (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)):
1134 1153 # this can happen if two branches make the same change
1135 1154 chain = node
1136 1155 continue
1137 1156
1138 1157 for p in (p1, p2):
1139 1158 if not p in self.nodemap:
1140 1159 if self._shallow:
1141 1160 # add null entries for missing parents
1142 1161 # XXX FIXME
1143 1162 #if base == nullrev:
1144 1163 # base = len(self)
1145 1164 #e = (offset_type(end, REVIDX_PUNCHED_FLAG),
1146 1165 # 0, 0, base, nullrev, nullrev, nullrev, p)
1147 1166 #self.index.insert(-1, e)
1148 1167 #self.nodemap[p] = r
1149 1168 #entry = self._io.packentry(e, self.node,
1150 1169 # self.version, r)
1151 1170 #ifh.write(entry)
1152 1171 #t, r = r, r + 1
1153 1172 raise LookupError(p, self.indexfile,
1154 1173 _('unknown parent'))
1155 1174 else:
1156 1175 raise LookupError(p, self.indexfile,
1157 1176 _('unknown parent'))
1158 1177
1159 1178 if not chain:
1160 1179 # retrieve the parent revision of the delta chain
1161 1180 chain = p1
1162 1181 if not chain in self.nodemap:
1163 1182 raise LookupError(chain, self.indexfile, _('unknown base'))
1164 1183
1165 1184 chainrev = self.rev(chain)
1166 1185 chain = self._addrevision(node, None, transaction, link,
1167 1186 p1, p2, (chainrev, delta), ifh, dfh)
1168 1187 if not dfh and not self._inline:
1169 1188 # addrevision switched from inline to conventional
1170 1189 # reopen the index
1171 1190 ifh.close()
1172 1191 dfh = self.opener(self.datafile, "a")
1173 1192 ifh = self.opener(self.indexfile, "a")
1174 1193 finally:
1175 1194 if dfh:
1176 1195 dfh.close()
1177 1196 ifh.close()
1178 1197
1179 1198 return node
1180 1199
1181 1200 def strip(self, minlink, transaction):
1182 1201 """truncate the revlog on the first revision with a linkrev >= minlink
1183 1202
1184 1203 This function is called when we're stripping revision minlink and
1185 1204 its descendants from the repository.
1186 1205
1187 1206 We have to remove all revisions with linkrev >= minlink, because
1188 1207 the equivalent changelog revisions will be renumbered after the
1189 1208 strip.
1190 1209
1191 1210 So we truncate the revlog on the first of these revisions, and
1192 1211 trust that the caller has saved the revisions that shouldn't be
1193 1212 removed and that it'll readd them after this truncation.
1194 1213 """
1195 1214 if len(self) == 0:
1196 1215 return
1197 1216
1198 1217 for rev in self:
1199 1218 if self.index[rev][4] >= minlink:
1200 1219 break
1201 1220 else:
1202 1221 return
1203 1222
1204 1223 # first truncate the files on disk
1205 1224 end = self.start(rev)
1206 1225 if not self._inline:
1207 1226 transaction.add(self.datafile, end)
1208 1227 end = rev * self._io.size
1209 1228 else:
1210 1229 end += rev * self._io.size
1211 1230
1212 1231 transaction.add(self.indexfile, end)
1213 1232
1214 1233 # then reset internal state in memory to forget those revisions
1215 1234 self._cache = None
1216 1235 self._chunkclear()
1217 1236 for x in xrange(rev, len(self)):
1218 1237 del self.nodemap[self.node(x)]
1219 1238
1220 1239 del self.index[rev:-1]
1221 1240
1222 1241 def checksize(self):
1223 1242 expected = 0
1224 1243 if len(self):
1225 1244 expected = max(0, self.end(len(self) - 1))
1226 1245
1227 1246 try:
1228 1247 f = self.opener(self.datafile)
1229 1248 f.seek(0, 2)
1230 1249 actual = f.tell()
1231 1250 f.close()
1232 1251 dd = actual - expected
1233 1252 except IOError, inst:
1234 1253 if inst.errno != errno.ENOENT:
1235 1254 raise
1236 1255 dd = 0
1237 1256
1238 1257 try:
1239 1258 f = self.opener(self.indexfile)
1240 1259 f.seek(0, 2)
1241 1260 actual = f.tell()
1242 1261 f.close()
1243 1262 s = self._io.size
1244 1263 i = max(0, actual // s)
1245 1264 di = actual - (i * s)
1246 1265 if self._inline:
1247 1266 databytes = 0
1248 1267 for r in self:
1249 1268 databytes += max(0, self.length(r))
1250 1269 dd = 0
1251 1270 di = actual - len(self) * s - databytes
1252 1271 except IOError, inst:
1253 1272 if inst.errno != errno.ENOENT:
1254 1273 raise
1255 1274 di = 0
1256 1275
1257 1276 return (dd, di)
1258 1277
1259 1278 def files(self):
1260 1279 res = [self.indexfile]
1261 1280 if not self._inline:
1262 1281 res.append(self.datafile)
1263 1282 return res
@@ -1,393 +1,411 b''
1 1 # wireproto.py - generic wire protocol support functions
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import urllib, tempfile, os, sys
9 9 from i18n import _
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod
12 12 import repo, error, encoding, util, store
13 13 import pushkey as pushkeymod
14 14
15 15 # list of nodes encoding / decoding
16 16
17 17 def decodelist(l, sep=' '):
18 18 if l:
19 19 return map(bin, l.split(sep))
20 20 return []
21 21
22 22 def encodelist(l, sep=' '):
23 23 return sep.join(map(hex, l))
24 24
25 25 # client side
26 26
27 27 class wirerepository(repo.repository):
28 28 def lookup(self, key):
29 29 self.requirecap('lookup', _('look up remote revision'))
30 30 d = self._call("lookup", key=encoding.fromlocal(key))
31 31 success, data = d[:-1].split(" ", 1)
32 32 if int(success):
33 33 return bin(data)
34 34 self._abort(error.RepoError(data))
35 35
36 36 def heads(self):
37 37 d = self._call("heads")
38 38 try:
39 39 return decodelist(d[:-1])
40 40 except ValueError:
41 41 self._abort(error.ResponseError(_("unexpected response:"), d))
42 42
43 43 def known(self, nodes):
44 44 n = encodelist(nodes)
45 45 d = self._call("known", nodes=n)
46 46 try:
47 47 return [bool(int(f)) for f in d]
48 48 except ValueError:
49 49 self._abort(error.ResponseError(_("unexpected response:"), d))
50 50
51 51 def branchmap(self):
52 52 d = self._call("branchmap")
53 53 try:
54 54 branchmap = {}
55 55 for branchpart in d.splitlines():
56 56 branchname, branchheads = branchpart.split(' ', 1)
57 57 branchname = encoding.tolocal(urllib.unquote(branchname))
58 58 branchheads = decodelist(branchheads)
59 59 branchmap[branchname] = branchheads
60 60 return branchmap
61 61 except TypeError:
62 62 self._abort(error.ResponseError(_("unexpected response:"), d))
63 63
64 64 def branches(self, nodes):
65 65 n = encodelist(nodes)
66 66 d = self._call("branches", nodes=n)
67 67 try:
68 68 br = [tuple(decodelist(b)) for b in d.splitlines()]
69 69 return br
70 70 except ValueError:
71 71 self._abort(error.ResponseError(_("unexpected response:"), d))
72 72
73 73 def between(self, pairs):
74 74 batch = 8 # avoid giant requests
75 75 r = []
76 76 for i in xrange(0, len(pairs), batch):
77 77 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
78 78 d = self._call("between", pairs=n)
79 79 try:
80 80 r.extend(l and decodelist(l) or [] for l in d.splitlines())
81 81 except ValueError:
82 82 self._abort(error.ResponseError(_("unexpected response:"), d))
83 83 return r
84 84
85 85 def pushkey(self, namespace, key, old, new):
86 86 if not self.capable('pushkey'):
87 87 return False
88 88 d = self._call("pushkey",
89 89 namespace=encoding.fromlocal(namespace),
90 90 key=encoding.fromlocal(key),
91 91 old=encoding.fromlocal(old),
92 92 new=encoding.fromlocal(new))
93 93 try:
94 94 d = bool(int(d))
95 95 except ValueError:
96 96 raise error.ResponseError(
97 97 _('push failed (unexpected response):'), d)
98 98 return d
99 99
100 100 def listkeys(self, namespace):
101 101 if not self.capable('pushkey'):
102 102 return {}
103 103 d = self._call("listkeys", namespace=encoding.fromlocal(namespace))
104 104 r = {}
105 105 for l in d.splitlines():
106 106 k, v = l.split('\t')
107 107 r[encoding.tolocal(k)] = encoding.tolocal(v)
108 108 return r
109 109
110 110 def stream_out(self):
111 111 return self._callstream('stream_out')
112 112
113 113 def changegroup(self, nodes, kind):
114 114 n = encodelist(nodes)
115 115 f = self._callstream("changegroup", roots=n)
116 116 return changegroupmod.unbundle10(self._decompress(f), 'UN')
117 117
118 118 def changegroupsubset(self, bases, heads, kind):
119 119 self.requirecap('changegroupsubset', _('look up remote changes'))
120 120 bases = encodelist(bases)
121 121 heads = encodelist(heads)
122 122 f = self._callstream("changegroupsubset",
123 123 bases=bases, heads=heads)
124 124 return changegroupmod.unbundle10(self._decompress(f), 'UN')
125 125
126 def getbundle(self, source, heads=None, common=None):
127 self.requirecap('getbundle', _('look up remote changes'))
128 opts = {}
129 if heads is not None:
130 opts['heads'] = encodelist(heads)
131 if common is not None:
132 opts['common'] = encodelist(common)
133 f = self._callstream("getbundle", **opts)
134 return changegroupmod.unbundle10(self._decompress(f), 'UN')
135
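
A hedged usage sketch for the new client-side method above; 'remote' stands for any wirerepository peer and the node lists are placeholders:

    def fetch(remote, heads=None, common=None):
        # ask for everything reachable from 'heads' but not from 'common'
        cg = remote.getbundle('pull', heads=heads, common=common)
        # cg is an unbundle10 changegroup stream; callers would normally hand
        # it to localrepository.addchangegroup(cg, 'pull', <peer url>)
        return cg
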
126 136 def unbundle(self, cg, heads, source):
127 137 '''Send cg (a readable file-like object representing the
128 138 changegroup to push, typically a chunkbuffer object) to the
129 139 remote server as a bundle. Return an integer indicating the
130 140 result of the push (see localrepository.addchangegroup()).'''
131 141
132 142 ret, output = self._callpush("unbundle", cg, heads=encodelist(heads))
133 143 if ret == "":
134 144 raise error.ResponseError(
135 145 _('push failed:'), output)
136 146 try:
137 147 ret = int(ret)
138 148 except ValueError:
139 149 raise error.ResponseError(
140 150 _('push failed (unexpected response):'), ret)
141 151
142 152 for l in output.splitlines(True):
143 153 self.ui.status(_('remote: '), l)
144 154 return ret
145 155
146 156 def debugwireargs(self, one, two, three=None, four=None):
147 157 # don't pass optional arguments left at their default value
148 158 opts = {}
149 159 if three is not None:
150 160 opts['three'] = three
151 161 if four is not None:
152 162 opts['four'] = four
153 163 return self._call('debugwireargs', one=one, two=two, **opts)
154 164
155 165 # server side
156 166
157 167 class streamres(object):
158 168 def __init__(self, gen):
159 169 self.gen = gen
160 170
161 171 class pushres(object):
162 172 def __init__(self, res):
163 173 self.res = res
164 174
165 175 class pusherr(object):
166 176 def __init__(self, res):
167 177 self.res = res
168 178
169 179 def dispatch(repo, proto, command):
170 180 func, spec = commands[command]
171 181 args = proto.getargs(spec)
172 182 return func(repo, proto, *args)
173 183
174 184 def options(cmd, keys, others):
175 185 opts = {}
176 186 for k in keys:
177 187 if k in others:
178 188 opts[k] = others[k]
179 189 del others[k]
180 190 if others:
181 191 sys.stderr.write("abort: %s got unexpected arguments %s\n"
182 192 % (cmd, ",".join(others)))
183 193 return opts
184 194
185 195 def between(repo, proto, pairs):
186 196 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
187 197 r = []
188 198 for b in repo.between(pairs):
189 199 r.append(encodelist(b) + "\n")
190 200 return "".join(r)
191 201
192 202 def branchmap(repo, proto):
193 203 branchmap = repo.branchmap()
194 204 heads = []
195 205 for branch, nodes in branchmap.iteritems():
196 206 branchname = urllib.quote(encoding.fromlocal(branch))
197 207 branchnodes = encodelist(nodes)
198 208 heads.append('%s %s' % (branchname, branchnodes))
199 209 return '\n'.join(heads)
200 210
201 211 def branches(repo, proto, nodes):
202 212 nodes = decodelist(nodes)
203 213 r = []
204 214 for b in repo.branches(nodes):
205 215 r.append(encodelist(b) + "\n")
206 216 return "".join(r)
207 217
208 218 def capabilities(repo, proto):
209 caps = 'lookup changegroupsubset branchmap pushkey known'.split()
219 caps = 'lookup changegroupsubset branchmap pushkey known getbundle'.split()
210 220 if _allowstream(repo.ui):
211 221 requiredformats = repo.requirements & repo.supportedformats
212 222 # if our local revlogs are just revlogv1, add 'stream' cap
213 223 if not requiredformats - set(('revlogv1',)):
214 224 caps.append('stream')
215 225 # otherwise, add 'streamreqs' detailing our local revlog format
216 226 else:
217 227 caps.append('streamreqs=%s' % ','.join(requiredformats))
218 228 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
219 229 return ' '.join(caps)
220 230
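
For reference, a small sketch of parsing a capabilities line on the client; the string below is made up and only illustrates the space-separated, optionally '='-valued token format produced above:

    caps = 'lookup branchmap pushkey known getbundle streamreqs=revlogv1'
    capmap = {}
    for tok in caps.split():
        if '=' in tok:
            name, value = tok.split('=', 1)
            capmap[name] = value
        else:
            capmap[tok] = True
    assert capmap.get('getbundle') and capmap['streamreqs'] == 'revlogv1'
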
221 231 def changegroup(repo, proto, roots):
222 232 nodes = decodelist(roots)
223 233 cg = repo.changegroup(nodes, 'serve')
224 234 return streamres(proto.groupchunks(cg))
225 235
226 236 def changegroupsubset(repo, proto, bases, heads):
227 237 bases = decodelist(bases)
228 238 heads = decodelist(heads)
229 239 cg = repo.changegroupsubset(bases, heads, 'serve')
230 240 return streamres(proto.groupchunks(cg))
231 241
232 242 def debugwireargs(repo, proto, one, two, others):
233 243 # only accept optional args from the known set
234 244 opts = options('debugwireargs', ['three', 'four'], others)
235 245 return repo.debugwireargs(one, two, **opts)
236 246
247 def getbundle(repo, proto, others):
248 opts = options('getbundle', ['heads', 'common'], others)
249 for k, v in opts.iteritems():
250 opts[k] = decodelist(v)
251 cg = repo.getbundle('serve', **opts)
252 return streamres(proto.groupchunks(cg))
253
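
Illustrative only: on the wire, 'heads' and 'common' each arrive as a space-separated list of 40-character hex node ids, which options() and decodelist() above turn back into binary nodes; the values here are fabricated:

    from binascii import unhexlify

    wireargs = {'heads': 'ab' * 20,
                'common': ' '.join(['cd' * 20, 'ef' * 20])}
    opts = dict((key, [unhexlify(n) for n in val.split()])
                for key, val in wireargs.items())
    assert all(len(n) == 20 for nodes in opts.values() for n in nodes)
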
237 254 def heads(repo, proto):
238 255 h = repo.heads()
239 256 return encodelist(h) + "\n"
240 257
241 258 def hello(repo, proto):
242 259 '''the hello command returns a set of lines describing various
243 260 interesting things about the server, in an RFC822-like format.
244 261 Currently the only one defined is "capabilities", which
245 262 consists of a line in the form:
246 263
247 264 capabilities: space separated list of tokens
248 265 '''
249 266 return "capabilities: %s\n" % (capabilities(repo, proto))
250 267
251 268 def listkeys(repo, proto, namespace):
252 269 d = pushkeymod.list(repo, encoding.tolocal(namespace)).items()
253 270 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
254 271 for k, v in d])
255 272 return t
256 273
257 274 def lookup(repo, proto, key):
258 275 try:
259 276 r = hex(repo.lookup(encoding.tolocal(key)))
260 277 success = 1
261 278 except Exception, inst:
262 279 r = str(inst)
263 280 success = 0
264 281 return "%s %s\n" % (success, r)
265 282
266 283 def known(repo, proto, nodes):
267 284 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
268 285
269 286 def pushkey(repo, proto, namespace, key, old, new):
270 287 # compatibility with pre-1.8 clients which were accidentally
271 288 # sending raw binary nodes rather than utf-8-encoded hex
272 289 if len(new) == 20 and new.encode('string-escape') != new:
273 290 # looks like it could be a binary node
274 291 try:
275 292 u = new.decode('utf-8')
276 293 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
277 294 except UnicodeDecodeError:
278 295 pass # binary, leave unmodified
279 296 else:
280 297 new = encoding.tolocal(new) # normal path
281 298
282 299 r = pushkeymod.push(repo,
283 300 encoding.tolocal(namespace), encoding.tolocal(key),
284 301 encoding.tolocal(old), new)
285 302 return '%s\n' % int(r)
286 303
287 304 def _allowstream(ui):
288 305 return ui.configbool('server', 'uncompressed', True, untrusted=True)
289 306
290 307 def stream(repo, proto):
291 308 '''If the server supports streaming clone, it advertises the "stream"
292 309 capability with a value representing the version and flags of the repo
293 310 it is serving. Client checks to see if it understands the format.
294 311
295 312 The format is simple: the server writes out a line with the number
296 313 of files, then the total number of bytes to be transferred (separated
297 314 by a space). Then, for each file, the server first writes the filename
298 315 and filesize (separated by the null character), then the file contents.
299 316 '''
300 317
301 318 if not _allowstream(repo.ui):
302 319 return '1\n'
303 320
304 321 entries = []
305 322 total_bytes = 0
306 323 try:
307 324 # get consistent snapshot of repo, lock during scan
308 325 lock = repo.lock()
309 326 try:
310 327 repo.ui.debug('scanning\n')
311 328 for name, ename, size in repo.store.walk():
312 329 entries.append((name, size))
313 330 total_bytes += size
314 331 finally:
315 332 lock.release()
316 333 except error.LockError:
317 334 return '2\n' # error: 2
318 335
319 336 def streamer(repo, entries, total):
320 337 '''stream out all metadata files in repository.'''
321 338 yield '0\n' # success
322 339 repo.ui.debug('%d files, %d bytes to transfer\n' %
323 340 (len(entries), total_bytes))
324 341 yield '%d %d\n' % (len(entries), total_bytes)
325 342 for name, size in entries:
326 343 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
327 344 # partially encode name over the wire for backwards compat
328 345 yield '%s\0%d\n' % (store.encodedir(name), size)
329 346 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
330 347 yield chunk
331 348
332 349 return streamres(streamer(repo, entries, total_bytes))
333 350
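
A hedged reader for the stream format described in the docstring above; 'fp' is any file-like object positioned just after the one-line status code, and unlike the real client it reads each file in one go instead of chunking:

    def readstream(fp):
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        files = []
        for _ in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            files.append((name, fp.read(int(size))))
        return total_bytes, files
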
334 351 def unbundle(repo, proto, heads):
335 352 their_heads = decodelist(heads)
336 353
337 354 def check_heads():
338 355 heads = repo.heads()
339 356 return their_heads == ['force'] or their_heads == heads
340 357
341 358 proto.redirect()
342 359
343 360 # fail early if possible
344 361 if not check_heads():
345 362 return pusherr('unsynced changes')
346 363
347 364 # write bundle data to temporary file because it can be big
348 365 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
349 366 fp = os.fdopen(fd, 'wb+')
350 367 r = 0
351 368 try:
352 369 proto.getfile(fp)
353 370 lock = repo.lock()
354 371 try:
355 372 if not check_heads():
356 373 # someone else committed/pushed/unbundled while we
357 374 # were transferring data
358 375 return pusherr('unsynced changes')
359 376
360 377 # push can proceed
361 378 fp.seek(0)
362 379 gen = changegroupmod.readbundle(fp, None)
363 380
364 381 try:
365 382 r = repo.addchangegroup(gen, 'serve', proto._client(),
366 383 lock=lock)
367 384 except util.Abort, inst:
368 385 sys.stderr.write("abort: %s\n" % inst)
369 386 finally:
370 387 lock.release()
371 388 return pushres(r)
372 389
373 390 finally:
374 391 fp.close()
375 392 os.unlink(tempname)
376 393
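
The reason check_heads() runs twice above is that the potentially slow upload happens outside the repository lock, so another push can land in between. A hedged sketch of that guard pattern in isolation, with purely illustrative names:

    def guarded_unbundle(their_heads, current_heads, receive, apply_bundle, lock):
        def in_sync():
            return their_heads == ['force'] or their_heads == current_heads()
        if not in_sync():
            return 'unsynced changes'        # cheap rejection before the upload
        bundle = receive()                   # slow, runs without the lock
        lock.acquire()
        try:
            if not in_sync():
                return 'unsynced changes'    # heads moved during the upload
            return apply_bundle(bundle)
        finally:
            lock.release()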
377 394 commands = {
378 395 'between': (between, 'pairs'),
379 396 'branchmap': (branchmap, ''),
380 397 'branches': (branches, 'nodes'),
381 398 'capabilities': (capabilities, ''),
382 399 'changegroup': (changegroup, 'roots'),
383 400 'changegroupsubset': (changegroupsubset, 'bases heads'),
384 401 'debugwireargs': (debugwireargs, 'one two *'),
402 'getbundle': (getbundle, '*'),
385 403 'heads': (heads, ''),
386 404 'hello': (hello, ''),
387 405 'known': (known, 'nodes'),
388 406 'listkeys': (listkeys, 'namespace'),
389 407 'lookup': (lookup, 'key'),
390 408 'pushkey': (pushkey, 'namespace key old new'),
391 409 'stream_out': (stream, ''),
392 410 'unbundle': (unbundle, 'heads'),
393 411 }
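
Each entry in this table maps a wire command name to a handler and the request arguments the dispatcher should unpack for it; '*' (used by getbundle) tells the dispatcher to collect any remaining request arguments instead of a fixed list. A hedged sketch of how an extension might hook its own command into the same table; the command name and handler are made up for illustration:

    def whoami(repo, proto):
        # returning a plain string sends it back to the client,
        # as the simple handlers above do
        return '%s\n' % repo.root

    # extensions typically register at load time, e.g. in extsetup()
    commands['whoami'] = (whoami, '')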
@@ -1,259 +1,261 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 add
4 4 addremove
5 5 annotate
6 6 archive
7 7 backout
8 8 bisect
9 9 bookmarks
10 10 branch
11 11 branches
12 12 bundle
13 13 cat
14 14 clone
15 15 commit
16 16 copy
17 17 diff
18 18 export
19 19 forget
20 20 grep
21 21 heads
22 22 help
23 23 identify
24 24 import
25 25 incoming
26 26 init
27 27 locate
28 28 log
29 29 manifest
30 30 merge
31 31 outgoing
32 32 parents
33 33 paths
34 34 pull
35 35 push
36 36 recover
37 37 remove
38 38 rename
39 39 resolve
40 40 revert
41 41 rollback
42 42 root
43 43 serve
44 44 showconfig
45 45 status
46 46 summary
47 47 tag
48 48 tags
49 49 tip
50 50 unbundle
51 51 update
52 52 verify
53 53 version
54 54
55 55 Show all commands that start with "a"
56 56 $ hg debugcomplete a
57 57 add
58 58 addremove
59 59 annotate
60 60 archive
61 61
62 62 Do not show debug commands if there are other candidates
63 63 $ hg debugcomplete d
64 64 diff
65 65
66 66 Show debug commands if there are no other candidates
67 67 $ hg debugcomplete debug
68 68 debugancestor
69 69 debugbuilddag
70 70 debugbundle
71 71 debugcheckstate
72 72 debugcommands
73 73 debugcomplete
74 74 debugconfig
75 75 debugdag
76 76 debugdata
77 77 debugdate
78 78 debugfsinfo
79 debuggetbundle
79 80 debugignore
80 81 debugindex
81 82 debugindexdot
82 83 debuginstall
83 84 debugknown
84 85 debugpushkey
85 86 debugrebuildstate
86 87 debugrename
87 88 debugrevspec
88 89 debugsetparents
89 90 debugstate
90 91 debugsub
91 92 debugwalk
92 93 debugwireargs
93 94
94 95 Do not show the alias of a debug command if there are other candidates
95 96 (this should hide rawcommit)
96 97 $ hg debugcomplete r
97 98 recover
98 99 remove
99 100 rename
100 101 resolve
101 102 revert
102 103 rollback
103 104 root
104 105 Show the alias of a debug command if there are no other candidates
105 106 $ hg debugcomplete rawc
106 107
107 108
108 109 Show the global options
109 110 $ hg debugcomplete --options | sort
110 111 --config
111 112 --cwd
112 113 --debug
113 114 --debugger
114 115 --encoding
115 116 --encodingmode
116 117 --help
117 118 --noninteractive
118 119 --profile
119 120 --quiet
120 121 --repository
121 122 --time
122 123 --traceback
123 124 --verbose
124 125 --version
125 126 -R
126 127 -h
127 128 -q
128 129 -v
129 130 -y
130 131
131 132 Show the options for the "serve" command
132 133 $ hg debugcomplete --options serve | sort
133 134 --accesslog
134 135 --address
135 136 --certificate
136 137 --config
137 138 --cwd
138 139 --daemon
139 140 --daemon-pipefds
140 141 --debug
141 142 --debugger
142 143 --encoding
143 144 --encodingmode
144 145 --errorlog
145 146 --help
146 147 --ipv6
147 148 --name
148 149 --noninteractive
149 150 --pid-file
150 151 --port
151 152 --prefix
152 153 --profile
153 154 --quiet
154 155 --repository
155 156 --stdio
156 157 --style
157 158 --templates
158 159 --time
159 160 --traceback
160 161 --verbose
161 162 --version
162 163 --web-conf
163 164 -6
164 165 -A
165 166 -E
166 167 -R
167 168 -a
168 169 -d
169 170 -h
170 171 -n
171 172 -p
172 173 -q
173 174 -t
174 175 -v
175 176 -y
176 177
177 178 Show an error if we use --options with an ambiguous abbreviation
178 179 $ hg debugcomplete --options s
179 180 hg: command 's' is ambiguous:
180 181 serve showconfig status summary
181 182 [255]
182 183
183 184 Show all commands + options
184 185 $ hg debugcommands
185 186 add: include, exclude, subrepos, dry-run
186 187 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, include, exclude
187 188 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
188 189 commit: addremove, close-branch, include, exclude, message, logfile, date, user
189 190 diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
190 191 export: output, switch-parent, rev, text, git, nodates
191 192 forget: include, exclude
192 193 init: ssh, remotecmd, insecure
193 194 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude
194 195 merge: force, tool, rev, preview
195 196 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
196 197 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
197 198 remove: after, force, include, exclude
198 199 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
199 200 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos
200 201 summary: remote
201 202 update: clean, check, date, rev
202 203 addremove: similarity, include, exclude, dry-run
203 204 archive: no-decode, prefix, rev, type, subrepos, include, exclude
204 205 backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user
205 206 bisect: reset, good, bad, skip, extend, command, noupdate
206 207 bookmarks: force, rev, delete, rename
207 208 branch: force, clean
208 209 branches: active, closed
209 210 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
210 211 cat: output, rev, decode, include, exclude
211 212 copy: after, force, include, exclude, dry-run
212 213 debugancestor:
213 214 debugbuilddag: mergeable-file, appended-file, overwritten-file, new-file
214 215 debugbundle: all
215 216 debugcheckstate:
216 217 debugcommands:
217 218 debugcomplete: options
218 219 debugdag: tags, branches, dots, spaces
219 220 debugdata:
220 221 debugdate: extended
221 222 debugfsinfo:
223 debuggetbundle: head, common, type
222 224 debugignore:
223 225 debugindex: format
224 226 debugindexdot:
225 227 debuginstall:
226 228 debugknown:
227 229 debugpushkey:
228 230 debugrebuildstate: rev
229 231 debugrename: rev
230 232 debugrevspec:
231 233 debugsetparents:
232 234 debugstate: nodates
233 235 debugsub: rev
234 236 debugwalk: include, exclude
235 237 debugwireargs: three, four, ssh, remotecmd, insecure
236 238 grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
237 239 heads: rev, topo, active, closed, style, template
238 240 help:
239 241 identify: rev, num, id, branch, tags, bookmarks
240 242 import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity
241 243 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
242 244 locate: rev, print0, fullpath, include, exclude
243 245 manifest: rev
244 246 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
245 247 parents: rev, style, template
246 248 paths:
247 249 recover:
248 250 rename: after, force, include, exclude, dry-run
249 251 resolve: all, list, mark, unmark, tool, no-status, include, exclude
250 252 revert: all, date, rev, no-backup, include, exclude, dry-run
251 253 rollback: dry-run
252 254 root:
253 255 showconfig: untrusted
254 256 tag: force, local, rev, remove, edit, message, date, user
255 257 tags:
256 258 tip: patch, git, style, template
257 259 unbundle: update
258 260 verify:
259 261 version:
@@ -1,1081 +1,1081 b''
1 1 An attempt at more fully testing the hgweb web interface.
2 2 The following things are tested elsewhere and are therefore omitted:
3 3 - archive, tested in test-archive
4 4 - unbundle, tested in test-push-http
5 5 - changegroupsubset, tested in test-pull
6 6
7 7 Set up the repo
8 8
9 9 $ hg init test
10 10 $ cd test
11 11 $ mkdir da
12 12 $ echo foo > da/foo
13 13 $ echo foo > foo
14 14 $ hg ci -Ambase
15 15 adding da/foo
16 16 adding foo
17 17 $ hg tag 1.0
18 18 $ hg bookmark something
19 19 $ echo another > foo
20 20 $ hg branch stable
21 21 marked working directory as branch stable
22 22 $ hg ci -Ambranch
23 23 $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
24 24 $ cat hg.pid >> $DAEMON_PIDS
25 25
26 26 Logs and changes
27 27
28 28 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/?style=atom'
29 29 200 Script output follows
30 30
31 31 <?xml version="1.0" encoding="ascii"?>
32 32 <feed xmlns="http://www.w3.org/2005/Atom">
33 33 <!-- Changelog -->
34 34 <id>http://*:$HGPORT/</id> (glob)
35 35 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
36 36 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
37 37 <title>test Changelog</title>
38 38 <updated>1970-01-01T00:00:00+00:00</updated>
39 39
40 40 <entry>
41 41 <title>branch</title>
42 42 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
43 43 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
44 44 <author>
45 45 <name>test</name>
46 46 <email>&#116;&#101;&#115;&#116;</email>
47 47 </author>
48 48 <updated>1970-01-01T00:00:00+00:00</updated>
49 49 <published>1970-01-01T00:00:00+00:00</published>
50 50 <content type="xhtml">
51 51 <div xmlns="http://www.w3.org/1999/xhtml">
52 52 <pre xml:space="preserve">branch</pre>
53 53 </div>
54 54 </content>
55 55 </entry>
56 56 <entry>
57 57 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
58 58 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
59 59 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
60 60 <author>
61 61 <name>test</name>
62 62 <email>&#116;&#101;&#115;&#116;</email>
63 63 </author>
64 64 <updated>1970-01-01T00:00:00+00:00</updated>
65 65 <published>1970-01-01T00:00:00+00:00</published>
66 66 <content type="xhtml">
67 67 <div xmlns="http://www.w3.org/1999/xhtml">
68 68 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
69 69 </div>
70 70 </content>
71 71 </entry>
72 72 <entry>
73 73 <title>base</title>
74 74 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
75 75 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
76 76 <author>
77 77 <name>test</name>
78 78 <email>&#116;&#101;&#115;&#116;</email>
79 79 </author>
80 80 <updated>1970-01-01T00:00:00+00:00</updated>
81 81 <published>1970-01-01T00:00:00+00:00</published>
82 82 <content type="xhtml">
83 83 <div xmlns="http://www.w3.org/1999/xhtml">
84 84 <pre xml:space="preserve">base</pre>
85 85 </div>
86 86 </content>
87 87 </entry>
88 88
89 89 </feed>
90 90 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/?style=atom'
91 91 200 Script output follows
92 92
93 93 <?xml version="1.0" encoding="ascii"?>
94 94 <feed xmlns="http://www.w3.org/2005/Atom">
95 95 <!-- Changelog -->
96 96 <id>http://*:$HGPORT/</id> (glob)
97 97 <link rel="self" href="http://*:$HGPORT/atom-log"/> (glob)
98 98 <link rel="alternate" href="http://*:$HGPORT/"/> (glob)
99 99 <title>test Changelog</title>
100 100 <updated>1970-01-01T00:00:00+00:00</updated>
101 101
102 102 <entry>
103 103 <title>branch</title>
104 104 <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
105 105 <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
106 106 <author>
107 107 <name>test</name>
108 108 <email>&#116;&#101;&#115;&#116;</email>
109 109 </author>
110 110 <updated>1970-01-01T00:00:00+00:00</updated>
111 111 <published>1970-01-01T00:00:00+00:00</published>
112 112 <content type="xhtml">
113 113 <div xmlns="http://www.w3.org/1999/xhtml">
114 114 <pre xml:space="preserve">branch</pre>
115 115 </div>
116 116 </content>
117 117 </entry>
118 118 <entry>
119 119 <title>Added tag 1.0 for changeset 2ef0ac749a14</title>
120 120 <id>http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de</id> (glob)
121 121 <link href="http://*:$HGPORT/rev/a4f92ed23982"/> (glob)
122 122 <author>
123 123 <name>test</name>
124 124 <email>&#116;&#101;&#115;&#116;</email>
125 125 </author>
126 126 <updated>1970-01-01T00:00:00+00:00</updated>
127 127 <published>1970-01-01T00:00:00+00:00</published>
128 128 <content type="xhtml">
129 129 <div xmlns="http://www.w3.org/1999/xhtml">
130 130 <pre xml:space="preserve">Added tag 1.0 for changeset 2ef0ac749a14</pre>
131 131 </div>
132 132 </content>
133 133 </entry>
134 134 <entry>
135 135 <title>base</title>
136 136 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
137 137 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
138 138 <author>
139 139 <name>test</name>
140 140 <email>&#116;&#101;&#115;&#116;</email>
141 141 </author>
142 142 <updated>1970-01-01T00:00:00+00:00</updated>
143 143 <published>1970-01-01T00:00:00+00:00</published>
144 144 <content type="xhtml">
145 145 <div xmlns="http://www.w3.org/1999/xhtml">
146 146 <pre xml:space="preserve">base</pre>
147 147 </div>
148 148 </content>
149 149 </entry>
150 150
151 151 </feed>
152 152 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log/1/foo/?style=atom'
153 153 200 Script output follows
154 154
155 155 <?xml version="1.0" encoding="ascii"?>
156 156 <feed xmlns="http://www.w3.org/2005/Atom">
157 157 <id>http://*:$HGPORT/atom-log/tip/foo</id> (glob)
158 158 <link rel="self" href="http://*:$HGPORT/atom-log/tip/foo"/> (glob)
159 159 <title>test: foo history</title>
160 160 <updated>1970-01-01T00:00:00+00:00</updated>
161 161
162 162 <entry>
163 163 <title>base</title>
164 164 <id>http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f</id> (glob)
165 165 <link href="http://*:$HGPORT/rev/2ef0ac749a14"/> (glob)
166 166 <author>
167 167 <name>test</name>
168 168 <email>&#116;&#101;&#115;&#116;</email>
169 169 </author>
170 170 <updated>1970-01-01T00:00:00+00:00</updated>
171 171 <published>1970-01-01T00:00:00+00:00</published>
172 172 <content type="xhtml">
173 173 <div xmlns="http://www.w3.org/1999/xhtml">
174 174 <pre xml:space="preserve">base</pre>
175 175 </div>
176 176 </content>
177 177 </entry>
178 178
179 179 </feed>
180 180 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/shortlog/'
181 181 200 Script output follows
182 182
183 183 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
184 184 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
185 185 <head>
186 186 <link rel="icon" href="/static/hgicon.png" type="image/png" />
187 187 <meta name="robots" content="index, nofollow" />
188 188 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
189 189
190 190 <title>test: log</title>
191 191 <link rel="alternate" type="application/atom+xml"
192 192 href="/atom-log" title="Atom feed for test" />
193 193 <link rel="alternate" type="application/rss+xml"
194 194 href="/rss-log" title="RSS feed for test" />
195 195 </head>
196 196 <body>
197 197
198 198 <div class="container">
199 199 <div class="menu">
200 200 <div class="logo">
201 201 <a href="http://mercurial.selenic.com/">
202 202 <img src="/static/hglogo.png" alt="mercurial" /></a>
203 203 </div>
204 204 <ul>
205 205 <li class="active">log</li>
206 206 <li><a href="/graph/1d22e65f027e">graph</a></li>
207 207 <li><a href="/tags">tags</a></li>
208 208 <li><a href="/bookmarks">bookmarks</a></li>
209 209 <li><a href="/branches">branches</a></li>
210 210 </ul>
211 211 <ul>
212 212 <li><a href="/rev/1d22e65f027e">changeset</a></li>
213 213 <li><a href="/file/1d22e65f027e">browse</a></li>
214 214 </ul>
215 215 <ul>
216 216
217 217 </ul>
218 218 <ul>
219 219 <li><a href="/help">help</a></li>
220 220 </ul>
221 221 </div>
222 222
223 223 <div class="main">
224 224 <h2><a href="/">test</a></h2>
225 225 <h3>log</h3>
226 226
227 227 <form class="search" action="/log">
228 228
229 229 <p><input name="rev" id="search1" type="text" size="30" /></p>
230 230 <div id="hint">find changesets by author, revision,
231 231 files, or words in the commit message</div>
232 232 </form>
233 233
234 234 <div class="navigate">
235 235 <a href="/shortlog/2?revcount=30">less</a>
236 236 <a href="/shortlog/2?revcount=120">more</a>
237 237 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
238 238 </div>
239 239
240 240 <table class="bigtable">
241 241 <tr>
242 242 <th class="age">age</th>
243 243 <th class="author">author</th>
244 244 <th class="description">description</th>
245 245 </tr>
246 246 <tr class="parity0">
247 247 <td class="age">1970-01-01</td>
248 248 <td class="author">test</td>
249 249 <td class="description"><a href="/rev/1d22e65f027e">branch</a><span class="branchhead">stable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
250 250 </tr>
251 251 <tr class="parity1">
252 252 <td class="age">1970-01-01</td>
253 253 <td class="author">test</td>
254 254 <td class="description"><a href="/rev/a4f92ed23982">Added tag 1.0 for changeset 2ef0ac749a14</a><span class="branchhead">default</span> </td>
255 255 </tr>
256 256 <tr class="parity0">
257 257 <td class="age">1970-01-01</td>
258 258 <td class="author">test</td>
259 259 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
260 260 </tr>
261 261
262 262 </table>
263 263
264 264 <div class="navigate">
265 265 <a href="/shortlog/2?revcount=30">less</a>
266 266 <a href="/shortlog/2?revcount=120">more</a>
267 267 | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a>
268 268 </div>
269 269
270 270 </div>
271 271 </div>
272 272
273 273
274 274
275 275 </body>
276 276 </html>
277 277
278 278 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/0/'
279 279 200 Script output follows
280 280
281 281 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
282 282 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
283 283 <head>
284 284 <link rel="icon" href="/static/hgicon.png" type="image/png" />
285 285 <meta name="robots" content="index, nofollow" />
286 286 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
287 287
288 288 <title>test: 2ef0ac749a14</title>
289 289 </head>
290 290 <body>
291 291 <div class="container">
292 292 <div class="menu">
293 293 <div class="logo">
294 294 <a href="http://mercurial.selenic.com/">
295 295 <img src="/static/hglogo.png" alt="mercurial" /></a>
296 296 </div>
297 297 <ul>
298 298 <li><a href="/shortlog/2ef0ac749a14">log</a></li>
299 299 <li><a href="/graph/2ef0ac749a14">graph</a></li>
300 300 <li><a href="/tags">tags</a></li>
301 301 <li><a href="/bookmarks">bookmarks</a></li>
302 302 <li><a href="/branches">branches</a></li>
303 303 </ul>
304 304 <ul>
305 305 <li class="active">changeset</li>
306 306 <li><a href="/raw-rev/2ef0ac749a14">raw</a></li>
307 307 <li><a href="/file/2ef0ac749a14">browse</a></li>
308 308 </ul>
309 309 <ul>
310 310
311 311 </ul>
312 312 <ul>
313 313 <li><a href="/help">help</a></li>
314 314 </ul>
315 315 </div>
316 316
317 317 <div class="main">
318 318
319 319 <h2><a href="/">test</a></h2>
320 320 <h3>changeset 0:2ef0ac749a14 <span class="tag">1.0</span> </h3>
321 321
322 322 <form class="search" action="/log">
323 323
324 324 <p><input name="rev" id="search1" type="text" size="30" /></p>
325 325 <div id="hint">find changesets by author, revision,
326 326 files, or words in the commit message</div>
327 327 </form>
328 328
329 329 <div class="description">base</div>
330 330
331 331 <table id="changesetEntry">
332 332 <tr>
333 333 <th class="author">author</th>
334 334 <td class="author">&#116;&#101;&#115;&#116;</td>
335 335 </tr>
336 336 <tr>
337 337 <th class="date">date</th>
338 338 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td></tr>
339 339 <tr>
340 340 <th class="author">parents</th>
341 341 <td class="author"></td>
342 342 </tr>
343 343 <tr>
344 344 <th class="author">children</th>
345 345 <td class="author"> <a href="/rev/a4f92ed23982">a4f92ed23982</a></td>
346 346 </tr>
347 347 <tr>
348 348 <th class="files">files</th>
349 349 <td class="files"><a href="/file/2ef0ac749a14/da/foo">da/foo</a> <a href="/file/2ef0ac749a14/foo">foo</a> </td>
350 350 </tr>
351 351 </table>
352 352
353 353 <div class="overflow">
354 354 <div class="sourcefirst"> line diff</div>
355 355
356 356 <div class="source bottomline parity0"><pre><a href="#l1.1" id="l1.1"> 1.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
357 357 </span><a href="#l1.2" id="l1.2"> 1.2</a> <span class="plusline">+++ b/da/foo Thu Jan 01 00:00:00 1970 +0000
358 358 </span><a href="#l1.3" id="l1.3"> 1.3</a> <span class="atline">@@ -0,0 +1,1 @@
359 359 </span><a href="#l1.4" id="l1.4"> 1.4</a> <span class="plusline">+foo
360 360 </span></pre></div><div class="source bottomline parity1"><pre><a href="#l2.1" id="l2.1"> 2.1</a> <span class="minusline">--- /dev/null Thu Jan 01 00:00:00 1970 +0000
361 361 </span><a href="#l2.2" id="l2.2"> 2.2</a> <span class="plusline">+++ b/foo Thu Jan 01 00:00:00 1970 +0000
362 362 </span><a href="#l2.3" id="l2.3"> 2.3</a> <span class="atline">@@ -0,0 +1,1 @@
363 363 </span><a href="#l2.4" id="l2.4"> 2.4</a> <span class="plusline">+foo
364 364 </span></pre></div>
365 365 </div>
366 366
367 367 </div>
368 368 </div>
369 369
370 370
371 371 </body>
372 372 </html>
373 373
374 374 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/rev/1/?style=raw'
375 375 200 Script output follows
376 376
377 377
378 378 # HG changeset patch
379 379 # User test
380 380 # Date 0 0
381 381 # Node ID a4f92ed23982be056b9852de5dfe873eaac7f0de
382 382 # Parent 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
383 383 Added tag 1.0 for changeset 2ef0ac749a14
384 384
385 385 diff -r 2ef0ac749a14 -r a4f92ed23982 .hgtags
386 386 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
387 387 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
388 388 @@ -0,0 +1,1 @@
389 389 +2ef0ac749a14e4f57a5a822464a0902c6f7f448f 1.0
390 390
391 391 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/log?rev=base'
392 392 200 Script output follows
393 393
394 394 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
395 395 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
396 396 <head>
397 397 <link rel="icon" href="/static/hgicon.png" type="image/png" />
398 398 <meta name="robots" content="index, nofollow" />
399 399 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
400 400
401 401 <title>test: searching for base</title>
402 402 </head>
403 403 <body>
404 404
405 405 <div class="container">
406 406 <div class="menu">
407 407 <div class="logo">
408 408 <a href="http://mercurial.selenic.com/">
409 409 <img src="/static/hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
410 410 </div>
411 411 <ul>
412 412 <li><a href="/shortlog">log</a></li>
413 413 <li><a href="/graph">graph</a></li>
414 414 <li><a href="/tags">tags</a></li>
415 415 <li><a href="/bookmarks">bookmarks</a></li>
416 416 <li><a href="/branches">branches</a></li>
417 417 <li><a href="/help">help</a></li>
418 418 </ul>
419 419 </div>
420 420
421 421 <div class="main">
422 422 <h2><a href="/">test</a></h2>
423 423 <h3>searching for 'base'</h3>
424 424
425 425 <form class="search" action="/log">
426 426
427 427 <p><input name="rev" id="search1" type="text" size="30"></p>
428 428 <div id="hint">find changesets by author, revision,
429 429 files, or words in the commit message</div>
430 430 </form>
431 431
432 432 <div class="navigate">
433 433 <a href="/search/?rev=base&revcount=5">less</a>
434 434 <a href="/search/?rev=base&revcount=20">more</a>
435 435 </div>
436 436
437 437 <table class="bigtable">
438 438 <tr>
439 439 <th class="age">age</th>
440 440 <th class="author">author</th>
441 441 <th class="description">description</th>
442 442 </tr>
443 443 <tr class="parity0">
444 444 <td class="age">1970-01-01</td>
445 445 <td class="author">test</td>
446 446 <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> </td>
447 447 </tr>
448 448
449 449 </table>
450 450
451 451 <div class="navigate">
452 452 <a href="/search/?rev=base&revcount=5">less</a>
453 453 <a href="/search/?rev=base&revcount=20">more</a>
454 454 </div>
455 455
456 456 </div>
457 457 </div>
458 458
459 459
460 460
461 461 </body>
462 462 </html>
463 463
464 464
465 465 File-related
466 466
467 467 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo/?style=raw'
468 468 200 Script output follows
469 469
470 470 foo
471 471 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/annotate/1/foo/?style=raw'
472 472 200 Script output follows
473 473
474 474
475 475 test@0: foo
476 476
477 477
478 478
479 479
480 480 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/?style=raw'
481 481 200 Script output follows
482 482
483 483
484 484 drwxr-xr-x da
485 485 -rw-r--r-- 45 .hgtags
486 486 -rw-r--r-- 4 foo
487 487
488 488
489 489 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/1/foo'
490 490 200 Script output follows
491 491
492 492 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
493 493 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US">
494 494 <head>
495 495 <link rel="icon" href="/static/hgicon.png" type="image/png" />
496 496 <meta name="robots" content="index, nofollow" />
497 497 <link rel="stylesheet" href="/static/style-paper.css" type="text/css" />
498 498
499 499 <title>test: a4f92ed23982 foo</title>
500 500 </head>
501 501 <body>
502 502
503 503 <div class="container">
504 504 <div class="menu">
505 505 <div class="logo">
506 506 <a href="http://mercurial.selenic.com/">
507 507 <img src="/static/hglogo.png" alt="mercurial" /></a>
508 508 </div>
509 509 <ul>
510 510 <li><a href="/shortlog/a4f92ed23982">log</a></li>
511 511 <li><a href="/graph/a4f92ed23982">graph</a></li>
512 512 <li><a href="/tags">tags</a></li>
513 513 <li><a href="/branches">branches</a></li>
514 514 </ul>
515 515 <ul>
516 516 <li><a href="/rev/a4f92ed23982">changeset</a></li>
517 517 <li><a href="/file/a4f92ed23982/">browse</a></li>
518 518 </ul>
519 519 <ul>
520 520 <li class="active">file</li>
521 521 <li><a href="/file/tip/foo">latest</a></li>
522 522 <li><a href="/diff/a4f92ed23982/foo">diff</a></li>
523 523 <li><a href="/annotate/a4f92ed23982/foo">annotate</a></li>
524 524 <li><a href="/log/a4f92ed23982/foo">file log</a></li>
525 525 <li><a href="/raw-file/a4f92ed23982/foo">raw</a></li>
526 526 </ul>
527 527 <ul>
528 528 <li><a href="/help">help</a></li>
529 529 </ul>
530 530 </div>
531 531
532 532 <div class="main">
533 533 <h2><a href="/">test</a></h2>
534 534 <h3>view foo @ 1:a4f92ed23982</h3>
535 535
536 536 <form class="search" action="/log">
537 537
538 538 <p><input name="rev" id="search1" type="text" size="30" /></p>
539 539 <div id="hint">find changesets by author, revision,
540 540 files, or words in the commit message</div>
541 541 </form>
542 542
543 543 <div class="description">Added tag 1.0 for changeset 2ef0ac749a14</div>
544 544
545 545 <table id="changesetEntry">
546 546 <tr>
547 547 <th class="author">author</th>
548 548 <td class="author">&#116;&#101;&#115;&#116;</td>
549 549 </tr>
550 550 <tr>
551 551 <th class="date">date</th>
552 552 <td class="date">Thu Jan 01 00:00:00 1970 +0000 (1970-01-01)</td>
553 553 </tr>
554 554 <tr>
555 555 <th class="author">parents</th>
556 556 <td class="author"></td>
557 557 </tr>
558 558 <tr>
559 559 <th class="author">children</th>
560 560 <td class="author"><a href="/file/1d22e65f027e/foo">1d22e65f027e</a> </td>
561 561 </tr>
562 562
563 563 </table>
564 564
565 565 <div class="overflow">
566 566 <div class="sourcefirst"> line source</div>
567 567
568 568 <div class="parity0 source"><a href="#l1" id="l1"> 1</a> foo
569 569 </div>
570 570 <div class="sourcelast"></div>
571 571 </div>
572 572 </div>
573 573 </div>
574 574
575 575
576 576
577 577 </body>
578 578 </html>
579 579
580 580 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/filediff/1/foo/?style=raw'
581 581 200 Script output follows
582 582
583 583
584 584 diff -r 000000000000 -r a4f92ed23982 foo
585 585 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
586 586 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
587 587 @@ -0,0 +1,1 @@
588 588 +foo
589 589
590 590
591 591
592 592
593 593
594 594 Overviews
595 595
596 596 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-tags'
597 597 200 Script output follows
598 598
599 599 tip 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
600 600 1.0 2ef0ac749a14e4f57a5a822464a0902c6f7f448f
601 601 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-branches'
602 602 200 Script output follows
603 603
604 604 stable 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe open
605 605 default a4f92ed23982be056b9852de5dfe873eaac7f0de inactive
606 606 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb'
607 607 200 Script output follows
608 608
609 609 <?xml version="1.0" encoding="ascii"?>
610 610 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
611 611 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
612 612 <head>
613 613 <link rel="icon" href="/static/hgicon.png" type="image/png" />
614 614 <meta name="robots" content="index, nofollow"/>
615 615 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
616 616
617 617
618 618 <title>test: Summary</title>
619 619 <link rel="alternate" type="application/atom+xml"
620 620 href="/atom-log" title="Atom feed for test"/>
621 621 <link rel="alternate" type="application/rss+xml"
622 622 href="/rss-log" title="RSS feed for test"/>
623 623 </head>
624 624 <body>
625 625
626 626 <div class="page_header">
627 627 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / summary
628 628
629 629 <form action="/log">
630 630 <input type="hidden" name="style" value="gitweb" />
631 631 <div class="search">
632 632 <input type="text" name="rev" />
633 633 </div>
634 634 </form>
635 635 </div>
636 636
637 637 <div class="page_nav">
638 638 summary |
639 639 <a href="/shortlog?style=gitweb">shortlog</a> |
640 640 <a href="/log?style=gitweb">changelog</a> |
641 641 <a href="/graph?style=gitweb">graph</a> |
642 642 <a href="/tags?style=gitweb">tags</a> |
643 643 <a href="/branches?style=gitweb">branches</a> |
644 644 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
645 645 <a href="/help?style=gitweb">help</a>
646 646 <br/>
647 647 </div>
648 648
649 649 <div class="title">&nbsp;</div>
650 650 <table cellspacing="0">
651 651 <tr><td>description</td><td>unknown</td></tr>
652 652 <tr><td>owner</td><td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td></tr>
653 653 <tr><td>last change</td><td>Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
654 654 </table>
655 655
656 656 <div><a class="title" href="/shortlog?style=gitweb">changes</a></div>
657 657 <table cellspacing="0">
658 658
659 659 <tr class="parity0">
660 660 <td class="age"><i>1970-01-01</i></td>
661 661 <td><i>test</i></td>
662 662 <td>
663 663 <a class="list" href="/rev/1d22e65f027e?style=gitweb">
664 664 <b>branch</b>
665 665 <span class="logtags"><span class="branchtag" title="stable">stable</span> <span class="tagtag" title="tip">tip</span> </span>
666 666 </a>
667 667 </td>
668 668 <td class="link" nowrap>
669 669 <a href="/rev/1d22e65f027e?style=gitweb">changeset</a> |
670 670 <a href="/file/1d22e65f027e?style=gitweb">files</a>
671 671 </td>
672 672 </tr>
673 673 <tr class="parity1">
674 674 <td class="age"><i>1970-01-01</i></td>
675 675 <td><i>test</i></td>
676 676 <td>
677 677 <a class="list" href="/rev/a4f92ed23982?style=gitweb">
678 678 <b>Added tag 1.0 for changeset 2ef0ac749a14</b>
679 679 <span class="logtags"><span class="branchtag" title="default">default</span> </span>
680 680 </a>
681 681 </td>
682 682 <td class="link" nowrap>
683 683 <a href="/rev/a4f92ed23982?style=gitweb">changeset</a> |
684 684 <a href="/file/a4f92ed23982?style=gitweb">files</a>
685 685 </td>
686 686 </tr>
687 687 <tr class="parity0">
688 688 <td class="age"><i>1970-01-01</i></td>
689 689 <td><i>test</i></td>
690 690 <td>
691 691 <a class="list" href="/rev/2ef0ac749a14?style=gitweb">
692 692 <b>base</b>
693 693 <span class="logtags"><span class="tagtag" title="1.0">1.0</span> </span>
694 694 </a>
695 695 </td>
696 696 <td class="link" nowrap>
697 697 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
698 698 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
699 699 </td>
700 700 </tr>
701 701 <tr class="light"><td colspan="4"><a class="list" href="/shortlog?style=gitweb">...</a></td></tr>
702 702 </table>
703 703
704 704 <div><a class="title" href="/tags?style=gitweb">tags</a></div>
705 705 <table cellspacing="0">
706 706
707 707 <tr class="parity0">
708 708 <td class="age"><i>1970-01-01</i></td>
709 709 <td><a class="list" href="/rev/2ef0ac749a14?style=gitweb"><b>1.0</b></a></td>
710 710 <td class="link">
711 711 <a href="/rev/2ef0ac749a14?style=gitweb">changeset</a> |
712 712 <a href="/log/2ef0ac749a14?style=gitweb">changelog</a> |
713 713 <a href="/file/2ef0ac749a14?style=gitweb">files</a>
714 714 </td>
715 715 </tr>
716 716 <tr class="light"><td colspan="3"><a class="list" href="/tags?style=gitweb">...</a></td></tr>
717 717 </table>
718 718
719 719 <div><a class="title" href="#">branches</a></div>
720 720 <table cellspacing="0">
721 721
722 722 <tr class="parity0">
723 723 <td class="age"><i>1970-01-01</i></td>
724 724 <td><a class="list" href="/shortlog/1d22e65f027e?style=gitweb"><b>1d22e65f027e</b></a></td>
725 725 <td class="">stable</td>
726 726 <td class="link">
727 727 <a href="/changeset/1d22e65f027e?style=gitweb">changeset</a> |
728 728 <a href="/log/1d22e65f027e?style=gitweb">changelog</a> |
729 729 <a href="/file/1d22e65f027e?style=gitweb">files</a>
730 730 </td>
731 731 </tr>
732 732 <tr class="parity1">
733 733 <td class="age"><i>1970-01-01</i></td>
734 734 <td><a class="list" href="/shortlog/a4f92ed23982?style=gitweb"><b>a4f92ed23982</b></a></td>
735 735 <td class="">default</td>
736 736 <td class="link">
737 737 <a href="/changeset/a4f92ed23982?style=gitweb">changeset</a> |
738 738 <a href="/log/a4f92ed23982?style=gitweb">changelog</a> |
739 739 <a href="/file/a4f92ed23982?style=gitweb">files</a>
740 740 </td>
741 741 </tr>
742 742 <tr class="light">
743 743 <td colspan="4"><a class="list" href="#">...</a></td>
744 744 </tr>
745 745 </table>
746 746 <div class="page_footer">
747 747 <div class="page_footer_text">test</div>
748 748 <div class="rss_logo">
749 749 <a href="/rss-log">RSS</a>
750 750 <a href="/atom-log">Atom</a>
751 751 </div>
752 752 <br />
753 753
754 754 </div>
755 755 </body>
756 756 </html>
757 757
758 758 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/?style=gitweb'
759 759 200 Script output follows
760 760
761 761 <?xml version="1.0" encoding="ascii"?>
762 762 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
763 763 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
764 764 <head>
765 765 <link rel="icon" href="/static/hgicon.png" type="image/png" />
766 766 <meta name="robots" content="index, nofollow"/>
767 767 <link rel="stylesheet" href="/static/style-gitweb.css" type="text/css" />
768 768
769 769
770 770 <title>test: Graph</title>
771 771 <link rel="alternate" type="application/atom+xml"
772 772 href="/atom-log" title="Atom feed for test"/>
773 773 <link rel="alternate" type="application/rss+xml"
774 774 href="/rss-log" title="RSS feed for test"/>
775 775 <!--[if IE]><script type="text/javascript" src="/static/excanvas.js"></script><![endif]-->
776 776 </head>
777 777 <body>
778 778
779 779 <div class="page_header">
780 780 <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="/summary?style=gitweb">test</a> / graph
781 781 </div>
782 782
783 783 <form action="/log">
784 784 <input type="hidden" name="style" value="gitweb" />
785 785 <div class="search">
786 786 <input type="text" name="rev" />
787 787 </div>
788 788 </form>
789 789 <div class="page_nav">
790 790 <a href="/summary?style=gitweb">summary</a> |
791 791 <a href="/shortlog?style=gitweb">shortlog</a> |
792 792 <a href="/log/2?style=gitweb">changelog</a> |
793 793 graph |
794 794 <a href="/tags?style=gitweb">tags</a> |
795 795 <a href="/branches?style=gitweb">branches</a> |
796 796 <a href="/file/1d22e65f027e?style=gitweb">files</a> |
797 797 <a href="/help?style=gitweb">help</a>
798 798 <br/>
799 799 <a href="/graph/2?style=gitweb&revcount=30">less</a>
800 800 <a href="/graph/2?style=gitweb&revcount=120">more</a>
801 801 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
802 802 </div>
803 803
804 804 <div class="title">&nbsp;</div>
805 805
806 806 <noscript>The revision graph only works with JavaScript-enabled browsers.</noscript>
807 807
808 808 <div id="wrapper">
809 809 <ul id="nodebgs"></ul>
810 810 <canvas id="graph" width="480" height="129"></canvas>
811 811 <ul id="graphnodes"></ul>
812 812 </div>
813 813
814 814 <script type="text/javascript" src="/static/graph.js"></script>
815 815 <script>
816 816 <!-- hide script content
817 817
818 818 var data = [["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
819 819 var graph = new Graph();
820 820 graph.scale(39);
821 821
822 822 graph.edge = function(x0, y0, x1, y1, color) {
823 823
824 824 this.setColor(color, 0.0, 0.65);
825 825 this.ctx.beginPath();
826 826 this.ctx.moveTo(x0, y0);
827 827 this.ctx.lineTo(x1, y1);
828 828 this.ctx.stroke();
829 829
830 830 }
831 831
832 832 var revlink = '<li style="_STYLE"><span class="desc">';
833 833 revlink += '<a class="list" href="/rev/_NODEID?style=gitweb" title="_NODEID"><b>_DESC</b></a>';
834 834 revlink += '</span> _TAGS';
835 835 revlink += '<span class="info">_DATE, by _USER</span></li>';
836 836
837 837 graph.vertex = function(x, y, color, parity, cur) {
838 838
839 839 this.ctx.beginPath();
840 840 color = this.setColor(color, 0.25, 0.75);
841 841 this.ctx.arc(x, y, radius, 0, Math.PI * 2, true);
842 842 this.ctx.fill();
843 843
844 844 var bg = '<li class="bg parity' + parity + '"></li>';
845 845 var left = (this.columns + 1) * this.bg_height;
846 846 var nstyle = 'padding-left: ' + left + 'px;';
847 847 var item = revlink.replace(/_STYLE/, nstyle);
848 848 item = item.replace(/_PARITY/, 'parity' + parity);
849 849 item = item.replace(/_NODEID/, cur[0]);
850 850 item = item.replace(/_NODEID/, cur[0]);
851 851 item = item.replace(/_DESC/, cur[3]);
852 852 item = item.replace(/_USER/, cur[4]);
853 853 item = item.replace(/_DATE/, cur[5]);
854 854
855 855 var tagspan = '';
856 856 if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) {
857 857 tagspan = '<span class="logtags">';
858 858 if (cur[6][1]) {
859 859 tagspan += '<span class="branchtag" title="' + cur[6][0] + '">';
860 860 tagspan += cur[6][0] + '</span> ';
861 861 } else if (!cur[6][1] && cur[6][0] != 'default') {
862 862 tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">';
863 863 tagspan += cur[6][0] + '</span> ';
864 864 }
865 865 if (cur[7].length) {
866 866 for (var t in cur[7]) {
867 867 var tag = cur[7][t];
868 868 tagspan += '<span class="tagtag">' + tag + '</span> ';
869 869 }
870 870 }
871 871 tagspan += '</span>';
872 872 }
873 873
874 874 item = item.replace(/_TAGS/, tagspan);
875 875 return [bg, item];
876 876
877 877 }
878 878
879 879 graph.render(data);
880 880
881 881 // stop hiding script -->
882 882 </script>
883 883
884 884 <div class="page_nav">
885 885 <a href="/graph/2?style=gitweb&revcount=30">less</a>
886 886 <a href="/graph/2?style=gitweb&revcount=120">more</a>
887 887 | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a>
888 888 </div>
889 889
890 890 <div class="page_footer">
891 891 <div class="page_footer_text">test</div>
892 892 <div class="rss_logo">
893 893 <a href="/rss-log">RSS</a>
894 894 <a href="/atom-log">Atom</a>
895 895 </div>
896 896 <br />
897 897
898 898 </div>
899 899 </body>
900 900 </html>
901 901
902 902
903 903 capabilities
904 904
905 905 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
906 906 200 Script output follows
907 907
908 lookup changegroupsubset branchmap pushkey known unbundle=HG10GZ,HG10BZ,HG10UN
908 lookup changegroupsubset branchmap pushkey known getbundle unbundle=HG10GZ,HG10BZ,HG10UN
909 909
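
The only change in this output is the new getbundle token. A client deciding whether it may use the command only has to tokenize this space-separated capability string; a hedged sketch with an illustrative helper name:

    def server_supports(caps_line, name):
        # entries may carry a value after '=', e.g. unbundle=HG10GZ,HG10BZ,HG10UN
        caps = set(c.split('=', 1)[0] for c in caps_line.split())
        return name in caps

    # e.g. server_supports(response_body, 'getbundle') is True for this server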
910 910 heads
911 911
912 912 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
913 913 200 Script output follows
914 914
915 915 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
916 916
917 917 branches
918 918
919 919 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=branches&nodes=0000000000000000000000000000000000000000'
920 920 200 Script output follows
921 921
922 922 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
923 923
924 924 changegroup
925 925
926 926 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000'
927 927 200 Script output follows
928 928
929 929 x\x9c\xbdTMHUA\x14\xbe\xa8\xf9\xec\xda&\x10\x11*\xb8\x88\x81\x99\xbef\xe6\xce\xbdw\xc6\xf2a\x16E\x1b\x11[%\x98\xcc\xaf\x8f\x8c\xf7\xc0\xf7\x82 (esc)
930 930 4\x11KP2m\x95\xad*\xabE\x05AP\xd0\xc22Z\x14\xf9\x03\xb9j\xa3\x9b$\xa4MJ\xb4\x90\xc0\x9a\x9bO0\x10\xdf\x13\xa2\x81\x0f\x869g\xe6|\xe7\x9c\xef\x8ceY\xf7\xa2KO\xd2\xb7K\x16~\\n\xe9\xad\x90w\x86\xab\x93W\x8e\xdf\xb0r\\Y\xee6(\xa2)\xf6\x95\xc6\x01\xe4\x1az\x80R\xe8kN\x98\xe7R\xa4\xa9K@\xe0!A\xb4k\xa7U*m\x03\x07\xd8\x92\x1d\xd2\xc9\xa4\x1d\xc2\xe6,\xa5\xcc+\x1f\xef\xafDgi\xef\xab\x1d\x1d\xb7\x9a\xe7[W\xfbc\x8f\xde-\xcd\xe7\xcaz\xb3\xbb\x19\xd3\x81\x10>c>\x08\x00"X\x11\xc2\x84@\xd2\xe7B*L\x00\x01P\x04R\xc3@\xbaB0\xdb8#\x83:\x83\xa2h\xbc=\xcd\xdaS\xe1Y,L\xd3\xa0\xf2\xa8\x94J:\xe6\xd8\x81Q\xe0\xe8d\xa7#\xe2,\xd1\xaeR*\xed \xa5\x01\x13\x01\xa6\x0cb\xe3;\xbe\xaf\xfcK[^wK\xe1N\xaf\xbbk\xe8B\xd1\xf4\xc1\x07\xb3\xab[\x10\xfdkmvwcB\xa6\xa4\xd4G\xc4D\xc2\x141\xad\x91\x10\x00\x08J\x81\xcb}\xee \xee+W\xba\x8a\x80\x90|\xd4\xa0\xd6\xa0\xd4T\xde\xe1\x9d,!\xe2\xb5\xa94\xe3\xe7\xd5\x9f\x06\x18\xcba\x03aP\xb8f\xcd\x04\x1a_\\9\xf1\xed\xe4\x9e\xe5\xa6\xd1\xd2\x9f\x03\xa7o\xae\x90H\xf3\xfb\xef\xffH3\xadk (esc)
931 931 \xb0\x90\x92\x88\xb9\x14"\x068\xc2\x1e@\x00\xbb\x8a)\xd3'\x859 (esc)
932 932 \xa8\x80\x84S \xa5\xbd-g\x13`\xe4\xdc\xc3H^\xdf\xe2\xc0TM\xc7\xf4BO\xcf\xde\xae\xe5\xae#\x1frM(K\x97`F\x19\x16s\x05GD\xb9\x01\xc1\x00+\x8c|\x9fp\xc11\xf0\x14\x00\x9cJ\x82<\xe0\x12\x9f\xc1\x90\xd0\xf5\xc8\x19>Pr\xaa\xeaW\xf5\xc4\xae\xd1\xfc\x17\xcf'\x13u\xb1\x9e\xcdHnC\x0e\xcc`\xc8\xa0&\xac\x0e\xf1|\x8c\x10$\xc4\x8c\xa2p\x05`\xdc\x08 \x80\xc4\xd7Rr-\x94\x10\x102\xedi;\xf3f\xf1z\x16\x86\xdb\xd8d\xe5\xe7\x8b\xf5\x8d\rzp\xb2\xfe\xac\xf5\xf2\xd3\xfe\xfckws\xedt\x96b\xd5l\x1c\x0b\x85\xb5\x170\x8f\x11\x84\xb0\x8f\x19\xa0\x00 _\x07\x1ac\xa2\xc3\x89Z\xe7\x96\xf9 \xccNFg\xc7F\xaa\x8a+\x9a\x9cc_\x17\x1b\x17\x9e]z38<\x97+\xb5,",\xc8\xc8?\\\x91\xff\x17.~U\x96\x97\xf5%\xdeN<\x8e\xf5\x97%\xe7^\xcfL\xed~\xda\x96k\xdc->\x86\x02\x83"\x96H\xa6\xe3\xaas=-\xeb7\xe5\xda\x8f\xbc (no-eol) (esc)
933 933
934 934 stream_out
935 935
936 936 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=stream_out'
937 937 200 Script output follows
938 938
939 939 1
940 940
941 941 failing unbundle, requires POST request
942 942
943 943 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=unbundle'
944 944 405 push requires POST request
945 945
946 946 0
947 947 push requires POST request
948 948 [1]
949 949
950 950 Static files
951 951
952 952 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/static/style.css'
953 953 200 Script output follows
954 954
955 955 a { text-decoration:none; }
956 956 .age { white-space:nowrap; }
957 957 .date { white-space:nowrap; }
958 958 .indexlinks { white-space:nowrap; }
959 959 .parity0 { background-color: #ddd; }
960 960 .parity1 { background-color: #eee; }
961 961 .lineno { width: 60px; color: #aaa; font-size: smaller;
962 962 text-align: right; }
963 963 .plusline { color: green; }
964 964 .minusline { color: red; }
965 965 .atline { color: purple; }
966 966 .annotate { font-size: smaller; text-align: right; padding-right: 1em; }
967 967 .buttons a {
968 968 background-color: #666;
969 969 padding: 2pt;
970 970 color: white;
971 971 font-family: sans;
972 972 font-weight: bold;
973 973 }
974 974 .navigate a {
975 975 background-color: #ccc;
976 976 padding: 2pt;
977 977 font-family: sans;
978 978 color: black;
979 979 }
980 980
981 981 .metatag {
982 982 background-color: #888;
983 983 color: white;
984 984 text-align: right;
985 985 }
986 986
987 987 /* Common */
988 988 pre { margin: 0; }
989 989
990 990 .logo {
991 991 float: right;
992 992 clear: right;
993 993 }
994 994
995 995 /* Changelog/Filelog entries */
996 996 .logEntry { width: 100%; }
997 997 .logEntry .age { width: 15%; }
998 998 .logEntry th { font-weight: normal; text-align: right; vertical-align: top; }
999 999 .logEntry th.age, .logEntry th.firstline { font-weight: bold; }
1000 1000 .logEntry th.firstline { text-align: left; width: inherit; }
1001 1001
1002 1002 /* Shortlog entries */
1003 1003 .slogEntry { width: 100%; }
1004 1004 .slogEntry .age { width: 8em; }
1005 1005 .slogEntry td { font-weight: normal; text-align: left; vertical-align: top; }
1006 1006 .slogEntry td.author { width: 15em; }
1007 1007
1008 1008 /* Tag entries */
1009 1009 #tagEntries { list-style: none; margin: 0; padding: 0; }
1010 1010 #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; }
1011 1011
1012 1012 /* Changeset entry */
1013 1013 #changesetEntry { }
1014 1014 #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1015 1015 #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; }
1016 1016
1017 1017 /* File diff view */
1018 1018 #filediffEntry { }
1019 1019 #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; }
1020 1020
1021 1021 /* Graph */
1022 1022 div#wrapper {
1023 1023 position: relative;
1024 1024 margin: 0;
1025 1025 padding: 0;
1026 1026 }
1027 1027
1028 1028 canvas {
1029 1029 position: absolute;
1030 1030 z-index: 5;
1031 1031 top: -0.6em;
1032 1032 margin: 0;
1033 1033 }
1034 1034
1035 1035 ul#nodebgs {
1036 1036 list-style: none inside none;
1037 1037 padding: 0;
1038 1038 margin: 0;
1039 1039 top: -0.7em;
1040 1040 }
1041 1041
1042 1042 ul#graphnodes li, ul#nodebgs li {
1043 1043 height: 39px;
1044 1044 }
1045 1045
1046 1046 ul#graphnodes {
1047 1047 position: absolute;
1048 1048 z-index: 10;
1049 1049 top: -0.85em;
1050 1050 list-style: none inside none;
1051 1051 padding: 0;
1052 1052 }
1053 1053
1054 1054 ul#graphnodes li .info {
1055 1055 display: block;
1056 1056 font-size: 70%;
1057 1057 position: relative;
1058 1058 top: -1px;
1059 1059 }
1060 1060
1061 1061 Stop and restart with HGENCODING=cp932
1062 1062
1063 1063 $ "$TESTDIR/killdaemons.py"
1064 1064 $ HGENCODING=cp932 hg serve --config server.uncompressed=False -n test \
1065 1065 > -p $HGPORT -d --pid-file=hg.pid -E errors.log
1066 1066 $ cat hg.pid >> $DAEMON_PIDS
1067 1067
1068 1068 commit message with Japanese Kanji 'Noh', which ends with '\x5c'
1069 1069
1070 1070 $ echo foo >> foo
1071 1071 $ HGENCODING=cp932 hg ci -m `python -c 'print("\x94\x5c")'`
1072 1072
1073 1073 Graph json escape of multibyte character
1074 1074
1075 1075 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/' \
1076 1076 > | grep '^var data ='
1077 1077 var data = [["40b4d6888e92", [0, 1], [[0, 0, 1]], "\u80fd", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", false], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], []]];
1078 1078
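
The kanji used here is chosen deliberately: in cp932 its second byte is 0x5c, the ASCII backslash, so byte-oriented escaping would mangle it, whereas the \u80fd in the JSON above shows it being decoded and escaped as a whole character. A small Python 2 illustration of the same bytes:

    raw = '\x94\x5c'                        # cp932 bytes of the commit message
    assert raw.decode('cp932') == u'\u80fd'
    assert raw[-1] == '\\'                  # trailing byte collides with backslash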
1079 1079 ERRORS ENCOUNTERED
1080 1080
1081 1081 $ cat errors.log