More whitespace cleanups...
mpm@selenic.com
r575:7f5ce4bb default
byterange.py
@@ -1,452 +1,451 @@
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
#   Free Software Foundation, Inc.,
#   59 Temple Place, Suite 330,
#   Boston, MA 02111-1307 USA

# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko

# $Id: byterange.py,v 1.9 2005/02/14 21:55:07 mstenner Exp $

import os
import stat
import urllib
import urllib2
import rfc822

try:
    from cStringIO import StringIO
except ImportError, msg:
    from StringIO import StringIO

class RangeError(IOError):
    """Error raised when an unsatisfiable range is requested."""
    pass

class HTTPRangeHandler(urllib2.BaseHandler):
    """Handler that enables HTTP Range headers.

    This is very simple: the Range header is an HTTP feature to
    begin with, so all this class does is tell urllib2 that the
    "206 Partial Content" response from the HTTP server is what we
    expected.

    Example:
        import urllib2
        import byterange

        range_handler = byterange.HTTPRangeHandler()
        opener = urllib2.build_opener(range_handler)

        # install it
        urllib2.install_opener(opener)

        # create Request and set Range header
        req = urllib2.Request('http://www.python.org/')
        req.add_header('Range', 'bytes=30-50')
        f = urllib2.urlopen(req)
    """

    def http_error_206(self, req, fp, code, msg, hdrs):
        # 206 Partial Content Response
        r = urllib.addinfourl(fp, hdrs, req.get_full_url())
        r.code = code
        r.msg = msg
        return r

    def http_error_416(self, req, fp, code, msg, hdrs):
        # HTTP's Range Not Satisfiable error
        raise RangeError('Requested Range Not Satisfiable')

class RangeableFileObject:
    """File object wrapper to enable raw range handling.
    This was implemented primarily for handling range
    specifications for file:// urls. This object effectively makes
    a file object look like it consists only of a range of bytes in
    the stream.

    Examples:
        # expose 10 bytes, starting at byte position 20, from
        # /etc/passwd.
        >>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30))
        # seek seeks within the range (to position 23 in this case)
        >>> fo.seek(3)
        # tell tells where you are _within the range_ (position 3 in
        # this case)
        >>> fo.tell()
        # read EOFs if an attempt is made to read past the last
        # byte in the range. the following will return only 7 bytes.
        >>> fo.read(30)
    """

    def __init__(self, fo, rangetup):
        """Create a RangeableFileObject.
        fo       -- a file like object. only the read() method need be
                    supported but supporting an optimized seek() is
                    preferable.
        rangetup -- a (firstbyte,lastbyte) tuple specifying the range
                    to work over.
        The file object provided is assumed to be at byte offset 0.
        """
        self.fo = fo
        (self.firstbyte, self.lastbyte) = range_tuple_normalize(rangetup)
        self.realpos = 0
        self._do_seek(self.firstbyte)

    def __getattr__(self, name):
        """This effectively allows us to wrap at the instance level.
        Any attribute not found in _this_ object will be searched for
        in self.fo. This includes methods."""
        if hasattr(self.fo, name):
            return getattr(self.fo, name)
        raise AttributeError, name

    def tell(self):
        """Return the position within the range.
        This is different from fo.seek in that position 0 is the
        first byte position of the range tuple. For example, if
        this object was created with a range tuple of (500,899),
        tell() will return 0 when at byte position 500 of the file.
        """
        return (self.realpos - self.firstbyte)

    def seek(self, offset, whence=0):
        """Seek within the byte range.
        Positioning is identical to that described under tell().
        """
        assert whence in (0, 1, 2)
        if whence == 0:   # absolute seek
            realoffset = self.firstbyte + offset
        elif whence == 1: # relative seek
            realoffset = self.realpos + offset
        elif whence == 2: # absolute from end of file
            # XXX: are we raising the right Error here?
            raise IOError('seek from end of file not supported.')

        # do not allow seek past lastbyte in range
        if self.lastbyte and (realoffset >= self.lastbyte):
            realoffset = self.lastbyte

        self._do_seek(realoffset - self.realpos)

    def read(self, size=-1):
        """Read within the range.
        This method will limit the size read based on the range.
        """
        size = self._calc_read_size(size)
        rslt = self.fo.read(size)
        self.realpos += len(rslt)
        return rslt

    def readline(self, size=-1):
        """Read lines within the range.
        This method will limit the size read based on the range.
        """
        size = self._calc_read_size(size)
        rslt = self.fo.readline(size)
        self.realpos += len(rslt)
        return rslt

    def _calc_read_size(self, size):
        """Handles calculating the amount of data to read based on
        the range.
        """
        if self.lastbyte:
            if size > -1:
                if ((self.realpos + size) >= self.lastbyte):
                    size = (self.lastbyte - self.realpos)
            else:
                size = (self.lastbyte - self.realpos)
        return size

    def _do_seek(self, offset):
        """Seek based on whether wrapped object supports seek().
        offset is relative to the current position (self.realpos).
        """
        assert offset >= 0
        if not hasattr(self.fo, 'seek'):
            self._poor_mans_seek(offset)
        else:
            self.fo.seek(self.realpos + offset)
        self.realpos += offset

    def _poor_mans_seek(self, offset):
        """Seek by calling the wrapped file object's read() method.
        This is used for file like objects that do not have native
        seek support. The wrapped object's read() method is called
        to manually seek to the desired position.
        offset -- read this number of bytes from the wrapped
                  file object.
        raise RangeError if we encounter EOF before reaching the
        specified offset.
        """
        pos = 0
        bufsize = 1024
        while pos < offset:
            if (pos + bufsize) > offset:
                bufsize = offset - pos
            buf = self.fo.read(bufsize)
            if len(buf) != bufsize:
                raise RangeError('Requested Range Not Satisfiable')
            pos += bufsize

class FileRangeHandler(urllib2.FileHandler):
    """FileHandler subclass that adds Range support.
    This class handles Range headers exactly like an HTTP
    server would.
    """
    def open_local_file(self, req):
        import mimetypes
        import mimetools
        host = req.get_host()
        file = req.get_selector()
        localfile = urllib.url2pathname(file)
        stats = os.stat(localfile)
        size = stats[stat.ST_SIZE]
        modified = rfc822.formatdate(stats[stat.ST_MTIME])
        mtype = mimetypes.guess_type(file)[0]
        if host:
            host, port = urllib.splitport(host)
            if port or socket.gethostbyname(host) not in self.get_names():
                raise urllib2.URLError('file not on local host')
        fo = open(localfile,'rb')
        brange = req.headers.get('Range',None)
        brange = range_header_to_tuple(brange)
        assert brange != ()
        if brange:
            (fb,lb) = brange
            if lb == '': lb = size
            if fb < 0 or fb > size or lb > size:
                raise RangeError('Requested Range Not Satisfiable')
            size = (lb - fb)
            fo = RangeableFileObject(fo, (fb,lb))
        headers = mimetools.Message(StringIO(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        return urllib.addinfourl(fo, headers, 'file:'+file)


# FTP Range Support
# Unfortunately, a large amount of base FTP code had to be copied
# from urllib and urllib2 in order to insert the FTP REST command.
# Code modifications for range support have been commented as
# follows:
# -- range support modifications start/end here

from urllib import splitport, splituser, splitpasswd, splitattr, \
     unquote, addclosehook, addinfourl
import ftplib
import socket
import sys
import mimetypes
import mimetools

class FTPRangeHandler(urllib2.FTPHandler):
    def ftp_open(self, req):
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise urllib2.URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitattr(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()

            # -- range support modifications start here
            rest = None
            range_tup = range_header_to_tuple(req.headers.get('Range',None))
            assert range_tup != ()
            if range_tup:
                (fb,lb) = range_tup
                if fb > 0: rest = fb
            # -- range support modifications end here

            fp, retrlen = fw.retrfile(file, type, rest)

            # -- range support modifications start here
            if range_tup:
                (fb,lb) = range_tup
                if lb == '':
                    if retrlen is None or retrlen == 0:
                        raise RangeError('Requested Range Not Satisfiable due to unobtainable file length.')
                    lb = retrlen
                    retrlen = lb - fb
                    if retrlen < 0:
                        # beginning of range is larger than file
                        raise RangeError('Requested Range Not Satisfiable')
                else:
                    retrlen = lb - fb
                    fp = RangeableFileObject(fp, (0,retrlen))
            # -- range support modifications end here

            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            raise IOError, ('ftp error', msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs):
        fw = ftpwrapper(user, passwd, host, port, dirs)
        return fw

class ftpwrapper(urllib.ftpwrapper):
    # range support note:
    # this ftpwrapper code is copied directly from
    # urllib. The only enhancement is to add the rest
    # argument and pass it on to ftp.ntransfercmd
    def retrfile(self, file, type, rest=None):
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Use nlst to see if the file exists at all
            try:
                self.ftp.nlst(file)
            except ftplib.error_perm, reason:
                raise IOError, ('ftp error', reason), sys.exc_info()[2]
            # Restore the transfer mode!
            self.ftp.voidcmd(cmd)
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn = self.ftp.ntransfercmd(cmd, rest)
            except ftplib.error_perm, reason:
                if str(reason)[:3] == '501':
                    # workaround for REST not supported error
                    fp, retrlen = self.retrfile(file, type)
                    fp = RangeableFileObject(fp, (rest,''))
                    return (fp, retrlen)
                elif str(reason)[:3] != '550':
                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing
            if file: cmd = 'LIST ' + file
            else: cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'),
                             self.endtransfer), conn[1])

####################################################################
# Range Tuple Functions
# XXX: These range tuple functions might go better in a class.

_rangere = None
def range_header_to_tuple(range_header):
    """Get a (firstbyte,lastbyte) tuple from a Range header value.

    Range headers have the form "bytes=<firstbyte>-<lastbyte>". This
    function pulls the firstbyte and lastbyte values and returns
    a (firstbyte,lastbyte) tuple. If lastbyte is not specified in
    the header value, it is returned as an empty string in the
    tuple.

    Return None if range_header is None
    Return () if range_header does not conform to the range spec
    pattern.

    """
    global _rangere
    if range_header is None: return None
    if _rangere is None:
        import re
        _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
    match = _rangere.match(range_header)
    if match:
        tup = range_tuple_normalize(match.group(1,2))
        if tup and tup[1]:
            tup = (tup[0],tup[1]+1)
        return tup
    return ()

def range_tuple_to_header(range_tup):
    """Convert a range tuple to a Range header value.
    Return a string of the form "bytes=<firstbyte>-<lastbyte>" or None
    if no range is needed.
    """
    if range_tup is None: return None
    range_tup = range_tuple_normalize(range_tup)
    if range_tup:
        if range_tup[1]:
            range_tup = (range_tup[0],range_tup[1] - 1)
        return 'bytes=%s-%s' % range_tup

def range_tuple_normalize(range_tup):
    """Normalize a (first_byte,last_byte) range tuple.
    Return a tuple whose first element is guaranteed to be an int
    and whose second element will be '' (meaning: the last byte) or
    an int. Finally, return None if the normalized tuple == (0,'')
    as that is equivalent to retrieving the entire file.
    """
    if range_tup is None: return None
    # handle first byte
    fb = range_tup[0]
    if fb in (None,''): fb = 0
    else: fb = int(fb)
    # handle last byte
    try: lb = range_tup[1]
    except IndexError: lb = ''
    else:
        if lb is None: lb = ''
        elif lb != '': lb = int(lb)
    # check if range is over the entire file
    if (fb,lb) == (0,''): return None
    # check that the range is valid
    if lb < fb: raise RangeError('Invalid byte range: %s-%s' % (fb,lb))
    return (fb,lb)

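For orientation, a minimal sketch of how the handlers and range helpers above are typically wired together; this is not part of the commit, and the module name byterange, the URL and the byte offsets are illustrative:

import urllib2
import byterange

# install an opener that understands 206/416 responses and file/ftp ranges
opener = urllib2.build_opener(byterange.HTTPRangeHandler(),
                              byterange.FileRangeHandler(),
                              byterange.FTPRangeHandler())
urllib2.install_opener(opener)

req = urllib2.Request('http://www.example.org/somefile')
req.add_header('Range', 'bytes=30-50')   # urlopen raises RangeError on 416
f = urllib2.urlopen(req)

# the helpers convert between header values and (firstbyte, lastbyte)
# tuples, where lastbyte is exclusive and '' means "to the end of the file"
assert byterange.range_header_to_tuple('bytes=30-50') == (30, 51)
assert byterange.range_tuple_to_header((30, 51)) == 'bytes=30-50'
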
hg.py
@@ -1,1524 +1,1524 @@
# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", path + ".i"),
                        os.path.join("data", path + ".d"))

    def read(self, node):
        t = self.revision(node)
        if t[:2] != '\1\n':
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if t[:2] != '\1\n':
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text[:2] == '\1\n':
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    visit.append(p)
                else:
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        visit.sort()
        hist = {}

        for r,n in visit:
            curr = decorate(self.read(n), self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

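# Illustrative aside, not part of hg.py: filelog.add() above frames optional
# per-file metadata by bracketing "key: value" lines with "\1\n" markers in
# front of the file text; read() strips the frame and readmeta() parses it.
# The "copy" key below is a made-up example.
framed = "\1\n" + "copy: a.txt\n" + "\1\n" + "file contents\n"
assert framed[framed.find('\1\n', 2) + 2:] == "file contents\n"  # what read() returns
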
class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1]
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
            self.read(node)
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
        else:
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None):
        files = map.keys()
        files.sort()

        self.addlist = ["%s\000%s%s\n" %
                        (f, hex(map[f]), flags[f] and "x" or '')
                        for f in files]
        text = "".join(self.addlist)

        n = self.addrevision(text, transaction, link, p1, p2)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

class changelog(revlog):
    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        user = (user or
                os.environ.get("HGUSER") or
                os.environ.get("EMAIL") or
                (os.environ.get("LOGNAME",
                                os.environ.get("USERNAME", "unknown"))
                 + '@' + socket.getfqdn()))
        date = date or "%d %d" % (time.time(), time.timezone)
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

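# Illustrative aside, not part of hg.py: the changelog entry layout that
# add()/extract() above agree on -- manifest hash, user, date, changed files,
# a blank line, then the description.  The values below are made up.
entry = "\n".join(["0123abcd" * 5, "user@example.com", "1119000000 0",
                   "hello.txt", "", "a description"])
manifest_hex, user, date = entry.splitlines()[:3]
assert entry[entry.index("\n\n") + 2:] == "a description"  # what extract() calls desc
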
class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui
        self.map = None
        self.pl = None
        self.copies = {}

    def __del__(self):
        if self.dirty:
            self.write()

    def __getitem__(self, key):
        try:
            return self.map[key]
        except TypeError:
            self.read()
            return self[key]

    def __contains__(self, key):
        if not self.map: self.read()
        return key in self.map

    def parents(self):
        if not self.pl:
            self.read()
        return self.pl

    def setparents(self, p1, p2 = nullid):
        self.dirty = 1
        self.pl = p1, p2

    def state(self, key):
        try:
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if self.map is not None: return self.map

        self.map = {}
        self.pl = [nullid, nullid]
        try:
            st = self.opener("dirstate").read()
            if not st: return
        except: return

        self.pl = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
            self.map[f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.read()
        self.dirty = 1
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            if state == "r":
                self.map[f] = ('r', 0, 0, 0)
            else:
                s = os.stat(os.path.join(self.root, f))
                self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)

    def forget(self, files):
        if not files: return
        self.read()
        self.dirty = 1
        for f in files:
            try:
                del self.map[f]
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)
                pass

    def clear(self):
        self.map = {}
        self.dirty = 1

    def write(self):
        st = self.opener("dirstate", "w")
        st.write("".join(self.pl))
        for f, e in self.map.items():
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def changes(self, files, ignore):
        self.read()
        dc = self.map.copy()
        lookup, changed, added, unknown = [], [], [], []

        # compare all files by default
        if not files: files = [self.root]

        # recursive generator of all files listed
        def walk(files):
            for f in util.unique(files):
                f = os.path.join(self.root, f)
                if os.path.isdir(f):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        if ".hg" in subdirs: subdirs.remove(".hg")
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield fn
                else:
                    yield f[len(self.root) + 1:]

        for fn in util.unique(walk(files)):
            try: s = os.stat(os.path.join(self.root, fn))
            except: continue

            if fn in dc:
                c = dc[fn]
                del dc[fn]

                if c[0] == 'm':
                    changed.append(fn)
                elif c[0] == 'a':
                    added.append(fn)
                elif c[0] == 'r':
                    unknown.append(fn)
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                    changed.append(fn)
                elif c[1] != s.st_mode or c[3] != s.st_mtime:
                    lookup.append(fn)
            else:
                if not ignore(fn): unknown.append(fn)

        return (lookup, changed, added, dc.keys(), unknown)

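# Illustrative aside, not part of hg.py: one dirstate record, packed the way
# dirstate.write() above does and unpacked the way read() does.  The state,
# stat values and filename are made up.
import struct
fname = "hello.txt"
record = struct.pack(">cllll", 'n', 0644, 1024, 1119000000, len(fname)) + fname
e = struct.unpack(">cllll", record[:17])   # 17 = 1 + 4*4 header bytes
assert record[17:17 + e[4]] == fname       # the filename follows the header
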
# used to avoid circular references so destructors work
def opener(base):
    p = base
    def o(path, mode="r"):
        if p[:7] == "http://":
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception): pass

366 class localrepository:
366 class localrepository:
367 def __init__(self, ui, path=None, create=0):
367 def __init__(self, ui, path=None, create=0):
368 self.remote = 0
368 self.remote = 0
369 if path and path[:7] == "http://":
369 if path and path[:7] == "http://":
370 self.remote = 1
370 self.remote = 1
371 self.path = path
371 self.path = path
372 else:
372 else:
373 if not path:
373 if not path:
374 p = os.getcwd()
374 p = os.getcwd()
375 while not os.path.isdir(os.path.join(p, ".hg")):
375 while not os.path.isdir(os.path.join(p, ".hg")):
376 oldp = p
376 oldp = p
377 p = os.path.dirname(p)
377 p = os.path.dirname(p)
378 if p == oldp: raise RepoError("no repo found")
378 if p == oldp: raise RepoError("no repo found")
379 path = p
379 path = p
380 self.path = os.path.join(path, ".hg")
380 self.path = os.path.join(path, ".hg")
381
381
382 if not create and not os.path.isdir(self.path):
382 if not create and not os.path.isdir(self.path):
383 raise RepoError("repository %s not found" % self.path)
383 raise RepoError("repository %s not found" % self.path)
384
384
385 self.root = path
385 self.root = path
386 self.ui = ui
386 self.ui = ui
387
387
388 if create:
388 if create:
389 os.mkdir(self.path)
389 os.mkdir(self.path)
390 os.mkdir(self.join("data"))
390 os.mkdir(self.join("data"))
391
391
392 self.opener = opener(self.path)
392 self.opener = opener(self.path)
393 self.wopener = opener(self.root)
393 self.wopener = opener(self.root)
394 self.manifest = manifest(self.opener)
394 self.manifest = manifest(self.opener)
395 self.changelog = changelog(self.opener)
395 self.changelog = changelog(self.opener)
396 self.ignorelist = None
396 self.ignorelist = None
397 self.tagscache = None
397 self.tagscache = None
398 self.nodetagscache = None
398 self.nodetagscache = None
399
399
400 if not self.remote:
400 if not self.remote:
401 self.dirstate = dirstate(self.opener, ui, self.root)
401 self.dirstate = dirstate(self.opener, ui, self.root)
402 try:
402 try:
403 self.ui.readconfig(self.opener("hgrc"))
403 self.ui.readconfig(self.opener("hgrc"))
404 except IOError: pass
404 except IOError: pass
405
405
406 def ignore(self, f):
406 def ignore(self, f):
407 if self.ignorelist is None:
407 if self.ignorelist is None:
408 self.ignorelist = []
408 self.ignorelist = []
409 try:
409 try:
410 l = file(self.wjoin(".hgignore"))
410 l = file(self.wjoin(".hgignore"))
411 for pat in l:
411 for pat in l:
412 if pat != "\n":
412 if pat != "\n":
413 self.ignorelist.append(re.compile(util.pconvert(pat[:-1])))
413 self.ignorelist.append(re.compile(util.pconvert(pat[:-1])))
414 except IOError: pass
414 except IOError: pass
415 for pat in self.ignorelist:
415 for pat in self.ignorelist:
416 if pat.search(f): return True
416 if pat.search(f): return True
417 return False
417 return False
418
418
419 def hook(self, name, **args):
419 def hook(self, name, **args):
420 s = self.ui.config("hooks", name)
420 s = self.ui.config("hooks", name)
421 if s:
421 if s:
422 self.ui.note("running hook %s: %s\n" % (name, s))
422 self.ui.note("running hook %s: %s\n" % (name, s))
423 old = {}
423 old = {}
424 for k, v in args.items():
424 for k, v in args.items():
425 k = k.upper()
425 k = k.upper()
426 old[k] = os.environ.get(k, None)
426 old[k] = os.environ.get(k, None)
427 os.environ[k] = v
427 os.environ[k] = v
428
428
429 r = os.system(s)
429 r = os.system(s)
430
430
431 for k, v in old.items():
431 for k, v in old.items():
432 if v != None:
432 if v != None:
433 os.environ[k] = v
433 os.environ[k] = v
434 else:
434 else:
435 del os.environ[k]
435 del os.environ[k]
436
436
437 if r:
437 if r:
438 self.ui.warn("abort: %s hook failed with status %d!\n" %
438 self.ui.warn("abort: %s hook failed with status %d!\n" %
439 (name, r))
439 (name, r))
440 return False
440 return False
441 return True
441 return True
442
442
443 def tags(self):
443 def tags(self):
444 '''return a mapping of tag to node'''
444 '''return a mapping of tag to node'''
445 if not self.tagscache:
445 if not self.tagscache:
446 self.tagscache = {}
446 self.tagscache = {}
447 try:
447 try:
448 # read each head of the tags file, ending with the tip
448 # read each head of the tags file, ending with the tip
449 # and add each tag found to the map, with "newer" ones
449 # and add each tag found to the map, with "newer" ones
450 # taking precedence
450 # taking precedence
451 fl = self.file(".hgtags")
451 fl = self.file(".hgtags")
452 h = fl.heads()
452 h = fl.heads()
453 h.reverse()
453 h.reverse()
454 for r in h:
454 for r in h:
455 for l in fl.revision(r).splitlines():
455 for l in fl.revision(r).splitlines():
456 if l:
456 if l:
457 n, k = l.split(" ", 1)
457 n, k = l.split(" ", 1)
458 try:
458 try:
459 bin_n = bin(n)
459 bin_n = bin(n)
460 except TypeError:
460 except TypeError:
461 bin_n = ''
461 bin_n = ''
462 self.tagscache[k.strip()] = bin_n
462 self.tagscache[k.strip()] = bin_n
463 except KeyError:
463 except KeyError:
464 pass
464 pass
465 for k, n in self.ui.configitems("tags"):
465 for k, n in self.ui.configitems("tags"):
466 try:
466 try:
467 bin_n = bin(n)
467 bin_n = bin(n)
468 except TypeError:
468 except TypeError:
469 bin_n = ''
469 bin_n = ''
470 self.tagscache[k] = bin_n
470 self.tagscache[k] = bin_n
471
471
472 self.tagscache['tip'] = self.changelog.tip()
472 self.tagscache['tip'] = self.changelog.tip()
473
473
474 return self.tagscache
474 return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r,t,n))
        l.sort()
        return [(t,n) for r,t,n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t,n in self.tags().items():
                self.nodetagscache.setdefault(n,[]).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        try:
            return self.tags()[key]
        except KeyError:
            return self.changelog.lookup(key)

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/': f = f[1:]
        return filelog(self.opener, f)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def transaction(self):
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("undo.dirstate", "w").write(ds)

        return transaction.transaction(self.opener, self.join("journal"),
                                       self.join("undo"))

    def recover(self):
        lock = self.lock()
        if os.path.exists(self.join("journal")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("journal"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
        try:
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

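    # rawcommit() builds a changeset directly from an explicit file list and
    # explicit parents, reading file contents from the working directory; the
    # dirstate is only updated when p1 matches the current dirstate parent.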
    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if orig_parent == p1:
            update_dirstate = 1
        else:
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
            try:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                try:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                        self.dirstate.forget([f])
                except:
                    # deleted from p2?
                    pass

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        tr.close()
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn("%s not tracked!\n" % f)
        else:
            (c, a, d, u) = self.changes(None, None)
            commit = c + a
            remove = d

        if not commit and not remove:
            self.ui.status("nothing changed\n")
            return

        if not self.hook("precommit"):
            return 1

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.ui.warn("trouble committing %s!\n" % f)
                raise

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0])

        # add changeset
        new = new.keys()
        new.sort()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return 1
            text = edittext

        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)

        if not self.hook("commit", node=hex(n)):
            return 1

        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

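    # changes() returns the file lists (changed, added, deleted, unknown)
    # between node1 and node2; either end may be None, meaning the working
    # directory (node2) or its first parent (node1).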
    def changes(self, node1, node2, files=None):
        mf2, u = None, []

        def fcmp(fn, mf):
            t1 = self.wfile(fn).read()
            t2 = self.file(fn).revision(mf[fn])
            return cmp(t1, t2)

        # are we comparing the working directory?
        if not node2:
            l, c, a, d, u = self.dirstate.changes(files, self.ignore)

            # are we comparing working dir against its parent?
            if not node1:
                if l:
                    # do a full compare of any files that might have changed
                    change = self.changelog.read(self.dirstate.parents()[0])
                    mf2 = self.manifest.read(change[0])
                    for f in l:
                        if fcmp(f, mf2):
                            c.append(f)

                for l in c, a, d, u:
                    l.sort()

                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node2:
            if not mf2:
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = self.manifest.read(change[0]).copy()
            for f in a + c + l:
                mf2[f] = ""
            for f in d:
                if f in mf2: del mf2[f]
        else:
            change = self.changelog.read(node2)
            mf2 = self.manifest.read(change[0])

        # flush lists from dirstate before comparing manifests
        c, a = [], []

        change = self.changelog.read(node1)
        mf1 = self.manifest.read(change[0]).copy()

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf2[fn] != "" or fcmp(fn, mf1):
                        c.append(fn)
                del mf1[fn]
            else:
                a.append(fn)

        d = mf1.keys()

        for l in c, a, d, u:
            l.sort()

        return (c, a, d, u)

    def add(self, list):
        for f in list:
            p = self.wjoin(f)
            if not os.path.isfile(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif self.dirstate.state(f) == 'n':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        for f in list:
            p = self.wjoin(f)
            if os.path.isfile(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        p = self.wjoin(dest)
        if not os.path.isfile(p):
            self.ui.warn("%s does not exist!\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        return self.changelog.heads()

    def branches(self, nodes):
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

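    # between() walks first parents from each top node toward its bottom node
    # and records the nodes seen at exponentially growing distances
    # (1, 2, 4, ...); the pull discovery code uses these samples to narrow
    # down where an incomplete branch diverges.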
    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

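    # newer() returns the given nodes plus every changeset that descends from
    # one of them, found by a single forward scan from the lowest revision.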
    def newer(self, nodes):
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                nl.append(n)
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl

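    # findincoming() queries the remote's heads, branches, and between
    # commands to find the roots of the changesets we are missing; it returns
    # [nullid] for a full pull, None when nothing is missing, or the list of
    # earliest unknown changesets to request.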
    def findincoming(self, remote):
        m = self.changelog.nodemap
        search = []
        fetch = []
        seen = {}
        seenbranch = {}

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status("requesting all changes\n")
            return [nullid]

        # otherwise, assume we're closer to the tip than the root
        self.ui.status("searching for changes\n")
        heads = remote.heads()
        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.append(n[1]) # earliest unknown
                            continue

                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] not in m and b[0] not in seen:
                            unknown.append(b)

        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.append(p)
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        for f in fetch:
            if f in m:
                raise RepoError("already have changeset " + short(f[:4]))

        if fetch == [nullid]:
            self.ui.warn("warning: pulling from an unrelated repository!\n")

        self.ui.note("adding new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return fetch

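    # changegroup() yields the changelog group, the manifest group, and one
    # group per changed file for everything newer than basenodes; each file
    # group is preceded by a length-prefixed chunk holding the filename.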
    def changegroup(self, basenodes):
        nodes = self.newer(basenodes)

        # construct the link map
        linkmap = {}
        for n in nodes:
            linkmap[self.changelog.rev(n)] = n

        # construct a list of all changed files
        changed = {}
        for n in nodes:
            c = self.changelog.read(n)
            for f in c[3]:
                changed[f] = 1
        changed = changed.keys()
        changed.sort()

        # the changegroup is changesets + manifests + all file revs
        revs = [ self.changelog.rev(n) for n in nodes ]

        for y in self.changelog.group(linkmap): yield y
        for y in self.manifest.group(linkmap): yield y
        for f in changed:
            yield struct.pack(">l", len(f) + 4) + f
            g = self.file(f).group(linkmap)
            for y in g:
                yield y

    def addchangegroup(self, generator):

        class genread:
            def __init__(self, generator):
                self.g = generator
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                    try:
                        self.buf += self.g.next()
                    except StopIteration:
                        break
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

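        # each chunk on the wire is a 4-byte big-endian length (which counts
        # the length field itself) followed by the payload; a length of 4 or
        # less terminates the current group.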
        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            return source.read(l - 4)

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not generator: return
        changesets = files = revisions = 0

        source = genread(generator)
        lock = self.lock()
        tr = self.transaction()

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        changesets = self.changelog.rev(cn) - self.changelog.rev(co)

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file revisions\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        self.ui.status(("modified %d files, added %d changesets" +
                        " and %d new revisions\n")
                       % (files, changesets, revisions))

        tr.close()
        return

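    # update() moves the working directory to the given node: a plain jump
    # when the target is on a linear path from the current parent, otherwise
    # (with allow set, i.e. update -m) a manifest-level merge that schedules
    # per-file three-way merges via merge3().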
    def update(self, node, allow=False, force=False):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        (c, a, d, u) = self.changes(None, None)

        # is this a jump, or a merge? i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []
        mark = {}

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if linear_path and f not in m2:
                self.dirstate.forget((f,))

        for f, n in mw.iteritems():
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wfile(f).read()
                    t2 = self.file(f).revision(m2[f])
                    if cmp(t1, t2) == 0:
                        mark[f] = 1
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                    else:
                        mark[f] = 1

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                            mark[f] = 1
                del m2[f]
            elif f in ma:
                if not force and n != ma[f]:
                    r = ""
                    if linear_path or allow:
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                if n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local created %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if f[0] == "/": continue
            if not force and f in ma and n != ma[f]:
                r = ""
                if linear_path or allow:
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "d": remove.append(f)
            else:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n

        del mw, m1, m2, ma

        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path:
            # we don't need to do any magic, just jump to the new rev
            mode = 'n'
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to perform a branch merge)\n")
                return 1
            # we have to remember what files we needed to get/change
            # because any file that's different from either one of its
            # parents must be in the changeset
            mode = 'm'
            self.dirstate.update(mark.keys(), "m")

        self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wfile(f, "w").write(t)
            except IOError:
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wfile(f, "w").write(t)
            util.set_exec(self.wjoin(f), mf2[f])
            self.dirstate.update([f], mode)

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            m, o, flag = merge[f]
            self.merge3(f, m, o)
            util.set_exec(self.wjoin(f), flag)
            self.dirstate.update([f], 'm')

        for f in remove:
            self.ui.note("removing %s\n" % f)
            os.unlink(f)
        if mode == 'n':
            self.dirstate.forget(remove)
        else:
            self.dirstate.update(remove, 'r')

    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory"""

        def temp(prefix, node):
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            f.write(fl.revision(node))
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                      (fn, short(other), short(base)))

        cmd = os.environ.get("HGMERGE", "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        os.unlink(b)
        os.unlink(c)

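    # verify() cross-checks the changelog, manifest, and filelogs against one
    # another and reports how many integrity errors were found.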
    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
            try:
                changes = self.changelog.read(n)
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1

            try:
                delta = mdiff.patchtext(self.manifest.delta(n))
            except KeyboardInterrupt:
                print "aborted"
                sys.exit(0)
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        ff.sort()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    print len(filenodes[f].keys()), fl.count(), f
                    errors += 1
                else:
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                                 % (f, short(n), fl.linkrev(n)))
                    errors += 1
                else:
                    filelinkrevs[f].remove(flr)

                # verify contents
                try:
                    t = fl.read(n)
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s\n" %
                                 (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s\n" %
                                 (f, short(n), short(p2)))
                    errors += 1
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                self.ui.warn("node %s in manifests not in %s\n"
                             % (hex(node), f))
                errors += 1

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors:
            self.ui.warn("%d integrity errors encountered!\n" % errors)
            return 1

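# remoterepository speaks the hg-over-HTTP command protocol (cmd=heads,
# branches, between, changegroup), optionally through a proxy configured via
# the [http_proxy] section or the corresponding environment variables.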
class remoterepository:
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui
        no_list = [ "localhost", "127.0.0.1" ]
        host = ui.config("http_proxy", "host")
        if host is None:
            host = os.environ.get("http_proxy")
        if host and host.startswith('http://'):
            host = host[7:]
        user = ui.config("http_proxy", "user")
        passwd = ui.config("http_proxy", "passwd")
        no = ui.config("http_proxy", "no")
        if no is None:
            no = os.environ.get("no_proxy")
        if no:
            no_list = no_list + no.split(",")

        no_proxy = 0
        for h in no_list:
            if (path.startswith("http://" + h + "/") or
                path.startswith("http://" + h + ":") or
                path == "http://" + h):
                no_proxy = 1

        # Note: urllib2 takes proxy values from the environment and those will
        # take precedence
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            if os.environ.has_key(env):
                del os.environ[env]

        proxy_handler = urllib2.BaseHandler()
        if host and not no_proxy:
            proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})

        authinfo = None
        if user and passwd:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, host, user, passwd)
            authinfo = urllib2.ProxyBasicAuthHandler(passmgr)

        opener = urllib2.build_opener(proxy_handler, authinfo)
        urllib2.install_opener(opener)

    def do_cmd(self, cmd, **args):
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        return urllib2.urlopen(cu)

    def heads(self):
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        n = " ".join(map(hex, nodes))
        zd = zlib.decompressobj()
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0
        while 1:
            d = f.read(4096)
            bytes += len(d)
            if not d:
                yield zd.flush()
                break
            yield zd.decompress(d)
        self.ui.note("%d bytes of data transferred\n" % bytes)

def repository(ui, path=None, create=0):
    if path and path[:7] == "http://":
        return remoterepository(ui, path)
    if path and path[:5] == "hg://":
        return remoterepository(ui, path.replace("hg://", "http://"))
    if path and path[:11] == "old-http://":
        return localrepository(ui, path.replace("old-http://", "http://"))
    else:
        return localrepository(ui, path, create)

@@ -1,787 +1,787
# hgweb.py - web interface to a mercurial repository
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, cgi, time, re, difflib, sys, zlib
from mercurial.hg import *
from mercurial.ui import *

def templatepath():
    for f in "templates", "../templates":
        p = os.path.join(os.path.dirname(__file__), f)
        if os.path.isdir(p): return p

def age(t):
    def plural(t, c):
        if c == 1: return t
        return t + "s"
    def fmt(t, c):
        return "%d %s" % (c, plural(t, c))

    now = time.time()
    delta = max(1, int(now - t))

    scales = [["second", 1],
              ["minute", 60],
              ["hour", 3600],
              ["day", 3600 * 24],
              ["week", 3600 * 24 * 7],
              ["month", 3600 * 24 * 30],
              ["year", 3600 * 24 * 365]]

    scales.reverse()

    for t, s in scales:
        n = delta / s
        if n >= 2 or s == 1: return fmt(t, n)

def nl2br(text):
    return text.replace('\n', '<br/>\n')

def obfuscate(text):
    return ''.join([ '&#%d;' % ord(c) for c in text ])

def up(p):
    if p[0] != "/": p = "/" + p
    if p[-1] == "/": p = p[:-1]
    up = os.path.dirname(p)
    if up == "/":
        return "/"
    return up + "/"

def httphdr(type):
    print 'Content-type: %s\n' % type

def write(*things):
    for thing in things:
        if hasattr(thing, "__iter__"):
            for part in thing:
                write(part)
        else:
            sys.stdout.write(str(thing))

67 def template(tmpl, filters = {}, **map):
67 def template(tmpl, filters = {}, **map):
68 while tmpl:
68 while tmpl:
69 m = re.search(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", tmpl)
69 m = re.search(r"#([a-zA-Z0-9]+)((\|[a-zA-Z0-9]+)*)#", tmpl)
70 if m:
70 if m:
71 yield tmpl[:m.start(0)]
71 yield tmpl[:m.start(0)]
72 v = map.get(m.group(1), "")
72 v = map.get(m.group(1), "")
73 v = callable(v) and v() or v
73 v = callable(v) and v() or v
74
74
75 fl = m.group(2)
75 fl = m.group(2)
76 if fl:
76 if fl:
77 for f in fl.split("|")[1:]:
77 for f in fl.split("|")[1:]:
78 v = filters[f](v)
78 v = filters[f](v)
79
79
80 yield v
80 yield v
81 tmpl = tmpl[m.end(0):]
81 tmpl = tmpl[m.end(0):]
82 else:
82 else:
83 yield tmpl
83 yield tmpl
84 return
84 return
85
85
86 class templater:
86 class templater:
87 def __init__(self, mapfile, filters = {}):
87 def __init__(self, mapfile, filters = {}):
88 self.cache = {}
88 self.cache = {}
89 self.map = {}
89 self.map = {}
90 self.base = os.path.dirname(mapfile)
90 self.base = os.path.dirname(mapfile)
91 self.filters = filters
91 self.filters = filters
92
92
93 for l in file(mapfile):
93 for l in file(mapfile):
94 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
94 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
95 if m:
95 if m:
96 self.cache[m.group(1)] = m.group(2)
96 self.cache[m.group(1)] = m.group(2)
97 else:
97 else:
98 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
98 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
99 if m:
99 if m:
100 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
100 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
101 else:
101 else:
102 raise "unknown map entry '%s'" % l
102 raise "unknown map entry '%s'" % l
103
103
104 def __call__(self, t, **map):
104 def __call__(self, t, **map):
105 try:
105 try:
106 tmpl = self.cache[t]
106 tmpl = self.cache[t]
107 except KeyError:
107 except KeyError:
108 tmpl = self.cache[t] = file(self.map[t]).read()
108 tmpl = self.cache[t] = file(self.map[t]).read()
109 return template(tmpl, self.filters, **map)
109 return template(tmpl, self.filters, **map)
110
110
111 class hgweb:
111 class hgweb:
112 maxchanges = 10
112 maxchanges = 10
113 maxfiles = 10
113 maxfiles = 10
114
114
115 def __init__(self, path, name, templates = ""):
115 def __init__(self, path, name, templates = ""):
116 self.templates = templates or templatepath()
116 self.templates = templates or templatepath()
117 self.reponame = name
117 self.reponame = name
118 self.path = path
118 self.path = path
119 self.mtime = -1
119 self.mtime = -1
120 self.viewonly = 0
120 self.viewonly = 0
121
121
122 self.filters = {
122 self.filters = {
123 "escape": cgi.escape,
123 "escape": cgi.escape,
124 "age": age,
124 "age": age,
125 "date": (lambda x: time.asctime(time.gmtime(x))),
125 "date": (lambda x: time.asctime(time.gmtime(x))),
126 "addbreaks": nl2br,
126 "addbreaks": nl2br,
127 "obfuscate": obfuscate,
127 "obfuscate": obfuscate,
128 "short": (lambda x: x[:12]),
128 "short": (lambda x: x[:12]),
129 "firstline": (lambda x: x.splitlines(1)[0]),
129 "firstline": (lambda x: x.splitlines(1)[0]),
130 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--")
130 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--")
131 }
131 }
132
132
133 def refresh(self):
133 def refresh(self):
134 s = os.stat(os.path.join(self.path, ".hg", "00changelog.i"))
134 s = os.stat(os.path.join(self.path, ".hg", "00changelog.i"))
135 if s.st_mtime != self.mtime:
135 if s.st_mtime != self.mtime:
136 self.mtime = s.st_mtime
136 self.mtime = s.st_mtime
137 self.repo = repository(ui(), self.path)
137 self.repo = repository(ui(), self.path)
138
138
139 def date(self, cs):
139 def date(self, cs):
140 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
140 return time.asctime(time.gmtime(float(cs[2].split(' ')[0])))
141
141
142 def listfiles(self, files, mf):
142 def listfiles(self, files, mf):
143 for f in files[:self.maxfiles]:
143 for f in files[:self.maxfiles]:
144 yield self.t("filenodelink", node = hex(mf[f]), file = f)
144 yield self.t("filenodelink", node = hex(mf[f]), file = f)
145 if len(files) > self.maxfiles:
145 if len(files) > self.maxfiles:
146 yield self.t("fileellipses")
146 yield self.t("fileellipses")
147
147
148 def listfilediffs(self, files, changeset):
148 def listfilediffs(self, files, changeset):
149 for f in files[:self.maxfiles]:
149 for f in files[:self.maxfiles]:
150 yield self.t("filedifflink", node = hex(changeset), file = f)
150 yield self.t("filedifflink", node = hex(changeset), file = f)
151 if len(files) > self.maxfiles:
151 if len(files) > self.maxfiles:
152 yield self.t("fileellipses")
152 yield self.t("fileellipses")
153
153
154 def parents(self, t1, nodes=[], rev=None,**args):
154 def parents(self, t1, nodes=[], rev=None,**args):
155 if not rev: rev = lambda x: ""
155 if not rev: rev = lambda x: ""
156 for node in nodes:
156 for node in nodes:
157 if node != nullid:
157 if node != nullid:
158 yield self.t(t1, node = hex(node), rev = rev(node), **args)
158 yield self.t(t1, node = hex(node), rev = rev(node), **args)
159
159
160 def showtag(self, t1, node=nullid, **args):
160 def showtag(self, t1, node=nullid, **args):
161 for t in self.repo.nodetags(node):
161 for t in self.repo.nodetags(node):
162 yield self.t(t1, tag = t, **args)
162 yield self.t(t1, tag = t, **args)
163
163
164 def diff(self, node1, node2, files):
164 def diff(self, node1, node2, files):
165 def filterfiles(list, files):
165 def filterfiles(list, files):
166 l = [ x for x in list if x in files ]
166 l = [ x for x in list if x in files ]
167
167
168 for f in files:
168 for f in files:
169 if f[-1] != os.sep: f += os.sep
169 if f[-1] != os.sep: f += os.sep
170 l += [ x for x in list if x.startswith(f) ]
170 l += [ x for x in list if x.startswith(f) ]
171 return l
171 return l
172
172
173 parity = [0]
173 parity = [0]
174 def diffblock(diff, f, fn):
174 def diffblock(diff, f, fn):
175 yield self.t("diffblock",
175 yield self.t("diffblock",
176 lines = prettyprintlines(diff),
176 lines = prettyprintlines(diff),
177 parity = parity[0],
177 parity = parity[0],
178 file = f,
178 file = f,
179 filenode = hex(fn or nullid))
179 filenode = hex(fn or nullid))
180 parity[0] = 1 - parity[0]
180 parity[0] = 1 - parity[0]
181
181
182 def prettyprintlines(diff):
182 def prettyprintlines(diff):
183 for l in diff.splitlines(1):
183 for l in diff.splitlines(1):
184 if l.startswith('+'):
184 if l.startswith('+'):
185 yield self.t("difflineplus", line = l)
185 yield self.t("difflineplus", line = l)
186 elif l.startswith('-'):
186 elif l.startswith('-'):
187 yield self.t("difflineminus", line = l)
187 yield self.t("difflineminus", line = l)
188 elif l.startswith('@'):
188 elif l.startswith('@'):
189 yield self.t("difflineat", line = l)
189 yield self.t("difflineat", line = l)
190 else:
190 else:
191 yield self.t("diffline", line = l)
191 yield self.t("diffline", line = l)
192
192
193 r = self.repo
193 r = self.repo
194 cl = r.changelog
194 cl = r.changelog
195 mf = r.manifest
195 mf = r.manifest
196 change1 = cl.read(node1)
196 change1 = cl.read(node1)
197 change2 = cl.read(node2)
197 change2 = cl.read(node2)
198 mmap1 = mf.read(change1[0])
198 mmap1 = mf.read(change1[0])
199 mmap2 = mf.read(change2[0])
199 mmap2 = mf.read(change2[0])
200 date1 = self.date(change1)
200 date1 = self.date(change1)
201 date2 = self.date(change2)
201 date2 = self.date(change2)
202
202
203 c, a, d, u = r.changes(node1, node2)
203 c, a, d, u = r.changes(node1, node2)
204 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
204 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
205
205
206 for f in c:
206 for f in c:
207 to = r.file(f).read(mmap1[f])
207 to = r.file(f).read(mmap1[f])
208 tn = r.file(f).read(mmap2[f])
208 tn = r.file(f).read(mmap2[f])
209 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
209 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
210 for f in a:
210 for f in a:
211 to = None
211 to = None
212 tn = r.file(f).read(mmap2[f])
212 tn = r.file(f).read(mmap2[f])
213 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
213 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
214 for f in d:
214 for f in d:
215 to = r.file(f).read(mmap1[f])
215 to = r.file(f).read(mmap1[f])
216 tn = None
216 tn = None
217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
218
218
219 def header(self):
219 def header(self):
220 yield self.t("header", repo = self.reponame)
220 yield self.t("header", repo = self.reponame)
221
221
222 def footer(self):
222 def footer(self):
223 yield self.t("footer", repo = self.reponame)
223 yield self.t("footer", repo = self.reponame)
224
224
225 def changelog(self, pos):
225 def changelog(self, pos):
226 def changenav():
226 def changenav():
227 def seq(factor = 1):
227 def seq(factor = 1):
228 yield 1 * factor
228 yield 1 * factor
229 yield 3 * factor
229 yield 3 * factor
230 #yield 5 * factor
230 #yield 5 * factor
231 for f in seq(factor * 10):
231 for f in seq(factor * 10):
232 yield f
232 yield f
233
233
234 l = []
234 l = []
235 for f in seq():
235 for f in seq():
236 if f < self.maxchanges / 2: continue
236 if f < self.maxchanges / 2: continue
237 if f > count: break
237 if f > count: break
238 r = "%d" % f
238 r = "%d" % f
239 if pos + f < count: l.append(("+" + r, pos + f))
239 if pos + f < count: l.append(("+" + r, pos + f))
240 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
240 if pos - f >= 0: l.insert(0, ("-" + r, pos - f))
241
241
242 yield self.t("naventry", rev = 0, label="(0)")
242 yield self.t("naventry", rev = 0, label="(0)")
243
243
244 for label, rev in l:
244 for label, rev in l:
245 yield self.t("naventry", label = label, rev = rev)
245 yield self.t("naventry", label = label, rev = rev)
246
246
247 yield self.t("naventry", label="tip")
247 yield self.t("naventry", label="tip")
248
248
249 def changelist():
249 def changelist():
250 parity = (start - end) & 1
250 parity = (start - end) & 1
251 cl = self.repo.changelog
251 cl = self.repo.changelog
252 l = [] # build a list in forward order for efficiency
252 l = [] # build a list in forward order for efficiency
253 for i in range(start, end):
253 for i in range(start, end):
254 n = cl.node(i)
254 n = cl.node(i)
255 changes = cl.read(n)
255 changes = cl.read(n)
256 hn = hex(n)
256 hn = hex(n)
257 p1, p2 = cl.parents(n)
257 p1, p2 = cl.parents(n)
258 t = float(changes[2].split(' ')[0])
258 t = float(changes[2].split(' ')[0])
259
259
260 l.insert(0, self.t(
260 l.insert(0, self.t(
261 'changelogentry',
261 'changelogentry',
262 parity = parity,
262 parity = parity,
263 author = changes[1],
263 author = changes[1],
264 parent = self.parents("changelogparent",
264 parent = self.parents("changelogparent",
265 cl.parents(n), cl.rev),
265 cl.parents(n), cl.rev),
266 changelogtag = self.showtag("changelogtag",n),
266 changelogtag = self.showtag("changelogtag",n),
267 p1 = hex(p1), p2 = hex(p2),
267 p1 = hex(p1), p2 = hex(p2),
268 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
268 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
269 manifest = hex(changes[0]),
269 manifest = hex(changes[0]),
270 desc = changes[4],
270 desc = changes[4],
271 date = t,
271 date = t,
272 files = self.listfilediffs(changes[3], n),
272 files = self.listfilediffs(changes[3], n),
273 rev = i,
273 rev = i,
274 node = hn))
274 node = hn))
275 parity = 1 - parity
275 parity = 1 - parity
276
276
277 yield l
277 yield l
278
278
279 cl = self.repo.changelog
279 cl = self.repo.changelog
280 mf = cl.read(cl.tip())[0]
280 mf = cl.read(cl.tip())[0]
281 count = cl.count()
281 count = cl.count()
282 start = max(0, pos - self.maxchanges + 1)
282 start = max(0, pos - self.maxchanges + 1)
283 end = min(count, start + self.maxchanges)
283 end = min(count, start + self.maxchanges)
284 pos = end - 1
284 pos = end - 1
285
285
286 yield self.t('changelog',
286 yield self.t('changelog',
287 header = self.header(),
287 header = self.header(),
288 footer = self.footer(),
288 footer = self.footer(),
289 repo = self.reponame,
289 repo = self.reponame,
290 changenav = changenav,
290 changenav = changenav,
291 manifest = hex(mf),
291 manifest = hex(mf),
292 rev = pos, changesets = count, entries = changelist)
292 rev = pos, changesets = count, entries = changelist)
293
293
294 def search(self, query):
294 def search(self, query):
295
295
296 def changelist():
296 def changelist():
297 cl = self.repo.changelog
297 cl = self.repo.changelog
298 count = 0
298 count = 0
299 qw = query.lower().split()
299 qw = query.lower().split()
300
300
301 def revgen():
301 def revgen():
302 for i in range(cl.count() - 1, 0, -100):
302 for i in range(cl.count() - 1, 0, -100):
303 l = []
303 l = []
304 for j in range(max(0, i - 100), i):
304 for j in range(max(0, i - 100), i):
305 n = cl.node(j)
305 n = cl.node(j)
306 changes = cl.read(n)
306 changes = cl.read(n)
307 l.insert(0, (n, j, changes))
307 l.insert(0, (n, j, changes))
308 for e in l:
308 for e in l:
309 yield e
309 yield e
310
310
311 for n, i, changes in revgen():
311 for n, i, changes in revgen():
312 miss = 0
312 miss = 0
313 for q in qw:
313 for q in qw:
314 if not (q in changes[1].lower() or
314 if not (q in changes[1].lower() or
315 q in changes[4].lower() or
315 q in changes[4].lower() or
316 q in " ".join(changes[3][:20]).lower()):
316 q in " ".join(changes[3][:20]).lower()):
317 miss = 1
317 miss = 1
318 break
318 break
319 if miss: continue
319 if miss: continue
320
320
321 count += 1
321 count += 1
322 hn = hex(n)
322 hn = hex(n)
323 p1, p2 = cl.parents(n)
323 p1, p2 = cl.parents(n)
324 t = float(changes[2].split(' ')[0])
324 t = float(changes[2].split(' ')[0])
325
325
326 yield self.t(
326 yield self.t(
327 'searchentry',
327 'searchentry',
328 parity = count & 1,
328 parity = count & 1,
329 author = changes[1],
329 author = changes[1],
330 parent = self.parents("changelogparent",
330 parent = self.parents("changelogparent",
331 cl.parents(n), cl.rev),
331 cl.parents(n), cl.rev),
332 changelogtag = self.showtag("changelogtag",n),
332 changelogtag = self.showtag("changelogtag",n),
333 p1 = hex(p1), p2 = hex(p2),
333 p1 = hex(p1), p2 = hex(p2),
334 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
334 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
335 manifest = hex(changes[0]),
335 manifest = hex(changes[0]),
336 desc = changes[4],
336 desc = changes[4],
337 date = t,
337 date = t,
338 files = self.listfilediffs(changes[3], n),
338 files = self.listfilediffs(changes[3], n),
339 rev = i,
339 rev = i,
340 node = hn)
340 node = hn)
341
341
342 if count >= self.maxchanges: break
342 if count >= self.maxchanges: break
343
343
344 cl = self.repo.changelog
344 cl = self.repo.changelog
345 mf = cl.read(cl.tip())[0]
345 mf = cl.read(cl.tip())[0]
346
346
347 yield self.t('search',
347 yield self.t('search',
348 header = self.header(),
348 header = self.header(),
349 footer = self.footer(),
349 footer = self.footer(),
350 query = query,
350 query = query,
351 repo = self.reponame,
351 repo = self.reponame,
352 manifest = hex(mf),
352 manifest = hex(mf),
353 entries = changelist)
353 entries = changelist)
354
354
355 def changeset(self, nodeid):
355 def changeset(self, nodeid):
356 n = bin(nodeid)
356 n = bin(nodeid)
357 cl = self.repo.changelog
357 cl = self.repo.changelog
358 changes = cl.read(n)
358 changes = cl.read(n)
359 p1, p2 = cl.parents(n)
359 p1, p2 = cl.parents(n)
360 p1rev, p2rev = cl.rev(p1), cl.rev(p2)
360 p1rev, p2rev = cl.rev(p1), cl.rev(p2)
361 t = float(changes[2].split(' ')[0])
361 t = float(changes[2].split(' ')[0])
362
362
363 files = []
363 files = []
364 mf = self.repo.manifest.read(changes[0])
364 mf = self.repo.manifest.read(changes[0])
365 for f in changes[3]:
365 for f in changes[3]:
366 files.append(self.t("filenodelink",
366 files.append(self.t("filenodelink",
367 filenode = hex(mf.get(f, nullid)), file = f))
367 filenode = hex(mf.get(f, nullid)), file = f))
368
368
369 def diff():
369 def diff():
370 yield self.diff(p1, n, changes[3])
370 yield self.diff(p1, n, changes[3])
371
371
372 yield self.t('changeset',
372 yield self.t('changeset',
373 header = self.header(),
373 header = self.header(),
374 footer = self.footer(),
374 footer = self.footer(),
375 repo = self.reponame,
375 repo = self.reponame,
376 diff = diff,
376 diff = diff,
377 rev = cl.rev(n),
377 rev = cl.rev(n),
378 node = nodeid,
378 node = nodeid,
379 parent = self.parents("changesetparent",
379 parent = self.parents("changesetparent",
380 cl.parents(n), cl.rev),
380 cl.parents(n), cl.rev),
381 changesettag = self.showtag("changesettag",n),
381 changesettag = self.showtag("changesettag",n),
382 p1 = hex(p1), p2 = hex(p2),
382 p1 = hex(p1), p2 = hex(p2),
383 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
383 p1rev = cl.rev(p1), p2rev = cl.rev(p2),
384 manifest = hex(changes[0]),
384 manifest = hex(changes[0]),
385 author = changes[1],
385 author = changes[1],
386 desc = changes[4],
386 desc = changes[4],
387 date = t,
387 date = t,
388 files = files)
388 files = files)
389
389
390 def filelog(self, f, filenode):
390 def filelog(self, f, filenode):
391 cl = self.repo.changelog
391 cl = self.repo.changelog
392 fl = self.repo.file(f)
392 fl = self.repo.file(f)
393 count = fl.count()
393 count = fl.count()
394
394
395 def entries():
395 def entries():
396 l = []
396 l = []
397 parity = (count - 1) & 1
397 parity = (count - 1) & 1
398
398
399 for i in range(count):
399 for i in range(count):
400
400
401 n = fl.node(i)
401 n = fl.node(i)
402 lr = fl.linkrev(n)
402 lr = fl.linkrev(n)
403 cn = cl.node(lr)
403 cn = cl.node(lr)
404 cs = cl.read(cl.node(lr))
404 cs = cl.read(cl.node(lr))
405 p1, p2 = fl.parents(n)
405 p1, p2 = fl.parents(n)
406 t = float(cs[2].split(' ')[0])
406 t = float(cs[2].split(' ')[0])
407
407
408 l.insert(0, self.t("filelogentry",
408 l.insert(0, self.t("filelogentry",
409 parity = parity,
409 parity = parity,
410 filenode = hex(n),
410 filenode = hex(n),
411 filerev = i,
411 filerev = i,
412 file = f,
412 file = f,
413 node = hex(cn),
413 node = hex(cn),
414 author = cs[1],
414 author = cs[1],
415 date = t,
415 date = t,
416 desc = cs[4],
416 desc = cs[4],
417 p1 = hex(p1), p2 = hex(p2),
417 p1 = hex(p1), p2 = hex(p2),
418 p1rev = fl.rev(p1), p2rev = fl.rev(p2)))
418 p1rev = fl.rev(p1), p2rev = fl.rev(p2)))
419 parity = 1 - parity
419 parity = 1 - parity
420
420
421 yield l
421 yield l
422
422
423 yield self.t("filelog",
423 yield self.t("filelog",
424 header = self.header(),
424 header = self.header(),
425 footer = self.footer(),
425 footer = self.footer(),
426 repo = self.reponame,
426 repo = self.reponame,
427 file = f,
427 file = f,
428 filenode = filenode,
428 filenode = filenode,
429 entries = entries)
429 entries = entries)
430
430
431 def filerevision(self, f, node):
431 def filerevision(self, f, node):
432 fl = self.repo.file(f)
432 fl = self.repo.file(f)
433 n = bin(node)
433 n = bin(node)
434 text = fl.read(n)
434 text = fl.read(n)
435 changerev = fl.linkrev(n)
435 changerev = fl.linkrev(n)
436 cl = self.repo.changelog
436 cl = self.repo.changelog
437 cn = cl.node(changerev)
437 cn = cl.node(changerev)
438 cs = cl.read(cn)
438 cs = cl.read(cn)
439 p1, p2 = fl.parents(n)
439 p1, p2 = fl.parents(n)
440 t = float(cs[2].split(' ')[0])
440 t = float(cs[2].split(' ')[0])
441 mfn = cs[0]
441 mfn = cs[0]
442
442
443 def lines():
443 def lines():
444 for l, t in enumerate(text.splitlines(1)):
444 for l, t in enumerate(text.splitlines(1)):
445 yield self.t("fileline", line = t,
445 yield self.t("fileline", line = t,
446 linenumber = "% 6d" % (l + 1),
446 linenumber = "% 6d" % (l + 1),
447 parity = l & 1)
447 parity = l & 1)
448
448
449 yield self.t("filerevision", file = f,
449 yield self.t("filerevision", file = f,
450 header = self.header(),
450 header = self.header(),
451 footer = self.footer(),
451 footer = self.footer(),
452 repo = self.reponame,
452 repo = self.reponame,
453 filenode = node,
453 filenode = node,
454 path = up(f),
454 path = up(f),
455 text = lines(),
455 text = lines(),
456 rev = changerev,
456 rev = changerev,
457 node = hex(cn),
457 node = hex(cn),
458 manifest = hex(mfn),
458 manifest = hex(mfn),
459 author = cs[1],
459 author = cs[1],
460 date = t,
460 date = t,
461 parent = self.parents("filerevparent",
461 parent = self.parents("filerevparent",
462 fl.parents(n), fl.rev, file=f),
462 fl.parents(n), fl.rev, file=f),
463 p1 = hex(p1), p2 = hex(p2),
463 p1 = hex(p1), p2 = hex(p2),
464 permissions = self.repo.manifest.readflags(mfn)[f],
464 permissions = self.repo.manifest.readflags(mfn)[f],
465 p1rev = fl.rev(p1), p2rev = fl.rev(p2))
465 p1rev = fl.rev(p1), p2rev = fl.rev(p2))
466
466
467 def fileannotate(self, f, node):
467 def fileannotate(self, f, node):
468 bcache = {}
468 bcache = {}
469 ncache = {}
469 ncache = {}
470 fl = self.repo.file(f)
470 fl = self.repo.file(f)
471 n = bin(node)
471 n = bin(node)
472 changerev = fl.linkrev(n)
472 changerev = fl.linkrev(n)
473
473
474 cl = self.repo.changelog
474 cl = self.repo.changelog
475 cn = cl.node(changerev)
475 cn = cl.node(changerev)
476 cs = cl.read(cn)
476 cs = cl.read(cn)
477 p1, p2 = fl.parents(n)
477 p1, p2 = fl.parents(n)
478 t = float(cs[2].split(' ')[0])
478 t = float(cs[2].split(' ')[0])
479 mfn = cs[0]
479 mfn = cs[0]
480
480
481 def annotate():
481 def annotate():
482 parity = 1
482 parity = 1
483 last = None
483 last = None
484 for r, l in fl.annotate(n):
484 for r, l in fl.annotate(n):
485 try:
485 try:
486 cnode = ncache[r]
486 cnode = ncache[r]
487 except KeyError:
487 except KeyError:
488 cnode = ncache[r] = self.repo.changelog.node(r)
488 cnode = ncache[r] = self.repo.changelog.node(r)
489
489
490 try:
490 try:
491 name = bcache[r]
491 name = bcache[r]
492 except KeyError:
492 except KeyError:
493 cl = self.repo.changelog.read(cnode)
493 cl = self.repo.changelog.read(cnode)
494 name = cl[1]
494 name = cl[1]
495 f = name.find('@')
495 f = name.find('@')
496 if f >= 0:
496 if f >= 0:
497 name = name[:f]
497 name = name[:f]
498 f = name.find('<')
498 f = name.find('<')
499 if f >= 0:
499 if f >= 0:
500 name = name[f+1:]
500 name = name[f+1:]
501 bcache[r] = name
501 bcache[r] = name
502
502
503 if last != cnode:
503 if last != cnode:
504 parity = 1 - parity
504 parity = 1 - parity
505 last = cnode
505 last = cnode
506
506
507 yield self.t("annotateline",
507 yield self.t("annotateline",
508 parity = parity,
508 parity = parity,
509 node = hex(cnode),
509 node = hex(cnode),
510 rev = r,
510 rev = r,
511 author = name,
511 author = name,
512 file = f,
512 file = f,
513 line = l)
513 line = l)
514
514
515 yield self.t("fileannotate",
515 yield self.t("fileannotate",
516 header = self.header(),
516 header = self.header(),
517 footer = self.footer(),
517 footer = self.footer(),
518 repo = self.reponame,
518 repo = self.reponame,
519 file = f,
519 file = f,
520 filenode = node,
520 filenode = node,
521 annotate = annotate,
521 annotate = annotate,
522 path = up(f),
522 path = up(f),
523 rev = changerev,
523 rev = changerev,
524 node = hex(cn),
524 node = hex(cn),
525 manifest = hex(mfn),
525 manifest = hex(mfn),
526 author = cs[1],
526 author = cs[1],
527 date = t,
527 date = t,
528 parent = self.parents("fileannotateparent",
528 parent = self.parents("fileannotateparent",
529 fl.parents(n), fl.rev, file=f),
529 fl.parents(n), fl.rev, file=f),
530 p1 = hex(p1), p2 = hex(p2),
530 p1 = hex(p1), p2 = hex(p2),
531 permissions = self.repo.manifest.readflags(mfn)[f],
531 permissions = self.repo.manifest.readflags(mfn)[f],
532 p1rev = fl.rev(p1), p2rev = fl.rev(p2))
532 p1rev = fl.rev(p1), p2rev = fl.rev(p2))
533
533
534 def manifest(self, mnode, path):
534 def manifest(self, mnode, path):
535 mf = self.repo.manifest.read(bin(mnode))
535 mf = self.repo.manifest.read(bin(mnode))
536 rev = self.repo.manifest.rev(bin(mnode))
536 rev = self.repo.manifest.rev(bin(mnode))
537 node = self.repo.changelog.node(rev)
537 node = self.repo.changelog.node(rev)
538 mff=self.repo.manifest.readflags(bin(mnode))
538 mff=self.repo.manifest.readflags(bin(mnode))
539
539
540 files = {}
540 files = {}
541
541
542 p = path[1:]
542 p = path[1:]
543 l = len(p)
543 l = len(p)
544
544
545 for f,n in mf.items():
545 for f,n in mf.items():
546 if f[:l] != p:
546 if f[:l] != p:
547 continue
547 continue
548 remain = f[l:]
548 remain = f[l:]
549 if "/" in remain:
549 if "/" in remain:
550 short = remain[:remain.find("/") + 1] # bleah
550 short = remain[:remain.find("/") + 1] # bleah
551 files[short] = (f, None)
551 files[short] = (f, None)
552 else:
552 else:
553 short = os.path.basename(remain)
553 short = os.path.basename(remain)
554 files[short] = (f, n)
554 files[short] = (f, n)
555
555
556 def filelist():
556 def filelist():
557 parity = 0
557 parity = 0
558 fl = files.keys()
558 fl = files.keys()
559 fl.sort()
559 fl.sort()
560 for f in fl:
560 for f in fl:
561 full, fnode = files[f]
561 full, fnode = files[f]
562 if fnode:
562 if fnode:
563 yield self.t("manifestfileentry",
563 yield self.t("manifestfileentry",
564 file = full,
564 file = full,
565 manifest = mnode,
565 manifest = mnode,
566 filenode = hex(fnode),
566 filenode = hex(fnode),
567 parity = parity,
567 parity = parity,
568 basename = f,
568 basename = f,
569 permissions = mff[full])
569 permissions = mff[full])
570 else:
570 else:
571 yield self.t("manifestdirentry",
571 yield self.t("manifestdirentry",
572 parity = parity,
572 parity = parity,
573 path = os.path.join(path, f),
573 path = os.path.join(path, f),
574 manifest = mnode, basename = f[:-1])
574 manifest = mnode, basename = f[:-1])
575 parity = 1 - parity
575 parity = 1 - parity
576
576
577 yield self.t("manifest",
577 yield self.t("manifest",
578 header = self.header(),
578 header = self.header(),
579 footer = self.footer(),
579 footer = self.footer(),
580 repo = self.reponame,
580 repo = self.reponame,
581 manifest = mnode,
581 manifest = mnode,
582 rev = rev,
582 rev = rev,
583 node = hex(node),
583 node = hex(node),
584 path = path,
584 path = path,
585 up = up(path),
585 up = up(path),
586 entries = filelist)
586 entries = filelist)
587
587
588 def tags(self):
588 def tags(self):
589 cl = self.repo.changelog
589 cl = self.repo.changelog
590 mf = cl.read(cl.tip())[0]
590 mf = cl.read(cl.tip())[0]
591
591
592 i = self.repo.tagslist()
592 i = self.repo.tagslist()
593 i.reverse()
593 i.reverse()
594
594
595 def entries():
595 def entries():
596 parity = 0
596 parity = 0
597 for k,n in i:
597 for k,n in i:
598 yield self.t("tagentry",
598 yield self.t("tagentry",
599 parity = parity,
599 parity = parity,
600 tag = k,
600 tag = k,
601 node = hex(n))
601 node = hex(n))
602 parity = 1 - parity
602 parity = 1 - parity
603
603
604 yield self.t("tags",
604 yield self.t("tags",
605 header = self.header(),
605 header = self.header(),
606 footer = self.footer(),
606 footer = self.footer(),
607 repo = self.reponame,
607 repo = self.reponame,
608 manifest = hex(mf),
608 manifest = hex(mf),
609 entries = entries)
609 entries = entries)
610
610
611 def filediff(self, file, changeset):
611 def filediff(self, file, changeset):
612 n = bin(changeset)
612 n = bin(changeset)
613 cl = self.repo.changelog
613 cl = self.repo.changelog
614 p1 = cl.parents(n)[0]
614 p1 = cl.parents(n)[0]
615 cs = cl.read(n)
615 cs = cl.read(n)
616 mf = self.repo.manifest.read(cs[0])
616 mf = self.repo.manifest.read(cs[0])
617
617
618 def diff():
618 def diff():
619 yield self.diff(p1, n, file)
619 yield self.diff(p1, n, file)
620
620
621 yield self.t("filediff",
621 yield self.t("filediff",
622 header = self.header(),
622 header = self.header(),
623 footer = self.footer(),
623 footer = self.footer(),
624 repo = self.reponame,
624 repo = self.reponame,
625 file = file,
625 file = file,
626 filenode = hex(mf.get(file, nullid)),
626 filenode = hex(mf.get(file, nullid)),
627 node = changeset,
627 node = changeset,
628 rev = self.repo.changelog.rev(n),
628 rev = self.repo.changelog.rev(n),
629 parent = self.parents("filediffparent",
629 parent = self.parents("filediffparent",
630 cl.parents(n), cl.rev),
630 cl.parents(n), cl.rev),
631 p1rev = self.repo.changelog.rev(p1),
631 p1rev = self.repo.changelog.rev(p1),
632 diff = diff)
632 diff = diff)
633
633
634 # add tags to things
634 # add tags to things
635 # tags -> list of changesets corresponding to tags
635 # tags -> list of changesets corresponding to tags
636 # find tag, changeset, file
636 # find tag, changeset, file
637
637
638 def run(self):
638 def run(self):
639 self.refresh()
639 self.refresh()
640 args = cgi.parse()
640 args = cgi.parse()
641
641
642 m = os.path.join(self.templates, "map")
642 m = os.path.join(self.templates, "map")
643 if args.has_key('style'):
643 if args.has_key('style'):
644 b = os.path.basename("map-" + args['style'][0])
644 b = os.path.basename("map-" + args['style'][0])
645 p = os.path.join(self.templates, b)
645 p = os.path.join(self.templates, b)
646 if os.path.isfile(p): m = p
646 if os.path.isfile(p): m = p
647
647
648 self.t = templater(m, self.filters)
648 self.t = templater(m, self.filters)
649
649
650 if not args.has_key('cmd') or args['cmd'][0] == 'changelog':
650 if not args.has_key('cmd') or args['cmd'][0] == 'changelog':
651 c = self.repo.changelog.count() - 1
651 c = self.repo.changelog.count() - 1
652 hi = c
652 hi = c
653 if args.has_key('rev'):
653 if args.has_key('rev'):
654 hi = args['rev'][0]
654 hi = args['rev'][0]
655 try:
655 try:
656 hi = self.repo.changelog.rev(self.repo.lookup(hi))
656 hi = self.repo.changelog.rev(self.repo.lookup(hi))
657 except KeyError:
657 except KeyError:
658 write(self.search(hi))
658 write(self.search(hi))
659 return
659 return
660
660
661 write(self.changelog(hi))
661 write(self.changelog(hi))
662
662
663 elif args['cmd'][0] == 'changeset':
663 elif args['cmd'][0] == 'changeset':
664 write(self.changeset(args['node'][0]))
664 write(self.changeset(args['node'][0]))
665
665
666 elif args['cmd'][0] == 'manifest':
666 elif args['cmd'][0] == 'manifest':
667 write(self.manifest(args['manifest'][0], args['path'][0]))
667 write(self.manifest(args['manifest'][0], args['path'][0]))
668
668
669 elif args['cmd'][0] == 'tags':
669 elif args['cmd'][0] == 'tags':
670 write(self.tags())
670 write(self.tags())
671
671
672 elif args['cmd'][0] == 'filediff':
672 elif args['cmd'][0] == 'filediff':
673 write(self.filediff(args['file'][0], args['node'][0]))
673 write(self.filediff(args['file'][0], args['node'][0]))
674
674
675 elif args['cmd'][0] == 'file':
675 elif args['cmd'][0] == 'file':
676 write(self.filerevision(args['file'][0], args['filenode'][0]))
676 write(self.filerevision(args['file'][0], args['filenode'][0]))
677
677
678 elif args['cmd'][0] == 'annotate':
678 elif args['cmd'][0] == 'annotate':
679 write(self.fileannotate(args['file'][0], args['filenode'][0]))
679 write(self.fileannotate(args['file'][0], args['filenode'][0]))
680
680
681 elif args['cmd'][0] == 'filelog':
681 elif args['cmd'][0] == 'filelog':
682 write(self.filelog(args['file'][0], args['filenode'][0]))
682 write(self.filelog(args['file'][0], args['filenode'][0]))
683
683
684 elif args['cmd'][0] == 'heads':
684 elif args['cmd'][0] == 'heads':
685 httphdr("text/plain")
685 httphdr("text/plain")
686 h = self.repo.heads()
686 h = self.repo.heads()
687 sys.stdout.write(" ".join(map(hex, h)) + "\n")
687 sys.stdout.write(" ".join(map(hex, h)) + "\n")
688
688
689 elif args['cmd'][0] == 'branches':
689 elif args['cmd'][0] == 'branches':
690 httphdr("text/plain")
690 httphdr("text/plain")
691 nodes = []
691 nodes = []
692 if args.has_key('nodes'):
692 if args.has_key('nodes'):
693 nodes = map(bin, args['nodes'][0].split(" "))
693 nodes = map(bin, args['nodes'][0].split(" "))
694 for b in self.repo.branches(nodes):
694 for b in self.repo.branches(nodes):
695 sys.stdout.write(" ".join(map(hex, b)) + "\n")
695 sys.stdout.write(" ".join(map(hex, b)) + "\n")
696
696
697 elif args['cmd'][0] == 'between':
697 elif args['cmd'][0] == 'between':
698 httphdr("text/plain")
698 httphdr("text/plain")
699 nodes = []
699 nodes = []
700 if args.has_key('pairs'):
700 if args.has_key('pairs'):
701 pairs = [ map(bin, p.split("-"))
701 pairs = [ map(bin, p.split("-"))
702 for p in args['pairs'][0].split(" ") ]
702 for p in args['pairs'][0].split(" ") ]
703 for b in self.repo.between(pairs):
703 for b in self.repo.between(pairs):
704 sys.stdout.write(" ".join(map(hex, b)) + "\n")
704 sys.stdout.write(" ".join(map(hex, b)) + "\n")
705
705
706 elif args['cmd'][0] == 'changegroup':
706 elif args['cmd'][0] == 'changegroup':
707 httphdr("application/hg-changegroup")
707 httphdr("application/hg-changegroup")
708 nodes = []
708 nodes = []
709 if self.viewonly:
709 if self.viewonly:
710 return
710 return
711
711
712 if args.has_key('roots'):
712 if args.has_key('roots'):
713 nodes = map(bin, args['roots'][0].split(" "))
713 nodes = map(bin, args['roots'][0].split(" "))
714
714
715 z = zlib.compressobj()
715 z = zlib.compressobj()
716 for chunk in self.repo.changegroup(nodes):
716 for chunk in self.repo.changegroup(nodes):
717 sys.stdout.write(z.compress(chunk))
717 sys.stdout.write(z.compress(chunk))
718
718
719 sys.stdout.write(z.flush())
719 sys.stdout.write(z.flush())
720
720
721 else:
721 else:
722 write(self.t("error"))
722 write(self.t("error"))
723
723
724 def server(path, name, templates, address, port):
724 def server(path, name, templates, address, port):
725
725
726 import BaseHTTPServer
726 import BaseHTTPServer
727 import sys, os
727 import sys, os
728
728
729 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
729 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
730 def do_POST(self):
730 def do_POST(self):
731 try:
731 try:
732 self.do_hgweb()
732 self.do_hgweb()
733 except socket.error, inst:
733 except socket.error, inst:
734 if inst.args[0] != 32: raise
734 if inst.args[0] != 32: raise
735
735
736 def do_GET(self):
736 def do_GET(self):
737 self.do_POST()
737 self.do_POST()
738
738
739 def do_hgweb(self):
739 def do_hgweb(self):
740 query = ""
740 query = ""
741 p = self.path.find("?")
741 p = self.path.find("?")
742 if p:
742 if p:
743 query = self.path[p + 1:]
743 query = self.path[p + 1:]
744 query = query.replace('+', ' ')
744 query = query.replace('+', ' ')
745
745
746 env = {}
746 env = {}
747 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
747 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
748 env['REQUEST_METHOD'] = self.command
748 env['REQUEST_METHOD'] = self.command
749 if query:
749 if query:
750 env['QUERY_STRING'] = query
750 env['QUERY_STRING'] = query
751 host = self.address_string()
751 host = self.address_string()
752 if host != self.client_address[0]:
752 if host != self.client_address[0]:
753 env['REMOTE_HOST'] = host
753 env['REMOTE_HOST'] = host
754 env['REMOTE_ADDR'] = self.client_address[0]
754 env['REMOTE_ADDR'] = self.client_address[0]
755
755
756 if self.headers.typeheader is None:
756 if self.headers.typeheader is None:
757 env['CONTENT_TYPE'] = self.headers.type
757 env['CONTENT_TYPE'] = self.headers.type
758 else:
758 else:
759 env['CONTENT_TYPE'] = self.headers.typeheader
759 env['CONTENT_TYPE'] = self.headers.typeheader
760 length = self.headers.getheader('content-length')
760 length = self.headers.getheader('content-length')
761 if length:
761 if length:
762 env['CONTENT_LENGTH'] = length
762 env['CONTENT_LENGTH'] = length
763 accept = []
763 accept = []
764 for line in self.headers.getallmatchingheaders('accept'):
764 for line in self.headers.getallmatchingheaders('accept'):
765 if line[:1] in "\t\n\r ":
765 if line[:1] in "\t\n\r ":
766 accept.append(line.strip())
766 accept.append(line.strip())
767 else:
767 else:
768 accept = accept + line[7:].split(',')
768 accept = accept + line[7:].split(',')
769 env['HTTP_ACCEPT'] = ','.join(accept)
769 env['HTTP_ACCEPT'] = ','.join(accept)
770
770
771 os.environ.update(env)
771 os.environ.update(env)
772
772
773 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
773 save = sys.argv, sys.stdin, sys.stdout, sys.stderr
774 try:
774 try:
775 sys.stdin = self.rfile
775 sys.stdin = self.rfile
776 sys.stdout = self.wfile
776 sys.stdout = self.wfile
777 sys.argv = ["hgweb.py"]
777 sys.argv = ["hgweb.py"]
778 if '=' not in query:
778 if '=' not in query:
779 sys.argv.append(query)
779 sys.argv.append(query)
780 self.send_response(200, "Script output follows")
780 self.send_response(200, "Script output follows")
781 hg.run()
781 hg.run()
782 finally:
782 finally:
783 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
783 sys.argv, sys.stdin, sys.stdout, sys.stderr = save
784
784
785 hg = hgweb(path, name, templates)
785 hg = hgweb(path, name, templates)
786 httpd = BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
786 httpd = BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
787 httpd.serve_forever()
787 httpd.serve_forever()
@@ -1,42 +1,42
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2 #
3 # This is the mercurial setup script.
3 # This is the mercurial setup script.
4 #
4 #
5 # './setup.py install', or
5 # './setup.py install', or
6 # './setup.py --help' for more options
6 # './setup.py --help' for more options
7
7
8 import glob
8 import glob
9 from distutils.core import setup, Extension
9 from distutils.core import setup, Extension
10 from distutils.command.install_data import install_data
10 from distutils.command.install_data import install_data
11
11
12 import mercurial.version
12 import mercurial.version
13
13
14 # specify version string, otherwise 'hg identify' will be used:
14 # specify version string, otherwise 'hg identify' will be used:
15 version = ''
15 version = ''
16
16
17 class install_package_data(install_data):
17 class install_package_data(install_data):
18 def finalize_options(self):
18 def finalize_options(self):
19 self.set_undefined_options('install',
19 self.set_undefined_options('install',
20 ('install_lib', 'install_dir'))
20 ('install_lib', 'install_dir'))
21 install_data.finalize_options(self)
21 install_data.finalize_options(self)
22
22
23 try:
23 try:
24 mercurial.version.remember_version(version)
24 mercurial.version.remember_version(version)
25 setup(name='mercurial',
25 setup(name='mercurial',
26 version=mercurial.version.get_version(),
26 version=mercurial.version.get_version(),
27 author='Matt Mackall',
27 author='Matt Mackall',
28 author_email='mpm@selenic.com',
28 author_email='mpm@selenic.com',
29 url='http://selenic.com/mercurial',
29 url='http://selenic.com/mercurial',
30 description='scalable distributed SCM',
30 description='scalable distributed SCM',
31 license='GNU GPL',
31 license='GNU GPL',
32 packages=['mercurial'],
32 packages=['mercurial'],
33 ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
33 ext_modules=[Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
34 Extension('mercurial.bdiff', ['mercurial/bdiff.c'])],
34 Extension('mercurial.bdiff', ['mercurial/bdiff.c'])],
35 data_files=[('mercurial/templates',
35 data_files=[('mercurial/templates',
36 ['templates/map'] +
36 ['templates/map'] +
37 glob.glob('templates/map-*') +
37 glob.glob('templates/map-*') +
38 glob.glob('templates/*.tmpl'))],
38 glob.glob('templates/*.tmpl'))],
39 cmdclass = { 'install_data' : install_package_data },
39 cmdclass = { 'install_data' : install_package_data },
40 scripts=['hg', 'hgmerge'])
40 scripts=['hg', 'hgmerge'])
41 finally:
41 finally:
42 mercurial.version.forget_version()
42 mercurial.version.forget_version()