##// END OF EJS Templates
merge with crew.
Vadim Gelfer -
r2475:7a77934e merge default
parent child Browse files
Show More
@@ -1,44 +1,43 b''
1 """
1 """
2 changegroup.py - Mercurial changegroup manipulation functions
2 changegroup.py - Mercurial changegroup manipulation functions
3
3
4 Copyright 2006 Matt Mackall <mpm@selenic.com>
4 Copyright 2006 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9 import struct
10 from i18n import gettext as _
9 from i18n import gettext as _
11 from demandload import *
10 from demandload import *
12 demandload(globals(), "util")
11 demandload(globals(), "struct util")
13
12
14 def getchunk(source):
13 def getchunk(source):
15 """get a chunk from a changegroup"""
14 """get a chunk from a changegroup"""
16 d = source.read(4)
15 d = source.read(4)
17 if not d:
16 if not d:
18 return ""
17 return ""
19 l = struct.unpack(">l", d)[0]
18 l = struct.unpack(">l", d)[0]
20 if l <= 4:
19 if l <= 4:
21 return ""
20 return ""
22 d = source.read(l - 4)
21 d = source.read(l - 4)
23 if len(d) < l - 4:
22 if len(d) < l - 4:
24 raise util.Abort(_("premature EOF reading chunk"
23 raise util.Abort(_("premature EOF reading chunk"
25 " (got %d bytes, expected %d)")
24 " (got %d bytes, expected %d)")
26 % (len(d), l - 4))
25 % (len(d), l - 4))
27 return d
26 return d
28
27
29 def chunkiter(source):
28 def chunkiter(source):
30 """iterate through the chunks in source"""
29 """iterate through the chunks in source"""
31 while 1:
30 while 1:
32 c = getchunk(source)
31 c = getchunk(source)
33 if not c:
32 if not c:
34 break
33 break
35 yield c
34 yield c
36
35
37 def genchunk(data):
36 def genchunk(data):
38 """build a changegroup chunk"""
37 """build a changegroup chunk"""
39 header = struct.pack(">l", len(data)+ 4)
38 header = struct.pack(">l", len(data)+ 4)
40 return "%s%s" % (header, data)
39 return "%s%s" % (header, data)
41
40
42 def closechunk():
41 def closechunk():
43 return struct.pack(">l", 0)
42 return struct.pack(">l", 0)
44
43
@@ -1,487 +1,486 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import struct, os
11 from node import *
10 from node import *
12 from i18n import gettext as _
11 from i18n import gettext as _
13 from demandload import *
12 from demandload import *
14 demandload(globals(), "time bisect stat util re errno")
13 demandload(globals(), "struct os time bisect stat util re errno")
15
14
16 class dirstate(object):
15 class dirstate(object):
17 format = ">cllll"
16 format = ">cllll"
18
17
19 def __init__(self, opener, ui, root):
18 def __init__(self, opener, ui, root):
20 self.opener = opener
19 self.opener = opener
21 self.root = root
20 self.root = root
22 self.dirty = 0
21 self.dirty = 0
23 self.ui = ui
22 self.ui = ui
24 self.map = None
23 self.map = None
25 self.pl = None
24 self.pl = None
26 self.copies = {}
25 self.copies = {}
27 self.ignorefunc = None
26 self.ignorefunc = None
28 self.blockignore = False
27 self.blockignore = False
29
28
30 def wjoin(self, f):
29 def wjoin(self, f):
31 return os.path.join(self.root, f)
30 return os.path.join(self.root, f)
32
31
33 def getcwd(self):
32 def getcwd(self):
34 cwd = os.getcwd()
33 cwd = os.getcwd()
35 if cwd == self.root: return ''
34 if cwd == self.root: return ''
36 return cwd[len(self.root) + 1:]
35 return cwd[len(self.root) + 1:]
37
36
38 def hgignore(self):
37 def hgignore(self):
39 '''return the contents of .hgignore files as a list of patterns.
38 '''return the contents of .hgignore files as a list of patterns.
40
39
41 the files parsed for patterns include:
40 the files parsed for patterns include:
42 .hgignore in the repository root
41 .hgignore in the repository root
43 any additional files specified in the [ui] section of ~/.hgrc
42 any additional files specified in the [ui] section of ~/.hgrc
44
43
45 trailing white space is dropped.
44 trailing white space is dropped.
46 the escape character is backslash.
45 the escape character is backslash.
47 comments start with #.
46 comments start with #.
48 empty lines are skipped.
47 empty lines are skipped.
49
48
50 lines can be of the following formats:
49 lines can be of the following formats:
51
50
52 syntax: regexp # defaults following lines to non-rooted regexps
51 syntax: regexp # defaults following lines to non-rooted regexps
53 syntax: glob # defaults following lines to non-rooted globs
52 syntax: glob # defaults following lines to non-rooted globs
54 re:pattern # non-rooted regular expression
53 re:pattern # non-rooted regular expression
55 glob:pattern # non-rooted glob
54 glob:pattern # non-rooted glob
56 pattern # pattern of the current default type'''
55 pattern # pattern of the current default type'''
57 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
58 def parselines(fp):
57 def parselines(fp):
59 for line in fp:
58 for line in fp:
60 escape = False
59 escape = False
61 for i in xrange(len(line)):
60 for i in xrange(len(line)):
62 if escape: escape = False
61 if escape: escape = False
63 elif line[i] == '\\': escape = True
62 elif line[i] == '\\': escape = True
64 elif line[i] == '#': break
63 elif line[i] == '#': break
65 line = line[:i].rstrip()
64 line = line[:i].rstrip()
66 if line: yield line
65 if line: yield line
67 repoignore = self.wjoin('.hgignore')
66 repoignore = self.wjoin('.hgignore')
68 files = [repoignore]
67 files = [repoignore]
69 files.extend(self.ui.hgignorefiles())
68 files.extend(self.ui.hgignorefiles())
70 pats = {}
69 pats = {}
71 for f in files:
70 for f in files:
72 try:
71 try:
73 pats[f] = []
72 pats[f] = []
74 fp = open(f)
73 fp = open(f)
75 syntax = 'relre:'
74 syntax = 'relre:'
76 for line in parselines(fp):
75 for line in parselines(fp):
77 if line.startswith('syntax:'):
76 if line.startswith('syntax:'):
78 s = line[7:].strip()
77 s = line[7:].strip()
79 try:
78 try:
80 syntax = syntaxes[s]
79 syntax = syntaxes[s]
81 except KeyError:
80 except KeyError:
82 self.ui.warn(_("%s: ignoring invalid "
81 self.ui.warn(_("%s: ignoring invalid "
83 "syntax '%s'\n") % (f, s))
82 "syntax '%s'\n") % (f, s))
84 continue
83 continue
85 pat = syntax + line
84 pat = syntax + line
86 for s in syntaxes.values():
85 for s in syntaxes.values():
87 if line.startswith(s):
86 if line.startswith(s):
88 pat = line
87 pat = line
89 break
88 break
90 pats[f].append(pat)
89 pats[f].append(pat)
91 except IOError, inst:
90 except IOError, inst:
92 if f != repoignore:
91 if f != repoignore:
93 self.ui.warn(_("skipping unreadable ignore file"
92 self.ui.warn(_("skipping unreadable ignore file"
94 " '%s': %s\n") % (f, inst.strerror))
93 " '%s': %s\n") % (f, inst.strerror))
95 return pats
94 return pats
96
95
97 def ignore(self, fn):
96 def ignore(self, fn):
98 '''default match function used by dirstate and
97 '''default match function used by dirstate and
99 localrepository. this honours the repository .hgignore file
98 localrepository. this honours the repository .hgignore file
100 and any other files specified in the [ui] section of .hgrc.'''
99 and any other files specified in the [ui] section of .hgrc.'''
101 if self.blockignore:
100 if self.blockignore:
102 return False
101 return False
103 if not self.ignorefunc:
102 if not self.ignorefunc:
104 ignore = self.hgignore()
103 ignore = self.hgignore()
105 allpats = []
104 allpats = []
106 [allpats.extend(patlist) for patlist in ignore.values()]
105 [allpats.extend(patlist) for patlist in ignore.values()]
107 if allpats:
106 if allpats:
108 try:
107 try:
109 files, self.ignorefunc, anypats = (
108 files, self.ignorefunc, anypats = (
110 util.matcher(self.root, inc=allpats, src='.hgignore'))
109 util.matcher(self.root, inc=allpats, src='.hgignore'))
111 except util.Abort:
110 except util.Abort:
112 # Re-raise an exception where the src is the right file
111 # Re-raise an exception where the src is the right file
113 for f, patlist in ignore.items():
112 for f, patlist in ignore.items():
114 files, self.ignorefunc, anypats = (
113 files, self.ignorefunc, anypats = (
115 util.matcher(self.root, inc=patlist, src=f))
114 util.matcher(self.root, inc=patlist, src=f))
116 else:
115 else:
117 self.ignorefunc = util.never
116 self.ignorefunc = util.never
118 return self.ignorefunc(fn)
117 return self.ignorefunc(fn)
119
118
120 def __del__(self):
119 def __del__(self):
121 if self.dirty:
120 if self.dirty:
122 self.write()
121 self.write()
123
122
124 def __getitem__(self, key):
123 def __getitem__(self, key):
125 try:
124 try:
126 return self.map[key]
125 return self.map[key]
127 except TypeError:
126 except TypeError:
128 self.lazyread()
127 self.lazyread()
129 return self[key]
128 return self[key]
130
129
131 def __contains__(self, key):
130 def __contains__(self, key):
132 self.lazyread()
131 self.lazyread()
133 return key in self.map
132 return key in self.map
134
133
135 def parents(self):
134 def parents(self):
136 self.lazyread()
135 self.lazyread()
137 return self.pl
136 return self.pl
138
137
139 def markdirty(self):
138 def markdirty(self):
140 if not self.dirty:
139 if not self.dirty:
141 self.dirty = 1
140 self.dirty = 1
142
141
143 def setparents(self, p1, p2=nullid):
142 def setparents(self, p1, p2=nullid):
144 self.lazyread()
143 self.lazyread()
145 self.markdirty()
144 self.markdirty()
146 self.pl = p1, p2
145 self.pl = p1, p2
147
146
148 def state(self, key):
147 def state(self, key):
149 try:
148 try:
150 return self[key][0]
149 return self[key][0]
151 except KeyError:
150 except KeyError:
152 return "?"
151 return "?"
153
152
154 def lazyread(self):
153 def lazyread(self):
155 if self.map is None:
154 if self.map is None:
156 self.read()
155 self.read()
157
156
158 def parse(self, st):
157 def parse(self, st):
159 self.pl = [st[:20], st[20: 40]]
158 self.pl = [st[:20], st[20: 40]]
160
159
161 # deref fields so they will be local in loop
160 # deref fields so they will be local in loop
162 map = self.map
161 map = self.map
163 copies = self.copies
162 copies = self.copies
164 format = self.format
163 format = self.format
165 unpack = struct.unpack
164 unpack = struct.unpack
166
165
167 pos = 40
166 pos = 40
168 e_size = struct.calcsize(format)
167 e_size = struct.calcsize(format)
169
168
170 while pos < len(st):
169 while pos < len(st):
171 newpos = pos + e_size
170 newpos = pos + e_size
172 e = unpack(format, st[pos:newpos])
171 e = unpack(format, st[pos:newpos])
173 l = e[4]
172 l = e[4]
174 pos = newpos
173 pos = newpos
175 newpos = pos + l
174 newpos = pos + l
176 f = st[pos:newpos]
175 f = st[pos:newpos]
177 if '\0' in f:
176 if '\0' in f:
178 f, c = f.split('\0')
177 f, c = f.split('\0')
179 copies[f] = c
178 copies[f] = c
180 map[f] = e[:4]
179 map[f] = e[:4]
181 pos = newpos
180 pos = newpos
182
181
183 def read(self):
182 def read(self):
184 self.map = {}
183 self.map = {}
185 self.pl = [nullid, nullid]
184 self.pl = [nullid, nullid]
186 try:
185 try:
187 st = self.opener("dirstate").read()
186 st = self.opener("dirstate").read()
188 if st:
187 if st:
189 self.parse(st)
188 self.parse(st)
190 except IOError, err:
189 except IOError, err:
191 if err.errno != errno.ENOENT: raise
190 if err.errno != errno.ENOENT: raise
192
191
193 def copy(self, source, dest):
192 def copy(self, source, dest):
194 self.lazyread()
193 self.lazyread()
195 self.markdirty()
194 self.markdirty()
196 self.copies[dest] = source
195 self.copies[dest] = source
197
196
198 def copied(self, file):
197 def copied(self, file):
199 return self.copies.get(file, None)
198 return self.copies.get(file, None)
200
199
201 def update(self, files, state, **kw):
200 def update(self, files, state, **kw):
202 ''' current states:
201 ''' current states:
203 n normal
202 n normal
204 m needs merging
203 m needs merging
205 r marked for removal
204 r marked for removal
206 a marked for addition'''
205 a marked for addition'''
207
206
208 if not files: return
207 if not files: return
209 self.lazyread()
208 self.lazyread()
210 self.markdirty()
209 self.markdirty()
211 for f in files:
210 for f in files:
212 if state == "r":
211 if state == "r":
213 self.map[f] = ('r', 0, 0, 0)
212 self.map[f] = ('r', 0, 0, 0)
214 else:
213 else:
215 s = os.lstat(self.wjoin(f))
214 s = os.lstat(self.wjoin(f))
216 st_size = kw.get('st_size', s.st_size)
215 st_size = kw.get('st_size', s.st_size)
217 st_mtime = kw.get('st_mtime', s.st_mtime)
216 st_mtime = kw.get('st_mtime', s.st_mtime)
218 self.map[f] = (state, s.st_mode, st_size, st_mtime)
217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
219 if self.copies.has_key(f):
218 if self.copies.has_key(f):
220 del self.copies[f]
219 del self.copies[f]
221
220
222 def forget(self, files):
221 def forget(self, files):
223 if not files: return
222 if not files: return
224 self.lazyread()
223 self.lazyread()
225 self.markdirty()
224 self.markdirty()
226 for f in files:
225 for f in files:
227 try:
226 try:
228 del self.map[f]
227 del self.map[f]
229 except KeyError:
228 except KeyError:
230 self.ui.warn(_("not in dirstate: %s!\n") % f)
229 self.ui.warn(_("not in dirstate: %s!\n") % f)
231 pass
230 pass
232
231
233 def clear(self):
232 def clear(self):
234 self.map = {}
233 self.map = {}
235 self.copies = {}
234 self.copies = {}
236 self.markdirty()
235 self.markdirty()
237
236
238 def rebuild(self, parent, files):
237 def rebuild(self, parent, files):
239 self.clear()
238 self.clear()
240 umask = os.umask(0)
239 umask = os.umask(0)
241 os.umask(umask)
240 os.umask(umask)
242 for f, mode in files:
241 for f, mode in files:
243 if mode:
242 if mode:
244 self.map[f] = ('n', ~umask, -1, 0)
243 self.map[f] = ('n', ~umask, -1, 0)
245 else:
244 else:
246 self.map[f] = ('n', ~umask & 0666, -1, 0)
245 self.map[f] = ('n', ~umask & 0666, -1, 0)
247 self.pl = (parent, nullid)
246 self.pl = (parent, nullid)
248 self.markdirty()
247 self.markdirty()
249
248
250 def write(self):
249 def write(self):
251 if not self.dirty:
250 if not self.dirty:
252 return
251 return
253 st = self.opener("dirstate", "w", atomic=True)
252 st = self.opener("dirstate", "w", atomic=True)
254 st.write("".join(self.pl))
253 st.write("".join(self.pl))
255 for f, e in self.map.items():
254 for f, e in self.map.items():
256 c = self.copied(f)
255 c = self.copied(f)
257 if c:
256 if c:
258 f = f + "\0" + c
257 f = f + "\0" + c
259 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
260 st.write(e + f)
259 st.write(e + f)
261 self.dirty = 0
260 self.dirty = 0
262
261
263 def filterfiles(self, files):
262 def filterfiles(self, files):
264 ret = {}
263 ret = {}
265 unknown = []
264 unknown = []
266
265
267 for x in files:
266 for x in files:
268 if x == '.':
267 if x == '.':
269 return self.map.copy()
268 return self.map.copy()
270 if x not in self.map:
269 if x not in self.map:
271 unknown.append(x)
270 unknown.append(x)
272 else:
271 else:
273 ret[x] = self.map[x]
272 ret[x] = self.map[x]
274
273
275 if not unknown:
274 if not unknown:
276 return ret
275 return ret
277
276
278 b = self.map.keys()
277 b = self.map.keys()
279 b.sort()
278 b.sort()
280 blen = len(b)
279 blen = len(b)
281
280
282 for x in unknown:
281 for x in unknown:
283 bs = bisect.bisect(b, x)
282 bs = bisect.bisect(b, x)
284 if bs != 0 and b[bs-1] == x:
283 if bs != 0 and b[bs-1] == x:
285 ret[x] = self.map[x]
284 ret[x] = self.map[x]
286 continue
285 continue
287 while bs < blen:
286 while bs < blen:
288 s = b[bs]
287 s = b[bs]
289 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
288 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
290 ret[s] = self.map[s]
289 ret[s] = self.map[s]
291 else:
290 else:
292 break
291 break
293 bs += 1
292 bs += 1
294 return ret
293 return ret
295
294
296 def supported_type(self, f, st, verbose=False):
295 def supported_type(self, f, st, verbose=False):
297 if stat.S_ISREG(st.st_mode):
296 if stat.S_ISREG(st.st_mode):
298 return True
297 return True
299 if verbose:
298 if verbose:
300 kind = 'unknown'
299 kind = 'unknown'
301 if stat.S_ISCHR(st.st_mode): kind = _('character device')
300 if stat.S_ISCHR(st.st_mode): kind = _('character device')
302 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
301 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
303 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
302 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
304 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
303 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
305 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
304 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
306 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
305 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
307 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
306 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
308 util.pathto(self.getcwd(), f),
307 util.pathto(self.getcwd(), f),
309 kind))
308 kind))
310 return False
309 return False
311
310
312 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
311 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
313 badmatch=None):
312 badmatch=None):
314 self.lazyread()
313 self.lazyread()
315
314
316 # walk all files by default
315 # walk all files by default
317 if not files:
316 if not files:
318 files = [self.root]
317 files = [self.root]
319 if not dc:
318 if not dc:
320 dc = self.map.copy()
319 dc = self.map.copy()
321 elif not dc:
320 elif not dc:
322 dc = self.filterfiles(files)
321 dc = self.filterfiles(files)
323
322
324 def statmatch(file_, stat):
323 def statmatch(file_, stat):
325 file_ = util.pconvert(file_)
324 file_ = util.pconvert(file_)
326 if not ignored and file_ not in dc and self.ignore(file_):
325 if not ignored and file_ not in dc and self.ignore(file_):
327 return False
326 return False
328 return match(file_)
327 return match(file_)
329
328
330 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
329 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
331 badmatch=badmatch)
330 badmatch=badmatch)
332
331
333 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
332 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
334 # filter out the stat
333 # filter out the stat
335 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
334 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
336 yield src, f
335 yield src, f
337
336
338 # walk recursively through the directory tree, finding all files
337 # walk recursively through the directory tree, finding all files
339 # matched by the statmatch function
338 # matched by the statmatch function
340 #
339 #
341 # results are yielded in a tuple (src, filename, st), where src
340 # results are yielded in a tuple (src, filename, st), where src
342 # is one of:
341 # is one of:
343 # 'f' the file was found in the directory tree
342 # 'f' the file was found in the directory tree
344 # 'm' the file was only in the dirstate and not in the tree
343 # 'm' the file was only in the dirstate and not in the tree
345 # and st is the stat result if the file was found in the directory.
344 # and st is the stat result if the file was found in the directory.
346 #
345 #
347 # dc is an optional arg for the current dirstate. dc is not modified
346 # dc is an optional arg for the current dirstate. dc is not modified
348 # directly by this function, but might be modified by your statmatch call.
347 # directly by this function, but might be modified by your statmatch call.
349 #
348 #
350 def walkhelper(self, files, statmatch, dc, badmatch=None):
349 def walkhelper(self, files, statmatch, dc, badmatch=None):
351 # recursion free walker, faster than os.walk.
350 # recursion free walker, faster than os.walk.
352 def findfiles(s):
351 def findfiles(s):
353 work = [s]
352 work = [s]
354 while work:
353 while work:
355 top = work.pop()
354 top = work.pop()
356 names = os.listdir(top)
355 names = os.listdir(top)
357 names.sort()
356 names.sort()
358 # nd is the top of the repository dir tree
357 # nd is the top of the repository dir tree
359 nd = util.normpath(top[len(self.root) + 1:])
358 nd = util.normpath(top[len(self.root) + 1:])
360 if nd == '.':
359 if nd == '.':
361 nd = ''
360 nd = ''
362 else:
361 else:
363 # do not recurse into a repo contained in this
362 # do not recurse into a repo contained in this
364 # one. use bisect to find .hg directory so speed
363 # one. use bisect to find .hg directory so speed
365 # is good on big directory.
364 # is good on big directory.
366 hg = bisect.bisect_left(names, '.hg')
365 hg = bisect.bisect_left(names, '.hg')
367 if hg < len(names) and names[hg] == '.hg':
366 if hg < len(names) and names[hg] == '.hg':
368 if os.path.isdir(os.path.join(top, '.hg')):
367 if os.path.isdir(os.path.join(top, '.hg')):
369 continue
368 continue
370 for f in names:
369 for f in names:
371 np = util.pconvert(os.path.join(nd, f))
370 np = util.pconvert(os.path.join(nd, f))
372 if seen(np):
371 if seen(np):
373 continue
372 continue
374 p = os.path.join(top, f)
373 p = os.path.join(top, f)
375 # don't trip over symlinks
374 # don't trip over symlinks
376 st = os.lstat(p)
375 st = os.lstat(p)
377 if stat.S_ISDIR(st.st_mode):
376 if stat.S_ISDIR(st.st_mode):
378 ds = os.path.join(nd, f +'/')
377 ds = os.path.join(nd, f +'/')
379 if statmatch(ds, st):
378 if statmatch(ds, st):
380 work.append(p)
379 work.append(p)
381 if statmatch(np, st) and np in dc:
380 if statmatch(np, st) and np in dc:
382 yield 'm', np, st
381 yield 'm', np, st
383 elif statmatch(np, st):
382 elif statmatch(np, st):
384 if self.supported_type(np, st):
383 if self.supported_type(np, st):
385 yield 'f', np, st
384 yield 'f', np, st
386 elif np in dc:
385 elif np in dc:
387 yield 'm', np, st
386 yield 'm', np, st
388
387
389 known = {'.hg': 1}
388 known = {'.hg': 1}
390 def seen(fn):
389 def seen(fn):
391 if fn in known: return True
390 if fn in known: return True
392 known[fn] = 1
391 known[fn] = 1
393
392
394 # step one, find all files that match our criteria
393 # step one, find all files that match our criteria
395 files.sort()
394 files.sort()
396 for ff in util.unique(files):
395 for ff in util.unique(files):
397 f = self.wjoin(ff)
396 f = self.wjoin(ff)
398 try:
397 try:
399 st = os.lstat(f)
398 st = os.lstat(f)
400 except OSError, inst:
399 except OSError, inst:
401 nf = util.normpath(ff)
400 nf = util.normpath(ff)
402 found = False
401 found = False
403 for fn in dc:
402 for fn in dc:
404 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
403 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
405 found = True
404 found = True
406 break
405 break
407 if not found:
406 if not found:
408 if inst.errno != errno.ENOENT or not badmatch:
407 if inst.errno != errno.ENOENT or not badmatch:
409 self.ui.warn('%s: %s\n' % (
408 self.ui.warn('%s: %s\n' % (
410 util.pathto(self.getcwd(), ff),
409 util.pathto(self.getcwd(), ff),
411 inst.strerror))
410 inst.strerror))
412 elif badmatch and badmatch(ff) and statmatch(ff, None):
411 elif badmatch and badmatch(ff) and statmatch(ff, None):
413 yield 'b', ff, None
412 yield 'b', ff, None
414 continue
413 continue
415 if stat.S_ISDIR(st.st_mode):
414 if stat.S_ISDIR(st.st_mode):
416 cmp1 = (lambda x, y: cmp(x[1], y[1]))
415 cmp1 = (lambda x, y: cmp(x[1], y[1]))
417 sorted_ = [ x for x in findfiles(f) ]
416 sorted_ = [ x for x in findfiles(f) ]
418 sorted_.sort(cmp1)
417 sorted_.sort(cmp1)
419 for e in sorted_:
418 for e in sorted_:
420 yield e
419 yield e
421 else:
420 else:
422 ff = util.normpath(ff)
421 ff = util.normpath(ff)
423 if seen(ff):
422 if seen(ff):
424 continue
423 continue
425 self.blockignore = True
424 self.blockignore = True
426 if statmatch(ff, st):
425 if statmatch(ff, st):
427 if self.supported_type(ff, st, verbose=True):
426 if self.supported_type(ff, st, verbose=True):
428 yield 'f', ff, st
427 yield 'f', ff, st
429 elif ff in dc:
428 elif ff in dc:
430 yield 'm', ff, st
429 yield 'm', ff, st
431 self.blockignore = False
430 self.blockignore = False
432
431
433 # step two run through anything left in the dc hash and yield
432 # step two run through anything left in the dc hash and yield
434 # if we haven't already seen it
433 # if we haven't already seen it
435 ks = dc.keys()
434 ks = dc.keys()
436 ks.sort()
435 ks.sort()
437 for k in ks:
436 for k in ks:
438 if not seen(k) and (statmatch(k, None)):
437 if not seen(k) and (statmatch(k, None)):
439 yield 'm', k, None
438 yield 'm', k, None
440
439
441 def changes(self, files=None, match=util.always, show_ignored=None):
440 def changes(self, files=None, match=util.always, show_ignored=None):
442 lookup, modified, added, unknown, ignored = [], [], [], [], []
441 lookup, modified, added, unknown, ignored = [], [], [], [], []
443 removed, deleted = [], []
442 removed, deleted = [], []
444
443
445 for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
444 for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
446 try:
445 try:
447 type_, mode, size, time = self[fn]
446 type_, mode, size, time = self[fn]
448 except KeyError:
447 except KeyError:
449 if show_ignored and self.ignore(fn):
448 if show_ignored and self.ignore(fn):
450 ignored.append(fn)
449 ignored.append(fn)
451 else:
450 else:
452 unknown.append(fn)
451 unknown.append(fn)
453 continue
452 continue
454 if src == 'm':
453 if src == 'm':
455 nonexistent = True
454 nonexistent = True
456 if not st:
455 if not st:
457 try:
456 try:
458 st = os.lstat(self.wjoin(fn))
457 st = os.lstat(self.wjoin(fn))
459 except OSError, inst:
458 except OSError, inst:
460 if inst.errno != errno.ENOENT:
459 if inst.errno != errno.ENOENT:
461 raise
460 raise
462 st = None
461 st = None
463 # We need to re-check that it is a valid file
462 # We need to re-check that it is a valid file
464 if st and self.supported_type(fn, st):
463 if st and self.supported_type(fn, st):
465 nonexistent = False
464 nonexistent = False
466 # XXX: what to do with file no longer present in the fs
465 # XXX: what to do with file no longer present in the fs
467 # who are not removed in the dirstate ?
466 # who are not removed in the dirstate ?
468 if nonexistent and type_ in "nm":
467 if nonexistent and type_ in "nm":
469 deleted.append(fn)
468 deleted.append(fn)
470 continue
469 continue
471 # check the common case first
470 # check the common case first
472 if type_ == 'n':
471 if type_ == 'n':
473 if not st:
472 if not st:
474 st = os.lstat(self.wjoin(fn))
473 st = os.lstat(self.wjoin(fn))
475 if size >= 0 and (size != st.st_size
474 if size >= 0 and (size != st.st_size
476 or (mode ^ st.st_mode) & 0100):
475 or (mode ^ st.st_mode) & 0100):
477 modified.append(fn)
476 modified.append(fn)
478 elif time != st.st_mtime:
477 elif time != st.st_mtime:
479 lookup.append(fn)
478 lookup.append(fn)
480 elif type_ == 'm':
479 elif type_ == 'm':
481 modified.append(fn)
480 modified.append(fn)
482 elif type_ == 'a':
481 elif type_ == 'a':
483 added.append(fn)
482 added.append(fn)
484 elif type_ == 'r':
483 elif type_ == 'r':
485 removed.append(fn)
484 removed.append(fn)
486
485
487 return (lookup, modified, added, removed, deleted, unknown, ignored)
486 return (lookup, modified, added, removed, deleted, unknown, ignored)
@@ -1,108 +1,107 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os
9 from revlog import *
8 from revlog import *
10 from demandload import *
9 from demandload import *
11 demandload(globals(), "bdiff")
10 demandload(globals(), "bdiff os")
12
11
13 class filelog(revlog):
12 class filelog(revlog):
14 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
15 revlog.__init__(self, opener,
14 revlog.__init__(self, opener,
16 os.path.join("data", self.encodedir(path + ".i")),
15 os.path.join("data", self.encodedir(path + ".i")),
17 os.path.join("data", self.encodedir(path + ".d")),
16 os.path.join("data", self.encodedir(path + ".d")),
18 defversion)
17 defversion)
19
18
20 # This avoids a collision between a file named foo and a dir named
19 # This avoids a collision between a file named foo and a dir named
21 # foo.i or foo.d
20 # foo.i or foo.d
22 def encodedir(self, path):
21 def encodedir(self, path):
23 return (path
22 return (path
24 .replace(".hg/", ".hg.hg/")
23 .replace(".hg/", ".hg.hg/")
25 .replace(".i/", ".i.hg/")
24 .replace(".i/", ".i.hg/")
26 .replace(".d/", ".d.hg/"))
25 .replace(".d/", ".d.hg/"))
27
26
28 def decodedir(self, path):
27 def decodedir(self, path):
29 return (path
28 return (path
30 .replace(".d.hg/", ".d/")
29 .replace(".d.hg/", ".d/")
31 .replace(".i.hg/", ".i/")
30 .replace(".i.hg/", ".i/")
32 .replace(".hg.hg/", ".hg/"))
31 .replace(".hg.hg/", ".hg/"))
33
32
34 def read(self, node):
33 def read(self, node):
35 t = self.revision(node)
34 t = self.revision(node)
36 if not t.startswith('\1\n'):
35 if not t.startswith('\1\n'):
37 return t
36 return t
38 s = t.find('\1\n', 2)
37 s = t.find('\1\n', 2)
39 return t[s+2:]
38 return t[s+2:]
40
39
41 def readmeta(self, node):
40 def readmeta(self, node):
42 t = self.revision(node)
41 t = self.revision(node)
43 if not t.startswith('\1\n'):
42 if not t.startswith('\1\n'):
44 return {}
43 return {}
45 s = t.find('\1\n', 2)
44 s = t.find('\1\n', 2)
46 mt = t[2:s]
45 mt = t[2:s]
47 m = {}
46 m = {}
48 for l in mt.splitlines():
47 for l in mt.splitlines():
49 k, v = l.split(": ", 1)
48 k, v = l.split(": ", 1)
50 m[k] = v
49 m[k] = v
51 return m
50 return m
52
51
53 def add(self, text, meta, transaction, link, p1=None, p2=None):
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
54 if meta or text.startswith('\1\n'):
53 if meta or text.startswith('\1\n'):
55 mt = ""
54 mt = ""
56 if meta:
55 if meta:
57 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
58 text = "\1\n%s\1\n%s" % ("".join(mt), text)
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
59 return self.addrevision(text, transaction, link, p1, p2)
58 return self.addrevision(text, transaction, link, p1, p2)
60
59
61 def renamed(self, node):
60 def renamed(self, node):
62 if self.parents(node)[0] != nullid:
61 if self.parents(node)[0] != nullid:
63 return False
62 return False
64 m = self.readmeta(node)
63 m = self.readmeta(node)
65 if m and m.has_key("copy"):
64 if m and m.has_key("copy"):
66 return (m["copy"], bin(m["copyrev"]))
65 return (m["copy"], bin(m["copyrev"]))
67 return False
66 return False
68
67
69 def annotate(self, node):
68 def annotate(self, node):
70
69
71 def decorate(text, rev):
70 def decorate(text, rev):
72 return ([rev] * len(text.splitlines()), text)
71 return ([rev] * len(text.splitlines()), text)
73
72
74 def pair(parent, child):
73 def pair(parent, child):
75 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
74 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
76 child[0][b1:b2] = parent[0][a1:a2]
75 child[0][b1:b2] = parent[0][a1:a2]
77 return child
76 return child
78
77
79 # find all ancestors
78 # find all ancestors
80 needed = {node:1}
79 needed = {node:1}
81 visit = [node]
80 visit = [node]
82 while visit:
81 while visit:
83 n = visit.pop(0)
82 n = visit.pop(0)
84 for p in self.parents(n):
83 for p in self.parents(n):
85 if p not in needed:
84 if p not in needed:
86 needed[p] = 1
85 needed[p] = 1
87 visit.append(p)
86 visit.append(p)
88 else:
87 else:
89 # count how many times we'll use this
88 # count how many times we'll use this
90 needed[p] += 1
89 needed[p] += 1
91
90
92 # sort by revision which is a topological order
91 # sort by revision which is a topological order
93 visit = [ (self.rev(n), n) for n in needed.keys() ]
92 visit = [ (self.rev(n), n) for n in needed.keys() ]
94 visit.sort()
93 visit.sort()
95 hist = {}
94 hist = {}
96
95
97 for r,n in visit:
96 for r,n in visit:
98 curr = decorate(self.read(n), self.linkrev(n))
97 curr = decorate(self.read(n), self.linkrev(n))
99 for p in self.parents(n):
98 for p in self.parents(n):
100 if p != nullid:
99 if p != nullid:
101 curr = pair(hist[p], curr)
100 curr = pair(hist[p], curr)
102 # trim the history of unneeded revs
101 # trim the history of unneeded revs
103 needed[p] -= 1
102 needed[p] -= 1
104 if not needed[p]:
103 if not needed[p]:
105 del hist[p]
104 del hist[p]
106 hist[n] = curr
105 hist[n] = curr
107
106
108 return zip(hist[n][0], hist[n][1].splitlines(1))
107 return zip(hist[n][0], hist[n][1].splitlines(1))
@@ -1,2152 +1,2151 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
8 from node import *
11 from i18n import gettext as _
9 from i18n import gettext as _
12 from demandload import *
10 from demandload import *
13 demandload(globals(), "appendfile changegroup")
11 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "changelog dirstate filelog manifest repo")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog")
14 demandload(globals(), "os revlog util")
16
15
17 class localrepository(object):
16 class localrepository(object):
18 capabilities = ()
17 capabilities = ()
19
18
20 def __del__(self):
19 def __del__(self):
21 self.transhandle = None
20 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
23 if not path:
22 if not path:
24 p = os.getcwd()
23 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
24 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
25 oldp = p
27 p = os.path.dirname(p)
26 p = os.path.dirname(p)
28 if p == oldp:
27 if p == oldp:
29 raise repo.RepoError(_("no repo found"))
28 raise repo.RepoError(_("no repo found"))
30 path = p
29 path = p
31 self.path = os.path.join(path, ".hg")
30 self.path = os.path.join(path, ".hg")
32
31
33 if not create and not os.path.isdir(self.path):
32 if not create and not os.path.isdir(self.path):
34 raise repo.RepoError(_("repository %s not found") % path)
33 raise repo.RepoError(_("repository %s not found") % path)
35
34
36 self.root = os.path.abspath(path)
35 self.root = os.path.abspath(path)
37 self.origroot = path
36 self.origroot = path
38 self.ui = ui.ui(parentui=parentui)
37 self.ui = ui.ui(parentui=parentui)
39 self.opener = util.opener(self.path)
38 self.opener = util.opener(self.path)
40 self.wopener = util.opener(self.root)
39 self.wopener = util.opener(self.root)
41
40
42 try:
41 try:
43 self.ui.readconfig(self.join("hgrc"), self.root)
42 self.ui.readconfig(self.join("hgrc"), self.root)
44 except IOError:
43 except IOError:
45 pass
44 pass
46
45
47 v = self.ui.revlogopts
46 v = self.ui.revlogopts
48 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 fl = v.get('flags', None)
49 fl = v.get('flags', None)
51 flags = 0
50 flags = 0
52 if fl != None:
51 if fl != None:
53 for x in fl.split():
52 for x in fl.split():
54 flags |= revlog.flagstr(x)
53 flags |= revlog.flagstr(x)
55 elif self.revlogv1:
54 elif self.revlogv1:
56 flags = revlog.REVLOG_DEFAULT_FLAGS
55 flags = revlog.REVLOG_DEFAULT_FLAGS
57
56
58 v = self.revlogversion | flags
57 v = self.revlogversion | flags
59 self.manifest = manifest.manifest(self.opener, v)
58 self.manifest = manifest.manifest(self.opener, v)
60 self.changelog = changelog.changelog(self.opener, v)
59 self.changelog = changelog.changelog(self.opener, v)
61
60
62 # the changelog might not have the inline index flag
61 # the changelog might not have the inline index flag
63 # on. If the format of the changelog is the same as found in
62 # on. If the format of the changelog is the same as found in
64 # .hgrc, apply any flags found in the .hgrc as well.
63 # .hgrc, apply any flags found in the .hgrc as well.
65 # Otherwise, just version from the changelog
64 # Otherwise, just version from the changelog
66 v = self.changelog.version
65 v = self.changelog.version
67 if v == self.revlogversion:
66 if v == self.revlogversion:
68 v |= flags
67 v |= flags
69 self.revlogversion = v
68 self.revlogversion = v
70
69
71 self.tagscache = None
70 self.tagscache = None
72 self.nodetagscache = None
71 self.nodetagscache = None
73 self.encodepats = None
72 self.encodepats = None
74 self.decodepats = None
73 self.decodepats = None
75 self.transhandle = None
74 self.transhandle = None
76
75
77 if create:
76 if create:
78 os.mkdir(self.path)
77 os.mkdir(self.path)
79 os.mkdir(self.join("data"))
78 os.mkdir(self.join("data"))
80
79
81 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
82
81
83 def hook(self, name, throw=False, **args):
82 def hook(self, name, throw=False, **args):
84 def callhook(hname, funcname):
83 def callhook(hname, funcname):
85 '''call python hook. hook is callable object, looked up as
84 '''call python hook. hook is callable object, looked up as
86 name in python module. if callable returns "true", hook
85 name in python module. if callable returns "true", hook
87 fails, else passes. if hook raises exception, treated as
86 fails, else passes. if hook raises exception, treated as
88 hook failure. exception propagates if throw is "true".
87 hook failure. exception propagates if throw is "true".
89
88
90 reason for "true" meaning "hook failed" is so that
89 reason for "true" meaning "hook failed" is so that
91 unmodified commands (e.g. mercurial.commands.update) can
90 unmodified commands (e.g. mercurial.commands.update) can
92 be run as hooks without wrappers to convert return values.'''
91 be run as hooks without wrappers to convert return values.'''
93
92
94 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
93 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
95 d = funcname.rfind('.')
94 d = funcname.rfind('.')
96 if d == -1:
95 if d == -1:
97 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
96 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
98 % (hname, funcname))
97 % (hname, funcname))
99 modname = funcname[:d]
98 modname = funcname[:d]
100 try:
99 try:
101 obj = __import__(modname)
100 obj = __import__(modname)
102 except ImportError:
101 except ImportError:
103 raise util.Abort(_('%s hook is invalid '
102 raise util.Abort(_('%s hook is invalid '
104 '(import of "%s" failed)') %
103 '(import of "%s" failed)') %
105 (hname, modname))
104 (hname, modname))
106 try:
105 try:
107 for p in funcname.split('.')[1:]:
106 for p in funcname.split('.')[1:]:
108 obj = getattr(obj, p)
107 obj = getattr(obj, p)
109 except AttributeError, err:
108 except AttributeError, err:
110 raise util.Abort(_('%s hook is invalid '
109 raise util.Abort(_('%s hook is invalid '
111 '("%s" is not defined)') %
110 '("%s" is not defined)') %
112 (hname, funcname))
111 (hname, funcname))
113 if not callable(obj):
112 if not callable(obj):
114 raise util.Abort(_('%s hook is invalid '
113 raise util.Abort(_('%s hook is invalid '
115 '("%s" is not callable)') %
114 '("%s" is not callable)') %
116 (hname, funcname))
115 (hname, funcname))
117 try:
116 try:
118 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
117 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
119 except (KeyboardInterrupt, util.SignalInterrupt):
118 except (KeyboardInterrupt, util.SignalInterrupt):
120 raise
119 raise
121 except Exception, exc:
120 except Exception, exc:
122 if isinstance(exc, util.Abort):
121 if isinstance(exc, util.Abort):
123 self.ui.warn(_('error: %s hook failed: %s\n') %
122 self.ui.warn(_('error: %s hook failed: %s\n') %
124 (hname, exc.args[0] % exc.args[1:]))
123 (hname, exc.args[0] % exc.args[1:]))
125 else:
124 else:
126 self.ui.warn(_('error: %s hook raised an exception: '
125 self.ui.warn(_('error: %s hook raised an exception: '
127 '%s\n') % (hname, exc))
126 '%s\n') % (hname, exc))
128 if throw:
127 if throw:
129 raise
128 raise
130 self.ui.print_exc()
129 self.ui.print_exc()
131 return True
130 return True
132 if r:
131 if r:
133 if throw:
132 if throw:
134 raise util.Abort(_('%s hook failed') % hname)
133 raise util.Abort(_('%s hook failed') % hname)
135 self.ui.warn(_('warning: %s hook failed\n') % hname)
134 self.ui.warn(_('warning: %s hook failed\n') % hname)
136 return r
135 return r
137
136
138 def runhook(name, cmd):
137 def runhook(name, cmd):
139 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
138 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
140 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
139 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
141 r = util.system(cmd, environ=env, cwd=self.root)
140 r = util.system(cmd, environ=env, cwd=self.root)
142 if r:
141 if r:
143 desc, r = util.explain_exit(r)
142 desc, r = util.explain_exit(r)
144 if throw:
143 if throw:
145 raise util.Abort(_('%s hook %s') % (name, desc))
144 raise util.Abort(_('%s hook %s') % (name, desc))
146 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
145 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
147 return r
146 return r
148
147
149 r = False
148 r = False
150 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
149 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
151 if hname.split(".", 1)[0] == name and cmd]
150 if hname.split(".", 1)[0] == name and cmd]
152 hooks.sort()
151 hooks.sort()
153 for hname, cmd in hooks:
152 for hname, cmd in hooks:
154 if cmd.startswith('python:'):
153 if cmd.startswith('python:'):
155 r = callhook(hname, cmd[7:].strip()) or r
154 r = callhook(hname, cmd[7:].strip()) or r
156 else:
155 else:
157 r = runhook(hname, cmd) or r
156 r = runhook(hname, cmd) or r
158 return r
157 return r
159
158
160 def tags(self):
159 def tags(self):
161 '''return a mapping of tag to node'''
160 '''return a mapping of tag to node'''
162 if not self.tagscache:
161 if not self.tagscache:
163 self.tagscache = {}
162 self.tagscache = {}
164
163
165 def parsetag(line, context):
164 def parsetag(line, context):
166 if not line:
165 if not line:
167 return
166 return
168 s = l.split(" ", 1)
167 s = l.split(" ", 1)
169 if len(s) != 2:
168 if len(s) != 2:
170 self.ui.warn(_("%s: cannot parse entry\n") % context)
169 self.ui.warn(_("%s: cannot parse entry\n") % context)
171 return
170 return
172 node, key = s
171 node, key = s
173 key = key.strip()
172 key = key.strip()
174 try:
173 try:
175 bin_n = bin(node)
174 bin_n = bin(node)
176 except TypeError:
175 except TypeError:
177 self.ui.warn(_("%s: node '%s' is not well formed\n") %
176 self.ui.warn(_("%s: node '%s' is not well formed\n") %
178 (context, node))
177 (context, node))
179 return
178 return
180 if bin_n not in self.changelog.nodemap:
179 if bin_n not in self.changelog.nodemap:
181 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
180 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
182 (context, key))
181 (context, key))
183 return
182 return
184 self.tagscache[key] = bin_n
183 self.tagscache[key] = bin_n
185
184
186 # read the tags file from each head, ending with the tip,
185 # read the tags file from each head, ending with the tip,
187 # and add each tag found to the map, with "newer" ones
186 # and add each tag found to the map, with "newer" ones
188 # taking precedence
187 # taking precedence
189 heads = self.heads()
188 heads = self.heads()
190 heads.reverse()
189 heads.reverse()
191 fl = self.file(".hgtags")
190 fl = self.file(".hgtags")
192 for node in heads:
191 for node in heads:
193 change = self.changelog.read(node)
192 change = self.changelog.read(node)
194 rev = self.changelog.rev(node)
193 rev = self.changelog.rev(node)
195 fn, ff = self.manifest.find(change[0], '.hgtags')
194 fn, ff = self.manifest.find(change[0], '.hgtags')
196 if fn is None: continue
195 if fn is None: continue
197 count = 0
196 count = 0
198 for l in fl.read(fn).splitlines():
197 for l in fl.read(fn).splitlines():
199 count += 1
198 count += 1
200 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
199 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
201 (rev, short(node), count))
200 (rev, short(node), count))
202 try:
201 try:
203 f = self.opener("localtags")
202 f = self.opener("localtags")
204 count = 0
203 count = 0
205 for l in f:
204 for l in f:
206 count += 1
205 count += 1
207 parsetag(l, _("localtags, line %d") % count)
206 parsetag(l, _("localtags, line %d") % count)
208 except IOError:
207 except IOError:
209 pass
208 pass
210
209
211 self.tagscache['tip'] = self.changelog.tip()
210 self.tagscache['tip'] = self.changelog.tip()
212
211
213 return self.tagscache
212 return self.tagscache
214
213
215 def tagslist(self):
214 def tagslist(self):
216 '''return a list of tags ordered by revision'''
215 '''return a list of tags ordered by revision'''
217 l = []
216 l = []
218 for t, n in self.tags().items():
217 for t, n in self.tags().items():
219 try:
218 try:
220 r = self.changelog.rev(n)
219 r = self.changelog.rev(n)
221 except:
220 except:
222 r = -2 # sort to the beginning of the list if unknown
221 r = -2 # sort to the beginning of the list if unknown
223 l.append((r, t, n))
222 l.append((r, t, n))
224 l.sort()
223 l.sort()
225 return [(t, n) for r, t, n in l]
224 return [(t, n) for r, t, n in l]
226
225
227 def nodetags(self, node):
226 def nodetags(self, node):
228 '''return the tags associated with a node'''
227 '''return the tags associated with a node'''
229 if not self.nodetagscache:
228 if not self.nodetagscache:
230 self.nodetagscache = {}
229 self.nodetagscache = {}
231 for t, n in self.tags().items():
230 for t, n in self.tags().items():
232 self.nodetagscache.setdefault(n, []).append(t)
231 self.nodetagscache.setdefault(n, []).append(t)
233 return self.nodetagscache.get(node, [])
232 return self.nodetagscache.get(node, [])
234
233
235 def lookup(self, key):
234 def lookup(self, key):
236 try:
235 try:
237 return self.tags()[key]
236 return self.tags()[key]
238 except KeyError:
237 except KeyError:
239 try:
238 try:
240 return self.changelog.lookup(key)
239 return self.changelog.lookup(key)
241 except:
240 except:
242 raise repo.RepoError(_("unknown revision '%s'") % key)
241 raise repo.RepoError(_("unknown revision '%s'") % key)
243
242
244 def dev(self):
243 def dev(self):
245 return os.lstat(self.path).st_dev
244 return os.lstat(self.path).st_dev
246
245
247 def local(self):
246 def local(self):
248 return True
247 return True
249
248
250 def join(self, f):
249 def join(self, f):
251 return os.path.join(self.path, f)
250 return os.path.join(self.path, f)
252
251
253 def wjoin(self, f):
252 def wjoin(self, f):
254 return os.path.join(self.root, f)
253 return os.path.join(self.root, f)
255
254
256 def file(self, f):
255 def file(self, f):
257 if f[0] == '/':
256 if f[0] == '/':
258 f = f[1:]
257 f = f[1:]
259 return filelog.filelog(self.opener, f, self.revlogversion)
258 return filelog.filelog(self.opener, f, self.revlogversion)
260
259
261 def getcwd(self):
260 def getcwd(self):
262 return self.dirstate.getcwd()
261 return self.dirstate.getcwd()
263
262
264 def wfile(self, f, mode='r'):
263 def wfile(self, f, mode='r'):
265 return self.wopener(f, mode)
264 return self.wopener(f, mode)
266
265
267 def wread(self, filename):
266 def wread(self, filename):
268 if self.encodepats == None:
267 if self.encodepats == None:
269 l = []
268 l = []
270 for pat, cmd in self.ui.configitems("encode"):
269 for pat, cmd in self.ui.configitems("encode"):
271 mf = util.matcher(self.root, "", [pat], [], [])[1]
270 mf = util.matcher(self.root, "", [pat], [], [])[1]
272 l.append((mf, cmd))
271 l.append((mf, cmd))
273 self.encodepats = l
272 self.encodepats = l
274
273
275 data = self.wopener(filename, 'r').read()
274 data = self.wopener(filename, 'r').read()
276
275
277 for mf, cmd in self.encodepats:
276 for mf, cmd in self.encodepats:
278 if mf(filename):
277 if mf(filename):
279 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
278 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
280 data = util.filter(data, cmd)
279 data = util.filter(data, cmd)
281 break
280 break
282
281
283 return data
282 return data
284
283
285 def wwrite(self, filename, data, fd=None):
284 def wwrite(self, filename, data, fd=None):
286 if self.decodepats == None:
285 if self.decodepats == None:
287 l = []
286 l = []
288 for pat, cmd in self.ui.configitems("decode"):
287 for pat, cmd in self.ui.configitems("decode"):
289 mf = util.matcher(self.root, "", [pat], [], [])[1]
288 mf = util.matcher(self.root, "", [pat], [], [])[1]
290 l.append((mf, cmd))
289 l.append((mf, cmd))
291 self.decodepats = l
290 self.decodepats = l
292
291
293 for mf, cmd in self.decodepats:
292 for mf, cmd in self.decodepats:
294 if mf(filename):
293 if mf(filename):
295 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
294 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
296 data = util.filter(data, cmd)
295 data = util.filter(data, cmd)
297 break
296 break
298
297
299 if fd:
298 if fd:
300 return fd.write(data)
299 return fd.write(data)
301 return self.wopener(filename, 'w').write(data)
300 return self.wopener(filename, 'w').write(data)
302
301
303 def transaction(self):
302 def transaction(self):
304 tr = self.transhandle
303 tr = self.transhandle
305 if tr != None and tr.running():
304 if tr != None and tr.running():
306 return tr.nest()
305 return tr.nest()
307
306
308 # save dirstate for rollback
307 # save dirstate for rollback
309 try:
308 try:
310 ds = self.opener("dirstate").read()
309 ds = self.opener("dirstate").read()
311 except IOError:
310 except IOError:
312 ds = ""
311 ds = ""
313 self.opener("journal.dirstate", "w").write(ds)
312 self.opener("journal.dirstate", "w").write(ds)
314
313
315 tr = transaction.transaction(self.ui.warn, self.opener,
314 tr = transaction.transaction(self.ui.warn, self.opener,
316 self.join("journal"),
315 self.join("journal"),
317 aftertrans(self.path))
316 aftertrans(self.path))
318 self.transhandle = tr
317 self.transhandle = tr
319 return tr
318 return tr
320
319
321 def recover(self):
320 def recover(self):
322 l = self.lock()
321 l = self.lock()
323 if os.path.exists(self.join("journal")):
322 if os.path.exists(self.join("journal")):
324 self.ui.status(_("rolling back interrupted transaction\n"))
323 self.ui.status(_("rolling back interrupted transaction\n"))
325 transaction.rollback(self.opener, self.join("journal"))
324 transaction.rollback(self.opener, self.join("journal"))
326 self.reload()
325 self.reload()
327 return True
326 return True
328 else:
327 else:
329 self.ui.warn(_("no interrupted transaction available\n"))
328 self.ui.warn(_("no interrupted transaction available\n"))
330 return False
329 return False
331
330
332 def rollback(self, wlock=None):
331 def rollback(self, wlock=None):
333 if not wlock:
332 if not wlock:
334 wlock = self.wlock()
333 wlock = self.wlock()
335 l = self.lock()
334 l = self.lock()
336 if os.path.exists(self.join("undo")):
335 if os.path.exists(self.join("undo")):
337 self.ui.status(_("rolling back last transaction\n"))
336 self.ui.status(_("rolling back last transaction\n"))
338 transaction.rollback(self.opener, self.join("undo"))
337 transaction.rollback(self.opener, self.join("undo"))
339 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
338 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
340 self.reload()
339 self.reload()
341 self.wreload()
340 self.wreload()
342 else:
341 else:
343 self.ui.warn(_("no rollback information available\n"))
342 self.ui.warn(_("no rollback information available\n"))
344
343
345 def wreload(self):
344 def wreload(self):
346 self.dirstate.read()
345 self.dirstate.read()
347
346
348 def reload(self):
347 def reload(self):
349 self.changelog.load()
348 self.changelog.load()
350 self.manifest.load()
349 self.manifest.load()
351 self.tagscache = None
350 self.tagscache = None
352 self.nodetagscache = None
351 self.nodetagscache = None
353
352
354 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
353 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
355 desc=None):
354 desc=None):
356 try:
355 try:
357 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
356 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
358 except lock.LockHeld, inst:
357 except lock.LockHeld, inst:
359 if not wait:
358 if not wait:
360 raise
359 raise
361 self.ui.warn(_("waiting for lock on %s held by %s\n") %
360 self.ui.warn(_("waiting for lock on %s held by %s\n") %
362 (desc, inst.args[0]))
361 (desc, inst.args[0]))
363 # default to 600 seconds timeout
362 # default to 600 seconds timeout
364 l = lock.lock(self.join(lockname),
363 l = lock.lock(self.join(lockname),
365 int(self.ui.config("ui", "timeout") or 600),
364 int(self.ui.config("ui", "timeout") or 600),
366 releasefn, desc=desc)
365 releasefn, desc=desc)
367 if acquirefn:
366 if acquirefn:
368 acquirefn()
367 acquirefn()
369 return l
368 return l
370
369
371 def lock(self, wait=1):
370 def lock(self, wait=1):
372 return self.do_lock("lock", wait, acquirefn=self.reload,
371 return self.do_lock("lock", wait, acquirefn=self.reload,
373 desc=_('repository %s') % self.origroot)
372 desc=_('repository %s') % self.origroot)
374
373
375 def wlock(self, wait=1):
374 def wlock(self, wait=1):
376 return self.do_lock("wlock", wait, self.dirstate.write,
375 return self.do_lock("wlock", wait, self.dirstate.write,
377 self.wreload,
376 self.wreload,
378 desc=_('working directory of %s') % self.origroot)
377 desc=_('working directory of %s') % self.origroot)
379
378
380 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
379 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
381 "determine whether a new filenode is needed"
380 "determine whether a new filenode is needed"
382 fp1 = manifest1.get(filename, nullid)
381 fp1 = manifest1.get(filename, nullid)
383 fp2 = manifest2.get(filename, nullid)
382 fp2 = manifest2.get(filename, nullid)
384
383
385 if fp2 != nullid:
384 if fp2 != nullid:
386 # is one parent an ancestor of the other?
385 # is one parent an ancestor of the other?
387 fpa = filelog.ancestor(fp1, fp2)
386 fpa = filelog.ancestor(fp1, fp2)
388 if fpa == fp1:
387 if fpa == fp1:
389 fp1, fp2 = fp2, nullid
388 fp1, fp2 = fp2, nullid
390 elif fpa == fp2:
389 elif fpa == fp2:
391 fp2 = nullid
390 fp2 = nullid
392
391
393 # is the file unmodified from the parent? report existing entry
392 # is the file unmodified from the parent? report existing entry
394 if fp2 == nullid and text == filelog.read(fp1):
393 if fp2 == nullid and text == filelog.read(fp1):
395 return (fp1, None, None)
394 return (fp1, None, None)
396
395
397 return (None, fp1, fp2)
396 return (None, fp1, fp2)
398
397
399 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
398 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
400 orig_parent = self.dirstate.parents()[0] or nullid
399 orig_parent = self.dirstate.parents()[0] or nullid
401 p1 = p1 or self.dirstate.parents()[0] or nullid
400 p1 = p1 or self.dirstate.parents()[0] or nullid
402 p2 = p2 or self.dirstate.parents()[1] or nullid
401 p2 = p2 or self.dirstate.parents()[1] or nullid
403 c1 = self.changelog.read(p1)
402 c1 = self.changelog.read(p1)
404 c2 = self.changelog.read(p2)
403 c2 = self.changelog.read(p2)
405 m1 = self.manifest.read(c1[0])
404 m1 = self.manifest.read(c1[0])
406 mf1 = self.manifest.readflags(c1[0])
405 mf1 = self.manifest.readflags(c1[0])
407 m2 = self.manifest.read(c2[0])
406 m2 = self.manifest.read(c2[0])
408 changed = []
407 changed = []
409
408
410 if orig_parent == p1:
409 if orig_parent == p1:
411 update_dirstate = 1
410 update_dirstate = 1
412 else:
411 else:
413 update_dirstate = 0
412 update_dirstate = 0
414
413
415 if not wlock:
414 if not wlock:
416 wlock = self.wlock()
415 wlock = self.wlock()
417 l = self.lock()
416 l = self.lock()
418 tr = self.transaction()
417 tr = self.transaction()
419 mm = m1.copy()
418 mm = m1.copy()
420 mfm = mf1.copy()
419 mfm = mf1.copy()
421 linkrev = self.changelog.count()
420 linkrev = self.changelog.count()
422 for f in files:
421 for f in files:
423 try:
422 try:
424 t = self.wread(f)
423 t = self.wread(f)
425 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
424 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
426 r = self.file(f)
425 r = self.file(f)
427 mfm[f] = tm
426 mfm[f] = tm
428
427
429 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
428 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
430 if entry:
429 if entry:
431 mm[f] = entry
430 mm[f] = entry
432 continue
431 continue
433
432
434 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
433 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
435 changed.append(f)
434 changed.append(f)
436 if update_dirstate:
435 if update_dirstate:
437 self.dirstate.update([f], "n")
436 self.dirstate.update([f], "n")
438 except IOError:
437 except IOError:
439 try:
438 try:
440 del mm[f]
439 del mm[f]
441 del mfm[f]
440 del mfm[f]
442 if update_dirstate:
441 if update_dirstate:
443 self.dirstate.forget([f])
442 self.dirstate.forget([f])
444 except:
443 except:
445 # deleted from p2?
444 # deleted from p2?
446 pass
445 pass
447
446
448 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
447 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
449 user = user or self.ui.username()
448 user = user or self.ui.username()
450 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
449 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
451 tr.close()
450 tr.close()
452 if update_dirstate:
451 if update_dirstate:
453 self.dirstate.setparents(n, nullid)
452 self.dirstate.setparents(n, nullid)
454
453
455 def commit(self, files=None, text="", user=None, date=None,
454 def commit(self, files=None, text="", user=None, date=None,
456 match=util.always, force=False, lock=None, wlock=None,
455 match=util.always, force=False, lock=None, wlock=None,
457 force_editor=False):
456 force_editor=False):
458 commit = []
457 commit = []
459 remove = []
458 remove = []
460 changed = []
459 changed = []
461
460
462 if files:
461 if files:
463 for f in files:
462 for f in files:
464 s = self.dirstate.state(f)
463 s = self.dirstate.state(f)
465 if s in 'nmai':
464 if s in 'nmai':
466 commit.append(f)
465 commit.append(f)
467 elif s == 'r':
466 elif s == 'r':
468 remove.append(f)
467 remove.append(f)
469 else:
468 else:
470 self.ui.warn(_("%s not tracked!\n") % f)
469 self.ui.warn(_("%s not tracked!\n") % f)
471 else:
470 else:
472 modified, added, removed, deleted, unknown = self.changes(match=match)
471 modified, added, removed, deleted, unknown = self.changes(match=match)
473 commit = modified + added
472 commit = modified + added
474 remove = removed
473 remove = removed
475
474
476 p1, p2 = self.dirstate.parents()
475 p1, p2 = self.dirstate.parents()
477 c1 = self.changelog.read(p1)
476 c1 = self.changelog.read(p1)
478 c2 = self.changelog.read(p2)
477 c2 = self.changelog.read(p2)
479 m1 = self.manifest.read(c1[0])
478 m1 = self.manifest.read(c1[0])
480 mf1 = self.manifest.readflags(c1[0])
479 mf1 = self.manifest.readflags(c1[0])
481 m2 = self.manifest.read(c2[0])
480 m2 = self.manifest.read(c2[0])
482
481
483 if not commit and not remove and not force and p2 == nullid:
482 if not commit and not remove and not force and p2 == nullid:
484 self.ui.status(_("nothing changed\n"))
483 self.ui.status(_("nothing changed\n"))
485 return None
484 return None
486
485
487 xp1 = hex(p1)
486 xp1 = hex(p1)
488 if p2 == nullid: xp2 = ''
487 if p2 == nullid: xp2 = ''
489 else: xp2 = hex(p2)
488 else: xp2 = hex(p2)
490
489
491 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
490 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
492
491
493 if not wlock:
492 if not wlock:
494 wlock = self.wlock()
493 wlock = self.wlock()
495 if not lock:
494 if not lock:
496 lock = self.lock()
495 lock = self.lock()
497 tr = self.transaction()
496 tr = self.transaction()
498
497
499 # check in files
498 # check in files
500 new = {}
499 new = {}
501 linkrev = self.changelog.count()
500 linkrev = self.changelog.count()
502 commit.sort()
501 commit.sort()
503 for f in commit:
502 for f in commit:
504 self.ui.note(f + "\n")
503 self.ui.note(f + "\n")
505 try:
504 try:
506 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
505 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
507 t = self.wread(f)
506 t = self.wread(f)
508 except IOError:
507 except IOError:
509 self.ui.warn(_("trouble committing %s!\n") % f)
508 self.ui.warn(_("trouble committing %s!\n") % f)
510 raise
509 raise
511
510
512 r = self.file(f)
511 r = self.file(f)
513
512
514 meta = {}
513 meta = {}
515 cp = self.dirstate.copied(f)
514 cp = self.dirstate.copied(f)
516 if cp:
515 if cp:
517 meta["copy"] = cp
516 meta["copy"] = cp
518 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
517 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
519 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
518 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
520 fp1, fp2 = nullid, nullid
519 fp1, fp2 = nullid, nullid
521 else:
520 else:
522 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
521 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
523 if entry:
522 if entry:
524 new[f] = entry
523 new[f] = entry
525 continue
524 continue
526
525
527 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
526 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
528 # remember what we've added so that we can later calculate
527 # remember what we've added so that we can later calculate
529 # the files to pull from a set of changesets
528 # the files to pull from a set of changesets
530 changed.append(f)
529 changed.append(f)
531
530
532 # update manifest
531 # update manifest
533 m1 = m1.copy()
532 m1 = m1.copy()
534 m1.update(new)
533 m1.update(new)
535 for f in remove:
534 for f in remove:
536 if f in m1:
535 if f in m1:
537 del m1[f]
536 del m1[f]
538 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
537 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
539 (new, remove))
538 (new, remove))
540
539
541 # add changeset
540 # add changeset
542 new = new.keys()
541 new = new.keys()
543 new.sort()
542 new.sort()
544
543
545 user = user or self.ui.username()
544 user = user or self.ui.username()
546 if not text or force_editor:
545 if not text or force_editor:
547 edittext = []
546 edittext = []
548 if text:
547 if text:
549 edittext.append(text)
548 edittext.append(text)
550 edittext.append("")
549 edittext.append("")
551 if p2 != nullid:
550 if p2 != nullid:
552 edittext.append("HG: branch merge")
551 edittext.append("HG: branch merge")
553 edittext.extend(["HG: changed %s" % f for f in changed])
552 edittext.extend(["HG: changed %s" % f for f in changed])
554 edittext.extend(["HG: removed %s" % f for f in remove])
553 edittext.extend(["HG: removed %s" % f for f in remove])
555 if not changed and not remove:
554 if not changed and not remove:
556 edittext.append("HG: no files changed")
555 edittext.append("HG: no files changed")
557 edittext.append("")
556 edittext.append("")
558 # run editor in the repository root
557 # run editor in the repository root
559 olddir = os.getcwd()
558 olddir = os.getcwd()
560 os.chdir(self.root)
559 os.chdir(self.root)
561 text = self.ui.edit("\n".join(edittext), user)
560 text = self.ui.edit("\n".join(edittext), user)
562 os.chdir(olddir)
561 os.chdir(olddir)
563
562
564 lines = [line.rstrip() for line in text.rstrip().splitlines()]
563 lines = [line.rstrip() for line in text.rstrip().splitlines()]
565 while lines and not lines[0]:
564 while lines and not lines[0]:
566 del lines[0]
565 del lines[0]
567 if not lines:
566 if not lines:
568 return None
567 return None
569 text = '\n'.join(lines)
568 text = '\n'.join(lines)
570 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
569 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
571 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
570 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
572 parent2=xp2)
571 parent2=xp2)
573 tr.close()
572 tr.close()
574
573
575 self.dirstate.setparents(n)
574 self.dirstate.setparents(n)
576 self.dirstate.update(new, "n")
575 self.dirstate.update(new, "n")
577 self.dirstate.forget(remove)
576 self.dirstate.forget(remove)
578
577
579 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
578 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
580 return n
579 return n
581
580
582 def walk(self, node=None, files=[], match=util.always, badmatch=None):
581 def walk(self, node=None, files=[], match=util.always, badmatch=None):
583 if node:
582 if node:
584 fdict = dict.fromkeys(files)
583 fdict = dict.fromkeys(files)
585 for fn in self.manifest.read(self.changelog.read(node)[0]):
584 for fn in self.manifest.read(self.changelog.read(node)[0]):
586 fdict.pop(fn, None)
585 fdict.pop(fn, None)
587 if match(fn):
586 if match(fn):
588 yield 'm', fn
587 yield 'm', fn
589 for fn in fdict:
588 for fn in fdict:
590 if badmatch and badmatch(fn):
589 if badmatch and badmatch(fn):
591 if match(fn):
590 if match(fn):
592 yield 'b', fn
591 yield 'b', fn
593 else:
592 else:
594 self.ui.warn(_('%s: No such file in rev %s\n') % (
593 self.ui.warn(_('%s: No such file in rev %s\n') % (
595 util.pathto(self.getcwd(), fn), short(node)))
594 util.pathto(self.getcwd(), fn), short(node)))
596 else:
595 else:
597 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
596 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
598 yield src, fn
597 yield src, fn
599
598
600 def changes(self, node1=None, node2=None, files=[], match=util.always,
599 def changes(self, node1=None, node2=None, files=[], match=util.always,
601 wlock=None, show_ignored=None):
600 wlock=None, show_ignored=None):
602 """return changes between two nodes or node and working directory
601 """return changes between two nodes or node and working directory
603
602
604 If node1 is None, use the first dirstate parent instead.
603 If node1 is None, use the first dirstate parent instead.
605 If node2 is None, compare node1 with working directory.
604 If node2 is None, compare node1 with working directory.
606 """
605 """
607
606
608 def fcmp(fn, mf):
607 def fcmp(fn, mf):
609 t1 = self.wread(fn)
608 t1 = self.wread(fn)
610 t2 = self.file(fn).read(mf.get(fn, nullid))
609 t2 = self.file(fn).read(mf.get(fn, nullid))
611 return cmp(t1, t2)
610 return cmp(t1, t2)
612
611
613 def mfmatches(node):
612 def mfmatches(node):
614 change = self.changelog.read(node)
613 change = self.changelog.read(node)
615 mf = dict(self.manifest.read(change[0]))
614 mf = dict(self.manifest.read(change[0]))
616 for fn in mf.keys():
615 for fn in mf.keys():
617 if not match(fn):
616 if not match(fn):
618 del mf[fn]
617 del mf[fn]
619 return mf
618 return mf
620
619
621 compareworking = False
620 compareworking = False
622 if not node1 or node1 == self.dirstate.parents()[0]:
621 if not node1 or node1 == self.dirstate.parents()[0]:
623 compareworking = True
622 compareworking = True
624
623
625 if not compareworking:
624 if not compareworking:
626 # read the manifest from node1 before the manifest from node2,
625 # read the manifest from node1 before the manifest from node2,
627 # so that we'll hit the manifest cache if we're going through
626 # so that we'll hit the manifest cache if we're going through
628 # all the revisions in parent->child order.
627 # all the revisions in parent->child order.
629 mf1 = mfmatches(node1)
628 mf1 = mfmatches(node1)
630
629
631 # are we comparing the working directory?
630 # are we comparing the working directory?
632 if not node2:
631 if not node2:
633 if not wlock:
632 if not wlock:
634 try:
633 try:
635 wlock = self.wlock(wait=0)
634 wlock = self.wlock(wait=0)
636 except lock.LockException:
635 except lock.LockException:
637 wlock = None
636 wlock = None
638 lookup, modified, added, removed, deleted, unknown, ignored = (
637 lookup, modified, added, removed, deleted, unknown, ignored = (
639 self.dirstate.changes(files, match, show_ignored))
638 self.dirstate.changes(files, match, show_ignored))
640
639
641 # are we comparing working dir against its parent?
640 # are we comparing working dir against its parent?
642 if compareworking:
641 if compareworking:
643 if lookup:
642 if lookup:
644 # do a full compare of any files that might have changed
643 # do a full compare of any files that might have changed
645 mf2 = mfmatches(self.dirstate.parents()[0])
644 mf2 = mfmatches(self.dirstate.parents()[0])
646 for f in lookup:
645 for f in lookup:
647 if fcmp(f, mf2):
646 if fcmp(f, mf2):
648 modified.append(f)
647 modified.append(f)
649 elif wlock is not None:
648 elif wlock is not None:
650 self.dirstate.update([f], "n")
649 self.dirstate.update([f], "n")
651 else:
650 else:
652 # we are comparing working dir against non-parent
651 # we are comparing working dir against non-parent
653 # generate a pseudo-manifest for the working dir
652 # generate a pseudo-manifest for the working dir
654 mf2 = mfmatches(self.dirstate.parents()[0])
653 mf2 = mfmatches(self.dirstate.parents()[0])
655 for f in lookup + modified + added:
654 for f in lookup + modified + added:
656 mf2[f] = ""
655 mf2[f] = ""
657 for f in removed:
656 for f in removed:
658 if f in mf2:
657 if f in mf2:
659 del mf2[f]
658 del mf2[f]
660 else:
659 else:
661 # we are comparing two revisions
660 # we are comparing two revisions
662 deleted, unknown, ignored = [], [], []
661 deleted, unknown, ignored = [], [], []
663 mf2 = mfmatches(node2)
662 mf2 = mfmatches(node2)
664
663
665 if not compareworking:
664 if not compareworking:
666 # flush lists from dirstate before comparing manifests
665 # flush lists from dirstate before comparing manifests
667 modified, added = [], []
666 modified, added = [], []
668
667
669 # make sure to sort the files so we talk to the disk in a
668 # make sure to sort the files so we talk to the disk in a
670 # reasonable order
669 # reasonable order
671 mf2keys = mf2.keys()
670 mf2keys = mf2.keys()
672 mf2keys.sort()
671 mf2keys.sort()
673 for fn in mf2keys:
672 for fn in mf2keys:
674 if mf1.has_key(fn):
673 if mf1.has_key(fn):
675 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
674 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
676 modified.append(fn)
675 modified.append(fn)
677 del mf1[fn]
676 del mf1[fn]
678 else:
677 else:
679 added.append(fn)
678 added.append(fn)
680
679
681 removed = mf1.keys()
680 removed = mf1.keys()
682
681
683 # sort and return results:
682 # sort and return results:
684 for l in modified, added, removed, deleted, unknown, ignored:
683 for l in modified, added, removed, deleted, unknown, ignored:
685 l.sort()
684 l.sort()
686 if show_ignored is None:
685 if show_ignored is None:
687 return (modified, added, removed, deleted, unknown)
686 return (modified, added, removed, deleted, unknown)
688 else:
687 else:
689 return (modified, added, removed, deleted, unknown, ignored)
688 return (modified, added, removed, deleted, unknown, ignored)
690
689
691 def add(self, list, wlock=None):
690 def add(self, list, wlock=None):
692 if not wlock:
691 if not wlock:
693 wlock = self.wlock()
692 wlock = self.wlock()
694 for f in list:
693 for f in list:
695 p = self.wjoin(f)
694 p = self.wjoin(f)
696 if not os.path.exists(p):
695 if not os.path.exists(p):
697 self.ui.warn(_("%s does not exist!\n") % f)
696 self.ui.warn(_("%s does not exist!\n") % f)
698 elif not os.path.isfile(p):
697 elif not os.path.isfile(p):
699 self.ui.warn(_("%s not added: only files supported currently\n")
698 self.ui.warn(_("%s not added: only files supported currently\n")
700 % f)
699 % f)
701 elif self.dirstate.state(f) in 'an':
700 elif self.dirstate.state(f) in 'an':
702 self.ui.warn(_("%s already tracked!\n") % f)
701 self.ui.warn(_("%s already tracked!\n") % f)
703 else:
702 else:
704 self.dirstate.update([f], "a")
703 self.dirstate.update([f], "a")
705
704
706 def forget(self, list, wlock=None):
705 def forget(self, list, wlock=None):
707 if not wlock:
706 if not wlock:
708 wlock = self.wlock()
707 wlock = self.wlock()
709 for f in list:
708 for f in list:
710 if self.dirstate.state(f) not in 'ai':
709 if self.dirstate.state(f) not in 'ai':
711 self.ui.warn(_("%s not added!\n") % f)
710 self.ui.warn(_("%s not added!\n") % f)
712 else:
711 else:
713 self.dirstate.forget([f])
712 self.dirstate.forget([f])
714
713
715 def remove(self, list, unlink=False, wlock=None):
714 def remove(self, list, unlink=False, wlock=None):
716 if unlink:
715 if unlink:
717 for f in list:
716 for f in list:
718 try:
717 try:
719 util.unlink(self.wjoin(f))
718 util.unlink(self.wjoin(f))
720 except OSError, inst:
719 except OSError, inst:
721 if inst.errno != errno.ENOENT:
720 if inst.errno != errno.ENOENT:
722 raise
721 raise
723 if not wlock:
722 if not wlock:
724 wlock = self.wlock()
723 wlock = self.wlock()
725 for f in list:
724 for f in list:
726 p = self.wjoin(f)
725 p = self.wjoin(f)
727 if os.path.exists(p):
726 if os.path.exists(p):
728 self.ui.warn(_("%s still exists!\n") % f)
727 self.ui.warn(_("%s still exists!\n") % f)
729 elif self.dirstate.state(f) == 'a':
728 elif self.dirstate.state(f) == 'a':
730 self.dirstate.forget([f])
729 self.dirstate.forget([f])
731 elif f not in self.dirstate:
730 elif f not in self.dirstate:
732 self.ui.warn(_("%s not tracked!\n") % f)
731 self.ui.warn(_("%s not tracked!\n") % f)
733 else:
732 else:
734 self.dirstate.update([f], "r")
733 self.dirstate.update([f], "r")
735
734
736 def undelete(self, list, wlock=None):
735 def undelete(self, list, wlock=None):
737 p = self.dirstate.parents()[0]
736 p = self.dirstate.parents()[0]
738 mn = self.changelog.read(p)[0]
737 mn = self.changelog.read(p)[0]
739 mf = self.manifest.readflags(mn)
738 mf = self.manifest.readflags(mn)
740 m = self.manifest.read(mn)
739 m = self.manifest.read(mn)
741 if not wlock:
740 if not wlock:
742 wlock = self.wlock()
741 wlock = self.wlock()
743 for f in list:
742 for f in list:
744 if self.dirstate.state(f) not in "r":
743 if self.dirstate.state(f) not in "r":
745 self.ui.warn("%s not removed!\n" % f)
744 self.ui.warn("%s not removed!\n" % f)
746 else:
745 else:
747 t = self.file(f).read(m[f])
746 t = self.file(f).read(m[f])
748 self.wwrite(f, t)
747 self.wwrite(f, t)
749 util.set_exec(self.wjoin(f), mf[f])
748 util.set_exec(self.wjoin(f), mf[f])
750 self.dirstate.update([f], "n")
749 self.dirstate.update([f], "n")
751
750
752 def copy(self, source, dest, wlock=None):
751 def copy(self, source, dest, wlock=None):
753 p = self.wjoin(dest)
752 p = self.wjoin(dest)
754 if not os.path.exists(p):
753 if not os.path.exists(p):
755 self.ui.warn(_("%s does not exist!\n") % dest)
754 self.ui.warn(_("%s does not exist!\n") % dest)
756 elif not os.path.isfile(p):
755 elif not os.path.isfile(p):
757 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
756 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
758 else:
757 else:
759 if not wlock:
758 if not wlock:
760 wlock = self.wlock()
759 wlock = self.wlock()
761 if self.dirstate.state(dest) == '?':
760 if self.dirstate.state(dest) == '?':
762 self.dirstate.update([dest], "a")
761 self.dirstate.update([dest], "a")
763 self.dirstate.copy(source, dest)
762 self.dirstate.copy(source, dest)
764
763
765 def heads(self, start=None):
764 def heads(self, start=None):
766 heads = self.changelog.heads(start)
765 heads = self.changelog.heads(start)
767 # sort the output in rev descending order
766 # sort the output in rev descending order
768 heads = [(-self.changelog.rev(h), h) for h in heads]
767 heads = [(-self.changelog.rev(h), h) for h in heads]
769 heads.sort()
768 heads.sort()
770 return [n for (r, n) in heads]
769 return [n for (r, n) in heads]
771
770
772 # branchlookup returns a dict giving a list of branches for
771 # branchlookup returns a dict giving a list of branches for
773 # each head. A branch is defined as the tag of a node or
772 # each head. A branch is defined as the tag of a node or
774 # the branch of the node's parents. If a node has multiple
773 # the branch of the node's parents. If a node has multiple
775 # branch tags, tags are eliminated if they are visible from other
774 # branch tags, tags are eliminated if they are visible from other
776 # branch tags.
775 # branch tags.
777 #
776 #
778 # So, for this graph: a->b->c->d->e
777 # So, for this graph: a->b->c->d->e
779 # \ /
778 # \ /
780 # aa -----/
779 # aa -----/
781 # a has tag 2.6.12
780 # a has tag 2.6.12
782 # d has tag 2.6.13
781 # d has tag 2.6.13
783 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
782 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
784 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
783 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
785 # from the list.
784 # from the list.
786 #
785 #
787 # It is possible that more than one head will have the same branch tag.
786 # It is possible that more than one head will have the same branch tag.
788 # callers need to check the result for multiple heads under the same
787 # callers need to check the result for multiple heads under the same
789 # branch tag if that is a problem for them (ie checkout of a specific
788 # branch tag if that is a problem for them (ie checkout of a specific
790 # branch).
789 # branch).
791 #
790 #
792 # passing in a specific branch will limit the depth of the search
791 # passing in a specific branch will limit the depth of the search
793 # through the parents. It won't limit the branches returned in the
792 # through the parents. It won't limit the branches returned in the
794 # result though.
793 # result though.
795 def branchlookup(self, heads=None, branch=None):
794 def branchlookup(self, heads=None, branch=None):
796 if not heads:
795 if not heads:
797 heads = self.heads()
796 heads = self.heads()
798 headt = [ h for h in heads ]
797 headt = [ h for h in heads ]
799 chlog = self.changelog
798 chlog = self.changelog
800 branches = {}
799 branches = {}
801 merges = []
800 merges = []
802 seenmerge = {}
801 seenmerge = {}
803
802
804 # traverse the tree once for each head, recording in the branches
803 # traverse the tree once for each head, recording in the branches
805 # dict which tags are visible from this head. The branches
804 # dict which tags are visible from this head. The branches
806 # dict also records which tags are visible from each tag
805 # dict also records which tags are visible from each tag
807 # while we traverse.
806 # while we traverse.
808 while headt or merges:
807 while headt or merges:
809 if merges:
808 if merges:
810 n, found = merges.pop()
809 n, found = merges.pop()
811 visit = [n]
810 visit = [n]
812 else:
811 else:
813 h = headt.pop()
812 h = headt.pop()
814 visit = [h]
813 visit = [h]
815 found = [h]
814 found = [h]
816 seen = {}
815 seen = {}
817 while visit:
816 while visit:
818 n = visit.pop()
817 n = visit.pop()
819 if n in seen:
818 if n in seen:
820 continue
819 continue
821 pp = chlog.parents(n)
820 pp = chlog.parents(n)
822 tags = self.nodetags(n)
821 tags = self.nodetags(n)
823 if tags:
822 if tags:
824 for x in tags:
823 for x in tags:
825 if x == 'tip':
824 if x == 'tip':
826 continue
825 continue
827 for f in found:
826 for f in found:
828 branches.setdefault(f, {})[n] = 1
827 branches.setdefault(f, {})[n] = 1
829 branches.setdefault(n, {})[n] = 1
828 branches.setdefault(n, {})[n] = 1
830 break
829 break
831 if n not in found:
830 if n not in found:
832 found.append(n)
831 found.append(n)
833 if branch in tags:
832 if branch in tags:
834 continue
833 continue
835 seen[n] = 1
834 seen[n] = 1
836 if pp[1] != nullid and n not in seenmerge:
835 if pp[1] != nullid and n not in seenmerge:
837 merges.append((pp[1], [x for x in found]))
836 merges.append((pp[1], [x for x in found]))
838 seenmerge[n] = 1
837 seenmerge[n] = 1
839 if pp[0] != nullid:
838 if pp[0] != nullid:
840 visit.append(pp[0])
839 visit.append(pp[0])
841 # traverse the branches dict, eliminating branch tags from each
840 # traverse the branches dict, eliminating branch tags from each
842 # head that are visible from another branch tag for that head.
841 # head that are visible from another branch tag for that head.
843 out = {}
842 out = {}
844 viscache = {}
843 viscache = {}
845 for h in heads:
844 for h in heads:
846 def visible(node):
845 def visible(node):
847 if node in viscache:
846 if node in viscache:
848 return viscache[node]
847 return viscache[node]
849 ret = {}
848 ret = {}
850 visit = [node]
849 visit = [node]
851 while visit:
850 while visit:
852 x = visit.pop()
851 x = visit.pop()
853 if x in viscache:
852 if x in viscache:
854 ret.update(viscache[x])
853 ret.update(viscache[x])
855 elif x not in ret:
854 elif x not in ret:
856 ret[x] = 1
855 ret[x] = 1
857 if x in branches:
856 if x in branches:
858 visit[len(visit):] = branches[x].keys()
857 visit[len(visit):] = branches[x].keys()
859 viscache[node] = ret
858 viscache[node] = ret
860 return ret
859 return ret
861 if h not in branches:
860 if h not in branches:
862 continue
861 continue
863 # O(n^2), but somewhat limited. This only searches the
862 # O(n^2), but somewhat limited. This only searches the
864 # tags visible from a specific head, not all the tags in the
863 # tags visible from a specific head, not all the tags in the
865 # whole repo.
864 # whole repo.
866 for b in branches[h]:
865 for b in branches[h]:
867 vis = False
866 vis = False
868 for bb in branches[h].keys():
867 for bb in branches[h].keys():
869 if b != bb:
868 if b != bb:
870 if b in visible(bb):
869 if b in visible(bb):
871 vis = True
870 vis = True
872 break
871 break
873 if not vis:
872 if not vis:
874 l = out.setdefault(h, [])
873 l = out.setdefault(h, [])
875 l[len(l):] = self.nodetags(b)
874 l[len(l):] = self.nodetags(b)
876 return out
875 return out
877
876
878 def branches(self, nodes):
877 def branches(self, nodes):
879 if not nodes:
878 if not nodes:
880 nodes = [self.changelog.tip()]
879 nodes = [self.changelog.tip()]
881 b = []
880 b = []
882 for n in nodes:
881 for n in nodes:
883 t = n
882 t = n
884 while 1:
883 while 1:
885 p = self.changelog.parents(n)
884 p = self.changelog.parents(n)
886 if p[1] != nullid or p[0] == nullid:
885 if p[1] != nullid or p[0] == nullid:
887 b.append((t, n, p[0], p[1]))
886 b.append((t, n, p[0], p[1]))
888 break
887 break
889 n = p[0]
888 n = p[0]
890 return b
889 return b
891
890
892 def between(self, pairs):
891 def between(self, pairs):
893 r = []
892 r = []
894
893
895 for top, bottom in pairs:
894 for top, bottom in pairs:
896 n, l, i = top, [], 0
895 n, l, i = top, [], 0
897 f = 1
896 f = 1
898
897
899 while n != bottom:
898 while n != bottom:
900 p = self.changelog.parents(n)[0]
899 p = self.changelog.parents(n)[0]
901 if i == f:
900 if i == f:
902 l.append(n)
901 l.append(n)
903 f = f * 2
902 f = f * 2
904 n = p
903 n = p
905 i += 1
904 i += 1
906
905
907 r.append(l)
906 r.append(l)
908
907
909 return r
908 return r
910
909
911 def findincoming(self, remote, base=None, heads=None, force=False):
910 def findincoming(self, remote, base=None, heads=None, force=False):
912 """Return list of roots of the subsets of missing nodes from remote
911 """Return list of roots of the subsets of missing nodes from remote
913
912
914 If base dict is specified, assume that these nodes and their parents
913 If base dict is specified, assume that these nodes and their parents
915 exist on the remote side and that no child of a node of base exists
914 exist on the remote side and that no child of a node of base exists
916 in both remote and self.
915 in both remote and self.
917 Furthermore base will be updated to include the nodes that exists
916 Furthermore base will be updated to include the nodes that exists
918 in self and remote but no children exists in self and remote.
917 in self and remote but no children exists in self and remote.
919 If a list of heads is specified, return only nodes which are heads
918 If a list of heads is specified, return only nodes which are heads
920 or ancestors of these heads.
919 or ancestors of these heads.
921
920
922 All the ancestors of base are in self and in remote.
921 All the ancestors of base are in self and in remote.
923 All the descendants of the list returned are missing in self.
922 All the descendants of the list returned are missing in self.
924 (and so we know that the rest of the nodes are missing in remote, see
923 (and so we know that the rest of the nodes are missing in remote, see
925 outgoing)
924 outgoing)
926 """
925 """
927 m = self.changelog.nodemap
926 m = self.changelog.nodemap
928 search = []
927 search = []
929 fetch = {}
928 fetch = {}
930 seen = {}
929 seen = {}
931 seenbranch = {}
930 seenbranch = {}
932 if base == None:
931 if base == None:
933 base = {}
932 base = {}
934
933
935 if not heads:
934 if not heads:
936 heads = remote.heads()
935 heads = remote.heads()
937
936
938 if self.changelog.tip() == nullid:
937 if self.changelog.tip() == nullid:
939 base[nullid] = 1
938 base[nullid] = 1
940 if heads != [nullid]:
939 if heads != [nullid]:
941 return [nullid]
940 return [nullid]
942 return []
941 return []
943
942
944 # assume we're closer to the tip than the root
943 # assume we're closer to the tip than the root
945 # and start by examining the heads
944 # and start by examining the heads
946 self.ui.status(_("searching for changes\n"))
945 self.ui.status(_("searching for changes\n"))
947
946
948 unknown = []
947 unknown = []
949 for h in heads:
948 for h in heads:
950 if h not in m:
949 if h not in m:
951 unknown.append(h)
950 unknown.append(h)
952 else:
951 else:
953 base[h] = 1
952 base[h] = 1
954
953
955 if not unknown:
954 if not unknown:
956 return []
955 return []
957
956
958 req = dict.fromkeys(unknown)
957 req = dict.fromkeys(unknown)
959 reqcnt = 0
958 reqcnt = 0
960
959
961 # search through remote branches
960 # search through remote branches
962 # a 'branch' here is a linear segment of history, with four parts:
961 # a 'branch' here is a linear segment of history, with four parts:
963 # head, root, first parent, second parent
962 # head, root, first parent, second parent
964 # (a branch always has two parents (or none) by definition)
963 # (a branch always has two parents (or none) by definition)
965 unknown = remote.branches(unknown)
964 unknown = remote.branches(unknown)
966 while unknown:
965 while unknown:
967 r = []
966 r = []
968 while unknown:
967 while unknown:
969 n = unknown.pop(0)
968 n = unknown.pop(0)
970 if n[0] in seen:
969 if n[0] in seen:
971 continue
970 continue
972
971
973 self.ui.debug(_("examining %s:%s\n")
972 self.ui.debug(_("examining %s:%s\n")
974 % (short(n[0]), short(n[1])))
973 % (short(n[0]), short(n[1])))
975 if n[0] == nullid: # found the end of the branch
974 if n[0] == nullid: # found the end of the branch
976 pass
975 pass
977 elif n in seenbranch:
976 elif n in seenbranch:
978 self.ui.debug(_("branch already found\n"))
977 self.ui.debug(_("branch already found\n"))
979 continue
978 continue
980 elif n[1] and n[1] in m: # do we know the base?
979 elif n[1] and n[1] in m: # do we know the base?
981 self.ui.debug(_("found incomplete branch %s:%s\n")
980 self.ui.debug(_("found incomplete branch %s:%s\n")
982 % (short(n[0]), short(n[1])))
981 % (short(n[0]), short(n[1])))
983 search.append(n) # schedule branch range for scanning
982 search.append(n) # schedule branch range for scanning
984 seenbranch[n] = 1
983 seenbranch[n] = 1
985 else:
984 else:
986 if n[1] not in seen and n[1] not in fetch:
985 if n[1] not in seen and n[1] not in fetch:
987 if n[2] in m and n[3] in m:
986 if n[2] in m and n[3] in m:
988 self.ui.debug(_("found new changeset %s\n") %
987 self.ui.debug(_("found new changeset %s\n") %
989 short(n[1]))
988 short(n[1]))
990 fetch[n[1]] = 1 # earliest unknown
989 fetch[n[1]] = 1 # earliest unknown
991 for p in n[2:4]:
990 for p in n[2:4]:
992 if p in m:
991 if p in m:
993 base[p] = 1 # latest known
992 base[p] = 1 # latest known
994
993
995 for p in n[2:4]:
994 for p in n[2:4]:
996 if p not in req and p not in m:
995 if p not in req and p not in m:
997 r.append(p)
996 r.append(p)
998 req[p] = 1
997 req[p] = 1
999 seen[n[0]] = 1
998 seen[n[0]] = 1
1000
999
1001 if r:
1000 if r:
1002 reqcnt += 1
1001 reqcnt += 1
1003 self.ui.debug(_("request %d: %s\n") %
1002 self.ui.debug(_("request %d: %s\n") %
1004 (reqcnt, " ".join(map(short, r))))
1003 (reqcnt, " ".join(map(short, r))))
1005 for p in range(0, len(r), 10):
1004 for p in range(0, len(r), 10):
1006 for b in remote.branches(r[p:p+10]):
1005 for b in remote.branches(r[p:p+10]):
1007 self.ui.debug(_("received %s:%s\n") %
1006 self.ui.debug(_("received %s:%s\n") %
1008 (short(b[0]), short(b[1])))
1007 (short(b[0]), short(b[1])))
1009 unknown.append(b)
1008 unknown.append(b)
1010
1009
1011 # do binary search on the branches we found
1010 # do binary search on the branches we found
1012 while search:
1011 while search:
1013 n = search.pop(0)
1012 n = search.pop(0)
1014 reqcnt += 1
1013 reqcnt += 1
1015 l = remote.between([(n[0], n[1])])[0]
1014 l = remote.between([(n[0], n[1])])[0]
1016 l.append(n[1])
1015 l.append(n[1])
1017 p = n[0]
1016 p = n[0]
1018 f = 1
1017 f = 1
1019 for i in l:
1018 for i in l:
1020 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1019 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1021 if i in m:
1020 if i in m:
1022 if f <= 2:
1021 if f <= 2:
1023 self.ui.debug(_("found new branch changeset %s\n") %
1022 self.ui.debug(_("found new branch changeset %s\n") %
1024 short(p))
1023 short(p))
1025 fetch[p] = 1
1024 fetch[p] = 1
1026 base[i] = 1
1025 base[i] = 1
1027 else:
1026 else:
1028 self.ui.debug(_("narrowed branch search to %s:%s\n")
1027 self.ui.debug(_("narrowed branch search to %s:%s\n")
1029 % (short(p), short(i)))
1028 % (short(p), short(i)))
1030 search.append((p, i))
1029 search.append((p, i))
1031 break
1030 break
1032 p, f = i, f * 2
1031 p, f = i, f * 2
1033
1032
1034 # sanity check our fetch list
1033 # sanity check our fetch list
1035 for f in fetch.keys():
1034 for f in fetch.keys():
1036 if f in m:
1035 if f in m:
1037 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1036 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1038
1037
1039 if base.keys() == [nullid]:
1038 if base.keys() == [nullid]:
1040 if force:
1039 if force:
1041 self.ui.warn(_("warning: repository is unrelated\n"))
1040 self.ui.warn(_("warning: repository is unrelated\n"))
1042 else:
1041 else:
1043 raise util.Abort(_("repository is unrelated"))
1042 raise util.Abort(_("repository is unrelated"))
1044
1043
1045 self.ui.note(_("found new changesets starting at ") +
1044 self.ui.note(_("found new changesets starting at ") +
1046 " ".join([short(f) for f in fetch]) + "\n")
1045 " ".join([short(f) for f in fetch]) + "\n")
1047
1046
1048 self.ui.debug(_("%d total queries\n") % reqcnt)
1047 self.ui.debug(_("%d total queries\n") % reqcnt)
1049
1048
1050 return fetch.keys()
1049 return fetch.keys()
1051
1050
1052 def findoutgoing(self, remote, base=None, heads=None, force=False):
1051 def findoutgoing(self, remote, base=None, heads=None, force=False):
1053 """Return list of nodes that are roots of subsets not in remote
1052 """Return list of nodes that are roots of subsets not in remote
1054
1053
1055 If base dict is specified, assume that these nodes and their parents
1054 If base dict is specified, assume that these nodes and their parents
1056 exist on the remote side.
1055 exist on the remote side.
1057 If a list of heads is specified, return only nodes which are heads
1056 If a list of heads is specified, return only nodes which are heads
1058 or ancestors of these heads, and return a second element which
1057 or ancestors of these heads, and return a second element which
1059 contains all remote heads which get new children.
1058 contains all remote heads which get new children.
1060 """
1059 """
1061 if base == None:
1060 if base == None:
1062 base = {}
1061 base = {}
1063 self.findincoming(remote, base, heads, force=force)
1062 self.findincoming(remote, base, heads, force=force)
1064
1063
1065 self.ui.debug(_("common changesets up to ")
1064 self.ui.debug(_("common changesets up to ")
1066 + " ".join(map(short, base.keys())) + "\n")
1065 + " ".join(map(short, base.keys())) + "\n")
1067
1066
1068 remain = dict.fromkeys(self.changelog.nodemap)
1067 remain = dict.fromkeys(self.changelog.nodemap)
1069
1068
1070 # prune everything remote has from the tree
1069 # prune everything remote has from the tree
1071 del remain[nullid]
1070 del remain[nullid]
1072 remove = base.keys()
1071 remove = base.keys()
1073 while remove:
1072 while remove:
1074 n = remove.pop(0)
1073 n = remove.pop(0)
1075 if n in remain:
1074 if n in remain:
1076 del remain[n]
1075 del remain[n]
1077 for p in self.changelog.parents(n):
1076 for p in self.changelog.parents(n):
1078 remove.append(p)
1077 remove.append(p)
1079
1078
1080 # find every node whose parents have been pruned
1079 # find every node whose parents have been pruned
1081 subset = []
1080 subset = []
1082 # find every remote head that will get new children
1081 # find every remote head that will get new children
1083 updated_heads = {}
1082 updated_heads = {}
1084 for n in remain:
1083 for n in remain:
1085 p1, p2 = self.changelog.parents(n)
1084 p1, p2 = self.changelog.parents(n)
1086 if p1 not in remain and p2 not in remain:
1085 if p1 not in remain and p2 not in remain:
1087 subset.append(n)
1086 subset.append(n)
1088 if heads:
1087 if heads:
1089 if p1 in heads:
1088 if p1 in heads:
1090 updated_heads[p1] = True
1089 updated_heads[p1] = True
1091 if p2 in heads:
1090 if p2 in heads:
1092 updated_heads[p2] = True
1091 updated_heads[p2] = True
1093
1092
1094 # this is the set of all roots we have to push
1093 # this is the set of all roots we have to push
1095 if heads:
1094 if heads:
1096 return subset, updated_heads.keys()
1095 return subset, updated_heads.keys()
1097 else:
1096 else:
1098 return subset
1097 return subset
1099
1098
1100 def pull(self, remote, heads=None, force=False):
1099 def pull(self, remote, heads=None, force=False):
1101 l = self.lock()
1100 l = self.lock()
1102
1101
1103 fetch = self.findincoming(remote, force=force)
1102 fetch = self.findincoming(remote, force=force)
1104 if fetch == [nullid]:
1103 if fetch == [nullid]:
1105 self.ui.status(_("requesting all changes\n"))
1104 self.ui.status(_("requesting all changes\n"))
1106
1105
1107 if not fetch:
1106 if not fetch:
1108 self.ui.status(_("no changes found\n"))
1107 self.ui.status(_("no changes found\n"))
1109 return 0
1108 return 0
1110
1109
1111 if heads is None:
1110 if heads is None:
1112 cg = remote.changegroup(fetch, 'pull')
1111 cg = remote.changegroup(fetch, 'pull')
1113 else:
1112 else:
1114 cg = remote.changegroupsubset(fetch, heads, 'pull')
1113 cg = remote.changegroupsubset(fetch, heads, 'pull')
1115 return self.addchangegroup(cg, 'pull')
1114 return self.addchangegroup(cg, 'pull')
1116
1115
1117 def push(self, remote, force=False, revs=None):
1116 def push(self, remote, force=False, revs=None):
1118 # there are two ways to push to remote repo:
1117 # there are two ways to push to remote repo:
1119 #
1118 #
1120 # addchangegroup assumes local user can lock remote
1119 # addchangegroup assumes local user can lock remote
1121 # repo (local filesystem, old ssh servers).
1120 # repo (local filesystem, old ssh servers).
1122 #
1121 #
1123 # unbundle assumes local user cannot lock remote repo (new ssh
1122 # unbundle assumes local user cannot lock remote repo (new ssh
1124 # servers, http servers).
1123 # servers, http servers).
1125
1124
1126 if 'unbundle' in remote.capabilities:
1125 if 'unbundle' in remote.capabilities:
1127 return self.push_unbundle(remote, force, revs)
1126 return self.push_unbundle(remote, force, revs)
1128 return self.push_addchangegroup(remote, force, revs)
1127 return self.push_addchangegroup(remote, force, revs)
1129
1128
1130 def prepush(self, remote, force, revs):
1129 def prepush(self, remote, force, revs):
1131 base = {}
1130 base = {}
1132 remote_heads = remote.heads()
1131 remote_heads = remote.heads()
1133 inc = self.findincoming(remote, base, remote_heads, force=force)
1132 inc = self.findincoming(remote, base, remote_heads, force=force)
1134 if not force and inc:
1133 if not force and inc:
1135 self.ui.warn(_("abort: unsynced remote changes!\n"))
1134 self.ui.warn(_("abort: unsynced remote changes!\n"))
1136 self.ui.status(_("(did you forget to sync?"
1135 self.ui.status(_("(did you forget to sync?"
1137 " use push -f to force)\n"))
1136 " use push -f to force)\n"))
1138 return None, 1
1137 return None, 1
1139
1138
1140 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1139 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1141 if revs is not None:
1140 if revs is not None:
1142 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1141 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1143 else:
1142 else:
1144 bases, heads = update, self.changelog.heads()
1143 bases, heads = update, self.changelog.heads()
1145
1144
1146 if not bases:
1145 if not bases:
1147 self.ui.status(_("no changes found\n"))
1146 self.ui.status(_("no changes found\n"))
1148 return None, 1
1147 return None, 1
1149 elif not force:
1148 elif not force:
1150 # FIXME we don't properly detect creation of new heads
1149 # FIXME we don't properly detect creation of new heads
1151 # in the push -r case, assume the user knows what he's doing
1150 # in the push -r case, assume the user knows what he's doing
1152 if not revs and len(remote_heads) < len(heads) \
1151 if not revs and len(remote_heads) < len(heads) \
1153 and remote_heads != [nullid]:
1152 and remote_heads != [nullid]:
1154 self.ui.warn(_("abort: push creates new remote branches!\n"))
1153 self.ui.warn(_("abort: push creates new remote branches!\n"))
1155 self.ui.status(_("(did you forget to merge?"
1154 self.ui.status(_("(did you forget to merge?"
1156 " use push -f to force)\n"))
1155 " use push -f to force)\n"))
1157 return None, 1
1156 return None, 1
1158
1157
1159 if revs is None:
1158 if revs is None:
1160 cg = self.changegroup(update, 'push')
1159 cg = self.changegroup(update, 'push')
1161 else:
1160 else:
1162 cg = self.changegroupsubset(update, revs, 'push')
1161 cg = self.changegroupsubset(update, revs, 'push')
1163 return cg, remote_heads
1162 return cg, remote_heads
1164
1163
1165 def push_addchangegroup(self, remote, force, revs):
1164 def push_addchangegroup(self, remote, force, revs):
1166 lock = remote.lock()
1165 lock = remote.lock()
1167
1166
1168 ret = self.prepush(remote, force, revs)
1167 ret = self.prepush(remote, force, revs)
1169 if ret[0] is not None:
1168 if ret[0] is not None:
1170 cg, remote_heads = ret
1169 cg, remote_heads = ret
1171 return remote.addchangegroup(cg, 'push')
1170 return remote.addchangegroup(cg, 'push')
1172 return ret[1]
1171 return ret[1]
1173
1172
1174 def push_unbundle(self, remote, force, revs):
1173 def push_unbundle(self, remote, force, revs):
1175 # local repo finds heads on server, finds out what revs it
1174 # local repo finds heads on server, finds out what revs it
1176 # must push. once revs transferred, if server finds it has
1175 # must push. once revs transferred, if server finds it has
1177 # different heads (someone else won commit/push race), server
1176 # different heads (someone else won commit/push race), server
1178 # aborts.
1177 # aborts.
1179
1178
1180 ret = self.prepush(remote, force, revs)
1179 ret = self.prepush(remote, force, revs)
1181 if ret[0] is not None:
1180 if ret[0] is not None:
1182 cg, remote_heads = ret
1181 cg, remote_heads = ret
1183 if force: remote_heads = ['force']
1182 if force: remote_heads = ['force']
1184 return remote.unbundle(cg, remote_heads, 'push')
1183 return remote.unbundle(cg, remote_heads, 'push')
1185 return ret[1]
1184 return ret[1]
1186
1185
1187 def changegroupsubset(self, bases, heads, source):
1186 def changegroupsubset(self, bases, heads, source):
1188 """This function generates a changegroup consisting of all the nodes
1187 """This function generates a changegroup consisting of all the nodes
1189 that are descendents of any of the bases, and ancestors of any of
1188 that are descendents of any of the bases, and ancestors of any of
1190 the heads.
1189 the heads.
1191
1190
1192 It is fairly complex as determining which filenodes and which
1191 It is fairly complex as determining which filenodes and which
1193 manifest nodes need to be included for the changeset to be complete
1192 manifest nodes need to be included for the changeset to be complete
1194 is non-trivial.
1193 is non-trivial.
1195
1194
1196 Another wrinkle is doing the reverse, figuring out which changeset in
1195 Another wrinkle is doing the reverse, figuring out which changeset in
1197 the changegroup a particular filenode or manifestnode belongs to."""
1196 the changegroup a particular filenode or manifestnode belongs to."""
1198
1197
1199 self.hook('preoutgoing', throw=True, source=source)
1198 self.hook('preoutgoing', throw=True, source=source)
1200
1199
1201 # Set up some initial variables
1200 # Set up some initial variables
1202 # Make it easy to refer to self.changelog
1201 # Make it easy to refer to self.changelog
1203 cl = self.changelog
1202 cl = self.changelog
1204 # msng is short for missing - compute the list of changesets in this
1203 # msng is short for missing - compute the list of changesets in this
1205 # changegroup.
1204 # changegroup.
1206 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1205 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1207 # Some bases may turn out to be superfluous, and some heads may be
1206 # Some bases may turn out to be superfluous, and some heads may be
1208 # too. nodesbetween will return the minimal set of bases and heads
1207 # too. nodesbetween will return the minimal set of bases and heads
1209 # necessary to re-create the changegroup.
1208 # necessary to re-create the changegroup.
1210
1209
1211 # Known heads are the list of heads that it is assumed the recipient
1210 # Known heads are the list of heads that it is assumed the recipient
1212 # of this changegroup will know about.
1211 # of this changegroup will know about.
1213 knownheads = {}
1212 knownheads = {}
1214 # We assume that all parents of bases are known heads.
1213 # We assume that all parents of bases are known heads.
1215 for n in bases:
1214 for n in bases:
1216 for p in cl.parents(n):
1215 for p in cl.parents(n):
1217 if p != nullid:
1216 if p != nullid:
1218 knownheads[p] = 1
1217 knownheads[p] = 1
1219 knownheads = knownheads.keys()
1218 knownheads = knownheads.keys()
1220 if knownheads:
1219 if knownheads:
1221 # Now that we know what heads are known, we can compute which
1220 # Now that we know what heads are known, we can compute which
1222 # changesets are known. The recipient must know about all
1221 # changesets are known. The recipient must know about all
1223 # changesets required to reach the known heads from the null
1222 # changesets required to reach the known heads from the null
1224 # changeset.
1223 # changeset.
1225 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1224 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1226 junk = None
1225 junk = None
1227 # Transform the list into an ersatz set.
1226 # Transform the list into an ersatz set.
1228 has_cl_set = dict.fromkeys(has_cl_set)
1227 has_cl_set = dict.fromkeys(has_cl_set)
1229 else:
1228 else:
1230 # If there were no known heads, the recipient cannot be assumed to
1229 # If there were no known heads, the recipient cannot be assumed to
1231 # know about any changesets.
1230 # know about any changesets.
1232 has_cl_set = {}
1231 has_cl_set = {}
1233
1232
1234 # Make it easy to refer to self.manifest
1233 # Make it easy to refer to self.manifest
1235 mnfst = self.manifest
1234 mnfst = self.manifest
1236 # We don't know which manifests are missing yet
1235 # We don't know which manifests are missing yet
1237 msng_mnfst_set = {}
1236 msng_mnfst_set = {}
1238 # Nor do we know which filenodes are missing.
1237 # Nor do we know which filenodes are missing.
1239 msng_filenode_set = {}
1238 msng_filenode_set = {}
1240
1239
1241 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1240 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1242 junk = None
1241 junk = None
1243
1242
1244 # A changeset always belongs to itself, so the changenode lookup
1243 # A changeset always belongs to itself, so the changenode lookup
1245 # function for a changenode is identity.
1244 # function for a changenode is identity.
1246 def identity(x):
1245 def identity(x):
1247 return x
1246 return x
1248
1247
1249 # A function generating function. Sets up an environment for the
1248 # A function generating function. Sets up an environment for the
1250 # inner function.
1249 # inner function.
1251 def cmp_by_rev_func(revlog):
1250 def cmp_by_rev_func(revlog):
1252 # Compare two nodes by their revision number in the environment's
1251 # Compare two nodes by their revision number in the environment's
1253 # revision history. Since the revision number both represents the
1252 # revision history. Since the revision number both represents the
1254 # most efficient order to read the nodes in, and represents a
1253 # most efficient order to read the nodes in, and represents a
1255 # topological sorting of the nodes, this function is often useful.
1254 # topological sorting of the nodes, this function is often useful.
1256 def cmp_by_rev(a, b):
1255 def cmp_by_rev(a, b):
1257 return cmp(revlog.rev(a), revlog.rev(b))
1256 return cmp(revlog.rev(a), revlog.rev(b))
1258 return cmp_by_rev
1257 return cmp_by_rev
1259
1258
1260 # If we determine that a particular file or manifest node must be a
1259 # If we determine that a particular file or manifest node must be a
1261 # node that the recipient of the changegroup will already have, we can
1260 # node that the recipient of the changegroup will already have, we can
1262 # also assume the recipient will have all the parents. This function
1261 # also assume the recipient will have all the parents. This function
1263 # prunes them from the set of missing nodes.
1262 # prunes them from the set of missing nodes.
1264 def prune_parents(revlog, hasset, msngset):
1263 def prune_parents(revlog, hasset, msngset):
1265 haslst = hasset.keys()
1264 haslst = hasset.keys()
1266 haslst.sort(cmp_by_rev_func(revlog))
1265 haslst.sort(cmp_by_rev_func(revlog))
1267 for node in haslst:
1266 for node in haslst:
1268 parentlst = [p for p in revlog.parents(node) if p != nullid]
1267 parentlst = [p for p in revlog.parents(node) if p != nullid]
1269 while parentlst:
1268 while parentlst:
1270 n = parentlst.pop()
1269 n = parentlst.pop()
1271 if n not in hasset:
1270 if n not in hasset:
1272 hasset[n] = 1
1271 hasset[n] = 1
1273 p = [p for p in revlog.parents(n) if p != nullid]
1272 p = [p for p in revlog.parents(n) if p != nullid]
1274 parentlst.extend(p)
1273 parentlst.extend(p)
1275 for n in hasset:
1274 for n in hasset:
1276 msngset.pop(n, None)
1275 msngset.pop(n, None)
1277
1276
1278 # This is a function generating function used to set up an environment
1277 # This is a function generating function used to set up an environment
1279 # for the inner function to execute in.
1278 # for the inner function to execute in.
1280 def manifest_and_file_collector(changedfileset):
1279 def manifest_and_file_collector(changedfileset):
1281 # This is an information gathering function that gathers
1280 # This is an information gathering function that gathers
1282 # information from each changeset node that goes out as part of
1281 # information from each changeset node that goes out as part of
1283 # the changegroup. The information gathered is a list of which
1282 # the changegroup. The information gathered is a list of which
1284 # manifest nodes are potentially required (the recipient may
1283 # manifest nodes are potentially required (the recipient may
1285 # already have them) and total list of all files which were
1284 # already have them) and total list of all files which were
1286 # changed in any changeset in the changegroup.
1285 # changed in any changeset in the changegroup.
1287 #
1286 #
1288 # We also remember the first changenode we saw any manifest
1287 # We also remember the first changenode we saw any manifest
1289 # referenced by so we can later determine which changenode 'owns'
1288 # referenced by so we can later determine which changenode 'owns'
1290 # the manifest.
1289 # the manifest.
1291 def collect_manifests_and_files(clnode):
1290 def collect_manifests_and_files(clnode):
1292 c = cl.read(clnode)
1291 c = cl.read(clnode)
1293 for f in c[3]:
1292 for f in c[3]:
1294 # This is to make sure we only have one instance of each
1293 # This is to make sure we only have one instance of each
1295 # filename string for each filename.
1294 # filename string for each filename.
1296 changedfileset.setdefault(f, f)
1295 changedfileset.setdefault(f, f)
1297 msng_mnfst_set.setdefault(c[0], clnode)
1296 msng_mnfst_set.setdefault(c[0], clnode)
1298 return collect_manifests_and_files
1297 return collect_manifests_and_files
1299
1298
1300 # Figure out which manifest nodes (of the ones we think might be part
1299 # Figure out which manifest nodes (of the ones we think might be part
1301 # of the changegroup) the recipient must know about and remove them
1300 # of the changegroup) the recipient must know about and remove them
1302 # from the changegroup.
1301 # from the changegroup.
1303 def prune_manifests():
1302 def prune_manifests():
1304 has_mnfst_set = {}
1303 has_mnfst_set = {}
1305 for n in msng_mnfst_set:
1304 for n in msng_mnfst_set:
1306 # If a 'missing' manifest thinks it belongs to a changenode
1305 # If a 'missing' manifest thinks it belongs to a changenode
1307 # the recipient is assumed to have, obviously the recipient
1306 # the recipient is assumed to have, obviously the recipient
1308 # must have that manifest.
1307 # must have that manifest.
1309 linknode = cl.node(mnfst.linkrev(n))
1308 linknode = cl.node(mnfst.linkrev(n))
1310 if linknode in has_cl_set:
1309 if linknode in has_cl_set:
1311 has_mnfst_set[n] = 1
1310 has_mnfst_set[n] = 1
1312 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1311 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1313
1312
1314 # Use the information collected in collect_manifests_and_files to say
1313 # Use the information collected in collect_manifests_and_files to say
1315 # which changenode any manifestnode belongs to.
1314 # which changenode any manifestnode belongs to.
1316 def lookup_manifest_link(mnfstnode):
1315 def lookup_manifest_link(mnfstnode):
1317 return msng_mnfst_set[mnfstnode]
1316 return msng_mnfst_set[mnfstnode]
1318
1317
1319 # A function generating function that sets up the initial environment
1318 # A function generating function that sets up the initial environment
1320 # the inner function.
1319 # the inner function.
1321 def filenode_collector(changedfiles):
1320 def filenode_collector(changedfiles):
1322 next_rev = [0]
1321 next_rev = [0]
1323 # This gathers information from each manifestnode included in the
1322 # This gathers information from each manifestnode included in the
1324 # changegroup about which filenodes the manifest node references
1323 # changegroup about which filenodes the manifest node references
1325 # so we can include those in the changegroup too.
1324 # so we can include those in the changegroup too.
1326 #
1325 #
1327 # It also remembers which changenode each filenode belongs to. It
1326 # It also remembers which changenode each filenode belongs to. It
1328 # does this by assuming the a filenode belongs to the changenode
1327 # does this by assuming the a filenode belongs to the changenode
1329 # the first manifest that references it belongs to.
1328 # the first manifest that references it belongs to.
1330 def collect_msng_filenodes(mnfstnode):
1329 def collect_msng_filenodes(mnfstnode):
1331 r = mnfst.rev(mnfstnode)
1330 r = mnfst.rev(mnfstnode)
1332 if r == next_rev[0]:
1331 if r == next_rev[0]:
1333 # If the last rev we looked at was the one just previous,
1332 # If the last rev we looked at was the one just previous,
1334 # we only need to see a diff.
1333 # we only need to see a diff.
1335 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1334 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1336 # For each line in the delta
1335 # For each line in the delta
1337 for dline in delta.splitlines():
1336 for dline in delta.splitlines():
1338 # get the filename and filenode for that line
1337 # get the filename and filenode for that line
1339 f, fnode = dline.split('\0')
1338 f, fnode = dline.split('\0')
1340 fnode = bin(fnode[:40])
1339 fnode = bin(fnode[:40])
1341 f = changedfiles.get(f, None)
1340 f = changedfiles.get(f, None)
1342 # And if the file is in the list of files we care
1341 # And if the file is in the list of files we care
1343 # about.
1342 # about.
1344 if f is not None:
1343 if f is not None:
1345 # Get the changenode this manifest belongs to
1344 # Get the changenode this manifest belongs to
1346 clnode = msng_mnfst_set[mnfstnode]
1345 clnode = msng_mnfst_set[mnfstnode]
1347 # Create the set of filenodes for the file if
1346 # Create the set of filenodes for the file if
1348 # there isn't one already.
1347 # there isn't one already.
1349 ndset = msng_filenode_set.setdefault(f, {})
1348 ndset = msng_filenode_set.setdefault(f, {})
1350 # And set the filenode's changelog node to the
1349 # And set the filenode's changelog node to the
1351 # manifest's if it hasn't been set already.
1350 # manifest's if it hasn't been set already.
1352 ndset.setdefault(fnode, clnode)
1351 ndset.setdefault(fnode, clnode)
1353 else:
1352 else:
1354 # Otherwise we need a full manifest.
1353 # Otherwise we need a full manifest.
1355 m = mnfst.read(mnfstnode)
1354 m = mnfst.read(mnfstnode)
1356 # For every file in we care about.
1355 # For every file in we care about.
1357 for f in changedfiles:
1356 for f in changedfiles:
1358 fnode = m.get(f, None)
1357 fnode = m.get(f, None)
1359 # If it's in the manifest
1358 # If it's in the manifest
1360 if fnode is not None:
1359 if fnode is not None:
1361 # See comments above.
1360 # See comments above.
1362 clnode = msng_mnfst_set[mnfstnode]
1361 clnode = msng_mnfst_set[mnfstnode]
1363 ndset = msng_filenode_set.setdefault(f, {})
1362 ndset = msng_filenode_set.setdefault(f, {})
1364 ndset.setdefault(fnode, clnode)
1363 ndset.setdefault(fnode, clnode)
1365 # Remember the revision we hope to see next.
1364 # Remember the revision we hope to see next.
1366 next_rev[0] = r + 1
1365 next_rev[0] = r + 1
1367 return collect_msng_filenodes
1366 return collect_msng_filenodes
1368
1367
1369 # We have a list of filenodes we think we need for a file, lets remove
1368 # We have a list of filenodes we think we need for a file, lets remove
1370 # all those we now the recipient must have.
1369 # all those we now the recipient must have.
1371 def prune_filenodes(f, filerevlog):
1370 def prune_filenodes(f, filerevlog):
1372 msngset = msng_filenode_set[f]
1371 msngset = msng_filenode_set[f]
1373 hasset = {}
1372 hasset = {}
1374 # If a 'missing' filenode thinks it belongs to a changenode we
1373 # If a 'missing' filenode thinks it belongs to a changenode we
1375 # assume the recipient must have, then the recipient must have
1374 # assume the recipient must have, then the recipient must have
1376 # that filenode.
1375 # that filenode.
1377 for n in msngset:
1376 for n in msngset:
1378 clnode = cl.node(filerevlog.linkrev(n))
1377 clnode = cl.node(filerevlog.linkrev(n))
1379 if clnode in has_cl_set:
1378 if clnode in has_cl_set:
1380 hasset[n] = 1
1379 hasset[n] = 1
1381 prune_parents(filerevlog, hasset, msngset)
1380 prune_parents(filerevlog, hasset, msngset)
1382
1381
1383 # A function generator function that sets up the a context for the
1382 # A function generator function that sets up the a context for the
1384 # inner function.
1383 # inner function.
1385 def lookup_filenode_link_func(fname):
1384 def lookup_filenode_link_func(fname):
1386 msngset = msng_filenode_set[fname]
1385 msngset = msng_filenode_set[fname]
1387 # Lookup the changenode the filenode belongs to.
1386 # Lookup the changenode the filenode belongs to.
1388 def lookup_filenode_link(fnode):
1387 def lookup_filenode_link(fnode):
1389 return msngset[fnode]
1388 return msngset[fnode]
1390 return lookup_filenode_link
1389 return lookup_filenode_link
1391
1390
1392 # Now that we have all theses utility functions to help out and
1391 # Now that we have all theses utility functions to help out and
1393 # logically divide up the task, generate the group.
1392 # logically divide up the task, generate the group.
1394 def gengroup():
1393 def gengroup():
1395 # The set of changed files starts empty.
1394 # The set of changed files starts empty.
1396 changedfiles = {}
1395 changedfiles = {}
1397 # Create a changenode group generator that will call our functions
1396 # Create a changenode group generator that will call our functions
1398 # back to lookup the owning changenode and collect information.
1397 # back to lookup the owning changenode and collect information.
1399 group = cl.group(msng_cl_lst, identity,
1398 group = cl.group(msng_cl_lst, identity,
1400 manifest_and_file_collector(changedfiles))
1399 manifest_and_file_collector(changedfiles))
1401 for chnk in group:
1400 for chnk in group:
1402 yield chnk
1401 yield chnk
1403
1402
1404 # The list of manifests has been collected by the generator
1403 # The list of manifests has been collected by the generator
1405 # calling our functions back.
1404 # calling our functions back.
1406 prune_manifests()
1405 prune_manifests()
1407 msng_mnfst_lst = msng_mnfst_set.keys()
1406 msng_mnfst_lst = msng_mnfst_set.keys()
1408 # Sort the manifestnodes by revision number.
1407 # Sort the manifestnodes by revision number.
1409 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1408 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1410 # Create a generator for the manifestnodes that calls our lookup
1409 # Create a generator for the manifestnodes that calls our lookup
1411 # and data collection functions back.
1410 # and data collection functions back.
1412 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1411 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1413 filenode_collector(changedfiles))
1412 filenode_collector(changedfiles))
1414 for chnk in group:
1413 for chnk in group:
1415 yield chnk
1414 yield chnk
1416
1415
1417 # These are no longer needed, dereference and toss the memory for
1416 # These are no longer needed, dereference and toss the memory for
1418 # them.
1417 # them.
1419 msng_mnfst_lst = None
1418 msng_mnfst_lst = None
1420 msng_mnfst_set.clear()
1419 msng_mnfst_set.clear()
1421
1420
1422 changedfiles = changedfiles.keys()
1421 changedfiles = changedfiles.keys()
1423 changedfiles.sort()
1422 changedfiles.sort()
1424 # Go through all our files in order sorted by name.
1423 # Go through all our files in order sorted by name.
1425 for fname in changedfiles:
1424 for fname in changedfiles:
1426 filerevlog = self.file(fname)
1425 filerevlog = self.file(fname)
1427 # Toss out the filenodes that the recipient isn't really
1426 # Toss out the filenodes that the recipient isn't really
1428 # missing.
1427 # missing.
1429 if msng_filenode_set.has_key(fname):
1428 if msng_filenode_set.has_key(fname):
1430 prune_filenodes(fname, filerevlog)
1429 prune_filenodes(fname, filerevlog)
1431 msng_filenode_lst = msng_filenode_set[fname].keys()
1430 msng_filenode_lst = msng_filenode_set[fname].keys()
1432 else:
1431 else:
1433 msng_filenode_lst = []
1432 msng_filenode_lst = []
1434 # If any filenodes are left, generate the group for them,
1433 # If any filenodes are left, generate the group for them,
1435 # otherwise don't bother.
1434 # otherwise don't bother.
1436 if len(msng_filenode_lst) > 0:
1435 if len(msng_filenode_lst) > 0:
1437 yield changegroup.genchunk(fname)
1436 yield changegroup.genchunk(fname)
1438 # Sort the filenodes by their revision #
1437 # Sort the filenodes by their revision #
1439 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1438 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1440 # Create a group generator and only pass in a changenode
1439 # Create a group generator and only pass in a changenode
1441 # lookup function as we need to collect no information
1440 # lookup function as we need to collect no information
1442 # from filenodes.
1441 # from filenodes.
1443 group = filerevlog.group(msng_filenode_lst,
1442 group = filerevlog.group(msng_filenode_lst,
1444 lookup_filenode_link_func(fname))
1443 lookup_filenode_link_func(fname))
1445 for chnk in group:
1444 for chnk in group:
1446 yield chnk
1445 yield chnk
1447 if msng_filenode_set.has_key(fname):
1446 if msng_filenode_set.has_key(fname):
1448 # Don't need this anymore, toss it to free memory.
1447 # Don't need this anymore, toss it to free memory.
1449 del msng_filenode_set[fname]
1448 del msng_filenode_set[fname]
1450 # Signal that no more groups are left.
1449 # Signal that no more groups are left.
1451 yield changegroup.closechunk()
1450 yield changegroup.closechunk()
1452
1451
1453 if msng_cl_lst:
1452 if msng_cl_lst:
1454 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1453 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1455
1454
1456 return util.chunkbuffer(gengroup())
1455 return util.chunkbuffer(gengroup())
1457
1456
1458 def changegroup(self, basenodes, source):
1457 def changegroup(self, basenodes, source):
1459 """Generate a changegroup of all nodes that we have that a recipient
1458 """Generate a changegroup of all nodes that we have that a recipient
1460 doesn't.
1459 doesn't.
1461
1460
1462 This is much easier than the previous function as we can assume that
1461 This is much easier than the previous function as we can assume that
1463 the recipient has any changenode we aren't sending them."""
1462 the recipient has any changenode we aren't sending them."""
1464
1463
1465 self.hook('preoutgoing', throw=True, source=source)
1464 self.hook('preoutgoing', throw=True, source=source)
1466
1465
1467 cl = self.changelog
1466 cl = self.changelog
1468 nodes = cl.nodesbetween(basenodes, None)[0]
1467 nodes = cl.nodesbetween(basenodes, None)[0]
1469 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1468 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1470
1469
1471 def identity(x):
1470 def identity(x):
1472 return x
1471 return x
1473
1472
1474 def gennodelst(revlog):
1473 def gennodelst(revlog):
1475 for r in xrange(0, revlog.count()):
1474 for r in xrange(0, revlog.count()):
1476 n = revlog.node(r)
1475 n = revlog.node(r)
1477 if revlog.linkrev(n) in revset:
1476 if revlog.linkrev(n) in revset:
1478 yield n
1477 yield n
1479
1478
1480 def changed_file_collector(changedfileset):
1479 def changed_file_collector(changedfileset):
1481 def collect_changed_files(clnode):
1480 def collect_changed_files(clnode):
1482 c = cl.read(clnode)
1481 c = cl.read(clnode)
1483 for fname in c[3]:
1482 for fname in c[3]:
1484 changedfileset[fname] = 1
1483 changedfileset[fname] = 1
1485 return collect_changed_files
1484 return collect_changed_files
1486
1485
1487 def lookuprevlink_func(revlog):
1486 def lookuprevlink_func(revlog):
1488 def lookuprevlink(n):
1487 def lookuprevlink(n):
1489 return cl.node(revlog.linkrev(n))
1488 return cl.node(revlog.linkrev(n))
1490 return lookuprevlink
1489 return lookuprevlink
1491
1490
1492 def gengroup():
1491 def gengroup():
1493 # construct a list of all changed files
1492 # construct a list of all changed files
1494 changedfiles = {}
1493 changedfiles = {}
1495
1494
1496 for chnk in cl.group(nodes, identity,
1495 for chnk in cl.group(nodes, identity,
1497 changed_file_collector(changedfiles)):
1496 changed_file_collector(changedfiles)):
1498 yield chnk
1497 yield chnk
1499 changedfiles = changedfiles.keys()
1498 changedfiles = changedfiles.keys()
1500 changedfiles.sort()
1499 changedfiles.sort()
1501
1500
1502 mnfst = self.manifest
1501 mnfst = self.manifest
1503 nodeiter = gennodelst(mnfst)
1502 nodeiter = gennodelst(mnfst)
1504 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1503 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1505 yield chnk
1504 yield chnk
1506
1505
1507 for fname in changedfiles:
1506 for fname in changedfiles:
1508 filerevlog = self.file(fname)
1507 filerevlog = self.file(fname)
1509 nodeiter = gennodelst(filerevlog)
1508 nodeiter = gennodelst(filerevlog)
1510 nodeiter = list(nodeiter)
1509 nodeiter = list(nodeiter)
1511 if nodeiter:
1510 if nodeiter:
1512 yield changegroup.genchunk(fname)
1511 yield changegroup.genchunk(fname)
1513 lookup = lookuprevlink_func(filerevlog)
1512 lookup = lookuprevlink_func(filerevlog)
1514 for chnk in filerevlog.group(nodeiter, lookup):
1513 for chnk in filerevlog.group(nodeiter, lookup):
1515 yield chnk
1514 yield chnk
1516
1515
1517 yield changegroup.closechunk()
1516 yield changegroup.closechunk()
1518
1517
1519 if nodes:
1518 if nodes:
1520 self.hook('outgoing', node=hex(nodes[0]), source=source)
1519 self.hook('outgoing', node=hex(nodes[0]), source=source)
1521
1520
1522 return util.chunkbuffer(gengroup())
1521 return util.chunkbuffer(gengroup())
1523
1522
1524 def addchangegroup(self, source, srctype):
1523 def addchangegroup(self, source, srctype):
1525 """add changegroup to repo.
1524 """add changegroup to repo.
1526 returns number of heads modified or added + 1."""
1525 returns number of heads modified or added + 1."""
1527
1526
1528 def csmap(x):
1527 def csmap(x):
1529 self.ui.debug(_("add changeset %s\n") % short(x))
1528 self.ui.debug(_("add changeset %s\n") % short(x))
1530 return cl.count()
1529 return cl.count()
1531
1530
1532 def revmap(x):
1531 def revmap(x):
1533 return cl.rev(x)
1532 return cl.rev(x)
1534
1533
1535 if not source:
1534 if not source:
1536 return 0
1535 return 0
1537
1536
1538 self.hook('prechangegroup', throw=True, source=srctype)
1537 self.hook('prechangegroup', throw=True, source=srctype)
1539
1538
1540 changesets = files = revisions = 0
1539 changesets = files = revisions = 0
1541
1540
1542 tr = self.transaction()
1541 tr = self.transaction()
1543
1542
1544 # write changelog data to temp files so concurrent readers will not see
1543 # write changelog data to temp files so concurrent readers will not see
1545 # inconsistent view
1544 # inconsistent view
1546 cl = None
1545 cl = None
1547 try:
1546 try:
1548 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1547 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1549
1548
1550 oldheads = len(cl.heads())
1549 oldheads = len(cl.heads())
1551
1550
1552 # pull off the changeset group
1551 # pull off the changeset group
1553 self.ui.status(_("adding changesets\n"))
1552 self.ui.status(_("adding changesets\n"))
1554 cor = cl.count() - 1
1553 cor = cl.count() - 1
1555 chunkiter = changegroup.chunkiter(source)
1554 chunkiter = changegroup.chunkiter(source)
1556 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1555 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1557 raise util.Abort(_("received changelog group is empty"))
1556 raise util.Abort(_("received changelog group is empty"))
1558 cnr = cl.count() - 1
1557 cnr = cl.count() - 1
1559 changesets = cnr - cor
1558 changesets = cnr - cor
1560
1559
1561 # pull off the manifest group
1560 # pull off the manifest group
1562 self.ui.status(_("adding manifests\n"))
1561 self.ui.status(_("adding manifests\n"))
1563 chunkiter = changegroup.chunkiter(source)
1562 chunkiter = changegroup.chunkiter(source)
1564 # no need to check for empty manifest group here:
1563 # no need to check for empty manifest group here:
1565 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1564 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1566 # no new manifest will be created and the manifest group will
1565 # no new manifest will be created and the manifest group will
1567 # be empty during the pull
1566 # be empty during the pull
1568 self.manifest.addgroup(chunkiter, revmap, tr)
1567 self.manifest.addgroup(chunkiter, revmap, tr)
1569
1568
1570 # process the files
1569 # process the files
1571 self.ui.status(_("adding file changes\n"))
1570 self.ui.status(_("adding file changes\n"))
1572 while 1:
1571 while 1:
1573 f = changegroup.getchunk(source)
1572 f = changegroup.getchunk(source)
1574 if not f:
1573 if not f:
1575 break
1574 break
1576 self.ui.debug(_("adding %s revisions\n") % f)
1575 self.ui.debug(_("adding %s revisions\n") % f)
1577 fl = self.file(f)
1576 fl = self.file(f)
1578 o = fl.count()
1577 o = fl.count()
1579 chunkiter = changegroup.chunkiter(source)
1578 chunkiter = changegroup.chunkiter(source)
1580 if fl.addgroup(chunkiter, revmap, tr) is None:
1579 if fl.addgroup(chunkiter, revmap, tr) is None:
1581 raise util.Abort(_("received file revlog group is empty"))
1580 raise util.Abort(_("received file revlog group is empty"))
1582 revisions += fl.count() - o
1581 revisions += fl.count() - o
1583 files += 1
1582 files += 1
1584
1583
1585 cl.writedata()
1584 cl.writedata()
1586 finally:
1585 finally:
1587 if cl:
1586 if cl:
1588 cl.cleanup()
1587 cl.cleanup()
1589
1588
1590 # make changelog see real files again
1589 # make changelog see real files again
1591 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1590 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1592 self.changelog.checkinlinesize(tr)
1591 self.changelog.checkinlinesize(tr)
1593
1592
1594 newheads = len(self.changelog.heads())
1593 newheads = len(self.changelog.heads())
1595 heads = ""
1594 heads = ""
1596 if oldheads and newheads != oldheads:
1595 if oldheads and newheads != oldheads:
1597 heads = _(" (%+d heads)") % (newheads - oldheads)
1596 heads = _(" (%+d heads)") % (newheads - oldheads)
1598
1597
1599 self.ui.status(_("added %d changesets"
1598 self.ui.status(_("added %d changesets"
1600 " with %d changes to %d files%s\n")
1599 " with %d changes to %d files%s\n")
1601 % (changesets, revisions, files, heads))
1600 % (changesets, revisions, files, heads))
1602
1601
1603 if changesets > 0:
1602 if changesets > 0:
1604 self.hook('pretxnchangegroup', throw=True,
1603 self.hook('pretxnchangegroup', throw=True,
1605 node=hex(self.changelog.node(cor+1)), source=srctype)
1604 node=hex(self.changelog.node(cor+1)), source=srctype)
1606
1605
1607 tr.close()
1606 tr.close()
1608
1607
1609 if changesets > 0:
1608 if changesets > 0:
1610 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1609 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1611 source=srctype)
1610 source=srctype)
1612
1611
1613 for i in range(cor + 1, cnr + 1):
1612 for i in range(cor + 1, cnr + 1):
1614 self.hook("incoming", node=hex(self.changelog.node(i)),
1613 self.hook("incoming", node=hex(self.changelog.node(i)),
1615 source=srctype)
1614 source=srctype)
1616
1615
1617 return newheads - oldheads + 1
1616 return newheads - oldheads + 1
1618
1617
1619 def update(self, node, allow=False, force=False, choose=None,
1618 def update(self, node, allow=False, force=False, choose=None,
1620 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1619 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1621 pl = self.dirstate.parents()
1620 pl = self.dirstate.parents()
1622 if not force and pl[1] != nullid:
1621 if not force and pl[1] != nullid:
1623 raise util.Abort(_("outstanding uncommitted merges"))
1622 raise util.Abort(_("outstanding uncommitted merges"))
1624
1623
1625 err = False
1624 err = False
1626
1625
1627 p1, p2 = pl[0], node
1626 p1, p2 = pl[0], node
1628 pa = self.changelog.ancestor(p1, p2)
1627 pa = self.changelog.ancestor(p1, p2)
1629 m1n = self.changelog.read(p1)[0]
1628 m1n = self.changelog.read(p1)[0]
1630 m2n = self.changelog.read(p2)[0]
1629 m2n = self.changelog.read(p2)[0]
1631 man = self.manifest.ancestor(m1n, m2n)
1630 man = self.manifest.ancestor(m1n, m2n)
1632 m1 = self.manifest.read(m1n)
1631 m1 = self.manifest.read(m1n)
1633 mf1 = self.manifest.readflags(m1n)
1632 mf1 = self.manifest.readflags(m1n)
1634 m2 = self.manifest.read(m2n).copy()
1633 m2 = self.manifest.read(m2n).copy()
1635 mf2 = self.manifest.readflags(m2n)
1634 mf2 = self.manifest.readflags(m2n)
1636 ma = self.manifest.read(man)
1635 ma = self.manifest.read(man)
1637 mfa = self.manifest.readflags(man)
1636 mfa = self.manifest.readflags(man)
1638
1637
1639 modified, added, removed, deleted, unknown = self.changes()
1638 modified, added, removed, deleted, unknown = self.changes()
1640
1639
1641 # is this a jump, or a merge? i.e. is there a linear path
1640 # is this a jump, or a merge? i.e. is there a linear path
1642 # from p1 to p2?
1641 # from p1 to p2?
1643 linear_path = (pa == p1 or pa == p2)
1642 linear_path = (pa == p1 or pa == p2)
1644
1643
1645 if allow and linear_path:
1644 if allow and linear_path:
1646 raise util.Abort(_("there is nothing to merge, "
1645 raise util.Abort(_("there is nothing to merge, "
1647 "just use 'hg update'"))
1646 "just use 'hg update'"))
1648 if allow and not forcemerge:
1647 if allow and not forcemerge:
1649 if modified or added or removed:
1648 if modified or added or removed:
1650 raise util.Abort(_("outstanding uncommitted changes"))
1649 raise util.Abort(_("outstanding uncommitted changes"))
1651
1650
1652 if not forcemerge and not force:
1651 if not forcemerge and not force:
1653 for f in unknown:
1652 for f in unknown:
1654 if f in m2:
1653 if f in m2:
1655 t1 = self.wread(f)
1654 t1 = self.wread(f)
1656 t2 = self.file(f).read(m2[f])
1655 t2 = self.file(f).read(m2[f])
1657 if cmp(t1, t2) != 0:
1656 if cmp(t1, t2) != 0:
1658 raise util.Abort(_("'%s' already exists in the working"
1657 raise util.Abort(_("'%s' already exists in the working"
1659 " dir and differs from remote") % f)
1658 " dir and differs from remote") % f)
1660
1659
1661 # resolve the manifest to determine which files
1660 # resolve the manifest to determine which files
1662 # we care about merging
1661 # we care about merging
1663 self.ui.note(_("resolving manifests\n"))
1662 self.ui.note(_("resolving manifests\n"))
1664 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1663 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1665 (force, allow, moddirstate, linear_path))
1664 (force, allow, moddirstate, linear_path))
1666 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1665 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1667 (short(man), short(m1n), short(m2n)))
1666 (short(man), short(m1n), short(m2n)))
1668
1667
1669 merge = {}
1668 merge = {}
1670 get = {}
1669 get = {}
1671 remove = []
1670 remove = []
1672
1671
1673 # construct a working dir manifest
1672 # construct a working dir manifest
1674 mw = m1.copy()
1673 mw = m1.copy()
1675 mfw = mf1.copy()
1674 mfw = mf1.copy()
1676 umap = dict.fromkeys(unknown)
1675 umap = dict.fromkeys(unknown)
1677
1676
1678 for f in added + modified + unknown:
1677 for f in added + modified + unknown:
1679 mw[f] = ""
1678 mw[f] = ""
1680 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1679 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1681
1680
1682 if moddirstate and not wlock:
1681 if moddirstate and not wlock:
1683 wlock = self.wlock()
1682 wlock = self.wlock()
1684
1683
1685 for f in deleted + removed:
1684 for f in deleted + removed:
1686 if f in mw:
1685 if f in mw:
1687 del mw[f]
1686 del mw[f]
1688
1687
1689 # If we're jumping between revisions (as opposed to merging),
1688 # If we're jumping between revisions (as opposed to merging),
1690 # and if neither the working directory nor the target rev has
1689 # and if neither the working directory nor the target rev has
1691 # the file, then we need to remove it from the dirstate, to
1690 # the file, then we need to remove it from the dirstate, to
1692 # prevent the dirstate from listing the file when it is no
1691 # prevent the dirstate from listing the file when it is no
1693 # longer in the manifest.
1692 # longer in the manifest.
1694 if moddirstate and linear_path and f not in m2:
1693 if moddirstate and linear_path and f not in m2:
1695 self.dirstate.forget((f,))
1694 self.dirstate.forget((f,))
1696
1695
1697 # Compare manifests
1696 # Compare manifests
1698 for f, n in mw.iteritems():
1697 for f, n in mw.iteritems():
1699 if choose and not choose(f):
1698 if choose and not choose(f):
1700 continue
1699 continue
1701 if f in m2:
1700 if f in m2:
1702 s = 0
1701 s = 0
1703
1702
1704 # is the wfile new since m1, and match m2?
1703 # is the wfile new since m1, and match m2?
1705 if f not in m1:
1704 if f not in m1:
1706 t1 = self.wread(f)
1705 t1 = self.wread(f)
1707 t2 = self.file(f).read(m2[f])
1706 t2 = self.file(f).read(m2[f])
1708 if cmp(t1, t2) == 0:
1707 if cmp(t1, t2) == 0:
1709 n = m2[f]
1708 n = m2[f]
1710 del t1, t2
1709 del t1, t2
1711
1710
1712 # are files different?
1711 # are files different?
1713 if n != m2[f]:
1712 if n != m2[f]:
1714 a = ma.get(f, nullid)
1713 a = ma.get(f, nullid)
1715 # are both different from the ancestor?
1714 # are both different from the ancestor?
1716 if n != a and m2[f] != a:
1715 if n != a and m2[f] != a:
1717 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1716 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1718 # merge executable bits
1717 # merge executable bits
1719 # "if we changed or they changed, change in merge"
1718 # "if we changed or they changed, change in merge"
1720 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1719 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1721 mode = ((a^b) | (a^c)) ^ a
1720 mode = ((a^b) | (a^c)) ^ a
1722 merge[f] = (m1.get(f, nullid), m2[f], mode)
1721 merge[f] = (m1.get(f, nullid), m2[f], mode)
1723 s = 1
1722 s = 1
1724 # are we clobbering?
1723 # are we clobbering?
1725 # is remote's version newer?
1724 # is remote's version newer?
1726 # or are we going back in time?
1725 # or are we going back in time?
1727 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1726 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1728 self.ui.debug(_(" remote %s is newer, get\n") % f)
1727 self.ui.debug(_(" remote %s is newer, get\n") % f)
1729 get[f] = m2[f]
1728 get[f] = m2[f]
1730 s = 1
1729 s = 1
1731 elif f in umap or f in added:
1730 elif f in umap or f in added:
1732 # this unknown file is the same as the checkout
1731 # this unknown file is the same as the checkout
1733 # we need to reset the dirstate if the file was added
1732 # we need to reset the dirstate if the file was added
1734 get[f] = m2[f]
1733 get[f] = m2[f]
1735
1734
1736 if not s and mfw[f] != mf2[f]:
1735 if not s and mfw[f] != mf2[f]:
1737 if force:
1736 if force:
1738 self.ui.debug(_(" updating permissions for %s\n") % f)
1737 self.ui.debug(_(" updating permissions for %s\n") % f)
1739 util.set_exec(self.wjoin(f), mf2[f])
1738 util.set_exec(self.wjoin(f), mf2[f])
1740 else:
1739 else:
1741 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1740 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1742 mode = ((a^b) | (a^c)) ^ a
1741 mode = ((a^b) | (a^c)) ^ a
1743 if mode != b:
1742 if mode != b:
1744 self.ui.debug(_(" updating permissions for %s\n")
1743 self.ui.debug(_(" updating permissions for %s\n")
1745 % f)
1744 % f)
1746 util.set_exec(self.wjoin(f), mode)
1745 util.set_exec(self.wjoin(f), mode)
1747 del m2[f]
1746 del m2[f]
1748 elif f in ma:
1747 elif f in ma:
1749 if n != ma[f]:
1748 if n != ma[f]:
1750 r = _("d")
1749 r = _("d")
1751 if not force and (linear_path or allow):
1750 if not force and (linear_path or allow):
1752 r = self.ui.prompt(
1751 r = self.ui.prompt(
1753 (_(" local changed %s which remote deleted\n") % f) +
1752 (_(" local changed %s which remote deleted\n") % f) +
1754 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1753 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1755 if r == _("d"):
1754 if r == _("d"):
1756 remove.append(f)
1755 remove.append(f)
1757 else:
1756 else:
1758 self.ui.debug(_("other deleted %s\n") % f)
1757 self.ui.debug(_("other deleted %s\n") % f)
1759 remove.append(f) # other deleted it
1758 remove.append(f) # other deleted it
1760 else:
1759 else:
1761 # file is created on branch or in working directory
1760 # file is created on branch or in working directory
1762 if force and f not in umap:
1761 if force and f not in umap:
1763 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1762 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1764 remove.append(f)
1763 remove.append(f)
1765 elif n == m1.get(f, nullid): # same as parent
1764 elif n == m1.get(f, nullid): # same as parent
1766 if p2 == pa: # going backwards?
1765 if p2 == pa: # going backwards?
1767 self.ui.debug(_("remote deleted %s\n") % f)
1766 self.ui.debug(_("remote deleted %s\n") % f)
1768 remove.append(f)
1767 remove.append(f)
1769 else:
1768 else:
1770 self.ui.debug(_("local modified %s, keeping\n") % f)
1769 self.ui.debug(_("local modified %s, keeping\n") % f)
1771 else:
1770 else:
1772 self.ui.debug(_("working dir created %s, keeping\n") % f)
1771 self.ui.debug(_("working dir created %s, keeping\n") % f)
1773
1772
1774 for f, n in m2.iteritems():
1773 for f, n in m2.iteritems():
1775 if choose and not choose(f):
1774 if choose and not choose(f):
1776 continue
1775 continue
1777 if f[0] == "/":
1776 if f[0] == "/":
1778 continue
1777 continue
1779 if f in ma and n != ma[f]:
1778 if f in ma and n != ma[f]:
1780 r = _("k")
1779 r = _("k")
1781 if not force and (linear_path or allow):
1780 if not force and (linear_path or allow):
1782 r = self.ui.prompt(
1781 r = self.ui.prompt(
1783 (_("remote changed %s which local deleted\n") % f) +
1782 (_("remote changed %s which local deleted\n") % f) +
1784 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1783 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1785 if r == _("k"):
1784 if r == _("k"):
1786 get[f] = n
1785 get[f] = n
1787 elif f not in ma:
1786 elif f not in ma:
1788 self.ui.debug(_("remote created %s\n") % f)
1787 self.ui.debug(_("remote created %s\n") % f)
1789 get[f] = n
1788 get[f] = n
1790 else:
1789 else:
1791 if force or p2 == pa: # going backwards?
1790 if force or p2 == pa: # going backwards?
1792 self.ui.debug(_("local deleted %s, recreating\n") % f)
1791 self.ui.debug(_("local deleted %s, recreating\n") % f)
1793 get[f] = n
1792 get[f] = n
1794 else:
1793 else:
1795 self.ui.debug(_("local deleted %s\n") % f)
1794 self.ui.debug(_("local deleted %s\n") % f)
1796
1795
1797 del mw, m1, m2, ma
1796 del mw, m1, m2, ma
1798
1797
1799 if force:
1798 if force:
1800 for f in merge:
1799 for f in merge:
1801 get[f] = merge[f][1]
1800 get[f] = merge[f][1]
1802 merge = {}
1801 merge = {}
1803
1802
1804 if linear_path or force:
1803 if linear_path or force:
1805 # we don't need to do any magic, just jump to the new rev
1804 # we don't need to do any magic, just jump to the new rev
1806 branch_merge = False
1805 branch_merge = False
1807 p1, p2 = p2, nullid
1806 p1, p2 = p2, nullid
1808 else:
1807 else:
1809 if not allow:
1808 if not allow:
1810 self.ui.status(_("this update spans a branch"
1809 self.ui.status(_("this update spans a branch"
1811 " affecting the following files:\n"))
1810 " affecting the following files:\n"))
1812 fl = merge.keys() + get.keys()
1811 fl = merge.keys() + get.keys()
1813 fl.sort()
1812 fl.sort()
1814 for f in fl:
1813 for f in fl:
1815 cf = ""
1814 cf = ""
1816 if f in merge:
1815 if f in merge:
1817 cf = _(" (resolve)")
1816 cf = _(" (resolve)")
1818 self.ui.status(" %s%s\n" % (f, cf))
1817 self.ui.status(" %s%s\n" % (f, cf))
1819 self.ui.warn(_("aborting update spanning branches!\n"))
1818 self.ui.warn(_("aborting update spanning branches!\n"))
1820 self.ui.status(_("(use 'hg merge' to merge across branches"
1819 self.ui.status(_("(use 'hg merge' to merge across branches"
1821 " or 'hg update -C' to lose changes)\n"))
1820 " or 'hg update -C' to lose changes)\n"))
1822 return 1
1821 return 1
1823 branch_merge = True
1822 branch_merge = True
1824
1823
1825 xp1 = hex(p1)
1824 xp1 = hex(p1)
1826 xp2 = hex(p2)
1825 xp2 = hex(p2)
1827 if p2 == nullid: xxp2 = ''
1826 if p2 == nullid: xxp2 = ''
1828 else: xxp2 = xp2
1827 else: xxp2 = xp2
1829
1828
1830 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1829 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1831
1830
1832 # get the files we don't need to change
1831 # get the files we don't need to change
1833 files = get.keys()
1832 files = get.keys()
1834 files.sort()
1833 files.sort()
1835 for f in files:
1834 for f in files:
1836 if f[0] == "/":
1835 if f[0] == "/":
1837 continue
1836 continue
1838 self.ui.note(_("getting %s\n") % f)
1837 self.ui.note(_("getting %s\n") % f)
1839 t = self.file(f).read(get[f])
1838 t = self.file(f).read(get[f])
1840 self.wwrite(f, t)
1839 self.wwrite(f, t)
1841 util.set_exec(self.wjoin(f), mf2[f])
1840 util.set_exec(self.wjoin(f), mf2[f])
1842 if moddirstate:
1841 if moddirstate:
1843 if branch_merge:
1842 if branch_merge:
1844 self.dirstate.update([f], 'n', st_mtime=-1)
1843 self.dirstate.update([f], 'n', st_mtime=-1)
1845 else:
1844 else:
1846 self.dirstate.update([f], 'n')
1845 self.dirstate.update([f], 'n')
1847
1846
1848 # merge the tricky bits
1847 # merge the tricky bits
1849 failedmerge = []
1848 failedmerge = []
1850 files = merge.keys()
1849 files = merge.keys()
1851 files.sort()
1850 files.sort()
1852 for f in files:
1851 for f in files:
1853 self.ui.status(_("merging %s\n") % f)
1852 self.ui.status(_("merging %s\n") % f)
1854 my, other, flag = merge[f]
1853 my, other, flag = merge[f]
1855 ret = self.merge3(f, my, other, xp1, xp2)
1854 ret = self.merge3(f, my, other, xp1, xp2)
1856 if ret:
1855 if ret:
1857 err = True
1856 err = True
1858 failedmerge.append(f)
1857 failedmerge.append(f)
1859 util.set_exec(self.wjoin(f), flag)
1858 util.set_exec(self.wjoin(f), flag)
1860 if moddirstate:
1859 if moddirstate:
1861 if branch_merge:
1860 if branch_merge:
1862 # We've done a branch merge, mark this file as merged
1861 # We've done a branch merge, mark this file as merged
1863 # so that we properly record the merger later
1862 # so that we properly record the merger later
1864 self.dirstate.update([f], 'm')
1863 self.dirstate.update([f], 'm')
1865 else:
1864 else:
1866 # We've update-merged a locally modified file, so
1865 # We've update-merged a locally modified file, so
1867 # we set the dirstate to emulate a normal checkout
1866 # we set the dirstate to emulate a normal checkout
1868 # of that file some time in the past. Thus our
1867 # of that file some time in the past. Thus our
1869 # merge will appear as a normal local file
1868 # merge will appear as a normal local file
1870 # modification.
1869 # modification.
1871 f_len = len(self.file(f).read(other))
1870 f_len = len(self.file(f).read(other))
1872 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1871 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1873
1872
1874 remove.sort()
1873 remove.sort()
1875 for f in remove:
1874 for f in remove:
1876 self.ui.note(_("removing %s\n") % f)
1875 self.ui.note(_("removing %s\n") % f)
1877 util.audit_path(f)
1876 util.audit_path(f)
1878 try:
1877 try:
1879 util.unlink(self.wjoin(f))
1878 util.unlink(self.wjoin(f))
1880 except OSError, inst:
1879 except OSError, inst:
1881 if inst.errno != errno.ENOENT:
1880 if inst.errno != errno.ENOENT:
1882 self.ui.warn(_("update failed to remove %s: %s!\n") %
1881 self.ui.warn(_("update failed to remove %s: %s!\n") %
1883 (f, inst.strerror))
1882 (f, inst.strerror))
1884 if moddirstate:
1883 if moddirstate:
1885 if branch_merge:
1884 if branch_merge:
1886 self.dirstate.update(remove, 'r')
1885 self.dirstate.update(remove, 'r')
1887 else:
1886 else:
1888 self.dirstate.forget(remove)
1887 self.dirstate.forget(remove)
1889
1888
1890 if moddirstate:
1889 if moddirstate:
1891 self.dirstate.setparents(p1, p2)
1890 self.dirstate.setparents(p1, p2)
1892
1891
1893 if show_stats:
1892 if show_stats:
1894 stats = ((len(get), _("updated")),
1893 stats = ((len(get), _("updated")),
1895 (len(merge) - len(failedmerge), _("merged")),
1894 (len(merge) - len(failedmerge), _("merged")),
1896 (len(remove), _("removed")),
1895 (len(remove), _("removed")),
1897 (len(failedmerge), _("unresolved")))
1896 (len(failedmerge), _("unresolved")))
1898 note = ", ".join([_("%d files %s") % s for s in stats])
1897 note = ", ".join([_("%d files %s") % s for s in stats])
1899 self.ui.status("%s\n" % note)
1898 self.ui.status("%s\n" % note)
1900 if moddirstate:
1899 if moddirstate:
1901 if branch_merge:
1900 if branch_merge:
1902 if failedmerge:
1901 if failedmerge:
1903 self.ui.status(_("There are unresolved merges,"
1902 self.ui.status(_("There are unresolved merges,"
1904 " you can redo the full merge using:\n"
1903 " you can redo the full merge using:\n"
1905 " hg update -C %s\n"
1904 " hg update -C %s\n"
1906 " hg merge %s\n"
1905 " hg merge %s\n"
1907 % (self.changelog.rev(p1),
1906 % (self.changelog.rev(p1),
1908 self.changelog.rev(p2))))
1907 self.changelog.rev(p2))))
1909 else:
1908 else:
1910 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1909 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1911 elif failedmerge:
1910 elif failedmerge:
1912 self.ui.status(_("There are unresolved merges with"
1911 self.ui.status(_("There are unresolved merges with"
1913 " locally modified files.\n"))
1912 " locally modified files.\n"))
1914
1913
1915 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1914 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1916 return err
1915 return err
1917
1916
1918 def merge3(self, fn, my, other, p1, p2):
1917 def merge3(self, fn, my, other, p1, p2):
1919 """perform a 3-way merge in the working directory"""
1918 """perform a 3-way merge in the working directory"""
1920
1919
1921 def temp(prefix, node):
1920 def temp(prefix, node):
1922 pre = "%s~%s." % (os.path.basename(fn), prefix)
1921 pre = "%s~%s." % (os.path.basename(fn), prefix)
1923 (fd, name) = tempfile.mkstemp(prefix=pre)
1922 (fd, name) = tempfile.mkstemp(prefix=pre)
1924 f = os.fdopen(fd, "wb")
1923 f = os.fdopen(fd, "wb")
1925 self.wwrite(fn, fl.read(node), f)
1924 self.wwrite(fn, fl.read(node), f)
1926 f.close()
1925 f.close()
1927 return name
1926 return name
1928
1927
1929 fl = self.file(fn)
1928 fl = self.file(fn)
1930 base = fl.ancestor(my, other)
1929 base = fl.ancestor(my, other)
1931 a = self.wjoin(fn)
1930 a = self.wjoin(fn)
1932 b = temp("base", base)
1931 b = temp("base", base)
1933 c = temp("other", other)
1932 c = temp("other", other)
1934
1933
1935 self.ui.note(_("resolving %s\n") % fn)
1934 self.ui.note(_("resolving %s\n") % fn)
1936 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1935 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1937 (fn, short(my), short(other), short(base)))
1936 (fn, short(my), short(other), short(base)))
1938
1937
1939 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1938 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1940 or "hgmerge")
1939 or "hgmerge")
1941 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1940 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1942 environ={'HG_FILE': fn,
1941 environ={'HG_FILE': fn,
1943 'HG_MY_NODE': p1,
1942 'HG_MY_NODE': p1,
1944 'HG_OTHER_NODE': p2,
1943 'HG_OTHER_NODE': p2,
1945 'HG_FILE_MY_NODE': hex(my),
1944 'HG_FILE_MY_NODE': hex(my),
1946 'HG_FILE_OTHER_NODE': hex(other),
1945 'HG_FILE_OTHER_NODE': hex(other),
1947 'HG_FILE_BASE_NODE': hex(base)})
1946 'HG_FILE_BASE_NODE': hex(base)})
1948 if r:
1947 if r:
1949 self.ui.warn(_("merging %s failed!\n") % fn)
1948 self.ui.warn(_("merging %s failed!\n") % fn)
1950
1949
1951 os.unlink(b)
1950 os.unlink(b)
1952 os.unlink(c)
1951 os.unlink(c)
1953 return r
1952 return r
1954
1953
1955 def verify(self):
1954 def verify(self):
1956 filelinkrevs = {}
1955 filelinkrevs = {}
1957 filenodes = {}
1956 filenodes = {}
1958 changesets = revisions = files = 0
1957 changesets = revisions = files = 0
1959 errors = [0]
1958 errors = [0]
1960 warnings = [0]
1959 warnings = [0]
1961 neededmanifests = {}
1960 neededmanifests = {}
1962
1961
1963 def err(msg):
1962 def err(msg):
1964 self.ui.warn(msg + "\n")
1963 self.ui.warn(msg + "\n")
1965 errors[0] += 1
1964 errors[0] += 1
1966
1965
1967 def warn(msg):
1966 def warn(msg):
1968 self.ui.warn(msg + "\n")
1967 self.ui.warn(msg + "\n")
1969 warnings[0] += 1
1968 warnings[0] += 1
1970
1969
1971 def checksize(obj, name):
1970 def checksize(obj, name):
1972 d = obj.checksize()
1971 d = obj.checksize()
1973 if d[0]:
1972 if d[0]:
1974 err(_("%s data length off by %d bytes") % (name, d[0]))
1973 err(_("%s data length off by %d bytes") % (name, d[0]))
1975 if d[1]:
1974 if d[1]:
1976 err(_("%s index contains %d extra bytes") % (name, d[1]))
1975 err(_("%s index contains %d extra bytes") % (name, d[1]))
1977
1976
1978 def checkversion(obj, name):
1977 def checkversion(obj, name):
1979 if obj.version != revlog.REVLOGV0:
1978 if obj.version != revlog.REVLOGV0:
1980 if not revlogv1:
1979 if not revlogv1:
1981 warn(_("warning: `%s' uses revlog format 1") % name)
1980 warn(_("warning: `%s' uses revlog format 1") % name)
1982 elif revlogv1:
1981 elif revlogv1:
1983 warn(_("warning: `%s' uses revlog format 0") % name)
1982 warn(_("warning: `%s' uses revlog format 0") % name)
1984
1983
1985 revlogv1 = self.revlogversion != revlog.REVLOGV0
1984 revlogv1 = self.revlogversion != revlog.REVLOGV0
1986 if self.ui.verbose or revlogv1 != self.revlogv1:
1985 if self.ui.verbose or revlogv1 != self.revlogv1:
1987 self.ui.status(_("repository uses revlog format %d\n") %
1986 self.ui.status(_("repository uses revlog format %d\n") %
1988 (revlogv1 and 1 or 0))
1987 (revlogv1 and 1 or 0))
1989
1988
1990 seen = {}
1989 seen = {}
1991 self.ui.status(_("checking changesets\n"))
1990 self.ui.status(_("checking changesets\n"))
1992 checksize(self.changelog, "changelog")
1991 checksize(self.changelog, "changelog")
1993
1992
1994 for i in range(self.changelog.count()):
1993 for i in range(self.changelog.count()):
1995 changesets += 1
1994 changesets += 1
1996 n = self.changelog.node(i)
1995 n = self.changelog.node(i)
1997 l = self.changelog.linkrev(n)
1996 l = self.changelog.linkrev(n)
1998 if l != i:
1997 if l != i:
1999 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1998 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2000 if n in seen:
1999 if n in seen:
2001 err(_("duplicate changeset at revision %d") % i)
2000 err(_("duplicate changeset at revision %d") % i)
2002 seen[n] = 1
2001 seen[n] = 1
2003
2002
2004 for p in self.changelog.parents(n):
2003 for p in self.changelog.parents(n):
2005 if p not in self.changelog.nodemap:
2004 if p not in self.changelog.nodemap:
2006 err(_("changeset %s has unknown parent %s") %
2005 err(_("changeset %s has unknown parent %s") %
2007 (short(n), short(p)))
2006 (short(n), short(p)))
2008 try:
2007 try:
2009 changes = self.changelog.read(n)
2008 changes = self.changelog.read(n)
2010 except KeyboardInterrupt:
2009 except KeyboardInterrupt:
2011 self.ui.warn(_("interrupted"))
2010 self.ui.warn(_("interrupted"))
2012 raise
2011 raise
2013 except Exception, inst:
2012 except Exception, inst:
2014 err(_("unpacking changeset %s: %s") % (short(n), inst))
2013 err(_("unpacking changeset %s: %s") % (short(n), inst))
2015 continue
2014 continue
2016
2015
2017 neededmanifests[changes[0]] = n
2016 neededmanifests[changes[0]] = n
2018
2017
2019 for f in changes[3]:
2018 for f in changes[3]:
2020 filelinkrevs.setdefault(f, []).append(i)
2019 filelinkrevs.setdefault(f, []).append(i)
2021
2020
2022 seen = {}
2021 seen = {}
2023 self.ui.status(_("checking manifests\n"))
2022 self.ui.status(_("checking manifests\n"))
2024 checkversion(self.manifest, "manifest")
2023 checkversion(self.manifest, "manifest")
2025 checksize(self.manifest, "manifest")
2024 checksize(self.manifest, "manifest")
2026
2025
2027 for i in range(self.manifest.count()):
2026 for i in range(self.manifest.count()):
2028 n = self.manifest.node(i)
2027 n = self.manifest.node(i)
2029 l = self.manifest.linkrev(n)
2028 l = self.manifest.linkrev(n)
2030
2029
2031 if l < 0 or l >= self.changelog.count():
2030 if l < 0 or l >= self.changelog.count():
2032 err(_("bad manifest link (%d) at revision %d") % (l, i))
2031 err(_("bad manifest link (%d) at revision %d") % (l, i))
2033
2032
2034 if n in neededmanifests:
2033 if n in neededmanifests:
2035 del neededmanifests[n]
2034 del neededmanifests[n]
2036
2035
2037 if n in seen:
2036 if n in seen:
2038 err(_("duplicate manifest at revision %d") % i)
2037 err(_("duplicate manifest at revision %d") % i)
2039
2038
2040 seen[n] = 1
2039 seen[n] = 1
2041
2040
2042 for p in self.manifest.parents(n):
2041 for p in self.manifest.parents(n):
2043 if p not in self.manifest.nodemap:
2042 if p not in self.manifest.nodemap:
2044 err(_("manifest %s has unknown parent %s") %
2043 err(_("manifest %s has unknown parent %s") %
2045 (short(n), short(p)))
2044 (short(n), short(p)))
2046
2045
2047 try:
2046 try:
2048 delta = mdiff.patchtext(self.manifest.delta(n))
2047 delta = mdiff.patchtext(self.manifest.delta(n))
2049 except KeyboardInterrupt:
2048 except KeyboardInterrupt:
2050 self.ui.warn(_("interrupted"))
2049 self.ui.warn(_("interrupted"))
2051 raise
2050 raise
2052 except Exception, inst:
2051 except Exception, inst:
2053 err(_("unpacking manifest %s: %s") % (short(n), inst))
2052 err(_("unpacking manifest %s: %s") % (short(n), inst))
2054 continue
2053 continue
2055
2054
2056 try:
2055 try:
2057 ff = [ l.split('\0') for l in delta.splitlines() ]
2056 ff = [ l.split('\0') for l in delta.splitlines() ]
2058 for f, fn in ff:
2057 for f, fn in ff:
2059 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2058 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2060 except (ValueError, TypeError), inst:
2059 except (ValueError, TypeError), inst:
2061 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2060 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2062
2061
2063 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2062 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2064
2063
2065 for m, c in neededmanifests.items():
2064 for m, c in neededmanifests.items():
2066 err(_("Changeset %s refers to unknown manifest %s") %
2065 err(_("Changeset %s refers to unknown manifest %s") %
2067 (short(m), short(c)))
2066 (short(m), short(c)))
2068 del neededmanifests
2067 del neededmanifests
2069
2068
2070 for f in filenodes:
2069 for f in filenodes:
2071 if f not in filelinkrevs:
2070 if f not in filelinkrevs:
2072 err(_("file %s in manifest but not in changesets") % f)
2071 err(_("file %s in manifest but not in changesets") % f)
2073
2072
2074 for f in filelinkrevs:
2073 for f in filelinkrevs:
2075 if f not in filenodes:
2074 if f not in filenodes:
2076 err(_("file %s in changeset but not in manifest") % f)
2075 err(_("file %s in changeset but not in manifest") % f)
2077
2076
2078 self.ui.status(_("checking files\n"))
2077 self.ui.status(_("checking files\n"))
2079 ff = filenodes.keys()
2078 ff = filenodes.keys()
2080 ff.sort()
2079 ff.sort()
2081 for f in ff:
2080 for f in ff:
2082 if f == "/dev/null":
2081 if f == "/dev/null":
2083 continue
2082 continue
2084 files += 1
2083 files += 1
2085 if not f:
2084 if not f:
2086 err(_("file without name in manifest %s") % short(n))
2085 err(_("file without name in manifest %s") % short(n))
2087 continue
2086 continue
2088 fl = self.file(f)
2087 fl = self.file(f)
2089 checkversion(fl, f)
2088 checkversion(fl, f)
2090 checksize(fl, f)
2089 checksize(fl, f)
2091
2090
2092 nodes = {nullid: 1}
2091 nodes = {nullid: 1}
2093 seen = {}
2092 seen = {}
2094 for i in range(fl.count()):
2093 for i in range(fl.count()):
2095 revisions += 1
2094 revisions += 1
2096 n = fl.node(i)
2095 n = fl.node(i)
2097
2096
2098 if n in seen:
2097 if n in seen:
2099 err(_("%s: duplicate revision %d") % (f, i))
2098 err(_("%s: duplicate revision %d") % (f, i))
2100 if n not in filenodes[f]:
2099 if n not in filenodes[f]:
2101 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2100 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2102 else:
2101 else:
2103 del filenodes[f][n]
2102 del filenodes[f][n]
2104
2103
2105 flr = fl.linkrev(n)
2104 flr = fl.linkrev(n)
2106 if flr not in filelinkrevs.get(f, []):
2105 if flr not in filelinkrevs.get(f, []):
2107 err(_("%s:%s points to unexpected changeset %d")
2106 err(_("%s:%s points to unexpected changeset %d")
2108 % (f, short(n), flr))
2107 % (f, short(n), flr))
2109 else:
2108 else:
2110 filelinkrevs[f].remove(flr)
2109 filelinkrevs[f].remove(flr)
2111
2110
2112 # verify contents
2111 # verify contents
2113 try:
2112 try:
2114 t = fl.read(n)
2113 t = fl.read(n)
2115 except KeyboardInterrupt:
2114 except KeyboardInterrupt:
2116 self.ui.warn(_("interrupted"))
2115 self.ui.warn(_("interrupted"))
2117 raise
2116 raise
2118 except Exception, inst:
2117 except Exception, inst:
2119 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2118 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2120
2119
2121 # verify parents
2120 # verify parents
2122 (p1, p2) = fl.parents(n)
2121 (p1, p2) = fl.parents(n)
2123 if p1 not in nodes:
2122 if p1 not in nodes:
2124 err(_("file %s:%s unknown parent 1 %s") %
2123 err(_("file %s:%s unknown parent 1 %s") %
2125 (f, short(n), short(p1)))
2124 (f, short(n), short(p1)))
2126 if p2 not in nodes:
2125 if p2 not in nodes:
2127 err(_("file %s:%s unknown parent 2 %s") %
2126 err(_("file %s:%s unknown parent 2 %s") %
2128 (f, short(n), short(p1)))
2127 (f, short(n), short(p1)))
2129 nodes[n] = 1
2128 nodes[n] = 1
2130
2129
2131 # cross-check
2130 # cross-check
2132 for node in filenodes[f]:
2131 for node in filenodes[f]:
2133 err(_("node %s in manifests not in %s") % (hex(node), f))
2132 err(_("node %s in manifests not in %s") % (hex(node), f))
2134
2133
2135 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2134 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2136 (files, changesets, revisions))
2135 (files, changesets, revisions))
2137
2136
2138 if warnings[0]:
2137 if warnings[0]:
2139 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2138 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2140 if errors[0]:
2139 if errors[0]:
2141 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2140 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2142 return 1
2141 return 1
2143
2142
2144 # used to avoid circular references so destructors work
2143 # used to avoid circular references so destructors work
2145 def aftertrans(base):
2144 def aftertrans(base):
2146 p = base
2145 p = base
2147 def a():
2146 def a():
2148 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2147 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2149 util.rename(os.path.join(p, "journal.dirstate"),
2148 util.rename(os.path.join(p, "journal.dirstate"),
2150 os.path.join(p, "undo.dirstate"))
2149 os.path.join(p, "undo.dirstate"))
2151 return a
2150 return a
2152
2151
@@ -1,189 +1,188 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct
9 from revlog import *
8 from revlog import *
10 from i18n import gettext as _
9 from i18n import gettext as _
11 from demandload import *
10 from demandload import *
12 demandload(globals(), "bisect array")
11 demandload(globals(), "array bisect struct")
13
12
14 class manifest(revlog):
13 class manifest(revlog):
15 def __init__(self, opener, defversion=REVLOGV0):
14 def __init__(self, opener, defversion=REVLOGV0):
16 self.mapcache = None
15 self.mapcache = None
17 self.listcache = None
16 self.listcache = None
18 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
17 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
19 defversion)
18 defversion)
20
19
21 def read(self, node):
20 def read(self, node):
22 if node == nullid: return {} # don't upset local cache
21 if node == nullid: return {} # don't upset local cache
23 if self.mapcache and self.mapcache[0] == node:
22 if self.mapcache and self.mapcache[0] == node:
24 return self.mapcache[1]
23 return self.mapcache[1]
25 text = self.revision(node)
24 text = self.revision(node)
26 map = {}
25 map = {}
27 flag = {}
26 flag = {}
28 self.listcache = array.array('c', text)
27 self.listcache = array.array('c', text)
29 lines = text.splitlines(1)
28 lines = text.splitlines(1)
30 for l in lines:
29 for l in lines:
31 (f, n) = l.split('\0')
30 (f, n) = l.split('\0')
32 map[f] = bin(n[:40])
31 map[f] = bin(n[:40])
33 flag[f] = (n[40:-1] == "x")
32 flag[f] = (n[40:-1] == "x")
34 self.mapcache = (node, map, flag)
33 self.mapcache = (node, map, flag)
35 return map
34 return map
36
35
37 def readflags(self, node):
36 def readflags(self, node):
38 if node == nullid: return {} # don't upset local cache
37 if node == nullid: return {} # don't upset local cache
39 if not self.mapcache or self.mapcache[0] != node:
38 if not self.mapcache or self.mapcache[0] != node:
40 self.read(node)
39 self.read(node)
41 return self.mapcache[2]
40 return self.mapcache[2]
42
41
43 def diff(self, a, b):
42 def diff(self, a, b):
44 return mdiff.textdiff(str(a), str(b))
43 return mdiff.textdiff(str(a), str(b))
45
44
46 def _search(self, m, s, lo=0, hi=None):
45 def _search(self, m, s, lo=0, hi=None):
47 '''return a tuple (start, end) that says where to find s within m.
46 '''return a tuple (start, end) that says where to find s within m.
48
47
49 If the string is found m[start:end] are the line containing
48 If the string is found m[start:end] are the line containing
50 that string. If start == end the string was not found and
49 that string. If start == end the string was not found and
51 they indicate the proper sorted insertion point. This was
50 they indicate the proper sorted insertion point. This was
52 taken from bisect_left, and modified to find line start/end as
51 taken from bisect_left, and modified to find line start/end as
53 it goes along.
52 it goes along.
54
53
55 m should be a buffer or a string
54 m should be a buffer or a string
56 s is a string'''
55 s is a string'''
57 def advance(i, c):
56 def advance(i, c):
58 while i < lenm and m[i] != c:
57 while i < lenm and m[i] != c:
59 i += 1
58 i += 1
60 return i
59 return i
61 lenm = len(m)
60 lenm = len(m)
62 if not hi:
61 if not hi:
63 hi = lenm
62 hi = lenm
64 while lo < hi:
63 while lo < hi:
65 mid = (lo + hi) // 2
64 mid = (lo + hi) // 2
66 start = mid
65 start = mid
67 while start > 0 and m[start-1] != '\n':
66 while start > 0 and m[start-1] != '\n':
68 start -= 1
67 start -= 1
69 end = advance(start, '\0')
68 end = advance(start, '\0')
70 if m[start:end] < s:
69 if m[start:end] < s:
71 # we know that after the null there are 40 bytes of sha1
70 # we know that after the null there are 40 bytes of sha1
72 # this translates to the bisect lo = mid + 1
71 # this translates to the bisect lo = mid + 1
73 lo = advance(end + 40, '\n') + 1
72 lo = advance(end + 40, '\n') + 1
74 else:
73 else:
75 # this translates to the bisect hi = mid
74 # this translates to the bisect hi = mid
76 hi = start
75 hi = start
77 end = advance(lo, '\0')
76 end = advance(lo, '\0')
78 found = m[lo:end]
77 found = m[lo:end]
79 if cmp(s, found) == 0:
78 if cmp(s, found) == 0:
80 # we know that after the null there are 40 bytes of sha1
79 # we know that after the null there are 40 bytes of sha1
81 end = advance(end + 40, '\n')
80 end = advance(end + 40, '\n')
82 return (lo, end+1)
81 return (lo, end+1)
83 else:
82 else:
84 return (lo, lo)
83 return (lo, lo)
85
84
86 def find(self, node, f):
85 def find(self, node, f):
87 '''look up entry for a single file efficiently.
86 '''look up entry for a single file efficiently.
88 return (node, flag) pair if found, (None, None) if not.'''
87 return (node, flag) pair if found, (None, None) if not.'''
89 if self.mapcache and node == self.mapcache[0]:
88 if self.mapcache and node == self.mapcache[0]:
90 return self.mapcache[1].get(f), self.mapcache[2].get(f)
89 return self.mapcache[1].get(f), self.mapcache[2].get(f)
91 text = self.revision(node)
90 text = self.revision(node)
92 start, end = self._search(text, f)
91 start, end = self._search(text, f)
93 if start == end:
92 if start == end:
94 return None, None
93 return None, None
95 l = text[start:end]
94 l = text[start:end]
96 f, n = l.split('\0')
95 f, n = l.split('\0')
97 return bin(n[:40]), n[40:-1] == 'x'
96 return bin(n[:40]), n[40:-1] == 'x'
98
97
99 def add(self, map, flags, transaction, link, p1=None, p2=None,
98 def add(self, map, flags, transaction, link, p1=None, p2=None,
100 changed=None):
99 changed=None):
101 # apply the changes collected during the bisect loop to our addlist
100 # apply the changes collected during the bisect loop to our addlist
102 # return a delta suitable for addrevision
101 # return a delta suitable for addrevision
103 def addlistdelta(addlist, x):
102 def addlistdelta(addlist, x):
104 # start from the bottom up
103 # start from the bottom up
105 # so changes to the offsets don't mess things up.
104 # so changes to the offsets don't mess things up.
106 i = len(x)
105 i = len(x)
107 while i > 0:
106 while i > 0:
108 i -= 1
107 i -= 1
109 start = x[i][0]
108 start = x[i][0]
110 end = x[i][1]
109 end = x[i][1]
111 if x[i][2]:
110 if x[i][2]:
112 addlist[start:end] = array.array('c', x[i][2])
111 addlist[start:end] = array.array('c', x[i][2])
113 else:
112 else:
114 del addlist[start:end]
113 del addlist[start:end]
115 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
114 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
116 for d in x ])
115 for d in x ])
117
116
118 # if we're using the listcache, make sure it is valid and
117 # if we're using the listcache, make sure it is valid and
119 # parented by the same node we're diffing against
118 # parented by the same node we're diffing against
120 if not changed or not self.listcache or not p1 or \
119 if not changed or not self.listcache or not p1 or \
121 self.mapcache[0] != p1:
120 self.mapcache[0] != p1:
122 files = map.keys()
121 files = map.keys()
123 files.sort()
122 files.sort()
124
123
125 # if this is changed to support newlines in filenames,
124 # if this is changed to support newlines in filenames,
126 # be sure to check the templates/ dir again (especially *-raw.tmpl)
125 # be sure to check the templates/ dir again (especially *-raw.tmpl)
127 text = ["%s\000%s%s\n" %
126 text = ["%s\000%s%s\n" %
128 (f, hex(map[f]), flags[f] and "x" or '')
127 (f, hex(map[f]), flags[f] and "x" or '')
129 for f in files]
128 for f in files]
130 self.listcache = array.array('c', "".join(text))
129 self.listcache = array.array('c', "".join(text))
131 cachedelta = None
130 cachedelta = None
132 else:
131 else:
133 addlist = self.listcache
132 addlist = self.listcache
134
133
135 # combine the changed lists into one list for sorting
134 # combine the changed lists into one list for sorting
136 work = [[x, 0] for x in changed[0]]
135 work = [[x, 0] for x in changed[0]]
137 work[len(work):] = [[x, 1] for x in changed[1]]
136 work[len(work):] = [[x, 1] for x in changed[1]]
138 work.sort()
137 work.sort()
139
138
140 delta = []
139 delta = []
141 dstart = None
140 dstart = None
142 dend = None
141 dend = None
143 dline = [""]
142 dline = [""]
144 start = 0
143 start = 0
145 # zero copy representation of addlist as a buffer
144 # zero copy representation of addlist as a buffer
146 addbuf = buffer(addlist)
145 addbuf = buffer(addlist)
147
146
148 # start with a readonly loop that finds the offset of
147 # start with a readonly loop that finds the offset of
149 # each line and creates the deltas
148 # each line and creates the deltas
150 for w in work:
149 for w in work:
151 f = w[0]
150 f = w[0]
152 # bs will either be the index of the item or the insert point
151 # bs will either be the index of the item or the insert point
153 start, end = self._search(addbuf, f, start)
152 start, end = self._search(addbuf, f, start)
154 if w[1] == 0:
153 if w[1] == 0:
155 l = "%s\000%s%s\n" % (f, hex(map[f]),
154 l = "%s\000%s%s\n" % (f, hex(map[f]),
156 flags[f] and "x" or '')
155 flags[f] and "x" or '')
157 else:
156 else:
158 l = ""
157 l = ""
159 if start == end and w[1] == 1:
158 if start == end and w[1] == 1:
160 # item we want to delete was not found, error out
159 # item we want to delete was not found, error out
161 raise AssertionError(
160 raise AssertionError(
162 _("failed to remove %s from manifest\n") % f)
161 _("failed to remove %s from manifest\n") % f)
163 if dstart != None and dstart <= start and dend >= start:
162 if dstart != None and dstart <= start and dend >= start:
164 if dend < end:
163 if dend < end:
165 dend = end
164 dend = end
166 if l:
165 if l:
167 dline.append(l)
166 dline.append(l)
168 else:
167 else:
169 if dstart != None:
168 if dstart != None:
170 delta.append([dstart, dend, "".join(dline)])
169 delta.append([dstart, dend, "".join(dline)])
171 dstart = start
170 dstart = start
172 dend = end
171 dend = end
173 dline = [l]
172 dline = [l]
174
173
175 if dstart != None:
174 if dstart != None:
176 delta.append([dstart, dend, "".join(dline)])
175 delta.append([dstart, dend, "".join(dline)])
177 # apply the delta to the addlist, and get a delta for addrevision
176 # apply the delta to the addlist, and get a delta for addrevision
178 cachedelta = addlistdelta(addlist, delta)
177 cachedelta = addlistdelta(addlist, delta)
179
178
180 # the delta is only valid if we've been processing the tip revision
179 # the delta is only valid if we've been processing the tip revision
181 if self.mapcache[0] != self.tip():
180 if self.mapcache[0] != self.tip():
182 cachedelta = None
181 cachedelta = None
183 self.listcache = addlist
182 self.listcache = addlist
184
183
185 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
184 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
186 p2, cachedelta)
185 p2, cachedelta)
187 self.mapcache = (n, map, flags)
186 self.mapcache = (n, map, flags)
188
187
189 return n
188 return n
@@ -1,205 +1,205 b''
1 # mdiff.py - diff and patch routines for mercurial
1 # mdiff.py - diff and patch routines for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 import struct, bdiff, util, mpatch
9 import bdiff, mpatch
10 demandload(globals(), "re")
10 demandload(globals(), "re struct util")
11
11
12 def splitnewlines(text):
12 def splitnewlines(text):
13 '''like str.splitlines, but only split on newlines.'''
13 '''like str.splitlines, but only split on newlines.'''
14 lines = [l + '\n' for l in text.split('\n')]
14 lines = [l + '\n' for l in text.split('\n')]
15 if lines:
15 if lines:
16 if lines[-1] == '\n':
16 if lines[-1] == '\n':
17 lines.pop()
17 lines.pop()
18 else:
18 else:
19 lines[-1] = lines[-1][:-1]
19 lines[-1] = lines[-1][:-1]
20 return lines
20 return lines
21
21
22 def unidiff(a, ad, b, bd, fn, r=None, text=False,
22 def unidiff(a, ad, b, bd, fn, r=None, text=False,
23 showfunc=False, ignorews=False):
23 showfunc=False, ignorews=False):
24
24
25 if not a and not b: return ""
25 if not a and not b: return ""
26 epoch = util.datestr((0, 0))
26 epoch = util.datestr((0, 0))
27
27
28 if not text and (util.binary(a) or util.binary(b)):
28 if not text and (util.binary(a) or util.binary(b)):
29 l = ['Binary file %s has changed\n' % fn]
29 l = ['Binary file %s has changed\n' % fn]
30 elif not a:
30 elif not a:
31 b = splitnewlines(b)
31 b = splitnewlines(b)
32 if a is None:
32 if a is None:
33 l1 = "--- %s\t%s\n" % ("/dev/null", epoch)
33 l1 = "--- %s\t%s\n" % ("/dev/null", epoch)
34 else:
34 else:
35 l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
35 l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
36 l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
36 l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
37 l3 = "@@ -0,0 +1,%d @@\n" % len(b)
37 l3 = "@@ -0,0 +1,%d @@\n" % len(b)
38 l = [l1, l2, l3] + ["+" + e for e in b]
38 l = [l1, l2, l3] + ["+" + e for e in b]
39 elif not b:
39 elif not b:
40 a = splitnewlines(a)
40 a = splitnewlines(a)
41 l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
41 l1 = "--- %s\t%s\n" % ("a/" + fn, ad)
42 if b is None:
42 if b is None:
43 l2 = "+++ %s\t%s\n" % ("/dev/null", epoch)
43 l2 = "+++ %s\t%s\n" % ("/dev/null", epoch)
44 else:
44 else:
45 l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
45 l2 = "+++ %s\t%s\n" % ("b/" + fn, bd)
46 l3 = "@@ -1,%d +0,0 @@\n" % len(a)
46 l3 = "@@ -1,%d +0,0 @@\n" % len(a)
47 l = [l1, l2, l3] + ["-" + e for e in a]
47 l = [l1, l2, l3] + ["-" + e for e in a]
48 else:
48 else:
49 al = splitnewlines(a)
49 al = splitnewlines(a)
50 bl = splitnewlines(b)
50 bl = splitnewlines(b)
51 l = list(bunidiff(a, b, al, bl, "a/" + fn, "b/" + fn,
51 l = list(bunidiff(a, b, al, bl, "a/" + fn, "b/" + fn,
52 showfunc=showfunc, ignorews=ignorews))
52 showfunc=showfunc, ignorews=ignorews))
53 if not l: return ""
53 if not l: return ""
54 # difflib uses a space, rather than a tab
54 # difflib uses a space, rather than a tab
55 l[0] = "%s\t%s\n" % (l[0][:-2], ad)
55 l[0] = "%s\t%s\n" % (l[0][:-2], ad)
56 l[1] = "%s\t%s\n" % (l[1][:-2], bd)
56 l[1] = "%s\t%s\n" % (l[1][:-2], bd)
57
57
58 for ln in xrange(len(l)):
58 for ln in xrange(len(l)):
59 if l[ln][-1] != '\n':
59 if l[ln][-1] != '\n':
60 l[ln] += "\n\ No newline at end of file\n"
60 l[ln] += "\n\ No newline at end of file\n"
61
61
62 if r:
62 if r:
63 l.insert(0, "diff %s %s\n" %
63 l.insert(0, "diff %s %s\n" %
64 (' '.join(["-r %s" % rev for rev in r]), fn))
64 (' '.join(["-r %s" % rev for rev in r]), fn))
65
65
66 return "".join(l)
66 return "".join(l)
67
67
68 # somewhat self contained replacement for difflib.unified_diff
68 # somewhat self contained replacement for difflib.unified_diff
69 # t1 and t2 are the text to be diffed
69 # t1 and t2 are the text to be diffed
70 # l1 and l2 are the text broken up into lines
70 # l1 and l2 are the text broken up into lines
71 # header1 and header2 are the filenames for the diff output
71 # header1 and header2 are the filenames for the diff output
72 # context is the number of context lines
72 # context is the number of context lines
73 # showfunc enables diff -p output
73 # showfunc enables diff -p output
74 # ignorews ignores all whitespace changes in the diff
74 # ignorews ignores all whitespace changes in the diff
75 def bunidiff(t1, t2, l1, l2, header1, header2, context=3, showfunc=False,
75 def bunidiff(t1, t2, l1, l2, header1, header2, context=3, showfunc=False,
76 ignorews=False):
76 ignorews=False):
77 def contextend(l, len):
77 def contextend(l, len):
78 ret = l + context
78 ret = l + context
79 if ret > len:
79 if ret > len:
80 ret = len
80 ret = len
81 return ret
81 return ret
82
82
83 def contextstart(l):
83 def contextstart(l):
84 ret = l - context
84 ret = l - context
85 if ret < 0:
85 if ret < 0:
86 return 0
86 return 0
87 return ret
87 return ret
88
88
89 def yieldhunk(hunk, header):
89 def yieldhunk(hunk, header):
90 if header:
90 if header:
91 for x in header:
91 for x in header:
92 yield x
92 yield x
93 (astart, a2, bstart, b2, delta) = hunk
93 (astart, a2, bstart, b2, delta) = hunk
94 aend = contextend(a2, len(l1))
94 aend = contextend(a2, len(l1))
95 alen = aend - astart
95 alen = aend - astart
96 blen = b2 - bstart + aend - a2
96 blen = b2 - bstart + aend - a2
97
97
98 func = ""
98 func = ""
99 if showfunc:
99 if showfunc:
100 # walk backwards from the start of the context
100 # walk backwards from the start of the context
101 # to find a line starting with an alphanumeric char.
101 # to find a line starting with an alphanumeric char.
102 for x in xrange(astart, -1, -1):
102 for x in xrange(astart, -1, -1):
103 t = l1[x].rstrip()
103 t = l1[x].rstrip()
104 if funcre.match(t):
104 if funcre.match(t):
105 func = ' ' + t[:40]
105 func = ' ' + t[:40]
106 break
106 break
107
107
108 yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
108 yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
109 bstart + 1, blen, func)
109 bstart + 1, blen, func)
110 for x in delta:
110 for x in delta:
111 yield x
111 yield x
112 for x in xrange(a2, aend):
112 for x in xrange(a2, aend):
113 yield ' ' + l1[x]
113 yield ' ' + l1[x]
114
114
115 header = [ "--- %s\t\n" % header1, "+++ %s\t\n" % header2 ]
115 header = [ "--- %s\t\n" % header1, "+++ %s\t\n" % header2 ]
116
116
117 if showfunc:
117 if showfunc:
118 funcre = re.compile('\w')
118 funcre = re.compile('\w')
119 if ignorews:
119 if ignorews:
120 wsre = re.compile('[ \t]')
120 wsre = re.compile('[ \t]')
121
121
122 # bdiff.blocks gives us the matching sequences in the files. The loop
122 # bdiff.blocks gives us the matching sequences in the files. The loop
123 # below finds the spaces between those matching sequences and translates
123 # below finds the spaces between those matching sequences and translates
124 # them into diff output.
124 # them into diff output.
125 #
125 #
126 diff = bdiff.blocks(t1, t2)
126 diff = bdiff.blocks(t1, t2)
127 hunk = None
127 hunk = None
128 for i in xrange(len(diff)):
128 for i in xrange(len(diff)):
129 # The first match is special.
129 # The first match is special.
130 # we've either found a match starting at line 0 or a match later
130 # we've either found a match starting at line 0 or a match later
131 # in the file. If it starts later, old and new below will both be
131 # in the file. If it starts later, old and new below will both be
132 # empty and we'll continue to the next match.
132 # empty and we'll continue to the next match.
133 if i > 0:
133 if i > 0:
134 s = diff[i-1]
134 s = diff[i-1]
135 else:
135 else:
136 s = [0, 0, 0, 0]
136 s = [0, 0, 0, 0]
137 delta = []
137 delta = []
138 s1 = diff[i]
138 s1 = diff[i]
139 a1 = s[1]
139 a1 = s[1]
140 a2 = s1[0]
140 a2 = s1[0]
141 b1 = s[3]
141 b1 = s[3]
142 b2 = s1[2]
142 b2 = s1[2]
143
143
144 old = l1[a1:a2]
144 old = l1[a1:a2]
145 new = l2[b1:b2]
145 new = l2[b1:b2]
146
146
147 # bdiff sometimes gives huge matches past eof, this check eats them,
147 # bdiff sometimes gives huge matches past eof, this check eats them,
148 # and deals with the special first match case described above
148 # and deals with the special first match case described above
149 if not old and not new:
149 if not old and not new:
150 continue
150 continue
151
151
152 if ignorews:
152 if ignorews:
153 wsold = wsre.sub('', "".join(old))
153 wsold = wsre.sub('', "".join(old))
154 wsnew = wsre.sub('', "".join(new))
154 wsnew = wsre.sub('', "".join(new))
155 if wsold == wsnew:
155 if wsold == wsnew:
156 continue
156 continue
157
157
158 astart = contextstart(a1)
158 astart = contextstart(a1)
159 bstart = contextstart(b1)
159 bstart = contextstart(b1)
160 prev = None
160 prev = None
161 if hunk:
161 if hunk:
162 # join with the previous hunk if it falls inside the context
162 # join with the previous hunk if it falls inside the context
163 if astart < hunk[1] + context + 1:
163 if astart < hunk[1] + context + 1:
164 prev = hunk
164 prev = hunk
165 astart = hunk[1]
165 astart = hunk[1]
166 bstart = hunk[3]
166 bstart = hunk[3]
167 else:
167 else:
168 for x in yieldhunk(hunk, header):
168 for x in yieldhunk(hunk, header):
169 yield x
169 yield x
170 # we only want to yield the header if the files differ, and
170 # we only want to yield the header if the files differ, and
171 # we only want to yield it once.
171 # we only want to yield it once.
172 header = None
172 header = None
173 if prev:
173 if prev:
174 # we've joined the previous hunk, record the new ending points.
174 # we've joined the previous hunk, record the new ending points.
175 hunk[1] = a2
175 hunk[1] = a2
176 hunk[3] = b2
176 hunk[3] = b2
177 delta = hunk[4]
177 delta = hunk[4]
178 else:
178 else:
179 # create a new hunk
179 # create a new hunk
180 hunk = [ astart, a2, bstart, b2, delta ]
180 hunk = [ astart, a2, bstart, b2, delta ]
181
181
182 delta[len(delta):] = [ ' ' + x for x in l1[astart:a1] ]
182 delta[len(delta):] = [ ' ' + x for x in l1[astart:a1] ]
183 delta[len(delta):] = [ '-' + x for x in old ]
183 delta[len(delta):] = [ '-' + x for x in old ]
184 delta[len(delta):] = [ '+' + x for x in new ]
184 delta[len(delta):] = [ '+' + x for x in new ]
185
185
186 if hunk:
186 if hunk:
187 for x in yieldhunk(hunk, header):
187 for x in yieldhunk(hunk, header):
188 yield x
188 yield x
189
189
190 def patchtext(bin):
190 def patchtext(bin):
191 pos = 0
191 pos = 0
192 t = []
192 t = []
193 while pos < len(bin):
193 while pos < len(bin):
194 p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
194 p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
195 pos += 12
195 pos += 12
196 t.append(bin[pos:pos + l])
196 t.append(bin[pos:pos + l])
197 pos += l
197 pos += l
198 return "".join(t)
198 return "".join(t)
199
199
200 def patch(a, bin):
200 def patch(a, bin):
201 return mpatch.patches(a, [bin])
201 return mpatch.patches(a, [bin])
202
202
203 patches = mpatch.patches
203 patches = mpatch.patches
204 patchedsize = mpatch.patchedsize
204 patchedsize = mpatch.patchedsize
205 textdiff = bdiff.bdiff
205 textdiff = bdiff.bdiff
@@ -1,21 +1,22 b''
1 """
1 """
2 node.py - basic nodeid manipulation for mercurial
2 node.py - basic nodeid manipulation for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import binascii
10 from demandload import demandload
11 demandload(globals(), "binascii")
11
12
12 nullid = "\0" * 20
13 nullid = "\0" * 20
13
14
14 def hex(node):
15 def hex(node):
15 return binascii.hexlify(node)
16 return binascii.hexlify(node)
16
17
17 def bin(node):
18 def bin(node):
18 return binascii.unhexlify(node)
19 return binascii.unhexlify(node)
19
20
20 def short(node):
21 def short(node):
21 return hex(node[:6])
22 return hex(node[:6])
@@ -1,515 +1,519 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import re
9 from demandload import demandload
8 from demandload import demandload
10 from i18n import gettext as _
9 from i18n import gettext as _
11 from node import *
10 from node import *
12 demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
11 demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
13
12
14 esctable = {
13 esctable = {
15 '\\': '\\',
14 '\\': '\\',
16 'r': '\r',
15 'r': '\r',
17 't': '\t',
16 't': '\t',
18 'n': '\n',
17 'n': '\n',
19 'v': '\v',
18 'v': '\v',
20 }
19 }
21
20
22 def parsestring(s, quoted=True):
21 def parsestring(s, quoted=True):
23 '''parse a string using simple c-like syntax.
22 '''parse a string using simple c-like syntax.
24 string must be in quotes if quoted is True.'''
23 string must be in quotes if quoted is True.'''
25 fp = cStringIO.StringIO()
24 fp = cStringIO.StringIO()
26 if quoted:
25 if quoted:
27 first = s[0]
26 first = s[0]
28 if len(s) < 2: raise SyntaxError(_('string too short'))
27 if len(s) < 2: raise SyntaxError(_('string too short'))
29 if first not in "'\"": raise SyntaxError(_('invalid quote'))
28 if first not in "'\"": raise SyntaxError(_('invalid quote'))
30 if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
29 if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
31 s = s[1:-1]
30 s = s[1:-1]
32 escape = False
31 escape = False
33 for c in s:
32 for c in s:
34 if escape:
33 if escape:
35 fp.write(esctable.get(c, c))
34 fp.write(esctable.get(c, c))
36 escape = False
35 escape = False
37 elif c == '\\': escape = True
36 elif c == '\\': escape = True
38 elif quoted and c == first: raise SyntaxError(_('string ends early'))
37 elif quoted and c == first: raise SyntaxError(_('string ends early'))
39 else: fp.write(c)
38 else: fp.write(c)
40 if escape: raise SyntaxError(_('unterminated escape'))
39 if escape: raise SyntaxError(_('unterminated escape'))
41 return fp.getvalue()
40 return fp.getvalue()
42
41
43 class templater(object):
42 class templater(object):
44 '''template expansion engine.
43 '''template expansion engine.
45
44
46 template expansion works like this. a map file contains key=value
45 template expansion works like this. a map file contains key=value
47 pairs. if value is quoted, it is treated as string. otherwise, it
46 pairs. if value is quoted, it is treated as string. otherwise, it
48 is treated as name of template file.
47 is treated as name of template file.
49
48
50 templater is asked to expand a key in map. it looks up key, and
49 templater is asked to expand a key in map. it looks up key, and
51 looks for atrings like this: {foo}. it expands {foo} by looking up
50 looks for atrings like this: {foo}. it expands {foo} by looking up
52 foo in map, and substituting it. expansion is recursive: it stops
51 foo in map, and substituting it. expansion is recursive: it stops
53 when there is no more {foo} to replace.
52 when there is no more {foo} to replace.
54
53
55 expansion also allows formatting and filtering.
54 expansion also allows formatting and filtering.
56
55
57 format uses key to expand each item in list. syntax is
56 format uses key to expand each item in list. syntax is
58 {key%format}.
57 {key%format}.
59
58
60 filter uses function to transform value. syntax is
59 filter uses function to transform value. syntax is
61 {key|filter1|filter2|...}.'''
60 {key|filter1|filter2|...}.'''
62
61
63 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
62 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
64 '''set up template engine.
63 '''set up template engine.
65 mapfile is name of file to read map definitions from.
64 mapfile is name of file to read map definitions from.
66 filters is dict of functions. each transforms a value into another.
65 filters is dict of functions. each transforms a value into another.
67 defaults is dict of default map definitions.'''
66 defaults is dict of default map definitions.'''
68 self.mapfile = mapfile or 'template'
67 self.mapfile = mapfile or 'template'
69 self.cache = cache.copy()
68 self.cache = cache.copy()
70 self.map = {}
69 self.map = {}
71 self.base = (mapfile and os.path.dirname(mapfile)) or ''
70 self.base = (mapfile and os.path.dirname(mapfile)) or ''
72 self.filters = filters
71 self.filters = filters
73 self.defaults = defaults
72 self.defaults = defaults
74
73
75 if not mapfile:
74 if not mapfile:
76 return
75 return
77 i = 0
76 i = 0
78 for l in file(mapfile):
77 for l in file(mapfile):
79 l = l.strip()
78 l = l.strip()
80 i += 1
79 i += 1
81 if not l or l[0] in '#;': continue
80 if not l or l[0] in '#;': continue
82 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
81 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
83 if m:
82 if m:
84 key, val = m.groups()
83 key, val = m.groups()
85 if val[0] in "'\"":
84 if val[0] in "'\"":
86 try:
85 try:
87 self.cache[key] = parsestring(val)
86 self.cache[key] = parsestring(val)
88 except SyntaxError, inst:
87 except SyntaxError, inst:
89 raise SyntaxError('%s:%s: %s' %
88 raise SyntaxError('%s:%s: %s' %
90 (mapfile, i, inst.args[0]))
89 (mapfile, i, inst.args[0]))
91 else:
90 else:
92 self.map[key] = os.path.join(self.base, val)
91 self.map[key] = os.path.join(self.base, val)
93 else:
92 else:
94 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
93 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
95
94
96 def __contains__(self, key):
95 def __contains__(self, key):
97 return key in self.cache
96 return key in self.cache
98
97
99 def __call__(self, t, **map):
98 def __call__(self, t, **map):
100 '''perform expansion.
99 '''perform expansion.
101 t is name of map element to expand.
100 t is name of map element to expand.
102 map is added elements to use during expansion.'''
101 map is added elements to use during expansion.'''
103 m = self.defaults.copy()
102 m = self.defaults.copy()
104 m.update(map)
103 m.update(map)
105 try:
104 try:
106 tmpl = self.cache[t]
105 tmpl = self.cache[t]
107 except KeyError:
106 except KeyError:
108 try:
107 try:
109 tmpl = self.cache[t] = file(self.map[t]).read()
108 tmpl = self.cache[t] = file(self.map[t]).read()
110 except IOError, inst:
109 except IOError, inst:
111 raise IOError(inst.args[0], _('template file %s: %s') %
110 raise IOError(inst.args[0], _('template file %s: %s') %
112 (self.map[t], inst.args[1]))
111 (self.map[t], inst.args[1]))
113 return self.template(tmpl, self.filters, **m)
112 return self.template(tmpl, self.filters, **m)
114
113
115 template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
114 template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
116 r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
115 r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
117 r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")
116 r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")
118
117
119 def template(self, tmpl, filters={}, **map):
118 def template(self, tmpl, filters={}, **map):
120 lm = map.copy()
119 lm = map.copy()
121 while tmpl:
120 while tmpl:
122 m = self.template_re.search(tmpl)
121 m = self.template_re.search(tmpl)
123 if m:
122 if m:
124 start, end = m.span(0)
123 start, end = m.span(0)
125 s, e = tmpl[start], tmpl[end - 1]
124 s, e = tmpl[start], tmpl[end - 1]
126 key = m.group(1)
125 key = m.group(1)
127 if ((s == '#' and e != '#') or (s == '{' and e != '}')):
126 if ((s == '#' and e != '#') or (s == '{' and e != '}')):
128 raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
127 raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
129 (s, e, key))
128 (s, e, key))
130 if start:
129 if start:
131 yield tmpl[:start]
130 yield tmpl[:start]
132 v = map.get(key, "")
131 v = map.get(key, "")
133 v = callable(v) and v(**map) or v
132 v = callable(v) and v(**map) or v
134
133
135 format = m.group(2)
134 format = m.group(2)
136 fl = m.group(4)
135 fl = m.group(4)
137
136
138 if format:
137 if format:
139 q = v.__iter__
138 q = v.__iter__
140 for i in q():
139 for i in q():
141 lm.update(i)
140 lm.update(i)
142 yield self(format[1:], **lm)
141 yield self(format[1:], **lm)
143
142
144 v = ""
143 v = ""
145
144
146 elif fl:
145 elif fl:
147 for f in fl.split("|")[1:]:
146 for f in fl.split("|")[1:]:
148 v = filters[f](v)
147 v = filters[f](v)
149
148
150 yield v
149 yield v
151 tmpl = tmpl[end:]
150 tmpl = tmpl[end:]
152 else:
151 else:
153 yield tmpl
152 yield tmpl
154 break
153 break
155
154
156 agescales = [("second", 1),
155 agescales = [("second", 1),
157 ("minute", 60),
156 ("minute", 60),
158 ("hour", 3600),
157 ("hour", 3600),
159 ("day", 3600 * 24),
158 ("day", 3600 * 24),
160 ("week", 3600 * 24 * 7),
159 ("week", 3600 * 24 * 7),
161 ("month", 3600 * 24 * 30),
160 ("month", 3600 * 24 * 30),
162 ("year", 3600 * 24 * 365)]
161 ("year", 3600 * 24 * 365)]
163
162
164 agescales.reverse()
163 agescales.reverse()
165
164
166 def age(date):
165 def age(date):
167 '''turn a (timestamp, tzoff) tuple into an age string.'''
166 '''turn a (timestamp, tzoff) tuple into an age string.'''
168
167
169 def plural(t, c):
168 def plural(t, c):
170 if c == 1:
169 if c == 1:
171 return t
170 return t
172 return t + "s"
171 return t + "s"
173 def fmt(t, c):
172 def fmt(t, c):
174 return "%d %s" % (c, plural(t, c))
173 return "%d %s" % (c, plural(t, c))
175
174
176 now = time.time()
175 now = time.time()
177 then = date[0]
176 then = date[0]
178 delta = max(1, int(now - then))
177 delta = max(1, int(now - then))
179
178
180 for t, s in agescales:
179 for t, s in agescales:
181 n = delta / s
180 n = delta / s
182 if n >= 2 or s == 1:
181 if n >= 2 or s == 1:
183 return fmt(t, n)
182 return fmt(t, n)
184
183
185 def stringify(thing):
184 def stringify(thing):
186 '''turn nested template iterator into string.'''
185 '''turn nested template iterator into string.'''
187 cs = cStringIO.StringIO()
186 cs = cStringIO.StringIO()
188 def walk(things):
187 def walk(things):
189 for t in things:
188 for t in things:
190 if hasattr(t, '__iter__'):
189 if hasattr(t, '__iter__'):
191 walk(t)
190 walk(t)
192 else:
191 else:
193 cs.write(t)
192 cs.write(t)
194 walk(thing)
193 walk(thing)
195 return cs.getvalue()
194 return cs.getvalue()
196
195
197 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
196 para_re = None
198 space_re = re.compile(r' +')
197 space_re = None
199
198
200 def fill(text, width):
199 def fill(text, width):
201 '''fill many paragraphs.'''
200 '''fill many paragraphs.'''
201 global para_re, space_re
202 if para_re is None:
203 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
204 space_re = re.compile(r' +')
205
202 def findparas():
206 def findparas():
203 start = 0
207 start = 0
204 while True:
208 while True:
205 m = para_re.search(text, start)
209 m = para_re.search(text, start)
206 if not m:
210 if not m:
207 w = len(text)
211 w = len(text)
208 while w > start and text[w-1].isspace(): w -= 1
212 while w > start and text[w-1].isspace(): w -= 1
209 yield text[start:w], text[w:]
213 yield text[start:w], text[w:]
210 break
214 break
211 yield text[start:m.start(0)], m.group(1)
215 yield text[start:m.start(0)], m.group(1)
212 start = m.end(1)
216 start = m.end(1)
213
217
214 fp = cStringIO.StringIO()
218 fp = cStringIO.StringIO()
215 for para, rest in findparas():
219 for para, rest in findparas():
216 fp.write(space_re.sub(' ', textwrap.fill(para, width)))
220 fp.write(space_re.sub(' ', textwrap.fill(para, width)))
217 fp.write(rest)
221 fp.write(rest)
218 return fp.getvalue()
222 return fp.getvalue()
219
223
220 def isodate(date):
224 def isodate(date):
221 '''turn a (timestamp, tzoff) tuple into an iso 8631 date and time.'''
225 '''turn a (timestamp, tzoff) tuple into an iso 8631 date and time.'''
222 return util.datestr(date, format='%Y-%m-%d %H:%M')
226 return util.datestr(date, format='%Y-%m-%d %H:%M')
223
227
224 def nl2br(text):
228 def nl2br(text):
225 '''replace raw newlines with xhtml line breaks.'''
229 '''replace raw newlines with xhtml line breaks.'''
226 return text.replace('\n', '<br/>\n')
230 return text.replace('\n', '<br/>\n')
227
231
228 def obfuscate(text):
232 def obfuscate(text):
229 return ''.join(['&#%d;' % ord(c) for c in text])
233 return ''.join(['&#%d;' % ord(c) for c in text])
230
234
231 def domain(author):
235 def domain(author):
232 '''get domain of author, or empty string if none.'''
236 '''get domain of author, or empty string if none.'''
233 f = author.find('@')
237 f = author.find('@')
234 if f == -1: return ''
238 if f == -1: return ''
235 author = author[f+1:]
239 author = author[f+1:]
236 f = author.find('>')
240 f = author.find('>')
237 if f >= 0: author = author[:f]
241 if f >= 0: author = author[:f]
238 return author
242 return author
239
243
240 def email(author):
244 def email(author):
241 '''get email of author.'''
245 '''get email of author.'''
242 r = author.find('>')
246 r = author.find('>')
243 if r == -1: r = None
247 if r == -1: r = None
244 return author[author.find('<')+1:r]
248 return author[author.find('<')+1:r]
245
249
246 def person(author):
250 def person(author):
247 '''get name of author, or else username.'''
251 '''get name of author, or else username.'''
248 f = author.find('<')
252 f = author.find('<')
249 if f == -1: return util.shortuser(author)
253 if f == -1: return util.shortuser(author)
250 return author[:f].rstrip()
254 return author[:f].rstrip()
251
255
252 def shortdate(date):
256 def shortdate(date):
253 '''turn (timestamp, tzoff) tuple into iso 8631 date.'''
257 '''turn (timestamp, tzoff) tuple into iso 8631 date.'''
254 return util.datestr(date, format='%Y-%m-%d', timezone=False)
258 return util.datestr(date, format='%Y-%m-%d', timezone=False)
255
259
256 def indent(text, prefix):
260 def indent(text, prefix):
257 '''indent each non-empty line of text after first with prefix.'''
261 '''indent each non-empty line of text after first with prefix.'''
258 fp = cStringIO.StringIO()
262 fp = cStringIO.StringIO()
259 lines = text.splitlines()
263 lines = text.splitlines()
260 num_lines = len(lines)
264 num_lines = len(lines)
261 for i in xrange(num_lines):
265 for i in xrange(num_lines):
262 l = lines[i]
266 l = lines[i]
263 if i and l.strip(): fp.write(prefix)
267 if i and l.strip(): fp.write(prefix)
264 fp.write(l)
268 fp.write(l)
265 if i < num_lines - 1 or text.endswith('\n'):
269 if i < num_lines - 1 or text.endswith('\n'):
266 fp.write('\n')
270 fp.write('\n')
267 return fp.getvalue()
271 return fp.getvalue()
268
272
269 common_filters = {
273 common_filters = {
270 "addbreaks": nl2br,
274 "addbreaks": nl2br,
271 "basename": os.path.basename,
275 "basename": os.path.basename,
272 "age": age,
276 "age": age,
273 "date": lambda x: util.datestr(x),
277 "date": lambda x: util.datestr(x),
274 "domain": domain,
278 "domain": domain,
275 "email": email,
279 "email": email,
276 "escape": lambda x: cgi.escape(x, True),
280 "escape": lambda x: cgi.escape(x, True),
277 "fill68": lambda x: fill(x, width=68),
281 "fill68": lambda x: fill(x, width=68),
278 "fill76": lambda x: fill(x, width=76),
282 "fill76": lambda x: fill(x, width=76),
279 "firstline": lambda x: x.splitlines(1)[0].rstrip('\r\n'),
283 "firstline": lambda x: x.splitlines(1)[0].rstrip('\r\n'),
280 "tabindent": lambda x: indent(x, '\t'),
284 "tabindent": lambda x: indent(x, '\t'),
281 "isodate": isodate,
285 "isodate": isodate,
282 "obfuscate": obfuscate,
286 "obfuscate": obfuscate,
283 "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
287 "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
284 "person": person,
288 "person": person,
285 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
289 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
286 "short": lambda x: x[:12],
290 "short": lambda x: x[:12],
287 "shortdate": shortdate,
291 "shortdate": shortdate,
288 "stringify": stringify,
292 "stringify": stringify,
289 "strip": lambda x: x.strip(),
293 "strip": lambda x: x.strip(),
290 "urlescape": lambda x: urllib.quote(x),
294 "urlescape": lambda x: urllib.quote(x),
291 "user": lambda x: util.shortuser(x),
295 "user": lambda x: util.shortuser(x),
292 }
296 }
293
297
294 def templatepath(name=None):
298 def templatepath(name=None):
295 '''return location of template file or directory (if no name).
299 '''return location of template file or directory (if no name).
296 returns None if not found.'''
300 returns None if not found.'''
297
301
298 # executable version (py2exe) doesn't support __file__
302 # executable version (py2exe) doesn't support __file__
299 if hasattr(sys, 'frozen'):
303 if hasattr(sys, 'frozen'):
300 module = sys.executable
304 module = sys.executable
301 else:
305 else:
302 module = __file__
306 module = __file__
303 for f in 'templates', '../templates':
307 for f in 'templates', '../templates':
304 fl = f.split('/')
308 fl = f.split('/')
305 if name: fl.append(name)
309 if name: fl.append(name)
306 p = os.path.join(os.path.dirname(module), *fl)
310 p = os.path.join(os.path.dirname(module), *fl)
307 if (name and os.path.exists(p)) or os.path.isdir(p):
311 if (name and os.path.exists(p)) or os.path.isdir(p):
308 return os.path.normpath(p)
312 return os.path.normpath(p)
309
313
310 class changeset_templater(object):
314 class changeset_templater(object):
311 '''format changeset information.'''
315 '''format changeset information.'''
312
316
313 def __init__(self, ui, repo, mapfile, dest=None):
317 def __init__(self, ui, repo, mapfile, dest=None):
314 self.t = templater(mapfile, common_filters,
318 self.t = templater(mapfile, common_filters,
315 cache={'parent': '{rev}:{node|short} ',
319 cache={'parent': '{rev}:{node|short} ',
316 'manifest': '{rev}:{node|short}'})
320 'manifest': '{rev}:{node|short}'})
317 self.ui = ui
321 self.ui = ui
318 self.dest = dest
322 self.dest = dest
319 self.repo = repo
323 self.repo = repo
320
324
321 def use_template(self, t):
325 def use_template(self, t):
322 '''set template string to use'''
326 '''set template string to use'''
323 self.t.cache['changeset'] = t
327 self.t.cache['changeset'] = t
324
328
325 def write(self, thing, header=False):
329 def write(self, thing, header=False):
326 '''write expanded template.
330 '''write expanded template.
327 uses in-order recursive traverse of iterators.'''
331 uses in-order recursive traverse of iterators.'''
328 dest = self.dest or self.ui
332 dest = self.dest or self.ui
329 for t in thing:
333 for t in thing:
330 if hasattr(t, '__iter__'):
334 if hasattr(t, '__iter__'):
331 self.write(t, header=header)
335 self.write(t, header=header)
332 elif header:
336 elif header:
333 dest.write_header(t)
337 dest.write_header(t)
334 else:
338 else:
335 dest.write(t)
339 dest.write(t)
336
340
337 def write_header(self, thing):
341 def write_header(self, thing):
338 self.write(thing, header=True)
342 self.write(thing, header=True)
339
343
340 def show(self, rev=0, changenode=None, brinfo=None, changes=None,
344 def show(self, rev=0, changenode=None, brinfo=None, changes=None,
341 **props):
345 **props):
342 '''show a single changeset or file revision'''
346 '''show a single changeset or file revision'''
343 log = self.repo.changelog
347 log = self.repo.changelog
344 if changenode is None:
348 if changenode is None:
345 changenode = log.node(rev)
349 changenode = log.node(rev)
346 elif not rev:
350 elif not rev:
347 rev = log.rev(changenode)
351 rev = log.rev(changenode)
348 if changes is None:
352 if changes is None:
349 changes = log.read(changenode)
353 changes = log.read(changenode)
350
354
351 def showlist(name, values, plural=None, **args):
355 def showlist(name, values, plural=None, **args):
352 '''expand set of values.
356 '''expand set of values.
353 name is name of key in template map.
357 name is name of key in template map.
354 values is list of strings or dicts.
358 values is list of strings or dicts.
355 plural is plural of name, if not simply name + 's'.
359 plural is plural of name, if not simply name + 's'.
356
360
357 expansion works like this, given name 'foo'.
361 expansion works like this, given name 'foo'.
358
362
359 if values is empty, expand 'no_foos'.
363 if values is empty, expand 'no_foos'.
360
364
361 if 'foo' not in template map, return values as a string,
365 if 'foo' not in template map, return values as a string,
362 joined by space.
366 joined by space.
363
367
364 expand 'start_foos'.
368 expand 'start_foos'.
365
369
366 for each value, expand 'foo'. if 'last_foo' in template
370 for each value, expand 'foo'. if 'last_foo' in template
367 map, expand it instead of 'foo' for last key.
371 map, expand it instead of 'foo' for last key.
368
372
369 expand 'end_foos'.
373 expand 'end_foos'.
370 '''
374 '''
371 if plural: names = plural
375 if plural: names = plural
372 else: names = name + 's'
376 else: names = name + 's'
373 if not values:
377 if not values:
374 noname = 'no_' + names
378 noname = 'no_' + names
375 if noname in self.t:
379 if noname in self.t:
376 yield self.t(noname, **args)
380 yield self.t(noname, **args)
377 return
381 return
378 if name not in self.t:
382 if name not in self.t:
379 if isinstance(values[0], str):
383 if isinstance(values[0], str):
380 yield ' '.join(values)
384 yield ' '.join(values)
381 else:
385 else:
382 for v in values:
386 for v in values:
383 yield dict(v, **args)
387 yield dict(v, **args)
384 return
388 return
385 startname = 'start_' + names
389 startname = 'start_' + names
386 if startname in self.t:
390 if startname in self.t:
387 yield self.t(startname, **args)
391 yield self.t(startname, **args)
388 vargs = args.copy()
392 vargs = args.copy()
389 def one(v, tag=name):
393 def one(v, tag=name):
390 try:
394 try:
391 vargs.update(v)
395 vargs.update(v)
392 except (AttributeError, ValueError):
396 except (AttributeError, ValueError):
393 try:
397 try:
394 for a, b in v:
398 for a, b in v:
395 vargs[a] = b
399 vargs[a] = b
396 except ValueError:
400 except ValueError:
397 vargs[name] = v
401 vargs[name] = v
398 return self.t(tag, **vargs)
402 return self.t(tag, **vargs)
399 lastname = 'last_' + name
403 lastname = 'last_' + name
400 if lastname in self.t:
404 if lastname in self.t:
401 last = values.pop()
405 last = values.pop()
402 else:
406 else:
403 last = None
407 last = None
404 for v in values:
408 for v in values:
405 yield one(v)
409 yield one(v)
406 if last is not None:
410 if last is not None:
407 yield one(last, tag=lastname)
411 yield one(last, tag=lastname)
408 endname = 'end_' + names
412 endname = 'end_' + names
409 if endname in self.t:
413 if endname in self.t:
410 yield self.t(endname, **args)
414 yield self.t(endname, **args)
411
415
412 if brinfo:
416 if brinfo:
413 def showbranches(**args):
417 def showbranches(**args):
414 if changenode in brinfo:
418 if changenode in brinfo:
415 for x in showlist('branch', brinfo[changenode],
419 for x in showlist('branch', brinfo[changenode],
416 plural='branches', **args):
420 plural='branches', **args):
417 yield x
421 yield x
418 else:
422 else:
419 showbranches = ''
423 showbranches = ''
420
424
421 if self.ui.debugflag:
425 if self.ui.debugflag:
422 def showmanifest(**args):
426 def showmanifest(**args):
423 args = args.copy()
427 args = args.copy()
424 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
428 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
425 node=hex(changes[0])))
429 node=hex(changes[0])))
426 yield self.t('manifest', **args)
430 yield self.t('manifest', **args)
427 else:
431 else:
428 showmanifest = ''
432 showmanifest = ''
429
433
430 def showparents(**args):
434 def showparents(**args):
431 parents = [[('rev', log.rev(p)), ('node', hex(p))]
435 parents = [[('rev', log.rev(p)), ('node', hex(p))]
432 for p in log.parents(changenode)
436 for p in log.parents(changenode)
433 if self.ui.debugflag or p != nullid]
437 if self.ui.debugflag or p != nullid]
434 if (not self.ui.debugflag and len(parents) == 1 and
438 if (not self.ui.debugflag and len(parents) == 1 and
435 parents[0][0][1] == rev - 1):
439 parents[0][0][1] == rev - 1):
436 return
440 return
437 for x in showlist('parent', parents, **args):
441 for x in showlist('parent', parents, **args):
438 yield x
442 yield x
439
443
440 def showtags(**args):
444 def showtags(**args):
441 for x in showlist('tag', self.repo.nodetags(changenode), **args):
445 for x in showlist('tag', self.repo.nodetags(changenode), **args):
442 yield x
446 yield x
443
447
444 if self.ui.debugflag:
448 if self.ui.debugflag:
445 files = self.repo.changes(log.parents(changenode)[0], changenode)
449 files = self.repo.changes(log.parents(changenode)[0], changenode)
446 def showfiles(**args):
450 def showfiles(**args):
447 for x in showlist('file', files[0], **args): yield x
451 for x in showlist('file', files[0], **args): yield x
448 def showadds(**args):
452 def showadds(**args):
449 for x in showlist('file_add', files[1], **args): yield x
453 for x in showlist('file_add', files[1], **args): yield x
450 def showdels(**args):
454 def showdels(**args):
451 for x in showlist('file_del', files[2], **args): yield x
455 for x in showlist('file_del', files[2], **args): yield x
452 else:
456 else:
453 def showfiles(**args):
457 def showfiles(**args):
454 for x in showlist('file', changes[3], **args): yield x
458 for x in showlist('file', changes[3], **args): yield x
455 showadds = ''
459 showadds = ''
456 showdels = ''
460 showdels = ''
457
461
458 defprops = {
462 defprops = {
459 'author': changes[1],
463 'author': changes[1],
460 'branches': showbranches,
464 'branches': showbranches,
461 'date': changes[2],
465 'date': changes[2],
462 'desc': changes[4],
466 'desc': changes[4],
463 'file_adds': showadds,
467 'file_adds': showadds,
464 'file_dels': showdels,
468 'file_dels': showdels,
465 'files': showfiles,
469 'files': showfiles,
466 'manifest': showmanifest,
470 'manifest': showmanifest,
467 'node': hex(changenode),
471 'node': hex(changenode),
468 'parents': showparents,
472 'parents': showparents,
469 'rev': rev,
473 'rev': rev,
470 'tags': showtags,
474 'tags': showtags,
471 }
475 }
472 props = props.copy()
476 props = props.copy()
473 props.update(defprops)
477 props.update(defprops)
474
478
475 try:
479 try:
476 if self.ui.debugflag and 'header_debug' in self.t:
480 if self.ui.debugflag and 'header_debug' in self.t:
477 key = 'header_debug'
481 key = 'header_debug'
478 elif self.ui.quiet and 'header_quiet' in self.t:
482 elif self.ui.quiet and 'header_quiet' in self.t:
479 key = 'header_quiet'
483 key = 'header_quiet'
480 elif self.ui.verbose and 'header_verbose' in self.t:
484 elif self.ui.verbose and 'header_verbose' in self.t:
481 key = 'header_verbose'
485 key = 'header_verbose'
482 elif 'header' in self.t:
486 elif 'header' in self.t:
483 key = 'header'
487 key = 'header'
484 else:
488 else:
485 key = ''
489 key = ''
486 if key:
490 if key:
487 self.write_header(self.t(key, **props))
491 self.write_header(self.t(key, **props))
488 if self.ui.debugflag and 'changeset_debug' in self.t:
492 if self.ui.debugflag and 'changeset_debug' in self.t:
489 key = 'changeset_debug'
493 key = 'changeset_debug'
490 elif self.ui.quiet and 'changeset_quiet' in self.t:
494 elif self.ui.quiet and 'changeset_quiet' in self.t:
491 key = 'changeset_quiet'
495 key = 'changeset_quiet'
492 elif self.ui.verbose and 'changeset_verbose' in self.t:
496 elif self.ui.verbose and 'changeset_verbose' in self.t:
493 key = 'changeset_verbose'
497 key = 'changeset_verbose'
494 else:
498 else:
495 key = 'changeset'
499 key = 'changeset'
496 self.write(self.t(key, **props))
500 self.write(self.t(key, **props))
497 except KeyError, inst:
501 except KeyError, inst:
498 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
502 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
499 inst.args[0]))
503 inst.args[0]))
500 except SyntaxError, inst:
504 except SyntaxError, inst:
501 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
505 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
502
506
503 class stringio(object):
507 class stringio(object):
504 '''wrap cStringIO for use by changeset_templater.'''
508 '''wrap cStringIO for use by changeset_templater.'''
505 def __init__(self):
509 def __init__(self):
506 self.fp = cStringIO.StringIO()
510 self.fp = cStringIO.StringIO()
507
511
508 def write(self, *args):
512 def write(self, *args):
509 for a in args:
513 for a in args:
510 self.fp.write(a)
514 self.fp.write(a)
511
515
512 write_header = write
516 write_header = write
513
517
514 def __getattr__(self, key):
518 def __getattr__(self, key):
515 return getattr(self.fp, key)
519 return getattr(self.fp, key)
@@ -1,106 +1,107 b''
1 # transaction.py - simple journalling scheme for mercurial
1 # transaction.py - simple journalling scheme for mercurial
2 #
2 #
3 # This transaction scheme is intended to gracefully handle program
3 # This transaction scheme is intended to gracefully handle program
4 # errors and interruptions. More serious failures like system crashes
4 # errors and interruptions. More serious failures like system crashes
5 # can be recovered with an fsck-like tool. As the whole repository is
5 # can be recovered with an fsck-like tool. As the whole repository is
6 # effectively log-structured, this should amount to simply truncating
6 # effectively log-structured, this should amount to simply truncating
7 # anything that isn't referenced in the changelog.
7 # anything that isn't referenced in the changelog.
8 #
8 #
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
10 #
10 #
11 # This software may be used and distributed according to the terms
11 # This software may be used and distributed according to the terms
12 # of the GNU General Public License, incorporated herein by reference.
12 # of the GNU General Public License, incorporated herein by reference.
13
13
14 import os
14 from demandload import demandload
15 from i18n import gettext as _
15 from i18n import gettext as _
16 demandload(globals(), 'os')
16
17
17 class transaction(object):
18 class transaction(object):
18 def __init__(self, report, opener, journal, after=None):
19 def __init__(self, report, opener, journal, after=None):
19 self.journal = None
20 self.journal = None
20
21
21 # abort here if the journal already exists
22 # abort here if the journal already exists
22 if os.path.exists(journal):
23 if os.path.exists(journal):
23 raise AssertionError(_("journal already exists - run hg recover"))
24 raise AssertionError(_("journal already exists - run hg recover"))
24
25
25 self.count = 1
26 self.count = 1
26 self.report = report
27 self.report = report
27 self.opener = opener
28 self.opener = opener
28 self.after = after
29 self.after = after
29 self.entries = []
30 self.entries = []
30 self.map = {}
31 self.map = {}
31 self.journal = journal
32 self.journal = journal
32
33
33 self.file = open(self.journal, "w")
34 self.file = open(self.journal, "w")
34
35
35 def __del__(self):
36 def __del__(self):
36 if self.journal:
37 if self.journal:
37 if self.entries: self.abort()
38 if self.entries: self.abort()
38 self.file.close()
39 self.file.close()
39 try: os.unlink(self.journal)
40 try: os.unlink(self.journal)
40 except: pass
41 except: pass
41
42
42 def add(self, file, offset, data=None):
43 def add(self, file, offset, data=None):
43 if file in self.map: return
44 if file in self.map: return
44 self.entries.append((file, offset, data))
45 self.entries.append((file, offset, data))
45 self.map[file] = len(self.entries) - 1
46 self.map[file] = len(self.entries) - 1
46 # add enough data to the journal to do the truncate
47 # add enough data to the journal to do the truncate
47 self.file.write("%s\0%d\n" % (file, offset))
48 self.file.write("%s\0%d\n" % (file, offset))
48 self.file.flush()
49 self.file.flush()
49
50
50 def find(self, file):
51 def find(self, file):
51 if file in self.map:
52 if file in self.map:
52 return self.entries[self.map[file]]
53 return self.entries[self.map[file]]
53 return None
54 return None
54
55
55 def replace(self, file, offset, data=None):
56 def replace(self, file, offset, data=None):
56 if file not in self.map:
57 if file not in self.map:
57 raise KeyError(file)
58 raise KeyError(file)
58 index = self.map[file]
59 index = self.map[file]
59 self.entries[index] = (file, offset, data)
60 self.entries[index] = (file, offset, data)
60 self.file.write("%s\0%d\n" % (file, offset))
61 self.file.write("%s\0%d\n" % (file, offset))
61 self.file.flush()
62 self.file.flush()
62
63
63 def nest(self):
64 def nest(self):
64 self.count += 1
65 self.count += 1
65 return self
66 return self
66
67
67 def running(self):
68 def running(self):
68 return self.count > 0
69 return self.count > 0
69
70
70 def close(self):
71 def close(self):
71 self.count -= 1
72 self.count -= 1
72 if self.count != 0:
73 if self.count != 0:
73 return
74 return
74 self.file.close()
75 self.file.close()
75 self.entries = []
76 self.entries = []
76 if self.after:
77 if self.after:
77 self.after()
78 self.after()
78 else:
79 else:
79 os.unlink(self.journal)
80 os.unlink(self.journal)
80 self.journal = None
81 self.journal = None
81
82
82 def abort(self):
83 def abort(self):
83 if not self.entries: return
84 if not self.entries: return
84
85
85 self.report(_("transaction abort!\n"))
86 self.report(_("transaction abort!\n"))
86
87
87 for f, o, ignore in self.entries:
88 for f, o, ignore in self.entries:
88 try:
89 try:
89 self.opener(f, "a").truncate(o)
90 self.opener(f, "a").truncate(o)
90 except:
91 except:
91 self.report(_("failed to truncate %s\n") % f)
92 self.report(_("failed to truncate %s\n") % f)
92
93
93 self.entries = []
94 self.entries = []
94
95
95 self.report(_("rollback completed\n"))
96 self.report(_("rollback completed\n"))
96
97
97 def rollback(opener, file):
98 def rollback(opener, file):
98 files = {}
99 files = {}
99 for l in open(file).readlines():
100 for l in open(file).readlines():
100 f, o = l.split('\0')
101 f, o = l.split('\0')
101 files[f] = o
102 files[f] = o
102 for f in files:
103 for f in files:
103 o = files[f]
104 o = files[f]
104 opener(f, "a").truncate(int(o))
105 opener(f, "a").truncate(int(o))
105 os.unlink(file)
106 os.unlink(file)
106
107
@@ -1,344 +1,343 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import ConfigParser
9 from i18n import gettext as _
8 from i18n import gettext as _
10 from demandload import *
9 from demandload import *
11 demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
10 demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
12 demandload(globals(), "templater traceback util")
11 demandload(globals(), "ConfigParser templater traceback util")
13
12
14 class ui(object):
13 class ui(object):
15 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
16 interactive=True, traceback=False, parentui=None):
15 interactive=True, traceback=False, parentui=None):
17 self.overlay = {}
16 self.overlay = {}
18 if parentui is None:
17 if parentui is None:
19 # this is the parent of all ui children
18 # this is the parent of all ui children
20 self.parentui = None
19 self.parentui = None
21 self.cdata = ConfigParser.SafeConfigParser()
20 self.cdata = ConfigParser.SafeConfigParser()
22 self.readconfig(util.rcpath())
21 self.readconfig(util.rcpath())
23
22
24 self.quiet = self.configbool("ui", "quiet")
23 self.quiet = self.configbool("ui", "quiet")
25 self.verbose = self.configbool("ui", "verbose")
24 self.verbose = self.configbool("ui", "verbose")
26 self.debugflag = self.configbool("ui", "debug")
25 self.debugflag = self.configbool("ui", "debug")
27 self.interactive = self.configbool("ui", "interactive", True)
26 self.interactive = self.configbool("ui", "interactive", True)
28 self.traceback = traceback
27 self.traceback = traceback
29
28
30 self.updateopts(verbose, debug, quiet, interactive)
29 self.updateopts(verbose, debug, quiet, interactive)
31 self.diffcache = None
30 self.diffcache = None
32 self.header = []
31 self.header = []
33 self.prev_header = []
32 self.prev_header = []
34 self.revlogopts = self.configrevlog()
33 self.revlogopts = self.configrevlog()
35 else:
34 else:
36 # parentui may point to an ui object which is already a child
35 # parentui may point to an ui object which is already a child
37 self.parentui = parentui.parentui or parentui
36 self.parentui = parentui.parentui or parentui
38 parent_cdata = self.parentui.cdata
37 parent_cdata = self.parentui.cdata
39 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
40 # make interpolation work
39 # make interpolation work
41 for section in parent_cdata.sections():
40 for section in parent_cdata.sections():
42 self.cdata.add_section(section)
41 self.cdata.add_section(section)
43 for name, value in parent_cdata.items(section, raw=True):
42 for name, value in parent_cdata.items(section, raw=True):
44 self.cdata.set(section, name, value)
43 self.cdata.set(section, name, value)
45
44
46 def __getattr__(self, key):
45 def __getattr__(self, key):
47 return getattr(self.parentui, key)
46 return getattr(self.parentui, key)
48
47
49 def updateopts(self, verbose=False, debug=False, quiet=False,
48 def updateopts(self, verbose=False, debug=False, quiet=False,
50 interactive=True, traceback=False, config=[]):
49 interactive=True, traceback=False, config=[]):
51 self.quiet = (self.quiet or quiet) and not verbose and not debug
50 self.quiet = (self.quiet or quiet) and not verbose and not debug
52 self.verbose = (self.verbose or verbose) or debug
51 self.verbose = (self.verbose or verbose) or debug
53 self.debugflag = (self.debugflag or debug)
52 self.debugflag = (self.debugflag or debug)
54 self.interactive = (self.interactive and interactive)
53 self.interactive = (self.interactive and interactive)
55 self.traceback = self.traceback or traceback
54 self.traceback = self.traceback or traceback
56 for cfg in config:
55 for cfg in config:
57 try:
56 try:
58 name, value = cfg.split('=', 1)
57 name, value = cfg.split('=', 1)
59 section, name = name.split('.', 1)
58 section, name = name.split('.', 1)
60 if not self.cdata.has_section(section):
59 if not self.cdata.has_section(section):
61 self.cdata.add_section(section)
60 self.cdata.add_section(section)
62 if not section or not name:
61 if not section or not name:
63 raise IndexError
62 raise IndexError
64 self.cdata.set(section, name, value)
63 self.cdata.set(section, name, value)
65 except (IndexError, ValueError):
64 except (IndexError, ValueError):
66 raise util.Abort(_('malformed --config option: %s') % cfg)
65 raise util.Abort(_('malformed --config option: %s') % cfg)
67
66
68 def readconfig(self, fn, root=None):
67 def readconfig(self, fn, root=None):
69 if isinstance(fn, basestring):
68 if isinstance(fn, basestring):
70 fn = [fn]
69 fn = [fn]
71 for f in fn:
70 for f in fn:
72 try:
71 try:
73 self.cdata.read(f)
72 self.cdata.read(f)
74 except ConfigParser.ParsingError, inst:
73 except ConfigParser.ParsingError, inst:
75 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
76 # translate paths relative to root (or home) into absolute paths
75 # translate paths relative to root (or home) into absolute paths
77 if root is None:
76 if root is None:
78 root = os.path.expanduser('~')
77 root = os.path.expanduser('~')
79 for name, path in self.configitems("paths"):
78 for name, path in self.configitems("paths"):
80 if path and path.find("://") == -1 and not os.path.isabs(path):
79 if path and path.find("://") == -1 and not os.path.isabs(path):
81 self.cdata.set("paths", name, os.path.join(root, path))
80 self.cdata.set("paths", name, os.path.join(root, path))
82
81
83 def setconfig(self, section, name, val):
82 def setconfig(self, section, name, val):
84 self.overlay[(section, name)] = val
83 self.overlay[(section, name)] = val
85
84
86 def config(self, section, name, default=None):
85 def config(self, section, name, default=None):
87 if self.overlay.has_key((section, name)):
86 if self.overlay.has_key((section, name)):
88 return self.overlay[(section, name)]
87 return self.overlay[(section, name)]
89 if self.cdata.has_option(section, name):
88 if self.cdata.has_option(section, name):
90 try:
89 try:
91 return self.cdata.get(section, name)
90 return self.cdata.get(section, name)
92 except ConfigParser.InterpolationError, inst:
91 except ConfigParser.InterpolationError, inst:
93 raise util.Abort(_("Error in configuration:\n%s") % inst)
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
94 if self.parentui is None:
93 if self.parentui is None:
95 return default
94 return default
96 else:
95 else:
97 return self.parentui.config(section, name, default)
96 return self.parentui.config(section, name, default)
98
97
99 def configbool(self, section, name, default=False):
98 def configbool(self, section, name, default=False):
100 if self.overlay.has_key((section, name)):
99 if self.overlay.has_key((section, name)):
101 return self.overlay[(section, name)]
100 return self.overlay[(section, name)]
102 if self.cdata.has_option(section, name):
101 if self.cdata.has_option(section, name):
103 try:
102 try:
104 return self.cdata.getboolean(section, name)
103 return self.cdata.getboolean(section, name)
105 except ConfigParser.InterpolationError, inst:
104 except ConfigParser.InterpolationError, inst:
106 raise util.Abort(_("Error in configuration:\n%s") % inst)
105 raise util.Abort(_("Error in configuration:\n%s") % inst)
107 if self.parentui is None:
106 if self.parentui is None:
108 return default
107 return default
109 else:
108 else:
110 return self.parentui.configbool(section, name, default)
109 return self.parentui.configbool(section, name, default)
111
110
112 def has_config(self, section):
111 def has_config(self, section):
113 '''tell whether section exists in config.'''
112 '''tell whether section exists in config.'''
114 return self.cdata.has_section(section)
113 return self.cdata.has_section(section)
115
114
116 def configitems(self, section):
115 def configitems(self, section):
117 items = {}
116 items = {}
118 if self.parentui is not None:
117 if self.parentui is not None:
119 items = dict(self.parentui.configitems(section))
118 items = dict(self.parentui.configitems(section))
120 if self.cdata.has_section(section):
119 if self.cdata.has_section(section):
121 try:
120 try:
122 items.update(dict(self.cdata.items(section)))
121 items.update(dict(self.cdata.items(section)))
123 except ConfigParser.InterpolationError, inst:
122 except ConfigParser.InterpolationError, inst:
124 raise util.Abort(_("Error in configuration:\n%s") % inst)
123 raise util.Abort(_("Error in configuration:\n%s") % inst)
125 x = items.items()
124 x = items.items()
126 x.sort()
125 x.sort()
127 return x
126 return x
128
127
129 def walkconfig(self, seen=None):
128 def walkconfig(self, seen=None):
130 if seen is None:
129 if seen is None:
131 seen = {}
130 seen = {}
132 for (section, name), value in self.overlay.iteritems():
131 for (section, name), value in self.overlay.iteritems():
133 yield section, name, value
132 yield section, name, value
134 seen[section, name] = 1
133 seen[section, name] = 1
135 for section in self.cdata.sections():
134 for section in self.cdata.sections():
136 for name, value in self.cdata.items(section):
135 for name, value in self.cdata.items(section):
137 if (section, name) in seen: continue
136 if (section, name) in seen: continue
138 yield section, name, value.replace('\n', '\\n')
137 yield section, name, value.replace('\n', '\\n')
139 seen[section, name] = 1
138 seen[section, name] = 1
140 if self.parentui is not None:
139 if self.parentui is not None:
141 for parent in self.parentui.walkconfig(seen):
140 for parent in self.parentui.walkconfig(seen):
142 yield parent
141 yield parent
143
142
144 def extensions(self):
143 def extensions(self):
145 result = self.configitems("extensions")
144 result = self.configitems("extensions")
146 for i, (key, value) in enumerate(result):
145 for i, (key, value) in enumerate(result):
147 if value:
146 if value:
148 result[i] = (key, os.path.expanduser(value))
147 result[i] = (key, os.path.expanduser(value))
149 return result
148 return result
150
149
151 def hgignorefiles(self):
150 def hgignorefiles(self):
152 result = []
151 result = []
153 for key, value in self.configitems("ui"):
152 for key, value in self.configitems("ui"):
154 if key == 'ignore' or key.startswith('ignore.'):
153 if key == 'ignore' or key.startswith('ignore.'):
155 result.append(os.path.expanduser(value))
154 result.append(os.path.expanduser(value))
156 return result
155 return result
157
156
158 def configrevlog(self):
157 def configrevlog(self):
159 result = {}
158 result = {}
160 for key, value in self.configitems("revlog"):
159 for key, value in self.configitems("revlog"):
161 result[key.lower()] = value
160 result[key.lower()] = value
162 return result
161 return result
163
162
164 def diffopts(self):
163 def diffopts(self):
165 if self.diffcache:
164 if self.diffcache:
166 return self.diffcache
165 return self.diffcache
167 result = {'showfunc': True, 'ignorews': False}
166 result = {'showfunc': True, 'ignorews': False}
168 for key, value in self.configitems("diff"):
167 for key, value in self.configitems("diff"):
169 if value:
168 if value:
170 result[key.lower()] = (value.lower() == 'true')
169 result[key.lower()] = (value.lower() == 'true')
171 self.diffcache = result
170 self.diffcache = result
172 return result
171 return result
173
172
174 def username(self):
173 def username(self):
175 """Return default username to be used in commits.
174 """Return default username to be used in commits.
176
175
177 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
176 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
178 and stop searching if one of these is set.
177 and stop searching if one of these is set.
179 Abort if found username is an empty string to force specifying
178 Abort if found username is an empty string to force specifying
180 the commit user elsewhere, e.g. with line option or repo hgrc.
179 the commit user elsewhere, e.g. with line option or repo hgrc.
181 If not found, use ($LOGNAME or $USER or $LNAME or
180 If not found, use ($LOGNAME or $USER or $LNAME or
182 $USERNAME) +"@full.hostname".
181 $USERNAME) +"@full.hostname".
183 """
182 """
184 user = os.environ.get("HGUSER")
183 user = os.environ.get("HGUSER")
185 if user is None:
184 if user is None:
186 user = self.config("ui", "username")
185 user = self.config("ui", "username")
187 if user is None:
186 if user is None:
188 user = os.environ.get("EMAIL")
187 user = os.environ.get("EMAIL")
189 if user is None:
188 if user is None:
190 try:
189 try:
191 user = '%s@%s' % (getpass.getuser(), socket.getfqdn())
190 user = '%s@%s' % (getpass.getuser(), socket.getfqdn())
192 except KeyError:
191 except KeyError:
193 raise util.Abort(_("Please specify a username."))
192 raise util.Abort(_("Please specify a username."))
194 return user
193 return user
195
194
196 def shortuser(self, user):
195 def shortuser(self, user):
197 """Return a short representation of a user name or email address."""
196 """Return a short representation of a user name or email address."""
198 if not self.verbose: user = util.shortuser(user)
197 if not self.verbose: user = util.shortuser(user)
199 return user
198 return user
200
199
201 def expandpath(self, loc):
200 def expandpath(self, loc):
202 """Return repository location relative to cwd or from [paths]"""
201 """Return repository location relative to cwd or from [paths]"""
203 if loc.find("://") != -1 or os.path.exists(loc):
202 if loc.find("://") != -1 or os.path.exists(loc):
204 return loc
203 return loc
205
204
206 return self.config("paths", loc, loc)
205 return self.config("paths", loc, loc)
207
206
208 def write(self, *args):
207 def write(self, *args):
209 if self.header:
208 if self.header:
210 if self.header != self.prev_header:
209 if self.header != self.prev_header:
211 self.prev_header = self.header
210 self.prev_header = self.header
212 self.write(*self.header)
211 self.write(*self.header)
213 self.header = []
212 self.header = []
214 for a in args:
213 for a in args:
215 sys.stdout.write(str(a))
214 sys.stdout.write(str(a))
216
215
217 def write_header(self, *args):
216 def write_header(self, *args):
218 for a in args:
217 for a in args:
219 self.header.append(str(a))
218 self.header.append(str(a))
220
219
221 def write_err(self, *args):
220 def write_err(self, *args):
222 try:
221 try:
223 if not sys.stdout.closed: sys.stdout.flush()
222 if not sys.stdout.closed: sys.stdout.flush()
224 for a in args:
223 for a in args:
225 sys.stderr.write(str(a))
224 sys.stderr.write(str(a))
226 except IOError, inst:
225 except IOError, inst:
227 if inst.errno != errno.EPIPE:
226 if inst.errno != errno.EPIPE:
228 raise
227 raise
229
228
230 def flush(self):
229 def flush(self):
231 try: sys.stdout.flush()
230 try: sys.stdout.flush()
232 except: pass
231 except: pass
233 try: sys.stderr.flush()
232 try: sys.stderr.flush()
234 except: pass
233 except: pass
235
234
236 def readline(self):
235 def readline(self):
237 return sys.stdin.readline()[:-1]
236 return sys.stdin.readline()[:-1]
238 def prompt(self, msg, pat=None, default="y"):
237 def prompt(self, msg, pat=None, default="y"):
239 if not self.interactive: return default
238 if not self.interactive: return default
240 while 1:
239 while 1:
241 self.write(msg, " ")
240 self.write(msg, " ")
242 r = self.readline()
241 r = self.readline()
243 if not pat or re.match(pat, r):
242 if not pat or re.match(pat, r):
244 return r
243 return r
245 else:
244 else:
246 self.write(_("unrecognized response\n"))
245 self.write(_("unrecognized response\n"))
247 def getpass(self, prompt=None, default=None):
246 def getpass(self, prompt=None, default=None):
248 if not self.interactive: return default
247 if not self.interactive: return default
249 return getpass.getpass(prompt or _('password: '))
248 return getpass.getpass(prompt or _('password: '))
250 def status(self, *msg):
249 def status(self, *msg):
251 if not self.quiet: self.write(*msg)
250 if not self.quiet: self.write(*msg)
252 def warn(self, *msg):
251 def warn(self, *msg):
253 self.write_err(*msg)
252 self.write_err(*msg)
254 def note(self, *msg):
253 def note(self, *msg):
255 if self.verbose: self.write(*msg)
254 if self.verbose: self.write(*msg)
256 def debug(self, *msg):
255 def debug(self, *msg):
257 if self.debugflag: self.write(*msg)
256 if self.debugflag: self.write(*msg)
258 def edit(self, text, user):
257 def edit(self, text, user):
259 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
258 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
260 text=True)
259 text=True)
261 try:
260 try:
262 f = os.fdopen(fd, "w")
261 f = os.fdopen(fd, "w")
263 f.write(text)
262 f.write(text)
264 f.close()
263 f.close()
265
264
266 editor = (os.environ.get("HGEDITOR") or
265 editor = (os.environ.get("HGEDITOR") or
267 self.config("ui", "editor") or
266 self.config("ui", "editor") or
268 os.environ.get("EDITOR", "vi"))
267 os.environ.get("EDITOR", "vi"))
269
268
270 util.system("%s \"%s\"" % (editor, name),
269 util.system("%s \"%s\"" % (editor, name),
271 environ={'HGUSER': user},
270 environ={'HGUSER': user},
272 onerr=util.Abort, errprefix=_("edit failed"))
271 onerr=util.Abort, errprefix=_("edit failed"))
273
272
274 f = open(name)
273 f = open(name)
275 t = f.read()
274 t = f.read()
276 f.close()
275 f.close()
277 t = re.sub("(?m)^HG:.*\n", "", t)
276 t = re.sub("(?m)^HG:.*\n", "", t)
278 finally:
277 finally:
279 os.unlink(name)
278 os.unlink(name)
280
279
281 return t
280 return t
282
281
283 def sendmail(self):
282 def sendmail(self):
284 '''send mail message. object returned has one method, sendmail.
283 '''send mail message. object returned has one method, sendmail.
285 call as sendmail(sender, list-of-recipients, msg).'''
284 call as sendmail(sender, list-of-recipients, msg).'''
286
285
287 def smtp():
286 def smtp():
288 '''send mail using smtp.'''
287 '''send mail using smtp.'''
289
288
290 s = smtplib.SMTP()
289 s = smtplib.SMTP()
291 mailhost = self.config('smtp', 'host')
290 mailhost = self.config('smtp', 'host')
292 if not mailhost:
291 if not mailhost:
293 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
292 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
294 mailport = int(self.config('smtp', 'port', 25))
293 mailport = int(self.config('smtp', 'port', 25))
295 self.note(_('sending mail: smtp host %s, port %s\n') %
294 self.note(_('sending mail: smtp host %s, port %s\n') %
296 (mailhost, mailport))
295 (mailhost, mailport))
297 s.connect(host=mailhost, port=mailport)
296 s.connect(host=mailhost, port=mailport)
298 if self.configbool('smtp', 'tls'):
297 if self.configbool('smtp', 'tls'):
299 self.note(_('(using tls)\n'))
298 self.note(_('(using tls)\n'))
300 s.ehlo()
299 s.ehlo()
301 s.starttls()
300 s.starttls()
302 s.ehlo()
301 s.ehlo()
303 username = self.config('smtp', 'username')
302 username = self.config('smtp', 'username')
304 password = self.config('smtp', 'password')
303 password = self.config('smtp', 'password')
305 if username and password:
304 if username and password:
306 self.note(_('(authenticating to mail server as %s)\n') %
305 self.note(_('(authenticating to mail server as %s)\n') %
307 (username))
306 (username))
308 s.login(username, password)
307 s.login(username, password)
309 return s
308 return s
310
309
311 class sendmail(object):
310 class sendmail(object):
312 '''send mail using sendmail.'''
311 '''send mail using sendmail.'''
313
312
314 def __init__(self, ui, program):
313 def __init__(self, ui, program):
315 self.ui = ui
314 self.ui = ui
316 self.program = program
315 self.program = program
317
316
318 def sendmail(self, sender, recipients, msg):
317 def sendmail(self, sender, recipients, msg):
319 cmdline = '%s -f %s %s' % (
318 cmdline = '%s -f %s %s' % (
320 self.program, templater.email(sender),
319 self.program, templater.email(sender),
321 ' '.join(map(templater.email, recipients)))
320 ' '.join(map(templater.email, recipients)))
322 self.ui.note(_('sending mail: %s\n') % cmdline)
321 self.ui.note(_('sending mail: %s\n') % cmdline)
323 fp = os.popen(cmdline, 'w')
322 fp = os.popen(cmdline, 'w')
324 fp.write(msg)
323 fp.write(msg)
325 ret = fp.close()
324 ret = fp.close()
326 if ret:
325 if ret:
327 raise util.Abort('%s %s' % (
326 raise util.Abort('%s %s' % (
328 os.path.basename(self.program.split(None, 1)[0]),
327 os.path.basename(self.program.split(None, 1)[0]),
329 util.explain_exit(ret)[0]))
328 util.explain_exit(ret)[0]))
330
329
331 method = self.config('email', 'method', 'smtp')
330 method = self.config('email', 'method', 'smtp')
332 if method == 'smtp':
331 if method == 'smtp':
333 mail = smtp()
332 mail = smtp()
334 else:
333 else:
335 mail = sendmail(self, method)
334 mail = sendmail(self, method)
336 return mail
335 return mail
337
336
338 def print_exc(self):
337 def print_exc(self):
339 '''print exception traceback if traceback printing enabled.
338 '''print exception traceback if traceback printing enabled.
340 only to call in exception handler. returns true if traceback
339 only to call in exception handler. returns true if traceback
341 printed.'''
340 printed.'''
342 if self.traceback:
341 if self.traceback:
343 traceback.print_exc()
342 traceback.print_exc()
344 return self.traceback
343 return self.traceback
@@ -1,906 +1,905 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specfic implementations
2 util.py - Mercurial utility functions and platform specfic implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
14 from i18n import gettext as _
13 from i18n import gettext as _
15 from demandload import *
14 from demandload import *
16 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
15 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
17 demandload(globals(), "threading time")
16 demandload(globals(), "os threading time")
18
17
19 class SignalInterrupt(Exception):
18 class SignalInterrupt(Exception):
20 """Exception raised on SIGTERM and SIGHUP."""
19 """Exception raised on SIGTERM and SIGHUP."""
21
20
22 def pipefilter(s, cmd):
21 def pipefilter(s, cmd):
23 '''filter string S through command CMD, returning its output'''
22 '''filter string S through command CMD, returning its output'''
24 (pout, pin) = popen2.popen2(cmd, -1, 'b')
23 (pout, pin) = popen2.popen2(cmd, -1, 'b')
25 def writer():
24 def writer():
26 try:
25 try:
27 pin.write(s)
26 pin.write(s)
28 pin.close()
27 pin.close()
29 except IOError, inst:
28 except IOError, inst:
30 if inst.errno != errno.EPIPE:
29 if inst.errno != errno.EPIPE:
31 raise
30 raise
32
31
33 # we should use select instead on UNIX, but this will work on most
32 # we should use select instead on UNIX, but this will work on most
34 # systems, including Windows
33 # systems, including Windows
35 w = threading.Thread(target=writer)
34 w = threading.Thread(target=writer)
36 w.start()
35 w.start()
37 f = pout.read()
36 f = pout.read()
38 pout.close()
37 pout.close()
39 w.join()
38 w.join()
40 return f
39 return f
41
40
42 def tempfilter(s, cmd):
41 def tempfilter(s, cmd):
43 '''filter string S through a pair of temporary files with CMD.
42 '''filter string S through a pair of temporary files with CMD.
44 CMD is used as a template to create the real command to be run,
43 CMD is used as a template to create the real command to be run,
45 with the strings INFILE and OUTFILE replaced by the real names of
44 with the strings INFILE and OUTFILE replaced by the real names of
46 the temporary files generated.'''
45 the temporary files generated.'''
47 inname, outname = None, None
46 inname, outname = None, None
48 try:
47 try:
49 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
48 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
50 fp = os.fdopen(infd, 'wb')
49 fp = os.fdopen(infd, 'wb')
51 fp.write(s)
50 fp.write(s)
52 fp.close()
51 fp.close()
53 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
52 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
54 os.close(outfd)
53 os.close(outfd)
55 cmd = cmd.replace('INFILE', inname)
54 cmd = cmd.replace('INFILE', inname)
56 cmd = cmd.replace('OUTFILE', outname)
55 cmd = cmd.replace('OUTFILE', outname)
57 code = os.system(cmd)
56 code = os.system(cmd)
58 if code: raise Abort(_("command '%s' failed: %s") %
57 if code: raise Abort(_("command '%s' failed: %s") %
59 (cmd, explain_exit(code)))
58 (cmd, explain_exit(code)))
60 return open(outname, 'rb').read()
59 return open(outname, 'rb').read()
61 finally:
60 finally:
62 try:
61 try:
63 if inname: os.unlink(inname)
62 if inname: os.unlink(inname)
64 except: pass
63 except: pass
65 try:
64 try:
66 if outname: os.unlink(outname)
65 if outname: os.unlink(outname)
67 except: pass
66 except: pass
68
67
69 filtertable = {
68 filtertable = {
70 'tempfile:': tempfilter,
69 'tempfile:': tempfilter,
71 'pipe:': pipefilter,
70 'pipe:': pipefilter,
72 }
71 }
73
72
74 def filter(s, cmd):
73 def filter(s, cmd):
75 "filter a string through a command that transforms its input to its output"
74 "filter a string through a command that transforms its input to its output"
76 for name, fn in filtertable.iteritems():
75 for name, fn in filtertable.iteritems():
77 if cmd.startswith(name):
76 if cmd.startswith(name):
78 return fn(s, cmd[len(name):].lstrip())
77 return fn(s, cmd[len(name):].lstrip())
79 return pipefilter(s, cmd)
78 return pipefilter(s, cmd)
80
79
81 def find_in_path(name, path, default=None):
80 def find_in_path(name, path, default=None):
82 '''find name in search path. path can be string (will be split
81 '''find name in search path. path can be string (will be split
83 with os.pathsep), or iterable thing that returns strings. if name
82 with os.pathsep), or iterable thing that returns strings. if name
84 found, return path to name. else return default.'''
83 found, return path to name. else return default.'''
85 if isinstance(path, str):
84 if isinstance(path, str):
86 path = path.split(os.pathsep)
85 path = path.split(os.pathsep)
87 for p in path:
86 for p in path:
88 p_name = os.path.join(p, name)
87 p_name = os.path.join(p, name)
89 if os.path.exists(p_name):
88 if os.path.exists(p_name):
90 return p_name
89 return p_name
91 return default
90 return default
92
91
93 def patch(strip, patchname, ui):
92 def patch(strip, patchname, ui):
94 """apply the patch <patchname> to the working directory.
93 """apply the patch <patchname> to the working directory.
95 a list of patched files is returned"""
94 a list of patched files is returned"""
96 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
95 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
97 fp = os.popen('%s -p%d < "%s"' % (patcher, strip, patchname))
96 fp = os.popen('%s -p%d < "%s"' % (patcher, strip, patchname))
98 files = {}
97 files = {}
99 for line in fp:
98 for line in fp:
100 line = line.rstrip()
99 line = line.rstrip()
101 ui.status("%s\n" % line)
100 ui.status("%s\n" % line)
102 if line.startswith('patching file '):
101 if line.startswith('patching file '):
103 pf = parse_patch_output(line)
102 pf = parse_patch_output(line)
104 files.setdefault(pf, 1)
103 files.setdefault(pf, 1)
105 code = fp.close()
104 code = fp.close()
106 if code:
105 if code:
107 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
106 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
108 return files.keys()
107 return files.keys()
109
108
110 def binary(s):
109 def binary(s):
111 """return true if a string is binary data using diff's heuristic"""
110 """return true if a string is binary data using diff's heuristic"""
112 if s and '\0' in s[:4096]:
111 if s and '\0' in s[:4096]:
113 return True
112 return True
114 return False
113 return False
115
114
116 def unique(g):
115 def unique(g):
117 """return the uniq elements of iterable g"""
116 """return the uniq elements of iterable g"""
118 seen = {}
117 seen = {}
119 for f in g:
118 for f in g:
120 if f not in seen:
119 if f not in seen:
121 seen[f] = 1
120 seen[f] = 1
122 yield f
121 yield f
123
122
124 class Abort(Exception):
123 class Abort(Exception):
125 """Raised if a command needs to print an error and exit."""
124 """Raised if a command needs to print an error and exit."""
126
125
127 def always(fn): return True
126 def always(fn): return True
128 def never(fn): return False
127 def never(fn): return False
129
128
130 def patkind(name, dflt_pat='glob'):
129 def patkind(name, dflt_pat='glob'):
131 """Split a string into an optional pattern kind prefix and the
130 """Split a string into an optional pattern kind prefix and the
132 actual pattern."""
131 actual pattern."""
133 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
132 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
134 if name.startswith(prefix + ':'): return name.split(':', 1)
133 if name.startswith(prefix + ':'): return name.split(':', 1)
135 return dflt_pat, name
134 return dflt_pat, name
136
135
def globre(pat, head='^', tail='$'):
    """convert a glob pattern into a regexp

    understood syntax (one branch per metacharacter below):
      *      any run of characters, not crossing '/'
      **     any run of characters, including '/'
      ?      any single character
      [...]  character class; leading '!' negates it
      {a,b}  alternation
      \\c     literal character c
    head and tail are glued onto the result (defaults anchor the match).
    """
    i, n = 0, len(pat)
    res = ''
    group = False
    # look at the next character without consuming it (False at end)
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' crosses directory boundaries
                i += 1
                res += '.*'
            else:
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # scan ahead for the closing ']'; a '!' or ']' directly after
            # the '[' belongs to the class and does not terminate it
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat the '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # shell-style negation -> regexp negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # a literal leading '^' must not negate the class
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the following character, if there is one
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            # everything else matches itself
            res += re.escape(c)
    return head + res + tail
188
187
189 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
188 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
190
189
def pathto(n1, n2):
    '''return the relative path from one place to another.
    this returns a path in the form used by the local filesystem, not hg.'''
    if not n1: return localpath(n2)
    src, dst = n1.split('/'), n2.split('/')
    # count the components both paths share at the front
    common = 0
    while (common < len(src) and common < len(dst)
           and src[common] == dst[common]):
        common += 1
    # climb out of what is left of src, then descend into dst
    return os.sep.join(['..'] * (len(src) - common) + dst[common:])
203
202
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root

    the result is relative to root, in hg's internal '/'-separated
    form.  raises Abort if myname lies outside the root hierarchy.
    """
    # compute the root prefix, guaranteed to end with exactly one os.sep
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        # interpret relative names with respect to cwd inside the root
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is textually inside the root; strip the prefix
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # reached the top without finding root
                break
            name = dirname

        raise Abort('%s not under root' % myname)
248
247
def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    """build a match function, treating unprefixed patterns as globs.

    canonroot - canonical root of the tree to match against
    cwd - the current working directory, if relevant
    names - patterns to find (default: everything)
    inc, exc - patterns to include / exclude
    head - regex prepended to patterns to control rooting
    src - where the patterns came from, for error messages
    """
    # note: the [] / ['.'] defaults are shared between calls, but they are
    # only ever read here and in _matcher, never mutated, so this is safe
    return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
251
250
def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    """build a match function for command-line supplied patterns.

    identical to matcher(), except that unprefixed patterns default to
    'relpath' (plain file names) on POSIX, where the shell has already
    expanded globs, and to 'glob' on windows, where it has not.
    """
    # note: the [] / ['.'] defaults are shared between calls, but they are
    # only ever read here and in _matcher, never mutated, so this is safe
    if os.name == 'nt':
        dflt_pat = 'glob'
    else:
        dflt_pat = 'relpath'
    return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
258
257
def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    head - a regex to prepend to patterns to control whether a match is rooted
    dflt_pat - pattern kind assumed for names without an explicit prefix
    src - where the patterns came from, used in error messages

    a pattern is one of:
    'glob:<rooted glob>'
    're:<rooted regexp>'
    'path:<rooted path>'
    'relglob:<relative glob>'
    'relpath:<relative path>'
    'relre:<relative regexp>'
    '<rooted path or regexp>'

    returns:
    a 3-tuple containing
    - list of explicit non-pattern names passed in
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in

    todo:
    make head regex a rooted bool
    """

    def contains_glob(name):
        # true if any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if kind == 're':
            return name
        elif kind == 'path':
            # match the path itself or anything beneath it
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            # glob allowed to match at any directory depth
            return head + globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return head + re.escape(name) + tail
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return head + globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        # returns None when pats is empty; callers fall back to `always'
        if not pats:
            return
        matches = []
        for k, p in pats:
            try:
                pat = '(?:%s)' % regex(k, p, tail)
                matches.append(re.compile(pat).match)
            except re.error:
                if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
                else: raise Abort("invalid pattern (%s): %s" % (k, p))

        def buildfn(text):
            # return the first successful match object, else None
            for m in matches:
                r = m(text)
                if r:
                    return r

        return buildfn

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split(os.sep):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root)

    # sort the names into plain files (files), real patterns (pats) and
    # the directory roots they are anchored at (roots)
    pats = []
    files = []
    roots = []
    for kind, name in [patkind(p, dflt_pat) for p in names]:
        if kind in ('glob', 'relpath'):
            name = canonpath(canonroot, cwd, name)
            if name == '':
                # the root itself: match everything
                kind, name = 'glob', '**'
        if kind in ('glob', 'path', 're'):
            pats.append((kind, name))
            if kind == 'glob':
                root = globprefix(name)
                if root: roots.append(root)
        elif kind == 'relpath':
            files.append((kind, name))
            roots.append(name)

    patmatch = matchfn(pats, '$') or always
    filematch = matchfn(files, '(?:/|$)') or always
    incmatch = always
    if inc:
        incmatch = matchfn(map(patkind, inc), '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        excmatch = matchfn(map(patkind, exc), '(?:/|$)')

    # a name matches if it passes the include/exclude filters and either
    # is a directory (trailing '/'), no patterns were given at all, or
    # one of the pattern/file matchers accepts it
    return (roots,
            lambda fn: (incmatch(fn) and not excmatch(fn) and
                        (fn.endswith('/') or
                         (not pats and not files) or
                         (pats and patmatch(fn)) or
                         (files and filematch(fn)))),
            (inc or exc or (pats and pats != [('glob', '**')])) and True)
372
371
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status.  if ui object,
    print error message and return status, else raise onerr object as
    exception.

    environ - extra environment variables for the command, restored
    afterwards (the shared {} default is safe: it is only read)
    cwd - directory to run the command in, restored afterwards
    errprefix - prepended to the error message on failure'''
    # remember the previous value of every variable we are about to touch
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    try:
        for k, v in environ.iteritems():
            os.environ[k] = str(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # duck-typing: a ui-like object has warn(); anything else
                # is assumed to be an exception class to raise
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        # restore environment and working directory even on failure
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
409
408
def rename(src, dst):
    """forcibly rename a file, replacing dst if it already exists"""
    try:
        os.rename(src, dst)
    except OSError, err:
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
427
426
def unlink(f):
    """unlink and remove the directory if it is empty"""
    parent = os.path.dirname(f)
    os.unlink(f)
    # prune any parent directories that have just become empty
    try:
        os.removedirs(parent)
    except OSError:
        pass
436
435
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # hardlinks only work within a single filesystem
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for entry in os.listdir(src):
            copyfiles(os.path.join(src, entry),
                      os.path.join(dst, entry), hardlink)
    elif hardlink:
        try:
            os_link(src, dst)
        except (IOError, OSError):
            # linking failed: fall back to a plain copy
            hardlink = False
            shutil.copy(src, dst)
    else:
        shutil.copy(src, dst)
459
458
def audit_path(path):
    """Abort if path contains dangerous components"""
    parts = os.path.normcase(path).split(os.sep)
    first = parts[0]
    drive = os.path.splitdrive(path)[0]
    # reject absolute/driveletter paths, '.hg' internals and '..' escapes
    if drive or first in ('.hg', '') or os.pardir in parts:
        raise Abort(_("path contains illegal component: %s\n") % path)
466
465
467 def _makelock_file(info, pathname):
466 def _makelock_file(info, pathname):
468 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
467 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
469 os.write(ld, info)
468 os.write(ld, info)
470 os.close(ld)
469 os.close(ld)
471
470
def _readlock_file(pathname):
    # lock info was stored as the file's contents; return it verbatim
    return posixfile(pathname).read()
474
473
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    # lstat: report the link count of the entry itself, not a symlink target
    st = os.lstat(pathname)
    return st.st_nlink
478
477
# os_link: hardlink creation, or a stub that fails where unsupported
try:
    os_link = os.link
except AttributeError:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
484
483
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor: fall back to stat'ing by name
        return os.stat(fp.name)
    return os.fstat(fd)
491
490
# default to the builtin file type; the win32 code below replaces this
# with posixfile_nt from util_win32 when that module is available
posixfile = file
493
492
def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # NOTE(review): platform id 1 presumably marks the 9x kernel line
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-windows pythons lack getwindowsversion; sniff COMSPEC instead
        comspec = os.environ.get('comspec', '')
        return os.name == 'nt' and 'command' in comspec
500
499
501 # Platform specific variants
500 # Platform specific variants
# select platform-specific implementations of path, lock, exec-bit and
# process helpers at import time
if os.name == 'nt':
    demandload(globals(), "msvcrt")
    nulldev = 'NUL:'

    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, key):
            # delegate everything else to the wrapped file
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                return self.fp.write(s)
            except IOError, inst:
                # NOTE(review): a write failing with errno 0 is treated as
                # a closed pipe -- presumably a windows pipe quirk; confirm
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

    sys.stdout = winstdout(sys.stdout)

    def system_rcpath():
        try:
            return system_rcpath_win32()
        except:
            # fall back to a fixed location if the win32 helper is missing
            return [r'c:\mercurial\mercurial.ini']

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = system_rcpath()
        path.append(user_rcpath())
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
        return path

    def user_rcpath():
        '''return os-specific hgrc search path to the user dir'''
        return os.path.join(os.path.expanduser('~'), 'mercurial.ini')

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf

    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        return True

    def is_exec(f, last):
        # windows has no executable bit; trust the caller's previous value
        return last

    def set_exec(f, mode):
        # no-op: windows has no executable bit
        pass

    def set_binary(fd):
        msvcrt.setmode(fd.fileno(), os.O_BINARY)

    def pconvert(path):
        # convert to hg's internal '/'-separated form
        return path.replace("\\", "/")

    def localpath(path):
        # convert to the local '\\'-separated form
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    # no symlinks: always use plain lock files
    makelock = _makelock_file
    readlock = _readlock_file

    def samestat(s1, s2):
        # windows stat provides no usable dev/inode identity
        return False

    def explain_exit(code):
        return _("exited with status %d") % code, code

    try:
        # override functions with win32 versions if possible
        from util_win32 import *
        if not is_win_9x():
            posixfile = posixfile_nt
    except ImportError:
        pass

else:
    nulldev = '/dev/null'

    def rcfiles(path):
        # hgrc plus every *.rc file in an hgrc.d subdirectory
        rcs = [os.path.join(path, 'hgrc')]
        rcdir = os.path.join(path, 'hgrc.d')
        try:
            rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
                        if f.endswith(".rc")])
        except OSError, inst: pass
        return rcs

    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = []
        # old mod_python does not set sys.argv
        if len(getattr(sys, 'argv', [])) > 0:
            path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
                                '/../etc/mercurial'))
        path.extend(rcfiles('/etc/mercurial'))
        path.append(os.path.expanduser('~/.hgrc'))
        path = [os.path.normpath(f) for f in path]
        return path

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
            pf = pf[1:-1] # Remove the quotes
        return pf

    def is_exec(f, last):
        """check whether a file is executable"""
        return (os.lstat(f).st_mode & 0100 != 0)

    def set_exec(f, mode):
        s = os.lstat(f).st_mode
        if (s & 0100 != 0) == mode:
            # already in the requested state
            return
        if mode:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            umask = os.umask(0)
            os.umask(umask)
            os.chmod(f, s | (s & 0444) >> 2 & ~umask)
        else:
            os.chmod(f, s & 0666)

    def set_binary(fd):
        # text/binary distinction only matters on windows
        pass

    def pconvert(path):
        # posix paths are already in hg's internal form
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath
    samestat = os.path.samestat

    def makelock(info, pathname):
        # try a symlink with info as its target; re-raise if the lock
        # already exists (EEXIST), otherwise assume symlinks are
        # unsupported here and fall back to a plain lock file
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                raise
            else:
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno == errno.EINVAL:
                # not a symlink: must be a plain lock file
                return _readlock_file(pathname)
            else:
                raise

    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        try:
            # signal 0 only checks for existence/permission
            os.kill(pid, 0)
            return True
        except OSError, inst:
            return inst.errno != errno.ESRCH

    def explain_exit(code):
        """return a 2-tuple (desc, code) describing a process's status"""
        if os.WIFEXITED(code):
            val = os.WEXITSTATUS(code)
            return _("exited with status %d") % val, val
        elif os.WIFSIGNALED(code):
            val = os.WTERMSIG(code)
            return _("killed by signal %d") % val, val
        elif os.WIFSTOPPED(code):
            val = os.WSTOPSIG(code)
            return _("stopped by signal %d") % val, val
        raise ValueError(_("invalid exit code"))
693
692
def opener(base, audit=True):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.

    base - directory all paths are resolved against
    audit - if true, reject paths with dangerous components
    """
    p = base
    audit_p = audit

    def mktempcopy(name):
        # make a private copy of name in the same directory, preserving
        # its permission bits; used to break up hardlinks before writing
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
        os.close(fd)
        ofp = posixfile(temp, "wb")
        try:
            try:
                ifp = posixfile(name, "rb")
            except IOError, inst:
                # make the error message name the file we failed to read
                if not getattr(inst, 'filename', None):
                    inst.filename = name
                raise
            for chunk in filechunkiter(ifp):
                ofp.write(chunk)
            ifp.close()
            ofp.close()
        except:
            # don't leave a half-written temp file behind
            try: os.unlink(temp)
            except: pass
            raise
        st = os.lstat(name)
        os.chmod(temp, st.st_mode)
        return temp

    class atomictempfile(posixfile):
        """the file will only be copied when rename is called"""
        def __init__(self, name, mode):
            self.__name = name
            self.temp = mktempcopy(name)
            posixfile.__init__(self, self.temp, mode)
        def rename(self):
            # move the finished temp copy into place
            if not self.closed:
                posixfile.close(self)
                rename(self.temp, localpath(self.__name))
        def __del__(self):
            # discard the temp copy if rename was never called
            if not self.closed:
                try:
                    os.unlink(self.temp)
                except: pass
                posixfile.close(self)

    class atomicfile(atomictempfile):
        """the file will only be copied on close"""
        def __init__(self, name, mode):
            atomictempfile.__init__(self, name, mode)
        def close(self):
            self.rename()
        def __del__(self):
            self.rename()

    def o(path, mode="r", text=False, atomic=False, atomictemp=False):
        if audit_p:
            audit_path(path)
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                # file does not exist yet: make sure its directory does
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if atomic:
                    return atomicfile(f, mode)
                elif atomictemp:
                    return atomictempfile(f, mode)
                if nlink > 1:
                    # breaking up the hardlink gives copy-on-write behaviour
                    rename(mktempcopy(f), f)
        return posixfile(f, mode)

    return o
779
778
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""
    # NOTE: Python 2 only — relies on the cStringIO module and the
    # buffer() builtin, neither of which exists in Python 3.

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        # unread data already pulled from the iterator
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        # set once the underlying iterator has been exhausted
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            # accumulate in a cStringIO to avoid quadratic str +=
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            # fewer bytes than requested means the iterator ran dry
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() keeps the unread tail without copying it
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
824
823
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        if not nbytes:
            # limit exhausted: stop without touching the file again
            break
        data = f.read(nbytes)
        if not data:
            # end of file (or socket closed)
            break
        if limit:
            limit -= len(data)
        yield data
841
840
def makedate():
    """return the current local time as a (unixtime, tzoffset) pair.

    tzoffset is taken from time.altzone when DST is in effect for the
    current moment (struct_time index 8 is tm_isdst), otherwise from
    time.timezone."""
    now = time.localtime()
    tz = time.altzone if now[8] == 1 and time.daylight else time.timezone
    return time.mktime(now), tz
849
848
def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    when, tz = date or makedate()
    # shift the UTC instant by the offset and format it as if local
    s = time.strftime(format, time.gmtime(float(when) - tz))
    if not timezone:
        return s
    # offset rendered as " +HHMM" / " -HHMM"
    return s + " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
860
859
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop everything from the first '@' on (the mail domain)
    user = user.split('@', 1)[0]
    # drop everything up to and including the first '<' (the real name)
    if '<' in user:
        user = user.split('<', 1)[1]
    return user
870
869
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # errors on the top-level path itself are fatal; errors on
        # anything deeper are ignored by os.walk's onerror contract
        if err.filename == path:
            raise err

    for root, subdirs, files in os.walk(path, onerror=errhandler):
        if '.hg' in subdirs:
            yield root
            # a repository found: don't descend into it
            subdirs[:] = []
883
882
# memoized result of rcpath(); stays None until the first call fills it
_rcpath = None
885
884
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath

    env = os.environ.get('HGRCPATH')
    if env is None:
        # no override: fall back to the platform default list
        _rcpath = os_rcpath()
        return _rcpath

    _rcpath = []
    for entry in env.split(os.pathsep):
        if not entry:
            continue
        if os.path.isdir(entry):
            # directories contribute every *.rc file they contain
            for name in os.listdir(entry):
                if name.endswith('.rc'):
                    _rcpath.append(os.path.join(entry, name))
        else:
            _rcpath.append(entry)
    return _rcpath
General Comments 0
You need to be logged in to leave comments. Login now