##// END OF EJS Templates
i18n part2: use '_' for all strings who are part of the user interface
Benoit Boissinot -
r1402:9d2c2e6b default
parent child Browse files
Show More
@@ -1,57 +1,57 b''
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "os time util")
11 demandload(globals(), "os time util")
12
12
13 class changelog(revlog):
13 class changelog(revlog):
14 def __init__(self, opener):
14 def __init__(self, opener):
15 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
15 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
16
16
17 def extract(self, text):
17 def extract(self, text):
18 if not text:
18 if not text:
19 return (nullid, "", (0, 0), [], "")
19 return (nullid, "", (0, 0), [], "")
20 last = text.index("\n\n")
20 last = text.index("\n\n")
21 desc = text[last + 2:]
21 desc = text[last + 2:]
22 l = text[:last].splitlines()
22 l = text[:last].splitlines()
23 manifest = bin(l[0])
23 manifest = bin(l[0])
24 user = l[1]
24 user = l[1]
25 date = l[2].split(' ')
25 date = l[2].split(' ')
26 time = float(date.pop(0))
26 time = float(date.pop(0))
27 try:
27 try:
28 # various tools did silly things with the time zone field.
28 # various tools did silly things with the time zone field.
29 timezone = int(date[0])
29 timezone = int(date[0])
30 except:
30 except:
31 timezone = 0
31 timezone = 0
32 files = l[3:]
32 files = l[3:]
33 return (manifest, user, (time, timezone), files, desc)
33 return (manifest, user, (time, timezone), files, desc)
34
34
35 def read(self, node):
35 def read(self, node):
36 return self.extract(self.revision(node))
36 return self.extract(self.revision(node))
37
37
38 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
38 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
39 user=None, date=None):
39 user=None, date=None):
40 if date:
40 if date:
41 # validate explicit (probably user-specified) date and
41 # validate explicit (probably user-specified) date and
42 # time zone offset. values must fit in signed 32 bits for
42 # time zone offset. values must fit in signed 32 bits for
43 # current 32-bit linux runtimes.
43 # current 32-bit linux runtimes.
44 try:
44 try:
45 when, offset = map(int, date.split(' '))
45 when, offset = map(int, date.split(' '))
46 except ValueError:
46 except ValueError:
47 raise ValueError('invalid date: %r' % date)
47 raise ValueError(_('invalid date: %r') % date)
48 if abs(when) > 0x7fffffff:
48 if abs(when) > 0x7fffffff:
49 raise ValueError('date exceeds 32 bits: %d' % when)
49 raise ValueError(_('date exceeds 32 bits: %d') % when)
50 if abs(offset) >= 43200:
50 if abs(offset) >= 43200:
51 raise ValueError('impossible time zone offset: %d' % offset)
51 raise ValueError(_('impossible time zone offset: %d') % offset)
52 else:
52 else:
53 date = "%d %d" % util.makedate()
53 date = "%d %d" % util.makedate()
54 list.sort()
54 list.sort()
55 l = [hex(manifest), user, date] + list + ["", desc]
55 l = [hex(manifest), user, date] + list + ["", desc]
56 text = "\n".join(l)
56 text = "\n".join(l)
57 return self.addrevision(text, transaction, self.count(), p1, p2)
57 return self.addrevision(text, transaction, self.count(), p1, p2)
This diff has been collapsed as it changes many lines, (614 lines changed) Show them Hide them
@@ -1,2242 +1,2242 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18
18
19 def filterfiles(filters, files):
19 def filterfiles(filters, files):
20 l = [x for x in files if x in filters]
20 l = [x for x in files if x in filters]
21
21
22 for t in filters:
22 for t in filters:
23 if t and t[-1] != "/":
23 if t and t[-1] != "/":
24 t += "/"
24 t += "/"
25 l += [x for x in files if x.startswith(t)]
25 l += [x for x in files if x.startswith(t)]
26 return l
26 return l
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
35 return util.matcher(repo.root, cwd, pats or ['.'], opts.get('include'),
35 return util.matcher(repo.root, cwd, pats or ['.'], opts.get('include'),
36 opts.get('exclude'), head)
36 opts.get('exclude'), head)
37
37
38 def makewalk(repo, pats, opts, head=''):
38 def makewalk(repo, pats, opts, head=''):
39 cwd = repo.getcwd()
39 cwd = repo.getcwd()
40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
41 exact = dict(zip(files, files))
41 exact = dict(zip(files, files))
42 def walk():
42 def walk():
43 for src, fn in repo.walk(files=files, match=matchfn):
43 for src, fn in repo.walk(files=files, match=matchfn):
44 yield src, fn, util.pathto(cwd, fn), fn in exact
44 yield src, fn, util.pathto(cwd, fn), fn in exact
45 return files, matchfn, walk()
45 return files, matchfn, walk()
46
46
47 def walk(repo, pats, opts, head=''):
47 def walk(repo, pats, opts, head=''):
48 files, matchfn, results = makewalk(repo, pats, opts, head)
48 files, matchfn, results = makewalk(repo, pats, opts, head)
49 for r in results:
49 for r in results:
50 yield r
50 yield r
51
51
52 def walkchangerevs(ui, repo, cwd, pats, opts):
52 def walkchangerevs(ui, repo, cwd, pats, opts):
53 '''Iterate over files and the revs they changed in.
53 '''Iterate over files and the revs they changed in.
54
54
55 Callers most commonly need to iterate backwards over the history
55 Callers most commonly need to iterate backwards over the history
56 it is interested in. Doing so has awful (quadratic-looking)
56 it is interested in. Doing so has awful (quadratic-looking)
57 performance, so we use iterators in a "windowed" way.
57 performance, so we use iterators in a "windowed" way.
58
58
59 We walk a window of revisions in the desired order. Within the
59 We walk a window of revisions in the desired order. Within the
60 window, we first walk forwards to gather data, then in the desired
60 window, we first walk forwards to gather data, then in the desired
61 order (usually backwards) to display it.
61 order (usually backwards) to display it.
62
62
63 This function returns an (iterator, getchange) pair. The
63 This function returns an (iterator, getchange) pair. The
64 getchange function returns the changelog entry for a numeric
64 getchange function returns the changelog entry for a numeric
65 revision. The iterator yields 3-tuples. They will be of one of
65 revision. The iterator yields 3-tuples. They will be of one of
66 the following forms:
66 the following forms:
67
67
68 "window", incrementing, lastrev: stepping through a window,
68 "window", incrementing, lastrev: stepping through a window,
69 positive if walking forwards through revs, last rev in the
69 positive if walking forwards through revs, last rev in the
70 sequence iterated over - use to reset state for the current window
70 sequence iterated over - use to reset state for the current window
71
71
72 "add", rev, fns: out-of-order traversal of the given file names
72 "add", rev, fns: out-of-order traversal of the given file names
73 fns, which changed during revision rev - use to gather data for
73 fns, which changed during revision rev - use to gather data for
74 possible display
74 possible display
75
75
76 "iter", rev, None: in-order traversal of the revs earlier iterated
76 "iter", rev, None: in-order traversal of the revs earlier iterated
77 over with "add" - use to display data'''
77 over with "add" - use to display data'''
78
78
79 if repo.changelog.count() == 0:
79 if repo.changelog.count() == 0:
80 return [], False
80 return [], False
81
81
82 cwd = repo.getcwd()
82 cwd = repo.getcwd()
83 if not pats and cwd:
83 if not pats and cwd:
84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
87 pats, opts)
87 pats, opts)
88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
89 wanted = {}
89 wanted = {}
90 slowpath = anypats
90 slowpath = anypats
91 window = 300
91 window = 300
92 fncache = {}
92 fncache = {}
93
93
94 chcache = {}
94 chcache = {}
95 def getchange(rev):
95 def getchange(rev):
96 ch = chcache.get(rev)
96 ch = chcache.get(rev)
97 if ch is None:
97 if ch is None:
98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
99 return ch
99 return ch
100
100
101 if not slowpath and not files:
101 if not slowpath and not files:
102 # No files, no patterns. Display all revs.
102 # No files, no patterns. Display all revs.
103 wanted = dict(zip(revs, revs))
103 wanted = dict(zip(revs, revs))
104 if not slowpath:
104 if not slowpath:
105 # Only files, no patterns. Check the history of each file.
105 # Only files, no patterns. Check the history of each file.
106 def filerevgen(filelog):
106 def filerevgen(filelog):
107 for i in xrange(filelog.count() - 1, -1, -window):
107 for i in xrange(filelog.count() - 1, -1, -window):
108 revs = []
108 revs = []
109 for j in xrange(max(0, i - window), i + 1):
109 for j in xrange(max(0, i - window), i + 1):
110 revs.append(filelog.linkrev(filelog.node(j)))
110 revs.append(filelog.linkrev(filelog.node(j)))
111 revs.reverse()
111 revs.reverse()
112 for rev in revs:
112 for rev in revs:
113 yield rev
113 yield rev
114
114
115 minrev, maxrev = min(revs), max(revs)
115 minrev, maxrev = min(revs), max(revs)
116 for file in files:
116 for file in files:
117 filelog = repo.file(file)
117 filelog = repo.file(file)
118 # A zero count may be a directory or deleted file, so
118 # A zero count may be a directory or deleted file, so
119 # try to find matching entries on the slow path.
119 # try to find matching entries on the slow path.
120 if filelog.count() == 0:
120 if filelog.count() == 0:
121 slowpath = True
121 slowpath = True
122 break
122 break
123 for rev in filerevgen(filelog):
123 for rev in filerevgen(filelog):
124 if rev <= maxrev:
124 if rev <= maxrev:
125 if rev < minrev:
125 if rev < minrev:
126 break
126 break
127 fncache.setdefault(rev, [])
127 fncache.setdefault(rev, [])
128 fncache[rev].append(file)
128 fncache[rev].append(file)
129 wanted[rev] = 1
129 wanted[rev] = 1
130 if slowpath:
130 if slowpath:
131 # The slow path checks files modified in every changeset.
131 # The slow path checks files modified in every changeset.
132 def changerevgen():
132 def changerevgen():
133 for i in xrange(repo.changelog.count() - 1, -1, -window):
133 for i in xrange(repo.changelog.count() - 1, -1, -window):
134 for j in xrange(max(0, i - window), i + 1):
134 for j in xrange(max(0, i - window), i + 1):
135 yield j, getchange(j)[3]
135 yield j, getchange(j)[3]
136
136
137 for rev, changefiles in changerevgen():
137 for rev, changefiles in changerevgen():
138 matches = filter(matchfn, changefiles)
138 matches = filter(matchfn, changefiles)
139 if matches:
139 if matches:
140 fncache[rev] = matches
140 fncache[rev] = matches
141 wanted[rev] = 1
141 wanted[rev] = 1
142
142
143 def iterate():
143 def iterate():
144 for i in xrange(0, len(revs), window):
144 for i in xrange(0, len(revs), window):
145 yield 'window', revs[0] < revs[-1], revs[-1]
145 yield 'window', revs[0] < revs[-1], revs[-1]
146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
147 if rev in wanted]
147 if rev in wanted]
148 srevs = list(nrevs)
148 srevs = list(nrevs)
149 srevs.sort()
149 srevs.sort()
150 for rev in srevs:
150 for rev in srevs:
151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
152 yield 'add', rev, fns
152 yield 'add', rev, fns
153 for rev in nrevs:
153 for rev in nrevs:
154 yield 'iter', rev, None
154 yield 'iter', rev, None
155 return iterate(), getchange
155 return iterate(), getchange
156
156
157 revrangesep = ':'
157 revrangesep = ':'
158
158
159 def revrange(ui, repo, revs, revlog=None):
159 def revrange(ui, repo, revs, revlog=None):
160 """Yield revision as strings from a list of revision specifications."""
160 """Yield revision as strings from a list of revision specifications."""
161 if revlog is None:
161 if revlog is None:
162 revlog = repo.changelog
162 revlog = repo.changelog
163 revcount = revlog.count()
163 revcount = revlog.count()
164 def fix(val, defval):
164 def fix(val, defval):
165 if not val:
165 if not val:
166 return defval
166 return defval
167 try:
167 try:
168 num = int(val)
168 num = int(val)
169 if str(num) != val:
169 if str(num) != val:
170 raise ValueError
170 raise ValueError
171 if num < 0: num += revcount
171 if num < 0: num += revcount
172 if num < 0: num = 0
172 if num < 0: num = 0
173 elif num >= revcount:
173 elif num >= revcount:
174 raise ValueError
174 raise ValueError
175 except ValueError:
175 except ValueError:
176 try:
176 try:
177 num = repo.changelog.rev(repo.lookup(val))
177 num = repo.changelog.rev(repo.lookup(val))
178 except KeyError:
178 except KeyError:
179 try:
179 try:
180 num = revlog.rev(revlog.lookup(val))
180 num = revlog.rev(revlog.lookup(val))
181 except KeyError:
181 except KeyError:
182 raise util.Abort('invalid revision identifier %s', val)
182 raise util.Abort(_('invalid revision identifier %s'), val)
183 return num
183 return num
184 seen = {}
184 seen = {}
185 for spec in revs:
185 for spec in revs:
186 if spec.find(revrangesep) >= 0:
186 if spec.find(revrangesep) >= 0:
187 start, end = spec.split(revrangesep, 1)
187 start, end = spec.split(revrangesep, 1)
188 start = fix(start, 0)
188 start = fix(start, 0)
189 end = fix(end, revcount - 1)
189 end = fix(end, revcount - 1)
190 step = start > end and -1 or 1
190 step = start > end and -1 or 1
191 for rev in xrange(start, end+step, step):
191 for rev in xrange(start, end+step, step):
192 if rev in seen: continue
192 if rev in seen: continue
193 seen[rev] = 1
193 seen[rev] = 1
194 yield str(rev)
194 yield str(rev)
195 else:
195 else:
196 rev = fix(spec, None)
196 rev = fix(spec, None)
197 if rev in seen: continue
197 if rev in seen: continue
198 seen[rev] = 1
198 seen[rev] = 1
199 yield str(rev)
199 yield str(rev)
200
200
201 def make_filename(repo, r, pat, node=None,
201 def make_filename(repo, r, pat, node=None,
202 total=None, seqno=None, revwidth=None, pathname=None):
202 total=None, seqno=None, revwidth=None, pathname=None):
203 node_expander = {
203 node_expander = {
204 'H': lambda: hex(node),
204 'H': lambda: hex(node),
205 'R': lambda: str(r.rev(node)),
205 'R': lambda: str(r.rev(node)),
206 'h': lambda: short(node),
206 'h': lambda: short(node),
207 }
207 }
208 expander = {
208 expander = {
209 '%': lambda: '%',
209 '%': lambda: '%',
210 'b': lambda: os.path.basename(repo.root),
210 'b': lambda: os.path.basename(repo.root),
211 }
211 }
212
212
213 try:
213 try:
214 if node:
214 if node:
215 expander.update(node_expander)
215 expander.update(node_expander)
216 if node and revwidth is not None:
216 if node and revwidth is not None:
217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
218 if total is not None:
218 if total is not None:
219 expander['N'] = lambda: str(total)
219 expander['N'] = lambda: str(total)
220 if seqno is not None:
220 if seqno is not None:
221 expander['n'] = lambda: str(seqno)
221 expander['n'] = lambda: str(seqno)
222 if total is not None and seqno is not None:
222 if total is not None and seqno is not None:
223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
224 if pathname is not None:
224 if pathname is not None:
225 expander['s'] = lambda: os.path.basename(pathname)
225 expander['s'] = lambda: os.path.basename(pathname)
226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
227 expander['p'] = lambda: pathname
227 expander['p'] = lambda: pathname
228
228
229 newname = []
229 newname = []
230 patlen = len(pat)
230 patlen = len(pat)
231 i = 0
231 i = 0
232 while i < patlen:
232 while i < patlen:
233 c = pat[i]
233 c = pat[i]
234 if c == '%':
234 if c == '%':
235 i += 1
235 i += 1
236 c = pat[i]
236 c = pat[i]
237 c = expander[c]()
237 c = expander[c]()
238 newname.append(c)
238 newname.append(c)
239 i += 1
239 i += 1
240 return ''.join(newname)
240 return ''.join(newname)
241 except KeyError, inst:
241 except KeyError, inst:
242 raise util.Abort("invalid format spec '%%%s' in output file name",
242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
243 inst.args[0])
243 inst.args[0])
244
244
245 def make_file(repo, r, pat, node=None,
245 def make_file(repo, r, pat, node=None,
246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
247 if not pat or pat == '-':
247 if not pat or pat == '-':
248 return 'w' in mode and sys.stdout or sys.stdin
248 return 'w' in mode and sys.stdout or sys.stdin
249 if hasattr(pat, 'write') and 'w' in mode:
249 if hasattr(pat, 'write') and 'w' in mode:
250 return pat
250 return pat
251 if hasattr(pat, 'read') and 'r' in mode:
251 if hasattr(pat, 'read') and 'r' in mode:
252 return pat
252 return pat
253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
254 pathname),
254 pathname),
255 mode)
255 mode)
256
256
257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
258 changes=None, text=False):
258 changes=None, text=False):
259 if not changes:
259 if not changes:
260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
261 else:
261 else:
262 (c, a, d, u) = changes
262 (c, a, d, u) = changes
263 if files:
263 if files:
264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
265
265
266 if not c and not a and not d:
266 if not c and not a and not d:
267 return
267 return
268
268
269 if node2:
269 if node2:
270 change = repo.changelog.read(node2)
270 change = repo.changelog.read(node2)
271 mmap2 = repo.manifest.read(change[0])
271 mmap2 = repo.manifest.read(change[0])
272 date2 = util.datestr(change[2])
272 date2 = util.datestr(change[2])
273 def read(f):
273 def read(f):
274 return repo.file(f).read(mmap2[f])
274 return repo.file(f).read(mmap2[f])
275 else:
275 else:
276 date2 = util.datestr()
276 date2 = util.datestr()
277 if not node1:
277 if not node1:
278 node1 = repo.dirstate.parents()[0]
278 node1 = repo.dirstate.parents()[0]
279 def read(f):
279 def read(f):
280 return repo.wfile(f).read()
280 return repo.wfile(f).read()
281
281
282 if ui.quiet:
282 if ui.quiet:
283 r = None
283 r = None
284 else:
284 else:
285 hexfunc = ui.verbose and hex or short
285 hexfunc = ui.verbose and hex or short
286 r = [hexfunc(node) for node in [node1, node2] if node]
286 r = [hexfunc(node) for node in [node1, node2] if node]
287
287
288 change = repo.changelog.read(node1)
288 change = repo.changelog.read(node1)
289 mmap = repo.manifest.read(change[0])
289 mmap = repo.manifest.read(change[0])
290 date1 = util.datestr(change[2])
290 date1 = util.datestr(change[2])
291
291
292 for f in c:
292 for f in c:
293 to = None
293 to = None
294 if f in mmap:
294 if f in mmap:
295 to = repo.file(f).read(mmap[f])
295 to = repo.file(f).read(mmap[f])
296 tn = read(f)
296 tn = read(f)
297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
298 for f in a:
298 for f in a:
299 to = None
299 to = None
300 tn = read(f)
300 tn = read(f)
301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
302 for f in d:
302 for f in d:
303 to = repo.file(f).read(mmap[f])
303 to = repo.file(f).read(mmap[f])
304 tn = None
304 tn = None
305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
306
306
307 def trimuser(ui, name, rev, revcache):
307 def trimuser(ui, name, rev, revcache):
308 """trim the name of the user who committed a change"""
308 """trim the name of the user who committed a change"""
309 user = revcache.get(rev)
309 user = revcache.get(rev)
310 if user is None:
310 if user is None:
311 user = revcache[rev] = ui.shortuser(name)
311 user = revcache[rev] = ui.shortuser(name)
312 return user
312 return user
313
313
314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
315 """show a single changeset or file revision"""
315 """show a single changeset or file revision"""
316 log = repo.changelog
316 log = repo.changelog
317 if changenode is None:
317 if changenode is None:
318 changenode = log.node(rev)
318 changenode = log.node(rev)
319 elif not rev:
319 elif not rev:
320 rev = log.rev(changenode)
320 rev = log.rev(changenode)
321
321
322 if ui.quiet:
322 if ui.quiet:
323 ui.write("%d:%s\n" % (rev, short(changenode)))
323 ui.write("%d:%s\n" % (rev, short(changenode)))
324 return
324 return
325
325
326 changes = log.read(changenode)
326 changes = log.read(changenode)
327 date = util.datestr(changes[2])
327 date = util.datestr(changes[2])
328
328
329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
330 for p in log.parents(changenode)
330 for p in log.parents(changenode)
331 if ui.debugflag or p != nullid]
331 if ui.debugflag or p != nullid]
332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
333 parents = []
333 parents = []
334
334
335 if ui.verbose:
335 if ui.verbose:
336 ui.write("changeset: %d:%s\n" % (rev, hex(changenode)))
336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
337 else:
337 else:
338 ui.write("changeset: %d:%s\n" % (rev, short(changenode)))
338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
339
339
340 for tag in repo.nodetags(changenode):
340 for tag in repo.nodetags(changenode):
341 ui.status("tag: %s\n" % tag)
341 ui.status(_("tag: %s\n") % tag)
342 for parent in parents:
342 for parent in parents:
343 ui.write("parent: %d:%s\n" % parent)
343 ui.write(_("parent: %d:%s\n") % parent)
344
344
345 if brinfo and changenode in brinfo:
345 if brinfo and changenode in brinfo:
346 br = brinfo[changenode]
346 br = brinfo[changenode]
347 ui.write("branch: %s\n" % " ".join(br))
347 ui.write(_("branch: %s\n") % " ".join(br))
348
348
349 ui.debug("manifest: %d:%s\n" % (repo.manifest.rev(changes[0]),
349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
350 hex(changes[0])))
350 hex(changes[0])))
351 ui.status("user: %s\n" % changes[1])
351 ui.status(_("user: %s\n") % changes[1])
352 ui.status("date: %s\n" % date)
352 ui.status(_("date: %s\n") % date)
353
353
354 if ui.debugflag:
354 if ui.debugflag:
355 files = repo.changes(log.parents(changenode)[0], changenode)
355 files = repo.changes(log.parents(changenode)[0], changenode)
356 for key, value in zip(["files:", "files+:", "files-:"], files):
356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
357 if value:
357 if value:
358 ui.note("%-12s %s\n" % (key, " ".join(value)))
358 ui.note("%-12s %s\n" % (key, " ".join(value)))
359 else:
359 else:
360 ui.note("files: %s\n" % " ".join(changes[3]))
360 ui.note(_("files: %s\n") % " ".join(changes[3]))
361
361
362 description = changes[4].strip()
362 description = changes[4].strip()
363 if description:
363 if description:
364 if ui.verbose:
364 if ui.verbose:
365 ui.status("description:\n")
365 ui.status(_("description:\n"))
366 ui.status(description)
366 ui.status(description)
367 ui.status("\n\n")
367 ui.status("\n\n")
368 else:
368 else:
369 ui.status("summary: %s\n" % description.splitlines()[0])
369 ui.status(_("summary: %s\n") % description.splitlines()[0])
370 ui.status("\n")
370 ui.status("\n")
371
371
372 def show_version(ui):
372 def show_version(ui):
373 """output version and copyright information"""
373 """output version and copyright information"""
374 ui.write("Mercurial Distributed SCM (version %s)\n"
374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
375 % version.get_version())
375 % version.get_version())
376 ui.status(
376 ui.status(_(
377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
378 "This is free software; see the source for copying conditions. "
378 "This is free software; see the source for copying conditions. "
379 "There is NO\nwarranty; "
379 "There is NO\nwarranty; "
380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
381 )
381 ))
382
382
383 def help_(ui, cmd=None, with_version=False):
383 def help_(ui, cmd=None, with_version=False):
384 """show help for a given command or all commands"""
384 """show help for a given command or all commands"""
385 option_lists = []
385 option_lists = []
386 if cmd and cmd != 'shortlist':
386 if cmd and cmd != 'shortlist':
387 if with_version:
387 if with_version:
388 show_version(ui)
388 show_version(ui)
389 ui.write('\n')
389 ui.write('\n')
390 key, i = find(cmd)
390 key, i = find(cmd)
391 # synopsis
391 # synopsis
392 ui.write("%s\n\n" % i[2])
392 ui.write("%s\n\n" % i[2])
393
393
394 # description
394 # description
395 doc = i[0].__doc__
395 doc = i[0].__doc__
396 if ui.quiet:
396 if ui.quiet:
397 doc = doc.splitlines(0)[0]
397 doc = doc.splitlines(0)[0]
398 ui.write("%s\n" % doc.rstrip())
398 ui.write("%s\n" % doc.rstrip())
399
399
400 if not ui.quiet:
400 if not ui.quiet:
401 # aliases
401 # aliases
402 aliases = ', '.join(key.split('|')[1:])
402 aliases = ', '.join(key.split('|')[1:])
403 if aliases:
403 if aliases:
404 ui.write("\naliases: %s\n" % aliases)
404 ui.write(_("\naliases: %s\n") % aliases)
405
405
406 # options
406 # options
407 if i[1]:
407 if i[1]:
408 option_lists.append(("options", i[1]))
408 option_lists.append(("options", i[1]))
409
409
410 else:
410 else:
411 # program name
411 # program name
412 if ui.verbose or with_version:
412 if ui.verbose or with_version:
413 show_version(ui)
413 show_version(ui)
414 else:
414 else:
415 ui.status("Mercurial Distributed SCM\n")
415 ui.status(_("Mercurial Distributed SCM\n"))
416 ui.status('\n')
416 ui.status('\n')
417
417
418 # list of commands
418 # list of commands
419 if cmd == "shortlist":
419 if cmd == "shortlist":
420 ui.status('basic commands (use "hg help" '
420 ui.status(_('basic commands (use "hg help" '
421 'for the full list or option "-v" for details):\n\n')
421 'for the full list or option "-v" for details):\n\n'))
422 elif ui.verbose:
422 elif ui.verbose:
423 ui.status('list of commands:\n\n')
423 ui.status(_('list of commands:\n\n'))
424 else:
424 else:
425 ui.status('list of commands (use "hg help -v" '
425 ui.status(_('list of commands (use "hg help -v" '
426 'to show aliases and global options):\n\n')
426 'to show aliases and global options):\n\n'))
427
427
428 h = {}
428 h = {}
429 cmds = {}
429 cmds = {}
430 for c, e in table.items():
430 for c, e in table.items():
431 f = c.split("|")[0]
431 f = c.split("|")[0]
432 if cmd == "shortlist" and not f.startswith("^"):
432 if cmd == "shortlist" and not f.startswith("^"):
433 continue
433 continue
434 f = f.lstrip("^")
434 f = f.lstrip("^")
435 if not ui.debugflag and f.startswith("debug"):
435 if not ui.debugflag and f.startswith("debug"):
436 continue
436 continue
437 d = ""
437 d = ""
438 if e[0].__doc__:
438 if e[0].__doc__:
439 d = e[0].__doc__.splitlines(0)[0].rstrip()
439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 h[f] = d
440 h[f] = d
441 cmds[f]=c.lstrip("^")
441 cmds[f]=c.lstrip("^")
442
442
443 fns = h.keys()
443 fns = h.keys()
444 fns.sort()
444 fns.sort()
445 m = max(map(len, fns))
445 m = max(map(len, fns))
446 for f in fns:
446 for f in fns:
447 if ui.verbose:
447 if ui.verbose:
448 commands = cmds[f].replace("|",", ")
448 commands = cmds[f].replace("|",", ")
449 ui.write(" %s:\n %s\n"%(commands,h[f]))
449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 else:
450 else:
451 ui.write(' %-*s %s\n' % (m, f, h[f]))
451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452
452
453 # global options
453 # global options
454 if ui.verbose:
454 if ui.verbose:
455 option_lists.append(("global options", globalopts))
455 option_lists.append(("global options", globalopts))
456
456
457 # list all option lists
457 # list all option lists
458 opt_output = []
458 opt_output = []
459 for title, options in option_lists:
459 for title, options in option_lists:
460 opt_output.append(("\n%s:\n" % title, None))
460 opt_output.append(("\n%s:\n" % title, None))
461 for shortopt, longopt, default, desc in options:
461 for shortopt, longopt, default, desc in options:
462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 longopt and " --%s" % longopt),
463 longopt and " --%s" % longopt),
464 "%s%s" % (desc,
464 "%s%s" % (desc,
465 default and " (default: %s)" % default
465 default and _(" (default: %s)") % default
466 or "")))
466 or "")))
467
467
468 if opt_output:
468 if opt_output:
469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 for first, second in opt_output:
470 for first, second in opt_output:
471 if second:
471 if second:
472 ui.write(" %-*s %s\n" % (opts_len, first, second))
472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 else:
473 else:
474 ui.write("%s\n" % first)
474 ui.write("%s\n" % first)
475
475
476 # Commands start here, listed alphabetically
476 # Commands start here, listed alphabetically
477
477
478 def add(ui, repo, *pats, **opts):
478 def add(ui, repo, *pats, **opts):
479 '''add the specified files on the next commit'''
479 '''add the specified files on the next commit'''
480 names = []
480 names = []
481 for src, abs, rel, exact in walk(repo, pats, opts):
481 for src, abs, rel, exact in walk(repo, pats, opts):
482 if exact:
482 if exact:
483 if ui.verbose: ui.status('adding %s\n' % rel)
483 if ui.verbose: ui.status(_('adding %s\n') % rel)
484 names.append(abs)
484 names.append(abs)
485 elif repo.dirstate.state(abs) == '?':
485 elif repo.dirstate.state(abs) == '?':
486 ui.status('adding %s\n' % rel)
486 ui.status(_('adding %s\n') % rel)
487 names.append(abs)
487 names.append(abs)
488 repo.add(names)
488 repo.add(names)
489
489
490 def addremove(ui, repo, *pats, **opts):
490 def addremove(ui, repo, *pats, **opts):
491 """add all new files, delete all missing files"""
491 """add all new files, delete all missing files"""
492 add, remove = [], []
492 add, remove = [], []
493 for src, abs, rel, exact in walk(repo, pats, opts):
493 for src, abs, rel, exact in walk(repo, pats, opts):
494 if src == 'f' and repo.dirstate.state(abs) == '?':
494 if src == 'f' and repo.dirstate.state(abs) == '?':
495 add.append(abs)
495 add.append(abs)
496 if ui.verbose or not exact:
496 if ui.verbose or not exact:
497 ui.status('adding ', rel, '\n')
497 ui.status(_('adding %s\n') % rel)
498 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
498 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
499 remove.append(abs)
499 remove.append(abs)
500 if ui.verbose or not exact:
500 if ui.verbose or not exact:
501 ui.status('removing ', rel, '\n')
501 ui.status(_('removing %s\n') % rel)
502 repo.add(add)
502 repo.add(add)
503 repo.remove(remove)
503 repo.remove(remove)
504
504
505 def annotate(ui, repo, *pats, **opts):
505 def annotate(ui, repo, *pats, **opts):
506 """show changeset information per file line"""
506 """show changeset information per file line"""
507 def getnode(rev):
507 def getnode(rev):
508 return short(repo.changelog.node(rev))
508 return short(repo.changelog.node(rev))
509
509
510 ucache = {}
510 ucache = {}
511 def getname(rev):
511 def getname(rev):
512 cl = repo.changelog.read(repo.changelog.node(rev))
512 cl = repo.changelog.read(repo.changelog.node(rev))
513 return trimuser(ui, cl[1], rev, ucache)
513 return trimuser(ui, cl[1], rev, ucache)
514
514
515 if not pats:
515 if not pats:
516 raise util.Abort('at least one file name or pattern required')
516 raise util.Abort(_('at least one file name or pattern required'))
517
517
518 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
518 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
519 if not opts['user'] and not opts['changeset']:
519 if not opts['user'] and not opts['changeset']:
520 opts['number'] = 1
520 opts['number'] = 1
521
521
522 if opts['rev']:
522 if opts['rev']:
523 node = repo.changelog.lookup(opts['rev'])
523 node = repo.changelog.lookup(opts['rev'])
524 else:
524 else:
525 node = repo.dirstate.parents()[0]
525 node = repo.dirstate.parents()[0]
526 change = repo.changelog.read(node)
526 change = repo.changelog.read(node)
527 mmap = repo.manifest.read(change[0])
527 mmap = repo.manifest.read(change[0])
528
528
529 for src, abs, rel, exact in walk(repo, pats, opts):
529 for src, abs, rel, exact in walk(repo, pats, opts):
530 if abs not in mmap:
530 if abs not in mmap:
531 ui.warn("warning: %s is not in the repository!\n" % rel)
531 ui.warn(_("warning: %s is not in the repository!\n") % rel)
532 continue
532 continue
533
533
534 f = repo.file(abs)
534 f = repo.file(abs)
535 if not opts['text'] and util.binary(f.read(mmap[abs])):
535 if not opts['text'] and util.binary(f.read(mmap[abs])):
536 ui.write("%s: binary file\n" % rel)
536 ui.write(_("%s: binary file\n") % rel)
537 continue
537 continue
538
538
539 lines = f.annotate(mmap[abs])
539 lines = f.annotate(mmap[abs])
540 pieces = []
540 pieces = []
541
541
542 for o, f in opmap:
542 for o, f in opmap:
543 if opts[o]:
543 if opts[o]:
544 l = [f(n) for n, dummy in lines]
544 l = [f(n) for n, dummy in lines]
545 if l:
545 if l:
546 m = max(map(len, l))
546 m = max(map(len, l))
547 pieces.append(["%*s" % (m, x) for x in l])
547 pieces.append(["%*s" % (m, x) for x in l])
548
548
549 if pieces:
549 if pieces:
550 for p, l in zip(zip(*pieces), lines):
550 for p, l in zip(zip(*pieces), lines):
551 ui.write("%s: %s" % (" ".join(p), l[1]))
551 ui.write("%s: %s" % (" ".join(p), l[1]))
552
552
553 def bundle(ui, repo, fname, dest="default-push", **opts):
553 def bundle(ui, repo, fname, dest="default-push", **opts):
554 """create a changegroup file"""
554 """create a changegroup file"""
555 f = open(fname, "wb")
555 f = open(fname, "wb")
556 dest = ui.expandpath(dest)
556 dest = ui.expandpath(dest)
557 other = hg.repository(ui, dest)
557 other = hg.repository(ui, dest)
558 o = repo.findoutgoing(other)
558 o = repo.findoutgoing(other)
559 cg = repo.changegroup(o)
559 cg = repo.changegroup(o)
560
560
561 try:
561 try:
562 f.write("HG10")
562 f.write("HG10")
563 z = bz2.BZ2Compressor(9)
563 z = bz2.BZ2Compressor(9)
564 while 1:
564 while 1:
565 chunk = cg.read(4096)
565 chunk = cg.read(4096)
566 if not chunk:
566 if not chunk:
567 break
567 break
568 f.write(z.compress(chunk))
568 f.write(z.compress(chunk))
569 f.write(z.flush())
569 f.write(z.flush())
570 except:
570 except:
571 os.unlink(fname)
571 os.unlink(fname)
572 raise
572 raise
573
573
574 def cat(ui, repo, file1, *pats, **opts):
574 def cat(ui, repo, file1, *pats, **opts):
575 """output the latest or given revisions of files"""
575 """output the latest or given revisions of files"""
576 mf = {}
576 mf = {}
577 if opts['rev']:
577 if opts['rev']:
578 change = repo.changelog.read(repo.lookup(opts['rev']))
578 change = repo.changelog.read(repo.lookup(opts['rev']))
579 mf = repo.manifest.read(change[0])
579 mf = repo.manifest.read(change[0])
580 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
580 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
581 r = repo.file(abs)
581 r = repo.file(abs)
582 if opts['rev']:
582 if opts['rev']:
583 try:
583 try:
584 n = mf[abs]
584 n = mf[abs]
585 except (hg.RepoError, KeyError):
585 except (hg.RepoError, KeyError):
586 try:
586 try:
587 n = r.lookup(rev)
587 n = r.lookup(rev)
588 except KeyError, inst:
588 except KeyError, inst:
589 raise util.Abort('cannot find file %s in rev %s', rel, rev)
589 raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
590 else:
590 else:
591 n = r.tip()
591 n = r.tip()
592 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
592 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
593 fp.write(r.read(n))
593 fp.write(r.read(n))
594
594
595 def clone(ui, source, dest=None, **opts):
595 def clone(ui, source, dest=None, **opts):
596 """make a copy of an existing repository"""
596 """make a copy of an existing repository"""
597 if dest is None:
597 if dest is None:
598 dest = os.path.basename(os.path.normpath(source))
598 dest = os.path.basename(os.path.normpath(source))
599
599
600 if os.path.exists(dest):
600 if os.path.exists(dest):
601 raise util.Abort("destination '%s' already exists", dest)
601 raise util.Abort(_("destination '%s' already exists"), dest)
602
602
603 dest = os.path.realpath(dest)
603 dest = os.path.realpath(dest)
604
604
605 class Dircleanup:
605 class Dircleanup:
606 def __init__(self, dir_):
606 def __init__(self, dir_):
607 self.rmtree = shutil.rmtree
607 self.rmtree = shutil.rmtree
608 self.dir_ = dir_
608 self.dir_ = dir_
609 os.mkdir(dir_)
609 os.mkdir(dir_)
610 def close(self):
610 def close(self):
611 self.dir_ = None
611 self.dir_ = None
612 def __del__(self):
612 def __del__(self):
613 if self.dir_:
613 if self.dir_:
614 self.rmtree(self.dir_, True)
614 self.rmtree(self.dir_, True)
615
615
616 if opts['ssh']:
616 if opts['ssh']:
617 ui.setconfig("ui", "ssh", opts['ssh'])
617 ui.setconfig("ui", "ssh", opts['ssh'])
618 if opts['remotecmd']:
618 if opts['remotecmd']:
619 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
619 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
620
620
621 if not os.path.exists(source):
621 if not os.path.exists(source):
622 source = ui.expandpath(source)
622 source = ui.expandpath(source)
623
623
624 d = Dircleanup(dest)
624 d = Dircleanup(dest)
625 abspath = source
625 abspath = source
626 other = hg.repository(ui, source)
626 other = hg.repository(ui, source)
627
627
628 copy = False
628 copy = False
629 if other.dev() != -1:
629 if other.dev() != -1:
630 abspath = os.path.abspath(source)
630 abspath = os.path.abspath(source)
631 if not opts['pull']:
631 if not opts['pull']:
632 copy = True
632 copy = True
633
633
634 if copy:
634 if copy:
635 try:
635 try:
636 # we use a lock here because if we race with commit, we
636 # we use a lock here because if we race with commit, we
637 # can end up with extra data in the cloned revlogs that's
637 # can end up with extra data in the cloned revlogs that's
638 # not pointed to by changesets, thus causing verify to
638 # not pointed to by changesets, thus causing verify to
639 # fail
639 # fail
640 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
640 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
641 except OSError:
641 except OSError:
642 copy = False
642 copy = False
643
643
644 if copy:
644 if copy:
645 # we lock here to avoid premature writing to the target
645 # we lock here to avoid premature writing to the target
646 os.mkdir(os.path.join(dest, ".hg"))
646 os.mkdir(os.path.join(dest, ".hg"))
647 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
647 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
648
648
649 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
649 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
650 for f in files.split():
650 for f in files.split():
651 src = os.path.join(source, ".hg", f)
651 src = os.path.join(source, ".hg", f)
652 dst = os.path.join(dest, ".hg", f)
652 dst = os.path.join(dest, ".hg", f)
653 util.copyfiles(src, dst)
653 util.copyfiles(src, dst)
654
654
655 repo = hg.repository(ui, dest)
655 repo = hg.repository(ui, dest)
656
656
657 else:
657 else:
658 repo = hg.repository(ui, dest, create=1)
658 repo = hg.repository(ui, dest, create=1)
659 repo.pull(other)
659 repo.pull(other)
660
660
661 f = repo.opener("hgrc", "w", text=True)
661 f = repo.opener("hgrc", "w", text=True)
662 f.write("[paths]\n")
662 f.write("[paths]\n")
663 f.write("default = %s\n" % abspath)
663 f.write("default = %s\n" % abspath)
664
664
665 if not opts['noupdate']:
665 if not opts['noupdate']:
666 update(ui, repo)
666 update(ui, repo)
667
667
668 d.close()
668 d.close()
669
669
670 def commit(ui, repo, *pats, **opts):
670 def commit(ui, repo, *pats, **opts):
671 """commit the specified files or all outstanding changes"""
671 """commit the specified files or all outstanding changes"""
672 if opts['text']:
672 if opts['text']:
673 ui.warn("Warning: -t and --text is deprecated,"
673 ui.warn(_("Warning: -t and --text is deprecated,"
674 " please use -m or --message instead.\n")
674 " please use -m or --message instead.\n"))
675 message = opts['message'] or opts['text']
675 message = opts['message'] or opts['text']
676 logfile = opts['logfile']
676 logfile = opts['logfile']
677
677
678 if message and logfile:
678 if message and logfile:
679 raise util.Abort('options --message and --logfile are mutually '
679 raise util.Abort(_('options --message and --logfile are mutually '
680 'exclusive')
680 'exclusive'))
681 if not message and logfile:
681 if not message and logfile:
682 try:
682 try:
683 if logfile == '-':
683 if logfile == '-':
684 message = sys.stdin.read()
684 message = sys.stdin.read()
685 else:
685 else:
686 message = open(logfile).read()
686 message = open(logfile).read()
687 except IOError, inst:
687 except IOError, inst:
688 raise util.Abort("can't read commit message '%s': %s" %
688 raise util.Abort(_("can't read commit message '%s': %s") %
689 (logfile, inst.strerror))
689 (logfile, inst.strerror))
690
690
691 if opts['addremove']:
691 if opts['addremove']:
692 addremove(ui, repo, *pats, **opts)
692 addremove(ui, repo, *pats, **opts)
693 cwd = repo.getcwd()
693 cwd = repo.getcwd()
694 if not pats and cwd:
694 if not pats and cwd:
695 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
695 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
696 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
696 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
697 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
697 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
698 pats, opts)
698 pats, opts)
699 if pats:
699 if pats:
700 c, a, d, u = repo.changes(files=fns, match=match)
700 c, a, d, u = repo.changes(files=fns, match=match)
701 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
701 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
702 else:
702 else:
703 files = []
703 files = []
704 try:
704 try:
705 repo.commit(files, message, opts['user'], opts['date'], match)
705 repo.commit(files, message, opts['user'], opts['date'], match)
706 except ValueError, inst:
706 except ValueError, inst:
707 raise util.Abort(str(inst))
707 raise util.Abort(str(inst))
708
708
709 def docopy(ui, repo, pats, opts):
709 def docopy(ui, repo, pats, opts):
710 if not pats:
710 if not pats:
711 raise util.Abort('no source or destination specified')
711 raise util.Abort(_('no source or destination specified'))
712 elif len(pats) == 1:
712 elif len(pats) == 1:
713 raise util.Abort('no destination specified')
713 raise util.Abort(_('no destination specified'))
714 pats = list(pats)
714 pats = list(pats)
715 dest = pats.pop()
715 dest = pats.pop()
716 sources = []
716 sources = []
717
717
718 def okaytocopy(abs, rel, exact):
718 def okaytocopy(abs, rel, exact):
719 reasons = {'?': 'is not managed',
719 reasons = {'?': _('is not managed'),
720 'a': 'has been marked for add'}
720 'a': _('has been marked for add')}
721 reason = reasons.get(repo.dirstate.state(abs))
721 reason = reasons.get(repo.dirstate.state(abs))
722 if reason:
722 if reason:
723 if exact: ui.warn('%s: not copying - file %s\n' % (rel, reason))
723 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
724 else:
724 else:
725 return True
725 return True
726
726
727 for src, abs, rel, exact in walk(repo, pats, opts):
727 for src, abs, rel, exact in walk(repo, pats, opts):
728 if okaytocopy(abs, rel, exact):
728 if okaytocopy(abs, rel, exact):
729 sources.append((abs, rel, exact))
729 sources.append((abs, rel, exact))
730 if not sources:
730 if not sources:
731 raise util.Abort('no files to copy')
731 raise util.Abort(_('no files to copy'))
732
732
733 cwd = repo.getcwd()
733 cwd = repo.getcwd()
734 absdest = util.canonpath(repo.root, cwd, dest)
734 absdest = util.canonpath(repo.root, cwd, dest)
735 reldest = util.pathto(cwd, absdest)
735 reldest = util.pathto(cwd, absdest)
736 if os.path.exists(reldest):
736 if os.path.exists(reldest):
737 destisfile = not os.path.isdir(reldest)
737 destisfile = not os.path.isdir(reldest)
738 else:
738 else:
739 destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'
739 destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'
740
740
741 if destisfile:
741 if destisfile:
742 if opts['parents']:
742 if opts['parents']:
743 raise util.Abort('with --parents, destination must be a directory')
743 raise util.Abort(_('with --parents, destination must be a directory'))
744 elif len(sources) > 1:
744 elif len(sources) > 1:
745 raise util.Abort('with multiple sources, destination must be a '
745 raise util.Abort(_('with multiple sources, destination must be a '
746 'directory')
746 'directory'))
747 errs, copied = 0, []
747 errs, copied = 0, []
748 for abs, rel, exact in sources:
748 for abs, rel, exact in sources:
749 if opts['parents']:
749 if opts['parents']:
750 mydest = os.path.join(dest, rel)
750 mydest = os.path.join(dest, rel)
751 elif destisfile:
751 elif destisfile:
752 mydest = reldest
752 mydest = reldest
753 else:
753 else:
754 mydest = os.path.join(dest, os.path.basename(rel))
754 mydest = os.path.join(dest, os.path.basename(rel))
755 myabsdest = util.canonpath(repo.root, cwd, mydest)
755 myabsdest = util.canonpath(repo.root, cwd, mydest)
756 myreldest = util.pathto(cwd, myabsdest)
756 myreldest = util.pathto(cwd, myabsdest)
757 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
757 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
758 ui.warn('%s: not overwriting - file already managed\n' % myreldest)
758 ui.warn(_('%s: not overwriting - file already managed\n') % myreldest)
759 continue
759 continue
760 mydestdir = os.path.dirname(myreldest) or '.'
760 mydestdir = os.path.dirname(myreldest) or '.'
761 if not opts['after']:
761 if not opts['after']:
762 try:
762 try:
763 if opts['parents']: os.makedirs(mydestdir)
763 if opts['parents']: os.makedirs(mydestdir)
764 elif not destisfile: os.mkdir(mydestdir)
764 elif not destisfile: os.mkdir(mydestdir)
765 except OSError, inst:
765 except OSError, inst:
766 if inst.errno != errno.EEXIST: raise
766 if inst.errno != errno.EEXIST: raise
767 if ui.verbose or not exact:
767 if ui.verbose or not exact:
768 ui.status('copying %s to %s\n' % (rel, myreldest))
768 ui.status(_('copying %s to %s\n') % (rel, myreldest))
769 if not opts['after']:
769 if not opts['after']:
770 try:
770 try:
771 shutil.copyfile(rel, myreldest)
771 shutil.copyfile(rel, myreldest)
772 shutil.copymode(rel, myreldest)
772 shutil.copymode(rel, myreldest)
773 except shutil.Error, inst:
773 except shutil.Error, inst:
774 raise util.Abort(str(inst))
774 raise util.Abort(str(inst))
775 except IOError, inst:
775 except IOError, inst:
776 if inst.errno == errno.ENOENT:
776 if inst.errno == errno.ENOENT:
777 ui.warn('%s: deleted in working copy\n' % rel)
777 ui.warn(_('%s: deleted in working copy\n') % rel)
778 else:
778 else:
779 ui.warn('%s: cannot copy - %s\n' % (rel, inst.strerror))
779 ui.warn(_('%s: cannot copy - %s\n') % (rel, inst.strerror))
780 errs += 1
780 errs += 1
781 continue
781 continue
782 repo.copy(abs, myabsdest)
782 repo.copy(abs, myabsdest)
783 copied.append((abs, rel, exact))
783 copied.append((abs, rel, exact))
784 if errs:
784 if errs:
785 ui.warn('(consider using --after)\n')
785 ui.warn(_('(consider using --after)\n'))
786 return errs, copied
786 return errs, copied
787
787
788 def copy(ui, repo, *pats, **opts):
788 def copy(ui, repo, *pats, **opts):
789 """mark files as copied for the next commit"""
789 """mark files as copied for the next commit"""
790 errs, copied = docopy(ui, repo, pats, opts)
790 errs, copied = docopy(ui, repo, pats, opts)
791 return errs
791 return errs
792
792
793 def debugancestor(ui, index, rev1, rev2):
793 def debugancestor(ui, index, rev1, rev2):
794 """find the ancestor revision of two revisions in a given index"""
794 """find the ancestor revision of two revisions in a given index"""
795 r = revlog.revlog(file, index, "")
795 r = revlog.revlog(file, index, "")
796 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
796 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
797 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
797 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
798
798
799 def debugcheckstate(ui, repo):
799 def debugcheckstate(ui, repo):
800 """validate the correctness of the current dirstate"""
800 """validate the correctness of the current dirstate"""
801 parent1, parent2 = repo.dirstate.parents()
801 parent1, parent2 = repo.dirstate.parents()
802 repo.dirstate.read()
802 repo.dirstate.read()
803 dc = repo.dirstate.map
803 dc = repo.dirstate.map
804 keys = dc.keys()
804 keys = dc.keys()
805 keys.sort()
805 keys.sort()
806 m1n = repo.changelog.read(parent1)[0]
806 m1n = repo.changelog.read(parent1)[0]
807 m2n = repo.changelog.read(parent2)[0]
807 m2n = repo.changelog.read(parent2)[0]
808 m1 = repo.manifest.read(m1n)
808 m1 = repo.manifest.read(m1n)
809 m2 = repo.manifest.read(m2n)
809 m2 = repo.manifest.read(m2n)
810 errors = 0
810 errors = 0
811 for f in dc:
811 for f in dc:
812 state = repo.dirstate.state(f)
812 state = repo.dirstate.state(f)
813 if state in "nr" and f not in m1:
813 if state in "nr" and f not in m1:
814 ui.warn("%s in state %s, but not in manifest1\n" % (f, state))
814 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
815 errors += 1
815 errors += 1
816 if state in "a" and f in m1:
816 if state in "a" and f in m1:
817 ui.warn("%s in state %s, but also in manifest1\n" % (f, state))
817 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
818 errors += 1
818 errors += 1
819 if state in "m" and f not in m1 and f not in m2:
819 if state in "m" and f not in m1 and f not in m2:
820 ui.warn("%s in state %s, but not in either manifest\n" %
820 ui.warn(_("%s in state %s, but not in either manifest\n") %
821 (f, state))
821 (f, state))
822 errors += 1
822 errors += 1
823 for f in m1:
823 for f in m1:
824 state = repo.dirstate.state(f)
824 state = repo.dirstate.state(f)
825 if state not in "nrm":
825 if state not in "nrm":
826 ui.warn("%s in manifest1, but listed as state %s" % (f, state))
826 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
827 errors += 1
827 errors += 1
828 if errors:
828 if errors:
829 raise util.Abort(".hg/dirstate inconsistent with current parent's manifest")
829 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
830
830
831 def debugconfig(ui):
831 def debugconfig(ui):
832 """show combined config settings from all hgrc files"""
832 """show combined config settings from all hgrc files"""
833 try:
833 try:
834 repo = hg.repository(ui)
834 repo = hg.repository(ui)
835 except hg.RepoError:
835 except hg.RepoError:
836 pass
836 pass
837 for section, name, value in ui.walkconfig():
837 for section, name, value in ui.walkconfig():
838 ui.write('%s.%s=%s\n' % (section, name, value))
838 ui.write('%s.%s=%s\n' % (section, name, value))
839
839
840 def debugsetparents(ui, repo, rev1, rev2=None):
840 def debugsetparents(ui, repo, rev1, rev2=None):
841 """
841 """
842 manually set the parents of the current working directory
842 manually set the parents of the current working directory
843
843
844 This is useful for writing repository conversion tools, but should
844 This is useful for writing repository conversion tools, but should
845 be used with care.
845 be used with care.
846 """
846 """
847
847
848 if not rev2:
848 if not rev2:
849 rev2 = hex(nullid)
849 rev2 = hex(nullid)
850
850
851 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
851 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
852
852
853 def debugstate(ui, repo):
853 def debugstate(ui, repo):
854 """show the contents of the current dirstate"""
854 """show the contents of the current dirstate"""
855 repo.dirstate.read()
855 repo.dirstate.read()
856 dc = repo.dirstate.map
856 dc = repo.dirstate.map
857 keys = dc.keys()
857 keys = dc.keys()
858 keys.sort()
858 keys.sort()
859 for file_ in keys:
859 for file_ in keys:
860 ui.write("%c %3o %10d %s %s\n"
860 ui.write("%c %3o %10d %s %s\n"
861 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
861 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
862 time.strftime("%x %X",
862 time.strftime("%x %X",
863 time.localtime(dc[file_][3])), file_))
863 time.localtime(dc[file_][3])), file_))
864 for f in repo.dirstate.copies:
864 for f in repo.dirstate.copies:
865 ui.write("copy: %s -> %s\n" % (repo.dirstate.copies[f], f))
865 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
866
866
867 def debugdata(ui, file_, rev):
867 def debugdata(ui, file_, rev):
868 """dump the contents of an data file revision"""
868 """dump the contents of an data file revision"""
869 r = revlog.revlog(file, file_[:-2] + ".i", file_)
869 r = revlog.revlog(file, file_[:-2] + ".i", file_)
870 try:
870 try:
871 ui.write(r.revision(r.lookup(rev)))
871 ui.write(r.revision(r.lookup(rev)))
872 except KeyError:
872 except KeyError:
873 raise util.Abort('invalid revision identifier %s', rev)
873 raise util.Abort(_('invalid revision identifier %s'), rev)
874
874
875 def debugindex(ui, file_):
875 def debugindex(ui, file_):
876 """dump the contents of an index file"""
876 """dump the contents of an index file"""
877 r = revlog.revlog(file, file_, "")
877 r = revlog.revlog(file, file_, "")
878 ui.write(" rev offset length base linkrev" +
878 ui.write(" rev offset length base linkrev" +
879 " nodeid p1 p2\n")
879 " nodeid p1 p2\n")
880 for i in range(r.count()):
880 for i in range(r.count()):
881 e = r.index[i]
881 e = r.index[i]
882 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
882 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
883 i, e[0], e[1], e[2], e[3],
883 i, e[0], e[1], e[2], e[3],
884 short(e[6]), short(e[4]), short(e[5])))
884 short(e[6]), short(e[4]), short(e[5])))
885
885
886 def debugindexdot(ui, file_):
886 def debugindexdot(ui, file_):
887 """dump an index DAG as a .dot file"""
887 """dump an index DAG as a .dot file"""
888 r = revlog.revlog(file, file_, "")
888 r = revlog.revlog(file, file_, "")
889 ui.write("digraph G {\n")
889 ui.write("digraph G {\n")
890 for i in range(r.count()):
890 for i in range(r.count()):
891 e = r.index[i]
891 e = r.index[i]
892 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
892 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
893 if e[5] != nullid:
893 if e[5] != nullid:
894 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
894 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
895 ui.write("}\n")
895 ui.write("}\n")
896
896
897 def debugrename(ui, repo, file, rev=None):
897 def debugrename(ui, repo, file, rev=None):
898 """dump rename information"""
898 """dump rename information"""
899 r = repo.file(relpath(repo, [file])[0])
899 r = repo.file(relpath(repo, [file])[0])
900 if rev:
900 if rev:
901 try:
901 try:
902 # assume all revision numbers are for changesets
902 # assume all revision numbers are for changesets
903 n = repo.lookup(rev)
903 n = repo.lookup(rev)
904 change = repo.changelog.read(n)
904 change = repo.changelog.read(n)
905 m = repo.manifest.read(change[0])
905 m = repo.manifest.read(change[0])
906 n = m[relpath(repo, [file])[0]]
906 n = m[relpath(repo, [file])[0]]
907 except hg.RepoError, KeyError:
907 except hg.RepoError, KeyError:
908 n = r.lookup(rev)
908 n = r.lookup(rev)
909 else:
909 else:
910 n = r.tip()
910 n = r.tip()
911 m = r.renamed(n)
911 m = r.renamed(n)
912 if m:
912 if m:
913 ui.write("renamed from %s:%s\n" % (m[0], hex(m[1])))
913 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
914 else:
914 else:
915 ui.write("not renamed\n")
915 ui.write(_("not renamed\n"))
916
916
917 def debugwalk(ui, repo, *pats, **opts):
917 def debugwalk(ui, repo, *pats, **opts):
918 """show how files match on given patterns"""
918 """show how files match on given patterns"""
919 items = list(walk(repo, pats, opts))
919 items = list(walk(repo, pats, opts))
920 if not items:
920 if not items:
921 return
921 return
922 fmt = '%%s %%-%ds %%-%ds %%s' % (
922 fmt = '%%s %%-%ds %%-%ds %%s' % (
923 max([len(abs) for (src, abs, rel, exact) in items]),
923 max([len(abs) for (src, abs, rel, exact) in items]),
924 max([len(rel) for (src, abs, rel, exact) in items]))
924 max([len(rel) for (src, abs, rel, exact) in items]))
925 for src, abs, rel, exact in items:
925 for src, abs, rel, exact in items:
926 line = fmt % (src, abs, rel, exact and 'exact' or '')
926 line = fmt % (src, abs, rel, exact and 'exact' or '')
927 ui.write("%s\n" % line.rstrip())
927 ui.write("%s\n" % line.rstrip())
928
928
929 def diff(ui, repo, *pats, **opts):
929 def diff(ui, repo, *pats, **opts):
930 """diff working directory (or selected files)"""
930 """diff working directory (or selected files)"""
931 node1, node2 = None, None
931 node1, node2 = None, None
932 revs = [repo.lookup(x) for x in opts['rev']]
932 revs = [repo.lookup(x) for x in opts['rev']]
933
933
934 if len(revs) > 0:
934 if len(revs) > 0:
935 node1 = revs[0]
935 node1 = revs[0]
936 if len(revs) > 1:
936 if len(revs) > 1:
937 node2 = revs[1]
937 node2 = revs[1]
938 if len(revs) > 2:
938 if len(revs) > 2:
939 raise util.Abort("too many revisions to diff")
939 raise util.Abort(_("too many revisions to diff"))
940
940
941 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
941 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
942
942
943 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
943 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
944 text=opts['text'])
944 text=opts['text'])
945
945
946 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
946 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
947 node = repo.lookup(changeset)
947 node = repo.lookup(changeset)
948 prev, other = repo.changelog.parents(node)
948 prev, other = repo.changelog.parents(node)
949 change = repo.changelog.read(node)
949 change = repo.changelog.read(node)
950
950
951 fp = make_file(repo, repo.changelog, opts['output'],
951 fp = make_file(repo, repo.changelog, opts['output'],
952 node=node, total=total, seqno=seqno,
952 node=node, total=total, seqno=seqno,
953 revwidth=revwidth)
953 revwidth=revwidth)
954 if fp != sys.stdout:
954 if fp != sys.stdout:
955 ui.note("%s\n" % fp.name)
955 ui.note("%s\n" % fp.name)
956
956
957 fp.write("# HG changeset patch\n")
957 fp.write("# HG changeset patch\n")
958 fp.write("# User %s\n" % change[1])
958 fp.write("# User %s\n" % change[1])
959 fp.write("# Node ID %s\n" % hex(node))
959 fp.write("# Node ID %s\n" % hex(node))
960 fp.write("# Parent %s\n" % hex(prev))
960 fp.write("# Parent %s\n" % hex(prev))
961 if other != nullid:
961 if other != nullid:
962 fp.write("# Parent %s\n" % hex(other))
962 fp.write("# Parent %s\n" % hex(other))
963 fp.write(change[4].rstrip())
963 fp.write(change[4].rstrip())
964 fp.write("\n\n")
964 fp.write("\n\n")
965
965
966 dodiff(fp, ui, repo, prev, node, text=opts['text'])
966 dodiff(fp, ui, repo, prev, node, text=opts['text'])
967 if fp != sys.stdout:
967 if fp != sys.stdout:
968 fp.close()
968 fp.close()
969
969
970 def export(ui, repo, *changesets, **opts):
970 def export(ui, repo, *changesets, **opts):
971 """dump the header and diffs for one or more changesets"""
971 """dump the header and diffs for one or more changesets"""
972 if not changesets:
972 if not changesets:
973 raise util.Abort("export requires at least one changeset")
973 raise util.Abort(_("export requires at least one changeset"))
974 seqno = 0
974 seqno = 0
975 revs = list(revrange(ui, repo, changesets))
975 revs = list(revrange(ui, repo, changesets))
976 total = len(revs)
976 total = len(revs)
977 revwidth = max(map(len, revs))
977 revwidth = max(map(len, revs))
978 ui.note(len(revs) > 1 and "Exporting patches:\n" or "Exporting patch:\n")
978 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
979 for cset in revs:
979 for cset in revs:
980 seqno += 1
980 seqno += 1
981 doexport(ui, repo, cset, seqno, total, revwidth, opts)
981 doexport(ui, repo, cset, seqno, total, revwidth, opts)
982
982
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit"""
    dropped = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # only files scheduled for addition ('a') can be forgotten
        if repo.dirstate.state(abs) != 'a':
            continue
        dropped.append(abs)
        if ui.verbose or not exact:
            ui.status(_('forgetting %s\n') % rel)
    repo.forget(dropped)
992
992
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions"""
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache filelog objects, one per file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp match
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate:
        # a matched line; equality and hash use only the line text so
        # that symmetric_difference finds lines added/removed between revs
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    # matches[rev][fn] maps linestate -> linestate for that file@rev
    matches = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            # bug fix: show the revision the state change belongs to (r),
            # which is prev[fn] when walking backwards with --all; the old
            # code computed r but unconditionally printed rev
            cols = [fn, str(r)]
            if opts['line_number']: cols.append(str(l.linenum))
            if opts['all']: cols.append(change)
            if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
                                                  ucache))
            if opts['files_with_matches']:
                c = (fn, rev)
                if c in filerevmatches: continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
    count = 0
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            incrementing = rev
            matches.clear()
        elif st == 'add':
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip: continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # file not in the manifest at this revision
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip: continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # flush the remaining state when walking backwards
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip: continue
            display(fn, rev, {}, state)
    return (count == 0 and 1) or 0
1111
1111
def heads(ui, repo, **opts):
    """show current repository heads"""
    heads = repo.changelog.heads()
    br = None
    if opts['branches']:
        br = repo.branchlookup(heads)
    # reuse the head list computed above instead of walking the
    # changelog a second time
    for n in heads:
        show_changeset(ui, repo, changenode=n, brinfo=br)
1120
1120
def identify(ui, repo):
    """print information about the working copy"""
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.verbose and hex or short
    (c, a, d, u) = repo.changes()
    # a trailing '+' marks a dirty working directory
    dirty = (c or a or d) and "+" or ""
    output = ['+'.join([hexfunc(p) for p in parents]) + dirty]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = []
        for tags in map(repo.nodetags, parents):
            if tags:
                parenttags.append('/'.join(tags))
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1142
1142
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches"""
    patches = (patch1,) + patches

    if not opts['force']:
        (c, a, d, u) = repo.changes()
        if c or a or d:
            raise util.Abort(_("outstanding uncommitted changes"))

    d = opts["base"]
    strip = opts["strip"]

    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        message = []
        user = None
        hgpatch = False
        for line in file(pf):
            line = line.rstrip()
            # refuse what looks like a raw mail unless --force is given
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35: line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # the diff itself starts here; stop scanning the header
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif not line.startswith("# ") and line:
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []       # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        # idiomatic truthiness test instead of len(files) > 0
        if files:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1205
1205
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source"""
    source = ui.expandpath(source)
    other = hg.repository(ui, source)
    if not other.local():
        raise util.Abort(_("incoming doesn't work for remote repositories yet"))
    missing = repo.findincoming(other)
    if not missing:
        return
    for n in other.newer(missing):
        show_changeset(ui, other, changenode=n)
        if opts['patch']:
            # diff against the first parent in the remote repository
            dodiff(ui, ui, other, other.changelog.parents(n)[0], n)
            ui.write("\n")
1222
1222
1223 def init(ui, dest="."):
1223 def init(ui, dest="."):
1224 """create a new repository in the given directory"""
1224 """create a new repository in the given directory"""
1225 if not os.path.exists(dest):
1225 if not os.path.exists(dest):
1226 os.mkdir(dest)
1226 os.mkdir(dest)
1227 hg.repository(ui, dest, create=1)
1227 hg.repository(ui, dest, create=1)
1228
1228
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns"""
    # with -0/--print0 use NUL terminators (for xargs -0)
    end = opts['print0'] and '\0' or '\n'

    for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
        # skip files the dirstate knows nothing about
        if repo.dirstate.state(abs) == '?':
            continue
        name = rel
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        ui.write(name, end)
1240
1240
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files"""
    class dui:
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
        def bump(self, rev):
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # everything else (verbose, quiet, ...) comes from the real ui
            return getattr(self.ui, key)

    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
                                           pats, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
        elif st == 'add':
            du.bump(rev)
            node = repo.changelog.node(rev)
            br = None
            if opts['branch']:
                br = repo.branchlookup([node])

            if opts['keyword']:
                # every keyword must appear in the user, description or
                # (first 20) file names, case-insensitively
                changes = repo.changelog.read(node)
                haystacks = (changes[1].lower(), changes[4].lower(),
                             " ".join(changes[3][:20]).lower())
                wanted = True
                for k in opts['keyword']:
                    if not [h for h in haystacks if k in h]:
                        wanted = False
                        break
                if not wanted:
                    continue

            show_changeset(du, repo, rev, brinfo=br)
            if opts['patch']:
                prev, other = repo.changelog.parents(node)
                dodiff(du, du, repo, prev, node, fns)
                du.write("\n\n")
        elif st == 'iter':
            # replay the buffered hunks in order
            for args in du.hunk[rev]:
                ui.write(*args)
1301
1301
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest"""
    if rev:
        try:
            # assume all revision numbers are for changesets
            node = repo.lookup(rev)
            node = repo.changelog.read(node)[0]
        except hg.RepoError:
            # fall back to a manifest revision
            node = repo.manifest.lookup(rev)
    else:
        node = repo.manifest.tip()

    contents = repo.manifest.read(node)
    flags = repo.manifest.readflags(node)
    names = contents.keys()
    names.sort()

    for f in names:
        ui.write("%40s %3s %s\n" % (hex(contents[f]),
                                    flags[f] and "755" or "644", f))
1321
1321
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination"""
    dest = ui.expandpath(dest)
    other = hg.repository(ui, dest)
    missing = repo.newer(repo.findoutgoing(other))
    for n in missing:
        show_changeset(ui, repo, changenode=n)
        if opts['patch']:
            # diff against the changeset's first parent
            dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
            ui.write("\n")
1334
1334
def parents(ui, repo, rev=None):
    """show the parents of the working dir or revision"""
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    for n in nodes:
        # a null parent means "no parent"; don't display it
        if n != nullid:
            show_changeset(ui, repo, changenode=n)
1345
1345
def paths(ui, search=None):
    """show definition of symbolic path names"""
    # opening the repository pulls its .hg/hgrc [paths] into ui; a missing
    # repo is fine since the global config may still define paths
    try:
        repo = hg.repository(ui=ui)
    except hg.RepoError:
        pass

    entries = ui.configitems("paths")
    if not search:
        for name, path in entries:
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in entries:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
1363
1363
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source"""
    source = ui.expandpath(source)
    ui.status(_('pulling from %s\n') % (source))

    # forward command-line overrides to the ui configuration
    for key in ('ssh', 'remotecmd'):
        if opts[key]:
            ui.setconfig("ui", key, opts[key])

    other = hg.repository(ui, source)
    ret = repo.pull(other)
    if not ret:
        if opts['update']:
            return update(ui, repo)
        ui.status(_("(run 'hg update' to get a working copy)\n"))

    return ret
1383
1383
def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
    """push changes to the specified destination"""
    dest = ui.expandpath(dest)
    # wrap the status message in _() like every other UI string (its
    # sibling pull already does) so it is picked up for translation
    ui.status(_('pushing to %s\n') % (dest))

    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)

    other = hg.repository(ui, dest)
    r = repo.push(other, force)
    return r
1397
1397
1398 def rawcommit(ui, repo, *flist, **rc):
1398 def rawcommit(ui, repo, *flist, **rc):
1399 "raw commit interface"
1399 "raw commit interface"
1400 if rc['text']:
1400 if rc['text']:
1401 ui.warn("Warning: -t and --text is deprecated,"
1401 ui.warn(_("Warning: -t and --text is deprecated,"
1402 " please use -m or --message instead.\n")
1402 " please use -m or --message instead.\n"))
1403 message = rc['message'] or rc['text']
1403 message = rc['message'] or rc['text']
1404 if not message and rc['logfile']:
1404 if not message and rc['logfile']:
1405 try:
1405 try:
1406 message = open(rc['logfile']).read()
1406 message = open(rc['logfile']).read()
1407 except IOError:
1407 except IOError:
1408 pass
1408 pass
1409 if not message and not rc['logfile']:
1409 if not message and not rc['logfile']:
1410 raise util.Abort("missing commit message")
1410 raise util.Abort(_("missing commit message"))
1411
1411
1412 files = relpath(repo, list(flist))
1412 files = relpath(repo, list(flist))
1413 if rc['files']:
1413 if rc['files']:
1414 files += open(rc['files']).read().splitlines()
1414 files += open(rc['files']).read().splitlines()
1415
1415
1416 rc['parent'] = map(repo.lookup, rc['parent'])
1416 rc['parent'] = map(repo.lookup, rc['parent'])
1417
1417
1418 try:
1418 try:
1419 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1419 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1420 except ValueError, inst:
1420 except ValueError, inst:
1421 raise util.Abort(str(inst))
1421 raise util.Abort(str(inst))
1422
1422
def recover(ui, repo):
    """roll back an interrupted transaction"""
    # the repository object knows how to repair itself
    repo.recover()
1426
1426
1427 def remove(ui, repo, pat, *pats, **opts):
1427 def remove(ui, repo, pat, *pats, **opts):
1428 """remove the specified files on the next commit"""
1428 """remove the specified files on the next commit"""
1429 names = []
1429 names = []
1430 def okaytoremove(abs, rel, exact):
1430 def okaytoremove(abs, rel, exact):
1431 c, a, d, u = repo.changes(files = [abs])
1431 c, a, d, u = repo.changes(files = [abs])
1432 reason = None
1432 reason = None
1433 if c: reason = 'is modified'
1433 if c: reason = _('is modified')
1434 elif a: reason = 'has been marked for add'
1434 elif a: reason = _('has been marked for add')
1435 elif u: reason = 'is not managed'
1435 elif u: reason = _('is not managed')
1436 if reason:
1436 if reason:
1437 if exact: ui.warn('not removing %s: file %s\n' % (rel, reason))
1437 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1438 else:
1438 else:
1439 return True
1439 return True
1440 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1440 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1441 if okaytoremove(abs, rel, exact):
1441 if okaytoremove(abs, rel, exact):
1442 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1442 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1443 names.append(abs)
1443 names.append(abs)
1444 for name in names:
1444 for name in names:
1445 try:
1445 try:
1446 os.unlink(name)
1446 os.unlink(name)
1447 except OSError, inst:
1447 except OSError, inst:
1448 if inst.errno != errno.ENOENT: raise
1448 if inst.errno != errno.ENOENT: raise
1449 repo.remove(names)
1449 repo.remove(names)
1450
1450
1451 def rename(ui, repo, *pats, **opts):
1451 def rename(ui, repo, *pats, **opts):
1452 """rename files; equivalent of copy + remove"""
1452 """rename files; equivalent of copy + remove"""
1453 errs, copied = docopy(ui, repo, pats, opts)
1453 errs, copied = docopy(ui, repo, pats, opts)
1454 names = []
1454 names = []
1455 for abs, rel, exact in copied:
1455 for abs, rel, exact in copied:
1456 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1456 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1457 try:
1457 try:
1458 os.unlink(rel)
1458 os.unlink(rel)
1459 except OSError, inst:
1459 except OSError, inst:
1460 if inst.errno != errno.ENOENT: raise
1460 if inst.errno != errno.ENOENT: raise
1461 names.append(abs)
1461 names.append(abs)
1462 repo.remove(names)
1462 repo.remove(names)
1463 return errs
1463 return errs
1464
1464
def revert(ui, repo, *names, **opts):
    """revert modified files or dirs back to their unmodified states"""
    # revert to --rev if given, otherwise to the first dirstate parent
    node = opts['rev'] and repo.lookup(opts['rev']) or \
           repo.dirstate.parents()[0]
    root = os.path.realpath(repo.root)

    def trimpath(p):
        # express p relative to the repository root when it lies inside it
        p = os.path.realpath(p)
        if p.startswith(root):
            rest = p[len(root):]
            if not rest:
                return rest
            if p.startswith(os.sep):
                return rest[1:]
        return p

    relnames = map(trimpath, names or [os.getcwd()])
    chosen = {}

    def choose(name):
        def match(name):
            # does name fall under one of the requested paths, respecting
            # --nonrecursive depth limits?
            for r in relnames:
                if not name.startswith(r):
                    continue
                rest = name[len(r):]
                if not rest:
                    return r, True
                depth = rest.count(os.sep)
                if not r:
                    # repository root: depth 0 means a direct entry
                    if depth == 0 or not opts['nonrecursive']:
                        return r, True
                elif rest[0] == os.sep:
                    if depth == 1 or not opts['nonrecursive']:
                        return r, True
            return None, False
        relname, ok = match(name)
        if ok:
            # remember which request matched so we can report misses below
            chosen[relname] = 1
        return ok

    result = repo.update(node, False, True, choose, False)
    for n in relnames:
        if n not in chosen:
            ui.warn(_('error: no matches for %s\n') % n)
            result = 1
    sys.stdout.flush()
    return result
1512
1512
def root(ui, repo):
    """print the root (top) of the current working dir"""
    ui.write("%s\n" % repo.root)
1516
1516
1517 def serve(ui, repo, **opts):
1517 def serve(ui, repo, **opts):
1518 """export the repository via HTTP"""
1518 """export the repository via HTTP"""
1519
1519
1520 if opts["stdio"]:
1520 if opts["stdio"]:
1521 fin, fout = sys.stdin, sys.stdout
1521 fin, fout = sys.stdin, sys.stdout
1522 sys.stdout = sys.stderr
1522 sys.stdout = sys.stderr
1523
1523
1524 def getarg():
1524 def getarg():
1525 argline = fin.readline()[:-1]
1525 argline = fin.readline()[:-1]
1526 arg, l = argline.split()
1526 arg, l = argline.split()
1527 val = fin.read(int(l))
1527 val = fin.read(int(l))
1528 return arg, val
1528 return arg, val
1529 def respond(v):
1529 def respond(v):
1530 fout.write("%d\n" % len(v))
1530 fout.write("%d\n" % len(v))
1531 fout.write(v)
1531 fout.write(v)
1532 fout.flush()
1532 fout.flush()
1533
1533
1534 lock = None
1534 lock = None
1535
1535
1536 while 1:
1536 while 1:
1537 cmd = fin.readline()[:-1]
1537 cmd = fin.readline()[:-1]
1538 if cmd == '':
1538 if cmd == '':
1539 return
1539 return
1540 if cmd == "heads":
1540 if cmd == "heads":
1541 h = repo.heads()
1541 h = repo.heads()
1542 respond(" ".join(map(hex, h)) + "\n")
1542 respond(" ".join(map(hex, h)) + "\n")
1543 if cmd == "lock":
1543 if cmd == "lock":
1544 lock = repo.lock()
1544 lock = repo.lock()
1545 respond("")
1545 respond("")
1546 if cmd == "unlock":
1546 if cmd == "unlock":
1547 if lock:
1547 if lock:
1548 lock.release()
1548 lock.release()
1549 lock = None
1549 lock = None
1550 respond("")
1550 respond("")
1551 elif cmd == "branches":
1551 elif cmd == "branches":
1552 arg, nodes = getarg()
1552 arg, nodes = getarg()
1553 nodes = map(bin, nodes.split(" "))
1553 nodes = map(bin, nodes.split(" "))
1554 r = []
1554 r = []
1555 for b in repo.branches(nodes):
1555 for b in repo.branches(nodes):
1556 r.append(" ".join(map(hex, b)) + "\n")
1556 r.append(" ".join(map(hex, b)) + "\n")
1557 respond("".join(r))
1557 respond("".join(r))
1558 elif cmd == "between":
1558 elif cmd == "between":
1559 arg, pairs = getarg()
1559 arg, pairs = getarg()
1560 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1560 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1561 r = []
1561 r = []
1562 for b in repo.between(pairs):
1562 for b in repo.between(pairs):
1563 r.append(" ".join(map(hex, b)) + "\n")
1563 r.append(" ".join(map(hex, b)) + "\n")
1564 respond("".join(r))
1564 respond("".join(r))
1565 elif cmd == "changegroup":
1565 elif cmd == "changegroup":
1566 nodes = []
1566 nodes = []
1567 arg, roots = getarg()
1567 arg, roots = getarg()
1568 nodes = map(bin, roots.split(" "))
1568 nodes = map(bin, roots.split(" "))
1569
1569
1570 cg = repo.changegroup(nodes)
1570 cg = repo.changegroup(nodes)
1571 while 1:
1571 while 1:
1572 d = cg.read(4096)
1572 d = cg.read(4096)
1573 if not d:
1573 if not d:
1574 break
1574 break
1575 fout.write(d)
1575 fout.write(d)
1576
1576
1577 fout.flush()
1577 fout.flush()
1578
1578
1579 elif cmd == "addchangegroup":
1579 elif cmd == "addchangegroup":
1580 if not lock:
1580 if not lock:
1581 respond("not locked")
1581 respond("not locked")
1582 continue
1582 continue
1583 respond("")
1583 respond("")
1584
1584
1585 r = repo.addchangegroup(fin)
1585 r = repo.addchangegroup(fin)
1586 respond("")
1586 respond("")
1587
1587
1588 optlist = "name templates style address port ipv6 accesslog errorlog"
1588 optlist = "name templates style address port ipv6 accesslog errorlog"
1589 for o in optlist.split():
1589 for o in optlist.split():
1590 if opts[o]:
1590 if opts[o]:
1591 ui.setconfig("web", o, opts[o])
1591 ui.setconfig("web", o, opts[o])
1592
1592
1593 try:
1593 try:
1594 httpd = hgweb.create_server(repo)
1594 httpd = hgweb.create_server(repo)
1595 except socket.error, inst:
1595 except socket.error, inst:
1596 raise util.Abort('cannot start server: ' + inst.args[1])
1596 raise util.Abort('cannot start server: ' + inst.args[1])
1597
1597
1598 if ui.verbose:
1598 if ui.verbose:
1599 addr, port = httpd.socket.getsockname()
1599 addr, port = httpd.socket.getsockname()
1600 if addr == '0.0.0.0':
1600 if addr == '0.0.0.0':
1601 addr = socket.gethostname()
1601 addr = socket.gethostname()
1602 else:
1602 else:
1603 try:
1603 try:
1604 addr = socket.gethostbyaddr(addr)[0]
1604 addr = socket.gethostbyaddr(addr)[0]
1605 except socket.error:
1605 except socket.error:
1606 pass
1606 pass
1607 if port != 80:
1607 if port != 80:
1608 ui.status('listening at http://%s:%d/\n' % (addr, port))
1608 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1609 else:
1609 else:
1610 ui.status('listening at http://%s/\n' % addr)
1610 ui.status(_('listening at http://%s/\n') % addr)
1611 httpd.serve_forever()
1611 httpd.serve_forever()
1612
1612
1613 def status(ui, repo, *pats, **opts):
1613 def status(ui, repo, *pats, **opts):
1614 '''show changed files in the working directory
1614 '''show changed files in the working directory
1615
1615
1616 M = modified
1616 M = modified
1617 A = added
1617 A = added
1618 R = removed
1618 R = removed
1619 ? = not tracked
1619 ? = not tracked
1620 '''
1620 '''
1621
1621
1622 cwd = repo.getcwd()
1622 cwd = repo.getcwd()
1623 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1623 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1624 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1624 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1625 for n in repo.changes(files=files, match=matchfn)]
1625 for n in repo.changes(files=files, match=matchfn)]
1626
1626
1627 changetypes = [('modified', 'M', c),
1627 changetypes = [(_('modified'), 'M', c),
1628 ('added', 'A', a),
1628 (_('added'), 'A', a),
1629 ('removed', 'R', d),
1629 (_('removed'), 'R', d),
1630 ('unknown', '?', u)]
1630 (_('unknown'), '?', u)]
1631
1631
1632 end = opts['print0'] and '\0' or '\n'
1632 end = opts['print0'] and '\0' or '\n'
1633
1633
1634 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1634 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1635 or changetypes):
1635 or changetypes):
1636 if opts['no_status']:
1636 if opts['no_status']:
1637 format = "%%s%s" % end
1637 format = "%%s%s" % end
1638 else:
1638 else:
1639 format = "%s %%s%s" % (char, end);
1639 format = "%s %%s%s" % (char, end);
1640
1640
1641 for f in changes:
1641 for f in changes:
1642 ui.write(format % f)
1642 ui.write(format % f)
1643
1643
1644 def tag(ui, repo, name, rev=None, **opts):
1644 def tag(ui, repo, name, rev=None, **opts):
1645 """add a tag for the current tip or a given revision"""
1645 """add a tag for the current tip or a given revision"""
1646 if opts['text']:
1646 if opts['text']:
1647 ui.warn("Warning: -t and --text is deprecated,"
1647 ui.warn(_("Warning: -t and --text is deprecated,"
1648 " please use -m or --message instead.\n")
1648 " please use -m or --message instead.\n"))
1649 if name == "tip":
1649 if name == "tip":
1650 raise util.Abort("the name 'tip' is reserved")
1650 raise util.Abort(_("the name 'tip' is reserved"))
1651 if rev:
1651 if rev:
1652 r = hex(repo.lookup(rev))
1652 r = hex(repo.lookup(rev))
1653 else:
1653 else:
1654 r = hex(repo.changelog.tip())
1654 r = hex(repo.changelog.tip())
1655
1655
1656 if name.find(revrangesep) >= 0:
1656 if name.find(revrangesep) >= 0:
1657 raise util.Abort("'%s' cannot be used in a tag name" % revrangesep)
1657 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
1658
1658
1659 if opts['local']:
1659 if opts['local']:
1660 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1660 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1661 return
1661 return
1662
1662
1663 (c, a, d, u) = repo.changes()
1663 (c, a, d, u) = repo.changes()
1664 for x in (c, a, d, u):
1664 for x in (c, a, d, u):
1665 if ".hgtags" in x:
1665 if ".hgtags" in x:
1666 raise util.Abort("working copy of .hgtags is changed "
1666 raise util.Abort(_("working copy of .hgtags is changed "
1667 "(please commit .hgtags manually)")
1667 "(please commit .hgtags manually)"))
1668
1668
1669 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1669 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1670 if repo.dirstate.state(".hgtags") == '?':
1670 if repo.dirstate.state(".hgtags") == '?':
1671 repo.add([".hgtags"])
1671 repo.add([".hgtags"])
1672
1672
1673 message = (opts['message'] or opts['text'] or
1673 message = (opts['message'] or opts['text'] or
1674 "Added tag %s for changeset %s" % (name, r))
1674 _("Added tag %s for changeset %s") % (name, r))
1675 try:
1675 try:
1676 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1676 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1677 except ValueError, inst:
1677 except ValueError, inst:
1678 raise util.Abort(str(inst))
1678 raise util.Abort(str(inst))
1679
1679
1680 def tags(ui, repo):
1680 def tags(ui, repo):
1681 """list repository tags"""
1681 """list repository tags"""
1682
1682
1683 l = repo.tagslist()
1683 l = repo.tagslist()
1684 l.reverse()
1684 l.reverse()
1685 for t, n in l:
1685 for t, n in l:
1686 try:
1686 try:
1687 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1687 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1688 except KeyError:
1688 except KeyError:
1689 r = " ?:?"
1689 r = " ?:?"
1690 ui.write("%-30s %s\n" % (t, r))
1690 ui.write("%-30s %s\n" % (t, r))
1691
1691
1692 def tip(ui, repo):
1692 def tip(ui, repo):
1693 """show the tip revision"""
1693 """show the tip revision"""
1694 n = repo.changelog.tip()
1694 n = repo.changelog.tip()
1695 show_changeset(ui, repo, changenode=n)
1695 show_changeset(ui, repo, changenode=n)
1696
1696
1697 def unbundle(ui, repo, fname):
1697 def unbundle(ui, repo, fname):
1698 """apply a changegroup file"""
1698 """apply a changegroup file"""
1699 f = urllib.urlopen(fname)
1699 f = urllib.urlopen(fname)
1700
1700
1701 if f.read(4) != "HG10":
1701 if f.read(4) != "HG10":
1702 raise util.Abort("%s: not a Mercurial bundle file" % fname)
1702 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
1703
1703
1704 def bzgenerator(f):
1704 def bzgenerator(f):
1705 zd = bz2.BZ2Decompressor()
1705 zd = bz2.BZ2Decompressor()
1706 for chunk in f:
1706 for chunk in f:
1707 yield zd.decompress(chunk)
1707 yield zd.decompress(chunk)
1708 yield zd.flush()
1708 yield zd.flush()
1709
1709
1710 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1710 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1711 repo.addchangegroup(util.chunkbuffer(bzgen))
1711 repo.addchangegroup(util.chunkbuffer(bzgen))
1712
1712
1713 def undo(ui, repo):
1713 def undo(ui, repo):
1714 """undo the last commit or pull
1714 """undo the last commit or pull
1715
1715
1716 Roll back the last pull or commit transaction on the
1716 Roll back the last pull or commit transaction on the
1717 repository, restoring the project to its earlier state.
1717 repository, restoring the project to its earlier state.
1718
1718
1719 This command should be used with care. There is only one level of
1719 This command should be used with care. There is only one level of
1720 undo and there is no redo.
1720 undo and there is no redo.
1721
1721
1722 This command is not intended for use on public repositories. Once
1722 This command is not intended for use on public repositories. Once
1723 a change is visible for pull by other users, undoing it locally is
1723 a change is visible for pull by other users, undoing it locally is
1724 ineffective.
1724 ineffective.
1725 """
1725 """
1726 repo.undo()
1726 repo.undo()
1727
1727
1728 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1728 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1729 '''update or merge working directory
1729 '''update or merge working directory
1730
1730
1731 If there are no outstanding changes in the working directory and
1731 If there are no outstanding changes in the working directory and
1732 there is a linear relationship between the current version and the
1732 there is a linear relationship between the current version and the
1733 requested version, the result is the requested version.
1733 requested version, the result is the requested version.
1734
1734
1735 Otherwise the result is a merge between the contents of the
1735 Otherwise the result is a merge between the contents of the
1736 current working directory and the requested version. Files that
1736 current working directory and the requested version. Files that
1737 changed between either parent are marked as changed for the next
1737 changed between either parent are marked as changed for the next
1738 commit and a commit must be performed before any further updates
1738 commit and a commit must be performed before any further updates
1739 are allowed.
1739 are allowed.
1740 '''
1740 '''
1741 if branch:
1741 if branch:
1742 br = repo.branchlookup(branch=branch)
1742 br = repo.branchlookup(branch=branch)
1743 found = []
1743 found = []
1744 for x in br:
1744 for x in br:
1745 if branch in br[x]:
1745 if branch in br[x]:
1746 found.append(x)
1746 found.append(x)
1747 if len(found) > 1:
1747 if len(found) > 1:
1748 ui.warn("Found multiple heads for %s\n" % branch)
1748 ui.warn(_("Found multiple heads for %s\n") % branch)
1749 for x in found:
1749 for x in found:
1750 show_changeset(ui, repo, changenode=x, brinfo=br)
1750 show_changeset(ui, repo, changenode=x, brinfo=br)
1751 return 1
1751 return 1
1752 if len(found) == 1:
1752 if len(found) == 1:
1753 node = found[0]
1753 node = found[0]
1754 ui.warn("Using head %s for branch %s\n" % (short(node), branch))
1754 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
1755 else:
1755 else:
1756 ui.warn("branch %s not found\n" % (branch))
1756 ui.warn(_("branch %s not found\n") % (branch))
1757 return 1
1757 return 1
1758 else:
1758 else:
1759 node = node and repo.lookup(node) or repo.changelog.tip()
1759 node = node and repo.lookup(node) or repo.changelog.tip()
1760 return repo.update(node, allow=merge, force=clean)
1760 return repo.update(node, allow=merge, force=clean)
1761
1761
1762 def verify(ui, repo):
1762 def verify(ui, repo):
1763 """verify the integrity of the repository"""
1763 """verify the integrity of the repository"""
1764 return repo.verify()
1764 return repo.verify()
1765
1765
1766 # Command options and aliases are listed here, alphabetically
1766 # Command options and aliases are listed here, alphabetically
1767
1767
1768 table = {
1768 table = {
1769 "^add":
1769 "^add":
1770 (add,
1770 (add,
1771 [('I', 'include', [], 'include path in search'),
1771 [('I', 'include', [], _('include path in search')),
1772 ('X', 'exclude', [], 'exclude path from search')],
1772 ('X', 'exclude', [], _('exclude path from search'))],
1773 "hg add [OPTION]... [FILE]..."),
1773 "hg add [OPTION]... [FILE]..."),
1774 "addremove":
1774 "addremove":
1775 (addremove,
1775 (addremove,
1776 [('I', 'include', [], 'include path in search'),
1776 [('I', 'include', [], _('include path in search')),
1777 ('X', 'exclude', [], 'exclude path from search')],
1777 ('X', 'exclude', [], _('exclude path from search'))],
1778 "hg addremove [OPTION]... [FILE]..."),
1778 _("hg addremove [OPTION]... [FILE]...")),
1779 "^annotate":
1779 "^annotate":
1780 (annotate,
1780 (annotate,
1781 [('r', 'rev', '', 'revision'),
1781 [('r', 'rev', '', _('revision')),
1782 ('a', 'text', None, 'treat all files as text'),
1782 ('a', 'text', None, _('treat all files as text')),
1783 ('u', 'user', None, 'show user'),
1783 ('u', 'user', None, _('show user')),
1784 ('n', 'number', None, 'show revision number'),
1784 ('n', 'number', None, _('show revision number')),
1785 ('c', 'changeset', None, 'show changeset'),
1785 ('c', 'changeset', None, _('show changeset')),
1786 ('I', 'include', [], 'include path in search'),
1786 ('I', 'include', [], _('include path in search')),
1787 ('X', 'exclude', [], 'exclude path from search')],
1787 ('X', 'exclude', [], _('exclude path from search'))],
1788 'hg annotate [OPTION]... FILE...'),
1788 _('hg annotate [OPTION]... FILE...')),
1789 "bundle":
1789 "bundle":
1790 (bundle,
1790 (bundle,
1791 [],
1791 [],
1792 'hg bundle FILE DEST'),
1792 _('hg bundle FILE DEST')),
1793 "cat":
1793 "cat":
1794 (cat,
1794 (cat,
1795 [('I', 'include', [], 'include path in search'),
1795 [('I', 'include', [], _('include path in search')),
1796 ('X', 'exclude', [], 'exclude path from search'),
1796 ('X', 'exclude', [], _('exclude path from search')),
1797 ('o', 'output', "", 'output to file'),
1797 ('o', 'output', "", _('output to file')),
1798 ('r', 'rev', '', 'revision')],
1798 ('r', 'rev', '', _('revision'))],
1799 'hg cat [OPTION]... FILE...'),
1799 _('hg cat [OPTION]... FILE...')),
1800 "^clone":
1800 "^clone":
1801 (clone,
1801 (clone,
1802 [('U', 'noupdate', None, 'skip update after cloning'),
1802 [('U', 'noupdate', None, _('skip update after cloning')),
1803 ('e', 'ssh', "", 'ssh command'),
1803 ('e', 'ssh', "", _('ssh command')),
1804 ('', 'pull', None, 'use pull protocol to copy metadata'),
1804 ('', 'pull', None, _('use pull protocol to copy metadata')),
1805 ('', 'remotecmd', "", 'remote hg command')],
1805 ('', 'remotecmd', "", _('remote hg command'))],
1806 'hg clone [OPTION]... SOURCE [DEST]'),
1806 _('hg clone [OPTION]... SOURCE [DEST]')),
1807 "^commit|ci":
1807 "^commit|ci":
1808 (commit,
1808 (commit,
1809 [('A', 'addremove', None, 'run add/remove during commit'),
1809 [('A', 'addremove', None, _('run add/remove during commit')),
1810 ('I', 'include', [], 'include path in search'),
1810 ('I', 'include', [], _('include path in search')),
1811 ('X', 'exclude', [], 'exclude path from search'),
1811 ('X', 'exclude', [], _('exclude path from search')),
1812 ('m', 'message', "", 'commit message'),
1812 ('m', 'message', "", _('commit message')),
1813 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1813 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1814 ('l', 'logfile', "", 'commit message file'),
1814 ('l', 'logfile', "", _('commit message file')),
1815 ('d', 'date', "", 'date code'),
1815 ('d', 'date', "", _('date code')),
1816 ('u', 'user', "", 'user')],
1816 ('u', 'user', "", _('user'))],
1817 'hg commit [OPTION]... [FILE]...'),
1817 _('hg commit [OPTION]... [FILE]...')),
1818 "copy|cp": (copy,
1818 "copy|cp": (copy,
1819 [('I', 'include', [], 'include path in search'),
1819 [('I', 'include', [], _('include path in search')),
1820 ('X', 'exclude', [], 'exclude path from search'),
1820 ('X', 'exclude', [], _('exclude path from search')),
1821 ('A', 'after', None, 'record a copy after it has happened'),
1821 ('A', 'after', None, _('record a copy after it has happened')),
1822 ('f', 'force', None, 'replace destination if it exists'),
1822 ('f', 'force', None, _('replace destination if it exists')),
1823 ('p', 'parents', None, 'append source path to dest')],
1823 ('p', 'parents', None, _('append source path to dest'))],
1824 'hg copy [OPTION]... [SOURCE]... DEST'),
1824 _('hg copy [OPTION]... [SOURCE]... DEST')),
1825 "debugancestor": (debugancestor, [], 'debugancestor INDEX REV1 REV2'),
1825 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
1826 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1826 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
1827 "debugconfig": (debugconfig, [], 'debugconfig'),
1827 "debugconfig": (debugconfig, [], _('debugconfig')),
1828 "debugsetparents": (debugsetparents, [], 'debugsetparents REV1 [REV2]'),
1828 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
1829 "debugstate": (debugstate, [], 'debugstate'),
1829 "debugstate": (debugstate, [], _('debugstate')),
1830 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1830 "debugdata": (debugdata, [], _('debugdata FILE REV')),
1831 "debugindex": (debugindex, [], 'debugindex FILE'),
1831 "debugindex": (debugindex, [], _('debugindex FILE')),
1832 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1832 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
1833 "debugrename": (debugrename, [], 'debugrename FILE [REV]'),
1833 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
1834 "debugwalk":
1834 "debugwalk":
1835 (debugwalk,
1835 (debugwalk,
1836 [('I', 'include', [], 'include path in search'),
1836 [('I', 'include', [], _('include path in search')),
1837 ('X', 'exclude', [], 'exclude path from search')],
1837 ('X', 'exclude', [], _('exclude path from search'))],
1838 'debugwalk [OPTION]... [FILE]...'),
1838 _('debugwalk [OPTION]... [FILE]...')),
1839 "^diff":
1839 "^diff":
1840 (diff,
1840 (diff,
1841 [('r', 'rev', [], 'revision'),
1841 [('r', 'rev', [], _('revision')),
1842 ('a', 'text', None, 'treat all files as text'),
1842 ('a', 'text', None, _('treat all files as text')),
1843 ('I', 'include', [], 'include path in search'),
1843 ('I', 'include', [], _('include path in search')),
1844 ('X', 'exclude', [], 'exclude path from search')],
1844 ('X', 'exclude', [], _('exclude path from search'))],
1845 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1845 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
1846 "^export":
1846 "^export":
1847 (export,
1847 (export,
1848 [('o', 'output', "", 'output to file'),
1848 [('o', 'output', "", _('output to file')),
1849 ('a', 'text', None, 'treat all files as text')],
1849 ('a', 'text', None, _('treat all files as text'))],
1850 "hg export [-a] [-o OUTFILE] REV..."),
1850 _("hg export [-a] [-o OUTFILE] REV...")),
1851 "forget":
1851 "forget":
1852 (forget,
1852 (forget,
1853 [('I', 'include', [], 'include path in search'),
1853 [('I', 'include', [], _('include path in search')),
1854 ('X', 'exclude', [], 'exclude path from search')],
1854 ('X', 'exclude', [], _('exclude path from search'))],
1855 "hg forget [OPTION]... FILE..."),
1855 _("hg forget [OPTION]... FILE...")),
1856 "grep":
1856 "grep":
1857 (grep,
1857 (grep,
1858 [('0', 'print0', None, 'end fields with NUL'),
1858 [('0', 'print0', None, _('end fields with NUL')),
1859 ('I', 'include', [], 'include path in search'),
1859 ('I', 'include', [], _('include path in search')),
1860 ('X', 'exclude', [], 'include path in search'),
1860 ('X', 'exclude', [], _('include path in search')),
1861 ('', 'all', None, 'print all revisions with matches'),
1861 ('', 'all', None, _('print all revisions with matches')),
1862 ('i', 'ignore-case', None, 'ignore case when matching'),
1862 ('i', 'ignore-case', None, _('ignore case when matching')),
1863 ('l', 'files-with-matches', None, 'print names of files and revs with matches'),
1863 ('l', 'files-with-matches', None, _('print names of files and revs with matches')),
1864 ('n', 'line-number', None, 'print line numbers'),
1864 ('n', 'line-number', None, _('print line numbers')),
1865 ('r', 'rev', [], 'search in revision rev'),
1865 ('r', 'rev', [], _('search in revision rev')),
1866 ('u', 'user', None, 'print user who made change')],
1866 ('u', 'user', None, _('print user who made change'))],
1867 "hg grep [OPTION]... PATTERN [FILE]..."),
1867 _("hg grep [OPTION]... PATTERN [FILE]...")),
1868 "heads":
1868 "heads":
1869 (heads,
1869 (heads,
1870 [('b', 'branches', None, 'find branch info')],
1870 [('b', 'branches', None, _('find branch info'))],
1871 'hg heads [-b]'),
1871 _('hg heads [-b]')),
1872 "help": (help_, [], 'hg help [COMMAND]'),
1872 "help": (help_, [], _('hg help [COMMAND]')),
1873 "identify|id": (identify, [], 'hg identify'),
1873 "identify|id": (identify, [], _('hg identify')),
1874 "import|patch":
1874 "import|patch":
1875 (import_,
1875 (import_,
1876 [('p', 'strip', 1, 'path strip'),
1876 [('p', 'strip', 1, _('path strip')),
1877 ('f', 'force', None, 'skip check for outstanding changes'),
1877 ('f', 'force', None, _('skip check for outstanding changes')),
1878 ('b', 'base', "", 'base path')],
1878 ('b', 'base', "", _('base path'))],
1879 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1879 _("hg import [-f] [-p NUM] [-b BASE] PATCH...")),
1880 "incoming|in": (incoming,
1880 "incoming|in": (incoming,
1881 [('p', 'patch', None, 'show patch')],
1881 [('p', 'patch', None, _('show patch'))],
1882 'hg incoming [-p] [SOURCE]'),
1882 _('hg incoming [-p] [SOURCE]')),
1883 "^init": (init, [], 'hg init [DEST]'),
1883 "^init": (init, [], _('hg init [DEST]')),
1884 "locate":
1884 "locate":
1885 (locate,
1885 (locate,
1886 [('r', 'rev', '', 'revision'),
1886 [('r', 'rev', '', _('revision')),
1887 ('0', 'print0', None, 'end filenames with NUL'),
1887 ('0', 'print0', None, _('end filenames with NUL')),
1888 ('f', 'fullpath', None, 'print complete paths'),
1888 ('f', 'fullpath', None, _('print complete paths')),
1889 ('I', 'include', [], 'include path in search'),
1889 ('I', 'include', [], _('include path in search')),
1890 ('X', 'exclude', [], 'exclude path from search')],
1890 ('X', 'exclude', [], _('exclude path from search'))],
1891 'hg locate [OPTION]... [PATTERN]...'),
1891 _('hg locate [OPTION]... [PATTERN]...')),
1892 "^log|history":
1892 "^log|history":
1893 (log,
1893 (log,
1894 [('I', 'include', [], 'include path in search'),
1894 [('I', 'include', [], _('include path in search')),
1895 ('X', 'exclude', [], 'exclude path from search'),
1895 ('X', 'exclude', [], _('exclude path from search')),
1896 ('b', 'branch', None, 'show branches'),
1896 ('b', 'branch', None, _('show branches')),
1897 ('k', 'keyword', [], 'search for a keyword'),
1897 ('k', 'keyword', [], _('search for a keyword')),
1898 ('r', 'rev', [], 'revision'),
1898 ('r', 'rev', [], _('revision')),
1899 ('p', 'patch', None, 'show patch')],
1899 ('p', 'patch', None, _('show patch'))],
1900 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1900 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
1901 "manifest": (manifest, [], 'hg manifest [REV]'),
1901 "manifest": (manifest, [], _('hg manifest [REV]')),
1902 "outgoing|out": (outgoing,
1902 "outgoing|out": (outgoing,
1903 [('p', 'patch', None, 'show patch')],
1903 [('p', 'patch', None, _('show patch'))],
1904 'hg outgoing [-p] [DEST]'),
1904 _('hg outgoing [-p] [DEST]')),
1905 "parents": (parents, [], 'hg parents [REV]'),
1905 "parents": (parents, [], _('hg parents [REV]')),
1906 "paths": (paths, [], 'hg paths [NAME]'),
1906 "paths": (paths, [], _('hg paths [NAME]')),
1907 "^pull":
1907 "^pull":
1908 (pull,
1908 (pull,
1909 [('u', 'update', None, 'update working directory'),
1909 [('u', 'update', None, _('update working directory')),
1910 ('e', 'ssh', "", 'ssh command'),
1910 ('e', 'ssh', "", _('ssh command')),
1911 ('', 'remotecmd', "", 'remote hg command')],
1911 ('', 'remotecmd', "", _('remote hg command'))],
1912 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1912 _('hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]')),
1913 "^push":
1913 "^push":
1914 (push,
1914 (push,
1915 [('f', 'force', None, 'force push'),
1915 [('f', 'force', None, _('force push')),
1916 ('e', 'ssh', "", 'ssh command'),
1916 ('e', 'ssh', "", _('ssh command')),
1917 ('', 'remotecmd', "", 'remote hg command')],
1917 ('', 'remotecmd', "", _('remote hg command'))],
1918 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1918 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
1919 "rawcommit":
1919 "rawcommit":
1920 (rawcommit,
1920 (rawcommit,
1921 [('p', 'parent', [], 'parent'),
1921 [('p', 'parent', [], _('parent')),
1922 ('d', 'date', "", 'date code'),
1922 ('d', 'date', "", _('date code')),
1923 ('u', 'user', "", 'user'),
1923 ('u', 'user', "", _('user')),
1924 ('F', 'files', "", 'file list'),
1924 ('F', 'files', "", _('file list')),
1925 ('m', 'message', "", 'commit message'),
1925 ('m', 'message', "", _('commit message')),
1926 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1926 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1927 ('l', 'logfile', "", 'commit message file')],
1927 ('l', 'logfile', "", _('commit message file'))],
1928 'hg rawcommit [OPTION]... [FILE]...'),
1928 _('hg rawcommit [OPTION]... [FILE]...')),
1929 "recover": (recover, [], "hg recover"),
1929 "recover": (recover, [], _("hg recover")),
1930 "^remove|rm": (remove,
1930 "^remove|rm": (remove,
1931 [('I', 'include', [], 'include path in search'),
1931 [('I', 'include', [], _('include path in search')),
1932 ('X', 'exclude', [], 'exclude path from search')],
1932 ('X', 'exclude', [], _('exclude path from search'))],
1933 "hg remove [OPTION]... FILE..."),
1933 _("hg remove [OPTION]... FILE...")),
1934 "rename|mv": (rename,
1934 "rename|mv": (rename,
1935 [('I', 'include', [], 'include path in search'),
1935 [('I', 'include', [], _('include path in search')),
1936 ('X', 'exclude', [], 'exclude path from search'),
1936 ('X', 'exclude', [], _('exclude path from search')),
1937 ('A', 'after', None, 'record a copy after it has happened'),
1937 ('A', 'after', None, _('record a copy after it has happened')),
1938 ('f', 'force', None, 'replace destination if it exists'),
1938 ('f', 'force', None, _('replace destination if it exists')),
1939 ('p', 'parents', None, 'append source path to dest')],
1939 ('p', 'parents', None, _('append source path to dest'))],
1940 'hg rename [OPTION]... [SOURCE]... DEST'),
1940 _('hg rename [OPTION]... [SOURCE]... DEST')),
1941 "^revert":
1941 "^revert":
1942 (revert,
1942 (revert,
1943 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1943 [("n", "nonrecursive", None, _("don't recurse into subdirs")),
1944 ("r", "rev", "", "revision")],
1944 ("r", "rev", "", _("revision"))],
1945 "hg revert [-n] [-r REV] [NAME]..."),
1945 _("hg revert [-n] [-r REV] [NAME]...")),
1946 "root": (root, [], "hg root"),
1946 "root": (root, [], _("hg root")),
1947 "^serve":
1947 "^serve":
1948 (serve,
1948 (serve,
1949 [('A', 'accesslog', '', 'access log file'),
1949 [('A', 'accesslog', '', _('access log file')),
1950 ('E', 'errorlog', '', 'error log file'),
1950 ('E', 'errorlog', '', _('error log file')),
1951 ('p', 'port', 0, 'listen port'),
1951 ('p', 'port', 0, _('listen port')),
1952 ('a', 'address', '', 'interface address'),
1952 ('a', 'address', '', _('interface address')),
1953 ('n', 'name', "", 'repository name'),
1953 ('n', 'name', "", _('repository name')),
1954 ('', 'stdio', None, 'for remote clients'),
1954 ('', 'stdio', None, _('for remote clients')),
1955 ('t', 'templates', "", 'template directory'),
1955 ('t', 'templates', "", _('template directory')),
1956 ('', 'style', "", 'template style'),
1956 ('', 'style', "", _('template style')),
1957 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1957 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
1958 "hg serve [OPTION]..."),
1958 _("hg serve [OPTION]...")),
1959 "^status":
1959 "^status":
1960 (status,
1960 (status,
1961 [('m', 'modified', None, 'show only modified files'),
1961 [('m', 'modified', None, _('show only modified files')),
1962 ('a', 'added', None, 'show only added files'),
1962 ('a', 'added', None, _('show only added files')),
1963 ('r', 'removed', None, 'show only removed files'),
1963 ('r', 'removed', None, _('show only removed files')),
1964 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1964 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
1965 ('n', 'no-status', None, 'hide status prefix'),
1965 ('n', 'no-status', None, _('hide status prefix')),
1966 ('0', 'print0', None, 'end filenames with NUL'),
1966 ('0', 'print0', None, _('end filenames with NUL')),
1967 ('I', 'include', [], 'include path in search'),
1967 ('I', 'include', [], _('include path in search')),
1968 ('X', 'exclude', [], 'exclude path from search')],
1968 ('X', 'exclude', [], _('exclude path from search'))],
1969 "hg status [OPTION]... [FILE]..."),
1969 _("hg status [OPTION]... [FILE]...")),
1970 "tag":
1970 "tag":
1971 (tag,
1971 (tag,
1972 [('l', 'local', None, 'make the tag local'),
1972 [('l', 'local', None, _('make the tag local')),
1973 ('m', 'message', "", 'commit message'),
1973 ('m', 'message', "", _('commit message')),
1974 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1974 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1975 ('d', 'date', "", 'date code'),
1975 ('d', 'date', "", _('date code')),
1976 ('u', 'user', "", 'user')],
1976 ('u', 'user', "", _('user'))],
1977 'hg tag [OPTION]... NAME [REV]'),
1977 _('hg tag [OPTION]... NAME [REV]')),
1978 "tags": (tags, [], 'hg tags'),
1978 "tags": (tags, [], _('hg tags')),
1979 "tip": (tip, [], 'hg tip'),
1979 "tip": (tip, [], _('hg tip')),
1980 "unbundle":
1980 "unbundle":
1981 (unbundle,
1981 (unbundle,
1982 [],
1982 [],
1983 'hg unbundle FILE'),
1983 _('hg unbundle FILE')),
1984 "undo": (undo, [], 'hg undo'),
1984 "undo": (undo, [], _('hg undo')),
1985 "^update|up|checkout|co":
1985 "^update|up|checkout|co":
1986 (update,
1986 (update,
1987 [('b', 'branch', "", 'checkout the head of a specific branch'),
1987 [('b', 'branch', "", _('checkout the head of a specific branch')),
1988 ('m', 'merge', None, 'allow merging of conflicts'),
1988 ('m', 'merge', None, _('allow merging of conflicts')),
1989 ('C', 'clean', None, 'overwrite locally modified files')],
1989 ('C', 'clean', None, _('overwrite locally modified files'))],
1990 'hg update [-b TAG] [-m] [-C] [REV]'),
1990 _('hg update [-b TAG] [-m] [-C] [REV]')),
1991 "verify": (verify, [], 'hg verify'),
1991 "verify": (verify, [], _('hg verify')),
1992 "version": (show_version, [], 'hg version'),
1992 "version": (show_version, [], _('hg version')),
1993 }
1993 }
1994
1994
1995 globalopts = [
1995 globalopts = [
1996 ('R', 'repository', "", 'repository root directory'),
1996 ('R', 'repository', "", _('repository root directory')),
1997 ('', 'cwd', '', 'change working directory'),
1997 ('', 'cwd', '', _('change working directory')),
1998 ('y', 'noninteractive', None, 'run non-interactively'),
1998 ('y', 'noninteractive', None, _('run non-interactively')),
1999 ('q', 'quiet', None, 'quiet mode'),
1999 ('q', 'quiet', None, _('quiet mode')),
2000 ('v', 'verbose', None, 'verbose mode'),
2000 ('v', 'verbose', None, _('verbose mode')),
2001 ('', 'debug', None, 'debug mode'),
2001 ('', 'debug', None, _('debug mode')),
2002 ('', 'debugger', None, 'start debugger'),
2002 ('', 'debugger', None, _('start debugger')),
2003 ('', 'traceback', None, 'print traceback on exception'),
2003 ('', 'traceback', None, _('print traceback on exception')),
2004 ('', 'time', None, 'time how long the command takes'),
2004 ('', 'time', None, _('time how long the command takes')),
2005 ('', 'profile', None, 'profile'),
2005 ('', 'profile', None, _('profile')),
2006 ('', 'version', None, 'output version information and exit'),
2006 ('', 'version', None, _('output version information and exit')),
2007 ('h', 'help', None, 'display help and exit'),
2007 ('h', 'help', None, _('display help and exit')),
2008 ]
2008 ]
2009
2009
2010 norepo = ("clone init version help debugancestor debugconfig debugdata"
2010 norepo = ("clone init version help debugancestor debugconfig debugdata"
2011 " debugindex debugindexdot paths")
2011 " debugindex debugindexdot paths")
2012
2012
2013 def find(cmd):
2013 def find(cmd):
2014 for e in table.keys():
2014 for e in table.keys():
2015 if re.match("(%s)$" % e, cmd):
2015 if re.match("(%s)$" % e, cmd):
2016 return e, table[e]
2016 return e, table[e]
2017
2017
2018 raise UnknownCommand(cmd)
2018 raise UnknownCommand(cmd)
2019
2019
2020 class SignalInterrupt(Exception):
2020 class SignalInterrupt(Exception):
2021 """Exception raised on SIGTERM and SIGHUP."""
2021 """Exception raised on SIGTERM and SIGHUP."""
2022
2022
2023 def catchterm(*args):
2023 def catchterm(*args):
2024 raise SignalInterrupt
2024 raise SignalInterrupt
2025
2025
2026 def run():
2026 def run():
2027 sys.exit(dispatch(sys.argv[1:]))
2027 sys.exit(dispatch(sys.argv[1:]))
2028
2028
2029 class ParseError(Exception):
2029 class ParseError(Exception):
2030 """Exception raised on errors in parsing the command line."""
2030 """Exception raised on errors in parsing the command line."""
2031
2031
2032 def parse(args):
2032 def parse(args):
2033 options = {}
2033 options = {}
2034 cmdoptions = {}
2034 cmdoptions = {}
2035
2035
2036 try:
2036 try:
2037 args = fancyopts.fancyopts(args, globalopts, options)
2037 args = fancyopts.fancyopts(args, globalopts, options)
2038 except fancyopts.getopt.GetoptError, inst:
2038 except fancyopts.getopt.GetoptError, inst:
2039 raise ParseError(None, inst)
2039 raise ParseError(None, inst)
2040
2040
2041 if args:
2041 if args:
2042 cmd, args = args[0], args[1:]
2042 cmd, args = args[0], args[1:]
2043 i = find(cmd)[1]
2043 i = find(cmd)[1]
2044 c = list(i[1])
2044 c = list(i[1])
2045 else:
2045 else:
2046 cmd = None
2046 cmd = None
2047 c = []
2047 c = []
2048
2048
2049 # combine global options into local
2049 # combine global options into local
2050 for o in globalopts:
2050 for o in globalopts:
2051 c.append((o[0], o[1], options[o[1]], o[3]))
2051 c.append((o[0], o[1], options[o[1]], o[3]))
2052
2052
2053 try:
2053 try:
2054 args = fancyopts.fancyopts(args, c, cmdoptions)
2054 args = fancyopts.fancyopts(args, c, cmdoptions)
2055 except fancyopts.getopt.GetoptError, inst:
2055 except fancyopts.getopt.GetoptError, inst:
2056 raise ParseError(cmd, inst)
2056 raise ParseError(cmd, inst)
2057
2057
2058 # separate global options back out
2058 # separate global options back out
2059 for o in globalopts:
2059 for o in globalopts:
2060 n = o[1]
2060 n = o[1]
2061 options[n] = cmdoptions[n]
2061 options[n] = cmdoptions[n]
2062 del cmdoptions[n]
2062 del cmdoptions[n]
2063
2063
2064 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2064 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2065
2065
2066 def dispatch(args):
2066 def dispatch(args):
2067 signal.signal(signal.SIGTERM, catchterm)
2067 signal.signal(signal.SIGTERM, catchterm)
2068 try:
2068 try:
2069 signal.signal(signal.SIGHUP, catchterm)
2069 signal.signal(signal.SIGHUP, catchterm)
2070 except AttributeError:
2070 except AttributeError:
2071 pass
2071 pass
2072
2072
2073 u = ui.ui()
2073 u = ui.ui()
2074 external = []
2074 external = []
2075 for x in u.extensions():
2075 for x in u.extensions():
2076 if x[1]:
2076 if x[1]:
2077 try:
2077 try:
2078 mod = imp.load_source(x[0], x[1])
2078 mod = imp.load_source(x[0], x[1])
2079 except:
2079 except:
2080 u.warn("*** failed to import extension %s\n" % x[1])
2080 u.warn(_("*** failed to import extension %s\n") % x[1])
2081 continue
2081 continue
2082 else:
2082 else:
2083 def importh(name):
2083 def importh(name):
2084 mod = __import__(name)
2084 mod = __import__(name)
2085 components = name.split('.')
2085 components = name.split('.')
2086 for comp in components[1:]:
2086 for comp in components[1:]:
2087 mod = getattr(mod, comp)
2087 mod = getattr(mod, comp)
2088 return mod
2088 return mod
2089 try:
2089 try:
2090 mod = importh(x[0])
2090 mod = importh(x[0])
2091 except:
2091 except:
2092 u.warn("failed to import extension %s\n" % x[0])
2092 u.warn(_("failed to import extension %s\n") % x[0])
2093 continue
2093 continue
2094
2094
2095 external.append(mod)
2095 external.append(mod)
2096 for x in external:
2096 for x in external:
2097 cmdtable = getattr(x, 'cmdtable', {})
2097 cmdtable = getattr(x, 'cmdtable', {})
2098 for t in cmdtable:
2098 for t in cmdtable:
2099 if t in table:
2099 if t in table:
2100 u.warn("module %s overrides %s\n" % (x.__name__, t))
2100 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2101 table.update(cmdtable)
2101 table.update(cmdtable)
2102
2102
2103 try:
2103 try:
2104 cmd, func, args, options, cmdoptions = parse(args)
2104 cmd, func, args, options, cmdoptions = parse(args)
2105 except ParseError, inst:
2105 except ParseError, inst:
2106 if inst.args[0]:
2106 if inst.args[0]:
2107 u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
2107 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2108 help_(u, inst.args[0])
2108 help_(u, inst.args[0])
2109 else:
2109 else:
2110 u.warn("hg: %s\n" % inst.args[1])
2110 u.warn(_("hg: %s\n") % inst.args[1])
2111 help_(u, 'shortlist')
2111 help_(u, 'shortlist')
2112 sys.exit(-1)
2112 sys.exit(-1)
2113 except UnknownCommand, inst:
2113 except UnknownCommand, inst:
2114 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2114 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2115 help_(u, 'shortlist')
2115 help_(u, 'shortlist')
2116 sys.exit(1)
2116 sys.exit(1)
2117
2117
2118 if options["time"]:
2118 if options["time"]:
2119 def get_times():
2119 def get_times():
2120 t = os.times()
2120 t = os.times()
2121 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2121 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2122 t = (t[0], t[1], t[2], t[3], time.clock())
2122 t = (t[0], t[1], t[2], t[3], time.clock())
2123 return t
2123 return t
2124 s = get_times()
2124 s = get_times()
2125 def print_time():
2125 def print_time():
2126 t = get_times()
2126 t = get_times()
2127 u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
2127 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2128 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2128 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2129 atexit.register(print_time)
2129 atexit.register(print_time)
2130
2130
2131 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2131 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2132 not options["noninteractive"])
2132 not options["noninteractive"])
2133
2133
2134 # enter the debugger before command execution
2134 # enter the debugger before command execution
2135 if options['debugger']:
2135 if options['debugger']:
2136 pdb.set_trace()
2136 pdb.set_trace()
2137
2137
2138 try:
2138 try:
2139 try:
2139 try:
2140 if options['help']:
2140 if options['help']:
2141 help_(u, cmd, options['version'])
2141 help_(u, cmd, options['version'])
2142 sys.exit(0)
2142 sys.exit(0)
2143 elif options['version']:
2143 elif options['version']:
2144 show_version(u)
2144 show_version(u)
2145 sys.exit(0)
2145 sys.exit(0)
2146 elif not cmd:
2146 elif not cmd:
2147 help_(u, 'shortlist')
2147 help_(u, 'shortlist')
2148 sys.exit(0)
2148 sys.exit(0)
2149
2149
2150 if options['cwd']:
2150 if options['cwd']:
2151 try:
2151 try:
2152 os.chdir(options['cwd'])
2152 os.chdir(options['cwd'])
2153 except OSError, inst:
2153 except OSError, inst:
2154 raise util.Abort('%s: %s' %
2154 raise util.Abort('%s: %s' %
2155 (options['cwd'], inst.strerror))
2155 (options['cwd'], inst.strerror))
2156
2156
2157 if cmd not in norepo.split():
2157 if cmd not in norepo.split():
2158 path = options["repository"] or ""
2158 path = options["repository"] or ""
2159 repo = hg.repository(ui=u, path=path)
2159 repo = hg.repository(ui=u, path=path)
2160 for x in external:
2160 for x in external:
2161 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2161 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2162 d = lambda: func(u, repo, *args, **cmdoptions)
2162 d = lambda: func(u, repo, *args, **cmdoptions)
2163 else:
2163 else:
2164 d = lambda: func(u, *args, **cmdoptions)
2164 d = lambda: func(u, *args, **cmdoptions)
2165
2165
2166 if options['profile']:
2166 if options['profile']:
2167 import hotshot, hotshot.stats
2167 import hotshot, hotshot.stats
2168 prof = hotshot.Profile("hg.prof")
2168 prof = hotshot.Profile("hg.prof")
2169 r = prof.runcall(d)
2169 r = prof.runcall(d)
2170 prof.close()
2170 prof.close()
2171 stats = hotshot.stats.load("hg.prof")
2171 stats = hotshot.stats.load("hg.prof")
2172 stats.strip_dirs()
2172 stats.strip_dirs()
2173 stats.sort_stats('time', 'calls')
2173 stats.sort_stats('time', 'calls')
2174 stats.print_stats(40)
2174 stats.print_stats(40)
2175 return r
2175 return r
2176 else:
2176 else:
2177 return d()
2177 return d()
2178 except:
2178 except:
2179 # enter the debugger when we hit an exception
2179 # enter the debugger when we hit an exception
2180 if options['debugger']:
2180 if options['debugger']:
2181 pdb.post_mortem(sys.exc_info()[2])
2181 pdb.post_mortem(sys.exc_info()[2])
2182 if options['traceback']:
2182 if options['traceback']:
2183 traceback.print_exc()
2183 traceback.print_exc()
2184 raise
2184 raise
2185 except hg.RepoError, inst:
2185 except hg.RepoError, inst:
2186 u.warn("abort: ", inst, "!\n")
2186 u.warn(_("abort: "), inst, "!\n")
2187 except revlog.RevlogError, inst:
2187 except revlog.RevlogError, inst:
2188 u.warn("abort: ", inst, "!\n")
2188 u.warn(_("abort: "), inst, "!\n")
2189 except SignalInterrupt:
2189 except SignalInterrupt:
2190 u.warn("killed!\n")
2190 u.warn(_("killed!\n"))
2191 except KeyboardInterrupt:
2191 except KeyboardInterrupt:
2192 try:
2192 try:
2193 u.warn("interrupted!\n")
2193 u.warn(_("interrupted!\n"))
2194 except IOError, inst:
2194 except IOError, inst:
2195 if inst.errno == errno.EPIPE:
2195 if inst.errno == errno.EPIPE:
2196 if u.debugflag:
2196 if u.debugflag:
2197 u.warn("\nbroken pipe\n")
2197 u.warn(_("\nbroken pipe\n"))
2198 else:
2198 else:
2199 raise
2199 raise
2200 except IOError, inst:
2200 except IOError, inst:
2201 if hasattr(inst, "code"):
2201 if hasattr(inst, "code"):
2202 u.warn("abort: %s\n" % inst)
2202 u.warn(_("abort: %s\n") % inst)
2203 elif hasattr(inst, "reason"):
2203 elif hasattr(inst, "reason"):
2204 u.warn("abort: error: %s\n" % inst.reason[1])
2204 u.warn(_("abort: error: %s\n") % inst.reason[1])
2205 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2205 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2206 if u.debugflag:
2206 if u.debugflag:
2207 u.warn("broken pipe\n")
2207 u.warn(_("broken pipe\n"))
2208 elif getattr(inst, "strerror", None):
2208 elif getattr(inst, "strerror", None):
2209 if getattr(inst, "filename", None):
2209 if getattr(inst, "filename", None):
2210 u.warn("abort: %s - %s\n" % (inst.strerror, inst.filename))
2210 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2211 else:
2211 else:
2212 u.warn("abort: %s\n" % inst.strerror)
2212 u.warn(_("abort: %s\n") % inst.strerror)
2213 else:
2213 else:
2214 raise
2214 raise
2215 except OSError, inst:
2215 except OSError, inst:
2216 if hasattr(inst, "filename"):
2216 if hasattr(inst, "filename"):
2217 u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
2217 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2218 else:
2218 else:
2219 u.warn("abort: %s\n" % inst.strerror)
2219 u.warn(_("abort: %s\n") % inst.strerror)
2220 except util.Abort, inst:
2220 except util.Abort, inst:
2221 u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
2221 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2222 sys.exit(1)
2222 sys.exit(1)
2223 except TypeError, inst:
2223 except TypeError, inst:
2224 # was this an argument error?
2224 # was this an argument error?
2225 tb = traceback.extract_tb(sys.exc_info()[2])
2225 tb = traceback.extract_tb(sys.exc_info()[2])
2226 if len(tb) > 2: # no
2226 if len(tb) > 2: # no
2227 raise
2227 raise
2228 u.debug(inst, "\n")
2228 u.debug(inst, "\n")
2229 u.warn("%s: invalid arguments\n" % cmd)
2229 u.warn(_("%s: invalid arguments\n") % cmd)
2230 help_(u, cmd)
2230 help_(u, cmd)
2231 except UnknownCommand, inst:
2231 except UnknownCommand, inst:
2232 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2232 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2233 help_(u, 'shortlist')
2233 help_(u, 'shortlist')
2234 except SystemExit:
2234 except SystemExit:
2235 # don't catch this in the catch-all below
2235 # don't catch this in the catch-all below
2236 raise
2236 raise
2237 except:
2237 except:
2238 u.warn("** unknown exception encountered, details follow\n")
2238 u.warn(_("** unknown exception encountered, details follow\n"))
2239 u.warn("** report bug details to mercurial@selenic.com\n")
2239 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2240 raise
2240 raise
2241
2241
2242 sys.exit(-1)
2242 sys.exit(-1)
@@ -1,426 +1,426 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import struct, os
10 import struct, os
11 from node import *
11 from node import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 from demandload import *
13 from demandload import *
14 demandload(globals(), "time bisect stat util re")
14 demandload(globals(), "time bisect stat util re")
15
15
16 class dirstate:
16 class dirstate:
17 def __init__(self, opener, ui, root):
17 def __init__(self, opener, ui, root):
18 self.opener = opener
18 self.opener = opener
19 self.root = root
19 self.root = root
20 self.dirty = 0
20 self.dirty = 0
21 self.ui = ui
21 self.ui = ui
22 self.map = None
22 self.map = None
23 self.pl = None
23 self.pl = None
24 self.copies = {}
24 self.copies = {}
25 self.ignorefunc = None
25 self.ignorefunc = None
26 self.blockignore = False
26 self.blockignore = False
27
27
28 def wjoin(self, f):
28 def wjoin(self, f):
29 return os.path.join(self.root, f)
29 return os.path.join(self.root, f)
30
30
31 def getcwd(self):
31 def getcwd(self):
32 cwd = os.getcwd()
32 cwd = os.getcwd()
33 if cwd == self.root: return ''
33 if cwd == self.root: return ''
34 return cwd[len(self.root) + 1:]
34 return cwd[len(self.root) + 1:]
35
35
36 def hgignore(self):
36 def hgignore(self):
37 '''return the contents of .hgignore as a list of patterns.
37 '''return the contents of .hgignore as a list of patterns.
38
38
39 trailing white space is dropped.
39 trailing white space is dropped.
40 the escape character is backslash.
40 the escape character is backslash.
41 comments start with #.
41 comments start with #.
42 empty lines are skipped.
42 empty lines are skipped.
43
43
44 lines can be of the following formats:
44 lines can be of the following formats:
45
45
46 syntax: regexp # defaults following lines to non-rooted regexps
46 syntax: regexp # defaults following lines to non-rooted regexps
47 syntax: glob # defaults following lines to non-rooted globs
47 syntax: glob # defaults following lines to non-rooted globs
48 re:pattern # non-rooted regular expression
48 re:pattern # non-rooted regular expression
49 glob:pattern # non-rooted glob
49 glob:pattern # non-rooted glob
50 pattern # pattern of the current default type'''
50 pattern # pattern of the current default type'''
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
52 def parselines(fp):
52 def parselines(fp):
53 for line in fp:
53 for line in fp:
54 escape = False
54 escape = False
55 for i in xrange(len(line)):
55 for i in xrange(len(line)):
56 if escape: escape = False
56 if escape: escape = False
57 elif line[i] == '\\': escape = True
57 elif line[i] == '\\': escape = True
58 elif line[i] == '#': break
58 elif line[i] == '#': break
59 line = line[:i].rstrip()
59 line = line[:i].rstrip()
60 if line: yield line
60 if line: yield line
61 pats = []
61 pats = []
62 try:
62 try:
63 fp = open(self.wjoin('.hgignore'))
63 fp = open(self.wjoin('.hgignore'))
64 syntax = 'relre:'
64 syntax = 'relre:'
65 for line in parselines(fp):
65 for line in parselines(fp):
66 if line.startswith('syntax:'):
66 if line.startswith('syntax:'):
67 s = line[7:].strip()
67 s = line[7:].strip()
68 try:
68 try:
69 syntax = syntaxes[s]
69 syntax = syntaxes[s]
70 except KeyError:
70 except KeyError:
71 self.ui.warn("ignoring invalid syntax '%s'\n" % s)
71 self.ui.warn(_("ignoring invalid syntax '%s'\n") % s)
72 continue
72 continue
73 pat = syntax + line
73 pat = syntax + line
74 for s in syntaxes.values():
74 for s in syntaxes.values():
75 if line.startswith(s):
75 if line.startswith(s):
76 pat = line
76 pat = line
77 break
77 break
78 pats.append(pat)
78 pats.append(pat)
79 except IOError: pass
79 except IOError: pass
80 return pats
80 return pats
81
81
82 def ignore(self, fn):
82 def ignore(self, fn):
83 '''default match function used by dirstate and localrepository.
83 '''default match function used by dirstate and localrepository.
84 this honours the .hgignore file, and nothing more.'''
84 this honours the .hgignore file, and nothing more.'''
85 if self.blockignore:
85 if self.blockignore:
86 return False
86 return False
87 if not self.ignorefunc:
87 if not self.ignorefunc:
88 ignore = self.hgignore()
88 ignore = self.hgignore()
89 if ignore:
89 if ignore:
90 files, self.ignorefunc, anypats = util.matcher(self.root,
90 files, self.ignorefunc, anypats = util.matcher(self.root,
91 inc=ignore)
91 inc=ignore)
92 else:
92 else:
93 self.ignorefunc = util.never
93 self.ignorefunc = util.never
94 return self.ignorefunc(fn)
94 return self.ignorefunc(fn)
95
95
96 def __del__(self):
96 def __del__(self):
97 if self.dirty:
97 if self.dirty:
98 self.write()
98 self.write()
99
99
100 def __getitem__(self, key):
100 def __getitem__(self, key):
101 try:
101 try:
102 return self.map[key]
102 return self.map[key]
103 except TypeError:
103 except TypeError:
104 self.read()
104 self.read()
105 return self[key]
105 return self[key]
106
106
107 def __contains__(self, key):
107 def __contains__(self, key):
108 if not self.map: self.read()
108 if not self.map: self.read()
109 return key in self.map
109 return key in self.map
110
110
111 def parents(self):
111 def parents(self):
112 if not self.pl:
112 if not self.pl:
113 self.read()
113 self.read()
114 return self.pl
114 return self.pl
115
115
116 def markdirty(self):
116 def markdirty(self):
117 if not self.dirty:
117 if not self.dirty:
118 self.dirty = 1
118 self.dirty = 1
119
119
120 def setparents(self, p1, p2=nullid):
120 def setparents(self, p1, p2=nullid):
121 if not self.pl:
121 if not self.pl:
122 self.read()
122 self.read()
123 self.markdirty()
123 self.markdirty()
124 self.pl = p1, p2
124 self.pl = p1, p2
125
125
126 def state(self, key):
126 def state(self, key):
127 try:
127 try:
128 return self[key][0]
128 return self[key][0]
129 except KeyError:
129 except KeyError:
130 return "?"
130 return "?"
131
131
132 def read(self):
132 def read(self):
133 if self.map is not None: return self.map
133 if self.map is not None: return self.map
134
134
135 self.map = {}
135 self.map = {}
136 self.pl = [nullid, nullid]
136 self.pl = [nullid, nullid]
137 try:
137 try:
138 st = self.opener("dirstate").read()
138 st = self.opener("dirstate").read()
139 if not st: return
139 if not st: return
140 except: return
140 except: return
141
141
142 self.pl = [st[:20], st[20: 40]]
142 self.pl = [st[:20], st[20: 40]]
143
143
144 pos = 40
144 pos = 40
145 while pos < len(st):
145 while pos < len(st):
146 e = struct.unpack(">cllll", st[pos:pos+17])
146 e = struct.unpack(">cllll", st[pos:pos+17])
147 l = e[4]
147 l = e[4]
148 pos += 17
148 pos += 17
149 f = st[pos:pos + l]
149 f = st[pos:pos + l]
150 if '\0' in f:
150 if '\0' in f:
151 f, c = f.split('\0')
151 f, c = f.split('\0')
152 self.copies[f] = c
152 self.copies[f] = c
153 self.map[f] = e[:4]
153 self.map[f] = e[:4]
154 pos += l
154 pos += l
155
155
156 def copy(self, source, dest):
156 def copy(self, source, dest):
157 self.read()
157 self.read()
158 self.markdirty()
158 self.markdirty()
159 self.copies[dest] = source
159 self.copies[dest] = source
160
160
161 def copied(self, file):
161 def copied(self, file):
162 return self.copies.get(file, None)
162 return self.copies.get(file, None)
163
163
164 def update(self, files, state, **kw):
164 def update(self, files, state, **kw):
165 ''' current states:
165 ''' current states:
166 n normal
166 n normal
167 m needs merging
167 m needs merging
168 r marked for removal
168 r marked for removal
169 a marked for addition'''
169 a marked for addition'''
170
170
171 if not files: return
171 if not files: return
172 self.read()
172 self.read()
173 self.markdirty()
173 self.markdirty()
174 for f in files:
174 for f in files:
175 if state == "r":
175 if state == "r":
176 self.map[f] = ('r', 0, 0, 0)
176 self.map[f] = ('r', 0, 0, 0)
177 else:
177 else:
178 s = os.lstat(os.path.join(self.root, f))
178 s = os.lstat(os.path.join(self.root, f))
179 st_size = kw.get('st_size', s.st_size)
179 st_size = kw.get('st_size', s.st_size)
180 st_mtime = kw.get('st_mtime', s.st_mtime)
180 st_mtime = kw.get('st_mtime', s.st_mtime)
181 self.map[f] = (state, s.st_mode, st_size, st_mtime)
181 self.map[f] = (state, s.st_mode, st_size, st_mtime)
182 if self.copies.has_key(f):
182 if self.copies.has_key(f):
183 del self.copies[f]
183 del self.copies[f]
184
184
185 def forget(self, files):
185 def forget(self, files):
186 if not files: return
186 if not files: return
187 self.read()
187 self.read()
188 self.markdirty()
188 self.markdirty()
189 for f in files:
189 for f in files:
190 try:
190 try:
191 del self.map[f]
191 del self.map[f]
192 except KeyError:
192 except KeyError:
193 self.ui.warn("not in dirstate: %s!\n" % f)
193 self.ui.warn(_("not in dirstate: %s!\n") % f)
194 pass
194 pass
195
195
196 def clear(self):
196 def clear(self):
197 self.map = {}
197 self.map = {}
198 self.markdirty()
198 self.markdirty()
199
199
200 def write(self):
200 def write(self):
201 st = self.opener("dirstate", "w")
201 st = self.opener("dirstate", "w")
202 st.write("".join(self.pl))
202 st.write("".join(self.pl))
203 for f, e in self.map.items():
203 for f, e in self.map.items():
204 c = self.copied(f)
204 c = self.copied(f)
205 if c:
205 if c:
206 f = f + "\0" + c
206 f = f + "\0" + c
207 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
207 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
208 st.write(e + f)
208 st.write(e + f)
209 self.dirty = 0
209 self.dirty = 0
210
210
211 def filterfiles(self, files):
211 def filterfiles(self, files):
212 ret = {}
212 ret = {}
213 unknown = []
213 unknown = []
214
214
215 for x in files:
215 for x in files:
216 if x is '.':
216 if x is '.':
217 return self.map.copy()
217 return self.map.copy()
218 if x not in self.map:
218 if x not in self.map:
219 unknown.append(x)
219 unknown.append(x)
220 else:
220 else:
221 ret[x] = self.map[x]
221 ret[x] = self.map[x]
222
222
223 if not unknown:
223 if not unknown:
224 return ret
224 return ret
225
225
226 b = self.map.keys()
226 b = self.map.keys()
227 b.sort()
227 b.sort()
228 blen = len(b)
228 blen = len(b)
229
229
230 for x in unknown:
230 for x in unknown:
231 bs = bisect.bisect(b, x)
231 bs = bisect.bisect(b, x)
232 if bs != 0 and b[bs-1] == x:
232 if bs != 0 and b[bs-1] == x:
233 ret[x] = self.map[x]
233 ret[x] = self.map[x]
234 continue
234 continue
235 while bs < blen:
235 while bs < blen:
236 s = b[bs]
236 s = b[bs]
237 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
237 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
238 ret[s] = self.map[s]
238 ret[s] = self.map[s]
239 else:
239 else:
240 break
240 break
241 bs += 1
241 bs += 1
242 return ret
242 return ret
243
243
244 def walk(self, files=None, match=util.always, dc=None):
244 def walk(self, files=None, match=util.always, dc=None):
245 self.read()
245 self.read()
246
246
247 # walk all files by default
247 # walk all files by default
248 if not files:
248 if not files:
249 files = [self.root]
249 files = [self.root]
250 if not dc:
250 if not dc:
251 dc = self.map.copy()
251 dc = self.map.copy()
252 elif not dc:
252 elif not dc:
253 dc = self.filterfiles(files)
253 dc = self.filterfiles(files)
254
254
255 def statmatch(file, stat):
255 def statmatch(file, stat):
256 file = util.pconvert(file)
256 file = util.pconvert(file)
257 if file not in dc and self.ignore(file):
257 if file not in dc and self.ignore(file):
258 return False
258 return False
259 return match(file)
259 return match(file)
260
260
261 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
261 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
262
262
263 # walk recursively through the directory tree, finding all files
263 # walk recursively through the directory tree, finding all files
264 # matched by the statmatch function
264 # matched by the statmatch function
265 #
265 #
266 # results are yielded in a tuple (src, filename), where src is one of:
266 # results are yielded in a tuple (src, filename), where src is one of:
267 # 'f' the file was found in the directory tree
267 # 'f' the file was found in the directory tree
268 # 'm' the file was only in the dirstate and not in the tree
268 # 'm' the file was only in the dirstate and not in the tree
269 #
269 #
270 # dc is an optional arg for the current dirstate. dc is not modified
270 # dc is an optional arg for the current dirstate. dc is not modified
271 # directly by this function, but might be modified by your statmatch call.
271 # directly by this function, but might be modified by your statmatch call.
272 #
272 #
273 def walkhelper(self, files, statmatch, dc):
273 def walkhelper(self, files, statmatch, dc):
274 def supported_type(f, st):
274 def supported_type(f, st):
275 if stat.S_ISREG(st.st_mode):
275 if stat.S_ISREG(st.st_mode):
276 return True
276 return True
277 else:
277 else:
278 kind = 'unknown'
278 kind = 'unknown'
279 if stat.S_ISCHR(st.st_mode): kind = 'character device'
279 if stat.S_ISCHR(st.st_mode): kind = _('character device')
280 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
280 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
281 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
281 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
282 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
282 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
283 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
283 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
284 elif stat.S_ISDIR(st.st_mode): kind = 'directory'
284 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
285 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
285 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
286 util.pathto(self.getcwd(), f),
286 util.pathto(self.getcwd(), f),
287 kind))
287 kind))
288 return False
288 return False
289
289
290 # recursion free walker, faster than os.walk.
290 # recursion free walker, faster than os.walk.
291 def findfiles(s):
291 def findfiles(s):
292 retfiles = []
292 retfiles = []
293 work = [s]
293 work = [s]
294 while work:
294 while work:
295 top = work.pop()
295 top = work.pop()
296 names = os.listdir(top)
296 names = os.listdir(top)
297 names.sort()
297 names.sort()
298 # nd is the top of the repository dir tree
298 # nd is the top of the repository dir tree
299 nd = util.normpath(top[len(self.root) + 1:])
299 nd = util.normpath(top[len(self.root) + 1:])
300 if nd == '.': nd = ''
300 if nd == '.': nd = ''
301 for f in names:
301 for f in names:
302 np = os.path.join(nd, f)
302 np = os.path.join(nd, f)
303 if seen(np):
303 if seen(np):
304 continue
304 continue
305 p = os.path.join(top, f)
305 p = os.path.join(top, f)
306 # don't trip over symlinks
306 # don't trip over symlinks
307 st = os.lstat(p)
307 st = os.lstat(p)
308 if stat.S_ISDIR(st.st_mode):
308 if stat.S_ISDIR(st.st_mode):
309 ds = os.path.join(nd, f +'/')
309 ds = os.path.join(nd, f +'/')
310 if statmatch(ds, st):
310 if statmatch(ds, st):
311 work.append(p)
311 work.append(p)
312 elif statmatch(np, st) and supported_type(np, st):
312 elif statmatch(np, st) and supported_type(np, st):
313 yield util.pconvert(np)
313 yield util.pconvert(np)
314
314
315
315
316 known = {'.hg': 1}
316 known = {'.hg': 1}
317 def seen(fn):
317 def seen(fn):
318 if fn in known: return True
318 if fn in known: return True
319 known[fn] = 1
319 known[fn] = 1
320
320
321 # step one, find all files that match our criteria
321 # step one, find all files that match our criteria
322 files.sort()
322 files.sort()
323 for ff in util.unique(files):
323 for ff in util.unique(files):
324 f = os.path.join(self.root, ff)
324 f = os.path.join(self.root, ff)
325 try:
325 try:
326 st = os.lstat(f)
326 st = os.lstat(f)
327 except OSError, inst:
327 except OSError, inst:
328 if ff not in dc: self.ui.warn('%s: %s\n' % (
328 if ff not in dc: self.ui.warn('%s: %s\n' % (
329 util.pathto(self.getcwd(), ff),
329 util.pathto(self.getcwd(), ff),
330 inst.strerror))
330 inst.strerror))
331 continue
331 continue
332 if stat.S_ISDIR(st.st_mode):
332 if stat.S_ISDIR(st.st_mode):
333 sorted = [ x for x in findfiles(f) ]
333 sorted = [ x for x in findfiles(f) ]
334 sorted.sort()
334 sorted.sort()
335 for fl in sorted:
335 for fl in sorted:
336 yield 'f', fl
336 yield 'f', fl
337 else:
337 else:
338 ff = util.normpath(ff)
338 ff = util.normpath(ff)
339 if seen(ff):
339 if seen(ff):
340 continue
340 continue
341 found = False
341 found = False
342 self.blockignore = True
342 self.blockignore = True
343 if statmatch(ff, st) and supported_type(ff, st):
343 if statmatch(ff, st) and supported_type(ff, st):
344 found = True
344 found = True
345 self.blockignore = False
345 self.blockignore = False
346 if found:
346 if found:
347 yield 'f', ff
347 yield 'f', ff
348
348
349 # step two run through anything left in the dc hash and yield
349 # step two run through anything left in the dc hash and yield
350 # if we haven't already seen it
350 # if we haven't already seen it
351 ks = dc.keys()
351 ks = dc.keys()
352 ks.sort()
352 ks.sort()
353 for k in ks:
353 for k in ks:
354 if not seen(k) and (statmatch(k, None)):
354 if not seen(k) and (statmatch(k, None)):
355 yield 'm', k
355 yield 'm', k
356
356
    def changes(self, files=None, match=util.always):
        """Compare the dirstate against the working directory.

        Returns a 5-tuple of file lists:
        (lookup, modified, added, removed + deleted, unknown)
        where 'lookup' holds files whose contents must be compared to
        decide whether they really changed (same size/mode, new mtime).
        'files' restricts the walk; 'match' filters individual paths.
        """
        self.read()
        if not files:
            files = [self.root]
            dc = self.map.copy()
        else:
            dc = self.filterfiles(files)
        lookup, modified, added, unknown = [], [], [], []
        removed, deleted = [], []

        # statmatch function to eliminate entries from the dirstate copy
        # and put files into the appropriate array.  This gets passed
        # to the walking code
        def statmatch(fn, s):
            fn = util.pconvert(fn)
            def checkappend(l, fn):
                # only record the file if it passes the match filter
                if match is util.always or match(fn):
                    l.append(fn)

            # directories (or missing stat info) are only walked into,
            # never classified here
            if not s or stat.S_ISDIR(s.st_mode):
                if self.ignore(fn): return False
                return match(fn)

            c = dc.pop(fn, None)
            if c:
                # dirstate entry: (state, mode, size, mtime)
                type, mode, size, time = c
                # check the common case first
                if type == 'n':
                    # 'n'ormal: size or exec-bit change means modified;
                    # mtime-only change needs a content comparison later
                    if size != s.st_size or (mode ^ s.st_mode) & 0100:
                        checkappend(modified, fn)
                    elif time != s.st_mtime:
                        checkappend(lookup, fn)
                elif type == 'm':
                    checkappend(modified, fn)
                elif type == 'a':
                    checkappend(added, fn)
                elif type == 'r':
                    # marked removed but still on disk
                    checkappend(unknown, fn)
            elif not self.ignore(fn) and match(fn):
                unknown.append(fn)
            # return false because we've already handled all cases above.
            # there's no need for the walking code to process the file
            # any further.
            return False

        # because our statmatch always returns false, self.walk will only
        # return files in the dirstate map that are not present in the FS.
        # But, we still need to iterate through the results to force the
        # walk to complete
        for src, fn in self.walkhelper(files, statmatch, dc):
            pass

        # there may be patterns in the .hgignore file that prevent us
        # from examining entire directories in the dirstate map, so we
        # go back and explicitly examine any matching files we've
        # ignored
        unexamined = [fn for fn in dc.iterkeys()
                      if self.ignore(fn) and match(fn)]

        for src, fn in self.walkhelper(unexamined, statmatch, dc):
            pass

        # anything left in dc didn't exist in the filesystem
        for fn, c in dc.iteritems():
            if not match(fn): continue
            if c[0] == 'r':
                removed.append(fn)
            else:
                deleted.append(fn)
        return (lookup, modified, added, removed + deleted, unknown)
@@ -1,987 +1,987 b''
1 # hgweb.py - web interface to a mercurial repository
1 # hgweb.py - web interface to a mercurial repository
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, cgi, sys
9 import os, cgi, sys
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15
15
def templatepath():
    """Find the web template directory next to this module.

    Checks "templates" and "../templates" relative to the module's own
    location and returns the first existing directory (None otherwise).
    """
    here = os.path.dirname(__file__)
    for candidate in ("templates", "../templates"):
        path = os.path.join(here, candidate)
        if os.path.isdir(path):
            return path
21
21
def age(x):
    """Render the age of the timestamp x[0] as a human-readable string.

    Picks the largest time unit with a count of at least 2, falling back
    to seconds, e.g. "2 hours" or "7 weeks".
    """
    def fmt(unit, count):
        # pluralize unless the count is exactly one
        if count == 1:
            return "%d %s" % (count, unit)
        return "%d %ss" % (count, unit)

    now = time.time()
    then = x[0]
    delta = max(1, int(now - then))

    # largest unit first; the final 1-second scale always matches
    scales = [("year", 3600 * 24 * 365),
              ("month", 3600 * 24 * 30),
              ("week", 3600 * 24 * 7),
              ("day", 3600 * 24),
              ("hour", 3600),
              ("minute", 60),
              ("second", 1)]

    for unit, secs in scales:
        n = delta / secs
        if n >= 2 or secs == 1:
            return fmt(unit, n)
48
48
def nl2br(text):
    """Insert an HTML <br/> before every newline (newline itself kept)."""
    return '<br/>\n'.join(text.split('\n'))
51
51
def obfuscate(text):
    """Encode every character as a numeric HTML entity.

    Used to hide e-mail addresses from naive address scrapers.
    """
    parts = []
    for ch in text:
        parts.append('&#%d;' % ord(ch))
    return ''.join(parts)
54
54
def up(p):
    """Return the parent of slash-separated path *p*, with trailing slash.

    The path is first normalized to start with "/" and to drop any
    trailing "/"; the root's parent is "/" itself.
    """
    if p[0] != "/":
        p = "/" + p
    if p[-1] == "/":
        p = p[:-1]
    parent = os.path.dirname(p)
    if parent == "/":
        return "/"
    return parent + "/"
64
64
class hgrequest:
    """A CGI request: wraps the input/output streams and the environment,
    and parses the submitted form fields."""
    def __init__(self, inp=None, out=None, env=None):
        self.inp = inp or sys.stdin
        self.out = out or sys.stdout
        self.env = env or os.environ
        # field name -> list of values, from query string and/or POST body
        self.form = cgi.parse(self.inp, self.env)

    def write(self, *things):
        """Write values to the client, recursively flattening iterables."""
        for thing in things:
            if hasattr(thing, "__iter__"):
                for part in thing:
                    self.write(part)
            else:
                try:
                    self.out.write(str(thing))
                except socket.error, inst:
                    # a client disconnect is routine; re-raise anything else
                    if inst[0] != errno.ECONNRESET:
                        raise

    def header(self, headers=[('Content-type','text/html')]):
        """Emit the given (name, value) header pairs and end the header
        section.  NOTE: the mutable default is shared across calls, but
        it is only iterated, never mutated."""
        for header in headers:
            self.out.write("%s: %s\r\n" % header)
        self.out.write("\r\n")

    def httphdr(self, type, file="", size=0):
        """Emit HTTP headers for a response of the given MIME type,
        optionally as a named attachment of known size."""
        headers = [('Content-type', type)]
        if file:
            headers.append(('Content-disposition', 'attachment; filename=%s' % file))
        if size > 0:
            headers.append(('Content-length', str(size)))
        self.header(headers)
97
97
class templater:
    """A minimal template engine.

    A map file associates template names either with inline strings
    (name = "text") or with files (name = relative/path).  Rendering a
    template substitutes #keyword#, #keyword%subtemplate# and
    #keyword|filter# markers from keyword arguments, lazily via
    generators.
    """
    def __init__(self, mapfile, filters={}, defaults={}):
        self.cache = {}      # template name -> template text
        self.map = {}        # template name -> template file path
        self.base = os.path.dirname(mapfile)
        self.filters = filters
        self.defaults = defaults

        for l in file(mapfile):
            m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
            if m:
                # inline template: name = "text"
                self.cache[m.group(1)] = m.group(2)
            else:
                m = re.match(r'(\S+)\s*=\s*(\S+)', l)
                if m:
                    # file template: name = path (relative to the map file)
                    self.map[m.group(1)] = os.path.join(self.base, m.group(2))
                else:
                    raise LookupError(_("unknown map entry '%s'") % l)

    def __call__(self, t, **map):
        """Render template *t* with the defaults overridden by **map,
        loading (and caching) the template file on first use."""
        m = self.defaults.copy()
        m.update(map)
        try:
            tmpl = self.cache[t]
        except KeyError:
            tmpl = self.cache[t] = file(self.map[t]).read()
        return self.template(tmpl, self.filters, **m)

    def template(self, tmpl, filters={}, **map):
        """Generator producing the expansion of *tmpl*.

        For each #name[%sub][|filter...]# marker: the value is looked up
        in **map (callables are invoked with **map); a %sub format
        renders the sub-template once per item of the value; |filters
        are applied left to right.
        """
        while tmpl:
            m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
            if m:
                # literal text before the marker
                yield tmpl[:m.start(0)]
                v = map.get(m.group(1), "")
                v = callable(v) and v(**map) or v

                format = m.group(2)
                fl = m.group(4)

                if format:
                    # render the sub-template for each item in v
                    q = v.__iter__
                    for i in q():
                        lm = map.copy()
                        lm.update(i)
                        yield self(format[1:], **lm)

                    v = ""

                elif fl:
                    # apply each |filter in order
                    for f in fl.split("|")[1:]:
                        v = filters[f](v)

                yield v
                tmpl = tmpl[m.end(0):]
            else:
                # no more markers: emit the remainder verbatim
                yield tmpl
                return
155
155
# Template filters shared by all hgweb templates: each maps a raw value
# (string, timestamp tuple, flag) to its display form.
common_filters = {
    "escape": cgi.escape,
    "age": age,
    "date": lambda x: util.datestr(x),
    "addbreaks": nl2br,
    "obfuscate": obfuscate,
    "short": (lambda x: x[:12]),
    "firstline": (lambda x: x.splitlines(1)[0]),
    "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
    }
167
167
168 class hgweb:
168 class hgweb:
    def __init__(self, repo, name=None):
        """Create a web frontend for *repo*.

        *repo* may be a repository path or an already-open repository
        object; *name* is an optional display name.
        """
        # accept either a path string or a repository object
        if type(repo) == type(""):
            self.repo = hg.repository(ui.ui(), repo)
        else:
            self.repo = repo

        # -1 guarantees the first refresh() reloads everything
        self.mtime = -1
        self.reponame = name
        self.archives = 'zip', 'gz', 'bz2'
178
178
    def refresh(self):
        """Reopen the repository and reread web settings if the
        changelog index file changed on disk since the last check."""
        s = os.stat(os.path.join(self.repo.root, ".hg", "00changelog.i"))
        if s.st_mtime != self.mtime:
            self.mtime = s.st_mtime
            self.repo = hg.repository(self.repo.ui, self.repo.root)
            self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
            self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
            self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
187
187
188 def listfiles(self, files, mf):
188 def listfiles(self, files, mf):
189 for f in files[:self.maxfiles]:
189 for f in files[:self.maxfiles]:
190 yield self.t("filenodelink", node=hex(mf[f]), file=f)
190 yield self.t("filenodelink", node=hex(mf[f]), file=f)
191 if len(files) > self.maxfiles:
191 if len(files) > self.maxfiles:
192 yield self.t("fileellipses")
192 yield self.t("fileellipses")
193
193
194 def listfilediffs(self, files, changeset):
194 def listfilediffs(self, files, changeset):
195 for f in files[:self.maxfiles]:
195 for f in files[:self.maxfiles]:
196 yield self.t("filedifflink", node=hex(changeset), file=f)
196 yield self.t("filedifflink", node=hex(changeset), file=f)
197 if len(files) > self.maxfiles:
197 if len(files) > self.maxfiles:
198 yield self.t("fileellipses")
198 yield self.t("fileellipses")
199
199
200 def parents(self, t1, nodes=[], rev=None,**args):
200 def parents(self, t1, nodes=[], rev=None,**args):
201 if not rev:
201 if not rev:
202 rev = lambda x: ""
202 rev = lambda x: ""
203 for node in nodes:
203 for node in nodes:
204 if node != nullid:
204 if node != nullid:
205 yield self.t(t1, node=hex(node), rev=rev(node), **args)
205 yield self.t(t1, node=hex(node), rev=rev(node), **args)
206
206
207 def showtag(self, t1, node=nullid, **args):
207 def showtag(self, t1, node=nullid, **args):
208 for t in self.repo.nodetags(node):
208 for t in self.repo.nodetags(node):
209 yield self.t(t1, tag=t, **args)
209 yield self.t(t1, tag=t, **args)
210
210
    def diff(self, node1, node2, files):
        """Yield rendered unified-diff blocks between two changesets,
        optionally restricted to the given files/directories."""
        def filterfiles(list, files):
            # keep exact matches plus anything below a listed directory
            l = [x for x in list if x in files]

            for f in files:
                if f[-1] != os.sep:
                    f += os.sep
                l += [x for x in list if x.startswith(f)]
            return l

        # alternates per diff block so the template can stripe rows
        parity = [0]
        def diffblock(diff, f, fn):
            yield self.t("diffblock",
                         lines=prettyprintlines(diff),
                         parity=parity[0],
                         file=f,
                         filenode=hex(fn or nullid))
            parity[0] = 1 - parity[0]

        def prettyprintlines(diff):
            # classify each diff line for template-level coloring
            for l in diff.splitlines(1):
                if l.startswith('+'):
                    yield self.t("difflineplus", line=l)
                elif l.startswith('-'):
                    yield self.t("difflineminus", line=l)
                elif l.startswith('@'):
                    yield self.t("difflineat", line=l)
                else:
                    yield self.t("diffline", line=l)

        r = self.repo
        cl = r.changelog
        mf = r.manifest
        change1 = cl.read(node1)
        change2 = cl.read(node2)
        mmap1 = mf.read(change1[0])
        mmap2 = mf.read(change2[0])
        date1 = util.datestr(change1[2])
        date2 = util.datestr(change2[2])

        # c: changed, a: added, d: deleted, u: unknown
        c, a, d, u = r.changes(node1, node2)
        if files:
            c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))

        for f in c:
            to = r.file(f).read(mmap1[f])
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in a:
            # added: no old content
            to = None
            tn = r.file(f).read(mmap2[f])
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
        for f in d:
            # deleted: no new content
            to = r.file(f).read(mmap1[f])
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
267
267
    def changelog(self, pos):
        """Render the changelog page showing up to self.maxchanges
        changesets ending at revision *pos*."""
        def changenav(**map):
            # step sizes 1, 3, 10, 30, 100, ... for the navigation links
            def seq(factor=1):
                yield 1 * factor
                yield 3 * factor
                #yield 5 * factor
                for f in seq(factor * 10):
                    yield f

            l = []
            for f in seq():
                if f < self.maxchanges / 2:
                    continue
                if f > count:
                    break
                r = "%d" % f
                # forward links appended, backward links prepended,
                # so the list reads -100 -30 ... +30 +100
                if pos + f < count:
                    l.append(("+" + r, pos + f))
                if pos - f >= 0:
                    l.insert(0, ("-" + r, pos - f))

            yield {"rev": 0, "label": "(0)"}

            for label, rev in l:
                yield {"label": label, "rev": rev}

            yield {"label": "tip", "rev": ""}

        def changelist(**map):
            parity = (start - end) & 1
            cl = self.repo.changelog
            l = []  # build a list in forward order for efficiency
            for i in range(start, end):
                n = cl.node(i)
                changes = cl.read(n)
                hn = hex(n)

                # insert at the front so the newest revision comes first
                l.insert(0, {"parity": parity,
                             "author": changes[1],
                             "parent": self.parents("changelogparent",
                                                    cl.parents(n), cl.rev),
                             "changelogtag": self.showtag("changelogtag",n),
                             "manifest": hex(changes[0]),
                             "desc": changes[4],
                             "date": changes[2],
                             "files": self.listfilediffs(changes[3], n),
                             "rev": i,
                             "node": hn})
                parity = 1 - parity

            for e in l:
                yield e

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]
        count = cl.count()
        # clamp the displayed window to [start, end)
        start = max(0, pos - self.maxchanges + 1)
        end = min(count, start + self.maxchanges)
        pos = end - 1

        yield self.t('changelog',
                     changenav=changenav,
                     manifest=hex(mf),
                     rev=pos, changesets=count, entries=changelist)
332
332
    def search(self, query):
        """Render changesets whose author, description or first 20
        changed files contain every whitespace-separated term of
        *query* (case-insensitive), newest first."""

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                # walk the changelog newest-first in batches of 100
                for i in range(cl.count() - 1, 0, -100):
                    l = []
                    for j in range(max(0, i - 100), i):
                        n = cl.node(j)
                        changes = cl.read(n)
                        l.append((n, j, changes))
                    l.reverse()
                    for e in l:
                        yield e

            for n, i, changes in revgen():
                # every query word must match somewhere
                miss = 0
                for q in qw:
                    if not (q in changes[1].lower() or
                            q in changes[4].lower() or
                            q in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                hn = hex(n)

                yield self.t('searchentry',
                             parity=count & 1,
                             author=changes[1],
                             parent=self.parents("changelogparent",
                                                 cl.parents(n), cl.rev),
                             changelogtag=self.showtag("changelogtag",n),
                             manifest=hex(changes[0]),
                             desc=changes[4],
                             date=changes[2],
                             files=self.listfilediffs(changes[3], n),
                             rev=i,
                             node=hn)

                # cap the number of hits like the changelog page
                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog
        mf = cl.read(cl.tip())[0]

        yield self.t('search',
                     query=query,
                     manifest=hex(mf),
                     entries=changelist)
388
388
    def changeset(self, nodeid):
        """Render a single changeset page, including the diff against
        its first parent and download links for enabled archive types."""
        cl = self.repo.changelog
        n = self.repo.lookup(nodeid)
        nodeid = hex(n)
        changes = cl.read(n)
        p1 = cl.parents(n)[0]

        files = []
        mf = self.repo.manifest.read(changes[0])
        for f in changes[3]:
            # files removed by this changeset are not in the manifest
            files.append(self.t("filenodelink",
                                filenode=hex(mf.get(f, nullid)), file=f))

        def diff(**map):
            yield self.diff(p1, n, None)

        def archivelist():
            # only advertise archive types enabled in the config
            for i in self.archives:
                if self.repo.ui.configbool("web", "allow" + i, False):
                    yield {"type" : i, "node" : nodeid}

        yield self.t('changeset',
                     diff=diff,
                     rev=cl.rev(n),
                     node=nodeid,
                     parent=self.parents("changesetparent",
                                         cl.parents(n), cl.rev),
                     changesettag=self.showtag("changesettag",n),
                     manifest=hex(changes[0]),
                     author=changes[1],
                     desc=changes[4],
                     date=changes[2],
                     files=files,
                     archives=archivelist())
423
423
    def filelog(self, f, filenode):
        """Render the revision history of a single file, newest first."""
        cl = self.repo.changelog
        fl = self.repo.file(f)
        filenode = hex(fl.lookup(filenode))
        count = fl.count()

        def entries(**map):
            l = []
            parity = (count - 1) & 1

            for i in range(count):
                n = fl.node(i)
                lr = fl.linkrev(n)
                cn = cl.node(lr)
                cs = cl.read(cl.node(lr))

                # walk forward but insert at the front: newest first
                l.insert(0, {"parity": parity,
                             "filenode": hex(n),
                             "filerev": i,
                             "file": f,
                             "node": hex(cn),
                             "author": cs[1],
                             "date": cs[2],
                             "parent": self.parents("filelogparent",
                                                    fl.parents(n),
                                                    fl.rev, file=f),
                             "desc": cs[4]})
                parity = 1 - parity

            for e in l:
                yield e

        yield self.t("filelog", file=f, filenode=filenode, entries=entries)
457
457
    def filerevision(self, f, node):
        """Render the contents of file *f* at the given file revision,
        one numbered line per template entry."""
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        text = fl.read(n)
        # map the file revision back to the changeset that introduced it
        changerev = fl.linkrev(n)
        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        def lines():
            # number each line; parity drives row striping in the template
            for l, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "linenumber": "% 6d" % (l + 1),
                       "parity": l & 1}

        yield self.t("filerevision",
                     file=f,
                     filenode=node,
                     path=up(f),
                     text=lines(),
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents("filerevparent",
                                         fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
488
488
    def fileannotate(self, f, node):
        """Render an annotate (blame) view of file *f* at *node*: each
        line tagged with the changeset and author that introduced it."""
        bcache = {}  # rev -> short author name
        ncache = {}  # rev -> changelog node
        fl = self.repo.file(f)
        n = fl.lookup(node)
        node = hex(n)
        changerev = fl.linkrev(n)

        cl = self.repo.changelog
        cn = cl.node(changerev)
        cs = cl.read(cn)
        mfn = cs[0]

        def annotate(**map):
            parity = 1
            last = None
            for r, l in fl.annotate(n):
                try:
                    cnode = ncache[r]
                except KeyError:
                    cnode = ncache[r] = self.repo.changelog.node(r)

                try:
                    name = bcache[r]
                except KeyError:
                    cl = self.repo.changelog.read(cnode)
                    bcache[r] = name = self.repo.ui.shortuser(cl[1])

                # flip row parity whenever the originating changeset changes
                if last != cnode:
                    parity = 1 - parity
                    last = cnode

                yield {"parity": parity,
                       "node": hex(cnode),
                       "rev": r,
                       "author": name,
                       "file": f,
                       "line": l}

        yield self.t("fileannotate",
                     file=f,
                     filenode=node,
                     annotate=annotate,
                     path=up(f),
                     rev=changerev,
                     node=hex(cn),
                     manifest=hex(mfn),
                     author=cs[1],
                     date=cs[2],
                     parent=self.parents("fileannotateparent",
                                         fl.parents(n), fl.rev, file=f),
                     permissions=self.repo.manifest.readflags(mfn)[f])
541
541
542 def manifest(self, mnode, path):
542 def manifest(self, mnode, path):
543 man = self.repo.manifest
543 man = self.repo.manifest
544 mn = man.lookup(mnode)
544 mn = man.lookup(mnode)
545 mnode = hex(mn)
545 mnode = hex(mn)
546 mf = man.read(mn)
546 mf = man.read(mn)
547 rev = man.rev(mn)
547 rev = man.rev(mn)
548 node = self.repo.changelog.node(rev)
548 node = self.repo.changelog.node(rev)
549 mff = man.readflags(mn)
549 mff = man.readflags(mn)
550
550
551 files = {}
551 files = {}
552
552
553 p = path[1:]
553 p = path[1:]
554 l = len(p)
554 l = len(p)
555
555
556 for f,n in mf.items():
556 for f,n in mf.items():
557 if f[:l] != p:
557 if f[:l] != p:
558 continue
558 continue
559 remain = f[l:]
559 remain = f[l:]
560 if "/" in remain:
560 if "/" in remain:
561 short = remain[:remain.find("/") + 1] # bleah
561 short = remain[:remain.find("/") + 1] # bleah
562 files[short] = (f, None)
562 files[short] = (f, None)
563 else:
563 else:
564 short = os.path.basename(remain)
564 short = os.path.basename(remain)
565 files[short] = (f, n)
565 files[short] = (f, n)
566
566
567 def filelist(**map):
567 def filelist(**map):
568 parity = 0
568 parity = 0
569 fl = files.keys()
569 fl = files.keys()
570 fl.sort()
570 fl.sort()
571 for f in fl:
571 for f in fl:
572 full, fnode = files[f]
572 full, fnode = files[f]
573 if not fnode:
573 if not fnode:
574 continue
574 continue
575
575
576 yield {"file": full,
576 yield {"file": full,
577 "manifest": mnode,
577 "manifest": mnode,
578 "filenode": hex(fnode),
578 "filenode": hex(fnode),
579 "parity": parity,
579 "parity": parity,
580 "basename": f,
580 "basename": f,
581 "permissions": mff[full]}
581 "permissions": mff[full]}
582 parity = 1 - parity
582 parity = 1 - parity
583
583
584 def dirlist(**map):
584 def dirlist(**map):
585 parity = 0
585 parity = 0
586 fl = files.keys()
586 fl = files.keys()
587 fl.sort()
587 fl.sort()
588 for f in fl:
588 for f in fl:
589 full, fnode = files[f]
589 full, fnode = files[f]
590 if fnode:
590 if fnode:
591 continue
591 continue
592
592
593 yield {"parity": parity,
593 yield {"parity": parity,
594 "path": os.path.join(path, f),
594 "path": os.path.join(path, f),
595 "manifest": mnode,
595 "manifest": mnode,
596 "basename": f[:-1]}
596 "basename": f[:-1]}
597 parity = 1 - parity
597 parity = 1 - parity
598
598
599 yield self.t("manifest",
599 yield self.t("manifest",
600 manifest=mnode,
600 manifest=mnode,
601 rev=rev,
601 rev=rev,
602 node=hex(node),
602 node=hex(node),
603 path=path,
603 path=path,
604 up=up(path),
604 up=up(path),
605 fentries=filelist,
605 fentries=filelist,
606 dentries=dirlist)
606 dentries=dirlist)
607
607
608 def tags(self):
608 def tags(self):
609 cl = self.repo.changelog
609 cl = self.repo.changelog
610 mf = cl.read(cl.tip())[0]
610 mf = cl.read(cl.tip())[0]
611
611
612 i = self.repo.tagslist()
612 i = self.repo.tagslist()
613 i.reverse()
613 i.reverse()
614
614
615 def entries(**map):
615 def entries(**map):
616 parity = 0
616 parity = 0
617 for k,n in i:
617 for k,n in i:
618 yield {"parity": parity,
618 yield {"parity": parity,
619 "tag": k,
619 "tag": k,
620 "node": hex(n)}
620 "node": hex(n)}
621 parity = 1 - parity
621 parity = 1 - parity
622
622
623 yield self.t("tags",
623 yield self.t("tags",
624 manifest=hex(mf),
624 manifest=hex(mf),
625 entries=entries)
625 entries=entries)
626
626
627 def filediff(self, file, changeset):
627 def filediff(self, file, changeset):
628 cl = self.repo.changelog
628 cl = self.repo.changelog
629 n = self.repo.lookup(changeset)
629 n = self.repo.lookup(changeset)
630 changeset = hex(n)
630 changeset = hex(n)
631 p1 = cl.parents(n)[0]
631 p1 = cl.parents(n)[0]
632 cs = cl.read(n)
632 cs = cl.read(n)
633 mf = self.repo.manifest.read(cs[0])
633 mf = self.repo.manifest.read(cs[0])
634
634
635 def diff(**map):
635 def diff(**map):
636 yield self.diff(p1, n, file)
636 yield self.diff(p1, n, file)
637
637
638 yield self.t("filediff",
638 yield self.t("filediff",
639 file=file,
639 file=file,
640 filenode=hex(mf.get(file, nullid)),
640 filenode=hex(mf.get(file, nullid)),
641 node=changeset,
641 node=changeset,
642 rev=self.repo.changelog.rev(n),
642 rev=self.repo.changelog.rev(n),
643 parent=self.parents("filediffparent",
643 parent=self.parents("filediffparent",
644 cl.parents(n), cl.rev),
644 cl.parents(n), cl.rev),
645 diff=diff)
645 diff=diff)
646
646
647 def archive(self, req, cnode, type):
647 def archive(self, req, cnode, type):
648 cs = self.repo.changelog.read(cnode)
648 cs = self.repo.changelog.read(cnode)
649 mnode = cs[0]
649 mnode = cs[0]
650 mf = self.repo.manifest.read(mnode)
650 mf = self.repo.manifest.read(mnode)
651 rev = self.repo.manifest.rev(mnode)
651 rev = self.repo.manifest.rev(mnode)
652 reponame = re.sub(r"\W+", "-", self.reponame)
652 reponame = re.sub(r"\W+", "-", self.reponame)
653 name = "%s-%s/" % (reponame, short(cnode))
653 name = "%s-%s/" % (reponame, short(cnode))
654
654
655 files = mf.keys()
655 files = mf.keys()
656 files.sort()
656 files.sort()
657
657
658 if type == 'zip':
658 if type == 'zip':
659 tmp = tempfile.mkstemp()[1]
659 tmp = tempfile.mkstemp()[1]
660 try:
660 try:
661 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
661 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
662
662
663 for f in files:
663 for f in files:
664 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
664 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
665 zf.close()
665 zf.close()
666
666
667 f = open(tmp, 'r')
667 f = open(tmp, 'r')
668 req.httphdr('application/zip', name[:-1] + '.zip',
668 req.httphdr('application/zip', name[:-1] + '.zip',
669 os.path.getsize(tmp))
669 os.path.getsize(tmp))
670 req.write(f.read())
670 req.write(f.read())
671 f.close()
671 f.close()
672 finally:
672 finally:
673 os.unlink(tmp)
673 os.unlink(tmp)
674
674
675 else:
675 else:
676 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
676 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
677 mff = self.repo.manifest.readflags(mnode)
677 mff = self.repo.manifest.readflags(mnode)
678 mtime = int(time.time())
678 mtime = int(time.time())
679
679
680 if type == "gz":
680 if type == "gz":
681 encoding = "gzip"
681 encoding = "gzip"
682 else:
682 else:
683 encoding = "x-bzip2"
683 encoding = "x-bzip2"
684 req.header([('Content-type', 'application/x-tar'),
684 req.header([('Content-type', 'application/x-tar'),
685 ('Content-disposition', 'attachment; filename=%s%s%s' %
685 ('Content-disposition', 'attachment; filename=%s%s%s' %
686 (name[:-1], '.tar.', type)),
686 (name[:-1], '.tar.', type)),
687 ('Content-encoding', encoding)])
687 ('Content-encoding', encoding)])
688 for fname in files:
688 for fname in files:
689 rcont = self.repo.file(fname).read(mf[fname])
689 rcont = self.repo.file(fname).read(mf[fname])
690 finfo = tarfile.TarInfo(name + fname)
690 finfo = tarfile.TarInfo(name + fname)
691 finfo.mtime = mtime
691 finfo.mtime = mtime
692 finfo.size = len(rcont)
692 finfo.size = len(rcont)
693 finfo.mode = mff[fname] and 0755 or 0644
693 finfo.mode = mff[fname] and 0755 or 0644
694 tf.addfile(finfo, StringIO.StringIO(rcont))
694 tf.addfile(finfo, StringIO.StringIO(rcont))
695 tf.close()
695 tf.close()
696
696
697 # add tags to things
697 # add tags to things
698 # tags -> list of changesets corresponding to tags
698 # tags -> list of changesets corresponding to tags
699 # find tag, changeset, file
699 # find tag, changeset, file
700
700
701 def run(self, req=hgrequest()):
701 def run(self, req=hgrequest()):
702 def header(**map):
702 def header(**map):
703 yield self.t("header", **map)
703 yield self.t("header", **map)
704
704
705 def footer(**map):
705 def footer(**map):
706 yield self.t("footer", **map)
706 yield self.t("footer", **map)
707
707
708 self.refresh()
708 self.refresh()
709
709
710 t = self.repo.ui.config("web", "templates", templatepath())
710 t = self.repo.ui.config("web", "templates", templatepath())
711 m = os.path.join(t, "map")
711 m = os.path.join(t, "map")
712 style = self.repo.ui.config("web", "style", "")
712 style = self.repo.ui.config("web", "style", "")
713 if req.form.has_key('style'):
713 if req.form.has_key('style'):
714 style = req.form['style'][0]
714 style = req.form['style'][0]
715 if style:
715 if style:
716 b = os.path.basename("map-" + style)
716 b = os.path.basename("map-" + style)
717 p = os.path.join(t, b)
717 p = os.path.join(t, b)
718 if os.path.isfile(p):
718 if os.path.isfile(p):
719 m = p
719 m = p
720
720
721 port = req.env["SERVER_PORT"]
721 port = req.env["SERVER_PORT"]
722 port = port != "80" and (":" + port) or ""
722 port = port != "80" and (":" + port) or ""
723 uri = req.env["REQUEST_URI"]
723 uri = req.env["REQUEST_URI"]
724 if "?" in uri:
724 if "?" in uri:
725 uri = uri.split("?")[0]
725 uri = uri.split("?")[0]
726 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
726 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
727 if not self.reponame:
727 if not self.reponame:
728 self.reponame = (self.repo.ui.config("web", "name")
728 self.reponame = (self.repo.ui.config("web", "name")
729 or uri.strip('/') or self.repo.root)
729 or uri.strip('/') or self.repo.root)
730
730
731 self.t = templater(m, common_filters,
731 self.t = templater(m, common_filters,
732 {"url": url,
732 {"url": url,
733 "repo": self.reponame,
733 "repo": self.reponame,
734 "header": header,
734 "header": header,
735 "footer": footer,
735 "footer": footer,
736 })
736 })
737
737
738 if not req.form.has_key('cmd'):
738 if not req.form.has_key('cmd'):
739 req.form['cmd'] = [self.t.cache['default'],]
739 req.form['cmd'] = [self.t.cache['default'],]
740
740
741 if req.form['cmd'][0] == 'changelog':
741 if req.form['cmd'][0] == 'changelog':
742 c = self.repo.changelog.count() - 1
742 c = self.repo.changelog.count() - 1
743 hi = c
743 hi = c
744 if req.form.has_key('rev'):
744 if req.form.has_key('rev'):
745 hi = req.form['rev'][0]
745 hi = req.form['rev'][0]
746 try:
746 try:
747 hi = self.repo.changelog.rev(self.repo.lookup(hi))
747 hi = self.repo.changelog.rev(self.repo.lookup(hi))
748 except hg.RepoError:
748 except hg.RepoError:
749 req.write(self.search(hi))
749 req.write(self.search(hi))
750 return
750 return
751
751
752 req.write(self.changelog(hi))
752 req.write(self.changelog(hi))
753
753
754 elif req.form['cmd'][0] == 'changeset':
754 elif req.form['cmd'][0] == 'changeset':
755 req.write(self.changeset(req.form['node'][0]))
755 req.write(self.changeset(req.form['node'][0]))
756
756
757 elif req.form['cmd'][0] == 'manifest':
757 elif req.form['cmd'][0] == 'manifest':
758 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
758 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
759
759
760 elif req.form['cmd'][0] == 'tags':
760 elif req.form['cmd'][0] == 'tags':
761 req.write(self.tags())
761 req.write(self.tags())
762
762
763 elif req.form['cmd'][0] == 'filediff':
763 elif req.form['cmd'][0] == 'filediff':
764 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
764 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
765
765
766 elif req.form['cmd'][0] == 'file':
766 elif req.form['cmd'][0] == 'file':
767 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
767 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
768
768
769 elif req.form['cmd'][0] == 'annotate':
769 elif req.form['cmd'][0] == 'annotate':
770 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
770 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
771
771
772 elif req.form['cmd'][0] == 'filelog':
772 elif req.form['cmd'][0] == 'filelog':
773 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
773 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
774
774
775 elif req.form['cmd'][0] == 'heads':
775 elif req.form['cmd'][0] == 'heads':
776 req.httphdr("application/mercurial-0.1")
776 req.httphdr("application/mercurial-0.1")
777 h = self.repo.heads()
777 h = self.repo.heads()
778 req.write(" ".join(map(hex, h)) + "\n")
778 req.write(" ".join(map(hex, h)) + "\n")
779
779
780 elif req.form['cmd'][0] == 'branches':
780 elif req.form['cmd'][0] == 'branches':
781 req.httphdr("application/mercurial-0.1")
781 req.httphdr("application/mercurial-0.1")
782 nodes = []
782 nodes = []
783 if req.form.has_key('nodes'):
783 if req.form.has_key('nodes'):
784 nodes = map(bin, req.form['nodes'][0].split(" "))
784 nodes = map(bin, req.form['nodes'][0].split(" "))
785 for b in self.repo.branches(nodes):
785 for b in self.repo.branches(nodes):
786 req.write(" ".join(map(hex, b)) + "\n")
786 req.write(" ".join(map(hex, b)) + "\n")
787
787
788 elif req.form['cmd'][0] == 'between':
788 elif req.form['cmd'][0] == 'between':
789 req.httphdr("application/mercurial-0.1")
789 req.httphdr("application/mercurial-0.1")
790 nodes = []
790 nodes = []
791 if req.form.has_key('pairs'):
791 if req.form.has_key('pairs'):
792 pairs = [map(bin, p.split("-"))
792 pairs = [map(bin, p.split("-"))
793 for p in req.form['pairs'][0].split(" ")]
793 for p in req.form['pairs'][0].split(" ")]
794 for b in self.repo.between(pairs):
794 for b in self.repo.between(pairs):
795 req.write(" ".join(map(hex, b)) + "\n")
795 req.write(" ".join(map(hex, b)) + "\n")
796
796
797 elif req.form['cmd'][0] == 'changegroup':
797 elif req.form['cmd'][0] == 'changegroup':
798 req.httphdr("application/mercurial-0.1")
798 req.httphdr("application/mercurial-0.1")
799 nodes = []
799 nodes = []
800 if not self.allowpull:
800 if not self.allowpull:
801 return
801 return
802
802
803 if req.form.has_key('roots'):
803 if req.form.has_key('roots'):
804 nodes = map(bin, req.form['roots'][0].split(" "))
804 nodes = map(bin, req.form['roots'][0].split(" "))
805
805
806 z = zlib.compressobj()
806 z = zlib.compressobj()
807 f = self.repo.changegroup(nodes)
807 f = self.repo.changegroup(nodes)
808 while 1:
808 while 1:
809 chunk = f.read(4096)
809 chunk = f.read(4096)
810 if not chunk:
810 if not chunk:
811 break
811 break
812 req.write(z.compress(chunk))
812 req.write(z.compress(chunk))
813
813
814 req.write(z.flush())
814 req.write(z.flush())
815
815
816 elif req.form['cmd'][0] == 'archive':
816 elif req.form['cmd'][0] == 'archive':
817 changeset = self.repo.lookup(req.form['node'][0])
817 changeset = self.repo.lookup(req.form['node'][0])
818 type = req.form['type'][0]
818 type = req.form['type'][0]
819 if (type in self.archives and
819 if (type in self.archives and
820 self.repo.ui.configbool("web", "allow" + type, False)):
820 self.repo.ui.configbool("web", "allow" + type, False)):
821 self.archive(req, changeset, type)
821 self.archive(req, changeset, type)
822 return
822 return
823
823
824 req.write(self.t("error"))
824 req.write(self.t("error"))
825
825
826 else:
826 else:
827 req.write(self.t("error"))
827 req.write(self.t("error"))
828
828
829 def create_server(repo):
829 def create_server(repo):
830
830
831 def openlog(opt, default):
831 def openlog(opt, default):
832 if opt and opt != '-':
832 if opt and opt != '-':
833 return open(opt, 'w')
833 return open(opt, 'w')
834 return default
834 return default
835
835
836 address = repo.ui.config("web", "address", "")
836 address = repo.ui.config("web", "address", "")
837 port = int(repo.ui.config("web", "port", 8000))
837 port = int(repo.ui.config("web", "port", 8000))
838 use_ipv6 = repo.ui.configbool("web", "ipv6")
838 use_ipv6 = repo.ui.configbool("web", "ipv6")
839 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
839 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
840 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
840 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
841
841
842 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
842 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
843 address_family = getattr(socket, 'AF_INET6', None)
843 address_family = getattr(socket, 'AF_INET6', None)
844
844
845 def __init__(self, *args, **kwargs):
845 def __init__(self, *args, **kwargs):
846 if self.address_family is None:
846 if self.address_family is None:
847 raise hg.RepoError('IPv6 not available on this system')
847 raise hg.RepoError(_('IPv6 not available on this system'))
848 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
848 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
849
849
850 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
850 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
851 def log_error(self, format, *args):
851 def log_error(self, format, *args):
852 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
852 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
853 self.log_date_time_string(),
853 self.log_date_time_string(),
854 format % args))
854 format % args))
855
855
856 def log_message(self, format, *args):
856 def log_message(self, format, *args):
857 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
857 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
858 self.log_date_time_string(),
858 self.log_date_time_string(),
859 format % args))
859 format % args))
860
860
861 def do_POST(self):
861 def do_POST(self):
862 try:
862 try:
863 self.do_hgweb()
863 self.do_hgweb()
864 except socket.error, inst:
864 except socket.error, inst:
865 if inst[0] != errno.EPIPE:
865 if inst[0] != errno.EPIPE:
866 raise
866 raise
867
867
868 def do_GET(self):
868 def do_GET(self):
869 self.do_POST()
869 self.do_POST()
870
870
871 def do_hgweb(self):
871 def do_hgweb(self):
872 query = ""
872 query = ""
873 p = self.path.find("?")
873 p = self.path.find("?")
874 if p:
874 if p:
875 query = self.path[p + 1:]
875 query = self.path[p + 1:]
876 query = query.replace('+', ' ')
876 query = query.replace('+', ' ')
877
877
878 env = {}
878 env = {}
879 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
879 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
880 env['REQUEST_METHOD'] = self.command
880 env['REQUEST_METHOD'] = self.command
881 env['SERVER_NAME'] = self.server.server_name
881 env['SERVER_NAME'] = self.server.server_name
882 env['SERVER_PORT'] = str(self.server.server_port)
882 env['SERVER_PORT'] = str(self.server.server_port)
883 env['REQUEST_URI'] = "/"
883 env['REQUEST_URI'] = "/"
884 if query:
884 if query:
885 env['QUERY_STRING'] = query
885 env['QUERY_STRING'] = query
886 host = self.address_string()
886 host = self.address_string()
887 if host != self.client_address[0]:
887 if host != self.client_address[0]:
888 env['REMOTE_HOST'] = host
888 env['REMOTE_HOST'] = host
889 env['REMOTE_ADDR'] = self.client_address[0]
889 env['REMOTE_ADDR'] = self.client_address[0]
890
890
891 if self.headers.typeheader is None:
891 if self.headers.typeheader is None:
892 env['CONTENT_TYPE'] = self.headers.type
892 env['CONTENT_TYPE'] = self.headers.type
893 else:
893 else:
894 env['CONTENT_TYPE'] = self.headers.typeheader
894 env['CONTENT_TYPE'] = self.headers.typeheader
895 length = self.headers.getheader('content-length')
895 length = self.headers.getheader('content-length')
896 if length:
896 if length:
897 env['CONTENT_LENGTH'] = length
897 env['CONTENT_LENGTH'] = length
898 accept = []
898 accept = []
899 for line in self.headers.getallmatchingheaders('accept'):
899 for line in self.headers.getallmatchingheaders('accept'):
900 if line[:1] in "\t\n\r ":
900 if line[:1] in "\t\n\r ":
901 accept.append(line.strip())
901 accept.append(line.strip())
902 else:
902 else:
903 accept = accept + line[7:].split(',')
903 accept = accept + line[7:].split(',')
904 env['HTTP_ACCEPT'] = ','.join(accept)
904 env['HTTP_ACCEPT'] = ','.join(accept)
905
905
906 req = hgrequest(self.rfile, self.wfile, env)
906 req = hgrequest(self.rfile, self.wfile, env)
907 self.send_response(200, "Script output follows")
907 self.send_response(200, "Script output follows")
908 hg.run(req)
908 hg.run(req)
909
909
910 hg = hgweb(repo)
910 hg = hgweb(repo)
911 if use_ipv6:
911 if use_ipv6:
912 return IPv6HTTPServer((address, port), hgwebhandler)
912 return IPv6HTTPServer((address, port), hgwebhandler)
913 else:
913 else:
914 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
914 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
915
915
916 def server(path, name, templates, address, port, use_ipv6=False,
916 def server(path, name, templates, address, port, use_ipv6=False,
917 accesslog=sys.stdout, errorlog=sys.stderr):
917 accesslog=sys.stdout, errorlog=sys.stderr):
918 httpd = create_server(path, name, templates, address, port, use_ipv6,
918 httpd = create_server(path, name, templates, address, port, use_ipv6,
919 accesslog, errorlog)
919 accesslog, errorlog)
920 httpd.serve_forever()
920 httpd.serve_forever()
921
921
922 # This is a stopgap
922 # This is a stopgap
923 class hgwebdir:
923 class hgwebdir:
924 def __init__(self, config):
924 def __init__(self, config):
925 def cleannames(items):
925 def cleannames(items):
926 return [(name.strip('/'), path) for name, path in items]
926 return [(name.strip('/'), path) for name, path in items]
927
927
928 if type(config) == type([]):
928 if type(config) == type([]):
929 self.repos = cleannames(config)
929 self.repos = cleannames(config)
930 elif type(config) == type({}):
930 elif type(config) == type({}):
931 self.repos = cleannames(config.items())
931 self.repos = cleannames(config.items())
932 self.repos.sort()
932 self.repos.sort()
933 else:
933 else:
934 cp = ConfigParser.SafeConfigParser()
934 cp = ConfigParser.SafeConfigParser()
935 cp.read(config)
935 cp.read(config)
936 self.repos = cleannames(cp.items("paths"))
936 self.repos = cleannames(cp.items("paths"))
937 self.repos.sort()
937 self.repos.sort()
938
938
939 def run(self, req=hgrequest()):
939 def run(self, req=hgrequest()):
940 def header(**map):
940 def header(**map):
941 yield tmpl("header", **map)
941 yield tmpl("header", **map)
942
942
943 def footer(**map):
943 def footer(**map):
944 yield tmpl("footer", **map)
944 yield tmpl("footer", **map)
945
945
946 m = os.path.join(templatepath(), "map")
946 m = os.path.join(templatepath(), "map")
947 tmpl = templater(m, common_filters,
947 tmpl = templater(m, common_filters,
948 {"header": header, "footer": footer})
948 {"header": header, "footer": footer})
949
949
950 def entries(**map):
950 def entries(**map):
951 parity = 0
951 parity = 0
952 for name, path in self.repos:
952 for name, path in self.repos:
953 u = ui.ui()
953 u = ui.ui()
954 try:
954 try:
955 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
955 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
956 except IOError:
956 except IOError:
957 pass
957 pass
958 get = u.config
958 get = u.config
959
959
960 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
960 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
961 .replace("//", "/"))
961 .replace("//", "/"))
962
962
963 # update time with local timezone
963 # update time with local timezone
964 d = (os.stat(os.path.join(path,
964 d = (os.stat(os.path.join(path,
965 ".hg", "00changelog.d")).st_mtime,
965 ".hg", "00changelog.d")).st_mtime,
966 util.makedate()[1])
966 util.makedate()[1])
967
967
968 yield dict(contact=(get("ui", "username") or # preferred
968 yield dict(contact=(get("ui", "username") or # preferred
969 get("web", "contact") or # deprecated
969 get("web", "contact") or # deprecated
970 get("web", "author", "unknown")), # also
970 get("web", "author", "unknown")), # also
971 name=get("web", "name", name),
971 name=get("web", "name", name),
972 url=url,
972 url=url,
973 parity=parity,
973 parity=parity,
974 shortdesc=get("web", "description", "unknown"),
974 shortdesc=get("web", "description", "unknown"),
975 lastupdate=d)
975 lastupdate=d)
976
976
977 parity = 1 - parity
977 parity = 1 - parity
978
978
979 virtual = req.env.get("PATH_INFO", "").strip('/')
979 virtual = req.env.get("PATH_INFO", "").strip('/')
980 if virtual:
980 if virtual:
981 real = dict(self.repos).get(virtual)
981 real = dict(self.repos).get(virtual)
982 if real:
982 if real:
983 hgweb(real).run(req)
983 hgweb(real).run(req)
984 else:
984 else:
985 req.write(tmpl("notfound", repo=virtual))
985 req.write(tmpl("notfound", repo=virtual))
986 else:
986 else:
987 req.write(tmpl("index", entries=entries))
987 req.write(tmpl("index", entries=entries))
@@ -1,136 +1,136 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from remoterepo import *
9 from remoterepo import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "hg os urllib urllib2 urlparse zlib util")
12 demandload(globals(), "hg os urllib urllib2 urlparse zlib util")
13
13
14 class httprepository(remoterepository):
14 class httprepository(remoterepository):
15 def __init__(self, ui, path):
15 def __init__(self, ui, path):
16 # fix missing / after hostname
16 # fix missing / after hostname
17 s = urlparse.urlsplit(path)
17 s = urlparse.urlsplit(path)
18 partial = s[2]
18 partial = s[2]
19 if not partial: partial = "/"
19 if not partial: partial = "/"
20 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
20 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
21 self.ui = ui
21 self.ui = ui
22 no_list = [ "localhost", "127.0.0.1" ]
22 no_list = [ "localhost", "127.0.0.1" ]
23 host = ui.config("http_proxy", "host")
23 host = ui.config("http_proxy", "host")
24 if host is None:
24 if host is None:
25 host = os.environ.get("http_proxy")
25 host = os.environ.get("http_proxy")
26 if host and host.startswith('http://'):
26 if host and host.startswith('http://'):
27 host = host[7:]
27 host = host[7:]
28 user = ui.config("http_proxy", "user")
28 user = ui.config("http_proxy", "user")
29 passwd = ui.config("http_proxy", "passwd")
29 passwd = ui.config("http_proxy", "passwd")
30 no = ui.config("http_proxy", "no")
30 no = ui.config("http_proxy", "no")
31 if no is None:
31 if no is None:
32 no = os.environ.get("no_proxy")
32 no = os.environ.get("no_proxy")
33 if no:
33 if no:
34 no_list = no_list + no.split(",")
34 no_list = no_list + no.split(",")
35
35
36 no_proxy = 0
36 no_proxy = 0
37 for h in no_list:
37 for h in no_list:
38 if (path.startswith("http://" + h + "/") or
38 if (path.startswith("http://" + h + "/") or
39 path.startswith("http://" + h + ":") or
39 path.startswith("http://" + h + ":") or
40 path == "http://" + h):
40 path == "http://" + h):
41 no_proxy = 1
41 no_proxy = 1
42
42
43 # Note: urllib2 takes proxy values from the environment and those will
43 # Note: urllib2 takes proxy values from the environment and those will
44 # take precedence
44 # take precedence
45 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
45 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
46 try:
46 try:
47 if os.environ.has_key(env):
47 if os.environ.has_key(env):
48 del os.environ[env]
48 del os.environ[env]
49 except OSError:
49 except OSError:
50 pass
50 pass
51
51
52 proxy_handler = urllib2.BaseHandler()
52 proxy_handler = urllib2.BaseHandler()
53 if host and not no_proxy:
53 if host and not no_proxy:
54 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
54 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
55
55
56 authinfo = None
56 authinfo = None
57 if user and passwd:
57 if user and passwd:
58 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
58 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
59 passmgr.add_password(None, host, user, passwd)
59 passmgr.add_password(None, host, user, passwd)
60 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
60 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
61
61
62 opener = urllib2.build_opener(proxy_handler, authinfo)
62 opener = urllib2.build_opener(proxy_handler, authinfo)
63 # 1.0 here is the _protocol_ version
63 # 1.0 here is the _protocol_ version
64 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
64 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
65 urllib2.install_opener(opener)
65 urllib2.install_opener(opener)
66
66
67 def dev(self):
67 def dev(self):
68 return -1
68 return -1
69
69
70 def do_cmd(self, cmd, **args):
70 def do_cmd(self, cmd, **args):
71 self.ui.debug("sending %s command\n" % cmd)
71 self.ui.debug(_("sending %s command\n") % cmd)
72 q = {"cmd": cmd}
72 q = {"cmd": cmd}
73 q.update(args)
73 q.update(args)
74 qs = urllib.urlencode(q)
74 qs = urllib.urlencode(q)
75 cu = "%s?%s" % (self.url, qs)
75 cu = "%s?%s" % (self.url, qs)
76 resp = urllib2.urlopen(cu)
76 resp = urllib2.urlopen(cu)
77 proto = resp.headers['content-type']
77 proto = resp.headers['content-type']
78
78
79 # accept old "text/plain" and "application/hg-changegroup" for now
79 # accept old "text/plain" and "application/hg-changegroup" for now
80 if not proto.startswith('application/mercurial') and \
80 if not proto.startswith('application/mercurial') and \
81 not proto.startswith('text/plain') and \
81 not proto.startswith('text/plain') and \
82 not proto.startswith('application/hg-changegroup'):
82 not proto.startswith('application/hg-changegroup'):
83 raise hg.RepoError("'%s' does not appear to be an hg repository" %
83 raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
84 self.url)
84 self.url)
85
85
86 if proto.startswith('application/mercurial'):
86 if proto.startswith('application/mercurial'):
87 version = proto[22:]
87 version = proto[22:]
88 if float(version) > 0.1:
88 if float(version) > 0.1:
89 raise hg.RepoError("'%s' uses newer protocol %s" %
89 raise hg.RepoError(_("'%s' uses newer protocol %s") %
90 (self.url, version))
90 (self.url, version))
91
91
92 return resp
92 return resp
93
93
94 def heads(self):
94 def heads(self):
95 d = self.do_cmd("heads").read()
95 d = self.do_cmd("heads").read()
96 try:
96 try:
97 return map(bin, d[:-1].split(" "))
97 return map(bin, d[:-1].split(" "))
98 except:
98 except:
99 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
99 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
100 raise
100 raise
101
101
102 def branches(self, nodes):
102 def branches(self, nodes):
103 n = " ".join(map(hex, nodes))
103 n = " ".join(map(hex, nodes))
104 d = self.do_cmd("branches", nodes=n).read()
104 d = self.do_cmd("branches", nodes=n).read()
105 try:
105 try:
106 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
106 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
107 return br
107 return br
108 except:
108 except:
109 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
109 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
110 raise
110 raise
111
111
112 def between(self, pairs):
112 def between(self, pairs):
113 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
113 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
114 d = self.do_cmd("between", pairs=n).read()
114 d = self.do_cmd("between", pairs=n).read()
115 try:
115 try:
116 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
116 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
117 return p
117 return p
118 except:
118 except:
119 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
119 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
120 raise
120 raise
121
121
122 def changegroup(self, nodes):
122 def changegroup(self, nodes):
123 n = " ".join(map(hex, nodes))
123 n = " ".join(map(hex, nodes))
124 f = self.do_cmd("changegroup", roots=n)
124 f = self.do_cmd("changegroup", roots=n)
125 bytes = 0
125 bytes = 0
126
126
127 def zgenerator(f):
127 def zgenerator(f):
128 zd = zlib.decompressobj()
128 zd = zlib.decompressobj()
129 for chnk in f:
129 for chnk in f:
130 yield zd.decompress(chnk)
130 yield zd.decompress(chnk)
131 yield zd.flush()
131 yield zd.flush()
132
132
133 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
133 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
134
134
135 class httpsrepository(httprepository):
135 class httpsrepository(httprepository):
136 pass
136 pass
@@ -1,1449 +1,1449 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError("no repo found")
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError("repository %s not found" % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.opener("hgrc"))
46 self.ui.readconfig(self.opener("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 s = self.ui.config("hooks", name)
50 s = self.ui.config("hooks", name)
51 if s:
51 if s:
52 self.ui.note("running hook %s: %s\n" % (name, s))
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 old = {}
53 old = {}
54 for k, v in args.items():
54 for k, v in args.items():
55 k = k.upper()
55 k = k.upper()
56 old[k] = os.environ.get(k, None)
56 old[k] = os.environ.get(k, None)
57 os.environ[k] = v
57 os.environ[k] = v
58
58
59 # Hooks run in the repository root
59 # Hooks run in the repository root
60 olddir = os.getcwd()
60 olddir = os.getcwd()
61 os.chdir(self.root)
61 os.chdir(self.root)
62 r = os.system(s)
62 r = os.system(s)
63 os.chdir(olddir)
63 os.chdir(olddir)
64
64
65 for k, v in old.items():
65 for k, v in old.items():
66 if v != None:
66 if v != None:
67 os.environ[k] = v
67 os.environ[k] = v
68 else:
68 else:
69 del os.environ[k]
69 del os.environ[k]
70
70
71 if r:
71 if r:
72 self.ui.warn("abort: %s hook failed with status %d!\n" %
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 (name, r))
73 (name, r))
74 return False
74 return False
75 return True
75 return True
76
76
77 def tags(self):
77 def tags(self):
78 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
79 if not self.tagscache:
79 if not self.tagscache:
80 self.tagscache = {}
80 self.tagscache = {}
81 def addtag(self, k, n):
81 def addtag(self, k, n):
82 try:
82 try:
83 bin_n = bin(n)
83 bin_n = bin(n)
84 except TypeError:
84 except TypeError:
85 bin_n = ''
85 bin_n = ''
86 self.tagscache[k.strip()] = bin_n
86 self.tagscache[k.strip()] = bin_n
87
87
88 try:
88 try:
89 # read each head of the tags file, ending with the tip
89 # read each head of the tags file, ending with the tip
90 # and add each tag found to the map, with "newer" ones
90 # and add each tag found to the map, with "newer" ones
91 # taking precedence
91 # taking precedence
92 fl = self.file(".hgtags")
92 fl = self.file(".hgtags")
93 h = fl.heads()
93 h = fl.heads()
94 h.reverse()
94 h.reverse()
95 for r in h:
95 for r in h:
96 for l in fl.read(r).splitlines():
96 for l in fl.read(r).splitlines():
97 if l:
97 if l:
98 n, k = l.split(" ", 1)
98 n, k = l.split(" ", 1)
99 addtag(self, k, n)
99 addtag(self, k, n)
100 except KeyError:
100 except KeyError:
101 pass
101 pass
102
102
103 try:
103 try:
104 f = self.opener("localtags")
104 f = self.opener("localtags")
105 for l in f:
105 for l in f:
106 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
107 addtag(self, k, n)
107 addtag(self, k, n)
108 except IOError:
108 except IOError:
109 pass
109 pass
110
110
111 self.tagscache['tip'] = self.changelog.tip()
111 self.tagscache['tip'] = self.changelog.tip()
112
112
113 return self.tagscache
113 return self.tagscache
114
114
115 def tagslist(self):
115 def tagslist(self):
116 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
117 l = []
117 l = []
118 for t, n in self.tags().items():
118 for t, n in self.tags().items():
119 try:
119 try:
120 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
121 except:
121 except:
122 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
123 l.append((r,t,n))
123 l.append((r,t,n))
124 l.sort()
124 l.sort()
125 return [(t,n) for r,t,n in l]
125 return [(t,n) for r,t,n in l]
126
126
127 def nodetags(self, node):
127 def nodetags(self, node):
128 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
129 if not self.nodetagscache:
129 if not self.nodetagscache:
130 self.nodetagscache = {}
130 self.nodetagscache = {}
131 for t,n in self.tags().items():
131 for t,n in self.tags().items():
132 self.nodetagscache.setdefault(n,[]).append(t)
132 self.nodetagscache.setdefault(n,[]).append(t)
133 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
134
134
135 def lookup(self, key):
135 def lookup(self, key):
136 try:
136 try:
137 return self.tags()[key]
137 return self.tags()[key]
138 except KeyError:
138 except KeyError:
139 try:
139 try:
140 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
141 except:
141 except:
142 raise repo.RepoError("unknown revision '%s'" % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143
143
144 def dev(self):
144 def dev(self):
145 return os.stat(self.path).st_dev
145 return os.stat(self.path).st_dev
146
146
147 def local(self):
147 def local(self):
148 return True
148 return True
149
149
150 def join(self, f):
150 def join(self, f):
151 return os.path.join(self.path, f)
151 return os.path.join(self.path, f)
152
152
153 def wjoin(self, f):
153 def wjoin(self, f):
154 return os.path.join(self.root, f)
154 return os.path.join(self.root, f)
155
155
156 def file(self, f):
156 def file(self, f):
157 if f[0] == '/': f = f[1:]
157 if f[0] == '/': f = f[1:]
158 return filelog.filelog(self.opener, f)
158 return filelog.filelog(self.opener, f)
159
159
160 def getcwd(self):
160 def getcwd(self):
161 return self.dirstate.getcwd()
161 return self.dirstate.getcwd()
162
162
163 def wfile(self, f, mode='r'):
163 def wfile(self, f, mode='r'):
164 return self.wopener(f, mode)
164 return self.wopener(f, mode)
165
165
166 def wread(self, filename):
166 def wread(self, filename):
167 if self.encodepats == None:
167 if self.encodepats == None:
168 l = []
168 l = []
169 for pat, cmd in self.ui.configitems("encode"):
169 for pat, cmd in self.ui.configitems("encode"):
170 mf = util.matcher("", "/", [pat], [], [])[1]
170 mf = util.matcher("", "/", [pat], [], [])[1]
171 l.append((mf, cmd))
171 l.append((mf, cmd))
172 self.encodepats = l
172 self.encodepats = l
173
173
174 data = self.wopener(filename, 'r').read()
174 data = self.wopener(filename, 'r').read()
175
175
176 for mf, cmd in self.encodepats:
176 for mf, cmd in self.encodepats:
177 if mf(filename):
177 if mf(filename):
178 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 data = util.filter(data, cmd)
179 data = util.filter(data, cmd)
180 break
180 break
181
181
182 return data
182 return data
183
183
184 def wwrite(self, filename, data, fd=None):
184 def wwrite(self, filename, data, fd=None):
185 if self.decodepats == None:
185 if self.decodepats == None:
186 l = []
186 l = []
187 for pat, cmd in self.ui.configitems("decode"):
187 for pat, cmd in self.ui.configitems("decode"):
188 mf = util.matcher("", "/", [pat], [], [])[1]
188 mf = util.matcher("", "/", [pat], [], [])[1]
189 l.append((mf, cmd))
189 l.append((mf, cmd))
190 self.decodepats = l
190 self.decodepats = l
191
191
192 for mf, cmd in self.decodepats:
192 for mf, cmd in self.decodepats:
193 if mf(filename):
193 if mf(filename):
194 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 data = util.filter(data, cmd)
195 data = util.filter(data, cmd)
196 break
196 break
197
197
198 if fd:
198 if fd:
199 return fd.write(data)
199 return fd.write(data)
200 return self.wopener(filename, 'w').write(data)
200 return self.wopener(filename, 'w').write(data)
201
201
202 def transaction(self):
202 def transaction(self):
203 # save dirstate for undo
203 # save dirstate for undo
204 try:
204 try:
205 ds = self.opener("dirstate").read()
205 ds = self.opener("dirstate").read()
206 except IOError:
206 except IOError:
207 ds = ""
207 ds = ""
208 self.opener("journal.dirstate", "w").write(ds)
208 self.opener("journal.dirstate", "w").write(ds)
209
209
210 def after():
210 def after():
211 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal"), self.join("undo"))
212 util.rename(self.join("journal.dirstate"),
212 util.rename(self.join("journal.dirstate"),
213 self.join("undo.dirstate"))
213 self.join("undo.dirstate"))
214
214
215 return transaction.transaction(self.ui.warn, self.opener,
215 return transaction.transaction(self.ui.warn, self.opener,
216 self.join("journal"), after)
216 self.join("journal"), after)
217
217
218 def recover(self):
218 def recover(self):
219 lock = self.lock()
219 lock = self.lock()
220 if os.path.exists(self.join("journal")):
220 if os.path.exists(self.join("journal")):
221 self.ui.status("rolling back interrupted transaction\n")
221 self.ui.status(_("rolling back interrupted transaction\n"))
222 return transaction.rollback(self.opener, self.join("journal"))
222 return transaction.rollback(self.opener, self.join("journal"))
223 else:
223 else:
224 self.ui.warn("no interrupted transaction available\n")
224 self.ui.warn(_("no interrupted transaction available\n"))
225
225
226 def undo(self):
226 def undo(self):
227 lock = self.lock()
227 lock = self.lock()
228 if os.path.exists(self.join("undo")):
228 if os.path.exists(self.join("undo")):
229 self.ui.status("rolling back last transaction\n")
229 self.ui.status(_("rolling back last transaction\n"))
230 transaction.rollback(self.opener, self.join("undo"))
230 transaction.rollback(self.opener, self.join("undo"))
231 self.dirstate = None
231 self.dirstate = None
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 else:
234 else:
235 self.ui.warn("no undo information available\n")
235 self.ui.warn(_("no undo information available\n"))
236
236
237 def lock(self, wait=1):
237 def lock(self, wait=1):
238 try:
238 try:
239 return lock.lock(self.join("lock"), 0)
239 return lock.lock(self.join("lock"), 0)
240 except lock.LockHeld, inst:
240 except lock.LockHeld, inst:
241 if wait:
241 if wait:
242 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 return lock.lock(self.join("lock"), wait)
243 return lock.lock(self.join("lock"), wait)
244 raise inst
244 raise inst
245
245
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 orig_parent = self.dirstate.parents()[0] or nullid
247 orig_parent = self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 c1 = self.changelog.read(p1)
250 c1 = self.changelog.read(p1)
251 c2 = self.changelog.read(p2)
251 c2 = self.changelog.read(p2)
252 m1 = self.manifest.read(c1[0])
252 m1 = self.manifest.read(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
253 mf1 = self.manifest.readflags(c1[0])
254 m2 = self.manifest.read(c2[0])
254 m2 = self.manifest.read(c2[0])
255 changed = []
255 changed = []
256
256
257 if orig_parent == p1:
257 if orig_parent == p1:
258 update_dirstate = 1
258 update_dirstate = 1
259 else:
259 else:
260 update_dirstate = 0
260 update_dirstate = 0
261
261
262 tr = self.transaction()
262 tr = self.transaction()
263 mm = m1.copy()
263 mm = m1.copy()
264 mfm = mf1.copy()
264 mfm = mf1.copy()
265 linkrev = self.changelog.count()
265 linkrev = self.changelog.count()
266 for f in files:
266 for f in files:
267 try:
267 try:
268 t = self.wread(f)
268 t = self.wread(f)
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 r = self.file(f)
270 r = self.file(f)
271 mfm[f] = tm
271 mfm[f] = tm
272
272
273 fp1 = m1.get(f, nullid)
273 fp1 = m1.get(f, nullid)
274 fp2 = m2.get(f, nullid)
274 fp2 = m2.get(f, nullid)
275
275
276 # is the same revision on two branches of a merge?
276 # is the same revision on two branches of a merge?
277 if fp2 == fp1:
277 if fp2 == fp1:
278 fp2 = nullid
278 fp2 = nullid
279
279
280 if fp2 != nullid:
280 if fp2 != nullid:
281 # is one parent an ancestor of the other?
281 # is one parent an ancestor of the other?
282 fpa = r.ancestor(fp1, fp2)
282 fpa = r.ancestor(fp1, fp2)
283 if fpa == fp1:
283 if fpa == fp1:
284 fp1, fp2 = fp2, nullid
284 fp1, fp2 = fp2, nullid
285 elif fpa == fp2:
285 elif fpa == fp2:
286 fp2 = nullid
286 fp2 = nullid
287
287
288 # is the file unmodified from the parent?
288 # is the file unmodified from the parent?
289 if t == r.read(fp1):
289 if t == r.read(fp1):
290 # record the proper existing parent in manifest
290 # record the proper existing parent in manifest
291 # no need to add a revision
291 # no need to add a revision
292 mm[f] = fp1
292 mm[f] = fp1
293 continue
293 continue
294
294
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 changed.append(f)
296 changed.append(f)
297 if update_dirstate:
297 if update_dirstate:
298 self.dirstate.update([f], "n")
298 self.dirstate.update([f], "n")
299 except IOError:
299 except IOError:
300 try:
300 try:
301 del mm[f]
301 del mm[f]
302 del mfm[f]
302 del mfm[f]
303 if update_dirstate:
303 if update_dirstate:
304 self.dirstate.forget([f])
304 self.dirstate.forget([f])
305 except:
305 except:
306 # deleted from p2?
306 # deleted from p2?
307 pass
307 pass
308
308
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 user = user or self.ui.username()
310 user = user or self.ui.username()
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 tr.close()
312 tr.close()
313 if update_dirstate:
313 if update_dirstate:
314 self.dirstate.setparents(n, nullid)
314 self.dirstate.setparents(n, nullid)
315
315
316 def commit(self, files = None, text = "", user = None, date = None,
316 def commit(self, files = None, text = "", user = None, date = None,
317 match = util.always, force=False):
317 match = util.always, force=False):
318 commit = []
318 commit = []
319 remove = []
319 remove = []
320 changed = []
320 changed = []
321
321
322 if files:
322 if files:
323 for f in files:
323 for f in files:
324 s = self.dirstate.state(f)
324 s = self.dirstate.state(f)
325 if s in 'nmai':
325 if s in 'nmai':
326 commit.append(f)
326 commit.append(f)
327 elif s == 'r':
327 elif s == 'r':
328 remove.append(f)
328 remove.append(f)
329 else:
329 else:
330 self.ui.warn("%s not tracked!\n" % f)
330 self.ui.warn(_("%s not tracked!\n") % f)
331 else:
331 else:
332 (c, a, d, u) = self.changes(match=match)
332 (c, a, d, u) = self.changes(match=match)
333 commit = c + a
333 commit = c + a
334 remove = d
334 remove = d
335
335
336 p1, p2 = self.dirstate.parents()
336 p1, p2 = self.dirstate.parents()
337 c1 = self.changelog.read(p1)
337 c1 = self.changelog.read(p1)
338 c2 = self.changelog.read(p2)
338 c2 = self.changelog.read(p2)
339 m1 = self.manifest.read(c1[0])
339 m1 = self.manifest.read(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
340 mf1 = self.manifest.readflags(c1[0])
341 m2 = self.manifest.read(c2[0])
341 m2 = self.manifest.read(c2[0])
342
342
343 if not commit and not remove and not force and p2 == nullid:
343 if not commit and not remove and not force and p2 == nullid:
344 self.ui.status("nothing changed\n")
344 self.ui.status(_("nothing changed\n"))
345 return None
345 return None
346
346
347 if not self.hook("precommit"):
347 if not self.hook("precommit"):
348 return None
348 return None
349
349
350 lock = self.lock()
350 lock = self.lock()
351 tr = self.transaction()
351 tr = self.transaction()
352
352
353 # check in files
353 # check in files
354 new = {}
354 new = {}
355 linkrev = self.changelog.count()
355 linkrev = self.changelog.count()
356 commit.sort()
356 commit.sort()
357 for f in commit:
357 for f in commit:
358 self.ui.note(f + "\n")
358 self.ui.note(f + "\n")
359 try:
359 try:
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 t = self.wread(f)
361 t = self.wread(f)
362 except IOError:
362 except IOError:
363 self.ui.warn("trouble committing %s!\n" % f)
363 self.ui.warn(_("trouble committing %s!\n") % f)
364 raise
364 raise
365
365
366 r = self.file(f)
366 r = self.file(f)
367
367
368 meta = {}
368 meta = {}
369 cp = self.dirstate.copied(f)
369 cp = self.dirstate.copied(f)
370 if cp:
370 if cp:
371 meta["copy"] = cp
371 meta["copy"] = cp
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 fp1, fp2 = nullid, nullid
374 fp1, fp2 = nullid, nullid
375 else:
375 else:
376 fp1 = m1.get(f, nullid)
376 fp1 = m1.get(f, nullid)
377 fp2 = m2.get(f, nullid)
377 fp2 = m2.get(f, nullid)
378
378
379 # is the same revision on two branches of a merge?
379 # is the same revision on two branches of a merge?
380 if fp2 == fp1:
380 if fp2 == fp1:
381 fp2 = nullid
381 fp2 = nullid
382
382
383 if fp2 != nullid:
383 if fp2 != nullid:
384 # is one parent an ancestor of the other?
384 # is one parent an ancestor of the other?
385 fpa = r.ancestor(fp1, fp2)
385 fpa = r.ancestor(fp1, fp2)
386 if fpa == fp1:
386 if fpa == fp1:
387 fp1, fp2 = fp2, nullid
387 fp1, fp2 = fp2, nullid
388 elif fpa == fp2:
388 elif fpa == fp2:
389 fp2 = nullid
389 fp2 = nullid
390
390
391 # is the file unmodified from the parent?
391 # is the file unmodified from the parent?
392 if not meta and t == r.read(fp1):
392 if not meta and t == r.read(fp1):
393 # record the proper existing parent in manifest
393 # record the proper existing parent in manifest
394 # no need to add a revision
394 # no need to add a revision
395 new[f] = fp1
395 new[f] = fp1
396 continue
396 continue
397
397
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 # remember what we've added so that we can later calculate
399 # remember what we've added so that we can later calculate
400 # the files to pull from a set of changesets
400 # the files to pull from a set of changesets
401 changed.append(f)
401 changed.append(f)
402
402
403 # update manifest
403 # update manifest
404 m1.update(new)
404 m1.update(new)
405 for f in remove:
405 for f in remove:
406 if f in m1:
406 if f in m1:
407 del m1[f]
407 del m1[f]
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 (new, remove))
409 (new, remove))
410
410
411 # add changeset
411 # add changeset
412 new = new.keys()
412 new = new.keys()
413 new.sort()
413 new.sort()
414
414
415 if not text:
415 if not text:
416 edittext = ""
416 edittext = ""
417 if p2 != nullid:
417 if p2 != nullid:
418 edittext += "HG: branch merge\n"
418 edittext += "HG: branch merge\n"
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 if not changed and not remove:
422 if not changed and not remove:
423 edittext += "HG: no files changed\n"
423 edittext += "HG: no files changed\n"
424 edittext = self.ui.edit(edittext)
424 edittext = self.ui.edit(edittext)
425 if not edittext.rstrip():
425 if not edittext.rstrip():
426 return None
426 return None
427 text = edittext
427 text = edittext
428
428
429 user = user or self.ui.username()
429 user = user or self.ui.username()
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 tr.close()
431 tr.close()
432
432
433 self.dirstate.setparents(n)
433 self.dirstate.setparents(n)
434 self.dirstate.update(new, "n")
434 self.dirstate.update(new, "n")
435 self.dirstate.forget(remove)
435 self.dirstate.forget(remove)
436
436
437 if not self.hook("commit", node=hex(n)):
437 if not self.hook("commit", node=hex(n)):
438 return None
438 return None
439 return n
439 return n
440
440
441 def walk(self, node=None, files=[], match=util.always):
441 def walk(self, node=None, files=[], match=util.always):
442 if node:
442 if node:
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 if match(fn): yield 'm', fn
444 if match(fn): yield 'm', fn
445 else:
445 else:
446 for src, fn in self.dirstate.walk(files, match):
446 for src, fn in self.dirstate.walk(files, match):
447 yield src, fn
447 yield src, fn
448
448
449 def changes(self, node1 = None, node2 = None, files = [],
449 def changes(self, node1 = None, node2 = None, files = [],
450 match = util.always):
450 match = util.always):
451 mf2, u = None, []
451 mf2, u = None, []
452
452
453 def fcmp(fn, mf):
453 def fcmp(fn, mf):
454 t1 = self.wread(fn)
454 t1 = self.wread(fn)
455 t2 = self.file(fn).read(mf.get(fn, nullid))
455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 return cmp(t1, t2)
456 return cmp(t1, t2)
457
457
458 def mfmatches(node):
458 def mfmatches(node):
459 mf = dict(self.manifest.read(node))
459 mf = dict(self.manifest.read(node))
460 for fn in mf.keys():
460 for fn in mf.keys():
461 if not match(fn):
461 if not match(fn):
462 del mf[fn]
462 del mf[fn]
463 return mf
463 return mf
464
464
465 # are we comparing the working directory?
465 # are we comparing the working directory?
466 if not node2:
466 if not node2:
467 l, c, a, d, u = self.dirstate.changes(files, match)
467 l, c, a, d, u = self.dirstate.changes(files, match)
468
468
469 # are we comparing working dir against its parent?
469 # are we comparing working dir against its parent?
470 if not node1:
470 if not node1:
471 if l:
471 if l:
472 # do a full compare of any files that might have changed
472 # do a full compare of any files that might have changed
473 change = self.changelog.read(self.dirstate.parents()[0])
473 change = self.changelog.read(self.dirstate.parents()[0])
474 mf2 = mfmatches(change[0])
474 mf2 = mfmatches(change[0])
475 for f in l:
475 for f in l:
476 if fcmp(f, mf2):
476 if fcmp(f, mf2):
477 c.append(f)
477 c.append(f)
478
478
479 for l in c, a, d, u:
479 for l in c, a, d, u:
480 l.sort()
480 l.sort()
481
481
482 return (c, a, d, u)
482 return (c, a, d, u)
483
483
484 # are we comparing working dir against non-tip?
484 # are we comparing working dir against non-tip?
485 # generate a pseudo-manifest for the working dir
485 # generate a pseudo-manifest for the working dir
486 if not node2:
486 if not node2:
487 if not mf2:
487 if not mf2:
488 change = self.changelog.read(self.dirstate.parents()[0])
488 change = self.changelog.read(self.dirstate.parents()[0])
489 mf2 = mfmatches(change[0])
489 mf2 = mfmatches(change[0])
490 for f in a + c + l:
490 for f in a + c + l:
491 mf2[f] = ""
491 mf2[f] = ""
492 for f in d:
492 for f in d:
493 if f in mf2: del mf2[f]
493 if f in mf2: del mf2[f]
494 else:
494 else:
495 change = self.changelog.read(node2)
495 change = self.changelog.read(node2)
496 mf2 = mfmatches(change[0])
496 mf2 = mfmatches(change[0])
497
497
498 # flush lists from dirstate before comparing manifests
498 # flush lists from dirstate before comparing manifests
499 c, a = [], []
499 c, a = [], []
500
500
501 change = self.changelog.read(node1)
501 change = self.changelog.read(node1)
502 mf1 = mfmatches(change[0])
502 mf1 = mfmatches(change[0])
503
503
504 for fn in mf2:
504 for fn in mf2:
505 if mf1.has_key(fn):
505 if mf1.has_key(fn):
506 if mf1[fn] != mf2[fn]:
506 if mf1[fn] != mf2[fn]:
507 if mf2[fn] != "" or fcmp(fn, mf1):
507 if mf2[fn] != "" or fcmp(fn, mf1):
508 c.append(fn)
508 c.append(fn)
509 del mf1[fn]
509 del mf1[fn]
510 else:
510 else:
511 a.append(fn)
511 a.append(fn)
512
512
513 d = mf1.keys()
513 d = mf1.keys()
514
514
515 for l in c, a, d, u:
515 for l in c, a, d, u:
516 l.sort()
516 l.sort()
517
517
518 return (c, a, d, u)
518 return (c, a, d, u)
519
519
520 def add(self, list):
520 def add(self, list):
521 for f in list:
521 for f in list:
522 p = self.wjoin(f)
522 p = self.wjoin(f)
523 if not os.path.exists(p):
523 if not os.path.exists(p):
524 self.ui.warn("%s does not exist!\n" % f)
524 self.ui.warn(_("%s does not exist!\n") % f)
525 elif not os.path.isfile(p):
525 elif not os.path.isfile(p):
526 self.ui.warn("%s not added: only files supported currently\n" % f)
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 elif self.dirstate.state(f) in 'an':
527 elif self.dirstate.state(f) in 'an':
528 self.ui.warn("%s already tracked!\n" % f)
528 self.ui.warn(_("%s already tracked!\n") % f)
529 else:
529 else:
530 self.dirstate.update([f], "a")
530 self.dirstate.update([f], "a")
531
531
532 def forget(self, list):
532 def forget(self, list):
533 for f in list:
533 for f in list:
534 if self.dirstate.state(f) not in 'ai':
534 if self.dirstate.state(f) not in 'ai':
535 self.ui.warn("%s not added!\n" % f)
535 self.ui.warn(_("%s not added!\n") % f)
536 else:
536 else:
537 self.dirstate.forget([f])
537 self.dirstate.forget([f])
538
538
539 def remove(self, list):
539 def remove(self, list):
540 for f in list:
540 for f in list:
541 p = self.wjoin(f)
541 p = self.wjoin(f)
542 if os.path.exists(p):
542 if os.path.exists(p):
543 self.ui.warn("%s still exists!\n" % f)
543 self.ui.warn(_("%s still exists!\n") % f)
544 elif self.dirstate.state(f) == 'a':
544 elif self.dirstate.state(f) == 'a':
545 self.ui.warn("%s never committed!\n" % f)
545 self.ui.warn(_("%s never committed!\n") % f)
546 self.dirstate.forget([f])
546 self.dirstate.forget([f])
547 elif f not in self.dirstate:
547 elif f not in self.dirstate:
548 self.ui.warn("%s not tracked!\n" % f)
548 self.ui.warn(_("%s not tracked!\n") % f)
549 else:
549 else:
550 self.dirstate.update([f], "r")
550 self.dirstate.update([f], "r")
551
551
552 def copy(self, source, dest):
552 def copy(self, source, dest):
553 p = self.wjoin(dest)
553 p = self.wjoin(dest)
554 if not os.path.exists(p):
554 if not os.path.exists(p):
555 self.ui.warn("%s does not exist!\n" % dest)
555 self.ui.warn(_("%s does not exist!\n") % dest)
556 elif not os.path.isfile(p):
556 elif not os.path.isfile(p):
557 self.ui.warn("copy failed: %s is not a file\n" % dest)
557 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
558 else:
558 else:
559 if self.dirstate.state(dest) == '?':
559 if self.dirstate.state(dest) == '?':
560 self.dirstate.update([dest], "a")
560 self.dirstate.update([dest], "a")
561 self.dirstate.copy(source, dest)
561 self.dirstate.copy(source, dest)
562
562
563 def heads(self):
563 def heads(self):
564 return self.changelog.heads()
564 return self.changelog.heads()
565
565
566 # branchlookup returns a dict giving a list of branches for
566 # branchlookup returns a dict giving a list of branches for
567 # each head. A branch is defined as the tag of a node or
567 # each head. A branch is defined as the tag of a node or
568 # the branch of the node's parents. If a node has multiple
568 # the branch of the node's parents. If a node has multiple
569 # branch tags, tags are eliminated if they are visible from other
569 # branch tags, tags are eliminated if they are visible from other
570 # branch tags.
570 # branch tags.
571 #
571 #
572 # So, for this graph: a->b->c->d->e
572 # So, for this graph: a->b->c->d->e
573 # \ /
573 # \ /
574 # aa -----/
574 # aa -----/
575 # a has tag 2.6.12
575 # a has tag 2.6.12
576 # d has tag 2.6.13
576 # d has tag 2.6.13
577 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
577 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
578 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
578 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
579 # from the list.
579 # from the list.
580 #
580 #
581 # It is possible that more than one head will have the same branch tag.
581 # It is possible that more than one head will have the same branch tag.
582 # callers need to check the result for multiple heads under the same
582 # callers need to check the result for multiple heads under the same
583 # branch tag if that is a problem for them (ie checkout of a specific
583 # branch tag if that is a problem for them (ie checkout of a specific
584 # branch).
584 # branch).
585 #
585 #
586 # passing in a specific branch will limit the depth of the search
586 # passing in a specific branch will limit the depth of the search
587 # through the parents. It won't limit the branches returned in the
587 # through the parents. It won't limit the branches returned in the
588 # result though.
588 # result though.
589 def branchlookup(self, heads=None, branch=None):
589 def branchlookup(self, heads=None, branch=None):
590 if not heads:
590 if not heads:
591 heads = self.heads()
591 heads = self.heads()
592 headt = [ h for h in heads ]
592 headt = [ h for h in heads ]
593 chlog = self.changelog
593 chlog = self.changelog
594 branches = {}
594 branches = {}
595 merges = []
595 merges = []
596 seenmerge = {}
596 seenmerge = {}
597
597
598 # traverse the tree once for each head, recording in the branches
598 # traverse the tree once for each head, recording in the branches
599 # dict which tags are visible from this head. The branches
599 # dict which tags are visible from this head. The branches
600 # dict also records which tags are visible from each tag
600 # dict also records which tags are visible from each tag
601 # while we traverse.
601 # while we traverse.
602 while headt or merges:
602 while headt or merges:
603 if merges:
603 if merges:
604 n, found = merges.pop()
604 n, found = merges.pop()
605 visit = [n]
605 visit = [n]
606 else:
606 else:
607 h = headt.pop()
607 h = headt.pop()
608 visit = [h]
608 visit = [h]
609 found = [h]
609 found = [h]
610 seen = {}
610 seen = {}
611 while visit:
611 while visit:
612 n = visit.pop()
612 n = visit.pop()
613 if n in seen:
613 if n in seen:
614 continue
614 continue
615 pp = chlog.parents(n)
615 pp = chlog.parents(n)
616 tags = self.nodetags(n)
616 tags = self.nodetags(n)
617 if tags:
617 if tags:
618 for x in tags:
618 for x in tags:
619 if x == 'tip':
619 if x == 'tip':
620 continue
620 continue
621 for f in found:
621 for f in found:
622 branches.setdefault(f, {})[n] = 1
622 branches.setdefault(f, {})[n] = 1
623 branches.setdefault(n, {})[n] = 1
623 branches.setdefault(n, {})[n] = 1
624 break
624 break
625 if n not in found:
625 if n not in found:
626 found.append(n)
626 found.append(n)
627 if branch in tags:
627 if branch in tags:
628 continue
628 continue
629 seen[n] = 1
629 seen[n] = 1
630 if pp[1] != nullid and n not in seenmerge:
630 if pp[1] != nullid and n not in seenmerge:
631 merges.append((pp[1], [x for x in found]))
631 merges.append((pp[1], [x for x in found]))
632 seenmerge[n] = 1
632 seenmerge[n] = 1
633 if pp[0] != nullid:
633 if pp[0] != nullid:
634 visit.append(pp[0])
634 visit.append(pp[0])
635 # traverse the branches dict, eliminating branch tags from each
635 # traverse the branches dict, eliminating branch tags from each
636 # head that are visible from another branch tag for that head.
636 # head that are visible from another branch tag for that head.
637 out = {}
637 out = {}
638 viscache = {}
638 viscache = {}
639 for h in heads:
639 for h in heads:
640 def visible(node):
640 def visible(node):
641 if node in viscache:
641 if node in viscache:
642 return viscache[node]
642 return viscache[node]
643 ret = {}
643 ret = {}
644 visit = [node]
644 visit = [node]
645 while visit:
645 while visit:
646 x = visit.pop()
646 x = visit.pop()
647 if x in viscache:
647 if x in viscache:
648 ret.update(viscache[x])
648 ret.update(viscache[x])
649 elif x not in ret:
649 elif x not in ret:
650 ret[x] = 1
650 ret[x] = 1
651 if x in branches:
651 if x in branches:
652 visit[len(visit):] = branches[x].keys()
652 visit[len(visit):] = branches[x].keys()
653 viscache[node] = ret
653 viscache[node] = ret
654 return ret
654 return ret
655 if h not in branches:
655 if h not in branches:
656 continue
656 continue
657 # O(n^2), but somewhat limited. This only searches the
657 # O(n^2), but somewhat limited. This only searches the
658 # tags visible from a specific head, not all the tags in the
658 # tags visible from a specific head, not all the tags in the
659 # whole repo.
659 # whole repo.
660 for b in branches[h]:
660 for b in branches[h]:
661 vis = False
661 vis = False
662 for bb in branches[h].keys():
662 for bb in branches[h].keys():
663 if b != bb:
663 if b != bb:
664 if b in visible(bb):
664 if b in visible(bb):
665 vis = True
665 vis = True
666 break
666 break
667 if not vis:
667 if not vis:
668 l = out.setdefault(h, [])
668 l = out.setdefault(h, [])
669 l[len(l):] = self.nodetags(b)
669 l[len(l):] = self.nodetags(b)
670 return out
670 return out
671
671
672 def branches(self, nodes):
672 def branches(self, nodes):
673 if not nodes: nodes = [self.changelog.tip()]
673 if not nodes: nodes = [self.changelog.tip()]
674 b = []
674 b = []
675 for n in nodes:
675 for n in nodes:
676 t = n
676 t = n
677 while n:
677 while n:
678 p = self.changelog.parents(n)
678 p = self.changelog.parents(n)
679 if p[1] != nullid or p[0] == nullid:
679 if p[1] != nullid or p[0] == nullid:
680 b.append((t, n, p[0], p[1]))
680 b.append((t, n, p[0], p[1]))
681 break
681 break
682 n = p[0]
682 n = p[0]
683 return b
683 return b
684
684
685 def between(self, pairs):
685 def between(self, pairs):
686 r = []
686 r = []
687
687
688 for top, bottom in pairs:
688 for top, bottom in pairs:
689 n, l, i = top, [], 0
689 n, l, i = top, [], 0
690 f = 1
690 f = 1
691
691
692 while n != bottom:
692 while n != bottom:
693 p = self.changelog.parents(n)[0]
693 p = self.changelog.parents(n)[0]
694 if i == f:
694 if i == f:
695 l.append(n)
695 l.append(n)
696 f = f * 2
696 f = f * 2
697 n = p
697 n = p
698 i += 1
698 i += 1
699
699
700 r.append(l)
700 r.append(l)
701
701
702 return r
702 return r
703
703
704 def newer(self, nodes):
704 def newer(self, nodes):
705 m = {}
705 m = {}
706 nl = []
706 nl = []
707 pm = {}
707 pm = {}
708 cl = self.changelog
708 cl = self.changelog
709 t = l = cl.count()
709 t = l = cl.count()
710
710
711 # find the lowest numbered node
711 # find the lowest numbered node
712 for n in nodes:
712 for n in nodes:
713 l = min(l, cl.rev(n))
713 l = min(l, cl.rev(n))
714 m[n] = 1
714 m[n] = 1
715
715
716 for i in xrange(l, t):
716 for i in xrange(l, t):
717 n = cl.node(i)
717 n = cl.node(i)
718 if n in m: # explicitly listed
718 if n in m: # explicitly listed
719 pm[n] = 1
719 pm[n] = 1
720 nl.append(n)
720 nl.append(n)
721 continue
721 continue
722 for p in cl.parents(n):
722 for p in cl.parents(n):
723 if p in pm: # parent listed
723 if p in pm: # parent listed
724 pm[n] = 1
724 pm[n] = 1
725 nl.append(n)
725 nl.append(n)
726 break
726 break
727
727
728 return nl
728 return nl
729
729
730 def findincoming(self, remote, base=None, heads=None):
730 def findincoming(self, remote, base=None, heads=None):
731 m = self.changelog.nodemap
731 m = self.changelog.nodemap
732 search = []
732 search = []
733 fetch = {}
733 fetch = {}
734 seen = {}
734 seen = {}
735 seenbranch = {}
735 seenbranch = {}
736 if base == None:
736 if base == None:
737 base = {}
737 base = {}
738
738
739 # assume we're closer to the tip than the root
739 # assume we're closer to the tip than the root
740 # and start by examining the heads
740 # and start by examining the heads
741 self.ui.status("searching for changes\n")
741 self.ui.status(_("searching for changes\n"))
742
742
743 if not heads:
743 if not heads:
744 heads = remote.heads()
744 heads = remote.heads()
745
745
746 unknown = []
746 unknown = []
747 for h in heads:
747 for h in heads:
748 if h not in m:
748 if h not in m:
749 unknown.append(h)
749 unknown.append(h)
750 else:
750 else:
751 base[h] = 1
751 base[h] = 1
752
752
753 if not unknown:
753 if not unknown:
754 return None
754 return None
755
755
756 rep = {}
756 rep = {}
757 reqcnt = 0
757 reqcnt = 0
758
758
759 # search through remote branches
759 # search through remote branches
760 # a 'branch' here is a linear segment of history, with four parts:
760 # a 'branch' here is a linear segment of history, with four parts:
761 # head, root, first parent, second parent
761 # head, root, first parent, second parent
762 # (a branch always has two parents (or none) by definition)
762 # (a branch always has two parents (or none) by definition)
763 unknown = remote.branches(unknown)
763 unknown = remote.branches(unknown)
764 while unknown:
764 while unknown:
765 r = []
765 r = []
766 while unknown:
766 while unknown:
767 n = unknown.pop(0)
767 n = unknown.pop(0)
768 if n[0] in seen:
768 if n[0] in seen:
769 continue
769 continue
770
770
771 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
771 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
772 if n[0] == nullid:
772 if n[0] == nullid:
773 break
773 break
774 if n in seenbranch:
774 if n in seenbranch:
775 self.ui.debug("branch already found\n")
775 self.ui.debug(_("branch already found\n"))
776 continue
776 continue
777 if n[1] and n[1] in m: # do we know the base?
777 if n[1] and n[1] in m: # do we know the base?
778 self.ui.debug("found incomplete branch %s:%s\n"
778 self.ui.debug(_("found incomplete branch %s:%s\n")
779 % (short(n[0]), short(n[1])))
779 % (short(n[0]), short(n[1])))
780 search.append(n) # schedule branch range for scanning
780 search.append(n) # schedule branch range for scanning
781 seenbranch[n] = 1
781 seenbranch[n] = 1
782 else:
782 else:
783 if n[1] not in seen and n[1] not in fetch:
783 if n[1] not in seen and n[1] not in fetch:
784 if n[2] in m and n[3] in m:
784 if n[2] in m and n[3] in m:
785 self.ui.debug("found new changeset %s\n" %
785 self.ui.debug(_("found new changeset %s\n") %
786 short(n[1]))
786 short(n[1]))
787 fetch[n[1]] = 1 # earliest unknown
787 fetch[n[1]] = 1 # earliest unknown
788 base[n[2]] = 1 # latest known
788 base[n[2]] = 1 # latest known
789 continue
789 continue
790
790
791 for a in n[2:4]:
791 for a in n[2:4]:
792 if a not in rep:
792 if a not in rep:
793 r.append(a)
793 r.append(a)
794 rep[a] = 1
794 rep[a] = 1
795
795
796 seen[n[0]] = 1
796 seen[n[0]] = 1
797
797
798 if r:
798 if r:
799 reqcnt += 1
799 reqcnt += 1
800 self.ui.debug("request %d: %s\n" %
800 self.ui.debug(_("request %d: %s\n") %
801 (reqcnt, " ".join(map(short, r))))
801 (reqcnt, " ".join(map(short, r))))
802 for p in range(0, len(r), 10):
802 for p in range(0, len(r), 10):
803 for b in remote.branches(r[p:p+10]):
803 for b in remote.branches(r[p:p+10]):
804 self.ui.debug("received %s:%s\n" %
804 self.ui.debug(_("received %s:%s\n") %
805 (short(b[0]), short(b[1])))
805 (short(b[0]), short(b[1])))
806 if b[0] in m:
806 if b[0] in m:
807 self.ui.debug("found base node %s\n" % short(b[0]))
807 self.ui.debug(_("found base node %s\n") % short(b[0]))
808 base[b[0]] = 1
808 base[b[0]] = 1
809 elif b[0] not in seen:
809 elif b[0] not in seen:
810 unknown.append(b)
810 unknown.append(b)
811
811
812 # do binary search on the branches we found
812 # do binary search on the branches we found
813 while search:
813 while search:
814 n = search.pop(0)
814 n = search.pop(0)
815 reqcnt += 1
815 reqcnt += 1
816 l = remote.between([(n[0], n[1])])[0]
816 l = remote.between([(n[0], n[1])])[0]
817 l.append(n[1])
817 l.append(n[1])
818 p = n[0]
818 p = n[0]
819 f = 1
819 f = 1
820 for i in l:
820 for i in l:
821 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
821 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
822 if i in m:
822 if i in m:
823 if f <= 2:
823 if f <= 2:
824 self.ui.debug("found new branch changeset %s\n" %
824 self.ui.debug(_("found new branch changeset %s\n") %
825 short(p))
825 short(p))
826 fetch[p] = 1
826 fetch[p] = 1
827 base[i] = 1
827 base[i] = 1
828 else:
828 else:
829 self.ui.debug("narrowed branch search to %s:%s\n"
829 self.ui.debug(_("narrowed branch search to %s:%s\n")
830 % (short(p), short(i)))
830 % (short(p), short(i)))
831 search.append((p, i))
831 search.append((p, i))
832 break
832 break
833 p, f = i, f * 2
833 p, f = i, f * 2
834
834
835 # sanity check our fetch list
835 # sanity check our fetch list
836 for f in fetch.keys():
836 for f in fetch.keys():
837 if f in m:
837 if f in m:
838 raise repo.RepoError("already have changeset " + short(f[:4]))
838 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
839
839
840 if base.keys() == [nullid]:
840 if base.keys() == [nullid]:
841 self.ui.warn("warning: pulling from an unrelated repository!\n")
841 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
842
842
843 self.ui.note("found new changesets starting at " +
843 self.ui.note(_("found new changesets starting at ") +
844 " ".join([short(f) for f in fetch]) + "\n")
844 " ".join([short(f) for f in fetch]) + "\n")
845
845
846 self.ui.debug("%d total queries\n" % reqcnt)
846 self.ui.debug(_("%d total queries\n") % reqcnt)
847
847
848 return fetch.keys()
848 return fetch.keys()
849
849
850 def findoutgoing(self, remote, base=None, heads=None):
850 def findoutgoing(self, remote, base=None, heads=None):
851 if base == None:
851 if base == None:
852 base = {}
852 base = {}
853 self.findincoming(remote, base, heads)
853 self.findincoming(remote, base, heads)
854
854
855 self.ui.debug("common changesets up to "
855 self.ui.debug(_("common changesets up to ")
856 + " ".join(map(short, base.keys())) + "\n")
856 + " ".join(map(short, base.keys())) + "\n")
857
857
858 remain = dict.fromkeys(self.changelog.nodemap)
858 remain = dict.fromkeys(self.changelog.nodemap)
859
859
860 # prune everything remote has from the tree
860 # prune everything remote has from the tree
861 del remain[nullid]
861 del remain[nullid]
862 remove = base.keys()
862 remove = base.keys()
863 while remove:
863 while remove:
864 n = remove.pop(0)
864 n = remove.pop(0)
865 if n in remain:
865 if n in remain:
866 del remain[n]
866 del remain[n]
867 for p in self.changelog.parents(n):
867 for p in self.changelog.parents(n):
868 remove.append(p)
868 remove.append(p)
869
869
870 # find every node whose parents have been pruned
870 # find every node whose parents have been pruned
871 subset = []
871 subset = []
872 for n in remain:
872 for n in remain:
873 p1, p2 = self.changelog.parents(n)
873 p1, p2 = self.changelog.parents(n)
874 if p1 not in remain and p2 not in remain:
874 if p1 not in remain and p2 not in remain:
875 subset.append(n)
875 subset.append(n)
876
876
877 # this is the set of all roots we have to push
877 # this is the set of all roots we have to push
878 return subset
878 return subset
879
879
880 def pull(self, remote):
880 def pull(self, remote):
881 lock = self.lock()
881 lock = self.lock()
882
882
883 # if we have an empty repo, fetch everything
883 # if we have an empty repo, fetch everything
884 if self.changelog.tip() == nullid:
884 if self.changelog.tip() == nullid:
885 self.ui.status("requesting all changes\n")
885 self.ui.status(_("requesting all changes\n"))
886 fetch = [nullid]
886 fetch = [nullid]
887 else:
887 else:
888 fetch = self.findincoming(remote)
888 fetch = self.findincoming(remote)
889
889
890 if not fetch:
890 if not fetch:
891 self.ui.status("no changes found\n")
891 self.ui.status(_("no changes found\n"))
892 return 1
892 return 1
893
893
894 cg = remote.changegroup(fetch)
894 cg = remote.changegroup(fetch)
895 return self.addchangegroup(cg)
895 return self.addchangegroup(cg)
896
896
897 def push(self, remote, force=False):
897 def push(self, remote, force=False):
898 lock = remote.lock()
898 lock = remote.lock()
899
899
900 base = {}
900 base = {}
901 heads = remote.heads()
901 heads = remote.heads()
902 inc = self.findincoming(remote, base, heads)
902 inc = self.findincoming(remote, base, heads)
903 if not force and inc:
903 if not force and inc:
904 self.ui.warn("abort: unsynced remote changes!\n")
904 self.ui.warn(_("abort: unsynced remote changes!\n"))
905 self.ui.status("(did you forget to sync? use push -f to force)\n")
905 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
906 return 1
906 return 1
907
907
908 update = self.findoutgoing(remote, base)
908 update = self.findoutgoing(remote, base)
909 if not update:
909 if not update:
910 self.ui.status("no changes found\n")
910 self.ui.status(_("no changes found\n"))
911 return 1
911 return 1
912 elif not force:
912 elif not force:
913 if len(heads) < len(self.changelog.heads()):
913 if len(heads) < len(self.changelog.heads()):
914 self.ui.warn("abort: push creates new remote branches!\n")
914 self.ui.warn(_("abort: push creates new remote branches!\n"))
915 self.ui.status("(did you forget to merge?" +
915 self.ui.status(_("(did you forget to merge?"
916 " use push -f to force)\n")
916 " use push -f to force)\n"))
917 return 1
917 return 1
918
918
919 cg = self.changegroup(update)
919 cg = self.changegroup(update)
920 return remote.addchangegroup(cg)
920 return remote.addchangegroup(cg)
921
921
922 def changegroup(self, basenodes):
922 def changegroup(self, basenodes):
923 genread = util.chunkbuffer
923 genread = util.chunkbuffer
924
924
925 def gengroup():
925 def gengroup():
926 nodes = self.newer(basenodes)
926 nodes = self.newer(basenodes)
927
927
928 # construct the link map
928 # construct the link map
929 linkmap = {}
929 linkmap = {}
930 for n in nodes:
930 for n in nodes:
931 linkmap[self.changelog.rev(n)] = n
931 linkmap[self.changelog.rev(n)] = n
932
932
933 # construct a list of all changed files
933 # construct a list of all changed files
934 changed = {}
934 changed = {}
935 for n in nodes:
935 for n in nodes:
936 c = self.changelog.read(n)
936 c = self.changelog.read(n)
937 for f in c[3]:
937 for f in c[3]:
938 changed[f] = 1
938 changed[f] = 1
939 changed = changed.keys()
939 changed = changed.keys()
940 changed.sort()
940 changed.sort()
941
941
942 # the changegroup is changesets + manifests + all file revs
942 # the changegroup is changesets + manifests + all file revs
943 revs = [ self.changelog.rev(n) for n in nodes ]
943 revs = [ self.changelog.rev(n) for n in nodes ]
944
944
945 for y in self.changelog.group(linkmap): yield y
945 for y in self.changelog.group(linkmap): yield y
946 for y in self.manifest.group(linkmap): yield y
946 for y in self.manifest.group(linkmap): yield y
947 for f in changed:
947 for f in changed:
948 yield struct.pack(">l", len(f) + 4) + f
948 yield struct.pack(">l", len(f) + 4) + f
949 g = self.file(f).group(linkmap)
949 g = self.file(f).group(linkmap)
950 for y in g:
950 for y in g:
951 yield y
951 yield y
952
952
953 yield struct.pack(">l", 0)
953 yield struct.pack(">l", 0)
954
954
955 return genread(gengroup())
955 return genread(gengroup())
956
956
957 def addchangegroup(self, source):
957 def addchangegroup(self, source):
958
958
959 def getchunk():
959 def getchunk():
960 d = source.read(4)
960 d = source.read(4)
961 if not d: return ""
961 if not d: return ""
962 l = struct.unpack(">l", d)[0]
962 l = struct.unpack(">l", d)[0]
963 if l <= 4: return ""
963 if l <= 4: return ""
964 d = source.read(l - 4)
964 d = source.read(l - 4)
965 if len(d) < l - 4:
965 if len(d) < l - 4:
966 raise repo.RepoError("premature EOF reading chunk" +
966 raise repo.RepoError(_("premature EOF reading chunk"
967 " (got %d bytes, expected %d)"
967 " (got %d bytes, expected %d)")
968 % (len(d), l - 4))
968 % (len(d), l - 4))
969 return d
969 return d
970
970
971 def getgroup():
971 def getgroup():
972 while 1:
972 while 1:
973 c = getchunk()
973 c = getchunk()
974 if not c: break
974 if not c: break
975 yield c
975 yield c
976
976
977 def csmap(x):
977 def csmap(x):
978 self.ui.debug("add changeset %s\n" % short(x))
978 self.ui.debug(_("add changeset %s\n") % short(x))
979 return self.changelog.count()
979 return self.changelog.count()
980
980
981 def revmap(x):
981 def revmap(x):
982 return self.changelog.rev(x)
982 return self.changelog.rev(x)
983
983
984 if not source: return
984 if not source: return
985 changesets = files = revisions = 0
985 changesets = files = revisions = 0
986
986
987 tr = self.transaction()
987 tr = self.transaction()
988
988
989 oldheads = len(self.changelog.heads())
989 oldheads = len(self.changelog.heads())
990
990
991 # pull off the changeset group
991 # pull off the changeset group
992 self.ui.status("adding changesets\n")
992 self.ui.status(_("adding changesets\n"))
993 co = self.changelog.tip()
993 co = self.changelog.tip()
994 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
994 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
995 cnr, cor = map(self.changelog.rev, (cn, co))
995 cnr, cor = map(self.changelog.rev, (cn, co))
996 if cn == nullid:
996 if cn == nullid:
997 cnr = cor
997 cnr = cor
998 changesets = cnr - cor
998 changesets = cnr - cor
999
999
1000 # pull off the manifest group
1000 # pull off the manifest group
1001 self.ui.status("adding manifests\n")
1001 self.ui.status(_("adding manifests\n"))
1002 mm = self.manifest.tip()
1002 mm = self.manifest.tip()
1003 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1003 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1004
1004
1005 # process the files
1005 # process the files
1006 self.ui.status("adding file changes\n")
1006 self.ui.status(_("adding file changes\n"))
1007 while 1:
1007 while 1:
1008 f = getchunk()
1008 f = getchunk()
1009 if not f: break
1009 if not f: break
1010 self.ui.debug("adding %s revisions\n" % f)
1010 self.ui.debug(_("adding %s revisions\n") % f)
1011 fl = self.file(f)
1011 fl = self.file(f)
1012 o = fl.count()
1012 o = fl.count()
1013 n = fl.addgroup(getgroup(), revmap, tr)
1013 n = fl.addgroup(getgroup(), revmap, tr)
1014 revisions += fl.count() - o
1014 revisions += fl.count() - o
1015 files += 1
1015 files += 1
1016
1016
1017 newheads = len(self.changelog.heads())
1017 newheads = len(self.changelog.heads())
1018 heads = ""
1018 heads = ""
1019 if oldheads and newheads > oldheads:
1019 if oldheads and newheads > oldheads:
1020 heads = " (+%d heads)" % (newheads - oldheads)
1020 heads = _(" (+%d heads)") % (newheads - oldheads)
1021
1021
1022 self.ui.status(("added %d changesets" +
1022 self.ui.status(_("added %d changesets"
1023 " with %d changes to %d files%s\n")
1023 " with %d changes to %d files%s\n")
1024 % (changesets, revisions, files, heads))
1024 % (changesets, revisions, files, heads))
1025
1025
1026 tr.close()
1026 tr.close()
1027
1027
1028 if changesets > 0:
1028 if changesets > 0:
1029 if not self.hook("changegroup",
1029 if not self.hook("changegroup",
1030 node=hex(self.changelog.node(cor+1))):
1030 node=hex(self.changelog.node(cor+1))):
1031 self.ui.warn("abort: changegroup hook returned failure!\n")
1031 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1032 return 1
1032 return 1
1033
1033
1034 for i in range(cor + 1, cnr + 1):
1034 for i in range(cor + 1, cnr + 1):
1035 self.hook("commit", node=hex(self.changelog.node(i)))
1035 self.hook("commit", node=hex(self.changelog.node(i)))
1036
1036
1037 return
1037 return
1038
1038
1039 def update(self, node, allow=False, force=False, choose=None,
1039 def update(self, node, allow=False, force=False, choose=None,
1040 moddirstate=True):
1040 moddirstate=True):
1041 pl = self.dirstate.parents()
1041 pl = self.dirstate.parents()
1042 if not force and pl[1] != nullid:
1042 if not force and pl[1] != nullid:
1043 self.ui.warn("aborting: outstanding uncommitted merges\n")
1043 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1044 return 1
1044 return 1
1045
1045
1046 p1, p2 = pl[0], node
1046 p1, p2 = pl[0], node
1047 pa = self.changelog.ancestor(p1, p2)
1047 pa = self.changelog.ancestor(p1, p2)
1048 m1n = self.changelog.read(p1)[0]
1048 m1n = self.changelog.read(p1)[0]
1049 m2n = self.changelog.read(p2)[0]
1049 m2n = self.changelog.read(p2)[0]
1050 man = self.manifest.ancestor(m1n, m2n)
1050 man = self.manifest.ancestor(m1n, m2n)
1051 m1 = self.manifest.read(m1n)
1051 m1 = self.manifest.read(m1n)
1052 mf1 = self.manifest.readflags(m1n)
1052 mf1 = self.manifest.readflags(m1n)
1053 m2 = self.manifest.read(m2n)
1053 m2 = self.manifest.read(m2n)
1054 mf2 = self.manifest.readflags(m2n)
1054 mf2 = self.manifest.readflags(m2n)
1055 ma = self.manifest.read(man)
1055 ma = self.manifest.read(man)
1056 mfa = self.manifest.readflags(man)
1056 mfa = self.manifest.readflags(man)
1057
1057
1058 (c, a, d, u) = self.changes()
1058 (c, a, d, u) = self.changes()
1059
1059
1060 # is this a jump, or a merge? i.e. is there a linear path
1060 # is this a jump, or a merge? i.e. is there a linear path
1061 # from p1 to p2?
1061 # from p1 to p2?
1062 linear_path = (pa == p1 or pa == p2)
1062 linear_path = (pa == p1 or pa == p2)
1063
1063
1064 # resolve the manifest to determine which files
1064 # resolve the manifest to determine which files
1065 # we care about merging
1065 # we care about merging
1066 self.ui.note("resolving manifests\n")
1066 self.ui.note(_("resolving manifests\n"))
1067 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1067 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1068 (force, allow, moddirstate, linear_path))
1068 (force, allow, moddirstate, linear_path))
1069 self.ui.debug(" ancestor %s local %s remote %s\n" %
1069 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1070 (short(man), short(m1n), short(m2n)))
1070 (short(man), short(m1n), short(m2n)))
1071
1071
1072 merge = {}
1072 merge = {}
1073 get = {}
1073 get = {}
1074 remove = []
1074 remove = []
1075
1075
1076 # construct a working dir manifest
1076 # construct a working dir manifest
1077 mw = m1.copy()
1077 mw = m1.copy()
1078 mfw = mf1.copy()
1078 mfw = mf1.copy()
1079 umap = dict.fromkeys(u)
1079 umap = dict.fromkeys(u)
1080
1080
1081 for f in a + c + u:
1081 for f in a + c + u:
1082 mw[f] = ""
1082 mw[f] = ""
1083 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1083 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1084
1084
1085 for f in d:
1085 for f in d:
1086 if f in mw: del mw[f]
1086 if f in mw: del mw[f]
1087
1087
1088 # If we're jumping between revisions (as opposed to merging),
1088 # If we're jumping between revisions (as opposed to merging),
1089 # and if neither the working directory nor the target rev has
1089 # and if neither the working directory nor the target rev has
1090 # the file, then we need to remove it from the dirstate, to
1090 # the file, then we need to remove it from the dirstate, to
1091 # prevent the dirstate from listing the file when it is no
1091 # prevent the dirstate from listing the file when it is no
1092 # longer in the manifest.
1092 # longer in the manifest.
1093 if moddirstate and linear_path and f not in m2:
1093 if moddirstate and linear_path and f not in m2:
1094 self.dirstate.forget((f,))
1094 self.dirstate.forget((f,))
1095
1095
1096 # Compare manifests
1096 # Compare manifests
1097 for f, n in mw.iteritems():
1097 for f, n in mw.iteritems():
1098 if choose and not choose(f): continue
1098 if choose and not choose(f): continue
1099 if f in m2:
1099 if f in m2:
1100 s = 0
1100 s = 0
1101
1101
1102 # is the wfile new since m1, and match m2?
1102 # is the wfile new since m1, and match m2?
1103 if f not in m1:
1103 if f not in m1:
1104 t1 = self.wread(f)
1104 t1 = self.wread(f)
1105 t2 = self.file(f).read(m2[f])
1105 t2 = self.file(f).read(m2[f])
1106 if cmp(t1, t2) == 0:
1106 if cmp(t1, t2) == 0:
1107 n = m2[f]
1107 n = m2[f]
1108 del t1, t2
1108 del t1, t2
1109
1109
1110 # are files different?
1110 # are files different?
1111 if n != m2[f]:
1111 if n != m2[f]:
1112 a = ma.get(f, nullid)
1112 a = ma.get(f, nullid)
1113 # are both different from the ancestor?
1113 # are both different from the ancestor?
1114 if n != a and m2[f] != a:
1114 if n != a and m2[f] != a:
1115 self.ui.debug(" %s versions differ, resolve\n" % f)
1115 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1116 # merge executable bits
1116 # merge executable bits
1117 # "if we changed or they changed, change in merge"
1117 # "if we changed or they changed, change in merge"
1118 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1118 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1119 mode = ((a^b) | (a^c)) ^ a
1119 mode = ((a^b) | (a^c)) ^ a
1120 merge[f] = (m1.get(f, nullid), m2[f], mode)
1120 merge[f] = (m1.get(f, nullid), m2[f], mode)
1121 s = 1
1121 s = 1
1122 # are we clobbering?
1122 # are we clobbering?
1123 # is remote's version newer?
1123 # is remote's version newer?
1124 # or are we going back in time?
1124 # or are we going back in time?
1125 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1125 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1126 self.ui.debug(" remote %s is newer, get\n" % f)
1126 self.ui.debug(_(" remote %s is newer, get\n") % f)
1127 get[f] = m2[f]
1127 get[f] = m2[f]
1128 s = 1
1128 s = 1
1129 elif f in umap:
1129 elif f in umap:
1130 # this unknown file is the same as the checkout
1130 # this unknown file is the same as the checkout
1131 get[f] = m2[f]
1131 get[f] = m2[f]
1132
1132
1133 if not s and mfw[f] != mf2[f]:
1133 if not s and mfw[f] != mf2[f]:
1134 if force:
1134 if force:
1135 self.ui.debug(" updating permissions for %s\n" % f)
1135 self.ui.debug(_(" updating permissions for %s\n") % f)
1136 util.set_exec(self.wjoin(f), mf2[f])
1136 util.set_exec(self.wjoin(f), mf2[f])
1137 else:
1137 else:
1138 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1138 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1139 mode = ((a^b) | (a^c)) ^ a
1139 mode = ((a^b) | (a^c)) ^ a
1140 if mode != b:
1140 if mode != b:
1141 self.ui.debug(" updating permissions for %s\n" % f)
1141 self.ui.debug(_(" updating permissions for %s\n") % f)
1142 util.set_exec(self.wjoin(f), mode)
1142 util.set_exec(self.wjoin(f), mode)
1143 del m2[f]
1143 del m2[f]
1144 elif f in ma:
1144 elif f in ma:
1145 if n != ma[f]:
1145 if n != ma[f]:
1146 r = "d"
1146 r = _("d")
1147 if not force and (linear_path or allow):
1147 if not force and (linear_path or allow):
1148 r = self.ui.prompt(
1148 r = self.ui.prompt(
1149 (" local changed %s which remote deleted\n" % f) +
1149 (_(" local changed %s which remote deleted\n") % f) +
1150 "(k)eep or (d)elete?", "[kd]", "k")
1150 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1151 if r == "d":
1151 if r == _("d"):
1152 remove.append(f)
1152 remove.append(f)
1153 else:
1153 else:
1154 self.ui.debug("other deleted %s\n" % f)
1154 self.ui.debug(_("other deleted %s\n") % f)
1155 remove.append(f) # other deleted it
1155 remove.append(f) # other deleted it
1156 else:
1156 else:
1157 # file is created on branch or in working directory
1157 # file is created on branch or in working directory
1158 if force and f not in umap:
1158 if force and f not in umap:
1159 self.ui.debug("remote deleted %s, clobbering\n" % f)
1159 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1160 remove.append(f)
1160 remove.append(f)
1161 elif n == m1.get(f, nullid): # same as parent
1161 elif n == m1.get(f, nullid): # same as parent
1162 if p2 == pa: # going backwards?
1162 if p2 == pa: # going backwards?
1163 self.ui.debug("remote deleted %s\n" % f)
1163 self.ui.debug(_("remote deleted %s\n") % f)
1164 remove.append(f)
1164 remove.append(f)
1165 else:
1165 else:
1166 self.ui.debug("local modified %s, keeping\n" % f)
1166 self.ui.debug(_("local modified %s, keeping\n") % f)
1167 else:
1167 else:
1168 self.ui.debug("working dir created %s, keeping\n" % f)
1168 self.ui.debug(_("working dir created %s, keeping\n") % f)
1169
1169
1170 for f, n in m2.iteritems():
1170 for f, n in m2.iteritems():
1171 if choose and not choose(f): continue
1171 if choose and not choose(f): continue
1172 if f[0] == "/": continue
1172 if f[0] == "/": continue
1173 if f in ma and n != ma[f]:
1173 if f in ma and n != ma[f]:
1174 r = "k"
1174 r = _("k")
1175 if not force and (linear_path or allow):
1175 if not force and (linear_path or allow):
1176 r = self.ui.prompt(
1176 r = self.ui.prompt(
1177 ("remote changed %s which local deleted\n" % f) +
1177 (_("remote changed %s which local deleted\n") % f) +
1178 "(k)eep or (d)elete?", "[kd]", "k")
1178 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1179 if r == "k": get[f] = n
1179 if r == _("k"): get[f] = n
1180 elif f not in ma:
1180 elif f not in ma:
1181 self.ui.debug("remote created %s\n" % f)
1181 self.ui.debug(_("remote created %s\n") % f)
1182 get[f] = n
1182 get[f] = n
1183 else:
1183 else:
1184 if force or p2 == pa: # going backwards?
1184 if force or p2 == pa: # going backwards?
1185 self.ui.debug("local deleted %s, recreating\n" % f)
1185 self.ui.debug(_("local deleted %s, recreating\n") % f)
1186 get[f] = n
1186 get[f] = n
1187 else:
1187 else:
1188 self.ui.debug("local deleted %s\n" % f)
1188 self.ui.debug(_("local deleted %s\n") % f)
1189
1189
1190 del mw, m1, m2, ma
1190 del mw, m1, m2, ma
1191
1191
1192 if force:
1192 if force:
1193 for f in merge:
1193 for f in merge:
1194 get[f] = merge[f][1]
1194 get[f] = merge[f][1]
1195 merge = {}
1195 merge = {}
1196
1196
1197 if linear_path or force:
1197 if linear_path or force:
1198 # we don't need to do any magic, just jump to the new rev
1198 # we don't need to do any magic, just jump to the new rev
1199 branch_merge = False
1199 branch_merge = False
1200 p1, p2 = p2, nullid
1200 p1, p2 = p2, nullid
1201 else:
1201 else:
1202 if not allow:
1202 if not allow:
1203 self.ui.status("this update spans a branch" +
1203 self.ui.status(_("this update spans a branch"
1204 " affecting the following files:\n")
1204 " affecting the following files:\n"))
1205 fl = merge.keys() + get.keys()
1205 fl = merge.keys() + get.keys()
1206 fl.sort()
1206 fl.sort()
1207 for f in fl:
1207 for f in fl:
1208 cf = ""
1208 cf = ""
1209 if f in merge: cf = " (resolve)"
1209 if f in merge: cf = _(" (resolve)")
1210 self.ui.status(" %s%s\n" % (f, cf))
1210 self.ui.status(" %s%s\n" % (f, cf))
1211 self.ui.warn("aborting update spanning branches!\n")
1211 self.ui.warn(_("aborting update spanning branches!\n"))
1212 self.ui.status("(use update -m to merge across branches" +
1212 self.ui.status(_("(use update -m to merge across branches"
1213 " or -C to lose changes)\n")
1213 " or -C to lose changes)\n"))
1214 return 1
1214 return 1
1215 branch_merge = True
1215 branch_merge = True
1216
1216
1217 if moddirstate:
1217 if moddirstate:
1218 self.dirstate.setparents(p1, p2)
1218 self.dirstate.setparents(p1, p2)
1219
1219
1220 # get the files we don't need to change
1220 # get the files we don't need to change
1221 files = get.keys()
1221 files = get.keys()
1222 files.sort()
1222 files.sort()
1223 for f in files:
1223 for f in files:
1224 if f[0] == "/": continue
1224 if f[0] == "/": continue
1225 self.ui.note("getting %s\n" % f)
1225 self.ui.note(_("getting %s\n") % f)
1226 t = self.file(f).read(get[f])
1226 t = self.file(f).read(get[f])
1227 try:
1227 try:
1228 self.wwrite(f, t)
1228 self.wwrite(f, t)
1229 except IOError, e:
1229 except IOError, e:
1230 if e.errno != errno.ENOENT:
1230 if e.errno != errno.ENOENT:
1231 raise
1231 raise
1232 os.makedirs(os.path.dirname(self.wjoin(f)))
1232 os.makedirs(os.path.dirname(self.wjoin(f)))
1233 self.wwrite(f, t)
1233 self.wwrite(f, t)
1234 util.set_exec(self.wjoin(f), mf2[f])
1234 util.set_exec(self.wjoin(f), mf2[f])
1235 if moddirstate:
1235 if moddirstate:
1236 if branch_merge:
1236 if branch_merge:
1237 self.dirstate.update([f], 'n', st_mtime=-1)
1237 self.dirstate.update([f], 'n', st_mtime=-1)
1238 else:
1238 else:
1239 self.dirstate.update([f], 'n')
1239 self.dirstate.update([f], 'n')
1240
1240
1241 # merge the tricky bits
1241 # merge the tricky bits
1242 files = merge.keys()
1242 files = merge.keys()
1243 files.sort()
1243 files.sort()
1244 for f in files:
1244 for f in files:
1245 self.ui.status("merging %s\n" % f)
1245 self.ui.status(_("merging %s\n") % f)
1246 my, other, flag = merge[f]
1246 my, other, flag = merge[f]
1247 self.merge3(f, my, other)
1247 self.merge3(f, my, other)
1248 util.set_exec(self.wjoin(f), flag)
1248 util.set_exec(self.wjoin(f), flag)
1249 if moddirstate:
1249 if moddirstate:
1250 if branch_merge:
1250 if branch_merge:
1251 # We've done a branch merge, mark this file as merged
1251 # We've done a branch merge, mark this file as merged
1252 # so that we properly record the merger later
1252 # so that we properly record the merger later
1253 self.dirstate.update([f], 'm')
1253 self.dirstate.update([f], 'm')
1254 else:
1254 else:
1255 # We've update-merged a locally modified file, so
1255 # We've update-merged a locally modified file, so
1256 # we set the dirstate to emulate a normal checkout
1256 # we set the dirstate to emulate a normal checkout
1257 # of that file some time in the past. Thus our
1257 # of that file some time in the past. Thus our
1258 # merge will appear as a normal local file
1258 # merge will appear as a normal local file
1259 # modification.
1259 # modification.
1260 f_len = len(self.file(f).read(other))
1260 f_len = len(self.file(f).read(other))
1261 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1261 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1262
1262
1263 remove.sort()
1263 remove.sort()
1264 for f in remove:
1264 for f in remove:
1265 self.ui.note("removing %s\n" % f)
1265 self.ui.note(_("removing %s\n") % f)
1266 try:
1266 try:
1267 os.unlink(self.wjoin(f))
1267 os.unlink(self.wjoin(f))
1268 except OSError, inst:
1268 except OSError, inst:
1269 if inst.errno != errno.ENOENT:
1269 if inst.errno != errno.ENOENT:
1270 self.ui.warn("update failed to remove %s: %s!\n" %
1270 self.ui.warn(_("update failed to remove %s: %s!\n") %
1271 (f, inst.strerror))
1271 (f, inst.strerror))
1272 # try removing directories that might now be empty
1272 # try removing directories that might now be empty
1273 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1273 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1274 except: pass
1274 except: pass
1275 if moddirstate:
1275 if moddirstate:
1276 if branch_merge:
1276 if branch_merge:
1277 self.dirstate.update(remove, 'r')
1277 self.dirstate.update(remove, 'r')
1278 else:
1278 else:
1279 self.dirstate.forget(remove)
1279 self.dirstate.forget(remove)
1280
1280
1281 def merge3(self, fn, my, other):
1281 def merge3(self, fn, my, other):
1282 """perform a 3-way merge in the working directory"""
1282 """perform a 3-way merge in the working directory"""
1283
1283
1284 def temp(prefix, node):
1284 def temp(prefix, node):
1285 pre = "%s~%s." % (os.path.basename(fn), prefix)
1285 pre = "%s~%s." % (os.path.basename(fn), prefix)
1286 (fd, name) = tempfile.mkstemp("", pre)
1286 (fd, name) = tempfile.mkstemp("", pre)
1287 f = os.fdopen(fd, "wb")
1287 f = os.fdopen(fd, "wb")
1288 self.wwrite(fn, fl.read(node), f)
1288 self.wwrite(fn, fl.read(node), f)
1289 f.close()
1289 f.close()
1290 return name
1290 return name
1291
1291
1292 fl = self.file(fn)
1292 fl = self.file(fn)
1293 base = fl.ancestor(my, other)
1293 base = fl.ancestor(my, other)
1294 a = self.wjoin(fn)
1294 a = self.wjoin(fn)
1295 b = temp("base", base)
1295 b = temp("base", base)
1296 c = temp("other", other)
1296 c = temp("other", other)
1297
1297
1298 self.ui.note("resolving %s\n" % fn)
1298 self.ui.note(_("resolving %s\n") % fn)
1299 self.ui.debug("file %s: my %s other %s ancestor %s\n" %
1299 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1300 (fn, short(my), short(other), short(base)))
1300 (fn, short(my), short(other), short(base)))
1301
1301
1302 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1302 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1303 or "hgmerge")
1303 or "hgmerge")
1304 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1304 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1305 if r:
1305 if r:
1306 self.ui.warn("merging %s failed!\n" % fn)
1306 self.ui.warn(_("merging %s failed!\n") % fn)
1307
1307
1308 os.unlink(b)
1308 os.unlink(b)
1309 os.unlink(c)
1309 os.unlink(c)
1310
1310
1311 def verify(self):
1311 def verify(self):
1312 filelinkrevs = {}
1312 filelinkrevs = {}
1313 filenodes = {}
1313 filenodes = {}
1314 changesets = revisions = files = 0
1314 changesets = revisions = files = 0
1315 errors = [0]
1315 errors = [0]
1316 neededmanifests = {}
1316 neededmanifests = {}
1317
1317
1318 def err(msg):
1318 def err(msg):
1319 self.ui.warn(msg + "\n")
1319 self.ui.warn(msg + "\n")
1320 errors[0] += 1
1320 errors[0] += 1
1321
1321
1322 seen = {}
1322 seen = {}
1323 self.ui.status("checking changesets\n")
1323 self.ui.status(_("checking changesets\n"))
1324 for i in range(self.changelog.count()):
1324 for i in range(self.changelog.count()):
1325 changesets += 1
1325 changesets += 1
1326 n = self.changelog.node(i)
1326 n = self.changelog.node(i)
1327 l = self.changelog.linkrev(n)
1327 l = self.changelog.linkrev(n)
1328 if l != i:
1328 if l != i:
1329 err("incorrect link (%d) for changeset revision %d" % (l, i))
1329 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1330 if n in seen:
1330 if n in seen:
1331 err("duplicate changeset at revision %d" % i)
1331 err(_("duplicate changeset at revision %d") % i)
1332 seen[n] = 1
1332 seen[n] = 1
1333
1333
1334 for p in self.changelog.parents(n):
1334 for p in self.changelog.parents(n):
1335 if p not in self.changelog.nodemap:
1335 if p not in self.changelog.nodemap:
1336 err("changeset %s has unknown parent %s" %
1336 err(_("changeset %s has unknown parent %s") %
1337 (short(n), short(p)))
1337 (short(n), short(p)))
1338 try:
1338 try:
1339 changes = self.changelog.read(n)
1339 changes = self.changelog.read(n)
1340 except Exception, inst:
1340 except Exception, inst:
1341 err("unpacking changeset %s: %s" % (short(n), inst))
1341 err(_("unpacking changeset %s: %s") % (short(n), inst))
1342
1342
1343 neededmanifests[changes[0]] = n
1343 neededmanifests[changes[0]] = n
1344
1344
1345 for f in changes[3]:
1345 for f in changes[3]:
1346 filelinkrevs.setdefault(f, []).append(i)
1346 filelinkrevs.setdefault(f, []).append(i)
1347
1347
1348 seen = {}
1348 seen = {}
1349 self.ui.status("checking manifests\n")
1349 self.ui.status(_("checking manifests\n"))
1350 for i in range(self.manifest.count()):
1350 for i in range(self.manifest.count()):
1351 n = self.manifest.node(i)
1351 n = self.manifest.node(i)
1352 l = self.manifest.linkrev(n)
1352 l = self.manifest.linkrev(n)
1353
1353
1354 if l < 0 or l >= self.changelog.count():
1354 if l < 0 or l >= self.changelog.count():
1355 err("bad manifest link (%d) at revision %d" % (l, i))
1355 err(_("bad manifest link (%d) at revision %d") % (l, i))
1356
1356
1357 if n in neededmanifests:
1357 if n in neededmanifests:
1358 del neededmanifests[n]
1358 del neededmanifests[n]
1359
1359
1360 if n in seen:
1360 if n in seen:
1361 err("duplicate manifest at revision %d" % i)
1361 err(_("duplicate manifest at revision %d") % i)
1362
1362
1363 seen[n] = 1
1363 seen[n] = 1
1364
1364
1365 for p in self.manifest.parents(n):
1365 for p in self.manifest.parents(n):
1366 if p not in self.manifest.nodemap:
1366 if p not in self.manifest.nodemap:
1367 err("manifest %s has unknown parent %s" %
1367 err(_("manifest %s has unknown parent %s") %
1368 (short(n), short(p)))
1368 (short(n), short(p)))
1369
1369
1370 try:
1370 try:
1371 delta = mdiff.patchtext(self.manifest.delta(n))
1371 delta = mdiff.patchtext(self.manifest.delta(n))
1372 except KeyboardInterrupt:
1372 except KeyboardInterrupt:
1373 self.ui.warn("interrupted")
1373 self.ui.warn(_("interrupted"))
1374 raise
1374 raise
1375 except Exception, inst:
1375 except Exception, inst:
1376 err("unpacking manifest %s: %s" % (short(n), inst))
1376 err(_("unpacking manifest %s: %s") % (short(n), inst))
1377
1377
1378 ff = [ l.split('\0') for l in delta.splitlines() ]
1378 ff = [ l.split('\0') for l in delta.splitlines() ]
1379 for f, fn in ff:
1379 for f, fn in ff:
1380 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1380 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1381
1381
1382 self.ui.status("crosschecking files in changesets and manifests\n")
1382 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1383
1383
1384 for m,c in neededmanifests.items():
1384 for m,c in neededmanifests.items():
1385 err("Changeset %s refers to unknown manifest %s" %
1385 err(_("Changeset %s refers to unknown manifest %s") %
1386 (short(m), short(c)))
1386 (short(m), short(c)))
1387 del neededmanifests
1387 del neededmanifests
1388
1388
1389 for f in filenodes:
1389 for f in filenodes:
1390 if f not in filelinkrevs:
1390 if f not in filelinkrevs:
1391 err("file %s in manifest but not in changesets" % f)
1391 err(_("file %s in manifest but not in changesets") % f)
1392
1392
1393 for f in filelinkrevs:
1393 for f in filelinkrevs:
1394 if f not in filenodes:
1394 if f not in filenodes:
1395 err("file %s in changeset but not in manifest" % f)
1395 err(_("file %s in changeset but not in manifest") % f)
1396
1396
1397 self.ui.status("checking files\n")
1397 self.ui.status(_("checking files\n"))
1398 ff = filenodes.keys()
1398 ff = filenodes.keys()
1399 ff.sort()
1399 ff.sort()
1400 for f in ff:
1400 for f in ff:
1401 if f == "/dev/null": continue
1401 if f == "/dev/null": continue
1402 files += 1
1402 files += 1
1403 fl = self.file(f)
1403 fl = self.file(f)
1404 nodes = { nullid: 1 }
1404 nodes = { nullid: 1 }
1405 seen = {}
1405 seen = {}
1406 for i in range(fl.count()):
1406 for i in range(fl.count()):
1407 revisions += 1
1407 revisions += 1
1408 n = fl.node(i)
1408 n = fl.node(i)
1409
1409
1410 if n in seen:
1410 if n in seen:
1411 err("%s: duplicate revision %d" % (f, i))
1411 err(_("%s: duplicate revision %d") % (f, i))
1412 if n not in filenodes[f]:
1412 if n not in filenodes[f]:
1413 err("%s: %d:%s not in manifests" % (f, i, short(n)))
1413 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1414 else:
1414 else:
1415 del filenodes[f][n]
1415 del filenodes[f][n]
1416
1416
1417 flr = fl.linkrev(n)
1417 flr = fl.linkrev(n)
1418 if flr not in filelinkrevs[f]:
1418 if flr not in filelinkrevs[f]:
1419 err("%s:%s points to unexpected changeset %d"
1419 err(_("%s:%s points to unexpected changeset %d")
1420 % (f, short(n), flr))
1420 % (f, short(n), flr))
1421 else:
1421 else:
1422 filelinkrevs[f].remove(flr)
1422 filelinkrevs[f].remove(flr)
1423
1423
1424 # verify contents
1424 # verify contents
1425 try:
1425 try:
1426 t = fl.read(n)
1426 t = fl.read(n)
1427 except Exception, inst:
1427 except Exception, inst:
1428 err("unpacking file %s %s: %s" % (f, short(n), inst))
1428 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1429
1429
1430 # verify parents
1430 # verify parents
1431 (p1, p2) = fl.parents(n)
1431 (p1, p2) = fl.parents(n)
1432 if p1 not in nodes:
1432 if p1 not in nodes:
1433 err("file %s:%s unknown parent 1 %s" %
1433 err(_("file %s:%s unknown parent 1 %s") %
1434 (f, short(n), short(p1)))
1434 (f, short(n), short(p1)))
1435 if p2 not in nodes:
1435 if p2 not in nodes:
1436 err("file %s:%s unknown parent 2 %s" %
1436 err(_("file %s:%s unknown parent 2 %s") %
1437 (f, short(n), short(p1)))
1437 (f, short(n), short(p1)))
1438 nodes[n] = 1
1438 nodes[n] = 1
1439
1439
1440 # cross-check
1440 # cross-check
1441 for node in filenodes[f]:
1441 for node in filenodes[f]:
1442 err("node %s in manifests not in %s" % (hex(node), f))
1442 err(_("node %s in manifests not in %s") % (hex(node), f))
1443
1443
1444 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1444 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1445 (files, changesets, revisions))
1445 (files, changesets, revisions))
1446
1446
1447 if errors[0]:
1447 if errors[0]:
1448 self.ui.warn("%d integrity errors encountered!\n" % errors[0])
1448 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1449 return 1
1449 return 1
@@ -1,168 +1,168 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct
8 import sys, struct
9 from revlog import *
9 from revlog import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "bisect")
12 demandload(globals(), "bisect")
13
13
14 class manifest(revlog):
14 class manifest(revlog):
15 def __init__(self, opener):
15 def __init__(self, opener):
16 self.mapcache = None
16 self.mapcache = None
17 self.listcache = None
17 self.listcache = None
18 self.addlist = None
18 self.addlist = None
19 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
19 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
20
20
21 def read(self, node):
21 def read(self, node):
22 if node == nullid: return {} # don't upset local cache
22 if node == nullid: return {} # don't upset local cache
23 if self.mapcache and self.mapcache[0] == node:
23 if self.mapcache and self.mapcache[0] == node:
24 return self.mapcache[1]
24 return self.mapcache[1]
25 text = self.revision(node)
25 text = self.revision(node)
26 map = {}
26 map = {}
27 flag = {}
27 flag = {}
28 self.listcache = (text, text.splitlines(1))
28 self.listcache = (text, text.splitlines(1))
29 for l in self.listcache[1]:
29 for l in self.listcache[1]:
30 (f, n) = l.split('\0')
30 (f, n) = l.split('\0')
31 map[f] = bin(n[:40])
31 map[f] = bin(n[:40])
32 flag[f] = (n[40:-1] == "x")
32 flag[f] = (n[40:-1] == "x")
33 self.mapcache = (node, map, flag)
33 self.mapcache = (node, map, flag)
34 return map
34 return map
35
35
36 def readflags(self, node):
36 def readflags(self, node):
37 if node == nullid: return {} # don't upset local cache
37 if node == nullid: return {} # don't upset local cache
38 if not self.mapcache or self.mapcache[0] != node:
38 if not self.mapcache or self.mapcache[0] != node:
39 self.read(node)
39 self.read(node)
40 return self.mapcache[2]
40 return self.mapcache[2]
41
41
42 def diff(self, a, b):
42 def diff(self, a, b):
43 # this is sneaky, as we're not actually using a and b
43 # this is sneaky, as we're not actually using a and b
44 if self.listcache and self.addlist and self.listcache[0] == a:
44 if self.listcache and self.addlist and self.listcache[0] == a:
45 d = mdiff.diff(self.listcache[1], self.addlist, 1)
45 d = mdiff.diff(self.listcache[1], self.addlist, 1)
46 if mdiff.patch(a, d) != b:
46 if mdiff.patch(a, d) != b:
47 raise AssertionError("sortdiff failed!")
47 raise AssertionError(_("sortdiff failed!"))
48 return d
48 return d
49 else:
49 else:
50 return mdiff.textdiff(a, b)
50 return mdiff.textdiff(a, b)
51
51
52 def add(self, map, flags, transaction, link, p1=None, p2=None,
52 def add(self, map, flags, transaction, link, p1=None, p2=None,
53 changed=None):
53 changed=None):
54 # directly generate the mdiff delta from the data collected during
54 # directly generate the mdiff delta from the data collected during
55 # the bisect loop below
55 # the bisect loop below
56 def gendelta(delta):
56 def gendelta(delta):
57 i = 0
57 i = 0
58 result = []
58 result = []
59 while i < len(delta):
59 while i < len(delta):
60 start = delta[i][2]
60 start = delta[i][2]
61 end = delta[i][3]
61 end = delta[i][3]
62 l = delta[i][4]
62 l = delta[i][4]
63 if l == None:
63 if l == None:
64 l = ""
64 l = ""
65 while i < len(delta) - 1 and start <= delta[i+1][2] \
65 while i < len(delta) - 1 and start <= delta[i+1][2] \
66 and end >= delta[i+1][2]:
66 and end >= delta[i+1][2]:
67 if delta[i+1][3] > end:
67 if delta[i+1][3] > end:
68 end = delta[i+1][3]
68 end = delta[i+1][3]
69 if delta[i+1][4]:
69 if delta[i+1][4]:
70 l += delta[i+1][4]
70 l += delta[i+1][4]
71 i += 1
71 i += 1
72 result.append(struct.pack(">lll", start, end, len(l)) + l)
72 result.append(struct.pack(">lll", start, end, len(l)) + l)
73 i += 1
73 i += 1
74 return result
74 return result
75
75
76 # apply the changes collected during the bisect loop to our addlist
76 # apply the changes collected during the bisect loop to our addlist
77 def addlistdelta(addlist, delta):
77 def addlistdelta(addlist, delta):
78 # apply the deltas to the addlist. start from the bottom up
78 # apply the deltas to the addlist. start from the bottom up
79 # so changes to the offsets don't mess things up.
79 # so changes to the offsets don't mess things up.
80 i = len(delta)
80 i = len(delta)
81 while i > 0:
81 while i > 0:
82 i -= 1
82 i -= 1
83 start = delta[i][0]
83 start = delta[i][0]
84 end = delta[i][1]
84 end = delta[i][1]
85 if delta[i][4]:
85 if delta[i][4]:
86 addlist[start:end] = [delta[i][4]]
86 addlist[start:end] = [delta[i][4]]
87 else:
87 else:
88 del addlist[start:end]
88 del addlist[start:end]
89 return addlist
89 return addlist
90
90
91 # calculate the byte offset of the start of each line in the
91 # calculate the byte offset of the start of each line in the
92 # manifest
92 # manifest
93 def calcoffsets(addlist):
93 def calcoffsets(addlist):
94 offsets = [0] * (len(addlist) + 1)
94 offsets = [0] * (len(addlist) + 1)
95 offset = 0
95 offset = 0
96 i = 0
96 i = 0
97 while i < len(addlist):
97 while i < len(addlist):
98 offsets[i] = offset
98 offsets[i] = offset
99 offset += len(addlist[i])
99 offset += len(addlist[i])
100 i += 1
100 i += 1
101 offsets[i] = offset
101 offsets[i] = offset
102 return offsets
102 return offsets
103
103
104 # if we're using the listcache, make sure it is valid and
104 # if we're using the listcache, make sure it is valid and
105 # parented by the same node we're diffing against
105 # parented by the same node we're diffing against
106 if not changed or not self.listcache or not p1 or \
106 if not changed or not self.listcache or not p1 or \
107 self.mapcache[0] != p1:
107 self.mapcache[0] != p1:
108 files = map.keys()
108 files = map.keys()
109 files.sort()
109 files.sort()
110
110
111 self.addlist = ["%s\000%s%s\n" %
111 self.addlist = ["%s\000%s%s\n" %
112 (f, hex(map[f]), flags[f] and "x" or '')
112 (f, hex(map[f]), flags[f] and "x" or '')
113 for f in files]
113 for f in files]
114 cachedelta = None
114 cachedelta = None
115 else:
115 else:
116 addlist = self.listcache[1]
116 addlist = self.listcache[1]
117
117
118 # find the starting offset for each line in the add list
118 # find the starting offset for each line in the add list
119 offsets = calcoffsets(addlist)
119 offsets = calcoffsets(addlist)
120
120
121 # combine the changed lists into one list for sorting
121 # combine the changed lists into one list for sorting
122 work = [[x, 0] for x in changed[0]]
122 work = [[x, 0] for x in changed[0]]
123 work[len(work):] = [[x, 1] for x in changed[1]]
123 work[len(work):] = [[x, 1] for x in changed[1]]
124 work.sort()
124 work.sort()
125
125
126 delta = []
126 delta = []
127 bs = 0
127 bs = 0
128
128
129 for w in work:
129 for w in work:
130 f = w[0]
130 f = w[0]
131 # bs will either be the index of the item or the insert point
131 # bs will either be the index of the item or the insert point
132 bs = bisect.bisect(addlist, f, bs)
132 bs = bisect.bisect(addlist, f, bs)
133 if bs < len(addlist):
133 if bs < len(addlist):
134 fn = addlist[bs][:addlist[bs].index('\0')]
134 fn = addlist[bs][:addlist[bs].index('\0')]
135 else:
135 else:
136 fn = None
136 fn = None
137 if w[1] == 0:
137 if w[1] == 0:
138 l = "%s\000%s%s\n" % (f, hex(map[f]),
138 l = "%s\000%s%s\n" % (f, hex(map[f]),
139 flags[f] and "x" or '')
139 flags[f] and "x" or '')
140 else:
140 else:
141 l = None
141 l = None
142 start = bs
142 start = bs
143 if fn != f:
143 if fn != f:
144 # item not found, insert a new one
144 # item not found, insert a new one
145 end = bs
145 end = bs
146 if w[1] == 1:
146 if w[1] == 1:
147 raise AssertionError(
147 raise AssertionError(
148 "failed to remove %s from manifest\n" % f)
148 _("failed to remove %s from manifest\n") % f)
149 else:
149 else:
150 # item is found, replace/delete the existing line
150 # item is found, replace/delete the existing line
151 end = bs + 1
151 end = bs + 1
152 delta.append([start, end, offsets[start], offsets[end], l])
152 delta.append([start, end, offsets[start], offsets[end], l])
153
153
154 self.addlist = addlistdelta(addlist, delta)
154 self.addlist = addlistdelta(addlist, delta)
155 if self.mapcache[0] == self.tip():
155 if self.mapcache[0] == self.tip():
156 cachedelta = "".join(gendelta(delta))
156 cachedelta = "".join(gendelta(delta))
157 else:
157 else:
158 cachedelta = None
158 cachedelta = None
159
159
160 text = "".join(self.addlist)
160 text = "".join(self.addlist)
161 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
161 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
162 raise AssertionError("manifest delta failure\n")
162 raise AssertionError(_("manifest delta failure\n"))
163 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
163 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
164 self.mapcache = (n, map, flags)
164 self.mapcache = (n, map, flags)
165 self.listcache = (text, self.addlist)
165 self.listcache = (text, self.addlist)
166 self.addlist = None
166 self.addlist = None
167
167
168 return n
168 return n
@@ -1,675 +1,675 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return text
34 if not text: return text
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return text
36 if text[0] == '\0': return text
37 return 'u' + text
37 return 'u' + text
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return text
40 if text[0] == '\0': return text
41 return 'u' + text
41 return 'u' + text
42 return bin
42 return bin
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError("unknown compression type %s" % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser:
55 class lazyparser:
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def load(self, pos=None):
74 def load(self, pos=None):
75 if self.all: return
75 if self.all: return
76 if pos is not None:
76 if pos is not None:
77 block = pos / 1000
77 block = pos / 1000
78 i = block * 1000
78 i = block * 1000
79 end = min(self.l, i + 1000)
79 end = min(self.l, i + 1000)
80 else:
80 else:
81 self.all = 1
81 self.all = 1
82 i = 0
82 i = 0
83 end = self.l
83 end = self.l
84 self.revlog.index = self.index
84 self.revlog.index = self.index
85 self.revlog.nodemap = self.map
85 self.revlog.nodemap = self.map
86
86
87 while i < end:
87 while i < end:
88 d = self.data[i * self.s: (i + 1) * self.s]
88 d = self.data[i * self.s: (i + 1) * self.s]
89 e = struct.unpack(indexformat, d)
89 e = struct.unpack(indexformat, d)
90 self.index[i] = e
90 self.index[i] = e
91 self.map[e[6]] = i
91 self.map[e[6]] = i
92 i += 1
92 i += 1
93
93
94 class lazyindex:
94 class lazyindex:
95 """a lazy version of the index array"""
95 """a lazy version of the index array"""
96 def __init__(self, parser):
96 def __init__(self, parser):
97 self.p = parser
97 self.p = parser
98 def __len__(self):
98 def __len__(self):
99 return len(self.p.index)
99 return len(self.p.index)
100 def load(self, pos):
100 def load(self, pos):
101 self.p.load(pos)
101 self.p.load(pos)
102 return self.p.index[pos]
102 return self.p.index[pos]
103 def __getitem__(self, pos):
103 def __getitem__(self, pos):
104 return self.p.index[pos] or self.load(pos)
104 return self.p.index[pos] or self.load(pos)
105 def append(self, e):
105 def append(self, e):
106 self.p.index.append(e)
106 self.p.index.append(e)
107
107
108 class lazymap:
108 class lazymap:
109 """a lazy version of the node map"""
109 """a lazy version of the node map"""
110 def __init__(self, parser):
110 def __init__(self, parser):
111 self.p = parser
111 self.p = parser
112 def load(self, key):
112 def load(self, key):
113 if self.p.all: return
113 if self.p.all: return
114 n = self.p.data.find(key)
114 n = self.p.data.find(key)
115 if n < 0:
115 if n < 0:
116 raise KeyError(key)
116 raise KeyError(key)
117 pos = n / self.p.s
117 pos = n / self.p.s
118 self.p.load(pos)
118 self.p.load(pos)
119 def __contains__(self, key):
119 def __contains__(self, key):
120 self.p.load()
120 self.p.load()
121 return key in self.p.map
121 return key in self.p.map
122 def __iter__(self):
122 def __iter__(self):
123 yield nullid
123 yield nullid
124 for i in xrange(self.p.l):
124 for i in xrange(self.p.l):
125 try:
125 try:
126 yield self.p.index[i][6]
126 yield self.p.index[i][6]
127 except:
127 except:
128 self.p.load(i)
128 self.p.load(i)
129 yield self.p.index[i][6]
129 yield self.p.index[i][6]
130 def __getitem__(self, key):
130 def __getitem__(self, key):
131 try:
131 try:
132 return self.p.map[key]
132 return self.p.map[key]
133 except KeyError:
133 except KeyError:
134 try:
134 try:
135 self.load(key)
135 self.load(key)
136 return self.p.map[key]
136 return self.p.map[key]
137 except KeyError:
137 except KeyError:
138 raise KeyError("node " + hex(key))
138 raise KeyError("node " + hex(key))
139 def __setitem__(self, key, val):
139 def __setitem__(self, key, val):
140 self.p.map[key] = val
140 self.p.map[key] = val
141
141
142 class RevlogError(Exception): pass
142 class RevlogError(Exception): pass
143
143
144 class revlog:
144 class revlog:
145 """
145 """
146 the underlying revision storage object
146 the underlying revision storage object
147
147
148 A revlog consists of two parts, an index and the revision data.
148 A revlog consists of two parts, an index and the revision data.
149
149
150 The index is a file with a fixed record size containing
150 The index is a file with a fixed record size containing
151 information on each revision, includings its nodeid (hash), the
151 information on each revision, includings its nodeid (hash), the
152 nodeids of its parents, the position and offset of its data within
152 nodeids of its parents, the position and offset of its data within
153 the data file, and the revision it's based on. Finally, each entry
153 the data file, and the revision it's based on. Finally, each entry
154 contains a linkrev entry that can serve as a pointer to external
154 contains a linkrev entry that can serve as a pointer to external
155 data.
155 data.
156
156
157 The revision data itself is a linear collection of data chunks.
157 The revision data itself is a linear collection of data chunks.
158 Each chunk represents a revision and is usually represented as a
158 Each chunk represents a revision and is usually represented as a
159 delta against the previous chunk. To bound lookup time, runs of
159 delta against the previous chunk. To bound lookup time, runs of
160 deltas are limited to about 2 times the length of the original
160 deltas are limited to about 2 times the length of the original
161 version data. This makes retrieval of a version proportional to
161 version data. This makes retrieval of a version proportional to
162 its size, or O(1) relative to the number of revisions.
162 its size, or O(1) relative to the number of revisions.
163
163
164 Both pieces of the revlog are written to in an append-only
164 Both pieces of the revlog are written to in an append-only
165 fashion, which means we never need to rewrite a file to insert or
165 fashion, which means we never need to rewrite a file to insert or
166 remove data, and can use some simple techniques to avoid the need
166 remove data, and can use some simple techniques to avoid the need
167 for locking while reading.
167 for locking while reading.
168 """
168 """
169 def __init__(self, opener, indexfile, datafile):
169 def __init__(self, opener, indexfile, datafile):
170 """
170 """
171 create a revlog object
171 create a revlog object
172
172
173 opener is a function that abstracts the file opening operation
173 opener is a function that abstracts the file opening operation
174 and can be used to implement COW semantics or the like.
174 and can be used to implement COW semantics or the like.
175 """
175 """
176 self.indexfile = indexfile
176 self.indexfile = indexfile
177 self.datafile = datafile
177 self.datafile = datafile
178 self.opener = opener
178 self.opener = opener
179 self.cache = None
179 self.cache = None
180
180
181 try:
181 try:
182 i = self.opener(self.indexfile).read()
182 i = self.opener(self.indexfile).read()
183 except IOError, inst:
183 except IOError, inst:
184 if inst.errno != errno.ENOENT:
184 if inst.errno != errno.ENOENT:
185 raise
185 raise
186 i = ""
186 i = ""
187
187
188 if len(i) > 10000:
188 if len(i) > 10000:
189 # big index, let's parse it on demand
189 # big index, let's parse it on demand
190 parser = lazyparser(i, self)
190 parser = lazyparser(i, self)
191 self.index = lazyindex(parser)
191 self.index = lazyindex(parser)
192 self.nodemap = lazymap(parser)
192 self.nodemap = lazymap(parser)
193 else:
193 else:
194 s = struct.calcsize(indexformat)
194 s = struct.calcsize(indexformat)
195 l = len(i) / s
195 l = len(i) / s
196 self.index = [None] * l
196 self.index = [None] * l
197 m = [None] * l
197 m = [None] * l
198
198
199 n = 0
199 n = 0
200 for f in xrange(0, len(i), s):
200 for f in xrange(0, len(i), s):
201 # offset, size, base, linkrev, p1, p2, nodeid
201 # offset, size, base, linkrev, p1, p2, nodeid
202 e = struct.unpack(indexformat, i[f:f + s])
202 e = struct.unpack(indexformat, i[f:f + s])
203 m[n] = (e[6], n)
203 m[n] = (e[6], n)
204 self.index[n] = e
204 self.index[n] = e
205 n += 1
205 n += 1
206
206
207 self.nodemap = dict(m)
207 self.nodemap = dict(m)
208 self.nodemap[nullid] = -1
208 self.nodemap[nullid] = -1
209
209
210 def tip(self): return self.node(len(self.index) - 1)
210 def tip(self): return self.node(len(self.index) - 1)
211 def count(self): return len(self.index)
211 def count(self): return len(self.index)
212 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
212 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
213 def rev(self, node):
213 def rev(self, node):
214 try:
214 try:
215 return self.nodemap[node]
215 return self.nodemap[node]
216 except KeyError:
216 except KeyError:
217 raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
217 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
218 def linkrev(self, node): return self.index[self.rev(node)][3]
218 def linkrev(self, node): return self.index[self.rev(node)][3]
219 def parents(self, node):
219 def parents(self, node):
220 if node == nullid: return (nullid, nullid)
220 if node == nullid: return (nullid, nullid)
221 return self.index[self.rev(node)][4:6]
221 return self.index[self.rev(node)][4:6]
222
222
223 def start(self, rev): return self.index[rev][0]
223 def start(self, rev): return self.index[rev][0]
224 def length(self, rev): return self.index[rev][1]
224 def length(self, rev): return self.index[rev][1]
225 def end(self, rev): return self.start(rev) + self.length(rev)
225 def end(self, rev): return self.start(rev) + self.length(rev)
226 def base(self, rev): return self.index[rev][2]
226 def base(self, rev): return self.index[rev][2]
227
227
228 def reachable(self, rev, stop=None):
228 def reachable(self, rev, stop=None):
229 reachable = {}
229 reachable = {}
230 visit = [rev]
230 visit = [rev]
231 reachable[rev] = 1
231 reachable[rev] = 1
232 if stop:
232 if stop:
233 stopn = self.rev(stop)
233 stopn = self.rev(stop)
234 else:
234 else:
235 stopn = 0
235 stopn = 0
236 while visit:
236 while visit:
237 n = visit.pop(0)
237 n = visit.pop(0)
238 if n == stop:
238 if n == stop:
239 continue
239 continue
240 if n == nullid:
240 if n == nullid:
241 continue
241 continue
242 for p in self.parents(n):
242 for p in self.parents(n):
243 if self.rev(p) < stopn:
243 if self.rev(p) < stopn:
244 continue
244 continue
245 if p not in reachable:
245 if p not in reachable:
246 reachable[p] = 1
246 reachable[p] = 1
247 visit.append(p)
247 visit.append(p)
248 return reachable
248 return reachable
249
249
250 def heads(self, stop=None):
250 def heads(self, stop=None):
251 """return the list of all nodes that have no children"""
251 """return the list of all nodes that have no children"""
252 p = {}
252 p = {}
253 h = []
253 h = []
254 stoprev = 0
254 stoprev = 0
255 if stop and stop in self.nodemap:
255 if stop and stop in self.nodemap:
256 stoprev = self.rev(stop)
256 stoprev = self.rev(stop)
257
257
258 for r in range(self.count() - 1, -1, -1):
258 for r in range(self.count() - 1, -1, -1):
259 n = self.node(r)
259 n = self.node(r)
260 if n not in p:
260 if n not in p:
261 h.append(n)
261 h.append(n)
262 if n == stop:
262 if n == stop:
263 break
263 break
264 if r < stoprev:
264 if r < stoprev:
265 break
265 break
266 for pn in self.parents(n):
266 for pn in self.parents(n):
267 p[pn] = 1
267 p[pn] = 1
268 return h
268 return h
269
269
270 def children(self, node):
270 def children(self, node):
271 """find the children of a given node"""
271 """find the children of a given node"""
272 c = []
272 c = []
273 p = self.rev(node)
273 p = self.rev(node)
274 for r in range(p + 1, self.count()):
274 for r in range(p + 1, self.count()):
275 n = self.node(r)
275 n = self.node(r)
276 for pn in self.parents(n):
276 for pn in self.parents(n):
277 if pn == node:
277 if pn == node:
278 c.append(n)
278 c.append(n)
279 continue
279 continue
280 elif pn == nullid:
280 elif pn == nullid:
281 continue
281 continue
282 return c
282 return c
283
283
284 def lookup(self, id):
284 def lookup(self, id):
285 """locate a node based on revision number or subset of hex nodeid"""
285 """locate a node based on revision number or subset of hex nodeid"""
286 try:
286 try:
287 rev = int(id)
287 rev = int(id)
288 if str(rev) != id: raise ValueError
288 if str(rev) != id: raise ValueError
289 if rev < 0: rev = self.count() + rev
289 if rev < 0: rev = self.count() + rev
290 if rev < 0 or rev >= self.count(): raise ValueError
290 if rev < 0 or rev >= self.count(): raise ValueError
291 return self.node(rev)
291 return self.node(rev)
292 except (ValueError, OverflowError):
292 except (ValueError, OverflowError):
293 c = []
293 c = []
294 for n in self.nodemap:
294 for n in self.nodemap:
295 if hex(n).startswith(id):
295 if hex(n).startswith(id):
296 c.append(n)
296 c.append(n)
297 if len(c) > 1: raise RevlogError("Ambiguous identifier")
297 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
298 if len(c) < 1: raise RevlogError("No match found")
298 if len(c) < 1: raise RevlogError(_("No match found"))
299 return c[0]
299 return c[0]
300
300
301 return None
301 return None
302
302
303 def diff(self, a, b):
303 def diff(self, a, b):
304 """return a delta between two revisions"""
304 """return a delta between two revisions"""
305 return mdiff.textdiff(a, b)
305 return mdiff.textdiff(a, b)
306
306
307 def patches(self, t, pl):
307 def patches(self, t, pl):
308 """apply a list of patches to a string"""
308 """apply a list of patches to a string"""
309 return mdiff.patches(t, pl)
309 return mdiff.patches(t, pl)
310
310
311 def delta(self, node):
311 def delta(self, node):
312 """return or calculate a delta between a node and its predecessor"""
312 """return or calculate a delta between a node and its predecessor"""
313 r = self.rev(node)
313 r = self.rev(node)
314 b = self.base(r)
314 b = self.base(r)
315 if r == b:
315 if r == b:
316 return self.diff(self.revision(self.node(r - 1)),
316 return self.diff(self.revision(self.node(r - 1)),
317 self.revision(node))
317 self.revision(node))
318 else:
318 else:
319 f = self.opener(self.datafile)
319 f = self.opener(self.datafile)
320 f.seek(self.start(r))
320 f.seek(self.start(r))
321 data = f.read(self.length(r))
321 data = f.read(self.length(r))
322 return decompress(data)
322 return decompress(data)
323
323
324 def revision(self, node):
324 def revision(self, node):
325 """return an uncompressed revision of a given"""
325 """return an uncompressed revision of a given"""
326 if node == nullid: return ""
326 if node == nullid: return ""
327 if self.cache and self.cache[0] == node: return self.cache[2]
327 if self.cache and self.cache[0] == node: return self.cache[2]
328
328
329 # look up what we need to read
329 # look up what we need to read
330 text = None
330 text = None
331 rev = self.rev(node)
331 rev = self.rev(node)
332 start, length, base, link, p1, p2, node = self.index[rev]
332 start, length, base, link, p1, p2, node = self.index[rev]
333 end = start + length
333 end = start + length
334 if base != rev: start = self.start(base)
334 if base != rev: start = self.start(base)
335
335
336 # do we have useful data cached?
336 # do we have useful data cached?
337 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
337 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
338 base = self.cache[1]
338 base = self.cache[1]
339 start = self.start(base + 1)
339 start = self.start(base + 1)
340 text = self.cache[2]
340 text = self.cache[2]
341 last = 0
341 last = 0
342
342
343 f = self.opener(self.datafile)
343 f = self.opener(self.datafile)
344 f.seek(start)
344 f.seek(start)
345 data = f.read(end - start)
345 data = f.read(end - start)
346
346
347 if text is None:
347 if text is None:
348 last = self.length(base)
348 last = self.length(base)
349 text = decompress(data[:last])
349 text = decompress(data[:last])
350
350
351 bins = []
351 bins = []
352 for r in xrange(base + 1, rev + 1):
352 for r in xrange(base + 1, rev + 1):
353 s = self.length(r)
353 s = self.length(r)
354 bins.append(decompress(data[last:last + s]))
354 bins.append(decompress(data[last:last + s]))
355 last = last + s
355 last = last + s
356
356
357 text = mdiff.patches(text, bins)
357 text = mdiff.patches(text, bins)
358
358
359 if node != hash(text, p1, p2):
359 if node != hash(text, p1, p2):
360 raise RevlogError("integrity check failed on %s:%d"
360 raise RevlogError(_("integrity check failed on %s:%d")
361 % (self.datafile, rev))
361 % (self.datafile, rev))
362
362
363 self.cache = (node, rev, text)
363 self.cache = (node, rev, text)
364 return text
364 return text
365
365
366 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
366 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
367 """add a revision to the log
367 """add a revision to the log
368
368
369 text - the revision data to add
369 text - the revision data to add
370 transaction - the transaction object used for rollback
370 transaction - the transaction object used for rollback
371 link - the linkrev data to add
371 link - the linkrev data to add
372 p1, p2 - the parent nodeids of the revision
372 p1, p2 - the parent nodeids of the revision
373 d - an optional precomputed delta
373 d - an optional precomputed delta
374 """
374 """
375 if text is None: text = ""
375 if text is None: text = ""
376 if p1 is None: p1 = self.tip()
376 if p1 is None: p1 = self.tip()
377 if p2 is None: p2 = nullid
377 if p2 is None: p2 = nullid
378
378
379 node = hash(text, p1, p2)
379 node = hash(text, p1, p2)
380
380
381 if node in self.nodemap:
381 if node in self.nodemap:
382 return node
382 return node
383
383
384 n = self.count()
384 n = self.count()
385 t = n - 1
385 t = n - 1
386
386
387 if n:
387 if n:
388 base = self.base(t)
388 base = self.base(t)
389 start = self.start(base)
389 start = self.start(base)
390 end = self.end(t)
390 end = self.end(t)
391 if not d:
391 if not d:
392 prev = self.revision(self.tip())
392 prev = self.revision(self.tip())
393 d = self.diff(prev, text)
393 d = self.diff(prev, text)
394 data = compress(d)
394 data = compress(d)
395 dist = end - start + len(data)
395 dist = end - start + len(data)
396
396
397 # full versions are inserted when the needed deltas
397 # full versions are inserted when the needed deltas
398 # become comparable to the uncompressed text
398 # become comparable to the uncompressed text
399 if not n or dist > len(text) * 2:
399 if not n or dist > len(text) * 2:
400 data = compress(text)
400 data = compress(text)
401 base = n
401 base = n
402 else:
402 else:
403 base = self.base(t)
403 base = self.base(t)
404
404
405 offset = 0
405 offset = 0
406 if t >= 0:
406 if t >= 0:
407 offset = self.end(t)
407 offset = self.end(t)
408
408
409 e = (offset, len(data), base, link, p1, p2, node)
409 e = (offset, len(data), base, link, p1, p2, node)
410
410
411 self.index.append(e)
411 self.index.append(e)
412 self.nodemap[node] = n
412 self.nodemap[node] = n
413 entry = struct.pack(indexformat, *e)
413 entry = struct.pack(indexformat, *e)
414
414
415 transaction.add(self.datafile, e[0])
415 transaction.add(self.datafile, e[0])
416 self.opener(self.datafile, "a").write(data)
416 self.opener(self.datafile, "a").write(data)
417 transaction.add(self.indexfile, n * len(entry))
417 transaction.add(self.indexfile, n * len(entry))
418 self.opener(self.indexfile, "a").write(entry)
418 self.opener(self.indexfile, "a").write(entry)
419
419
420 self.cache = (node, n, text)
420 self.cache = (node, n, text)
421 return node
421 return node
422
422
423 def ancestor(self, a, b):
423 def ancestor(self, a, b):
424 """calculate the least common ancestor of nodes a and b"""
424 """calculate the least common ancestor of nodes a and b"""
425 # calculate the distance of every node from root
425 # calculate the distance of every node from root
426 dist = {nullid: 0}
426 dist = {nullid: 0}
427 for i in xrange(self.count()):
427 for i in xrange(self.count()):
428 n = self.node(i)
428 n = self.node(i)
429 p1, p2 = self.parents(n)
429 p1, p2 = self.parents(n)
430 dist[n] = max(dist[p1], dist[p2]) + 1
430 dist[n] = max(dist[p1], dist[p2]) + 1
431
431
432 # traverse ancestors in order of decreasing distance from root
432 # traverse ancestors in order of decreasing distance from root
433 def ancestors(node):
433 def ancestors(node):
434 # we store negative distances because heap returns smallest member
434 # we store negative distances because heap returns smallest member
435 h = [(-dist[node], node)]
435 h = [(-dist[node], node)]
436 seen = {}
436 seen = {}
437 earliest = self.count()
437 earliest = self.count()
438 while h:
438 while h:
439 d, n = heapq.heappop(h)
439 d, n = heapq.heappop(h)
440 if n not in seen:
440 if n not in seen:
441 seen[n] = 1
441 seen[n] = 1
442 r = self.rev(n)
442 r = self.rev(n)
443 yield (-d, n)
443 yield (-d, n)
444 for p in self.parents(n):
444 for p in self.parents(n):
445 heapq.heappush(h, (-dist[p], p))
445 heapq.heappush(h, (-dist[p], p))
446
446
447 def generations(node):
447 def generations(node):
448 sg, s = None, {}
448 sg, s = None, {}
449 for g,n in ancestors(node):
449 for g,n in ancestors(node):
450 if g != sg:
450 if g != sg:
451 if sg:
451 if sg:
452 yield sg, s
452 yield sg, s
453 sg, s = g, {n:1}
453 sg, s = g, {n:1}
454 else:
454 else:
455 s[n] = 1
455 s[n] = 1
456 yield sg, s
456 yield sg, s
457
457
458 x = generations(a)
458 x = generations(a)
459 y = generations(b)
459 y = generations(b)
460 gx = x.next()
460 gx = x.next()
461 gy = y.next()
461 gy = y.next()
462
462
463 # increment each ancestor list until it is closer to root than
463 # increment each ancestor list until it is closer to root than
464 # the other, or they match
464 # the other, or they match
465 while 1:
465 while 1:
466 #print "ancestor gen %s %s" % (gx[0], gy[0])
466 #print "ancestor gen %s %s" % (gx[0], gy[0])
467 if gx[0] == gy[0]:
467 if gx[0] == gy[0]:
468 # find the intersection
468 # find the intersection
469 i = [ n for n in gx[1] if n in gy[1] ]
469 i = [ n for n in gx[1] if n in gy[1] ]
470 if i:
470 if i:
471 return i[0]
471 return i[0]
472 else:
472 else:
473 #print "next"
473 #print "next"
474 gy = y.next()
474 gy = y.next()
475 gx = x.next()
475 gx = x.next()
476 elif gx[0] < gy[0]:
476 elif gx[0] < gy[0]:
477 #print "next y"
477 #print "next y"
478 gy = y.next()
478 gy = y.next()
479 else:
479 else:
480 #print "next x"
480 #print "next x"
481 gx = x.next()
481 gx = x.next()
482
482
483 def group(self, linkmap):
483 def group(self, linkmap):
484 """calculate a delta group
484 """calculate a delta group
485
485
486 Given a list of changeset revs, return a set of deltas and
486 Given a list of changeset revs, return a set of deltas and
487 metadata corresponding to nodes. the first delta is
487 metadata corresponding to nodes. the first delta is
488 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
488 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
489 have this parent as it has all history before these
489 have this parent as it has all history before these
490 changesets. parent is parent[0]
490 changesets. parent is parent[0]
491 """
491 """
492 revs = []
492 revs = []
493 needed = {}
493 needed = {}
494
494
495 # find file nodes/revs that match changeset revs
495 # find file nodes/revs that match changeset revs
496 for i in xrange(0, self.count()):
496 for i in xrange(0, self.count()):
497 if self.index[i][3] in linkmap:
497 if self.index[i][3] in linkmap:
498 revs.append(i)
498 revs.append(i)
499 needed[i] = 1
499 needed[i] = 1
500
500
501 # if we don't have any revisions touched by these changesets, bail
501 # if we don't have any revisions touched by these changesets, bail
502 if not revs:
502 if not revs:
503 yield struct.pack(">l", 0)
503 yield struct.pack(">l", 0)
504 return
504 return
505
505
506 # add the parent of the first rev
506 # add the parent of the first rev
507 p = self.parents(self.node(revs[0]))[0]
507 p = self.parents(self.node(revs[0]))[0]
508 revs.insert(0, self.rev(p))
508 revs.insert(0, self.rev(p))
509
509
510 # for each delta that isn't contiguous in the log, we need to
510 # for each delta that isn't contiguous in the log, we need to
511 # reconstruct the base, reconstruct the result, and then
511 # reconstruct the base, reconstruct the result, and then
512 # calculate the delta. We also need to do this where we've
512 # calculate the delta. We also need to do this where we've
513 # stored a full version and not a delta
513 # stored a full version and not a delta
514 for i in xrange(0, len(revs) - 1):
514 for i in xrange(0, len(revs) - 1):
515 a, b = revs[i], revs[i + 1]
515 a, b = revs[i], revs[i + 1]
516 if a + 1 != b or self.base(b) == b:
516 if a + 1 != b or self.base(b) == b:
517 for j in xrange(self.base(a), a + 1):
517 for j in xrange(self.base(a), a + 1):
518 needed[j] = 1
518 needed[j] = 1
519 for j in xrange(self.base(b), b + 1):
519 for j in xrange(self.base(b), b + 1):
520 needed[j] = 1
520 needed[j] = 1
521
521
522 # calculate spans to retrieve from datafile
522 # calculate spans to retrieve from datafile
523 needed = needed.keys()
523 needed = needed.keys()
524 needed.sort()
524 needed.sort()
525 spans = []
525 spans = []
526 oo = -1
526 oo = -1
527 ol = 0
527 ol = 0
528 for n in needed:
528 for n in needed:
529 if n < 0: continue
529 if n < 0: continue
530 o = self.start(n)
530 o = self.start(n)
531 l = self.length(n)
531 l = self.length(n)
532 if oo + ol == o: # can we merge with the previous?
532 if oo + ol == o: # can we merge with the previous?
533 nl = spans[-1][2]
533 nl = spans[-1][2]
534 nl.append((n, l))
534 nl.append((n, l))
535 ol += l
535 ol += l
536 spans[-1] = (oo, ol, nl)
536 spans[-1] = (oo, ol, nl)
537 else:
537 else:
538 oo = o
538 oo = o
539 ol = l
539 ol = l
540 spans.append((oo, ol, [(n, l)]))
540 spans.append((oo, ol, [(n, l)]))
541
541
542 # read spans in, divide up chunks
542 # read spans in, divide up chunks
543 chunks = {}
543 chunks = {}
544 for span in spans:
544 for span in spans:
545 # we reopen the file for each span to make http happy for now
545 # we reopen the file for each span to make http happy for now
546 f = self.opener(self.datafile)
546 f = self.opener(self.datafile)
547 f.seek(span[0])
547 f.seek(span[0])
548 data = f.read(span[1])
548 data = f.read(span[1])
549
549
550 # divide up the span
550 # divide up the span
551 pos = 0
551 pos = 0
552 for r, l in span[2]:
552 for r, l in span[2]:
553 chunks[r] = decompress(data[pos: pos + l])
553 chunks[r] = decompress(data[pos: pos + l])
554 pos += l
554 pos += l
555
555
556 # helper to reconstruct intermediate versions
556 # helper to reconstruct intermediate versions
557 def construct(text, base, rev):
557 def construct(text, base, rev):
558 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
558 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
559 return mdiff.patches(text, bins)
559 return mdiff.patches(text, bins)
560
560
561 # build deltas
561 # build deltas
562 deltas = []
562 deltas = []
563 for d in xrange(0, len(revs) - 1):
563 for d in xrange(0, len(revs) - 1):
564 a, b = revs[d], revs[d + 1]
564 a, b = revs[d], revs[d + 1]
565 n = self.node(b)
565 n = self.node(b)
566
566
567 # do we need to construct a new delta?
567 # do we need to construct a new delta?
568 if a + 1 != b or self.base(b) == b:
568 if a + 1 != b or self.base(b) == b:
569 if a >= 0:
569 if a >= 0:
570 base = self.base(a)
570 base = self.base(a)
571 ta = chunks[self.base(a)]
571 ta = chunks[self.base(a)]
572 ta = construct(ta, base, a)
572 ta = construct(ta, base, a)
573 else:
573 else:
574 ta = ""
574 ta = ""
575
575
576 base = self.base(b)
576 base = self.base(b)
577 if a > base:
577 if a > base:
578 base = a
578 base = a
579 tb = ta
579 tb = ta
580 else:
580 else:
581 tb = chunks[self.base(b)]
581 tb = chunks[self.base(b)]
582 tb = construct(tb, base, b)
582 tb = construct(tb, base, b)
583 d = self.diff(ta, tb)
583 d = self.diff(ta, tb)
584 else:
584 else:
585 d = chunks[b]
585 d = chunks[b]
586
586
587 p = self.parents(n)
587 p = self.parents(n)
588 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
588 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
589 l = struct.pack(">l", len(meta) + len(d) + 4)
589 l = struct.pack(">l", len(meta) + len(d) + 4)
590 yield l
590 yield l
591 yield meta
591 yield meta
592 yield d
592 yield d
593
593
594 yield struct.pack(">l", 0)
594 yield struct.pack(">l", 0)
595
595
596 def addgroup(self, revs, linkmapper, transaction, unique=0):
596 def addgroup(self, revs, linkmapper, transaction, unique=0):
597 """
597 """
598 add a delta group
598 add a delta group
599
599
600 given a set of deltas, add them to the revision log. the
600 given a set of deltas, add them to the revision log. the
601 first delta is against its parent, which should be in our
601 first delta is against its parent, which should be in our
602 log, the rest are against the previous delta.
602 log, the rest are against the previous delta.
603 """
603 """
604
604
605 #track the base of the current delta log
605 #track the base of the current delta log
606 r = self.count()
606 r = self.count()
607 t = r - 1
607 t = r - 1
608 node = nullid
608 node = nullid
609
609
610 base = prev = -1
610 base = prev = -1
611 start = end = measure = 0
611 start = end = measure = 0
612 if r:
612 if r:
613 start = self.start(self.base(t))
613 start = self.start(self.base(t))
614 end = self.end(t)
614 end = self.end(t)
615 measure = self.length(self.base(t))
615 measure = self.length(self.base(t))
616 base = self.base(t)
616 base = self.base(t)
617 prev = self.tip()
617 prev = self.tip()
618
618
619 transaction.add(self.datafile, end)
619 transaction.add(self.datafile, end)
620 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
620 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
621 dfh = self.opener(self.datafile, "a")
621 dfh = self.opener(self.datafile, "a")
622 ifh = self.opener(self.indexfile, "a")
622 ifh = self.opener(self.indexfile, "a")
623
623
624 # loop through our set of deltas
624 # loop through our set of deltas
625 chain = None
625 chain = None
626 for chunk in revs:
626 for chunk in revs:
627 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
627 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
628 link = linkmapper(cs)
628 link = linkmapper(cs)
629 if node in self.nodemap:
629 if node in self.nodemap:
630 # this can happen if two branches make the same change
630 # this can happen if two branches make the same change
631 # if unique:
631 # if unique:
632 # raise RevlogError("already have %s" % hex(node[:4]))
632 # raise RevlogError(_("already have %s") % hex(node[:4]))
633 chain = node
633 chain = node
634 continue
634 continue
635 delta = chunk[80:]
635 delta = chunk[80:]
636
636
637 if not chain:
637 if not chain:
638 # retrieve the parent revision of the delta chain
638 # retrieve the parent revision of the delta chain
639 chain = p1
639 chain = p1
640 if not chain in self.nodemap:
640 if not chain in self.nodemap:
641 raise RevlogError("unknown base %s" % short(chain[:4]))
641 raise RevlogError(_("unknown base %s") % short(chain[:4]))
642
642
643 # full versions are inserted when the needed deltas become
643 # full versions are inserted when the needed deltas become
644 # comparable to the uncompressed text or when the previous
644 # comparable to the uncompressed text or when the previous
645 # version is not the one we have a delta against. We use
645 # version is not the one we have a delta against. We use
646 # the size of the previous full rev as a proxy for the
646 # the size of the previous full rev as a proxy for the
647 # current size.
647 # current size.
648
648
649 if chain == prev:
649 if chain == prev:
650 cdelta = compress(delta)
650 cdelta = compress(delta)
651
651
652 if chain != prev or (end - start + len(cdelta)) > measure * 2:
652 if chain != prev or (end - start + len(cdelta)) > measure * 2:
653 # flush our writes here so we can read it in revision
653 # flush our writes here so we can read it in revision
654 dfh.flush()
654 dfh.flush()
655 ifh.flush()
655 ifh.flush()
656 text = self.revision(chain)
656 text = self.revision(chain)
657 text = self.patches(text, [delta])
657 text = self.patches(text, [delta])
658 chk = self.addrevision(text, transaction, link, p1, p2)
658 chk = self.addrevision(text, transaction, link, p1, p2)
659 if chk != node:
659 if chk != node:
660 raise RevlogError("consistency error adding group")
660 raise RevlogError(_("consistency error adding group"))
661 measure = len(text)
661 measure = len(text)
662 else:
662 else:
663 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
663 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
664 self.index.append(e)
664 self.index.append(e)
665 self.nodemap[node] = r
665 self.nodemap[node] = r
666 dfh.write(cdelta)
666 dfh.write(cdelta)
667 ifh.write(struct.pack(indexformat, *e))
667 ifh.write(struct.pack(indexformat, *e))
668
668
669 t, r, chain, prev = r, r + 1, node, node
669 t, r, chain, prev = r, r + 1, node, node
670 start = self.start(self.base(t))
670 start = self.start(self.base(t))
671 end = self.end(t)
671 end = self.end(t)
672
672
673 dfh.close()
673 dfh.close()
674 ifh.close()
674 ifh.close()
675 return node
675 return node
@@ -1,133 +1,133 b''
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from remoterepo import *
9 from remoterepo import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "hg os re stat")
12 demandload(globals(), "hg os re stat")
13
13
14 class sshrepository(remoterepository):
14 class sshrepository(remoterepository):
15 def __init__(self, ui, path):
15 def __init__(self, ui, path):
16 self.url = path
16 self.url = path
17 self.ui = ui
17 self.ui = ui
18
18
19 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
19 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
20 if not m:
20 if not m:
21 raise hg.RepoError("couldn't parse destination %s" % path)
21 raise hg.RepoError(_("couldn't parse destination %s") % path)
22
22
23 self.user = m.group(2)
23 self.user = m.group(2)
24 self.host = m.group(3)
24 self.host = m.group(3)
25 self.port = m.group(5)
25 self.port = m.group(5)
26 self.path = m.group(7) or "."
26 self.path = m.group(7) or "."
27
27
28 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
28 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
29 args = self.port and ("%s -p %s") % (args, self.port) or args
29 args = self.port and ("%s -p %s") % (args, self.port) or args
30
30
31 sshcmd = self.ui.config("ui", "ssh", "ssh")
31 sshcmd = self.ui.config("ui", "ssh", "ssh")
32 remotecmd = self.ui.config("ui", "remotecmd", "hg")
32 remotecmd = self.ui.config("ui", "remotecmd", "hg")
33 cmd = '%s %s "%s -R %s serve --stdio"'
33 cmd = '%s %s "%s -R %s serve --stdio"'
34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
35
35
36 ui.note('running %s\n' % cmd)
36 ui.note('running %s\n' % cmd)
37 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
37 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
38
38
39 def readerr(self):
39 def readerr(self):
40 while 1:
40 while 1:
41 size = os.fstat(self.pipee.fileno())[stat.ST_SIZE]
41 size = os.fstat(self.pipee.fileno())[stat.ST_SIZE]
42 if size == 0: break
42 if size == 0: break
43 l = self.pipee.readline()
43 l = self.pipee.readline()
44 if not l: break
44 if not l: break
45 self.ui.status("remote: ", l)
45 self.ui.status(_("remote: "), l)
46
46
47 def __del__(self):
47 def __del__(self):
48 try:
48 try:
49 self.pipeo.close()
49 self.pipeo.close()
50 self.pipei.close()
50 self.pipei.close()
51 # read the error descriptor until EOF
51 # read the error descriptor until EOF
52 for l in self.pipee:
52 for l in self.pipee:
53 self.ui.status("remote: ", l)
53 self.ui.status(_("remote: "), l)
54 self.pipee.close()
54 self.pipee.close()
55 except:
55 except:
56 pass
56 pass
57
57
58 def dev(self):
58 def dev(self):
59 return -1
59 return -1
60
60
61 def do_cmd(self, cmd, **args):
61 def do_cmd(self, cmd, **args):
62 self.ui.debug("sending %s command\n" % cmd)
62 self.ui.debug(_("sending %s command\n") % cmd)
63 self.pipeo.write("%s\n" % cmd)
63 self.pipeo.write("%s\n" % cmd)
64 for k, v in args.items():
64 for k, v in args.items():
65 self.pipeo.write("%s %d\n" % (k, len(v)))
65 self.pipeo.write("%s %d\n" % (k, len(v)))
66 self.pipeo.write(v)
66 self.pipeo.write(v)
67 self.pipeo.flush()
67 self.pipeo.flush()
68
68
69 return self.pipei
69 return self.pipei
70
70
71 def call(self, cmd, **args):
71 def call(self, cmd, **args):
72 r = self.do_cmd(cmd, **args)
72 r = self.do_cmd(cmd, **args)
73 l = r.readline()
73 l = r.readline()
74 self.readerr()
74 self.readerr()
75 try:
75 try:
76 l = int(l)
76 l = int(l)
77 except:
77 except:
78 raise hg.RepoError("unexpected response '%s'" % l)
78 raise hg.RepoError(_("unexpected response '%s'") % l)
79 return r.read(l)
79 return r.read(l)
80
80
81 def lock(self):
81 def lock(self):
82 self.call("lock")
82 self.call("lock")
83 return remotelock(self)
83 return remotelock(self)
84
84
85 def unlock(self):
85 def unlock(self):
86 self.call("unlock")
86 self.call("unlock")
87
87
88 def heads(self):
88 def heads(self):
89 d = self.call("heads")
89 d = self.call("heads")
90 try:
90 try:
91 return map(bin, d[:-1].split(" "))
91 return map(bin, d[:-1].split(" "))
92 except:
92 except:
93 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
93 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
94
94
95 def branches(self, nodes):
95 def branches(self, nodes):
96 n = " ".join(map(hex, nodes))
96 n = " ".join(map(hex, nodes))
97 d = self.call("branches", nodes=n)
97 d = self.call("branches", nodes=n)
98 try:
98 try:
99 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
99 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
100 return br
100 return br
101 except:
101 except:
102 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
102 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
103
103
104 def between(self, pairs):
104 def between(self, pairs):
105 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
105 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
106 d = self.call("between", pairs=n)
106 d = self.call("between", pairs=n)
107 try:
107 try:
108 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
108 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
109 return p
109 return p
110 except:
110 except:
111 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
111 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
112
112
113 def changegroup(self, nodes):
113 def changegroup(self, nodes):
114 n = " ".join(map(hex, nodes))
114 n = " ".join(map(hex, nodes))
115 f = self.do_cmd("changegroup", roots=n)
115 f = self.do_cmd("changegroup", roots=n)
116 return self.pipei
116 return self.pipei
117
117
118 def addchangegroup(self, cg):
118 def addchangegroup(self, cg):
119 d = self.call("addchangegroup")
119 d = self.call("addchangegroup")
120 if d:
120 if d:
121 raise hg.RepoError("push refused: %s", d)
121 raise hg.RepoError(_("push refused: %s"), d)
122
122
123 while 1:
123 while 1:
124 d = cg.read(4096)
124 d = cg.read(4096)
125 if not d: break
125 if not d: break
126 self.pipeo.write(d)
126 self.pipeo.write(d)
127 self.readerr()
127 self.readerr()
128
128
129 self.pipeo.flush()
129 self.pipeo.flush()
130
130
131 self.readerr()
131 self.readerr()
132 l = int(self.pipei.readline())
132 l = int(self.pipei.readline())
133 return self.pipei.read(l) != ""
133 return self.pipei.read(l) != ""
@@ -1,79 +1,79 b''
1 # transaction.py - simple journalling scheme for mercurial
1 # transaction.py - simple journalling scheme for mercurial
2 #
2 #
3 # This transaction scheme is intended to gracefully handle program
3 # This transaction scheme is intended to gracefully handle program
4 # errors and interruptions. More serious failures like system crashes
4 # errors and interruptions. More serious failures like system crashes
5 # can be recovered with an fsck-like tool. As the whole repository is
5 # can be recovered with an fsck-like tool. As the whole repository is
6 # effectively log-structured, this should amount to simply truncating
6 # effectively log-structured, this should amount to simply truncating
7 # anything that isn't referenced in the changelog.
7 # anything that isn't referenced in the changelog.
8 #
8 #
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
10 #
10 #
11 # This software may be used and distributed according to the terms
11 # This software may be used and distributed according to the terms
12 # of the GNU General Public License, incorporated herein by reference.
12 # of the GNU General Public License, incorporated herein by reference.
13
13
14 import os
14 import os
15 import util
15 import util
16 from i18n import gettext as _
16 from i18n import gettext as _
17
17
18 class transaction:
18 class transaction:
19 def __init__(self, report, opener, journal, after=None):
19 def __init__(self, report, opener, journal, after=None):
20 self.journal = None
20 self.journal = None
21
21
22 # abort here if the journal already exists
22 # abort here if the journal already exists
23 if os.path.exists(journal):
23 if os.path.exists(journal):
24 raise AssertionError("journal already exists - run hg recover")
24 raise AssertionError(_("journal already exists - run hg recover"))
25
25
26 self.report = report
26 self.report = report
27 self.opener = opener
27 self.opener = opener
28 self.after = after
28 self.after = after
29 self.entries = []
29 self.entries = []
30 self.map = {}
30 self.map = {}
31 self.journal = journal
31 self.journal = journal
32
32
33 self.file = open(self.journal, "w")
33 self.file = open(self.journal, "w")
34
34
35 def __del__(self):
35 def __del__(self):
36 if self.journal:
36 if self.journal:
37 if self.entries: self.abort()
37 if self.entries: self.abort()
38 self.file.close()
38 self.file.close()
39 try: os.unlink(self.journal)
39 try: os.unlink(self.journal)
40 except: pass
40 except: pass
41
41
42 def add(self, file, offset):
42 def add(self, file, offset):
43 if file in self.map: return
43 if file in self.map: return
44 self.entries.append((file, offset))
44 self.entries.append((file, offset))
45 self.map[file] = 1
45 self.map[file] = 1
46 # add enough data to the journal to do the truncate
46 # add enough data to the journal to do the truncate
47 self.file.write("%s\0%d\n" % (file, offset))
47 self.file.write("%s\0%d\n" % (file, offset))
48 self.file.flush()
48 self.file.flush()
49
49
50 def close(self):
50 def close(self):
51 self.file.close()
51 self.file.close()
52 self.entries = []
52 self.entries = []
53 if self.after:
53 if self.after:
54 self.after()
54 self.after()
55 else:
55 else:
56 os.unlink(self.journal)
56 os.unlink(self.journal)
57 self.journal = None
57 self.journal = None
58
58
59 def abort(self):
59 def abort(self):
60 if not self.entries: return
60 if not self.entries: return
61
61
62 self.report("transaction abort!\n")
62 self.report(_("transaction abort!\n"))
63
63
64 for f, o in self.entries:
64 for f, o in self.entries:
65 try:
65 try:
66 self.opener(f, "a").truncate(o)
66 self.opener(f, "a").truncate(o)
67 except:
67 except:
68 self.report("failed to truncate %s\n" % f)
68 self.report(_("failed to truncate %s\n") % f)
69
69
70 self.entries = []
70 self.entries = []
71
71
72 self.report("rollback completed\n")
72 self.report(_("rollback completed\n"))
73
73
74 def rollback(opener, file):
74 def rollback(opener, file):
75 for l in open(file).readlines():
75 for l in open(file).readlines():
76 f, o = l.split('\0')
76 f, o = l.split('\0')
77 opener(f, "a").truncate(int(o))
77 opener(f, "a").truncate(int(o))
78 os.unlink(file)
78 os.unlink(file)
79
79
@@ -1,146 +1,146 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, ConfigParser
8 import os, ConfigParser
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "re socket sys util")
11 demandload(globals(), "re socket sys util")
12
12
13 class ui:
13 class ui:
14 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True):
15 interactive=True):
16 self.overlay = {}
16 self.overlay = {}
17 self.cdata = ConfigParser.SafeConfigParser()
17 self.cdata = ConfigParser.SafeConfigParser()
18 self.cdata.read(util.rcpath)
18 self.cdata.read(util.rcpath)
19
19
20 self.quiet = self.configbool("ui", "quiet")
20 self.quiet = self.configbool("ui", "quiet")
21 self.verbose = self.configbool("ui", "verbose")
21 self.verbose = self.configbool("ui", "verbose")
22 self.debugflag = self.configbool("ui", "debug")
22 self.debugflag = self.configbool("ui", "debug")
23 self.interactive = self.configbool("ui", "interactive", True)
23 self.interactive = self.configbool("ui", "interactive", True)
24
24
25 self.updateopts(verbose, debug, quiet, interactive)
25 self.updateopts(verbose, debug, quiet, interactive)
26
26
27 def updateopts(self, verbose=False, debug=False, quiet=False,
27 def updateopts(self, verbose=False, debug=False, quiet=False,
28 interactive=True):
28 interactive=True):
29 self.quiet = (self.quiet or quiet) and not verbose and not debug
29 self.quiet = (self.quiet or quiet) and not verbose and not debug
30 self.verbose = (self.verbose or verbose) or debug
30 self.verbose = (self.verbose or verbose) or debug
31 self.debugflag = (self.debugflag or debug)
31 self.debugflag = (self.debugflag or debug)
32 self.interactive = (self.interactive and interactive)
32 self.interactive = (self.interactive and interactive)
33
33
34 def readconfig(self, fp):
34 def readconfig(self, fp):
35 self.cdata.readfp(fp)
35 self.cdata.readfp(fp)
36
36
37 def setconfig(self, section, name, val):
37 def setconfig(self, section, name, val):
38 self.overlay[(section, name)] = val
38 self.overlay[(section, name)] = val
39
39
40 def config(self, section, name, default=None):
40 def config(self, section, name, default=None):
41 if self.overlay.has_key((section, name)):
41 if self.overlay.has_key((section, name)):
42 return self.overlay[(section, name)]
42 return self.overlay[(section, name)]
43 if self.cdata.has_option(section, name):
43 if self.cdata.has_option(section, name):
44 return self.cdata.get(section, name)
44 return self.cdata.get(section, name)
45 return default
45 return default
46
46
47 def configbool(self, section, name, default=False):
47 def configbool(self, section, name, default=False):
48 if self.overlay.has_key((section, name)):
48 if self.overlay.has_key((section, name)):
49 return self.overlay[(section, name)]
49 return self.overlay[(section, name)]
50 if self.cdata.has_option(section, name):
50 if self.cdata.has_option(section, name):
51 return self.cdata.getboolean(section, name)
51 return self.cdata.getboolean(section, name)
52 return default
52 return default
53
53
54 def configitems(self, section):
54 def configitems(self, section):
55 if self.cdata.has_section(section):
55 if self.cdata.has_section(section):
56 return self.cdata.items(section)
56 return self.cdata.items(section)
57 return []
57 return []
58
58
59 def walkconfig(self):
59 def walkconfig(self):
60 seen = {}
60 seen = {}
61 for (section, name), value in self.overlay.iteritems():
61 for (section, name), value in self.overlay.iteritems():
62 yield section, name, value
62 yield section, name, value
63 seen[section, name] = 1
63 seen[section, name] = 1
64 for section in self.cdata.sections():
64 for section in self.cdata.sections():
65 for name, value in self.cdata.items(section):
65 for name, value in self.cdata.items(section):
66 if (section, name) in seen: continue
66 if (section, name) in seen: continue
67 yield section, name, value.replace('\n', '\\n')
67 yield section, name, value.replace('\n', '\\n')
68 seen[section, name] = 1
68 seen[section, name] = 1
69
69
70 def extensions(self):
70 def extensions(self):
71 return self.configitems("extensions")
71 return self.configitems("extensions")
72
72
73 def username(self):
73 def username(self):
74 return (os.environ.get("HGUSER") or
74 return (os.environ.get("HGUSER") or
75 self.config("ui", "username") or
75 self.config("ui", "username") or
76 os.environ.get("EMAIL") or
76 os.environ.get("EMAIL") or
77 (os.environ.get("LOGNAME",
77 (os.environ.get("LOGNAME",
78 os.environ.get("USERNAME", "unknown"))
78 os.environ.get("USERNAME", "unknown"))
79 + '@' + socket.getfqdn()))
79 + '@' + socket.getfqdn()))
80
80
81 def shortuser(self, user):
81 def shortuser(self, user):
82 """Return a short representation of a user name or email address."""
82 """Return a short representation of a user name or email address."""
83 if not self.verbose:
83 if not self.verbose:
84 f = user.find('@')
84 f = user.find('@')
85 if f >= 0:
85 if f >= 0:
86 user = user[:f]
86 user = user[:f]
87 f = user.find('<')
87 f = user.find('<')
88 if f >= 0:
88 if f >= 0:
89 user = user[f+1:]
89 user = user[f+1:]
90 return user
90 return user
91
91
92 def expandpath(self, loc):
92 def expandpath(self, loc):
93 paths = {}
93 paths = {}
94 for name, path in self.configitems("paths"):
94 for name, path in self.configitems("paths"):
95 paths[name] = path
95 paths[name] = path
96
96
97 return paths.get(loc, loc)
97 return paths.get(loc, loc)
98
98
99 def write(self, *args):
99 def write(self, *args):
100 for a in args:
100 for a in args:
101 sys.stdout.write(str(a))
101 sys.stdout.write(str(a))
102
102
103 def write_err(self, *args):
103 def write_err(self, *args):
104 sys.stdout.flush()
104 sys.stdout.flush()
105 for a in args:
105 for a in args:
106 sys.stderr.write(str(a))
106 sys.stderr.write(str(a))
107
107
108 def readline(self):
108 def readline(self):
109 return sys.stdin.readline()[:-1]
109 return sys.stdin.readline()[:-1]
110 def prompt(self, msg, pat, default="y"):
110 def prompt(self, msg, pat, default="y"):
111 if not self.interactive: return default
111 if not self.interactive: return default
112 while 1:
112 while 1:
113 self.write(msg, " ")
113 self.write(msg, " ")
114 r = self.readline()
114 r = self.readline()
115 if re.match(pat, r):
115 if re.match(pat, r):
116 return r
116 return r
117 else:
117 else:
118 self.write("unrecognized response\n")
118 self.write(_("unrecognized response\n"))
119 def status(self, *msg):
119 def status(self, *msg):
120 if not self.quiet: self.write(*msg)
120 if not self.quiet: self.write(*msg)
121 def warn(self, *msg):
121 def warn(self, *msg):
122 self.write_err(*msg)
122 self.write_err(*msg)
123 def note(self, *msg):
123 def note(self, *msg):
124 if self.verbose: self.write(*msg)
124 if self.verbose: self.write(*msg)
125 def debug(self, *msg):
125 def debug(self, *msg):
126 if self.debugflag: self.write(*msg)
126 if self.debugflag: self.write(*msg)
127 def edit(self, text):
127 def edit(self, text):
128 import tempfile
128 import tempfile
129 (fd, name) = tempfile.mkstemp("hg")
129 (fd, name) = tempfile.mkstemp("hg")
130 f = os.fdopen(fd, "w")
130 f = os.fdopen(fd, "w")
131 f.write(text)
131 f.write(text)
132 f.close()
132 f.close()
133
133
134 editor = (os.environ.get("HGEDITOR") or
134 editor = (os.environ.get("HGEDITOR") or
135 self.config("ui", "editor") or
135 self.config("ui", "editor") or
136 os.environ.get("EDITOR", "vi"))
136 os.environ.get("EDITOR", "vi"))
137
137
138 os.environ["HGUSER"] = self.username()
138 os.environ["HGUSER"] = self.username()
139 util.system("%s %s" % (editor, name), errprefix="edit failed")
139 util.system("%s %s" % (editor, name), errprefix=_("edit failed"))
140
140
141 t = open(name).read()
141 t = open(name).read()
142 t = re.sub("(?m)^HG:.*\n", "", t)
142 t = re.sub("(?m)^HG:.*\n", "", t)
143
143
144 os.unlink(name)
144 os.unlink(name)
145
145
146 return t
146 return t
@@ -1,576 +1,576 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specfic implementations
2 util.py - Mercurial utility functions and platform specfic implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import *
15 from demandload import *
16 demandload(globals(), "re cStringIO shutil popen2 tempfile threading time")
16 demandload(globals(), "re cStringIO shutil popen2 tempfile threading time")
17
17
18 def pipefilter(s, cmd):
18 def pipefilter(s, cmd):
19 '''filter string S through command CMD, returning its output'''
19 '''filter string S through command CMD, returning its output'''
20 (pout, pin) = popen2.popen2(cmd, -1, 'b')
20 (pout, pin) = popen2.popen2(cmd, -1, 'b')
21 def writer():
21 def writer():
22 pin.write(s)
22 pin.write(s)
23 pin.close()
23 pin.close()
24
24
25 # we should use select instead on UNIX, but this will work on most
25 # we should use select instead on UNIX, but this will work on most
26 # systems, including Windows
26 # systems, including Windows
27 w = threading.Thread(target=writer)
27 w = threading.Thread(target=writer)
28 w.start()
28 w.start()
29 f = pout.read()
29 f = pout.read()
30 pout.close()
30 pout.close()
31 w.join()
31 w.join()
32 return f
32 return f
33
33
34 def tempfilter(s, cmd):
34 def tempfilter(s, cmd):
35 '''filter string S through a pair of temporary files with CMD.
35 '''filter string S through a pair of temporary files with CMD.
36 CMD is used as a template to create the real command to be run,
36 CMD is used as a template to create the real command to be run,
37 with the strings INFILE and OUTFILE replaced by the real names of
37 with the strings INFILE and OUTFILE replaced by the real names of
38 the temporary files generated.'''
38 the temporary files generated.'''
39 inname, outname = None, None
39 inname, outname = None, None
40 try:
40 try:
41 infd, inname = tempfile.mkstemp(prefix='hgfin')
41 infd, inname = tempfile.mkstemp(prefix='hgfin')
42 fp = os.fdopen(infd, 'wb')
42 fp = os.fdopen(infd, 'wb')
43 fp.write(s)
43 fp.write(s)
44 fp.close()
44 fp.close()
45 outfd, outname = tempfile.mkstemp(prefix='hgfout')
45 outfd, outname = tempfile.mkstemp(prefix='hgfout')
46 os.close(outfd)
46 os.close(outfd)
47 cmd = cmd.replace('INFILE', inname)
47 cmd = cmd.replace('INFILE', inname)
48 cmd = cmd.replace('OUTFILE', outname)
48 cmd = cmd.replace('OUTFILE', outname)
49 code = os.system(cmd)
49 code = os.system(cmd)
50 if code: raise Abort("command '%s' failed: %s" %
50 if code: raise Abort(_("command '%s' failed: %s") %
51 (cmd, explain_exit(code)))
51 (cmd, explain_exit(code)))
52 return open(outname, 'rb').read()
52 return open(outname, 'rb').read()
53 finally:
53 finally:
54 try:
54 try:
55 if inname: os.unlink(inname)
55 if inname: os.unlink(inname)
56 except: pass
56 except: pass
57 try:
57 try:
58 if outname: os.unlink(outname)
58 if outname: os.unlink(outname)
59 except: pass
59 except: pass
60
60
61 filtertable = {
61 filtertable = {
62 'tempfile:': tempfilter,
62 'tempfile:': tempfilter,
63 'pipe:': pipefilter,
63 'pipe:': pipefilter,
64 }
64 }
65
65
66 def filter(s, cmd):
66 def filter(s, cmd):
67 "filter a string through a command that transforms its input to its output"
67 "filter a string through a command that transforms its input to its output"
68 for name, fn in filtertable.iteritems():
68 for name, fn in filtertable.iteritems():
69 if cmd.startswith(name):
69 if cmd.startswith(name):
70 return fn(s, cmd[len(name):].lstrip())
70 return fn(s, cmd[len(name):].lstrip())
71 return pipefilter(s, cmd)
71 return pipefilter(s, cmd)
72
72
73 def patch(strip, patchname, ui):
73 def patch(strip, patchname, ui):
74 """apply the patch <patchname> to the working directory.
74 """apply the patch <patchname> to the working directory.
75 a list of patched files is returned"""
75 a list of patched files is returned"""
76 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
76 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
77 files = {}
77 files = {}
78 for line in fp:
78 for line in fp:
79 line = line.rstrip()
79 line = line.rstrip()
80 ui.status("%s\n" % line)
80 ui.status("%s\n" % line)
81 if line.startswith('patching file '):
81 if line.startswith('patching file '):
82 pf = parse_patch_output(line)
82 pf = parse_patch_output(line)
83 files.setdefault(pf, 1)
83 files.setdefault(pf, 1)
84 code = fp.close()
84 code = fp.close()
85 if code:
85 if code:
86 raise Abort("patch command failed: %s" % explain_exit(code)[0])
86 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
87 return files.keys()
87 return files.keys()
88
88
89 def binary(s):
89 def binary(s):
90 """return true if a string is binary data using diff's heuristic"""
90 """return true if a string is binary data using diff's heuristic"""
91 if s and '\0' in s[:4096]:
91 if s and '\0' in s[:4096]:
92 return True
92 return True
93 return False
93 return False
94
94
95 def unique(g):
95 def unique(g):
96 """return the uniq elements of iterable g"""
96 """return the uniq elements of iterable g"""
97 seen = {}
97 seen = {}
98 for f in g:
98 for f in g:
99 if f not in seen:
99 if f not in seen:
100 seen[f] = 1
100 seen[f] = 1
101 yield f
101 yield f
102
102
103 class Abort(Exception):
103 class Abort(Exception):
104 """Raised if a command needs to print an error and exit."""
104 """Raised if a command needs to print an error and exit."""
105
105
106 def always(fn): return True
106 def always(fn): return True
107 def never(fn): return False
107 def never(fn): return False
108
108
109 def globre(pat, head='^', tail='$'):
109 def globre(pat, head='^', tail='$'):
110 "convert a glob pattern into a regexp"
110 "convert a glob pattern into a regexp"
111 i, n = 0, len(pat)
111 i, n = 0, len(pat)
112 res = ''
112 res = ''
113 group = False
113 group = False
114 def peek(): return i < n and pat[i]
114 def peek(): return i < n and pat[i]
115 while i < n:
115 while i < n:
116 c = pat[i]
116 c = pat[i]
117 i = i+1
117 i = i+1
118 if c == '*':
118 if c == '*':
119 if peek() == '*':
119 if peek() == '*':
120 i += 1
120 i += 1
121 res += '.*'
121 res += '.*'
122 else:
122 else:
123 res += '[^/]*'
123 res += '[^/]*'
124 elif c == '?':
124 elif c == '?':
125 res += '.'
125 res += '.'
126 elif c == '[':
126 elif c == '[':
127 j = i
127 j = i
128 if j < n and pat[j] in '!]':
128 if j < n and pat[j] in '!]':
129 j += 1
129 j += 1
130 while j < n and pat[j] != ']':
130 while j < n and pat[j] != ']':
131 j += 1
131 j += 1
132 if j >= n:
132 if j >= n:
133 res += '\\['
133 res += '\\['
134 else:
134 else:
135 stuff = pat[i:j].replace('\\','\\\\')
135 stuff = pat[i:j].replace('\\','\\\\')
136 i = j + 1
136 i = j + 1
137 if stuff[0] == '!':
137 if stuff[0] == '!':
138 stuff = '^' + stuff[1:]
138 stuff = '^' + stuff[1:]
139 elif stuff[0] == '^':
139 elif stuff[0] == '^':
140 stuff = '\\' + stuff
140 stuff = '\\' + stuff
141 res = '%s[%s]' % (res, stuff)
141 res = '%s[%s]' % (res, stuff)
142 elif c == '{':
142 elif c == '{':
143 group = True
143 group = True
144 res += '(?:'
144 res += '(?:'
145 elif c == '}' and group:
145 elif c == '}' and group:
146 res += ')'
146 res += ')'
147 group = False
147 group = False
148 elif c == ',' and group:
148 elif c == ',' and group:
149 res += '|'
149 res += '|'
150 else:
150 else:
151 res += re.escape(c)
151 res += re.escape(c)
152 return head + res + tail
152 return head + res + tail
153
153
154 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
154 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
155
155
156 def pathto(n1, n2):
156 def pathto(n1, n2):
157 '''return the relative path from one place to another.
157 '''return the relative path from one place to another.
158 this returns a path in the form used by the local filesystem, not hg.'''
158 this returns a path in the form used by the local filesystem, not hg.'''
159 if not n1: return localpath(n2)
159 if not n1: return localpath(n2)
160 a, b = n1.split('/'), n2.split('/')
160 a, b = n1.split('/'), n2.split('/')
161 a.reverse(), b.reverse()
161 a.reverse(), b.reverse()
162 while a and b and a[-1] == b[-1]:
162 while a and b and a[-1] == b[-1]:
163 a.pop(), b.pop()
163 a.pop(), b.pop()
164 b.reverse()
164 b.reverse()
165 return os.sep.join((['..'] * len(a)) + b)
165 return os.sep.join((['..'] * len(a)) + b)
166
166
167 def canonpath(root, cwd, myname):
167 def canonpath(root, cwd, myname):
168 """return the canonical path of myname, given cwd and root"""
168 """return the canonical path of myname, given cwd and root"""
169 rootsep = root + os.sep
169 rootsep = root + os.sep
170 name = myname
170 name = myname
171 if not name.startswith(os.sep):
171 if not name.startswith(os.sep):
172 name = os.path.join(root, cwd, name)
172 name = os.path.join(root, cwd, name)
173 name = os.path.normpath(name)
173 name = os.path.normpath(name)
174 if name.startswith(rootsep):
174 if name.startswith(rootsep):
175 return pconvert(name[len(rootsep):])
175 return pconvert(name[len(rootsep):])
176 elif name == root:
176 elif name == root:
177 return ''
177 return ''
178 else:
178 else:
179 raise Abort('%s not under root' % myname)
179 raise Abort('%s not under root' % myname)
180
180
181 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head=''):
181 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head=''):
182 """build a function to match a set of file patterns
182 """build a function to match a set of file patterns
183
183
184 arguments:
184 arguments:
185 canonroot - the canonical root of the tree you're matching against
185 canonroot - the canonical root of the tree you're matching against
186 cwd - the current working directory, if relevant
186 cwd - the current working directory, if relevant
187 names - patterns to find
187 names - patterns to find
188 inc - patterns to include
188 inc - patterns to include
189 exc - patterns to exclude
189 exc - patterns to exclude
190 head - a regex to prepend to patterns to control whether a match is rooted
190 head - a regex to prepend to patterns to control whether a match is rooted
191
191
192 a pattern is one of:
192 a pattern is one of:
193 'glob:<rooted glob>'
193 'glob:<rooted glob>'
194 're:<rooted regexp>'
194 're:<rooted regexp>'
195 'path:<rooted path>'
195 'path:<rooted path>'
196 'relglob:<relative glob>'
196 'relglob:<relative glob>'
197 'relpath:<relative path>'
197 'relpath:<relative path>'
198 'relre:<relative regexp>'
198 'relre:<relative regexp>'
199 '<rooted path or regexp>'
199 '<rooted path or regexp>'
200
200
201 returns:
201 returns:
202 a 3-tuple containing
202 a 3-tuple containing
203 - list of explicit non-pattern names passed in
203 - list of explicit non-pattern names passed in
204 - a bool match(filename) function
204 - a bool match(filename) function
205 - a bool indicating if any patterns were passed in
205 - a bool indicating if any patterns were passed in
206
206
207 todo:
207 todo:
208 make head regex a rooted bool
208 make head regex a rooted bool
209 """
209 """
210
210
211 def patkind(name):
211 def patkind(name):
212 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
212 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
213 if name.startswith(prefix + ':'): return name.split(':', 1)
213 if name.startswith(prefix + ':'): return name.split(':', 1)
214 for c in name:
214 for c in name:
215 if c in _globchars: return 'glob', name
215 if c in _globchars: return 'glob', name
216 return 'relpath', name
216 return 'relpath', name
217
217
218 def regex(kind, name, tail):
218 def regex(kind, name, tail):
219 '''convert a pattern into a regular expression'''
219 '''convert a pattern into a regular expression'''
220 if kind == 're':
220 if kind == 're':
221 return name
221 return name
222 elif kind == 'path':
222 elif kind == 'path':
223 return '^' + re.escape(name) + '(?:/|$)'
223 return '^' + re.escape(name) + '(?:/|$)'
224 elif kind == 'relglob':
224 elif kind == 'relglob':
225 return head + globre(name, '(?:|.*/)', tail)
225 return head + globre(name, '(?:|.*/)', tail)
226 elif kind == 'relpath':
226 elif kind == 'relpath':
227 return head + re.escape(name) + tail
227 return head + re.escape(name) + tail
228 elif kind == 'relre':
228 elif kind == 'relre':
229 if name.startswith('^'):
229 if name.startswith('^'):
230 return name
230 return name
231 return '.*' + name
231 return '.*' + name
232 return head + globre(name, '', tail)
232 return head + globre(name, '', tail)
233
233
234 def matchfn(pats, tail):
234 def matchfn(pats, tail):
235 """build a matching function from a set of patterns"""
235 """build a matching function from a set of patterns"""
236 if pats:
236 if pats:
237 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
237 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
238 return re.compile(pat).match
238 return re.compile(pat).match
239
239
240 def globprefix(pat):
240 def globprefix(pat):
241 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
241 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
242 root = []
242 root = []
243 for p in pat.split(os.sep):
243 for p in pat.split(os.sep):
244 if patkind(p)[0] == 'glob': break
244 if patkind(p)[0] == 'glob': break
245 root.append(p)
245 root.append(p)
246 return '/'.join(root)
246 return '/'.join(root)
247
247
248 pats = []
248 pats = []
249 files = []
249 files = []
250 roots = []
250 roots = []
251 for kind, name in map(patkind, names):
251 for kind, name in map(patkind, names):
252 if kind in ('glob', 'relpath'):
252 if kind in ('glob', 'relpath'):
253 name = canonpath(canonroot, cwd, name)
253 name = canonpath(canonroot, cwd, name)
254 if name == '':
254 if name == '':
255 kind, name = 'glob', '**'
255 kind, name = 'glob', '**'
256 if kind in ('glob', 'path', 're'):
256 if kind in ('glob', 'path', 're'):
257 pats.append((kind, name))
257 pats.append((kind, name))
258 if kind == 'glob':
258 if kind == 'glob':
259 root = globprefix(name)
259 root = globprefix(name)
260 if root: roots.append(root)
260 if root: roots.append(root)
261 elif kind == 'relpath':
261 elif kind == 'relpath':
262 files.append((kind, name))
262 files.append((kind, name))
263 roots.append(name)
263 roots.append(name)
264
264
265 patmatch = matchfn(pats, '$') or always
265 patmatch = matchfn(pats, '$') or always
266 filematch = matchfn(files, '(?:/|$)') or always
266 filematch = matchfn(files, '(?:/|$)') or always
267 incmatch = always
267 incmatch = always
268 if inc:
268 if inc:
269 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
269 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
270 excmatch = lambda fn: False
270 excmatch = lambda fn: False
271 if exc:
271 if exc:
272 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
272 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
273
273
274 return (roots,
274 return (roots,
275 lambda fn: (incmatch(fn) and not excmatch(fn) and
275 lambda fn: (incmatch(fn) and not excmatch(fn) and
276 (fn.endswith('/') or
276 (fn.endswith('/') or
277 (not pats and not files) or
277 (not pats and not files) or
278 (pats and patmatch(fn)) or
278 (pats and patmatch(fn)) or
279 (files and filematch(fn)))),
279 (files and filematch(fn)))),
280 (inc or exc or (pats and pats != [('glob', '**')])) and True)
280 (inc or exc or (pats and pats != [('glob', '**')])) and True)
281
281
def system(cmd, errprefix=None):
    """execute a shell command that must succeed"""
    rc = os.system(cmd)
    if not rc:
        return
    # describe the failure using the command's basename and the
    # platform-specific exit-status explanation
    program = os.path.basename(cmd.split(None, 1)[0])
    errmsg = "%s %s" % (program, explain_exit(rc)[0])
    if errprefix:
        errmsg = "%s: %s" % (errprefix, errmsg)
    raise Abort(errmsg)
291
291
def rename(src, dst):
    """forcibly rename a file

    os.rename silently replaces an existing dst on POSIX but raises
    OSError on Windows, so on failure remove dst and rename again.
    """
    try:
        os.rename(src, dst)
    except OSError:
        # was a bare except, which also swallowed KeyboardInterrupt and
        # SystemExit; only a failed rename should trigger the fallback
        os.unlink(dst)
        os.rename(src, dst)
299
299
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible

    hardlink may be True, False, or None; None means "decide by
    checking whether src and dst live on the same device".
    """

    if hardlink is None:
        # hardlinks cannot span devices, so only attempt them when
        # source and destination share one
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name in os.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                # was a bare except (which even caught KeyboardInterrupt);
                # a failed link means hardlinks are unsupported here, so
                # fall back to plain copies from this point on
                hardlink = False
                shutil.copy2(src, dst)
        else:
            shutil.copy2(src, dst)
322
322
def opener(base):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    p = base
    def o(path, mode="r", text=False):
        # resolve path against the base directory captured in the closure
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            # opening for write/append: ensure the parent directory
            # exists, and never modify data shared through a hardlink
            try:
                nlink = nlinks(f)
            except OSError:
                # file does not exist yet: create missing parent dirs
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if nlink > 1:
                    # break the hardlink by rewriting the file through a
                    # temp copy, then renaming it over the original
                    # (copy-on-write semantics)
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    rename(f+".tmp", f)

        return file(f, mode)

    return o
352
352
353 def _makelock_file(info, pathname):
353 def _makelock_file(info, pathname):
354 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
354 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
355 os.write(ld, info)
355 os.write(ld, info)
356 os.close(ld)
356 os.close(ld)
357
357
358 def _readlock_file(pathname):
358 def _readlock_file(pathname):
359 return file(pathname).read()
359 return file(pathname).read()
360
360
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    st = os.stat(pathname)
    return st.st_nlink
364
364
# prefer the platform's native hardlink primitive when it exists;
# otherwise install a stub with the same signature that always fails
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        # callers (see copyfiles) treat a hardlink failure as an
        # OSError and fall back to copying, so raise OSError here too
        raise OSError(0, _("Hardlinks not supported"))
370
370
# Platform specific variants
if os.name == 'nt':
    # null device on Windows
    nulldev = 'NUL:'

    try:
        # locate mercurial.ini next to the running executable; needs
        # the pywin32 extensions to find the executable's path
        import win32api, win32process
        filename = win32process.GetModuleFileNameEx(win32api.GetCurrentProcess(), 0)
        systemrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')

    except ImportError:
        # pywin32 not available: fall back to a fixed location
        systemrc = r'c:\mercurial\mercurial.ini'
        pass

    # system-wide rc first, then the per-user one
    rcpath = (systemrc,
              os.path.join(os.path.expanduser('~'), 'mercurial.ini'))

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf

    try: # ActivePython can create hard links using win32file module
        import win32file

        def os_link(src, dst): # NB will only succeed on NTFS
            win32file.CreateHardLink(dst, src)

        def nlinks(pathname):
            """Return number of hardlinks for the given file."""
            try:
                fh = win32file.CreateFile(pathname,
                    win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
                    None, win32file.OPEN_EXISTING, 0, None)
                # index 7 of the information tuple is the link count
                res = win32file.GetFileInformationByHandle(fh)
                fh.Close()
                return res[7]
            except:
                # on any win32 failure, fall back to the stat field
                return os.stat(pathname).st_nlink

    except ImportError:
        pass

    def is_exec(f, last):
        # no executable bit on Windows; trust the caller's prior value
        return last

    def set_exec(f, mode):
        # no-op: the filesystem has no executable bit to set
        pass

    def pconvert(path):
        # convert a local path to the canonical '/'-separated form
        return path.replace("\\", "/")

    def localpath(path):
        # convert a canonical path back to the local '\\'-separated form
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    # locks are plain files here (the posix branch prefers symlinks)
    makelock = _makelock_file
    readlock = _readlock_file

    def explain_exit(code):
        # the exit code is reported unmodified on this platform
        return _("exited with status %d") % code, code

else:
    nulldev = '/dev/null'

    # collect *.rc drop-in files from /etc/mercurial/hgrc.d
    hgrcd = '/etc/mercurial/hgrc.d'
    hgrcs = []
    if os.path.isdir(hgrcd):
        # NOTE(review): these are bare filenames, not joined with hgrcd,
        # so normpath below resolves them relative to cwd — confirm this
        # is intended
        hgrcs = [f for f in os.listdir(hgrcd) if f.endswith(".rc")]
    rcpath = map(os.path.normpath, hgrcs +
                 ['/etc/mercurial/hgrc', os.path.expanduser('~/.hgrc')])

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        return output_line[14:]

    def is_exec(f, last):
        """check whether a file is executable"""
        return (os.stat(f).st_mode & 0100 != 0)

    def set_exec(f, mode):
        # set (mode true) or clear (mode false) the executable bits of f
        s = os.stat(f).st_mode
        if (s & 0100 != 0) == mode:
            # already in the requested state
            return
        if mode:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            umask = os.umask(0)
            os.umask(umask)
            os.chmod(f, s | (s & 0444) >> 2 & ~umask)
        else:
            os.chmod(f, s & 0666)

    def pconvert(path):
        # posix paths are already in canonical '/'-separated form
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath

    def makelock(info, pathname):
        # prefer a symlink: creation is atomic and the target carries
        # the lock info (read back via os.readlink in readlock)
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                # lock already held: propagate so the caller can react
                raise
            else:
                # symlinks unsupported here: use a plain lock file
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno == errno.EINVAL:
                # not a symlink: must be a _makelock_file-style lock
                return _readlock_file(pathname)
            else:
                raise

    def explain_exit(code):
        """return a 2-tuple (desc, code) describing a process's status"""
        if os.WIFEXITED(code):
            val = os.WEXITSTATUS(code)
            return _("exited with status %d") % val, val
        elif os.WIFSIGNALED(code):
            val = os.WTERMSIG(code)
            return _("killed by signal %d") % val, val
        elif os.WIFSTOPPED(code):
            val = os.WSTOPSIG(code)
            return _("stopped by signal %d") % val, val
        raise ValueError(_("invalid exit code"))
505
505
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        # buffered but not-yet-consumed data
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        # set once in_iter has been exhausted
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            # accumulate in a StringIO to avoid quadratic string appends
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                # the iterator ran dry before reaching targetsize
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() keeps the unread tail as a zero-copy view (python 2)
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
550
550
def filechunkiter(f, size = 65536):
    """Create a generator that produces all the data in the file size
    (default 65536) bytes at a time. Chunks may be less than size
    bytes if the chunk is the last chunk in the file, or the file is a
    socket or some other type of file that sometimes reads less data
    than is requested."""
    while True:
        chunk = f.read(size)
        if not chunk:
            break
        yield chunk
561
561
def makedate():
    """return the current time as a (unixtime, tz-offset-seconds) pair"""
    # pick the DST-adjusted offset when daylight saving is in effect
    if time.daylight:
        offset = time.altzone
    else:
        offset = time.timezone
    return time.time(), offset
567
567
def datestr(date=None, format='%c'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC."""
    t, tz = date or makedate()
    stamp = time.strftime(format, time.gmtime(float(t) - tz))
    # render the offset as a signed +HHMM suffix
    return "%s %+03d%02d" % (stamp, -tz / 3600, (-tz % 3600) / 60)
General Comments 0
You need to be logged in to leave comments. Login now