##// END OF EJS Templates
Provide better context for custom Python encode/decode filters....
Jesse Glick -
r5967:f8ad3b76 default
parent child Browse files
Show More
@@ -1,108 +1,107 b''
1 # win32text.py - LF <-> CRLF translation utilities for Windows users
1 # win32text.py - LF <-> CRLF translation utilities for Windows users
2 #
2 #
3 # This software may be used and distributed according to the terms
3 # This software may be used and distributed according to the terms
4 # of the GNU General Public License, incorporated herein by reference.
4 # of the GNU General Public License, incorporated herein by reference.
5 #
5 #
6 # To perform automatic newline conversion, use:
6 # To perform automatic newline conversion, use:
7 #
7 #
8 # [extensions]
8 # [extensions]
9 # hgext.win32text =
9 # hgext.win32text =
10 # [encode]
10 # [encode]
11 # ** = cleverencode:
11 # ** = cleverencode:
12 # [decode]
12 # [decode]
13 # ** = cleverdecode:
13 # ** = cleverdecode:
14 #
14 #
15 # If not doing conversion, to make sure you do not commit CRLF by accident:
15 # If not doing conversion, to make sure you do not commit CRLF by accident:
16 #
16 #
17 # [hooks]
17 # [hooks]
18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
18 # pretxncommit.crlf = python:hgext.win32text.forbidcrlf
19 #
19 #
20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
20 # To do the same check on a server to prevent CRLF from being pushed or pulled:
21 #
21 #
22 # [hooks]
22 # [hooks]
23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
23 # pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
24
24
25 from mercurial import util, ui
25 from mercurial import util, ui
26 from mercurial.i18n import gettext as _
26 from mercurial.i18n import gettext as _
27 from mercurial.node import *
27 from mercurial.node import *
28 import re
28 import re
29
29
# regexp for single LF without CR preceding: group 1 captures the
# preceding character (or the line start) so a substitution can
# re-insert it in front of the replacement CRLF.
re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
32
32
def dumbdecode(s, cmd, ui=None, repo=None, filename=None, **kwargs):
    """LF -> CRLF translation for checkout (the [decode] direction).

    Converts every bare LF in *s* to CRLF and returns the result.  When
    the caller supplies ui/repo/filename context, warn if the repository
    copy already contains CRLF: that usually means the file did not need
    EOL conversion in the first place and the encode/decode filters are
    misconfigured.  See issue 302:
    http://www.selenic.com/mercurial/bts/issue302
    """
    if '\r\n' in s and ui and filename and repo:
        # NOTE: the trailing space before the embedded newline in the
        # original message ('settings in \nMercurial.ini') produced a
        # line ending in a stray blank; the newline now directly follows
        # the word so the wrapped warning reads cleanly.
        ui.warn(_('WARNING: %s already has CRLF line endings\n'
                  'and does not need EOL conversion by the win32text plugin.\n'
                  'Before your next commit, please reconsider your '
                  'encode/decode settings in\nMercurial.ini or %s.\n') %
                (filename, repo.join('hgrc')))
    # replace single LF to CRLF
    return re_single_lf.sub('\\1\r\n', s)
47
46
def dumbencode(s, cmd):
    """CRLF -> LF translation for commit (the [encode] direction).

    Unconditionally normalizes CRLF pairs to bare LF so the repository
    always stores Unix-style line endings.
    """
    crlf, lf = '\r\n', '\n'
    return s.replace(crlf, lf)
50
49
def clevertest(s, cmd):
    """Return True when *s* looks like text and may be EOL-converted.

    A NUL byte anywhere in the data marks the file as binary, in which
    case conversion must be skipped.
    """
    return '\0' not in s
54
53
def cleverdecode(s, cmd, **kwargs):
    """LF -> CRLF translation that leaves binary files untouched.

    Extra keyword context (ui/repo/filename) is forwarded to dumbdecode
    so it can warn about files that already contain CRLF.
    """
    if not clevertest(s, cmd):
        # binary content: return unchanged
        return s
    return dumbdecode(s, cmd, **kwargs)
59
58
def cleverencode(s, cmd):
    """CRLF -> LF translation that leaves binary files untouched."""
    if not clevertest(s, cmd):
        # binary content: return unchanged
        return s
    return dumbencode(s, cmd)
64
63
# Map of filter-command prefixes, as written in the [encode]/[decode]
# sections of an hgrc, to the translation functions registered with the
# repository by reposetup() below.
_filters = {
    'dumbdecode:': dumbdecode,
    'dumbencode:': dumbencode,
    'cleverdecode:': cleverdecode,
    'cleverencode:': cleverencode,
    }
71
70
def forbidcrlf(ui, repo, hooktype, node, **kwargs):
    """pretxncommit/pretxnchangegroup hook: veto text files with CRLF.

    Scans every changeset from *node* (the first changeset of the
    transaction) up to tip and returns True -- aborting the transaction
    -- if any non-binary file touched by them contains CRLF line
    endings.  Files containing NUL bytes are treated as binary and
    skipped.
    """
    halt = False
    for rev in xrange(repo.changelog.rev(bin(node)), repo.changelog.count()):
        c = repo.changectx(rev)
        for f in c.files():
            # a file listed in the changeset but absent from its
            # manifest was removed; nothing to check
            if f not in c:
                continue
            data = c[f].data()
            # NUL byte => binary, leave alone; otherwise any CRLF is
            # grounds for rejection
            if '\0' not in data and '\r\n' in data:
                if not halt:
                    # print the banner only once, before the first
                    # offending file
                    ui.warn(_('Attempt to commit or push text file(s) '
                              'using CRLF line endings\n'))
                ui.warn(_('in %s: %s\n') % (short(c.node()), f))
                halt = True
    # for incoming changegroups the offending commits were made on the
    # remote side, so tell the pusher how to set up the hook locally
    if halt and hooktype == 'pretxnchangegroup':
        ui.warn(_('\nTo prevent this mistake in your local repository,\n'
                  'add to Mercurial.ini or .hg/hgrc:\n'
                  '\n'
                  '[hooks]\n'
                  'pretxncommit.crlf = python:hgext.win32text.forbidcrlf\n'
                  '\n'
                  'and also consider adding:\n'
                  '\n'
                  '[extensions]\n'
                  'hgext.win32text =\n'
                  '[encode]\n'
                  '** = cleverencode:\n'
                  '[decode]\n'
                  '** = cleverdecode:\n'))
    return halt
102
101
def reposetup(ui, repo):
    """Extension hook: register the win32text data filters on *repo*.

    Called by Mercurial once per repository when the extension is
    enabled.  Non-local repositories (e.g. http peers) have no working
    directory and therefore nothing to filter.
    """
    if not repo.local():
        return
    for name, fn in _filters.iteritems():
        repo.adddatafilter(name, fn)
108
107
@@ -1,2072 +1,2076 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import re, lock, transaction, tempfile, stat, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook, inspect
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = util.set(('lookup', 'changegroupsubset'))
16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.root = os.path.realpath(path)
21 self.root = os.path.realpath(path)
22 self.path = os.path.join(self.root, ".hg")
22 self.path = os.path.join(self.root, ".hg")
23 self.origroot = path
23 self.origroot = path
24 self.opener = util.opener(self.path)
24 self.opener = util.opener(self.path)
25 self.wopener = util.opener(self.root)
25 self.wopener = util.opener(self.root)
26
26
27 if not os.path.isdir(self.path):
27 if not os.path.isdir(self.path):
28 if create:
28 if create:
29 if not os.path.exists(path):
29 if not os.path.exists(path):
30 os.mkdir(path)
30 os.mkdir(path)
31 os.mkdir(self.path)
31 os.mkdir(self.path)
32 requirements = ["revlogv1"]
32 requirements = ["revlogv1"]
33 if parentui.configbool('format', 'usestore', True):
33 if parentui.configbool('format', 'usestore', True):
34 os.mkdir(os.path.join(self.path, "store"))
34 os.mkdir(os.path.join(self.path, "store"))
35 requirements.append("store")
35 requirements.append("store")
36 # create an invalid changelog
36 # create an invalid changelog
37 self.opener("00changelog.i", "a").write(
37 self.opener("00changelog.i", "a").write(
38 '\0\0\0\2' # represents revlogv2
38 '\0\0\0\2' # represents revlogv2
39 ' dummy changelog to prevent using the old repo layout'
39 ' dummy changelog to prevent using the old repo layout'
40 )
40 )
41 reqfile = self.opener("requires", "w")
41 reqfile = self.opener("requires", "w")
42 for r in requirements:
42 for r in requirements:
43 reqfile.write("%s\n" % r)
43 reqfile.write("%s\n" % r)
44 reqfile.close()
44 reqfile.close()
45 else:
45 else:
46 raise repo.RepoError(_("repository %s not found") % path)
46 raise repo.RepoError(_("repository %s not found") % path)
47 elif create:
47 elif create:
48 raise repo.RepoError(_("repository %s already exists") % path)
48 raise repo.RepoError(_("repository %s already exists") % path)
49 else:
49 else:
50 # find requirements
50 # find requirements
51 try:
51 try:
52 requirements = self.opener("requires").read().splitlines()
52 requirements = self.opener("requires").read().splitlines()
53 except IOError, inst:
53 except IOError, inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 requirements = []
56 requirements = []
57 # check them
57 # check them
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61
61
62 # setup store
62 # setup store
63 if "store" in requirements:
63 if "store" in requirements:
64 self.encodefn = util.encodefilename
64 self.encodefn = util.encodefilename
65 self.decodefn = util.decodefilename
65 self.decodefn = util.decodefilename
66 self.spath = os.path.join(self.path, "store")
66 self.spath = os.path.join(self.path, "store")
67 else:
67 else:
68 self.encodefn = lambda x: x
68 self.encodefn = lambda x: x
69 self.decodefn = lambda x: x
69 self.decodefn = lambda x: x
70 self.spath = self.path
70 self.spath = self.path
71 self.sopener = util.encodedopener(util.opener(self.spath),
71 self.sopener = util.encodedopener(util.opener(self.spath),
72 self.encodefn)
72 self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.filterpats = {}
85 self.filterpats = {}
86 self._datafilters = {}
86 self._datafilters = {}
87 self._transref = self._lockref = self._wlockref = None
87 self._transref = self._lockref = self._wlockref = None
88
88
89 def __getattr__(self, name):
89 def __getattr__(self, name):
90 if name == 'changelog':
90 if name == 'changelog':
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 return self.changelog
93 return self.changelog
94 if name == 'manifest':
94 if name == 'manifest':
95 self.changelog
95 self.changelog
96 self.manifest = manifest.manifest(self.sopener)
96 self.manifest = manifest.manifest(self.sopener)
97 return self.manifest
97 return self.manifest
98 if name == 'dirstate':
98 if name == 'dirstate':
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 return self.dirstate
100 return self.dirstate
101 else:
101 else:
102 raise AttributeError, name
102 raise AttributeError, name
103
103
104 def url(self):
104 def url(self):
105 return 'file:' + self.root
105 return 'file:' + self.root
106
106
107 def hook(self, name, throw=False, **args):
107 def hook(self, name, throw=False, **args):
108 return hook.hook(self.ui, self, name, throw, **args)
108 return hook.hook(self.ui, self, name, throw, **args)
109
109
110 tag_disallowed = ':\r\n'
110 tag_disallowed = ':\r\n'
111
111
112 def _tag(self, name, node, message, local, user, date, parent=None,
112 def _tag(self, name, node, message, local, user, date, parent=None,
113 extra={}):
113 extra={}):
114 use_dirstate = parent is None
114 use_dirstate = parent is None
115
115
116 for c in self.tag_disallowed:
116 for c in self.tag_disallowed:
117 if c in name:
117 if c in name:
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119
119
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121
121
122 def writetag(fp, name, munge, prevtags):
122 def writetag(fp, name, munge, prevtags):
123 if prevtags and prevtags[-1] != '\n':
123 if prevtags and prevtags[-1] != '\n':
124 fp.write('\n')
124 fp.write('\n')
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 fp.close()
126 fp.close()
127
127
128 prevtags = ''
128 prevtags = ''
129 if local:
129 if local:
130 try:
130 try:
131 fp = self.opener('localtags', 'r+')
131 fp = self.opener('localtags', 'r+')
132 except IOError, err:
132 except IOError, err:
133 fp = self.opener('localtags', 'a')
133 fp = self.opener('localtags', 'a')
134 else:
134 else:
135 prevtags = fp.read()
135 prevtags = fp.read()
136
136
137 # local tags are stored in the current charset
137 # local tags are stored in the current charset
138 writetag(fp, name, None, prevtags)
138 writetag(fp, name, None, prevtags)
139 self.hook('tag', node=hex(node), tag=name, local=local)
139 self.hook('tag', node=hex(node), tag=name, local=local)
140 return
140 return
141
141
142 if use_dirstate:
142 if use_dirstate:
143 try:
143 try:
144 fp = self.wfile('.hgtags', 'rb+')
144 fp = self.wfile('.hgtags', 'rb+')
145 except IOError, err:
145 except IOError, err:
146 fp = self.wfile('.hgtags', 'ab')
146 fp = self.wfile('.hgtags', 'ab')
147 else:
147 else:
148 prevtags = fp.read()
148 prevtags = fp.read()
149 else:
149 else:
150 try:
150 try:
151 prevtags = self.filectx('.hgtags', parent).data()
151 prevtags = self.filectx('.hgtags', parent).data()
152 except revlog.LookupError:
152 except revlog.LookupError:
153 pass
153 pass
154 fp = self.wfile('.hgtags', 'wb')
154 fp = self.wfile('.hgtags', 'wb')
155 if prevtags:
155 if prevtags:
156 fp.write(prevtags)
156 fp.write(prevtags)
157
157
158 # committed tags are stored in UTF-8
158 # committed tags are stored in UTF-8
159 writetag(fp, name, util.fromlocal, prevtags)
159 writetag(fp, name, util.fromlocal, prevtags)
160
160
161 if use_dirstate and '.hgtags' not in self.dirstate:
161 if use_dirstate and '.hgtags' not in self.dirstate:
162 self.add(['.hgtags'])
162 self.add(['.hgtags'])
163
163
164 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
164 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
165 extra=extra)
165 extra=extra)
166
166
167 self.hook('tag', node=hex(node), tag=name, local=local)
167 self.hook('tag', node=hex(node), tag=name, local=local)
168
168
169 return tagnode
169 return tagnode
170
170
171 def tag(self, name, node, message, local, user, date):
171 def tag(self, name, node, message, local, user, date):
172 '''tag a revision with a symbolic name.
172 '''tag a revision with a symbolic name.
173
173
174 if local is True, the tag is stored in a per-repository file.
174 if local is True, the tag is stored in a per-repository file.
175 otherwise, it is stored in the .hgtags file, and a new
175 otherwise, it is stored in the .hgtags file, and a new
176 changeset is committed with the change.
176 changeset is committed with the change.
177
177
178 keyword arguments:
178 keyword arguments:
179
179
180 local: whether to store tag in non-version-controlled file
180 local: whether to store tag in non-version-controlled file
181 (default False)
181 (default False)
182
182
183 message: commit message to use if committing
183 message: commit message to use if committing
184
184
185 user: name of user to use if committing
185 user: name of user to use if committing
186
186
187 date: date tuple to use if committing'''
187 date: date tuple to use if committing'''
188
188
189 for x in self.status()[:5]:
189 for x in self.status()[:5]:
190 if '.hgtags' in x:
190 if '.hgtags' in x:
191 raise util.Abort(_('working copy of .hgtags is changed '
191 raise util.Abort(_('working copy of .hgtags is changed '
192 '(please commit .hgtags manually)'))
192 '(please commit .hgtags manually)'))
193
193
194
194
195 self._tag(name, node, message, local, user, date)
195 self._tag(name, node, message, local, user, date)
196
196
197 def tags(self):
197 def tags(self):
198 '''return a mapping of tag to node'''
198 '''return a mapping of tag to node'''
199 if self.tagscache:
199 if self.tagscache:
200 return self.tagscache
200 return self.tagscache
201
201
202 globaltags = {}
202 globaltags = {}
203 tagtypes = {}
203 tagtypes = {}
204
204
205 def readtags(lines, fn, tagtype):
205 def readtags(lines, fn, tagtype):
206 filetags = {}
206 filetags = {}
207 count = 0
207 count = 0
208
208
209 def warn(msg):
209 def warn(msg):
210 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
210 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
211
211
212 for l in lines:
212 for l in lines:
213 count += 1
213 count += 1
214 if not l:
214 if not l:
215 continue
215 continue
216 s = l.split(" ", 1)
216 s = l.split(" ", 1)
217 if len(s) != 2:
217 if len(s) != 2:
218 warn(_("cannot parse entry"))
218 warn(_("cannot parse entry"))
219 continue
219 continue
220 node, key = s
220 node, key = s
221 key = util.tolocal(key.strip()) # stored in UTF-8
221 key = util.tolocal(key.strip()) # stored in UTF-8
222 try:
222 try:
223 bin_n = bin(node)
223 bin_n = bin(node)
224 except TypeError:
224 except TypeError:
225 warn(_("node '%s' is not well formed") % node)
225 warn(_("node '%s' is not well formed") % node)
226 continue
226 continue
227 if bin_n not in self.changelog.nodemap:
227 if bin_n not in self.changelog.nodemap:
228 warn(_("tag '%s' refers to unknown node") % key)
228 warn(_("tag '%s' refers to unknown node") % key)
229 continue
229 continue
230
230
231 h = []
231 h = []
232 if key in filetags:
232 if key in filetags:
233 n, h = filetags[key]
233 n, h = filetags[key]
234 h.append(n)
234 h.append(n)
235 filetags[key] = (bin_n, h)
235 filetags[key] = (bin_n, h)
236
236
237 for k, nh in filetags.items():
237 for k, nh in filetags.items():
238 if k not in globaltags:
238 if k not in globaltags:
239 globaltags[k] = nh
239 globaltags[k] = nh
240 tagtypes[k] = tagtype
240 tagtypes[k] = tagtype
241 continue
241 continue
242
242
243 # we prefer the global tag if:
243 # we prefer the global tag if:
244 # it supercedes us OR
244 # it supercedes us OR
245 # mutual supercedes and it has a higher rank
245 # mutual supercedes and it has a higher rank
246 # otherwise we win because we're tip-most
246 # otherwise we win because we're tip-most
247 an, ah = nh
247 an, ah = nh
248 bn, bh = globaltags[k]
248 bn, bh = globaltags[k]
249 if (bn != an and an in bh and
249 if (bn != an and an in bh and
250 (bn not in ah or len(bh) > len(ah))):
250 (bn not in ah or len(bh) > len(ah))):
251 an = bn
251 an = bn
252 ah.extend([n for n in bh if n not in ah])
252 ah.extend([n for n in bh if n not in ah])
253 globaltags[k] = an, ah
253 globaltags[k] = an, ah
254 tagtypes[k] = tagtype
254 tagtypes[k] = tagtype
255
255
256 # read the tags file from each head, ending with the tip
256 # read the tags file from each head, ending with the tip
257 f = None
257 f = None
258 for rev, node, fnode in self._hgtagsnodes():
258 for rev, node, fnode in self._hgtagsnodes():
259 f = (f and f.filectx(fnode) or
259 f = (f and f.filectx(fnode) or
260 self.filectx('.hgtags', fileid=fnode))
260 self.filectx('.hgtags', fileid=fnode))
261 readtags(f.data().splitlines(), f, "global")
261 readtags(f.data().splitlines(), f, "global")
262
262
263 try:
263 try:
264 data = util.fromlocal(self.opener("localtags").read())
264 data = util.fromlocal(self.opener("localtags").read())
265 # localtags are stored in the local character set
265 # localtags are stored in the local character set
266 # while the internal tag table is stored in UTF-8
266 # while the internal tag table is stored in UTF-8
267 readtags(data.splitlines(), "localtags", "local")
267 readtags(data.splitlines(), "localtags", "local")
268 except IOError:
268 except IOError:
269 pass
269 pass
270
270
271 self.tagscache = {}
271 self.tagscache = {}
272 self._tagstypecache = {}
272 self._tagstypecache = {}
273 for k,nh in globaltags.items():
273 for k,nh in globaltags.items():
274 n = nh[0]
274 n = nh[0]
275 if n != nullid:
275 if n != nullid:
276 self.tagscache[k] = n
276 self.tagscache[k] = n
277 self._tagstypecache[k] = tagtypes[k]
277 self._tagstypecache[k] = tagtypes[k]
278 self.tagscache['tip'] = self.changelog.tip()
278 self.tagscache['tip'] = self.changelog.tip()
279
279
280 return self.tagscache
280 return self.tagscache
281
281
282 def tagtype(self, tagname):
282 def tagtype(self, tagname):
283 '''
283 '''
284 return the type of the given tag. result can be:
284 return the type of the given tag. result can be:
285
285
286 'local' : a local tag
286 'local' : a local tag
287 'global' : a global tag
287 'global' : a global tag
288 None : tag does not exist
288 None : tag does not exist
289 '''
289 '''
290
290
291 self.tags()
291 self.tags()
292
292
293 return self._tagstypecache.get(tagname)
293 return self._tagstypecache.get(tagname)
294
294
295 def _hgtagsnodes(self):
295 def _hgtagsnodes(self):
296 heads = self.heads()
296 heads = self.heads()
297 heads.reverse()
297 heads.reverse()
298 last = {}
298 last = {}
299 ret = []
299 ret = []
300 for node in heads:
300 for node in heads:
301 c = self.changectx(node)
301 c = self.changectx(node)
302 rev = c.rev()
302 rev = c.rev()
303 try:
303 try:
304 fnode = c.filenode('.hgtags')
304 fnode = c.filenode('.hgtags')
305 except revlog.LookupError:
305 except revlog.LookupError:
306 continue
306 continue
307 ret.append((rev, node, fnode))
307 ret.append((rev, node, fnode))
308 if fnode in last:
308 if fnode in last:
309 ret[last[fnode]] = None
309 ret[last[fnode]] = None
310 last[fnode] = len(ret) - 1
310 last[fnode] = len(ret) - 1
311 return [item for item in ret if item]
311 return [item for item in ret if item]
312
312
313 def tagslist(self):
313 def tagslist(self):
314 '''return a list of tags ordered by revision'''
314 '''return a list of tags ordered by revision'''
315 l = []
315 l = []
316 for t, n in self.tags().items():
316 for t, n in self.tags().items():
317 try:
317 try:
318 r = self.changelog.rev(n)
318 r = self.changelog.rev(n)
319 except:
319 except:
320 r = -2 # sort to the beginning of the list if unknown
320 r = -2 # sort to the beginning of the list if unknown
321 l.append((r, t, n))
321 l.append((r, t, n))
322 l.sort()
322 l.sort()
323 return [(t, n) for r, t, n in l]
323 return [(t, n) for r, t, n in l]
324
324
325 def nodetags(self, node):
325 def nodetags(self, node):
326 '''return the tags associated with a node'''
326 '''return the tags associated with a node'''
327 if not self.nodetagscache:
327 if not self.nodetagscache:
328 self.nodetagscache = {}
328 self.nodetagscache = {}
329 for t, n in self.tags().items():
329 for t, n in self.tags().items():
330 self.nodetagscache.setdefault(n, []).append(t)
330 self.nodetagscache.setdefault(n, []).append(t)
331 return self.nodetagscache.get(node, [])
331 return self.nodetagscache.get(node, [])
332
332
333 def _branchtags(self):
333 def _branchtags(self):
334 partial, last, lrev = self._readbranchcache()
334 partial, last, lrev = self._readbranchcache()
335
335
336 tiprev = self.changelog.count() - 1
336 tiprev = self.changelog.count() - 1
337 if lrev != tiprev:
337 if lrev != tiprev:
338 self._updatebranchcache(partial, lrev+1, tiprev+1)
338 self._updatebranchcache(partial, lrev+1, tiprev+1)
339 self._writebranchcache(partial, self.changelog.tip(), tiprev)
339 self._writebranchcache(partial, self.changelog.tip(), tiprev)
340
340
341 return partial
341 return partial
342
342
343 def branchtags(self):
343 def branchtags(self):
344 if self.branchcache is not None:
344 if self.branchcache is not None:
345 return self.branchcache
345 return self.branchcache
346
346
347 self.branchcache = {} # avoid recursion in changectx
347 self.branchcache = {} # avoid recursion in changectx
348 partial = self._branchtags()
348 partial = self._branchtags()
349
349
350 # the branch cache is stored on disk as UTF-8, but in the local
350 # the branch cache is stored on disk as UTF-8, but in the local
351 # charset internally
351 # charset internally
352 for k, v in partial.items():
352 for k, v in partial.items():
353 self.branchcache[util.tolocal(k)] = v
353 self.branchcache[util.tolocal(k)] = v
354 return self.branchcache
354 return self.branchcache
355
355
356 def _readbranchcache(self):
356 def _readbranchcache(self):
357 partial = {}
357 partial = {}
358 try:
358 try:
359 f = self.opener("branch.cache")
359 f = self.opener("branch.cache")
360 lines = f.read().split('\n')
360 lines = f.read().split('\n')
361 f.close()
361 f.close()
362 except (IOError, OSError):
362 except (IOError, OSError):
363 return {}, nullid, nullrev
363 return {}, nullid, nullrev
364
364
365 try:
365 try:
366 last, lrev = lines.pop(0).split(" ", 1)
366 last, lrev = lines.pop(0).split(" ", 1)
367 last, lrev = bin(last), int(lrev)
367 last, lrev = bin(last), int(lrev)
368 if not (lrev < self.changelog.count() and
368 if not (lrev < self.changelog.count() and
369 self.changelog.node(lrev) == last): # sanity check
369 self.changelog.node(lrev) == last): # sanity check
370 # invalidate the cache
370 # invalidate the cache
371 raise ValueError('Invalid branch cache: unknown tip')
371 raise ValueError('Invalid branch cache: unknown tip')
372 for l in lines:
372 for l in lines:
373 if not l: continue
373 if not l: continue
374 node, label = l.split(" ", 1)
374 node, label = l.split(" ", 1)
375 partial[label.strip()] = bin(node)
375 partial[label.strip()] = bin(node)
376 except (KeyboardInterrupt, util.SignalInterrupt):
376 except (KeyboardInterrupt, util.SignalInterrupt):
377 raise
377 raise
378 except Exception, inst:
378 except Exception, inst:
379 if self.ui.debugflag:
379 if self.ui.debugflag:
380 self.ui.warn(str(inst), '\n')
380 self.ui.warn(str(inst), '\n')
381 partial, last, lrev = {}, nullid, nullrev
381 partial, last, lrev = {}, nullid, nullrev
382 return partial, last, lrev
382 return partial, last, lrev
383
383
384 def _writebranchcache(self, branches, tip, tiprev):
384 def _writebranchcache(self, branches, tip, tiprev):
385 try:
385 try:
386 f = self.opener("branch.cache", "w", atomictemp=True)
386 f = self.opener("branch.cache", "w", atomictemp=True)
387 f.write("%s %s\n" % (hex(tip), tiprev))
387 f.write("%s %s\n" % (hex(tip), tiprev))
388 for label, node in branches.iteritems():
388 for label, node in branches.iteritems():
389 f.write("%s %s\n" % (hex(node), label))
389 f.write("%s %s\n" % (hex(node), label))
390 f.rename()
390 f.rename()
391 except (IOError, OSError):
391 except (IOError, OSError):
392 pass
392 pass
393
393
394 def _updatebranchcache(self, partial, start, end):
394 def _updatebranchcache(self, partial, start, end):
395 for r in xrange(start, end):
395 for r in xrange(start, end):
396 c = self.changectx(r)
396 c = self.changectx(r)
397 b = c.branch()
397 b = c.branch()
398 partial[b] = c.node()
398 partial[b] = c.node()
399
399
400 def lookup(self, key):
400 def lookup(self, key):
401 if key == '.':
401 if key == '.':
402 key, second = self.dirstate.parents()
402 key, second = self.dirstate.parents()
403 if key == nullid:
403 if key == nullid:
404 raise repo.RepoError(_("no revision checked out"))
404 raise repo.RepoError(_("no revision checked out"))
405 if second != nullid:
405 if second != nullid:
406 self.ui.warn(_("warning: working directory has two parents, "
406 self.ui.warn(_("warning: working directory has two parents, "
407 "tag '.' uses the first\n"))
407 "tag '.' uses the first\n"))
408 elif key == 'null':
408 elif key == 'null':
409 return nullid
409 return nullid
410 n = self.changelog._match(key)
410 n = self.changelog._match(key)
411 if n:
411 if n:
412 return n
412 return n
413 if key in self.tags():
413 if key in self.tags():
414 return self.tags()[key]
414 return self.tags()[key]
415 if key in self.branchtags():
415 if key in self.branchtags():
416 return self.branchtags()[key]
416 return self.branchtags()[key]
417 n = self.changelog._partialmatch(key)
417 n = self.changelog._partialmatch(key)
418 if n:
418 if n:
419 return n
419 return n
420 try:
420 try:
421 if len(key) == 20:
421 if len(key) == 20:
422 key = hex(key)
422 key = hex(key)
423 except:
423 except:
424 pass
424 pass
425 raise repo.RepoError(_("unknown revision '%s'") % key)
425 raise repo.RepoError(_("unknown revision '%s'") % key)
426
426
427 def dev(self):
427 def dev(self):
428 return os.lstat(self.path).st_dev
428 return os.lstat(self.path).st_dev
429
429
430 def local(self):
430 def local(self):
431 return True
431 return True
432
432
433 def join(self, f):
433 def join(self, f):
434 return os.path.join(self.path, f)
434 return os.path.join(self.path, f)
435
435
436 def sjoin(self, f):
436 def sjoin(self, f):
437 f = self.encodefn(f)
437 f = self.encodefn(f)
438 return os.path.join(self.spath, f)
438 return os.path.join(self.spath, f)
439
439
440 def wjoin(self, f):
440 def wjoin(self, f):
441 return os.path.join(self.root, f)
441 return os.path.join(self.root, f)
442
442
443 def file(self, f):
443 def file(self, f):
444 if f[0] == '/':
444 if f[0] == '/':
445 f = f[1:]
445 f = f[1:]
446 return filelog.filelog(self.sopener, f)
446 return filelog.filelog(self.sopener, f)
447
447
448 def changectx(self, changeid=None):
448 def changectx(self, changeid=None):
449 return context.changectx(self, changeid)
449 return context.changectx(self, changeid)
450
450
451 def workingctx(self):
451 def workingctx(self):
452 return context.workingctx(self)
452 return context.workingctx(self)
453
453
454 def parents(self, changeid=None):
454 def parents(self, changeid=None):
455 '''
455 '''
456 get list of changectxs for parents of changeid or working directory
456 get list of changectxs for parents of changeid or working directory
457 '''
457 '''
458 if changeid is None:
458 if changeid is None:
459 pl = self.dirstate.parents()
459 pl = self.dirstate.parents()
460 else:
460 else:
461 n = self.changelog.lookup(changeid)
461 n = self.changelog.lookup(changeid)
462 pl = self.changelog.parents(n)
462 pl = self.changelog.parents(n)
463 if pl[1] == nullid:
463 if pl[1] == nullid:
464 return [self.changectx(pl[0])]
464 return [self.changectx(pl[0])]
465 return [self.changectx(pl[0]), self.changectx(pl[1])]
465 return [self.changectx(pl[0]), self.changectx(pl[1])]
466
466
467 def filectx(self, path, changeid=None, fileid=None):
467 def filectx(self, path, changeid=None, fileid=None):
468 """changeid can be a changeset revision, node, or tag.
468 """changeid can be a changeset revision, node, or tag.
469 fileid can be a file revision or node."""
469 fileid can be a file revision or node."""
470 return context.filectx(self, path, changeid, fileid)
470 return context.filectx(self, path, changeid, fileid)
471
471
472 def getcwd(self):
472 def getcwd(self):
473 return self.dirstate.getcwd()
473 return self.dirstate.getcwd()
474
474
475 def pathto(self, f, cwd=None):
475 def pathto(self, f, cwd=None):
476 return self.dirstate.pathto(f, cwd)
476 return self.dirstate.pathto(f, cwd)
477
477
478 def wfile(self, f, mode='r'):
478 def wfile(self, f, mode='r'):
479 return self.wopener(f, mode)
479 return self.wopener(f, mode)
480
480
481 def _link(self, f):
481 def _link(self, f):
482 return os.path.islink(self.wjoin(f))
482 return os.path.islink(self.wjoin(f))
483
483
484 def _filter(self, filter, filename, data):
484 def _filter(self, filter, filename, data):
485 if filter not in self.filterpats:
485 if filter not in self.filterpats:
486 l = []
486 l = []
487 for pat, cmd in self.ui.configitems(filter):
487 for pat, cmd in self.ui.configitems(filter):
488 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 mf = util.matcher(self.root, "", [pat], [], [])[1]
489 fn = None
489 fn = None
490 for name, filterfn in self._datafilters.iteritems():
490 for name, filterfn in self._datafilters.iteritems():
491 if cmd.startswith(name):
491 if cmd.startswith(name):
492 fn = filterfn
492 fn = filterfn
493 break
493 break
494 if not fn:
494 if not fn:
495 fn = lambda s, c: util.filter(s, c)
495 fn = lambda s, c, **kwargs: util.filter(s, c)
496 # Wrap old filters not supporting keyword arguments
497 if not inspect.getargspec(fn)[2]:
498 oldfn = fn
499 fn = lambda s, c, **kwargs: oldfn(s, c)
496 l.append((mf, fn, cmd))
500 l.append((mf, fn, cmd))
497 self.filterpats[filter] = l
501 self.filterpats[filter] = l
498
502
499 for mf, fn, cmd in self.filterpats[filter]:
503 for mf, fn, cmd in self.filterpats[filter]:
500 if mf(filename):
504 if mf(filename):
501 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
505 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
502 data = fn(data, cmd)
506 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
503 break
507 break
504
508
505 return data
509 return data
506
510
507 def adddatafilter(self, name, filter):
511 def adddatafilter(self, name, filter):
508 self._datafilters[name] = filter
512 self._datafilters[name] = filter
509
513
510 def wread(self, filename):
514 def wread(self, filename):
511 if self._link(filename):
515 if self._link(filename):
512 data = os.readlink(self.wjoin(filename))
516 data = os.readlink(self.wjoin(filename))
513 else:
517 else:
514 data = self.wopener(filename, 'r').read()
518 data = self.wopener(filename, 'r').read()
515 return self._filter("encode", filename, data)
519 return self._filter("encode", filename, data)
516
520
517 def wwrite(self, filename, data, flags):
521 def wwrite(self, filename, data, flags):
518 data = self._filter("decode", filename, data)
522 data = self._filter("decode", filename, data)
519 try:
523 try:
520 os.unlink(self.wjoin(filename))
524 os.unlink(self.wjoin(filename))
521 except OSError:
525 except OSError:
522 pass
526 pass
523 self.wopener(filename, 'w').write(data)
527 self.wopener(filename, 'w').write(data)
524 util.set_flags(self.wjoin(filename), flags)
528 util.set_flags(self.wjoin(filename), flags)
525
529
526 def wwritedata(self, filename, data):
530 def wwritedata(self, filename, data):
527 return self._filter("decode", filename, data)
531 return self._filter("decode", filename, data)
528
532
529 def transaction(self):
533 def transaction(self):
530 if self._transref and self._transref():
534 if self._transref and self._transref():
531 return self._transref().nest()
535 return self._transref().nest()
532
536
533 # abort here if the journal already exists
537 # abort here if the journal already exists
534 if os.path.exists(self.sjoin("journal")):
538 if os.path.exists(self.sjoin("journal")):
535 raise repo.RepoError(_("journal already exists - run hg recover"))
539 raise repo.RepoError(_("journal already exists - run hg recover"))
536
540
537 # save dirstate for rollback
541 # save dirstate for rollback
538 try:
542 try:
539 ds = self.opener("dirstate").read()
543 ds = self.opener("dirstate").read()
540 except IOError:
544 except IOError:
541 ds = ""
545 ds = ""
542 self.opener("journal.dirstate", "w").write(ds)
546 self.opener("journal.dirstate", "w").write(ds)
543 self.opener("journal.branch", "w").write(self.dirstate.branch())
547 self.opener("journal.branch", "w").write(self.dirstate.branch())
544
548
545 renames = [(self.sjoin("journal"), self.sjoin("undo")),
549 renames = [(self.sjoin("journal"), self.sjoin("undo")),
546 (self.join("journal.dirstate"), self.join("undo.dirstate")),
550 (self.join("journal.dirstate"), self.join("undo.dirstate")),
547 (self.join("journal.branch"), self.join("undo.branch"))]
551 (self.join("journal.branch"), self.join("undo.branch"))]
548 tr = transaction.transaction(self.ui.warn, self.sopener,
552 tr = transaction.transaction(self.ui.warn, self.sopener,
549 self.sjoin("journal"),
553 self.sjoin("journal"),
550 aftertrans(renames))
554 aftertrans(renames))
551 self._transref = weakref.ref(tr)
555 self._transref = weakref.ref(tr)
552 return tr
556 return tr
553
557
554 def recover(self):
558 def recover(self):
555 l = self.lock()
559 l = self.lock()
556 try:
560 try:
557 if os.path.exists(self.sjoin("journal")):
561 if os.path.exists(self.sjoin("journal")):
558 self.ui.status(_("rolling back interrupted transaction\n"))
562 self.ui.status(_("rolling back interrupted transaction\n"))
559 transaction.rollback(self.sopener, self.sjoin("journal"))
563 transaction.rollback(self.sopener, self.sjoin("journal"))
560 self.invalidate()
564 self.invalidate()
561 return True
565 return True
562 else:
566 else:
563 self.ui.warn(_("no interrupted transaction available\n"))
567 self.ui.warn(_("no interrupted transaction available\n"))
564 return False
568 return False
565 finally:
569 finally:
566 del l
570 del l
567
571
568 def rollback(self):
572 def rollback(self):
569 wlock = lock = None
573 wlock = lock = None
570 try:
574 try:
571 wlock = self.wlock()
575 wlock = self.wlock()
572 lock = self.lock()
576 lock = self.lock()
573 if os.path.exists(self.sjoin("undo")):
577 if os.path.exists(self.sjoin("undo")):
574 self.ui.status(_("rolling back last transaction\n"))
578 self.ui.status(_("rolling back last transaction\n"))
575 transaction.rollback(self.sopener, self.sjoin("undo"))
579 transaction.rollback(self.sopener, self.sjoin("undo"))
576 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
580 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
577 branch = self.opener("undo.branch").read()
581 branch = self.opener("undo.branch").read()
578 self.dirstate.setbranch(branch)
582 self.dirstate.setbranch(branch)
579 self.invalidate()
583 self.invalidate()
580 self.dirstate.invalidate()
584 self.dirstate.invalidate()
581 else:
585 else:
582 self.ui.warn(_("no rollback information available\n"))
586 self.ui.warn(_("no rollback information available\n"))
583 finally:
587 finally:
584 del lock, wlock
588 del lock, wlock
585
589
586 def invalidate(self):
590 def invalidate(self):
587 for a in "changelog manifest".split():
591 for a in "changelog manifest".split():
588 if hasattr(self, a):
592 if hasattr(self, a):
589 self.__delattr__(a)
593 self.__delattr__(a)
590 self.tagscache = None
594 self.tagscache = None
591 self._tagstypecache = None
595 self._tagstypecache = None
592 self.nodetagscache = None
596 self.nodetagscache = None
593
597
594 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
598 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
595 try:
599 try:
596 l = lock.lock(lockname, 0, releasefn, desc=desc)
600 l = lock.lock(lockname, 0, releasefn, desc=desc)
597 except lock.LockHeld, inst:
601 except lock.LockHeld, inst:
598 if not wait:
602 if not wait:
599 raise
603 raise
600 self.ui.warn(_("waiting for lock on %s held by %r\n") %
604 self.ui.warn(_("waiting for lock on %s held by %r\n") %
601 (desc, inst.locker))
605 (desc, inst.locker))
602 # default to 600 seconds timeout
606 # default to 600 seconds timeout
603 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
607 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
604 releasefn, desc=desc)
608 releasefn, desc=desc)
605 if acquirefn:
609 if acquirefn:
606 acquirefn()
610 acquirefn()
607 return l
611 return l
608
612
609 def lock(self, wait=True):
613 def lock(self, wait=True):
610 if self._lockref and self._lockref():
614 if self._lockref and self._lockref():
611 return self._lockref()
615 return self._lockref()
612
616
613 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
617 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
614 _('repository %s') % self.origroot)
618 _('repository %s') % self.origroot)
615 self._lockref = weakref.ref(l)
619 self._lockref = weakref.ref(l)
616 return l
620 return l
617
621
618 def wlock(self, wait=True):
622 def wlock(self, wait=True):
619 if self._wlockref and self._wlockref():
623 if self._wlockref and self._wlockref():
620 return self._wlockref()
624 return self._wlockref()
621
625
622 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
626 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
623 self.dirstate.invalidate, _('working directory of %s') %
627 self.dirstate.invalidate, _('working directory of %s') %
624 self.origroot)
628 self.origroot)
625 self._wlockref = weakref.ref(l)
629 self._wlockref = weakref.ref(l)
626 return l
630 return l
627
631
628 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
632 def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
629 """
633 """
630 commit an individual file as part of a larger transaction
634 commit an individual file as part of a larger transaction
631 """
635 """
632
636
633 t = self.wread(fn)
637 t = self.wread(fn)
634 fl = self.file(fn)
638 fl = self.file(fn)
635 fp1 = manifest1.get(fn, nullid)
639 fp1 = manifest1.get(fn, nullid)
636 fp2 = manifest2.get(fn, nullid)
640 fp2 = manifest2.get(fn, nullid)
637
641
638 meta = {}
642 meta = {}
639 cp = self.dirstate.copied(fn)
643 cp = self.dirstate.copied(fn)
640 if cp:
644 if cp:
641 # Mark the new revision of this file as a copy of another
645 # Mark the new revision of this file as a copy of another
642 # file. This copy data will effectively act as a parent
646 # file. This copy data will effectively act as a parent
643 # of this new revision. If this is a merge, the first
647 # of this new revision. If this is a merge, the first
644 # parent will be the nullid (meaning "look up the copy data")
648 # parent will be the nullid (meaning "look up the copy data")
645 # and the second one will be the other parent. For example:
649 # and the second one will be the other parent. For example:
646 #
650 #
647 # 0 --- 1 --- 3 rev1 changes file foo
651 # 0 --- 1 --- 3 rev1 changes file foo
648 # \ / rev2 renames foo to bar and changes it
652 # \ / rev2 renames foo to bar and changes it
649 # \- 2 -/ rev3 should have bar with all changes and
653 # \- 2 -/ rev3 should have bar with all changes and
650 # should record that bar descends from
654 # should record that bar descends from
651 # bar in rev2 and foo in rev1
655 # bar in rev2 and foo in rev1
652 #
656 #
653 # this allows this merge to succeed:
657 # this allows this merge to succeed:
654 #
658 #
655 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
659 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
656 # \ / merging rev3 and rev4 should use bar@rev2
660 # \ / merging rev3 and rev4 should use bar@rev2
657 # \- 2 --- 4 as the merge base
661 # \- 2 --- 4 as the merge base
658 #
662 #
659 meta["copy"] = cp
663 meta["copy"] = cp
660 if not manifest2: # not a branch merge
664 if not manifest2: # not a branch merge
661 meta["copyrev"] = hex(manifest1.get(cp, nullid))
665 meta["copyrev"] = hex(manifest1.get(cp, nullid))
662 fp2 = nullid
666 fp2 = nullid
663 elif fp2 != nullid: # copied on remote side
667 elif fp2 != nullid: # copied on remote side
664 meta["copyrev"] = hex(manifest1.get(cp, nullid))
668 meta["copyrev"] = hex(manifest1.get(cp, nullid))
665 elif fp1 != nullid: # copied on local side, reversed
669 elif fp1 != nullid: # copied on local side, reversed
666 meta["copyrev"] = hex(manifest2.get(cp))
670 meta["copyrev"] = hex(manifest2.get(cp))
667 fp2 = fp1
671 fp2 = fp1
668 elif cp in manifest2: # directory rename on local side
672 elif cp in manifest2: # directory rename on local side
669 meta["copyrev"] = hex(manifest2[cp])
673 meta["copyrev"] = hex(manifest2[cp])
670 else: # directory rename on remote side
674 else: # directory rename on remote side
671 meta["copyrev"] = hex(manifest1.get(cp, nullid))
675 meta["copyrev"] = hex(manifest1.get(cp, nullid))
672 self.ui.debug(_(" %s: copy %s:%s\n") %
676 self.ui.debug(_(" %s: copy %s:%s\n") %
673 (fn, cp, meta["copyrev"]))
677 (fn, cp, meta["copyrev"]))
674 fp1 = nullid
678 fp1 = nullid
675 elif fp2 != nullid:
679 elif fp2 != nullid:
676 # is one parent an ancestor of the other?
680 # is one parent an ancestor of the other?
677 fpa = fl.ancestor(fp1, fp2)
681 fpa = fl.ancestor(fp1, fp2)
678 if fpa == fp1:
682 if fpa == fp1:
679 fp1, fp2 = fp2, nullid
683 fp1, fp2 = fp2, nullid
680 elif fpa == fp2:
684 elif fpa == fp2:
681 fp2 = nullid
685 fp2 = nullid
682
686
683 # is the file unmodified from the parent? report existing entry
687 # is the file unmodified from the parent? report existing entry
684 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
688 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
685 return fp1
689 return fp1
686
690
687 changelist.append(fn)
691 changelist.append(fn)
688 return fl.add(t, meta, tr, linkrev, fp1, fp2)
692 return fl.add(t, meta, tr, linkrev, fp1, fp2)
689
693
690 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
694 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
691 if p1 is None:
695 if p1 is None:
692 p1, p2 = self.dirstate.parents()
696 p1, p2 = self.dirstate.parents()
693 return self.commit(files=files, text=text, user=user, date=date,
697 return self.commit(files=files, text=text, user=user, date=date,
694 p1=p1, p2=p2, extra=extra, empty_ok=True)
698 p1=p1, p2=p2, extra=extra, empty_ok=True)
695
699
696 def commit(self, files=None, text="", user=None, date=None,
700 def commit(self, files=None, text="", user=None, date=None,
697 match=util.always, force=False, force_editor=False,
701 match=util.always, force=False, force_editor=False,
698 p1=None, p2=None, extra={}, empty_ok=False):
702 p1=None, p2=None, extra={}, empty_ok=False):
699 wlock = lock = tr = None
703 wlock = lock = tr = None
700 valid = 0 # don't save the dirstate if this isn't set
704 valid = 0 # don't save the dirstate if this isn't set
701 if files:
705 if files:
702 files = util.unique(files)
706 files = util.unique(files)
703 try:
707 try:
704 commit = []
708 commit = []
705 remove = []
709 remove = []
706 changed = []
710 changed = []
707 use_dirstate = (p1 is None) # not rawcommit
711 use_dirstate = (p1 is None) # not rawcommit
708 extra = extra.copy()
712 extra = extra.copy()
709
713
710 if use_dirstate:
714 if use_dirstate:
711 if files:
715 if files:
712 for f in files:
716 for f in files:
713 s = self.dirstate[f]
717 s = self.dirstate[f]
714 if s in 'nma':
718 if s in 'nma':
715 commit.append(f)
719 commit.append(f)
716 elif s == 'r':
720 elif s == 'r':
717 remove.append(f)
721 remove.append(f)
718 else:
722 else:
719 self.ui.warn(_("%s not tracked!\n") % f)
723 self.ui.warn(_("%s not tracked!\n") % f)
720 else:
724 else:
721 changes = self.status(match=match)[:5]
725 changes = self.status(match=match)[:5]
722 modified, added, removed, deleted, unknown = changes
726 modified, added, removed, deleted, unknown = changes
723 commit = modified + added
727 commit = modified + added
724 remove = removed
728 remove = removed
725 else:
729 else:
726 commit = files
730 commit = files
727
731
728 if use_dirstate:
732 if use_dirstate:
729 p1, p2 = self.dirstate.parents()
733 p1, p2 = self.dirstate.parents()
730 update_dirstate = True
734 update_dirstate = True
731 else:
735 else:
732 p1, p2 = p1, p2 or nullid
736 p1, p2 = p1, p2 or nullid
733 update_dirstate = (self.dirstate.parents()[0] == p1)
737 update_dirstate = (self.dirstate.parents()[0] == p1)
734
738
735 c1 = self.changelog.read(p1)
739 c1 = self.changelog.read(p1)
736 c2 = self.changelog.read(p2)
740 c2 = self.changelog.read(p2)
737 m1 = self.manifest.read(c1[0]).copy()
741 m1 = self.manifest.read(c1[0]).copy()
738 m2 = self.manifest.read(c2[0])
742 m2 = self.manifest.read(c2[0])
739
743
740 if use_dirstate:
744 if use_dirstate:
741 branchname = self.workingctx().branch()
745 branchname = self.workingctx().branch()
742 try:
746 try:
743 branchname = branchname.decode('UTF-8').encode('UTF-8')
747 branchname = branchname.decode('UTF-8').encode('UTF-8')
744 except UnicodeDecodeError:
748 except UnicodeDecodeError:
745 raise util.Abort(_('branch name not in UTF-8!'))
749 raise util.Abort(_('branch name not in UTF-8!'))
746 else:
750 else:
747 branchname = ""
751 branchname = ""
748
752
749 if use_dirstate:
753 if use_dirstate:
750 oldname = c1[5].get("branch") # stored in UTF-8
754 oldname = c1[5].get("branch") # stored in UTF-8
751 if (not commit and not remove and not force and p2 == nullid
755 if (not commit and not remove and not force and p2 == nullid
752 and branchname == oldname):
756 and branchname == oldname):
753 self.ui.status(_("nothing changed\n"))
757 self.ui.status(_("nothing changed\n"))
754 return None
758 return None
755
759
756 xp1 = hex(p1)
760 xp1 = hex(p1)
757 if p2 == nullid: xp2 = ''
761 if p2 == nullid: xp2 = ''
758 else: xp2 = hex(p2)
762 else: xp2 = hex(p2)
759
763
760 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
764 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
761
765
762 wlock = self.wlock()
766 wlock = self.wlock()
763 lock = self.lock()
767 lock = self.lock()
764 tr = self.transaction()
768 tr = self.transaction()
765 trp = weakref.proxy(tr)
769 trp = weakref.proxy(tr)
766
770
767 # check in files
771 # check in files
768 new = {}
772 new = {}
769 linkrev = self.changelog.count()
773 linkrev = self.changelog.count()
770 commit.sort()
774 commit.sort()
771 is_exec = util.execfunc(self.root, m1.execf)
775 is_exec = util.execfunc(self.root, m1.execf)
772 is_link = util.linkfunc(self.root, m1.linkf)
776 is_link = util.linkfunc(self.root, m1.linkf)
773 for f in commit:
777 for f in commit:
774 self.ui.note(f + "\n")
778 self.ui.note(f + "\n")
775 try:
779 try:
776 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
780 new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
777 new_exec = is_exec(f)
781 new_exec = is_exec(f)
778 new_link = is_link(f)
782 new_link = is_link(f)
779 if ((not changed or changed[-1] != f) and
783 if ((not changed or changed[-1] != f) and
780 m2.get(f) != new[f]):
784 m2.get(f) != new[f]):
781 # mention the file in the changelog if some
785 # mention the file in the changelog if some
782 # flag changed, even if there was no content
786 # flag changed, even if there was no content
783 # change.
787 # change.
784 old_exec = m1.execf(f)
788 old_exec = m1.execf(f)
785 old_link = m1.linkf(f)
789 old_link = m1.linkf(f)
786 if old_exec != new_exec or old_link != new_link:
790 if old_exec != new_exec or old_link != new_link:
787 changed.append(f)
791 changed.append(f)
788 m1.set(f, new_exec, new_link)
792 m1.set(f, new_exec, new_link)
789 if use_dirstate:
793 if use_dirstate:
790 self.dirstate.normal(f)
794 self.dirstate.normal(f)
791
795
792 except (OSError, IOError):
796 except (OSError, IOError):
793 if use_dirstate:
797 if use_dirstate:
794 self.ui.warn(_("trouble committing %s!\n") % f)
798 self.ui.warn(_("trouble committing %s!\n") % f)
795 raise
799 raise
796 else:
800 else:
797 remove.append(f)
801 remove.append(f)
798
802
799 # update manifest
803 # update manifest
800 m1.update(new)
804 m1.update(new)
801 remove.sort()
805 remove.sort()
802 removed = []
806 removed = []
803
807
804 for f in remove:
808 for f in remove:
805 if f in m1:
809 if f in m1:
806 del m1[f]
810 del m1[f]
807 removed.append(f)
811 removed.append(f)
808 elif f in m2:
812 elif f in m2:
809 removed.append(f)
813 removed.append(f)
810 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
814 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
811 (new, removed))
815 (new, removed))
812
816
813 # add changeset
817 # add changeset
814 new = new.keys()
818 new = new.keys()
815 new.sort()
819 new.sort()
816
820
817 user = user or self.ui.username()
821 user = user or self.ui.username()
818 if (not empty_ok and not text) or force_editor:
822 if (not empty_ok and not text) or force_editor:
819 edittext = []
823 edittext = []
820 if text:
824 if text:
821 edittext.append(text)
825 edittext.append(text)
822 edittext.append("")
826 edittext.append("")
823 edittext.append(_("HG: Enter commit message."
827 edittext.append(_("HG: Enter commit message."
824 " Lines beginning with 'HG:' are removed."))
828 " Lines beginning with 'HG:' are removed."))
825 edittext.append("HG: --")
829 edittext.append("HG: --")
826 edittext.append("HG: user: %s" % user)
830 edittext.append("HG: user: %s" % user)
827 if p2 != nullid:
831 if p2 != nullid:
828 edittext.append("HG: branch merge")
832 edittext.append("HG: branch merge")
829 if branchname:
833 if branchname:
830 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
834 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
831 edittext.extend(["HG: changed %s" % f for f in changed])
835 edittext.extend(["HG: changed %s" % f for f in changed])
832 edittext.extend(["HG: removed %s" % f for f in removed])
836 edittext.extend(["HG: removed %s" % f for f in removed])
833 if not changed and not remove:
837 if not changed and not remove:
834 edittext.append("HG: no files changed")
838 edittext.append("HG: no files changed")
835 edittext.append("")
839 edittext.append("")
836 # run editor in the repository root
840 # run editor in the repository root
837 olddir = os.getcwd()
841 olddir = os.getcwd()
838 os.chdir(self.root)
842 os.chdir(self.root)
839 text = self.ui.edit("\n".join(edittext), user)
843 text = self.ui.edit("\n".join(edittext), user)
840 os.chdir(olddir)
844 os.chdir(olddir)
841
845
842 if branchname:
846 if branchname:
843 extra["branch"] = branchname
847 extra["branch"] = branchname
844
848
845 if use_dirstate:
849 if use_dirstate:
846 lines = [line.rstrip() for line in text.rstrip().splitlines()]
850 lines = [line.rstrip() for line in text.rstrip().splitlines()]
847 while lines and not lines[0]:
851 while lines and not lines[0]:
848 del lines[0]
852 del lines[0]
849 if not lines:
853 if not lines:
850 raise util.Abort(_("empty commit message"))
854 raise util.Abort(_("empty commit message"))
851 text = '\n'.join(lines)
855 text = '\n'.join(lines)
852
856
853 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
857 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
854 user, date, extra)
858 user, date, extra)
855 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
859 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
856 parent2=xp2)
860 parent2=xp2)
857 tr.close()
861 tr.close()
858
862
859 if self.branchcache and "branch" in extra:
863 if self.branchcache and "branch" in extra:
860 self.branchcache[util.tolocal(extra["branch"])] = n
864 self.branchcache[util.tolocal(extra["branch"])] = n
861
865
862 if use_dirstate or update_dirstate:
866 if use_dirstate or update_dirstate:
863 self.dirstate.setparents(n)
867 self.dirstate.setparents(n)
864 if use_dirstate:
868 if use_dirstate:
865 for f in removed:
869 for f in removed:
866 self.dirstate.forget(f)
870 self.dirstate.forget(f)
867 valid = 1 # our dirstate updates are complete
871 valid = 1 # our dirstate updates are complete
868
872
869 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
873 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
870 return n
874 return n
871 finally:
875 finally:
872 if not valid: # don't save our updated dirstate
876 if not valid: # don't save our updated dirstate
873 self.dirstate.invalidate()
877 self.dirstate.invalidate()
874 del tr, lock, wlock
878 del tr, lock, wlock
875
879
876 def walk(self, node=None, files=[], match=util.always, badmatch=None):
880 def walk(self, node=None, files=[], match=util.always, badmatch=None):
877 '''
881 '''
878 walk recursively through the directory tree or a given
882 walk recursively through the directory tree or a given
879 changeset, finding all files matched by the match
883 changeset, finding all files matched by the match
880 function
884 function
881
885
882 results are yielded in a tuple (src, filename), where src
886 results are yielded in a tuple (src, filename), where src
883 is one of:
887 is one of:
884 'f' the file was found in the directory tree
888 'f' the file was found in the directory tree
885 'm' the file was only in the dirstate and not in the tree
889 'm' the file was only in the dirstate and not in the tree
886 'b' file was not found and matched badmatch
890 'b' file was not found and matched badmatch
887 '''
891 '''
888
892
889 if node:
893 if node:
890 fdict = dict.fromkeys(files)
894 fdict = dict.fromkeys(files)
891 # for dirstate.walk, files=['.'] means "walk the whole tree".
895 # for dirstate.walk, files=['.'] means "walk the whole tree".
892 # follow that here, too
896 # follow that here, too
893 fdict.pop('.', None)
897 fdict.pop('.', None)
894 mdict = self.manifest.read(self.changelog.read(node)[0])
898 mdict = self.manifest.read(self.changelog.read(node)[0])
895 mfiles = mdict.keys()
899 mfiles = mdict.keys()
896 mfiles.sort()
900 mfiles.sort()
897 for fn in mfiles:
901 for fn in mfiles:
898 for ffn in fdict:
902 for ffn in fdict:
899 # match if the file is the exact name or a directory
903 # match if the file is the exact name or a directory
900 if ffn == fn or fn.startswith("%s/" % ffn):
904 if ffn == fn or fn.startswith("%s/" % ffn):
901 del fdict[ffn]
905 del fdict[ffn]
902 break
906 break
903 if match(fn):
907 if match(fn):
904 yield 'm', fn
908 yield 'm', fn
905 ffiles = fdict.keys()
909 ffiles = fdict.keys()
906 ffiles.sort()
910 ffiles.sort()
907 for fn in ffiles:
911 for fn in ffiles:
908 if badmatch and badmatch(fn):
912 if badmatch and badmatch(fn):
909 if match(fn):
913 if match(fn):
910 yield 'b', fn
914 yield 'b', fn
911 else:
915 else:
912 self.ui.warn(_('%s: No such file in rev %s\n')
916 self.ui.warn(_('%s: No such file in rev %s\n')
913 % (self.pathto(fn), short(node)))
917 % (self.pathto(fn), short(node)))
914 else:
918 else:
915 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
919 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
916 yield src, fn
920 yield src, fn
917
921
918 def status(self, node1=None, node2=None, files=[], match=util.always,
922 def status(self, node1=None, node2=None, files=[], match=util.always,
919 list_ignored=False, list_clean=False):
923 list_ignored=False, list_clean=False):
920 """return status of files between two nodes or node and working directory
924 """return status of files between two nodes or node and working directory
921
925
922 If node1 is None, use the first dirstate parent instead.
926 If node1 is None, use the first dirstate parent instead.
923 If node2 is None, compare node1 with working directory.
927 If node2 is None, compare node1 with working directory.
924 """
928 """
925
929
926 def fcmp(fn, getnode):
930 def fcmp(fn, getnode):
927 t1 = self.wread(fn)
931 t1 = self.wread(fn)
928 return self.file(fn).cmp(getnode(fn), t1)
932 return self.file(fn).cmp(getnode(fn), t1)
929
933
930 def mfmatches(node):
934 def mfmatches(node):
931 change = self.changelog.read(node)
935 change = self.changelog.read(node)
932 mf = self.manifest.read(change[0]).copy()
936 mf = self.manifest.read(change[0]).copy()
933 for fn in mf.keys():
937 for fn in mf.keys():
934 if not match(fn):
938 if not match(fn):
935 del mf[fn]
939 del mf[fn]
936 return mf
940 return mf
937
941
938 modified, added, removed, deleted, unknown = [], [], [], [], []
942 modified, added, removed, deleted, unknown = [], [], [], [], []
939 ignored, clean = [], []
943 ignored, clean = [], []
940
944
941 compareworking = False
945 compareworking = False
942 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
946 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
943 compareworking = True
947 compareworking = True
944
948
945 if not compareworking:
949 if not compareworking:
946 # read the manifest from node1 before the manifest from node2,
950 # read the manifest from node1 before the manifest from node2,
947 # so that we'll hit the manifest cache if we're going through
951 # so that we'll hit the manifest cache if we're going through
948 # all the revisions in parent->child order.
952 # all the revisions in parent->child order.
949 mf1 = mfmatches(node1)
953 mf1 = mfmatches(node1)
950
954
951 # are we comparing the working directory?
955 # are we comparing the working directory?
952 if not node2:
956 if not node2:
953 (lookup, modified, added, removed, deleted, unknown,
957 (lookup, modified, added, removed, deleted, unknown,
954 ignored, clean) = self.dirstate.status(files, match,
958 ignored, clean) = self.dirstate.status(files, match,
955 list_ignored, list_clean)
959 list_ignored, list_clean)
956
960
957 # are we comparing working dir against its parent?
961 # are we comparing working dir against its parent?
958 if compareworking:
962 if compareworking:
959 if lookup:
963 if lookup:
960 fixup = []
964 fixup = []
961 # do a full compare of any files that might have changed
965 # do a full compare of any files that might have changed
962 ctx = self.changectx()
966 ctx = self.changectx()
963 for f in lookup:
967 for f in lookup:
964 if f not in ctx or ctx[f].cmp(self.wread(f)):
968 if f not in ctx or ctx[f].cmp(self.wread(f)):
965 modified.append(f)
969 modified.append(f)
966 else:
970 else:
967 fixup.append(f)
971 fixup.append(f)
968 if list_clean:
972 if list_clean:
969 clean.append(f)
973 clean.append(f)
970
974
971 # update dirstate for files that are actually clean
975 # update dirstate for files that are actually clean
972 if fixup:
976 if fixup:
973 wlock = None
977 wlock = None
974 try:
978 try:
975 try:
979 try:
976 wlock = self.wlock(False)
980 wlock = self.wlock(False)
977 except lock.LockException:
981 except lock.LockException:
978 pass
982 pass
979 if wlock:
983 if wlock:
980 for f in fixup:
984 for f in fixup:
981 self.dirstate.normal(f)
985 self.dirstate.normal(f)
982 finally:
986 finally:
983 del wlock
987 del wlock
984 else:
988 else:
985 # we are comparing working dir against non-parent
989 # we are comparing working dir against non-parent
986 # generate a pseudo-manifest for the working dir
990 # generate a pseudo-manifest for the working dir
987 # XXX: create it in dirstate.py ?
991 # XXX: create it in dirstate.py ?
988 mf2 = mfmatches(self.dirstate.parents()[0])
992 mf2 = mfmatches(self.dirstate.parents()[0])
989 is_exec = util.execfunc(self.root, mf2.execf)
993 is_exec = util.execfunc(self.root, mf2.execf)
990 is_link = util.linkfunc(self.root, mf2.linkf)
994 is_link = util.linkfunc(self.root, mf2.linkf)
991 for f in lookup + modified + added:
995 for f in lookup + modified + added:
992 mf2[f] = ""
996 mf2[f] = ""
993 mf2.set(f, is_exec(f), is_link(f))
997 mf2.set(f, is_exec(f), is_link(f))
994 for f in removed:
998 for f in removed:
995 if f in mf2:
999 if f in mf2:
996 del mf2[f]
1000 del mf2[f]
997
1001
998 else:
1002 else:
999 # we are comparing two revisions
1003 # we are comparing two revisions
1000 mf2 = mfmatches(node2)
1004 mf2 = mfmatches(node2)
1001
1005
1002 if not compareworking:
1006 if not compareworking:
1003 # flush lists from dirstate before comparing manifests
1007 # flush lists from dirstate before comparing manifests
1004 modified, added, clean = [], [], []
1008 modified, added, clean = [], [], []
1005
1009
1006 # make sure to sort the files so we talk to the disk in a
1010 # make sure to sort the files so we talk to the disk in a
1007 # reasonable order
1011 # reasonable order
1008 mf2keys = mf2.keys()
1012 mf2keys = mf2.keys()
1009 mf2keys.sort()
1013 mf2keys.sort()
1010 getnode = lambda fn: mf1.get(fn, nullid)
1014 getnode = lambda fn: mf1.get(fn, nullid)
1011 for fn in mf2keys:
1015 for fn in mf2keys:
1012 if fn in mf1:
1016 if fn in mf1:
1013 if (mf1.flags(fn) != mf2.flags(fn) or
1017 if (mf1.flags(fn) != mf2.flags(fn) or
1014 (mf1[fn] != mf2[fn] and
1018 (mf1[fn] != mf2[fn] and
1015 (mf2[fn] != "" or fcmp(fn, getnode)))):
1019 (mf2[fn] != "" or fcmp(fn, getnode)))):
1016 modified.append(fn)
1020 modified.append(fn)
1017 elif list_clean:
1021 elif list_clean:
1018 clean.append(fn)
1022 clean.append(fn)
1019 del mf1[fn]
1023 del mf1[fn]
1020 else:
1024 else:
1021 added.append(fn)
1025 added.append(fn)
1022
1026
1023 removed = mf1.keys()
1027 removed = mf1.keys()
1024
1028
1025 # sort and return results:
1029 # sort and return results:
1026 for l in modified, added, removed, deleted, unknown, ignored, clean:
1030 for l in modified, added, removed, deleted, unknown, ignored, clean:
1027 l.sort()
1031 l.sort()
1028 return (modified, added, removed, deleted, unknown, ignored, clean)
1032 return (modified, added, removed, deleted, unknown, ignored, clean)
1029
1033
1030 def add(self, list):
1034 def add(self, list):
1031 wlock = self.wlock()
1035 wlock = self.wlock()
1032 try:
1036 try:
1033 rejected = []
1037 rejected = []
1034 for f in list:
1038 for f in list:
1035 p = self.wjoin(f)
1039 p = self.wjoin(f)
1036 try:
1040 try:
1037 st = os.lstat(p)
1041 st = os.lstat(p)
1038 except:
1042 except:
1039 self.ui.warn(_("%s does not exist!\n") % f)
1043 self.ui.warn(_("%s does not exist!\n") % f)
1040 rejected.append(f)
1044 rejected.append(f)
1041 continue
1045 continue
1042 if st.st_size > 10000000:
1046 if st.st_size > 10000000:
1043 self.ui.warn(_("%s: files over 10MB may cause memory and"
1047 self.ui.warn(_("%s: files over 10MB may cause memory and"
1044 " performance problems\n"
1048 " performance problems\n"
1045 "(use 'hg revert %s' to unadd the file)\n")
1049 "(use 'hg revert %s' to unadd the file)\n")
1046 % (f, f))
1050 % (f, f))
1047 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1051 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1048 self.ui.warn(_("%s not added: only files and symlinks "
1052 self.ui.warn(_("%s not added: only files and symlinks "
1049 "supported currently\n") % f)
1053 "supported currently\n") % f)
1050 rejected.append(p)
1054 rejected.append(p)
1051 elif self.dirstate[f] in 'amn':
1055 elif self.dirstate[f] in 'amn':
1052 self.ui.warn(_("%s already tracked!\n") % f)
1056 self.ui.warn(_("%s already tracked!\n") % f)
1053 elif self.dirstate[f] == 'r':
1057 elif self.dirstate[f] == 'r':
1054 self.dirstate.normallookup(f)
1058 self.dirstate.normallookup(f)
1055 else:
1059 else:
1056 self.dirstate.add(f)
1060 self.dirstate.add(f)
1057 return rejected
1061 return rejected
1058 finally:
1062 finally:
1059 del wlock
1063 del wlock
1060
1064
1061 def forget(self, list):
1065 def forget(self, list):
1062 wlock = self.wlock()
1066 wlock = self.wlock()
1063 try:
1067 try:
1064 for f in list:
1068 for f in list:
1065 if self.dirstate[f] != 'a':
1069 if self.dirstate[f] != 'a':
1066 self.ui.warn(_("%s not added!\n") % f)
1070 self.ui.warn(_("%s not added!\n") % f)
1067 else:
1071 else:
1068 self.dirstate.forget(f)
1072 self.dirstate.forget(f)
1069 finally:
1073 finally:
1070 del wlock
1074 del wlock
1071
1075
1072 def remove(self, list, unlink=False):
1076 def remove(self, list, unlink=False):
1073 wlock = None
1077 wlock = None
1074 try:
1078 try:
1075 if unlink:
1079 if unlink:
1076 for f in list:
1080 for f in list:
1077 try:
1081 try:
1078 util.unlink(self.wjoin(f))
1082 util.unlink(self.wjoin(f))
1079 except OSError, inst:
1083 except OSError, inst:
1080 if inst.errno != errno.ENOENT:
1084 if inst.errno != errno.ENOENT:
1081 raise
1085 raise
1082 wlock = self.wlock()
1086 wlock = self.wlock()
1083 for f in list:
1087 for f in list:
1084 if unlink and os.path.exists(self.wjoin(f)):
1088 if unlink and os.path.exists(self.wjoin(f)):
1085 self.ui.warn(_("%s still exists!\n") % f)
1089 self.ui.warn(_("%s still exists!\n") % f)
1086 elif self.dirstate[f] == 'a':
1090 elif self.dirstate[f] == 'a':
1087 self.dirstate.forget(f)
1091 self.dirstate.forget(f)
1088 elif f not in self.dirstate:
1092 elif f not in self.dirstate:
1089 self.ui.warn(_("%s not tracked!\n") % f)
1093 self.ui.warn(_("%s not tracked!\n") % f)
1090 else:
1094 else:
1091 self.dirstate.remove(f)
1095 self.dirstate.remove(f)
1092 finally:
1096 finally:
1093 del wlock
1097 del wlock
1094
1098
1095 def undelete(self, list):
1099 def undelete(self, list):
1096 wlock = None
1100 wlock = None
1097 try:
1101 try:
1098 manifests = [self.manifest.read(self.changelog.read(p)[0])
1102 manifests = [self.manifest.read(self.changelog.read(p)[0])
1099 for p in self.dirstate.parents() if p != nullid]
1103 for p in self.dirstate.parents() if p != nullid]
1100 wlock = self.wlock()
1104 wlock = self.wlock()
1101 for f in list:
1105 for f in list:
1102 if self.dirstate[f] != 'r':
1106 if self.dirstate[f] != 'r':
1103 self.ui.warn("%s not removed!\n" % f)
1107 self.ui.warn("%s not removed!\n" % f)
1104 else:
1108 else:
1105 m = f in manifests[0] and manifests[0] or manifests[1]
1109 m = f in manifests[0] and manifests[0] or manifests[1]
1106 t = self.file(f).read(m[f])
1110 t = self.file(f).read(m[f])
1107 self.wwrite(f, t, m.flags(f))
1111 self.wwrite(f, t, m.flags(f))
1108 self.dirstate.normal(f)
1112 self.dirstate.normal(f)
1109 finally:
1113 finally:
1110 del wlock
1114 del wlock
1111
1115
1112 def copy(self, source, dest):
1116 def copy(self, source, dest):
1113 wlock = None
1117 wlock = None
1114 try:
1118 try:
1115 p = self.wjoin(dest)
1119 p = self.wjoin(dest)
1116 if not (os.path.exists(p) or os.path.islink(p)):
1120 if not (os.path.exists(p) or os.path.islink(p)):
1117 self.ui.warn(_("%s does not exist!\n") % dest)
1121 self.ui.warn(_("%s does not exist!\n") % dest)
1118 elif not (os.path.isfile(p) or os.path.islink(p)):
1122 elif not (os.path.isfile(p) or os.path.islink(p)):
1119 self.ui.warn(_("copy failed: %s is not a file or a "
1123 self.ui.warn(_("copy failed: %s is not a file or a "
1120 "symbolic link\n") % dest)
1124 "symbolic link\n") % dest)
1121 else:
1125 else:
1122 wlock = self.wlock()
1126 wlock = self.wlock()
1123 if dest not in self.dirstate:
1127 if dest not in self.dirstate:
1124 self.dirstate.add(dest)
1128 self.dirstate.add(dest)
1125 self.dirstate.copy(source, dest)
1129 self.dirstate.copy(source, dest)
1126 finally:
1130 finally:
1127 del wlock
1131 del wlock
1128
1132
1129 def heads(self, start=None):
1133 def heads(self, start=None):
1130 heads = self.changelog.heads(start)
1134 heads = self.changelog.heads(start)
1131 # sort the output in rev descending order
1135 # sort the output in rev descending order
1132 heads = [(-self.changelog.rev(h), h) for h in heads]
1136 heads = [(-self.changelog.rev(h), h) for h in heads]
1133 heads.sort()
1137 heads.sort()
1134 return [n for (r, n) in heads]
1138 return [n for (r, n) in heads]
1135
1139
    def branchheads(self, branch, start=None):
        """Return the list of head nodes belonging to *branch*,
        optionally restricted to nodes reachable from *start*."""
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                # ancestor of a known head: not a head; promote its parents
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                # on the branch and not under an existing head: a new head
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            # keep only the heads reachable from start
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1181
1185
1182 def branches(self, nodes):
1186 def branches(self, nodes):
1183 if not nodes:
1187 if not nodes:
1184 nodes = [self.changelog.tip()]
1188 nodes = [self.changelog.tip()]
1185 b = []
1189 b = []
1186 for n in nodes:
1190 for n in nodes:
1187 t = n
1191 t = n
1188 while 1:
1192 while 1:
1189 p = self.changelog.parents(n)
1193 p = self.changelog.parents(n)
1190 if p[1] != nullid or p[0] == nullid:
1194 if p[1] != nullid or p[0] == nullid:
1191 b.append((t, n, p[0], p[1]))
1195 b.append((t, n, p[0], p[1]))
1192 break
1196 break
1193 n = p[0]
1197 n = p[0]
1194 return b
1198 return b
1195
1199
1196 def between(self, pairs):
1200 def between(self, pairs):
1197 r = []
1201 r = []
1198
1202
1199 for top, bottom in pairs:
1203 for top, bottom in pairs:
1200 n, l, i = top, [], 0
1204 n, l, i = top, [], 0
1201 f = 1
1205 f = 1
1202
1206
1203 while n != bottom:
1207 while n != bottom:
1204 p = self.changelog.parents(n)[0]
1208 p = self.changelog.parents(n)[0]
1205 if i == f:
1209 if i == f:
1206 l.append(n)
1210 l.append(n)
1207 f = f * 2
1211 f = f * 2
1208 n = p
1212 n = p
1209 i += 1
1213 i += 1
1210
1214
1211 r.append(l)
1215 r.append(l)
1212
1216
1213 return r
1217 return r
1214
1218
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # empty local repo: everything up to the requested heads
            # is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        # split the requested heads into those we already have (they go
        # straight into base) and those we must discover
        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue unknown parents for the next remote request
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                # batch requests ten branches at a time
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most two: p is the earliest unknown
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1355
1359
1356 def findoutgoing(self, remote, base=None, heads=None, force=False):
1360 def findoutgoing(self, remote, base=None, heads=None, force=False):
1357 """Return list of nodes that are roots of subsets not in remote
1361 """Return list of nodes that are roots of subsets not in remote
1358
1362
1359 If base dict is specified, assume that these nodes and their parents
1363 If base dict is specified, assume that these nodes and their parents
1360 exist on the remote side.
1364 exist on the remote side.
1361 If a list of heads is specified, return only nodes which are heads
1365 If a list of heads is specified, return only nodes which are heads
1362 or ancestors of these heads, and return a second element which
1366 or ancestors of these heads, and return a second element which
1363 contains all remote heads which get new children.
1367 contains all remote heads which get new children.
1364 """
1368 """
1365 if base == None:
1369 if base == None:
1366 base = {}
1370 base = {}
1367 self.findincoming(remote, base, heads, force=force)
1371 self.findincoming(remote, base, heads, force=force)
1368
1372
1369 self.ui.debug(_("common changesets up to ")
1373 self.ui.debug(_("common changesets up to ")
1370 + " ".join(map(short, base.keys())) + "\n")
1374 + " ".join(map(short, base.keys())) + "\n")
1371
1375
1372 remain = dict.fromkeys(self.changelog.nodemap)
1376 remain = dict.fromkeys(self.changelog.nodemap)
1373
1377
1374 # prune everything remote has from the tree
1378 # prune everything remote has from the tree
1375 del remain[nullid]
1379 del remain[nullid]
1376 remove = base.keys()
1380 remove = base.keys()
1377 while remove:
1381 while remove:
1378 n = remove.pop(0)
1382 n = remove.pop(0)
1379 if n in remain:
1383 if n in remain:
1380 del remain[n]
1384 del remain[n]
1381 for p in self.changelog.parents(n):
1385 for p in self.changelog.parents(n):
1382 remove.append(p)
1386 remove.append(p)
1383
1387
1384 # find every node whose parents have been pruned
1388 # find every node whose parents have been pruned
1385 subset = []
1389 subset = []
1386 # find every remote head that will get new children
1390 # find every remote head that will get new children
1387 updated_heads = {}
1391 updated_heads = {}
1388 for n in remain:
1392 for n in remain:
1389 p1, p2 = self.changelog.parents(n)
1393 p1, p2 = self.changelog.parents(n)
1390 if p1 not in remain and p2 not in remain:
1394 if p1 not in remain and p2 not in remain:
1391 subset.append(n)
1395 subset.append(n)
1392 if heads:
1396 if heads:
1393 if p1 in heads:
1397 if p1 in heads:
1394 updated_heads[p1] = True
1398 updated_heads[p1] = True
1395 if p2 in heads:
1399 if p2 in heads:
1396 updated_heads[p2] = True
1400 updated_heads[p2] = True
1397
1401
1398 # this is the set of all roots we have to push
1402 # this is the set of all roots we have to push
1399 if heads:
1403 if heads:
1400 return subset, updated_heads.keys()
1404 return subset, updated_heads.keys()
1401 else:
1405 else:
1402 return subset
1406 return subset
1403
1407
1404 def pull(self, remote, heads=None, force=False):
1408 def pull(self, remote, heads=None, force=False):
1405 lock = self.lock()
1409 lock = self.lock()
1406 try:
1410 try:
1407 fetch = self.findincoming(remote, heads=heads, force=force)
1411 fetch = self.findincoming(remote, heads=heads, force=force)
1408 if fetch == [nullid]:
1412 if fetch == [nullid]:
1409 self.ui.status(_("requesting all changes\n"))
1413 self.ui.status(_("requesting all changes\n"))
1410
1414
1411 if not fetch:
1415 if not fetch:
1412 self.ui.status(_("no changes found\n"))
1416 self.ui.status(_("no changes found\n"))
1413 return 0
1417 return 0
1414
1418
1415 if heads is None:
1419 if heads is None:
1416 cg = remote.changegroup(fetch, 'pull')
1420 cg = remote.changegroup(fetch, 'pull')
1417 else:
1421 else:
1418 if 'changegroupsubset' not in remote.capabilities:
1422 if 'changegroupsubset' not in remote.capabilities:
1419 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1423 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1420 cg = remote.changegroupsubset(fetch, heads, 'pull')
1424 cg = remote.changegroupsubset(fetch, heads, 'pull')
1421 return self.addchangegroup(cg, 'pull', remote.url())
1425 return self.addchangegroup(cg, 'pull', remote.url())
1422 finally:
1426 finally:
1423 del lock
1427 del lock
1424
1428
1425 def push(self, remote, force=False, revs=None):
1429 def push(self, remote, force=False, revs=None):
1426 # there are two ways to push to remote repo:
1430 # there are two ways to push to remote repo:
1427 #
1431 #
1428 # addchangegroup assumes local user can lock remote
1432 # addchangegroup assumes local user can lock remote
1429 # repo (local filesystem, old ssh servers).
1433 # repo (local filesystem, old ssh servers).
1430 #
1434 #
1431 # unbundle assumes local user cannot lock remote repo (new ssh
1435 # unbundle assumes local user cannot lock remote repo (new ssh
1432 # servers, http servers).
1436 # servers, http servers).
1433
1437
1434 if remote.capable('unbundle'):
1438 if remote.capable('unbundle'):
1435 return self.push_unbundle(remote, force, revs)
1439 return self.push_unbundle(remote, force, revs)
1436 return self.push_addchangegroup(remote, force, revs)
1440 return self.push_addchangegroup(remote, force, revs)
1437
1441
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push and the remote's current heads.

        Returns (changegroup, remote_heads) on success, or (None, 1)
        when there is nothing to push or when the push would create new
        remote heads and *force* is not set.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            warn = 0

            if remote_heads == [nullid]:
                # empty remote repo: pushing cannot create extra heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        # keep remote heads with no outgoing descendant:
                        # they will still be heads after the push
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1493
1497
1494 def push_addchangegroup(self, remote, force, revs):
1498 def push_addchangegroup(self, remote, force, revs):
1495 lock = remote.lock()
1499 lock = remote.lock()
1496 try:
1500 try:
1497 ret = self.prepush(remote, force, revs)
1501 ret = self.prepush(remote, force, revs)
1498 if ret[0] is not None:
1502 if ret[0] is not None:
1499 cg, remote_heads = ret
1503 cg, remote_heads = ret
1500 return remote.addchangegroup(cg, 'push', self.url())
1504 return remote.addchangegroup(cg, 'push', self.url())
1501 return ret[1]
1505 return ret[1]
1502 finally:
1506 finally:
1503 del lock
1507 del lock
1504
1508
1505 def push_unbundle(self, remote, force, revs):
1509 def push_unbundle(self, remote, force, revs):
1506 # local repo finds heads on server, finds out what revs it
1510 # local repo finds heads on server, finds out what revs it
1507 # must push. once revs transferred, if server finds it has
1511 # must push. once revs transferred, if server finds it has
1508 # different heads (someone else won commit/push race), server
1512 # different heads (someone else won commit/push race), server
1509 # aborts.
1513 # aborts.
1510
1514
1511 ret = self.prepush(remote, force, revs)
1515 ret = self.prepush(remote, force, revs)
1512 if ret[0] is not None:
1516 if ret[0] is not None:
1513 cg, remote_heads = ret
1517 cg, remote_heads = ret
1514 if force: remote_heads = ['force']
1518 if force: remote_heads = ['force']
1515 return remote.unbundle(cg, remote_heads, 'push')
1519 return remote.unbundle(cg, remote_heads, 'push')
1516 return ret[1]
1520 return ret[1]
1517
1521
1518 def changegroupinfo(self, nodes, source):
1522 def changegroupinfo(self, nodes, source):
1519 if self.ui.verbose or source == 'bundle':
1523 if self.ui.verbose or source == 'bundle':
1520 self.ui.status(_("%d changesets found\n") % len(nodes))
1524 self.ui.status(_("%d changesets found\n") % len(nodes))
1521 if self.ui.debugflag:
1525 if self.ui.debugflag:
1522 self.ui.debug(_("List of changesets:\n"))
1526 self.ui.debug(_("List of changesets:\n"))
1523 for node in nodes:
1527 for node in nodes:
1524 self.ui.debug("%s\n" % hex(node))
1528 self.ui.debug("%s\n" % hex(node))
1525
1529
1526 def changegroupsubset(self, bases, heads, source, extranodes=None):
1530 def changegroupsubset(self, bases, heads, source, extranodes=None):
1527 """This function generates a changegroup consisting of all the nodes
1531 """This function generates a changegroup consisting of all the nodes
1528 that are descendents of any of the bases, and ancestors of any of
1532 that are descendents of any of the bases, and ancestors of any of
1529 the heads.
1533 the heads.
1530
1534
1531 It is fairly complex as determining which filenodes and which
1535 It is fairly complex as determining which filenodes and which
1532 manifest nodes need to be included for the changeset to be complete
1536 manifest nodes need to be included for the changeset to be complete
1533 is non-trivial.
1537 is non-trivial.
1534
1538
1535 Another wrinkle is doing the reverse, figuring out which changeset in
1539 Another wrinkle is doing the reverse, figuring out which changeset in
1536 the changegroup a particular filenode or manifestnode belongs to.
1540 the changegroup a particular filenode or manifestnode belongs to.
1537
1541
1538 The caller can specify some nodes that must be included in the
1542 The caller can specify some nodes that must be included in the
1539 changegroup using the extranodes argument. It should be a dict
1543 changegroup using the extranodes argument. It should be a dict
1540 where the keys are the filenames (or 1 for the manifest), and the
1544 where the keys are the filenames (or 1 for the manifest), and the
1541 values are lists of (node, linknode) tuples, where node is a wanted
1545 values are lists of (node, linknode) tuples, where node is a wanted
1542 node and linknode is the changelog node that should be transmitted as
1546 node and linknode is the changelog node that should be transmitted as
1543 the linkrev.
1547 the linkrev.
1544 """
1548 """
1545
1549
1546 self.hook('preoutgoing', throw=True, source=source)
1550 self.hook('preoutgoing', throw=True, source=source)
1547
1551
1548 # Set up some initial variables
1552 # Set up some initial variables
1549 # Make it easy to refer to self.changelog
1553 # Make it easy to refer to self.changelog
1550 cl = self.changelog
1554 cl = self.changelog
1551 # msng is short for missing - compute the list of changesets in this
1555 # msng is short for missing - compute the list of changesets in this
1552 # changegroup.
1556 # changegroup.
1553 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1557 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1554 self.changegroupinfo(msng_cl_lst, source)
1558 self.changegroupinfo(msng_cl_lst, source)
1555 # Some bases may turn out to be superfluous, and some heads may be
1559 # Some bases may turn out to be superfluous, and some heads may be
1556 # too. nodesbetween will return the minimal set of bases and heads
1560 # too. nodesbetween will return the minimal set of bases and heads
1557 # necessary to re-create the changegroup.
1561 # necessary to re-create the changegroup.
1558
1562
1559 # Known heads are the list of heads that it is assumed the recipient
1563 # Known heads are the list of heads that it is assumed the recipient
1560 # of this changegroup will know about.
1564 # of this changegroup will know about.
1561 knownheads = {}
1565 knownheads = {}
1562 # We assume that all parents of bases are known heads.
1566 # We assume that all parents of bases are known heads.
1563 for n in bases:
1567 for n in bases:
1564 for p in cl.parents(n):
1568 for p in cl.parents(n):
1565 if p != nullid:
1569 if p != nullid:
1566 knownheads[p] = 1
1570 knownheads[p] = 1
1567 knownheads = knownheads.keys()
1571 knownheads = knownheads.keys()
1568 if knownheads:
1572 if knownheads:
1569 # Now that we know what heads are known, we can compute which
1573 # Now that we know what heads are known, we can compute which
1570 # changesets are known. The recipient must know about all
1574 # changesets are known. The recipient must know about all
1571 # changesets required to reach the known heads from the null
1575 # changesets required to reach the known heads from the null
1572 # changeset.
1576 # changeset.
1573 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1577 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1574 junk = None
1578 junk = None
1575 # Transform the list into an ersatz set.
1579 # Transform the list into an ersatz set.
1576 has_cl_set = dict.fromkeys(has_cl_set)
1580 has_cl_set = dict.fromkeys(has_cl_set)
1577 else:
1581 else:
1578 # If there were no known heads, the recipient cannot be assumed to
1582 # If there were no known heads, the recipient cannot be assumed to
1579 # know about any changesets.
1583 # know about any changesets.
1580 has_cl_set = {}
1584 has_cl_set = {}
1581
1585
1582 # Make it easy to refer to self.manifest
1586 # Make it easy to refer to self.manifest
1583 mnfst = self.manifest
1587 mnfst = self.manifest
1584 # We don't know which manifests are missing yet
1588 # We don't know which manifests are missing yet
1585 msng_mnfst_set = {}
1589 msng_mnfst_set = {}
1586 # Nor do we know which filenodes are missing.
1590 # Nor do we know which filenodes are missing.
1587 msng_filenode_set = {}
1591 msng_filenode_set = {}
1588
1592
1589 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1593 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1590 junk = None
1594 junk = None
1591
1595
1592 # A changeset always belongs to itself, so the changenode lookup
1596 # A changeset always belongs to itself, so the changenode lookup
1593 # function for a changenode is identity.
1597 # function for a changenode is identity.
1594 def identity(x):
1598 def identity(x):
1595 return x
1599 return x
1596
1600
1597 # A function generating function. Sets up an environment for the
1601 # A function generating function. Sets up an environment for the
1598 # inner function.
1602 # inner function.
1599 def cmp_by_rev_func(revlog):
1603 def cmp_by_rev_func(revlog):
1600 # Compare two nodes by their revision number in the environment's
1604 # Compare two nodes by their revision number in the environment's
1601 # revision history. Since the revision number both represents the
1605 # revision history. Since the revision number both represents the
1602 # most efficient order to read the nodes in, and represents a
1606 # most efficient order to read the nodes in, and represents a
1603 # topological sorting of the nodes, this function is often useful.
1607 # topological sorting of the nodes, this function is often useful.
1604 def cmp_by_rev(a, b):
1608 def cmp_by_rev(a, b):
1605 return cmp(revlog.rev(a), revlog.rev(b))
1609 return cmp(revlog.rev(a), revlog.rev(b))
1606 return cmp_by_rev
1610 return cmp_by_rev
1607
1611
1608 # If we determine that a particular file or manifest node must be a
1612 # If we determine that a particular file or manifest node must be a
1609 # node that the recipient of the changegroup will already have, we can
1613 # node that the recipient of the changegroup will already have, we can
1610 # also assume the recipient will have all the parents. This function
1614 # also assume the recipient will have all the parents. This function
1611 # prunes them from the set of missing nodes.
1615 # prunes them from the set of missing nodes.
1612 def prune_parents(revlog, hasset, msngset):
1616 def prune_parents(revlog, hasset, msngset):
1613 haslst = hasset.keys()
1617 haslst = hasset.keys()
1614 haslst.sort(cmp_by_rev_func(revlog))
1618 haslst.sort(cmp_by_rev_func(revlog))
1615 for node in haslst:
1619 for node in haslst:
1616 parentlst = [p for p in revlog.parents(node) if p != nullid]
1620 parentlst = [p for p in revlog.parents(node) if p != nullid]
1617 while parentlst:
1621 while parentlst:
1618 n = parentlst.pop()
1622 n = parentlst.pop()
1619 if n not in hasset:
1623 if n not in hasset:
1620 hasset[n] = 1
1624 hasset[n] = 1
1621 p = [p for p in revlog.parents(n) if p != nullid]
1625 p = [p for p in revlog.parents(n) if p != nullid]
1622 parentlst.extend(p)
1626 parentlst.extend(p)
1623 for n in hasset:
1627 for n in hasset:
1624 msngset.pop(n, None)
1628 msngset.pop(n, None)
1625
1629
1626 # This is a function generating function used to set up an environment
1630 # This is a function generating function used to set up an environment
1627 # for the inner function to execute in.
1631 # for the inner function to execute in.
1628 def manifest_and_file_collector(changedfileset):
1632 def manifest_and_file_collector(changedfileset):
1629 # This is an information gathering function that gathers
1633 # This is an information gathering function that gathers
1630 # information from each changeset node that goes out as part of
1634 # information from each changeset node that goes out as part of
1631 # the changegroup. The information gathered is a list of which
1635 # the changegroup. The information gathered is a list of which
1632 # manifest nodes are potentially required (the recipient may
1636 # manifest nodes are potentially required (the recipient may
1633 # already have them) and total list of all files which were
1637 # already have them) and total list of all files which were
1634 # changed in any changeset in the changegroup.
1638 # changed in any changeset in the changegroup.
1635 #
1639 #
1636 # We also remember the first changenode we saw any manifest
1640 # We also remember the first changenode we saw any manifest
1637 # referenced by so we can later determine which changenode 'owns'
1641 # referenced by so we can later determine which changenode 'owns'
1638 # the manifest.
1642 # the manifest.
1639 def collect_manifests_and_files(clnode):
1643 def collect_manifests_and_files(clnode):
1640 c = cl.read(clnode)
1644 c = cl.read(clnode)
1641 for f in c[3]:
1645 for f in c[3]:
1642 # This is to make sure we only have one instance of each
1646 # This is to make sure we only have one instance of each
1643 # filename string for each filename.
1647 # filename string for each filename.
1644 changedfileset.setdefault(f, f)
1648 changedfileset.setdefault(f, f)
1645 msng_mnfst_set.setdefault(c[0], clnode)
1649 msng_mnfst_set.setdefault(c[0], clnode)
1646 return collect_manifests_and_files
1650 return collect_manifests_and_files
1647
1651
1648 # Figure out which manifest nodes (of the ones we think might be part
1652 # Figure out which manifest nodes (of the ones we think might be part
1649 # of the changegroup) the recipient must know about and remove them
1653 # of the changegroup) the recipient must know about and remove them
1650 # from the changegroup.
1654 # from the changegroup.
1651 def prune_manifests():
1655 def prune_manifests():
1652 has_mnfst_set = {}
1656 has_mnfst_set = {}
1653 for n in msng_mnfst_set:
1657 for n in msng_mnfst_set:
1654 # If a 'missing' manifest thinks it belongs to a changenode
1658 # If a 'missing' manifest thinks it belongs to a changenode
1655 # the recipient is assumed to have, obviously the recipient
1659 # the recipient is assumed to have, obviously the recipient
1656 # must have that manifest.
1660 # must have that manifest.
1657 linknode = cl.node(mnfst.linkrev(n))
1661 linknode = cl.node(mnfst.linkrev(n))
1658 if linknode in has_cl_set:
1662 if linknode in has_cl_set:
1659 has_mnfst_set[n] = 1
1663 has_mnfst_set[n] = 1
1660 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1664 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1661
1665
1662 # Use the information collected in collect_manifests_and_files to say
1666 # Use the information collected in collect_manifests_and_files to say
1663 # which changenode any manifestnode belongs to.
1667 # which changenode any manifestnode belongs to.
1664 def lookup_manifest_link(mnfstnode):
1668 def lookup_manifest_link(mnfstnode):
1665 return msng_mnfst_set[mnfstnode]
1669 return msng_mnfst_set[mnfstnode]
1666
1670
1667 # A function generating function that sets up the initial environment
1671 # A function generating function that sets up the initial environment
1668 # the inner function.
1672 # the inner function.
1669 def filenode_collector(changedfiles):
1673 def filenode_collector(changedfiles):
1670 next_rev = [0]
1674 next_rev = [0]
1671 # This gathers information from each manifestnode included in the
1675 # This gathers information from each manifestnode included in the
1672 # changegroup about which filenodes the manifest node references
1676 # changegroup about which filenodes the manifest node references
1673 # so we can include those in the changegroup too.
1677 # so we can include those in the changegroup too.
1674 #
1678 #
1675 # It also remembers which changenode each filenode belongs to. It
1679 # It also remembers which changenode each filenode belongs to. It
1676 # does this by assuming the a filenode belongs to the changenode
1680 # does this by assuming the a filenode belongs to the changenode
1677 # the first manifest that references it belongs to.
1681 # the first manifest that references it belongs to.
1678 def collect_msng_filenodes(mnfstnode):
1682 def collect_msng_filenodes(mnfstnode):
1679 r = mnfst.rev(mnfstnode)
1683 r = mnfst.rev(mnfstnode)
1680 if r == next_rev[0]:
1684 if r == next_rev[0]:
1681 # If the last rev we looked at was the one just previous,
1685 # If the last rev we looked at was the one just previous,
1682 # we only need to see a diff.
1686 # we only need to see a diff.
1683 deltamf = mnfst.readdelta(mnfstnode)
1687 deltamf = mnfst.readdelta(mnfstnode)
1684 # For each line in the delta
1688 # For each line in the delta
1685 for f, fnode in deltamf.items():
1689 for f, fnode in deltamf.items():
1686 f = changedfiles.get(f, None)
1690 f = changedfiles.get(f, None)
1687 # And if the file is in the list of files we care
1691 # And if the file is in the list of files we care
1688 # about.
1692 # about.
1689 if f is not None:
1693 if f is not None:
1690 # Get the changenode this manifest belongs to
1694 # Get the changenode this manifest belongs to
1691 clnode = msng_mnfst_set[mnfstnode]
1695 clnode = msng_mnfst_set[mnfstnode]
1692 # Create the set of filenodes for the file if
1696 # Create the set of filenodes for the file if
1693 # there isn't one already.
1697 # there isn't one already.
1694 ndset = msng_filenode_set.setdefault(f, {})
1698 ndset = msng_filenode_set.setdefault(f, {})
1695 # And set the filenode's changelog node to the
1699 # And set the filenode's changelog node to the
1696 # manifest's if it hasn't been set already.
1700 # manifest's if it hasn't been set already.
1697 ndset.setdefault(fnode, clnode)
1701 ndset.setdefault(fnode, clnode)
1698 else:
1702 else:
1699 # Otherwise we need a full manifest.
1703 # Otherwise we need a full manifest.
1700 m = mnfst.read(mnfstnode)
1704 m = mnfst.read(mnfstnode)
1701 # For every file in we care about.
1705 # For every file in we care about.
1702 for f in changedfiles:
1706 for f in changedfiles:
1703 fnode = m.get(f, None)
1707 fnode = m.get(f, None)
1704 # If it's in the manifest
1708 # If it's in the manifest
1705 if fnode is not None:
1709 if fnode is not None:
1706 # See comments above.
1710 # See comments above.
1707 clnode = msng_mnfst_set[mnfstnode]
1711 clnode = msng_mnfst_set[mnfstnode]
1708 ndset = msng_filenode_set.setdefault(f, {})
1712 ndset = msng_filenode_set.setdefault(f, {})
1709 ndset.setdefault(fnode, clnode)
1713 ndset.setdefault(fnode, clnode)
1710 # Remember the revision we hope to see next.
1714 # Remember the revision we hope to see next.
1711 next_rev[0] = r + 1
1715 next_rev[0] = r + 1
1712 return collect_msng_filenodes
1716 return collect_msng_filenodes
1713
1717
1714 # We have a list of filenodes we think we need for a file, lets remove
1718 # We have a list of filenodes we think we need for a file, lets remove
1715 # all those we now the recipient must have.
1719 # all those we now the recipient must have.
1716 def prune_filenodes(f, filerevlog):
1720 def prune_filenodes(f, filerevlog):
1717 msngset = msng_filenode_set[f]
1721 msngset = msng_filenode_set[f]
1718 hasset = {}
1722 hasset = {}
1719 # If a 'missing' filenode thinks it belongs to a changenode we
1723 # If a 'missing' filenode thinks it belongs to a changenode we
1720 # assume the recipient must have, then the recipient must have
1724 # assume the recipient must have, then the recipient must have
1721 # that filenode.
1725 # that filenode.
1722 for n in msngset:
1726 for n in msngset:
1723 clnode = cl.node(filerevlog.linkrev(n))
1727 clnode = cl.node(filerevlog.linkrev(n))
1724 if clnode in has_cl_set:
1728 if clnode in has_cl_set:
1725 hasset[n] = 1
1729 hasset[n] = 1
1726 prune_parents(filerevlog, hasset, msngset)
1730 prune_parents(filerevlog, hasset, msngset)
1727
1731
1728 # A function generator function that sets up the a context for the
1732 # A function generator function that sets up the a context for the
1729 # inner function.
1733 # inner function.
1730 def lookup_filenode_link_func(fname):
1734 def lookup_filenode_link_func(fname):
1731 msngset = msng_filenode_set[fname]
1735 msngset = msng_filenode_set[fname]
1732 # Lookup the changenode the filenode belongs to.
1736 # Lookup the changenode the filenode belongs to.
1733 def lookup_filenode_link(fnode):
1737 def lookup_filenode_link(fnode):
1734 return msngset[fnode]
1738 return msngset[fnode]
1735 return lookup_filenode_link
1739 return lookup_filenode_link
1736
1740
1737 # Add the nodes that were explicitly requested.
1741 # Add the nodes that were explicitly requested.
1738 def add_extra_nodes(name, nodes):
1742 def add_extra_nodes(name, nodes):
1739 if not extranodes or name not in extranodes:
1743 if not extranodes or name not in extranodes:
1740 return
1744 return
1741
1745
1742 for node, linknode in extranodes[name]:
1746 for node, linknode in extranodes[name]:
1743 if node not in nodes:
1747 if node not in nodes:
1744 nodes[node] = linknode
1748 nodes[node] = linknode
1745
1749
1746 # Now that we have all theses utility functions to help out and
1750 # Now that we have all theses utility functions to help out and
1747 # logically divide up the task, generate the group.
1751 # logically divide up the task, generate the group.
1748 def gengroup():
1752 def gengroup():
1749 # The set of changed files starts empty.
1753 # The set of changed files starts empty.
1750 changedfiles = {}
1754 changedfiles = {}
1751 # Create a changenode group generator that will call our functions
1755 # Create a changenode group generator that will call our functions
1752 # back to lookup the owning changenode and collect information.
1756 # back to lookup the owning changenode and collect information.
1753 group = cl.group(msng_cl_lst, identity,
1757 group = cl.group(msng_cl_lst, identity,
1754 manifest_and_file_collector(changedfiles))
1758 manifest_and_file_collector(changedfiles))
1755 for chnk in group:
1759 for chnk in group:
1756 yield chnk
1760 yield chnk
1757
1761
1758 # The list of manifests has been collected by the generator
1762 # The list of manifests has been collected by the generator
1759 # calling our functions back.
1763 # calling our functions back.
1760 prune_manifests()
1764 prune_manifests()
1761 add_extra_nodes(1, msng_mnfst_set)
1765 add_extra_nodes(1, msng_mnfst_set)
1762 msng_mnfst_lst = msng_mnfst_set.keys()
1766 msng_mnfst_lst = msng_mnfst_set.keys()
1763 # Sort the manifestnodes by revision number.
1767 # Sort the manifestnodes by revision number.
1764 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1768 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1765 # Create a generator for the manifestnodes that calls our lookup
1769 # Create a generator for the manifestnodes that calls our lookup
1766 # and data collection functions back.
1770 # and data collection functions back.
1767 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1771 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1768 filenode_collector(changedfiles))
1772 filenode_collector(changedfiles))
1769 for chnk in group:
1773 for chnk in group:
1770 yield chnk
1774 yield chnk
1771
1775
1772 # These are no longer needed, dereference and toss the memory for
1776 # These are no longer needed, dereference and toss the memory for
1773 # them.
1777 # them.
1774 msng_mnfst_lst = None
1778 msng_mnfst_lst = None
1775 msng_mnfst_set.clear()
1779 msng_mnfst_set.clear()
1776
1780
1777 if extranodes:
1781 if extranodes:
1778 for fname in extranodes:
1782 for fname in extranodes:
1779 if isinstance(fname, int):
1783 if isinstance(fname, int):
1780 continue
1784 continue
1781 add_extra_nodes(fname,
1785 add_extra_nodes(fname,
1782 msng_filenode_set.setdefault(fname, {}))
1786 msng_filenode_set.setdefault(fname, {}))
1783 changedfiles[fname] = 1
1787 changedfiles[fname] = 1
1784 changedfiles = changedfiles.keys()
1788 changedfiles = changedfiles.keys()
1785 changedfiles.sort()
1789 changedfiles.sort()
1786 # Go through all our files in order sorted by name.
1790 # Go through all our files in order sorted by name.
1787 for fname in changedfiles:
1791 for fname in changedfiles:
1788 filerevlog = self.file(fname)
1792 filerevlog = self.file(fname)
1789 if filerevlog.count() == 0:
1793 if filerevlog.count() == 0:
1790 raise util.Abort(_("empty or missing revlog for %s") % fname)
1794 raise util.Abort(_("empty or missing revlog for %s") % fname)
1791 # Toss out the filenodes that the recipient isn't really
1795 # Toss out the filenodes that the recipient isn't really
1792 # missing.
1796 # missing.
1793 if fname in msng_filenode_set:
1797 if fname in msng_filenode_set:
1794 prune_filenodes(fname, filerevlog)
1798 prune_filenodes(fname, filerevlog)
1795 msng_filenode_lst = msng_filenode_set[fname].keys()
1799 msng_filenode_lst = msng_filenode_set[fname].keys()
1796 else:
1800 else:
1797 msng_filenode_lst = []
1801 msng_filenode_lst = []
1798 # If any filenodes are left, generate the group for them,
1802 # If any filenodes are left, generate the group for them,
1799 # otherwise don't bother.
1803 # otherwise don't bother.
1800 if len(msng_filenode_lst) > 0:
1804 if len(msng_filenode_lst) > 0:
1801 yield changegroup.chunkheader(len(fname))
1805 yield changegroup.chunkheader(len(fname))
1802 yield fname
1806 yield fname
1803 # Sort the filenodes by their revision #
1807 # Sort the filenodes by their revision #
1804 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1808 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1805 # Create a group generator and only pass in a changenode
1809 # Create a group generator and only pass in a changenode
1806 # lookup function as we need to collect no information
1810 # lookup function as we need to collect no information
1807 # from filenodes.
1811 # from filenodes.
1808 group = filerevlog.group(msng_filenode_lst,
1812 group = filerevlog.group(msng_filenode_lst,
1809 lookup_filenode_link_func(fname))
1813 lookup_filenode_link_func(fname))
1810 for chnk in group:
1814 for chnk in group:
1811 yield chnk
1815 yield chnk
1812 if fname in msng_filenode_set:
1816 if fname in msng_filenode_set:
1813 # Don't need this anymore, toss it to free memory.
1817 # Don't need this anymore, toss it to free memory.
1814 del msng_filenode_set[fname]
1818 del msng_filenode_set[fname]
1815 # Signal that no more groups are left.
1819 # Signal that no more groups are left.
1816 yield changegroup.closechunk()
1820 yield changegroup.closechunk()
1817
1821
1818 if msng_cl_lst:
1822 if msng_cl_lst:
1819 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1823 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1820
1824
1821 return util.chunkbuffer(gengroup())
1825 return util.chunkbuffer(gengroup())
1822
1826
1823 def changegroup(self, basenodes, source):
1827 def changegroup(self, basenodes, source):
1824 """Generate a changegroup of all nodes that we have that a recipient
1828 """Generate a changegroup of all nodes that we have that a recipient
1825 doesn't.
1829 doesn't.
1826
1830
1827 This is much easier than the previous function as we can assume that
1831 This is much easier than the previous function as we can assume that
1828 the recipient has any changenode we aren't sending them."""
1832 the recipient has any changenode we aren't sending them."""
1829
1833
1830 self.hook('preoutgoing', throw=True, source=source)
1834 self.hook('preoutgoing', throw=True, source=source)
1831
1835
1832 cl = self.changelog
1836 cl = self.changelog
1833 nodes = cl.nodesbetween(basenodes, None)[0]
1837 nodes = cl.nodesbetween(basenodes, None)[0]
1834 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1838 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1835 self.changegroupinfo(nodes, source)
1839 self.changegroupinfo(nodes, source)
1836
1840
1837 def identity(x):
1841 def identity(x):
1838 return x
1842 return x
1839
1843
1840 def gennodelst(revlog):
1844 def gennodelst(revlog):
1841 for r in xrange(0, revlog.count()):
1845 for r in xrange(0, revlog.count()):
1842 n = revlog.node(r)
1846 n = revlog.node(r)
1843 if revlog.linkrev(n) in revset:
1847 if revlog.linkrev(n) in revset:
1844 yield n
1848 yield n
1845
1849
1846 def changed_file_collector(changedfileset):
1850 def changed_file_collector(changedfileset):
1847 def collect_changed_files(clnode):
1851 def collect_changed_files(clnode):
1848 c = cl.read(clnode)
1852 c = cl.read(clnode)
1849 for fname in c[3]:
1853 for fname in c[3]:
1850 changedfileset[fname] = 1
1854 changedfileset[fname] = 1
1851 return collect_changed_files
1855 return collect_changed_files
1852
1856
1853 def lookuprevlink_func(revlog):
1857 def lookuprevlink_func(revlog):
1854 def lookuprevlink(n):
1858 def lookuprevlink(n):
1855 return cl.node(revlog.linkrev(n))
1859 return cl.node(revlog.linkrev(n))
1856 return lookuprevlink
1860 return lookuprevlink
1857
1861
1858 def gengroup():
1862 def gengroup():
1859 # construct a list of all changed files
1863 # construct a list of all changed files
1860 changedfiles = {}
1864 changedfiles = {}
1861
1865
1862 for chnk in cl.group(nodes, identity,
1866 for chnk in cl.group(nodes, identity,
1863 changed_file_collector(changedfiles)):
1867 changed_file_collector(changedfiles)):
1864 yield chnk
1868 yield chnk
1865 changedfiles = changedfiles.keys()
1869 changedfiles = changedfiles.keys()
1866 changedfiles.sort()
1870 changedfiles.sort()
1867
1871
1868 mnfst = self.manifest
1872 mnfst = self.manifest
1869 nodeiter = gennodelst(mnfst)
1873 nodeiter = gennodelst(mnfst)
1870 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1874 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1871 yield chnk
1875 yield chnk
1872
1876
1873 for fname in changedfiles:
1877 for fname in changedfiles:
1874 filerevlog = self.file(fname)
1878 filerevlog = self.file(fname)
1875 if filerevlog.count() == 0:
1879 if filerevlog.count() == 0:
1876 raise util.Abort(_("empty or missing revlog for %s") % fname)
1880 raise util.Abort(_("empty or missing revlog for %s") % fname)
1877 nodeiter = gennodelst(filerevlog)
1881 nodeiter = gennodelst(filerevlog)
1878 nodeiter = list(nodeiter)
1882 nodeiter = list(nodeiter)
1879 if nodeiter:
1883 if nodeiter:
1880 yield changegroup.chunkheader(len(fname))
1884 yield changegroup.chunkheader(len(fname))
1881 yield fname
1885 yield fname
1882 lookup = lookuprevlink_func(filerevlog)
1886 lookup = lookuprevlink_func(filerevlog)
1883 for chnk in filerevlog.group(nodeiter, lookup):
1887 for chnk in filerevlog.group(nodeiter, lookup):
1884 yield chnk
1888 yield chnk
1885
1889
1886 yield changegroup.closechunk()
1890 yield changegroup.closechunk()
1887
1891
1888 if nodes:
1892 if nodes:
1889 self.hook('outgoing', node=hex(nodes[0]), source=source)
1893 self.hook('outgoing', node=hex(nodes[0]), source=source)
1890
1894
1891 return util.chunkbuffer(gengroup())
1895 return util.chunkbuffer(gengroup())
1892
1896
1893 def addchangegroup(self, source, srctype, url, emptyok=False):
1897 def addchangegroup(self, source, srctype, url, emptyok=False):
1894 """add changegroup to repo.
1898 """add changegroup to repo.
1895
1899
1896 return values:
1900 return values:
1897 - nothing changed or no source: 0
1901 - nothing changed or no source: 0
1898 - more heads than before: 1+added heads (2..n)
1902 - more heads than before: 1+added heads (2..n)
1899 - less heads than before: -1-removed heads (-2..-n)
1903 - less heads than before: -1-removed heads (-2..-n)
1900 - number of heads stays the same: 1
1904 - number of heads stays the same: 1
1901 """
1905 """
1902 def csmap(x):
1906 def csmap(x):
1903 self.ui.debug(_("add changeset %s\n") % short(x))
1907 self.ui.debug(_("add changeset %s\n") % short(x))
1904 return cl.count()
1908 return cl.count()
1905
1909
1906 def revmap(x):
1910 def revmap(x):
1907 return cl.rev(x)
1911 return cl.rev(x)
1908
1912
1909 if not source:
1913 if not source:
1910 return 0
1914 return 0
1911
1915
1912 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1916 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1913
1917
1914 changesets = files = revisions = 0
1918 changesets = files = revisions = 0
1915
1919
1916 # write changelog data to temp files so concurrent readers will not see
1920 # write changelog data to temp files so concurrent readers will not see
1917 # inconsistent view
1921 # inconsistent view
1918 cl = self.changelog
1922 cl = self.changelog
1919 cl.delayupdate()
1923 cl.delayupdate()
1920 oldheads = len(cl.heads())
1924 oldheads = len(cl.heads())
1921
1925
1922 tr = self.transaction()
1926 tr = self.transaction()
1923 try:
1927 try:
1924 trp = weakref.proxy(tr)
1928 trp = weakref.proxy(tr)
1925 # pull off the changeset group
1929 # pull off the changeset group
1926 self.ui.status(_("adding changesets\n"))
1930 self.ui.status(_("adding changesets\n"))
1927 cor = cl.count() - 1
1931 cor = cl.count() - 1
1928 chunkiter = changegroup.chunkiter(source)
1932 chunkiter = changegroup.chunkiter(source)
1929 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1933 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1930 raise util.Abort(_("received changelog group is empty"))
1934 raise util.Abort(_("received changelog group is empty"))
1931 cnr = cl.count() - 1
1935 cnr = cl.count() - 1
1932 changesets = cnr - cor
1936 changesets = cnr - cor
1933
1937
1934 # pull off the manifest group
1938 # pull off the manifest group
1935 self.ui.status(_("adding manifests\n"))
1939 self.ui.status(_("adding manifests\n"))
1936 chunkiter = changegroup.chunkiter(source)
1940 chunkiter = changegroup.chunkiter(source)
1937 # no need to check for empty manifest group here:
1941 # no need to check for empty manifest group here:
1938 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1942 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1939 # no new manifest will be created and the manifest group will
1943 # no new manifest will be created and the manifest group will
1940 # be empty during the pull
1944 # be empty during the pull
1941 self.manifest.addgroup(chunkiter, revmap, trp)
1945 self.manifest.addgroup(chunkiter, revmap, trp)
1942
1946
1943 # process the files
1947 # process the files
1944 self.ui.status(_("adding file changes\n"))
1948 self.ui.status(_("adding file changes\n"))
1945 while 1:
1949 while 1:
1946 f = changegroup.getchunk(source)
1950 f = changegroup.getchunk(source)
1947 if not f:
1951 if not f:
1948 break
1952 break
1949 self.ui.debug(_("adding %s revisions\n") % f)
1953 self.ui.debug(_("adding %s revisions\n") % f)
1950 fl = self.file(f)
1954 fl = self.file(f)
1951 o = fl.count()
1955 o = fl.count()
1952 chunkiter = changegroup.chunkiter(source)
1956 chunkiter = changegroup.chunkiter(source)
1953 if fl.addgroup(chunkiter, revmap, trp) is None:
1957 if fl.addgroup(chunkiter, revmap, trp) is None:
1954 raise util.Abort(_("received file revlog group is empty"))
1958 raise util.Abort(_("received file revlog group is empty"))
1955 revisions += fl.count() - o
1959 revisions += fl.count() - o
1956 files += 1
1960 files += 1
1957
1961
1958 # make changelog see real files again
1962 # make changelog see real files again
1959 cl.finalize(trp)
1963 cl.finalize(trp)
1960
1964
1961 newheads = len(self.changelog.heads())
1965 newheads = len(self.changelog.heads())
1962 heads = ""
1966 heads = ""
1963 if oldheads and newheads != oldheads:
1967 if oldheads and newheads != oldheads:
1964 heads = _(" (%+d heads)") % (newheads - oldheads)
1968 heads = _(" (%+d heads)") % (newheads - oldheads)
1965
1969
1966 self.ui.status(_("added %d changesets"
1970 self.ui.status(_("added %d changesets"
1967 " with %d changes to %d files%s\n")
1971 " with %d changes to %d files%s\n")
1968 % (changesets, revisions, files, heads))
1972 % (changesets, revisions, files, heads))
1969
1973
1970 if changesets > 0:
1974 if changesets > 0:
1971 self.hook('pretxnchangegroup', throw=True,
1975 self.hook('pretxnchangegroup', throw=True,
1972 node=hex(self.changelog.node(cor+1)), source=srctype,
1976 node=hex(self.changelog.node(cor+1)), source=srctype,
1973 url=url)
1977 url=url)
1974
1978
1975 tr.close()
1979 tr.close()
1976 finally:
1980 finally:
1977 del tr
1981 del tr
1978
1982
1979 if changesets > 0:
1983 if changesets > 0:
1980 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1984 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1981 source=srctype, url=url)
1985 source=srctype, url=url)
1982
1986
1983 for i in xrange(cor + 1, cnr + 1):
1987 for i in xrange(cor + 1, cnr + 1):
1984 self.hook("incoming", node=hex(self.changelog.node(i)),
1988 self.hook("incoming", node=hex(self.changelog.node(i)),
1985 source=srctype, url=url)
1989 source=srctype, url=url)
1986
1990
1987 # never return 0 here:
1991 # never return 0 here:
1988 if newheads < oldheads:
1992 if newheads < oldheads:
1989 return newheads - oldheads - 1
1993 return newheads - oldheads - 1
1990 else:
1994 else:
1991 return newheads - oldheads + 1
1995 return newheads - oldheads + 1
1992
1996
1993
1997
1994 def stream_in(self, remote):
1998 def stream_in(self, remote):
1995 fp = remote.stream_out()
1999 fp = remote.stream_out()
1996 l = fp.readline()
2000 l = fp.readline()
1997 try:
2001 try:
1998 resp = int(l)
2002 resp = int(l)
1999 except ValueError:
2003 except ValueError:
2000 raise util.UnexpectedOutput(
2004 raise util.UnexpectedOutput(
2001 _('Unexpected response from remote server:'), l)
2005 _('Unexpected response from remote server:'), l)
2002 if resp == 1:
2006 if resp == 1:
2003 raise util.Abort(_('operation forbidden by server'))
2007 raise util.Abort(_('operation forbidden by server'))
2004 elif resp == 2:
2008 elif resp == 2:
2005 raise util.Abort(_('locking the remote repository failed'))
2009 raise util.Abort(_('locking the remote repository failed'))
2006 elif resp != 0:
2010 elif resp != 0:
2007 raise util.Abort(_('the server sent an unknown error code'))
2011 raise util.Abort(_('the server sent an unknown error code'))
2008 self.ui.status(_('streaming all changes\n'))
2012 self.ui.status(_('streaming all changes\n'))
2009 l = fp.readline()
2013 l = fp.readline()
2010 try:
2014 try:
2011 total_files, total_bytes = map(int, l.split(' ', 1))
2015 total_files, total_bytes = map(int, l.split(' ', 1))
2012 except ValueError, TypeError:
2016 except ValueError, TypeError:
2013 raise util.UnexpectedOutput(
2017 raise util.UnexpectedOutput(
2014 _('Unexpected response from remote server:'), l)
2018 _('Unexpected response from remote server:'), l)
2015 self.ui.status(_('%d files to transfer, %s of data\n') %
2019 self.ui.status(_('%d files to transfer, %s of data\n') %
2016 (total_files, util.bytecount(total_bytes)))
2020 (total_files, util.bytecount(total_bytes)))
2017 start = time.time()
2021 start = time.time()
2018 for i in xrange(total_files):
2022 for i in xrange(total_files):
2019 # XXX doesn't support '\n' or '\r' in filenames
2023 # XXX doesn't support '\n' or '\r' in filenames
2020 l = fp.readline()
2024 l = fp.readline()
2021 try:
2025 try:
2022 name, size = l.split('\0', 1)
2026 name, size = l.split('\0', 1)
2023 size = int(size)
2027 size = int(size)
2024 except ValueError, TypeError:
2028 except ValueError, TypeError:
2025 raise util.UnexpectedOutput(
2029 raise util.UnexpectedOutput(
2026 _('Unexpected response from remote server:'), l)
2030 _('Unexpected response from remote server:'), l)
2027 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2031 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2028 ofp = self.sopener(name, 'w')
2032 ofp = self.sopener(name, 'w')
2029 for chunk in util.filechunkiter(fp, limit=size):
2033 for chunk in util.filechunkiter(fp, limit=size):
2030 ofp.write(chunk)
2034 ofp.write(chunk)
2031 ofp.close()
2035 ofp.close()
2032 elapsed = time.time() - start
2036 elapsed = time.time() - start
2033 if elapsed <= 0:
2037 if elapsed <= 0:
2034 elapsed = 0.001
2038 elapsed = 0.001
2035 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2039 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2036 (util.bytecount(total_bytes), elapsed,
2040 (util.bytecount(total_bytes), elapsed,
2037 util.bytecount(total_bytes / elapsed)))
2041 util.bytecount(total_bytes / elapsed)))
2038 self.invalidate()
2042 self.invalidate()
2039 return len(self.heads()) + 1
2043 return len(self.heads()) + 1
2040
2044
2041 def clone(self, remote, heads=[], stream=False):
2045 def clone(self, remote, heads=[], stream=False):
2042 '''clone remote repository.
2046 '''clone remote repository.
2043
2047
2044 keyword arguments:
2048 keyword arguments:
2045 heads: list of revs to clone (forces use of pull)
2049 heads: list of revs to clone (forces use of pull)
2046 stream: use streaming clone if possible'''
2050 stream: use streaming clone if possible'''
2047
2051
2048 # now, all clients that can request uncompressed clones can
2052 # now, all clients that can request uncompressed clones can
2049 # read repo formats supported by all servers that can serve
2053 # read repo formats supported by all servers that can serve
2050 # them.
2054 # them.
2051
2055
2052 # if revlog format changes, client will have to check version
2056 # if revlog format changes, client will have to check version
2053 # and format flags on "stream" capability, and use
2057 # and format flags on "stream" capability, and use
2054 # uncompressed only if compatible.
2058 # uncompressed only if compatible.
2055
2059
2056 if stream and not heads and remote.capable('stream'):
2060 if stream and not heads and remote.capable('stream'):
2057 return self.stream_in(remote)
2061 return self.stream_in(remote)
2058 return self.pull(remote, heads)
2062 return self.pull(remote, heads)
2059
2063
2060 # used to avoid circular references so destructors work
2064 # used to avoid circular references so destructors work
2061 def aftertrans(files):
2065 def aftertrans(files):
2062 renamefiles = [tuple(t) for t in files]
2066 renamefiles = [tuple(t) for t in files]
2063 def a():
2067 def a():
2064 for src, dest in renamefiles:
2068 for src, dest in renamefiles:
2065 util.rename(src, dest)
2069 util.rename(src, dest)
2066 return a
2070 return a
2067
2071
2068 def instance(ui, path, create):
2072 def instance(ui, path, create):
2069 return localrepository(ui, util.drop_scheme('file', path), create)
2073 return localrepository(ui, util.drop_scheme('file', path), create)
2070
2074
2071 def islocal(path):
2075 def islocal(path):
2072 return True
2076 return True
@@ -1,65 +1,101 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 cat > unix2dos.py <<EOF
3 cat > unix2dos.py <<EOF
4 import sys
4 import sys
5
5
6 for path in sys.argv[1:]:
6 for path in sys.argv[1:]:
7 data = file(path, 'rb').read()
7 data = file(path, 'rb').read()
8 data = data.replace('\n', '\r\n')
8 data = data.replace('\n', '\r\n')
9 file(path, 'wb').write(data)
9 file(path, 'wb').write(data)
10 EOF
10 EOF
11
11
12 cat > print.py <<EOF
13 import sys
14 print(sys.stdin.read().replace('\n', '<LF>').replace('\r', '<CR>').replace('\0', '<NUL>'))
15 EOF
16
12 hg init
17 hg init
13 echo '[hooks]' >> .hg/hgrc
18 echo '[hooks]' >> .hg/hgrc
14 echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
19 echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
15 echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
20 echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc
16 cat .hg/hgrc
21 cat .hg/hgrc
17 echo
22 echo
18
23
19 echo hello > f
24 echo hello > f
20 hg add f
25 hg add f
21 hg ci -m 1 -d'0 0'
26 hg ci -m 1 -d'0 0'
22 echo
27 echo
23
28
24 python unix2dos.py f
29 python unix2dos.py f
25 hg ci -m 2 -d'0 0'
30 hg ci -m 2 -d'0 0'
26 hg revert -a
31 hg revert -a
27 echo
32 echo
28
33
29 mkdir d
34 mkdir d
30 echo hello > d/f2
35 echo hello > d/f2
31 python unix2dos.py d/f2
36 python unix2dos.py d/f2
32 hg add d/f2
37 hg add d/f2
33 hg ci -m 3 -d'0 0'
38 hg ci -m 3 -d'0 0'
34 hg revert -a
39 hg revert -a
35 rm d/f2
40 rm d/f2
36 echo
41 echo
37
42
38 hg rem f
43 hg rem f
39 hg ci -m 4 -d'0 0'
44 hg ci -m 4 -d'0 0'
40 echo
45 echo
41
46
42 python -c 'file("bin", "wb").write("hello\x00\x0D\x0A")'
47 python -c 'file("bin", "wb").write("hello\x00\x0D\x0A")'
43 hg add bin
48 hg add bin
44 hg ci -m 5 -d'0 0'
49 hg ci -m 5 -d'0 0'
45 hg log -v
50 hg log -v
46 echo
51 echo
47
52
48 hg clone . dupe
53 hg clone . dupe
49 echo
54 echo
50 for x in a b c d; do echo content > dupe/$x; done
55 for x in a b c d; do echo content > dupe/$x; done
51 hg -R dupe add
56 hg -R dupe add
52 python unix2dos.py dupe/b dupe/c dupe/d
57 python unix2dos.py dupe/b dupe/c dupe/d
53 hg -R dupe ci -m a -d'0 0' dupe/a
58 hg -R dupe ci -m a -d'0 0' dupe/a
54 hg -R dupe ci -m b/c -d'0 0' dupe/[bc]
59 hg -R dupe ci -m b/c -d'0 0' dupe/[bc]
55 hg -R dupe ci -m d -d'0 0' dupe/d
60 hg -R dupe ci -m d -d'0 0' dupe/d
56 hg -R dupe log -v
61 hg -R dupe log -v
57 echo
62 echo
58
63
59 hg pull dupe
64 hg pull dupe
60 echo
65 echo
61
66
62 hg log -v
67 hg log -v
63 echo
68 echo
64
69
65 # XXX missing tests for encode/decode hooks
70 rm .hg/hgrc
71 (echo some; echo text) > f3
72 python -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")'
73 hg add f3 f4.bat
74 hg ci -m 6 -d'0 0'
75
76 python print.py < bin
77 python print.py < f3
78 python print.py < f4.bat
79 echo
80
81 echo '[extensions]' >> .hg/hgrc
82 echo 'win32text = ' >> .hg/hgrc
83 echo '[decode]' >> .hg/hgrc
84 echo '** = cleverdecode:' >> .hg/hgrc
85 echo '[encode]' >> .hg/hgrc
86 echo '** = cleverencode:' >> .hg/hgrc
87 cat .hg/hgrc
88 echo
89
90 rm f3 f4.bat bin
91 hg co 2>&1 | python -c 'import sys, os; sys.stdout.write(sys.stdin.read().replace(os.getcwd(), "...."))'
92 python print.py < bin
93 python print.py < f3
94 python print.py < f4.bat
95 echo
96
97 python -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")'
98 hg add f5.sh
99 hg ci -m 7 -d'0 0'
100 python print.py < f5.sh
101 hg cat f5.sh | python print.py
@@ -1,157 +1,179 b''
1 [hooks]
1 [hooks]
2 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
2 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
3 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
3 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
4
4
5
5
6 Attempt to commit or push text file(s) using CRLF line endings
6 Attempt to commit or push text file(s) using CRLF line endings
7 in b1aa5cde7ff4: f
7 in b1aa5cde7ff4: f
8 transaction abort!
8 transaction abort!
9 rollback completed
9 rollback completed
10 abort: pretxncommit.crlf hook failed
10 abort: pretxncommit.crlf hook failed
11 reverting f
11 reverting f
12
12
13 Attempt to commit or push text file(s) using CRLF line endings
13 Attempt to commit or push text file(s) using CRLF line endings
14 in 88b17af74937: d/f2
14 in 88b17af74937: d/f2
15 transaction abort!
15 transaction abort!
16 rollback completed
16 rollback completed
17 abort: pretxncommit.crlf hook failed
17 abort: pretxncommit.crlf hook failed
18 forgetting d/f2
18 forgetting d/f2
19
19
20
20
21 changeset: 2:b67b2dae057a
21 changeset: 2:b67b2dae057a
22 tag: tip
22 tag: tip
23 user: test
23 user: test
24 date: Thu Jan 01 00:00:00 1970 +0000
24 date: Thu Jan 01 00:00:00 1970 +0000
25 files: bin
25 files: bin
26 description:
26 description:
27 5
27 5
28
28
29
29
30 changeset: 1:c72a7d1d0907
30 changeset: 1:c72a7d1d0907
31 user: test
31 user: test
32 date: Thu Jan 01 00:00:00 1970 +0000
32 date: Thu Jan 01 00:00:00 1970 +0000
33 files: f
33 files: f
34 description:
34 description:
35 4
35 4
36
36
37
37
38 changeset: 0:fcf06d5c4e1d
38 changeset: 0:fcf06d5c4e1d
39 user: test
39 user: test
40 date: Thu Jan 01 00:00:00 1970 +0000
40 date: Thu Jan 01 00:00:00 1970 +0000
41 files: f
41 files: f
42 description:
42 description:
43 1
43 1
44
44
45
45
46
46
47 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
47 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48
48
49 adding dupe/a
49 adding dupe/a
50 adding dupe/b
50 adding dupe/b
51 adding dupe/c
51 adding dupe/c
52 adding dupe/d
52 adding dupe/d
53 changeset: 5:6e8a7629ff5b
53 changeset: 5:6e8a7629ff5b
54 tag: tip
54 tag: tip
55 user: test
55 user: test
56 date: Thu Jan 01 00:00:00 1970 +0000
56 date: Thu Jan 01 00:00:00 1970 +0000
57 files: d
57 files: d
58 description:
58 description:
59 d
59 d
60
60
61
61
62 changeset: 4:ac30a42ce8bc
62 changeset: 4:ac30a42ce8bc
63 user: test
63 user: test
64 date: Thu Jan 01 00:00:00 1970 +0000
64 date: Thu Jan 01 00:00:00 1970 +0000
65 files: b c
65 files: b c
66 description:
66 description:
67 b/c
67 b/c
68
68
69
69
70 changeset: 3:a73b85ef1fb7
70 changeset: 3:a73b85ef1fb7
71 user: test
71 user: test
72 date: Thu Jan 01 00:00:00 1970 +0000
72 date: Thu Jan 01 00:00:00 1970 +0000
73 files: a
73 files: a
74 description:
74 description:
75 a
75 a
76
76
77
77
78 changeset: 2:b67b2dae057a
78 changeset: 2:b67b2dae057a
79 user: test
79 user: test
80 date: Thu Jan 01 00:00:00 1970 +0000
80 date: Thu Jan 01 00:00:00 1970 +0000
81 files: bin
81 files: bin
82 description:
82 description:
83 5
83 5
84
84
85
85
86 changeset: 1:c72a7d1d0907
86 changeset: 1:c72a7d1d0907
87 user: test
87 user: test
88 date: Thu Jan 01 00:00:00 1970 +0000
88 date: Thu Jan 01 00:00:00 1970 +0000
89 files: f
89 files: f
90 description:
90 description:
91 4
91 4
92
92
93
93
94 changeset: 0:fcf06d5c4e1d
94 changeset: 0:fcf06d5c4e1d
95 user: test
95 user: test
96 date: Thu Jan 01 00:00:00 1970 +0000
96 date: Thu Jan 01 00:00:00 1970 +0000
97 files: f
97 files: f
98 description:
98 description:
99 1
99 1
100
100
101
101
102
102
103 pulling from dupe
103 pulling from dupe
104 searching for changes
104 searching for changes
105 adding changesets
105 adding changesets
106 adding manifests
106 adding manifests
107 adding file changes
107 adding file changes
108 added 3 changesets with 4 changes to 4 files
108 added 3 changesets with 4 changes to 4 files
109 Attempt to commit or push text file(s) using CRLF line endings
109 Attempt to commit or push text file(s) using CRLF line endings
110 in ac30a42ce8bc: b
110 in ac30a42ce8bc: b
111 in ac30a42ce8bc: c
111 in ac30a42ce8bc: c
112 in 6e8a7629ff5b: d
112 in 6e8a7629ff5b: d
113
113
114 To prevent this mistake in your local repository,
114 To prevent this mistake in your local repository,
115 add to Mercurial.ini or .hg/hgrc:
115 add to Mercurial.ini or .hg/hgrc:
116
116
117 [hooks]
117 [hooks]
118 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
118 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
119
119
120 and also consider adding:
120 and also consider adding:
121
121
122 [extensions]
122 [extensions]
123 hgext.win32text =
123 hgext.win32text =
124 [encode]
124 [encode]
125 ** = cleverencode:
125 ** = cleverencode:
126 [decode]
126 [decode]
127 ** = cleverdecode:
127 ** = cleverdecode:
128 transaction abort!
128 transaction abort!
129 rollback completed
129 rollback completed
130 abort: pretxnchangegroup.crlf hook failed
130 abort: pretxnchangegroup.crlf hook failed
131
131
132 changeset: 2:b67b2dae057a
132 changeset: 2:b67b2dae057a
133 tag: tip
133 tag: tip
134 user: test
134 user: test
135 date: Thu Jan 01 00:00:00 1970 +0000
135 date: Thu Jan 01 00:00:00 1970 +0000
136 files: bin
136 files: bin
137 description:
137 description:
138 5
138 5
139
139
140
140
141 changeset: 1:c72a7d1d0907
141 changeset: 1:c72a7d1d0907
142 user: test
142 user: test
143 date: Thu Jan 01 00:00:00 1970 +0000
143 date: Thu Jan 01 00:00:00 1970 +0000
144 files: f
144 files: f
145 description:
145 description:
146 4
146 4
147
147
148
148
149 changeset: 0:fcf06d5c4e1d
149 changeset: 0:fcf06d5c4e1d
150 user: test
150 user: test
151 date: Thu Jan 01 00:00:00 1970 +0000
151 date: Thu Jan 01 00:00:00 1970 +0000
152 files: f
152 files: f
153 description:
153 description:
154 1
154 1
155
155
156
156
157
157
158 hello<NUL><CR><LF>
159 some<LF>text<LF>
160 rem empty<CR><LF>
161
162 [extensions]
163 win32text =
164 [decode]
165 ** = cleverdecode:
166 [encode]
167 ** = cleverencode:
168
169 WARNING: f4.bat already has CRLF line endings
170 and does not need EOL conversion by the win32text plugin.
171 Before your next commit, please reconsider your encode/decode settings in
172 Mercurial.ini or ..../.hg/hgrc.
173 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
174 hello<NUL><CR><LF>
175 some<CR><LF>text<CR><LF>
176 rem empty<CR><LF>
177
178 # empty<CR><LF>
179 # empty<LF>
General Comments 0
You need to be logged in to leave comments. Login now