Show More
@@ -1,928 +1,928 b'' | |||||
1 | # Subversion 1.4/1.5 Python API backend |
|
1 | # Subversion 1.4/1.5 Python API backend | |
2 | # |
|
2 | # | |
3 | # Copyright(C) 2007 Daniel Holth et al |
|
3 | # Copyright(C) 2007 Daniel Holth et al | |
4 | # |
|
4 | # | |
5 | # Configuration options: |
|
5 | # Configuration options: | |
6 | # |
|
6 | # | |
7 | # convert.svn.trunk |
|
7 | # convert.svn.trunk | |
8 | # Relative path to the trunk (default: "trunk") |
|
8 | # Relative path to the trunk (default: "trunk") | |
9 | # convert.svn.branches |
|
9 | # convert.svn.branches | |
10 | # Relative path to tree of branches (default: "branches") |
|
10 | # Relative path to tree of branches (default: "branches") | |
11 | # convert.svn.tags |
|
11 | # convert.svn.tags | |
12 | # Relative path to tree of tags (default: "tags") |
|
12 | # Relative path to tree of tags (default: "tags") | |
13 | # |
|
13 | # | |
14 | # Set these in a hgrc, or on the command line as follows: |
|
14 | # Set these in a hgrc, or on the command line as follows: | |
15 | # |
|
15 | # | |
16 | # hg convert --config convert.svn.trunk=wackoname [...] |
|
16 | # hg convert --config convert.svn.trunk=wackoname [...] | |
17 |
|
17 | |||
18 | import locale |
|
18 | import locale | |
19 | import os |
|
19 | import os | |
20 | import re |
|
20 | import re | |
21 | import sys |
|
21 | import sys | |
22 | import cPickle as pickle |
|
22 | import cPickle as pickle | |
23 | import tempfile |
|
23 | import tempfile | |
24 |
|
24 | |||
25 | from mercurial import strutil, util |
|
25 | from mercurial import strutil, util | |
26 | from mercurial.i18n import _ |
|
26 | from mercurial.i18n import _ | |
27 |
|
27 | |||
28 | # Subversion stuff. Works best with very recent Python SVN bindings |
|
28 | # Subversion stuff. Works best with very recent Python SVN bindings | |
29 | # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing |
|
29 | # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing | |
30 | # these bindings. |
|
30 | # these bindings. | |
31 |
|
31 | |||
32 | from cStringIO import StringIO |
|
32 | from cStringIO import StringIO | |
33 |
|
33 | |||
34 | from common import NoRepo, commit, converter_source, encodeargs, decodeargs |
|
34 | from common import NoRepo, commit, converter_source, encodeargs, decodeargs | |
35 | from common import commandline, converter_sink, mapfile |
|
35 | from common import commandline, converter_sink, mapfile | |
36 |
|
36 | |||
37 | try: |
|
37 | try: | |
38 | from svn.core import SubversionException, Pool |
|
38 | from svn.core import SubversionException, Pool | |
39 | import svn |
|
39 | import svn | |
40 | import svn.client |
|
40 | import svn.client | |
41 | import svn.core |
|
41 | import svn.core | |
42 | import svn.ra |
|
42 | import svn.ra | |
43 | import svn.delta |
|
43 | import svn.delta | |
44 | import transport |
|
44 | import transport | |
45 | except ImportError: |
|
45 | except ImportError: | |
46 | pass |
|
46 | pass | |
47 |
|
47 | |||
48 | def geturl(path): |
|
48 | def geturl(path): | |
49 | try: |
|
49 | try: | |
50 | return svn.client.url_from_path(svn.core.svn_path_canonicalize(path)) |
|
50 | return svn.client.url_from_path(svn.core.svn_path_canonicalize(path)) | |
51 | except SubversionException: |
|
51 | except SubversionException: | |
52 | pass |
|
52 | pass | |
53 | if os.path.isdir(path): |
|
53 | if os.path.isdir(path): | |
54 | path = os.path.normpath(os.path.abspath(path)) |
|
54 | path = os.path.normpath(os.path.abspath(path)) | |
55 | if os.name == 'nt': |
|
55 | if os.name == 'nt': | |
56 |
path = '/' + path |
|
56 | path = '/' + util.normpath(path) | |
57 | return 'file://%s' % path |
|
57 | return 'file://%s' % path | |
58 | return path |
|
58 | return path | |
59 |
|
59 | |||
60 | def optrev(number): |
|
60 | def optrev(number): | |
61 | optrev = svn.core.svn_opt_revision_t() |
|
61 | optrev = svn.core.svn_opt_revision_t() | |
62 | optrev.kind = svn.core.svn_opt_revision_number |
|
62 | optrev.kind = svn.core.svn_opt_revision_number | |
63 | optrev.value.number = number |
|
63 | optrev.value.number = number | |
64 | return optrev |
|
64 | return optrev | |
65 |
|
65 | |||
66 | class changedpath(object): |
|
66 | class changedpath(object): | |
67 | def __init__(self, p): |
|
67 | def __init__(self, p): | |
68 | self.copyfrom_path = p.copyfrom_path |
|
68 | self.copyfrom_path = p.copyfrom_path | |
69 | self.copyfrom_rev = p.copyfrom_rev |
|
69 | self.copyfrom_rev = p.copyfrom_rev | |
70 | self.action = p.action |
|
70 | self.action = p.action | |
71 |
|
71 | |||
72 | def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True, |
|
72 | def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True, | |
73 | strict_node_history=False): |
|
73 | strict_node_history=False): | |
74 | protocol = -1 |
|
74 | protocol = -1 | |
75 | def receiver(orig_paths, revnum, author, date, message, pool): |
|
75 | def receiver(orig_paths, revnum, author, date, message, pool): | |
76 | if orig_paths is not None: |
|
76 | if orig_paths is not None: | |
77 | for k, v in orig_paths.iteritems(): |
|
77 | for k, v in orig_paths.iteritems(): | |
78 | orig_paths[k] = changedpath(v) |
|
78 | orig_paths[k] = changedpath(v) | |
79 | pickle.dump((orig_paths, revnum, author, date, message), |
|
79 | pickle.dump((orig_paths, revnum, author, date, message), | |
80 | fp, protocol) |
|
80 | fp, protocol) | |
81 |
|
81 | |||
82 | try: |
|
82 | try: | |
83 | # Use an ra of our own so that our parent can consume |
|
83 | # Use an ra of our own so that our parent can consume | |
84 | # our results without confusing the server. |
|
84 | # our results without confusing the server. | |
85 | t = transport.SvnRaTransport(url=url) |
|
85 | t = transport.SvnRaTransport(url=url) | |
86 | svn.ra.get_log(t.ra, paths, start, end, limit, |
|
86 | svn.ra.get_log(t.ra, paths, start, end, limit, | |
87 | discover_changed_paths, |
|
87 | discover_changed_paths, | |
88 | strict_node_history, |
|
88 | strict_node_history, | |
89 | receiver) |
|
89 | receiver) | |
90 | except SubversionException, (inst, num): |
|
90 | except SubversionException, (inst, num): | |
91 | pickle.dump(num, fp, protocol) |
|
91 | pickle.dump(num, fp, protocol) | |
92 | else: |
|
92 | else: | |
93 | pickle.dump(None, fp, protocol) |
|
93 | pickle.dump(None, fp, protocol) | |
94 | fp.close() |
|
94 | fp.close() | |
95 |
|
95 | |||
96 | def debugsvnlog(ui, **opts): |
|
96 | def debugsvnlog(ui, **opts): | |
97 | """Fetch SVN log in a subprocess and channel them back to parent to |
|
97 | """Fetch SVN log in a subprocess and channel them back to parent to | |
98 | avoid memory collection issues. |
|
98 | avoid memory collection issues. | |
99 | """ |
|
99 | """ | |
100 | util.set_binary(sys.stdin) |
|
100 | util.set_binary(sys.stdin) | |
101 | util.set_binary(sys.stdout) |
|
101 | util.set_binary(sys.stdout) | |
102 | args = decodeargs(sys.stdin.read()) |
|
102 | args = decodeargs(sys.stdin.read()) | |
103 | get_log_child(sys.stdout, *args) |
|
103 | get_log_child(sys.stdout, *args) | |
104 |
|
104 | |||
105 | # SVN conversion code stolen from bzr-svn and tailor |
|
105 | # SVN conversion code stolen from bzr-svn and tailor | |
106 | class svn_source(converter_source): |
|
106 | class svn_source(converter_source): | |
107 | def __init__(self, ui, url, rev=None): |
|
107 | def __init__(self, ui, url, rev=None): | |
108 | super(svn_source, self).__init__(ui, url, rev=rev) |
|
108 | super(svn_source, self).__init__(ui, url, rev=rev) | |
109 |
|
109 | |||
110 | try: |
|
110 | try: | |
111 | SubversionException |
|
111 | SubversionException | |
112 | except NameError: |
|
112 | except NameError: | |
113 | raise NoRepo('Subversion python bindings could not be loaded') |
|
113 | raise NoRepo('Subversion python bindings could not be loaded') | |
114 |
|
114 | |||
115 | self.encoding = locale.getpreferredencoding() |
|
115 | self.encoding = locale.getpreferredencoding() | |
116 | self.lastrevs = {} |
|
116 | self.lastrevs = {} | |
117 |
|
117 | |||
118 | latest = None |
|
118 | latest = None | |
119 | try: |
|
119 | try: | |
120 | # Support file://path@rev syntax. Useful e.g. to convert |
|
120 | # Support file://path@rev syntax. Useful e.g. to convert | |
121 | # deleted branches. |
|
121 | # deleted branches. | |
122 | at = url.rfind('@') |
|
122 | at = url.rfind('@') | |
123 | if at >= 0: |
|
123 | if at >= 0: | |
124 | latest = int(url[at+1:]) |
|
124 | latest = int(url[at+1:]) | |
125 | url = url[:at] |
|
125 | url = url[:at] | |
126 | except ValueError, e: |
|
126 | except ValueError, e: | |
127 | pass |
|
127 | pass | |
128 | self.url = geturl(url) |
|
128 | self.url = geturl(url) | |
129 | self.encoding = 'UTF-8' # Subversion is always nominal UTF-8 |
|
129 | self.encoding = 'UTF-8' # Subversion is always nominal UTF-8 | |
130 | try: |
|
130 | try: | |
131 | self.transport = transport.SvnRaTransport(url=self.url) |
|
131 | self.transport = transport.SvnRaTransport(url=self.url) | |
132 | self.ra = self.transport.ra |
|
132 | self.ra = self.transport.ra | |
133 | self.ctx = self.transport.client |
|
133 | self.ctx = self.transport.client | |
134 | self.base = svn.ra.get_repos_root(self.ra) |
|
134 | self.base = svn.ra.get_repos_root(self.ra) | |
135 | self.module = self.url[len(self.base):] |
|
135 | self.module = self.url[len(self.base):] | |
136 | self.modulemap = {} # revision, module |
|
136 | self.modulemap = {} # revision, module | |
137 | self.commits = {} |
|
137 | self.commits = {} | |
138 | self.paths = {} |
|
138 | self.paths = {} | |
139 | self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding) |
|
139 | self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding) | |
140 | except SubversionException, e: |
|
140 | except SubversionException, e: | |
141 | ui.print_exc() |
|
141 | ui.print_exc() | |
142 | raise NoRepo("%s does not look like a Subversion repo" % self.url) |
|
142 | raise NoRepo("%s does not look like a Subversion repo" % self.url) | |
143 |
|
143 | |||
144 | if rev: |
|
144 | if rev: | |
145 | try: |
|
145 | try: | |
146 | latest = int(rev) |
|
146 | latest = int(rev) | |
147 | except ValueError: |
|
147 | except ValueError: | |
148 | raise util.Abort('svn: revision %s is not an integer' % rev) |
|
148 | raise util.Abort('svn: revision %s is not an integer' % rev) | |
149 |
|
149 | |||
150 | try: |
|
150 | try: | |
151 | self.get_blacklist() |
|
151 | self.get_blacklist() | |
152 | except IOError, e: |
|
152 | except IOError, e: | |
153 | pass |
|
153 | pass | |
154 |
|
154 | |||
155 | self.last_changed = self.latest(self.module, latest) |
|
155 | self.last_changed = self.latest(self.module, latest) | |
156 |
|
156 | |||
157 | self.head = self.revid(self.last_changed) |
|
157 | self.head = self.revid(self.last_changed) | |
158 | self._changescache = None |
|
158 | self._changescache = None | |
159 |
|
159 | |||
160 | if os.path.exists(os.path.join(url, '.svn/entries')): |
|
160 | if os.path.exists(os.path.join(url, '.svn/entries')): | |
161 | self.wc = url |
|
161 | self.wc = url | |
162 | else: |
|
162 | else: | |
163 | self.wc = None |
|
163 | self.wc = None | |
164 | self.convertfp = None |
|
164 | self.convertfp = None | |
165 |
|
165 | |||
166 | def setrevmap(self, revmap): |
|
166 | def setrevmap(self, revmap): | |
167 | lastrevs = {} |
|
167 | lastrevs = {} | |
168 | for revid in revmap.iterkeys(): |
|
168 | for revid in revmap.iterkeys(): | |
169 | uuid, module, revnum = self.revsplit(revid) |
|
169 | uuid, module, revnum = self.revsplit(revid) | |
170 | lastrevnum = lastrevs.setdefault(module, revnum) |
|
170 | lastrevnum = lastrevs.setdefault(module, revnum) | |
171 | if revnum > lastrevnum: |
|
171 | if revnum > lastrevnum: | |
172 | lastrevs[module] = revnum |
|
172 | lastrevs[module] = revnum | |
173 | self.lastrevs = lastrevs |
|
173 | self.lastrevs = lastrevs | |
174 |
|
174 | |||
175 | def exists(self, path, optrev): |
|
175 | def exists(self, path, optrev): | |
176 | try: |
|
176 | try: | |
177 | svn.client.ls(self.url.rstrip('/') + '/' + path, |
|
177 | svn.client.ls(self.url.rstrip('/') + '/' + path, | |
178 | optrev, False, self.ctx) |
|
178 | optrev, False, self.ctx) | |
179 | return True |
|
179 | return True | |
180 | except SubversionException, err: |
|
180 | except SubversionException, err: | |
181 | return False |
|
181 | return False | |
182 |
|
182 | |||
183 | def getheads(self): |
|
183 | def getheads(self): | |
184 | # detect standard /branches, /tags, /trunk layout |
|
184 | # detect standard /branches, /tags, /trunk layout | |
185 | rev = optrev(self.last_changed) |
|
185 | rev = optrev(self.last_changed) | |
186 | rpath = self.url.strip('/') |
|
186 | rpath = self.url.strip('/') | |
187 | cfgtrunk = self.ui.config('convert', 'svn.trunk') |
|
187 | cfgtrunk = self.ui.config('convert', 'svn.trunk') | |
188 | cfgbranches = self.ui.config('convert', 'svn.branches') |
|
188 | cfgbranches = self.ui.config('convert', 'svn.branches') | |
189 | cfgtags = self.ui.config('convert', 'svn.tags') |
|
189 | cfgtags = self.ui.config('convert', 'svn.tags') | |
190 | trunk = (cfgtrunk or 'trunk').strip('/') |
|
190 | trunk = (cfgtrunk or 'trunk').strip('/') | |
191 | branches = (cfgbranches or 'branches').strip('/') |
|
191 | branches = (cfgbranches or 'branches').strip('/') | |
192 | tags = (cfgtags or 'tags').strip('/') |
|
192 | tags = (cfgtags or 'tags').strip('/') | |
193 | if self.exists(trunk, rev) and self.exists(branches, rev) and self.exists(tags, rev): |
|
193 | if self.exists(trunk, rev) and self.exists(branches, rev) and self.exists(tags, rev): | |
194 | self.ui.note('found trunk at %r, branches at %r and tags at %r\n' % |
|
194 | self.ui.note('found trunk at %r, branches at %r and tags at %r\n' % | |
195 | (trunk, branches, tags)) |
|
195 | (trunk, branches, tags)) | |
196 | oldmodule = self.module |
|
196 | oldmodule = self.module | |
197 | self.module += '/' + trunk |
|
197 | self.module += '/' + trunk | |
198 | lt = self.latest(self.module, self.last_changed) |
|
198 | lt = self.latest(self.module, self.last_changed) | |
199 | self.head = self.revid(lt) |
|
199 | self.head = self.revid(lt) | |
200 | self.heads = [self.head] |
|
200 | self.heads = [self.head] | |
201 | branchnames = svn.client.ls(rpath + '/' + branches, rev, False, |
|
201 | branchnames = svn.client.ls(rpath + '/' + branches, rev, False, | |
202 | self.ctx) |
|
202 | self.ctx) | |
203 | for branch in branchnames.keys(): |
|
203 | for branch in branchnames.keys(): | |
204 | if oldmodule: |
|
204 | if oldmodule: | |
205 | module = oldmodule + '/' + branches + '/' + branch |
|
205 | module = oldmodule + '/' + branches + '/' + branch | |
206 | else: |
|
206 | else: | |
207 | module = '/' + branches + '/' + branch |
|
207 | module = '/' + branches + '/' + branch | |
208 | brevnum = self.latest(module, self.last_changed) |
|
208 | brevnum = self.latest(module, self.last_changed) | |
209 | brev = self.revid(brevnum, module) |
|
209 | brev = self.revid(brevnum, module) | |
210 | self.ui.note('found branch %s at %d\n' % (branch, brevnum)) |
|
210 | self.ui.note('found branch %s at %d\n' % (branch, brevnum)) | |
211 | self.heads.append(brev) |
|
211 | self.heads.append(brev) | |
212 |
|
212 | |||
213 | if oldmodule: |
|
213 | if oldmodule: | |
214 | self.tags = '%s/%s' % (oldmodule, tags) |
|
214 | self.tags = '%s/%s' % (oldmodule, tags) | |
215 | else: |
|
215 | else: | |
216 | self.tags = '/%s' % tags |
|
216 | self.tags = '/%s' % tags | |
217 |
|
217 | |||
218 | elif cfgtrunk or cfgbranches or cfgtags: |
|
218 | elif cfgtrunk or cfgbranches or cfgtags: | |
219 | raise util.Abort('trunk/branch/tags layout expected, but not found') |
|
219 | raise util.Abort('trunk/branch/tags layout expected, but not found') | |
220 | else: |
|
220 | else: | |
221 | self.ui.note('working with one branch\n') |
|
221 | self.ui.note('working with one branch\n') | |
222 | self.heads = [self.head] |
|
222 | self.heads = [self.head] | |
223 | self.tags = tags |
|
223 | self.tags = tags | |
224 | return self.heads |
|
224 | return self.heads | |
225 |
|
225 | |||
226 | def getfile(self, file, rev): |
|
226 | def getfile(self, file, rev): | |
227 | data, mode = self._getfile(file, rev) |
|
227 | data, mode = self._getfile(file, rev) | |
228 | self.modecache[(file, rev)] = mode |
|
228 | self.modecache[(file, rev)] = mode | |
229 | return data |
|
229 | return data | |
230 |
|
230 | |||
231 | def getmode(self, file, rev): |
|
231 | def getmode(self, file, rev): | |
232 | return self.modecache[(file, rev)] |
|
232 | return self.modecache[(file, rev)] | |
233 |
|
233 | |||
234 | def getchanges(self, rev): |
|
234 | def getchanges(self, rev): | |
235 | if self._changescache and self._changescache[0] == rev: |
|
235 | if self._changescache and self._changescache[0] == rev: | |
236 | return self._changescache[1] |
|
236 | return self._changescache[1] | |
237 | self._changescache = None |
|
237 | self._changescache = None | |
238 | self.modecache = {} |
|
238 | self.modecache = {} | |
239 | (paths, parents) = self.paths[rev] |
|
239 | (paths, parents) = self.paths[rev] | |
240 | files, copies = self.expandpaths(rev, paths, parents) |
|
240 | files, copies = self.expandpaths(rev, paths, parents) | |
241 | files.sort() |
|
241 | files.sort() | |
242 | files = zip(files, [rev] * len(files)) |
|
242 | files = zip(files, [rev] * len(files)) | |
243 |
|
243 | |||
244 | # caller caches the result, so free it here to release memory |
|
244 | # caller caches the result, so free it here to release memory | |
245 | del self.paths[rev] |
|
245 | del self.paths[rev] | |
246 | return (files, copies) |
|
246 | return (files, copies) | |
247 |
|
247 | |||
248 | def getchangedfiles(self, rev, i): |
|
248 | def getchangedfiles(self, rev, i): | |
249 | changes = self.getchanges(rev) |
|
249 | changes = self.getchanges(rev) | |
250 | self._changescache = (rev, changes) |
|
250 | self._changescache = (rev, changes) | |
251 | return [f[0] for f in changes[0]] |
|
251 | return [f[0] for f in changes[0]] | |
252 |
|
252 | |||
253 | def getcommit(self, rev): |
|
253 | def getcommit(self, rev): | |
254 | if rev not in self.commits: |
|
254 | if rev not in self.commits: | |
255 | uuid, module, revnum = self.revsplit(rev) |
|
255 | uuid, module, revnum = self.revsplit(rev) | |
256 | self.module = module |
|
256 | self.module = module | |
257 | self.reparent(module) |
|
257 | self.reparent(module) | |
258 | stop = self.lastrevs.get(module, 0) |
|
258 | stop = self.lastrevs.get(module, 0) | |
259 | self._fetch_revisions(from_revnum=revnum, to_revnum=stop) |
|
259 | self._fetch_revisions(from_revnum=revnum, to_revnum=stop) | |
260 | commit = self.commits[rev] |
|
260 | commit = self.commits[rev] | |
261 | # caller caches the result, so free it here to release memory |
|
261 | # caller caches the result, so free it here to release memory | |
262 | del self.commits[rev] |
|
262 | del self.commits[rev] | |
263 | return commit |
|
263 | return commit | |
264 |
|
264 | |||
265 | def get_log(self, paths, start, end, limit=0, discover_changed_paths=True, |
|
265 | def get_log(self, paths, start, end, limit=0, discover_changed_paths=True, | |
266 | strict_node_history=False): |
|
266 | strict_node_history=False): | |
267 |
|
267 | |||
268 | def parent(fp): |
|
268 | def parent(fp): | |
269 | while True: |
|
269 | while True: | |
270 | entry = pickle.load(fp) |
|
270 | entry = pickle.load(fp) | |
271 | try: |
|
271 | try: | |
272 | orig_paths, revnum, author, date, message = entry |
|
272 | orig_paths, revnum, author, date, message = entry | |
273 | except: |
|
273 | except: | |
274 | if entry is None: |
|
274 | if entry is None: | |
275 | break |
|
275 | break | |
276 | raise SubversionException("child raised exception", entry) |
|
276 | raise SubversionException("child raised exception", entry) | |
277 | yield entry |
|
277 | yield entry | |
278 |
|
278 | |||
279 | args = [self.url, paths, start, end, limit, discover_changed_paths, |
|
279 | args = [self.url, paths, start, end, limit, discover_changed_paths, | |
280 | strict_node_history] |
|
280 | strict_node_history] | |
281 | arg = encodeargs(args) |
|
281 | arg = encodeargs(args) | |
282 | hgexe = util.hgexecutable() |
|
282 | hgexe = util.hgexecutable() | |
283 | cmd = '%s debugsvnlog' % util.shellquote(hgexe) |
|
283 | cmd = '%s debugsvnlog' % util.shellquote(hgexe) | |
284 | stdin, stdout = os.popen2(cmd, 'b') |
|
284 | stdin, stdout = os.popen2(cmd, 'b') | |
285 |
|
285 | |||
286 | stdin.write(arg) |
|
286 | stdin.write(arg) | |
287 | stdin.close() |
|
287 | stdin.close() | |
288 |
|
288 | |||
289 | for p in parent(stdout): |
|
289 | for p in parent(stdout): | |
290 | yield p |
|
290 | yield p | |
291 |
|
291 | |||
292 | def gettags(self): |
|
292 | def gettags(self): | |
293 | tags = {} |
|
293 | tags = {} | |
294 | start = self.revnum(self.head) |
|
294 | start = self.revnum(self.head) | |
295 | try: |
|
295 | try: | |
296 | for entry in self.get_log([self.tags], 0, start): |
|
296 | for entry in self.get_log([self.tags], 0, start): | |
297 | orig_paths, revnum, author, date, message = entry |
|
297 | orig_paths, revnum, author, date, message = entry | |
298 | for path in orig_paths: |
|
298 | for path in orig_paths: | |
299 | if not path.startswith(self.tags+'/'): |
|
299 | if not path.startswith(self.tags+'/'): | |
300 | continue |
|
300 | continue | |
301 | ent = orig_paths[path] |
|
301 | ent = orig_paths[path] | |
302 | source = ent.copyfrom_path |
|
302 | source = ent.copyfrom_path | |
303 | rev = ent.copyfrom_rev |
|
303 | rev = ent.copyfrom_rev | |
304 | tag = path.split('/')[-1] |
|
304 | tag = path.split('/')[-1] | |
305 | tags[tag] = self.revid(rev, module=source) |
|
305 | tags[tag] = self.revid(rev, module=source) | |
306 | except SubversionException, (inst, num): |
|
306 | except SubversionException, (inst, num): | |
307 | self.ui.note('no tags found at revision %d\n' % start) |
|
307 | self.ui.note('no tags found at revision %d\n' % start) | |
308 | return tags |
|
308 | return tags | |
309 |
|
309 | |||
310 | def converted(self, rev, destrev): |
|
310 | def converted(self, rev, destrev): | |
311 | if not self.wc: |
|
311 | if not self.wc: | |
312 | return |
|
312 | return | |
313 | if self.convertfp is None: |
|
313 | if self.convertfp is None: | |
314 | self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), |
|
314 | self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), | |
315 | 'a') |
|
315 | 'a') | |
316 | self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) |
|
316 | self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) | |
317 | self.convertfp.flush() |
|
317 | self.convertfp.flush() | |
318 |
|
318 | |||
319 | # -- helper functions -- |
|
319 | # -- helper functions -- | |
320 |
|
320 | |||
321 | def revid(self, revnum, module=None): |
|
321 | def revid(self, revnum, module=None): | |
322 | if not module: |
|
322 | if not module: | |
323 | module = self.module |
|
323 | module = self.module | |
324 | return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding), |
|
324 | return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding), | |
325 | revnum) |
|
325 | revnum) | |
326 |
|
326 | |||
327 | def revnum(self, rev): |
|
327 | def revnum(self, rev): | |
328 | return int(rev.split('@')[-1]) |
|
328 | return int(rev.split('@')[-1]) | |
329 |
|
329 | |||
330 | def revsplit(self, rev): |
|
330 | def revsplit(self, rev): | |
331 | url, revnum = rev.encode(self.encoding).split('@', 1) |
|
331 | url, revnum = rev.encode(self.encoding).split('@', 1) | |
332 | revnum = int(revnum) |
|
332 | revnum = int(revnum) | |
333 | parts = url.split('/', 1) |
|
333 | parts = url.split('/', 1) | |
334 | uuid = parts.pop(0)[4:] |
|
334 | uuid = parts.pop(0)[4:] | |
335 | mod = '' |
|
335 | mod = '' | |
336 | if parts: |
|
336 | if parts: | |
337 | mod = '/' + parts[0] |
|
337 | mod = '/' + parts[0] | |
338 | return uuid, mod, revnum |
|
338 | return uuid, mod, revnum | |
339 |
|
339 | |||
340 | def latest(self, path, stop=0): |
|
340 | def latest(self, path, stop=0): | |
341 | 'find the latest revision affecting path, up to stop' |
|
341 | 'find the latest revision affecting path, up to stop' | |
342 | if not stop: |
|
342 | if not stop: | |
343 | stop = svn.ra.get_latest_revnum(self.ra) |
|
343 | stop = svn.ra.get_latest_revnum(self.ra) | |
344 | try: |
|
344 | try: | |
345 | self.reparent('') |
|
345 | self.reparent('') | |
346 | dirent = svn.ra.stat(self.ra, path.strip('/'), stop) |
|
346 | dirent = svn.ra.stat(self.ra, path.strip('/'), stop) | |
347 | self.reparent(self.module) |
|
347 | self.reparent(self.module) | |
348 | except SubversionException: |
|
348 | except SubversionException: | |
349 | dirent = None |
|
349 | dirent = None | |
350 | if not dirent: |
|
350 | if not dirent: | |
351 | raise util.Abort('%s not found up to revision %d' % (path, stop)) |
|
351 | raise util.Abort('%s not found up to revision %d' % (path, stop)) | |
352 |
|
352 | |||
353 | return dirent.created_rev |
|
353 | return dirent.created_rev | |
354 |
|
354 | |||
355 | def get_blacklist(self): |
|
355 | def get_blacklist(self): | |
356 | """Avoid certain revision numbers. |
|
356 | """Avoid certain revision numbers. | |
357 | It is not uncommon for two nearby revisions to cancel each other |
|
357 | It is not uncommon for two nearby revisions to cancel each other | |
358 | out, e.g. 'I copied trunk into a subdirectory of itself instead |
|
358 | out, e.g. 'I copied trunk into a subdirectory of itself instead | |
359 | of making a branch'. The converted repository is significantly |
|
359 | of making a branch'. The converted repository is significantly | |
360 | smaller if we ignore such revisions.""" |
|
360 | smaller if we ignore such revisions.""" | |
361 | self.blacklist = util.set() |
|
361 | self.blacklist = util.set() | |
362 | blacklist = self.blacklist |
|
362 | blacklist = self.blacklist | |
363 | for line in file("blacklist.txt", "r"): |
|
363 | for line in file("blacklist.txt", "r"): | |
364 | if not line.startswith("#"): |
|
364 | if not line.startswith("#"): | |
365 | try: |
|
365 | try: | |
366 | svn_rev = int(line.strip()) |
|
366 | svn_rev = int(line.strip()) | |
367 | blacklist.add(svn_rev) |
|
367 | blacklist.add(svn_rev) | |
368 | except ValueError, e: |
|
368 | except ValueError, e: | |
369 | pass # not an integer or a comment |
|
369 | pass # not an integer or a comment | |
370 |
|
370 | |||
371 | def is_blacklisted(self, svn_rev): |
|
371 | def is_blacklisted(self, svn_rev): | |
372 | return svn_rev in self.blacklist |
|
372 | return svn_rev in self.blacklist | |
373 |
|
373 | |||
374 | def reparent(self, module): |
|
374 | def reparent(self, module): | |
375 | svn_url = self.base + module |
|
375 | svn_url = self.base + module | |
376 | self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding)) |
|
376 | self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding)) | |
377 | svn.ra.reparent(self.ra, svn_url.encode(self.encoding)) |
|
377 | svn.ra.reparent(self.ra, svn_url.encode(self.encoding)) | |
378 |
|
378 | |||
379 | def expandpaths(self, rev, paths, parents): |
|
379 | def expandpaths(self, rev, paths, parents): | |
380 | def get_entry_from_path(path, module=self.module): |
|
380 | def get_entry_from_path(path, module=self.module): | |
381 | # Given the repository url of this wc, say |
|
381 | # Given the repository url of this wc, say | |
382 | # "http://server/plone/CMFPlone/branches/Plone-2_0-branch" |
|
382 | # "http://server/plone/CMFPlone/branches/Plone-2_0-branch" | |
383 | # extract the "entry" portion (a relative path) from what |
|
383 | # extract the "entry" portion (a relative path) from what | |
384 | # svn log --xml says, ie |
|
384 | # svn log --xml says, ie | |
385 | # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py" |
|
385 | # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py" | |
386 | # that is to say "tests/PloneTestCase.py" |
|
386 | # that is to say "tests/PloneTestCase.py" | |
387 | if path.startswith(module): |
|
387 | if path.startswith(module): | |
388 | relative = path[len(module):] |
|
388 | relative = path[len(module):] | |
389 | if relative.startswith('/'): |
|
389 | if relative.startswith('/'): | |
390 | return relative[1:] |
|
390 | return relative[1:] | |
391 | else: |
|
391 | else: | |
392 | return relative |
|
392 | return relative | |
393 |
|
393 | |||
394 | # The path is outside our tracked tree... |
|
394 | # The path is outside our tracked tree... | |
395 | self.ui.debug('%r is not under %r, ignoring\n' % (path, module)) |
|
395 | self.ui.debug('%r is not under %r, ignoring\n' % (path, module)) | |
396 | return None |
|
396 | return None | |
397 |
|
397 | |||
398 | entries = [] |
|
398 | entries = [] | |
399 | copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions. |
|
399 | copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions. | |
400 | copies = {} |
|
400 | copies = {} | |
401 | revnum = self.revnum(rev) |
|
401 | revnum = self.revnum(rev) | |
402 |
|
402 | |||
403 | if revnum in self.modulemap: |
|
403 | if revnum in self.modulemap: | |
404 | new_module = self.modulemap[revnum] |
|
404 | new_module = self.modulemap[revnum] | |
405 | if new_module != self.module: |
|
405 | if new_module != self.module: | |
406 | self.module = new_module |
|
406 | self.module = new_module | |
407 | self.reparent(self.module) |
|
407 | self.reparent(self.module) | |
408 |
|
408 | |||
409 | for path, ent in paths: |
|
409 | for path, ent in paths: | |
410 | entrypath = get_entry_from_path(path, module=self.module) |
|
410 | entrypath = get_entry_from_path(path, module=self.module) | |
411 | entry = entrypath.decode(self.encoding) |
|
411 | entry = entrypath.decode(self.encoding) | |
412 |
|
412 | |||
413 | kind = svn.ra.check_path(self.ra, entrypath, revnum) |
|
413 | kind = svn.ra.check_path(self.ra, entrypath, revnum) | |
414 | if kind == svn.core.svn_node_file: |
|
414 | if kind == svn.core.svn_node_file: | |
415 | if ent.copyfrom_path: |
|
415 | if ent.copyfrom_path: | |
416 | copyfrom_path = get_entry_from_path(ent.copyfrom_path) |
|
416 | copyfrom_path = get_entry_from_path(ent.copyfrom_path) | |
417 | if copyfrom_path: |
|
417 | if copyfrom_path: | |
418 | self.ui.debug("Copied to %s from %s@%s\n" % |
|
418 | self.ui.debug("Copied to %s from %s@%s\n" % | |
419 | (entrypath, copyfrom_path, |
|
419 | (entrypath, copyfrom_path, | |
420 | ent.copyfrom_rev)) |
|
420 | ent.copyfrom_rev)) | |
421 | # It's probably important for hg that the source |
|
421 | # It's probably important for hg that the source | |
422 | # exists in the revision's parent, not just the |
|
422 | # exists in the revision's parent, not just the | |
423 | # ent.copyfrom_rev |
|
423 | # ent.copyfrom_rev | |
424 | fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev) |
|
424 | fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev) | |
425 | if fromkind != 0: |
|
425 | if fromkind != 0: | |
426 | copies[self.recode(entry)] = self.recode(copyfrom_path) |
|
426 | copies[self.recode(entry)] = self.recode(copyfrom_path) | |
427 | entries.append(self.recode(entry)) |
|
427 | entries.append(self.recode(entry)) | |
428 | elif kind == 0: # gone, but had better be a deleted *file* |
|
428 | elif kind == 0: # gone, but had better be a deleted *file* | |
429 | self.ui.debug("gone from %s\n" % ent.copyfrom_rev) |
|
429 | self.ui.debug("gone from %s\n" % ent.copyfrom_rev) | |
430 |
|
430 | |||
431 | # if a branch is created but entries are removed in the same |
|
431 | # if a branch is created but entries are removed in the same | |
432 | # changeset, get the right fromrev |
|
432 | # changeset, get the right fromrev | |
433 | if parents: |
|
433 | if parents: | |
434 | uuid, old_module, fromrev = self.revsplit(parents[0]) |
|
434 | uuid, old_module, fromrev = self.revsplit(parents[0]) | |
435 | else: |
|
435 | else: | |
436 | fromrev = revnum - 1 |
|
436 | fromrev = revnum - 1 | |
437 | # might always need to be revnum - 1 in these 3 lines? |
|
437 | # might always need to be revnum - 1 in these 3 lines? | |
438 | old_module = self.modulemap.get(fromrev, self.module) |
|
438 | old_module = self.modulemap.get(fromrev, self.module) | |
439 |
|
439 | |||
440 | basepath = old_module + "/" + get_entry_from_path(path, module=self.module) |
|
440 | basepath = old_module + "/" + get_entry_from_path(path, module=self.module) | |
441 | entrypath = old_module + "/" + get_entry_from_path(path, module=self.module) |
|
441 | entrypath = old_module + "/" + get_entry_from_path(path, module=self.module) | |
442 |
|
442 | |||
443 | def lookup_parts(p): |
|
443 | def lookup_parts(p): | |
444 | rc = None |
|
444 | rc = None | |
445 | parts = p.split("/") |
|
445 | parts = p.split("/") | |
446 | for i in range(len(parts)): |
|
446 | for i in range(len(parts)): | |
447 | part = "/".join(parts[:i]) |
|
447 | part = "/".join(parts[:i]) | |
448 | info = part, copyfrom.get(part, None) |
|
448 | info = part, copyfrom.get(part, None) | |
449 | if info[1] is not None: |
|
449 | if info[1] is not None: | |
450 | self.ui.debug("Found parent directory %s\n" % info[1]) |
|
450 | self.ui.debug("Found parent directory %s\n" % info[1]) | |
451 | rc = info |
|
451 | rc = info | |
452 | return rc |
|
452 | return rc | |
453 |
|
453 | |||
454 | self.ui.debug("base, entry %s %s\n" % (basepath, entrypath)) |
|
454 | self.ui.debug("base, entry %s %s\n" % (basepath, entrypath)) | |
455 |
|
455 | |||
456 | frompath, froment = lookup_parts(entrypath) or (None, revnum - 1) |
|
456 | frompath, froment = lookup_parts(entrypath) or (None, revnum - 1) | |
457 |
|
457 | |||
458 | # need to remove fragment from lookup_parts and replace with copyfrom_path |
|
458 | # need to remove fragment from lookup_parts and replace with copyfrom_path | |
459 | if frompath is not None: |
|
459 | if frompath is not None: | |
460 | self.ui.debug("munge-o-matic\n") |
|
460 | self.ui.debug("munge-o-matic\n") | |
461 | self.ui.debug(entrypath + '\n') |
|
461 | self.ui.debug(entrypath + '\n') | |
462 | self.ui.debug(entrypath[len(frompath):] + '\n') |
|
462 | self.ui.debug(entrypath[len(frompath):] + '\n') | |
463 | entrypath = froment.copyfrom_path + entrypath[len(frompath):] |
|
463 | entrypath = froment.copyfrom_path + entrypath[len(frompath):] | |
464 | fromrev = froment.copyfrom_rev |
|
464 | fromrev = froment.copyfrom_rev | |
465 | self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath)) |
|
465 | self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath)) | |
466 |
|
466 | |||
467 | fromkind = svn.ra.check_path(self.ra, entrypath, fromrev) |
|
467 | fromkind = svn.ra.check_path(self.ra, entrypath, fromrev) | |
468 | if fromkind == svn.core.svn_node_file: # a deleted file |
|
468 | if fromkind == svn.core.svn_node_file: # a deleted file | |
469 | entries.append(self.recode(entry)) |
|
469 | entries.append(self.recode(entry)) | |
470 | elif fromkind == svn.core.svn_node_dir: |
|
470 | elif fromkind == svn.core.svn_node_dir: | |
471 | # print "Deleted/moved non-file:", revnum, path, ent |
|
471 | # print "Deleted/moved non-file:", revnum, path, ent | |
472 | # children = self._find_children(path, revnum - 1) |
|
472 | # children = self._find_children(path, revnum - 1) | |
473 | # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action) |
|
473 | # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action) | |
474 | # Sometimes this is tricky. For example: in |
|
474 | # Sometimes this is tricky. For example: in | |
475 | # The Subversion Repository revision 6940 a dir |
|
475 | # The Subversion Repository revision 6940 a dir | |
476 | # was copied and one of its files was deleted |
|
476 | # was copied and one of its files was deleted | |
477 | # from the new location in the same commit. This |
|
477 | # from the new location in the same commit. This | |
478 | # code can't deal with that yet. |
|
478 | # code can't deal with that yet. | |
479 | if ent.action == 'C': |
|
479 | if ent.action == 'C': | |
480 | children = self._find_children(path, fromrev) |
|
480 | children = self._find_children(path, fromrev) | |
481 | else: |
|
481 | else: | |
482 | oroot = entrypath.strip('/') |
|
482 | oroot = entrypath.strip('/') | |
483 | nroot = path.strip('/') |
|
483 | nroot = path.strip('/') | |
484 | children = self._find_children(oroot, fromrev) |
|
484 | children = self._find_children(oroot, fromrev) | |
485 | children = [s.replace(oroot,nroot) for s in children] |
|
485 | children = [s.replace(oroot,nroot) for s in children] | |
486 | # Mark all [files, not directories] as deleted. |
|
486 | # Mark all [files, not directories] as deleted. | |
487 | for child in children: |
|
487 | for child in children: | |
488 | # Can we move a child directory and its |
|
488 | # Can we move a child directory and its | |
489 | # parent in the same commit? (probably can). Could |
|
489 | # parent in the same commit? (probably can). Could | |
490 | # cause problems if instead of revnum -1, |
|
490 | # cause problems if instead of revnum -1, | |
491 | # we have to look in (copyfrom_path, revnum - 1) |
|
491 | # we have to look in (copyfrom_path, revnum - 1) | |
492 | entrypath = get_entry_from_path("/" + child, module=old_module) |
|
492 | entrypath = get_entry_from_path("/" + child, module=old_module) | |
493 | if entrypath: |
|
493 | if entrypath: | |
494 | entry = self.recode(entrypath.decode(self.encoding)) |
|
494 | entry = self.recode(entrypath.decode(self.encoding)) | |
495 | if entry in copies: |
|
495 | if entry in copies: | |
496 | # deleted file within a copy |
|
496 | # deleted file within a copy | |
497 | del copies[entry] |
|
497 | del copies[entry] | |
498 | else: |
|
498 | else: | |
499 | entries.append(entry) |
|
499 | entries.append(entry) | |
500 | else: |
|
500 | else: | |
501 | self.ui.debug('unknown path in revision %d: %s\n' % \ |
|
501 | self.ui.debug('unknown path in revision %d: %s\n' % \ | |
502 | (revnum, path)) |
|
502 | (revnum, path)) | |
503 | elif kind == svn.core.svn_node_dir: |
|
503 | elif kind == svn.core.svn_node_dir: | |
504 | # Should probably synthesize normal file entries |
|
504 | # Should probably synthesize normal file entries | |
505 | # and handle as above to clean up copy/rename handling. |
|
505 | # and handle as above to clean up copy/rename handling. | |
506 |
|
506 | |||
507 | # If the directory just had a prop change, |
|
507 | # If the directory just had a prop change, | |
508 | # then we shouldn't need to look for its children. |
|
508 | # then we shouldn't need to look for its children. | |
509 | # Also this could create duplicate entries. Not sure |
|
509 | # Also this could create duplicate entries. Not sure | |
510 | # whether this will matter. Maybe should make entries a set. |
|
510 | # whether this will matter. Maybe should make entries a set. | |
511 | # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev |
|
511 | # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev | |
512 | # This will fail if a directory was copied |
|
512 | # This will fail if a directory was copied | |
513 | # from another branch and then some of its files |
|
513 | # from another branch and then some of its files | |
514 | # were deleted in the same transaction. |
|
514 | # were deleted in the same transaction. | |
515 | children = self._find_children(path, revnum) |
|
515 | children = self._find_children(path, revnum) | |
516 | children.sort() |
|
516 | children.sort() | |
517 | for child in children: |
|
517 | for child in children: | |
518 | # Can we move a child directory and its |
|
518 | # Can we move a child directory and its | |
519 | # parent in the same commit? (probably can). Could |
|
519 | # parent in the same commit? (probably can). Could | |
520 | # cause problems if instead of revnum -1, |
|
520 | # cause problems if instead of revnum -1, | |
521 | # we have to look in (copyfrom_path, revnum - 1) |
|
521 | # we have to look in (copyfrom_path, revnum - 1) | |
522 | entrypath = get_entry_from_path("/" + child, module=self.module) |
|
522 | entrypath = get_entry_from_path("/" + child, module=self.module) | |
523 | # print child, self.module, entrypath |
|
523 | # print child, self.module, entrypath | |
524 | if entrypath: |
|
524 | if entrypath: | |
525 | # Need to filter out directories here... |
|
525 | # Need to filter out directories here... | |
526 | kind = svn.ra.check_path(self.ra, entrypath, revnum) |
|
526 | kind = svn.ra.check_path(self.ra, entrypath, revnum) | |
527 | if kind != svn.core.svn_node_dir: |
|
527 | if kind != svn.core.svn_node_dir: | |
528 | entries.append(self.recode(entrypath)) |
|
528 | entries.append(self.recode(entrypath)) | |
529 |
|
529 | |||
530 | # Copies here (must copy all from source) |
|
530 | # Copies here (must copy all from source) | |
531 | # Probably not a real problem for us if |
|
531 | # Probably not a real problem for us if | |
532 | # source does not exist |
|
532 | # source does not exist | |
533 |
|
533 | |||
534 | # Can do this with the copy command "hg copy" |
|
534 | # Can do this with the copy command "hg copy" | |
535 | # if ent.copyfrom_path: |
|
535 | # if ent.copyfrom_path: | |
536 | # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding), |
|
536 | # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding), | |
537 | # module=self.module) |
|
537 | # module=self.module) | |
538 | # copyto_entry = entrypath |
|
538 | # copyto_entry = entrypath | |
539 | # |
|
539 | # | |
540 | # print "copy directory", copyfrom_entry, 'to', copyto_entry |
|
540 | # print "copy directory", copyfrom_entry, 'to', copyto_entry | |
541 | # |
|
541 | # | |
542 | # copies.append((copyfrom_entry, copyto_entry)) |
|
542 | # copies.append((copyfrom_entry, copyto_entry)) | |
543 |
|
543 | |||
544 | if ent.copyfrom_path: |
|
544 | if ent.copyfrom_path: | |
545 | copyfrom_path = ent.copyfrom_path.decode(self.encoding) |
|
545 | copyfrom_path = ent.copyfrom_path.decode(self.encoding) | |
546 | copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module) |
|
546 | copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module) | |
547 | if copyfrom_entry: |
|
547 | if copyfrom_entry: | |
548 | copyfrom[path] = ent |
|
548 | copyfrom[path] = ent | |
549 | self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path])) |
|
549 | self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path])) | |
550 |
|
550 | |||
551 | # Good, /probably/ a regular copy. Really should check |
|
551 | # Good, /probably/ a regular copy. Really should check | |
552 | # to see whether the parent revision actually contains |
|
552 | # to see whether the parent revision actually contains | |
553 | # the directory in question. |
|
553 | # the directory in question. | |
554 | children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev) |
|
554 | children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev) | |
555 | children.sort() |
|
555 | children.sort() | |
556 | for child in children: |
|
556 | for child in children: | |
557 | entrypath = get_entry_from_path("/" + child, module=self.module) |
|
557 | entrypath = get_entry_from_path("/" + child, module=self.module) | |
558 | if entrypath: |
|
558 | if entrypath: | |
559 | entry = entrypath.decode(self.encoding) |
|
559 | entry = entrypath.decode(self.encoding) | |
560 | # print "COPY COPY From", copyfrom_entry, entry |
|
560 | # print "COPY COPY From", copyfrom_entry, entry | |
561 | copyto_path = path + entry[len(copyfrom_entry):] |
|
561 | copyto_path = path + entry[len(copyfrom_entry):] | |
562 | copyto_entry = get_entry_from_path(copyto_path, module=self.module) |
|
562 | copyto_entry = get_entry_from_path(copyto_path, module=self.module) | |
563 | # print "COPY", entry, "COPY To", copyto_entry |
|
563 | # print "COPY", entry, "COPY To", copyto_entry | |
564 | copies[self.recode(copyto_entry)] = self.recode(entry) |
|
564 | copies[self.recode(copyto_entry)] = self.recode(entry) | |
565 | # copy from quux splort/quuxfile |
|
565 | # copy from quux splort/quuxfile | |
566 |
|
566 | |||
567 | return (entries, copies) |
|
567 | return (entries, copies) | |
568 |
|
568 | |||
    def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
        """Walk the SVN log of self.module between from_revnum and
        to_revnum, converting each relevant log entry into a commit
        object stored in self.commits (keyed by revision id).

        Also records, per revision, the filtered changed paths and the
        computed parents in self.paths.
        """
        # NOTE(review): the default to_revnum=347 looks like leftover
        # debugging state -- confirm all callers pass it explicitly.
        self.child_cset = None
        def parselogentry(orig_paths, revnum, author, date, message):
            # Convert one SVN log entry into a commit object.
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            # Follow module renames backwards in time: if this revision
            # maps to a different module path, reparent the RA session.
            if revnum in self.modulemap:
                new_module = self.modulemap[revnum]
                if new_module != self.module:
                    self.module = new_module
                    self.reparent(self.module)

            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have
            if (rev in self.commits or
                (revnum < self.lastrevs.get(self.module, 0))):
                return

            parents = []
            # check whether this revision is the start of a branch
            if self.module in orig_paths:
                ent = orig_paths[self.module]
                if ent.copyfrom_path:
                    # ent.copyfrom_rev may not be the actual last revision
                    prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
                    self.modulemap[prev] = ent.copyfrom_path
                    parents = [self.revid(prev, ent.copyfrom_path)]
                    self.ui.note('found parent of branch %s at %d: %s\n' % \
                                 (self.module, prev, ent.copyfrom_path))
                else:
                    self.ui.debug("No copyfrom path, don't know what to do.\n")

            self.modulemap[revnum] = self.module # track backwards in time

            orig_paths = orig_paths.items()
            orig_paths.sort()
            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if not path.startswith(self.module):
                    self.ui.debug("boring@%s: %s\n" % (revnum, path))
                    continue
                paths.append((path, ent))

            self.paths[rev] = (paths, parents)

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            # Microseconds are truncated (date[:19]) before parsing.
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message)
            author = author and self.recode(author) or ''
            try:
                # Use the last path component as the hg branch name;
                # trunk maps to the default (unnamed) branch.
                branch = self.module.split("/")[-1]
                if branch == 'trunk':
                    branch = ''
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev.encode('utf-8'))

            self.commits[rev] = cset
            # Link the previously seen (younger) changeset to this one:
            # the log is walked newest-first, so this revision is the
            # parent of the changeset parsed just before it.
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents = [rev]
            self.child_cset = cset

        self.ui.note('fetching revision log for "%s" from %d to %d\n' %
                     (self.module, from_revnum, to_revnum))

        try:
            for entry in self.get_log([self.module], from_revnum, to_revnum):
                orig_paths, revnum, author, date, message = entry
                if self.is_blacklisted(revnum):
                    self.ui.note('skipping blacklisted revision %d\n' % revnum)
                    continue
                if orig_paths is None:
                    self.ui.debug('revision %d has no entries\n' % revnum)
                    continue
                parselogentry(orig_paths, revnum, author, date, message)
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise NoSuchRevision(branch=self,
                    revision="Revision number %d" % to_revnum)
            raise
659 |
|
659 | |||
    def _getfile(self, file, rev):
        """Fetch *file* at revision *rev* from the repository.

        Returns a (data, mode) pair where mode is 'x' (executable),
        'l' (symlink) or '' (regular file).  Raises IOError when the
        file does not exist at that revision.
        """
        io = StringIO()
        # TODO: ra.get_file transmits the whole file instead of diffs.
        mode = ''
        try:
            revnum = self.revnum(rev)
            # Reparent the RA session if the module moved at this revision.
            if self.module != self.modulemap[revnum]:
                self.module = self.modulemap[revnum]
                self.reparent(self.module)
            info = svn.ra.get_file(self.ra, file, revnum, io)
            # Some binding versions return a list; the properties dict is
            # the last element -- TODO confirm against the bindings in use.
            if isinstance(info, list):
                info = info[-1]
            # Derive the hg mode flag from SVN file properties.
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        data = io.getvalue()
        if mode == 'l':
            # SVN stores symlinks as files whose content is "link TARGET";
            # strip the prefix so hg sees only the target.
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
686 |
|
686 | |||
687 | def _find_children(self, path, revnum): |
|
687 | def _find_children(self, path, revnum): | |
688 | path = path.strip('/') |
|
688 | path = path.strip('/') | |
689 | pool = Pool() |
|
689 | pool = Pool() | |
690 | rpath = '/'.join([self.base, path]).strip('/') |
|
690 | rpath = '/'.join([self.base, path]).strip('/') | |
691 | return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()] |
|
691 | return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()] | |
692 |
|
692 | |||
# Shell script installed as the pre-revprop-change hook of repositories
# created by svn_sink (see __init__): it permits only the revision
# property changes the converter itself performs (editing svn:log,
# adding hg:convert-branch / hg:convert-rev) and rejects everything else.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
708 |
|
708 | |||
709 | class svn_sink(converter_sink, commandline): |
|
709 | class svn_sink(converter_sink, commandline): | |
    # Extracts the revision number from "svn commit" output, e.g.
    # "Committed revision 42."  (re.M: svn may print several lines).
    # NOTE(review): the trailing '.' is an unescaped any-character --
    # harmless here, but probably meant as a literal dot.
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
711 |
|
711 | |||
712 | def prerun(self): |
|
712 | def prerun(self): | |
713 | if self.wc: |
|
713 | if self.wc: | |
714 | os.chdir(self.wc) |
|
714 | os.chdir(self.wc) | |
715 |
|
715 | |||
716 | def postrun(self): |
|
716 | def postrun(self): | |
717 | if self.wc: |
|
717 | if self.wc: | |
718 | os.chdir(self.cwd) |
|
718 | os.chdir(self.cwd) | |
719 |
|
719 | |||
720 | def join(self, name): |
|
720 | def join(self, name): | |
721 | return os.path.join(self.wc, '.svn', name) |
|
721 | return os.path.join(self.wc, '.svn', name) | |
722 |
|
722 | |||
723 | def revmapfile(self): |
|
723 | def revmapfile(self): | |
724 | return self.join('hg-shamap') |
|
724 | return self.join('hg-shamap') | |
725 |
|
725 | |||
726 | def authorfile(self): |
|
726 | def authorfile(self): | |
727 | return self.join('hg-authormap') |
|
727 | return self.join('hg-authormap') | |
728 |
|
728 | |||
    def __init__(self, ui, path):
        """Set up an SVN sink rooted at *path*.

        *path* may be an existing working copy (reused as-is) or a
        repository path; in the latter case the repository is created
        if needed and a fresh working copy is checked out next to the
        current directory.
        """
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []        # files scheduled for deletion
        self.setexec = []       # files gaining the executable property
        self.delexec = []       # files losing the executable property
        self.copies = []        # recorded (source, dest) copies
        self.wc = None          # working copy root, set below
        self.cwd = os.getcwd()  # restored by postrun()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy: reuse it.
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    # No repository at path yet: create one.
                    ui.status(_('initializing svn repo %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                # Turn the local path into a file:// URL for checkout.
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # On exec-less filesystems is_exec stays None; putfile falls back
        # to a flag-based heuristic.
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            # Install the revprop hook so we may set svn:log and the
            # hg:convert-* revision properties on the new repository.
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, "x")

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)
777 |
|
777 | |||
778 | def wjoin(self, *names): |
|
778 | def wjoin(self, *names): | |
779 | return os.path.join(self.wc, *names) |
|
779 | return os.path.join(self.wc, *names) | |
780 |
|
780 | |||
781 | def putfile(self, filename, flags, data): |
|
781 | def putfile(self, filename, flags, data): | |
782 | if 'l' in flags: |
|
782 | if 'l' in flags: | |
783 | self.wopener.symlink(data, filename) |
|
783 | self.wopener.symlink(data, filename) | |
784 | else: |
|
784 | else: | |
785 | try: |
|
785 | try: | |
786 | if os.path.islink(self.wjoin(filename)): |
|
786 | if os.path.islink(self.wjoin(filename)): | |
787 | os.unlink(filename) |
|
787 | os.unlink(filename) | |
788 | except OSError: |
|
788 | except OSError: | |
789 | pass |
|
789 | pass | |
790 | self.wopener(filename, 'w').write(data) |
|
790 | self.wopener(filename, 'w').write(data) | |
791 |
|
791 | |||
792 | if self.is_exec: |
|
792 | if self.is_exec: | |
793 | was_exec = self.is_exec(self.wjoin(filename)) |
|
793 | was_exec = self.is_exec(self.wjoin(filename)) | |
794 | else: |
|
794 | else: | |
795 | # On filesystems not supporting execute-bit, there is no way |
|
795 | # On filesystems not supporting execute-bit, there is no way | |
796 | # to know if it is set but asking subversion. Setting it |
|
796 | # to know if it is set but asking subversion. Setting it | |
797 | # systematically is just as expensive and much simpler. |
|
797 | # systematically is just as expensive and much simpler. | |
798 | was_exec = 'x' not in flags |
|
798 | was_exec = 'x' not in flags | |
799 |
|
799 | |||
800 | util.set_flags(self.wjoin(filename), flags) |
|
800 | util.set_flags(self.wjoin(filename), flags) | |
801 | if was_exec: |
|
801 | if was_exec: | |
802 | if 'x' not in flags: |
|
802 | if 'x' not in flags: | |
803 | self.delexec.append(filename) |
|
803 | self.delexec.append(filename) | |
804 | else: |
|
804 | else: | |
805 | if 'x' in flags: |
|
805 | if 'x' in flags: | |
806 | self.setexec.append(filename) |
|
806 | self.setexec.append(filename) | |
807 |
|
807 | |||
808 | def delfile(self, name): |
|
808 | def delfile(self, name): | |
809 | self.delete.append(name) |
|
809 | self.delete.append(name) | |
810 |
|
810 | |||
811 | def copyfile(self, source, dest): |
|
811 | def copyfile(self, source, dest): | |
812 | self.copies.append([source, dest]) |
|
812 | self.copies.append([source, dest]) | |
813 |
|
813 | |||
    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.exists(wdest)
        if exists:
            # Move the already-written destination aside so "svn copy"
            # can create it; mkstemp only reserves a unique name in the
            # same directory (the descriptor and file are discarded).
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            # Restore the converted contents over whatever svn created,
            # so the working copy keeps the data written by putfile.
            if exists:
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)
835 |
|
835 | |||
836 | def dirs_of(self, files): |
|
836 | def dirs_of(self, files): | |
837 | dirs = set() |
|
837 | dirs = set() | |
838 | for f in files: |
|
838 | for f in files: | |
839 | if os.path.isdir(self.wjoin(f)): |
|
839 | if os.path.isdir(self.wjoin(f)): | |
840 | dirs.add(f) |
|
840 | dirs.add(f) | |
841 | for i in strutil.rfindall(f, '/'): |
|
841 | for i in strutil.rfindall(f, '/'): | |
842 | dirs.add(f[:i]) |
|
842 | dirs.add(f[:i]) | |
843 | return dirs |
|
843 | return dirs | |
844 |
|
844 | |||
845 | def add_dirs(self, files): |
|
845 | def add_dirs(self, files): | |
846 | add_dirs = [d for d in self.dirs_of(files) |
|
846 | add_dirs = [d for d in self.dirs_of(files) | |
847 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] |
|
847 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] | |
848 | if add_dirs: |
|
848 | if add_dirs: | |
849 | add_dirs.sort() |
|
849 | add_dirs.sort() | |
850 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) |
|
850 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) | |
851 | return add_dirs |
|
851 | return add_dirs | |
852 |
|
852 | |||
853 | def add_files(self, files): |
|
853 | def add_files(self, files): | |
854 | if files: |
|
854 | if files: | |
855 | self.xargs(files, 'add', quiet=True) |
|
855 | self.xargs(files, 'add', quiet=True) | |
856 | return files |
|
856 | return files | |
857 |
|
857 | |||
858 | def tidy_dirs(self, names): |
|
858 | def tidy_dirs(self, names): | |
859 | dirs = list(self.dirs_of(names)) |
|
859 | dirs = list(self.dirs_of(names)) | |
860 | dirs.sort(reverse=True) |
|
860 | dirs.sort(reverse=True) | |
861 | deleted = [] |
|
861 | deleted = [] | |
862 | for d in dirs: |
|
862 | for d in dirs: | |
863 | wd = self.wjoin(d) |
|
863 | wd = self.wjoin(d) | |
864 | if os.listdir(wd) == '.svn': |
|
864 | if os.listdir(wd) == '.svn': | |
865 | self.run0('delete', d) |
|
865 | self.run0('delete', d) | |
866 | deleted.append(d) |
|
866 | deleted.append(d) | |
867 | return deleted |
|
867 | return deleted | |
868 |
|
868 | |||
869 | def addchild(self, parent, child): |
|
869 | def addchild(self, parent, child): | |
870 | self.childmap[parent] = child |
|
870 | self.childmap[parent] = child | |
871 |
|
871 | |||
872 | def revid(self, rev): |
|
872 | def revid(self, rev): | |
873 | return u"svn:%s@%s" % (self.uuid, rev) |
|
873 | return u"svn:%s@%s" % (self.uuid, rev) | |
874 |
|
874 | |||
    def putcommit(self, files, parents, commit):
        """Commit the pending working-copy changes to Subversion and
        return the new revision id (see revid()).

        If any parent already has a committed child from this run, that
        child's id is returned instead of committing again -- SVN
        history is linear, so a changeset is converted at most once.
        """
        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        entries = set(self.delete)
        files = util.frozenset(files)
        # Directories must be svn-added before the files inside them.
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        # Sync queued svn:executable property changes.
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # Pass the commit message through a temp file to avoid shell
        # quoting/encoding trouble.
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                # commit_re extracts the new revision number from the
                # client's output; .search() returns None on no match.
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                # Record the source hg changeset as a revprop so the
                # conversion is traceable.
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)
926 |
|
926 | |||
    def puttags(self, tags):
        # Tag conversion to Subversion is not implemented; warn rather
        # than silently dropping the tags.
        self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,220 +1,220 b'' | |||||
1 | # archival.py - revision archival for mercurial |
|
1 | # archival.py - revision archival for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
3 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of |
|
5 | # This software may be used and distributed according to the terms of | |
6 | # the GNU General Public License, incorporated herein by reference. |
|
6 | # the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from i18n import _ |
|
8 | from i18n import _ | |
9 | from node import * |
|
9 | from node import * | |
10 | import cStringIO, os, stat, tarfile, time, util, zipfile |
|
10 | import cStringIO, os, stat, tarfile, time, util, zipfile | |
11 | import zlib, gzip |
|
11 | import zlib, gzip | |
12 |
|
12 | |||
def tidyprefix(dest, prefix, suffixes):
    '''choose prefix to use for names in archive.  make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        # no explicit prefix: derive one from the archive file name
        prefix = os.path.basename(dest)
    lower = prefix.lower()
    # strip a recognized archive suffix so members are not prefixed
    # with e.g. 'project.tar.gz/'
    for sfx in suffixes:
        if lower.endswith(sfx):
            prefix = prefix[:-len(sfx)]
            break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # refuse prefixes that would escape the extraction root
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix
35 |
|
35 | |||
class tarit:
    '''write archive to tar file or stream.  can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):
        # gzip.GzipFile stamps time.time() into the gzip header; this
        # subclass lets the caller pin the timestamp so identical
        # content produces byte-identical archives.

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop('timestamp')
            if timestamp == None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            # Re-implementation of GzipFile._write_gzip_header writing
            # self.timestamp instead of the current time.
            # NOTE(review): depends on Python 2 gzip internals
            # (gzip.write32u, long, str-typed file data) -- confirm
            # before porting to Python 3.
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.filename[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, prefix, mtime, kind=''):
        # dest: file name or writable file object; kind: '', 'gz', 'bz2'
        self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                '.tgz', '.tbz2'])
        self.mtime = mtime

        def taropen(name, mode, fileobj=None):
            if kind == 'gz':
                # open the gzip layer ourselves so the header timestamp
                # can be controlled (see GzipFileWithTime)
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen(dest, mode='w:')
        else:
            # Python 2.5-2.5.1 have a regression that requires a name arg
            self.z = taropen(name='', mode='w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        # For symlinks *data* is the link target; otherwise file bytes.
        i = tarfile.TarInfo(self.prefix + name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0777
            i.linkname = data
            data = None
        else:
            i.mode = mode
            data = cStringIO.StringIO(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
|
105 | |||
class tellable:
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # Anything we don't implement is delegated to the wrapped file.
        return getattr(self.fp, key)

    def write(self, s):
        written = len(s)
        self.fp.write(s)
        self.offset += written

    def tell(self):
        return self.offset
123 |
|
123 | |||
class zipit:
    '''write archive to zip file or stream.  can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, prefix, mtime, compress=True):
        self.prefix = tidyprefix(dest, prefix, ('.zip',))
        if not isinstance(dest, str):
            # ZipFile needs tell(); wrap raw stream objects (e.g. http
            # responses) that cannot seek/tell.
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)
        # gmtime tuple prefix (y, m, d, H, M, S) for member timestamps
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(self.prefix + name, self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = stat.S_IFREG
        if islink:
            mode = 0777
            ftype = stat.S_IFLNK
        # high 16 bits of external_attr carry the unix mode + type bits
        i.external_attr = (mode | ftype) << 16L
        self.z.writestr(i, data)

    def done(self):
        self.z.close()
155 |
|
155 | |||
class fileit:
    '''write archive as files in directory.'''

    def __init__(self, name, prefix, mtime):
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
        self.basedir = name
        self.opener = util.opener(self.basedir)

    def addfile(self, name, mode, islink, data):
        # Symlinks carry their target in *data*; regular files are
        # written atomically and then chmod'ed into place.
        if islink:
            self.opener.symlink(data, name)
            return
        out = self.opener(name, "w", atomictemp=True)
        out.write(data)
        out.rename()
        os.chmod(os.path.join(self.basedir, name), mode)

    def done(self):
        pass
177 |
|
177 | |||
# archive kind -> archiver factory taking (dest, prefix, mtime)
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
    'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
    'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
    'zip': zipit,
    }
186 |
|
186 | |||
def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''

    def write(name, mode, islink, getdata):
        # filter, apply decode filters, then hand off to the archiver
        if matchfn and not matchfn(name): return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(name, mode, islink, data)

    ctx = repo.changectx(node)
    # default member mtime to the changeset's commit time
    archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
    m = ctx.manifest()
    items = m.items()
    items.sort()
    # metadata record identifying the source repository and revision
    write('.hg_archival.txt', 0644, False,
          lambda: 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in items:
        # write() calls getdata() immediately, so the late-binding
        # closure over filename/filenode is safe here
        write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
              lambda: repo.file(filename).read(filenode))
    archiver.done()
@@ -1,582 +1,582 b'' | |||||
1 | """ |
|
1 | """ | |
2 | dirstate.py - working directory tracking for mercurial |
|
2 | dirstate.py - working directory tracking for mercurial | |
3 |
|
3 | |||
4 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 |
|
5 | |||
6 | This software may be used and distributed according to the terms |
|
6 | This software may be used and distributed according to the terms | |
7 | of the GNU General Public License, incorporated herein by reference. |
|
7 | of the GNU General Public License, incorporated herein by reference. | |
8 | """ |
|
8 | """ | |
9 |
|
9 | |||
10 | from node import * |
|
10 | from node import * | |
11 | from i18n import _ |
|
11 | from i18n import _ | |
12 | import struct, os, time, bisect, stat, strutil, util, re, errno, ignore |
|
12 | import struct, os, time, bisect, stat, strutil, util, re, errno, ignore | |
13 | import cStringIO, osutil |
|
13 | import cStringIO, osutil | |
14 |
|
14 | |||
15 | _unknown = ('?', 0, 0, 0) |
|
15 | _unknown = ('?', 0, 0, 0) | |
16 | _format = ">cllll" |
|
16 | _format = ">cllll" | |
17 |
|
17 | |||
18 | class dirstate(object): |
|
18 | class dirstate(object): | |
19 |
|
19 | |||
20 | def __init__(self, opener, ui, root): |
|
20 | def __init__(self, opener, ui, root): | |
21 | self._opener = opener |
|
21 | self._opener = opener | |
22 | self._root = root |
|
22 | self._root = root | |
23 | self._dirty = False |
|
23 | self._dirty = False | |
24 | self._dirtypl = False |
|
24 | self._dirtypl = False | |
25 | self._ui = ui |
|
25 | self._ui = ui | |
26 |
|
26 | |||
    def __getattr__(self, name):
        # Lazy attribute computation: each branch caches its result on
        # self, so __getattr__ only fires once per attribute.
        if name == '_map':
            self._read()
            return self._map
        elif name == '_copymap':
            self._read()
            return self._copymap
        elif name == '_branch':
            try:
                self._branch = (self._opener("branch").read().strip()
                                or "default")
            except IOError:
                # no branch file: working dir is on the default branch
                self._branch = "default"
            return self._branch
        elif name == '_pl':
            # working directory parents: two 20-byte binary node ids
            # stored at the front of the dirstate file
            self._pl = [nullid, nullid]
            try:
                st = self._opener("dirstate").read(40)
                if len(st) == 40:
                    self._pl = st[:20], st[20:40]
            except IOError, err:
                if err.errno != errno.ENOENT: raise
            return self._pl
        elif name == '_dirs':
            # reference-counted map of directories containing tracked
            # (non-removed) files, built via _incpath
            self._dirs = {}
            for f in self._map:
                if self[f] != 'r':
                    self._incpath(f)
            return self._dirs
        elif name == '_ignore':
            # .hgignore plus any ui.ignore / ui.ignore.* config files
            files = [self._join('.hgignore')]
            for name, path in self._ui.configitems("ui"):
                if name == 'ignore' or name.startswith('ignore.'):
                    files.append(os.path.expanduser(path))
            self._ignore = ignore.ignore(self._root, files, self._ui.warn)
            return self._ignore
        elif name == '_slash':
            # True when paths should be shown with '/' despite os.sep
            self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
            return self._slash
        else:
            raise AttributeError, name
68 |
|
68 | |||
    def _join(self, f):
        # turn a repo-relative path into an absolute filesystem path
        return os.path.join(self._root, f)
71 |
|
71 | |||
72 | def getcwd(self): |
|
72 | def getcwd(self): | |
73 | cwd = os.getcwd() |
|
73 | cwd = os.getcwd() | |
74 | if cwd == self._root: return '' |
|
74 | if cwd == self._root: return '' | |
75 | # self._root ends with a path separator if self._root is '/' or 'C:\' |
|
75 | # self._root ends with a path separator if self._root is '/' or 'C:\' | |
76 | rootsep = self._root |
|
76 | rootsep = self._root | |
77 | if not rootsep.endswith(os.sep): |
|
77 | if not rootsep.endswith(os.sep): | |
78 | rootsep += os.sep |
|
78 | rootsep += os.sep | |
79 | if cwd.startswith(rootsep): |
|
79 | if cwd.startswith(rootsep): | |
80 | return cwd[len(rootsep):] |
|
80 | return cwd[len(rootsep):] | |
81 | else: |
|
81 | else: | |
82 | # we're outside the repo. return an absolute path. |
|
82 | # we're outside the repo. return an absolute path. | |
83 | return cwd |
|
83 | return cwd | |
84 |
|
84 | |||
    def pathto(self, f, cwd=None):
        # render repo path *f* relative to *cwd* (defaulting to the
        # current directory); honor ui.slash by normalizing separators
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.normpath(path)
        return path
92 |
|
92 | |||
    def __getitem__(self, key):
        '''Return the one-letter dirstate state of *key*:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked'''
        return self._map.get(key, ("?",))[0]
101 |
|
101 | |||
    def __contains__(self, key):
        # True when *key* is tracked by the dirstate (in any state)
        return key in self._map
104 |
|
104 | |||
105 | def __iter__(self): |
|
105 | def __iter__(self): | |
106 | a = self._map.keys() |
|
106 | a = self._map.keys() | |
107 | a.sort() |
|
107 | a.sort() | |
108 | for x in a: |
|
108 | for x in a: | |
109 | yield x |
|
109 | yield x | |
110 |
|
110 | |||
    def parents(self):
        # the pair of working-directory parent node ids (initialized
        # to [nullid, nullid] when no dirstate exists; see __getattr__)
        return self._pl
113 |
|
113 | |||
    def branch(self):
        # name of the branch the working directory is on
        return self._branch
116 |
|
116 | |||
117 | def setparents(self, p1, p2=nullid): |
|
117 | def setparents(self, p1, p2=nullid): | |
118 | self._dirty = self._dirtypl = True |
|
118 | self._dirty = self._dirtypl = True | |
119 | self._pl = p1, p2 |
|
119 | self._pl = p1, p2 | |
120 |
|
120 | |||
121 | def setbranch(self, branch): |
|
121 | def setbranch(self, branch): | |
122 | self._branch = branch |
|
122 | self._branch = branch | |
123 | self._opener("branch", "w").write(branch + '\n') |
|
123 | self._opener("branch", "w").write(branch + '\n') | |
124 |
|
124 | |||
    def _read(self):
        """Parse the dirstate file into self._map and self._copymap."""
        self._map = {}
        self._copymap = {}
        if not self._dirtypl:
            self._pl = [nullid, nullid]
        try:
            st = self._opener("dirstate").read()
        except IOError, err:
            # missing dirstate file just means an empty state
            if err.errno != errno.ENOENT: raise
            return
        if not st:
            return

        if not self._dirtypl:
            # first 40 bytes: the two parent node ids
            self._pl = [st[:20], st[20: 40]]

        # deref fields so they will be local in loop
        dmap = self._map
        copymap = self._copymap
        unpack = struct.unpack
        e_size = struct.calcsize(_format)
        pos1 = 40
        l = len(st)

        # the inner loop: fixed-size header (state, mode, size, mtime,
        # name-length) followed by the file name
        while pos1 < l:
            pos2 = pos1 + e_size
            e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
            pos1 = pos2 + e[4]
            f = st[pos2:pos1]
            if '\0' in f:
                # 'name\0copysource' encodes a recorded copy
                f, c = f.split('\0')
                copymap[f] = c
            dmap[f] = e # we hold onto e[4] because making a subtuple is slow
159 |
|
159 | |||
def invalidate(self):
    """Discard all cached dirstate data so it is re-read lazily on next use."""
    # Every one of these attributes is computed on demand; dropping the
    # cached copy forces a fresh load from disk / config.
    for cached in ("_map", "_copymap", "_branch", "_pl", "_dirs", "_ignore"):
        if cached in self.__dict__:
            delattr(self, cached)
    self._dirty = False
165 |
|
165 | |||
def copy(self, source, dest):
    """Record that *dest* was copied from *source*."""
    self._copymap[dest] = source
    self._dirty = True
169 |
|
169 | |||
def copied(self, file):
    """Return the copy source of *file*, or None if it is not a copy."""
    return self._copymap.get(file)
172 |
|
172 | |||
def copies(self):
    """Return the full dest -> source copy map (not a copy of it)."""
    return self._copymap
175 |
|
175 | |||
176 | def _incpath(self, path): |
|
176 | def _incpath(self, path): | |
177 | c = path.rfind('/') |
|
177 | c = path.rfind('/') | |
178 | if c >= 0: |
|
178 | if c >= 0: | |
179 | dirs = self._dirs |
|
179 | dirs = self._dirs | |
180 | base = path[:c] |
|
180 | base = path[:c] | |
181 | if base not in dirs: |
|
181 | if base not in dirs: | |
182 | self._incpath(base) |
|
182 | self._incpath(base) | |
183 | dirs[base] = 1 |
|
183 | dirs[base] = 1 | |
184 | else: |
|
184 | else: | |
185 | dirs[base] += 1 |
|
185 | dirs[base] += 1 | |
186 |
|
186 | |||
187 | def _decpath(self, path): |
|
187 | def _decpath(self, path): | |
188 | c = path.rfind('/') |
|
188 | c = path.rfind('/') | |
189 | if c >= 0: |
|
189 | if c >= 0: | |
190 | base = path[:c] |
|
190 | base = path[:c] | |
191 | dirs = self._dirs |
|
191 | dirs = self._dirs | |
192 | if dirs[base] == 1: |
|
192 | if dirs[base] == 1: | |
193 | del dirs[base] |
|
193 | del dirs[base] | |
194 | self._decpath(base) |
|
194 | self._decpath(base) | |
195 | else: |
|
195 | else: | |
196 | dirs[base] -= 1 |
|
196 | dirs[base] -= 1 | |
197 |
|
197 | |||
def _incpathcheck(self, f):
    """Validate that *f* can be tracked, then account for its directories.

    Aborts when the name contains newline characters, shadows an existing
    tracked directory, or has an ancestor that is itself a tracked file.
    """
    if '\r' in f or '\n' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
    # a file may not shadow a directory already in the dirstate
    if f in self._dirs:
        raise util.Abort(_('directory %r already in dirstate') % f)
    # nor may any ancestor path of f be a tracked (non-removed) file
    for slash in strutil.rfindall(f, '/'):
        parent = f[:slash]
        if parent in self._dirs:
            # this prefix is a known directory: everything above is fine
            break
        if parent in self._map and self[parent] != 'r':
            raise util.Abort(_('file %r in dirstate clashes with %r') %
                             (parent, f))
    self._incpath(f)
212 |
|
212 | |||
213 | def _changepath(self, f, newstate, relaxed=False): |
|
213 | def _changepath(self, f, newstate, relaxed=False): | |
214 | # handle upcoming path changes |
|
214 | # handle upcoming path changes | |
215 | oldstate = self[f] |
|
215 | oldstate = self[f] | |
216 | if oldstate not in "?r" and newstate in "?r": |
|
216 | if oldstate not in "?r" and newstate in "?r": | |
217 | if "_dirs" in self.__dict__: |
|
217 | if "_dirs" in self.__dict__: | |
218 | self._decpath(f) |
|
218 | self._decpath(f) | |
219 | return |
|
219 | return | |
220 | if oldstate in "?r" and newstate not in "?r": |
|
220 | if oldstate in "?r" and newstate not in "?r": | |
221 | if relaxed and oldstate == '?': |
|
221 | if relaxed and oldstate == '?': | |
222 | # XXX |
|
222 | # XXX | |
223 | # in relaxed mode we assume the caller knows |
|
223 | # in relaxed mode we assume the caller knows | |
224 | # what it is doing, workaround for updating |
|
224 | # what it is doing, workaround for updating | |
225 | # dir-to-file revisions |
|
225 | # dir-to-file revisions | |
226 | if "_dirs" in self.__dict__: |
|
226 | if "_dirs" in self.__dict__: | |
227 | self._incpath(f) |
|
227 | self._incpath(f) | |
228 | return |
|
228 | return | |
229 | self._incpathcheck(f) |
|
229 | self._incpathcheck(f) | |
230 | return |
|
230 | return | |
231 |
|
231 | |||
def normal(self, f):
    'mark a file normal and clean'
    self._dirty = True
    self._changepath(f, 'n', True)
    s = os.lstat(self._join(f))
    # record current mode/size/mtime so status() can skip unchanged files
    self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
    # fix: use the "in" operator instead of the deprecated dict.has_key(),
    # matching the idiom used by every sibling method (normallookup, add...)
    if f in self._copymap:
        del self._copymap[f]
240 |
|
240 | |||
def normallookup(self, f):
    'mark a file normal, but possibly dirty'
    self._dirty = True
    self._changepath(f, 'n', True)
    # size and mtime of -1 mean "unknown": status() must compare contents
    self._map[f] = ('n', 0, -1, -1, 0)
    self._copymap.pop(f, None)
248 |
|
248 | |||
def normaldirty(self, f):
    'mark a file normal, but dirty'
    self._dirty = True
    self._changepath(f, 'n', True)
    # size -2 is the sentinel that forces status() to report f modified
    self._map[f] = ('n', 0, -2, -1, 0)
    self._copymap.pop(f, None)
256 |
|
256 | |||
def add(self, f):
    'mark a file added'
    self._dirty = True
    # non-relaxed path change: validates the name against the tree
    self._changepath(f, 'a')
    self._map[f] = ('a', 0, -1, -1, 0)
    self._copymap.pop(f, None)
264 |
|
264 | |||
def remove(self, f):
    'mark a file removed'
    self._dirty = True
    self._changepath(f, 'r')
    self._map[f] = ('r', 0, 0, 0, 0)
    self._copymap.pop(f, None)
272 |
|
272 | |||
def merge(self, f):
    'mark a file merged'
    self._dirty = True
    # stat first, mirroring the on-disk state at the moment of merging
    st = os.lstat(self._join(f))
    self._changepath(f, 'm', True)
    self._map[f] = ('m', st.st_mode, st.st_size, st.st_mtime, 0)
    self._copymap.pop(f, None)
281 |
|
281 | |||
def forget(self, f):
    'forget a file'
    # Drop f from tracking entirely; unlike remove() no 'r' entry is
    # left behind.  Note: _dirty is set even when f turns out to be
    # unknown (the KeyError branch below).
    self._dirty = True
    try:
        self._changepath(f, '?')
        del self._map[f]
    except KeyError:
        # f was not tracked; warn instead of raising
        self._ui.warn(_("not in dirstate: %s!\n") % f)
290 |
|
290 | |||
def clear(self):
    """Reset the dirstate to empty with null parents."""
    self._map = {}
    # _dirs is derived lazily from _map; drop any cached version
    # (fix: removed the stray trailing semicolon, non-idiomatic Python)
    if "_dirs" in self.__dict__:
        delattr(self, "_dirs")
    self._copymap = {}
    self._pl = [nullid, nullid]
    self._dirty = True
298 |
|
298 | |||
def rebuild(self, parent, files):
    # Rebuild the dirstate from scratch: every file in *files* is marked
    # clean ('n') against the single parent *parent*.
    self.clear()
    for f in files:
        if files.execf(f):
            # executable; size -1 forces a later content comparison
            self._map[f] = ('n', 0777, -1, 0, 0)
        else:
            self._map[f] = ('n', 0666, -1, 0, 0)
    # single-parent state: second parent is null
    self._pl = (parent, nullid)
    self._dirty = True
308 |
|
308 | |||
def write(self):
    # Serialize the in-memory dirstate back to the "dirstate" file.
    # No-op unless something changed since the last read/write.
    if not self._dirty:
        return
    cs = cStringIO.StringIO()
    copymap = self._copymap
    # hoist lookups to locals for speed in the loop below
    pack = struct.pack
    write = cs.write
    # header: the two parent changeset ids (2 x 20 bytes)
    write("".join(self._pl))
    for f, e in self._map.iteritems():
        if f in copymap:
            # a copied file stores "dest\0source" in its name field
            f = "%s\0%s" % (f, copymap[f])
        # fixed-size record (state, mode, size, mtime, name length)
        e = pack(_format, e[0], e[1], e[2], e[3], len(f))
        write(e)
        write(f)
    # atomictemp makes the on-disk update all-or-nothing
    st = self._opener("dirstate", "w", atomictemp=True)
    st.write(cs.getvalue())
    st.rename()
    self._dirty = self._dirtypl = False
327 |
|
327 | |||
328 | def _filter(self, files): |
|
328 | def _filter(self, files): | |
329 | ret = {} |
|
329 | ret = {} | |
330 | unknown = [] |
|
330 | unknown = [] | |
331 |
|
331 | |||
332 | for x in files: |
|
332 | for x in files: | |
333 | if x == '.': |
|
333 | if x == '.': | |
334 | return self._map.copy() |
|
334 | return self._map.copy() | |
335 | if x not in self._map: |
|
335 | if x not in self._map: | |
336 | unknown.append(x) |
|
336 | unknown.append(x) | |
337 | else: |
|
337 | else: | |
338 | ret[x] = self._map[x] |
|
338 | ret[x] = self._map[x] | |
339 |
|
339 | |||
340 | if not unknown: |
|
340 | if not unknown: | |
341 | return ret |
|
341 | return ret | |
342 |
|
342 | |||
343 | b = self._map.keys() |
|
343 | b = self._map.keys() | |
344 | b.sort() |
|
344 | b.sort() | |
345 | blen = len(b) |
|
345 | blen = len(b) | |
346 |
|
346 | |||
347 | for x in unknown: |
|
347 | for x in unknown: | |
348 | bs = bisect.bisect(b, "%s%s" % (x, '/')) |
|
348 | bs = bisect.bisect(b, "%s%s" % (x, '/')) | |
349 | while bs < blen: |
|
349 | while bs < blen: | |
350 | s = b[bs] |
|
350 | s = b[bs] | |
351 | if len(s) > len(x) and s.startswith(x): |
|
351 | if len(s) > len(x) and s.startswith(x): | |
352 | ret[s] = self._map[s] |
|
352 | ret[s] = self._map[s] | |
353 | else: |
|
353 | else: | |
354 | break |
|
354 | break | |
355 | bs += 1 |
|
355 | bs += 1 | |
356 | return ret |
|
356 | return ret | |
357 |
|
357 | |||
358 | def _supported(self, f, mode, verbose=False): |
|
358 | def _supported(self, f, mode, verbose=False): | |
359 | if stat.S_ISREG(mode) or stat.S_ISLNK(mode): |
|
359 | if stat.S_ISREG(mode) or stat.S_ISLNK(mode): | |
360 | return True |
|
360 | return True | |
361 | if verbose: |
|
361 | if verbose: | |
362 | kind = 'unknown' |
|
362 | kind = 'unknown' | |
363 | if stat.S_ISCHR(mode): kind = _('character device') |
|
363 | if stat.S_ISCHR(mode): kind = _('character device') | |
364 | elif stat.S_ISBLK(mode): kind = _('block device') |
|
364 | elif stat.S_ISBLK(mode): kind = _('block device') | |
365 | elif stat.S_ISFIFO(mode): kind = _('fifo') |
|
365 | elif stat.S_ISFIFO(mode): kind = _('fifo') | |
366 | elif stat.S_ISSOCK(mode): kind = _('socket') |
|
366 | elif stat.S_ISSOCK(mode): kind = _('socket') | |
367 | elif stat.S_ISDIR(mode): kind = _('directory') |
|
367 | elif stat.S_ISDIR(mode): kind = _('directory') | |
368 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') |
|
368 | self._ui.warn(_('%s: unsupported file type (type is %s)\n') | |
369 | % (self.pathto(f), kind)) |
|
369 | % (self.pathto(f), kind)) | |
370 | return False |
|
370 | return False | |
371 |
|
371 | |||
def walk(self, files=None, match=util.always, badmatch=None):
    """Like statwalk(), but yield only (src, filename) pairs."""
    # discard the stat component of each statwalk() result
    for src, fname, dummy in self.statwalk(files, match, badmatch=badmatch):
        yield src, fname
376 |
|
376 | |||
def statwalk(self, files=None, match=util.always, ignored=False,
             badmatch=None, directories=False):
    '''
    walk recursively through the directory tree, finding all files
    matched by the match function

    results are yielded in a tuple (src, filename, st), where src
    is one of:
        'f' the file was found in the directory tree
        'd' the file is a directory of the tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

    and st is the stat result if the file was found in the directory.
    '''

    # walk all files by default
    if not files:
        files = ['.']
        dc = self._map.copy()
    else:
        files = util.unique(files)
        # dc holds the dirstate entries relevant to the requested files
        dc = self._filter(files)

    def imatch(file_):
        # untracked files that are ignored never match
        if file_ not in dc and self._ignore(file_):
            return False
        return match(file_)

    ignore = self._ignore
    if ignored:
        # caller asked for ignored files too: disable ignore filtering
        imatch = match
        ignore = util.never

    # self._root may end with a path separator when self._root == '/'
    common_prefix_len = len(self._root)
    if not self._root.endswith(os.sep):
        common_prefix_len += 1

    # hoist frequently used callables to locals for loop speed
    normpath = util.normpath
    listdir = osutil.listdir
    lstat = os.lstat
    bisect_left = bisect.bisect_left
    isdir = os.path.isdir
    pconvert = util.pconvert
    join = os.path.join
    s_isdir = stat.S_ISDIR
    supported = self._supported
    _join = self._join
    # paths already yielded (the .hg metadata dir is never reported)
    known = {'.hg': 1}

    # recursion free walker, faster than os.walk.
    def findfiles(s):
        work = [s]
        wadd = work.append
        found = []
        add = found.append
        if directories:
            add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
        while work:
            top = work.pop()
            entries = listdir(top, stat=True)
            # nd is the top of the repository dir tree
            nd = normpath(top[common_prefix_len:])
            if nd == '.':
                nd = ''
            else:
                # do not recurse into a repo contained in this
                # one. use bisect to find .hg directory so speed
                # is good on big directory.
                names = [e[0] for e in entries]
                hg = bisect_left(names, '.hg')
                if hg < len(names) and names[hg] == '.hg':
                    if isdir(join(top, '.hg')):
                        continue
            for f, kind, st in entries:
                np = pconvert(join(nd, f))
                if np in known:
                    continue
                known[np] = 1
                p = join(top, f)
                # don't trip over symlinks
                if kind == stat.S_IFDIR:
                    if not ignore(np):
                        wadd(p)
                        if directories:
                            add((np, 'd', st))
                    # a tracked name that is now a directory on disk
                    if np in dc and match(np):
                        add((np, 'm', st))
                elif imatch(np):
                    if supported(np, st.st_mode):
                        add((np, 'f', st))
                    # tracked but of an unsupported type: report missing
                    elif np in dc:
                        add((np, 'm', st))
        found.sort()
        return found

    # step one, find all files that match our criteria
    files.sort()
    for ff in files:
        nf = normpath(ff)
        f = _join(ff)
        try:
            st = lstat(f)
        except OSError, inst:
            # the argument does not exist on disk; it may still name a
            # tracked file or a prefix directory of tracked files
            found = False
            for fn in dc:
                if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
                    found = True
                    break
            if not found:
                if inst.errno != errno.ENOENT or not badmatch:
                    self._ui.warn('%s: %s\n' %
                                  (self.pathto(ff), inst.strerror))
                elif badmatch and badmatch(ff) and imatch(nf):
                    yield 'b', ff, None
            continue
        if s_isdir(st.st_mode):
            for f, src, st in findfiles(f):
                yield src, f, st
        else:
            if nf in known:
                continue
            known[nf] = 1
            if match(nf):
                if supported(ff, st.st_mode, verbose=True):
                    yield 'f', nf, st
                # NOTE(review): nesting of this elif under match(nf)
                # reconstructed from context -- confirm against upstream
                elif ff in dc:
                    yield 'm', nf, st

    # step two run through anything left in the dc hash and yield
    # if we haven't already seen it
    ks = dc.keys()
    ks.sort()
    for k in ks:
        if k in known:
            continue
        known[k] = 1
        if imatch(k):
            yield 'm', k, None
517 |
|
517 | |||
def status(self, files, match, list_ignored, list_clean):
    # Classify every file reached by statwalk() into the standard
    # status buckets; returns eight lists (see the return statement).
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    # hoist attribute lookups to locals for speed in the loop
    _join = self._join
    lstat = os.lstat
    cmap = self._copymap
    dmap = self._map
    ladd = lookup.append
    madd = modified.append
    aadd = added.append
    uadd = unknown.append
    iadd = ignored.append
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append

    for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
        if fn in dmap:
            type_, mode, size, time, foo = dmap[fn]
        else:
            # not tracked: either ignored or unknown
            if list_ignored and self._ignore(fn):
                iadd(fn)
            else:
                uadd(fn)
            continue
        if src == 'm':
            # walker says the file was only in the dirstate
            nonexistent = True
            if not st:
                try:
                    st = lstat(_join(fn))
                except OSError, inst:
                    if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
                        raise
                    st = None
                # We need to re-check that it is a valid file
                if st and self._supported(fn, st.st_mode):
                    nonexistent = False
            # XXX: what to do with file no longer present in the fs
            # who are not removed in the dirstate ?
            if nonexistent and type_ in "nm":
                dadd(fn)
                continue
        # check the common case first
        if type_ == 'n':
            if not st:
                st = lstat(_join(fn))
            # size -2 and recorded copies always mean "modified"; a size
            # or exec-bit mismatch does too
            if (size >= 0 and (size != st.st_size
                               or (mode ^ st.st_mode) & 0100)
                or size == -2
                or fn in self._copymap):
                madd(fn)
            elif time != int(st.st_mtime):
                # same size but different mtime: needs a content check
                ladd(fn)
            elif list_clean:
                cadd(fn)
        elif type_ == 'm':
            madd(fn)
        elif type_ == 'a':
            aadd(fn)
        elif type_ == 'r':
            radd(fn)

    return (lookup, modified, added, removed, deleted, unknown, ignored,
            clean)
General Comments 0
You need to be logged in to leave comments.
Login now