remotefilelog: fix bug in maybesparsematch returning alwaysmatcher...
Kyle Lippincott
r41107:517a51d9 default
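The change below is to shallowrepo.py. maybesparsematch()'s docstring promises
None when the repository is not sparse, but sparse.matcher() returns an
always-matcher in that case, and matcher objects are truthy. Callers that only
test the result for truthiness (such as _prefetch() in the hunk below, which
guards with "if sparsematch:") therefore treated every repository as sparse and
filtered each file through a matcher that matches everything. The fix checks
ret.always() and returns None instead. A minimal sketch of the contract the fix
restores; wantedfiles() is a hypothetical helper, not code from the extension:

    # Illustrative caller of maybesparsematch(); a None result now means
    # "not a sparse repo", so no per-file filtering is attempted at all.
    def wantedfiles(repo, rev, paths):
        m = repo.maybesparsematch(rev)
        if m is None:
            return paths                   # non-sparse: keep everything
        return [p for p in paths if m(p)]  # sparse: filter by the profile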
@@ -1,300 +1,305 @@
 # shallowrepo.py - shallow repository that uses remote filelogs
 #
 # Copyright 2013 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import

 import os

 from mercurial.i18n import _
 from mercurial.node import hex, nullid, nullrev
 from mercurial import (
     encoding,
     error,
     localrepo,
     match,
     scmutil,
     sparse,
     util,
 )
 from mercurial.utils import procutil
 from . import (
     connectionpool,
     constants,
     contentstore,
     datapack,
     fileserverclient,
     historypack,
     metadatastore,
     remotefilectx,
     remotefilelog,
     shallowutil,
 )

 if util.safehasattr(util, '_hgexecutable'):
     # Before 5be286db
     _hgexecutable = util.hgexecutable
 else:
     from mercurial.utils import procutil
     _hgexecutable = procutil.hgexecutable

 # These make*stores functions are global so that other extensions can replace
 # them.
 def makelocalstores(repo):
     """In-repo stores, like .hg/store/data; can not be discarded."""
     localpath = os.path.join(repo.svfs.vfs.base, 'data')
     if not os.path.exists(localpath):
         os.makedirs(localpath)

     # Instantiate local data stores
     localcontent = contentstore.remotefilelogcontentstore(
         repo, localpath, repo.name, shared=False)
     localmetadata = metadatastore.remotefilelogmetadatastore(
         repo, localpath, repo.name, shared=False)
     return localcontent, localmetadata

 def makecachestores(repo):
     """Typically machine-wide, cache of remote data; can be discarded."""
     # Instantiate shared cache stores
     cachepath = shallowutil.getcachepath(repo.ui)
     cachecontent = contentstore.remotefilelogcontentstore(
         repo, cachepath, repo.name, shared=True)
     cachemetadata = metadatastore.remotefilelogmetadatastore(
         repo, cachepath, repo.name, shared=True)

     repo.sharedstore = cachecontent
     repo.shareddatastores.append(cachecontent)
     repo.sharedhistorystores.append(cachemetadata)

     return cachecontent, cachemetadata

 def makeremotestores(repo, cachecontent, cachemetadata):
     """These stores fetch data from a remote server."""
     # Instantiate remote stores
     repo.fileservice = fileserverclient.fileserverclient(repo)
     remotecontent = contentstore.remotecontentstore(
         repo.ui, repo.fileservice, cachecontent)
     remotemetadata = metadatastore.remotemetadatastore(
         repo.ui, repo.fileservice, cachemetadata)
     return remotecontent, remotemetadata

 def makepackstores(repo):
     """Packs are more efficient (to read from) cache stores."""
     # Instantiate pack stores
     packpath = shallowutil.getcachepackpath(repo,
                                             constants.FILEPACK_CATEGORY)
     packcontentstore = datapack.datapackstore(repo.ui, packpath)
     packmetadatastore = historypack.historypackstore(repo.ui, packpath)

     repo.shareddatastores.append(packcontentstore)
     repo.sharedhistorystores.append(packmetadatastore)
     shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
                                   packmetadatastore)
     return packcontentstore, packmetadatastore

 def makeunionstores(repo):
     """Union stores iterate the other stores and return the first result."""
     repo.shareddatastores = []
     repo.sharedhistorystores = []

     packcontentstore, packmetadatastore = makepackstores(repo)
     cachecontent, cachemetadata = makecachestores(repo)
     localcontent, localmetadata = makelocalstores(repo)
     remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
                                                      cachemetadata)

     # Instantiate union stores
     repo.contentstore = contentstore.unioncontentstore(
         packcontentstore, cachecontent,
         localcontent, remotecontent, writestore=localcontent)
     repo.metadatastore = metadatastore.unionmetadatastore(
         packmetadatastore, cachemetadata, localmetadata, remotemetadata,
         writestore=localmetadata)

     fileservicedatawrite = cachecontent
     fileservicehistorywrite = cachemetadata
     repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
                               fileservicedatawrite, fileservicehistorywrite)
     shallowutil.reportpackmetrics(repo.ui, 'filestore',
                                   packcontentstore, packmetadatastore)

 def wraprepo(repo):
     class shallowrepository(repo.__class__):
         @util.propertycache
         def name(self):
             return self.ui.config('remotefilelog', 'reponame')

         @util.propertycache
         def fallbackpath(self):
             path = repo.ui.config("remotefilelog", "fallbackpath",
                                   repo.ui.config('paths', 'default'))
             if not path:
                 raise error.Abort("no remotefilelog server "
                                   "configured - is your .hg/hgrc trusted?")

             return path

         def maybesparsematch(self, *revs, **kwargs):
             '''
             A wrapper that allows the remotefilelog to invoke sparsematch() if
             this is a sparse repository, or returns None if this is not a
             sparse repository.
             '''
             if revs:
-                return sparse.matcher(repo, revs=revs)
-            return sparse.matcher(repo)
+                ret = sparse.matcher(repo, revs=revs)
+            else:
+                ret = sparse.matcher(repo)
+
+            if ret.always():
+                return None
+            return ret

         def file(self, f):
             if f[0] == '/':
                 f = f[1:]

             if self.shallowmatch(f):
                 return remotefilelog.remotefilelog(self.svfs, f, self)
             else:
                 return super(shallowrepository, self).file(f)

         def filectx(self, path, *args, **kwargs):
             if self.shallowmatch(path):
                 return remotefilectx.remotefilectx(self, path, *args, **kwargs)
             else:
                 return super(shallowrepository, self).filectx(path, *args,
                                                               **kwargs)

         @localrepo.unfilteredmethod
         def commitctx(self, ctx, error=False):
             """Add a new revision to current repository.
             Revision information is passed via the context argument.
             """

             # some contexts already have manifest nodes, they don't need any
             # prefetching (for example if we're just editing a commit message
             # we can reuse manifest
             if not ctx.manifestnode():
                 # prefetch files that will likely be compared
                 m1 = ctx.p1().manifest()
                 files = []
                 for f in ctx.modified() + ctx.added():
                     fparent1 = m1.get(f, nullid)
                     if fparent1 != nullid:
                         files.append((f, hex(fparent1)))
                 self.fileservice.prefetch(files)
             return super(shallowrepository, self).commitctx(ctx,
                                                             error=error)

         def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
                                opts=None):
             """Runs prefetch in background with optional repack
             """
             cmd = [_hgexecutable(), '-R', repo.origroot, 'prefetch']
             if repack:
                 cmd.append('--repack')
             if revs:
                 cmd += ['-r', revs]
             procutil.runbgcommand(cmd, encoding.environ)

         def prefetch(self, revs, base=None, pats=None, opts=None):
             """Prefetches all the necessary file revisions for the given revs
             Optionally runs repack in background
             """
             with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
                             _('prefetching in %s') % repo.origroot):
                 self._prefetch(revs, base, pats, opts)

         def _prefetch(self, revs, base=None, pats=None, opts=None):
             fallbackpath = self.fallbackpath
             if fallbackpath:
                 # If we know a rev is on the server, we should fetch the server
                 # version of those files, since our local file versions might
                 # become obsolete if the local commits are stripped.
                 localrevs = repo.revs('outgoing(%s)', fallbackpath)
                 if base is not None and base != nullrev:
                     serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
                                                 base, localrevs))
                     if serverbase:
                         base = serverbase[0]
             else:
                 localrevs = repo

             mfl = repo.manifestlog
             mfrevlog = mfl.getstorage('')
             if base is not None:
                 mfdict = mfl[repo[base].manifestnode()].read()
                 skip = set(mfdict.iteritems())
             else:
                 skip = set()

             # Copy the skip set to start large and avoid constant resizing,
             # and since it's likely to be very similar to the prefetch set.
             files = skip.copy()
             serverfiles = skip.copy()
             visited = set()
             visited.add(nullrev)
             revcount = len(revs)
             progress = self.ui.makeprogress(_('prefetching'), total=revcount)
             progress.update(0)
             for rev in sorted(revs):
                 ctx = repo[rev]
                 if pats:
                     m = scmutil.match(ctx, pats, opts)
                 sparsematch = repo.maybesparsematch(rev)

                 mfnode = ctx.manifestnode()
                 mfrev = mfrevlog.rev(mfnode)

                 # Decompressing manifests is expensive.
                 # When possible, only read the deltas.
                 p1, p2 = mfrevlog.parentrevs(mfrev)
                 if p1 in visited and p2 in visited:
                     mfdict = mfl[mfnode].readfast()
                 else:
                     mfdict = mfl[mfnode].read()

                 diff = mfdict.iteritems()
                 if pats:
                     diff = (pf for pf in diff if m(pf[0]))
                 if sparsematch:
                     diff = (pf for pf in diff if sparsematch(pf[0]))
                 if rev not in localrevs:
                     serverfiles.update(diff)
                 else:
                     files.update(diff)

                 visited.add(mfrev)
                 progress.increment()

             files.difference_update(skip)
             serverfiles.difference_update(skip)
             progress.complete()

             # Fetch files known to be on the server
             if serverfiles:
                 results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                 repo.fileservice.prefetch(results, force=True)

             # Fetch files that may or may not be on the server
             if files:
                 results = [(path, hex(fnode)) for (path, fnode) in files]
                 repo.fileservice.prefetch(results)

         def close(self):
             super(shallowrepository, self).close()
             self.connectionpool.close()

     repo.__class__ = shallowrepository

     repo.shallowmatch = match.always(repo.root, '')

     makeunionstores(repo)

     repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                              None)
     repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                              None)
     if not util.safehasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)

     if repo.includepattern or repo.excludepattern:
         repo.shallowmatch = match.match(repo.root, '', None,
                                         repo.includepattern, repo.excludepattern)
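For orientation in the surrounding file: makeunionstores() wires the pack,
cache, local, and remote stores into repo.contentstore, and per its docstring a
union store iterates its children and returns the first result, so lookups hit
the cheap pack files first and fall back to a remote fetch only last
(remotecontent is deliberately the final store in the chain). A hypothetical,
simplified store sketching that first-hit pattern; it is not the extension's
actual unioncontentstore:

    # Hypothetical first-hit lookup over a chain of backing stores, in the
    # same order wired up above: pack, cache, local, then remote.
    class firsthitstore(object):
        def __init__(self, *stores):
            self._stores = stores

        def get(self, name, node):
            for store in self._stores:
                try:
                    return store.get(name, node)  # first store that has it wins
                except KeyError:
                    continue                      # miss here; try the next one
            raise KeyError((name, node))          # no store had this revision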
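Likewise, _prefetch() seeds both candidate sets from the manifest of base and
subtracts that same skip set once the walk finishes, so only file revisions
absent from the base are requested, and revisions the server is known to have
(rev not in localrevs) are fetched with force=True. A toy illustration of the
set arithmetic; the file/node pairs are made up:

    # Toy version of the skip-set bookkeeping in _prefetch() above.
    skip = {('a.txt', 'n1'), ('b.txt', 'n2')}         # base manifest entries
    files = skip.copy()                               # start large, as the
                                                      # source comment explains
    files.update([('b.txt', 'n2'), ('c.txt', 'n3')])  # entries from the revs
    files.difference_update(skip)                     # drop what base has
    print(files)                                      # {('c.txt', 'n3')}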