shallowrepo: remove backwards compat code that predates in-tree remotefilelog...
Augie Fackler
r42695:373aeede default
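The hunk removed below is remotefilelog's last use of the hasattr-probing compat idiom: before Mercurial change 5be286db moved hgexecutable from util to procutil, the extension had to detect at import time which location the running Mercurial provided. Now that remotefilelog ships in-tree and is released in lockstep with Mercurial, the legacy branch can never be taken. A minimal sketch of the general idiom follows, using stdlib stand-ins and an illustrative resolve() helper rather than remotefilelog's actual code:

import os.path
import posixpath

# Sketch of the import-time compat probe this commit deletes. resolve()
# and the module pair are illustrative, not remotefilelog APIs; the real
# block probed util for a pre-5be286db attribute and fell back to
# procutil.hgexecutable.
def resolve(oldmod, newmod, name):
    # Prefer the legacy home of the attribute if the running version
    # still exposes it; otherwise use its modern home.
    if hasattr(oldmod, name):
        return getattr(oldmod, name)
    return getattr(newmod, name)

join = resolve(os.path, posixpath, 'join')  # stdlib stand-ins for util/procutil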
@@ -1,305 +1,298 @@
 # shallowrepo.py - shallow repository that uses remote filelogs
 #
 # Copyright 2013 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import

 import os

 from mercurial.i18n import _
 from mercurial.node import hex, nullid, nullrev
 from mercurial import (
     encoding,
     error,
     localrepo,
     match,
     scmutil,
     sparse,
     util,
 )
 from mercurial.utils import procutil
 from . import (
     connectionpool,
     constants,
     contentstore,
     datapack,
     fileserverclient,
     historypack,
     metadatastore,
     remotefilectx,
     remotefilelog,
     shallowutil,
 )

-if util.safehasattr(util, '_hgexecutable'):
-    # Before 5be286db
-    _hgexecutable = util.hgexecutable
-else:
-    from mercurial.utils import procutil
-    _hgexecutable = procutil.hgexecutable
-
 # These make*stores functions are global so that other extensions can replace
 # them.
 def makelocalstores(repo):
     """In-repo stores, like .hg/store/data; can not be discarded."""
     localpath = os.path.join(repo.svfs.vfs.base, 'data')
     if not os.path.exists(localpath):
         os.makedirs(localpath)

     # Instantiate local data stores
     localcontent = contentstore.remotefilelogcontentstore(
         repo, localpath, repo.name, shared=False)
     localmetadata = metadatastore.remotefilelogmetadatastore(
         repo, localpath, repo.name, shared=False)
     return localcontent, localmetadata

 def makecachestores(repo):
     """Typically machine-wide, cache of remote data; can be discarded."""
     # Instantiate shared cache stores
     cachepath = shallowutil.getcachepath(repo.ui)
     cachecontent = contentstore.remotefilelogcontentstore(
         repo, cachepath, repo.name, shared=True)
     cachemetadata = metadatastore.remotefilelogmetadatastore(
         repo, cachepath, repo.name, shared=True)

     repo.sharedstore = cachecontent
     repo.shareddatastores.append(cachecontent)
     repo.sharedhistorystores.append(cachemetadata)

     return cachecontent, cachemetadata

 def makeremotestores(repo, cachecontent, cachemetadata):
     """These stores fetch data from a remote server."""
     # Instantiate remote stores
     repo.fileservice = fileserverclient.fileserverclient(repo)
     remotecontent = contentstore.remotecontentstore(
         repo.ui, repo.fileservice, cachecontent)
     remotemetadata = metadatastore.remotemetadatastore(
         repo.ui, repo.fileservice, cachemetadata)
     return remotecontent, remotemetadata

 def makepackstores(repo):
     """Packs are more efficient (to read from) cache stores."""
     # Instantiate pack stores
     packpath = shallowutil.getcachepackpath(repo,
                                             constants.FILEPACK_CATEGORY)
     packcontentstore = datapack.datapackstore(repo.ui, packpath)
     packmetadatastore = historypack.historypackstore(repo.ui, packpath)

     repo.shareddatastores.append(packcontentstore)
     repo.sharedhistorystores.append(packmetadatastore)
     shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
                                   packmetadatastore)
     return packcontentstore, packmetadatastore

 def makeunionstores(repo):
     """Union stores iterate the other stores and return the first result."""
     repo.shareddatastores = []
     repo.sharedhistorystores = []

     packcontentstore, packmetadatastore = makepackstores(repo)
     cachecontent, cachemetadata = makecachestores(repo)
     localcontent, localmetadata = makelocalstores(repo)
     remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
                                                      cachemetadata)

     # Instantiate union stores
     repo.contentstore = contentstore.unioncontentstore(
         packcontentstore, cachecontent,
         localcontent, remotecontent, writestore=localcontent)
     repo.metadatastore = metadatastore.unionmetadatastore(
         packmetadatastore, cachemetadata, localmetadata, remotemetadata,
         writestore=localmetadata)

     fileservicedatawrite = cachecontent
     fileservicehistorywrite = cachemetadata
     repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
                               fileservicedatawrite, fileservicehistorywrite)
     shallowutil.reportpackmetrics(repo.ui, 'filestore',
                                   packcontentstore, packmetadatastore)

 def wraprepo(repo):
     class shallowrepository(repo.__class__):
         @util.propertycache
         def name(self):
             return self.ui.config('remotefilelog', 'reponame')

         @util.propertycache
         def fallbackpath(self):
             path = repo.ui.config("remotefilelog", "fallbackpath",
                                   repo.ui.config('paths', 'default'))
             if not path:
                 raise error.Abort("no remotefilelog server "
                                   "configured - is your .hg/hgrc trusted?")

             return path

         def maybesparsematch(self, *revs, **kwargs):
             '''
             A wrapper that allows the remotefilelog to invoke sparsematch() if
             this is a sparse repository, or returns None if this is not a
             sparse repository.
             '''
             if revs:
                 ret = sparse.matcher(repo, revs=revs)
             else:
                 ret = sparse.matcher(repo)

             if ret.always():
                 return None
             return ret

         def file(self, f):
             if f[0] == '/':
                 f = f[1:]

             if self.shallowmatch(f):
                 return remotefilelog.remotefilelog(self.svfs, f, self)
             else:
                 return super(shallowrepository, self).file(f)

         def filectx(self, path, *args, **kwargs):
             if self.shallowmatch(path):
                 return remotefilectx.remotefilectx(self, path, *args, **kwargs)
             else:
                 return super(shallowrepository, self).filectx(path, *args,
                                                               **kwargs)

         @localrepo.unfilteredmethod
         def commitctx(self, ctx, error=False):
             """Add a new revision to current repository.
             Revision information is passed via the context argument.
             """

             # some contexts already have manifest nodes, they don't need any
             # prefetching (for example if we're just editing a commit message
             # we can reuse manifest
             if not ctx.manifestnode():
                 # prefetch files that will likely be compared
                 m1 = ctx.p1().manifest()
                 files = []
                 for f in ctx.modified() + ctx.added():
                     fparent1 = m1.get(f, nullid)
                     if fparent1 != nullid:
                         files.append((f, hex(fparent1)))
                 self.fileservice.prefetch(files)
             return super(shallowrepository, self).commitctx(ctx,
                                                             error=error)

         def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
                                opts=None):
             """Runs prefetch in background with optional repack
             """
-            cmd = [_hgexecutable(), '-R', repo.origroot, 'prefetch']
+            cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'prefetch']
             if repack:
                 cmd.append('--repack')
             if revs:
                 cmd += ['-r', revs]
             procutil.runbgcommand(cmd, encoding.environ)

         def prefetch(self, revs, base=None, pats=None, opts=None):
             """Prefetches all the necessary file revisions for the given revs
             Optionally runs repack in background
             """
             with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
                             _('prefetching in %s') % repo.origroot):
                 self._prefetch(revs, base, pats, opts)

         def _prefetch(self, revs, base=None, pats=None, opts=None):
             fallbackpath = self.fallbackpath
             if fallbackpath:
                 # If we know a rev is on the server, we should fetch the server
                 # version of those files, since our local file versions might
                 # become obsolete if the local commits are stripped.
                 localrevs = repo.revs('outgoing(%s)', fallbackpath)
                 if base is not None and base != nullrev:
                     serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
                                                 base, localrevs))
                     if serverbase:
                         base = serverbase[0]
             else:
                 localrevs = repo

             mfl = repo.manifestlog
             mfrevlog = mfl.getstorage('')
             if base is not None:
                 mfdict = mfl[repo[base].manifestnode()].read()
                 skip = set(mfdict.iteritems())
             else:
                 skip = set()

             # Copy the skip set to start large and avoid constant resizing,
             # and since it's likely to be very similar to the prefetch set.
             files = skip.copy()
             serverfiles = skip.copy()
             visited = set()
             visited.add(nullrev)
             revcount = len(revs)
             progress = self.ui.makeprogress(_('prefetching'), total=revcount)
             progress.update(0)
             for rev in sorted(revs):
                 ctx = repo[rev]
                 if pats:
                     m = scmutil.match(ctx, pats, opts)
                 sparsematch = repo.maybesparsematch(rev)

                 mfnode = ctx.manifestnode()
                 mfrev = mfrevlog.rev(mfnode)

                 # Decompressing manifests is expensive.
                 # When possible, only read the deltas.
                 p1, p2 = mfrevlog.parentrevs(mfrev)
                 if p1 in visited and p2 in visited:
                     mfdict = mfl[mfnode].readfast()
                 else:
                     mfdict = mfl[mfnode].read()

                 diff = mfdict.iteritems()
                 if pats:
                     diff = (pf for pf in diff if m(pf[0]))
                 if sparsematch:
                     diff = (pf for pf in diff if sparsematch(pf[0]))
                 if rev not in localrevs:
                     serverfiles.update(diff)
                 else:
                     files.update(diff)

                 visited.add(mfrev)
                 progress.increment()

             files.difference_update(skip)
             serverfiles.difference_update(skip)
             progress.complete()

             # Fetch files known to be on the server
             if serverfiles:
                 results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                 repo.fileservice.prefetch(results, force=True)

             # Fetch files that may or may not be on the server
             if files:
                 results = [(path, hex(fnode)) for (path, fnode) in files]
                 repo.fileservice.prefetch(results)

         def close(self):
             super(shallowrepository, self).close()
             self.connectionpool.close()

     repo.__class__ = shallowrepository

     repo.shallowmatch = match.always()

     makeunionstores(repo)

     repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                              None)
     repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                              None)
     if not util.safehasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)

     if repo.includepattern or repo.excludepattern:
         repo.shallowmatch = match.match(repo.root, '', None,
             repo.includepattern, repo.excludepattern)
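As the makeunionstores docstring above puts it, union stores iterate the other stores and return the first result, with the pack, cache, local, and remote stores consulted in that order. A minimal sketch of that first-hit lookup, using a hypothetical get(name, node) interface rather than remotefilelog's actual store API:

class unionstore(object):
    """First-hit union over substores, cheapest first (pack, cache,
    local, then remote); putting the remote store last means a network
    fetch only happens when every local source misses."""

    def __init__(self, *substores):
        self.substores = substores

    def get(self, name, node):
        # Ask each substore in order and return the first hit.
        for store in self.substores:
            try:
                return store.get(name, node)
            except KeyError:
                continue
        raise KeyError('%s:%s not found in any store' % (name, node))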