##// END OF EJS Templates
remotefilelog: use progress helper in shallowrepo...
Martin von Zweigbergk -
r40880:b6a6dc1a default
parent child Browse files
Show More
@@ -1,303 +1,300 b''
1 1 # shallowrepo.py - shallow repository that uses remote filelogs
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from __future__ import absolute_import
8 8
9 9 import os
10 10
11 11 from mercurial.i18n import _
12 12 from mercurial.node import hex, nullid, nullrev
13 13 from mercurial import (
14 14 encoding,
15 15 error,
16 16 localrepo,
17 17 match,
18 18 scmutil,
19 19 sparse,
20 20 util,
21 21 )
22 22 from mercurial.utils import procutil
23 23 from . import (
24 24 connectionpool,
25 25 constants,
26 26 contentstore,
27 27 datapack,
28 28 fileserverclient,
29 29 historypack,
30 30 metadatastore,
31 31 remotefilectx,
32 32 remotefilelog,
33 33 shallowutil,
34 34 )
35 35
# Compatibility shim: hgexecutable() moved from mercurial.util to
# mercurial.utils.procutil in 5be286db (the presence of the private
# util._hgexecutable attribute identifies the older layout).
if util.safehasattr(util, '_hgexecutable'):
    # Before 5be286db
    _hgexecutable = util.hgexecutable
else:
    # NOTE(review): procutil is already imported at module top; this
    # re-import is redundant but harmless — confirm before removing.
    from mercurial.utils import procutil
    _hgexecutable = procutil.hgexecutable
42 42
43 _prefetching = _('prefetching')
44
45 43 # These make*stores functions are global so that other extensions can replace
46 44 # them.
def makelocalstores(repo):
    """Build the repo-local content/metadata stores.

    These live under .hg/store/data and therefore can not be
    discarded, unlike the shared cache stores.
    """
    storepath = os.path.join(repo.svfs.vfs.base, 'data')
    if not os.path.exists(storepath):
        os.makedirs(storepath)

    # Local (non-shared) data and history stores.
    content = contentstore.remotefilelogcontentstore(
        repo, storepath, repo.name, shared=False)
    metadata = metadatastore.remotefilelogmetadatastore(
        repo, storepath, repo.name, shared=False)
    return content, metadata
59 57
def makecachestores(repo):
    """Build the (typically machine-wide) shared cache stores.

    Cache contents are fetched copies of remote data and can be
    discarded at any time.
    """
    cachepath = shallowutil.getcachepath(repo.ui)
    content = contentstore.remotefilelogcontentstore(
        repo, cachepath, repo.name, shared=True)
    metadata = metadatastore.remotefilelogmetadatastore(
        repo, cachepath, repo.name, shared=True)

    # Register the shared stores on the repo object so other components
    # (e.g. the union stores) can find them.
    repo.sharedstore = content
    repo.shareddatastores.append(content)
    repo.sharedhistorystores.append(metadata)

    return content, metadata
74 72
def makeremotestores(repo, cachecontent, cachemetadata):
    """Build stores that fetch missing data from a remote server."""
    # The file service is the shared transport used by both remote stores.
    repo.fileservice = fileserverclient.fileserverclient(repo)
    content = contentstore.remotecontentstore(
        repo.ui, repo.fileservice, cachecontent)
    metadata = metadatastore.remotemetadatastore(
        repo.ui, repo.fileservice, cachemetadata)
    return content, metadata
84 82
def makepackstores(repo):
    """Build pack-based cache stores (more efficient to read from)."""
    packpath = shallowutil.getcachepackpath(repo,
                                            constants.FILEPACK_CATEGORY)
    contentpacks = datapack.datapackstore(repo.ui, packpath)
    historypacks = historypack.historypackstore(repo.ui, packpath)

    # Register the pack stores as shared stores and report their metrics.
    repo.shareddatastores.append(contentpacks)
    repo.sharedhistorystores.append(historypacks)
    shallowutil.reportpackmetrics(repo.ui, 'filestore', contentpacks,
                                  historypacks)
    return contentpacks, historypacks
98 96
def makeunionstores(repo):
    """Wire all stores together behind union stores.

    A union store iterates its member stores and returns the first
    result it finds.
    """
    repo.shareddatastores = []
    repo.sharedhistorystores = []

    packcontent, packmetadata = makepackstores(repo)
    cachecontent, cachemetadata = makecachestores(repo)
    localcontent, localmetadata = makelocalstores(repo)
    remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
                                                     cachemetadata)

    # Lookup order is pack -> cache -> local -> remote; new data is
    # written to the local store.
    repo.contentstore = contentstore.unioncontentstore(
        packcontent, cachecontent,
        localcontent, remotecontent, writestore=localcontent)
    repo.metadatastore = metadatastore.unionmetadatastore(
        packmetadata, cachemetadata, localmetadata, remotemetadata,
        writestore=localmetadata)

    # Data fetched by the file service lands in the shared cache stores.
    repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
                              cachecontent, cachemetadata)
    shallowutil.reportpackmetrics(repo.ui, 'filestore',
                                  packcontent, packmetadata)
124 122
def wraprepo(repo):
    """Swap *repo*'s class for a shallow subclass and set up its stores.

    The subclass routes file access through remotefilelog for paths
    matched by repo.shallowmatch and adds prefetch support.  Note that
    several methods below close over the original ``repo`` rather than
    using ``self``; keep that as-is when modifying this code.
    """
    class shallowrepository(repo.__class__):
        @util.propertycache
        def name(self):
            # Repo name used to key the shared cache stores.
            return self.ui.config('remotefilelog', 'reponame')

        @util.propertycache
        def fallbackpath(self):
            # Server to fetch file contents from; defaults to paths.default.
            path = repo.ui.config("remotefilelog", "fallbackpath",
                                  repo.ui.config('paths', 'default'))
            if not path:
                raise error.Abort("no remotefilelog server "
                                  "configured - is your .hg/hgrc trusted?")

            return path

        def maybesparsematch(self, *revs, **kwargs):
            '''
            A wrapper that allows the remotefilelog to invoke sparsematch() if
            this is a sparse repository, or returns None if this is not a
            sparse repository.
            '''
            if revs:
                return sparse.matcher(repo, revs=revs)
            return sparse.matcher(repo)

        def file(self, f):
            # Manifest paths sometimes carry a leading slash; strip it.
            if f[0] == '/':
                f = f[1:]

            # Shallow-matched files use remotefilelog; everything else
            # falls through to the normal filelog.
            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.svfs, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, *args, **kwargs):
            # Mirror of file(): shallow paths get a remotefilectx.
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, *args, **kwargs)
            else:
                return super(shallowrepository, self).filectx(path, *args,
                                                              **kwargs)

        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False):
            """Add a new revision to current repository.
            Revision information is passed via the context argument.
            """

            # some contexts already have manifest nodes, they don't need any
            # prefetching (for example if we're just editing a commit message
            # we can reuse manifest
            if not ctx.manifestnode():
                # prefetch files that will likely be compared
                m1 = ctx.p1().manifest()
                files = []
                for f in ctx.modified() + ctx.added():
                    fparent1 = m1.get(f, nullid)
                    if fparent1 != nullid:
                        files.append((f, hex(fparent1)))
                self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(ctx,
                                                            error=error)

        def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
                               opts=None):
            """Runs prefetch in background with optional repack

            Spawns a detached ``hg prefetch`` process.  NOTE(review):
            base/pats/opts are accepted for signature parity but are not
            forwarded to the child command.
            """
            cmd = [_hgexecutable(), '-R', repo.origroot, 'prefetch']
            if repack:
                cmd.append('--repack')
            if revs:
                cmd += ['-r', revs]
            procutil.runbgcommand(cmd, encoding.environ)

        def prefetch(self, revs, base=None, pats=None, opts=None):
            """Prefetches all the necessary file revisions for the given revs
            Optionally runs repack in background
            """
            # Serialize concurrent prefetches with a repo-level lock.
            with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
                            _('prefetching in %s') % repo.origroot):
                self._prefetch(revs, base, pats, opts)

        def _prefetch(self, revs, base=None, pats=None, opts=None):
            # Collect (path, filenode) pairs for every rev, splitting them
            # into "known on server" vs "maybe local", then bulk-fetch.
            fallbackpath = self.fallbackpath
            if fallbackpath:
                # If we know a rev is on the server, we should fetch the server
                # version of those files, since our local file versions might
                # become obsolete if the local commits are stripped.
                localrevs = repo.revs('outgoing(%s)', fallbackpath)
                if base is not None and base != nullrev:
                    serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
                                                base, localrevs))
                    if serverbase:
                        base = serverbase[0]
            else:
                # No server: treat every rev as local ("rev in repo" is
                # always true below).
                localrevs = repo

            mfl = repo.manifestlog
            mfrevlog = mfl.getstorage('')
            if base is not None:
                # Entries already present in the base manifest need not be
                # fetched again.
                mfdict = mfl[repo[base].manifestnode()].read()
                skip = set(mfdict.iteritems())
            else:
                skip = set()

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = skip.copy()
            serverfiles = skip.copy()
            visited = set()
            visited.add(nullrev)
            revcount = len(revs)
            progress = self.ui.makeprogress(_('prefetching'), total=revcount)
            progress.update(0)
            for rev in sorted(revs):
                ctx = repo[rev]
                if pats:
                    m = scmutil.match(ctx, pats, opts)
                sparsematch = repo.maybesparsematch(rev)

                mfnode = ctx.manifestnode()
                mfrev = mfrevlog.rev(mfnode)

                # Decompressing manifests is expensive.
                # When possible, only read the deltas.
                p1, p2 = mfrevlog.parentrevs(mfrev)
                if p1 in visited and p2 in visited:
                    mfdict = mfl[mfnode].readfast()
                else:
                    mfdict = mfl[mfnode].read()

                # Filter manifest entries through the pattern and sparse
                # matchers before adding them to the fetch sets.
                diff = mfdict.iteritems()
                if pats:
                    diff = (pf for pf in diff if m(pf[0]))
                if sparsematch:
                    diff = (pf for pf in diff if sparsematch(pf[0]))
                if rev not in localrevs:
                    serverfiles.update(diff)
                else:
                    files.update(diff)

                visited.add(mfrev)
                progress.increment()

            files.difference_update(skip)
            serverfiles.difference_update(skip)
            progress.complete()

            # Fetch files known to be on the server
            if serverfiles:
                results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                repo.fileservice.prefetch(results, force=True)

            # Fetch files that may or may not be on the server
            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                repo.fileservice.prefetch(results)

        def close(self):
            super(shallowrepository, self).close()
            self.connectionpool.close()

    repo.__class__ = shallowrepository

    # Default: every file is shallow; narrowed below if include/exclude
    # patterns are configured.
    repo.shallowmatch = match.always(repo.root, '')

    makeunionstores(repo)

    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                             None)
    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                             None)
    # Reuse an existing connection pool if another wrapper already made one.
    if not util.safehasattr(repo, 'connectionpool'):
        repo.connectionpool = connectionpool.connectionpool(repo)

    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(repo.root, '', None,
                                        repo.includepattern, repo.excludepattern)
General Comments 0
You need to be logged in to leave comments. Login now