@@ -123,6 +123,8 @@ class dirstatemap(_dirstatemapcommon):
             msg += "(should have detected unsupported requirement)"
             raise error.ProgrammingError(msg)
 
+    ### Core data storage and access
+
     @propertycache
     def _map(self):
         self._map = {}
@@ -162,6 +164,128 @@ class dirstatemap(_dirstatemapcommon):
     def keys(self):
         return self._map.keys()
 
+    ### reading/setting parents
+
+    def parents(self):
+        if not self._parents:
+            try:
+                fp = self._opendirstatefile()
+                st = fp.read(2 * self._nodelen)
+                fp.close()
+            except IOError as err:
+                if err.errno != errno.ENOENT:
+                    raise
+                # File doesn't exist, so the current state is empty
+                st = b''
+
+            l = len(st)
+            if l == self._nodelen * 2:
+                self._parents = (
+                    st[: self._nodelen],
+                    st[self._nodelen : 2 * self._nodelen],
+                )
+            elif l == 0:
+                self._parents = (
+                    self._nodeconstants.nullid,
+                    self._nodeconstants.nullid,
+                )
+            else:
+                raise error.Abort(
+                    _(b'working directory state appears damaged!')
+                )
+
+        return self._parents
+
+    def setparents(self, p1, p2, fold_p2=False):
+        self._parents = (p1, p2)
+        self._dirtyparents = True
+        copies = {}
+        if fold_p2:
+            for f, s in pycompat.iteritems(self._map):
+                # Discard "merged" markers when moving away from a merge state
+                if s.merged or s.from_p2:
+                    source = self.copymap.pop(f, None)
+                    if source:
+                        copies[f] = source
+                    s.drop_merge_data()
+        return copies
+
+    ### disk interaction
+
+    def read(self):
+        # ignore HG_PENDING because identity is used only for writing
+        self.identity = util.filestat.frompath(
+            self._opener.join(self._filename)
+        )
+
+        try:
+            fp = self._opendirstatefile()
+            try:
+                st = fp.read()
+            finally:
+                fp.close()
+        except IOError as err:
+            if err.errno != errno.ENOENT:
+                raise
+            return
+        if not st:
+            return
+
+        if util.safehasattr(parsers, b'dict_new_presized'):
+            # Make an estimate of the number of files in the dirstate based on
+            # its size. This trades wasting some memory for avoiding costly
+            # resizes. Each entry have a prefix of 17 bytes followed by one or
+            # two path names. Studies on various large-scale real-world repositories
+            # found 54 bytes a reasonable upper limit for the average path names.
+            # Copy entries are ignored for the sake of this estimate.
+            self._map = parsers.dict_new_presized(len(st) // 71)
+
+        # Python's garbage collector triggers a GC each time a certain number
+        # of container objects (the number being defined by
+        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
+        # for each file in the dirstate. The C version then immediately marks
+        # them as not to be tracked by the collector. However, this has no
+        # effect on when GCs are triggered, only on what objects the GC looks
+        # into. This means that O(number of files) GCs are unavoidable.
+        # Depending on when in the process's lifetime the dirstate is parsed,
+        # this can get very expensive. As a workaround, disable GC while
+        # parsing the dirstate.
+        #
+        # (we cannot decorate the function directly since it is in a C module)
+        parse_dirstate = util.nogc(parsers.parse_dirstate)
+        p = parse_dirstate(self._map, self.copymap, st)
+        if not self._dirtyparents:
+            self.setparents(*p)
+
+        # Avoid excess attribute lookups by fast pathing certain checks
+        self.__contains__ = self._map.__contains__
+        self.__getitem__ = self._map.__getitem__
+        self.get = self._map.get
+
+    def write(self, _tr, st, now):
+        d = parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
+        st.write(d)
+        st.close()
+        self._dirtyparents = False
+
+    def _opendirstatefile(self):
+        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
+        if self._pendingmode is not None and self._pendingmode != mode:
+            fp.close()
+            raise error.Abort(
+                _(b'working directory state may be changed parallelly')
+            )
+        self._pendingmode = mode
+        return fp
+
+    @propertycache
+    def identity(self):
+        self._map
+        return self.identity
+
+    ### code related to maintaining and accessing "extra" property
+    # (e.g. "has_dir")
+
     def _dirs_incr(self, filename, old_entry=None):
         """incremente the dirstate counter if applicable"""
         if (
@@ -184,6 +308,60 @@ class dirstatemap(_dirstatemapcommon):
             normed = util.normcase(filename)
             self.filefoldmap.pop(normed, None)
 
+    @propertycache
+    def filefoldmap(self):
+        """Returns a dictionary mapping normalized case paths to their
+        non-normalized versions.
+        """
+        try:
+            makefilefoldmap = parsers.make_file_foldmap
+        except AttributeError:
+            pass
+        else:
+            return makefilefoldmap(
+                self._map, util.normcasespec, util.normcasefallback
+            )
+
+        f = {}
+        normcase = util.normcase
+        for name, s in pycompat.iteritems(self._map):
+            if not s.removed:
+                f[normcase(name)] = name
+        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
+        return f
+
+    @propertycache
+    def dirfoldmap(self):
+        f = {}
+        normcase = util.normcase
+        for name in self._dirs:
+            f[normcase(name)] = name
+        return f
+
+    def hastrackeddir(self, d):
+        """
+        Returns True if the dirstate contains a tracked (not removed) file
+        in this directory.
+        """
+        return d in self._dirs
+
+    def hasdir(self, d):
+        """
+        Returns True if the dirstate contains a file (tracked or removed)
+        in this directory.
+        """
+        return d in self._alldirs
+
+    @propertycache
+    def _dirs(self):
+        return pathutil.dirs(self._map, only_tracked=True)
+
+    @propertycache
+    def _alldirs(self):
+        return pathutil.dirs(self._map)
+
+    ### code related to manipulation of entries and copy-sources
+
     def set_possibly_dirty(self, filename):
         """record that the current state of the file on disk is unknown"""
         self[filename].set_possibly_dirty()
@@ -307,174 +485,6 @@ class dirstatemap(_dirstatemapcommon):
         entry.set_untracked()
         return True
 
-    @propertycache
-    def filefoldmap(self):
-        """Returns a dictionary mapping normalized case paths to their
-        non-normalized versions.
-        """
-        try:
-            makefilefoldmap = parsers.make_file_foldmap
-        except AttributeError:
-            pass
-        else:
-            return makefilefoldmap(
-                self._map, util.normcasespec, util.normcasefallback
-            )
-
-        f = {}
-        normcase = util.normcase
-        for name, s in pycompat.iteritems(self._map):
-            if not s.removed:
-                f[normcase(name)] = name
-        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
-        return f
-
-    def hastrackeddir(self, d):
-        """
-        Returns True if the dirstate contains a tracked (not removed) file
-        in this directory.
-        """
-        return d in self._dirs
-
-    def hasdir(self, d):
-        """
-        Returns True if the dirstate contains a file (tracked or removed)
-        in this directory.
-        """
-        return d in self._alldirs
-
-    @propertycache
-    def _dirs(self):
-        return pathutil.dirs(self._map, only_tracked=True)
-
-    @propertycache
-    def _alldirs(self):
-        return pathutil.dirs(self._map)
-
-    def _opendirstatefile(self):
-        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
-        if self._pendingmode is not None and self._pendingmode != mode:
-            fp.close()
-            raise error.Abort(
-                _(b'working directory state may be changed parallelly')
-            )
-        self._pendingmode = mode
-        return fp
-
-    def parents(self):
-        if not self._parents:
-            try:
-                fp = self._opendirstatefile()
-                st = fp.read(2 * self._nodelen)
-                fp.close()
-            except IOError as err:
-                if err.errno != errno.ENOENT:
-                    raise
-                # File doesn't exist, so the current state is empty
-                st = b''
-
-            l = len(st)
-            if l == self._nodelen * 2:
-                self._parents = (
-                    st[: self._nodelen],
-                    st[self._nodelen : 2 * self._nodelen],
-                )
-            elif l == 0:
-                self._parents = (
-                    self._nodeconstants.nullid,
-                    self._nodeconstants.nullid,
-                )
-            else:
-                raise error.Abort(
-                    _(b'working directory state appears damaged!')
-                )
-
-        return self._parents
-
-    def setparents(self, p1, p2, fold_p2=False):
-        self._parents = (p1, p2)
-        self._dirtyparents = True
-        copies = {}
-        if fold_p2:
-            for f, s in pycompat.iteritems(self._map):
-                # Discard "merged" markers when moving away from a merge state
-                if s.merged or s.from_p2:
-                    source = self.copymap.pop(f, None)
-                    if source:
-                        copies[f] = source
-                    s.drop_merge_data()
-        return copies
-
-    def read(self):
-        # ignore HG_PENDING because identity is used only for writing
-        self.identity = util.filestat.frompath(
-            self._opener.join(self._filename)
-        )
-
-        try:
-            fp = self._opendirstatefile()
-            try:
-                st = fp.read()
-            finally:
-                fp.close()
-        except IOError as err:
-            if err.errno != errno.ENOENT:
-                raise
-            return
-        if not st:
-            return
-
-        if util.safehasattr(parsers, b'dict_new_presized'):
-            # Make an estimate of the number of files in the dirstate based on
-            # its size. This trades wasting some memory for avoiding costly
-            # resizes. Each entry have a prefix of 17 bytes followed by one or
-            # two path names. Studies on various large-scale real-world repositories
-            # found 54 bytes a reasonable upper limit for the average path names.
-            # Copy entries are ignored for the sake of this estimate.
-            self._map = parsers.dict_new_presized(len(st) // 71)
-
-        # Python's garbage collector triggers a GC each time a certain number
-        # of container objects (the number being defined by
-        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
-        # for each file in the dirstate. The C version then immediately marks
-        # them as not to be tracked by the collector. However, this has no
-        # effect on when GCs are triggered, only on what objects the GC looks
-        # into. This means that O(number of files) GCs are unavoidable.
-        # Depending on when in the process's lifetime the dirstate is parsed,
-        # this can get very expensive. As a workaround, disable GC while
-        # parsing the dirstate.
-        #
-        # (we cannot decorate the function directly since it is in a C module)
-        parse_dirstate = util.nogc(parsers.parse_dirstate)
-        p = parse_dirstate(self._map, self.copymap, st)
-        if not self._dirtyparents:
-            self.setparents(*p)
-
-        # Avoid excess attribute lookups by fast pathing certain checks
-        self.__contains__ = self._map.__contains__
-        self.__getitem__ = self._map.__getitem__
-        self.get = self._map.get
-
-    def write(self, _tr, st, now):
-        st.write(
-            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
-        )
-        st.close()
-        self._dirtyparents = False
-
-    @propertycache
-    def identity(self):
-        self._map
-        return self.identity
-
-    @propertycache
-    def dirfoldmap(self):
-        f = {}
-        normcase = util.normcase
-        for name in self._dirs:
-            f[normcase(name)] = name
-        return f
-
 
 if rustmod is not None:
 
@@ -485,39 +495,281 @@ if rustmod is not None:
             )
             self._docket = None
 
-        def addfile(
-            self,
-            f,
-            mode=0,
-            size=None,
-            mtime=None,
-            added=False,
-            merged=False,
-            from_p2=False,
-            possibly_dirty=False,
-        ):
-            if added:
-                assert not possibly_dirty
-                assert not from_p2
-                item = DirstateItem.new_added()
-            elif merged:
-                assert not possibly_dirty
-                assert not from_p2
-                item = DirstateItem.new_merged()
-            elif from_p2:
-                assert not possibly_dirty
-                item = DirstateItem.new_from_p2()
-            elif possibly_dirty:
-                item = DirstateItem.new_possibly_dirty()
-            else:
-                assert size is not None
-                assert mtime is not None
-                size = size & rangemask
-                mtime = mtime & rangemask
-                item = DirstateItem.new_normal(mode, size, mtime)
-            self._map.addfile(f, item)
-            if added:
-                self.copymap.pop(f, None)
+        ### Core data storage and access
+
+        @property
+        def docket(self):
+            if not self._docket:
+                if not self._use_dirstate_v2:
+                    raise error.ProgrammingError(
+                        b'dirstate only has a docket in v2 format'
+                    )
+                self._docket = docketmod.DirstateDocket.parse(
+                    self._readdirstatefile(), self._nodeconstants
+                )
+            return self._docket
+
+        @propertycache
+        def _map(self):
+            """
+            Fills the Dirstatemap when called.
+            """
+            # ignore HG_PENDING because identity is used only for writing
+            self.identity = util.filestat.frompath(
+                self._opener.join(self._filename)
+            )
+
+            if self._use_dirstate_v2:
+                if self.docket.uuid:
+                    # TODO: use mmap when possible
+                    data = self._opener.read(self.docket.data_filename())
+                else:
+                    data = b''
+                self._map = rustmod.DirstateMap.new_v2(
+                    data, self.docket.data_size, self.docket.tree_metadata
+                )
+                parents = self.docket.parents
+            else:
+                self._map, parents = rustmod.DirstateMap.new_v1(
+                    self._readdirstatefile()
+                )
+
+            if parents and not self._dirtyparents:
+                self.setparents(*parents)
+
+            self.__contains__ = self._map.__contains__
+            self.__getitem__ = self._map.__getitem__
+            self.get = self._map.get
+            return self._map
+
+        @property
+        def copymap(self):
+            return self._map.copymap()
+
+        def debug_iter(self, all):
+            """
+            Return an iterator of (filename, state, mode, size, mtime) tuples
+
+            `all`: also include with `state == b' '` dirstate tree nodes that
+            don't have an associated `DirstateItem`.
+
+            """
+            return self._map.debug_iter(all)
+
+        def clear(self):
+            self._map.clear()
+            self.setparents(
+                self._nodeconstants.nullid, self._nodeconstants.nullid
+            )
+            util.clearcachedproperty(self, b"_dirs")
+            util.clearcachedproperty(self, b"_alldirs")
+            util.clearcachedproperty(self, b"dirfoldmap")
+
+        def items(self):
+            return self._map.items()
+
+        # forward for python2,3 compat
+        iteritems = items
+
+        def keys(self):
+            return iter(self._map)
+
+        ### reading/setting parents
+
+        def setparents(self, p1, p2, fold_p2=False):
+            self._parents = (p1, p2)
+            self._dirtyparents = True
+            copies = {}
+            if fold_p2:
+                # Collect into an intermediate list to avoid a `RuntimeError`
+                # exception due to mutation during iteration.
+                # TODO: move this the whole loop to Rust where `iter_mut`
+                # enables in-place mutation of elements of a collection while
+                # iterating it, without mutating the collection itself.
+                candidatefiles = [
+                    (f, s)
+                    for f, s in self._map.items()
+                    if s.merged or s.from_p2
+                ]
+                for f, s in candidatefiles:
+                    # Discard "merged" markers when moving away from a merge state
+                    if s.merged:
+                        source = self.copymap.get(f)
+                        if source:
+                            copies[f] = source
+                        self.reset_state(
+                            f,
+                            wc_tracked=True,
+                            p1_tracked=True,
+                            possibly_dirty=True,
+                        )
+                    # Also fix up otherparent markers
+                    elif s.from_p2:
+                        source = self.copymap.get(f)
+                        if source:
+                            copies[f] = source
+                        self.reset_state(
+                            f,
+                            p1_tracked=False,
+                            wc_tracked=True,
+                        )
+            return copies
+
+        def parents(self):
+            if not self._parents:
+                if self._use_dirstate_v2:
+                    self._parents = self.docket.parents
+                else:
+                    read_len = self._nodelen * 2
+                    st = self._readdirstatefile(read_len)
+                    l = len(st)
+                    if l == read_len:
+                        self._parents = (
+                            st[: self._nodelen],
+                            st[self._nodelen : 2 * self._nodelen],
+                        )
+                    elif l == 0:
+                        self._parents = (
+                            self._nodeconstants.nullid,
+                            self._nodeconstants.nullid,
+                        )
+                    else:
+                        raise error.Abort(
+                            _(b'working directory state appears damaged!')
+                        )
+
+            return self._parents
+
+        ### disk interaction
+
+        @propertycache
+        def identity(self):
+            self._map
+            return self.identity
+
+        def write(self, tr, st, now):
+            if not self._use_dirstate_v2:
+                p1, p2 = self.parents()
+                packed = self._map.write_v1(p1, p2, now)
+                st.write(packed)
+                st.close()
+                self._dirtyparents = False
+                return
+
+            # We can only append to an existing data file if there is one
+            can_append = self.docket.uuid is not None
+            packed, meta, append = self._map.write_v2(now, can_append)
+            if append:
+                docket = self.docket
+                data_filename = docket.data_filename()
+                if tr:
+                    tr.add(data_filename, docket.data_size)
+                with self._opener(data_filename, b'r+b') as fp:
+                    fp.seek(docket.data_size)
+                    assert fp.tell() == docket.data_size
+                    written = fp.write(packed)
+                    if written is not None:  # py2 may return None
+                        assert written == len(packed), (written, len(packed))
+                docket.data_size += len(packed)
+                docket.parents = self.parents()
+                docket.tree_metadata = meta
+                st.write(docket.serialize())
+                st.close()
+            else:
+                old_docket = self.docket
+                new_docket = docketmod.DirstateDocket.with_new_uuid(
+                    self.parents(), len(packed), meta
+                )
+                data_filename = new_docket.data_filename()
+                if tr:
+                    tr.add(data_filename, 0)
+                self._opener.write(data_filename, packed)
+                # Write the new docket after the new data file has been
+                # written. Because `st` was opened with `atomictemp=True`,
+                # the actual `.hg/dirstate` file is only affected on close.
+                st.write(new_docket.serialize())
+                st.close()
+                # Remove the old data file after the new docket pointing to
+                # the new data file was written.
+                if old_docket.uuid:
+                    data_filename = old_docket.data_filename()
+                    unlink = lambda _tr=None: self._opener.unlink(data_filename)
+                    if tr:
+                        category = b"dirstate-v2-clean-" + old_docket.uuid
+                        tr.addpostclose(category, unlink)
+                    else:
+                        unlink()
+                self._docket = new_docket
+            # Reload from the newly-written file
+            util.clearcachedproperty(self, b"_map")
+            self._dirtyparents = False
+
+        def _opendirstatefile(self):
+            fp, mode = txnutil.trypending(
+                self._root, self._opener, self._filename
+            )
+            if self._pendingmode is not None and self._pendingmode != mode:
+                fp.close()
+                raise error.Abort(
+                    _(b'working directory state may be changed parallelly')
+                )
+            self._pendingmode = mode
+            return fp
+
+        def _readdirstatefile(self, size=-1):
+            try:
+                with self._opendirstatefile() as fp:
+                    return fp.read(size)
+            except IOError as err:
+                if err.errno != errno.ENOENT:
+                    raise
+            # File doesn't exist, so the current state is empty
+            return b''
+
+        ### code related to maintaining and accessing "extra" property
+        # (e.g. "has_dir")
+
+        @propertycache
+        def filefoldmap(self):
+            """Returns a dictionary mapping normalized case paths to their
+            non-normalized versions.
+            """
+            return self._map.filefoldmapasdict()
+
+        def hastrackeddir(self, d):
+            return self._map.hastrackeddir(d)
+
+        def hasdir(self, d):
+            return self._map.hasdir(d)
+
+        @propertycache
+        def dirfoldmap(self):
+            f = {}
+            normcase = util.normcase
+            for name in self._map.tracked_dirs():
+                f[normcase(name)] = name
+            return f
+
+        ### code related to manipulation of entries and copy-sources
+
+        def set_possibly_dirty(self, filename):
+            """record that the current state of the file on disk is unknown"""
+            entry = self[filename]
+            entry.set_possibly_dirty()
+            self._map.set_dirstate_item(filename, entry)
+
+        def set_clean(self, filename, mode, size, mtime):
+            """mark a file as back to a clean state"""
+            entry = self[filename]
+            mtime = mtime & rangemask
+            size = size & rangemask
+            entry.set_clean(mode, size, mtime)
+            self._map.set_dirstate_item(filename, entry)
+            self._map.copymap().pop(filename, None)
+
+        def __setitem__(self, key, value):
+            assert isinstance(value, DirstateItem)
+            self._map.set_dirstate_item(key, value)
 
         def reset_state(
             self,
@@ -631,270 +883,41 @@ if rustmod is not None:
             self._map.removefile(f, in_merge=True)
             return True
 
+        ### Legacy method we need to get rid of
+
+        def addfile(
+            self,
+            f,
+            mode=0,
+            size=None,
+            mtime=None,
+            added=False,
+            merged=False,
+            from_p2=False,
+            possibly_dirty=False,
+        ):
+            if added:
+                assert not possibly_dirty
+                assert not from_p2
+                item = DirstateItem.new_added()
+            elif merged:
+                assert not possibly_dirty
+                assert not from_p2
+                item = DirstateItem.new_merged()
+            elif from_p2:
+                assert not possibly_dirty
+                item = DirstateItem.new_from_p2()
+            elif possibly_dirty:
+                item = DirstateItem.new_possibly_dirty()
+            else:
+                assert size is not None
+                assert mtime is not None
+                size = size & rangemask
+                mtime = mtime & rangemask
+                item = DirstateItem.new_normal(mode, size, mtime)
+            self._map.addfile(f, item)
+            if added:
+                self.copymap.pop(f, None)
+
         def removefile(self, *args, **kwargs):
             return self._map.removefile(*args, **kwargs)
-
-        @property
-        def copymap(self):
-            return self._map.copymap()
-
-        def debug_iter(self, all):
-            """
-            Return an iterator of (filename, state, mode, size, mtime) tuples
-
-            `all`: also include with `state == b' '` dirstate tree nodes that
-            don't have an associated `DirstateItem`.
-
-            """
-            return self._map.debug_iter(all)
-
-        def clear(self):
-            self._map.clear()
-            self.setparents(
-                self._nodeconstants.nullid, self._nodeconstants.nullid
-            )
-            util.clearcachedproperty(self, b"_dirs")
-            util.clearcachedproperty(self, b"_alldirs")
-            util.clearcachedproperty(self, b"dirfoldmap")
-
-        def items(self):
-            return self._map.items()
-
-        def keys(self):
-            return iter(self._map)
-
-        # forward for python2,3 compat
-        iteritems = items
-
-        def _opendirstatefile(self):
-            fp, mode = txnutil.trypending(
-                self._root, self._opener, self._filename
-            )
-            if self._pendingmode is not None and self._pendingmode != mode:
-                fp.close()
-                raise error.Abort(
-                    _(b'working directory state may be changed parallelly')
-                )
-            self._pendingmode = mode
-            return fp
-
-        def _readdirstatefile(self, size=-1):
-            try:
-                with self._opendirstatefile() as fp:
-                    return fp.read(size)
-            except IOError as err:
-                if err.errno != errno.ENOENT:
-                    raise
-            # File doesn't exist, so the current state is empty
-            return b''
-
-        def setparents(self, p1, p2, fold_p2=False):
-            self._parents = (p1, p2)
-            self._dirtyparents = True
-            copies = {}
-            if fold_p2:
-                # Collect into an intermediate list to avoid a `RuntimeError`
-                # exception due to mutation during iteration.
-                # TODO: move this the whole loop to Rust where `iter_mut`
-                # enables in-place mutation of elements of a collection while
-                # iterating it, without mutating the collection itself.
-                candidatefiles = [
-                    (f, s)
-                    for f, s in self._map.items()
-                    if s.merged or s.from_p2
-                ]
-                for f, s in candidatefiles:
-                    # Discard "merged" markers when moving away from a merge state
-                    if s.merged:
-                        source = self.copymap.get(f)
-                        if source:
-                            copies[f] = source
-                        self.reset_state(
-                            f,
-                            wc_tracked=True,
-                            p1_tracked=True,
-                            possibly_dirty=True,
-                        )
-                    # Also fix up otherparent markers
-                    elif s.from_p2:
-                        source = self.copymap.get(f)
-                        if source:
-                            copies[f] = source
-                        self.reset_state(
-                            f,
-                            p1_tracked=False,
-                            wc_tracked=True,
-                        )
-            return copies
-
-        def parents(self):
-            if not self._parents:
-                if self._use_dirstate_v2:
-                    self._parents = self.docket.parents
-                else:
-                    read_len = self._nodelen * 2
-                    st = self._readdirstatefile(read_len)
-                    l = len(st)
-                    if l == read_len:
-                        self._parents = (
-                            st[: self._nodelen],
-                            st[self._nodelen : 2 * self._nodelen],
-                        )
-                    elif l == 0:
-                        self._parents = (
-                            self._nodeconstants.nullid,
-                            self._nodeconstants.nullid,
-                        )
-                    else:
-                        raise error.Abort(
-                            _(b'working directory state appears damaged!')
-                        )
-
-            return self._parents
-
-        @property
-        def docket(self):
-            if not self._docket:
-                if not self._use_dirstate_v2:
-                    raise error.ProgrammingError(
-                        b'dirstate only has a docket in v2 format'
-                    )
-                self._docket = docketmod.DirstateDocket.parse(
-                    self._readdirstatefile(), self._nodeconstants
-                )
-            return self._docket
-
-        @propertycache
-        def _map(self):
-            """
-            Fills the Dirstatemap when called.
-            """
-            # ignore HG_PENDING because identity is used only for writing
-            self.identity = util.filestat.frompath(
-                self._opener.join(self._filename)
-            )
-
-            if self._use_dirstate_v2:
-                if self.docket.uuid:
-                    # TODO: use mmap when possible
-                    data = self._opener.read(self.docket.data_filename())
-                else:
-                    data = b''
-                self._map = rustmod.DirstateMap.new_v2(
-                    data, self.docket.data_size, self.docket.tree_metadata
-                )
-                parents = self.docket.parents
-            else:
-                self._map, parents = rustmod.DirstateMap.new_v1(
-                    self._readdirstatefile()
-                )
-
-            if parents and not self._dirtyparents:
-                self.setparents(*parents)
-
-            self.__contains__ = self._map.__contains__
-            self.__getitem__ = self._map.__getitem__
-            self.get = self._map.get
-            return self._map
-
-        def write(self, tr, st, now):
-            if not self._use_dirstate_v2:
-                p1, p2 = self.parents()
-                packed = self._map.write_v1(p1, p2, now)
-                st.write(packed)
-                st.close()
-                self._dirtyparents = False
-                return
-
-            # We can only append to an existing data file if there is one
-            can_append = self.docket.uuid is not None
-            packed, meta, append = self._map.write_v2(now, can_append)
-            if append:
-                docket = self.docket
-                data_filename = docket.data_filename()
-                if tr:
-                    tr.add(data_filename, docket.data_size)
-                with self._opener(data_filename, b'r+b') as fp:
-                    fp.seek(docket.data_size)
-                    assert fp.tell() == docket.data_size
-                    written = fp.write(packed)
-                    if written is not None:  # py2 may return None
-                        assert written == len(packed), (written, len(packed))
-                docket.data_size += len(packed)
-                docket.parents = self.parents()
-                docket.tree_metadata = meta
-                st.write(docket.serialize())
-                st.close()
-            else:
-                old_docket = self.docket
-                new_docket = docketmod.DirstateDocket.with_new_uuid(
-                    self.parents(), len(packed), meta
-                )
-                data_filename = new_docket.data_filename()
-                if tr:
-                    tr.add(data_filename, 0)
-                self._opener.write(data_filename, packed)
-                # Write the new docket after the new data file has been
-                # written. Because `st` was opened with `atomictemp=True`,
-                # the actual `.hg/dirstate` file is only affected on close.
-                st.write(new_docket.serialize())
-                st.close()
-                # Remove the old data file after the new docket pointing to
-                # the new data file was written.
-                if old_docket.uuid:
-                    data_filename = old_docket.data_filename()
-                    unlink = lambda _tr=None: self._opener.unlink(data_filename)
-                    if tr:
-                        category = b"dirstate-v2-clean-" + old_docket.uuid
-                        tr.addpostclose(category, unlink)
-                    else:
-                        unlink()
-                self._docket = new_docket
-            # Reload from the newly-written file
-            util.clearcachedproperty(self, b"_map")
-            self._dirtyparents = False
-
-        @propertycache
-        def filefoldmap(self):
-            """Returns a dictionary mapping normalized case paths to their
-            non-normalized versions.
-            """
-            return self._map.filefoldmapasdict()
-
-        def hastrackeddir(self, d):
-            return self._map.hastrackeddir(d)
-
-        def hasdir(self, d):
-            return self._map.hasdir(d)
-
-        @propertycache
-        def identity(self):
-            self._map
-            return self.identity
-
-        @propertycache
-        def dirfoldmap(self):
-            f = {}
-            normcase = util.normcase
-            for name in self._map.tracked_dirs():
-                f[normcase(name)] = name
-            return f
-
-        def set_possibly_dirty(self, filename):
-            """record that the current state of the file on disk is unknown"""
-            entry = self[filename]
-            entry.set_possibly_dirty()
-            self._map.set_dirstate_item(filename, entry)
-
-        def set_clean(self, filename, mode, size, mtime):
-            """mark a file as back to a clean state"""
-            entry = self[filename]
-            mtime = mtime & rangemask
-            size = size & rangemask
-            entry.set_clean(mode, size, mtime)
-            self._map.set_dirstate_item(filename, entry)
-            self._map.copymap().pop(filename, None)
-
-        def __setitem__(self, key, value):
-            assert isinstance(value, DirstateItem)
-            self._map.set_dirstate_item(key, value)