cext-revlog: fixed __delitem__ for uninitialized nodetree...
Georges Racinet
r44306:49fa0b31 default
@@ -1,3069 +1,3072 b''
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #define PY_SSIZE_T_CLEAN
11 11 #include <Python.h>
12 12 #include <assert.h>
13 13 #include <ctype.h>
14 14 #include <limits.h>
15 15 #include <stddef.h>
16 16 #include <stdlib.h>
17 17 #include <string.h>
18 18
19 19 #include "bitmanipulation.h"
20 20 #include "charencode.h"
21 21 #include "revlog.h"
22 22 #include "util.h"
23 23
24 24 #ifdef IS_PY3K
25 25 /* The mapping of Python types is meant to be temporary to get Python
26 26 * 3 to compile. We should remove this once Python 3 is fully
27 27 * supported and proper types are used in the extensions themselves. */
28 28 #define PyInt_Check PyLong_Check
29 29 #define PyInt_FromLong PyLong_FromLong
30 30 #define PyInt_FromSsize_t PyLong_FromSsize_t
31 31 #define PyInt_AsLong PyLong_AsLong
32 32 #endif
33 33
34 34 typedef struct indexObjectStruct indexObject;
35 35
36 36 typedef struct {
37 37 int children[16];
38 38 } nodetreenode;
39 39
40 40 /*
41 41 * A base-16 trie for fast node->rev mapping.
42 42 *
43 43 * Positive value is index of the next node in the trie
44 44 * Negative value is a leaf: -(rev + 2)
45 45 * Zero is empty
46 46 */
47 47 typedef struct {
48 48 indexObject *index;
49 49 nodetreenode *nodes;
50 50 unsigned length; /* # nodes in use */
51 51 unsigned capacity; /* # nodes allocated */
52 52 int depth; /* maximum depth of tree */
53 53 int splits; /* # splits performed */
54 54 } nodetree;
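/*
 * Editorial note, not part of the original file: a quick sketch of the leaf
 * encoding described above. A leaf for revision r is stored as -(r + 2), so
 * nullrev (-1) encodes to -1, rev 0 to -2, and 0 stays free to mean "empty".
 * The helper names below are hypothetical, shown only to illustrate the
 * arithmetic.
 */
#if 0 /* illustration only */
static inline int nt_leaf_encode(int rev) { return -rev - 2; }
static inline int nt_leaf_decode(int v)   { return -(v + 2); }
#endif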
55 55
56 56 typedef struct {
57 57 PyObject_HEAD /* ; */
58 58 nodetree nt;
59 59 } nodetreeObject;
60 60
61 61 /*
62 62 * This class has two behaviors.
63 63 *
64 64 * When used in a list-like way (with integer keys), we decode an
65 65 * entry in a RevlogNG index file on demand. We have limited support for
66 66 * integer-keyed insert and delete, only at elements right before the
67 67 * end.
68 68 *
69 69 * With string keys, we lazily perform a reverse mapping from node to
70 70 * rev, using a base-16 trie.
71 71 */
72 72 struct indexObjectStruct {
73 73 PyObject_HEAD
74 74 /* Type-specific fields go here. */
75 75 PyObject *data; /* raw bytes of index */
76 76 Py_buffer buf; /* buffer of data */
77 77 PyObject **cache; /* cached tuples */
78 78 const char **offsets; /* populated on demand */
79 79 Py_ssize_t raw_length; /* original number of elements */
80 80 Py_ssize_t length; /* current number of elements */
81 81 PyObject *added; /* populated on demand */
82 82 PyObject *headrevs; /* cache, invalidated on changes */
83 83 PyObject *filteredrevs; /* filtered revs set */
84 84 nodetree nt; /* base-16 trie */
85 85 int ntinitialized; /* 0 or 1 */
86 86 int ntrev; /* last rev scanned */
87 87 int ntlookups; /* # lookups */
88 88 int ntmisses; /* # lookups that miss the cache */
89 89 int inlined;
90 90 };
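/*
 * Editorial note, not part of the original file: illustration of the two
 * behaviors described above, as seen from Python. Integer keys decode an
 * index entry on demand; 20-byte node keys go through the base-16 trie to
 * recover the revision number:
 *
 *     index[5]          ->  8-tuple describing rev 5
 *     index[node_bytes] ->  integer rev for that node (a missing node raises
 *                           RevlogError, see index_getitem below)
 */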
91 91
92 92 static Py_ssize_t index_length(const indexObject *self)
93 93 {
94 94 if (self->added == NULL)
95 95 return self->length;
96 96 return self->length + PyList_GET_SIZE(self->added);
97 97 }
98 98
99 99 static PyObject *nullentry = NULL;
100 100 static const char nullid[20] = {0};
101 101 static const Py_ssize_t nullrev = -1;
102 102
103 103 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
104 104
105 105 #if LONG_MAX == 0x7fffffffL
106 106 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
107 107 #else
108 108 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
109 109 #endif
110 110
111 111 /* A RevlogNG v1 index entry is 64 bytes long. */
112 112 static const long v1_hdrsize = 64;
113 113
114 114 static void raise_revlog_error(void)
115 115 {
116 116 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
117 117
118 118 mod = PyImport_ImportModule("mercurial.error");
119 119 if (mod == NULL) {
120 120 goto cleanup;
121 121 }
122 122
123 123 dict = PyModule_GetDict(mod);
124 124 if (dict == NULL) {
125 125 goto cleanup;
126 126 }
127 127 Py_INCREF(dict);
128 128
129 129 errclass = PyDict_GetItemString(dict, "RevlogError");
130 130 if (errclass == NULL) {
131 131 PyErr_SetString(PyExc_SystemError,
132 132 "could not find RevlogError");
133 133 goto cleanup;
134 134 }
135 135
136 136 /* value of exception is ignored by callers */
137 137 PyErr_SetString(errclass, "RevlogError");
138 138
139 139 cleanup:
140 140 Py_XDECREF(dict);
141 141 Py_XDECREF(mod);
142 142 }
143 143
144 144 /*
145 145 * Return a pointer to the beginning of a RevlogNG record.
146 146 */
147 147 static const char *index_deref(indexObject *self, Py_ssize_t pos)
148 148 {
149 149 if (self->inlined && pos > 0) {
150 150 if (self->offsets == NULL) {
151 151 self->offsets = PyMem_Malloc(self->raw_length *
152 152 sizeof(*self->offsets));
153 153 if (self->offsets == NULL)
154 154 return (const char *)PyErr_NoMemory();
155 155 inline_scan(self, self->offsets);
156 156 }
157 157 return self->offsets[pos];
158 158 }
159 159
160 160 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
161 161 }
162 162
163 163 /*
164 164 * Get parents of the given rev.
165 165 *
166 166 * The specified rev must be valid and must not be nullrev. A returned
167 167 * parent revision may be nullrev, but is guaranteed to be in valid range.
168 168 */
169 169 static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps,
170 170 int maxrev)
171 171 {
172 172 if (rev >= self->length) {
173 173 long tmp;
174 174 PyObject *tuple =
175 175 PyList_GET_ITEM(self->added, rev - self->length);
176 176 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 5), &tmp)) {
177 177 return -1;
178 178 }
179 179 ps[0] = (int)tmp;
180 180 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 6), &tmp)) {
181 181 return -1;
182 182 }
183 183 ps[1] = (int)tmp;
184 184 } else {
185 185 const char *data = index_deref(self, rev);
186 186 ps[0] = getbe32(data + 24);
187 187 ps[1] = getbe32(data + 28);
188 188 }
189 189 /* If the index file is corrupted, ps[] may point to invalid revisions,
190 190 * so trusting them unconditionally risks a buffer overflow. */
191 191 if (ps[0] < -1 || ps[0] > maxrev || ps[1] < -1 || ps[1] > maxrev) {
192 192 PyErr_SetString(PyExc_ValueError, "parent out of range");
193 193 return -1;
194 194 }
195 195 return 0;
196 196 }
197 197
198 198 /*
199 199 * Get parents of the given rev.
200 200 *
201 201 * If the specified rev is out of range, IndexError will be raised. If the
202 202 * revlog entry is corrupted, ValueError may be raised.
203 203 *
204 204 * Returns 0 on success or -1 on failure.
205 205 */
206 206 int HgRevlogIndex_GetParents(PyObject *op, int rev, int *ps)
207 207 {
208 208 int tiprev;
209 209 if (!op || !HgRevlogIndex_Check(op) || !ps) {
210 210 PyErr_BadInternalCall();
211 211 return -1;
212 212 }
213 213 tiprev = (int)index_length((indexObject *)op) - 1;
214 214 if (rev < -1 || rev > tiprev) {
215 215 PyErr_Format(PyExc_IndexError, "rev out of range: %d", rev);
216 216 return -1;
217 217 } else if (rev == -1) {
218 218 ps[0] = ps[1] = -1;
219 219 return 0;
220 220 } else {
221 221 return index_get_parents((indexObject *)op, rev, ps, tiprev);
222 222 }
223 223 }
224 224
225 225 static inline int64_t index_get_start(indexObject *self, Py_ssize_t rev)
226 226 {
227 227 uint64_t offset;
228 228 if (rev == nullrev) {
229 229 return 0;
230 230 }
231 231 if (rev >= self->length) {
232 232 PyObject *tuple;
233 233 PyObject *pylong;
234 234 PY_LONG_LONG tmp;
235 235 tuple = PyList_GET_ITEM(self->added, rev - self->length);
236 236 pylong = PyTuple_GET_ITEM(tuple, 0);
237 237 tmp = PyLong_AsLongLong(pylong);
238 238 if (tmp == -1 && PyErr_Occurred()) {
239 239 return -1;
240 240 }
241 241 if (tmp < 0) {
242 242 PyErr_Format(PyExc_OverflowError,
243 243 "revlog entry size out of bound (%lld)",
244 244 (long long)tmp);
245 245 return -1;
246 246 }
247 247 offset = (uint64_t)tmp;
248 248 } else {
249 249 const char *data = index_deref(self, rev);
250 250 offset = getbe32(data + 4);
251 251 if (rev == 0) {
252 252 /* mask out version number for the first entry */
253 253 offset &= 0xFFFF;
254 254 } else {
255 255 uint32_t offset_high = getbe32(data);
256 256 offset |= ((uint64_t)offset_high) << 32;
257 257 }
258 258 }
259 259 return (int64_t)(offset >> 16);
260 260 }
261 261
262 262 static inline int index_get_length(indexObject *self, Py_ssize_t rev)
263 263 {
264 264 if (rev == nullrev) {
265 265 return 0;
266 266 }
267 267 if (rev >= self->length) {
268 268 PyObject *tuple;
269 269 PyObject *pylong;
270 270 long ret;
271 271 tuple = PyList_GET_ITEM(self->added, rev - self->length);
272 272 pylong = PyTuple_GET_ITEM(tuple, 1);
273 273 ret = PyInt_AsLong(pylong);
274 274 if (ret == -1 && PyErr_Occurred()) {
275 275 return -1;
276 276 }
277 277 if (ret < 0 || ret > (long)INT_MAX) {
278 278 PyErr_Format(PyExc_OverflowError,
279 279 "revlog entry size out of bound (%ld)",
280 280 ret);
281 281 return -1;
282 282 }
283 283 return (int)ret;
284 284 } else {
285 285 const char *data = index_deref(self, rev);
286 286 int tmp = (int)getbe32(data + 8);
287 287 if (tmp < 0) {
288 288 PyErr_Format(PyExc_OverflowError,
289 289 "revlog entry size out of bound (%d)",
290 290 tmp);
291 291 return -1;
292 292 }
293 293 return tmp;
294 294 }
295 295 }
296 296
297 297 /*
298 298 * RevlogNG format (all in big endian, data may be inlined):
299 299 * 6 bytes: offset
300 300 * 2 bytes: flags
301 301 * 4 bytes: compressed length
302 302 * 4 bytes: uncompressed length
303 303 * 4 bytes: base revision
304 304 * 4 bytes: link revision
305 305 * 4 bytes: parent 1 revision
306 306 * 4 bytes: parent 2 revision
307 307 * 32 bytes: nodeid (only 20 bytes used)
308 308 */
309 309 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
310 310 {
311 311 uint64_t offset_flags;
312 312 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
313 313 const char *c_node_id;
314 314 const char *data;
315 315 Py_ssize_t length = index_length(self);
316 316 PyObject *entry;
317 317
318 318 if (pos == nullrev) {
319 319 Py_INCREF(nullentry);
320 320 return nullentry;
321 321 }
322 322
323 323 if (pos < 0 || pos >= length) {
324 324 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
325 325 return NULL;
326 326 }
327 327
328 328 if (pos >= self->length) {
329 329 PyObject *obj;
330 330 obj = PyList_GET_ITEM(self->added, pos - self->length);
331 331 Py_INCREF(obj);
332 332 return obj;
333 333 }
334 334
335 335 if (self->cache) {
336 336 if (self->cache[pos]) {
337 337 Py_INCREF(self->cache[pos]);
338 338 return self->cache[pos];
339 339 }
340 340 } else {
341 341 self->cache = calloc(self->raw_length, sizeof(PyObject *));
342 342 if (self->cache == NULL)
343 343 return PyErr_NoMemory();
344 344 }
345 345
346 346 data = index_deref(self, pos);
347 347 if (data == NULL)
348 348 return NULL;
349 349
350 350 offset_flags = getbe32(data + 4);
351 351 if (pos == 0) /* mask out version number for the first entry */
352 352 offset_flags &= 0xFFFF;
353 353 else {
354 354 uint32_t offset_high = getbe32(data);
355 355 offset_flags |= ((uint64_t)offset_high) << 32;
356 356 }
357 357
358 358 comp_len = getbe32(data + 8);
359 359 uncomp_len = getbe32(data + 12);
360 360 base_rev = getbe32(data + 16);
361 361 link_rev = getbe32(data + 20);
362 362 parent_1 = getbe32(data + 24);
363 363 parent_2 = getbe32(data + 28);
364 364 c_node_id = data + 32;
365 365
366 366 entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
367 367 base_rev, link_rev, parent_1, parent_2, c_node_id,
368 368 (Py_ssize_t)20);
369 369
370 370 if (entry) {
371 371 PyObject_GC_UnTrack(entry);
372 372 Py_INCREF(entry);
373 373 }
374 374
375 375 self->cache[pos] = entry;
376 376
377 377 return entry;
378 378 }
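/*
 * Editorial note, not part of the original file: how the packed offset_flags
 * value read above maps onto the on-disk layout (6 bytes of offset followed
 * by 2 bytes of flags, see the format comment before index_get):
 *
 *     offset = offset_flags >> 16;     48-bit byte offset (cf. index_get_start)
 *     flags  = offset_flags & 0xFFFF;  16 flag bits
 *
 * For the first entry the offset field doubles as the revlog header, which is
 * why pos == 0 masks offset_flags with 0xFFFF ("mask out version number").
 */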
379 379
380 380 /*
381 381 * Return the 20-byte SHA of the node corresponding to the given rev.
382 382 */
383 383 static const char *index_node(indexObject *self, Py_ssize_t pos)
384 384 {
385 385 Py_ssize_t length = index_length(self);
386 386 const char *data;
387 387
388 388 if (pos == nullrev)
389 389 return nullid;
390 390
391 391 if (pos >= length)
392 392 return NULL;
393 393
394 394 if (pos >= self->length) {
395 395 PyObject *tuple, *str;
396 396 tuple = PyList_GET_ITEM(self->added, pos - self->length);
397 397 str = PyTuple_GetItem(tuple, 7);
398 398 return str ? PyBytes_AS_STRING(str) : NULL;
399 399 }
400 400
401 401 data = index_deref(self, pos);
402 402 return data ? data + 32 : NULL;
403 403 }
404 404
405 405 /*
406 406 * Return the 20-byte SHA of the node corresponding to the given rev. The
407 407 * rev is assumed to exist. If not, an exception is set.
408 408 */
409 409 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
410 410 {
411 411 const char *node = index_node(self, pos);
412 412 if (node == NULL) {
413 413 PyErr_Format(PyExc_IndexError, "could not access rev %d",
414 414 (int)pos);
415 415 }
416 416 return node;
417 417 }
418 418
419 419 static int nt_insert(nodetree *self, const char *node, int rev);
420 420
421 421 static int node_check(PyObject *obj, char **node)
422 422 {
423 423 Py_ssize_t nodelen;
424 424 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
425 425 return -1;
426 426 if (nodelen == 20)
427 427 return 0;
428 428 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
429 429 return -1;
430 430 }
431 431
432 432 static PyObject *index_append(indexObject *self, PyObject *obj)
433 433 {
434 434 char *node;
435 435 Py_ssize_t len;
436 436
437 437 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
438 438 PyErr_SetString(PyExc_TypeError, "8-tuple required");
439 439 return NULL;
440 440 }
441 441
442 442 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
443 443 return NULL;
444 444
445 445 len = index_length(self);
446 446
447 447 if (self->added == NULL) {
448 448 self->added = PyList_New(0);
449 449 if (self->added == NULL)
450 450 return NULL;
451 451 }
452 452
453 453 if (PyList_Append(self->added, obj) == -1)
454 454 return NULL;
455 455
456 456 if (self->ntinitialized)
457 457 nt_insert(&self->nt, node, (int)len);
458 458
459 459 Py_CLEAR(self->headrevs);
460 460 Py_RETURN_NONE;
461 461 }
462 462
463 463 static PyObject *index_stats(indexObject *self)
464 464 {
465 465 PyObject *obj = PyDict_New();
466 466 PyObject *s = NULL;
467 467 PyObject *t = NULL;
468 468
469 469 if (obj == NULL)
470 470 return NULL;
471 471
472 472 #define istat(__n, __d) \
473 473 do { \
474 474 s = PyBytes_FromString(__d); \
475 475 t = PyInt_FromSsize_t(self->__n); \
476 476 if (!s || !t) \
477 477 goto bail; \
478 478 if (PyDict_SetItem(obj, s, t) == -1) \
479 479 goto bail; \
480 480 Py_CLEAR(s); \
481 481 Py_CLEAR(t); \
482 482 } while (0)
483 483
484 484 if (self->added) {
485 485 Py_ssize_t len = PyList_GET_SIZE(self->added);
486 486 s = PyBytes_FromString("index entries added");
487 487 t = PyInt_FromSsize_t(len);
488 488 if (!s || !t)
489 489 goto bail;
490 490 if (PyDict_SetItem(obj, s, t) == -1)
491 491 goto bail;
492 492 Py_CLEAR(s);
493 493 Py_CLEAR(t);
494 494 }
495 495
496 496 if (self->raw_length != self->length)
497 497 istat(raw_length, "revs on disk");
498 498 istat(length, "revs in memory");
499 499 istat(ntlookups, "node trie lookups");
500 500 istat(ntmisses, "node trie misses");
501 501 istat(ntrev, "node trie last rev scanned");
502 502 if (self->ntinitialized) {
503 503 istat(nt.capacity, "node trie capacity");
504 504 istat(nt.depth, "node trie depth");
505 505 istat(nt.length, "node trie count");
506 506 istat(nt.splits, "node trie splits");
507 507 }
508 508
509 509 #undef istat
510 510
511 511 return obj;
512 512
513 513 bail:
514 514 Py_XDECREF(obj);
515 515 Py_XDECREF(s);
516 516 Py_XDECREF(t);
517 517 return NULL;
518 518 }
519 519
520 520 /*
521 521 * When we cache a list, we want to be sure the caller can't mutate
522 522 * the cached copy.
523 523 */
524 524 static PyObject *list_copy(PyObject *list)
525 525 {
526 526 Py_ssize_t len = PyList_GET_SIZE(list);
527 527 PyObject *newlist = PyList_New(len);
528 528 Py_ssize_t i;
529 529
530 530 if (newlist == NULL)
531 531 return NULL;
532 532
533 533 for (i = 0; i < len; i++) {
534 534 PyObject *obj = PyList_GET_ITEM(list, i);
535 535 Py_INCREF(obj);
536 536 PyList_SET_ITEM(newlist, i, obj);
537 537 }
538 538
539 539 return newlist;
540 540 }
541 541
542 542 static int check_filter(PyObject *filter, Py_ssize_t arg)
543 543 {
544 544 if (filter) {
545 545 PyObject *arglist, *result;
546 546 int isfiltered;
547 547
548 548 arglist = Py_BuildValue("(n)", arg);
549 549 if (!arglist) {
550 550 return -1;
551 551 }
552 552
553 553 result = PyEval_CallObject(filter, arglist);
554 554 Py_DECREF(arglist);
555 555 if (!result) {
556 556 return -1;
557 557 }
558 558
559 559 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
560 560 * same as this function, so we can just return it directly.*/
561 561 isfiltered = PyObject_IsTrue(result);
562 562 Py_DECREF(result);
563 563 return isfiltered;
564 564 } else {
565 565 return 0;
566 566 }
567 567 }
568 568
569 569 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
570 570 Py_ssize_t marker, char *phases)
571 571 {
572 572 PyObject *iter = NULL;
573 573 PyObject *iter_item = NULL;
574 574 Py_ssize_t min_idx = index_length(self) + 2;
575 575 long iter_item_long;
576 576
577 577 if (PyList_GET_SIZE(list) != 0) {
578 578 iter = PyObject_GetIter(list);
579 579 if (iter == NULL)
580 580 return -2;
581 581 while ((iter_item = PyIter_Next(iter))) {
582 582 if (!pylong_to_long(iter_item, &iter_item_long)) {
583 583 Py_DECREF(iter_item);
584 584 return -2;
585 585 }
586 586 Py_DECREF(iter_item);
587 587 if (iter_item_long < min_idx)
588 588 min_idx = iter_item_long;
589 589 phases[iter_item_long] = (char)marker;
590 590 }
591 591 Py_DECREF(iter);
592 592 }
593 593
594 594 return min_idx;
595 595 }
596 596
597 597 static inline void set_phase_from_parents(char *phases, int parent_1,
598 598 int parent_2, Py_ssize_t i)
599 599 {
600 600 if (parent_1 >= 0 && phases[parent_1] > phases[i])
601 601 phases[i] = phases[parent_1];
602 602 if (parent_2 >= 0 && phases[parent_2] > phases[i])
603 603 phases[i] = phases[parent_2];
604 604 }
605 605
606 606 static PyObject *reachableroots2(indexObject *self, PyObject *args)
607 607 {
608 608
609 609 /* Input */
610 610 long minroot;
611 611 PyObject *includepatharg = NULL;
612 612 int includepath = 0;
613 613 /* heads and roots are lists */
614 614 PyObject *heads = NULL;
615 615 PyObject *roots = NULL;
616 616 PyObject *reachable = NULL;
617 617
618 618 PyObject *val;
619 619 Py_ssize_t len = index_length(self);
620 620 long revnum;
621 621 Py_ssize_t k;
622 622 Py_ssize_t i;
623 623 Py_ssize_t l;
624 624 int r;
625 625 int parents[2];
626 626
627 627 /* Internal data structure:
628 628 * tovisit: array of length len+1 (all revs + nullrev), filled up to
629 629 * lentovisit
630 630 *
631 631 * revstates: array of length len+1 (all revs + nullrev) */
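	/* Editorial note, not part of the original file: revstates is indexed
	 * as revstates[rev + 1] throughout, so that nullrev (-1) maps to slot 0
	 * and regular revisions occupy slots 1..len. */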
632 632 int *tovisit = NULL;
633 633 long lentovisit = 0;
634 634 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
635 635 char *revstates = NULL;
636 636
637 637 /* Get arguments */
638 638 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
639 639 &PyList_Type, &roots, &PyBool_Type,
640 640 &includepatharg))
641 641 goto bail;
642 642
643 643 if (includepatharg == Py_True)
644 644 includepath = 1;
645 645
646 646 /* Initialize return set */
647 647 reachable = PyList_New(0);
648 648 if (reachable == NULL)
649 649 goto bail;
650 650
651 651 /* Initialize internal datastructures */
652 652 tovisit = (int *)malloc((len + 1) * sizeof(int));
653 653 if (tovisit == NULL) {
654 654 PyErr_NoMemory();
655 655 goto bail;
656 656 }
657 657
658 658 revstates = (char *)calloc(len + 1, 1);
659 659 if (revstates == NULL) {
660 660 PyErr_NoMemory();
661 661 goto bail;
662 662 }
663 663
664 664 l = PyList_GET_SIZE(roots);
665 665 for (i = 0; i < l; i++) {
666 666 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
667 667 if (revnum == -1 && PyErr_Occurred())
668 668 goto bail;
669 669 /* If root is out of range, e.g. wdir(), it must be unreachable
670 670 * from heads. So we can just ignore it. */
671 671 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
672 672 continue;
673 673 revstates[revnum + 1] |= RS_ROOT;
674 674 }
675 675
676 676 /* Populate tovisit with all the heads */
677 677 l = PyList_GET_SIZE(heads);
678 678 for (i = 0; i < l; i++) {
679 679 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
680 680 if (revnum == -1 && PyErr_Occurred())
681 681 goto bail;
682 682 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
683 683 PyErr_SetString(PyExc_IndexError, "head out of range");
684 684 goto bail;
685 685 }
686 686 if (!(revstates[revnum + 1] & RS_SEEN)) {
687 687 tovisit[lentovisit++] = (int)revnum;
688 688 revstates[revnum + 1] |= RS_SEEN;
689 689 }
690 690 }
691 691
692 692 /* Visit the tovisit list and find the reachable roots */
693 693 k = 0;
694 694 while (k < lentovisit) {
695 695 /* Add the node to reachable if it is a root*/
696 696 revnum = tovisit[k++];
697 697 if (revstates[revnum + 1] & RS_ROOT) {
698 698 revstates[revnum + 1] |= RS_REACHABLE;
699 699 val = PyInt_FromLong(revnum);
700 700 if (val == NULL)
701 701 goto bail;
702 702 r = PyList_Append(reachable, val);
703 703 Py_DECREF(val);
704 704 if (r < 0)
705 705 goto bail;
706 706 if (includepath == 0)
707 707 continue;
708 708 }
709 709
710 710 /* Add its parents to the list of nodes to visit */
711 711 if (revnum == nullrev)
712 712 continue;
713 713 r = index_get_parents(self, revnum, parents, (int)len - 1);
714 714 if (r < 0)
715 715 goto bail;
716 716 for (i = 0; i < 2; i++) {
717 717 if (!(revstates[parents[i] + 1] & RS_SEEN) &&
718 718 parents[i] >= minroot) {
719 719 tovisit[lentovisit++] = parents[i];
720 720 revstates[parents[i] + 1] |= RS_SEEN;
721 721 }
722 722 }
723 723 }
724 724
725 725 /* Find all the nodes in between the roots we found and the heads
726 726 * and add them to the reachable set */
727 727 if (includepath == 1) {
728 728 long minidx = minroot;
729 729 if (minidx < 0)
730 730 minidx = 0;
731 731 for (i = minidx; i < len; i++) {
732 732 if (!(revstates[i + 1] & RS_SEEN))
733 733 continue;
734 734 r = index_get_parents(self, i, parents, (int)len - 1);
735 735 /* Corrupted index file, error is set from
736 736 * index_get_parents */
737 737 if (r < 0)
738 738 goto bail;
739 739 if (((revstates[parents[0] + 1] |
740 740 revstates[parents[1] + 1]) &
741 741 RS_REACHABLE) &&
742 742 !(revstates[i + 1] & RS_REACHABLE)) {
743 743 revstates[i + 1] |= RS_REACHABLE;
744 744 val = PyInt_FromSsize_t(i);
745 745 if (val == NULL)
746 746 goto bail;
747 747 r = PyList_Append(reachable, val);
748 748 Py_DECREF(val);
749 749 if (r < 0)
750 750 goto bail;
751 751 }
752 752 }
753 753 }
754 754
755 755 free(revstates);
756 756 free(tovisit);
757 757 return reachable;
758 758 bail:
759 759 Py_XDECREF(reachable);
760 760 free(revstates);
761 761 free(tovisit);
762 762 return NULL;
763 763 }
764 764
765 765 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
766 766 {
767 767 PyObject *roots = Py_None;
768 768 PyObject *ret = NULL;
769 769 PyObject *phasessize = NULL;
770 770 PyObject *phaseroots = NULL;
771 771 PyObject *phaseset = NULL;
772 772 PyObject *phasessetlist = NULL;
773 773 PyObject *rev = NULL;
774 774 Py_ssize_t len = index_length(self);
775 775 Py_ssize_t numphase = 0;
776 776 Py_ssize_t minrevallphases = 0;
777 777 Py_ssize_t minrevphase = 0;
778 778 Py_ssize_t i = 0;
779 779 char *phases = NULL;
780 780 long phase;
781 781
782 782 if (!PyArg_ParseTuple(args, "O", &roots))
783 783 goto done;
784 784 if (roots == NULL || !PyList_Check(roots)) {
785 785 PyErr_SetString(PyExc_TypeError, "roots must be a list");
786 786 goto done;
787 787 }
788 788
789 789 phases = calloc(
790 790 len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
791 791 if (phases == NULL) {
792 792 PyErr_NoMemory();
793 793 goto done;
794 794 }
795 795 /* Put the phase information of all the roots in phases */
796 796 numphase = PyList_GET_SIZE(roots) + 1;
797 797 minrevallphases = len + 1;
798 798 phasessetlist = PyList_New(numphase);
799 799 if (phasessetlist == NULL)
800 800 goto done;
801 801
802 802 PyList_SET_ITEM(phasessetlist, 0, Py_None);
803 803 Py_INCREF(Py_None);
804 804
805 805 for (i = 0; i < numphase - 1; i++) {
806 806 phaseroots = PyList_GET_ITEM(roots, i);
807 807 phaseset = PySet_New(NULL);
808 808 if (phaseset == NULL)
809 809 goto release;
810 810 PyList_SET_ITEM(phasessetlist, i + 1, phaseset);
811 811 if (!PyList_Check(phaseroots)) {
812 812 PyErr_SetString(PyExc_TypeError,
813 813 "roots item must be a list");
814 814 goto release;
815 815 }
816 816 minrevphase =
817 817 add_roots_get_min(self, phaseroots, i + 1, phases);
818 818 if (minrevphase == -2) /* Error from add_roots_get_min */
819 819 goto release;
820 820 minrevallphases = MIN(minrevallphases, minrevphase);
821 821 }
822 822 /* Propagate the phase information from the roots to the revs */
823 823 if (minrevallphases != -1) {
824 824 int parents[2];
825 825 for (i = minrevallphases; i < len; i++) {
826 826 if (index_get_parents(self, i, parents, (int)len - 1) <
827 827 0)
828 828 goto release;
829 829 set_phase_from_parents(phases, parents[0], parents[1],
830 830 i);
831 831 }
832 832 }
833 833 /* Transform phase list to a python list */
834 834 phasessize = PyInt_FromSsize_t(len);
835 835 if (phasessize == NULL)
836 836 goto release;
837 837 for (i = 0; i < len; i++) {
838 838 phase = phases[i];
839 839 /* We only store the sets of phases for non-public phases; the
840 840 * public phase is computed as a difference */
841 841 if (phase != 0) {
842 842 phaseset = PyList_GET_ITEM(phasessetlist, phase);
843 843 rev = PyInt_FromSsize_t(i);
844 844 if (rev == NULL)
845 845 goto release;
846 846 PySet_Add(phaseset, rev);
847 847 Py_XDECREF(rev);
848 848 }
849 849 }
850 850 ret = PyTuple_Pack(2, phasessize, phasessetlist);
851 851
852 852 release:
853 853 Py_XDECREF(phasessize);
854 854 Py_XDECREF(phasessetlist);
855 855 done:
856 856 free(phases);
857 857 return ret;
858 858 }
859 859
860 860 static PyObject *index_headrevs(indexObject *self, PyObject *args)
861 861 {
862 862 Py_ssize_t i, j, len;
863 863 char *nothead = NULL;
864 864 PyObject *heads = NULL;
865 865 PyObject *filter = NULL;
866 866 PyObject *filteredrevs = Py_None;
867 867
868 868 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
869 869 return NULL;
870 870 }
871 871
872 872 if (self->headrevs && filteredrevs == self->filteredrevs)
873 873 return list_copy(self->headrevs);
874 874
875 875 Py_DECREF(self->filteredrevs);
876 876 self->filteredrevs = filteredrevs;
877 877 Py_INCREF(filteredrevs);
878 878
879 879 if (filteredrevs != Py_None) {
880 880 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
881 881 if (!filter) {
882 882 PyErr_SetString(
883 883 PyExc_TypeError,
884 884 "filteredrevs has no attribute __contains__");
885 885 goto bail;
886 886 }
887 887 }
888 888
889 889 len = index_length(self);
890 890 heads = PyList_New(0);
891 891 if (heads == NULL)
892 892 goto bail;
893 893 if (len == 0) {
894 894 PyObject *nullid = PyInt_FromLong(-1);
895 895 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
896 896 Py_XDECREF(nullid);
897 897 goto bail;
898 898 }
899 899 goto done;
900 900 }
901 901
902 902 nothead = calloc(len, 1);
903 903 if (nothead == NULL) {
904 904 PyErr_NoMemory();
905 905 goto bail;
906 906 }
907 907
908 908 for (i = len - 1; i >= 0; i--) {
909 909 int isfiltered;
910 910 int parents[2];
911 911
912 912 /* If nothead[i] == 1, it means we've seen an unfiltered child
913 913 * of this node already, and therefore this node is not
914 914 * filtered. So we can skip the expensive check_filter step.
915 915 */
916 916 if (nothead[i] != 1) {
917 917 isfiltered = check_filter(filter, i);
918 918 if (isfiltered == -1) {
919 919 PyErr_SetString(PyExc_TypeError,
920 920 "unable to check filter");
921 921 goto bail;
922 922 }
923 923
924 924 if (isfiltered) {
925 925 nothead[i] = 1;
926 926 continue;
927 927 }
928 928 }
929 929
930 930 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
931 931 goto bail;
932 932 for (j = 0; j < 2; j++) {
933 933 if (parents[j] >= 0)
934 934 nothead[parents[j]] = 1;
935 935 }
936 936 }
937 937
938 938 for (i = 0; i < len; i++) {
939 939 PyObject *head;
940 940
941 941 if (nothead[i])
942 942 continue;
943 943 head = PyInt_FromSsize_t(i);
944 944 if (head == NULL || PyList_Append(heads, head) == -1) {
945 945 Py_XDECREF(head);
946 946 goto bail;
947 947 }
948 948 }
949 949
950 950 done:
951 951 self->headrevs = heads;
952 952 Py_XDECREF(filter);
953 953 free(nothead);
954 954 return list_copy(self->headrevs);
955 955 bail:
956 956 Py_XDECREF(filter);
957 957 Py_XDECREF(heads);
958 958 free(nothead);
959 959 return NULL;
960 960 }
961 961
962 962 /**
963 963 * Obtain the base revision index entry.
964 964 *
965 965 * Callers must ensure that rev >= 0 or illegal memory access may occur.
966 966 */
967 967 static inline int index_baserev(indexObject *self, int rev)
968 968 {
969 969 const char *data;
970 970 int result;
971 971
972 972 if (rev >= self->length) {
973 973 PyObject *tuple =
974 974 PyList_GET_ITEM(self->added, rev - self->length);
975 975 long ret;
976 976 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 3), &ret)) {
977 977 return -2;
978 978 }
979 979 result = (int)ret;
980 980 } else {
981 981 data = index_deref(self, rev);
982 982 if (data == NULL) {
983 983 return -2;
984 984 }
985 985
986 986 result = getbe32(data + 16);
987 987 }
988 988 if (result > rev) {
989 989 PyErr_Format(
990 990 PyExc_ValueError,
991 991 "corrupted revlog, revision base above revision: %d, %d",
992 992 rev, result);
993 993 return -2;
994 994 }
995 995 if (result < -1) {
996 996 PyErr_Format(
997 997 PyExc_ValueError,
998 998 "corrupted revlog, revision base out of range: %d, %d", rev,
999 999 result);
1000 1000 return -2;
1001 1001 }
1002 1002 return result;
1003 1003 }
1004 1004
1005 1005 /**
1006 1006 * Find if a revision is a snapshot or not
1007 1007 *
1008 1008 * Only relevant for sparse-revlog case.
1009 1009 * Callers must ensure that rev is in a valid range.
1010 1010 */
1011 1011 static int index_issnapshotrev(indexObject *self, Py_ssize_t rev)
1012 1012 {
1013 1013 int ps[2];
1014 1014 Py_ssize_t base;
1015 1015 while (rev >= 0) {
1016 1016 base = (Py_ssize_t)index_baserev(self, rev);
1017 1017 if (base == rev) {
1018 1018 base = -1;
1019 1019 }
1020 1020 if (base == -2) {
1021 1021 assert(PyErr_Occurred());
1022 1022 return -1;
1023 1023 }
1024 1024 if (base == -1) {
1025 1025 return 1;
1026 1026 }
1027 1027 if (index_get_parents(self, rev, ps, (int)rev) < 0) {
1028 1028 assert(PyErr_Occurred());
1029 1029 return -1;
1030 1030 };
1031 1031 if (base == ps[0] || base == ps[1]) {
1032 1032 return 0;
1033 1033 }
1034 1034 rev = base;
1035 1035 }
1036 1036 return rev == -1;
1037 1037 }
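/*
 * Editorial sketch, not part of the original file: the loop above walks the
 * chain of delta bases for "rev". If it reaches a full text (base == -1, or
 * base == rev which revlog treats as "no base") the revision is a snapshot;
 * if any link in the chain is a delta against one of that revision's own
 * parents, it is an ordinary delta and not a snapshot.
 */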
1038 1038
1039 1039 static PyObject *index_issnapshot(indexObject *self, PyObject *value)
1040 1040 {
1041 1041 long rev;
1042 1042 int issnap;
1043 1043 Py_ssize_t length = index_length(self);
1044 1044
1045 1045 if (!pylong_to_long(value, &rev)) {
1046 1046 return NULL;
1047 1047 }
1048 1048 if (rev < -1 || rev >= length) {
1049 1049 PyErr_Format(PyExc_ValueError, "revlog index out of range: %ld",
1050 1050 rev);
1051 1051 return NULL;
1052 1052 };
1053 1053 issnap = index_issnapshotrev(self, (Py_ssize_t)rev);
1054 1054 if (issnap < 0) {
1055 1055 return NULL;
1056 1056 };
1057 1057 return PyBool_FromLong((long)issnap);
1058 1058 }
1059 1059
1060 1060 static PyObject *index_findsnapshots(indexObject *self, PyObject *args)
1061 1061 {
1062 1062 Py_ssize_t start_rev;
1063 1063 PyObject *cache;
1064 1064 Py_ssize_t base;
1065 1065 Py_ssize_t rev;
1066 1066 PyObject *key = NULL;
1067 1067 PyObject *value = NULL;
1068 1068 const Py_ssize_t length = index_length(self);
1069 1069 if (!PyArg_ParseTuple(args, "O!n", &PyDict_Type, &cache, &start_rev)) {
1070 1070 return NULL;
1071 1071 }
1072 1072 for (rev = start_rev; rev < length; rev++) {
1073 1073 int issnap;
1074 1074 PyObject *allvalues = NULL;
1075 1075 issnap = index_issnapshotrev(self, rev);
1076 1076 if (issnap < 0) {
1077 1077 goto bail;
1078 1078 }
1079 1079 if (issnap == 0) {
1080 1080 continue;
1081 1081 }
1082 1082 base = (Py_ssize_t)index_baserev(self, rev);
1083 1083 if (base == rev) {
1084 1084 base = -1;
1085 1085 }
1086 1086 if (base == -2) {
1087 1087 assert(PyErr_Occurred());
1088 1088 goto bail;
1089 1089 }
1090 1090 key = PyInt_FromSsize_t(base);
1091 1091 allvalues = PyDict_GetItem(cache, key);
1092 1092 if (allvalues == NULL && PyErr_Occurred()) {
1093 1093 goto bail;
1094 1094 }
1095 1095 if (allvalues == NULL) {
1096 1096 int r;
1097 1097 allvalues = PyList_New(0);
1098 1098 if (!allvalues) {
1099 1099 goto bail;
1100 1100 }
1101 1101 r = PyDict_SetItem(cache, key, allvalues);
1102 1102 Py_DECREF(allvalues);
1103 1103 if (r < 0) {
1104 1104 goto bail;
1105 1105 }
1106 1106 }
1107 1107 value = PyInt_FromSsize_t(rev);
1108 1108 if (PyList_Append(allvalues, value)) {
1109 1109 goto bail;
1110 1110 }
1111 1111 Py_CLEAR(key);
1112 1112 Py_CLEAR(value);
1113 1113 }
1114 1114 Py_RETURN_NONE;
1115 1115 bail:
1116 1116 Py_XDECREF(key);
1117 1117 Py_XDECREF(value);
1118 1118 return NULL;
1119 1119 }
1120 1120
1121 1121 static PyObject *index_deltachain(indexObject *self, PyObject *args)
1122 1122 {
1123 1123 int rev, generaldelta;
1124 1124 PyObject *stoparg;
1125 1125 int stoprev, iterrev, baserev = -1;
1126 1126 int stopped;
1127 1127 PyObject *chain = NULL, *result = NULL;
1128 1128 const Py_ssize_t length = index_length(self);
1129 1129
1130 1130 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
1131 1131 return NULL;
1132 1132 }
1133 1133
1134 1134 if (PyInt_Check(stoparg)) {
1135 1135 stoprev = (int)PyInt_AsLong(stoparg);
1136 1136 if (stoprev == -1 && PyErr_Occurred()) {
1137 1137 return NULL;
1138 1138 }
1139 1139 } else if (stoparg == Py_None) {
1140 1140 stoprev = -2;
1141 1141 } else {
1142 1142 PyErr_SetString(PyExc_ValueError,
1143 1143 "stoprev must be integer or None");
1144 1144 return NULL;
1145 1145 }
1146 1146
1147 1147 if (rev < 0 || rev >= length) {
1148 1148 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1149 1149 return NULL;
1150 1150 }
1151 1151
1152 1152 chain = PyList_New(0);
1153 1153 if (chain == NULL) {
1154 1154 return NULL;
1155 1155 }
1156 1156
1157 1157 baserev = index_baserev(self, rev);
1158 1158
1159 1159 /* This should never happen. */
1160 1160 if (baserev <= -2) {
1161 1161 /* Error should be set by index_deref() */
1162 1162 assert(PyErr_Occurred());
1163 1163 goto bail;
1164 1164 }
1165 1165
1166 1166 iterrev = rev;
1167 1167
1168 1168 while (iterrev != baserev && iterrev != stoprev) {
1169 1169 PyObject *value = PyInt_FromLong(iterrev);
1170 1170 if (value == NULL) {
1171 1171 goto bail;
1172 1172 }
1173 1173 if (PyList_Append(chain, value)) {
1174 1174 Py_DECREF(value);
1175 1175 goto bail;
1176 1176 }
1177 1177 Py_DECREF(value);
1178 1178
1179 1179 if (generaldelta) {
1180 1180 iterrev = baserev;
1181 1181 } else {
1182 1182 iterrev--;
1183 1183 }
1184 1184
1185 1185 if (iterrev < 0) {
1186 1186 break;
1187 1187 }
1188 1188
1189 1189 if (iterrev >= length) {
1190 1190 PyErr_SetString(PyExc_IndexError,
1191 1191 "revision outside index");
1192 1192 return NULL;
1193 1193 }
1194 1194
1195 1195 baserev = index_baserev(self, iterrev);
1196 1196
1197 1197 /* This should never happen. */
1198 1198 if (baserev <= -2) {
1199 1199 /* Error should be set by index_deref() */
1200 1200 assert(PyErr_Occurred());
1201 1201 goto bail;
1202 1202 }
1203 1203 }
1204 1204
1205 1205 if (iterrev == stoprev) {
1206 1206 stopped = 1;
1207 1207 } else {
1208 1208 PyObject *value = PyInt_FromLong(iterrev);
1209 1209 if (value == NULL) {
1210 1210 goto bail;
1211 1211 }
1212 1212 if (PyList_Append(chain, value)) {
1213 1213 Py_DECREF(value);
1214 1214 goto bail;
1215 1215 }
1216 1216 Py_DECREF(value);
1217 1217
1218 1218 stopped = 0;
1219 1219 }
1220 1220
1221 1221 if (PyList_Reverse(chain)) {
1222 1222 goto bail;
1223 1223 }
1224 1224
1225 1225 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
1226 1226 Py_DECREF(chain);
1227 1227 return result;
1228 1228
1229 1229 bail:
1230 1230 Py_DECREF(chain);
1231 1231 return NULL;
1232 1232 }
1233 1233
1234 1234 static inline int64_t
1235 1235 index_segment_span(indexObject *self, Py_ssize_t start_rev, Py_ssize_t end_rev)
1236 1236 {
1237 1237 int64_t start_offset;
1238 1238 int64_t end_offset;
1239 1239 int end_size;
1240 1240 start_offset = index_get_start(self, start_rev);
1241 1241 if (start_offset < 0) {
1242 1242 return -1;
1243 1243 }
1244 1244 end_offset = index_get_start(self, end_rev);
1245 1245 if (end_offset < 0) {
1246 1246 return -1;
1247 1247 }
1248 1248 end_size = index_get_length(self, end_rev);
1249 1249 if (end_size < 0) {
1250 1250 return -1;
1251 1251 }
1252 1252 if (end_offset < start_offset) {
1253 1253 PyErr_Format(PyExc_ValueError,
1254 1254 "corrupted revlog index: inconsistent offset "
1255 1255 "between revisions (%zd) and (%zd)",
1256 1256 start_rev, end_rev);
1257 1257 return -1;
1258 1258 }
1259 1259 return (end_offset - start_offset) + (int64_t)end_size;
1260 1260 }
1261 1261
1262 1262 /* returns endidx so that revs[startidx:endidx] has no empty trailing revs */
1263 1263 static Py_ssize_t trim_endidx(indexObject *self, const Py_ssize_t *revs,
1264 1264 Py_ssize_t startidx, Py_ssize_t endidx)
1265 1265 {
1266 1266 int length;
1267 1267 while (endidx > 1 && endidx > startidx) {
1268 1268 length = index_get_length(self, revs[endidx - 1]);
1269 1269 if (length < 0) {
1270 1270 return -1;
1271 1271 }
1272 1272 if (length != 0) {
1273 1273 break;
1274 1274 }
1275 1275 endidx -= 1;
1276 1276 }
1277 1277 return endidx;
1278 1278 }
1279 1279
1280 1280 struct Gap {
1281 1281 int64_t size;
1282 1282 Py_ssize_t idx;
1283 1283 };
1284 1284
1285 1285 static int gap_compare(const void *left, const void *right)
1286 1286 {
1287 1287 const struct Gap *l_left = ((const struct Gap *)left);
1288 1288 const struct Gap *l_right = ((const struct Gap *)right);
1289 1289 if (l_left->size < l_right->size) {
1290 1290 return -1;
1291 1291 } else if (l_left->size > l_right->size) {
1292 1292 return 1;
1293 1293 }
1294 1294 return 0;
1295 1295 }
1296 1296 static int Py_ssize_t_compare(const void *left, const void *right)
1297 1297 {
1298 1298 const Py_ssize_t l_left = *(const Py_ssize_t *)left;
1299 1299 const Py_ssize_t l_right = *(const Py_ssize_t *)right;
1300 1300 if (l_left < l_right) {
1301 1301 return -1;
1302 1302 } else if (l_left > l_right) {
1303 1303 return 1;
1304 1304 }
1305 1305 return 0;
1306 1306 }
1307 1307
1308 1308 static PyObject *index_slicechunktodensity(indexObject *self, PyObject *args)
1309 1309 {
1310 1310 /* method arguments */
1311 1311 PyObject *list_revs = NULL; /* revisions in the chain */
1312 1312 double targetdensity = 0; /* min density to achieve */
1313 1313 Py_ssize_t mingapsize = 0; /* threshold to ignore gaps */
1314 1314
1315 1315 /* other core variables */
1316 1316 Py_ssize_t idxlen = index_length(self);
1317 1317 Py_ssize_t i; /* used for various iterations */
1318 1318 PyObject *result = NULL; /* the final return of the function */
1319 1319
1320 1320 /* generic information about the delta chain being sliced */
1321 1321 Py_ssize_t num_revs = 0; /* size of the full delta chain */
1322 1322 Py_ssize_t *revs = NULL; /* native array of revision in the chain */
1323 1323 int64_t chainpayload = 0; /* sum of all delta in the chain */
1324 1324 int64_t deltachainspan = 0; /* distance from first byte to last byte */
1325 1325
1326 1326 /* variable used for slicing the delta chain */
1327 1327 int64_t readdata = 0; /* amount of data currently planned to be read */
1328 1328 double density = 0; /* ratio of payload data to data read */
1329 1329 int64_t previous_end;
1330 1330 struct Gap *gaps = NULL; /* array of notable gap in the chain */
1331 1331 Py_ssize_t num_gaps =
1332 1332 0; /* total number of notable gaps recorded so far */
1333 1333 Py_ssize_t *selected_indices = NULL; /* indices of gap skipped over */
1334 1334 Py_ssize_t num_selected = 0; /* number of gaps skipped */
1335 1335 PyObject *chunk = NULL; /* individual slice */
1336 1336 PyObject *allchunks = NULL; /* all slices */
1337 1337 Py_ssize_t previdx;
1338 1338
1339 1339 /* parsing argument */
1340 1340 if (!PyArg_ParseTuple(args, "O!dn", &PyList_Type, &list_revs,
1341 1341 &targetdensity, &mingapsize)) {
1342 1342 goto bail;
1343 1343 }
1344 1344
1345 1345 /* If the delta chain contains a single element, we do not need slicing
1346 1346 */
1347 1347 num_revs = PyList_GET_SIZE(list_revs);
1348 1348 if (num_revs <= 1) {
1349 1349 result = PyTuple_Pack(1, list_revs);
1350 1350 goto done;
1351 1351 }
1352 1352
1353 1353 /* Turn the python list into a native integer array (for efficiency) */
1354 1354 revs = (Py_ssize_t *)calloc(num_revs, sizeof(Py_ssize_t));
1355 1355 if (revs == NULL) {
1356 1356 PyErr_NoMemory();
1357 1357 goto bail;
1358 1358 }
1359 1359 for (i = 0; i < num_revs; i++) {
1360 1360 Py_ssize_t revnum = PyInt_AsLong(PyList_GET_ITEM(list_revs, i));
1361 1361 if (revnum == -1 && PyErr_Occurred()) {
1362 1362 goto bail;
1363 1363 }
1364 1364 if (revnum < nullrev || revnum >= idxlen) {
1365 1365 PyErr_Format(PyExc_IndexError,
1366 1366 "index out of range: %zd", revnum);
1367 1367 goto bail;
1368 1368 }
1369 1369 revs[i] = revnum;
1370 1370 }
1371 1371
1372 1372 /* Compute and check various properties of the unsliced delta chain */
1373 1373 deltachainspan = index_segment_span(self, revs[0], revs[num_revs - 1]);
1374 1374 if (deltachainspan < 0) {
1375 1375 goto bail;
1376 1376 }
1377 1377
1378 1378 if (deltachainspan <= mingapsize) {
1379 1379 result = PyTuple_Pack(1, list_revs);
1380 1380 goto done;
1381 1381 }
1382 1382 chainpayload = 0;
1383 1383 for (i = 0; i < num_revs; i++) {
1384 1384 int tmp = index_get_length(self, revs[i]);
1385 1385 if (tmp < 0) {
1386 1386 goto bail;
1387 1387 }
1388 1388 chainpayload += tmp;
1389 1389 }
1390 1390
1391 1391 readdata = deltachainspan;
1392 1392 density = 1.0;
1393 1393
1394 1394 if (0 < deltachainspan) {
1395 1395 density = (double)chainpayload / (double)deltachainspan;
1396 1396 }
1397 1397
1398 1398 if (density >= targetdensity) {
1399 1399 result = PyTuple_Pack(1, list_revs);
1400 1400 goto done;
1401 1401 }
1402 1402
1403 1403 /* if chain is too sparse, look for relevant gaps */
1404 1404 gaps = (struct Gap *)calloc(num_revs, sizeof(struct Gap));
1405 1405 if (gaps == NULL) {
1406 1406 PyErr_NoMemory();
1407 1407 goto bail;
1408 1408 }
1409 1409
1410 1410 previous_end = -1;
1411 1411 for (i = 0; i < num_revs; i++) {
1412 1412 int64_t revstart;
1413 1413 int revsize;
1414 1414 revstart = index_get_start(self, revs[i]);
1415 1415 if (revstart < 0) {
1416 1416 goto bail;
1417 1417 };
1418 1418 revsize = index_get_length(self, revs[i]);
1419 1419 if (revsize < 0) {
1420 1420 goto bail;
1421 1421 };
1422 1422 if (revsize == 0) {
1423 1423 continue;
1424 1424 }
1425 1425 if (previous_end >= 0) {
1426 1426 int64_t gapsize = revstart - previous_end;
1427 1427 if (gapsize > mingapsize) {
1428 1428 gaps[num_gaps].size = gapsize;
1429 1429 gaps[num_gaps].idx = i;
1430 1430 num_gaps += 1;
1431 1431 }
1432 1432 }
1433 1433 previous_end = revstart + revsize;
1434 1434 }
1435 1435 if (num_gaps == 0) {
1436 1436 result = PyTuple_Pack(1, list_revs);
1437 1437 goto done;
1438 1438 }
1439 1439 qsort(gaps, num_gaps, sizeof(struct Gap), &gap_compare);
1440 1440
1441 1441 /* Slice the largest gaps first; they improve the density the most */
1442 1442 selected_indices =
1443 1443 (Py_ssize_t *)malloc((num_gaps + 1) * sizeof(Py_ssize_t));
1444 1444 if (selected_indices == NULL) {
1445 1445 PyErr_NoMemory();
1446 1446 goto bail;
1447 1447 }
1448 1448
1449 1449 for (i = num_gaps - 1; i >= 0; i--) {
1450 1450 selected_indices[num_selected] = gaps[i].idx;
1451 1451 readdata -= gaps[i].size;
1452 1452 num_selected += 1;
1453 1453 if (readdata <= 0) {
1454 1454 density = 1.0;
1455 1455 } else {
1456 1456 density = (double)chainpayload / (double)readdata;
1457 1457 }
1458 1458 if (density >= targetdensity) {
1459 1459 break;
1460 1460 }
1461 1461 }
1462 1462 qsort(selected_indices, num_selected, sizeof(Py_ssize_t),
1463 1463 &Py_ssize_t_compare);
1464 1464
1465 1465 /* create the resulting slice */
1466 1466 allchunks = PyList_New(0);
1467 1467 if (allchunks == NULL) {
1468 1468 goto bail;
1469 1469 }
1470 1470 previdx = 0;
1471 1471 selected_indices[num_selected] = num_revs;
1472 1472 for (i = 0; i <= num_selected; i++) {
1473 1473 Py_ssize_t idx = selected_indices[i];
1474 1474 Py_ssize_t endidx = trim_endidx(self, revs, previdx, idx);
1475 1475 if (endidx < 0) {
1476 1476 goto bail;
1477 1477 }
1478 1478 if (previdx < endidx) {
1479 1479 chunk = PyList_GetSlice(list_revs, previdx, endidx);
1480 1480 if (chunk == NULL) {
1481 1481 goto bail;
1482 1482 }
1483 1483 if (PyList_Append(allchunks, chunk) == -1) {
1484 1484 goto bail;
1485 1485 }
1486 1486 Py_DECREF(chunk);
1487 1487 chunk = NULL;
1488 1488 }
1489 1489 previdx = idx;
1490 1490 }
1491 1491 result = allchunks;
1492 1492 goto done;
1493 1493
1494 1494 bail:
1495 1495 Py_XDECREF(allchunks);
1496 1496 Py_XDECREF(chunk);
1497 1497 done:
1498 1498 free(revs);
1499 1499 free(gaps);
1500 1500 free(selected_indices);
1501 1501 return result;
1502 1502 }
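/*
 * Editorial summary, not part of the original file: the slicing above keeps
 * density = chainpayload / readdata, where readdata starts at the full span
 * of the chain. Gaps larger than mingapsize are sorted by size and removed
 * from readdata, largest first, until density >= targetdensity; the surviving
 * cut points then split the revision list into the returned chunks.
 */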
1503 1503
1504 1504 static inline int nt_level(const char *node, Py_ssize_t level)
1505 1505 {
1506 1506 int v = node[level >> 1];
1507 1507 if (!(level & 1))
1508 1508 v >>= 4;
1509 1509 return v & 0xf;
1510 1510 }
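/*
 * Editorial example, not part of the original file: nt_level() extracts the
 * level-th nibble of a binary node. If node[0] == 0xab then
 * nt_level(node, 0) == 0xa (high nibble), nt_level(node, 1) == 0xb (low
 * nibble), and level 2 moves on to node[1].
 */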
1511 1511
1512 1512 /*
1513 1513 * Return values:
1514 1514 *
1515 1515 * -4: match is ambiguous (multiple candidates)
1516 1516 * -2: not found
1517 1517 * rest: valid rev
1518 1518 */
1519 1519 static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
1520 1520 int hex)
1521 1521 {
1522 1522 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
1523 1523 int level, maxlevel, off;
1524 1524
1525 1525 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
1526 1526 return -1;
1527 1527
1528 1528 if (hex)
1529 1529 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
1530 1530 else
1531 1531 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
1532 1532
1533 1533 for (level = off = 0; level < maxlevel; level++) {
1534 1534 int k = getnybble(node, level);
1535 1535 nodetreenode *n = &self->nodes[off];
1536 1536 int v = n->children[k];
1537 1537
1538 1538 if (v < 0) {
1539 1539 const char *n;
1540 1540 Py_ssize_t i;
1541 1541
1542 1542 v = -(v + 2);
1543 1543 n = index_node(self->index, v);
1544 1544 if (n == NULL)
1545 1545 return -2;
1546 1546 for (i = level; i < maxlevel; i++)
1547 1547 if (getnybble(node, i) != nt_level(n, i))
1548 1548 return -2;
1549 1549 return v;
1550 1550 }
1551 1551 if (v == 0)
1552 1552 return -2;
1553 1553 off = v;
1554 1554 }
1555 1555 /* multiple matches against an ambiguous prefix */
1556 1556 return -4;
1557 1557 }
1558 1558
1559 1559 static int nt_new(nodetree *self)
1560 1560 {
1561 1561 if (self->length == self->capacity) {
1562 1562 unsigned newcapacity;
1563 1563 nodetreenode *newnodes;
1564 1564 newcapacity = self->capacity * 2;
1565 1565 if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
1566 1566 PyErr_SetString(PyExc_MemoryError,
1567 1567 "overflow in nt_new");
1568 1568 return -1;
1569 1569 }
1570 1570 newnodes =
1571 1571 realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1572 1572 if (newnodes == NULL) {
1573 1573 PyErr_SetString(PyExc_MemoryError, "out of memory");
1574 1574 return -1;
1575 1575 }
1576 1576 self->capacity = newcapacity;
1577 1577 self->nodes = newnodes;
1578 1578 memset(&self->nodes[self->length], 0,
1579 1579 sizeof(nodetreenode) * (self->capacity - self->length));
1580 1580 }
1581 1581 return self->length++;
1582 1582 }
1583 1583
1584 1584 static int nt_insert(nodetree *self, const char *node, int rev)
1585 1585 {
1586 1586 int level = 0;
1587 1587 int off = 0;
1588 1588
1589 1589 while (level < 40) {
1590 1590 int k = nt_level(node, level);
1591 1591 nodetreenode *n;
1592 1592 int v;
1593 1593
1594 1594 n = &self->nodes[off];
1595 1595 v = n->children[k];
1596 1596
1597 1597 if (v == 0) {
1598 1598 n->children[k] = -rev - 2;
1599 1599 return 0;
1600 1600 }
1601 1601 if (v < 0) {
1602 1602 const char *oldnode =
1603 1603 index_node_existing(self->index, -(v + 2));
1604 1604 int noff;
1605 1605
1606 1606 if (oldnode == NULL)
1607 1607 return -1;
1608 1608 if (!memcmp(oldnode, node, 20)) {
1609 1609 n->children[k] = -rev - 2;
1610 1610 return 0;
1611 1611 }
1612 1612 noff = nt_new(self);
1613 1613 if (noff == -1)
1614 1614 return -1;
1615 1615 /* self->nodes may have been changed by realloc */
1616 1616 self->nodes[off].children[k] = noff;
1617 1617 off = noff;
1618 1618 n = &self->nodes[off];
1619 1619 n->children[nt_level(oldnode, ++level)] = v;
1620 1620 if (level > self->depth)
1621 1621 self->depth = level;
1622 1622 self->splits += 1;
1623 1623 } else {
1624 1624 level += 1;
1625 1625 off = v;
1626 1626 }
1627 1627 }
1628 1628
1629 1629 return -1;
1630 1630 }
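/*
 * Editorial sketch of a split in nt_insert above, not part of the original
 * file: when children[k] already holds a leaf for some node A and a new node
 * B arrives with the same nibble, a fresh interior node is allocated, A's
 * leaf is re-filed under A's next nibble inside it, and the loop retries B
 * one level deeper. Nodes sharing a longer prefix trigger further splits,
 * which is what the "splits" and "depth" statistics count.
 */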
1631 1631
1632 1632 static PyObject *ntobj_insert(nodetreeObject *self, PyObject *args)
1633 1633 {
1634 1634 Py_ssize_t rev;
1635 1635 const char *node;
1636 1636 Py_ssize_t length;
1637 1637 if (!PyArg_ParseTuple(args, "n", &rev))
1638 1638 return NULL;
1639 1639 length = index_length(self->nt.index);
1640 1640 if (rev < 0 || rev >= length) {
1641 1641 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1642 1642 return NULL;
1643 1643 }
1644 1644 node = index_node_existing(self->nt.index, rev);
1645 1645 if (nt_insert(&self->nt, node, (int)rev) == -1)
1646 1646 return NULL;
1647 1647 Py_RETURN_NONE;
1648 1648 }
1649 1649
1650 1650 static int nt_delete_node(nodetree *self, const char *node)
1651 1651 {
1652 1652 /* rev==-2 happens to get encoded as 0, which is interpreted as not set
1653 1653 */
1654 1654 return nt_insert(self, node, -2);
1655 1655 }
1656 1656
1657 1657 static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
1658 1658 {
1659 1659 /* Initialize before overflow-checking to avoid nt_dealloc() crash. */
1660 1660 self->nodes = NULL;
1661 1661
1662 1662 self->index = index;
1663 1663 /* The input capacity is in terms of revisions, while the field is in
1664 1664 * terms of nodetree nodes. */
1665 1665 self->capacity = (capacity < 4 ? 4 : capacity / 2);
1666 1666 self->depth = 0;
1667 1667 self->splits = 0;
1668 1668 if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
1669 1669 PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
1670 1670 return -1;
1671 1671 }
1672 1672 self->nodes = calloc(self->capacity, sizeof(nodetreenode));
1673 1673 if (self->nodes == NULL) {
1674 1674 PyErr_NoMemory();
1675 1675 return -1;
1676 1676 }
1677 1677 self->length = 1;
1678 1678 return 0;
1679 1679 }
1680 1680
1681 1681 static int ntobj_init(nodetreeObject *self, PyObject *args)
1682 1682 {
1683 1683 PyObject *index;
1684 1684 unsigned capacity;
1685 1685 if (!PyArg_ParseTuple(args, "O!I", &HgRevlogIndex_Type, &index,
1686 1686 &capacity))
1687 1687 return -1;
1688 1688 Py_INCREF(index);
1689 1689 return nt_init(&self->nt, (indexObject *)index, capacity);
1690 1690 }
1691 1691
1692 1692 static int nt_partialmatch(nodetree *self, const char *node, Py_ssize_t nodelen)
1693 1693 {
1694 1694 return nt_find(self, node, nodelen, 1);
1695 1695 }
1696 1696
1697 1697 /*
1698 1698 * Find the length of the shortest unique prefix of node.
1699 1699 *
1700 1700 * Return values:
1701 1701 *
1702 1702 * -3: error (exception set)
1703 1703 * -2: not found (no exception set)
1704 1704 * rest: length of shortest prefix
1705 1705 */
1706 1706 static int nt_shortest(nodetree *self, const char *node)
1707 1707 {
1708 1708 int level, off;
1709 1709
1710 1710 for (level = off = 0; level < 40; level++) {
1711 1711 int k, v;
1712 1712 nodetreenode *n = &self->nodes[off];
1713 1713 k = nt_level(node, level);
1714 1714 v = n->children[k];
1715 1715 if (v < 0) {
1716 1716 const char *n;
1717 1717 v = -(v + 2);
1718 1718 n = index_node_existing(self->index, v);
1719 1719 if (n == NULL)
1720 1720 return -3;
1721 1721 if (memcmp(node, n, 20) != 0)
1722 1722 /*
1723 1723 * Found a unique prefix, but it wasn't for the
1724 1724 * requested node (i.e. the requested node does
1725 1725 * not exist).
1726 1726 */
1727 1727 return -2;
1728 1728 return level + 1;
1729 1729 }
1730 1730 if (v == 0)
1731 1731 return -2;
1732 1732 off = v;
1733 1733 }
1734 1734 /*
1735 1735 * The node was still not unique after 40 hex digits, so this won't
1736 1736 * happen. Also, if we get here, then there's a programming error in
1737 1737 * this file that made us insert a node longer than 40 hex digits.
1738 1738 */
1739 1739 PyErr_SetString(PyExc_Exception, "broken node tree");
1740 1740 return -3;
1741 1741 }
1742 1742
1743 1743 static PyObject *ntobj_shortest(nodetreeObject *self, PyObject *args)
1744 1744 {
1745 1745 PyObject *val;
1746 1746 char *node;
1747 1747 int length;
1748 1748
1749 1749 if (!PyArg_ParseTuple(args, "O", &val))
1750 1750 return NULL;
1751 1751 if (node_check(val, &node) == -1)
1752 1752 return NULL;
1753 1753
1754 1754 length = nt_shortest(&self->nt, node);
1755 1755 if (length == -3)
1756 1756 return NULL;
1757 1757 if (length == -2) {
1758 1758 raise_revlog_error();
1759 1759 return NULL;
1760 1760 }
1761 1761 return PyInt_FromLong(length);
1762 1762 }
1763 1763
1764 1764 static void nt_dealloc(nodetree *self)
1765 1765 {
1766 1766 free(self->nodes);
1767 1767 self->nodes = NULL;
1768 1768 }
1769 1769
1770 1770 static void ntobj_dealloc(nodetreeObject *self)
1771 1771 {
1772 1772 Py_XDECREF(self->nt.index);
1773 1773 nt_dealloc(&self->nt);
1774 1774 PyObject_Del(self);
1775 1775 }
1776 1776
1777 1777 static PyMethodDef ntobj_methods[] = {
1778 1778 {"insert", (PyCFunction)ntobj_insert, METH_VARARGS,
1779 1779 "insert an index entry"},
1780 1780 {"shortest", (PyCFunction)ntobj_shortest, METH_VARARGS,
1781 1781 "find length of shortest hex nodeid of a binary ID"},
1782 1782 {NULL} /* Sentinel */
1783 1783 };
1784 1784
1785 1785 static PyTypeObject nodetreeType = {
1786 1786 PyVarObject_HEAD_INIT(NULL, 0) /* header */
1787 1787 "parsers.nodetree", /* tp_name */
1788 1788 sizeof(nodetreeObject), /* tp_basicsize */
1789 1789 0, /* tp_itemsize */
1790 1790 (destructor)ntobj_dealloc, /* tp_dealloc */
1791 1791 0, /* tp_print */
1792 1792 0, /* tp_getattr */
1793 1793 0, /* tp_setattr */
1794 1794 0, /* tp_compare */
1795 1795 0, /* tp_repr */
1796 1796 0, /* tp_as_number */
1797 1797 0, /* tp_as_sequence */
1798 1798 0, /* tp_as_mapping */
1799 1799 0, /* tp_hash */
1800 1800 0, /* tp_call */
1801 1801 0, /* tp_str */
1802 1802 0, /* tp_getattro */
1803 1803 0, /* tp_setattro */
1804 1804 0, /* tp_as_buffer */
1805 1805 Py_TPFLAGS_DEFAULT, /* tp_flags */
1806 1806 "nodetree", /* tp_doc */
1807 1807 0, /* tp_traverse */
1808 1808 0, /* tp_clear */
1809 1809 0, /* tp_richcompare */
1810 1810 0, /* tp_weaklistoffset */
1811 1811 0, /* tp_iter */
1812 1812 0, /* tp_iternext */
1813 1813 ntobj_methods, /* tp_methods */
1814 1814 0, /* tp_members */
1815 1815 0, /* tp_getset */
1816 1816 0, /* tp_base */
1817 1817 0, /* tp_dict */
1818 1818 0, /* tp_descr_get */
1819 1819 0, /* tp_descr_set */
1820 1820 0, /* tp_dictoffset */
1821 1821 (initproc)ntobj_init, /* tp_init */
1822 1822 0, /* tp_alloc */
1823 1823 };
1824 1824
1825 1825 static int index_init_nt(indexObject *self)
1826 1826 {
1827 1827 if (!self->ntinitialized) {
1828 1828 if (nt_init(&self->nt, self, (int)self->raw_length) == -1) {
1829 1829 nt_dealloc(&self->nt);
1830 1830 return -1;
1831 1831 }
1832 1832 if (nt_insert(&self->nt, nullid, -1) == -1) {
1833 1833 nt_dealloc(&self->nt);
1834 1834 return -1;
1835 1835 }
1836 1836 self->ntinitialized = 1;
1837 1837 self->ntrev = (int)index_length(self);
1838 1838 self->ntlookups = 1;
1839 1839 self->ntmisses = 0;
1840 1840 }
1841 1841 return 0;
1842 1842 }
1843 1843
1844 1844 /*
1845 1845 * Return values:
1846 1846 *
1847 1847 * -3: error (exception set)
1848 1848 * -2: not found (no exception set)
1849 1849 * rest: valid rev
1850 1850 */
1851 1851 static int index_find_node(indexObject *self, const char *node,
1852 1852 Py_ssize_t nodelen)
1853 1853 {
1854 1854 int rev;
1855 1855
1856 1856 if (index_init_nt(self) == -1)
1857 1857 return -3;
1858 1858
1859 1859 self->ntlookups++;
1860 1860 rev = nt_find(&self->nt, node, nodelen, 0);
1861 1861 if (rev >= -1)
1862 1862 return rev;
1863 1863
1864 1864 /*
1865 1865 * For the first handful of lookups, we scan the entire index,
1866 1866 * and cache only the matching nodes. This optimizes for cases
1867 1867 * like "hg tip", where only a few nodes are accessed.
1868 1868 *
1869 1869 * After that, we cache every node we visit, using a single
1870 1870 * scan amortized over multiple lookups. This gives the best
1871 1871 * bulk performance, e.g. for "hg log".
1872 1872 */
1873 1873 if (self->ntmisses++ < 4) {
1874 1874 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1875 1875 const char *n = index_node_existing(self, rev);
1876 1876 if (n == NULL)
1877 1877 return -3;
1878 1878 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1879 1879 if (nt_insert(&self->nt, n, rev) == -1)
1880 1880 return -3;
1881 1881 break;
1882 1882 }
1883 1883 }
1884 1884 } else {
1885 1885 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1886 1886 const char *n = index_node_existing(self, rev);
1887 1887 if (n == NULL)
1888 1888 return -3;
1889 1889 if (nt_insert(&self->nt, n, rev) == -1) {
1890 1890 self->ntrev = rev + 1;
1891 1891 return -3;
1892 1892 }
1893 1893 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1894 1894 break;
1895 1895 }
1896 1896 }
1897 1897 self->ntrev = rev;
1898 1898 }
1899 1899
1900 1900 if (rev >= 0)
1901 1901 return rev;
1902 1902 return -2;
1903 1903 }
1904 1904
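The comment in index_find_node above describes a two-phase miss policy: the first few misses scan the whole index but cache only the node that matched, while later misses cache every node walked past, so one backward scan is amortized over many lookups. A rough Python sketch of that policy, where `entries` stands in for the parsed index (tuples whose 8th field is the 20-byte node), `nodemap` for the trie, and `state` for the ntmisses/ntrev counters, all hypothetical stand-ins:

# Illustrative sketch only, not the C implementation.
def find_node(entries, nodemap, node, state):
    rev = nodemap.get(node)
    if rev is not None:
        return rev
    state['misses'] += 1
    scan = range(state['ntrev'] - 1, -1, -1)
    if state['misses'] <= 4:
        # early misses: scan everything, cache only the matching node
        for rev in scan:
            if entries[rev][7] == node:
                nodemap[node] = rev
                return rev
    else:
        # later misses: cache every node visited, so a single backward
        # scan is shared by all subsequent lookups
        for rev in scan:
            nodemap[entries[rev][7]] = rev
            if entries[rev][7] == node:
                state['ntrev'] = rev
                return rev
        state['ntrev'] = -1
    return None  # not found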
1905 1905 static PyObject *index_getitem(indexObject *self, PyObject *value)
1906 1906 {
1907 1907 char *node;
1908 1908 int rev;
1909 1909
1910 1910 if (PyInt_Check(value)) {
1911 1911 long idx;
1912 1912 if (!pylong_to_long(value, &idx)) {
1913 1913 return NULL;
1914 1914 }
1915 1915 return index_get(self, idx);
1916 1916 }
1917 1917
1918 1918 if (node_check(value, &node) == -1)
1919 1919 return NULL;
1920 1920 rev = index_find_node(self, node, 20);
1921 1921 if (rev >= -1)
1922 1922 return PyInt_FromLong(rev);
1923 1923 if (rev == -2)
1924 1924 raise_revlog_error();
1925 1925 return NULL;
1926 1926 }
1927 1927
1928 1928 /*
1929 1929 * Fully populate the radix tree.
1930 1930 */
1931 1931 static int index_populate_nt(indexObject *self)
1932 1932 {
1933 1933 int rev;
1934 1934 if (self->ntrev > 0) {
1935 1935 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1936 1936 const char *n = index_node_existing(self, rev);
1937 1937 if (n == NULL)
1938 1938 return -1;
1939 1939 if (nt_insert(&self->nt, n, rev) == -1)
1940 1940 return -1;
1941 1941 }
1942 1942 self->ntrev = -1;
1943 1943 }
1944 1944 return 0;
1945 1945 }
1946 1946
1947 1947 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1948 1948 {
1949 1949 const char *fullnode;
1950 1950 Py_ssize_t nodelen;
1951 1951 char *node;
1952 1952 int rev, i;
1953 1953
1954 1954 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1955 1955 return NULL;
1956 1956
1957 1957 if (nodelen < 1) {
1958 1958 PyErr_SetString(PyExc_ValueError, "key too short");
1959 1959 return NULL;
1960 1960 }
1961 1961
1962 1962 if (nodelen > 40) {
1963 1963 PyErr_SetString(PyExc_ValueError, "key too long");
1964 1964 return NULL;
1965 1965 }
1966 1966
1967 1967 for (i = 0; i < nodelen; i++)
1968 1968 hexdigit(node, i);
1969 1969 if (PyErr_Occurred()) {
1970 1970 /* input contains non-hex characters */
1971 1971 PyErr_Clear();
1972 1972 Py_RETURN_NONE;
1973 1973 }
1974 1974
1975 1975 if (index_init_nt(self) == -1)
1976 1976 return NULL;
1977 1977 if (index_populate_nt(self) == -1)
1978 1978 return NULL;
1979 1979 rev = nt_partialmatch(&self->nt, node, nodelen);
1980 1980
1981 1981 switch (rev) {
1982 1982 case -4:
1983 1983 raise_revlog_error();
1984 1984 return NULL;
1985 1985 case -2:
1986 1986 Py_RETURN_NONE;
1987 1987 case -1:
1988 1988 return PyBytes_FromStringAndSize(nullid, 20);
1989 1989 }
1990 1990
1991 1991 fullnode = index_node_existing(self, rev);
1992 1992 if (fullnode == NULL) {
1993 1993 return NULL;
1994 1994 }
1995 1995 return PyBytes_FromStringAndSize(fullnode, 20);
1996 1996 }
1997 1997
1998 1998 static PyObject *index_shortest(indexObject *self, PyObject *args)
1999 1999 {
2000 2000 PyObject *val;
2001 2001 char *node;
2002 2002 int length;
2003 2003
2004 2004 if (!PyArg_ParseTuple(args, "O", &val))
2005 2005 return NULL;
2006 2006 if (node_check(val, &node) == -1)
2007 2007 return NULL;
2008 2008
2009 2009 self->ntlookups++;
2010 2010 if (index_init_nt(self) == -1)
2011 2011 return NULL;
2012 2012 if (index_populate_nt(self) == -1)
2013 2013 return NULL;
2014 2014 length = nt_shortest(&self->nt, node);
2015 2015 if (length == -3)
2016 2016 return NULL;
2017 2017 if (length == -2) {
2018 2018 raise_revlog_error();
2019 2019 return NULL;
2020 2020 }
2021 2021 return PyInt_FromLong(length);
2022 2022 }
2023 2023
2024 2024 static PyObject *index_m_get(indexObject *self, PyObject *args)
2025 2025 {
2026 2026 PyObject *val;
2027 2027 char *node;
2028 2028 int rev;
2029 2029
2030 2030 if (!PyArg_ParseTuple(args, "O", &val))
2031 2031 return NULL;
2032 2032 if (node_check(val, &node) == -1)
2033 2033 return NULL;
2034 2034 rev = index_find_node(self, node, 20);
2035 2035 if (rev == -3)
2036 2036 return NULL;
2037 2037 if (rev == -2)
2038 2038 Py_RETURN_NONE;
2039 2039 return PyInt_FromLong(rev);
2040 2040 }
2041 2041
2042 2042 static int index_contains(indexObject *self, PyObject *value)
2043 2043 {
2044 2044 char *node;
2045 2045
2046 2046 if (PyInt_Check(value)) {
2047 2047 long rev;
2048 2048 if (!pylong_to_long(value, &rev)) {
2049 2049 return -1;
2050 2050 }
2051 2051 return rev >= -1 && rev < index_length(self);
2052 2052 }
2053 2053
2054 2054 if (node_check(value, &node) == -1)
2055 2055 return -1;
2056 2056
2057 2057 switch (index_find_node(self, node, 20)) {
2058 2058 case -3:
2059 2059 return -1;
2060 2060 case -2:
2061 2061 return 0;
2062 2062 default:
2063 2063 return 1;
2064 2064 }
2065 2065 }
2066 2066
2067 2067 static PyObject *index_m_has_node(indexObject *self, PyObject *args)
2068 2068 {
2069 2069 int ret = index_contains(self, args);
2070 2070 if (ret < 0)
2071 2071 return NULL;
2072 2072 return PyBool_FromLong((long)ret);
2073 2073 }
2074 2074
2075 2075 static PyObject *index_m_rev(indexObject *self, PyObject *val)
2076 2076 {
2077 2077 char *node;
2078 2078 int rev;
2079 2079
2080 2080 if (node_check(val, &node) == -1)
2081 2081 return NULL;
2082 2082 rev = index_find_node(self, node, 20);
2083 2083 if (rev >= -1)
2084 2084 return PyInt_FromLong(rev);
2085 2085 if (rev == -2)
2086 2086 raise_revlog_error();
2087 2087 return NULL;
2088 2088 }
2089 2089
2090 2090 typedef uint64_t bitmask;
2091 2091
2092 2092 /*
2093 2093 * Given a disjoint set of revs, return all candidates for the
2094 2094 * greatest common ancestor. In revset notation, this is the set
2095 2095 * "heads(::a and ::b and ...)"
2096 2096 */
2097 2097 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
2098 2098 int revcount)
2099 2099 {
2100 2100 const bitmask allseen = (1ull << revcount) - 1;
2101 2101 const bitmask poison = 1ull << revcount;
2102 2102 PyObject *gca = PyList_New(0);
2103 2103 int i, v, interesting;
2104 2104 int maxrev = -1;
2105 2105 bitmask sp;
2106 2106 bitmask *seen;
2107 2107
2108 2108 if (gca == NULL)
2109 2109 return PyErr_NoMemory();
2110 2110
2111 2111 for (i = 0; i < revcount; i++) {
2112 2112 if (revs[i] > maxrev)
2113 2113 maxrev = revs[i];
2114 2114 }
2115 2115
2116 2116 seen = calloc(sizeof(*seen), maxrev + 1);
2117 2117 if (seen == NULL) {
2118 2118 Py_DECREF(gca);
2119 2119 return PyErr_NoMemory();
2120 2120 }
2121 2121
2122 2122 for (i = 0; i < revcount; i++)
2123 2123 seen[revs[i]] = 1ull << i;
2124 2124
2125 2125 interesting = revcount;
2126 2126
2127 2127 for (v = maxrev; v >= 0 && interesting; v--) {
2128 2128 bitmask sv = seen[v];
2129 2129 int parents[2];
2130 2130
2131 2131 if (!sv)
2132 2132 continue;
2133 2133
2134 2134 if (sv < poison) {
2135 2135 interesting -= 1;
2136 2136 if (sv == allseen) {
2137 2137 PyObject *obj = PyInt_FromLong(v);
2138 2138 if (obj == NULL)
2139 2139 goto bail;
2140 2140 if (PyList_Append(gca, obj) == -1) {
2141 2141 Py_DECREF(obj);
2142 2142 goto bail;
2143 2143 }
2144 2144 sv |= poison;
2145 2145 for (i = 0; i < revcount; i++) {
2146 2146 if (revs[i] == v)
2147 2147 goto done;
2148 2148 }
2149 2149 }
2150 2150 }
2151 2151 if (index_get_parents(self, v, parents, maxrev) < 0)
2152 2152 goto bail;
2153 2153
2154 2154 for (i = 0; i < 2; i++) {
2155 2155 int p = parents[i];
2156 2156 if (p == -1)
2157 2157 continue;
2158 2158 sp = seen[p];
2159 2159 if (sv < poison) {
2160 2160 if (sp == 0) {
2161 2161 seen[p] = sv;
2162 2162 interesting++;
2163 2163 } else if (sp != sv)
2164 2164 seen[p] |= sv;
2165 2165 } else {
2166 2166 if (sp && sp < poison)
2167 2167 interesting--;
2168 2168 seen[p] = sv;
2169 2169 }
2170 2170 }
2171 2171 }
2172 2172
2173 2173 done:
2174 2174 free(seen);
2175 2175 return gca;
2176 2176 bail:
2177 2177 free(seen);
2178 2178 Py_XDECREF(gca);
2179 2179 return NULL;
2180 2180 }
2181 2181
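The bitmask scheme in find_gca_candidates gives each input rev its own bit, walks revisions from highest to lowest, and ORs each revision's bit set into its parents; a revision whose set equals allseen is reachable from every input and becomes a candidate, after which its set is poisoned so its own ancestors are not reported as well. A condensed Python sketch of the same idea, where `parents(v)` is a hypothetical helper returning the two parent revs of v (-1 meaning "no parent"); the C version additionally keeps an "interesting" counter so it can stop early:

# Condensed sketch of the seen-bit propagation, assumptions as above.
def gca_candidates(parents, revs):
    allseen = (1 << len(revs)) - 1
    poison = 1 << len(revs)
    seen = {r: 1 << i for i, r in enumerate(revs)}
    gca = []
    for v in range(max(revs), -1, -1):
        sv = seen.get(v, 0)
        if not sv:
            continue
        if sv < poison and sv == allseen:
            gca.append(v)     # v is an ancestor of every input rev
            sv |= poison      # poison v so its own ancestors are skipped
        for p in parents(v):
            if p == -1:
                continue
            if sv < poison:
                seen[p] = seen.get(p, 0) | sv  # propagate reachability bits
            else:
                seen[p] = sv                   # poison flows down to ancestors
    return gca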
2182 2182 /*
2183 2183 * Given a disjoint set of revs, return the subset with the longest
2184 2184 * path to the root.
2185 2185 */
2186 2186 static PyObject *find_deepest(indexObject *self, PyObject *revs)
2187 2187 {
2188 2188 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
2189 2189 static const Py_ssize_t capacity = 24;
2190 2190 int *depth, *interesting = NULL;
2191 2191 int i, j, v, ninteresting;
2192 2192 PyObject *dict = NULL, *keys = NULL;
2193 2193 long *seen = NULL;
2194 2194 int maxrev = -1;
2195 2195 long final;
2196 2196
2197 2197 if (revcount > capacity) {
2198 2198 PyErr_Format(PyExc_OverflowError,
2199 2199 "bitset size (%ld) > capacity (%ld)",
2200 2200 (long)revcount, (long)capacity);
2201 2201 return NULL;
2202 2202 }
2203 2203
2204 2204 for (i = 0; i < revcount; i++) {
2205 2205 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2206 2206 if (n > maxrev)
2207 2207 maxrev = n;
2208 2208 }
2209 2209
2210 2210 depth = calloc(sizeof(*depth), maxrev + 1);
2211 2211 if (depth == NULL)
2212 2212 return PyErr_NoMemory();
2213 2213
2214 2214 seen = calloc(sizeof(*seen), maxrev + 1);
2215 2215 if (seen == NULL) {
2216 2216 PyErr_NoMemory();
2217 2217 goto bail;
2218 2218 }
2219 2219
2220 2220 interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
2221 2221 if (interesting == NULL) {
2222 2222 PyErr_NoMemory();
2223 2223 goto bail;
2224 2224 }
2225 2225
2226 2226 if (PyList_Sort(revs) == -1)
2227 2227 goto bail;
2228 2228
2229 2229 for (i = 0; i < revcount; i++) {
2230 2230 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2231 2231 long b = 1l << i;
2232 2232 depth[n] = 1;
2233 2233 seen[n] = b;
2234 2234 interesting[b] = 1;
2235 2235 }
2236 2236
2237 2237 /* invariant: ninteresting is the number of non-zero entries in
2238 2238 * interesting. */
2239 2239 ninteresting = (int)revcount;
2240 2240
2241 2241 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
2242 2242 int dv = depth[v];
2243 2243 int parents[2];
2244 2244 long sv;
2245 2245
2246 2246 if (dv == 0)
2247 2247 continue;
2248 2248
2249 2249 sv = seen[v];
2250 2250 if (index_get_parents(self, v, parents, maxrev) < 0)
2251 2251 goto bail;
2252 2252
2253 2253 for (i = 0; i < 2; i++) {
2254 2254 int p = parents[i];
2255 2255 long sp;
2256 2256 int dp;
2257 2257
2258 2258 if (p == -1)
2259 2259 continue;
2260 2260
2261 2261 dp = depth[p];
2262 2262 sp = seen[p];
2263 2263 if (dp <= dv) {
2264 2264 depth[p] = dv + 1;
2265 2265 if (sp != sv) {
2266 2266 interesting[sv] += 1;
2267 2267 seen[p] = sv;
2268 2268 if (sp) {
2269 2269 interesting[sp] -= 1;
2270 2270 if (interesting[sp] == 0)
2271 2271 ninteresting -= 1;
2272 2272 }
2273 2273 }
2274 2274 } else if (dv == dp - 1) {
2275 2275 long nsp = sp | sv;
2276 2276 if (nsp == sp)
2277 2277 continue;
2278 2278 seen[p] = nsp;
2279 2279 interesting[sp] -= 1;
2280 2280 if (interesting[sp] == 0)
2281 2281 ninteresting -= 1;
2282 2282 if (interesting[nsp] == 0)
2283 2283 ninteresting += 1;
2284 2284 interesting[nsp] += 1;
2285 2285 }
2286 2286 }
2287 2287 interesting[sv] -= 1;
2288 2288 if (interesting[sv] == 0)
2289 2289 ninteresting -= 1;
2290 2290 }
2291 2291
2292 2292 final = 0;
2293 2293 j = ninteresting;
2294 2294 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
2295 2295 if (interesting[i] == 0)
2296 2296 continue;
2297 2297 final |= i;
2298 2298 j -= 1;
2299 2299 }
2300 2300 if (final == 0) {
2301 2301 keys = PyList_New(0);
2302 2302 goto bail;
2303 2303 }
2304 2304
2305 2305 dict = PyDict_New();
2306 2306 if (dict == NULL)
2307 2307 goto bail;
2308 2308
2309 2309 for (i = 0; i < revcount; i++) {
2310 2310 PyObject *key;
2311 2311
2312 2312 if ((final & (1 << i)) == 0)
2313 2313 continue;
2314 2314
2315 2315 key = PyList_GET_ITEM(revs, i);
2316 2316 Py_INCREF(key);
2317 2317 Py_INCREF(Py_None);
2318 2318 if (PyDict_SetItem(dict, key, Py_None) == -1) {
2319 2319 Py_DECREF(key);
2320 2320 Py_DECREF(Py_None);
2321 2321 goto bail;
2322 2322 }
2323 2323 }
2324 2324
2325 2325 keys = PyDict_Keys(dict);
2326 2326
2327 2327 bail:
2328 2328 free(depth);
2329 2329 free(seen);
2330 2330 free(interesting);
2331 2331 Py_XDECREF(dict);
2332 2332
2333 2333 return keys;
2334 2334 }
2335 2335
2336 2336 /*
2337 2337 * Given a (possibly overlapping) set of revs, return all the
2338 2338 * common ancestors heads: heads(::args[0] and ::args[1] and ...)
2339 2339 */
2340 2340 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
2341 2341 {
2342 2342 PyObject *ret = NULL;
2343 2343 Py_ssize_t argcount, i, len;
2344 2344 bitmask repeat = 0;
2345 2345 int revcount = 0;
2346 2346 int *revs;
2347 2347
2348 2348 argcount = PySequence_Length(args);
2349 2349 revs = PyMem_Malloc(argcount * sizeof(*revs));
2350 2350 if (argcount > 0 && revs == NULL)
2351 2351 return PyErr_NoMemory();
2352 2352 len = index_length(self);
2353 2353
2354 2354 for (i = 0; i < argcount; i++) {
2355 2355 static const int capacity = 24;
2356 2356 PyObject *obj = PySequence_GetItem(args, i);
2357 2357 bitmask x;
2358 2358 long val;
2359 2359
2360 2360 if (!PyInt_Check(obj)) {
2361 2361 PyErr_SetString(PyExc_TypeError,
2362 2362 "arguments must all be ints");
2363 2363 Py_DECREF(obj);
2364 2364 goto bail;
2365 2365 }
2366 2366 val = PyInt_AsLong(obj);
2367 2367 Py_DECREF(obj);
2368 2368 if (val == -1) {
2369 2369 ret = PyList_New(0);
2370 2370 goto done;
2371 2371 }
2372 2372 if (val < 0 || val >= len) {
2373 2373 PyErr_SetString(PyExc_IndexError, "index out of range");
2374 2374 goto bail;
2375 2375 }
2376 2376 /* this cheesy bloom filter lets us avoid some more
2377 2377 * expensive duplicate checks in the common set-is-disjoint
2378 2378 * case */
2379 2379 x = 1ull << (val & 0x3f);
2380 2380 if (repeat & x) {
2381 2381 int k;
2382 2382 for (k = 0; k < revcount; k++) {
2383 2383 if (val == revs[k])
2384 2384 goto duplicate;
2385 2385 }
2386 2386 } else
2387 2387 repeat |= x;
2388 2388 if (revcount >= capacity) {
2389 2389 PyErr_Format(PyExc_OverflowError,
2390 2390 "bitset size (%d) > capacity (%d)",
2391 2391 revcount, capacity);
2392 2392 goto bail;
2393 2393 }
2394 2394 revs[revcount++] = (int)val;
2395 2395 duplicate:;
2396 2396 }
2397 2397
2398 2398 if (revcount == 0) {
2399 2399 ret = PyList_New(0);
2400 2400 goto done;
2401 2401 }
2402 2402 if (revcount == 1) {
2403 2403 PyObject *obj;
2404 2404 ret = PyList_New(1);
2405 2405 if (ret == NULL)
2406 2406 goto bail;
2407 2407 obj = PyInt_FromLong(revs[0]);
2408 2408 if (obj == NULL)
2409 2409 goto bail;
2410 2410 PyList_SET_ITEM(ret, 0, obj);
2411 2411 goto done;
2412 2412 }
2413 2413
2414 2414 ret = find_gca_candidates(self, revs, revcount);
2415 2415 if (ret == NULL)
2416 2416 goto bail;
2417 2417
2418 2418 done:
2419 2419 PyMem_Free(revs);
2420 2420 return ret;
2421 2421
2422 2422 bail:
2423 2423 PyMem_Free(revs);
2424 2424 Py_XDECREF(ret);
2425 2425 return NULL;
2426 2426 }
2427 2427
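The one-word "cheesy bloom filter" in index_commonancestorsheads only falls back to the O(n) duplicate scan when a new rev hashes onto a bit that is already set, which in the common disjoint-input case never happens. A small Python sketch of that check, under the same single 64-bit word, rev & 0x3f hashing assumption:

# Sketch of the one-word bloom filter used to cheapen duplicate checks.
def dedupe(vals):
    repeat = 0
    out = []
    for val in vals:
        bit = 1 << (val & 0x3F)
        if repeat & bit:
            # possible duplicate: only now pay for the exact scan
            if val in out:
                continue
        else:
            repeat |= bit
        out.append(val)
    return out

# dedupe([3, 67, 3]) -> [3, 67]; 67 collides with 3 on bit 3 but the
# exact scan keeps it, while the second 3 is dropped.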
2428 2428 /*
2429 2429 * Given a (possibly overlapping) set of revs, return the greatest
2430 2430 * common ancestors: those with the longest path to the root.
2431 2431 */
2432 2432 static PyObject *index_ancestors(indexObject *self, PyObject *args)
2433 2433 {
2434 2434 PyObject *ret;
2435 2435 PyObject *gca = index_commonancestorsheads(self, args);
2436 2436 if (gca == NULL)
2437 2437 return NULL;
2438 2438
2439 2439 if (PyList_GET_SIZE(gca) <= 1) {
2440 2440 return gca;
2441 2441 }
2442 2442
2443 2443 ret = find_deepest(self, gca);
2444 2444 Py_DECREF(gca);
2445 2445 return ret;
2446 2446 }
2447 2447
2448 2448 /*
2449 2449 * Invalidate any trie entries introduced by added revs.
2450 2450 */
2451 2451 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
2452 2452 {
2453 2453 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
2454 2454
2455 2455 for (i = start; i < len; i++) {
2456 2456 PyObject *tuple = PyList_GET_ITEM(self->added, i);
2457 2457 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
2458 2458
2459 2459 nt_delete_node(&self->nt, PyBytes_AS_STRING(node));
2460 2460 }
2461 2461
2462 2462 if (start == 0)
2463 2463 Py_CLEAR(self->added);
2464 2464 }
2465 2465
2466 2466 /*
2467 2467 * Delete a numeric range of revs, which must be at the end of the
2468 2468 * index.
2469 2469 */
2470 2470 static int index_slice_del(indexObject *self, PyObject *item)
2471 2471 {
2472 2472 Py_ssize_t start, stop, step, slicelength;
2473 2473 Py_ssize_t length = index_length(self) + 1;
2474 2474 int ret = 0;
2475 2475
2476 2476 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
2477 2477 #ifdef IS_PY3K
2478 2478 if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
2479 2479 &slicelength) < 0)
2480 2480 #else
2481 2481 if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
2482 2482 &step, &slicelength) < 0)
2483 2483 #endif
2484 2484 return -1;
2485 2485
2486 2486 if (slicelength <= 0)
2487 2487 return 0;
2488 2488
2489 2489 if ((step < 0 && start < stop) || (step > 0 && start > stop))
2490 2490 stop = start;
2491 2491
2492 2492 if (step < 0) {
2493 2493 stop = start + 1;
2494 2494 start = stop + step * (slicelength - 1) - 1;
2495 2495 step = -step;
2496 2496 }
2497 2497
2498 2498 if (step != 1) {
2499 2499 PyErr_SetString(PyExc_ValueError,
2500 2500 "revlog index delete requires step size of 1");
2501 2501 return -1;
2502 2502 }
2503 2503
2504 2504 if (stop != length - 1) {
2505 2505 PyErr_SetString(PyExc_IndexError,
2506 2506 "revlog index deletion indices are invalid");
2507 2507 return -1;
2508 2508 }
2509 2509
2510 2510 if (start < self->length) {
2511 2511 if (self->ntinitialized) {
2512 2512 Py_ssize_t i;
2513 2513
2514 2514 for (i = start; i < self->length; i++) {
2515 2515 const char *node = index_node_existing(self, i);
2516 2516 if (node == NULL)
2517 2517 return -1;
2518 2518
2519 2519 nt_delete_node(&self->nt, node);
2520 2520 }
2521 2521 if (self->added)
2522 2522 index_invalidate_added(self, 0);
2523 2523 if (self->ntrev > start)
2524 2524 self->ntrev = (int)start;
2525 } else if (self->added) {
2526 Py_CLEAR(self->added);
2525 2527 }
2528
2526 2529 self->length = start;
2527 2530 if (start < self->raw_length) {
2528 2531 if (self->cache) {
2529 2532 Py_ssize_t i;
2530 2533 for (i = start; i < self->raw_length; i++)
2531 2534 Py_CLEAR(self->cache[i]);
2532 2535 }
2533 2536 self->raw_length = start;
2534 2537 }
2535 2538 goto done;
2536 2539 }
2537 2540
2538 2541 if (self->ntinitialized) {
2539 2542 index_invalidate_added(self, start - self->length);
2540 2543 if (self->ntrev > start)
2541 2544 self->ntrev = (int)start;
2542 2545 }
2543 2546 if (self->added)
2544 2547 ret = PyList_SetSlice(self->added, start - self->length,
2545 2548 PyList_GET_SIZE(self->added), NULL);
2546 2549 done:
2547 2550 Py_CLEAR(self->headrevs);
2548 2551 return ret;
2549 2552 }
2550 2553
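index_slice_del is what backs `del index[i:j]` from Python, and the branch added just above is the point of this change: when entries were appended but the node trie was never built (ntinitialized is 0), the pending self->added list still has to be dropped, otherwise index_length() keeps counting the deleted entries. A minimal sketch mirroring testdelitemwithoutnodetree at the bottom of this diff; `data_non_inlined` is the sample index data defined in that test file:

# Sketch only: append entries and slice-delete into the on-disk part
# without the nodetree ever having been built.
from mercurial.cext import parsers

index, _cache = parsers.parse_index2(data_non_inlined, False)  # 4 revisions
index.append((0, 0, 12, 1, 34, 0, -1, b'\x00\x00\x00\x04' + b'.' * 16))
index.append((0, 0, 12, 1, 34, 1, -1, b'\x00\x00\x00\x05' + b'.' * 16))
del index[1:len(index)]   # slice must reach the end of the index
assert len(index) == 1    # before the fix the appended entries were still counted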
2551 2554 /*
2552 2555 * Supported ops:
2553 2556 *
2554 2557 * slice deletion
2555 2558 * string assignment (extend node->rev mapping)
2556 2559 * string deletion (shrink node->rev mapping)
2557 2560 */
2558 2561 static int index_assign_subscript(indexObject *self, PyObject *item,
2559 2562 PyObject *value)
2560 2563 {
2561 2564 char *node;
2562 2565 long rev;
2563 2566
2564 2567 if (PySlice_Check(item) && value == NULL)
2565 2568 return index_slice_del(self, item);
2566 2569
2567 2570 if (node_check(item, &node) == -1)
2568 2571 return -1;
2569 2572
2570 2573 if (value == NULL)
2571 2574 return self->ntinitialized ? nt_delete_node(&self->nt, node)
2572 2575 : 0;
2573 2576 rev = PyInt_AsLong(value);
2574 2577 if (rev > INT_MAX || rev < 0) {
2575 2578 if (!PyErr_Occurred())
2576 2579 PyErr_SetString(PyExc_ValueError, "rev out of range");
2577 2580 return -1;
2578 2581 }
2579 2582
2580 2583 if (index_init_nt(self) == -1)
2581 2584 return -1;
2582 2585 return nt_insert(&self->nt, node, (int)rev);
2583 2586 }
2584 2587
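index_assign_subscript covers the remaining mapping-protocol writes: assigning a rev to a 20-byte node key extends the node->rev trie, deleting a node key shrinks it, and a slice delete is routed to index_slice_del above. A hedged sketch of the Python-side view, again reusing `data_non_inlined` from the test file below:

# Illustrative sketch of the supported write operations.
from mercurial.cext import parsers

index, _cache = parsers.parse_index2(data_non_inlined, False)
node = index[0][7]           # 20-byte binary hash of revision 0
index[node] = 0              # string assignment extends the node->rev mapping
assert index.get_rev(node) == 0
del index[node]              # string deletion shrinks it again
del index[1:len(index)]      # slice deletion must reach the end of the index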
2585 2588 /*
2586 2589 * Find all RevlogNG entries in an index that has inline data. Update
2587 2590 * the optional "offsets" table with those entries.
2588 2591 */
2589 2592 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
2590 2593 {
2591 2594 const char *data = (const char *)self->buf.buf;
2592 2595 Py_ssize_t pos = 0;
2593 2596 Py_ssize_t end = self->buf.len;
2594 2597 long incr = v1_hdrsize;
2595 2598 Py_ssize_t len = 0;
2596 2599
2597 2600 while (pos + v1_hdrsize <= end && pos >= 0) {
2598 2601 uint32_t comp_len;
2599 2602 /* 3rd element of header is length of compressed inline data */
2600 2603 comp_len = getbe32(data + pos + 8);
2601 2604 incr = v1_hdrsize + comp_len;
2602 2605 if (offsets)
2603 2606 offsets[len] = data + pos;
2604 2607 len++;
2605 2608 pos += incr;
2606 2609 }
2607 2610
2608 2611 if (pos != end) {
2609 2612 if (!PyErr_Occurred())
2610 2613 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2611 2614 return -1;
2612 2615 }
2613 2616
2614 2617 return len;
2615 2618 }
2616 2619
2617 2620 static int index_init(indexObject *self, PyObject *args)
2618 2621 {
2619 2622 PyObject *data_obj, *inlined_obj;
2620 2623 Py_ssize_t size;
2621 2624
2622 2625 /* Initialize before argument-checking to avoid index_dealloc() crash.
2623 2626 */
2624 2627 self->raw_length = 0;
2625 2628 self->added = NULL;
2626 2629 self->cache = NULL;
2627 2630 self->data = NULL;
2628 2631 memset(&self->buf, 0, sizeof(self->buf));
2629 2632 self->headrevs = NULL;
2630 2633 self->filteredrevs = Py_None;
2631 2634 Py_INCREF(Py_None);
2632 2635 self->ntinitialized = 0;
2633 2636 self->offsets = NULL;
2634 2637
2635 2638 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
2636 2639 return -1;
2637 2640 if (!PyObject_CheckBuffer(data_obj)) {
2638 2641 PyErr_SetString(PyExc_TypeError,
2639 2642 "data does not support buffer interface");
2640 2643 return -1;
2641 2644 }
2642 2645
2643 2646 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
2644 2647 return -1;
2645 2648 size = self->buf.len;
2646 2649
2647 2650 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
2648 2651 self->data = data_obj;
2649 2652
2650 2653 self->ntlookups = self->ntmisses = 0;
2651 2654 self->ntrev = -1;
2652 2655 Py_INCREF(self->data);
2653 2656
2654 2657 if (self->inlined) {
2655 2658 Py_ssize_t len = inline_scan(self, NULL);
2656 2659 if (len == -1)
2657 2660 goto bail;
2658 2661 self->raw_length = len;
2659 2662 self->length = len;
2660 2663 } else {
2661 2664 if (size % v1_hdrsize) {
2662 2665 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2663 2666 goto bail;
2664 2667 }
2665 2668 self->raw_length = size / v1_hdrsize;
2666 2669 self->length = self->raw_length;
2667 2670 }
2668 2671
2669 2672 return 0;
2670 2673 bail:
2671 2674 return -1;
2672 2675 }
2673 2676
2674 2677 static PyObject *index_nodemap(indexObject *self)
2675 2678 {
2676 2679 Py_INCREF(self);
2677 2680 return (PyObject *)self;
2678 2681 }
2679 2682
2680 2683 static void _index_clearcaches(indexObject *self)
2681 2684 {
2682 2685 if (self->cache) {
2683 2686 Py_ssize_t i;
2684 2687
2685 2688 for (i = 0; i < self->raw_length; i++)
2686 2689 Py_CLEAR(self->cache[i]);
2687 2690 free(self->cache);
2688 2691 self->cache = NULL;
2689 2692 }
2690 2693 if (self->offsets) {
2691 2694 PyMem_Free((void *)self->offsets);
2692 2695 self->offsets = NULL;
2693 2696 }
2694 2697 if (self->ntinitialized) {
2695 2698 nt_dealloc(&self->nt);
2696 2699 }
2697 2700 self->ntinitialized = 0;
2698 2701 Py_CLEAR(self->headrevs);
2699 2702 }
2700 2703
2701 2704 static PyObject *index_clearcaches(indexObject *self)
2702 2705 {
2703 2706 _index_clearcaches(self);
2704 2707 self->ntrev = -1;
2705 2708 self->ntlookups = self->ntmisses = 0;
2706 2709 Py_RETURN_NONE;
2707 2710 }
2708 2711
2709 2712 static void index_dealloc(indexObject *self)
2710 2713 {
2711 2714 _index_clearcaches(self);
2712 2715 Py_XDECREF(self->filteredrevs);
2713 2716 if (self->buf.buf) {
2714 2717 PyBuffer_Release(&self->buf);
2715 2718 memset(&self->buf, 0, sizeof(self->buf));
2716 2719 }
2717 2720 Py_XDECREF(self->data);
2718 2721 Py_XDECREF(self->added);
2719 2722 PyObject_Del(self);
2720 2723 }
2721 2724
2722 2725 static PySequenceMethods index_sequence_methods = {
2723 2726 (lenfunc)index_length, /* sq_length */
2724 2727 0, /* sq_concat */
2725 2728 0, /* sq_repeat */
2726 2729 (ssizeargfunc)index_get, /* sq_item */
2727 2730 0, /* sq_slice */
2728 2731 0, /* sq_ass_item */
2729 2732 0, /* sq_ass_slice */
2730 2733 (objobjproc)index_contains, /* sq_contains */
2731 2734 };
2732 2735
2733 2736 static PyMappingMethods index_mapping_methods = {
2734 2737 (lenfunc)index_length, /* mp_length */
2735 2738 (binaryfunc)index_getitem, /* mp_subscript */
2736 2739 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2737 2740 };
2738 2741
2739 2742 static PyMethodDef index_methods[] = {
2740 2743 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2741 2744 "return the gca set of the given revs"},
2742 2745 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2743 2746 METH_VARARGS,
2744 2747 "return the heads of the common ancestors of the given revs"},
2745 2748 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2746 2749 "clear the index caches"},
2747 2750 {"get", (PyCFunction)index_m_get, METH_VARARGS, "get an index entry"},
2748 2751 {"get_rev", (PyCFunction)index_m_get, METH_VARARGS,
2749 2752 "return `rev` associated with a node or None"},
2750 2753 {"has_node", (PyCFunction)index_m_has_node, METH_O,
2751 2754 "return True if the node exist in the index"},
2752 2755 {"rev", (PyCFunction)index_m_rev, METH_O,
2753 2756 "return `rev` associated with a node or raise RevlogError"},
2754 2757 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets, METH_VARARGS,
2755 2758 "compute phases"},
2756 2759 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2757 2760 "reachableroots"},
2758 2761 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2759 2762 "get head revisions"}, /* Can do filtering since 3.2 */
2760 2763 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2761 2764 "get filtered head revisions"}, /* Can always do filtering */
2762 2765 {"issnapshot", (PyCFunction)index_issnapshot, METH_O,
2763 2766 "True if the object is a snapshot"},
2764 2767 {"findsnapshots", (PyCFunction)index_findsnapshots, METH_VARARGS,
2765 2768 "Gather snapshot data in a cache dict"},
2766 2769 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2767 2770 "determine revisions with deltas to reconstruct fulltext"},
2768 2771 {"slicechunktodensity", (PyCFunction)index_slicechunktodensity,
2769 2772 METH_VARARGS, "determine revisions with deltas to reconstruct fulltext"},
2770 2773 {"append", (PyCFunction)index_append, METH_O, "append an index entry"},
2771 2774 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2772 2775 "match a potentially ambiguous node ID"},
2773 2776 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2774 2777 "find length of shortest hex nodeid of a binary ID"},
2775 2778 {"stats", (PyCFunction)index_stats, METH_NOARGS, "stats for the index"},
2776 2779 {NULL} /* Sentinel */
2777 2780 };
2778 2781
2779 2782 static PyGetSetDef index_getset[] = {
2780 2783 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2781 2784 {NULL} /* Sentinel */
2782 2785 };
2783 2786
2784 2787 PyTypeObject HgRevlogIndex_Type = {
2785 2788 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2786 2789 "parsers.index", /* tp_name */
2787 2790 sizeof(indexObject), /* tp_basicsize */
2788 2791 0, /* tp_itemsize */
2789 2792 (destructor)index_dealloc, /* tp_dealloc */
2790 2793 0, /* tp_print */
2791 2794 0, /* tp_getattr */
2792 2795 0, /* tp_setattr */
2793 2796 0, /* tp_compare */
2794 2797 0, /* tp_repr */
2795 2798 0, /* tp_as_number */
2796 2799 &index_sequence_methods, /* tp_as_sequence */
2797 2800 &index_mapping_methods, /* tp_as_mapping */
2798 2801 0, /* tp_hash */
2799 2802 0, /* tp_call */
2800 2803 0, /* tp_str */
2801 2804 0, /* tp_getattro */
2802 2805 0, /* tp_setattro */
2803 2806 0, /* tp_as_buffer */
2804 2807 Py_TPFLAGS_DEFAULT, /* tp_flags */
2805 2808 "revlog index", /* tp_doc */
2806 2809 0, /* tp_traverse */
2807 2810 0, /* tp_clear */
2808 2811 0, /* tp_richcompare */
2809 2812 0, /* tp_weaklistoffset */
2810 2813 0, /* tp_iter */
2811 2814 0, /* tp_iternext */
2812 2815 index_methods, /* tp_methods */
2813 2816 0, /* tp_members */
2814 2817 index_getset, /* tp_getset */
2815 2818 0, /* tp_base */
2816 2819 0, /* tp_dict */
2817 2820 0, /* tp_descr_get */
2818 2821 0, /* tp_descr_set */
2819 2822 0, /* tp_dictoffset */
2820 2823 (initproc)index_init, /* tp_init */
2821 2824 0, /* tp_alloc */
2822 2825 };
2823 2826
2824 2827 /*
2825 2828 * returns a tuple of the form (index, cache) with elements as
2826 2829 * follows:
2827 2830 *
2828 2831 * index: an index object that lazily parses RevlogNG records
2829 2832 * cache: if data is inlined, a tuple (0, index_file_content), else None
2830 2833 * index_file_content could be a string, or a buffer
2831 2834 *
2832 2835 * added complications are for backwards compatibility
2833 2836 */
2834 2837 PyObject *parse_index2(PyObject *self, PyObject *args)
2835 2838 {
2836 2839 PyObject *tuple = NULL, *cache = NULL;
2837 2840 indexObject *idx;
2838 2841 int ret;
2839 2842
2840 2843 idx = PyObject_New(indexObject, &HgRevlogIndex_Type);
2841 2844 if (idx == NULL)
2842 2845 goto bail;
2843 2846
2844 2847 ret = index_init(idx, args);
2845 2848 if (ret == -1)
2846 2849 goto bail;
2847 2850
2848 2851 if (idx->inlined) {
2849 2852 cache = Py_BuildValue("iO", 0, idx->data);
2850 2853 if (cache == NULL)
2851 2854 goto bail;
2852 2855 } else {
2853 2856 cache = Py_None;
2854 2857 Py_INCREF(cache);
2855 2858 }
2856 2859
2857 2860 tuple = Py_BuildValue("NN", idx, cache);
2858 2861 if (!tuple)
2859 2862 goto bail;
2860 2863 return tuple;
2861 2864
2862 2865 bail:
2863 2866 Py_XDECREF(idx);
2864 2867 Py_XDECREF(cache);
2865 2868 Py_XDECREF(tuple);
2866 2869 return NULL;
2867 2870 }
2868 2871
2869 2872 #ifdef WITH_RUST
2870 2873
2871 2874 /* rustlazyancestors: iteration over ancestors implemented in Rust
2872 2875 *
2873 2876 * This class holds a reference to an index and to the Rust iterator.
2874 2877 */
2875 2878 typedef struct rustlazyancestorsObjectStruct rustlazyancestorsObject;
2876 2879
2877 2880 struct rustlazyancestorsObjectStruct {
2878 2881 PyObject_HEAD
2879 2882 /* Type-specific fields go here. */
2880 2883 indexObject *index; /* Ref kept to avoid GC'ing the index */
2881 2884 void *iter; /* Rust iterator */
2882 2885 };
2883 2886
2884 2887 /* FFI exposed from Rust code */
2885 2888 rustlazyancestorsObject *rustlazyancestors_init(indexObject *index,
2886 2889 /* initrevs vector */
2887 2890 Py_ssize_t initrevslen,
2888 2891 long *initrevs, long stoprev,
2889 2892 int inclusive);
2890 2893 void rustlazyancestors_drop(rustlazyancestorsObject *self);
2891 2894 int rustlazyancestors_next(rustlazyancestorsObject *self);
2892 2895 int rustlazyancestors_contains(rustlazyancestorsObject *self, long rev);
2893 2896
2894 2897 /* CPython instance methods */
2895 2898 static int rustla_init(rustlazyancestorsObject *self, PyObject *args)
2896 2899 {
2897 2900 PyObject *initrevsarg = NULL;
2898 2901 PyObject *inclusivearg = NULL;
2899 2902 long stoprev = 0;
2900 2903 long *initrevs = NULL;
2901 2904 int inclusive = 0;
2902 2905 Py_ssize_t i;
2903 2906
2904 2907 indexObject *index;
2905 2908 if (!PyArg_ParseTuple(args, "O!O!lO!", &HgRevlogIndex_Type, &index,
2906 2909 &PyList_Type, &initrevsarg, &stoprev,
2907 2910 &PyBool_Type, &inclusivearg))
2908 2911 return -1;
2909 2912
2910 2913 Py_INCREF(index);
2911 2914 self->index = index;
2912 2915
2913 2916 if (inclusivearg == Py_True)
2914 2917 inclusive = 1;
2915 2918
2916 2919 Py_ssize_t linit = PyList_GET_SIZE(initrevsarg);
2917 2920
2918 2921 initrevs = (long *)calloc(linit, sizeof(long));
2919 2922
2920 2923 if (initrevs == NULL) {
2921 2924 PyErr_NoMemory();
2922 2925 goto bail;
2923 2926 }
2924 2927
2925 2928 for (i = 0; i < linit; i++) {
2926 2929 initrevs[i] = PyInt_AsLong(PyList_GET_ITEM(initrevsarg, i));
2927 2930 }
2928 2931 if (PyErr_Occurred())
2929 2932 goto bail;
2930 2933
2931 2934 self->iter =
2932 2935 rustlazyancestors_init(index, linit, initrevs, stoprev, inclusive);
2933 2936 if (self->iter == NULL) {
2934 2937 /* if this is because of GraphError::ParentOutOfRange
2935 2938 * HgRevlogIndex_GetParents() has already set the proper
2936 2939 * exception */
2937 2940 goto bail;
2938 2941 }
2939 2942
2940 2943 free(initrevs);
2941 2944 return 0;
2942 2945
2943 2946 bail:
2944 2947 free(initrevs);
2945 2948 return -1;
2946 2949 };
2947 2950
2948 2951 static void rustla_dealloc(rustlazyancestorsObject *self)
2949 2952 {
2950 2953 Py_XDECREF(self->index);
2951 2954 if (self->iter != NULL) { /* can happen if rustla_init failed */
2952 2955 rustlazyancestors_drop(self->iter);
2953 2956 }
2954 2957 PyObject_Del(self);
2955 2958 }
2956 2959
2957 2960 static PyObject *rustla_next(rustlazyancestorsObject *self)
2958 2961 {
2959 2962 int res = rustlazyancestors_next(self->iter);
2960 2963 if (res == -1) {
2961 2964 /* Setting an explicit exception here seems unnecessary, as
2962 2965 * examples from the Python source code (Objects/rangeobject.c
2963 2966 * and Modules/_io/stringio.c) demonstrate.
2964 2967 */
2965 2968 return NULL;
2966 2969 }
2967 2970 return PyInt_FromLong(res);
2968 2971 }
2969 2972
2970 2973 static int rustla_contains(rustlazyancestorsObject *self, PyObject *rev)
2971 2974 {
2972 2975 long lrev;
2973 2976 if (!pylong_to_long(rev, &lrev)) {
2974 2977 PyErr_Clear();
2975 2978 return 0;
2976 2979 }
2977 2980 return rustlazyancestors_contains(self->iter, lrev);
2978 2981 }
2979 2982
2980 2983 static PySequenceMethods rustla_sequence_methods = {
2981 2984 0, /* sq_length */
2982 2985 0, /* sq_concat */
2983 2986 0, /* sq_repeat */
2984 2987 0, /* sq_item */
2985 2988 0, /* sq_slice */
2986 2989 0, /* sq_ass_item */
2987 2990 0, /* sq_ass_slice */
2988 2991 (objobjproc)rustla_contains, /* sq_contains */
2989 2992 };
2990 2993
2991 2994 static PyTypeObject rustlazyancestorsType = {
2992 2995 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2993 2996 "parsers.rustlazyancestors", /* tp_name */
2994 2997 sizeof(rustlazyancestorsObject), /* tp_basicsize */
2995 2998 0, /* tp_itemsize */
2996 2999 (destructor)rustla_dealloc, /* tp_dealloc */
2997 3000 0, /* tp_print */
2998 3001 0, /* tp_getattr */
2999 3002 0, /* tp_setattr */
3000 3003 0, /* tp_compare */
3001 3004 0, /* tp_repr */
3002 3005 0, /* tp_as_number */
3003 3006 &rustla_sequence_methods, /* tp_as_sequence */
3004 3007 0, /* tp_as_mapping */
3005 3008 0, /* tp_hash */
3006 3009 0, /* tp_call */
3007 3010 0, /* tp_str */
3008 3011 0, /* tp_getattro */
3009 3012 0, /* tp_setattro */
3010 3013 0, /* tp_as_buffer */
3011 3014 Py_TPFLAGS_DEFAULT, /* tp_flags */
3012 3015 "Iterator over ancestors, implemented in Rust", /* tp_doc */
3013 3016 0, /* tp_traverse */
3014 3017 0, /* tp_clear */
3015 3018 0, /* tp_richcompare */
3016 3019 0, /* tp_weaklistoffset */
3017 3020 0, /* tp_iter */
3018 3021 (iternextfunc)rustla_next, /* tp_iternext */
3019 3022 0, /* tp_methods */
3020 3023 0, /* tp_members */
3021 3024 0, /* tp_getset */
3022 3025 0, /* tp_base */
3023 3026 0, /* tp_dict */
3024 3027 0, /* tp_descr_get */
3025 3028 0, /* tp_descr_set */
3026 3029 0, /* tp_dictoffset */
3027 3030 (initproc)rustla_init, /* tp_init */
3028 3031 0, /* tp_alloc */
3029 3032 };
3030 3033 #endif /* WITH_RUST */
3031 3034
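When the extension is compiled WITH_RUST, the rustlazyancestors type above gives Python a lazy ancestor iterator backed by the Rust implementation; construction takes the index, a list of starting revs, a stop rev, and an inclusive flag, as rustla_init parses them. A hypothetical usage sketch, assuming a parsers module built with Rust support and reusing `data_non_inlined` from the test file below:

# Hypothetical sketch; requires a parsers build WITH_RUST.
from mercurial.cext import parsers

index, _cache = parsers.parse_index2(data_non_inlined, False)
lazy = parsers.rustlazyancestors(index, [3], 0, False)
print(next(lazy))   # ancestor revs come out one at a time
print(2 in lazy)    # membership goes through rustlazyancestors_contains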
3032 3035 void revlog_module_init(PyObject *mod)
3033 3036 {
3034 3037 PyObject *caps = NULL;
3035 3038 HgRevlogIndex_Type.tp_new = PyType_GenericNew;
3036 3039 if (PyType_Ready(&HgRevlogIndex_Type) < 0)
3037 3040 return;
3038 3041 Py_INCREF(&HgRevlogIndex_Type);
3039 3042 PyModule_AddObject(mod, "index", (PyObject *)&HgRevlogIndex_Type);
3040 3043
3041 3044 nodetreeType.tp_new = PyType_GenericNew;
3042 3045 if (PyType_Ready(&nodetreeType) < 0)
3043 3046 return;
3044 3047 Py_INCREF(&nodetreeType);
3045 3048 PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);
3046 3049
3047 3050 if (!nullentry) {
3048 3051 nullentry =
3049 3052 Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1,
3050 3053 -1, -1, -1, nullid, (Py_ssize_t)20);
3051 3054 }
3052 3055 if (nullentry)
3053 3056 PyObject_GC_UnTrack(nullentry);
3054 3057
3055 3058 caps = PyCapsule_New(HgRevlogIndex_GetParents,
3056 3059 "mercurial.cext.parsers.index_get_parents_CAPI",
3057 3060 NULL);
3058 3061 if (caps != NULL)
3059 3062 PyModule_AddObject(mod, "index_get_parents_CAPI", caps);
3060 3063
3061 3064 #ifdef WITH_RUST
3062 3065 rustlazyancestorsType.tp_new = PyType_GenericNew;
3063 3066 if (PyType_Ready(&rustlazyancestorsType) < 0)
3064 3067 return;
3065 3068 Py_INCREF(&rustlazyancestorsType);
3066 3069 PyModule_AddObject(mod, "rustlazyancestors",
3067 3070 (PyObject *)&rustlazyancestorsType);
3068 3071 #endif
3069 3072 }
@@ -1,254 +1,280 b''
1 1 """This unit test primarily tests parsers.parse_index2().
2 2
3 3 It also checks certain aspects of the parsers module as a whole.
4 4 """
5 5
6 6 from __future__ import absolute_import, print_function
7 7
8 8 import struct
9 9 import subprocess
10 10 import sys
11 11 import unittest
12 12
13 13 from mercurial.node import (
14 14 nullid,
15 15 nullrev,
16 16 )
17 17 from mercurial import (
18 18 node as nodemod,
19 19 policy,
20 20 pycompat,
21 21 )
22 22
23 23 parsers = policy.importmod('parsers')
24 24
25 25 # original python implementation
26 26 def gettype(q):
27 27 return int(q & 0xFFFF)
28 28
29 29
30 30 def offset_type(offset, type):
31 31 return int(int(offset) << 16 | type)
32 32
33 33
34 34 indexformatng = ">Qiiiiii20s12x"
35 35
36 36
37 37 def py_parseindex(data, inline):
38 38 s = 64
39 39 cache = None
40 40 index = []
41 41 nodemap = {nullid: nullrev}
42 42 n = off = 0
43 43
44 44 l = len(data) - s
45 45 append = index.append
46 46 if inline:
47 47 cache = (0, data)
48 48 while off <= l:
49 49 e = struct.unpack(indexformatng, data[off : off + s])
50 50 nodemap[e[7]] = n
51 51 append(e)
52 52 n += 1
53 53 if e[1] < 0:
54 54 break
55 55 off += e[1] + s
56 56 else:
57 57 while off <= l:
58 58 e = struct.unpack(indexformatng, data[off : off + s])
59 59 nodemap[e[7]] = n
60 60 append(e)
61 61 n += 1
62 62 off += s
63 63
64 64 e = list(index[0])
65 65 type = gettype(e[0])
66 66 e[0] = offset_type(0, type)
67 67 index[0] = tuple(e)
68 68
69 69 return index, cache
70 70
71 71
72 72 data_inlined = (
73 73 b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c'
74 74 b'\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff'
75 75 b'\xff\xff\xff\xff\xff\xebG\x97\xb7\x1fB\x04\xcf\x13V\x81\tw\x1b'
76 76 b'w\xdduR\xda\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
77 77 b'x\x9c\x9d\x93?O\xc30\x10\xc5\xf7|\x8a\xdb\x9a\xa8m\x06\xd8*\x95'
78 78 b'\x81B\xa1\xa2\xa2R\xcb\x86Pd\x9a\x0b5$vd_\x04\xfd\xf6\x9c\xff@'
79 79 b'\x11!\x0b\xd9\xec\xf7\xbbw\xe7gG6\xad6\x04\xdaN\xc0\x92\xa0$)'
80 80 b'\xb1\x82\xa2\xd1%\x16\xa4\x8b7\xa9\xca\xd4-\xb2Y\x02\xfc\xc9'
81 81 b'\xcaS\xf9\xaeX\xed\xb6\xd77Q\x02\x83\xd4\x19\xf5--Y\xea\xe1W'
82 82 b'\xab\xed\x10\xceR\x0f_\xdf\xdf\r\xe1,\xf5\xf0\xcb\xf5 \xceR\x0f'
83 83 b'_\xdc\x0e\x0e\xc3R\x0f_\xae\x96\x9b!\x9e\xa5\x1e\xbf\xdb,\x06'
84 84 b'\xc7q\x9a/\x88\x82\xc3B\xea\xb5\xb4TJ\x93\xb6\x82\x0e\xe16\xe6'
85 85 b'KQ\xdb\xaf\xecG\xa3\xd1 \x01\xd3\x0b_^\xe8\xaa\xa0\xae\xad\xd1'
86 86 b'&\xbef\x1bz\x08\xb0|\xc9Xz\x06\xf6Z\x91\x90J\xaa\x17\x90\xaa'
87 87 b'\xd2\xa6\x11$5C\xcf\xba#\xa0\x03\x02*2\x92-\xfc\xb1\x94\xdf\xe2'
88 88 b'\xae\xb8\'m\x8ey0^\x85\xd3\x82\xb4\xf0`:\x9c\x00\x8a\xfd\x01'
89 89 b'\xb0\xc6\x86\x8b\xdd\xae\x80\xf3\xa9\x9fd\x16\n\x00R%\x1a\x06'
90 90 b'\xe9\xd8b\x98\x1d\xf4\xf3+\x9bf\x01\xd8p\x1b\xf3.\xed\x9f^g\xc3'
91 91 b'^\xd9W81T\xdb\xd5\x04sx|\xf2\xeb\xd6`%?x\xed"\x831\xbf\xf3\xdc'
92 92 b'b\xeb%gaY\xe1\xad\x9f\xb9f\'1w\xa9\xa5a\x83s\x82J\xb98\xbc4\x8b'
93 93 b'\x83\x00\x9f$z\xb8#\xa5\xb1\xdf\x98\xd9\xec\x1b\x89O\xe3Ts\x9a4'
94 94 b'\x17m\x8b\xfc\x8f\xa5\x95\x9a\xfc\xfa\xed,\xe5|\xa1\xfe\x15\xb9'
95 95 b'\xbc\xb2\x93\x1f\xf2\x95\xff\xdf,\x1a\xc5\xe7\x17*\x93Oz:>\x0e'
96 96 )
97 97
98 98 data_non_inlined = (
99 99 b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19'
100 100 b'\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff'
101 101 b'\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d'
102 102 b'\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00'
103 103 b'\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00'
104 104 b'\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff'
105 105 b'\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh'
106 106 b'\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
107 107 b'\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00'
108 108 b'\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n'
109 109 b'\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00'
110 110 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F'
111 111 b'\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01'
112 112 b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
113 113 b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
114 114 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 115 )
116 116
117 117
118 118 def parse_index2(data, inline):
119 119 index, chunkcache = parsers.parse_index2(data, inline)
120 120 return list(index), chunkcache
121 121
122 122
123 123 def importparsers(hexversion):
124 124 """Import mercurial.parsers with the given sys.hexversion."""
125 125 # The file parsers.c inspects sys.hexversion to determine the version
126 126 # of the currently-running Python interpreter, so we monkey-patch
127 127 # sys.hexversion to simulate using different versions.
128 128 code = (
129 129 "import sys; sys.hexversion=%s; "
130 130 "import mercurial.cext.parsers" % hexversion
131 131 )
132 132 cmd = "python -c \"%s\"" % code
133 133 # We need to do these tests inside a subprocess because parsers.c's
134 134 # version-checking code happens inside the module init function, and
135 135 # when using reload() to reimport an extension module, "The init function
136 136 # of extension modules is not called a second time"
137 137 # (from http://docs.python.org/2/library/functions.html?#reload).
138 138 p = subprocess.Popen(
139 139 cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
140 140 )
141 141 return p.communicate() # returns stdout, stderr
142 142
143 143
144 144 def hexfailmsg(testnumber, hexversion, stdout, expected):
145 145 try:
146 146 hexstring = hex(hexversion)
147 147 except TypeError:
148 148 hexstring = None
149 149 return (
150 150 "FAILED: version test #%s with Python %s and patched "
151 151 "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n"
152 152 % (
153 153 testnumber,
154 154 sys.version_info,
155 155 hexversion,
156 156 hexstring,
157 157 expected,
158 158 stdout,
159 159 )
160 160 )
161 161
162 162
163 163 def makehex(major, minor, micro):
164 164 return int("%x%02x%02x00" % (major, minor, micro), 16)
165 165
166 166
167 167 class parseindex2tests(unittest.TestCase):
168 168 def assertversionokay(self, testnumber, hexversion):
169 169 stdout, stderr = importparsers(hexversion)
170 170 self.assertFalse(
171 171 stdout, hexfailmsg(testnumber, hexversion, stdout, 'no stdout')
172 172 )
173 173
174 174 def assertversionfail(self, testnumber, hexversion):
175 175 stdout, stderr = importparsers(hexversion)
176 176 # We include versionerrortext to distinguish from other ImportErrors.
177 177 errtext = b"ImportError: %s" % pycompat.sysbytes(
178 178 parsers.versionerrortext
179 179 )
180 180 self.assertIn(
181 181 errtext,
182 182 stdout,
183 183 hexfailmsg(
184 184 testnumber,
185 185 hexversion,
186 186 stdout,
187 187 expected="stdout to contain %r" % errtext,
188 188 ),
189 189 )
190 190
191 191 def testversiondetection(self):
192 192 """Check the version-detection logic when importing parsers."""
193 193 # Only test the version-detection logic if it is present.
194 194 try:
195 195 parsers.versionerrortext
196 196 except AttributeError:
197 197 return
198 198 info = sys.version_info
199 199 major, minor, micro = info[0], info[1], info[2]
200 200 # Test same major-minor versions.
201 201 self.assertversionokay(1, makehex(major, minor, micro))
202 202 self.assertversionokay(2, makehex(major, minor, micro + 1))
203 203 # Test different major-minor versions.
204 204 self.assertversionfail(3, makehex(major + 1, minor, micro))
205 205 self.assertversionfail(4, makehex(major, minor + 1, micro))
206 206 self.assertversionfail(5, "'foo'")
207 207
208 208 def testbadargs(self):
209 209 # Check that parse_index2() raises TypeError on bad arguments.
210 210 with self.assertRaises(TypeError):
211 211 parse_index2(0, True)
212 212
213 213 def testparseindexfile(self):
214 214 # Check parsers.parse_index2() on an index file against the
215 215 # original Python implementation of parseindex, both with and
216 216 # without inlined data.
217 217
218 218 want = py_parseindex(data_inlined, True)
219 219 got = parse_index2(data_inlined, True)
220 220 self.assertEqual(want, got) # inline data
221 221
222 222 want = py_parseindex(data_non_inlined, False)
223 223 got = parse_index2(data_non_inlined, False)
224 224 self.assertEqual(want, got) # no inline data
225 225
226 226 ix = parsers.parse_index2(data_inlined, True)[0]
227 227 for i, r in enumerate(ix):
228 228 if r[7] == nullid:
229 229 i = -1
230 230 try:
231 231 self.assertEqual(
232 232 ix[r[7]],
233 233 i,
234 234 'Reverse lookup inconsistent for %r' % nodemod.hex(r[7]),
235 235 )
236 236 except TypeError:
237 237 # pure version doesn't support this
238 238 break
239 239
240 240 def testminusone(self):
241 241 want = (0, 0, 0, -1, -1, -1, -1, nullid)
242 242 index, junk = parsers.parse_index2(data_inlined, True)
243 243 got = index[-1]
244 244 self.assertEqual(want, got) # inline data
245 245
246 246 index, junk = parsers.parse_index2(data_non_inlined, False)
247 247 got = index[-1]
248 248 self.assertEqual(want, got) # no inline data
249 249
250 def testdelitemwithoutnodetree(self):
251 index, _junk = parsers.parse_index2(data_non_inlined, False)
252
253 def hexrev(rev):
254 if rev == nullrev:
255 return b'\xff\xff\xff\xff'
256 else:
257 return nodemod.bin('%08x' % rev)
258
259 def appendrev(p1, p2=nullrev):
260 # the nodes won't matter for this test; just make sure
261 # they don't collide. The other fields don't matter either.
262 node = hexrev(p1) + hexrev(p2) + b'.' * 12
263 index.append((0, 0, 12, 1, 34, p1, p2, node))
264
265 appendrev(4)
266 appendrev(5)
267 appendrev(6)
268 self.assertEqual(len(index), 7)
269
270 del index[1:7]
271
272 # assertions that failed before correction
273 self.assertEqual(len(index), 1) # was 4
274 self.assertEqual(index.headrevs(), [0]) # gave ValueError
275
250 276
251 277 if __name__ == '__main__':
252 278 import silenttestrunner
253 279
254 280 silenttestrunner.main(__name__)