sparse-revlog: add a `index_get_start` function in C...
Boris Feld
r40739:d5b300ec default
@@ -1,2517 +1,2551 @@
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
20 20 #ifdef IS_PY3K
21 21 /* The mapping of Python types is meant to be temporary to get Python
22 22 * 3 to compile. We should remove this once Python 3 is fully
23 23 * supported and proper types are used in the extensions themselves. */
24 24 #define PyInt_Check PyLong_Check
25 25 #define PyInt_FromLong PyLong_FromLong
26 26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 27 #define PyInt_AsLong PyLong_AsLong
28 28 #endif
29 29
30 30 typedef struct indexObjectStruct indexObject;
31 31
32 32 typedef struct {
33 33 int children[16];
34 34 } nodetreenode;
35 35
36 36 /*
37 37 * A base-16 trie for fast node->rev mapping.
38 38 *
39 39 * Positive value is index of the next node in the trie
40 40 * Negative value is a leaf: -(rev + 2)
41 41 * Zero is empty
42 42 */
43 43 typedef struct {
44 44 indexObject *index;
45 45 nodetreenode *nodes;
46 46 unsigned length; /* # nodes in use */
47 47 unsigned capacity; /* # nodes allocated */
48 48 int depth; /* maximum depth of tree */
49 49 int splits; /* # splits performed */
50 50 } nodetree;
51 51
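/*
 * Editor's sketch, not part of this changeset: decoding a nodetree child
 * value under the encoding described above. A negative child is a leaf
 * storing -(rev + 2), so rev 0 is stored as -2 and the nullid rev (-1) is
 * stored as -1; a zero child always means "empty".
 */
static inline int nt_leaf_to_rev_sketch(int v)
{
        /* caller must already have checked v < 0, i.e. that this is a leaf */
        return -(v + 2);
}
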
52 52 typedef struct {
53 53 PyObject_HEAD /* ; */
54 54 nodetree nt;
55 55 } nodetreeObject;
56 56
57 57 /*
58 58 * This class has two behaviors.
59 59 *
60 60 * When used in a list-like way (with integer keys), we decode an
61 61 * entry in a RevlogNG index file on demand. Our last entry is a
62 62 * sentinel, always a nullid. We have limited support for
63 63 * integer-keyed insert and delete, only at elements right before the
64 64 * sentinel.
65 65 *
66 66 * With string keys, we lazily perform a reverse mapping from node to
67 67 * rev, using a base-16 trie.
68 68 */
69 69 struct indexObjectStruct {
70 70 PyObject_HEAD
71 71 /* Type-specific fields go here. */
72 72 PyObject *data; /* raw bytes of index */
73 73 Py_buffer buf; /* buffer of data */
74 74 PyObject **cache; /* cached tuples */
75 75 const char **offsets; /* populated on demand */
76 76 Py_ssize_t raw_length; /* original number of elements */
77 77 Py_ssize_t length; /* current number of elements */
78 78 PyObject *added; /* populated on demand */
79 79 PyObject *headrevs; /* cache, invalidated on changes */
80 80 PyObject *filteredrevs; /* filtered revs set */
81 81 nodetree nt; /* base-16 trie */
82 82 int ntinitialized; /* 0 or 1 */
83 83 int ntrev; /* last rev scanned */
84 84 int ntlookups; /* # lookups */
85 85 int ntmisses; /* # lookups that miss the cache */
86 86 int inlined;
87 87 };
88 88
89 89 static Py_ssize_t index_length(const indexObject *self)
90 90 {
91 91 if (self->added == NULL)
92 92 return self->length;
93 93 return self->length + PyList_GET_SIZE(self->added);
94 94 }
95 95
96 96 static PyObject *nullentry = NULL;
97 97 static const char nullid[20] = {0};
98 98
99 99 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
100 100
101 101 #if LONG_MAX == 0x7fffffffL
102 102 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
103 103 #else
104 104 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
105 105 #endif
106 106
107 107 /* A RevlogNG v1 index entry is 64 bytes long. */
108 108 static const long v1_hdrsize = 64;
109 109
110 110 static void raise_revlog_error(void)
111 111 {
112 112 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
113 113
114 114 mod = PyImport_ImportModule("mercurial.error");
115 115 if (mod == NULL) {
116 116 goto cleanup;
117 117 }
118 118
119 119 dict = PyModule_GetDict(mod);
120 120 if (dict == NULL) {
121 121 goto cleanup;
122 122 }
123 123 Py_INCREF(dict);
124 124
125 125 errclass = PyDict_GetItemString(dict, "RevlogError");
126 126 if (errclass == NULL) {
127 127 PyErr_SetString(PyExc_SystemError,
128 128 "could not find RevlogError");
129 129 goto cleanup;
130 130 }
131 131
132 132 /* value of exception is ignored by callers */
133 133 PyErr_SetString(errclass, "RevlogError");
134 134
135 135 cleanup:
136 136 Py_XDECREF(dict);
137 137 Py_XDECREF(mod);
138 138 }
139 139
140 140 /*
141 141 * Return a pointer to the beginning of a RevlogNG record.
142 142 */
143 143 static const char *index_deref(indexObject *self, Py_ssize_t pos)
144 144 {
145 145 if (self->inlined && pos > 0) {
146 146 if (self->offsets == NULL) {
147 147 self->offsets = PyMem_Malloc(self->raw_length *
148 148 sizeof(*self->offsets));
149 149 if (self->offsets == NULL)
150 150 return (const char *)PyErr_NoMemory();
151 151 inline_scan(self, self->offsets);
152 152 }
153 153 return self->offsets[pos];
154 154 }
155 155
156 156 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
157 157 }
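
/*
 * Editor's worked example, not part of this changeset: in a non-inlined
 * index every record is v1_hdrsize (64) bytes, so rev 3 starts at byte
 * 3 * 64 = 192 of the mapped buffer. The offsets table above is only
 * needed for inlined revlogs, where variable-length revision data sits
 * between index entries and record positions become irregular.
 */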
158 158
159 159 static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps,
160 160 int maxrev)
161 161 {
162 162 if (rev >= self->length) {
163 163 long tmp;
164 164 PyObject *tuple =
165 165 PyList_GET_ITEM(self->added, rev - self->length);
166 166 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 5), &tmp)) {
167 167 return -1;
168 168 }
169 169 ps[0] = (int)tmp;
170 170 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 6), &tmp)) {
171 171 return -1;
172 172 }
173 173 ps[1] = (int)tmp;
174 174 } else {
175 175 const char *data = index_deref(self, rev);
176 176 ps[0] = getbe32(data + 24);
177 177 ps[1] = getbe32(data + 28);
178 178 }
179 179 /* If the index file is corrupted, ps[] may point to invalid revisions, so
180 180 * trusting them unconditionally risks a buffer overflow. */
181 181 if (ps[0] > maxrev || ps[1] > maxrev) {
182 182 PyErr_SetString(PyExc_ValueError, "parent out of range");
183 183 return -1;
184 184 }
185 185 return 0;
186 186 }
187 187
188 static inline int64_t index_get_start(indexObject *self, Py_ssize_t rev)
189 {
190 uint64_t offset;
191 if (rev >= self->length) {
192 PyObject *tuple;
193 PyObject *pylong;
194 PY_LONG_LONG tmp;
195 tuple = PyList_GET_ITEM(self->added, rev - self->length);
196 pylong = PyTuple_GET_ITEM(tuple, 0);
197 tmp = PyLong_AsLongLong(pylong);
198 if (tmp == -1 && PyErr_Occurred()) {
199 return -1;
200 }
201 if (tmp < 0) {
202 PyErr_Format(PyExc_OverflowError,
203 "revlog entry size out of bound (%lld)",
204 (long long)tmp);
205 return -1;
206 }
207 offset = (uint64_t)tmp;
208 } else {
209 const char *data = index_deref(self, rev);
210 offset = getbe32(data + 4);
211 if (rev == 0) {
212 /* mask out version number for the first entry */
213 offset &= 0xFFFF;
214 } else {
215 uint32_t offset_high = getbe32(data);
216 offset |= ((uint64_t)offset_high) << 32;
217 }
218 }
219 return (int64_t)(offset >> 16);
220 }
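
/*
 * Editor's sketch, not part of this changeset: how a sparse-revlog helper
 * might use index_get_start() to measure the distance, in revlog bytes,
 * between the start offsets of two revisions. The helper name and
 * signature are hypothetical; callers must pass valid revisions.
 */
static int64_t index_span_between_sketch(indexObject *self,
                                         Py_ssize_t low_rev,
                                         Py_ssize_t high_rev)
{
        int64_t low_start = index_get_start(self, low_rev);
        int64_t high_start = index_get_start(self, high_rev);
        if (low_start < 0 || high_start < 0)
                return -1; /* exception already set by index_get_start() */
        return high_start - low_start;
}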
221
188 222 /*
189 223 * RevlogNG format (all in big endian, data may be inlined):
190 224 * 6 bytes: offset
191 225 * 2 bytes: flags
192 226 * 4 bytes: compressed length
193 227 * 4 bytes: uncompressed length
194 228 * 4 bytes: base revision
195 229 * 4 bytes: link revision
196 230 * 4 bytes: parent 1 revision
197 231 * 4 bytes: parent 2 revision
198 232 * 32 bytes: nodeid (only 20 bytes used)
199 233 */
200 234 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
201 235 {
202 236 uint64_t offset_flags;
203 237 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
204 238 const char *c_node_id;
205 239 const char *data;
206 240 Py_ssize_t length = index_length(self);
207 241 PyObject *entry;
208 242
209 243 if (pos == -1) {
210 244 Py_INCREF(nullentry);
211 245 return nullentry;
212 246 }
213 247
214 248 if (pos < 0 || pos >= length) {
215 249 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
216 250 return NULL;
217 251 }
218 252
219 253 if (pos >= self->length) {
220 254 PyObject *obj;
221 255 obj = PyList_GET_ITEM(self->added, pos - self->length);
222 256 Py_INCREF(obj);
223 257 return obj;
224 258 }
225 259
226 260 if (self->cache) {
227 261 if (self->cache[pos]) {
228 262 Py_INCREF(self->cache[pos]);
229 263 return self->cache[pos];
230 264 }
231 265 } else {
232 266 self->cache = calloc(self->raw_length, sizeof(PyObject *));
233 267 if (self->cache == NULL)
234 268 return PyErr_NoMemory();
235 269 }
236 270
237 271 data = index_deref(self, pos);
238 272 if (data == NULL)
239 273 return NULL;
240 274
241 275 offset_flags = getbe32(data + 4);
242 276 if (pos == 0) /* mask out version number for the first entry */
243 277 offset_flags &= 0xFFFF;
244 278 else {
245 279 uint32_t offset_high = getbe32(data);
246 280 offset_flags |= ((uint64_t)offset_high) << 32;
247 281 }
248 282
249 283 comp_len = getbe32(data + 8);
250 284 uncomp_len = getbe32(data + 12);
251 285 base_rev = getbe32(data + 16);
252 286 link_rev = getbe32(data + 20);
253 287 parent_1 = getbe32(data + 24);
254 288 parent_2 = getbe32(data + 28);
255 289 c_node_id = data + 32;
256 290
257 291 entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
258 292 base_rev, link_rev, parent_1, parent_2, c_node_id,
259 293 20);
260 294
261 295 if (entry) {
262 296 PyObject_GC_UnTrack(entry);
263 297 Py_INCREF(entry);
264 298 }
265 299
266 300 self->cache[pos] = entry;
267 301
268 302 return entry;
269 303 }
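
/*
 * Editor's sketch, not part of this changeset: the first eight bytes of an
 * entry pack a 48-bit byte offset and 16-bit flags into the offset_flags
 * value decoded above. index_get_start() keeps only the offset part; the
 * flags can be recovered from the low 16 bits.
 */
static inline uint16_t entry_flags_sketch(uint64_t offset_flags)
{
        return (uint16_t)(offset_flags & 0xFFFF);
}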
270 304
271 305 /*
272 306 * Return the 20-byte SHA of the node corresponding to the given rev.
273 307 */
274 308 static const char *index_node(indexObject *self, Py_ssize_t pos)
275 309 {
276 310 Py_ssize_t length = index_length(self);
277 311 const char *data;
278 312
279 313 if (pos == -1)
280 314 return nullid;
281 315
282 316 if (pos >= length)
283 317 return NULL;
284 318
285 319 if (pos >= self->length) {
286 320 PyObject *tuple, *str;
287 321 tuple = PyList_GET_ITEM(self->added, pos - self->length);
288 322 str = PyTuple_GetItem(tuple, 7);
289 323 return str ? PyBytes_AS_STRING(str) : NULL;
290 324 }
291 325
292 326 data = index_deref(self, pos);
293 327 return data ? data + 32 : NULL;
294 328 }
295 329
296 330 /*
297 331 * Return the 20-byte SHA of the node corresponding to the given rev. The
298 332 * rev is assumed to be existing. If not, an exception is set.
299 333 */
300 334 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
301 335 {
302 336 const char *node = index_node(self, pos);
303 337 if (node == NULL) {
304 338 PyErr_Format(PyExc_IndexError, "could not access rev %d",
305 339 (int)pos);
306 340 }
307 341 return node;
308 342 }
309 343
310 344 static int nt_insert(nodetree *self, const char *node, int rev);
311 345
312 346 static int node_check(PyObject *obj, char **node)
313 347 {
314 348 Py_ssize_t nodelen;
315 349 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
316 350 return -1;
317 351 if (nodelen == 20)
318 352 return 0;
319 353 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
320 354 return -1;
321 355 }
322 356
323 357 static PyObject *index_append(indexObject *self, PyObject *obj)
324 358 {
325 359 char *node;
326 360 Py_ssize_t len;
327 361
328 362 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
329 363 PyErr_SetString(PyExc_TypeError, "8-tuple required");
330 364 return NULL;
331 365 }
332 366
333 367 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
334 368 return NULL;
335 369
336 370 len = index_length(self);
337 371
338 372 if (self->added == NULL) {
339 373 self->added = PyList_New(0);
340 374 if (self->added == NULL)
341 375 return NULL;
342 376 }
343 377
344 378 if (PyList_Append(self->added, obj) == -1)
345 379 return NULL;
346 380
347 381 if (self->ntinitialized)
348 382 nt_insert(&self->nt, node, (int)len);
349 383
350 384 Py_CLEAR(self->headrevs);
351 385 Py_RETURN_NONE;
352 386 }
353 387
354 388 static PyObject *index_stats(indexObject *self)
355 389 {
356 390 PyObject *obj = PyDict_New();
357 391 PyObject *s = NULL;
358 392 PyObject *t = NULL;
359 393
360 394 if (obj == NULL)
361 395 return NULL;
362 396
363 397 #define istat(__n, __d) \
364 398 do { \
365 399 s = PyBytes_FromString(__d); \
366 400 t = PyInt_FromSsize_t(self->__n); \
367 401 if (!s || !t) \
368 402 goto bail; \
369 403 if (PyDict_SetItem(obj, s, t) == -1) \
370 404 goto bail; \
371 405 Py_CLEAR(s); \
372 406 Py_CLEAR(t); \
373 407 } while (0)
374 408
375 409 if (self->added) {
376 410 Py_ssize_t len = PyList_GET_SIZE(self->added);
377 411 s = PyBytes_FromString("index entries added");
378 412 t = PyInt_FromSsize_t(len);
379 413 if (!s || !t)
380 414 goto bail;
381 415 if (PyDict_SetItem(obj, s, t) == -1)
382 416 goto bail;
383 417 Py_CLEAR(s);
384 418 Py_CLEAR(t);
385 419 }
386 420
387 421 if (self->raw_length != self->length)
388 422 istat(raw_length, "revs on disk");
389 423 istat(length, "revs in memory");
390 424 istat(ntlookups, "node trie lookups");
391 425 istat(ntmisses, "node trie misses");
392 426 istat(ntrev, "node trie last rev scanned");
393 427 if (self->ntinitialized) {
394 428 istat(nt.capacity, "node trie capacity");
395 429 istat(nt.depth, "node trie depth");
396 430 istat(nt.length, "node trie count");
397 431 istat(nt.splits, "node trie splits");
398 432 }
399 433
400 434 #undef istat
401 435
402 436 return obj;
403 437
404 438 bail:
405 439 Py_XDECREF(obj);
406 440 Py_XDECREF(s);
407 441 Py_XDECREF(t);
408 442 return NULL;
409 443 }
410 444
411 445 /*
412 446 * When we cache a list, we want to be sure the caller can't mutate
413 447 * the cached copy.
414 448 */
415 449 static PyObject *list_copy(PyObject *list)
416 450 {
417 451 Py_ssize_t len = PyList_GET_SIZE(list);
418 452 PyObject *newlist = PyList_New(len);
419 453 Py_ssize_t i;
420 454
421 455 if (newlist == NULL)
422 456 return NULL;
423 457
424 458 for (i = 0; i < len; i++) {
425 459 PyObject *obj = PyList_GET_ITEM(list, i);
426 460 Py_INCREF(obj);
427 461 PyList_SET_ITEM(newlist, i, obj);
428 462 }
429 463
430 464 return newlist;
431 465 }
432 466
433 467 static int check_filter(PyObject *filter, Py_ssize_t arg)
434 468 {
435 469 if (filter) {
436 470 PyObject *arglist, *result;
437 471 int isfiltered;
438 472
439 473 arglist = Py_BuildValue("(n)", arg);
440 474 if (!arglist) {
441 475 return -1;
442 476 }
443 477
444 478 result = PyEval_CallObject(filter, arglist);
445 479 Py_DECREF(arglist);
446 480 if (!result) {
447 481 return -1;
448 482 }
449 483
450 484 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
451 485 * same as this function, so we can just return it directly. */
452 486 isfiltered = PyObject_IsTrue(result);
453 487 Py_DECREF(result);
454 488 return isfiltered;
455 489 } else {
456 490 return 0;
457 491 }
458 492 }
459 493
460 494 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
461 495 Py_ssize_t marker, char *phases)
462 496 {
463 497 PyObject *iter = NULL;
464 498 PyObject *iter_item = NULL;
465 499 Py_ssize_t min_idx = index_length(self) + 2;
466 500 long iter_item_long;
467 501
468 502 if (PyList_GET_SIZE(list) != 0) {
469 503 iter = PyObject_GetIter(list);
470 504 if (iter == NULL)
471 505 return -2;
472 506 while ((iter_item = PyIter_Next(iter))) {
473 507 if (!pylong_to_long(iter_item, &iter_item_long)) {
474 508 Py_DECREF(iter_item);
475 509 return -2;
476 510 }
477 511 Py_DECREF(iter_item);
478 512 if (iter_item_long < min_idx)
479 513 min_idx = iter_item_long;
480 514 phases[iter_item_long] = (char)marker;
481 515 }
482 516 Py_DECREF(iter);
483 517 }
484 518
485 519 return min_idx;
486 520 }
487 521
488 522 static inline void set_phase_from_parents(char *phases, int parent_1,
489 523 int parent_2, Py_ssize_t i)
490 524 {
491 525 if (parent_1 >= 0 && phases[parent_1] > phases[i])
492 526 phases[i] = phases[parent_1];
493 527 if (parent_2 >= 0 && phases[parent_2] > phases[i])
494 528 phases[i] = phases[parent_2];
495 529 }
496 530
497 531 static PyObject *reachableroots2(indexObject *self, PyObject *args)
498 532 {
499 533
500 534 /* Input */
501 535 long minroot;
502 536 PyObject *includepatharg = NULL;
503 537 int includepath = 0;
504 538 /* heads and roots are lists */
505 539 PyObject *heads = NULL;
506 540 PyObject *roots = NULL;
507 541 PyObject *reachable = NULL;
508 542
509 543 PyObject *val;
510 544 Py_ssize_t len = index_length(self);
511 545 long revnum;
512 546 Py_ssize_t k;
513 547 Py_ssize_t i;
514 548 Py_ssize_t l;
515 549 int r;
516 550 int parents[2];
517 551
518 552 /* Internal data structure:
519 553 * tovisit: array of length len+1 (all revs + nullrev), filled up to
520 554 * lentovisit
521 555 *
522 556 * revstates: array of length len+1 (all revs + nullrev) */
523 557 int *tovisit = NULL;
524 558 long lentovisit = 0;
525 559 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
526 560 char *revstates = NULL;
527 561
528 562 /* Get arguments */
529 563 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
530 564 &PyList_Type, &roots, &PyBool_Type,
531 565 &includepatharg))
532 566 goto bail;
533 567
534 568 if (includepatharg == Py_True)
535 569 includepath = 1;
536 570
537 571 /* Initialize return set */
538 572 reachable = PyList_New(0);
539 573 if (reachable == NULL)
540 574 goto bail;
541 575
542 576 /* Initialize internal datastructures */
543 577 tovisit = (int *)malloc((len + 1) * sizeof(int));
544 578 if (tovisit == NULL) {
545 579 PyErr_NoMemory();
546 580 goto bail;
547 581 }
548 582
549 583 revstates = (char *)calloc(len + 1, 1);
550 584 if (revstates == NULL) {
551 585 PyErr_NoMemory();
552 586 goto bail;
553 587 }
554 588
555 589 l = PyList_GET_SIZE(roots);
556 590 for (i = 0; i < l; i++) {
557 591 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
558 592 if (revnum == -1 && PyErr_Occurred())
559 593 goto bail;
560 594 /* If root is out of range, e.g. wdir(), it must be unreachable
561 595 * from heads. So we can just ignore it. */
562 596 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
563 597 continue;
564 598 revstates[revnum + 1] |= RS_ROOT;
565 599 }
566 600
567 601 /* Populate tovisit with all the heads */
568 602 l = PyList_GET_SIZE(heads);
569 603 for (i = 0; i < l; i++) {
570 604 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
571 605 if (revnum == -1 && PyErr_Occurred())
572 606 goto bail;
573 607 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
574 608 PyErr_SetString(PyExc_IndexError, "head out of range");
575 609 goto bail;
576 610 }
577 611 if (!(revstates[revnum + 1] & RS_SEEN)) {
578 612 tovisit[lentovisit++] = (int)revnum;
579 613 revstates[revnum + 1] |= RS_SEEN;
580 614 }
581 615 }
582 616
583 617 /* Visit the tovisit list and find the reachable roots */
584 618 k = 0;
585 619 while (k < lentovisit) {
586 620 /* Add the node to reachable if it is a root */
587 621 revnum = tovisit[k++];
588 622 if (revstates[revnum + 1] & RS_ROOT) {
589 623 revstates[revnum + 1] |= RS_REACHABLE;
590 624 val = PyInt_FromLong(revnum);
591 625 if (val == NULL)
592 626 goto bail;
593 627 r = PyList_Append(reachable, val);
594 628 Py_DECREF(val);
595 629 if (r < 0)
596 630 goto bail;
597 631 if (includepath == 0)
598 632 continue;
599 633 }
600 634
601 635 /* Add its parents to the list of nodes to visit */
602 636 if (revnum == -1)
603 637 continue;
604 638 r = index_get_parents(self, revnum, parents, (int)len - 1);
605 639 if (r < 0)
606 640 goto bail;
607 641 for (i = 0; i < 2; i++) {
608 642 if (!(revstates[parents[i] + 1] & RS_SEEN) &&
609 643 parents[i] >= minroot) {
610 644 tovisit[lentovisit++] = parents[i];
611 645 revstates[parents[i] + 1] |= RS_SEEN;
612 646 }
613 647 }
614 648 }
615 649
616 650 /* Find all the nodes in between the roots we found and the heads
617 651 * and add them to the reachable set */
618 652 if (includepath == 1) {
619 653 long minidx = minroot;
620 654 if (minidx < 0)
621 655 minidx = 0;
622 656 for (i = minidx; i < len; i++) {
623 657 if (!(revstates[i + 1] & RS_SEEN))
624 658 continue;
625 659 r = index_get_parents(self, i, parents, (int)len - 1);
626 660 /* Corrupted index file, error is set from
627 661 * index_get_parents */
628 662 if (r < 0)
629 663 goto bail;
630 664 if (((revstates[parents[0] + 1] |
631 665 revstates[parents[1] + 1]) &
632 666 RS_REACHABLE) &&
633 667 !(revstates[i + 1] & RS_REACHABLE)) {
634 668 revstates[i + 1] |= RS_REACHABLE;
635 669 val = PyInt_FromSsize_t(i);
636 670 if (val == NULL)
637 671 goto bail;
638 672 r = PyList_Append(reachable, val);
639 673 Py_DECREF(val);
640 674 if (r < 0)
641 675 goto bail;
642 676 }
643 677 }
644 678 }
645 679
646 680 free(revstates);
647 681 free(tovisit);
648 682 return reachable;
649 683 bail:
650 684 Py_XDECREF(reachable);
651 685 free(revstates);
652 686 free(tovisit);
653 687 return NULL;
654 688 }
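
/*
 * Editor's note, not part of this changeset: revstates[] is indexed by
 * rev + 1 so that nullrev (-1) lands in slot 0. RS_SEEN marks revisions
 * already queued in tovisit, RS_ROOT marks the input roots, and
 * RS_REACHABLE marks revisions already emitted into the result list.
 */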
655 689
656 690 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
657 691 {
658 692 PyObject *roots = Py_None;
659 693 PyObject *ret = NULL;
660 694 PyObject *phasessize = NULL;
661 695 PyObject *phaseroots = NULL;
662 696 PyObject *phaseset = NULL;
663 697 PyObject *phasessetlist = NULL;
664 698 PyObject *rev = NULL;
665 699 Py_ssize_t len = index_length(self);
666 700 Py_ssize_t numphase = 0;
667 701 Py_ssize_t minrevallphases = 0;
668 702 Py_ssize_t minrevphase = 0;
669 703 Py_ssize_t i = 0;
670 704 char *phases = NULL;
671 705 long phase;
672 706
673 707 if (!PyArg_ParseTuple(args, "O", &roots))
674 708 goto done;
675 709 if (roots == NULL || !PyList_Check(roots)) {
676 710 PyErr_SetString(PyExc_TypeError, "roots must be a list");
677 711 goto done;
678 712 }
679 713
680 714 phases = calloc(
681 715 len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
682 716 if (phases == NULL) {
683 717 PyErr_NoMemory();
684 718 goto done;
685 719 }
686 720 /* Put the phase information of all the roots in phases */
687 721 numphase = PyList_GET_SIZE(roots) + 1;
688 722 minrevallphases = len + 1;
689 723 phasessetlist = PyList_New(numphase);
690 724 if (phasessetlist == NULL)
691 725 goto done;
692 726
693 727 PyList_SET_ITEM(phasessetlist, 0, Py_None);
694 728 Py_INCREF(Py_None);
695 729
696 730 for (i = 0; i < numphase - 1; i++) {
697 731 phaseroots = PyList_GET_ITEM(roots, i);
698 732 phaseset = PySet_New(NULL);
699 733 if (phaseset == NULL)
700 734 goto release;
701 735 PyList_SET_ITEM(phasessetlist, i + 1, phaseset);
702 736 if (!PyList_Check(phaseroots)) {
703 737 PyErr_SetString(PyExc_TypeError,
704 738 "roots item must be a list");
705 739 goto release;
706 740 }
707 741 minrevphase =
708 742 add_roots_get_min(self, phaseroots, i + 1, phases);
709 743 if (minrevphase == -2) /* Error from add_roots_get_min */
710 744 goto release;
711 745 minrevallphases = MIN(minrevallphases, minrevphase);
712 746 }
713 747 /* Propagate the phase information from the roots to the revs */
714 748 if (minrevallphases != -1) {
715 749 int parents[2];
716 750 for (i = minrevallphases; i < len; i++) {
717 751 if (index_get_parents(self, i, parents, (int)len - 1) <
718 752 0)
719 753 goto release;
720 754 set_phase_from_parents(phases, parents[0], parents[1],
721 755 i);
722 756 }
723 757 }
724 758 /* Transform phase list to a python list */
725 759 phasessize = PyInt_FromSsize_t(len);
726 760 if (phasessize == NULL)
727 761 goto release;
728 762 for (i = 0; i < len; i++) {
729 763 phase = phases[i];
730 764 /* We only store the sets of phase for non public phase, the
731 765 * public phase is computed as a difference */
732 766 if (phase != 0) {
733 767 phaseset = PyList_GET_ITEM(phasessetlist, phase);
734 768 rev = PyInt_FromSsize_t(i);
735 769 if (rev == NULL)
736 770 goto release;
737 771 PySet_Add(phaseset, rev);
738 772 Py_XDECREF(rev);
739 773 }
740 774 }
741 775 ret = PyTuple_Pack(2, phasessize, phasessetlist);
742 776
743 777 release:
744 778 Py_XDECREF(phasessize);
745 779 Py_XDECREF(phasessetlist);
746 780 done:
747 781 free(phases);
748 782 return ret;
749 783 }
750 784
751 785 static PyObject *index_headrevs(indexObject *self, PyObject *args)
752 786 {
753 787 Py_ssize_t i, j, len;
754 788 char *nothead = NULL;
755 789 PyObject *heads = NULL;
756 790 PyObject *filter = NULL;
757 791 PyObject *filteredrevs = Py_None;
758 792
759 793 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
760 794 return NULL;
761 795 }
762 796
763 797 if (self->headrevs && filteredrevs == self->filteredrevs)
764 798 return list_copy(self->headrevs);
765 799
766 800 Py_DECREF(self->filteredrevs);
767 801 self->filteredrevs = filteredrevs;
768 802 Py_INCREF(filteredrevs);
769 803
770 804 if (filteredrevs != Py_None) {
771 805 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
772 806 if (!filter) {
773 807 PyErr_SetString(
774 808 PyExc_TypeError,
775 809 "filteredrevs has no attribute __contains__");
776 810 goto bail;
777 811 }
778 812 }
779 813
780 814 len = index_length(self);
781 815 heads = PyList_New(0);
782 816 if (heads == NULL)
783 817 goto bail;
784 818 if (len == 0) {
785 819 PyObject *nullid = PyInt_FromLong(-1);
786 820 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
787 821 Py_XDECREF(nullid);
788 822 goto bail;
789 823 }
790 824 goto done;
791 825 }
792 826
793 827 nothead = calloc(len, 1);
794 828 if (nothead == NULL) {
795 829 PyErr_NoMemory();
796 830 goto bail;
797 831 }
798 832
799 833 for (i = len - 1; i >= 0; i--) {
800 834 int isfiltered;
801 835 int parents[2];
802 836
803 837 /* If nothead[i] == 1, it means we've seen an unfiltered child
804 838 * of this node already, and therefore this node is not
805 839 * filtered. So we can skip the expensive check_filter step.
806 840 */
807 841 if (nothead[i] != 1) {
808 842 isfiltered = check_filter(filter, i);
809 843 if (isfiltered == -1) {
810 844 PyErr_SetString(PyExc_TypeError,
811 845 "unable to check filter");
812 846 goto bail;
813 847 }
814 848
815 849 if (isfiltered) {
816 850 nothead[i] = 1;
817 851 continue;
818 852 }
819 853 }
820 854
821 855 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
822 856 goto bail;
823 857 for (j = 0; j < 2; j++) {
824 858 if (parents[j] >= 0)
825 859 nothead[parents[j]] = 1;
826 860 }
827 861 }
828 862
829 863 for (i = 0; i < len; i++) {
830 864 PyObject *head;
831 865
832 866 if (nothead[i])
833 867 continue;
834 868 head = PyInt_FromSsize_t(i);
835 869 if (head == NULL || PyList_Append(heads, head) == -1) {
836 870 Py_XDECREF(head);
837 871 goto bail;
838 872 }
839 873 }
840 874
841 875 done:
842 876 self->headrevs = heads;
843 877 Py_XDECREF(filter);
844 878 free(nothead);
845 879 return list_copy(self->headrevs);
846 880 bail:
847 881 Py_XDECREF(filter);
848 882 Py_XDECREF(heads);
849 883 free(nothead);
850 884 return NULL;
851 885 }
852 886
853 887 /**
854 888 * Obtain the base revision index entry.
855 889 *
856 890 * Callers must ensure that rev >= 0 or illegal memory access may occur.
857 891 */
858 892 static inline int index_baserev(indexObject *self, int rev)
859 893 {
860 894 const char *data;
861 895
862 896 if (rev >= self->length) {
863 897 PyObject *tuple =
864 898 PyList_GET_ITEM(self->added, rev - self->length);
865 899 long ret;
866 900 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 3), &ret)) {
867 901 return -2;
868 902 }
869 903 return (int)ret;
870 904 } else {
871 905 data = index_deref(self, rev);
872 906 if (data == NULL) {
873 907 return -2;
874 908 }
875 909
876 910 return getbe32(data + 16);
877 911 }
878 912 }
879 913
880 914 static PyObject *index_deltachain(indexObject *self, PyObject *args)
881 915 {
882 916 int rev, generaldelta;
883 917 PyObject *stoparg;
884 918 int stoprev, iterrev, baserev = -1;
885 919 int stopped;
886 920 PyObject *chain = NULL, *result = NULL;
887 921 const Py_ssize_t length = index_length(self);
888 922
889 923 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
890 924 return NULL;
891 925 }
892 926
893 927 if (PyInt_Check(stoparg)) {
894 928 stoprev = (int)PyInt_AsLong(stoparg);
895 929 if (stoprev == -1 && PyErr_Occurred()) {
896 930 return NULL;
897 931 }
898 932 } else if (stoparg == Py_None) {
899 933 stoprev = -2;
900 934 } else {
901 935 PyErr_SetString(PyExc_ValueError,
902 936 "stoprev must be integer or None");
903 937 return NULL;
904 938 }
905 939
906 940 if (rev < 0 || rev >= length) {
907 941 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
908 942 return NULL;
909 943 }
910 944
911 945 chain = PyList_New(0);
912 946 if (chain == NULL) {
913 947 return NULL;
914 948 }
915 949
916 950 baserev = index_baserev(self, rev);
917 951
918 952 /* This should never happen. */
919 953 if (baserev <= -2) {
920 954 /* Error should be set by index_deref() */
921 955 assert(PyErr_Occurred());
922 956 goto bail;
923 957 }
924 958
925 959 iterrev = rev;
926 960
927 961 while (iterrev != baserev && iterrev != stoprev) {
928 962 PyObject *value = PyInt_FromLong(iterrev);
929 963 if (value == NULL) {
930 964 goto bail;
931 965 }
932 966 if (PyList_Append(chain, value)) {
933 967 Py_DECREF(value);
934 968 goto bail;
935 969 }
936 970 Py_DECREF(value);
937 971
938 972 if (generaldelta) {
939 973 iterrev = baserev;
940 974 } else {
941 975 iterrev--;
942 976 }
943 977
944 978 if (iterrev < 0) {
945 979 break;
946 980 }
947 981
948 982 if (iterrev >= length) {
949 983 PyErr_SetString(PyExc_IndexError,
950 984 "revision outside index");
951 985 return NULL;
952 986 }
953 987
954 988 baserev = index_baserev(self, iterrev);
955 989
956 990 /* This should never happen. */
957 991 if (baserev <= -2) {
958 992 /* Error should be set by index_deref() */
959 993 assert(PyErr_Occurred());
960 994 goto bail;
961 995 }
962 996 }
963 997
964 998 if (iterrev == stoprev) {
965 999 stopped = 1;
966 1000 } else {
967 1001 PyObject *value = PyInt_FromLong(iterrev);
968 1002 if (value == NULL) {
969 1003 goto bail;
970 1004 }
971 1005 if (PyList_Append(chain, value)) {
972 1006 Py_DECREF(value);
973 1007 goto bail;
974 1008 }
975 1009 Py_DECREF(value);
976 1010
977 1011 stopped = 0;
978 1012 }
979 1013
980 1014 if (PyList_Reverse(chain)) {
981 1015 goto bail;
982 1016 }
983 1017
984 1018 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
985 1019 Py_DECREF(chain);
986 1020 return result;
987 1021
988 1022 bail:
989 1023 Py_DECREF(chain);
990 1024 return NULL;
991 1025 }
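
/*
 * Editor's note, not part of this changeset: with generaldelta the walk
 * above hops directly from each revision to its recorded base, while
 * without it the revision number is simply decremented until the base (or
 * stoprev) is reached; in both cases the collected chain is reversed so it
 * runs from the delta base toward the requested revision.
 */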
992 1026
993 1027 static inline int nt_level(const char *node, Py_ssize_t level)
994 1028 {
995 1029 int v = node[level >> 1];
996 1030 if (!(level & 1))
997 1031 v >>= 4;
998 1032 return v & 0xf;
999 1033 }
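
/*
 * Editor's worked example, not part of this changeset: nt_level() reads
 * the binary nodeid one hex nybble at a time. For a node whose first byte
 * is 0xab, nt_level(node, 0) yields 0xa and nt_level(node, 1) yields 0xb,
 * matching the digit order of the hexadecimal nodeid.
 */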
1000 1034
1001 1035 /*
1002 1036 * Return values:
1003 1037 *
1004 1038 * -4: match is ambiguous (multiple candidates)
1005 1039 * -2: not found
1006 1040 * rest: valid rev
1007 1041 */
1008 1042 static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
1009 1043 int hex)
1010 1044 {
1011 1045 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
1012 1046 int level, maxlevel, off;
1013 1047
1014 1048 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
1015 1049 return -1;
1016 1050
1017 1051 if (hex)
1018 1052 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
1019 1053 else
1020 1054 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
1021 1055
1022 1056 for (level = off = 0; level < maxlevel; level++) {
1023 1057 int k = getnybble(node, level);
1024 1058 nodetreenode *n = &self->nodes[off];
1025 1059 int v = n->children[k];
1026 1060
1027 1061 if (v < 0) {
1028 1062 const char *n;
1029 1063 Py_ssize_t i;
1030 1064
1031 1065 v = -(v + 2);
1032 1066 n = index_node(self->index, v);
1033 1067 if (n == NULL)
1034 1068 return -2;
1035 1069 for (i = level; i < maxlevel; i++)
1036 1070 if (getnybble(node, i) != nt_level(n, i))
1037 1071 return -2;
1038 1072 return v;
1039 1073 }
1040 1074 if (v == 0)
1041 1075 return -2;
1042 1076 off = v;
1043 1077 }
1044 1078 /* multiple matches against an ambiguous prefix */
1045 1079 return -4;
1046 1080 }
1047 1081
1048 1082 static int nt_new(nodetree *self)
1049 1083 {
1050 1084 if (self->length == self->capacity) {
1051 1085 unsigned newcapacity;
1052 1086 nodetreenode *newnodes;
1053 1087 newcapacity = self->capacity * 2;
1054 1088 if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
1055 1089 PyErr_SetString(PyExc_MemoryError,
1056 1090 "overflow in nt_new");
1057 1091 return -1;
1058 1092 }
1059 1093 newnodes =
1060 1094 realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1061 1095 if (newnodes == NULL) {
1062 1096 PyErr_SetString(PyExc_MemoryError, "out of memory");
1063 1097 return -1;
1064 1098 }
1065 1099 self->capacity = newcapacity;
1066 1100 self->nodes = newnodes;
1067 1101 memset(&self->nodes[self->length], 0,
1068 1102 sizeof(nodetreenode) * (self->capacity - self->length));
1069 1103 }
1070 1104 return self->length++;
1071 1105 }
1072 1106
1073 1107 static int nt_insert(nodetree *self, const char *node, int rev)
1074 1108 {
1075 1109 int level = 0;
1076 1110 int off = 0;
1077 1111
1078 1112 while (level < 40) {
1079 1113 int k = nt_level(node, level);
1080 1114 nodetreenode *n;
1081 1115 int v;
1082 1116
1083 1117 n = &self->nodes[off];
1084 1118 v = n->children[k];
1085 1119
1086 1120 if (v == 0) {
1087 1121 n->children[k] = -rev - 2;
1088 1122 return 0;
1089 1123 }
1090 1124 if (v < 0) {
1091 1125 const char *oldnode =
1092 1126 index_node_existing(self->index, -(v + 2));
1093 1127 int noff;
1094 1128
1095 1129 if (oldnode == NULL)
1096 1130 return -1;
1097 1131 if (!memcmp(oldnode, node, 20)) {
1098 1132 n->children[k] = -rev - 2;
1099 1133 return 0;
1100 1134 }
1101 1135 noff = nt_new(self);
1102 1136 if (noff == -1)
1103 1137 return -1;
1104 1138 /* self->nodes may have been changed by realloc */
1105 1139 self->nodes[off].children[k] = noff;
1106 1140 off = noff;
1107 1141 n = &self->nodes[off];
1108 1142 n->children[nt_level(oldnode, ++level)] = v;
1109 1143 if (level > self->depth)
1110 1144 self->depth = level;
1111 1145 self->splits += 1;
1112 1146 } else {
1113 1147 level += 1;
1114 1148 off = v;
1115 1149 }
1116 1150 }
1117 1151
1118 1152 return -1;
1119 1153 }
1120 1154
1121 1155 static PyObject *ntobj_insert(nodetreeObject *self, PyObject *args)
1122 1156 {
1123 1157 Py_ssize_t rev;
1124 1158 const char *node;
1125 1159 Py_ssize_t length;
1126 1160 if (!PyArg_ParseTuple(args, "n", &rev))
1127 1161 return NULL;
1128 1162 length = index_length(self->nt.index);
1129 1163 if (rev < 0 || rev >= length) {
1130 1164 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1131 1165 return NULL;
1132 1166 }
1133 1167 node = index_node_existing(self->nt.index, rev);
1134 1168 if (nt_insert(&self->nt, node, (int)rev) == -1)
1135 1169 return NULL;
1136 1170 Py_RETURN_NONE;
1137 1171 }
1138 1172
1139 1173 static int nt_delete_node(nodetree *self, const char *node)
1140 1174 {
1141 1175 /* rev==-2 happens to get encoded as 0, which is interpreted as not set
1142 1176 */
1143 1177 return nt_insert(self, node, -2);
1144 1178 }
1145 1179
1146 1180 static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
1147 1181 {
1148 1182 /* Initialize before overflow-checking to avoid nt_dealloc() crash. */
1149 1183 self->nodes = NULL;
1150 1184
1151 1185 self->index = index;
1152 1186 /* The input capacity is in terms of revisions, while the field is in
1153 1187 * terms of nodetree nodes. */
1154 1188 self->capacity = (capacity < 4 ? 4 : capacity / 2);
1155 1189 self->depth = 0;
1156 1190 self->splits = 0;
1157 1191 if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
1158 1192 PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
1159 1193 return -1;
1160 1194 }
1161 1195 self->nodes = calloc(self->capacity, sizeof(nodetreenode));
1162 1196 if (self->nodes == NULL) {
1163 1197 PyErr_NoMemory();
1164 1198 return -1;
1165 1199 }
1166 1200 self->length = 1;
1167 1201 return 0;
1168 1202 }
1169 1203
1170 1204 static PyTypeObject indexType;
1171 1205
1172 1206 static int ntobj_init(nodetreeObject *self, PyObject *args)
1173 1207 {
1174 1208 PyObject *index;
1175 1209 unsigned capacity;
1176 1210 if (!PyArg_ParseTuple(args, "O!I", &indexType, &index, &capacity))
1177 1211 return -1;
1178 1212 Py_INCREF(index);
1179 1213 return nt_init(&self->nt, (indexObject *)index, capacity);
1180 1214 }
1181 1215
1182 1216 static int nt_partialmatch(nodetree *self, const char *node, Py_ssize_t nodelen)
1183 1217 {
1184 1218 return nt_find(self, node, nodelen, 1);
1185 1219 }
1186 1220
1187 1221 /*
1188 1222 * Find the length of the shortest unique prefix of node.
1189 1223 *
1190 1224 * Return values:
1191 1225 *
1192 1226 * -3: error (exception set)
1193 1227 * -2: not found (no exception set)
1194 1228 * rest: length of shortest prefix
1195 1229 */
1196 1230 static int nt_shortest(nodetree *self, const char *node)
1197 1231 {
1198 1232 int level, off;
1199 1233
1200 1234 for (level = off = 0; level < 40; level++) {
1201 1235 int k, v;
1202 1236 nodetreenode *n = &self->nodes[off];
1203 1237 k = nt_level(node, level);
1204 1238 v = n->children[k];
1205 1239 if (v < 0) {
1206 1240 const char *n;
1207 1241 v = -(v + 2);
1208 1242 n = index_node_existing(self->index, v);
1209 1243 if (n == NULL)
1210 1244 return -3;
1211 1245 if (memcmp(node, n, 20) != 0)
1212 1246 /*
1213 1247 * Found a unique prefix, but it wasn't for the
1214 1248 * requested node (i.e. the requested node does
1215 1249 * not exist).
1216 1250 */
1217 1251 return -2;
1218 1252 return level + 1;
1219 1253 }
1220 1254 if (v == 0)
1221 1255 return -2;
1222 1256 off = v;
1223 1257 }
1224 1258 /*
1225 1259 * The node was still not unique after 40 hex digits, so this won't
1226 1260 * happen. Also, if we get here, then there's a programming error in
1227 1261 * this file that made us insert a node longer than 40 hex digits.
1228 1262 */
1229 1263 PyErr_SetString(PyExc_Exception, "broken node tree");
1230 1264 return -3;
1231 1265 }
1232 1266
1233 1267 static PyObject *ntobj_shortest(nodetreeObject *self, PyObject *args)
1234 1268 {
1235 1269 PyObject *val;
1236 1270 char *node;
1237 1271 int length;
1238 1272
1239 1273 if (!PyArg_ParseTuple(args, "O", &val))
1240 1274 return NULL;
1241 1275 if (node_check(val, &node) == -1)
1242 1276 return NULL;
1243 1277
1244 1278 length = nt_shortest(&self->nt, node);
1245 1279 if (length == -3)
1246 1280 return NULL;
1247 1281 if (length == -2) {
1248 1282 raise_revlog_error();
1249 1283 return NULL;
1250 1284 }
1251 1285 return PyInt_FromLong(length);
1252 1286 }
1253 1287
1254 1288 static void nt_dealloc(nodetree *self)
1255 1289 {
1256 1290 free(self->nodes);
1257 1291 self->nodes = NULL;
1258 1292 }
1259 1293
1260 1294 static void ntobj_dealloc(nodetreeObject *self)
1261 1295 {
1262 1296 Py_XDECREF(self->nt.index);
1263 1297 nt_dealloc(&self->nt);
1264 1298 PyObject_Del(self);
1265 1299 }
1266 1300
1267 1301 static PyMethodDef ntobj_methods[] = {
1268 1302 {"insert", (PyCFunction)ntobj_insert, METH_VARARGS,
1269 1303 "insert an index entry"},
1270 1304 {"shortest", (PyCFunction)ntobj_shortest, METH_VARARGS,
1271 1305 "find length of shortest hex nodeid of a binary ID"},
1272 1306 {NULL} /* Sentinel */
1273 1307 };
1274 1308
1275 1309 static PyTypeObject nodetreeType = {
1276 1310 PyVarObject_HEAD_INIT(NULL, 0) /* header */
1277 1311 "parsers.nodetree", /* tp_name */
1278 1312 sizeof(nodetreeObject), /* tp_basicsize */
1279 1313 0, /* tp_itemsize */
1280 1314 (destructor)ntobj_dealloc, /* tp_dealloc */
1281 1315 0, /* tp_print */
1282 1316 0, /* tp_getattr */
1283 1317 0, /* tp_setattr */
1284 1318 0, /* tp_compare */
1285 1319 0, /* tp_repr */
1286 1320 0, /* tp_as_number */
1287 1321 0, /* tp_as_sequence */
1288 1322 0, /* tp_as_mapping */
1289 1323 0, /* tp_hash */
1290 1324 0, /* tp_call */
1291 1325 0, /* tp_str */
1292 1326 0, /* tp_getattro */
1293 1327 0, /* tp_setattro */
1294 1328 0, /* tp_as_buffer */
1295 1329 Py_TPFLAGS_DEFAULT, /* tp_flags */
1296 1330 "nodetree", /* tp_doc */
1297 1331 0, /* tp_traverse */
1298 1332 0, /* tp_clear */
1299 1333 0, /* tp_richcompare */
1300 1334 0, /* tp_weaklistoffset */
1301 1335 0, /* tp_iter */
1302 1336 0, /* tp_iternext */
1303 1337 ntobj_methods, /* tp_methods */
1304 1338 0, /* tp_members */
1305 1339 0, /* tp_getset */
1306 1340 0, /* tp_base */
1307 1341 0, /* tp_dict */
1308 1342 0, /* tp_descr_get */
1309 1343 0, /* tp_descr_set */
1310 1344 0, /* tp_dictoffset */
1311 1345 (initproc)ntobj_init, /* tp_init */
1312 1346 0, /* tp_alloc */
1313 1347 };
1314 1348
1315 1349 static int index_init_nt(indexObject *self)
1316 1350 {
1317 1351 if (!self->ntinitialized) {
1318 1352 if (nt_init(&self->nt, self, (int)self->raw_length) == -1) {
1319 1353 nt_dealloc(&self->nt);
1320 1354 return -1;
1321 1355 }
1322 1356 if (nt_insert(&self->nt, nullid, -1) == -1) {
1323 1357 nt_dealloc(&self->nt);
1324 1358 return -1;
1325 1359 }
1326 1360 self->ntinitialized = 1;
1327 1361 self->ntrev = (int)index_length(self);
1328 1362 self->ntlookups = 1;
1329 1363 self->ntmisses = 0;
1330 1364 }
1331 1365 return 0;
1332 1366 }
1333 1367
1334 1368 /*
1335 1369 * Return values:
1336 1370 *
1337 1371 * -3: error (exception set)
1338 1372 * -2: not found (no exception set)
1339 1373 * rest: valid rev
1340 1374 */
1341 1375 static int index_find_node(indexObject *self, const char *node,
1342 1376 Py_ssize_t nodelen)
1343 1377 {
1344 1378 int rev;
1345 1379
1346 1380 if (index_init_nt(self) == -1)
1347 1381 return -3;
1348 1382
1349 1383 self->ntlookups++;
1350 1384 rev = nt_find(&self->nt, node, nodelen, 0);
1351 1385 if (rev >= -1)
1352 1386 return rev;
1353 1387
1354 1388 /*
1355 1389 * For the first handful of lookups, we scan the entire index,
1356 1390 * and cache only the matching nodes. This optimizes for cases
1357 1391 * like "hg tip", where only a few nodes are accessed.
1358 1392 *
1359 1393 * After that, we cache every node we visit, using a single
1360 1394 * scan amortized over multiple lookups. This gives the best
1361 1395 * bulk performance, e.g. for "hg log".
1362 1396 */
1363 1397 if (self->ntmisses++ < 4) {
1364 1398 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1365 1399 const char *n = index_node_existing(self, rev);
1366 1400 if (n == NULL)
1367 1401 return -3;
1368 1402 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1369 1403 if (nt_insert(&self->nt, n, rev) == -1)
1370 1404 return -3;
1371 1405 break;
1372 1406 }
1373 1407 }
1374 1408 } else {
1375 1409 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1376 1410 const char *n = index_node_existing(self, rev);
1377 1411 if (n == NULL)
1378 1412 return -3;
1379 1413 if (nt_insert(&self->nt, n, rev) == -1) {
1380 1414 self->ntrev = rev + 1;
1381 1415 return -3;
1382 1416 }
1383 1417 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1384 1418 break;
1385 1419 }
1386 1420 }
1387 1421 self->ntrev = rev;
1388 1422 }
1389 1423
1390 1424 if (rev >= 0)
1391 1425 return rev;
1392 1426 return -2;
1393 1427 }
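
/*
 * Editor's sketch, not part of this changeset: the usual caller pattern
 * around index_find_node()'s return-value contract, mirroring
 * index_getitem() and index_m_get() below.
 */
static PyObject *find_node_sketch(indexObject *self, const char *node)
{
        int rev = index_find_node(self, node, 20);
        if (rev == -3)
                return NULL; /* error, exception already set */
        if (rev == -2) {
                raise_revlog_error(); /* not found */
                return NULL;
        }
        return PyInt_FromLong(rev); /* valid rev, including -1 for nullid */
}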
1394 1428
1395 1429 static PyObject *index_getitem(indexObject *self, PyObject *value)
1396 1430 {
1397 1431 char *node;
1398 1432 int rev;
1399 1433
1400 1434 if (PyInt_Check(value)) {
1401 1435 long idx;
1402 1436 if (!pylong_to_long(value, &idx)) {
1403 1437 return NULL;
1404 1438 }
1405 1439 return index_get(self, idx);
1406 1440 }
1407 1441
1408 1442 if (node_check(value, &node) == -1)
1409 1443 return NULL;
1410 1444 rev = index_find_node(self, node, 20);
1411 1445 if (rev >= -1)
1412 1446 return PyInt_FromLong(rev);
1413 1447 if (rev == -2)
1414 1448 raise_revlog_error();
1415 1449 return NULL;
1416 1450 }
1417 1451
1418 1452 /*
1419 1453 * Fully populate the radix tree.
1420 1454 */
1421 1455 static int index_populate_nt(indexObject *self)
1422 1456 {
1423 1457 int rev;
1424 1458 if (self->ntrev > 0) {
1425 1459 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1426 1460 const char *n = index_node_existing(self, rev);
1427 1461 if (n == NULL)
1428 1462 return -1;
1429 1463 if (nt_insert(&self->nt, n, rev) == -1)
1430 1464 return -1;
1431 1465 }
1432 1466 self->ntrev = -1;
1433 1467 }
1434 1468 return 0;
1435 1469 }
1436 1470
1437 1471 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1438 1472 {
1439 1473 const char *fullnode;
1440 1474 int nodelen;
1441 1475 char *node;
1442 1476 int rev, i;
1443 1477
1444 1478 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1445 1479 return NULL;
1446 1480
1447 1481 if (nodelen < 1) {
1448 1482 PyErr_SetString(PyExc_ValueError, "key too short");
1449 1483 return NULL;
1450 1484 }
1451 1485
1452 1486 if (nodelen > 40) {
1453 1487 PyErr_SetString(PyExc_ValueError, "key too long");
1454 1488 return NULL;
1455 1489 }
1456 1490
1457 1491 for (i = 0; i < nodelen; i++)
1458 1492 hexdigit(node, i);
1459 1493 if (PyErr_Occurred()) {
1460 1494 /* input contains non-hex characters */
1461 1495 PyErr_Clear();
1462 1496 Py_RETURN_NONE;
1463 1497 }
1464 1498
1465 1499 if (index_init_nt(self) == -1)
1466 1500 return NULL;
1467 1501 if (index_populate_nt(self) == -1)
1468 1502 return NULL;
1469 1503 rev = nt_partialmatch(&self->nt, node, nodelen);
1470 1504
1471 1505 switch (rev) {
1472 1506 case -4:
1473 1507 raise_revlog_error();
1474 1508 return NULL;
1475 1509 case -2:
1476 1510 Py_RETURN_NONE;
1477 1511 case -1:
1478 1512 return PyBytes_FromStringAndSize(nullid, 20);
1479 1513 }
1480 1514
1481 1515 fullnode = index_node_existing(self, rev);
1482 1516 if (fullnode == NULL) {
1483 1517 return NULL;
1484 1518 }
1485 1519 return PyBytes_FromStringAndSize(fullnode, 20);
1486 1520 }
1487 1521
1488 1522 static PyObject *index_shortest(indexObject *self, PyObject *args)
1489 1523 {
1490 1524 PyObject *val;
1491 1525 char *node;
1492 1526 int length;
1493 1527
1494 1528 if (!PyArg_ParseTuple(args, "O", &val))
1495 1529 return NULL;
1496 1530 if (node_check(val, &node) == -1)
1497 1531 return NULL;
1498 1532
1499 1533 self->ntlookups++;
1500 1534 if (index_init_nt(self) == -1)
1501 1535 return NULL;
1502 1536 if (index_populate_nt(self) == -1)
1503 1537 return NULL;
1504 1538 length = nt_shortest(&self->nt, node);
1505 1539 if (length == -3)
1506 1540 return NULL;
1507 1541 if (length == -2) {
1508 1542 raise_revlog_error();
1509 1543 return NULL;
1510 1544 }
1511 1545 return PyInt_FromLong(length);
1512 1546 }
1513 1547
1514 1548 static PyObject *index_m_get(indexObject *self, PyObject *args)
1515 1549 {
1516 1550 PyObject *val;
1517 1551 char *node;
1518 1552 int rev;
1519 1553
1520 1554 if (!PyArg_ParseTuple(args, "O", &val))
1521 1555 return NULL;
1522 1556 if (node_check(val, &node) == -1)
1523 1557 return NULL;
1524 1558 rev = index_find_node(self, node, 20);
1525 1559 if (rev == -3)
1526 1560 return NULL;
1527 1561 if (rev == -2)
1528 1562 Py_RETURN_NONE;
1529 1563 return PyInt_FromLong(rev);
1530 1564 }
1531 1565
1532 1566 static int index_contains(indexObject *self, PyObject *value)
1533 1567 {
1534 1568 char *node;
1535 1569
1536 1570 if (PyInt_Check(value)) {
1537 1571 long rev;
1538 1572 if (!pylong_to_long(value, &rev)) {
1539 1573 return -1;
1540 1574 }
1541 1575 return rev >= -1 && rev < index_length(self);
1542 1576 }
1543 1577
1544 1578 if (node_check(value, &node) == -1)
1545 1579 return -1;
1546 1580
1547 1581 switch (index_find_node(self, node, 20)) {
1548 1582 case -3:
1549 1583 return -1;
1550 1584 case -2:
1551 1585 return 0;
1552 1586 default:
1553 1587 return 1;
1554 1588 }
1555 1589 }
1556 1590
1557 1591 typedef uint64_t bitmask;
1558 1592
1559 1593 /*
1560 1594 * Given a disjoint set of revs, return all candidates for the
1561 1595 * greatest common ancestor. In revset notation, this is the set
1562 1596 * "heads(::a and ::b and ...)"
1563 1597 */
1564 1598 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
1565 1599 int revcount)
1566 1600 {
1567 1601 const bitmask allseen = (1ull << revcount) - 1;
1568 1602 const bitmask poison = 1ull << revcount;
1569 1603 PyObject *gca = PyList_New(0);
1570 1604 int i, v, interesting;
1571 1605 int maxrev = -1;
1572 1606 bitmask sp;
1573 1607 bitmask *seen;
1574 1608
1575 1609 if (gca == NULL)
1576 1610 return PyErr_NoMemory();
1577 1611
1578 1612 for (i = 0; i < revcount; i++) {
1579 1613 if (revs[i] > maxrev)
1580 1614 maxrev = revs[i];
1581 1615 }
1582 1616
1583 1617 seen = calloc(sizeof(*seen), maxrev + 1);
1584 1618 if (seen == NULL) {
1585 1619 Py_DECREF(gca);
1586 1620 return PyErr_NoMemory();
1587 1621 }
1588 1622
1589 1623 for (i = 0; i < revcount; i++)
1590 1624 seen[revs[i]] = 1ull << i;
1591 1625
1592 1626 interesting = revcount;
1593 1627
1594 1628 for (v = maxrev; v >= 0 && interesting; v--) {
1595 1629 bitmask sv = seen[v];
1596 1630 int parents[2];
1597 1631
1598 1632 if (!sv)
1599 1633 continue;
1600 1634
1601 1635 if (sv < poison) {
1602 1636 interesting -= 1;
1603 1637 if (sv == allseen) {
1604 1638 PyObject *obj = PyInt_FromLong(v);
1605 1639 if (obj == NULL)
1606 1640 goto bail;
1607 1641 if (PyList_Append(gca, obj) == -1) {
1608 1642 Py_DECREF(obj);
1609 1643 goto bail;
1610 1644 }
1611 1645 sv |= poison;
1612 1646 for (i = 0; i < revcount; i++) {
1613 1647 if (revs[i] == v)
1614 1648 goto done;
1615 1649 }
1616 1650 }
1617 1651 }
1618 1652 if (index_get_parents(self, v, parents, maxrev) < 0)
1619 1653 goto bail;
1620 1654
1621 1655 for (i = 0; i < 2; i++) {
1622 1656 int p = parents[i];
1623 1657 if (p == -1)
1624 1658 continue;
1625 1659 sp = seen[p];
1626 1660 if (sv < poison) {
1627 1661 if (sp == 0) {
1628 1662 seen[p] = sv;
1629 1663 interesting++;
1630 1664 } else if (sp != sv)
1631 1665 seen[p] |= sv;
1632 1666 } else {
1633 1667 if (sp && sp < poison)
1634 1668 interesting--;
1635 1669 seen[p] = sv;
1636 1670 }
1637 1671 }
1638 1672 }
1639 1673
1640 1674 done:
1641 1675 free(seen);
1642 1676 return gca;
1643 1677 bail:
1644 1678 free(seen);
1645 1679 Py_XDECREF(gca);
1646 1680 return NULL;
1647 1681 }
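
/*
 * Editor's worked example, not part of this changeset: with two input
 * revs, allseen is 0x3 and poison is 0x4. Each input starts with its own
 * bit in seen[]; walking from the highest rev downward, a revision whose
 * accumulated bits equal allseen is an ancestor of every input, so it is
 * appended to gca and then poisoned so its own ancestors stop counting as
 * candidates.
 */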
1648 1682
1649 1683 /*
1650 1684 * Given a disjoint set of revs, return the subset with the longest
1651 1685 * path to the root.
1652 1686 */
1653 1687 static PyObject *find_deepest(indexObject *self, PyObject *revs)
1654 1688 {
1655 1689 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
1656 1690 static const Py_ssize_t capacity = 24;
1657 1691 int *depth, *interesting = NULL;
1658 1692 int i, j, v, ninteresting;
1659 1693 PyObject *dict = NULL, *keys = NULL;
1660 1694 long *seen = NULL;
1661 1695 int maxrev = -1;
1662 1696 long final;
1663 1697
1664 1698 if (revcount > capacity) {
1665 1699 PyErr_Format(PyExc_OverflowError,
1666 1700 "bitset size (%ld) > capacity (%ld)",
1667 1701 (long)revcount, (long)capacity);
1668 1702 return NULL;
1669 1703 }
1670 1704
1671 1705 for (i = 0; i < revcount; i++) {
1672 1706 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1673 1707 if (n > maxrev)
1674 1708 maxrev = n;
1675 1709 }
1676 1710
1677 1711 depth = calloc(sizeof(*depth), maxrev + 1);
1678 1712 if (depth == NULL)
1679 1713 return PyErr_NoMemory();
1680 1714
1681 1715 seen = calloc(sizeof(*seen), maxrev + 1);
1682 1716 if (seen == NULL) {
1683 1717 PyErr_NoMemory();
1684 1718 goto bail;
1685 1719 }
1686 1720
1687 1721 interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
1688 1722 if (interesting == NULL) {
1689 1723 PyErr_NoMemory();
1690 1724 goto bail;
1691 1725 }
1692 1726
1693 1727 if (PyList_Sort(revs) == -1)
1694 1728 goto bail;
1695 1729
1696 1730 for (i = 0; i < revcount; i++) {
1697 1731 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1698 1732 long b = 1l << i;
1699 1733 depth[n] = 1;
1700 1734 seen[n] = b;
1701 1735 interesting[b] = 1;
1702 1736 }
1703 1737
1704 1738 /* invariant: ninteresting is the number of non-zero entries in
1705 1739 * interesting. */
1706 1740 ninteresting = (int)revcount;
1707 1741
1708 1742 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
1709 1743 int dv = depth[v];
1710 1744 int parents[2];
1711 1745 long sv;
1712 1746
1713 1747 if (dv == 0)
1714 1748 continue;
1715 1749
1716 1750 sv = seen[v];
1717 1751 if (index_get_parents(self, v, parents, maxrev) < 0)
1718 1752 goto bail;
1719 1753
1720 1754 for (i = 0; i < 2; i++) {
1721 1755 int p = parents[i];
1722 1756 long sp;
1723 1757 int dp;
1724 1758
1725 1759 if (p == -1)
1726 1760 continue;
1727 1761
1728 1762 dp = depth[p];
1729 1763 sp = seen[p];
1730 1764 if (dp <= dv) {
1731 1765 depth[p] = dv + 1;
1732 1766 if (sp != sv) {
1733 1767 interesting[sv] += 1;
1734 1768 seen[p] = sv;
1735 1769 if (sp) {
1736 1770 interesting[sp] -= 1;
1737 1771 if (interesting[sp] == 0)
1738 1772 ninteresting -= 1;
1739 1773 }
1740 1774 }
1741 1775 } else if (dv == dp - 1) {
1742 1776 long nsp = sp | sv;
1743 1777 if (nsp == sp)
1744 1778 continue;
1745 1779 seen[p] = nsp;
1746 1780 interesting[sp] -= 1;
1747 1781 if (interesting[sp] == 0)
1748 1782 ninteresting -= 1;
1749 1783 if (interesting[nsp] == 0)
1750 1784 ninteresting += 1;
1751 1785 interesting[nsp] += 1;
1752 1786 }
1753 1787 }
1754 1788 interesting[sv] -= 1;
1755 1789 if (interesting[sv] == 0)
1756 1790 ninteresting -= 1;
1757 1791 }
1758 1792
1759 1793 final = 0;
1760 1794 j = ninteresting;
1761 1795 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
1762 1796 if (interesting[i] == 0)
1763 1797 continue;
1764 1798 final |= i;
1765 1799 j -= 1;
1766 1800 }
1767 1801 if (final == 0) {
1768 1802 keys = PyList_New(0);
1769 1803 goto bail;
1770 1804 }
1771 1805
1772 1806 dict = PyDict_New();
1773 1807 if (dict == NULL)
1774 1808 goto bail;
1775 1809
1776 1810 for (i = 0; i < revcount; i++) {
1777 1811 PyObject *key;
1778 1812
1779 1813 if ((final & (1 << i)) == 0)
1780 1814 continue;
1781 1815
1782 1816 key = PyList_GET_ITEM(revs, i);
1783 1817 Py_INCREF(key);
1784 1818 Py_INCREF(Py_None);
1785 1819 if (PyDict_SetItem(dict, key, Py_None) == -1) {
1786 1820 Py_DECREF(key);
1787 1821 Py_DECREF(Py_None);
1788 1822 goto bail;
1789 1823 }
1790 1824 }
1791 1825
1792 1826 keys = PyDict_Keys(dict);
1793 1827
1794 1828 bail:
1795 1829 free(depth);
1796 1830 free(seen);
1797 1831 free(interesting);
1798 1832 Py_XDECREF(dict);
1799 1833
1800 1834 return keys;
1801 1835 }
1802 1836
1803 1837 /*
1804 1838 * Given a (possibly overlapping) set of revs, return all the
1805 1839 * common ancestor heads: heads(::args[0] and ::args[1] and ...)
1806 1840 */
1807 1841 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
1808 1842 {
1809 1843 PyObject *ret = NULL;
1810 1844 Py_ssize_t argcount, i, len;
1811 1845 bitmask repeat = 0;
1812 1846 int revcount = 0;
1813 1847 int *revs;
1814 1848
1815 1849 argcount = PySequence_Length(args);
1816 1850 revs = PyMem_Malloc(argcount * sizeof(*revs));
1817 1851 if (argcount > 0 && revs == NULL)
1818 1852 return PyErr_NoMemory();
1819 1853 len = index_length(self);
1820 1854
1821 1855 for (i = 0; i < argcount; i++) {
1822 1856 static const int capacity = 24;
1823 1857 PyObject *obj = PySequence_GetItem(args, i);
1824 1858 bitmask x;
1825 1859 long val;
1826 1860
1827 1861 if (!PyInt_Check(obj)) {
1828 1862 PyErr_SetString(PyExc_TypeError,
1829 1863 "arguments must all be ints");
1830 1864 Py_DECREF(obj);
1831 1865 goto bail;
1832 1866 }
1833 1867 val = PyInt_AsLong(obj);
1834 1868 Py_DECREF(obj);
1835 1869 if (val == -1) {
1836 1870 ret = PyList_New(0);
1837 1871 goto done;
1838 1872 }
1839 1873 if (val < 0 || val >= len) {
1840 1874 PyErr_SetString(PyExc_IndexError, "index out of range");
1841 1875 goto bail;
1842 1876 }
1843 1877 /* this cheesy bloom filter lets us avoid some more
1844 1878 * expensive duplicate checks in the common set-is-disjoint
1845 1879 * case */
1846 1880 x = 1ull << (val & 0x3f);
1847 1881 if (repeat & x) {
1848 1882 int k;
1849 1883 for (k = 0; k < revcount; k++) {
1850 1884 if (val == revs[k])
1851 1885 goto duplicate;
1852 1886 }
1853 1887 } else
1854 1888 repeat |= x;
1855 1889 if (revcount >= capacity) {
1856 1890 PyErr_Format(PyExc_OverflowError,
1857 1891 "bitset size (%d) > capacity (%d)",
1858 1892 revcount, capacity);
1859 1893 goto bail;
1860 1894 }
1861 1895 revs[revcount++] = (int)val;
1862 1896 duplicate:;
1863 1897 }
1864 1898
1865 1899 if (revcount == 0) {
1866 1900 ret = PyList_New(0);
1867 1901 goto done;
1868 1902 }
1869 1903 if (revcount == 1) {
1870 1904 PyObject *obj;
1871 1905 ret = PyList_New(1);
1872 1906 if (ret == NULL)
1873 1907 goto bail;
1874 1908 obj = PyInt_FromLong(revs[0]);
1875 1909 if (obj == NULL)
1876 1910 goto bail;
1877 1911 PyList_SET_ITEM(ret, 0, obj);
1878 1912 goto done;
1879 1913 }
1880 1914
1881 1915 ret = find_gca_candidates(self, revs, revcount);
1882 1916 if (ret == NULL)
1883 1917 goto bail;
1884 1918
1885 1919 done:
1886 1920 PyMem_Free(revs);
1887 1921 return ret;
1888 1922
1889 1923 bail:
1890 1924 PyMem_Free(revs);
1891 1925 Py_XDECREF(ret);
1892 1926 return NULL;
1893 1927 }
1894 1928
1895 1929 /*
1896 1930 * Given a (possibly overlapping) set of revs, return the greatest
1897 1931 * common ancestors: those with the longest path to the root.
1898 1932 */
1899 1933 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1900 1934 {
1901 1935 PyObject *ret;
1902 1936 PyObject *gca = index_commonancestorsheads(self, args);
1903 1937 if (gca == NULL)
1904 1938 return NULL;
1905 1939
1906 1940 if (PyList_GET_SIZE(gca) <= 1) {
1907 1941 return gca;
1908 1942 }
1909 1943
1910 1944 ret = find_deepest(self, gca);
1911 1945 Py_DECREF(gca);
1912 1946 return ret;
1913 1947 }
1914 1948
1915 1949 /*
1916 1950 * Invalidate any trie entries introduced by added revs.
1917 1951 */
1918 1952 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
1919 1953 {
1920 1954 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
1921 1955
1922 1956 for (i = start; i < len; i++) {
1923 1957 PyObject *tuple = PyList_GET_ITEM(self->added, i);
1924 1958 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
1925 1959
1926 1960 nt_delete_node(&self->nt, PyBytes_AS_STRING(node));
1927 1961 }
1928 1962
1929 1963 if (start == 0)
1930 1964 Py_CLEAR(self->added);
1931 1965 }
1932 1966
1933 1967 /*
1934 1968 * Delete a numeric range of revs, which must be at the end of the
1935 1969 * range, but exclude the sentinel nullid entry.
1936 1970 */
1937 1971 static int index_slice_del(indexObject *self, PyObject *item)
1938 1972 {
1939 1973 Py_ssize_t start, stop, step, slicelength;
1940 1974 Py_ssize_t length = index_length(self) + 1;
1941 1975 int ret = 0;
1942 1976
1943 1977 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
1944 1978 #ifdef IS_PY3K
1945 1979 if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
1946 1980 &slicelength) < 0)
1947 1981 #else
1948 1982 if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
1949 1983 &step, &slicelength) < 0)
1950 1984 #endif
1951 1985 return -1;
1952 1986
1953 1987 if (slicelength <= 0)
1954 1988 return 0;
1955 1989
1956 1990 if ((step < 0 && start < stop) || (step > 0 && start > stop))
1957 1991 stop = start;
1958 1992
1959 1993 if (step < 0) {
1960 1994 stop = start + 1;
1961 1995 start = stop + step * (slicelength - 1) - 1;
1962 1996 step = -step;
1963 1997 }
1964 1998
1965 1999 if (step != 1) {
1966 2000 PyErr_SetString(PyExc_ValueError,
1967 2001 "revlog index delete requires step size of 1");
1968 2002 return -1;
1969 2003 }
1970 2004
1971 2005 if (stop != length - 1) {
1972 2006 PyErr_SetString(PyExc_IndexError,
1973 2007 "revlog index deletion indices are invalid");
1974 2008 return -1;
1975 2009 }
1976 2010
1977 2011 if (start < self->length) {
1978 2012 if (self->ntinitialized) {
1979 2013 Py_ssize_t i;
1980 2014
1981 2015 for (i = start + 1; i < self->length; i++) {
1982 2016 const char *node = index_node_existing(self, i);
1983 2017 if (node == NULL)
1984 2018 return -1;
1985 2019
1986 2020 nt_delete_node(&self->nt, node);
1987 2021 }
1988 2022 if (self->added)
1989 2023 index_invalidate_added(self, 0);
1990 2024 if (self->ntrev > start)
1991 2025 self->ntrev = (int)start;
1992 2026 }
1993 2027 self->length = start;
1994 2028 if (start < self->raw_length) {
1995 2029 if (self->cache) {
1996 2030 Py_ssize_t i;
1997 2031 for (i = start; i < self->raw_length; i++)
1998 2032 Py_CLEAR(self->cache[i]);
1999 2033 }
2000 2034 self->raw_length = start;
2001 2035 }
2002 2036 goto done;
2003 2037 }
2004 2038
2005 2039 if (self->ntinitialized) {
2006 2040 index_invalidate_added(self, start - self->length);
2007 2041 if (self->ntrev > start)
2008 2042 self->ntrev = (int)start;
2009 2043 }
2010 2044 if (self->added)
2011 2045 ret = PyList_SetSlice(self->added, start - self->length,
2012 2046 PyList_GET_SIZE(self->added), NULL);
2013 2047 done:
2014 2048 Py_CLEAR(self->headrevs);
2015 2049 return ret;
2016 2050 }
2017 2051
2018 2052 /*
2019 2053 * Supported ops:
2020 2054 *
2021 2055 * slice deletion
2022 2056 * string assignment (extend node->rev mapping)
2023 2057 * string deletion (shrink node->rev mapping)
2024 2058 */
2025 2059 static int index_assign_subscript(indexObject *self, PyObject *item,
2026 2060 PyObject *value)
2027 2061 {
2028 2062 char *node;
2029 2063 long rev;
2030 2064
2031 2065 if (PySlice_Check(item) && value == NULL)
2032 2066 return index_slice_del(self, item);
2033 2067
2034 2068 if (node_check(item, &node) == -1)
2035 2069 return -1;
2036 2070
2037 2071 if (value == NULL)
2038 2072 return self->ntinitialized ? nt_delete_node(&self->nt, node)
2039 2073 : 0;
2040 2074 rev = PyInt_AsLong(value);
2041 2075 if (rev > INT_MAX || rev < 0) {
2042 2076 if (!PyErr_Occurred())
2043 2077 PyErr_SetString(PyExc_ValueError, "rev out of range");
2044 2078 return -1;
2045 2079 }
2046 2080
2047 2081 if (index_init_nt(self) == -1)
2048 2082 return -1;
2049 2083 return nt_insert(&self->nt, node, (int)rev);
2050 2084 }
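/*
 * Illustrative sketch (not part of the upstream file): from C, the
 * three operations listed above are reached through the ordinary
 * mapping protocol.  `index` and `node` below are hypothetical: a
 * parsers.index instance and a 20-byte node, both borrowed
 * references.
 */
static int sketch_mapping_ops(PyObject *index, PyObject *node, long rev)
{
	PyObject *revobj = NULL, *slice = NULL;
	int r = -1;

	revobj = PyInt_FromLong(rev);
	if (revobj == NULL)
		goto done;

	/* string assignment: extend the node->rev mapping */
	if (PyObject_SetItem(index, node, revobj) == -1)
		goto done;

	/* string deletion: shrink the node->rev mapping */
	if (PyObject_DelItem(index, node) == -1)
		goto done;

	/* slice deletion: the C spelling of `del index[rev:]` */
	slice = PySlice_New(revobj, Py_None, Py_None);
	if (slice == NULL)
		goto done;
	r = PyObject_DelItem(index, slice);
done:
	Py_XDECREF(revobj);
	Py_XDECREF(slice);
	return r;
}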
2051 2085
2052 2086 /*
2053 2087 * Find all RevlogNG entries in an index that has inline data. Update
2054 2088 * the optional "offsets" table with those entries.
2055 2089 */
2056 2090 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
2057 2091 {
2058 2092 const char *data = (const char *)self->buf.buf;
2059 2093 Py_ssize_t pos = 0;
2060 2094 Py_ssize_t end = self->buf.len;
2061 2095 long incr = v1_hdrsize;
2062 2096 Py_ssize_t len = 0;
2063 2097
2064 2098 while (pos + v1_hdrsize <= end && pos >= 0) {
2065 2099 uint32_t comp_len;
2066 2100 /* 3rd element of header is length of compressed inline data */
2067 2101 comp_len = getbe32(data + pos + 8);
2068 2102 incr = v1_hdrsize + comp_len;
2069 2103 if (offsets)
2070 2104 offsets[len] = data + pos;
2071 2105 len++;
2072 2106 pos += incr;
2073 2107 }
2074 2108
2075 2109 if (pos != end) {
2076 2110 if (!PyErr_Occurred())
2077 2111 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2078 2112 return -1;
2079 2113 }
2080 2114
2081 2115 return len;
2082 2116 }
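/*
 * Illustrative sketch (not part of the upstream file): each inline
 * record is a fixed-size header followed by `comp_len` bytes of
 * compressed revision data, where comp_len is the big-endian 32-bit
 * field at byte offset 8 of the header.  The walk below restates the
 * loop above with a hand-rolled big-endian read and a caller-supplied
 * header size; the real code uses getbe32() and v1_hdrsize.
 */
static Py_ssize_t sketch_count_inline_records(const unsigned char *data,
                                              Py_ssize_t end,
                                              Py_ssize_t hdrsize)
{
	Py_ssize_t pos = 0, count = 0;

	while (pos + hdrsize <= end) {
		const unsigned char *p = data + pos + 8;
		uint32_t comp_len = ((uint32_t)p[0] << 24) |
		                    ((uint32_t)p[1] << 16) |
		                    ((uint32_t)p[2] << 8) | (uint32_t)p[3];
		pos += hdrsize + comp_len;
		count++;
	}
	return pos == end ? count : -1; /* -1: truncated or corrupt data */
}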
2083 2117
2084 2118 static int index_init(indexObject *self, PyObject *args)
2085 2119 {
2086 2120 PyObject *data_obj, *inlined_obj;
2087 2121 Py_ssize_t size;
2088 2122
2089 2123 /* Initialize before argument-checking to avoid index_dealloc() crash.
2090 2124 */
2091 2125 self->raw_length = 0;
2092 2126 self->added = NULL;
2093 2127 self->cache = NULL;
2094 2128 self->data = NULL;
2095 2129 memset(&self->buf, 0, sizeof(self->buf));
2096 2130 self->headrevs = NULL;
2097 2131 self->filteredrevs = Py_None;
2098 2132 Py_INCREF(Py_None);
2099 2133 self->ntinitialized = 0;
2100 2134 self->offsets = NULL;
2101 2135
2102 2136 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
2103 2137 return -1;
2104 2138 if (!PyObject_CheckBuffer(data_obj)) {
2105 2139 PyErr_SetString(PyExc_TypeError,
2106 2140 "data does not support buffer interface");
2107 2141 return -1;
2108 2142 }
2109 2143
2110 2144 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
2111 2145 return -1;
2112 2146 size = self->buf.len;
2113 2147
2114 2148 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
2115 2149 self->data = data_obj;
2116 2150
2117 2151 self->ntlookups = self->ntmisses = 0;
2118 2152 self->ntrev = -1;
2119 2153 Py_INCREF(self->data);
2120 2154
2121 2155 if (self->inlined) {
2122 2156 Py_ssize_t len = inline_scan(self, NULL);
2123 2157 if (len == -1)
2124 2158 goto bail;
2125 2159 self->raw_length = len;
2126 2160 self->length = len;
2127 2161 } else {
2128 2162 if (size % v1_hdrsize) {
2129 2163 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2130 2164 goto bail;
2131 2165 }
2132 2166 self->raw_length = size / v1_hdrsize;
2133 2167 self->length = self->raw_length;
2134 2168 }
2135 2169
2136 2170 return 0;
2137 2171 bail:
2138 2172 return -1;
2139 2173 }
2140 2174
2141 2175 static PyObject *index_nodemap(indexObject *self)
2142 2176 {
2143 2177 Py_INCREF(self);
2144 2178 return (PyObject *)self;
2145 2179 }
2146 2180
2147 2181 static void _index_clearcaches(indexObject *self)
2148 2182 {
2149 2183 if (self->cache) {
2150 2184 Py_ssize_t i;
2151 2185
2152 2186 for (i = 0; i < self->raw_length; i++)
2153 2187 Py_CLEAR(self->cache[i]);
2154 2188 free(self->cache);
2155 2189 self->cache = NULL;
2156 2190 }
2157 2191 if (self->offsets) {
2158 2192 PyMem_Free((void *)self->offsets);
2159 2193 self->offsets = NULL;
2160 2194 }
2161 2195 if (self->ntinitialized) {
2162 2196 nt_dealloc(&self->nt);
2163 2197 }
2164 2198 self->ntinitialized = 0;
2165 2199 Py_CLEAR(self->headrevs);
2166 2200 }
2167 2201
2168 2202 static PyObject *index_clearcaches(indexObject *self)
2169 2203 {
2170 2204 _index_clearcaches(self);
2171 2205 self->ntrev = -1;
2172 2206 self->ntlookups = self->ntmisses = 0;
2173 2207 Py_RETURN_NONE;
2174 2208 }
2175 2209
2176 2210 static void index_dealloc(indexObject *self)
2177 2211 {
2178 2212 _index_clearcaches(self);
2179 2213 Py_XDECREF(self->filteredrevs);
2180 2214 if (self->buf.buf) {
2181 2215 PyBuffer_Release(&self->buf);
2182 2216 memset(&self->buf, 0, sizeof(self->buf));
2183 2217 }
2184 2218 Py_XDECREF(self->data);
2185 2219 Py_XDECREF(self->added);
2186 2220 PyObject_Del(self);
2187 2221 }
2188 2222
2189 2223 static PySequenceMethods index_sequence_methods = {
2190 2224 (lenfunc)index_length, /* sq_length */
2191 2225 0, /* sq_concat */
2192 2226 0, /* sq_repeat */
2193 2227 (ssizeargfunc)index_get, /* sq_item */
2194 2228 0, /* sq_slice */
2195 2229 0, /* sq_ass_item */
2196 2230 0, /* sq_ass_slice */
2197 2231 (objobjproc)index_contains, /* sq_contains */
2198 2232 };
2199 2233
2200 2234 static PyMappingMethods index_mapping_methods = {
2201 2235 (lenfunc)index_length, /* mp_length */
2202 2236 (binaryfunc)index_getitem, /* mp_subscript */
2203 2237 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2204 2238 };
2205 2239
2206 2240 static PyMethodDef index_methods[] = {
2207 2241 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2208 2242 "return the gca set of the given revs"},
2209 2243 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2210 2244 METH_VARARGS,
2211 2245 "return the heads of the common ancestors of the given revs"},
2212 2246 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2213 2247 "clear the index caches"},
2214 2248 {"get", (PyCFunction)index_m_get, METH_VARARGS, "get an index entry"},
2215 2249 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets, METH_VARARGS,
2216 2250 "compute phases"},
2217 2251 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2218 2252 "reachableroots"},
2219 2253 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2220 2254 "get head revisions"}, /* Can do filtering since 3.2 */
2221 2255 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2222 2256 "get filtered head revisions"}, /* Can always do filtering */
2223 2257 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2224 2258 "determine revisions with deltas to reconstruct fulltext"},
2225 2259 {"append", (PyCFunction)index_append, METH_O, "append an index entry"},
2226 2260 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2227 2261 "match a potentially ambiguous node ID"},
2228 2262 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2229 2263 "find length of shortest hex nodeid of a binary ID"},
2230 2264 {"stats", (PyCFunction)index_stats, METH_NOARGS, "stats for the index"},
2231 2265 {NULL} /* Sentinel */
2232 2266 };
2233 2267
2234 2268 static PyGetSetDef index_getset[] = {
2235 2269 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2236 2270 {NULL} /* Sentinel */
2237 2271 };
2238 2272
2239 2273 static PyTypeObject indexType = {
2240 2274 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2241 2275 "parsers.index", /* tp_name */
2242 2276 sizeof(indexObject), /* tp_basicsize */
2243 2277 0, /* tp_itemsize */
2244 2278 (destructor)index_dealloc, /* tp_dealloc */
2245 2279 0, /* tp_print */
2246 2280 0, /* tp_getattr */
2247 2281 0, /* tp_setattr */
2248 2282 0, /* tp_compare */
2249 2283 0, /* tp_repr */
2250 2284 0, /* tp_as_number */
2251 2285 &index_sequence_methods, /* tp_as_sequence */
2252 2286 &index_mapping_methods, /* tp_as_mapping */
2253 2287 0, /* tp_hash */
2254 2288 0, /* tp_call */
2255 2289 0, /* tp_str */
2256 2290 0, /* tp_getattro */
2257 2291 0, /* tp_setattro */
2258 2292 0, /* tp_as_buffer */
2259 2293 Py_TPFLAGS_DEFAULT, /* tp_flags */
2260 2294 "revlog index", /* tp_doc */
2261 2295 0, /* tp_traverse */
2262 2296 0, /* tp_clear */
2263 2297 0, /* tp_richcompare */
2264 2298 0, /* tp_weaklistoffset */
2265 2299 0, /* tp_iter */
2266 2300 0, /* tp_iternext */
2267 2301 index_methods, /* tp_methods */
2268 2302 0, /* tp_members */
2269 2303 index_getset, /* tp_getset */
2270 2304 0, /* tp_base */
2271 2305 0, /* tp_dict */
2272 2306 0, /* tp_descr_get */
2273 2307 0, /* tp_descr_set */
2274 2308 0, /* tp_dictoffset */
2275 2309 (initproc)index_init, /* tp_init */
2276 2310 0, /* tp_alloc */
2277 2311 };
2278 2312
2279 2313 /*
2280 2314 * returns a tuple of the form (index, cache) with elements as
2281 2315 * follows:
2282 2316 *
2283 2317 * index: an index object that lazily parses RevlogNG records
2284 2318 * cache: if data is inlined, a tuple (0, index_file_content), else None
2285 2319 * index_file_content could be a string, or a buffer
2286 2320 *
2287 2321 * added complications are for backwards compatibility
2288 2322 */
2289 2323 PyObject *parse_index2(PyObject *self, PyObject *args)
2290 2324 {
2291 2325 PyObject *tuple = NULL, *cache = NULL;
2292 2326 indexObject *idx;
2293 2327 int ret;
2294 2328
2295 2329 idx = PyObject_New(indexObject, &indexType);
2296 2330 if (idx == NULL)
2297 2331 goto bail;
2298 2332
2299 2333 ret = index_init(idx, args);
2300 2334 if (ret == -1)
2301 2335 goto bail;
2302 2336
2303 2337 if (idx->inlined) {
2304 2338 cache = Py_BuildValue("iO", 0, idx->data);
2305 2339 if (cache == NULL)
2306 2340 goto bail;
2307 2341 } else {
2308 2342 cache = Py_None;
2309 2343 Py_INCREF(cache);
2310 2344 }
2311 2345
2312 2346 tuple = Py_BuildValue("NN", idx, cache);
2313 2347 if (!tuple)
2314 2348 goto bail;
2315 2349 return tuple;
2316 2350
2317 2351 bail:
2318 2352 Py_XDECREF(idx);
2319 2353 Py_XDECREF(cache);
2320 2354 Py_XDECREF(tuple);
2321 2355 return NULL;
2322 2356 }
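/*
 * Illustrative sketch (not part of the upstream file): the "N" format
 * unit used above hands ownership of an existing reference to the
 * tuple being built, while "O" would take an additional reference of
 * its own.  The hypothetical helper below shows the two side by side.
 */
static PyObject *sketch_pack_pair(PyObject *owned, PyObject *borrowed)
{
	/* "N": `owned` is given away, no DECREF afterwards.
	 * "O": Py_BuildValue INCREFs `borrowed`, the caller keeps its
	 * own reference. */
	return Py_BuildValue("NO", owned, borrowed);
}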
2323 2357
2324 2358 #ifdef WITH_RUST
2325 2359
2326 2360 /* rustlazyancestors: iteration over ancestors implemented in Rust
2327 2361 *
2328 2362 * This class holds a reference to an index and to the Rust iterator.
2329 2363 */
2330 2364 typedef struct rustlazyancestorsObjectStruct rustlazyancestorsObject;
2331 2365
2332 2366 struct rustlazyancestorsObjectStruct {
2333 2367 PyObject_HEAD
2334 2368 /* Type-specific fields go here. */
2335 2369 indexObject *index; /* Ref kept to avoid GC'ing the index */
2336 2370 void *iter; /* Rust iterator */
2337 2371 };
2338 2372
2339 2373 /* FFI exposed from Rust code */
2340 2374 rustlazyancestorsObject *
2341 2375 rustlazyancestors_init(indexObject *index,
2342 2376 /* to pass index_get_parents() */
2343 2377 int (*)(indexObject *, Py_ssize_t, int *, int),
2344 2378 /* initrevs vector */
2345 2379 Py_ssize_t initrevslen, long *initrevs, long stoprev,
2346 2380 int inclusive);
2347 2381 void rustlazyancestors_drop(rustlazyancestorsObject *self);
2348 2382 int rustlazyancestors_next(rustlazyancestorsObject *self);
2349 2383 int rustlazyancestors_contains(rustlazyancestorsObject *self, long rev);
2350 2384
2351 2385 /* CPython instance methods */
2352 2386 static int rustla_init(rustlazyancestorsObject *self, PyObject *args)
2353 2387 {
2354 2388 PyObject *initrevsarg = NULL;
2355 2389 PyObject *inclusivearg = NULL;
2356 2390 long stoprev = 0;
2357 2391 long *initrevs = NULL;
2358 2392 int inclusive = 0;
2359 2393 Py_ssize_t i;
2360 2394
2361 2395 indexObject *index;
2362 2396 if (!PyArg_ParseTuple(args, "O!O!lO!", &indexType, &index, &PyList_Type,
2363 2397 &initrevsarg, &stoprev, &PyBool_Type,
2364 2398 &inclusivearg))
2365 2399 return -1;
2366 2400
2367 2401 Py_INCREF(index);
2368 2402 self->index = index;
2369 2403
2370 2404 if (inclusivearg == Py_True)
2371 2405 inclusive = 1;
2372 2406
2373 2407 Py_ssize_t linit = PyList_GET_SIZE(initrevsarg);
2374 2408
2375 2409 initrevs = (long *)calloc(linit, sizeof(long));
2376 2410
2377 2411 if (initrevs == NULL) {
2378 2412 PyErr_NoMemory();
2379 2413 goto bail;
2380 2414 }
2381 2415
2382 2416 for (i = 0; i < linit; i++) {
2383 2417 initrevs[i] = PyInt_AsLong(PyList_GET_ITEM(initrevsarg, i));
2384 2418 }
2385 2419 if (PyErr_Occurred())
2386 2420 goto bail;
2387 2421
2388 2422 self->iter = rustlazyancestors_init(index, index_get_parents, linit,
2389 2423 initrevs, stoprev, inclusive);
2390 2424 if (self->iter == NULL) {
2391 2425 /* if this is because of GraphError::ParentOutOfRange
2392 2426 * index_get_parents() has already set the proper ValueError */
2393 2427 goto bail;
2394 2428 }
2395 2429
2396 2430 free(initrevs);
2397 2431 return 0;
2398 2432
2399 2433 bail:
2400 2434 free(initrevs);
2401 2435 return -1;
2402 2436 }
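/*
 * Illustrative sketch (not part of the upstream file): each "O!"
 * unit above consumes two C arguments, a PyTypeObject* and a
 * PyObject** output slot, and raises TypeError itself when the
 * Python-level argument is not an instance of that type.  The stored
 * references are borrowed, which is why rustla_init() Py_INCREFs the
 * index it keeps.  Hypothetical standalone parser:
 */
static int sketch_parse_revlist(PyObject *args, PyObject **revlist)
{
	/* expects exactly one argument, which must already be a list;
	 * on success *revlist is a borrowed reference and nonzero is
	 * returned */
	return PyArg_ParseTuple(args, "O!", &PyList_Type, revlist);
}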
2403 2437
2404 2438 static void rustla_dealloc(rustlazyancestorsObject *self)
2405 2439 {
2406 2440 Py_XDECREF(self->index);
2407 2441 if (self->iter != NULL) { /* can happen if rustla_init failed */
2408 2442 rustlazyancestors_drop(self->iter);
2409 2443 }
2410 2444 PyObject_Del(self);
2411 2445 }
2412 2446
2413 2447 static PyObject *rustla_next(rustlazyancestorsObject *self)
2414 2448 {
2415 2449 int res = rustlazyancestors_next(self->iter);
2416 2450 if (res == -1) {
2417 2451 /* No explicit exception is needed here: tp_iternext signals
2418 2452 * exhaustion by returning NULL with no exception set, as
2419 2453 * Objects/rangeobject.c and Modules/_io/stringio.c demonstrate.
2420 2454 */
2421 2455 return NULL;
2422 2456 }
2423 2457 return PyInt_FromLong(res);
2424 2458 }
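/*
 * Illustrative sketch (not part of the upstream file): a tp_iternext
 * slot like rustla_next() above ends iteration by returning NULL
 * without setting an exception; CPython converts that into a clean
 * StopIteration.  Hypothetical countdown step showing the protocol:
 */
static PyObject *sketch_countdown_next(long *remaining)
{
	if (*remaining <= 0)
		return NULL; /* NULL with no pending exception == exhausted */
	return PyInt_FromLong((*remaining)--);
}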
2425 2459
2426 2460 static int rustla_contains(rustlazyancestorsObject *self, PyObject *rev)
2427 2461 {
2428 2462 long lrev;
2429 2463 if (!pylong_to_long(rev, &lrev)) {
2430 2464 PyErr_Clear();
2431 2465 return 0;
2432 2466 }
2433 2467 return rustlazyancestors_contains(self->iter, lrev);
2434 2468 }
2435 2469
2436 2470 static PySequenceMethods rustla_sequence_methods = {
2437 2471 0, /* sq_length */
2438 2472 0, /* sq_concat */
2439 2473 0, /* sq_repeat */
2440 2474 0, /* sq_item */
2441 2475 0, /* sq_slice */
2442 2476 0, /* sq_ass_item */
2443 2477 0, /* sq_ass_slice */
2444 2478 (objobjproc)rustla_contains, /* sq_contains */
2445 2479 };
2446 2480
2447 2481 static PyTypeObject rustlazyancestorsType = {
2448 2482 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2449 2483 "parsers.rustlazyancestors", /* tp_name */
2450 2484 sizeof(rustlazyancestorsObject), /* tp_basicsize */
2451 2485 0, /* tp_itemsize */
2452 2486 (destructor)rustla_dealloc, /* tp_dealloc */
2453 2487 0, /* tp_print */
2454 2488 0, /* tp_getattr */
2455 2489 0, /* tp_setattr */
2456 2490 0, /* tp_compare */
2457 2491 0, /* tp_repr */
2458 2492 0, /* tp_as_number */
2459 2493 &rustla_sequence_methods, /* tp_as_sequence */
2460 2494 0, /* tp_as_mapping */
2461 2495 0, /* tp_hash */
2462 2496 0, /* tp_call */
2463 2497 0, /* tp_str */
2464 2498 0, /* tp_getattro */
2465 2499 0, /* tp_setattro */
2466 2500 0, /* tp_as_buffer */
2467 2501 Py_TPFLAGS_DEFAULT, /* tp_flags */
2468 2502 "Iterator over ancestors, implemented in Rust", /* tp_doc */
2469 2503 0, /* tp_traverse */
2470 2504 0, /* tp_clear */
2471 2505 0, /* tp_richcompare */
2472 2506 0, /* tp_weaklistoffset */
2473 2507 0, /* tp_iter */
2474 2508 (iternextfunc)rustla_next, /* tp_iternext */
2475 2509 0, /* tp_methods */
2476 2510 0, /* tp_members */
2477 2511 0, /* tp_getset */
2478 2512 0, /* tp_base */
2479 2513 0, /* tp_dict */
2480 2514 0, /* tp_descr_get */
2481 2515 0, /* tp_descr_set */
2482 2516 0, /* tp_dictoffset */
2483 2517 (initproc)rustla_init, /* tp_init */
2484 2518 0, /* tp_alloc */
2485 2519 };
2486 2520 #endif /* WITH_RUST */
2487 2521
2488 2522 void revlog_module_init(PyObject *mod)
2489 2523 {
2490 2524 indexType.tp_new = PyType_GenericNew;
2491 2525 if (PyType_Ready(&indexType) < 0)
2492 2526 return;
2493 2527 Py_INCREF(&indexType);
2494 2528 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2495 2529
2496 2530 nodetreeType.tp_new = PyType_GenericNew;
2497 2531 if (PyType_Ready(&nodetreeType) < 0)
2498 2532 return;
2499 2533 Py_INCREF(&nodetreeType);
2500 2534 PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);
2501 2535
2502 2536 if (!nullentry) {
2503 2537 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0,
2504 2538 0, -1, -1, -1, -1, nullid, 20);
2505 2539 }
2506 2540 if (nullentry)
2507 2541 PyObject_GC_UnTrack(nullentry);
2508 2542
2509 2543 #ifdef WITH_RUST
2510 2544 rustlazyancestorsType.tp_new = PyType_GenericNew;
2511 2545 if (PyType_Ready(&rustlazyancestorsType) < 0)
2512 2546 return;
2513 2547 Py_INCREF(&rustlazyancestorsType);
2514 2548 PyModule_AddObject(mod, "rustlazyancestors",
2515 2549 (PyObject *)&rustlazyancestorsType);
2516 2550 #endif
2517 2551 }
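/*
 * Illustrative sketch (not part of the upstream file):
 * PyModule_AddObject() steals a reference to its value argument on
 * success, which is why each type above is Py_INCREF'ed right before
 * the call; the module then owns that extra reference while the
 * static type object keeps its original one.  Hypothetical wrapper
 * keeping the accounting balanced on both paths:
 */
static int sketch_module_add(PyObject *mod, const char *name, PyObject *obj)
{
	Py_INCREF(obj); /* the reference the module will steal on success */
	if (PyModule_AddObject(mod, name, obj) < 0) {
		Py_DECREF(obj); /* not stolen on failure; undo */
		return -1;
	}
	return 0;
}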