index: remove side-effect from failed nt_init()...
Martin von Zweigbergk
r38972:f7d8fb2e default
@@ -1,2179 +1,2179 @@
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
20 20 #ifdef IS_PY3K
21 21 /* The mapping of Python types is meant to be temporary to get Python
22 22 * 3 to compile. We should remove this once Python 3 support is fully
23 23 * supported and proper types are used in the extensions themselves. */
24 24 #define PyInt_Check PyLong_Check
25 25 #define PyInt_FromLong PyLong_FromLong
26 26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 27 #define PyInt_AS_LONG PyLong_AS_LONG
28 28 #define PyInt_AsLong PyLong_AsLong
29 29 #endif
30 30
31 31 typedef struct {
32 32 int children[16];
33 33 } nodetreenode;
34 34
35 35 /*
36 36 * A base-16 trie for fast node->rev mapping.
37 37 *
38 38 * Positive value is index of the next node in the trie
39 39 * Negative value is a leaf: -(rev + 2)
40 40 * Zero is empty
41 41 */
42 42 typedef struct {
43 43 nodetreenode *nodes;
44 44 unsigned length; /* # nodes in use */
45 45 unsigned capacity; /* # nodes allocated */
46 46 int depth; /* maximum depth of tree */
47 47 int splits; /* # splits performed */
48 48 } nodetree;
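/*
 * Illustrative note (not part of the original file): a child value v in
 * nodetreenode.children[] encodes three cases:
 *   v > 0  -> v is the index of the next nodetreenode in nodes[]
 *   v == 0 -> empty slot
 *   v < 0  -> leaf for rev = -(v + 2); e.g. rev 0 is stored as -2, rev 5
 *             as -7, and the nullid sentinel (rev -1) as -1.
 */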
49 49
50 50 /*
51 51 * This class has two behaviors.
52 52 *
53 53 * When used in a list-like way (with integer keys), we decode an
54 54 * entry in a RevlogNG index file on demand. Our last entry is a
55 55 * sentinel, always a nullid. We have limited support for
56 56 * integer-keyed insert and delete, only at elements right before the
57 57 * sentinel.
58 58 *
59 59 * With string keys, we lazily perform a reverse mapping from node to
60 60 * rev, using a base-16 trie.
61 61 */
62 62 typedef struct {
63 63 PyObject_HEAD
64 64 /* Type-specific fields go here. */
65 65 PyObject *data; /* raw bytes of index */
66 66 Py_buffer buf; /* buffer of data */
67 67 PyObject **cache; /* cached tuples */
68 68 const char **offsets; /* populated on demand */
69 69 Py_ssize_t raw_length; /* original number of elements */
70 70 Py_ssize_t length; /* current number of elements */
71 71 PyObject *added; /* populated on demand */
72 72 PyObject *headrevs; /* cache, invalidated on changes */
73 73 PyObject *filteredrevs;/* filtered revs set */
74 74 nodetree *nt; /* base-16 trie */
75 75 int ntrev; /* last rev scanned */
76 76 int ntlookups; /* # lookups */
77 77 int ntmisses; /* # lookups that miss the cache */
78 78 int inlined;
79 79 } indexObject;
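/*
 * Illustrative usage sketch (not part of the original file), assuming this
 * type is exposed to Python as the revlog index class:
 *   ix[5]       -> the 8-tuple for rev 5 (see index_get() below)
 *   ix[node]    -> the rev of a 20-byte node, -1 for nullid, RevlogError
 *                  if absent (see index_getitem()/index_find_node() below)
 *   node in ix  -> membership test (see index_contains() below)
 */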
80 80
81 81 static Py_ssize_t index_length(const indexObject *self)
82 82 {
83 83 if (self->added == NULL)
84 84 return self->length - 1;
85 85 return self->length + PyList_GET_SIZE(self->added) - 1;
86 86 }
87 87
88 88 static PyObject *nullentry;
89 89 static const char nullid[20];
90 90
91 91 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
92 92
93 93 #if LONG_MAX == 0x7fffffffL
94 94 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
95 95 #else
96 96 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
97 97 #endif
98 98
99 99 /* A RevlogNG v1 index entry is 64 bytes long. */
100 100 static const long v1_hdrsize = 64;
101 101
102 102 /*
103 103 * Return a pointer to the beginning of a RevlogNG record.
104 104 */
105 105 static const char *index_deref(indexObject *self, Py_ssize_t pos)
106 106 {
107 107 if (self->inlined && pos > 0) {
108 108 if (self->offsets == NULL) {
109 109 self->offsets = PyMem_Malloc(self->raw_length *
110 110 sizeof(*self->offsets));
111 111 if (self->offsets == NULL)
112 112 return (const char *)PyErr_NoMemory();
113 113 inline_scan(self, self->offsets);
114 114 }
115 115 return self->offsets[pos];
116 116 }
117 117
118 118 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
119 119 }
120 120
121 121 static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
122 122 int *ps, int maxrev)
123 123 {
124 124 if (rev >= self->length - 1) {
125 125 PyObject *tuple = PyList_GET_ITEM(self->added,
126 126 rev - self->length + 1);
127 127 ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
128 128 ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
129 129 } else {
130 130 const char *data = index_deref(self, rev);
131 131 ps[0] = getbe32(data + 24);
132 132 ps[1] = getbe32(data + 28);
133 133 }
134 134 /* If the index file is corrupted, ps[] may point to invalid revisions,
135 135 * so trusting them unconditionally risks a buffer overflow. */
136 136 if (ps[0] > maxrev || ps[1] > maxrev) {
137 137 PyErr_SetString(PyExc_ValueError, "parent out of range");
138 138 return -1;
139 139 }
140 140 return 0;
141 141 }
142 142
143 143
144 144 /*
145 145 * RevlogNG format (all in big endian, data may be inlined):
146 146 * 6 bytes: offset
147 147 * 2 bytes: flags
148 148 * 4 bytes: compressed length
149 149 * 4 bytes: uncompressed length
150 150 * 4 bytes: base revision
151 151 * 4 bytes: link revision
152 152 * 4 bytes: parent 1 revision
153 153 * 4 bytes: parent 2 revision
154 154 * 32 bytes: nodeid (only 20 bytes used)
155 155 */
156 156 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
157 157 {
158 158 uint64_t offset_flags;
159 159 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
160 160 const char *c_node_id;
161 161 const char *data;
162 162 Py_ssize_t length = index_length(self);
163 163 PyObject *entry;
164 164
165 165 if (pos == -1) {
166 166 Py_INCREF(nullentry);
167 167 return nullentry;
168 168 }
169 169
170 170 if (pos < 0 || pos >= length) {
171 171 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
172 172 return NULL;
173 173 }
174 174
175 175 if (pos >= self->length - 1) {
176 176 PyObject *obj;
177 177 obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
178 178 Py_INCREF(obj);
179 179 return obj;
180 180 }
181 181
182 182 if (self->cache) {
183 183 if (self->cache[pos]) {
184 184 Py_INCREF(self->cache[pos]);
185 185 return self->cache[pos];
186 186 }
187 187 } else {
188 188 self->cache = calloc(self->raw_length, sizeof(PyObject *));
189 189 if (self->cache == NULL)
190 190 return PyErr_NoMemory();
191 191 }
192 192
193 193 data = index_deref(self, pos);
194 194 if (data == NULL)
195 195 return NULL;
196 196
197 197 offset_flags = getbe32(data + 4);
198 198 if (pos == 0) /* mask out version number for the first entry */
199 199 offset_flags &= 0xFFFF;
200 200 else {
201 201 uint32_t offset_high = getbe32(data);
202 202 offset_flags |= ((uint64_t)offset_high) << 32;
203 203 }
204 204
205 205 comp_len = getbe32(data + 8);
206 206 uncomp_len = getbe32(data + 12);
207 207 base_rev = getbe32(data + 16);
208 208 link_rev = getbe32(data + 20);
209 209 parent_1 = getbe32(data + 24);
210 210 parent_2 = getbe32(data + 28);
211 211 c_node_id = data + 32;
212 212
213 213 entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
214 214 uncomp_len, base_rev, link_rev,
215 215 parent_1, parent_2, c_node_id, 20);
216 216
217 217 if (entry) {
218 218 PyObject_GC_UnTrack(entry);
219 219 Py_INCREF(entry);
220 220 }
221 221
222 222 self->cache[pos] = entry;
223 223
224 224 return entry;
225 225 }
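/*
 * Minimal sketch (not part of the original file) of how a raw 64-byte v1
 * record maps onto the tuple built by index_get() above; the struct and
 * function names are made up for illustration.  Note that for rev 0,
 * index_get() additionally masks the version number out of offset_flags.
 */
typedef struct {
	uint64_t offset_flags; /* bytes 0-7: 6-byte offset << 16 | 2-byte flags */
	int comp_len;          /* bytes 8-11: compressed length */
	int uncomp_len;        /* bytes 12-15: uncompressed length */
	int base_rev;          /* bytes 16-19: base revision */
	int link_rev;          /* bytes 20-23: link revision */
	int parent_1;          /* bytes 24-27: first parent */
	int parent_2;          /* bytes 28-31: second parent */
	const char *node;      /* bytes 32-51: nodeid (20 of 32 bytes used) */
} example_v1_entry;

static inline void example_parse_v1(const char *data, example_v1_entry *e)
{
	e->offset_flags = (((uint64_t)getbe32(data)) << 32) | getbe32(data + 4);
	e->comp_len = getbe32(data + 8);
	e->uncomp_len = getbe32(data + 12);
	e->base_rev = getbe32(data + 16);
	e->link_rev = getbe32(data + 20);
	e->parent_1 = getbe32(data + 24);
	e->parent_2 = getbe32(data + 28);
	e->node = data + 32;
}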
226 226
227 227 /*
228 228 * Return the 20-byte SHA of the node corresponding to the given rev.
229 229 */
230 230 static const char *index_node(indexObject *self, Py_ssize_t pos)
231 231 {
232 232 Py_ssize_t length = index_length(self);
233 233 const char *data;
234 234
235 235 if (pos == -1)
236 236 return nullid;
237 237
238 238 if (pos >= length)
239 239 return NULL;
240 240
241 241 if (pos >= self->length - 1) {
242 242 PyObject *tuple, *str;
243 243 tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
244 244 str = PyTuple_GetItem(tuple, 7);
245 245 return str ? PyBytes_AS_STRING(str) : NULL;
246 246 }
247 247
248 248 data = index_deref(self, pos);
249 249 return data ? data + 32 : NULL;
250 250 }
251 251
252 252 /*
253 253 * Return the 20-byte SHA of the node corresponding to the given rev. The
254 254 * rev is assumed to exist. If not, an exception is set.
255 255 */
256 256 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
257 257 {
258 258 const char *node = index_node(self, pos);
259 259 if (node == NULL) {
260 260 PyErr_Format(PyExc_IndexError, "could not access rev %d",
261 261 (int)pos);
262 262 }
263 263 return node;
264 264 }
265 265
266 266 static int nt_insert(indexObject *self, const char *node, int rev);
267 267
268 268 static int node_check(PyObject *obj, char **node)
269 269 {
270 270 Py_ssize_t nodelen;
271 271 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
272 272 return -1;
273 273 if (nodelen == 20)
274 274 return 0;
275 275 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
276 276 return -1;
277 277 }
278 278
279 279 static PyObject *index_append(indexObject *self, PyObject *obj)
280 280 {
281 281 char *node;
282 282 Py_ssize_t len;
283 283
284 284 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
285 285 PyErr_SetString(PyExc_TypeError, "8-tuple required");
286 286 return NULL;
287 287 }
288 288
289 289 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
290 290 return NULL;
291 291
292 292 len = index_length(self);
293 293
294 294 if (self->added == NULL) {
295 295 self->added = PyList_New(0);
296 296 if (self->added == NULL)
297 297 return NULL;
298 298 }
299 299
300 300 if (PyList_Append(self->added, obj) == -1)
301 301 return NULL;
302 302
303 303 if (self->nt)
304 304 nt_insert(self, node, len);
305 305
306 306 Py_CLEAR(self->headrevs);
307 307 Py_RETURN_NONE;
308 308 }
309 309
310 310 static void _index_clearcaches(indexObject *self)
311 311 {
312 312 if (self->cache) {
313 313 Py_ssize_t i;
314 314
315 315 for (i = 0; i < self->raw_length; i++)
316 316 Py_CLEAR(self->cache[i]);
317 317 free(self->cache);
318 318 self->cache = NULL;
319 319 }
320 320 if (self->offsets) {
321 321 PyMem_Free(self->offsets);
322 322 self->offsets = NULL;
323 323 }
324 324 if (self->nt != NULL) {
325 325 free(self->nt->nodes);
326 326 PyMem_Free(self->nt);
327 327 }
328 328 self->nt = NULL;
329 329 Py_CLEAR(self->headrevs);
330 330 }
331 331
332 332 static PyObject *index_clearcaches(indexObject *self)
333 333 {
334 334 _index_clearcaches(self);
335 335 self->ntrev = -1;
336 336 self->ntlookups = self->ntmisses = 0;
337 337 Py_RETURN_NONE;
338 338 }
339 339
340 340 static PyObject *index_stats(indexObject *self)
341 341 {
342 342 PyObject *obj = PyDict_New();
343 343 PyObject *t = NULL;
344 344
345 345 if (obj == NULL)
346 346 return NULL;
347 347
348 348 #define istat(__n, __d) \
349 349 do { \
350 350 t = PyInt_FromSsize_t(self->__n); \
351 351 if (!t) \
352 352 goto bail; \
353 353 if (PyDict_SetItemString(obj, __d, t) == -1) \
354 354 goto bail; \
355 355 Py_DECREF(t); \
356 356 } while (0)
357 357
358 358 if (self->added) {
359 359 Py_ssize_t len = PyList_GET_SIZE(self->added);
360 360 t = PyInt_FromSsize_t(len);
361 361 if (!t)
362 362 goto bail;
363 363 if (PyDict_SetItemString(obj, "index entries added", t) == -1)
364 364 goto bail;
365 365 Py_DECREF(t);
366 366 }
367 367
368 368 if (self->raw_length != self->length - 1)
369 369 istat(raw_length, "revs on disk");
370 370 istat(length, "revs in memory");
371 371 istat(ntlookups, "node trie lookups");
372 372 istat(ntmisses, "node trie misses");
373 373 istat(ntrev, "node trie last rev scanned");
374 374 if (self->nt) {
375 375 istat(nt->capacity, "node trie capacity");
376 376 istat(nt->depth, "node trie depth");
377 377 istat(nt->length, "node trie count");
378 378 istat(nt->splits, "node trie splits");
379 379 }
380 380
381 381 #undef istat
382 382
383 383 return obj;
384 384
385 385 bail:
386 386 Py_XDECREF(obj);
387 387 Py_XDECREF(t);
388 388 return NULL;
389 389 }
390 390
391 391 /*
392 392 * When we cache a list, we want to be sure the caller can't mutate
393 393 * the cached copy.
394 394 */
395 395 static PyObject *list_copy(PyObject *list)
396 396 {
397 397 Py_ssize_t len = PyList_GET_SIZE(list);
398 398 PyObject *newlist = PyList_New(len);
399 399 Py_ssize_t i;
400 400
401 401 if (newlist == NULL)
402 402 return NULL;
403 403
404 404 for (i = 0; i < len; i++) {
405 405 PyObject *obj = PyList_GET_ITEM(list, i);
406 406 Py_INCREF(obj);
407 407 PyList_SET_ITEM(newlist, i, obj);
408 408 }
409 409
410 410 return newlist;
411 411 }
412 412
413 413 static int check_filter(PyObject *filter, Py_ssize_t arg)
414 414 {
415 415 if (filter) {
416 416 PyObject *arglist, *result;
417 417 int isfiltered;
418 418
419 419 arglist = Py_BuildValue("(n)", arg);
420 420 if (!arglist) {
421 421 return -1;
422 422 }
423 423
424 424 result = PyEval_CallObject(filter, arglist);
425 425 Py_DECREF(arglist);
426 426 if (!result) {
427 427 return -1;
428 428 }
429 429
430 430 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
431 431 * same as this function, so we can just return it directly. */
432 432 isfiltered = PyObject_IsTrue(result);
433 433 Py_DECREF(result);
434 434 return isfiltered;
435 435 } else {
436 436 return 0;
437 437 }
438 438 }
439 439
440 440 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
441 441 Py_ssize_t marker, char *phases)
442 442 {
443 443 PyObject *iter = NULL;
444 444 PyObject *iter_item = NULL;
445 445 Py_ssize_t min_idx = index_length(self) + 2;
446 446 long iter_item_long;
447 447
448 448 if (PyList_GET_SIZE(list) != 0) {
449 449 iter = PyObject_GetIter(list);
450 450 if (iter == NULL)
451 451 return -2;
452 452 while ((iter_item = PyIter_Next(iter))) {
453 453 iter_item_long = PyInt_AS_LONG(iter_item);
454 454 Py_DECREF(iter_item);
455 455 if (iter_item_long < min_idx)
456 456 min_idx = iter_item_long;
457 457 phases[iter_item_long] = marker;
458 458 }
459 459 Py_DECREF(iter);
460 460 }
461 461
462 462 return min_idx;
463 463 }
464 464
465 465 static inline void set_phase_from_parents(char *phases, int parent_1,
466 466 int parent_2, Py_ssize_t i)
467 467 {
468 468 if (parent_1 >= 0 && phases[parent_1] > phases[i])
469 469 phases[i] = phases[parent_1];
470 470 if (parent_2 >= 0 && phases[parent_2] > phases[i])
471 471 phases[i] = phases[parent_2];
472 472 }
473 473
474 474 static PyObject *reachableroots2(indexObject *self, PyObject *args)
475 475 {
476 476
477 477 /* Input */
478 478 long minroot;
479 479 PyObject *includepatharg = NULL;
480 480 int includepath = 0;
481 481 /* heads and roots are lists */
482 482 PyObject *heads = NULL;
483 483 PyObject *roots = NULL;
484 484 PyObject *reachable = NULL;
485 485
486 486 PyObject *val;
487 487 Py_ssize_t len = index_length(self);
488 488 long revnum;
489 489 Py_ssize_t k;
490 490 Py_ssize_t i;
491 491 Py_ssize_t l;
492 492 int r;
493 493 int parents[2];
494 494
495 495 /* Internal data structure:
496 496 * tovisit: array of length len+1 (all revs + nullrev), filled up to lentovisit
497 497 * revstates: array of length len+1 (all revs + nullrev) */
498 498 int *tovisit = NULL;
499 499 long lentovisit = 0;
500 500 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
501 501 char *revstates = NULL;
502 502
503 503 /* Get arguments */
504 504 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
505 505 &PyList_Type, &roots,
506 506 &PyBool_Type, &includepatharg))
507 507 goto bail;
508 508
509 509 if (includepatharg == Py_True)
510 510 includepath = 1;
511 511
512 512 /* Initialize return set */
513 513 reachable = PyList_New(0);
514 514 if (reachable == NULL)
515 515 goto bail;
516 516
517 517 /* Initialize internal datastructures */
518 518 tovisit = (int *)malloc((len + 1) * sizeof(int));
519 519 if (tovisit == NULL) {
520 520 PyErr_NoMemory();
521 521 goto bail;
522 522 }
523 523
524 524 revstates = (char *)calloc(len + 1, 1);
525 525 if (revstates == NULL) {
526 526 PyErr_NoMemory();
527 527 goto bail;
528 528 }
529 529
530 530 l = PyList_GET_SIZE(roots);
531 531 for (i = 0; i < l; i++) {
532 532 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
533 533 if (revnum == -1 && PyErr_Occurred())
534 534 goto bail;
535 535 /* If root is out of range, e.g. wdir(), it must be unreachable
536 536 * from heads. So we can just ignore it. */
537 537 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
538 538 continue;
539 539 revstates[revnum + 1] |= RS_ROOT;
540 540 }
541 541
542 542 /* Populate tovisit with all the heads */
543 543 l = PyList_GET_SIZE(heads);
544 544 for (i = 0; i < l; i++) {
545 545 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
546 546 if (revnum == -1 && PyErr_Occurred())
547 547 goto bail;
548 548 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
549 549 PyErr_SetString(PyExc_IndexError, "head out of range");
550 550 goto bail;
551 551 }
552 552 if (!(revstates[revnum + 1] & RS_SEEN)) {
553 553 tovisit[lentovisit++] = (int)revnum;
554 554 revstates[revnum + 1] |= RS_SEEN;
555 555 }
556 556 }
557 557
558 558 /* Visit the tovisit list and find the reachable roots */
559 559 k = 0;
560 560 while (k < lentovisit) {
561 561 /* Add the node to reachable if it is a root*/
562 562 revnum = tovisit[k++];
563 563 if (revstates[revnum + 1] & RS_ROOT) {
564 564 revstates[revnum + 1] |= RS_REACHABLE;
565 565 val = PyInt_FromLong(revnum);
566 566 if (val == NULL)
567 567 goto bail;
568 568 r = PyList_Append(reachable, val);
569 569 Py_DECREF(val);
570 570 if (r < 0)
571 571 goto bail;
572 572 if (includepath == 0)
573 573 continue;
574 574 }
575 575
576 576 /* Add its parents to the list of nodes to visit */
577 577 if (revnum == -1)
578 578 continue;
579 579 r = index_get_parents(self, revnum, parents, (int)len - 1);
580 580 if (r < 0)
581 581 goto bail;
582 582 for (i = 0; i < 2; i++) {
583 583 if (!(revstates[parents[i] + 1] & RS_SEEN)
584 584 && parents[i] >= minroot) {
585 585 tovisit[lentovisit++] = parents[i];
586 586 revstates[parents[i] + 1] |= RS_SEEN;
587 587 }
588 588 }
589 589 }
590 590
591 591 /* Find all the nodes in between the roots we found and the heads
592 592 * and add them to the reachable set */
593 593 if (includepath == 1) {
594 594 long minidx = minroot;
595 595 if (minidx < 0)
596 596 minidx = 0;
597 597 for (i = minidx; i < len; i++) {
598 598 if (!(revstates[i + 1] & RS_SEEN))
599 599 continue;
600 600 r = index_get_parents(self, i, parents, (int)len - 1);
601 601 /* Corrupted index file, error is set from
602 602 * index_get_parents */
603 603 if (r < 0)
604 604 goto bail;
605 605 if (((revstates[parents[0] + 1] |
606 606 revstates[parents[1] + 1]) & RS_REACHABLE)
607 607 && !(revstates[i + 1] & RS_REACHABLE)) {
608 608 revstates[i + 1] |= RS_REACHABLE;
609 609 val = PyInt_FromLong(i);
610 610 if (val == NULL)
611 611 goto bail;
612 612 r = PyList_Append(reachable, val);
613 613 Py_DECREF(val);
614 614 if (r < 0)
615 615 goto bail;
616 616 }
617 617 }
618 618 }
619 619
620 620 free(revstates);
621 621 free(tovisit);
622 622 return reachable;
623 623 bail:
624 624 Py_XDECREF(reachable);
625 625 free(revstates);
626 626 free(tovisit);
627 627 return NULL;
628 628 }
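/*
 * Illustrative walk-through (not part of the original file) of the
 * revstates[] bits above, assuming a linear history 0 -> 1 -> 2 with
 * minroot=0, roots=[0], heads=[2] and includepath true:
 *   - setup marks rev 0 with RS_ROOT;
 *   - the scan from the heads marks 2, 1 and 0 RS_SEEN, and on reaching
 *     the root 0 marks it RS_REACHABLE and appends it to the result;
 *   - the forward pass then marks 1 and 2 RS_REACHABLE (their parent is
 *     already reachable) and appends them, giving [0, 1, 2].
 */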
629 629
630 630 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
631 631 {
632 632 PyObject *roots = Py_None;
633 633 PyObject *ret = NULL;
634 634 PyObject *phasessize = NULL;
635 635 PyObject *phaseroots = NULL;
636 636 PyObject *phaseset = NULL;
637 637 PyObject *phasessetlist = NULL;
638 638 PyObject *rev = NULL;
639 639 Py_ssize_t len = index_length(self);
640 640 Py_ssize_t numphase = 0;
641 641 Py_ssize_t minrevallphases = 0;
642 642 Py_ssize_t minrevphase = 0;
643 643 Py_ssize_t i = 0;
644 644 char *phases = NULL;
645 645 long phase;
646 646
647 647 if (!PyArg_ParseTuple(args, "O", &roots))
648 648 goto done;
649 649 if (roots == NULL || !PyList_Check(roots)) {
650 650 PyErr_SetString(PyExc_TypeError, "roots must be a list");
651 651 goto done;
652 652 }
653 653
654 654 phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
655 655 if (phases == NULL) {
656 656 PyErr_NoMemory();
657 657 goto done;
658 658 }
659 659 /* Put the phase information of all the roots in phases */
660 660 numphase = PyList_GET_SIZE(roots)+1;
661 661 minrevallphases = len + 1;
662 662 phasessetlist = PyList_New(numphase);
663 663 if (phasessetlist == NULL)
664 664 goto done;
665 665
666 666 PyList_SET_ITEM(phasessetlist, 0, Py_None);
667 667 Py_INCREF(Py_None);
668 668
669 669 for (i = 0; i < numphase-1; i++) {
670 670 phaseroots = PyList_GET_ITEM(roots, i);
671 671 phaseset = PySet_New(NULL);
672 672 if (phaseset == NULL)
673 673 goto release;
674 674 PyList_SET_ITEM(phasessetlist, i+1, phaseset);
675 675 if (!PyList_Check(phaseroots)) {
676 676 PyErr_SetString(PyExc_TypeError,
677 677 "roots item must be a list");
678 678 goto release;
679 679 }
680 680 minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
681 681 if (minrevphase == -2) /* Error from add_roots_get_min */
682 682 goto release;
683 683 minrevallphases = MIN(minrevallphases, minrevphase);
684 684 }
685 685 /* Propagate the phase information from the roots to the revs */
686 686 if (minrevallphases != -1) {
687 687 int parents[2];
688 688 for (i = minrevallphases; i < len; i++) {
689 689 if (index_get_parents(self, i, parents,
690 690 (int)len - 1) < 0)
691 691 goto release;
692 692 set_phase_from_parents(phases, parents[0], parents[1], i);
693 693 }
694 694 }
695 695 /* Transform phase list to a python list */
696 696 phasessize = PyInt_FromLong(len);
697 697 if (phasessize == NULL)
698 698 goto release;
699 699 for (i = 0; i < len; i++) {
700 700 phase = phases[i];
701 701 /* We only store the phase sets for non-public phases; the public
702 702 * phase is computed as a difference */
703 703 if (phase != 0) {
704 704 phaseset = PyList_GET_ITEM(phasessetlist, phase);
705 705 rev = PyInt_FromLong(i);
706 706 if (rev == NULL)
707 707 goto release;
708 708 PySet_Add(phaseset, rev);
709 709 Py_XDECREF(rev);
710 710 }
711 711 }
712 712 ret = PyTuple_Pack(2, phasessize, phasessetlist);
713 713
714 714 release:
715 715 Py_XDECREF(phasessize);
716 716 Py_XDECREF(phasessetlist);
717 717 done:
718 718 free(phases);
719 719 return ret;
720 720 }
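/*
 * Illustrative example (not part of the original file), assuming a linear
 * history 0 -> 1 -> 2 and roots == [[1], []] (draft root at rev 1, no
 * secret roots): add_roots_get_min() sets phases[1] = 1, the propagation
 * loop then sets phases[2] = 1 from its parent, and the function returns
 * (3, [None, {1, 2}, set()]).
 */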
721 721
722 722 static PyObject *index_headrevs(indexObject *self, PyObject *args)
723 723 {
724 724 Py_ssize_t i, j, len;
725 725 char *nothead = NULL;
726 726 PyObject *heads = NULL;
727 727 PyObject *filter = NULL;
728 728 PyObject *filteredrevs = Py_None;
729 729
730 730 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
731 731 return NULL;
732 732 }
733 733
734 734 if (self->headrevs && filteredrevs == self->filteredrevs)
735 735 return list_copy(self->headrevs);
736 736
737 737 Py_DECREF(self->filteredrevs);
738 738 self->filteredrevs = filteredrevs;
739 739 Py_INCREF(filteredrevs);
740 740
741 741 if (filteredrevs != Py_None) {
742 742 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
743 743 if (!filter) {
744 744 PyErr_SetString(PyExc_TypeError,
745 745 "filteredrevs has no attribute __contains__");
746 746 goto bail;
747 747 }
748 748 }
749 749
750 750 len = index_length(self);
751 751 heads = PyList_New(0);
752 752 if (heads == NULL)
753 753 goto bail;
754 754 if (len == 0) {
755 755 PyObject *nullid = PyInt_FromLong(-1);
756 756 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
757 757 Py_XDECREF(nullid);
758 758 goto bail;
759 759 }
760 760 goto done;
761 761 }
762 762
763 763 nothead = calloc(len, 1);
764 764 if (nothead == NULL) {
765 765 PyErr_NoMemory();
766 766 goto bail;
767 767 }
768 768
769 769 for (i = len - 1; i >= 0; i--) {
770 770 int isfiltered;
771 771 int parents[2];
772 772
773 773 /* If nothead[i] == 1, it means we've seen an unfiltered child of this
774 774 * node already, and therefore this node is not filtered. So we can skip
775 775 * the expensive check_filter step.
776 776 */
777 777 if (nothead[i] != 1) {
778 778 isfiltered = check_filter(filter, i);
779 779 if (isfiltered == -1) {
780 780 PyErr_SetString(PyExc_TypeError,
781 781 "unable to check filter");
782 782 goto bail;
783 783 }
784 784
785 785 if (isfiltered) {
786 786 nothead[i] = 1;
787 787 continue;
788 788 }
789 789 }
790 790
791 791 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
792 792 goto bail;
793 793 for (j = 0; j < 2; j++) {
794 794 if (parents[j] >= 0)
795 795 nothead[parents[j]] = 1;
796 796 }
797 797 }
798 798
799 799 for (i = 0; i < len; i++) {
800 800 PyObject *head;
801 801
802 802 if (nothead[i])
803 803 continue;
804 804 head = PyInt_FromSsize_t(i);
805 805 if (head == NULL || PyList_Append(heads, head) == -1) {
806 806 Py_XDECREF(head);
807 807 goto bail;
808 808 }
809 809 }
810 810
811 811 done:
812 812 self->headrevs = heads;
813 813 Py_XDECREF(filter);
814 814 free(nothead);
815 815 return list_copy(self->headrevs);
816 816 bail:
817 817 Py_XDECREF(filter);
818 818 Py_XDECREF(heads);
819 819 free(nothead);
820 820 return NULL;
821 821 }
822 822
823 823 /**
824 824 * Obtain the base revision index entry.
825 825 *
826 826 * Callers must ensure that rev >= 0 or illegal memory access may occur.
827 827 */
828 828 static inline int index_baserev(indexObject *self, int rev)
829 829 {
830 830 const char *data;
831 831
832 832 if (rev >= self->length - 1) {
833 833 PyObject *tuple = PyList_GET_ITEM(self->added,
834 834 rev - self->length + 1);
835 835 return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
836 836 }
837 837 else {
838 838 data = index_deref(self, rev);
839 839 if (data == NULL) {
840 840 return -2;
841 841 }
842 842
843 843 return getbe32(data + 16);
844 844 }
845 845 }
846 846
847 847 static PyObject *index_deltachain(indexObject *self, PyObject *args)
848 848 {
849 849 int rev, generaldelta;
850 850 PyObject *stoparg;
851 851 int stoprev, iterrev, baserev = -1;
852 852 int stopped;
853 853 PyObject *chain = NULL, *result = NULL;
854 854 const Py_ssize_t length = index_length(self);
855 855
856 856 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
857 857 return NULL;
858 858 }
859 859
860 860 if (PyInt_Check(stoparg)) {
861 861 stoprev = (int)PyInt_AsLong(stoparg);
862 862 if (stoprev == -1 && PyErr_Occurred()) {
863 863 return NULL;
864 864 }
865 865 }
866 866 else if (stoparg == Py_None) {
867 867 stoprev = -2;
868 868 }
869 869 else {
870 870 PyErr_SetString(PyExc_ValueError,
871 871 "stoprev must be integer or None");
872 872 return NULL;
873 873 }
874 874
875 875 if (rev < 0 || rev >= length) {
876 876 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
877 877 return NULL;
878 878 }
879 879
880 880 chain = PyList_New(0);
881 881 if (chain == NULL) {
882 882 return NULL;
883 883 }
884 884
885 885 baserev = index_baserev(self, rev);
886 886
887 887 /* This should never happen. */
888 888 if (baserev <= -2) {
889 889 /* Error should be set by index_deref() */
890 890 assert(PyErr_Occurred());
891 891 goto bail;
892 892 }
893 893
894 894 iterrev = rev;
895 895
896 896 while (iterrev != baserev && iterrev != stoprev) {
897 897 PyObject *value = PyInt_FromLong(iterrev);
898 898 if (value == NULL) {
899 899 goto bail;
900 900 }
901 901 if (PyList_Append(chain, value)) {
902 902 Py_DECREF(value);
903 903 goto bail;
904 904 }
905 905 Py_DECREF(value);
906 906
907 907 if (generaldelta) {
908 908 iterrev = baserev;
909 909 }
910 910 else {
911 911 iterrev--;
912 912 }
913 913
914 914 if (iterrev < 0) {
915 915 break;
916 916 }
917 917
918 918 if (iterrev >= length) {
919 919 PyErr_SetString(PyExc_IndexError, "revision outside index");
920 920 return NULL;
921 921 }
922 922
923 923 baserev = index_baserev(self, iterrev);
924 924
925 925 /* This should never happen. */
926 926 if (baserev <= -2) {
927 927 /* Error should be set by index_deref() */
928 928 assert(PyErr_Occurred());
929 929 goto bail;
930 930 }
931 931 }
932 932
933 933 if (iterrev == stoprev) {
934 934 stopped = 1;
935 935 }
936 936 else {
937 937 PyObject *value = PyInt_FromLong(iterrev);
938 938 if (value == NULL) {
939 939 goto bail;
940 940 }
941 941 if (PyList_Append(chain, value)) {
942 942 Py_DECREF(value);
943 943 goto bail;
944 944 }
945 945 Py_DECREF(value);
946 946
947 947 stopped = 0;
948 948 }
949 949
950 950 if (PyList_Reverse(chain)) {
951 951 goto bail;
952 952 }
953 953
954 954 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
955 955 Py_DECREF(chain);
956 956 return result;
957 957
958 958 bail:
959 959 Py_DECREF(chain);
960 960 return NULL;
961 961 }
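/*
 * Illustrative example (not part of the original file), assuming base
 * revisions base(5) == 3 and base(3) == 3 (rev 3 is its own base): calling
 * the deltachain method above with rev=5, stoprev=None and generaldelta
 * true walks 5 then 3 and returns ([3, 5], False); with stoprev=3 it stops
 * early and returns ([5], True).
 */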
962 962
963 963 static inline int nt_level(const char *node, Py_ssize_t level)
964 964 {
965 965 int v = node[level>>1];
966 966 if (!(level & 1))
967 967 v >>= 4;
968 968 return v & 0xf;
969 969 }
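/*
 * Illustrative example (not part of the original file): for a node whose
 * first byte is 0xab, nt_level(node, 0) returns 0xa (high nybble) and
 * nt_level(node, 1) returns 0xb (low nybble), so a 20-byte node spans
 * trie levels 0..39.
 */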
970 970
971 971 /*
972 972 * Return values:
973 973 *
974 974 * -4: match is ambiguous (multiple candidates)
975 975 * -2: not found
976 976 * rest: valid rev
977 977 */
978 978 static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
979 979 int hex)
980 980 {
981 981 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
982 982 int level, maxlevel, off;
983 983
984 984 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
985 985 return -1;
986 986
987 987 if (hex)
988 988 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
989 989 else
990 990 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
991 991
992 992 for (level = off = 0; level < maxlevel; level++) {
993 993 int k = getnybble(node, level);
994 994 nodetreenode *n = &self->nt->nodes[off];
995 995 int v = n->children[k];
996 996
997 997 if (v < 0) {
998 998 const char *n;
999 999 Py_ssize_t i;
1000 1000
1001 1001 v = -(v + 2);
1002 1002 n = index_node(self, v);
1003 1003 if (n == NULL)
1004 1004 return -2;
1005 1005 for (i = level; i < maxlevel; i++)
1006 1006 if (getnybble(node, i) != nt_level(n, i))
1007 1007 return -2;
1008 1008 return v;
1009 1009 }
1010 1010 if (v == 0)
1011 1011 return -2;
1012 1012 off = v;
1013 1013 }
1014 1014 /* multiple matches against an ambiguous prefix */
1015 1015 return -4;
1016 1016 }
1017 1017
1018 1018 static int nt_new(nodetree *self)
1019 1019 {
1020 1020 if (self->length == self->capacity) {
1021 1021 if (self->capacity >= INT_MAX / (sizeof(nodetreenode) * 2)) {
1022 1022 PyErr_SetString(PyExc_MemoryError,
1023 1023 "overflow in nt_new");
1024 1024 return -1;
1025 1025 }
1026 1026 self->capacity *= 2;
1027 1027 self->nodes = realloc(self->nodes,
1028 1028 self->capacity * sizeof(nodetreenode));
1029 1029 if (self->nodes == NULL) {
1030 1030 PyErr_SetString(PyExc_MemoryError, "out of memory");
1031 1031 return -1;
1032 1032 }
1033 1033 memset(&self->nodes[self->length], 0,
1034 1034 sizeof(nodetreenode) * (self->capacity - self->length));
1035 1035 }
1036 1036 return self->length++;
1037 1037 }
1038 1038
1039 1039 static int nt_insert(indexObject *self, const char *node, int rev)
1040 1040 {
1041 1041 int level = 0;
1042 1042 int off = 0;
1043 1043
1044 1044 while (level < 40) {
1045 1045 int k = nt_level(node, level);
1046 1046 nodetreenode *n;
1047 1047 int v;
1048 1048
1049 1049 n = &self->nt->nodes[off];
1050 1050 v = n->children[k];
1051 1051
1052 1052 if (v == 0) {
1053 1053 n->children[k] = -rev - 2;
1054 1054 return 0;
1055 1055 }
1056 1056 if (v < 0) {
1057 1057 const char *oldnode = index_node_existing(self, -(v + 2));
1058 1058 int noff;
1059 1059
1060 1060 if (oldnode == NULL)
1061 1061 return -1;
1062 1062 if (!memcmp(oldnode, node, 20)) {
1063 1063 n->children[k] = -rev - 2;
1064 1064 return 0;
1065 1065 }
1066 1066 noff = nt_new(self->nt);
1067 1067 if (noff == -1)
1068 1068 return -1;
1069 1069 /* self->nt->nodes may have been changed by realloc */
1070 1070 self->nt->nodes[off].children[k] = noff;
1071 1071 off = noff;
1072 1072 n = &self->nt->nodes[off];
1073 1073 n->children[nt_level(oldnode, ++level)] = v;
1074 1074 if (level > self->nt->depth)
1075 1075 self->nt->depth = level;
1076 1076 self->nt->splits += 1;
1077 1077 } else {
1078 1078 level += 1;
1079 1079 off = v;
1080 1080 }
1081 1081 }
1082 1082
1083 1083 return -1;
1084 1084 }
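/*
 * Illustrative walk-through (not part of the original file) of a split in
 * nt_insert() above: if node "ab..." for rev 1 is stored as a leaf (-3)
 * under nybble 0xa of the root, inserting node "ac..." for rev 2 finds
 * that leaf, allocates a new nodetreenode via nt_new(), re-files the old
 * leaf under the old node's next nybble 0xb, and then continues to level 1
 * where the new node's slot 0xc receives -(2 + 2) = -4.
 */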
1085 1085
1086 1086 static int nt_delete_node(indexObject *self, const char *node)
1087 1087 {
1088 1088 /* rev==-2 happens to get encoded as 0, which is interpreted as not set */
1089 1089 return nt_insert(self, node, -2);
1090 1090 }
1091 1091
1092 1092 static int nt_init(indexObject *self)
1093 1093 {
1094 1094 if (self->nt == NULL) {
1095 if ((size_t)self->raw_length > INT_MAX / sizeof(nodetreenode)) {
1096 PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
1097 return -1;
1098 }
1095 1099 self->nt = PyMem_Malloc(sizeof(nodetree));
1096 1100 if (self->nt == NULL) {
1097 1101 PyErr_NoMemory();
1098 1102 return -1;
1099 1103 }
1100 if ((size_t)self->raw_length > INT_MAX / sizeof(nodetreenode)) {
1101 PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
1102 return -1;
1103 }
1104 1104 self->nt->capacity = self->raw_length < 4
1105 1105 ? 4 : (int)self->raw_length / 2;
1106 1106
1107 1107 self->nt->nodes = calloc(self->nt->capacity, sizeof(nodetreenode));
1108 1108 if (self->nt->nodes == NULL) {
1109 1109 PyMem_Free(self->nt);
1110 1110 self->nt = NULL;
1111 1111 PyErr_NoMemory();
1112 1112 return -1;
1113 1113 }
1114 1114 self->ntrev = (int)index_length(self);
1115 1115 self->ntlookups = 1;
1116 1116 self->ntmisses = 0;
1117 1117 self->nt->depth = 0;
1118 1118 self->nt->splits = 0;
1119 1119 self->nt->length = 1;
1120 1120 if (nt_insert(self, nullid, -1) == -1) {
1121 1121 free(self->nt->nodes);
1122 1122 PyMem_Free(self->nt);
1123 1123 self->nt = NULL;
1124 1124 return -1;
1125 1125 }
1126 1126 }
1127 1127 return 0;
1128 1128 }
1129 1129
1130 1130 /*
1131 1131 * Return values:
1132 1132 *
1133 1133 * -3: error (exception set)
1134 1134 * -2: not found (no exception set)
1135 1135 * rest: valid rev
1136 1136 */
1137 1137 static int index_find_node(indexObject *self,
1138 1138 const char *node, Py_ssize_t nodelen)
1139 1139 {
1140 1140 int rev;
1141 1141
1142 1142 if (nt_init(self) == -1)
1143 1143 return -3;
1144 1144
1145 1145 self->ntlookups++;
1146 1146 rev = nt_find(self, node, nodelen, 0);
1147 1147 if (rev >= -1)
1148 1148 return rev;
1149 1149
1150 1150 /*
1151 1151 * For the first handful of lookups, we scan the entire index,
1152 1152 * and cache only the matching nodes. This optimizes for cases
1153 1153 * like "hg tip", where only a few nodes are accessed.
1154 1154 *
1155 1155 * After that, we cache every node we visit, using a single
1156 1156 * scan amortized over multiple lookups. This gives the best
1157 1157 * bulk performance, e.g. for "hg log".
1158 1158 */
1159 1159 if (self->ntmisses++ < 4) {
1160 1160 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1161 1161 const char *n = index_node_existing(self, rev);
1162 1162 if (n == NULL)
1163 1163 return -3;
1164 1164 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1165 1165 if (nt_insert(self, n, rev) == -1)
1166 1166 return -3;
1167 1167 break;
1168 1168 }
1169 1169 }
1170 1170 } else {
1171 1171 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1172 1172 const char *n = index_node_existing(self, rev);
1173 1173 if (n == NULL)
1174 1174 return -3;
1175 1175 if (nt_insert(self, n, rev) == -1) {
1176 1176 self->ntrev = rev + 1;
1177 1177 return -3;
1178 1178 }
1179 1179 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1180 1180 break;
1181 1181 }
1182 1182 }
1183 1183 self->ntrev = rev;
1184 1184 }
1185 1185
1186 1186 if (rev >= 0)
1187 1187 return rev;
1188 1188 return -2;
1189 1189 }
1190 1190
1191 1191 static void raise_revlog_error(void)
1192 1192 {
1193 1193 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
1194 1194
1195 1195 mod = PyImport_ImportModule("mercurial.error");
1196 1196 if (mod == NULL) {
1197 1197 goto cleanup;
1198 1198 }
1199 1199
1200 1200 dict = PyModule_GetDict(mod);
1201 1201 if (dict == NULL) {
1202 1202 goto cleanup;
1203 1203 }
1204 1204 Py_INCREF(dict);
1205 1205
1206 1206 errclass = PyDict_GetItemString(dict, "RevlogError");
1207 1207 if (errclass == NULL) {
1208 1208 PyErr_SetString(PyExc_SystemError,
1209 1209 "could not find RevlogError");
1210 1210 goto cleanup;
1211 1211 }
1212 1212
1213 1213 /* value of exception is ignored by callers */
1214 1214 PyErr_SetString(errclass, "RevlogError");
1215 1215
1216 1216 cleanup:
1217 1217 Py_XDECREF(dict);
1218 1218 Py_XDECREF(mod);
1219 1219 }
1220 1220
1221 1221 static PyObject *index_getitem(indexObject *self, PyObject *value)
1222 1222 {
1223 1223 char *node;
1224 1224 int rev;
1225 1225
1226 1226 if (PyInt_Check(value))
1227 1227 return index_get(self, PyInt_AS_LONG(value));
1228 1228
1229 1229 if (node_check(value, &node) == -1)
1230 1230 return NULL;
1231 1231 rev = index_find_node(self, node, 20);
1232 1232 if (rev >= -1)
1233 1233 return PyInt_FromLong(rev);
1234 1234 if (rev == -2)
1235 1235 raise_revlog_error();
1236 1236 return NULL;
1237 1237 }
1238 1238
1239 1239 /*
1240 1240 * Fully populate the radix tree.
1241 1241 */
1242 1242 static int nt_populate(indexObject *self) {
1243 1243 int rev;
1244 1244 if (self->ntrev > 0) {
1245 1245 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1246 1246 const char *n = index_node_existing(self, rev);
1247 1247 if (n == NULL)
1248 1248 return -1;
1249 1249 if (nt_insert(self, n, rev) == -1)
1250 1250 return -1;
1251 1251 }
1252 1252 self->ntrev = -1;
1253 1253 }
1254 1254 return 0;
1255 1255 }
1256 1256
1257 1257 static int nt_partialmatch(indexObject *self, const char *node,
1258 1258 Py_ssize_t nodelen)
1259 1259 {
1260 1260 return nt_find(self, node, nodelen, 1);
1261 1261 }
1262 1262
1263 1263 /*
1264 1264 * Find the length of the shortest unique prefix of node.
1265 1265 *
1266 1266 * Return values:
1267 1267 *
1268 1268 * -3: error (exception set)
1269 1269 * -2: not found (no exception set)
1270 1270 * rest: length of shortest prefix
1271 1271 */
1272 1272 static int nt_shortest(indexObject *self, const char *node)
1273 1273 {
1274 1274 int level, off;
1275 1275
1276 1276 for (level = off = 0; level < 40; level++) {
1277 1277 int k, v;
1278 1278 nodetreenode *n = &self->nt->nodes[off];
1279 1279 k = nt_level(node, level);
1280 1280 v = n->children[k];
1281 1281 if (v < 0) {
1282 1282 const char *n;
1283 1283 v = -(v + 2);
1284 1284 n = index_node_existing(self, v);
1285 1285 if (n == NULL)
1286 1286 return -3;
1287 1287 if (memcmp(node, n, 20) != 0)
1288 1288 /*
1289 1289 * Found a unique prefix, but it wasn't for the
1290 1290 * requested node (i.e the requested node does
1291 1291 * not exist).
1292 1292 */
1293 1293 return -2;
1294 1294 return level + 1;
1295 1295 }
1296 1296 if (v == 0)
1297 1297 return -2;
1298 1298 off = v;
1299 1299 }
1300 1300 /*
1301 1301 * A node is always unique within 40 hex digits, so we should never
1302 1302 * get here. If we do, there is a programming error in this file that
1303 1303 * made us insert a node longer than 40 hex digits.
1304 1304 */
1305 1305 PyErr_SetString(PyExc_Exception, "broken node tree");
1306 1306 return -3;
1307 1307 }
1308 1308
1309 1309 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1310 1310 {
1311 1311 const char *fullnode;
1312 1312 int nodelen;
1313 1313 char *node;
1314 1314 int rev, i;
1315 1315
1316 1316 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1317 1317 return NULL;
1318 1318
1319 1319 if (nodelen < 1) {
1320 1320 PyErr_SetString(PyExc_ValueError, "key too short");
1321 1321 return NULL;
1322 1322 }
1323 1323
1324 1324 if (nodelen > 40) {
1325 1325 PyErr_SetString(PyExc_ValueError, "key too long");
1326 1326 return NULL;
1327 1327 }
1328 1328
1329 1329 for (i = 0; i < nodelen; i++)
1330 1330 hexdigit(node, i);
1331 1331 if (PyErr_Occurred()) {
1332 1332 /* input contains non-hex characters */
1333 1333 PyErr_Clear();
1334 1334 Py_RETURN_NONE;
1335 1335 }
1336 1336
1337 1337 if (nt_init(self) == -1)
1338 1338 return NULL;
1339 1339 if (nt_populate(self) == -1)
1340 1340 return NULL;
1341 1341 rev = nt_partialmatch(self, node, nodelen);
1342 1342
1343 1343 switch (rev) {
1344 1344 case -4:
1345 1345 raise_revlog_error();
1346 1346 return NULL;
1347 1347 case -2:
1348 1348 Py_RETURN_NONE;
1349 1349 case -1:
1350 1350 return PyBytes_FromStringAndSize(nullid, 20);
1351 1351 }
1352 1352
1353 1353 fullnode = index_node_existing(self, rev);
1354 1354 if (fullnode == NULL) {
1355 1355 return NULL;
1356 1356 }
1357 1357 return PyBytes_FromStringAndSize(fullnode, 20);
1358 1358 }
1359 1359
1360 1360 static PyObject *index_shortest(indexObject *self, PyObject *args)
1361 1361 {
1362 1362 PyObject *val;
1363 1363 char *node;
1364 1364 int length;
1365 1365
1366 1366 if (!PyArg_ParseTuple(args, "O", &val))
1367 1367 return NULL;
1368 1368 if (node_check(val, &node) == -1)
1369 1369 return NULL;
1370 1370
1371 1371 self->ntlookups++;
1372 1372 if (nt_init(self) == -1)
1373 1373 return NULL;
1374 1374 if (nt_populate(self) == -1)
1375 1375 return NULL;
1376 1376 length = nt_shortest(self, node);
1377 1377 if (length == -3)
1378 1378 return NULL;
1379 1379 if (length == -2) {
1380 1380 raise_revlog_error();
1381 1381 return NULL;
1382 1382 }
1383 1383 return PyInt_FromLong(length);
1384 1384 }
1385 1385
1386 1386 static PyObject *index_m_get(indexObject *self, PyObject *args)
1387 1387 {
1388 1388 PyObject *val;
1389 1389 char *node;
1390 1390 int rev;
1391 1391
1392 1392 if (!PyArg_ParseTuple(args, "O", &val))
1393 1393 return NULL;
1394 1394 if (node_check(val, &node) == -1)
1395 1395 return NULL;
1396 1396 rev = index_find_node(self, node, 20);
1397 1397 if (rev == -3)
1398 1398 return NULL;
1399 1399 if (rev == -2)
1400 1400 Py_RETURN_NONE;
1401 1401 return PyInt_FromLong(rev);
1402 1402 }
1403 1403
1404 1404 static int index_contains(indexObject *self, PyObject *value)
1405 1405 {
1406 1406 char *node;
1407 1407
1408 1408 if (PyInt_Check(value)) {
1409 1409 long rev = PyInt_AS_LONG(value);
1410 1410 return rev >= -1 && rev < index_length(self);
1411 1411 }
1412 1412
1413 1413 if (node_check(value, &node) == -1)
1414 1414 return -1;
1415 1415
1416 1416 switch (index_find_node(self, node, 20)) {
1417 1417 case -3:
1418 1418 return -1;
1419 1419 case -2:
1420 1420 return 0;
1421 1421 default:
1422 1422 return 1;
1423 1423 }
1424 1424 }
1425 1425
1426 1426 typedef uint64_t bitmask;
1427 1427
1428 1428 /*
1429 1429 * Given a disjoint set of revs, return all candidates for the
1430 1430 * greatest common ancestor. In revset notation, this is the set
1431 1431 * "heads(::a and ::b and ...)"
1432 1432 */
1433 1433 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
1434 1434 int revcount)
1435 1435 {
1436 1436 const bitmask allseen = (1ull << revcount) - 1;
1437 1437 const bitmask poison = 1ull << revcount;
1438 1438 PyObject *gca = PyList_New(0);
1439 1439 int i, v, interesting;
1440 1440 int maxrev = -1;
1441 1441 bitmask sp;
1442 1442 bitmask *seen;
1443 1443
1444 1444 if (gca == NULL)
1445 1445 return PyErr_NoMemory();
1446 1446
1447 1447 for (i = 0; i < revcount; i++) {
1448 1448 if (revs[i] > maxrev)
1449 1449 maxrev = revs[i];
1450 1450 }
1451 1451
1452 1452 seen = calloc(sizeof(*seen), maxrev + 1);
1453 1453 if (seen == NULL) {
1454 1454 Py_DECREF(gca);
1455 1455 return PyErr_NoMemory();
1456 1456 }
1457 1457
1458 1458 for (i = 0; i < revcount; i++)
1459 1459 seen[revs[i]] = 1ull << i;
1460 1460
1461 1461 interesting = revcount;
1462 1462
1463 1463 for (v = maxrev; v >= 0 && interesting; v--) {
1464 1464 bitmask sv = seen[v];
1465 1465 int parents[2];
1466 1466
1467 1467 if (!sv)
1468 1468 continue;
1469 1469
1470 1470 if (sv < poison) {
1471 1471 interesting -= 1;
1472 1472 if (sv == allseen) {
1473 1473 PyObject *obj = PyInt_FromLong(v);
1474 1474 if (obj == NULL)
1475 1475 goto bail;
1476 1476 if (PyList_Append(gca, obj) == -1) {
1477 1477 Py_DECREF(obj);
1478 1478 goto bail;
1479 1479 }
1480 1480 sv |= poison;
1481 1481 for (i = 0; i < revcount; i++) {
1482 1482 if (revs[i] == v)
1483 1483 goto done;
1484 1484 }
1485 1485 }
1486 1486 }
1487 1487 if (index_get_parents(self, v, parents, maxrev) < 0)
1488 1488 goto bail;
1489 1489
1490 1490 for (i = 0; i < 2; i++) {
1491 1491 int p = parents[i];
1492 1492 if (p == -1)
1493 1493 continue;
1494 1494 sp = seen[p];
1495 1495 if (sv < poison) {
1496 1496 if (sp == 0) {
1497 1497 seen[p] = sv;
1498 1498 interesting++;
1499 1499 }
1500 1500 else if (sp != sv)
1501 1501 seen[p] |= sv;
1502 1502 } else {
1503 1503 if (sp && sp < poison)
1504 1504 interesting--;
1505 1505 seen[p] = sv;
1506 1506 }
1507 1507 }
1508 1508 }
1509 1509
1510 1510 done:
1511 1511 free(seen);
1512 1512 return gca;
1513 1513 bail:
1514 1514 free(seen);
1515 1515 Py_XDECREF(gca);
1516 1516 return NULL;
1517 1517 }
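/*
 * Illustrative note (not part of the original file) on the bitmasks used
 * in find_gca_candidates() above, for two input revs (revcount == 2):
 * allseen == 0b011 and poison == 0b100.  Each seen[v] records which inputs
 * can reach v; when a rev's mask equals allseen it is reachable from both
 * inputs, is appended to gca, and is poisoned so that its own ancestors
 * keep propagating state but are never reported as further candidates.
 */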
1518 1518
1519 1519 /*
1520 1520 * Given a disjoint set of revs, return the subset with the longest
1521 1521 * path to the root.
1522 1522 */
1523 1523 static PyObject *find_deepest(indexObject *self, PyObject *revs)
1524 1524 {
1525 1525 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
1526 1526 static const Py_ssize_t capacity = 24;
1527 1527 int *depth, *interesting = NULL;
1528 1528 int i, j, v, ninteresting;
1529 1529 PyObject *dict = NULL, *keys = NULL;
1530 1530 long *seen = NULL;
1531 1531 int maxrev = -1;
1532 1532 long final;
1533 1533
1534 1534 if (revcount > capacity) {
1535 1535 PyErr_Format(PyExc_OverflowError,
1536 1536 "bitset size (%ld) > capacity (%ld)",
1537 1537 (long)revcount, (long)capacity);
1538 1538 return NULL;
1539 1539 }
1540 1540
1541 1541 for (i = 0; i < revcount; i++) {
1542 1542 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1543 1543 if (n > maxrev)
1544 1544 maxrev = n;
1545 1545 }
1546 1546
1547 1547 depth = calloc(sizeof(*depth), maxrev + 1);
1548 1548 if (depth == NULL)
1549 1549 return PyErr_NoMemory();
1550 1550
1551 1551 seen = calloc(sizeof(*seen), maxrev + 1);
1552 1552 if (seen == NULL) {
1553 1553 PyErr_NoMemory();
1554 1554 goto bail;
1555 1555 }
1556 1556
1557 1557 interesting = calloc(sizeof(*interesting), 1 << revcount);
1558 1558 if (interesting == NULL) {
1559 1559 PyErr_NoMemory();
1560 1560 goto bail;
1561 1561 }
1562 1562
1563 1563 if (PyList_Sort(revs) == -1)
1564 1564 goto bail;
1565 1565
1566 1566 for (i = 0; i < revcount; i++) {
1567 1567 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1568 1568 long b = 1l << i;
1569 1569 depth[n] = 1;
1570 1570 seen[n] = b;
1571 1571 interesting[b] = 1;
1572 1572 }
1573 1573
1574 1574 /* invariant: ninteresting is the number of non-zero entries in
1575 1575 * interesting. */
1576 1576 ninteresting = (int)revcount;
1577 1577
1578 1578 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
1579 1579 int dv = depth[v];
1580 1580 int parents[2];
1581 1581 long sv;
1582 1582
1583 1583 if (dv == 0)
1584 1584 continue;
1585 1585
1586 1586 sv = seen[v];
1587 1587 if (index_get_parents(self, v, parents, maxrev) < 0)
1588 1588 goto bail;
1589 1589
1590 1590 for (i = 0; i < 2; i++) {
1591 1591 int p = parents[i];
1592 1592 long sp;
1593 1593 int dp;
1594 1594
1595 1595 if (p == -1)
1596 1596 continue;
1597 1597
1598 1598 dp = depth[p];
1599 1599 sp = seen[p];
1600 1600 if (dp <= dv) {
1601 1601 depth[p] = dv + 1;
1602 1602 if (sp != sv) {
1603 1603 interesting[sv] += 1;
1604 1604 seen[p] = sv;
1605 1605 if (sp) {
1606 1606 interesting[sp] -= 1;
1607 1607 if (interesting[sp] == 0)
1608 1608 ninteresting -= 1;
1609 1609 }
1610 1610 }
1611 1611 }
1612 1612 else if (dv == dp - 1) {
1613 1613 long nsp = sp | sv;
1614 1614 if (nsp == sp)
1615 1615 continue;
1616 1616 seen[p] = nsp;
1617 1617 interesting[sp] -= 1;
1618 1618 if (interesting[sp] == 0)
1619 1619 ninteresting -= 1;
1620 1620 if (interesting[nsp] == 0)
1621 1621 ninteresting += 1;
1622 1622 interesting[nsp] += 1;
1623 1623 }
1624 1624 }
1625 1625 interesting[sv] -= 1;
1626 1626 if (interesting[sv] == 0)
1627 1627 ninteresting -= 1;
1628 1628 }
1629 1629
1630 1630 final = 0;
1631 1631 j = ninteresting;
1632 1632 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
1633 1633 if (interesting[i] == 0)
1634 1634 continue;
1635 1635 final |= i;
1636 1636 j -= 1;
1637 1637 }
1638 1638 if (final == 0) {
1639 1639 keys = PyList_New(0);
1640 1640 goto bail;
1641 1641 }
1642 1642
1643 1643 dict = PyDict_New();
1644 1644 if (dict == NULL)
1645 1645 goto bail;
1646 1646
1647 1647 for (i = 0; i < revcount; i++) {
1648 1648 PyObject *key;
1649 1649
1650 1650 if ((final & (1 << i)) == 0)
1651 1651 continue;
1652 1652
1653 1653 key = PyList_GET_ITEM(revs, i);
1654 1654 Py_INCREF(key);
1655 1655 Py_INCREF(Py_None);
1656 1656 if (PyDict_SetItem(dict, key, Py_None) == -1) {
1657 1657 Py_DECREF(key);
1658 1658 Py_DECREF(Py_None);
1659 1659 goto bail;
1660 1660 }
1661 1661 }
1662 1662
1663 1663 keys = PyDict_Keys(dict);
1664 1664
1665 1665 bail:
1666 1666 free(depth);
1667 1667 free(seen);
1668 1668 free(interesting);
1669 1669 Py_XDECREF(dict);
1670 1670
1671 1671 return keys;
1672 1672 }
1673 1673
1674 1674 /*
1675 1675 * Given a (possibly overlapping) set of revs, return all the
1676 1676 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
1677 1677 */
1678 1678 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
1679 1679 {
1680 1680 PyObject *ret = NULL;
1681 1681 Py_ssize_t argcount, i, len;
1682 1682 bitmask repeat = 0;
1683 1683 int revcount = 0;
1684 1684 int *revs;
1685 1685
1686 1686 argcount = PySequence_Length(args);
1687 1687 revs = PyMem_Malloc(argcount * sizeof(*revs));
1688 1688 if (argcount > 0 && revs == NULL)
1689 1689 return PyErr_NoMemory();
1690 1690 len = index_length(self);
1691 1691
1692 1692 for (i = 0; i < argcount; i++) {
1693 1693 static const int capacity = 24;
1694 1694 PyObject *obj = PySequence_GetItem(args, i);
1695 1695 bitmask x;
1696 1696 long val;
1697 1697
1698 1698 if (!PyInt_Check(obj)) {
1699 1699 PyErr_SetString(PyExc_TypeError,
1700 1700 "arguments must all be ints");
1701 1701 Py_DECREF(obj);
1702 1702 goto bail;
1703 1703 }
1704 1704 val = PyInt_AsLong(obj);
1705 1705 Py_DECREF(obj);
1706 1706 if (val == -1) {
1707 1707 ret = PyList_New(0);
1708 1708 goto done;
1709 1709 }
1710 1710 if (val < 0 || val >= len) {
1711 1711 PyErr_SetString(PyExc_IndexError,
1712 1712 "index out of range");
1713 1713 goto bail;
1714 1714 }
1715 1715 /* this cheesy bloom filter lets us avoid some more
1716 1716 * expensive duplicate checks in the common set-is-disjoint
1717 1717 * case */
1718 1718 x = 1ull << (val & 0x3f);
1719 1719 if (repeat & x) {
1720 1720 int k;
1721 1721 for (k = 0; k < revcount; k++) {
1722 1722 if (val == revs[k])
1723 1723 goto duplicate;
1724 1724 }
1725 1725 }
1726 1726 else repeat |= x;
1727 1727 if (revcount >= capacity) {
1728 1728 PyErr_Format(PyExc_OverflowError,
1729 1729 "bitset size (%d) > capacity (%d)",
1730 1730 revcount, capacity);
1731 1731 goto bail;
1732 1732 }
1733 1733 revs[revcount++] = (int)val;
1734 1734 duplicate:;
1735 1735 }
1736 1736
1737 1737 if (revcount == 0) {
1738 1738 ret = PyList_New(0);
1739 1739 goto done;
1740 1740 }
1741 1741 if (revcount == 1) {
1742 1742 PyObject *obj;
1743 1743 ret = PyList_New(1);
1744 1744 if (ret == NULL)
1745 1745 goto bail;
1746 1746 obj = PyInt_FromLong(revs[0]);
1747 1747 if (obj == NULL)
1748 1748 goto bail;
1749 1749 PyList_SET_ITEM(ret, 0, obj);
1750 1750 goto done;
1751 1751 }
1752 1752
1753 1753 ret = find_gca_candidates(self, revs, revcount);
1754 1754 if (ret == NULL)
1755 1755 goto bail;
1756 1756
1757 1757 done:
1758 1758 PyMem_Free(revs);
1759 1759 return ret;
1760 1760
1761 1761 bail:
1762 1762 PyMem_Free(revs);
1763 1763 Py_XDECREF(ret);
1764 1764 return NULL;
1765 1765 }
1766 1766
1767 1767 /*
1768 1768 * Given a (possibly overlapping) set of revs, return the greatest
1769 1769 * common ancestors: those with the longest path to the root.
1770 1770 */
1771 1771 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1772 1772 {
1773 1773 PyObject *ret;
1774 1774 PyObject *gca = index_commonancestorsheads(self, args);
1775 1775 if (gca == NULL)
1776 1776 return NULL;
1777 1777
1778 1778 if (PyList_GET_SIZE(gca) <= 1) {
1779 1779 return gca;
1780 1780 }
1781 1781
1782 1782 ret = find_deepest(self, gca);
1783 1783 Py_DECREF(gca);
1784 1784 return ret;
1785 1785 }
1786 1786
1787 1787 /*
1788 1788 * Invalidate any trie entries introduced by added revs.
1789 1789 */
1790 1790 static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
1791 1791 {
1792 1792 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
1793 1793
1794 1794 for (i = start; i < len; i++) {
1795 1795 PyObject *tuple = PyList_GET_ITEM(self->added, i);
1796 1796 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
1797 1797
1798 1798 nt_delete_node(self, PyBytes_AS_STRING(node));
1799 1799 }
1800 1800
1801 1801 if (start == 0)
1802 1802 Py_CLEAR(self->added);
1803 1803 }
1804 1804
1805 1805 /*
1806 1806 * Delete a numeric range of revs, which must be at the end of the
1807 1807 * range, but exclude the sentinel nullid entry.
1808 1808 */
1809 1809 static int index_slice_del(indexObject *self, PyObject *item)
1810 1810 {
1811 1811 Py_ssize_t start, stop, step, slicelength;
1812 1812 Py_ssize_t length = index_length(self) + 1;
1813 1813 int ret = 0;
1814 1814
1815 1815 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
1816 1816 #ifdef IS_PY3K
1817 1817 if (PySlice_GetIndicesEx(item, length,
1818 1818 #else
1819 1819 if (PySlice_GetIndicesEx((PySliceObject*)item, length,
1820 1820 #endif
1821 1821 &start, &stop, &step, &slicelength) < 0)
1822 1822 return -1;
1823 1823
1824 1824 if (slicelength <= 0)
1825 1825 return 0;
1826 1826
1827 1827 if ((step < 0 && start < stop) || (step > 0 && start > stop))
1828 1828 stop = start;
1829 1829
1830 1830 if (step < 0) {
1831 1831 stop = start + 1;
1832 1832 start = stop + step*(slicelength - 1) - 1;
1833 1833 step = -step;
1834 1834 }
1835 1835
1836 1836 if (step != 1) {
1837 1837 PyErr_SetString(PyExc_ValueError,
1838 1838 "revlog index delete requires step size of 1");
1839 1839 return -1;
1840 1840 }
1841 1841
1842 1842 if (stop != length - 1) {
1843 1843 PyErr_SetString(PyExc_IndexError,
1844 1844 "revlog index deletion indices are invalid");
1845 1845 return -1;
1846 1846 }
1847 1847
1848 1848 if (start < self->length - 1) {
1849 1849 if (self->nt) {
1850 1850 Py_ssize_t i;
1851 1851
1852 1852 for (i = start + 1; i < self->length - 1; i++) {
1853 1853 const char *node = index_node_existing(self, i);
1854 1854 if (node == NULL)
1855 1855 return -1;
1856 1856
1857 1857 nt_delete_node(self, node);
1858 1858 }
1859 1859 if (self->added)
1860 1860 nt_invalidate_added(self, 0);
1861 1861 if (self->ntrev > start)
1862 1862 self->ntrev = (int)start;
1863 1863 }
1864 1864 self->length = start + 1;
1865 1865 if (start < self->raw_length) {
1866 1866 if (self->cache) {
1867 1867 Py_ssize_t i;
1868 1868 for (i = start; i < self->raw_length; i++)
1869 1869 Py_CLEAR(self->cache[i]);
1870 1870 }
1871 1871 self->raw_length = start;
1872 1872 }
1873 1873 goto done;
1874 1874 }
1875 1875
1876 1876 if (self->nt) {
1877 1877 nt_invalidate_added(self, start - self->length + 1);
1878 1878 if (self->ntrev > start)
1879 1879 self->ntrev = (int)start;
1880 1880 }
1881 1881 if (self->added)
1882 1882 ret = PyList_SetSlice(self->added, start - self->length + 1,
1883 1883 PyList_GET_SIZE(self->added), NULL);
1884 1884 done:
1885 1885 Py_CLEAR(self->headrevs);
1886 1886 return ret;
1887 1887 }
1888 1888
1889 1889 /*
1890 1890 * Supported ops:
1891 1891 *
1892 1892 * slice deletion
1893 1893 * string assignment (extend node->rev mapping)
1894 1894 * string deletion (shrink node->rev mapping)
1895 1895 */
1896 1896 static int index_assign_subscript(indexObject *self, PyObject *item,
1897 1897 PyObject *value)
1898 1898 {
1899 1899 char *node;
1900 1900 long rev;
1901 1901
1902 1902 if (PySlice_Check(item) && value == NULL)
1903 1903 return index_slice_del(self, item);
1904 1904
1905 1905 if (node_check(item, &node) == -1)
1906 1906 return -1;
1907 1907
1908 1908 if (value == NULL)
1909 1909 return self->nt ? nt_delete_node(self, node) : 0;
1910 1910 rev = PyInt_AsLong(value);
1911 1911 if (rev > INT_MAX || rev < 0) {
1912 1912 if (!PyErr_Occurred())
1913 1913 PyErr_SetString(PyExc_ValueError, "rev out of range");
1914 1914 return -1;
1915 1915 }
1916 1916
1917 1917 if (nt_init(self) == -1)
1918 1918 return -1;
1919 1919 return nt_insert(self, node, (int)rev);
1920 1920 }
1921 1921
1922 1922 /*
1923 1923 * Find all RevlogNG entries in an index that has inline data. Update
1924 1924 * the optional "offsets" table with those entries.
1925 1925 */
1926 1926 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
1927 1927 {
1928 1928 const char *data = (const char *)self->buf.buf;
1929 1929 Py_ssize_t pos = 0;
1930 1930 Py_ssize_t end = self->buf.len;
1931 1931 long incr = v1_hdrsize;
1932 1932 Py_ssize_t len = 0;
1933 1933
1934 1934 while (pos + v1_hdrsize <= end && pos >= 0) {
1935 1935 uint32_t comp_len;
1936 1936 /* 3rd element of header is length of compressed inline data */
1937 1937 comp_len = getbe32(data + pos + 8);
1938 1938 incr = v1_hdrsize + comp_len;
1939 1939 if (offsets)
1940 1940 offsets[len] = data + pos;
1941 1941 len++;
1942 1942 pos += incr;
1943 1943 }
1944 1944
1945 1945 if (pos != end) {
1946 1946 if (!PyErr_Occurred())
1947 1947 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1948 1948 return -1;
1949 1949 }
1950 1950
1951 1951 return len;
1952 1952 }
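/*
 * Illustration only (not compiled): the stride used by the scan above.
 * With inline data, each 64-byte index header is followed immediately by
 * that revision's compressed data, so the next record starts at
 * pos + v1_hdrsize + comp_len. A minimal sketch of that arithmetic.
 */
#if 0
static Py_ssize_t example_next_record(const char *data, Py_ssize_t pos)
{
	/* 3rd header field (offset 8) is the compressed data length */
	uint32_t comp_len = getbe32(data + pos + 8);
	return pos + v1_hdrsize + comp_len;
}
#endif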
1953 1953
1954 1954 static int index_init(indexObject *self, PyObject *args)
1955 1955 {
1956 1956 PyObject *data_obj, *inlined_obj;
1957 1957 Py_ssize_t size;
1958 1958
1959 1959 /* Initialize before argument-checking to avoid index_dealloc() crash. */
1960 1960 self->raw_length = 0;
1961 1961 self->added = NULL;
1962 1962 self->cache = NULL;
1963 1963 self->data = NULL;
1964 1964 memset(&self->buf, 0, sizeof(self->buf));
1965 1965 self->headrevs = NULL;
1966 1966 self->filteredrevs = Py_None;
1967 1967 Py_INCREF(Py_None);
1968 1968 self->nt = NULL;
1969 1969 self->offsets = NULL;
1970 1970
1971 1971 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
1972 1972 return -1;
1973 1973 if (!PyObject_CheckBuffer(data_obj)) {
1974 1974 PyErr_SetString(PyExc_TypeError,
1975 1975 "data does not support buffer interface");
1976 1976 return -1;
1977 1977 }
1978 1978
1979 1979 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
1980 1980 return -1;
1981 1981 size = self->buf.len;
1982 1982
1983 1983 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
1984 1984 self->data = data_obj;
1985 1985
1986 1986 self->ntlookups = self->ntmisses = 0;
1987 1987 self->ntrev = -1;
1988 1988 Py_INCREF(self->data);
1989 1989
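	/*
	 * Count the index records: with inline data the records are
	 * interleaved with compressed revision data, so the buffer must be
	 * scanned; otherwise the count is simply size / v1_hdrsize. Either
	 * way, self->length reserves one extra slot for the nullid sentinel.
	 */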
1990 1990 if (self->inlined) {
1991 1991 Py_ssize_t len = inline_scan(self, NULL);
1992 1992 if (len == -1)
1993 1993 goto bail;
1994 1994 self->raw_length = len;
1995 1995 self->length = len + 1;
1996 1996 } else {
1997 1997 if (size % v1_hdrsize) {
1998 1998 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1999 1999 goto bail;
2000 2000 }
2001 2001 self->raw_length = size / v1_hdrsize;
2002 2002 self->length = self->raw_length + 1;
2003 2003 }
2004 2004
2005 2005 return 0;
2006 2006 bail:
2007 2007 return -1;
2008 2008 }
2009 2009
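/*
 * The index doubles as its own nodemap: the "nodemap" attribute simply
 * returns self (with a new reference), and node->rev lookups then go
 * through the string-keyed mapping protocol implemented above.
 */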
2010 2010 static PyObject *index_nodemap(indexObject *self)
2011 2011 {
2012 2012 Py_INCREF(self);
2013 2013 return (PyObject *)self;
2014 2014 }
2015 2015
2016 2016 static void index_dealloc(indexObject *self)
2017 2017 {
2018 2018 _index_clearcaches(self);
2019 2019 Py_XDECREF(self->filteredrevs);
2020 2020 if (self->buf.buf) {
2021 2021 PyBuffer_Release(&self->buf);
2022 2022 memset(&self->buf, 0, sizeof(self->buf));
2023 2023 }
2024 2024 Py_XDECREF(self->data);
2025 2025 Py_XDECREF(self->added);
2026 2026 PyObject_Del(self);
2027 2027 }
2028 2028
2029 2029 static PySequenceMethods index_sequence_methods = {
2030 2030 (lenfunc)index_length, /* sq_length */
2031 2031 0, /* sq_concat */
2032 2032 0, /* sq_repeat */
2033 2033 (ssizeargfunc)index_get, /* sq_item */
2034 2034 0, /* sq_slice */
2035 2035 0, /* sq_ass_item */
2036 2036 0, /* sq_ass_slice */
2037 2037 (objobjproc)index_contains, /* sq_contains */
2038 2038 };
2039 2039
2040 2040 static PyMappingMethods index_mapping_methods = {
2041 2041 (lenfunc)index_length, /* mp_length */
2042 2042 (binaryfunc)index_getitem, /* mp_subscript */
2043 2043 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2044 2044 };
2045 2045
2046 2046 static PyMethodDef index_methods[] = {
2047 2047 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2048 2048 "return the gca set of the given revs"},
2049 2049 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2050 2050 METH_VARARGS,
2051 2051 "return the heads of the common ancestors of the given revs"},
2052 2052 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2053 2053 "clear the index caches"},
2054 2054 {"get", (PyCFunction)index_m_get, METH_VARARGS,
2055 2055 "get an index entry"},
2056 2056 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
2057 2057 METH_VARARGS, "compute phases"},
2058 2058 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2059 2059 "reachableroots"},
2060 2060 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2061 2061 "get head revisions"}, /* Can do filtering since 3.2 */
2062 2062 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2063 2063 "get filtered head revisions"}, /* Can always do filtering */
2064 2064 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2065 2065 "determine revisions with deltas to reconstruct fulltext"},
2066 2066 {"append", (PyCFunction)index_append, METH_O,
2067 2067 "append an index entry"},
2068 2068 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2069 2069 "match a potentially ambiguous node ID"},
2070 2070 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2071 2071 "find length of shortest hex nodeid of a binary ID"},
2072 2072 {"stats", (PyCFunction)index_stats, METH_NOARGS,
2073 2073 "stats for the index"},
2074 2074 {NULL} /* Sentinel */
2075 2075 };
2076 2076
2077 2077 static PyGetSetDef index_getset[] = {
2078 2078 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2079 2079 {NULL} /* Sentinel */
2080 2080 };
2081 2081
2082 2082 static PyTypeObject indexType = {
2083 2083 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2084 2084 "parsers.index", /* tp_name */
2085 2085 sizeof(indexObject), /* tp_basicsize */
2086 2086 0, /* tp_itemsize */
2087 2087 (destructor)index_dealloc, /* tp_dealloc */
2088 2088 0, /* tp_print */
2089 2089 0, /* tp_getattr */
2090 2090 0, /* tp_setattr */
2091 2091 0, /* tp_compare */
2092 2092 0, /* tp_repr */
2093 2093 0, /* tp_as_number */
2094 2094 &index_sequence_methods, /* tp_as_sequence */
2095 2095 &index_mapping_methods, /* tp_as_mapping */
2096 2096 0, /* tp_hash */
2097 2097 0, /* tp_call */
2098 2098 0, /* tp_str */
2099 2099 0, /* tp_getattro */
2100 2100 0, /* tp_setattro */
2101 2101 0, /* tp_as_buffer */
2102 2102 Py_TPFLAGS_DEFAULT, /* tp_flags */
2103 2103 "revlog index", /* tp_doc */
2104 2104 0, /* tp_traverse */
2105 2105 0, /* tp_clear */
2106 2106 0, /* tp_richcompare */
2107 2107 0, /* tp_weaklistoffset */
2108 2108 0, /* tp_iter */
2109 2109 0, /* tp_iternext */
2110 2110 index_methods, /* tp_methods */
2111 2111 0, /* tp_members */
2112 2112 index_getset, /* tp_getset */
2113 2113 0, /* tp_base */
2114 2114 0, /* tp_dict */
2115 2115 0, /* tp_descr_get */
2116 2116 0, /* tp_descr_set */
2117 2117 0, /* tp_dictoffset */
2118 2118 (initproc)index_init, /* tp_init */
2119 2119 0, /* tp_alloc */
2120 2120 };
2121 2121
2122 2122 /*
2123 2123 * returns a tuple of the form (index, cache) with elements as
2124 2124 * follows:
2125 2125 *
2126 2126 * index: an index object that lazily parses RevlogNG records
2127 2127 * cache: if data is inlined, a tuple (0, index_file_content), else None
2128 2128 * index_file_content could be a string or a buffer
2129 2129 *
2130 2130 * added complications are for backwards compatibility
2131 2131 */
2132 2132 PyObject *parse_index2(PyObject *self, PyObject *args)
2133 2133 {
2134 2134 PyObject *tuple = NULL, *cache = NULL;
2135 2135 indexObject *idx;
2136 2136 int ret;
2137 2137
2138 2138 idx = PyObject_New(indexObject, &indexType);
2139 2139 if (idx == NULL)
2140 2140 goto bail;
2141 2141
2142 2142 ret = index_init(idx, args);
2143 2143 if (ret == -1)
2144 2144 goto bail;
2145 2145
2146 2146 if (idx->inlined) {
2147 2147 cache = Py_BuildValue("iO", 0, idx->data);
2148 2148 if (cache == NULL)
2149 2149 goto bail;
2150 2150 } else {
2151 2151 cache = Py_None;
2152 2152 Py_INCREF(cache);
2153 2153 }
2154 2154
2155 2155 tuple = Py_BuildValue("NN", idx, cache);
2156 2156 if (!tuple)
2157 2157 goto bail;
2158 2158 return tuple;
2159 2159
2160 2160 bail:
2161 2161 Py_XDECREF(idx);
2162 2162 Py_XDECREF(cache);
2163 2163 Py_XDECREF(tuple);
2164 2164 return NULL;
2165 2165 }
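/*
 * Illustration only (not compiled): unpacking the 2-tuple returned by
 * parse_index2() from C. A minimal sketch, assuming "mod" is the imported
 * parsers module and "data" is a buffer-protocol object holding the raw
 * index bytes (both names are hypothetical).
 */
#if 0
static int example_parse(PyObject *mod, PyObject *data, int inlined)
{
	PyObject *index, *cache;
	PyObject *res = PyObject_CallMethod(mod, "parse_index2", "Oi",
	                                    data, inlined);
	if (res == NULL)
		return -1;
	/* borrowed references into res: (index, cache) */
	if (!PyArg_ParseTuple(res, "OO", &index, &cache)) {
		Py_DECREF(res);
		return -1;
	}
	/* cache is (0, index_file_content) for inline revlogs, else None */
	Py_DECREF(res);
	return 0;
}
#endif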
2166 2166
2167 2167 void revlog_module_init(PyObject *mod)
2168 2168 {
2169 2169 indexType.tp_new = PyType_GenericNew;
2170 2170 if (PyType_Ready(&indexType) < 0)
2171 2171 return;
2172 2172 Py_INCREF(&indexType);
2173 2173 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2174 2174
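	/*
	 * nullentry is the sentinel tuple for the null revision: zero
	 * offset/lengths, -1 for the remaining revision fields (base, link
	 * and both parents), and the 20-byte all-zero nullid, laid out like
	 * tuple_format.
	 */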
2175 2175 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
2176 2176 -1, -1, -1, -1, nullid, 20);
2177 2177 if (nullentry)
2178 2178 PyObject_GC_UnTrack(nullentry);
2179 2179 }