index: remove side-effect from failed nt_new()...
Martin von Zweigbergk
r38973:dcd395dc default
@@ -1,2179 +1,2182 b''
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
20 20 #ifdef IS_PY3K
21 21 /* The mapping of Python types is meant to be temporary to get Python
22 22  * 3 to compile. We should remove this once Python 3 is fully
23 23  * supported and proper types are used in the extensions themselves. */
24 24 #define PyInt_Check PyLong_Check
25 25 #define PyInt_FromLong PyLong_FromLong
26 26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 27 #define PyInt_AS_LONG PyLong_AS_LONG
28 28 #define PyInt_AsLong PyLong_AsLong
29 29 #endif
30 30
31 31 typedef struct {
32 32 int children[16];
33 33 } nodetreenode;
34 34
35 35 /*
36 36 * A base-16 trie for fast node->rev mapping.
37 37 *
38 38 * Positive value is index of the next node in the trie
39 39 * Negative value is a leaf: -(rev + 2)
40 40 * Zero is empty
41 41 */
42 42 typedef struct {
43 43 nodetreenode *nodes;
44 44 unsigned length; /* # nodes in use */
45 45 unsigned capacity; /* # nodes allocated */
46 46 int depth; /* maximum depth of tree */
47 47 int splits; /* # splits performed */
48 48 } nodetree;
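/*
 * A minimal sketch of the leaf encoding described above; the helper
 * name is hypothetical and does not exist in parsers.c:
 *
 *	static inline int nt_leaf_rev_sketch(int v)
 *	{
 *		return -(v + 2);	// a leaf value of -2 decodes to rev 0
 *	}
 *
 * Conversely, nt_insert() below stores a rev as -rev - 2, so the
 * nullid sentinel (rev -1) encodes to -1 and an empty slot stays 0.
 */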
49 49
50 50 /*
51 51 * This class has two behaviors.
52 52 *
53 53 * When used in a list-like way (with integer keys), we decode an
54 54 * entry in a RevlogNG index file on demand. Our last entry is a
55 55 * sentinel, always a nullid. We have limited support for
56 56 * integer-keyed insert and delete, only at elements right before the
57 57 * sentinel.
58 58 *
59 59 * With string keys, we lazily perform a reverse mapping from node to
60 60 * rev, using a base-16 trie.
61 61 */
62 62 typedef struct {
63 63 PyObject_HEAD
64 64 /* Type-specific fields go here. */
65 65 PyObject *data; /* raw bytes of index */
66 66 Py_buffer buf; /* buffer of data */
67 67 PyObject **cache; /* cached tuples */
68 68 const char **offsets; /* populated on demand */
69 69 Py_ssize_t raw_length; /* original number of elements */
70 70 Py_ssize_t length; /* current number of elements */
71 71 PyObject *added; /* populated on demand */
72 72 PyObject *headrevs; /* cache, invalidated on changes */
73 73 PyObject *filteredrevs;/* filtered revs set */
74 74 nodetree *nt; /* base-16 trie */
75 75 int ntrev; /* last rev scanned */
76 76 int ntlookups; /* # lookups */
77 77 int ntmisses; /* # lookups that miss the cache */
78 78 int inlined;
79 79 } indexObject;
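/*
 * How the two behaviors above map onto this file, as a rough sketch
 * (a reading of the code, not an authoritative summary): integer keys
 * go through the sequence protocol, index_length() and index_get(),
 * and yield the decoded 8-tuple for a rev, while 20-byte node keys go
 * through index_getitem() and index_find_node(), which lazily build
 * the base-16 trie and yield the matching rev.
 */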
80 80
81 81 static Py_ssize_t index_length(const indexObject *self)
82 82 {
83 83 if (self->added == NULL)
84 84 return self->length - 1;
85 85 return self->length + PyList_GET_SIZE(self->added) - 1;
86 86 }
87 87
88 88 static PyObject *nullentry;
89 89 static const char nullid[20];
90 90
91 91 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
92 92
93 93 #if LONG_MAX == 0x7fffffffL
94 94 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
95 95 #else
96 96 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
97 97 #endif
98 98
99 99 /* A RevlogNG v1 index entry is 64 bytes long. */
100 100 static const long v1_hdrsize = 64;
101 101
102 102 /*
103 103 * Return a pointer to the beginning of a RevlogNG record.
104 104 */
105 105 static const char *index_deref(indexObject *self, Py_ssize_t pos)
106 106 {
107 107 if (self->inlined && pos > 0) {
108 108 if (self->offsets == NULL) {
109 109 self->offsets = PyMem_Malloc(self->raw_length *
110 110 sizeof(*self->offsets));
111 111 if (self->offsets == NULL)
112 112 return (const char *)PyErr_NoMemory();
113 113 inline_scan(self, self->offsets);
114 114 }
115 115 return self->offsets[pos];
116 116 }
117 117
118 118 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
119 119 }
120 120
121 121 static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
122 122 int *ps, int maxrev)
123 123 {
124 124 if (rev >= self->length - 1) {
125 125 PyObject *tuple = PyList_GET_ITEM(self->added,
126 126 rev - self->length + 1);
127 127 ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
128 128 ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
129 129 } else {
130 130 const char *data = index_deref(self, rev);
131 131 ps[0] = getbe32(data + 24);
132 132 ps[1] = getbe32(data + 28);
133 133 }
134 134 	/* If the index file is corrupted, ps[] may point to invalid revisions,
135 135 	 * so trusting them unconditionally risks a buffer overflow. */
136 136 if (ps[0] > maxrev || ps[1] > maxrev) {
137 137 PyErr_SetString(PyExc_ValueError, "parent out of range");
138 138 return -1;
139 139 }
140 140 return 0;
141 141 }
142 142
143 143
144 144 /*
145 145 * RevlogNG format (all in big endian, data may be inlined):
146 146 * 6 bytes: offset
147 147 * 2 bytes: flags
148 148 * 4 bytes: compressed length
149 149 * 4 bytes: uncompressed length
150 150 * 4 bytes: base revision
151 151 * 4 bytes: link revision
152 152 * 4 bytes: parent 1 revision
153 153 * 4 bytes: parent 2 revision
154 154 * 32 bytes: nodeid (only 20 bytes used)
155 155 */
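/*
 * How index_get() below reassembles these fields, shown as a sketch
 * (the local names offset and flags are illustrative): the 6-byte
 * offset and 2-byte flags are packed into one 64-bit value, so
 *
 *	uint64_t offset = offset_flags >> 16;
 *	int flags = (int)(offset_flags & 0xffff);
 *
 * For rev 0 the offset field overlaps the revlog version header, which
 * is why the first entry is masked with 0xFFFF.
 */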
156 156 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
157 157 {
158 158 uint64_t offset_flags;
159 159 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
160 160 const char *c_node_id;
161 161 const char *data;
162 162 Py_ssize_t length = index_length(self);
163 163 PyObject *entry;
164 164
165 165 if (pos == -1) {
166 166 Py_INCREF(nullentry);
167 167 return nullentry;
168 168 }
169 169
170 170 if (pos < 0 || pos >= length) {
171 171 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
172 172 return NULL;
173 173 }
174 174
175 175 if (pos >= self->length - 1) {
176 176 PyObject *obj;
177 177 obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
178 178 Py_INCREF(obj);
179 179 return obj;
180 180 }
181 181
182 182 if (self->cache) {
183 183 if (self->cache[pos]) {
184 184 Py_INCREF(self->cache[pos]);
185 185 return self->cache[pos];
186 186 }
187 187 } else {
188 188 self->cache = calloc(self->raw_length, sizeof(PyObject *));
189 189 if (self->cache == NULL)
190 190 return PyErr_NoMemory();
191 191 }
192 192
193 193 data = index_deref(self, pos);
194 194 if (data == NULL)
195 195 return NULL;
196 196
197 197 offset_flags = getbe32(data + 4);
198 198 if (pos == 0) /* mask out version number for the first entry */
199 199 offset_flags &= 0xFFFF;
200 200 else {
201 201 uint32_t offset_high = getbe32(data);
202 202 offset_flags |= ((uint64_t)offset_high) << 32;
203 203 }
204 204
205 205 comp_len = getbe32(data + 8);
206 206 uncomp_len = getbe32(data + 12);
207 207 base_rev = getbe32(data + 16);
208 208 link_rev = getbe32(data + 20);
209 209 parent_1 = getbe32(data + 24);
210 210 parent_2 = getbe32(data + 28);
211 211 c_node_id = data + 32;
212 212
213 213 entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
214 214 uncomp_len, base_rev, link_rev,
215 215 parent_1, parent_2, c_node_id, 20);
216 216
217 217 if (entry) {
218 218 PyObject_GC_UnTrack(entry);
219 219 Py_INCREF(entry);
220 220 }
221 221
222 222 self->cache[pos] = entry;
223 223
224 224 return entry;
225 225 }
226 226
227 227 /*
228 228 * Return the 20-byte SHA of the node corresponding to the given rev.
229 229 */
230 230 static const char *index_node(indexObject *self, Py_ssize_t pos)
231 231 {
232 232 Py_ssize_t length = index_length(self);
233 233 const char *data;
234 234
235 235 if (pos == -1)
236 236 return nullid;
237 237
238 238 if (pos >= length)
239 239 return NULL;
240 240
241 241 if (pos >= self->length - 1) {
242 242 PyObject *tuple, *str;
243 243 tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
244 244 str = PyTuple_GetItem(tuple, 7);
245 245 return str ? PyBytes_AS_STRING(str) : NULL;
246 246 }
247 247
248 248 data = index_deref(self, pos);
249 249 return data ? data + 32 : NULL;
250 250 }
251 251
252 252 /*
253 253  * Return the 20-byte SHA of the node corresponding to the given rev. The
254 254  * rev is assumed to exist; if it does not, an exception is set.
255 255 */
256 256 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
257 257 {
258 258 const char *node = index_node(self, pos);
259 259 if (node == NULL) {
260 260 PyErr_Format(PyExc_IndexError, "could not access rev %d",
261 261 (int)pos);
262 262 }
263 263 return node;
264 264 }
265 265
266 266 static int nt_insert(indexObject *self, const char *node, int rev);
267 267
268 268 static int node_check(PyObject *obj, char **node)
269 269 {
270 270 Py_ssize_t nodelen;
271 271 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
272 272 return -1;
273 273 if (nodelen == 20)
274 274 return 0;
275 275 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
276 276 return -1;
277 277 }
278 278
279 279 static PyObject *index_append(indexObject *self, PyObject *obj)
280 280 {
281 281 char *node;
282 282 Py_ssize_t len;
283 283
284 284 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
285 285 PyErr_SetString(PyExc_TypeError, "8-tuple required");
286 286 return NULL;
287 287 }
288 288
289 289 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
290 290 return NULL;
291 291
292 292 len = index_length(self);
293 293
294 294 if (self->added == NULL) {
295 295 self->added = PyList_New(0);
296 296 if (self->added == NULL)
297 297 return NULL;
298 298 }
299 299
300 300 if (PyList_Append(self->added, obj) == -1)
301 301 return NULL;
302 302
303 303 if (self->nt)
304 304 nt_insert(self, node, len);
305 305
306 306 Py_CLEAR(self->headrevs);
307 307 Py_RETURN_NONE;
308 308 }
309 309
310 310 static void _index_clearcaches(indexObject *self)
311 311 {
312 312 if (self->cache) {
313 313 Py_ssize_t i;
314 314
315 315 for (i = 0; i < self->raw_length; i++)
316 316 Py_CLEAR(self->cache[i]);
317 317 free(self->cache);
318 318 self->cache = NULL;
319 319 }
320 320 if (self->offsets) {
321 321 PyMem_Free(self->offsets);
322 322 self->offsets = NULL;
323 323 }
324 324 if (self->nt != NULL) {
325 325 free(self->nt->nodes);
326 326 PyMem_Free(self->nt);
327 327 }
328 328 self->nt = NULL;
329 329 Py_CLEAR(self->headrevs);
330 330 }
331 331
332 332 static PyObject *index_clearcaches(indexObject *self)
333 333 {
334 334 _index_clearcaches(self);
335 335 self->ntrev = -1;
336 336 self->ntlookups = self->ntmisses = 0;
337 337 Py_RETURN_NONE;
338 338 }
339 339
340 340 static PyObject *index_stats(indexObject *self)
341 341 {
342 342 PyObject *obj = PyDict_New();
343 343 PyObject *t = NULL;
344 344
345 345 if (obj == NULL)
346 346 return NULL;
347 347
348 348 #define istat(__n, __d) \
349 349 do { \
350 350 t = PyInt_FromSsize_t(self->__n); \
351 351 if (!t) \
352 352 goto bail; \
353 353 if (PyDict_SetItemString(obj, __d, t) == -1) \
354 354 goto bail; \
355 355 Py_DECREF(t); \
356 356 } while (0)
357 357
358 358 if (self->added) {
359 359 Py_ssize_t len = PyList_GET_SIZE(self->added);
360 360 t = PyInt_FromSsize_t(len);
361 361 if (!t)
362 362 goto bail;
363 363 if (PyDict_SetItemString(obj, "index entries added", t) == -1)
364 364 goto bail;
365 365 Py_DECREF(t);
366 366 }
367 367
368 368 if (self->raw_length != self->length - 1)
369 369 istat(raw_length, "revs on disk");
370 370 istat(length, "revs in memory");
371 371 istat(ntlookups, "node trie lookups");
372 372 istat(ntmisses, "node trie misses");
373 373 istat(ntrev, "node trie last rev scanned");
374 374 if (self->nt) {
375 375 istat(nt->capacity, "node trie capacity");
376 376 istat(nt->depth, "node trie depth");
377 377 istat(nt->length, "node trie count");
378 378 istat(nt->splits, "node trie splits");
379 379 }
380 380
381 381 #undef istat
382 382
383 383 return obj;
384 384
385 385 bail:
386 386 Py_XDECREF(obj);
387 387 Py_XDECREF(t);
388 388 return NULL;
389 389 }
390 390
391 391 /*
392 392 * When we cache a list, we want to be sure the caller can't mutate
393 393 * the cached copy.
394 394 */
395 395 static PyObject *list_copy(PyObject *list)
396 396 {
397 397 Py_ssize_t len = PyList_GET_SIZE(list);
398 398 PyObject *newlist = PyList_New(len);
399 399 Py_ssize_t i;
400 400
401 401 if (newlist == NULL)
402 402 return NULL;
403 403
404 404 for (i = 0; i < len; i++) {
405 405 PyObject *obj = PyList_GET_ITEM(list, i);
406 406 Py_INCREF(obj);
407 407 PyList_SET_ITEM(newlist, i, obj);
408 408 }
409 409
410 410 return newlist;
411 411 }
412 412
413 413 static int check_filter(PyObject *filter, Py_ssize_t arg)
414 414 {
415 415 if (filter) {
416 416 PyObject *arglist, *result;
417 417 int isfiltered;
418 418
419 419 arglist = Py_BuildValue("(n)", arg);
420 420 if (!arglist) {
421 421 return -1;
422 422 }
423 423
424 424 result = PyEval_CallObject(filter, arglist);
425 425 Py_DECREF(arglist);
426 426 if (!result) {
427 427 return -1;
428 428 }
429 429
430 430 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
431 431 * same as this function, so we can just return it directly.*/
432 432 isfiltered = PyObject_IsTrue(result);
433 433 Py_DECREF(result);
434 434 return isfiltered;
435 435 } else {
436 436 return 0;
437 437 }
438 438 }
439 439
440 440 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
441 441 Py_ssize_t marker, char *phases)
442 442 {
443 443 PyObject *iter = NULL;
444 444 PyObject *iter_item = NULL;
445 445 Py_ssize_t min_idx = index_length(self) + 2;
446 446 long iter_item_long;
447 447
448 448 if (PyList_GET_SIZE(list) != 0) {
449 449 iter = PyObject_GetIter(list);
450 450 if (iter == NULL)
451 451 return -2;
452 452 while ((iter_item = PyIter_Next(iter))) {
453 453 iter_item_long = PyInt_AS_LONG(iter_item);
454 454 Py_DECREF(iter_item);
455 455 if (iter_item_long < min_idx)
456 456 min_idx = iter_item_long;
457 457 phases[iter_item_long] = marker;
458 458 }
459 459 Py_DECREF(iter);
460 460 }
461 461
462 462 return min_idx;
463 463 }
464 464
465 465 static inline void set_phase_from_parents(char *phases, int parent_1,
466 466 int parent_2, Py_ssize_t i)
467 467 {
468 468 if (parent_1 >= 0 && phases[parent_1] > phases[i])
469 469 phases[i] = phases[parent_1];
470 470 if (parent_2 >= 0 && phases[parent_2] > phases[i])
471 471 phases[i] = phases[parent_2];
472 472 }
473 473
474 474 static PyObject *reachableroots2(indexObject *self, PyObject *args)
475 475 {
476 476
477 477 /* Input */
478 478 long minroot;
479 479 PyObject *includepatharg = NULL;
480 480 int includepath = 0;
481 481 /* heads and roots are lists */
482 482 PyObject *heads = NULL;
483 483 PyObject *roots = NULL;
484 484 PyObject *reachable = NULL;
485 485
486 486 PyObject *val;
487 487 Py_ssize_t len = index_length(self);
488 488 long revnum;
489 489 Py_ssize_t k;
490 490 Py_ssize_t i;
491 491 Py_ssize_t l;
492 492 int r;
493 493 int parents[2];
494 494
495 495 /* Internal data structure:
496 496 	 * tovisit: array of length len+1 (all revs + nullrev), filled up to lentovisit
497 497 * revstates: array of length len+1 (all revs + nullrev) */
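	/* Indexing note: slot 0 of both arrays holds nullrev (-1) and slot
	 * r + 1 holds rev r, which is why every access below is written as
	 * revstates[revnum + 1]. */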
498 498 int *tovisit = NULL;
499 499 long lentovisit = 0;
500 500 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
501 501 char *revstates = NULL;
502 502
503 503 /* Get arguments */
504 504 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
505 505 &PyList_Type, &roots,
506 506 &PyBool_Type, &includepatharg))
507 507 goto bail;
508 508
509 509 if (includepatharg == Py_True)
510 510 includepath = 1;
511 511
512 512 /* Initialize return set */
513 513 reachable = PyList_New(0);
514 514 if (reachable == NULL)
515 515 goto bail;
516 516
517 517 /* Initialize internal datastructures */
518 518 tovisit = (int *)malloc((len + 1) * sizeof(int));
519 519 if (tovisit == NULL) {
520 520 PyErr_NoMemory();
521 521 goto bail;
522 522 }
523 523
524 524 revstates = (char *)calloc(len + 1, 1);
525 525 if (revstates == NULL) {
526 526 PyErr_NoMemory();
527 527 goto bail;
528 528 }
529 529
530 530 l = PyList_GET_SIZE(roots);
531 531 for (i = 0; i < l; i++) {
532 532 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
533 533 if (revnum == -1 && PyErr_Occurred())
534 534 goto bail;
535 535 /* If root is out of range, e.g. wdir(), it must be unreachable
536 536 * from heads. So we can just ignore it. */
537 537 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
538 538 continue;
539 539 revstates[revnum + 1] |= RS_ROOT;
540 540 }
541 541
542 542 /* Populate tovisit with all the heads */
543 543 l = PyList_GET_SIZE(heads);
544 544 for (i = 0; i < l; i++) {
545 545 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
546 546 if (revnum == -1 && PyErr_Occurred())
547 547 goto bail;
548 548 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
549 549 PyErr_SetString(PyExc_IndexError, "head out of range");
550 550 goto bail;
551 551 }
552 552 if (!(revstates[revnum + 1] & RS_SEEN)) {
553 553 tovisit[lentovisit++] = (int)revnum;
554 554 revstates[revnum + 1] |= RS_SEEN;
555 555 }
556 556 }
557 557
558 558 /* Visit the tovisit list and find the reachable roots */
559 559 k = 0;
560 560 while (k < lentovisit) {
561 561 /* Add the node to reachable if it is a root*/
562 562 revnum = tovisit[k++];
563 563 if (revstates[revnum + 1] & RS_ROOT) {
564 564 revstates[revnum + 1] |= RS_REACHABLE;
565 565 val = PyInt_FromLong(revnum);
566 566 if (val == NULL)
567 567 goto bail;
568 568 r = PyList_Append(reachable, val);
569 569 Py_DECREF(val);
570 570 if (r < 0)
571 571 goto bail;
572 572 if (includepath == 0)
573 573 continue;
574 574 }
575 575
576 576 /* Add its parents to the list of nodes to visit */
577 577 if (revnum == -1)
578 578 continue;
579 579 r = index_get_parents(self, revnum, parents, (int)len - 1);
580 580 if (r < 0)
581 581 goto bail;
582 582 for (i = 0; i < 2; i++) {
583 583 if (!(revstates[parents[i] + 1] & RS_SEEN)
584 584 && parents[i] >= minroot) {
585 585 tovisit[lentovisit++] = parents[i];
586 586 revstates[parents[i] + 1] |= RS_SEEN;
587 587 }
588 588 }
589 589 }
590 590
591 591 /* Find all the nodes in between the roots we found and the heads
592 592 * and add them to the reachable set */
593 593 if (includepath == 1) {
594 594 long minidx = minroot;
595 595 if (minidx < 0)
596 596 minidx = 0;
597 597 for (i = minidx; i < len; i++) {
598 598 if (!(revstates[i + 1] & RS_SEEN))
599 599 continue;
600 600 r = index_get_parents(self, i, parents, (int)len - 1);
601 601 /* Corrupted index file, error is set from
602 602 * index_get_parents */
603 603 if (r < 0)
604 604 goto bail;
605 605 if (((revstates[parents[0] + 1] |
606 606 revstates[parents[1] + 1]) & RS_REACHABLE)
607 607 && !(revstates[i + 1] & RS_REACHABLE)) {
608 608 revstates[i + 1] |= RS_REACHABLE;
609 609 val = PyInt_FromLong(i);
610 610 if (val == NULL)
611 611 goto bail;
612 612 r = PyList_Append(reachable, val);
613 613 Py_DECREF(val);
614 614 if (r < 0)
615 615 goto bail;
616 616 }
617 617 }
618 618 }
619 619
620 620 free(revstates);
621 621 free(tovisit);
622 622 return reachable;
623 623 bail:
624 624 Py_XDECREF(reachable);
625 625 free(revstates);
626 626 free(tovisit);
627 627 return NULL;
628 628 }
629 629
630 630 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
631 631 {
632 632 PyObject *roots = Py_None;
633 633 PyObject *ret = NULL;
634 634 PyObject *phasessize = NULL;
635 635 PyObject *phaseroots = NULL;
636 636 PyObject *phaseset = NULL;
637 637 PyObject *phasessetlist = NULL;
638 638 PyObject *rev = NULL;
639 639 Py_ssize_t len = index_length(self);
640 640 Py_ssize_t numphase = 0;
641 641 Py_ssize_t minrevallphases = 0;
642 642 Py_ssize_t minrevphase = 0;
643 643 Py_ssize_t i = 0;
644 644 char *phases = NULL;
645 645 long phase;
646 646
647 647 if (!PyArg_ParseTuple(args, "O", &roots))
648 648 goto done;
649 649 if (roots == NULL || !PyList_Check(roots)) {
650 650 PyErr_SetString(PyExc_TypeError, "roots must be a list");
651 651 goto done;
652 652 }
653 653
654 654 phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
655 655 if (phases == NULL) {
656 656 PyErr_NoMemory();
657 657 goto done;
658 658 }
659 659 /* Put the phase information of all the roots in phases */
660 660 numphase = PyList_GET_SIZE(roots)+1;
661 661 minrevallphases = len + 1;
662 662 phasessetlist = PyList_New(numphase);
663 663 if (phasessetlist == NULL)
664 664 goto done;
665 665
666 666 PyList_SET_ITEM(phasessetlist, 0, Py_None);
667 667 Py_INCREF(Py_None);
668 668
669 669 for (i = 0; i < numphase-1; i++) {
670 670 phaseroots = PyList_GET_ITEM(roots, i);
671 671 phaseset = PySet_New(NULL);
672 672 if (phaseset == NULL)
673 673 goto release;
674 674 PyList_SET_ITEM(phasessetlist, i+1, phaseset);
675 675 if (!PyList_Check(phaseroots)) {
676 676 PyErr_SetString(PyExc_TypeError,
677 677 "roots item must be a list");
678 678 goto release;
679 679 }
680 680 minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
681 681 if (minrevphase == -2) /* Error from add_roots_get_min */
682 682 goto release;
683 683 minrevallphases = MIN(minrevallphases, minrevphase);
684 684 }
685 685 /* Propagate the phase information from the roots to the revs */
686 686 if (minrevallphases != -1) {
687 687 int parents[2];
688 688 for (i = minrevallphases; i < len; i++) {
689 689 if (index_get_parents(self, i, parents,
690 690 (int)len - 1) < 0)
691 691 goto release;
692 692 set_phase_from_parents(phases, parents[0], parents[1], i);
693 693 }
694 694 }
695 695 /* Transform phase list to a python list */
696 696 phasessize = PyInt_FromLong(len);
697 697 if (phasessize == NULL)
698 698 goto release;
699 699 for (i = 0; i < len; i++) {
700 700 phase = phases[i];
701 701 		/* We only store phase sets for non-public phases; the public
702 702 		 * set is computed as a difference */
703 703 if (phase != 0) {
704 704 phaseset = PyList_GET_ITEM(phasessetlist, phase);
705 705 rev = PyInt_FromLong(i);
706 706 if (rev == NULL)
707 707 goto release;
708 708 PySet_Add(phaseset, rev);
709 709 Py_XDECREF(rev);
710 710 }
711 711 }
712 712 ret = PyTuple_Pack(2, phasessize, phasessetlist);
713 713
714 714 release:
715 715 Py_XDECREF(phasessize);
716 716 Py_XDECREF(phasessetlist);
717 717 done:
718 718 free(phases);
719 719 return ret;
720 720 }
721 721
722 722 static PyObject *index_headrevs(indexObject *self, PyObject *args)
723 723 {
724 724 Py_ssize_t i, j, len;
725 725 char *nothead = NULL;
726 726 PyObject *heads = NULL;
727 727 PyObject *filter = NULL;
728 728 PyObject *filteredrevs = Py_None;
729 729
730 730 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
731 731 return NULL;
732 732 }
733 733
734 734 if (self->headrevs && filteredrevs == self->filteredrevs)
735 735 return list_copy(self->headrevs);
736 736
737 737 Py_DECREF(self->filteredrevs);
738 738 self->filteredrevs = filteredrevs;
739 739 Py_INCREF(filteredrevs);
740 740
741 741 if (filteredrevs != Py_None) {
742 742 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
743 743 if (!filter) {
744 744 PyErr_SetString(PyExc_TypeError,
745 745 "filteredrevs has no attribute __contains__");
746 746 goto bail;
747 747 }
748 748 }
749 749
750 750 len = index_length(self);
751 751 heads = PyList_New(0);
752 752 if (heads == NULL)
753 753 goto bail;
754 754 if (len == 0) {
755 755 PyObject *nullid = PyInt_FromLong(-1);
756 756 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
757 757 Py_XDECREF(nullid);
758 758 goto bail;
759 759 }
760 760 goto done;
761 761 }
762 762
763 763 nothead = calloc(len, 1);
764 764 if (nothead == NULL) {
765 765 PyErr_NoMemory();
766 766 goto bail;
767 767 }
768 768
769 769 for (i = len - 1; i >= 0; i--) {
770 770 int isfiltered;
771 771 int parents[2];
772 772
773 773 /* If nothead[i] == 1, it means we've seen an unfiltered child of this
774 774 * node already, and therefore this node is not filtered. So we can skip
775 775 * the expensive check_filter step.
776 776 */
777 777 if (nothead[i] != 1) {
778 778 isfiltered = check_filter(filter, i);
779 779 if (isfiltered == -1) {
780 780 PyErr_SetString(PyExc_TypeError,
781 781 "unable to check filter");
782 782 goto bail;
783 783 }
784 784
785 785 if (isfiltered) {
786 786 nothead[i] = 1;
787 787 continue;
788 788 }
789 789 }
790 790
791 791 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
792 792 goto bail;
793 793 for (j = 0; j < 2; j++) {
794 794 if (parents[j] >= 0)
795 795 nothead[parents[j]] = 1;
796 796 }
797 797 }
798 798
799 799 for (i = 0; i < len; i++) {
800 800 PyObject *head;
801 801
802 802 if (nothead[i])
803 803 continue;
804 804 head = PyInt_FromSsize_t(i);
805 805 if (head == NULL || PyList_Append(heads, head) == -1) {
806 806 Py_XDECREF(head);
807 807 goto bail;
808 808 }
809 809 }
810 810
811 811 done:
812 812 self->headrevs = heads;
813 813 Py_XDECREF(filter);
814 814 free(nothead);
815 815 return list_copy(self->headrevs);
816 816 bail:
817 817 Py_XDECREF(filter);
818 818 Py_XDECREF(heads);
819 819 free(nothead);
820 820 return NULL;
821 821 }
822 822
823 823 /**
824 824 * Obtain the base revision index entry.
825 825 *
826 826 * Callers must ensure that rev >= 0 or illegal memory access may occur.
827 827 */
828 828 static inline int index_baserev(indexObject *self, int rev)
829 829 {
830 830 const char *data;
831 831
832 832 if (rev >= self->length - 1) {
833 833 PyObject *tuple = PyList_GET_ITEM(self->added,
834 834 rev - self->length + 1);
835 835 return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
836 836 }
837 837 else {
838 838 data = index_deref(self, rev);
839 839 if (data == NULL) {
840 840 return -2;
841 841 }
842 842
843 843 return getbe32(data + 16);
844 844 }
845 845 }
846 846
847 847 static PyObject *index_deltachain(indexObject *self, PyObject *args)
848 848 {
849 849 int rev, generaldelta;
850 850 PyObject *stoparg;
851 851 int stoprev, iterrev, baserev = -1;
852 852 int stopped;
853 853 PyObject *chain = NULL, *result = NULL;
854 854 const Py_ssize_t length = index_length(self);
855 855
856 856 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
857 857 return NULL;
858 858 }
859 859
860 860 if (PyInt_Check(stoparg)) {
861 861 stoprev = (int)PyInt_AsLong(stoparg);
862 862 if (stoprev == -1 && PyErr_Occurred()) {
863 863 return NULL;
864 864 }
865 865 }
866 866 else if (stoparg == Py_None) {
867 867 stoprev = -2;
868 868 }
869 869 else {
870 870 PyErr_SetString(PyExc_ValueError,
871 871 "stoprev must be integer or None");
872 872 return NULL;
873 873 }
874 874
875 875 if (rev < 0 || rev >= length) {
876 876 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
877 877 return NULL;
878 878 }
879 879
880 880 chain = PyList_New(0);
881 881 if (chain == NULL) {
882 882 return NULL;
883 883 }
884 884
885 885 baserev = index_baserev(self, rev);
886 886
887 887 /* This should never happen. */
888 888 if (baserev <= -2) {
889 889 /* Error should be set by index_deref() */
890 890 assert(PyErr_Occurred());
891 891 goto bail;
892 892 }
893 893
894 894 iterrev = rev;
895 895
896 896 while (iterrev != baserev && iterrev != stoprev) {
897 897 PyObject *value = PyInt_FromLong(iterrev);
898 898 if (value == NULL) {
899 899 goto bail;
900 900 }
901 901 if (PyList_Append(chain, value)) {
902 902 Py_DECREF(value);
903 903 goto bail;
904 904 }
905 905 Py_DECREF(value);
906 906
907 907 if (generaldelta) {
908 908 iterrev = baserev;
909 909 }
910 910 else {
911 911 iterrev--;
912 912 }
913 913
914 914 if (iterrev < 0) {
915 915 break;
916 916 }
917 917
918 918 if (iterrev >= length) {
919 919 PyErr_SetString(PyExc_IndexError, "revision outside index");
920 920 return NULL;
921 921 }
922 922
923 923 baserev = index_baserev(self, iterrev);
924 924
925 925 /* This should never happen. */
926 926 if (baserev <= -2) {
927 927 /* Error should be set by index_deref() */
928 928 assert(PyErr_Occurred());
929 929 goto bail;
930 930 }
931 931 }
932 932
933 933 if (iterrev == stoprev) {
934 934 stopped = 1;
935 935 }
936 936 else {
937 937 PyObject *value = PyInt_FromLong(iterrev);
938 938 if (value == NULL) {
939 939 goto bail;
940 940 }
941 941 if (PyList_Append(chain, value)) {
942 942 Py_DECREF(value);
943 943 goto bail;
944 944 }
945 945 Py_DECREF(value);
946 946
947 947 stopped = 0;
948 948 }
949 949
950 950 if (PyList_Reverse(chain)) {
951 951 goto bail;
952 952 }
953 953
954 954 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
955 955 Py_DECREF(chain);
956 956 return result;
957 957
958 958 bail:
959 959 Py_DECREF(chain);
960 960 return NULL;
961 961 }
962 962
963 963 static inline int nt_level(const char *node, Py_ssize_t level)
964 964 {
965 965 int v = node[level>>1];
966 966 if (!(level & 1))
967 967 v >>= 4;
968 968 return v & 0xf;
969 969 }
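/*
 * Worked example for nt_level() above: for a node whose first byte is
 * 0xab, level 0 picks the high nybble (0xa) and level 1 the low nybble
 * (0xb), so 40 levels walk all the nybbles of a 20-byte binary node.
 */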
970 970
971 971 /*
972 972 * Return values:
973 973 *
974 974 * -4: match is ambiguous (multiple candidates)
975 975 * -2: not found
976 976 * rest: valid rev
977 977 */
978 978 static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
979 979 int hex)
980 980 {
981 981 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
982 982 int level, maxlevel, off;
983 983
984 984 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
985 985 return -1;
986 986
987 987 if (hex)
988 988 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
989 989 else
990 990 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
991 991
992 992 for (level = off = 0; level < maxlevel; level++) {
993 993 int k = getnybble(node, level);
994 994 nodetreenode *n = &self->nt->nodes[off];
995 995 int v = n->children[k];
996 996
997 997 if (v < 0) {
998 998 const char *n;
999 999 Py_ssize_t i;
1000 1000
1001 1001 v = -(v + 2);
1002 1002 n = index_node(self, v);
1003 1003 if (n == NULL)
1004 1004 return -2;
1005 1005 for (i = level; i < maxlevel; i++)
1006 1006 if (getnybble(node, i) != nt_level(n, i))
1007 1007 return -2;
1008 1008 return v;
1009 1009 }
1010 1010 if (v == 0)
1011 1011 return -2;
1012 1012 off = v;
1013 1013 }
1014 1014 /* multiple matches against an ambiguous prefix */
1015 1015 return -4;
1016 1016 }
1017 1017
1018 1018 static int nt_new(nodetree *self)
1019 1019 {
1020 1020 if (self->length == self->capacity) {
1021 unsigned newcapacity;
1022 nodetreenode *newnodes;
1021 1023 if (self->capacity >= INT_MAX / (sizeof(nodetreenode) * 2)) {
1022 1024 PyErr_SetString(PyExc_MemoryError,
1023 1025 "overflow in nt_new");
1024 1026 return -1;
1025 1027 }
1026 self->capacity *= 2;
1027 self->nodes = realloc(self->nodes,
1028 self->capacity * sizeof(nodetreenode));
1029 if (self->nodes == NULL) {
1028 newcapacity = self->capacity * 2;
1029 newnodes = realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1030 if (newnodes == NULL) {
1030 1031 PyErr_SetString(PyExc_MemoryError, "out of memory");
1031 1032 return -1;
1032 1033 }
1034 self->capacity = newcapacity;
1035 self->nodes = newnodes;
1033 1036 memset(&self->nodes[self->length], 0,
1034 1037 sizeof(nodetreenode) * (self->capacity - self->length));
1035 1038 }
1036 1039 return self->length++;
1037 1040 }
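/*
 * The version of nt_new() above computes the new capacity and node
 * array into locals and only commits them once realloc() has
 * succeeded, so a failed allocation leaves the nodetree unchanged.
 * A minimal sketch of the same grow-then-commit pattern (the names
 * are illustrative, not from parsers.c):
 *
 *	nodetreenode *tmp = realloc(t->nodes, newcap * sizeof(*tmp));
 *	if (tmp == NULL)
 *		return -1;	// t->nodes and t->capacity are still valid
 *	t->capacity = newcap;
 *	t->nodes = tmp;
 */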
1038 1041
1039 1042 static int nt_insert(indexObject *self, const char *node, int rev)
1040 1043 {
1041 1044 int level = 0;
1042 1045 int off = 0;
1043 1046
1044 1047 while (level < 40) {
1045 1048 int k = nt_level(node, level);
1046 1049 nodetreenode *n;
1047 1050 int v;
1048 1051
1049 1052 n = &self->nt->nodes[off];
1050 1053 v = n->children[k];
1051 1054
1052 1055 if (v == 0) {
1053 1056 n->children[k] = -rev - 2;
1054 1057 return 0;
1055 1058 }
1056 1059 if (v < 0) {
1057 1060 const char *oldnode = index_node_existing(self, -(v + 2));
1058 1061 int noff;
1059 1062
1060 1063 if (oldnode == NULL)
1061 1064 return -1;
1062 1065 if (!memcmp(oldnode, node, 20)) {
1063 1066 n->children[k] = -rev - 2;
1064 1067 return 0;
1065 1068 }
1066 1069 noff = nt_new(self->nt);
1067 1070 if (noff == -1)
1068 1071 return -1;
1069 1072 /* self->nt->nodes may have been changed by realloc */
1070 1073 self->nt->nodes[off].children[k] = noff;
1071 1074 off = noff;
1072 1075 n = &self->nt->nodes[off];
1073 1076 n->children[nt_level(oldnode, ++level)] = v;
1074 1077 if (level > self->nt->depth)
1075 1078 self->nt->depth = level;
1076 1079 self->nt->splits += 1;
1077 1080 } else {
1078 1081 level += 1;
1079 1082 off = v;
1080 1083 }
1081 1084 }
1082 1085
1083 1086 return -1;
1084 1087 }
1085 1088
1086 1089 static int nt_delete_node(indexObject *self, const char *node)
1087 1090 {
1088 1091 /* rev==-2 happens to get encoded as 0, which is interpreted as not set */
1089 1092 return nt_insert(self, node, -2);
1090 1093 }
1091 1094
1092 1095 static int nt_init(indexObject *self)
1093 1096 {
1094 1097 if (self->nt == NULL) {
1095 1098 if ((size_t)self->raw_length > INT_MAX / sizeof(nodetreenode)) {
1096 1099 PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
1097 1100 return -1;
1098 1101 }
1099 1102 self->nt = PyMem_Malloc(sizeof(nodetree));
1100 1103 if (self->nt == NULL) {
1101 1104 PyErr_NoMemory();
1102 1105 return -1;
1103 1106 }
1104 1107 self->nt->capacity = self->raw_length < 4
1105 1108 ? 4 : (int)self->raw_length / 2;
1106 1109
1107 1110 self->nt->nodes = calloc(self->nt->capacity, sizeof(nodetreenode));
1108 1111 if (self->nt->nodes == NULL) {
1109 1112 PyMem_Free(self->nt);
1110 1113 self->nt = NULL;
1111 1114 PyErr_NoMemory();
1112 1115 return -1;
1113 1116 }
1114 1117 self->ntrev = (int)index_length(self);
1115 1118 self->ntlookups = 1;
1116 1119 self->ntmisses = 0;
1117 1120 self->nt->depth = 0;
1118 1121 self->nt->splits = 0;
1119 1122 self->nt->length = 1;
1120 1123 if (nt_insert(self, nullid, -1) == -1) {
1121 1124 free(self->nt->nodes);
1122 1125 PyMem_Free(self->nt);
1123 1126 self->nt = NULL;
1124 1127 return -1;
1125 1128 }
1126 1129 }
1127 1130 return 0;
1128 1131 }
1129 1132
1130 1133 /*
1131 1134 * Return values:
1132 1135 *
1133 1136 * -3: error (exception set)
1134 1137 * -2: not found (no exception set)
1135 1138 * rest: valid rev
1136 1139 */
1137 1140 static int index_find_node(indexObject *self,
1138 1141 const char *node, Py_ssize_t nodelen)
1139 1142 {
1140 1143 int rev;
1141 1144
1142 1145 if (nt_init(self) == -1)
1143 1146 return -3;
1144 1147
1145 1148 self->ntlookups++;
1146 1149 rev = nt_find(self, node, nodelen, 0);
1147 1150 if (rev >= -1)
1148 1151 return rev;
1149 1152
1150 1153 /*
1151 1154 * For the first handful of lookups, we scan the entire index,
1152 1155 * and cache only the matching nodes. This optimizes for cases
1153 1156 * like "hg tip", where only a few nodes are accessed.
1154 1157 *
1155 1158 * After that, we cache every node we visit, using a single
1156 1159 * scan amortized over multiple lookups. This gives the best
1157 1160 * bulk performance, e.g. for "hg log".
1158 1161 */
1159 1162 if (self->ntmisses++ < 4) {
1160 1163 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1161 1164 const char *n = index_node_existing(self, rev);
1162 1165 if (n == NULL)
1163 1166 return -3;
1164 1167 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1165 1168 if (nt_insert(self, n, rev) == -1)
1166 1169 return -3;
1167 1170 break;
1168 1171 }
1169 1172 }
1170 1173 } else {
1171 1174 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1172 1175 const char *n = index_node_existing(self, rev);
1173 1176 if (n == NULL)
1174 1177 return -3;
1175 1178 if (nt_insert(self, n, rev) == -1) {
1176 1179 self->ntrev = rev + 1;
1177 1180 return -3;
1178 1181 }
1179 1182 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1180 1183 break;
1181 1184 }
1182 1185 }
1183 1186 self->ntrev = rev;
1184 1187 }
1185 1188
1186 1189 if (rev >= 0)
1187 1190 return rev;
1188 1191 return -2;
1189 1192 }
1190 1193
1191 1194 static void raise_revlog_error(void)
1192 1195 {
1193 1196 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
1194 1197
1195 1198 mod = PyImport_ImportModule("mercurial.error");
1196 1199 if (mod == NULL) {
1197 1200 goto cleanup;
1198 1201 }
1199 1202
1200 1203 dict = PyModule_GetDict(mod);
1201 1204 if (dict == NULL) {
1202 1205 goto cleanup;
1203 1206 }
1204 1207 Py_INCREF(dict);
1205 1208
1206 1209 errclass = PyDict_GetItemString(dict, "RevlogError");
1207 1210 if (errclass == NULL) {
1208 1211 PyErr_SetString(PyExc_SystemError,
1209 1212 "could not find RevlogError");
1210 1213 goto cleanup;
1211 1214 }
1212 1215
1213 1216 /* value of exception is ignored by callers */
1214 1217 PyErr_SetString(errclass, "RevlogError");
1215 1218
1216 1219 cleanup:
1217 1220 Py_XDECREF(dict);
1218 1221 Py_XDECREF(mod);
1219 1222 }
1220 1223
1221 1224 static PyObject *index_getitem(indexObject *self, PyObject *value)
1222 1225 {
1223 1226 char *node;
1224 1227 int rev;
1225 1228
1226 1229 if (PyInt_Check(value))
1227 1230 return index_get(self, PyInt_AS_LONG(value));
1228 1231
1229 1232 if (node_check(value, &node) == -1)
1230 1233 return NULL;
1231 1234 rev = index_find_node(self, node, 20);
1232 1235 if (rev >= -1)
1233 1236 return PyInt_FromLong(rev);
1234 1237 if (rev == -2)
1235 1238 raise_revlog_error();
1236 1239 return NULL;
1237 1240 }
1238 1241
1239 1242 /*
1240 1243 * Fully populate the radix tree.
1241 1244 */
1242 1245 static int nt_populate(indexObject *self) {
1243 1246 int rev;
1244 1247 if (self->ntrev > 0) {
1245 1248 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1246 1249 const char *n = index_node_existing(self, rev);
1247 1250 if (n == NULL)
1248 1251 return -1;
1249 1252 if (nt_insert(self, n, rev) == -1)
1250 1253 return -1;
1251 1254 }
1252 1255 self->ntrev = -1;
1253 1256 }
1254 1257 return 0;
1255 1258 }
1256 1259
1257 1260 static int nt_partialmatch(indexObject *self, const char *node,
1258 1261 Py_ssize_t nodelen)
1259 1262 {
1260 1263 return nt_find(self, node, nodelen, 1);
1261 1264 }
1262 1265
1263 1266 /*
1264 1267 * Find the length of the shortest unique prefix of node.
1265 1268 *
1266 1269 * Return values:
1267 1270 *
1268 1271 * -3: error (exception set)
1269 1272 * -2: not found (no exception set)
1270 1273 * rest: length of shortest prefix
1271 1274 */
1272 1275 static int nt_shortest(indexObject *self, const char *node)
1273 1276 {
1274 1277 int level, off;
1275 1278
1276 1279 for (level = off = 0; level < 40; level++) {
1277 1280 int k, v;
1278 1281 nodetreenode *n = &self->nt->nodes[off];
1279 1282 k = nt_level(node, level);
1280 1283 v = n->children[k];
1281 1284 if (v < 0) {
1282 1285 const char *n;
1283 1286 v = -(v + 2);
1284 1287 n = index_node_existing(self, v);
1285 1288 if (n == NULL)
1286 1289 return -3;
1287 1290 if (memcmp(node, n, 20) != 0)
1288 1291 /*
1289 1292 * Found a unique prefix, but it wasn't for the
1290 1293 * requested node (i.e the requested node does
1291 1294 * not exist).
1292 1295 */
1293 1296 return -2;
1294 1297 return level + 1;
1295 1298 }
1296 1299 if (v == 0)
1297 1300 return -2;
1298 1301 off = v;
1299 1302 }
1300 1303 /*
1301 1304 	 * A full 40-hex-digit node is always unique, so we should never get
1302 1305 	 * here. If we do, there is a programming error in this file that made
1303 1306 	 * us insert a node longer than 40 hex digits.
1304 1307 */
1305 1308 PyErr_SetString(PyExc_Exception, "broken node tree");
1306 1309 return -3;
1307 1310 }
1308 1311
1309 1312 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1310 1313 {
1311 1314 const char *fullnode;
1312 1315 int nodelen;
1313 1316 char *node;
1314 1317 int rev, i;
1315 1318
1316 1319 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1317 1320 return NULL;
1318 1321
1319 1322 if (nodelen < 1) {
1320 1323 PyErr_SetString(PyExc_ValueError, "key too short");
1321 1324 return NULL;
1322 1325 }
1323 1326
1324 1327 if (nodelen > 40) {
1325 1328 PyErr_SetString(PyExc_ValueError, "key too long");
1326 1329 return NULL;
1327 1330 }
1328 1331
1329 1332 for (i = 0; i < nodelen; i++)
1330 1333 hexdigit(node, i);
1331 1334 if (PyErr_Occurred()) {
1332 1335 /* input contains non-hex characters */
1333 1336 PyErr_Clear();
1334 1337 Py_RETURN_NONE;
1335 1338 }
1336 1339
1337 1340 if (nt_init(self) == -1)
1338 1341 return NULL;
1339 1342 if (nt_populate(self) == -1)
1340 1343 return NULL;
1341 1344 rev = nt_partialmatch(self, node, nodelen);
1342 1345
1343 1346 switch (rev) {
1344 1347 case -4:
1345 1348 raise_revlog_error();
1346 1349 return NULL;
1347 1350 case -2:
1348 1351 Py_RETURN_NONE;
1349 1352 case -1:
1350 1353 return PyBytes_FromStringAndSize(nullid, 20);
1351 1354 }
1352 1355
1353 1356 fullnode = index_node_existing(self, rev);
1354 1357 if (fullnode == NULL) {
1355 1358 return NULL;
1356 1359 }
1357 1360 return PyBytes_FromStringAndSize(fullnode, 20);
1358 1361 }
1359 1362
1360 1363 static PyObject *index_shortest(indexObject *self, PyObject *args)
1361 1364 {
1362 1365 PyObject *val;
1363 1366 char *node;
1364 1367 int length;
1365 1368
1366 1369 if (!PyArg_ParseTuple(args, "O", &val))
1367 1370 return NULL;
1368 1371 if (node_check(val, &node) == -1)
1369 1372 return NULL;
1370 1373
1371 1374 self->ntlookups++;
1372 1375 if (nt_init(self) == -1)
1373 1376 return NULL;
1374 1377 if (nt_populate(self) == -1)
1375 1378 return NULL;
1376 1379 length = nt_shortest(self, node);
1377 1380 if (length == -3)
1378 1381 return NULL;
1379 1382 if (length == -2) {
1380 1383 raise_revlog_error();
1381 1384 return NULL;
1382 1385 }
1383 1386 return PyInt_FromLong(length);
1384 1387 }
1385 1388
1386 1389 static PyObject *index_m_get(indexObject *self, PyObject *args)
1387 1390 {
1388 1391 PyObject *val;
1389 1392 char *node;
1390 1393 int rev;
1391 1394
1392 1395 if (!PyArg_ParseTuple(args, "O", &val))
1393 1396 return NULL;
1394 1397 if (node_check(val, &node) == -1)
1395 1398 return NULL;
1396 1399 rev = index_find_node(self, node, 20);
1397 1400 if (rev == -3)
1398 1401 return NULL;
1399 1402 if (rev == -2)
1400 1403 Py_RETURN_NONE;
1401 1404 return PyInt_FromLong(rev);
1402 1405 }
1403 1406
1404 1407 static int index_contains(indexObject *self, PyObject *value)
1405 1408 {
1406 1409 char *node;
1407 1410
1408 1411 if (PyInt_Check(value)) {
1409 1412 long rev = PyInt_AS_LONG(value);
1410 1413 return rev >= -1 && rev < index_length(self);
1411 1414 }
1412 1415
1413 1416 if (node_check(value, &node) == -1)
1414 1417 return -1;
1415 1418
1416 1419 switch (index_find_node(self, node, 20)) {
1417 1420 case -3:
1418 1421 return -1;
1419 1422 case -2:
1420 1423 return 0;
1421 1424 default:
1422 1425 return 1;
1423 1426 }
1424 1427 }
1425 1428
1426 1429 typedef uint64_t bitmask;
1427 1430
1428 1431 /*
1429 1432 * Given a disjoint set of revs, return all candidates for the
1430 1433 * greatest common ancestor. In revset notation, this is the set
1431 1434 * "heads(::a and ::b and ...)"
1432 1435 */
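/*
 * Sketch of the bitmask bookkeeping used below (a reading of the code):
 * input rev i contributes the bit 1 << i, so with two inputs allseen is
 * 0x3 and poison is 0x4. seen[v] accumulates which inputs have v as an
 * ancestor; a rev whose mask equals allseen is appended to gca, and the
 * poison bit is then propagated to its ancestors so they are not
 * reported as further candidates.
 */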
1433 1436 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
1434 1437 int revcount)
1435 1438 {
1436 1439 const bitmask allseen = (1ull << revcount) - 1;
1437 1440 const bitmask poison = 1ull << revcount;
1438 1441 PyObject *gca = PyList_New(0);
1439 1442 int i, v, interesting;
1440 1443 int maxrev = -1;
1441 1444 bitmask sp;
1442 1445 bitmask *seen;
1443 1446
1444 1447 if (gca == NULL)
1445 1448 return PyErr_NoMemory();
1446 1449
1447 1450 for (i = 0; i < revcount; i++) {
1448 1451 if (revs[i] > maxrev)
1449 1452 maxrev = revs[i];
1450 1453 }
1451 1454
1452 1455 seen = calloc(sizeof(*seen), maxrev + 1);
1453 1456 if (seen == NULL) {
1454 1457 Py_DECREF(gca);
1455 1458 return PyErr_NoMemory();
1456 1459 }
1457 1460
1458 1461 for (i = 0; i < revcount; i++)
1459 1462 seen[revs[i]] = 1ull << i;
1460 1463
1461 1464 interesting = revcount;
1462 1465
1463 1466 for (v = maxrev; v >= 0 && interesting; v--) {
1464 1467 bitmask sv = seen[v];
1465 1468 int parents[2];
1466 1469
1467 1470 if (!sv)
1468 1471 continue;
1469 1472
1470 1473 if (sv < poison) {
1471 1474 interesting -= 1;
1472 1475 if (sv == allseen) {
1473 1476 PyObject *obj = PyInt_FromLong(v);
1474 1477 if (obj == NULL)
1475 1478 goto bail;
1476 1479 if (PyList_Append(gca, obj) == -1) {
1477 1480 Py_DECREF(obj);
1478 1481 goto bail;
1479 1482 }
1480 1483 sv |= poison;
1481 1484 for (i = 0; i < revcount; i++) {
1482 1485 if (revs[i] == v)
1483 1486 goto done;
1484 1487 }
1485 1488 }
1486 1489 }
1487 1490 if (index_get_parents(self, v, parents, maxrev) < 0)
1488 1491 goto bail;
1489 1492
1490 1493 for (i = 0; i < 2; i++) {
1491 1494 int p = parents[i];
1492 1495 if (p == -1)
1493 1496 continue;
1494 1497 sp = seen[p];
1495 1498 if (sv < poison) {
1496 1499 if (sp == 0) {
1497 1500 seen[p] = sv;
1498 1501 interesting++;
1499 1502 }
1500 1503 else if (sp != sv)
1501 1504 seen[p] |= sv;
1502 1505 } else {
1503 1506 if (sp && sp < poison)
1504 1507 interesting--;
1505 1508 seen[p] = sv;
1506 1509 }
1507 1510 }
1508 1511 }
1509 1512
1510 1513 done:
1511 1514 free(seen);
1512 1515 return gca;
1513 1516 bail:
1514 1517 free(seen);
1515 1518 Py_XDECREF(gca);
1516 1519 return NULL;
1517 1520 }
1518 1521
1519 1522 /*
1520 1523 * Given a disjoint set of revs, return the subset with the longest
1521 1524 * path to the root.
1522 1525 */
1523 1526 static PyObject *find_deepest(indexObject *self, PyObject *revs)
1524 1527 {
1525 1528 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
1526 1529 static const Py_ssize_t capacity = 24;
1527 1530 int *depth, *interesting = NULL;
1528 1531 int i, j, v, ninteresting;
1529 1532 PyObject *dict = NULL, *keys = NULL;
1530 1533 long *seen = NULL;
1531 1534 int maxrev = -1;
1532 1535 long final;
1533 1536
1534 1537 if (revcount > capacity) {
1535 1538 PyErr_Format(PyExc_OverflowError,
1536 1539 "bitset size (%ld) > capacity (%ld)",
1537 1540 (long)revcount, (long)capacity);
1538 1541 return NULL;
1539 1542 }
1540 1543
1541 1544 for (i = 0; i < revcount; i++) {
1542 1545 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1543 1546 if (n > maxrev)
1544 1547 maxrev = n;
1545 1548 }
1546 1549
1547 1550 depth = calloc(sizeof(*depth), maxrev + 1);
1548 1551 if (depth == NULL)
1549 1552 return PyErr_NoMemory();
1550 1553
1551 1554 seen = calloc(sizeof(*seen), maxrev + 1);
1552 1555 if (seen == NULL) {
1553 1556 PyErr_NoMemory();
1554 1557 goto bail;
1555 1558 }
1556 1559
1557 1560 interesting = calloc(sizeof(*interesting), 1 << revcount);
1558 1561 if (interesting == NULL) {
1559 1562 PyErr_NoMemory();
1560 1563 goto bail;
1561 1564 }
1562 1565
1563 1566 if (PyList_Sort(revs) == -1)
1564 1567 goto bail;
1565 1568
1566 1569 for (i = 0; i < revcount; i++) {
1567 1570 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1568 1571 long b = 1l << i;
1569 1572 depth[n] = 1;
1570 1573 seen[n] = b;
1571 1574 interesting[b] = 1;
1572 1575 }
1573 1576
1574 1577 /* invariant: ninteresting is the number of non-zero entries in
1575 1578 * interesting. */
1576 1579 ninteresting = (int)revcount;
1577 1580
1578 1581 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
1579 1582 int dv = depth[v];
1580 1583 int parents[2];
1581 1584 long sv;
1582 1585
1583 1586 if (dv == 0)
1584 1587 continue;
1585 1588
1586 1589 sv = seen[v];
1587 1590 if (index_get_parents(self, v, parents, maxrev) < 0)
1588 1591 goto bail;
1589 1592
1590 1593 for (i = 0; i < 2; i++) {
1591 1594 int p = parents[i];
1592 1595 long sp;
1593 1596 int dp;
1594 1597
1595 1598 if (p == -1)
1596 1599 continue;
1597 1600
1598 1601 dp = depth[p];
1599 1602 sp = seen[p];
1600 1603 if (dp <= dv) {
1601 1604 depth[p] = dv + 1;
1602 1605 if (sp != sv) {
1603 1606 interesting[sv] += 1;
1604 1607 seen[p] = sv;
1605 1608 if (sp) {
1606 1609 interesting[sp] -= 1;
1607 1610 if (interesting[sp] == 0)
1608 1611 ninteresting -= 1;
1609 1612 }
1610 1613 }
1611 1614 }
1612 1615 else if (dv == dp - 1) {
1613 1616 long nsp = sp | sv;
1614 1617 if (nsp == sp)
1615 1618 continue;
1616 1619 seen[p] = nsp;
1617 1620 interesting[sp] -= 1;
1618 1621 if (interesting[sp] == 0)
1619 1622 ninteresting -= 1;
1620 1623 if (interesting[nsp] == 0)
1621 1624 ninteresting += 1;
1622 1625 interesting[nsp] += 1;
1623 1626 }
1624 1627 }
1625 1628 interesting[sv] -= 1;
1626 1629 if (interesting[sv] == 0)
1627 1630 ninteresting -= 1;
1628 1631 }
1629 1632
1630 1633 final = 0;
1631 1634 j = ninteresting;
1632 1635 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
1633 1636 if (interesting[i] == 0)
1634 1637 continue;
1635 1638 final |= i;
1636 1639 j -= 1;
1637 1640 }
1638 1641 if (final == 0) {
1639 1642 keys = PyList_New(0);
1640 1643 goto bail;
1641 1644 }
1642 1645
1643 1646 dict = PyDict_New();
1644 1647 if (dict == NULL)
1645 1648 goto bail;
1646 1649
1647 1650 for (i = 0; i < revcount; i++) {
1648 1651 PyObject *key;
1649 1652
1650 1653 if ((final & (1 << i)) == 0)
1651 1654 continue;
1652 1655
1653 1656 key = PyList_GET_ITEM(revs, i);
1654 1657 Py_INCREF(key);
1655 1658 Py_INCREF(Py_None);
1656 1659 if (PyDict_SetItem(dict, key, Py_None) == -1) {
1657 1660 Py_DECREF(key);
1658 1661 Py_DECREF(Py_None);
1659 1662 goto bail;
1660 1663 }
1661 1664 }
1662 1665
1663 1666 keys = PyDict_Keys(dict);
1664 1667
1665 1668 bail:
1666 1669 free(depth);
1667 1670 free(seen);
1668 1671 free(interesting);
1669 1672 Py_XDECREF(dict);
1670 1673
1671 1674 return keys;
1672 1675 }
1673 1676
1674 1677 /*
1675 1678 * Given a (possibly overlapping) set of revs, return all the
1676 1679 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
1677 1680 */
1678 1681 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
1679 1682 {
1680 1683 PyObject *ret = NULL;
1681 1684 Py_ssize_t argcount, i, len;
1682 1685 bitmask repeat = 0;
1683 1686 int revcount = 0;
1684 1687 int *revs;
1685 1688
1686 1689 argcount = PySequence_Length(args);
1687 1690 revs = PyMem_Malloc(argcount * sizeof(*revs));
1688 1691 if (argcount > 0 && revs == NULL)
1689 1692 return PyErr_NoMemory();
1690 1693 len = index_length(self);
1691 1694
1692 1695 for (i = 0; i < argcount; i++) {
1693 1696 static const int capacity = 24;
1694 1697 PyObject *obj = PySequence_GetItem(args, i);
1695 1698 bitmask x;
1696 1699 long val;
1697 1700
1698 1701 if (!PyInt_Check(obj)) {
1699 1702 PyErr_SetString(PyExc_TypeError,
1700 1703 "arguments must all be ints");
1701 1704 Py_DECREF(obj);
1702 1705 goto bail;
1703 1706 }
1704 1707 val = PyInt_AsLong(obj);
1705 1708 Py_DECREF(obj);
1706 1709 if (val == -1) {
1707 1710 ret = PyList_New(0);
1708 1711 goto done;
1709 1712 }
1710 1713 if (val < 0 || val >= len) {
1711 1714 PyErr_SetString(PyExc_IndexError,
1712 1715 "index out of range");
1713 1716 goto bail;
1714 1717 }
1715 1718 /* this cheesy bloom filter lets us avoid some more
1716 1719 * expensive duplicate checks in the common set-is-disjoint
1717 1720 * case */
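		/* Concretely: 1ull << (val & 0x3f) maps each rev to one of 64
		 * bits in `repeat`. A collision (e.g. revs 5 and 69 share bit
		 * 5) only forces the O(revcount) duplicate scan below; it can
		 * never give a wrong result, because an unset bit proves no
		 * earlier rev had the same value. */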
1718 1721 x = 1ull << (val & 0x3f);
1719 1722 if (repeat & x) {
1720 1723 int k;
1721 1724 for (k = 0; k < revcount; k++) {
1722 1725 if (val == revs[k])
1723 1726 goto duplicate;
1724 1727 }
1725 1728 }
1726 1729 else repeat |= x;
1727 1730 if (revcount >= capacity) {
1728 1731 PyErr_Format(PyExc_OverflowError,
1729 1732 "bitset size (%d) > capacity (%d)",
1730 1733 revcount, capacity);
1731 1734 goto bail;
1732 1735 }
1733 1736 revs[revcount++] = (int)val;
1734 1737 duplicate:;
1735 1738 }
1736 1739
1737 1740 if (revcount == 0) {
1738 1741 ret = PyList_New(0);
1739 1742 goto done;
1740 1743 }
1741 1744 if (revcount == 1) {
1742 1745 PyObject *obj;
1743 1746 ret = PyList_New(1);
1744 1747 if (ret == NULL)
1745 1748 goto bail;
1746 1749 obj = PyInt_FromLong(revs[0]);
1747 1750 if (obj == NULL)
1748 1751 goto bail;
1749 1752 PyList_SET_ITEM(ret, 0, obj);
1750 1753 goto done;
1751 1754 }
1752 1755
1753 1756 ret = find_gca_candidates(self, revs, revcount);
1754 1757 if (ret == NULL)
1755 1758 goto bail;
1756 1759
1757 1760 done:
1758 1761 PyMem_Free(revs);
1759 1762 return ret;
1760 1763
1761 1764 bail:
1762 1765 PyMem_Free(revs);
1763 1766 Py_XDECREF(ret);
1764 1767 return NULL;
1765 1768 }
1766 1769
1767 1770 /*
1768 1771 * Given a (possibly overlapping) set of revs, return the greatest
1769 1772 * common ancestors: those with the longest path to the root.
1770 1773 */
1771 1774 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1772 1775 {
1773 1776 PyObject *ret;
1774 1777 PyObject *gca = index_commonancestorsheads(self, args);
1775 1778 if (gca == NULL)
1776 1779 return NULL;
1777 1780
1778 1781 if (PyList_GET_SIZE(gca) <= 1) {
1779 1782 return gca;
1780 1783 }
1781 1784
1782 1785 ret = find_deepest(self, gca);
1783 1786 Py_DECREF(gca);
1784 1787 return ret;
1785 1788 }
1786 1789
1787 1790 /*
1788 1791 * Invalidate any trie entries introduced by added revs.
1789 1792 */
1790 1793 static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
1791 1794 {
1792 1795 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
1793 1796
1794 1797 for (i = start; i < len; i++) {
1795 1798 PyObject *tuple = PyList_GET_ITEM(self->added, i);
1796 1799 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
1797 1800
1798 1801 nt_delete_node(self, PyBytes_AS_STRING(node));
1799 1802 }
1800 1803
1801 1804 if (start == 0)
1802 1805 Py_CLEAR(self->added);
1803 1806 }
1804 1807
1805 1808 /*
1806 1809 * Delete a numeric range of revs, which must be at the end of the
1807 1810  * Delete a numeric range of revs, which must lie at the end of the
1808 1811  * index, excluding the sentinel nullid entry.
1809 1812 static int index_slice_del(indexObject *self, PyObject *item)
1810 1813 {
1811 1814 Py_ssize_t start, stop, step, slicelength;
1812 1815 Py_ssize_t length = index_length(self) + 1;
1813 1816 int ret = 0;
1814 1817
1815 1818 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
1816 1819 #ifdef IS_PY3K
1817 1820 if (PySlice_GetIndicesEx(item, length,
1818 1821 #else
1819 1822 if (PySlice_GetIndicesEx((PySliceObject*)item, length,
1820 1823 #endif
1821 1824 &start, &stop, &step, &slicelength) < 0)
1822 1825 return -1;
1823 1826
1824 1827 if (slicelength <= 0)
1825 1828 return 0;
1826 1829
1827 1830 if ((step < 0 && start < stop) || (step > 0 && start > stop))
1828 1831 stop = start;
1829 1832
1830 1833 if (step < 0) {
1831 1834 stop = start + 1;
1832 1835 start = stop + step*(slicelength - 1) - 1;
1833 1836 step = -step;
1834 1837 }
1835 1838
1836 1839 if (step != 1) {
1837 1840 PyErr_SetString(PyExc_ValueError,
1838 1841 "revlog index delete requires step size of 1");
1839 1842 return -1;
1840 1843 }
1841 1844
1842 1845 if (stop != length - 1) {
1843 1846 PyErr_SetString(PyExc_IndexError,
1844 1847 "revlog index deletion indices are invalid");
1845 1848 return -1;
1846 1849 }
1847 1850
1848 1851 if (start < self->length - 1) {
1849 1852 if (self->nt) {
1850 1853 Py_ssize_t i;
1851 1854
1852 1855 for (i = start + 1; i < self->length - 1; i++) {
1853 1856 const char *node = index_node_existing(self, i);
1854 1857 if (node == NULL)
1855 1858 return -1;
1856 1859
1857 1860 nt_delete_node(self, node);
1858 1861 }
1859 1862 if (self->added)
1860 1863 nt_invalidate_added(self, 0);
1861 1864 if (self->ntrev > start)
1862 1865 self->ntrev = (int)start;
1863 1866 }
1864 1867 self->length = start + 1;
1865 1868 if (start < self->raw_length) {
1866 1869 if (self->cache) {
1867 1870 Py_ssize_t i;
1868 1871 for (i = start; i < self->raw_length; i++)
1869 1872 Py_CLEAR(self->cache[i]);
1870 1873 }
1871 1874 self->raw_length = start;
1872 1875 }
1873 1876 goto done;
1874 1877 }
1875 1878
1876 1879 if (self->nt) {
1877 1880 nt_invalidate_added(self, start - self->length + 1);
1878 1881 if (self->ntrev > start)
1879 1882 self->ntrev = (int)start;
1880 1883 }
1881 1884 if (self->added)
1882 1885 ret = PyList_SetSlice(self->added, start - self->length + 1,
1883 1886 PyList_GET_SIZE(self->added), NULL);
1884 1887 done:
1885 1888 Py_CLEAR(self->headrevs);
1886 1889 return ret;
1887 1890 }
1888 1891
1889 1892 /*
1890 1893 * Supported ops:
1891 1894 *
1892 1895 * slice deletion
1893 1896 * string assignment (extend node->rev mapping)
1894 1897 * string deletion (shrink node->rev mapping)
1895 1898 */
1896 1899 static int index_assign_subscript(indexObject *self, PyObject *item,
1897 1900 PyObject *value)
1898 1901 {
1899 1902 char *node;
1900 1903 long rev;
1901 1904
1902 1905 if (PySlice_Check(item) && value == NULL)
1903 1906 return index_slice_del(self, item);
1904 1907
1905 1908 if (node_check(item, &node) == -1)
1906 1909 return -1;
1907 1910
1908 1911 if (value == NULL)
1909 1912 return self->nt ? nt_delete_node(self, node) : 0;
1910 1913 rev = PyInt_AsLong(value);
1911 1914 if (rev > INT_MAX || rev < 0) {
1912 1915 if (!PyErr_Occurred())
1913 1916 PyErr_SetString(PyExc_ValueError, "rev out of range");
1914 1917 return -1;
1915 1918 }
1916 1919
1917 1920 if (nt_init(self) == -1)
1918 1921 return -1;
1919 1922 return nt_insert(self, node, (int)rev);
1920 1923 }
1921 1924
1922 1925 /*
1923 1926 * Find all RevlogNG entries in an index that has inline data. Update
1924 1927 * the optional "offsets" table with those entries.
1925 1928 */
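/*
 * Layout assumed by the scan below, as read from this function (not a
 * format specification): each 64-byte header is immediately followed
 * by its compressed data, whose length is the third 32-bit field of
 * the header, so the walk advances with
 *
 *	pos += v1_hdrsize + getbe32(data + pos + 8);
 *
 * until pos reaches the end of the buffer.
 */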
1926 1929 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
1927 1930 {
1928 1931 const char *data = (const char *)self->buf.buf;
1929 1932 Py_ssize_t pos = 0;
1930 1933 Py_ssize_t end = self->buf.len;
1931 1934 long incr = v1_hdrsize;
1932 1935 Py_ssize_t len = 0;
1933 1936
1934 1937 while (pos + v1_hdrsize <= end && pos >= 0) {
1935 1938 uint32_t comp_len;
1936 1939 /* 3rd element of header is length of compressed inline data */
1937 1940 comp_len = getbe32(data + pos + 8);
1938 1941 incr = v1_hdrsize + comp_len;
1939 1942 if (offsets)
1940 1943 offsets[len] = data + pos;
1941 1944 len++;
1942 1945 pos += incr;
1943 1946 }
1944 1947
1945 1948 if (pos != end) {
1946 1949 if (!PyErr_Occurred())
1947 1950 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1948 1951 return -1;
1949 1952 }
1950 1953
1951 1954 return len;
1952 1955 }
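/*
 * Illustrative helper, not part of parsers.c: the size of one inline
 * record is the fixed v1 header plus the compressed chunk length stored
 * big-endian at offset 8, exactly the `incr` computed in the loop above.
 */
static Py_ssize_t inline_record_size(const char *record)
{
	return v1_hdrsize + getbe32(record + 8);
}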
1953 1956
1954 1957 static int index_init(indexObject *self, PyObject *args)
1955 1958 {
1956 1959 PyObject *data_obj, *inlined_obj;
1957 1960 Py_ssize_t size;
1958 1961
1959 1962 /* Initialize before argument-checking to avoid index_dealloc() crash. */
1960 1963 self->raw_length = 0;
1961 1964 self->added = NULL;
1962 1965 self->cache = NULL;
1963 1966 self->data = NULL;
1964 1967 memset(&self->buf, 0, sizeof(self->buf));
1965 1968 self->headrevs = NULL;
1966 1969 self->filteredrevs = Py_None;
1967 1970 Py_INCREF(Py_None);
1968 1971 self->nt = NULL;
1969 1972 self->offsets = NULL;
1970 1973
1971 1974 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
1972 1975 return -1;
1973 1976 if (!PyObject_CheckBuffer(data_obj)) {
1974 1977 PyErr_SetString(PyExc_TypeError,
1975 1978 "data does not support buffer interface");
1976 1979 return -1;
1977 1980 }
1978 1981
1979 1982 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
1980 1983 return -1;
1981 1984 size = self->buf.len;
1982 1985
1983 1986 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
1984 1987 self->data = data_obj;
1985 1988
1986 1989 self->ntlookups = self->ntmisses = 0;
1987 1990 self->ntrev = -1;
1988 1991 Py_INCREF(self->data);
1989 1992
1990 1993 if (self->inlined) {
1991 1994 Py_ssize_t len = inline_scan(self, NULL);
1992 1995 if (len == -1)
1993 1996 goto bail;
1994 1997 self->raw_length = len;
1995 1998 self->length = len + 1;
1996 1999 } else {
1997 2000 if (size % v1_hdrsize) {
1998 2001 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1999 2002 goto bail;
2000 2003 }
2001 2004 self->raw_length = size / v1_hdrsize;
2002 2005 self->length = self->raw_length + 1;
2003 2006 }
2004 2007
2005 2008 return 0;
2006 2009 bail:
2007 2010 return -1;
2008 2011 }
2009 2012
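/*
 * The C index keeps no separate nodemap dict: the index object itself
 * implements the node->rev mapping protocol, so the `nodemap` property
 * below simply returns a new reference to the index.
 */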
2010 2013 static PyObject *index_nodemap(indexObject *self)
2011 2014 {
2012 2015 Py_INCREF(self);
2013 2016 return (PyObject *)self;
2014 2017 }
2015 2018
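/*
 * index_dealloc() undoes index_init(): the caches are cleared, the buffer
 * view taken on the data object is released, and the remaining object
 * references are dropped.
 */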
2016 2019 static void index_dealloc(indexObject *self)
2017 2020 {
2018 2021 _index_clearcaches(self);
2019 2022 Py_XDECREF(self->filteredrevs);
2020 2023 if (self->buf.buf) {
2021 2024 PyBuffer_Release(&self->buf);
2022 2025 memset(&self->buf, 0, sizeof(self->buf));
2023 2026 }
2024 2027 Py_XDECREF(self->data);
2025 2028 Py_XDECREF(self->added);
2026 2029 PyObject_Del(self);
2027 2030 }
2028 2031
2029 2032 static PySequenceMethods index_sequence_methods = {
2030 2033 (lenfunc)index_length, /* sq_length */
2031 2034 0, /* sq_concat */
2032 2035 0, /* sq_repeat */
2033 2036 (ssizeargfunc)index_get, /* sq_item */
2034 2037 0, /* sq_slice */
2035 2038 0, /* sq_ass_item */
2036 2039 0, /* sq_ass_slice */
2037 2040 (objobjproc)index_contains, /* sq_contains */
2038 2041 };
2039 2042
2040 2043 static PyMappingMethods index_mapping_methods = {
2041 2044 (lenfunc)index_length, /* mp_length */
2042 2045 (binaryfunc)index_getitem, /* mp_subscript */
2043 2046 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2044 2047 };
2045 2048
2046 2049 static PyMethodDef index_methods[] = {
2047 2050 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2048 2051 "return the gca set of the given revs"},
2049 2052 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2050 2053 METH_VARARGS,
2051 2054 "return the heads of the common ancestors of the given revs"},
2052 2055 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2053 2056 "clear the index caches"},
2054 2057 {"get", (PyCFunction)index_m_get, METH_VARARGS,
2055 2058 "get an index entry"},
2056 2059 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
2057 2060 METH_VARARGS, "compute phases"},
2058 2061 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2059 2062 "reachableroots"},
2060 2063 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2061 2064 "get head revisions"}, /* Can do filtering since 3.2 */
2062 2065 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2063 2066 "get filtered head revisions"}, /* Can always do filtering */
2064 2067 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2065 2068 "determine revisions with deltas to reconstruct fulltext"},
2066 2069 {"append", (PyCFunction)index_append, METH_O,
2067 2070 "append an index entry"},
2068 2071 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2069 2072 "match a potentially ambiguous node ID"},
2070 2073 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2071 2074 "find length of shortest hex nodeid of a binary ID"},
2072 2075 {"stats", (PyCFunction)index_stats, METH_NOARGS,
2073 2076 "stats for the index"},
2074 2077 {NULL} /* Sentinel */
2075 2078 };
2076 2079
2077 2080 static PyGetSetDef index_getset[] = {
2078 2081 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2079 2082 {NULL} /* Sentinel */
2080 2083 };
2081 2084
2082 2085 static PyTypeObject indexType = {
2083 2086 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2084 2087 "parsers.index", /* tp_name */
2085 2088 sizeof(indexObject), /* tp_basicsize */
2086 2089 0, /* tp_itemsize */
2087 2090 (destructor)index_dealloc, /* tp_dealloc */
2088 2091 0, /* tp_print */
2089 2092 0, /* tp_getattr */
2090 2093 0, /* tp_setattr */
2091 2094 0, /* tp_compare */
2092 2095 0, /* tp_repr */
2093 2096 0, /* tp_as_number */
2094 2097 &index_sequence_methods, /* tp_as_sequence */
2095 2098 &index_mapping_methods, /* tp_as_mapping */
2096 2099 0, /* tp_hash */
2097 2100 0, /* tp_call */
2098 2101 0, /* tp_str */
2099 2102 0, /* tp_getattro */
2100 2103 0, /* tp_setattro */
2101 2104 0, /* tp_as_buffer */
2102 2105 Py_TPFLAGS_DEFAULT, /* tp_flags */
2103 2106 "revlog index", /* tp_doc */
2104 2107 0, /* tp_traverse */
2105 2108 0, /* tp_clear */
2106 2109 0, /* tp_richcompare */
2107 2110 0, /* tp_weaklistoffset */
2108 2111 0, /* tp_iter */
2109 2112 0, /* tp_iternext */
2110 2113 index_methods, /* tp_methods */
2111 2114 0, /* tp_members */
2112 2115 index_getset, /* tp_getset */
2113 2116 0, /* tp_base */
2114 2117 0, /* tp_dict */
2115 2118 0, /* tp_descr_get */
2116 2119 0, /* tp_descr_set */
2117 2120 0, /* tp_dictoffset */
2118 2121 (initproc)index_init, /* tp_init */
2119 2122 0, /* tp_alloc */
2120 2123 };
2121 2124
2122 2125 /*
2123 2126 * returns a tuple of the form (index, cache) with elements as
2124 2127 * follows:
2125 2128 *
2126 2129 * index: an index object that lazily parses RevlogNG records
2127 2130 * cache: if data is inlined, a tuple (0, index_file_content), else None
2128 2131 * index_file_content could be a string or a buffer
2129 2132 *
2130 2133 * added complications are for backwards compatibility
2131 2134 */
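/*
 * Typical consumption from Python (illustrative; the exact call site lives
 * in Mercurial's revlog code):
 *
 *     index, cache = parsers.parse_index2(data, inline)
 */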
2132 2135 PyObject *parse_index2(PyObject *self, PyObject *args)
2133 2136 {
2134 2137 PyObject *tuple = NULL, *cache = NULL;
2135 2138 indexObject *idx;
2136 2139 int ret;
2137 2140
2138 2141 idx = PyObject_New(indexObject, &indexType);
2139 2142 if (idx == NULL)
2140 2143 goto bail;
2141 2144
2142 2145 ret = index_init(idx, args);
2143 2146 if (ret == -1)
2144 2147 goto bail;
2145 2148
2146 2149 if (idx->inlined) {
2147 2150 cache = Py_BuildValue("iO", 0, idx->data);
2148 2151 if (cache == NULL)
2149 2152 goto bail;
2150 2153 } else {
2151 2154 cache = Py_None;
2152 2155 Py_INCREF(cache);
2153 2156 }
2154 2157
2155 2158 tuple = Py_BuildValue("NN", idx, cache);
2156 2159 if (!tuple)
2157 2160 goto bail;
2158 2161 return tuple;
2159 2162
2160 2163 bail:
2161 2164 Py_XDECREF(idx);
2162 2165 Py_XDECREF(cache);
2163 2166 Py_XDECREF(tuple);
2164 2167 return NULL;
2165 2168 }
2166 2169
2167 2170 void revlog_module_init(PyObject *mod)
2168 2171 {
2169 2172 indexType.tp_new = PyType_GenericNew;
2170 2173 if (PyType_Ready(&indexType) < 0)
2171 2174 return;
2172 2175 Py_INCREF(&indexType);
2173 2176 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2174 2177
2175 2178 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
2176 2179 -1, -1, -1, -1, nullid, 20);
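	/*
	 * nullentry lives for the lifetime of the process and holds only
	 * atomic values, so untracking it spares the cycle collector from
	 * visiting an object that can never be collected.
	 */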
2177 2180 if (nullentry)
2178 2181 PyObject_GC_UnTrack(nullentry);
2179 2182 }