cext: fix most truncation warnings in revlog on Windows...
Matt Harbison
r39109:acd23830 default
@@ -1,2190 +1,2190 @@
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
20 20 #ifdef IS_PY3K
21 21 /* The mapping of Python types is meant to be temporary to get Python
22 22 * 3 to compile. We should remove this once Python 3 is fully
23 23 * supported and proper types are used in the extensions themselves. */
24 24 #define PyInt_Check PyLong_Check
25 25 #define PyInt_FromLong PyLong_FromLong
26 26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 27 #define PyInt_AS_LONG PyLong_AS_LONG
28 28 #define PyInt_AsLong PyLong_AsLong
29 29 #endif
30 30
31 31 typedef struct indexObjectStruct indexObject;
32 32
33 33 typedef struct {
34 34 int children[16];
35 35 } nodetreenode;
36 36
37 37 /*
38 38 * A base-16 trie for fast node->rev mapping.
39 39 *
40 40 * Positive value is index of the next node in the trie
41 41 * Negative value is a leaf: -(rev + 2)
42 42 * Zero is empty
43 43 */
44 44 typedef struct {
45 45 indexObject *index;
46 46 nodetreenode *nodes;
47 47 unsigned length; /* # nodes in use */
48 48 unsigned capacity; /* # nodes allocated */
49 49 int depth; /* maximum depth of tree */
50 50 int splits; /* # splits performed */
51 51 } nodetree;
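/*
 * Editor's sketch (hypothetical helper, not part of this changeset): how a
 * child slot value from the encoding above can be decoded. For example, a
 * slot holding -2 denotes rev 0 and a slot holding -3 denotes rev 1.
 */
static int nt_slot_rev(int v)
{
	if (v < 0)
		return -(v + 2); /* leaf: the slot stores -(rev + 2) */
	return -1; /* 0 = empty slot, > 0 = index of the next nodetreenode */
}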
52 52
53 53 /*
54 54 * This class has two behaviors.
55 55 *
56 56 * When used in a list-like way (with integer keys), we decode an
57 57 * entry in a RevlogNG index file on demand. Our last entry is a
58 58 * sentinel, always a nullid. We have limited support for
59 59 * integer-keyed insert and delete, only at elements right before the
60 60 * sentinel.
61 61 *
62 62 * With string keys, we lazily perform a reverse mapping from node to
63 63 * rev, using a base-16 trie.
64 64 */
65 65 struct indexObjectStruct {
66 66 PyObject_HEAD
67 67 /* Type-specific fields go here. */
68 68 PyObject *data; /* raw bytes of index */
69 69 Py_buffer buf; /* buffer of data */
70 70 PyObject **cache; /* cached tuples */
71 71 const char **offsets; /* populated on demand */
72 72 Py_ssize_t raw_length; /* original number of elements */
73 73 Py_ssize_t length; /* current number of elements */
74 74 PyObject *added; /* populated on demand */
75 75 PyObject *headrevs; /* cache, invalidated on changes */
76 76 PyObject *filteredrevs;/* filtered revs set */
77 77 nodetree *nt; /* base-16 trie */
78 78 int ntrev; /* last rev scanned */
79 79 int ntlookups; /* # lookups */
80 80 int ntmisses; /* # lookups that miss the cache */
81 81 int inlined;
82 82 };
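/*
 * Editor's note: the two behaviors described above correspond to the code
 * below; integer keys are handled by index_get()/index_getitem(), which
 * decode the 64-byte on-disk records on demand, while node (bytes) keys go
 * through index_find_node(), which lazily populates the base-16 trie.
 */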
83 83
84 84 static Py_ssize_t index_length(const indexObject *self)
85 85 {
86 86 if (self->added == NULL)
87 87 return self->length;
88 88 return self->length + PyList_GET_SIZE(self->added);
89 89 }
90 90
91 91 static PyObject *nullentry;
92 92 static const char nullid[20];
93 93
94 94 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
95 95
96 96 #if LONG_MAX == 0x7fffffffL
97 97 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
98 98 #else
99 99 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
100 100 #endif
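/*
 * Editor's note: the leading "K"/"k" format character consumes the 64-bit
 * offset_flags value ("K" is unsigned long long, used where long is only
 * 32 bits; "k" is unsigned long, used where long is 64 bits), and the
 * trailing "s#"/"y#" is the Python 2/Python 3 spelling of a
 * length-delimited byte string. See index_get() below.
 */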
101 101
102 102 /* A RevlogNG v1 index entry is 64 bytes long. */
103 103 static const long v1_hdrsize = 64;
104 104
105 105 /*
106 106 * Return a pointer to the beginning of a RevlogNG record.
107 107 */
108 108 static const char *index_deref(indexObject *self, Py_ssize_t pos)
109 109 {
110 110 if (self->inlined && pos > 0) {
111 111 if (self->offsets == NULL) {
112 112 self->offsets = PyMem_Malloc(self->raw_length *
113 113 sizeof(*self->offsets));
114 114 if (self->offsets == NULL)
115 115 return (const char *)PyErr_NoMemory();
116 116 inline_scan(self, self->offsets);
117 117 }
118 118 return self->offsets[pos];
119 119 }
120 120
121 121 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
122 122 }
123 123
124 124 static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
125 125 int *ps, int maxrev)
126 126 {
127 127 if (rev >= self->length) {
128 128 PyObject *tuple = PyList_GET_ITEM(self->added, rev - self->length);
129 129 ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
130 130 ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
131 131 } else {
132 132 const char *data = index_deref(self, rev);
133 133 ps[0] = getbe32(data + 24);
134 134 ps[1] = getbe32(data + 28);
135 135 }
136 136 /* If the index file is corrupted, ps[] may point to invalid revisions,
137 137 * so trusting them unconditionally risks a buffer overflow. */
138 138 if (ps[0] > maxrev || ps[1] > maxrev) {
139 139 PyErr_SetString(PyExc_ValueError, "parent out of range");
140 140 return -1;
141 141 }
142 142 return 0;
143 143 }
144 144
145 145
146 146 /*
147 147 * RevlogNG format (all in big endian, data may be inlined):
148 148 * 6 bytes: offset
149 149 * 2 bytes: flags
150 150 * 4 bytes: compressed length
151 151 * 4 bytes: uncompressed length
152 152 * 4 bytes: base revision
153 153 * 4 bytes: link revision
154 154 * 4 bytes: parent 1 revision
155 155 * 4 bytes: parent 2 revision
156 156 * 32 bytes: nodeid (only 20 bytes used)
157 157 */
158 158 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
159 159 {
160 160 uint64_t offset_flags;
161 161 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
162 162 const char *c_node_id;
163 163 const char *data;
164 164 Py_ssize_t length = index_length(self);
165 165 PyObject *entry;
166 166
167 167 if (pos == -1) {
168 168 Py_INCREF(nullentry);
169 169 return nullentry;
170 170 }
171 171
172 172 if (pos < 0 || pos >= length) {
173 173 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
174 174 return NULL;
175 175 }
176 176
177 177 if (pos >= self->length) {
178 178 PyObject *obj;
179 179 obj = PyList_GET_ITEM(self->added, pos - self->length);
180 180 Py_INCREF(obj);
181 181 return obj;
182 182 }
183 183
184 184 if (self->cache) {
185 185 if (self->cache[pos]) {
186 186 Py_INCREF(self->cache[pos]);
187 187 return self->cache[pos];
188 188 }
189 189 } else {
190 190 self->cache = calloc(self->raw_length, sizeof(PyObject *));
191 191 if (self->cache == NULL)
192 192 return PyErr_NoMemory();
193 193 }
194 194
195 195 data = index_deref(self, pos);
196 196 if (data == NULL)
197 197 return NULL;
198 198
199 199 offset_flags = getbe32(data + 4);
200 200 if (pos == 0) /* mask out version number for the first entry */
201 201 offset_flags &= 0xFFFF;
202 202 else {
203 203 uint32_t offset_high = getbe32(data);
204 204 offset_flags |= ((uint64_t)offset_high) << 32;
205 205 }
206 206
207 207 comp_len = getbe32(data + 8);
208 208 uncomp_len = getbe32(data + 12);
209 209 base_rev = getbe32(data + 16);
210 210 link_rev = getbe32(data + 20);
211 211 parent_1 = getbe32(data + 24);
212 212 parent_2 = getbe32(data + 28);
213 213 c_node_id = data + 32;
214 214
215 215 entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
216 216 uncomp_len, base_rev, link_rev,
217 217 parent_1, parent_2, c_node_id, 20);
218 218
219 219 if (entry) {
220 220 PyObject_GC_UnTrack(entry);
221 221 Py_INCREF(entry);
222 222 }
223 223
224 224 self->cache[pos] = entry;
225 225
226 226 return entry;
227 227 }
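/*
 * Editor's sketch (hypothetical helpers, not part of this changeset): the
 * offset_flags value built above packs the record's 6-byte offset and
 * 2-byte flags into a single 64-bit integer, with the flags in the low 16
 * bits and the data-file offset above them. Splitting them back out looks
 * like this (uint64_t is already available from the headers included at
 * the top of the file):
 */
static uint64_t entry_offset(uint64_t offset_flags)
{
	return offset_flags >> 16; /* byte offset of the chunk in the data file */
}

static int entry_flags(uint64_t offset_flags)
{
	return (int)(offset_flags & 0xFFFF); /* revision flag bits */
}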
228 228
229 229 /*
230 230 * Return the 20-byte SHA of the node corresponding to the given rev.
231 231 */
232 232 static const char *index_node(indexObject *self, Py_ssize_t pos)
233 233 {
234 234 Py_ssize_t length = index_length(self);
235 235 const char *data;
236 236
237 237 if (pos == -1)
238 238 return nullid;
239 239
240 240 if (pos >= length)
241 241 return NULL;
242 242
243 243 if (pos >= self->length) {
244 244 PyObject *tuple, *str;
245 245 tuple = PyList_GET_ITEM(self->added, pos - self->length);
246 246 str = PyTuple_GetItem(tuple, 7);
247 247 return str ? PyBytes_AS_STRING(str) : NULL;
248 248 }
249 249
250 250 data = index_deref(self, pos);
251 251 return data ? data + 32 : NULL;
252 252 }
253 253
254 254 /*
255 255 * Return the 20-byte SHA of the node corresponding to the given rev. The
256 256 * rev is assumed to exist; if it does not, an exception is set.
257 257 */
258 258 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
259 259 {
260 260 const char *node = index_node(self, pos);
261 261 if (node == NULL) {
262 262 PyErr_Format(PyExc_IndexError, "could not access rev %d",
263 263 (int)pos);
264 264 }
265 265 return node;
266 266 }
267 267
268 268 static int nt_insert(nodetree *self, const char *node, int rev);
269 269
270 270 static int node_check(PyObject *obj, char **node)
271 271 {
272 272 Py_ssize_t nodelen;
273 273 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
274 274 return -1;
275 275 if (nodelen == 20)
276 276 return 0;
277 277 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
278 278 return -1;
279 279 }
280 280
281 281 static PyObject *index_append(indexObject *self, PyObject *obj)
282 282 {
283 283 char *node;
284 284 Py_ssize_t len;
285 285
286 286 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
287 287 PyErr_SetString(PyExc_TypeError, "8-tuple required");
288 288 return NULL;
289 289 }
290 290
291 291 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
292 292 return NULL;
293 293
294 294 len = index_length(self);
295 295
296 296 if (self->added == NULL) {
297 297 self->added = PyList_New(0);
298 298 if (self->added == NULL)
299 299 return NULL;
300 300 }
301 301
302 302 if (PyList_Append(self->added, obj) == -1)
303 303 return NULL;
304 304
305 305 if (self->nt)
306 nt_insert(self->nt, node, len);
306 nt_insert(self->nt, node, (int)len);
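/*
 * Editor's note: this cast is one of the changes in this changeset. len is
 * a Py_ssize_t, which is 64 bits wide on 64-bit Windows while int remains
 * 32 bits, so the implicit narrowing triggered a "possible loss of data"
 * truncation warning under MSVC; the explicit cast documents the narrowing
 * and silences it.
 */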
307 307
308 308 Py_CLEAR(self->headrevs);
309 309 Py_RETURN_NONE;
310 310 }
311 311
312 312 static PyObject *index_stats(indexObject *self)
313 313 {
314 314 PyObject *obj = PyDict_New();
315 315 PyObject *t = NULL;
316 316
317 317 if (obj == NULL)
318 318 return NULL;
319 319
320 320 #define istat(__n, __d) \
321 321 do { \
322 322 t = PyInt_FromSsize_t(self->__n); \
323 323 if (!t) \
324 324 goto bail; \
325 325 if (PyDict_SetItemString(obj, __d, t) == -1) \
326 326 goto bail; \
327 327 Py_DECREF(t); \
328 328 } while (0)
329 329
330 330 if (self->added) {
331 331 Py_ssize_t len = PyList_GET_SIZE(self->added);
332 332 t = PyInt_FromSsize_t(len);
333 333 if (!t)
334 334 goto bail;
335 335 if (PyDict_SetItemString(obj, "index entries added", t) == -1)
336 336 goto bail;
337 337 Py_DECREF(t);
338 338 }
339 339
340 340 if (self->raw_length != self->length)
341 341 istat(raw_length, "revs on disk");
342 342 istat(length, "revs in memory");
343 343 istat(ntlookups, "node trie lookups");
344 344 istat(ntmisses, "node trie misses");
345 345 istat(ntrev, "node trie last rev scanned");
346 346 if (self->nt) {
347 347 istat(nt->capacity, "node trie capacity");
348 348 istat(nt->depth, "node trie depth");
349 349 istat(nt->length, "node trie count");
350 350 istat(nt->splits, "node trie splits");
351 351 }
352 352
353 353 #undef istat
354 354
355 355 return obj;
356 356
357 357 bail:
358 358 Py_XDECREF(obj);
359 359 Py_XDECREF(t);
360 360 return NULL;
361 361 }
362 362
363 363 /*
364 364 * When we cache a list, we want to be sure the caller can't mutate
365 365 * the cached copy.
366 366 */
367 367 static PyObject *list_copy(PyObject *list)
368 368 {
369 369 Py_ssize_t len = PyList_GET_SIZE(list);
370 370 PyObject *newlist = PyList_New(len);
371 371 Py_ssize_t i;
372 372
373 373 if (newlist == NULL)
374 374 return NULL;
375 375
376 376 for (i = 0; i < len; i++) {
377 377 PyObject *obj = PyList_GET_ITEM(list, i);
378 378 Py_INCREF(obj);
379 379 PyList_SET_ITEM(newlist, i, obj);
380 380 }
381 381
382 382 return newlist;
383 383 }
384 384
385 385 static int check_filter(PyObject *filter, Py_ssize_t arg)
386 386 {
387 387 if (filter) {
388 388 PyObject *arglist, *result;
389 389 int isfiltered;
390 390
391 391 arglist = Py_BuildValue("(n)", arg);
392 392 if (!arglist) {
393 393 return -1;
394 394 }
395 395
396 396 result = PyEval_CallObject(filter, arglist);
397 397 Py_DECREF(arglist);
398 398 if (!result) {
399 399 return -1;
400 400 }
401 401
402 402 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
403 403 * same as this function, so we can just return it directly.*/
404 404 isfiltered = PyObject_IsTrue(result);
405 405 Py_DECREF(result);
406 406 return isfiltered;
407 407 } else {
408 408 return 0;
409 409 }
410 410 }
411 411
412 412 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
413 413 Py_ssize_t marker, char *phases)
414 414 {
415 415 PyObject *iter = NULL;
416 416 PyObject *iter_item = NULL;
417 417 Py_ssize_t min_idx = index_length(self) + 2;
418 418 long iter_item_long;
419 419
420 420 if (PyList_GET_SIZE(list) != 0) {
421 421 iter = PyObject_GetIter(list);
422 422 if (iter == NULL)
423 423 return -2;
424 424 while ((iter_item = PyIter_Next(iter))) {
425 425 iter_item_long = PyInt_AS_LONG(iter_item);
426 426 Py_DECREF(iter_item);
427 427 if (iter_item_long < min_idx)
428 428 min_idx = iter_item_long;
429 429 phases[iter_item_long] = marker;
430 430 }
431 431 Py_DECREF(iter);
432 432 }
433 433
434 434 return min_idx;
435 435 }
436 436
437 437 static inline void set_phase_from_parents(char *phases, int parent_1,
438 438 int parent_2, Py_ssize_t i)
439 439 {
440 440 if (parent_1 >= 0 && phases[parent_1] > phases[i])
441 441 phases[i] = phases[parent_1];
442 442 if (parent_2 >= 0 && phases[parent_2] > phases[i])
443 443 phases[i] = phases[parent_2];
444 444 }
445 445
446 446 static PyObject *reachableroots2(indexObject *self, PyObject *args)
447 447 {
448 448
449 449 /* Input */
450 450 long minroot;
451 451 PyObject *includepatharg = NULL;
452 452 int includepath = 0;
453 453 /* heads and roots are lists */
454 454 PyObject *heads = NULL;
455 455 PyObject *roots = NULL;
456 456 PyObject *reachable = NULL;
457 457
458 458 PyObject *val;
459 459 Py_ssize_t len = index_length(self);
460 460 long revnum;
461 461 Py_ssize_t k;
462 462 Py_ssize_t i;
463 463 Py_ssize_t l;
464 464 int r;
465 465 int parents[2];
466 466
467 467 /* Internal data structure:
468 468 * tovisit: array of length len+1 (all revs + nullrev), filled up to lentovisit
469 469 * revstates: array of length len+1 (all revs + nullrev) */
470 470 int *tovisit = NULL;
471 471 long lentovisit = 0;
472 472 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
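/*
 * Editor's note: RS_SEEN marks revs already queued in tovisit, RS_ROOT
 * marks revs passed in as roots, and RS_REACHABLE marks revs that end up
 * in the returned set. revstates is indexed by rev + 1 so that nullrev
 * (-1) maps to slot 0.
 */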
473 473 char *revstates = NULL;
474 474
475 475 /* Get arguments */
476 476 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
477 477 &PyList_Type, &roots,
478 478 &PyBool_Type, &includepatharg))
479 479 goto bail;
480 480
481 481 if (includepatharg == Py_True)
482 482 includepath = 1;
483 483
484 484 /* Initialize return set */
485 485 reachable = PyList_New(0);
486 486 if (reachable == NULL)
487 487 goto bail;
488 488
489 489 /* Initialize internal datastructures */
490 490 tovisit = (int *)malloc((len + 1) * sizeof(int));
491 491 if (tovisit == NULL) {
492 492 PyErr_NoMemory();
493 493 goto bail;
494 494 }
495 495
496 496 revstates = (char *)calloc(len + 1, 1);
497 497 if (revstates == NULL) {
498 498 PyErr_NoMemory();
499 499 goto bail;
500 500 }
501 501
502 502 l = PyList_GET_SIZE(roots);
503 503 for (i = 0; i < l; i++) {
504 504 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
505 505 if (revnum == -1 && PyErr_Occurred())
506 506 goto bail;
507 507 /* If root is out of range, e.g. wdir(), it must be unreachable
508 508 * from heads. So we can just ignore it. */
509 509 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
510 510 continue;
511 511 revstates[revnum + 1] |= RS_ROOT;
512 512 }
513 513
514 514 /* Populate tovisit with all the heads */
515 515 l = PyList_GET_SIZE(heads);
516 516 for (i = 0; i < l; i++) {
517 517 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
518 518 if (revnum == -1 && PyErr_Occurred())
519 519 goto bail;
520 520 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
521 521 PyErr_SetString(PyExc_IndexError, "head out of range");
522 522 goto bail;
523 523 }
524 524 if (!(revstates[revnum + 1] & RS_SEEN)) {
525 525 tovisit[lentovisit++] = (int)revnum;
526 526 revstates[revnum + 1] |= RS_SEEN;
527 527 }
528 528 }
529 529
530 530 /* Visit the tovisit list and find the reachable roots */
531 531 k = 0;
532 532 while (k < lentovisit) {
533 533 /* Add the node to reachable if it is a root*/
534 534 revnum = tovisit[k++];
535 535 if (revstates[revnum + 1] & RS_ROOT) {
536 536 revstates[revnum + 1] |= RS_REACHABLE;
537 537 val = PyInt_FromLong(revnum);
538 538 if (val == NULL)
539 539 goto bail;
540 540 r = PyList_Append(reachable, val);
541 541 Py_DECREF(val);
542 542 if (r < 0)
543 543 goto bail;
544 544 if (includepath == 0)
545 545 continue;
546 546 }
547 547
548 548 /* Add its parents to the list of nodes to visit */
549 549 if (revnum == -1)
550 550 continue;
551 551 r = index_get_parents(self, revnum, parents, (int)len - 1);
552 552 if (r < 0)
553 553 goto bail;
554 554 for (i = 0; i < 2; i++) {
555 555 if (!(revstates[parents[i] + 1] & RS_SEEN)
556 556 && parents[i] >= minroot) {
557 557 tovisit[lentovisit++] = parents[i];
558 558 revstates[parents[i] + 1] |= RS_SEEN;
559 559 }
560 560 }
561 561 }
562 562
563 563 /* Find all the nodes in between the roots we found and the heads
564 564 * and add them to the reachable set */
565 565 if (includepath == 1) {
566 566 long minidx = minroot;
567 567 if (minidx < 0)
568 568 minidx = 0;
569 569 for (i = minidx; i < len; i++) {
570 570 if (!(revstates[i + 1] & RS_SEEN))
571 571 continue;
572 572 r = index_get_parents(self, i, parents, (int)len - 1);
573 573 /* Corrupted index file, error is set from
574 574 * index_get_parents */
575 575 if (r < 0)
576 576 goto bail;
577 577 if (((revstates[parents[0] + 1] |
578 578 revstates[parents[1] + 1]) & RS_REACHABLE)
579 579 && !(revstates[i + 1] & RS_REACHABLE)) {
580 580 revstates[i + 1] |= RS_REACHABLE;
581 val = PyInt_FromLong(i);
581 val = PyInt_FromSsize_t(i);
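/*
 * Editor's note: another truncation fix from this changeset. i is a
 * Py_ssize_t, and PyInt_FromLong would narrow it to a 32-bit long on
 * 64-bit Windows; the Py_ssize_t-taking constructor avoids the warning.
 * The same substitution is made at the phasessize and rev assignments
 * further down.
 */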
582 582 if (val == NULL)
583 583 goto bail;
584 584 r = PyList_Append(reachable, val);
585 585 Py_DECREF(val);
586 586 if (r < 0)
587 587 goto bail;
588 588 }
589 589 }
590 590 }
591 591
592 592 free(revstates);
593 593 free(tovisit);
594 594 return reachable;
595 595 bail:
596 596 Py_XDECREF(reachable);
597 597 free(revstates);
598 598 free(tovisit);
599 599 return NULL;
600 600 }
601 601
602 602 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
603 603 {
604 604 PyObject *roots = Py_None;
605 605 PyObject *ret = NULL;
606 606 PyObject *phasessize = NULL;
607 607 PyObject *phaseroots = NULL;
608 608 PyObject *phaseset = NULL;
609 609 PyObject *phasessetlist = NULL;
610 610 PyObject *rev = NULL;
611 611 Py_ssize_t len = index_length(self);
612 612 Py_ssize_t numphase = 0;
613 613 Py_ssize_t minrevallphases = 0;
614 614 Py_ssize_t minrevphase = 0;
615 615 Py_ssize_t i = 0;
616 616 char *phases = NULL;
617 617 long phase;
618 618
619 619 if (!PyArg_ParseTuple(args, "O", &roots))
620 620 goto done;
621 621 if (roots == NULL || !PyList_Check(roots)) {
622 622 PyErr_SetString(PyExc_TypeError, "roots must be a list");
623 623 goto done;
624 624 }
625 625
626 626 phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
627 627 if (phases == NULL) {
628 628 PyErr_NoMemory();
629 629 goto done;
630 630 }
631 631 /* Put the phase information of all the roots in phases */
632 632 numphase = PyList_GET_SIZE(roots)+1;
633 633 minrevallphases = len + 1;
634 634 phasessetlist = PyList_New(numphase);
635 635 if (phasessetlist == NULL)
636 636 goto done;
637 637
638 638 PyList_SET_ITEM(phasessetlist, 0, Py_None);
639 639 Py_INCREF(Py_None);
640 640
641 641 for (i = 0; i < numphase-1; i++) {
642 642 phaseroots = PyList_GET_ITEM(roots, i);
643 643 phaseset = PySet_New(NULL);
644 644 if (phaseset == NULL)
645 645 goto release;
646 646 PyList_SET_ITEM(phasessetlist, i+1, phaseset);
647 647 if (!PyList_Check(phaseroots)) {
648 648 PyErr_SetString(PyExc_TypeError,
649 649 "roots item must be a list");
650 650 goto release;
651 651 }
652 652 minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
653 653 if (minrevphase == -2) /* Error from add_roots_get_min */
654 654 goto release;
655 655 minrevallphases = MIN(minrevallphases, minrevphase);
656 656 }
657 657 /* Propagate the phase information from the roots to the revs */
658 658 if (minrevallphases != -1) {
659 659 int parents[2];
660 660 for (i = minrevallphases; i < len; i++) {
661 661 if (index_get_parents(self, i, parents,
662 662 (int)len - 1) < 0)
663 663 goto release;
664 664 set_phase_from_parents(phases, parents[0], parents[1], i);
665 665 }
666 666 }
667 667 /* Transform phase list to a python list */
668 phasessize = PyInt_FromLong(len);
668 phasessize = PyInt_FromSsize_t(len);
669 669 if (phasessize == NULL)
670 670 goto release;
671 671 for (i = 0; i < len; i++) {
672 672 phase = phases[i];
673 673 /* We only store the sets of revs for the non-public phases; the public
674 674 * phase is computed as a difference */
675 675 if (phase != 0) {
676 676 phaseset = PyList_GET_ITEM(phasessetlist, phase);
677 rev = PyInt_FromLong(i);
677 rev = PyInt_FromSsize_t(i);
678 678 if (rev == NULL)
679 679 goto release;
680 680 PySet_Add(phaseset, rev);
681 681 Py_XDECREF(rev);
682 682 }
683 683 }
684 684 ret = PyTuple_Pack(2, phasessize, phasessetlist);
685 685
686 686 release:
687 687 Py_XDECREF(phasessize);
688 688 Py_XDECREF(phasessetlist);
689 689 done:
690 690 free(phases);
691 691 return ret;
692 692 }
693 693
694 694 static PyObject *index_headrevs(indexObject *self, PyObject *args)
695 695 {
696 696 Py_ssize_t i, j, len;
697 697 char *nothead = NULL;
698 698 PyObject *heads = NULL;
699 699 PyObject *filter = NULL;
700 700 PyObject *filteredrevs = Py_None;
701 701
702 702 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
703 703 return NULL;
704 704 }
705 705
706 706 if (self->headrevs && filteredrevs == self->filteredrevs)
707 707 return list_copy(self->headrevs);
708 708
709 709 Py_DECREF(self->filteredrevs);
710 710 self->filteredrevs = filteredrevs;
711 711 Py_INCREF(filteredrevs);
712 712
713 713 if (filteredrevs != Py_None) {
714 714 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
715 715 if (!filter) {
716 716 PyErr_SetString(PyExc_TypeError,
717 717 "filteredrevs has no attribute __contains__");
718 718 goto bail;
719 719 }
720 720 }
721 721
722 722 len = index_length(self);
723 723 heads = PyList_New(0);
724 724 if (heads == NULL)
725 725 goto bail;
726 726 if (len == 0) {
727 727 PyObject *nullid = PyInt_FromLong(-1);
728 728 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
729 729 Py_XDECREF(nullid);
730 730 goto bail;
731 731 }
732 732 goto done;
733 733 }
734 734
735 735 nothead = calloc(len, 1);
736 736 if (nothead == NULL) {
737 737 PyErr_NoMemory();
738 738 goto bail;
739 739 }
740 740
741 741 for (i = len - 1; i >= 0; i--) {
742 742 int isfiltered;
743 743 int parents[2];
744 744
745 745 /* If nothead[i] == 1, it means we've seen an unfiltered child of this
746 746 * node already, and therefore this node is not filtered. So we can skip
747 747 * the expensive check_filter step.
748 748 */
749 749 if (nothead[i] != 1) {
750 750 isfiltered = check_filter(filter, i);
751 751 if (isfiltered == -1) {
752 752 PyErr_SetString(PyExc_TypeError,
753 753 "unable to check filter");
754 754 goto bail;
755 755 }
756 756
757 757 if (isfiltered) {
758 758 nothead[i] = 1;
759 759 continue;
760 760 }
761 761 }
762 762
763 763 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
764 764 goto bail;
765 765 for (j = 0; j < 2; j++) {
766 766 if (parents[j] >= 0)
767 767 nothead[parents[j]] = 1;
768 768 }
769 769 }
770 770
771 771 for (i = 0; i < len; i++) {
772 772 PyObject *head;
773 773
774 774 if (nothead[i])
775 775 continue;
776 776 head = PyInt_FromSsize_t(i);
777 777 if (head == NULL || PyList_Append(heads, head) == -1) {
778 778 Py_XDECREF(head);
779 779 goto bail;
780 780 }
781 781 }
782 782
783 783 done:
784 784 self->headrevs = heads;
785 785 Py_XDECREF(filter);
786 786 free(nothead);
787 787 return list_copy(self->headrevs);
788 788 bail:
789 789 Py_XDECREF(filter);
790 790 Py_XDECREF(heads);
791 791 free(nothead);
792 792 return NULL;
793 793 }
794 794
795 795 /**
796 796 * Obtain the base revision index entry.
797 797 *
798 798 * Callers must ensure that rev >= 0 or illegal memory access may occur.
799 799 */
800 800 static inline int index_baserev(indexObject *self, int rev)
801 801 {
802 802 const char *data;
803 803
804 804 if (rev >= self->length) {
805 805 PyObject *tuple = PyList_GET_ITEM(self->added, rev - self->length);
806 806 return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
807 807 }
808 808 else {
809 809 data = index_deref(self, rev);
810 810 if (data == NULL) {
811 811 return -2;
812 812 }
813 813
814 814 return getbe32(data + 16);
815 815 }
816 816 }
817 817
818 818 static PyObject *index_deltachain(indexObject *self, PyObject *args)
819 819 {
820 820 int rev, generaldelta;
821 821 PyObject *stoparg;
822 822 int stoprev, iterrev, baserev = -1;
823 823 int stopped;
824 824 PyObject *chain = NULL, *result = NULL;
825 825 const Py_ssize_t length = index_length(self);
826 826
827 827 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
828 828 return NULL;
829 829 }
830 830
831 831 if (PyInt_Check(stoparg)) {
832 832 stoprev = (int)PyInt_AsLong(stoparg);
833 833 if (stoprev == -1 && PyErr_Occurred()) {
834 834 return NULL;
835 835 }
836 836 }
837 837 else if (stoparg == Py_None) {
838 838 stoprev = -2;
839 839 }
840 840 else {
841 841 PyErr_SetString(PyExc_ValueError,
842 842 "stoprev must be integer or None");
843 843 return NULL;
844 844 }
845 845
846 846 if (rev < 0 || rev >= length) {
847 847 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
848 848 return NULL;
849 849 }
850 850
851 851 chain = PyList_New(0);
852 852 if (chain == NULL) {
853 853 return NULL;
854 854 }
855 855
856 856 baserev = index_baserev(self, rev);
857 857
858 858 /* This should never happen. */
859 859 if (baserev <= -2) {
860 860 /* Error should be set by index_deref() */
861 861 assert(PyErr_Occurred());
862 862 goto bail;
863 863 }
864 864
865 865 iterrev = rev;
866 866
867 867 while (iterrev != baserev && iterrev != stoprev) {
868 868 PyObject *value = PyInt_FromLong(iterrev);
869 869 if (value == NULL) {
870 870 goto bail;
871 871 }
872 872 if (PyList_Append(chain, value)) {
873 873 Py_DECREF(value);
874 874 goto bail;
875 875 }
876 876 Py_DECREF(value);
877 877
878 878 if (generaldelta) {
879 879 iterrev = baserev;
880 880 }
881 881 else {
882 882 iterrev--;
883 883 }
884 884
885 885 if (iterrev < 0) {
886 886 break;
887 887 }
888 888
889 889 if (iterrev >= length) {
890 890 PyErr_SetString(PyExc_IndexError, "revision outside index");
891 891 return NULL;
892 892 }
893 893
894 894 baserev = index_baserev(self, iterrev);
895 895
896 896 /* This should never happen. */
897 897 if (baserev <= -2) {
898 898 /* Error should be set by index_deref() */
899 899 assert(PyErr_Occurred());
900 900 goto bail;
901 901 }
902 902 }
903 903
904 904 if (iterrev == stoprev) {
905 905 stopped = 1;
906 906 }
907 907 else {
908 908 PyObject *value = PyInt_FromLong(iterrev);
909 909 if (value == NULL) {
910 910 goto bail;
911 911 }
912 912 if (PyList_Append(chain, value)) {
913 913 Py_DECREF(value);
914 914 goto bail;
915 915 }
916 916 Py_DECREF(value);
917 917
918 918 stopped = 0;
919 919 }
920 920
921 921 if (PyList_Reverse(chain)) {
922 922 goto bail;
923 923 }
924 924
925 925 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
926 926 Py_DECREF(chain);
927 927 return result;
928 928
929 929 bail:
930 930 Py_DECREF(chain);
931 931 return NULL;
932 932 }
933 933
934 934 static inline int nt_level(const char *node, Py_ssize_t level)
935 935 {
936 936 int v = node[level>>1];
937 937 if (!(level & 1))
938 938 v >>= 4;
939 939 return v & 0xf;
940 940 }
941 941
942 942 /*
943 943 * Return values:
944 944 *
945 945 * -4: match is ambiguous (multiple candidates)
946 946 * -2: not found
947 947 * rest: valid rev
948 948 */
949 949 static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
950 950 int hex)
951 951 {
952 952 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
953 953 int level, maxlevel, off;
954 954
955 955 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
956 956 return -1;
957 957
958 958 if (hex)
959 959 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
960 960 else
961 961 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
962 962
963 963 for (level = off = 0; level < maxlevel; level++) {
964 964 int k = getnybble(node, level);
965 965 nodetreenode *n = &self->nodes[off];
966 966 int v = n->children[k];
967 967
968 968 if (v < 0) {
969 969 const char *n;
970 970 Py_ssize_t i;
971 971
972 972 v = -(v + 2);
973 973 n = index_node(self->index, v);
974 974 if (n == NULL)
975 975 return -2;
976 976 for (i = level; i < maxlevel; i++)
977 977 if (getnybble(node, i) != nt_level(n, i))
978 978 return -2;
979 979 return v;
980 980 }
981 981 if (v == 0)
982 982 return -2;
983 983 off = v;
984 984 }
985 985 /* multiple matches against an ambiguous prefix */
986 986 return -4;
987 987 }
988 988
989 989 static int nt_new(nodetree *self)
990 990 {
991 991 if (self->length == self->capacity) {
992 992 unsigned newcapacity;
993 993 nodetreenode *newnodes;
994 994 newcapacity = self->capacity * 2;
995 995 if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
996 996 PyErr_SetString(PyExc_MemoryError, "overflow in nt_new");
997 997 return -1;
998 998 }
999 999 newnodes = realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1000 1000 if (newnodes == NULL) {
1001 1001 PyErr_SetString(PyExc_MemoryError, "out of memory");
1002 1002 return -1;
1003 1003 }
1004 1004 self->capacity = newcapacity;
1005 1005 self->nodes = newnodes;
1006 1006 memset(&self->nodes[self->length], 0,
1007 1007 sizeof(nodetreenode) * (self->capacity - self->length));
1008 1008 }
1009 1009 return self->length++;
1010 1010 }
1011 1011
1012 1012 static int nt_insert(nodetree *self, const char *node, int rev)
1013 1013 {
1014 1014 int level = 0;
1015 1015 int off = 0;
1016 1016
1017 1017 while (level < 40) {
1018 1018 int k = nt_level(node, level);
1019 1019 nodetreenode *n;
1020 1020 int v;
1021 1021
1022 1022 n = &self->nodes[off];
1023 1023 v = n->children[k];
1024 1024
1025 1025 if (v == 0) {
1026 1026 n->children[k] = -rev - 2;
1027 1027 return 0;
1028 1028 }
1029 1029 if (v < 0) {
1030 1030 const char *oldnode = index_node_existing(self->index, -(v + 2));
1031 1031 int noff;
1032 1032
1033 1033 if (oldnode == NULL)
1034 1034 return -1;
1035 1035 if (!memcmp(oldnode, node, 20)) {
1036 1036 n->children[k] = -rev - 2;
1037 1037 return 0;
1038 1038 }
1039 1039 noff = nt_new(self);
1040 1040 if (noff == -1)
1041 1041 return -1;
1042 1042 /* self->nodes may have been changed by realloc */
1043 1043 self->nodes[off].children[k] = noff;
1044 1044 off = noff;
1045 1045 n = &self->nodes[off];
1046 1046 n->children[nt_level(oldnode, ++level)] = v;
1047 1047 if (level > self->depth)
1048 1048 self->depth = level;
1049 1049 self->splits += 1;
1050 1050 } else {
1051 1051 level += 1;
1052 1052 off = v;
1053 1053 }
1054 1054 }
1055 1055
1056 1056 return -1;
1057 1057 }
1058 1058
1059 1059 static int nt_delete_node(nodetree *self, const char *node)
1060 1060 {
1061 1061 /* rev==-2 happens to get encoded as 0, which is interpreted as not set */
1062 1062 return nt_insert(self, node, -2);
1063 1063 }
1064 1064
1065 1065 static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
1066 1066 {
1067 1067 self->index = index;
1068 1068 /* The input capacity is in terms of revisions, while the field is in
1069 1069 * terms of nodetree nodes. */
1070 1070 self->capacity = (capacity < 4 ? 4 : capacity / 2);
1071 1071 self->depth = 0;
1072 1072 self->splits = 0;
1073 1073 if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
1074 1074 PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
1075 1075 return -1;
1076 1076 }
1077 1077 self->nodes = calloc(self->capacity, sizeof(nodetreenode));
1078 1078 if (self->nodes == NULL) {
1079 1079 PyErr_NoMemory();
1080 1080 return -1;
1081 1081 }
1082 1082 self->length = 1;
1083 1083 return 0;
1084 1084 }
1085 1085
1086 1086 static int nt_partialmatch(nodetree *self, const char *node,
1087 1087 Py_ssize_t nodelen)
1088 1088 {
1089 1089 return nt_find(self, node, nodelen, 1);
1090 1090 }
1091 1091
1092 1092 /*
1093 1093 * Find the length of the shortest unique prefix of node.
1094 1094 *
1095 1095 * Return values:
1096 1096 *
1097 1097 * -3: error (exception set)
1098 1098 * -2: not found (no exception set)
1099 1099 * rest: length of shortest prefix
1100 1100 */
1101 1101 static int nt_shortest(nodetree *self, const char *node)
1102 1102 {
1103 1103 int level, off;
1104 1104
1105 1105 for (level = off = 0; level < 40; level++) {
1106 1106 int k, v;
1107 1107 nodetreenode *n = &self->nodes[off];
1108 1108 k = nt_level(node, level);
1109 1109 v = n->children[k];
1110 1110 if (v < 0) {
1111 1111 const char *n;
1112 1112 v = -(v + 2);
1113 1113 n = index_node_existing(self->index, v);
1114 1114 if (n == NULL)
1115 1115 return -3;
1116 1116 if (memcmp(node, n, 20) != 0)
1117 1117 /*
1118 1118 * Found a unique prefix, but it wasn't for the
1119 1119 * requested node (i.e the requested node does
1120 1120 * not exist).
1121 1121 */
1122 1122 return -2;
1123 1123 return level + 1;
1124 1124 }
1125 1125 if (v == 0)
1126 1126 return -2;
1127 1127 off = v;
1128 1128 }
1129 1129 /*
1130 1130 * The node was still not unique after 40 hex digits, so this won't
1131 1131 * happen. Also, if we get here, then there's a programming error in
1132 1132 * this file that made us insert a node longer than 40 hex digits.
1133 1133 */
1134 1134 PyErr_SetString(PyExc_Exception, "broken node tree");
1135 1135 return -3;
1136 1136 }
1137 1137
1138 1138 static int index_init_nt(indexObject *self)
1139 1139 {
1140 1140 if (self->nt == NULL) {
1141 1141 self->nt = PyMem_Malloc(sizeof(nodetree));
1142 1142 if (self->nt == NULL) {
1143 1143 PyErr_NoMemory();
1144 1144 return -1;
1145 1145 }
1146 1146 if (nt_init(self->nt, self, self->raw_length) == -1) {
1147 1147 PyMem_Free(self->nt);
1148 1148 self->nt = NULL;
1149 1149 return -1;
1150 1150 }
1151 1151 if (nt_insert(self->nt, nullid, -1) == -1) {
1152 1152 PyMem_Free(self->nt);
1153 1153 self->nt = NULL;
1154 1154 return -1;
1155 1155 }
1156 1156 self->ntrev = (int)index_length(self);
1157 1157 self->ntlookups = 1;
1158 1158 self->ntmisses = 0;
1159 1159 }
1160 1160 return 0;
1161 1161 }
1162 1162
1163 1163 /*
1164 1164 * Return values:
1165 1165 *
1166 1166 * -3: error (exception set)
1167 1167 * -2: not found (no exception set)
1168 1168 * rest: valid rev
1169 1169 */
1170 1170 static int index_find_node(indexObject *self,
1171 1171 const char *node, Py_ssize_t nodelen)
1172 1172 {
1173 1173 int rev;
1174 1174
1175 1175 if (index_init_nt(self) == -1)
1176 1176 return -3;
1177 1177
1178 1178 self->ntlookups++;
1179 1179 rev = nt_find(self->nt, node, nodelen, 0);
1180 1180 if (rev >= -1)
1181 1181 return rev;
1182 1182
1183 1183 /*
1184 1184 * For the first handful of lookups, we scan the entire index,
1185 1185 * and cache only the matching nodes. This optimizes for cases
1186 1186 * like "hg tip", where only a few nodes are accessed.
1187 1187 *
1188 1188 * After that, we cache every node we visit, using a single
1189 1189 * scan amortized over multiple lookups. This gives the best
1190 1190 * bulk performance, e.g. for "hg log".
1191 1191 */
1192 1192 if (self->ntmisses++ < 4) {
1193 1193 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1194 1194 const char *n = index_node_existing(self, rev);
1195 1195 if (n == NULL)
1196 1196 return -3;
1197 1197 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1198 1198 if (nt_insert(self->nt, n, rev) == -1)
1199 1199 return -3;
1200 1200 break;
1201 1201 }
1202 1202 }
1203 1203 } else {
1204 1204 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1205 1205 const char *n = index_node_existing(self, rev);
1206 1206 if (n == NULL)
1207 1207 return -3;
1208 1208 if (nt_insert(self->nt, n, rev) == -1) {
1209 1209 self->ntrev = rev + 1;
1210 1210 return -3;
1211 1211 }
1212 1212 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1213 1213 break;
1214 1214 }
1215 1215 }
1216 1216 self->ntrev = rev;
1217 1217 }
1218 1218
1219 1219 if (rev >= 0)
1220 1220 return rev;
1221 1221 return -2;
1222 1222 }
1223 1223
1224 1224 static void raise_revlog_error(void)
1225 1225 {
1226 1226 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
1227 1227
1228 1228 mod = PyImport_ImportModule("mercurial.error");
1229 1229 if (mod == NULL) {
1230 1230 goto cleanup;
1231 1231 }
1232 1232
1233 1233 dict = PyModule_GetDict(mod);
1234 1234 if (dict == NULL) {
1235 1235 goto cleanup;
1236 1236 }
1237 1237 Py_INCREF(dict);
1238 1238
1239 1239 errclass = PyDict_GetItemString(dict, "RevlogError");
1240 1240 if (errclass == NULL) {
1241 1241 PyErr_SetString(PyExc_SystemError,
1242 1242 "could not find RevlogError");
1243 1243 goto cleanup;
1244 1244 }
1245 1245
1246 1246 /* value of exception is ignored by callers */
1247 1247 PyErr_SetString(errclass, "RevlogError");
1248 1248
1249 1249 cleanup:
1250 1250 Py_XDECREF(dict);
1251 1251 Py_XDECREF(mod);
1252 1252 }
1253 1253
1254 1254 static PyObject *index_getitem(indexObject *self, PyObject *value)
1255 1255 {
1256 1256 char *node;
1257 1257 int rev;
1258 1258
1259 1259 if (PyInt_Check(value))
1260 1260 return index_get(self, PyInt_AS_LONG(value));
1261 1261
1262 1262 if (node_check(value, &node) == -1)
1263 1263 return NULL;
1264 1264 rev = index_find_node(self, node, 20);
1265 1265 if (rev >= -1)
1266 1266 return PyInt_FromLong(rev);
1267 1267 if (rev == -2)
1268 1268 raise_revlog_error();
1269 1269 return NULL;
1270 1270 }
1271 1271
1272 1272 /*
1273 1273 * Fully populate the radix tree.
1274 1274 */
1275 1275 static int index_populate_nt(indexObject *self) {
1276 1276 int rev;
1277 1277 if (self->ntrev > 0) {
1278 1278 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1279 1279 const char *n = index_node_existing(self, rev);
1280 1280 if (n == NULL)
1281 1281 return -1;
1282 1282 if (nt_insert(self->nt, n, rev) == -1)
1283 1283 return -1;
1284 1284 }
1285 1285 self->ntrev = -1;
1286 1286 }
1287 1287 return 0;
1288 1288 }
1289 1289
1290 1290 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1291 1291 {
1292 1292 const char *fullnode;
1293 1293 int nodelen;
1294 1294 char *node;
1295 1295 int rev, i;
1296 1296
1297 1297 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1298 1298 return NULL;
1299 1299
1300 1300 if (nodelen < 1) {
1301 1301 PyErr_SetString(PyExc_ValueError, "key too short");
1302 1302 return NULL;
1303 1303 }
1304 1304
1305 1305 if (nodelen > 40) {
1306 1306 PyErr_SetString(PyExc_ValueError, "key too long");
1307 1307 return NULL;
1308 1308 }
1309 1309
1310 1310 for (i = 0; i < nodelen; i++)
1311 1311 hexdigit(node, i);
1312 1312 if (PyErr_Occurred()) {
1313 1313 /* input contains non-hex characters */
1314 1314 PyErr_Clear();
1315 1315 Py_RETURN_NONE;
1316 1316 }
1317 1317
1318 1318 if (index_init_nt(self) == -1)
1319 1319 return NULL;
1320 1320 if (index_populate_nt(self) == -1)
1321 1321 return NULL;
1322 1322 rev = nt_partialmatch(self->nt, node, nodelen);
1323 1323
1324 1324 switch (rev) {
1325 1325 case -4:
1326 1326 raise_revlog_error();
1327 1327 return NULL;
1328 1328 case -2:
1329 1329 Py_RETURN_NONE;
1330 1330 case -1:
1331 1331 return PyBytes_FromStringAndSize(nullid, 20);
1332 1332 }
1333 1333
1334 1334 fullnode = index_node_existing(self, rev);
1335 1335 if (fullnode == NULL) {
1336 1336 return NULL;
1337 1337 }
1338 1338 return PyBytes_FromStringAndSize(fullnode, 20);
1339 1339 }
1340 1340
1341 1341 static PyObject *index_shortest(indexObject *self, PyObject *args)
1342 1342 {
1343 1343 PyObject *val;
1344 1344 char *node;
1345 1345 int length;
1346 1346
1347 1347 if (!PyArg_ParseTuple(args, "O", &val))
1348 1348 return NULL;
1349 1349 if (node_check(val, &node) == -1)
1350 1350 return NULL;
1351 1351
1352 1352 self->ntlookups++;
1353 1353 if (index_init_nt(self) == -1)
1354 1354 return NULL;
1355 1355 if (index_populate_nt(self) == -1)
1356 1356 return NULL;
1357 1357 length = nt_shortest(self->nt, node);
1358 1358 if (length == -3)
1359 1359 return NULL;
1360 1360 if (length == -2) {
1361 1361 raise_revlog_error();
1362 1362 return NULL;
1363 1363 }
1364 1364 return PyInt_FromLong(length);
1365 1365 }
1366 1366
1367 1367 static PyObject *index_m_get(indexObject *self, PyObject *args)
1368 1368 {
1369 1369 PyObject *val;
1370 1370 char *node;
1371 1371 int rev;
1372 1372
1373 1373 if (!PyArg_ParseTuple(args, "O", &val))
1374 1374 return NULL;
1375 1375 if (node_check(val, &node) == -1)
1376 1376 return NULL;
1377 1377 rev = index_find_node(self, node, 20);
1378 1378 if (rev == -3)
1379 1379 return NULL;
1380 1380 if (rev == -2)
1381 1381 Py_RETURN_NONE;
1382 1382 return PyInt_FromLong(rev);
1383 1383 }
1384 1384
1385 1385 static int index_contains(indexObject *self, PyObject *value)
1386 1386 {
1387 1387 char *node;
1388 1388
1389 1389 if (PyInt_Check(value)) {
1390 1390 long rev = PyInt_AS_LONG(value);
1391 1391 return rev >= -1 && rev < index_length(self);
1392 1392 }
1393 1393
1394 1394 if (node_check(value, &node) == -1)
1395 1395 return -1;
1396 1396
1397 1397 switch (index_find_node(self, node, 20)) {
1398 1398 case -3:
1399 1399 return -1;
1400 1400 case -2:
1401 1401 return 0;
1402 1402 default:
1403 1403 return 1;
1404 1404 }
1405 1405 }
1406 1406
1407 1407 typedef uint64_t bitmask;
1408 1408
1409 1409 /*
1410 1410 * Given a disjoint set of revs, return all candidates for the
1411 1411 * greatest common ancestor. In revset notation, this is the set
1412 1412 * "heads(::a and ::b and ...)"
1413 1413 */
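/*
 * Editor's note on the bitmask scheme below: input rev revs[i] owns bit
 * 1 << i, and seen[v] records which inputs can reach rev v. Walking from
 * the highest rev downward, a rev whose mask equals allseen is an ancestor
 * of every input and is appended to gca; the poison bit is then propagated
 * to its ancestors so they are not reported as additional candidates.
 */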
1414 1414 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
1415 1415 int revcount)
1416 1416 {
1417 1417 const bitmask allseen = (1ull << revcount) - 1;
1418 1418 const bitmask poison = 1ull << revcount;
1419 1419 PyObject *gca = PyList_New(0);
1420 1420 int i, v, interesting;
1421 1421 int maxrev = -1;
1422 1422 bitmask sp;
1423 1423 bitmask *seen;
1424 1424
1425 1425 if (gca == NULL)
1426 1426 return PyErr_NoMemory();
1427 1427
1428 1428 for (i = 0; i < revcount; i++) {
1429 1429 if (revs[i] > maxrev)
1430 1430 maxrev = revs[i];
1431 1431 }
1432 1432
1433 1433 seen = calloc(sizeof(*seen), maxrev + 1);
1434 1434 if (seen == NULL) {
1435 1435 Py_DECREF(gca);
1436 1436 return PyErr_NoMemory();
1437 1437 }
1438 1438
1439 1439 for (i = 0; i < revcount; i++)
1440 1440 seen[revs[i]] = 1ull << i;
1441 1441
1442 1442 interesting = revcount;
1443 1443
1444 1444 for (v = maxrev; v >= 0 && interesting; v--) {
1445 1445 bitmask sv = seen[v];
1446 1446 int parents[2];
1447 1447
1448 1448 if (!sv)
1449 1449 continue;
1450 1450
1451 1451 if (sv < poison) {
1452 1452 interesting -= 1;
1453 1453 if (sv == allseen) {
1454 1454 PyObject *obj = PyInt_FromLong(v);
1455 1455 if (obj == NULL)
1456 1456 goto bail;
1457 1457 if (PyList_Append(gca, obj) == -1) {
1458 1458 Py_DECREF(obj);
1459 1459 goto bail;
1460 1460 }
1461 1461 sv |= poison;
1462 1462 for (i = 0; i < revcount; i++) {
1463 1463 if (revs[i] == v)
1464 1464 goto done;
1465 1465 }
1466 1466 }
1467 1467 }
1468 1468 if (index_get_parents(self, v, parents, maxrev) < 0)
1469 1469 goto bail;
1470 1470
1471 1471 for (i = 0; i < 2; i++) {
1472 1472 int p = parents[i];
1473 1473 if (p == -1)
1474 1474 continue;
1475 1475 sp = seen[p];
1476 1476 if (sv < poison) {
1477 1477 if (sp == 0) {
1478 1478 seen[p] = sv;
1479 1479 interesting++;
1480 1480 }
1481 1481 else if (sp != sv)
1482 1482 seen[p] |= sv;
1483 1483 } else {
1484 1484 if (sp && sp < poison)
1485 1485 interesting--;
1486 1486 seen[p] = sv;
1487 1487 }
1488 1488 }
1489 1489 }
1490 1490
1491 1491 done:
1492 1492 free(seen);
1493 1493 return gca;
1494 1494 bail:
1495 1495 free(seen);
1496 1496 Py_XDECREF(gca);
1497 1497 return NULL;
1498 1498 }
1499 1499
1500 1500 /*
1501 1501 * Given a disjoint set of revs, return the subset with the longest
1502 1502 * path to the root.
1503 1503 */
1504 1504 static PyObject *find_deepest(indexObject *self, PyObject *revs)
1505 1505 {
1506 1506 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
1507 1507 static const Py_ssize_t capacity = 24;
1508 1508 int *depth, *interesting = NULL;
1509 1509 int i, j, v, ninteresting;
1510 1510 PyObject *dict = NULL, *keys = NULL;
1511 1511 long *seen = NULL;
1512 1512 int maxrev = -1;
1513 1513 long final;
1514 1514
1515 1515 if (revcount > capacity) {
1516 1516 PyErr_Format(PyExc_OverflowError,
1517 1517 "bitset size (%ld) > capacity (%ld)",
1518 1518 (long)revcount, (long)capacity);
1519 1519 return NULL;
1520 1520 }
1521 1521
1522 1522 for (i = 0; i < revcount; i++) {
1523 1523 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1524 1524 if (n > maxrev)
1525 1525 maxrev = n;
1526 1526 }
1527 1527
1528 1528 depth = calloc(sizeof(*depth), maxrev + 1);
1529 1529 if (depth == NULL)
1530 1530 return PyErr_NoMemory();
1531 1531
1532 1532 seen = calloc(sizeof(*seen), maxrev + 1);
1533 1533 if (seen == NULL) {
1534 1534 PyErr_NoMemory();
1535 1535 goto bail;
1536 1536 }
1537 1537
1538 1538 interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
1539 1539 if (interesting == NULL) {
1540 1540 PyErr_NoMemory();
1541 1541 goto bail;
1542 1542 }
1543 1543
1544 1544 if (PyList_Sort(revs) == -1)
1545 1545 goto bail;
1546 1546
1547 1547 for (i = 0; i < revcount; i++) {
1548 1548 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1549 1549 long b = 1l << i;
1550 1550 depth[n] = 1;
1551 1551 seen[n] = b;
1552 1552 interesting[b] = 1;
1553 1553 }
1554 1554
1555 1555 /* invariant: ninteresting is the number of non-zero entries in
1556 1556 * interesting. */
1557 1557 ninteresting = (int)revcount;
1558 1558
1559 1559 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
1560 1560 int dv = depth[v];
1561 1561 int parents[2];
1562 1562 long sv;
1563 1563
1564 1564 if (dv == 0)
1565 1565 continue;
1566 1566
1567 1567 sv = seen[v];
1568 1568 if (index_get_parents(self, v, parents, maxrev) < 0)
1569 1569 goto bail;
1570 1570
1571 1571 for (i = 0; i < 2; i++) {
1572 1572 int p = parents[i];
1573 1573 long sp;
1574 1574 int dp;
1575 1575
1576 1576 if (p == -1)
1577 1577 continue;
1578 1578
1579 1579 dp = depth[p];
1580 1580 sp = seen[p];
1581 1581 if (dp <= dv) {
1582 1582 depth[p] = dv + 1;
1583 1583 if (sp != sv) {
1584 1584 interesting[sv] += 1;
1585 1585 seen[p] = sv;
1586 1586 if (sp) {
1587 1587 interesting[sp] -= 1;
1588 1588 if (interesting[sp] == 0)
1589 1589 ninteresting -= 1;
1590 1590 }
1591 1591 }
1592 1592 }
1593 1593 else if (dv == dp - 1) {
1594 1594 long nsp = sp | sv;
1595 1595 if (nsp == sp)
1596 1596 continue;
1597 1597 seen[p] = nsp;
1598 1598 interesting[sp] -= 1;
1599 1599 if (interesting[sp] == 0)
1600 1600 ninteresting -= 1;
1601 1601 if (interesting[nsp] == 0)
1602 1602 ninteresting += 1;
1603 1603 interesting[nsp] += 1;
1604 1604 }
1605 1605 }
1606 1606 interesting[sv] -= 1;
1607 1607 if (interesting[sv] == 0)
1608 1608 ninteresting -= 1;
1609 1609 }
1610 1610
1611 1611 final = 0;
1612 1612 j = ninteresting;
1613 1613 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
1614 1614 if (interesting[i] == 0)
1615 1615 continue;
1616 1616 final |= i;
1617 1617 j -= 1;
1618 1618 }
1619 1619 if (final == 0) {
1620 1620 keys = PyList_New(0);
1621 1621 goto bail;
1622 1622 }
1623 1623
1624 1624 dict = PyDict_New();
1625 1625 if (dict == NULL)
1626 1626 goto bail;
1627 1627
1628 1628 for (i = 0; i < revcount; i++) {
1629 1629 PyObject *key;
1630 1630
1631 1631 if ((final & (1 << i)) == 0)
1632 1632 continue;
1633 1633
1634 1634 key = PyList_GET_ITEM(revs, i);
1635 1635 Py_INCREF(key);
1636 1636 Py_INCREF(Py_None);
1637 1637 if (PyDict_SetItem(dict, key, Py_None) == -1) {
1638 1638 Py_DECREF(key);
1639 1639 Py_DECREF(Py_None);
1640 1640 goto bail;
1641 1641 }
1642 1642 }
1643 1643
1644 1644 keys = PyDict_Keys(dict);
1645 1645
1646 1646 bail:
1647 1647 free(depth);
1648 1648 free(seen);
1649 1649 free(interesting);
1650 1650 Py_XDECREF(dict);
1651 1651
1652 1652 return keys;
1653 1653 }
1654 1654
1655 1655 /*
1656 1656 * Given a (possibly overlapping) set of revs, return all the
1657 1657 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
1658 1658 */
1659 1659 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
1660 1660 {
1661 1661 PyObject *ret = NULL;
1662 1662 Py_ssize_t argcount, i, len;
1663 1663 bitmask repeat = 0;
1664 1664 int revcount = 0;
1665 1665 int *revs;
1666 1666
1667 1667 argcount = PySequence_Length(args);
1668 1668 revs = PyMem_Malloc(argcount * sizeof(*revs));
1669 1669 if (argcount > 0 && revs == NULL)
1670 1670 return PyErr_NoMemory();
1671 1671 len = index_length(self);
1672 1672
1673 1673 for (i = 0; i < argcount; i++) {
1674 1674 static const int capacity = 24;
1675 1675 PyObject *obj = PySequence_GetItem(args, i);
1676 1676 bitmask x;
1677 1677 long val;
1678 1678
1679 1679 if (!PyInt_Check(obj)) {
1680 1680 PyErr_SetString(PyExc_TypeError,
1681 1681 "arguments must all be ints");
1682 1682 Py_DECREF(obj);
1683 1683 goto bail;
1684 1684 }
1685 1685 val = PyInt_AsLong(obj);
1686 1686 Py_DECREF(obj);
1687 1687 if (val == -1) {
1688 1688 ret = PyList_New(0);
1689 1689 goto done;
1690 1690 }
1691 1691 if (val < 0 || val >= len) {
1692 1692 PyErr_SetString(PyExc_IndexError,
1693 1693 "index out of range");
1694 1694 goto bail;
1695 1695 }
1696 1696 /* this cheesy bloom filter lets us avoid some more
1697 1697 * expensive duplicate checks in the common set-is-disjoint
1698 1698 * case */
1699 1699 x = 1ull << (val & 0x3f);
1700 1700 if (repeat & x) {
1701 1701 int k;
1702 1702 for (k = 0; k < revcount; k++) {
1703 1703 if (val == revs[k])
1704 1704 goto duplicate;
1705 1705 }
1706 1706 }
1707 1707 else repeat |= x;
1708 1708 if (revcount >= capacity) {
1709 1709 PyErr_Format(PyExc_OverflowError,
1710 1710 "bitset size (%d) > capacity (%d)",
1711 1711 revcount, capacity);
1712 1712 goto bail;
1713 1713 }
1714 1714 revs[revcount++] = (int)val;
1715 1715 duplicate:;
1716 1716 }
1717 1717
1718 1718 if (revcount == 0) {
1719 1719 ret = PyList_New(0);
1720 1720 goto done;
1721 1721 }
1722 1722 if (revcount == 1) {
1723 1723 PyObject *obj;
1724 1724 ret = PyList_New(1);
1725 1725 if (ret == NULL)
1726 1726 goto bail;
1727 1727 obj = PyInt_FromLong(revs[0]);
1728 1728 if (obj == NULL)
1729 1729 goto bail;
1730 1730 PyList_SET_ITEM(ret, 0, obj);
1731 1731 goto done;
1732 1732 }
1733 1733
1734 1734 ret = find_gca_candidates(self, revs, revcount);
1735 1735 if (ret == NULL)
1736 1736 goto bail;
1737 1737
1738 1738 done:
1739 1739 PyMem_Free(revs);
1740 1740 return ret;
1741 1741
1742 1742 bail:
1743 1743 PyMem_Free(revs);
1744 1744 Py_XDECREF(ret);
1745 1745 return NULL;
1746 1746 }
1747 1747
1748 1748 /*
1749 1749 * Given a (possibly overlapping) set of revs, return the greatest
1750 1750 * common ancestors: those with the longest path to the root.
1751 1751 */
1752 1752 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1753 1753 {
1754 1754 PyObject *ret;
1755 1755 PyObject *gca = index_commonancestorsheads(self, args);
1756 1756 if (gca == NULL)
1757 1757 return NULL;
1758 1758
1759 1759 if (PyList_GET_SIZE(gca) <= 1) {
1760 1760 return gca;
1761 1761 }
1762 1762
1763 1763 ret = find_deepest(self, gca);
1764 1764 Py_DECREF(gca);
1765 1765 return ret;
1766 1766 }
1767 1767
1768 1768 /*
1769 1769 * Invalidate any trie entries introduced by added revs.
1770 1770 */
1771 1771 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
1772 1772 {
1773 1773 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
1774 1774
1775 1775 for (i = start; i < len; i++) {
1776 1776 PyObject *tuple = PyList_GET_ITEM(self->added, i);
1777 1777 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
1778 1778
1779 1779 nt_delete_node(self->nt, PyBytes_AS_STRING(node));
1780 1780 }
1781 1781
1782 1782 if (start == 0)
1783 1783 Py_CLEAR(self->added);
1784 1784 }
1785 1785
1786 1786 /*
1787 1787 * Delete a numeric range of revs, which must be at the end of the
1788 1788 * index, but exclude the sentinel nullid entry.
1789 1789 */
1790 1790 static int index_slice_del(indexObject *self, PyObject *item)
1791 1791 {
1792 1792 Py_ssize_t start, stop, step, slicelength;
1793 1793 Py_ssize_t length = index_length(self) + 1;
1794 1794 int ret = 0;
1795 1795
1796 1796 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
1797 1797 #ifdef IS_PY3K
1798 1798 if (PySlice_GetIndicesEx(item, length,
1799 1799 #else
1800 1800 if (PySlice_GetIndicesEx((PySliceObject*)item, length,
1801 1801 #endif
1802 1802 &start, &stop, &step, &slicelength) < 0)
1803 1803 return -1;
1804 1804
1805 1805 if (slicelength <= 0)
1806 1806 return 0;
1807 1807
1808 1808 if ((step < 0 && start < stop) || (step > 0 && start > stop))
1809 1809 stop = start;
1810 1810
1811 1811 if (step < 0) {
1812 1812 stop = start + 1;
1813 1813 start = stop + step*(slicelength - 1) - 1;
1814 1814 step = -step;
1815 1815 }
1816 1816
1817 1817 if (step != 1) {
1818 1818 PyErr_SetString(PyExc_ValueError,
1819 1819 "revlog index delete requires step size of 1");
1820 1820 return -1;
1821 1821 }
1822 1822
1823 1823 if (stop != length - 1) {
1824 1824 PyErr_SetString(PyExc_IndexError,
1825 1825 "revlog index deletion indices are invalid");
1826 1826 return -1;
1827 1827 }
1828 1828
1829 1829 if (start < self->length) {
1830 1830 if (self->nt) {
1831 1831 Py_ssize_t i;
1832 1832
1833 1833 for (i = start + 1; i < self->length; i++) {
1834 1834 const char *node = index_node_existing(self, i);
1835 1835 if (node == NULL)
1836 1836 return -1;
1837 1837
1838 1838 nt_delete_node(self->nt, node);
1839 1839 }
1840 1840 if (self->added)
1841 1841 index_invalidate_added(self, 0);
1842 1842 if (self->ntrev > start)
1843 1843 self->ntrev = (int)start;
1844 1844 }
1845 1845 self->length = start;
1846 1846 if (start < self->raw_length) {
1847 1847 if (self->cache) {
1848 1848 Py_ssize_t i;
1849 1849 for (i = start; i < self->raw_length; i++)
1850 1850 Py_CLEAR(self->cache[i]);
1851 1851 }
1852 1852 self->raw_length = start;
1853 1853 }
1854 1854 goto done;
1855 1855 }
1856 1856
1857 1857 if (self->nt) {
1858 1858 index_invalidate_added(self, start - self->length);
1859 1859 if (self->ntrev > start)
1860 1860 self->ntrev = (int)start;
1861 1861 }
1862 1862 if (self->added)
1863 1863 ret = PyList_SetSlice(self->added, start - self->length,
1864 1864 PyList_GET_SIZE(self->added), NULL);
1865 1865 done:
1866 1866 Py_CLEAR(self->headrevs);
1867 1867 return ret;
1868 1868 }
1869 1869
1870 1870 /*
1871 1871 * Supported ops:
1872 1872 *
1873 1873 * slice deletion
1874 1874 * string assignment (extend node->rev mapping)
1875 1875 * string deletion (shrink node->rev mapping)
1876 1876 */
1877 1877 static int index_assign_subscript(indexObject *self, PyObject *item,
1878 1878 PyObject *value)
1879 1879 {
1880 1880 char *node;
1881 1881 long rev;
1882 1882
1883 1883 if (PySlice_Check(item) && value == NULL)
1884 1884 return index_slice_del(self, item);
1885 1885
1886 1886 if (node_check(item, &node) == -1)
1887 1887 return -1;
1888 1888
1889 1889 if (value == NULL)
1890 1890 return self->nt ? nt_delete_node(self->nt, node) : 0;
1891 1891 rev = PyInt_AsLong(value);
1892 1892 if (rev > INT_MAX || rev < 0) {
1893 1893 if (!PyErr_Occurred())
1894 1894 PyErr_SetString(PyExc_ValueError, "rev out of range");
1895 1895 return -1;
1896 1896 }
1897 1897
1898 1898 if (index_init_nt(self) == -1)
1899 1899 return -1;
1900 1900 return nt_insert(self->nt, node, (int)rev);
1901 1901 }
1902 1902
1903 1903 /*
1904 1904 * Find all RevlogNG entries in an index that has inline data. Update
1905 1905 * the optional "offsets" table with those entries.
1906 1906 */
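/*
 * Editor's note: with inline data each 64-byte header is followed by that
 * revision's compressed chunk, so records sit at irregular offsets and the
 * index must be walked once sequentially to find where each one starts;
 * the offsets table caches those positions for index_deref() above.
 */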
1907 1907 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
1908 1908 {
1909 1909 const char *data = (const char *)self->buf.buf;
1910 1910 Py_ssize_t pos = 0;
1911 1911 Py_ssize_t end = self->buf.len;
1912 1912 long incr = v1_hdrsize;
1913 1913 Py_ssize_t len = 0;
1914 1914
1915 1915 while (pos + v1_hdrsize <= end && pos >= 0) {
1916 1916 uint32_t comp_len;
1917 1917 /* 3rd element of header is length of compressed inline data */
1918 1918 comp_len = getbe32(data + pos + 8);
1919 1919 incr = v1_hdrsize + comp_len;
1920 1920 if (offsets)
1921 1921 offsets[len] = data + pos;
1922 1922 len++;
1923 1923 pos += incr;
1924 1924 }
1925 1925
1926 1926 if (pos != end) {
1927 1927 if (!PyErr_Occurred())
1928 1928 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1929 1929 return -1;
1930 1930 }
1931 1931
1932 1932 return len;
1933 1933 }
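/*
 * Layout note added for clarity (not in the upstream comments): in an
 * inline revlog, each index record is a v1_hdrsize-byte header followed
 * immediately by comp_len bytes of compressed revision data, where
 * comp_len is the big-endian 32-bit field at byte offset 8 of the
 * header.  inline_scan() therefore advances by v1_hdrsize + comp_len
 * per record; finishing anywhere other than the end of the buffer
 * means the index is corrupt.
 */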
1934 1934
1935 1935 static int index_init(indexObject *self, PyObject *args)
1936 1936 {
1937 1937 PyObject *data_obj, *inlined_obj;
1938 1938 Py_ssize_t size;
1939 1939
1940 1940 /* Initialize before argument-checking to avoid index_dealloc() crash. */
1941 1941 self->raw_length = 0;
1942 1942 self->added = NULL;
1943 1943 self->cache = NULL;
1944 1944 self->data = NULL;
1945 1945 memset(&self->buf, 0, sizeof(self->buf));
1946 1946 self->headrevs = NULL;
1947 1947 self->filteredrevs = Py_None;
1948 1948 Py_INCREF(Py_None);
1949 1949 self->nt = NULL;
1950 1950 self->offsets = NULL;
1951 1951
1952 1952 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
1953 1953 return -1;
1954 1954 if (!PyObject_CheckBuffer(data_obj)) {
1955 1955 PyErr_SetString(PyExc_TypeError,
1956 1956 "data does not support buffer interface");
1957 1957 return -1;
1958 1958 }
1959 1959
1960 1960 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
1961 1961 return -1;
1962 1962 size = self->buf.len;
1963 1963
1964 1964 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
1965 1965 self->data = data_obj;
1966 1966
1967 1967 self->ntlookups = self->ntmisses = 0;
1968 1968 self->ntrev = -1;
1969 1969 Py_INCREF(self->data);
1970 1970
1971 1971 if (self->inlined) {
1972 1972 Py_ssize_t len = inline_scan(self, NULL);
1973 1973 if (len == -1)
1974 1974 goto bail;
1975 1975 self->raw_length = len;
1976 1976 self->length = len;
1977 1977 } else {
1978 1978 if (size % v1_hdrsize) {
1979 1979 PyErr_SetString(PyExc_ValueError, "corrupt index file");
1980 1980 goto bail;
1981 1981 }
1982 1982 self->raw_length = size / v1_hdrsize;
1983 1983 self->length = self->raw_length;
1984 1984 }
1985 1985
1986 1986 return 0;
1987 1987 bail:
1988 1988 return -1;
1989 1989 }
1990 1990
1991 1991 static PyObject *index_nodemap(indexObject *self)
1992 1992 {
1993 1993 Py_INCREF(self);
1994 1994 return (PyObject *)self;
1995 1995 }
1996 1996
1997 1997 static void _index_clearcaches(indexObject *self)
1998 1998 {
1999 1999 if (self->cache) {
2000 2000 Py_ssize_t i;
2001 2001
2002 2002 for (i = 0; i < self->raw_length; i++)
2003 2003 Py_CLEAR(self->cache[i]);
2004 2004 free(self->cache);
2005 2005 self->cache = NULL;
2006 2006 }
2007 2007 if (self->offsets) {
2008 2008 PyMem_Free(self->offsets);
2009 2009 self->offsets = NULL;
2010 2010 }
2011 2011 if (self->nt != NULL) {
2012 2012 free(self->nt->nodes);
2013 2013 PyMem_Free(self->nt);
2014 2014 }
2015 2015 self->nt = NULL;
2016 2016 Py_CLEAR(self->headrevs);
2017 2017 }
2018 2018
2019 2019 static PyObject *index_clearcaches(indexObject *self)
2020 2020 {
2021 2021 _index_clearcaches(self);
2022 2022 self->ntrev = -1;
2023 2023 self->ntlookups = self->ntmisses = 0;
2024 2024 Py_RETURN_NONE;
2025 2025 }
2026 2026
2027 2027 static void index_dealloc(indexObject *self)
2028 2028 {
2029 2029 _index_clearcaches(self);
2030 2030 Py_XDECREF(self->filteredrevs);
2031 2031 if (self->buf.buf) {
2032 2032 PyBuffer_Release(&self->buf);
2033 2033 memset(&self->buf, 0, sizeof(self->buf));
2034 2034 }
2035 2035 Py_XDECREF(self->data);
2036 2036 Py_XDECREF(self->added);
2037 2037 PyObject_Del(self);
2038 2038 }
2039 2039
2040 2040 static PySequenceMethods index_sequence_methods = {
2041 2041 (lenfunc)index_length, /* sq_length */
2042 2042 0, /* sq_concat */
2043 2043 0, /* sq_repeat */
2044 2044 (ssizeargfunc)index_get, /* sq_item */
2045 2045 0, /* sq_slice */
2046 2046 0, /* sq_ass_item */
2047 2047 0, /* sq_ass_slice */
2048 2048 (objobjproc)index_contains, /* sq_contains */
2049 2049 };
2050 2050
2051 2051 static PyMappingMethods index_mapping_methods = {
2052 2052 (lenfunc)index_length, /* mp_length */
2053 2053 (binaryfunc)index_getitem, /* mp_subscript */
2054 2054 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2055 2055 };
2056 2056
2057 2057 static PyMethodDef index_methods[] = {
2058 2058 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2059 2059 "return the gca set of the given revs"},
2060 2060 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2061 2061 METH_VARARGS,
2062 2062 "return the heads of the common ancestors of the given revs"},
2063 2063 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2064 2064 "clear the index caches"},
2065 2065 {"get", (PyCFunction)index_m_get, METH_VARARGS,
2066 2066 "get an index entry"},
2067 2067 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
2068 2068 METH_VARARGS, "compute phases"},
2069 2069 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2070 2070 "reachableroots"},
2071 2071 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2072 2072 "get head revisions"}, /* Can do filtering since 3.2 */
2073 2073 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2074 2074 "get filtered head revisions"}, /* Can always do filtering */
2075 2075 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2076 2076 "determine revisions with deltas to reconstruct fulltext"},
2077 2077 {"append", (PyCFunction)index_append, METH_O,
2078 2078 "append an index entry"},
2079 2079 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2080 2080 "match a potentially ambiguous node ID"},
2081 2081 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2082 2082 "find length of shortest hex nodeid of a binary ID"},
2083 2083 {"stats", (PyCFunction)index_stats, METH_NOARGS,
2084 2084 "stats for the index"},
2085 2085 {NULL} /* Sentinel */
2086 2086 };
2087 2087
2088 2088 static PyGetSetDef index_getset[] = {
2089 2089 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2090 2090 {NULL} /* Sentinel */
2091 2091 };
2092 2092
2093 2093 static PyTypeObject indexType = {
2094 2094 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2095 2095 "parsers.index", /* tp_name */
2096 2096 sizeof(indexObject), /* tp_basicsize */
2097 2097 0, /* tp_itemsize */
2098 2098 (destructor)index_dealloc, /* tp_dealloc */
2099 2099 0, /* tp_print */
2100 2100 0, /* tp_getattr */
2101 2101 0, /* tp_setattr */
2102 2102 0, /* tp_compare */
2103 2103 0, /* tp_repr */
2104 2104 0, /* tp_as_number */
2105 2105 &index_sequence_methods, /* tp_as_sequence */
2106 2106 &index_mapping_methods, /* tp_as_mapping */
2107 2107 0, /* tp_hash */
2108 2108 0, /* tp_call */
2109 2109 0, /* tp_str */
2110 2110 0, /* tp_getattro */
2111 2111 0, /* tp_setattro */
2112 2112 0, /* tp_as_buffer */
2113 2113 Py_TPFLAGS_DEFAULT, /* tp_flags */
2114 2114 "revlog index", /* tp_doc */
2115 2115 0, /* tp_traverse */
2116 2116 0, /* tp_clear */
2117 2117 0, /* tp_richcompare */
2118 2118 0, /* tp_weaklistoffset */
2119 2119 0, /* tp_iter */
2120 2120 0, /* tp_iternext */
2121 2121 index_methods, /* tp_methods */
2122 2122 0, /* tp_members */
2123 2123 index_getset, /* tp_getset */
2124 2124 0, /* tp_base */
2125 2125 0, /* tp_dict */
2126 2126 0, /* tp_descr_get */
2127 2127 0, /* tp_descr_set */
2128 2128 0, /* tp_dictoffset */
2129 2129 (initproc)index_init, /* tp_init */
2130 2130 0, /* tp_alloc */
2131 2131 };
2132 2132
2133 2133 /*
2134 2134 * returns a tuple of the form (index, cache) with elements as
2135 2135 * follows:
2136 2136 *
2137 2137 * index: an index object that lazily parses RevlogNG records
2138 2138 * cache: if data is inlined, a tuple (0, index_file_content), else None
2139 2139 * index_file_content could be a string or a buffer
2140 2140 *
2141 2141 * the added complications are for backwards compatibility
2142 2142 */
2143 2143 PyObject *parse_index2(PyObject *self, PyObject *args)
2144 2144 {
2145 2145 PyObject *tuple = NULL, *cache = NULL;
2146 2146 indexObject *idx;
2147 2147 int ret;
2148 2148
2149 2149 idx = PyObject_New(indexObject, &indexType);
2150 2150 if (idx == NULL)
2151 2151 goto bail;
2152 2152
2153 2153 ret = index_init(idx, args);
2154 2154 if (ret == -1)
2155 2155 goto bail;
2156 2156
2157 2157 if (idx->inlined) {
2158 2158 cache = Py_BuildValue("iO", 0, idx->data);
2159 2159 if (cache == NULL)
2160 2160 goto bail;
2161 2161 } else {
2162 2162 cache = Py_None;
2163 2163 Py_INCREF(cache);
2164 2164 }
2165 2165
2166 2166 tuple = Py_BuildValue("NN", idx, cache);
2167 2167 if (!tuple)
2168 2168 goto bail;
2169 2169 return tuple;
2170 2170
2171 2171 bail:
2172 2172 Py_XDECREF(idx);
2173 2173 Py_XDECREF(cache);
2174 2174 Py_XDECREF(tuple);
2175 2175 return NULL;
2176 2176 }
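/*
 * Usage sketch, illustrative only and not part of the upstream module:
 * a C caller could invoke parse_index2() directly as below, mirroring
 * the way the Python side unpacks the returned pair.  "example_parse"
 * is a hypothetical name, and the block is guarded by #if 0 so it is
 * never compiled.
 */
#if 0
static PyObject *example_parse(PyObject *data, PyObject *inlined)
{
	/* roughly "index, cache = parse_index2(data, inlined)" */
	PyObject *args = Py_BuildValue("OO", data, inlined);
	PyObject *pair = NULL;

	if (args != NULL) {
		pair = parse_index2(NULL, args);
		Py_DECREF(args);
	}
	return pair; /* (index, cache) on success, NULL with exception set */
}
#endif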
2177 2177
2178 2178 void revlog_module_init(PyObject *mod)
2179 2179 {
2180 2180 indexType.tp_new = PyType_GenericNew;
2181 2181 if (PyType_Ready(&indexType) < 0)
2182 2182 return;
2183 2183 Py_INCREF(&indexType);
2184 2184 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2185 2185
2186 2186 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
2187 2187 -1, -1, -1, -1, nullid, 20);
2188 2188 if (nullentry)
2189 2189 PyObject_GC_UnTrack(nullentry);
2190 2190 }