cext: fix truncation warnings in revlog on Windows
Matt Harbison
r39264:66f04611 default
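The diff below adds explicit casts where wider integers (Py_ssize_t, long) are stored into narrower types such as char and int, which is what triggers truncation warnings under the Windows compiler (typically MSVC's "conversion ..., possible loss of data"). A minimal standalone sketch of the pattern, using illustrative names only rather than code from revlog.c:

/*
 * Sketch of the warning being silenced: assigning a 64-bit value to a
 * narrower type without a cast. The explicit cast documents that the
 * value is known to fit, so no data is actually lost.
 */
#include <stddef.h>

static char phase_marker(ptrdiff_t marker)
{
	return (char)marker; /* marker is a small phase number, so this is safe */
}

static int rev_as_int(ptrdiff_t rev)
{
	return (int)rev; /* revision numbers fit in an int in this code base */
}

int main(void)
{
	return (phase_marker(2) == 2 && rev_as_int(7) == 7) ? 0 : 1;
}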
@@ -1,2306 +1,2306 @@
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
20 20 #ifdef IS_PY3K
21 21 /* The mapping of Python types is meant to be temporary to get Python
22 22 * 3 to compile. We should remove this once Python 3 is fully
23 23 * supported and proper types are used in the extensions themselves. */
24 24 #define PyInt_Check PyLong_Check
25 25 #define PyInt_FromLong PyLong_FromLong
26 26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 27 #define PyInt_AS_LONG PyLong_AS_LONG
28 28 #define PyInt_AsLong PyLong_AsLong
29 29 #endif
30 30
31 31 typedef struct indexObjectStruct indexObject;
32 32
33 33 typedef struct {
34 34 int children[16];
35 35 } nodetreenode;
36 36
37 37 /*
38 38 * A base-16 trie for fast node->rev mapping.
39 39 *
40 40 * Positive value is index of the next node in the trie
41 41 * Negative value is a leaf: -(rev + 2)
42 42 * Zero is empty
43 43 */
44 44 typedef struct {
45 45 PyObject_HEAD
46 46 indexObject *index;
47 47 nodetreenode *nodes;
48 48 unsigned length; /* # nodes in use */
49 49 unsigned capacity; /* # nodes allocated */
50 50 int depth; /* maximum depth of tree */
51 51 int splits; /* # splits performed */
52 52 } nodetree;
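/*
 * Editorial sketch, not part of the original file: the leaf encoding the
 * trie comment above describes. A rev r is stored as -(r + 2), so 0 stays
 * free to mean "empty slot" and the null revision (-1) remains encodable;
 * note that encoding rev -2 yields 0, which nt_delete_node() relies on to
 * mark an entry as unset. The helper names are illustrative only.
 */
static int nt_encode_leaf(int rev) { return -rev - 2; }
static int nt_decode_leaf(int v)   { return -(v + 2); }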
53 53
54 54 /*
55 55 * This class has two behaviors.
56 56 *
57 57 * When used in a list-like way (with integer keys), we decode an
58 58 * entry in a RevlogNG index file on demand. Our last entry is a
59 59 * sentinel, always a nullid. We have limited support for
60 60 * integer-keyed insert and delete, only at elements right before the
61 61 * sentinel.
62 62 *
63 63 * With string keys, we lazily perform a reverse mapping from node to
64 64 * rev, using a base-16 trie.
65 65 */
66 66 struct indexObjectStruct {
67 67 PyObject_HEAD
68 68 /* Type-specific fields go here. */
69 69 PyObject *data; /* raw bytes of index */
70 70 Py_buffer buf; /* buffer of data */
71 71 PyObject **cache; /* cached tuples */
72 72 const char **offsets; /* populated on demand */
73 73 Py_ssize_t raw_length; /* original number of elements */
74 74 Py_ssize_t length; /* current number of elements */
75 75 PyObject *added; /* populated on demand */
76 76 PyObject *headrevs; /* cache, invalidated on changes */
77 77 PyObject *filteredrevs;/* filtered revs set */
78 78 nodetree *nt; /* base-16 trie */
79 79 int ntrev; /* last rev scanned */
80 80 int ntlookups; /* # lookups */
81 81 int ntmisses; /* # lookups that miss the cache */
82 82 int inlined;
83 83 };
84 84
85 85 static Py_ssize_t index_length(const indexObject *self)
86 86 {
87 87 if (self->added == NULL)
88 88 return self->length;
89 89 return self->length + PyList_GET_SIZE(self->added);
90 90 }
91 91
92 92 static PyObject *nullentry;
93 93 static const char nullid[20];
94 94
95 95 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
96 96
97 97 #if LONG_MAX == 0x7fffffffL
98 98 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
99 99 #else
100 100 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
101 101 #endif
102 102
103 103 /* A RevlogNG v1 index entry is 64 bytes long. */
104 104 static const long v1_hdrsize = 64;
105 105
106 106 static void raise_revlog_error(void)
107 107 {
108 108 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
109 109
110 110 mod = PyImport_ImportModule("mercurial.error");
111 111 if (mod == NULL) {
112 112 goto cleanup;
113 113 }
114 114
115 115 dict = PyModule_GetDict(mod);
116 116 if (dict == NULL) {
117 117 goto cleanup;
118 118 }
119 119 Py_INCREF(dict);
120 120
121 121 errclass = PyDict_GetItemString(dict, "RevlogError");
122 122 if (errclass == NULL) {
123 123 PyErr_SetString(PyExc_SystemError,
124 124 "could not find RevlogError");
125 125 goto cleanup;
126 126 }
127 127
128 128 /* value of exception is ignored by callers */
129 129 PyErr_SetString(errclass, "RevlogError");
130 130
131 131 cleanup:
132 132 Py_XDECREF(dict);
133 133 Py_XDECREF(mod);
134 134 }
135 135
136 136 /*
137 137 * Return a pointer to the beginning of a RevlogNG record.
138 138 */
139 139 static const char *index_deref(indexObject *self, Py_ssize_t pos)
140 140 {
141 141 if (self->inlined && pos > 0) {
142 142 if (self->offsets == NULL) {
143 143 self->offsets = PyMem_Malloc(self->raw_length *
144 144 sizeof(*self->offsets));
145 145 if (self->offsets == NULL)
146 146 return (const char *)PyErr_NoMemory();
147 147 inline_scan(self, self->offsets);
148 148 }
149 149 return self->offsets[pos];
150 150 }
151 151
152 152 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
153 153 }
154 154
155 155 static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
156 156 int *ps, int maxrev)
157 157 {
158 158 if (rev >= self->length) {
159 159 PyObject *tuple = PyList_GET_ITEM(self->added, rev - self->length);
160 160 ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
161 161 ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
162 162 } else {
163 163 const char *data = index_deref(self, rev);
164 164 ps[0] = getbe32(data + 24);
165 165 ps[1] = getbe32(data + 28);
166 166 }
167 167 /* If the index file is corrupted, ps[] may point to invalid revisions. So
168 168 * there is a risk of buffer overflow if we trust them unconditionally. */
169 169 if (ps[0] > maxrev || ps[1] > maxrev) {
170 170 PyErr_SetString(PyExc_ValueError, "parent out of range");
171 171 return -1;
172 172 }
173 173 return 0;
174 174 }
175 175
176 176
177 177 /*
178 178 * RevlogNG format (all in big endian, data may be inlined):
179 179 * 6 bytes: offset
180 180 * 2 bytes: flags
181 181 * 4 bytes: compressed length
182 182 * 4 bytes: uncompressed length
183 183 * 4 bytes: base revision
184 184 * 4 bytes: link revision
185 185 * 4 bytes: parent 1 revision
186 186 * 4 bytes: parent 2 revision
187 187 * 32 bytes: nodeid (only 20 bytes used)
188 188 */
189 189 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
190 190 {
191 191 uint64_t offset_flags;
192 192 int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
193 193 const char *c_node_id;
194 194 const char *data;
195 195 Py_ssize_t length = index_length(self);
196 196 PyObject *entry;
197 197
198 198 if (pos == -1) {
199 199 Py_INCREF(nullentry);
200 200 return nullentry;
201 201 }
202 202
203 203 if (pos < 0 || pos >= length) {
204 204 PyErr_SetString(PyExc_IndexError, "revlog index out of range");
205 205 return NULL;
206 206 }
207 207
208 208 if (pos >= self->length) {
209 209 PyObject *obj;
210 210 obj = PyList_GET_ITEM(self->added, pos - self->length);
211 211 Py_INCREF(obj);
212 212 return obj;
213 213 }
214 214
215 215 if (self->cache) {
216 216 if (self->cache[pos]) {
217 217 Py_INCREF(self->cache[pos]);
218 218 return self->cache[pos];
219 219 }
220 220 } else {
221 221 self->cache = calloc(self->raw_length, sizeof(PyObject *));
222 222 if (self->cache == NULL)
223 223 return PyErr_NoMemory();
224 224 }
225 225
226 226 data = index_deref(self, pos);
227 227 if (data == NULL)
228 228 return NULL;
229 229
230 230 offset_flags = getbe32(data + 4);
231 231 if (pos == 0) /* mask out version number for the first entry */
232 232 offset_flags &= 0xFFFF;
233 233 else {
234 234 uint32_t offset_high = getbe32(data);
235 235 offset_flags |= ((uint64_t)offset_high) << 32;
236 236 }
237 237
238 238 comp_len = getbe32(data + 8);
239 239 uncomp_len = getbe32(data + 12);
240 240 base_rev = getbe32(data + 16);
241 241 link_rev = getbe32(data + 20);
242 242 parent_1 = getbe32(data + 24);
243 243 parent_2 = getbe32(data + 28);
244 244 c_node_id = data + 32;
245 245
246 246 entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
247 247 uncomp_len, base_rev, link_rev,
248 248 parent_1, parent_2, c_node_id, 20);
249 249
250 250 if (entry) {
251 251 PyObject_GC_UnTrack(entry);
252 252 Py_INCREF(entry);
253 253 }
254 254
255 255 self->cache[pos] = entry;
256 256
257 257 return entry;
258 258 }
259 259
260 260 /*
261 261 * Return the 20-byte SHA of the node corresponding to the given rev.
262 262 */
263 263 static const char *index_node(indexObject *self, Py_ssize_t pos)
264 264 {
265 265 Py_ssize_t length = index_length(self);
266 266 const char *data;
267 267
268 268 if (pos == -1)
269 269 return nullid;
270 270
271 271 if (pos >= length)
272 272 return NULL;
273 273
274 274 if (pos >= self->length) {
275 275 PyObject *tuple, *str;
276 276 tuple = PyList_GET_ITEM(self->added, pos - self->length);
277 277 str = PyTuple_GetItem(tuple, 7);
278 278 return str ? PyBytes_AS_STRING(str) : NULL;
279 279 }
280 280
281 281 data = index_deref(self, pos);
282 282 return data ? data + 32 : NULL;
283 283 }
284 284
285 285 /*
286 286 * Return the 20-byte SHA of the node corresponding to the given rev. The
287 287 * rev is assumed to exist. If not, an exception is set.
288 288 */
289 289 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
290 290 {
291 291 const char *node = index_node(self, pos);
292 292 if (node == NULL) {
293 293 PyErr_Format(PyExc_IndexError, "could not access rev %d",
294 294 (int)pos);
295 295 }
296 296 return node;
297 297 }
298 298
299 299 static int nt_insert(nodetree *self, const char *node, int rev);
300 300
301 301 static int node_check(PyObject *obj, char **node)
302 302 {
303 303 Py_ssize_t nodelen;
304 304 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
305 305 return -1;
306 306 if (nodelen == 20)
307 307 return 0;
308 308 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
309 309 return -1;
310 310 }
311 311
312 312 static PyObject *index_append(indexObject *self, PyObject *obj)
313 313 {
314 314 char *node;
315 315 Py_ssize_t len;
316 316
317 317 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
318 318 PyErr_SetString(PyExc_TypeError, "8-tuple required");
319 319 return NULL;
320 320 }
321 321
322 322 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
323 323 return NULL;
324 324
325 325 len = index_length(self);
326 326
327 327 if (self->added == NULL) {
328 328 self->added = PyList_New(0);
329 329 if (self->added == NULL)
330 330 return NULL;
331 331 }
332 332
333 333 if (PyList_Append(self->added, obj) == -1)
334 334 return NULL;
335 335
336 336 if (self->nt)
337 337 nt_insert(self->nt, node, (int)len);
338 338
339 339 Py_CLEAR(self->headrevs);
340 340 Py_RETURN_NONE;
341 341 }
342 342
343 343 static PyObject *index_stats(indexObject *self)
344 344 {
345 345 PyObject *obj = PyDict_New();
346 346 PyObject *t = NULL;
347 347
348 348 if (obj == NULL)
349 349 return NULL;
350 350
351 351 #define istat(__n, __d) \
352 352 do { \
353 353 t = PyInt_FromSsize_t(self->__n); \
354 354 if (!t) \
355 355 goto bail; \
356 356 if (PyDict_SetItemString(obj, __d, t) == -1) \
357 357 goto bail; \
358 358 Py_DECREF(t); \
359 359 } while (0)
360 360
361 361 if (self->added) {
362 362 Py_ssize_t len = PyList_GET_SIZE(self->added);
363 363 t = PyInt_FromSsize_t(len);
364 364 if (!t)
365 365 goto bail;
366 366 if (PyDict_SetItemString(obj, "index entries added", t) == -1)
367 367 goto bail;
368 368 Py_DECREF(t);
369 369 }
370 370
371 371 if (self->raw_length != self->length)
372 372 istat(raw_length, "revs on disk");
373 373 istat(length, "revs in memory");
374 374 istat(ntlookups, "node trie lookups");
375 375 istat(ntmisses, "node trie misses");
376 376 istat(ntrev, "node trie last rev scanned");
377 377 if (self->nt) {
378 378 istat(nt->capacity, "node trie capacity");
379 379 istat(nt->depth, "node trie depth");
380 380 istat(nt->length, "node trie count");
381 381 istat(nt->splits, "node trie splits");
382 382 }
383 383
384 384 #undef istat
385 385
386 386 return obj;
387 387
388 388 bail:
389 389 Py_XDECREF(obj);
390 390 Py_XDECREF(t);
391 391 return NULL;
392 392 }
393 393
394 394 /*
395 395 * When we cache a list, we want to be sure the caller can't mutate
396 396 * the cached copy.
397 397 */
398 398 static PyObject *list_copy(PyObject *list)
399 399 {
400 400 Py_ssize_t len = PyList_GET_SIZE(list);
401 401 PyObject *newlist = PyList_New(len);
402 402 Py_ssize_t i;
403 403
404 404 if (newlist == NULL)
405 405 return NULL;
406 406
407 407 for (i = 0; i < len; i++) {
408 408 PyObject *obj = PyList_GET_ITEM(list, i);
409 409 Py_INCREF(obj);
410 410 PyList_SET_ITEM(newlist, i, obj);
411 411 }
412 412
413 413 return newlist;
414 414 }
415 415
416 416 static int check_filter(PyObject *filter, Py_ssize_t arg)
417 417 {
418 418 if (filter) {
419 419 PyObject *arglist, *result;
420 420 int isfiltered;
421 421
422 422 arglist = Py_BuildValue("(n)", arg);
423 423 if (!arglist) {
424 424 return -1;
425 425 }
426 426
427 427 result = PyEval_CallObject(filter, arglist);
428 428 Py_DECREF(arglist);
429 429 if (!result) {
430 430 return -1;
431 431 }
432 432
433 433 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
434 434 * same as this function, so we can just return it directly. */
435 435 isfiltered = PyObject_IsTrue(result);
436 436 Py_DECREF(result);
437 437 return isfiltered;
438 438 } else {
439 439 return 0;
440 440 }
441 441 }
442 442
443 443 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
444 444 Py_ssize_t marker, char *phases)
445 445 {
446 446 PyObject *iter = NULL;
447 447 PyObject *iter_item = NULL;
448 448 Py_ssize_t min_idx = index_length(self) + 2;
449 449 long iter_item_long;
450 450
451 451 if (PyList_GET_SIZE(list) != 0) {
452 452 iter = PyObject_GetIter(list);
453 453 if (iter == NULL)
454 454 return -2;
455 455 while ((iter_item = PyIter_Next(iter))) {
456 456 iter_item_long = PyInt_AS_LONG(iter_item);
457 457 Py_DECREF(iter_item);
458 458 if (iter_item_long < min_idx)
459 459 min_idx = iter_item_long;
460 phases[iter_item_long] = marker;
460 phases[iter_item_long] = (char)marker;
461 461 }
462 462 Py_DECREF(iter);
463 463 }
464 464
465 465 return min_idx;
466 466 }
467 467
468 468 static inline void set_phase_from_parents(char *phases, int parent_1,
469 469 int parent_2, Py_ssize_t i)
470 470 {
471 471 if (parent_1 >= 0 && phases[parent_1] > phases[i])
472 472 phases[i] = phases[parent_1];
473 473 if (parent_2 >= 0 && phases[parent_2] > phases[i])
474 474 phases[i] = phases[parent_2];
475 475 }
476 476
477 477 static PyObject *reachableroots2(indexObject *self, PyObject *args)
478 478 {
479 479
480 480 /* Input */
481 481 long minroot;
482 482 PyObject *includepatharg = NULL;
483 483 int includepath = 0;
484 484 /* heads and roots are lists */
485 485 PyObject *heads = NULL;
486 486 PyObject *roots = NULL;
487 487 PyObject *reachable = NULL;
488 488
489 489 PyObject *val;
490 490 Py_ssize_t len = index_length(self);
491 491 long revnum;
492 492 Py_ssize_t k;
493 493 Py_ssize_t i;
494 494 Py_ssize_t l;
495 495 int r;
496 496 int parents[2];
497 497
498 498 /* Internal data structure:
499 499 * tovisit: array of length len+1 (all revs + nullrev), filled up to lentovisit
500 500 * revstates: array of length len+1 (all revs + nullrev) */
501 501 int *tovisit = NULL;
502 502 long lentovisit = 0;
503 503 enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
504 504 char *revstates = NULL;
505 505
506 506 /* Get arguments */
507 507 if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
508 508 &PyList_Type, &roots,
509 509 &PyBool_Type, &includepatharg))
510 510 goto bail;
511 511
512 512 if (includepatharg == Py_True)
513 513 includepath = 1;
514 514
515 515 /* Initialize return set */
516 516 reachable = PyList_New(0);
517 517 if (reachable == NULL)
518 518 goto bail;
519 519
520 520 /* Initialize internal datastructures */
521 521 tovisit = (int *)malloc((len + 1) * sizeof(int));
522 522 if (tovisit == NULL) {
523 523 PyErr_NoMemory();
524 524 goto bail;
525 525 }
526 526
527 527 revstates = (char *)calloc(len + 1, 1);
528 528 if (revstates == NULL) {
529 529 PyErr_NoMemory();
530 530 goto bail;
531 531 }
532 532
533 533 l = PyList_GET_SIZE(roots);
534 534 for (i = 0; i < l; i++) {
535 535 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
536 536 if (revnum == -1 && PyErr_Occurred())
537 537 goto bail;
538 538 /* If root is out of range, e.g. wdir(), it must be unreachable
539 539 * from heads. So we can just ignore it. */
540 540 if (revnum + 1 < 0 || revnum + 1 >= len + 1)
541 541 continue;
542 542 revstates[revnum + 1] |= RS_ROOT;
543 543 }
544 544
545 545 /* Populate tovisit with all the heads */
546 546 l = PyList_GET_SIZE(heads);
547 547 for (i = 0; i < l; i++) {
548 548 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
549 549 if (revnum == -1 && PyErr_Occurred())
550 550 goto bail;
551 551 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
552 552 PyErr_SetString(PyExc_IndexError, "head out of range");
553 553 goto bail;
554 554 }
555 555 if (!(revstates[revnum + 1] & RS_SEEN)) {
556 556 tovisit[lentovisit++] = (int)revnum;
557 557 revstates[revnum + 1] |= RS_SEEN;
558 558 }
559 559 }
560 560
561 561 /* Visit the tovisit list and find the reachable roots */
562 562 k = 0;
563 563 while (k < lentovisit) {
564 564 /* Add the node to reachable if it is a root*/
565 565 revnum = tovisit[k++];
566 566 if (revstates[revnum + 1] & RS_ROOT) {
567 567 revstates[revnum + 1] |= RS_REACHABLE;
568 568 val = PyInt_FromLong(revnum);
569 569 if (val == NULL)
570 570 goto bail;
571 571 r = PyList_Append(reachable, val);
572 572 Py_DECREF(val);
573 573 if (r < 0)
574 574 goto bail;
575 575 if (includepath == 0)
576 576 continue;
577 577 }
578 578
579 579 /* Add its parents to the list of nodes to visit */
580 580 if (revnum == -1)
581 581 continue;
582 582 r = index_get_parents(self, revnum, parents, (int)len - 1);
583 583 if (r < 0)
584 584 goto bail;
585 585 for (i = 0; i < 2; i++) {
586 586 if (!(revstates[parents[i] + 1] & RS_SEEN)
587 587 && parents[i] >= minroot) {
588 588 tovisit[lentovisit++] = parents[i];
589 589 revstates[parents[i] + 1] |= RS_SEEN;
590 590 }
591 591 }
592 592 }
593 593
594 594 /* Find all the nodes in between the roots we found and the heads
595 595 * and add them to the reachable set */
596 596 if (includepath == 1) {
597 597 long minidx = minroot;
598 598 if (minidx < 0)
599 599 minidx = 0;
600 600 for (i = minidx; i < len; i++) {
601 601 if (!(revstates[i + 1] & RS_SEEN))
602 602 continue;
603 603 r = index_get_parents(self, i, parents, (int)len - 1);
604 604 /* Corrupted index file, error is set from
605 605 * index_get_parents */
606 606 if (r < 0)
607 607 goto bail;
608 608 if (((revstates[parents[0] + 1] |
609 609 revstates[parents[1] + 1]) & RS_REACHABLE)
610 610 && !(revstates[i + 1] & RS_REACHABLE)) {
611 611 revstates[i + 1] |= RS_REACHABLE;
612 612 val = PyInt_FromSsize_t(i);
613 613 if (val == NULL)
614 614 goto bail;
615 615 r = PyList_Append(reachable, val);
616 616 Py_DECREF(val);
617 617 if (r < 0)
618 618 goto bail;
619 619 }
620 620 }
621 621 }
622 622
623 623 free(revstates);
624 624 free(tovisit);
625 625 return reachable;
626 626 bail:
627 627 Py_XDECREF(reachable);
628 628 free(revstates);
629 629 free(tovisit);
630 630 return NULL;
631 631 }
632 632
633 633 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
634 634 {
635 635 PyObject *roots = Py_None;
636 636 PyObject *ret = NULL;
637 637 PyObject *phasessize = NULL;
638 638 PyObject *phaseroots = NULL;
639 639 PyObject *phaseset = NULL;
640 640 PyObject *phasessetlist = NULL;
641 641 PyObject *rev = NULL;
642 642 Py_ssize_t len = index_length(self);
643 643 Py_ssize_t numphase = 0;
644 644 Py_ssize_t minrevallphases = 0;
645 645 Py_ssize_t minrevphase = 0;
646 646 Py_ssize_t i = 0;
647 647 char *phases = NULL;
648 648 long phase;
649 649
650 650 if (!PyArg_ParseTuple(args, "O", &roots))
651 651 goto done;
652 652 if (roots == NULL || !PyList_Check(roots)) {
653 653 PyErr_SetString(PyExc_TypeError, "roots must be a list");
654 654 goto done;
655 655 }
656 656
657 657 phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
658 658 if (phases == NULL) {
659 659 PyErr_NoMemory();
660 660 goto done;
661 661 }
662 662 /* Put the phase information of all the roots in phases */
663 663 numphase = PyList_GET_SIZE(roots)+1;
664 664 minrevallphases = len + 1;
665 665 phasessetlist = PyList_New(numphase);
666 666 if (phasessetlist == NULL)
667 667 goto done;
668 668
669 669 PyList_SET_ITEM(phasessetlist, 0, Py_None);
670 670 Py_INCREF(Py_None);
671 671
672 672 for (i = 0; i < numphase-1; i++) {
673 673 phaseroots = PyList_GET_ITEM(roots, i);
674 674 phaseset = PySet_New(NULL);
675 675 if (phaseset == NULL)
676 676 goto release;
677 677 PyList_SET_ITEM(phasessetlist, i+1, phaseset);
678 678 if (!PyList_Check(phaseroots)) {
679 679 PyErr_SetString(PyExc_TypeError,
680 680 "roots item must be a list");
681 681 goto release;
682 682 }
683 683 minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
684 684 if (minrevphase == -2) /* Error from add_roots_get_min */
685 685 goto release;
686 686 minrevallphases = MIN(minrevallphases, minrevphase);
687 687 }
688 688 /* Propagate the phase information from the roots to the revs */
689 689 if (minrevallphases != -1) {
690 690 int parents[2];
691 691 for (i = minrevallphases; i < len; i++) {
692 692 if (index_get_parents(self, i, parents,
693 693 (int)len - 1) < 0)
694 694 goto release;
695 695 set_phase_from_parents(phases, parents[0], parents[1], i);
696 696 }
697 697 }
698 698 /* Transform phase list to a python list */
699 699 phasessize = PyInt_FromSsize_t(len);
700 700 if (phasessize == NULL)
701 701 goto release;
702 702 for (i = 0; i < len; i++) {
703 703 phase = phases[i];
704 704 /* We only store the phase sets for non-public phases; the public phase
705 705 * is computed as a difference */
706 706 if (phase != 0) {
707 707 phaseset = PyList_GET_ITEM(phasessetlist, phase);
708 708 rev = PyInt_FromSsize_t(i);
709 709 if (rev == NULL)
710 710 goto release;
711 711 PySet_Add(phaseset, rev);
712 712 Py_XDECREF(rev);
713 713 }
714 714 }
715 715 ret = PyTuple_Pack(2, phasessize, phasessetlist);
716 716
717 717 release:
718 718 Py_XDECREF(phasessize);
719 719 Py_XDECREF(phasessetlist);
720 720 done:
721 721 free(phases);
722 722 return ret;
723 723 }
724 724
725 725 static PyObject *index_headrevs(indexObject *self, PyObject *args)
726 726 {
727 727 Py_ssize_t i, j, len;
728 728 char *nothead = NULL;
729 729 PyObject *heads = NULL;
730 730 PyObject *filter = NULL;
731 731 PyObject *filteredrevs = Py_None;
732 732
733 733 if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
734 734 return NULL;
735 735 }
736 736
737 737 if (self->headrevs && filteredrevs == self->filteredrevs)
738 738 return list_copy(self->headrevs);
739 739
740 740 Py_DECREF(self->filteredrevs);
741 741 self->filteredrevs = filteredrevs;
742 742 Py_INCREF(filteredrevs);
743 743
744 744 if (filteredrevs != Py_None) {
745 745 filter = PyObject_GetAttrString(filteredrevs, "__contains__");
746 746 if (!filter) {
747 747 PyErr_SetString(PyExc_TypeError,
748 748 "filteredrevs has no attribute __contains__");
749 749 goto bail;
750 750 }
751 751 }
752 752
753 753 len = index_length(self);
754 754 heads = PyList_New(0);
755 755 if (heads == NULL)
756 756 goto bail;
757 757 if (len == 0) {
758 758 PyObject *nullid = PyInt_FromLong(-1);
759 759 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
760 760 Py_XDECREF(nullid);
761 761 goto bail;
762 762 }
763 763 goto done;
764 764 }
765 765
766 766 nothead = calloc(len, 1);
767 767 if (nothead == NULL) {
768 768 PyErr_NoMemory();
769 769 goto bail;
770 770 }
771 771
772 772 for (i = len - 1; i >= 0; i--) {
773 773 int isfiltered;
774 774 int parents[2];
775 775
776 776 /* If nothead[i] == 1, it means we've seen an unfiltered child of this
777 777 * node already, and therefore this node is not filtered. So we can skip
778 778 * the expensive check_filter step.
779 779 */
780 780 if (nothead[i] != 1) {
781 781 isfiltered = check_filter(filter, i);
782 782 if (isfiltered == -1) {
783 783 PyErr_SetString(PyExc_TypeError,
784 784 "unable to check filter");
785 785 goto bail;
786 786 }
787 787
788 788 if (isfiltered) {
789 789 nothead[i] = 1;
790 790 continue;
791 791 }
792 792 }
793 793
794 794 if (index_get_parents(self, i, parents, (int)len - 1) < 0)
795 795 goto bail;
796 796 for (j = 0; j < 2; j++) {
797 797 if (parents[j] >= 0)
798 798 nothead[parents[j]] = 1;
799 799 }
800 800 }
801 801
802 802 for (i = 0; i < len; i++) {
803 803 PyObject *head;
804 804
805 805 if (nothead[i])
806 806 continue;
807 807 head = PyInt_FromSsize_t(i);
808 808 if (head == NULL || PyList_Append(heads, head) == -1) {
809 809 Py_XDECREF(head);
810 810 goto bail;
811 811 }
812 812 }
813 813
814 814 done:
815 815 self->headrevs = heads;
816 816 Py_XDECREF(filter);
817 817 free(nothead);
818 818 return list_copy(self->headrevs);
819 819 bail:
820 820 Py_XDECREF(filter);
821 821 Py_XDECREF(heads);
822 822 free(nothead);
823 823 return NULL;
824 824 }
825 825
826 826 /**
827 827 * Obtain the base revision field of an index entry.
828 828 *
829 829 * Callers must ensure that rev >= 0 or illegal memory access may occur.
830 830 */
831 831 static inline int index_baserev(indexObject *self, int rev)
832 832 {
833 833 const char *data;
834 834
835 835 if (rev >= self->length) {
836 836 PyObject *tuple = PyList_GET_ITEM(self->added, rev - self->length);
837 837 return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
838 838 }
839 839 else {
840 840 data = index_deref(self, rev);
841 841 if (data == NULL) {
842 842 return -2;
843 843 }
844 844
845 845 return getbe32(data + 16);
846 846 }
847 847 }
848 848
849 849 static PyObject *index_deltachain(indexObject *self, PyObject *args)
850 850 {
851 851 int rev, generaldelta;
852 852 PyObject *stoparg;
853 853 int stoprev, iterrev, baserev = -1;
854 854 int stopped;
855 855 PyObject *chain = NULL, *result = NULL;
856 856 const Py_ssize_t length = index_length(self);
857 857
858 858 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
859 859 return NULL;
860 860 }
861 861
862 862 if (PyInt_Check(stoparg)) {
863 863 stoprev = (int)PyInt_AsLong(stoparg);
864 864 if (stoprev == -1 && PyErr_Occurred()) {
865 865 return NULL;
866 866 }
867 867 }
868 868 else if (stoparg == Py_None) {
869 869 stoprev = -2;
870 870 }
871 871 else {
872 872 PyErr_SetString(PyExc_ValueError,
873 873 "stoprev must be integer or None");
874 874 return NULL;
875 875 }
876 876
877 877 if (rev < 0 || rev >= length) {
878 878 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
879 879 return NULL;
880 880 }
881 881
882 882 chain = PyList_New(0);
883 883 if (chain == NULL) {
884 884 return NULL;
885 885 }
886 886
887 887 baserev = index_baserev(self, rev);
888 888
889 889 /* This should never happen. */
890 890 if (baserev <= -2) {
891 891 /* Error should be set by index_deref() */
892 892 assert(PyErr_Occurred());
893 893 goto bail;
894 894 }
895 895
896 896 iterrev = rev;
897 897
898 898 while (iterrev != baserev && iterrev != stoprev) {
899 899 PyObject *value = PyInt_FromLong(iterrev);
900 900 if (value == NULL) {
901 901 goto bail;
902 902 }
903 903 if (PyList_Append(chain, value)) {
904 904 Py_DECREF(value);
905 905 goto bail;
906 906 }
907 907 Py_DECREF(value);
908 908
909 909 if (generaldelta) {
910 910 iterrev = baserev;
911 911 }
912 912 else {
913 913 iterrev--;
914 914 }
915 915
916 916 if (iterrev < 0) {
917 917 break;
918 918 }
919 919
920 920 if (iterrev >= length) {
921 921 PyErr_SetString(PyExc_IndexError, "revision outside index");
922 922 return NULL;
923 923 }
924 924
925 925 baserev = index_baserev(self, iterrev);
926 926
927 927 /* This should never happen. */
928 928 if (baserev <= -2) {
929 929 /* Error should be set by index_deref() */
930 930 assert(PyErr_Occurred());
931 931 goto bail;
932 932 }
933 933 }
934 934
935 935 if (iterrev == stoprev) {
936 936 stopped = 1;
937 937 }
938 938 else {
939 939 PyObject *value = PyInt_FromLong(iterrev);
940 940 if (value == NULL) {
941 941 goto bail;
942 942 }
943 943 if (PyList_Append(chain, value)) {
944 944 Py_DECREF(value);
945 945 goto bail;
946 946 }
947 947 Py_DECREF(value);
948 948
949 949 stopped = 0;
950 950 }
951 951
952 952 if (PyList_Reverse(chain)) {
953 953 goto bail;
954 954 }
955 955
956 956 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
957 957 Py_DECREF(chain);
958 958 return result;
959 959
960 960 bail:
961 961 Py_DECREF(chain);
962 962 return NULL;
963 963 }
964 964
965 965 static inline int nt_level(const char *node, Py_ssize_t level)
966 966 {
967 967 int v = node[level>>1];
968 968 if (!(level & 1))
969 969 v >>= 4;
970 970 return v & 0xf;
971 971 }
972 972
973 973 /*
974 974 * Return values:
975 975 *
976 976 * -4: match is ambiguous (multiple candidates)
977 977 * -2: not found
978 978 * rest: valid rev
979 979 */
980 980 static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
981 981 int hex)
982 982 {
983 983 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
984 984 int level, maxlevel, off;
985 985
986 986 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
987 987 return -1;
988 988
989 989 if (hex)
990 990 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
991 991 else
992 992 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
993 993
994 994 for (level = off = 0; level < maxlevel; level++) {
995 995 int k = getnybble(node, level);
996 996 nodetreenode *n = &self->nodes[off];
997 997 int v = n->children[k];
998 998
999 999 if (v < 0) {
1000 1000 const char *n;
1001 1001 Py_ssize_t i;
1002 1002
1003 1003 v = -(v + 2);
1004 1004 n = index_node(self->index, v);
1005 1005 if (n == NULL)
1006 1006 return -2;
1007 1007 for (i = level; i < maxlevel; i++)
1008 1008 if (getnybble(node, i) != nt_level(n, i))
1009 1009 return -2;
1010 1010 return v;
1011 1011 }
1012 1012 if (v == 0)
1013 1013 return -2;
1014 1014 off = v;
1015 1015 }
1016 1016 /* multiple matches against an ambiguous prefix */
1017 1017 return -4;
1018 1018 }
1019 1019
1020 1020 static int nt_new(nodetree *self)
1021 1021 {
1022 1022 if (self->length == self->capacity) {
1023 1023 unsigned newcapacity;
1024 1024 nodetreenode *newnodes;
1025 1025 newcapacity = self->capacity * 2;
1026 1026 if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
1027 1027 PyErr_SetString(PyExc_MemoryError, "overflow in nt_new");
1028 1028 return -1;
1029 1029 }
1030 1030 newnodes = realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1031 1031 if (newnodes == NULL) {
1032 1032 PyErr_SetString(PyExc_MemoryError, "out of memory");
1033 1033 return -1;
1034 1034 }
1035 1035 self->capacity = newcapacity;
1036 1036 self->nodes = newnodes;
1037 1037 memset(&self->nodes[self->length], 0,
1038 1038 sizeof(nodetreenode) * (self->capacity - self->length));
1039 1039 }
1040 1040 return self->length++;
1041 1041 }
1042 1042
1043 1043 static int nt_insert(nodetree *self, const char *node, int rev)
1044 1044 {
1045 1045 int level = 0;
1046 1046 int off = 0;
1047 1047
1048 1048 while (level < 40) {
1049 1049 int k = nt_level(node, level);
1050 1050 nodetreenode *n;
1051 1051 int v;
1052 1052
1053 1053 n = &self->nodes[off];
1054 1054 v = n->children[k];
1055 1055
1056 1056 if (v == 0) {
1057 1057 n->children[k] = -rev - 2;
1058 1058 return 0;
1059 1059 }
1060 1060 if (v < 0) {
1061 1061 const char *oldnode = index_node_existing(self->index, -(v + 2));
1062 1062 int noff;
1063 1063
1064 1064 if (oldnode == NULL)
1065 1065 return -1;
1066 1066 if (!memcmp(oldnode, node, 20)) {
1067 1067 n->children[k] = -rev - 2;
1068 1068 return 0;
1069 1069 }
1070 1070 noff = nt_new(self);
1071 1071 if (noff == -1)
1072 1072 return -1;
1073 1073 /* self->nodes may have been changed by realloc */
1074 1074 self->nodes[off].children[k] = noff;
1075 1075 off = noff;
1076 1076 n = &self->nodes[off];
1077 1077 n->children[nt_level(oldnode, ++level)] = v;
1078 1078 if (level > self->depth)
1079 1079 self->depth = level;
1080 1080 self->splits += 1;
1081 1081 } else {
1082 1082 level += 1;
1083 1083 off = v;
1084 1084 }
1085 1085 }
1086 1086
1087 1087 return -1;
1088 1088 }
1089 1089
1090 1090 static PyObject *nt_insert_py(nodetree *self, PyObject *args)
1091 1091 {
1092 1092 Py_ssize_t rev;
1093 1093 const char *node;
1094 1094 Py_ssize_t length;
1095 1095 if (!PyArg_ParseTuple(args, "n", &rev))
1096 1096 return NULL;
1097 1097 length = index_length(self->index);
1098 1098 if (rev < 0 || rev >= length) {
1099 1099 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1100 1100 return NULL;
1101 1101 }
1102 1102 node = index_node_existing(self->index, rev);
1103 if (nt_insert(self, node, rev) == -1)
1103 if (nt_insert(self, node, (int)rev) == -1)
1104 1104 return NULL;
1105 1105 Py_RETURN_NONE;
1106 1106 }
1107 1107
1108 1108 static int nt_delete_node(nodetree *self, const char *node)
1109 1109 {
1110 1110 /* rev==-2 happens to get encoded as 0, which is interpreted as not set */
1111 1111 return nt_insert(self, node, -2);
1112 1112 }
1113 1113
1114 1114 static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
1115 1115 {
1116 1116 /* Initialize before overflow-checking to avoid nt_dealloc() crash. */
1117 1117 self->nodes = NULL;
1118 1118
1119 1119 self->index = index;
1120 1120 Py_INCREF(index);
1121 1121 /* The input capacity is in terms of revisions, while the field is in
1122 1122 * terms of nodetree nodes. */
1123 1123 self->capacity = (capacity < 4 ? 4 : capacity / 2);
1124 1124 self->depth = 0;
1125 1125 self->splits = 0;
1126 1126 if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
1127 1127 PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
1128 1128 return -1;
1129 1129 }
1130 1130 self->nodes = calloc(self->capacity, sizeof(nodetreenode));
1131 1131 if (self->nodes == NULL) {
1132 1132 PyErr_NoMemory();
1133 1133 return -1;
1134 1134 }
1135 1135 self->length = 1;
1136 1136 return 0;
1137 1137 }
1138 1138
1139 1139 static PyTypeObject indexType;
1140 1140
1141 1141 static int nt_init_py(nodetree *self, PyObject *args)
1142 1142 {
1143 1143 PyObject *index;
1144 1144 unsigned capacity;
1145 1145 if (!PyArg_ParseTuple(args, "O!I", &indexType, &index, &capacity))
1146 1146 return -1;
1147 1147 return nt_init(self, (indexObject*)index, capacity);
1148 1148 }
1149 1149
1150 1150 static int nt_partialmatch(nodetree *self, const char *node,
1151 1151 Py_ssize_t nodelen)
1152 1152 {
1153 1153 return nt_find(self, node, nodelen, 1);
1154 1154 }
1155 1155
1156 1156 /*
1157 1157 * Find the length of the shortest unique prefix of node.
1158 1158 *
1159 1159 * Return values:
1160 1160 *
1161 1161 * -3: error (exception set)
1162 1162 * -2: not found (no exception set)
1163 1163 * rest: length of shortest prefix
1164 1164 */
1165 1165 static int nt_shortest(nodetree *self, const char *node)
1166 1166 {
1167 1167 int level, off;
1168 1168
1169 1169 for (level = off = 0; level < 40; level++) {
1170 1170 int k, v;
1171 1171 nodetreenode *n = &self->nodes[off];
1172 1172 k = nt_level(node, level);
1173 1173 v = n->children[k];
1174 1174 if (v < 0) {
1175 1175 const char *n;
1176 1176 v = -(v + 2);
1177 1177 n = index_node_existing(self->index, v);
1178 1178 if (n == NULL)
1179 1179 return -3;
1180 1180 if (memcmp(node, n, 20) != 0)
1181 1181 /*
1182 1182 * Found a unique prefix, but it wasn't for the
1183 1183 * requested node (i.e. the requested node does
1184 1184 * not exist).
1185 1185 */
1186 1186 return -2;
1187 1187 return level + 1;
1188 1188 }
1189 1189 if (v == 0)
1190 1190 return -2;
1191 1191 off = v;
1192 1192 }
1193 1193 /*
1194 1194 * A node is never more than 40 hex digits, so we should never still be
1195 1195 * ambiguous here. If we get here, a programming error in this file made
1196 1196 * us insert a node longer than 40 hex digits.
1197 1197 */
1198 1198 PyErr_SetString(PyExc_Exception, "broken node tree");
1199 1199 return -3;
1200 1200 }
1201 1201
1202 1202 static PyObject *nt_shortest_py(nodetree *self, PyObject *args)
1203 1203 {
1204 1204 PyObject *val;
1205 1205 char *node;
1206 1206 int length;
1207 1207
1208 1208 if (!PyArg_ParseTuple(args, "O", &val))
1209 1209 return NULL;
1210 1210 if (node_check(val, &node) == -1)
1211 1211 return NULL;
1212 1212
1213 1213 length = nt_shortest(self, node);
1214 1214 if (length == -3)
1215 1215 return NULL;
1216 1216 if (length == -2) {
1217 1217 raise_revlog_error();
1218 1218 return NULL;
1219 1219 }
1220 1220 return PyInt_FromLong(length);
1221 1221 }
1222 1222
1223 1223 static void nt_dealloc(nodetree *self)
1224 1224 {
1225 1225 Py_XDECREF(self->index);
1226 1226 free(self->nodes);
1227 1227 self->nodes = NULL;
1228 1228 PyObject_Del(self);
1229 1229 }
1230 1230
1231 1231 static PyMethodDef nt_methods[] = {
1232 1232 {"insert", (PyCFunction)nt_insert_py, METH_VARARGS,
1233 1233 "insert an index entry"},
1234 1234 {"shortest", (PyCFunction)nt_shortest_py, METH_VARARGS,
1235 1235 "find length of shortest hex nodeid of a binary ID"},
1236 1236 {NULL} /* Sentinel */
1237 1237 };
1238 1238
1239 1239 static PyTypeObject nodetreeType = {
1240 1240 PyVarObject_HEAD_INIT(NULL, 0) /* header */
1241 1241 "parsers.nodetree", /* tp_name */
1242 1242 sizeof(nodetree) , /* tp_basicsize */
1243 1243 0, /* tp_itemsize */
1244 1244 (destructor)nt_dealloc, /* tp_dealloc */
1245 1245 0, /* tp_print */
1246 1246 0, /* tp_getattr */
1247 1247 0, /* tp_setattr */
1248 1248 0, /* tp_compare */
1249 1249 0, /* tp_repr */
1250 1250 0, /* tp_as_number */
1251 1251 0, /* tp_as_sequence */
1252 1252 0, /* tp_as_mapping */
1253 1253 0, /* tp_hash */
1254 1254 0, /* tp_call */
1255 1255 0, /* tp_str */
1256 1256 0, /* tp_getattro */
1257 1257 0, /* tp_setattro */
1258 1258 0, /* tp_as_buffer */
1259 1259 Py_TPFLAGS_DEFAULT, /* tp_flags */
1260 1260 "nodetree", /* tp_doc */
1261 1261 0, /* tp_traverse */
1262 1262 0, /* tp_clear */
1263 1263 0, /* tp_richcompare */
1264 1264 0, /* tp_weaklistoffset */
1265 1265 0, /* tp_iter */
1266 1266 0, /* tp_iternext */
1267 1267 nt_methods, /* tp_methods */
1268 1268 0, /* tp_members */
1269 1269 0, /* tp_getset */
1270 1270 0, /* tp_base */
1271 1271 0, /* tp_dict */
1272 1272 0, /* tp_descr_get */
1273 1273 0, /* tp_descr_set */
1274 1274 0, /* tp_dictoffset */
1275 1275 (initproc)nt_init_py, /* tp_init */
1276 1276 0, /* tp_alloc */
1277 1277 };
1278 1278
1279 1279 static int index_init_nt(indexObject *self)
1280 1280 {
1281 1281 if (self->nt == NULL) {
1282 1282 self->nt = PyObject_New(nodetree, &nodetreeType);
1283 1283 if (self->nt == NULL) {
1284 1284 return -1;
1285 1285 }
1286 if (nt_init(self->nt, self, self->raw_length) == -1) {
1286 if (nt_init(self->nt, self, (int)self->raw_length) == -1) {
1287 1287 nt_dealloc(self->nt);
1288 1288 self->nt = NULL;
1289 1289 return -1;
1290 1290 }
1291 1291 if (nt_insert(self->nt, nullid, -1) == -1) {
1292 1292 nt_dealloc(self->nt);
1293 1293 self->nt = NULL;
1294 1294 return -1;
1295 1295 }
1296 1296 self->ntrev = (int)index_length(self);
1297 1297 self->ntlookups = 1;
1298 1298 self->ntmisses = 0;
1299 1299 }
1300 1300 return 0;
1301 1301 }
1302 1302
1303 1303 /*
1304 1304 * Return values:
1305 1305 *
1306 1306 * -3: error (exception set)
1307 1307 * -2: not found (no exception set)
1308 1308 * rest: valid rev
1309 1309 */
1310 1310 static int index_find_node(indexObject *self,
1311 1311 const char *node, Py_ssize_t nodelen)
1312 1312 {
1313 1313 int rev;
1314 1314
1315 1315 if (index_init_nt(self) == -1)
1316 1316 return -3;
1317 1317
1318 1318 self->ntlookups++;
1319 1319 rev = nt_find(self->nt, node, nodelen, 0);
1320 1320 if (rev >= -1)
1321 1321 return rev;
1322 1322
1323 1323 /*
1324 1324 * For the first handful of lookups, we scan the entire index,
1325 1325 * and cache only the matching nodes. This optimizes for cases
1326 1326 * like "hg tip", where only a few nodes are accessed.
1327 1327 *
1328 1328 * After that, we cache every node we visit, using a single
1329 1329 * scan amortized over multiple lookups. This gives the best
1330 1330 * bulk performance, e.g. for "hg log".
1331 1331 */
1332 1332 if (self->ntmisses++ < 4) {
1333 1333 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1334 1334 const char *n = index_node_existing(self, rev);
1335 1335 if (n == NULL)
1336 1336 return -3;
1337 1337 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1338 1338 if (nt_insert(self->nt, n, rev) == -1)
1339 1339 return -3;
1340 1340 break;
1341 1341 }
1342 1342 }
1343 1343 } else {
1344 1344 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1345 1345 const char *n = index_node_existing(self, rev);
1346 1346 if (n == NULL)
1347 1347 return -3;
1348 1348 if (nt_insert(self->nt, n, rev) == -1) {
1349 1349 self->ntrev = rev + 1;
1350 1350 return -3;
1351 1351 }
1352 1352 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1353 1353 break;
1354 1354 }
1355 1355 }
1356 1356 self->ntrev = rev;
1357 1357 }
1358 1358
1359 1359 if (rev >= 0)
1360 1360 return rev;
1361 1361 return -2;
1362 1362 }
1363 1363
1364 1364 static PyObject *index_getitem(indexObject *self, PyObject *value)
1365 1365 {
1366 1366 char *node;
1367 1367 int rev;
1368 1368
1369 1369 if (PyInt_Check(value))
1370 1370 return index_get(self, PyInt_AS_LONG(value));
1371 1371
1372 1372 if (node_check(value, &node) == -1)
1373 1373 return NULL;
1374 1374 rev = index_find_node(self, node, 20);
1375 1375 if (rev >= -1)
1376 1376 return PyInt_FromLong(rev);
1377 1377 if (rev == -2)
1378 1378 raise_revlog_error();
1379 1379 return NULL;
1380 1380 }
1381 1381
1382 1382 /*
1383 1383 * Fully populate the radix tree.
1384 1384 */
1385 1385 static int index_populate_nt(indexObject *self) {
1386 1386 int rev;
1387 1387 if (self->ntrev > 0) {
1388 1388 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1389 1389 const char *n = index_node_existing(self, rev);
1390 1390 if (n == NULL)
1391 1391 return -1;
1392 1392 if (nt_insert(self->nt, n, rev) == -1)
1393 1393 return -1;
1394 1394 }
1395 1395 self->ntrev = -1;
1396 1396 }
1397 1397 return 0;
1398 1398 }
1399 1399
1400 1400 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1401 1401 {
1402 1402 const char *fullnode;
1403 1403 int nodelen;
1404 1404 char *node;
1405 1405 int rev, i;
1406 1406
1407 1407 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1408 1408 return NULL;
1409 1409
1410 1410 if (nodelen < 1) {
1411 1411 PyErr_SetString(PyExc_ValueError, "key too short");
1412 1412 return NULL;
1413 1413 }
1414 1414
1415 1415 if (nodelen > 40) {
1416 1416 PyErr_SetString(PyExc_ValueError, "key too long");
1417 1417 return NULL;
1418 1418 }
1419 1419
1420 1420 for (i = 0; i < nodelen; i++)
1421 1421 hexdigit(node, i);
1422 1422 if (PyErr_Occurred()) {
1423 1423 /* input contains non-hex characters */
1424 1424 PyErr_Clear();
1425 1425 Py_RETURN_NONE;
1426 1426 }
1427 1427
1428 1428 if (index_init_nt(self) == -1)
1429 1429 return NULL;
1430 1430 if (index_populate_nt(self) == -1)
1431 1431 return NULL;
1432 1432 rev = nt_partialmatch(self->nt, node, nodelen);
1433 1433
1434 1434 switch (rev) {
1435 1435 case -4:
1436 1436 raise_revlog_error();
1437 1437 return NULL;
1438 1438 case -2:
1439 1439 Py_RETURN_NONE;
1440 1440 case -1:
1441 1441 return PyBytes_FromStringAndSize(nullid, 20);
1442 1442 }
1443 1443
1444 1444 fullnode = index_node_existing(self, rev);
1445 1445 if (fullnode == NULL) {
1446 1446 return NULL;
1447 1447 }
1448 1448 return PyBytes_FromStringAndSize(fullnode, 20);
1449 1449 }
1450 1450
1451 1451 static PyObject *index_shortest(indexObject *self, PyObject *args)
1452 1452 {
1453 1453 PyObject *val;
1454 1454 char *node;
1455 1455 int length;
1456 1456
1457 1457 if (!PyArg_ParseTuple(args, "O", &val))
1458 1458 return NULL;
1459 1459 if (node_check(val, &node) == -1)
1460 1460 return NULL;
1461 1461
1462 1462 self->ntlookups++;
1463 1463 if (index_init_nt(self) == -1)
1464 1464 return NULL;
1465 1465 if (index_populate_nt(self) == -1)
1466 1466 return NULL;
1467 1467 length = nt_shortest(self->nt, node);
1468 1468 if (length == -3)
1469 1469 return NULL;
1470 1470 if (length == -2) {
1471 1471 raise_revlog_error();
1472 1472 return NULL;
1473 1473 }
1474 1474 return PyInt_FromLong(length);
1475 1475 }
1476 1476
1477 1477 static PyObject *index_m_get(indexObject *self, PyObject *args)
1478 1478 {
1479 1479 PyObject *val;
1480 1480 char *node;
1481 1481 int rev;
1482 1482
1483 1483 if (!PyArg_ParseTuple(args, "O", &val))
1484 1484 return NULL;
1485 1485 if (node_check(val, &node) == -1)
1486 1486 return NULL;
1487 1487 rev = index_find_node(self, node, 20);
1488 1488 if (rev == -3)
1489 1489 return NULL;
1490 1490 if (rev == -2)
1491 1491 Py_RETURN_NONE;
1492 1492 return PyInt_FromLong(rev);
1493 1493 }
1494 1494
1495 1495 static int index_contains(indexObject *self, PyObject *value)
1496 1496 {
1497 1497 char *node;
1498 1498
1499 1499 if (PyInt_Check(value)) {
1500 1500 long rev = PyInt_AS_LONG(value);
1501 1501 return rev >= -1 && rev < index_length(self);
1502 1502 }
1503 1503
1504 1504 if (node_check(value, &node) == -1)
1505 1505 return -1;
1506 1506
1507 1507 switch (index_find_node(self, node, 20)) {
1508 1508 case -3:
1509 1509 return -1;
1510 1510 case -2:
1511 1511 return 0;
1512 1512 default:
1513 1513 return 1;
1514 1514 }
1515 1515 }
1516 1516
1517 1517 typedef uint64_t bitmask;
1518 1518
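/*
 * Editorial note, not part of the original file: how find_gca_candidates()
 * below uses these bitmasks. Input rev revs[i] is tagged with bit 1 << i;
 * walking revisions from highest to lowest, a rev whose accumulated mask
 * equals allseen is an ancestor of every input and is emitted as a GCA
 * candidate. Its mask is then poisoned so that its own ancestors are not
 * reported as further candidates.
 */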
1519 1519 /*
1520 1520 * Given a disjoint set of revs, return all candidates for the
1521 1521 * greatest common ancestor. In revset notation, this is the set
1522 1522 * "heads(::a and ::b and ...)"
1523 1523 */
1524 1524 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
1525 1525 int revcount)
1526 1526 {
1527 1527 const bitmask allseen = (1ull << revcount) - 1;
1528 1528 const bitmask poison = 1ull << revcount;
1529 1529 PyObject *gca = PyList_New(0);
1530 1530 int i, v, interesting;
1531 1531 int maxrev = -1;
1532 1532 bitmask sp;
1533 1533 bitmask *seen;
1534 1534
1535 1535 if (gca == NULL)
1536 1536 return PyErr_NoMemory();
1537 1537
1538 1538 for (i = 0; i < revcount; i++) {
1539 1539 if (revs[i] > maxrev)
1540 1540 maxrev = revs[i];
1541 1541 }
1542 1542
1543 1543 seen = calloc(sizeof(*seen), maxrev + 1);
1544 1544 if (seen == NULL) {
1545 1545 Py_DECREF(gca);
1546 1546 return PyErr_NoMemory();
1547 1547 }
1548 1548
1549 1549 for (i = 0; i < revcount; i++)
1550 1550 seen[revs[i]] = 1ull << i;
1551 1551
1552 1552 interesting = revcount;
1553 1553
1554 1554 for (v = maxrev; v >= 0 && interesting; v--) {
1555 1555 bitmask sv = seen[v];
1556 1556 int parents[2];
1557 1557
1558 1558 if (!sv)
1559 1559 continue;
1560 1560
1561 1561 if (sv < poison) {
1562 1562 interesting -= 1;
1563 1563 if (sv == allseen) {
1564 1564 PyObject *obj = PyInt_FromLong(v);
1565 1565 if (obj == NULL)
1566 1566 goto bail;
1567 1567 if (PyList_Append(gca, obj) == -1) {
1568 1568 Py_DECREF(obj);
1569 1569 goto bail;
1570 1570 }
1571 1571 sv |= poison;
1572 1572 for (i = 0; i < revcount; i++) {
1573 1573 if (revs[i] == v)
1574 1574 goto done;
1575 1575 }
1576 1576 }
1577 1577 }
1578 1578 if (index_get_parents(self, v, parents, maxrev) < 0)
1579 1579 goto bail;
1580 1580
1581 1581 for (i = 0; i < 2; i++) {
1582 1582 int p = parents[i];
1583 1583 if (p == -1)
1584 1584 continue;
1585 1585 sp = seen[p];
1586 1586 if (sv < poison) {
1587 1587 if (sp == 0) {
1588 1588 seen[p] = sv;
1589 1589 interesting++;
1590 1590 }
1591 1591 else if (sp != sv)
1592 1592 seen[p] |= sv;
1593 1593 } else {
1594 1594 if (sp && sp < poison)
1595 1595 interesting--;
1596 1596 seen[p] = sv;
1597 1597 }
1598 1598 }
1599 1599 }
1600 1600
1601 1601 done:
1602 1602 free(seen);
1603 1603 return gca;
1604 1604 bail:
1605 1605 free(seen);
1606 1606 Py_XDECREF(gca);
1607 1607 return NULL;
1608 1608 }
1609 1609
1610 1610 /*
1611 1611 * Given a disjoint set of revs, return the subset with the longest
1612 1612 * path to the root.
1613 1613 */
1614 1614 static PyObject *find_deepest(indexObject *self, PyObject *revs)
1615 1615 {
1616 1616 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
1617 1617 static const Py_ssize_t capacity = 24;
1618 1618 int *depth, *interesting = NULL;
1619 1619 int i, j, v, ninteresting;
1620 1620 PyObject *dict = NULL, *keys = NULL;
1621 1621 long *seen = NULL;
1622 1622 int maxrev = -1;
1623 1623 long final;
1624 1624
1625 1625 if (revcount > capacity) {
1626 1626 PyErr_Format(PyExc_OverflowError,
1627 1627 "bitset size (%ld) > capacity (%ld)",
1628 1628 (long)revcount, (long)capacity);
1629 1629 return NULL;
1630 1630 }
1631 1631
1632 1632 for (i = 0; i < revcount; i++) {
1633 1633 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1634 1634 if (n > maxrev)
1635 1635 maxrev = n;
1636 1636 }
1637 1637
1638 1638 depth = calloc(sizeof(*depth), maxrev + 1);
1639 1639 if (depth == NULL)
1640 1640 return PyErr_NoMemory();
1641 1641
1642 1642 seen = calloc(sizeof(*seen), maxrev + 1);
1643 1643 if (seen == NULL) {
1644 1644 PyErr_NoMemory();
1645 1645 goto bail;
1646 1646 }
1647 1647
1648 1648 interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
1649 1649 if (interesting == NULL) {
1650 1650 PyErr_NoMemory();
1651 1651 goto bail;
1652 1652 }
1653 1653
1654 1654 if (PyList_Sort(revs) == -1)
1655 1655 goto bail;
1656 1656
1657 1657 for (i = 0; i < revcount; i++) {
1658 1658 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
1659 1659 long b = 1l << i;
1660 1660 depth[n] = 1;
1661 1661 seen[n] = b;
1662 1662 interesting[b] = 1;
1663 1663 }
1664 1664
1665 1665 /* invariant: ninteresting is the number of non-zero entries in
1666 1666 * interesting. */
1667 1667 ninteresting = (int)revcount;
1668 1668
1669 1669 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
1670 1670 int dv = depth[v];
1671 1671 int parents[2];
1672 1672 long sv;
1673 1673
1674 1674 if (dv == 0)
1675 1675 continue;
1676 1676
1677 1677 sv = seen[v];
1678 1678 if (index_get_parents(self, v, parents, maxrev) < 0)
1679 1679 goto bail;
1680 1680
1681 1681 for (i = 0; i < 2; i++) {
1682 1682 int p = parents[i];
1683 1683 long sp;
1684 1684 int dp;
1685 1685
1686 1686 if (p == -1)
1687 1687 continue;
1688 1688
1689 1689 dp = depth[p];
1690 1690 sp = seen[p];
1691 1691 if (dp <= dv) {
1692 1692 depth[p] = dv + 1;
1693 1693 if (sp != sv) {
1694 1694 interesting[sv] += 1;
1695 1695 seen[p] = sv;
1696 1696 if (sp) {
1697 1697 interesting[sp] -= 1;
1698 1698 if (interesting[sp] == 0)
1699 1699 ninteresting -= 1;
1700 1700 }
1701 1701 }
1702 1702 }
1703 1703 else if (dv == dp - 1) {
1704 1704 long nsp = sp | sv;
1705 1705 if (nsp == sp)
1706 1706 continue;
1707 1707 seen[p] = nsp;
1708 1708 interesting[sp] -= 1;
1709 1709 if (interesting[sp] == 0)
1710 1710 ninteresting -= 1;
1711 1711 if (interesting[nsp] == 0)
1712 1712 ninteresting += 1;
1713 1713 interesting[nsp] += 1;
1714 1714 }
1715 1715 }
1716 1716 interesting[sv] -= 1;
1717 1717 if (interesting[sv] == 0)
1718 1718 ninteresting -= 1;
1719 1719 }
1720 1720
1721 1721 final = 0;
1722 1722 j = ninteresting;
1723 1723 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
1724 1724 if (interesting[i] == 0)
1725 1725 continue;
1726 1726 final |= i;
1727 1727 j -= 1;
1728 1728 }
1729 1729 if (final == 0) {
1730 1730 keys = PyList_New(0);
1731 1731 goto bail;
1732 1732 }
1733 1733
1734 1734 dict = PyDict_New();
1735 1735 if (dict == NULL)
1736 1736 goto bail;
1737 1737
1738 1738 for (i = 0; i < revcount; i++) {
1739 1739 PyObject *key;
1740 1740
1741 1741 if ((final & (1 << i)) == 0)
1742 1742 continue;
1743 1743
1744 1744 key = PyList_GET_ITEM(revs, i);
1745 1745 Py_INCREF(key);
1746 1746 Py_INCREF(Py_None);
1747 1747 if (PyDict_SetItem(dict, key, Py_None) == -1) {
1748 1748 Py_DECREF(key);
1749 1749 Py_DECREF(Py_None);
1750 1750 goto bail;
1751 1751 }
1752 1752 }
1753 1753
1754 1754 keys = PyDict_Keys(dict);
1755 1755
1756 1756 bail:
1757 1757 free(depth);
1758 1758 free(seen);
1759 1759 free(interesting);
1760 1760 Py_XDECREF(dict);
1761 1761
1762 1762 return keys;
1763 1763 }
1764 1764
1765 1765 /*
1766 1766 * Given a (possibly overlapping) set of revs, return all the
1767 1767 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
1768 1768 */
1769 1769 static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
1770 1770 {
1771 1771 PyObject *ret = NULL;
1772 1772 Py_ssize_t argcount, i, len;
1773 1773 bitmask repeat = 0;
1774 1774 int revcount = 0;
1775 1775 int *revs;
1776 1776
1777 1777 argcount = PySequence_Length(args);
1778 1778 revs = PyMem_Malloc(argcount * sizeof(*revs));
1779 1779 if (argcount > 0 && revs == NULL)
1780 1780 return PyErr_NoMemory();
1781 1781 len = index_length(self);
1782 1782
1783 1783 for (i = 0; i < argcount; i++) {
1784 1784 static const int capacity = 24;
1785 1785 PyObject *obj = PySequence_GetItem(args, i);
1786 1786 bitmask x;
1787 1787 long val;
1788 1788
1789 1789 if (!PyInt_Check(obj)) {
1790 1790 PyErr_SetString(PyExc_TypeError,
1791 1791 "arguments must all be ints");
1792 1792 Py_DECREF(obj);
1793 1793 goto bail;
1794 1794 }
1795 1795 val = PyInt_AsLong(obj);
1796 1796 Py_DECREF(obj);
1797 1797 if (val == -1) {
1798 1798 ret = PyList_New(0);
1799 1799 goto done;
1800 1800 }
1801 1801 if (val < 0 || val >= len) {
1802 1802 PyErr_SetString(PyExc_IndexError,
1803 1803 "index out of range");
1804 1804 goto bail;
1805 1805 }
1806 1806 /* this cheesy bloom filter lets us avoid some more
1807 1807 * expensive duplicate checks in the common set-is-disjoint
1808 1808 * case */
1809 1809 x = 1ull << (val & 0x3f);
1810 1810 if (repeat & x) {
1811 1811 int k;
1812 1812 for (k = 0; k < revcount; k++) {
1813 1813 if (val == revs[k])
1814 1814 goto duplicate;
1815 1815 }
1816 1816 }
1817 1817 else repeat |= x;
1818 1818 if (revcount >= capacity) {
1819 1819 PyErr_Format(PyExc_OverflowError,
1820 1820 "bitset size (%d) > capacity (%d)",
1821 1821 revcount, capacity);
1822 1822 goto bail;
1823 1823 }
1824 1824 revs[revcount++] = (int)val;
1825 1825 duplicate:;
1826 1826 }
1827 1827
1828 1828 if (revcount == 0) {
1829 1829 ret = PyList_New(0);
1830 1830 goto done;
1831 1831 }
1832 1832 if (revcount == 1) {
1833 1833 PyObject *obj;
1834 1834 ret = PyList_New(1);
1835 1835 if (ret == NULL)
1836 1836 goto bail;
1837 1837 obj = PyInt_FromLong(revs[0]);
1838 1838 if (obj == NULL)
1839 1839 goto bail;
1840 1840 PyList_SET_ITEM(ret, 0, obj);
1841 1841 goto done;
1842 1842 }
1843 1843
1844 1844 ret = find_gca_candidates(self, revs, revcount);
1845 1845 if (ret == NULL)
1846 1846 goto bail;
1847 1847
1848 1848 done:
1849 1849 PyMem_Free(revs);
1850 1850 return ret;
1851 1851
1852 1852 bail:
1853 1853 PyMem_Free(revs);
1854 1854 Py_XDECREF(ret);
1855 1855 return NULL;
1856 1856 }
1857 1857
1858 1858 /*
1859 1859 * Given a (possibly overlapping) set of revs, return the greatest
1860 1860 * common ancestors: those with the longest path to the root.
1861 1861 */
1862 1862 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1863 1863 {
1864 1864 PyObject *ret;
1865 1865 PyObject *gca = index_commonancestorsheads(self, args);
1866 1866 if (gca == NULL)
1867 1867 return NULL;
1868 1868
1869 1869 if (PyList_GET_SIZE(gca) <= 1) {
1870 1870 return gca;
1871 1871 }
1872 1872
1873 1873 ret = find_deepest(self, gca);
1874 1874 Py_DECREF(gca);
1875 1875 return ret;
1876 1876 }
1877 1877
1878 1878 /*
1879 1879 * Invalidate any trie entries introduced by added revs.
1880 1880 */
1881 1881 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
1882 1882 {
1883 1883 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
1884 1884
1885 1885 for (i = start; i < len; i++) {
1886 1886 PyObject *tuple = PyList_GET_ITEM(self->added, i);
1887 1887 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
1888 1888
1889 1889 nt_delete_node(self->nt, PyBytes_AS_STRING(node));
1890 1890 }
1891 1891
1892 1892 if (start == 0)
1893 1893 Py_CLEAR(self->added);
1894 1894 }
1895 1895
1896 1896 /*
1897 1897 * Delete a numeric range of revs, which must be at the end of the
1898 1898 * index, but exclude the sentinel nullid entry.
1899 1899 */
1900 1900 static int index_slice_del(indexObject *self, PyObject *item)
1901 1901 {
1902 1902 Py_ssize_t start, stop, step, slicelength;
1903 1903 Py_ssize_t length = index_length(self) + 1;
1904 1904 int ret = 0;
1905 1905
1906 1906 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
1907 1907 #ifdef IS_PY3K
1908 1908 if (PySlice_GetIndicesEx(item, length,
1909 1909 #else
1910 1910 if (PySlice_GetIndicesEx((PySliceObject*)item, length,
1911 1911 #endif
1912 1912 &start, &stop, &step, &slicelength) < 0)
1913 1913 return -1;
1914 1914
1915 1915 if (slicelength <= 0)
1916 1916 return 0;
1917 1917
1918 1918 if ((step < 0 && start < stop) || (step > 0 && start > stop))
1919 1919 stop = start;
1920 1920
1921 1921 if (step < 0) {
1922 1922 stop = start + 1;
1923 1923 start = stop + step*(slicelength - 1) - 1;
1924 1924 step = -step;
1925 1925 }
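	/* start/stop/step now describe an equivalent forward range; only a
	 * unit step over the trailing revisions is accepted below. */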
1926 1926
1927 1927 if (step != 1) {
1928 1928 PyErr_SetString(PyExc_ValueError,
1929 1929 "revlog index delete requires step size of 1");
1930 1930 return -1;
1931 1931 }
1932 1932
1933 1933 if (stop != length - 1) {
1934 1934 PyErr_SetString(PyExc_IndexError,
1935 1935 "revlog index deletion indices are invalid");
1936 1936 return -1;
1937 1937 }
1938 1938
1939 1939 if (start < self->length) {
1940 1940 if (self->nt) {
1941 1941 Py_ssize_t i;
1942 1942
1943 1943 for (i = start + 1; i < self->length; i++) {
1944 1944 const char *node = index_node_existing(self, i);
1945 1945 if (node == NULL)
1946 1946 return -1;
1947 1947
1948 1948 nt_delete_node(self->nt, node);
1949 1949 }
1950 1950 if (self->added)
1951 1951 index_invalidate_added(self, 0);
1952 1952 if (self->ntrev > start)
1953 1953 self->ntrev = (int)start;
1954 1954 }
1955 1955 self->length = start;
1956 1956 if (start < self->raw_length) {
1957 1957 if (self->cache) {
1958 1958 Py_ssize_t i;
1959 1959 for (i = start; i < self->raw_length; i++)
1960 1960 Py_CLEAR(self->cache[i]);
1961 1961 }
1962 1962 self->raw_length = start;
1963 1963 }
1964 1964 goto done;
1965 1965 }
1966 1966
1967 1967 if (self->nt) {
1968 1968 index_invalidate_added(self, start - self->length);
1969 1969 if (self->ntrev > start)
1970 1970 self->ntrev = (int)start;
1971 1971 }
1972 1972 if (self->added)
1973 1973 ret = PyList_SetSlice(self->added, start - self->length,
1974 1974 PyList_GET_SIZE(self->added), NULL);
1975 1975 done:
1976 1976 Py_CLEAR(self->headrevs);
1977 1977 return ret;
1978 1978 }
1979 1979
1980 1980 /*
1981 1981 * Supported ops:
1982 1982 *
1983 1983 * slice deletion
1984 1984 * string assignment (extend node->rev mapping)
1985 1985 * string deletion (shrink node->rev mapping)
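 *
 * Python-level examples (illustrative): "del index[rev:-1]" for slice
 * deletion, "index[node] = rev" for string assignment, and
 * "del index[node]" for string deletion.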
1986 1986 */
1987 1987 static int index_assign_subscript(indexObject *self, PyObject *item,
1988 1988 PyObject *value)
1989 1989 {
1990 1990 char *node;
1991 1991 long rev;
1992 1992
1993 1993 if (PySlice_Check(item) && value == NULL)
1994 1994 return index_slice_del(self, item);
1995 1995
1996 1996 if (node_check(item, &node) == -1)
1997 1997 return -1;
1998 1998
1999 1999 if (value == NULL)
2000 2000 return self->nt ? nt_delete_node(self->nt, node) : 0;
2001 2001 rev = PyInt_AsLong(value);
2002 2002 if (rev > INT_MAX || rev < 0) {
2003 2003 if (!PyErr_Occurred())
2004 2004 PyErr_SetString(PyExc_ValueError, "rev out of range");
2005 2005 return -1;
2006 2006 }
2007 2007
2008 2008 if (index_init_nt(self) == -1)
2009 2009 return -1;
2010 2010 return nt_insert(self->nt, node, (int)rev);
2011 2011 }
2012 2012
2013 2013 /*
2014 2014 * Find all RevlogNG entries in an index that has inline data. Update
2015 2015 * the optional "offsets" table with those entries.
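 *
 * Layout sketch (assuming the standard RevlogNG v1 format): each record
 * starts with a v1_hdrsize-byte header; for inlined revlogs the
 * compressed revision data follows immediately, and its length is the
 * 32-bit big-endian field at byte offset 8 of the header.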
2016 2016 */
2017 2017 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
2018 2018 {
2019 2019 const char *data = (const char *)self->buf.buf;
2020 2020 Py_ssize_t pos = 0;
2021 2021 Py_ssize_t end = self->buf.len;
2022 2022 long incr = v1_hdrsize;
2023 2023 Py_ssize_t len = 0;
2024 2024
2025 2025 while (pos + v1_hdrsize <= end && pos >= 0) {
2026 2026 uint32_t comp_len;
2027 2027 /* 3rd element of header is length of compressed inline data */
2028 2028 comp_len = getbe32(data + pos + 8);
2029 2029 incr = v1_hdrsize + comp_len;
2030 2030 if (offsets)
2031 2031 offsets[len] = data + pos;
2032 2032 len++;
2033 2033 pos += incr;
2034 2034 }
2035 2035
2036 2036 if (pos != end) {
2037 2037 if (!PyErr_Occurred())
2038 2038 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2039 2039 return -1;
2040 2040 }
2041 2041
2042 2042 return len;
2043 2043 }
2044 2044
2045 2045 static int index_init(indexObject *self, PyObject *args)
2046 2046 {
2047 2047 PyObject *data_obj, *inlined_obj;
2048 2048 Py_ssize_t size;
2049 2049
2050 2050 /* Initialize before argument-checking to avoid index_dealloc() crash. */
2051 2051 self->raw_length = 0;
2052 2052 self->added = NULL;
2053 2053 self->cache = NULL;
2054 2054 self->data = NULL;
2055 2055 memset(&self->buf, 0, sizeof(self->buf));
2056 2056 self->headrevs = NULL;
2057 2057 self->filteredrevs = Py_None;
2058 2058 Py_INCREF(Py_None);
2059 2059 self->nt = NULL;
2060 2060 self->offsets = NULL;
2061 2061
2062 2062 if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
2063 2063 return -1;
2064 2064 if (!PyObject_CheckBuffer(data_obj)) {
2065 2065 PyErr_SetString(PyExc_TypeError,
2066 2066 "data does not support buffer interface");
2067 2067 return -1;
2068 2068 }
2069 2069
2070 2070 if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
2071 2071 return -1;
2072 2072 size = self->buf.len;
2073 2073
2074 2074 self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
2075 2075 self->data = data_obj;
2076 2076
2077 2077 self->ntlookups = self->ntmisses = 0;
2078 2078 self->ntrev = -1;
2079 2079 Py_INCREF(self->data);
2080 2080
2081 2081 if (self->inlined) {
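		/* Inline revlogs interleave revision data with the index
		 * entries, so the entry count is only known after scanning. */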
2082 2082 Py_ssize_t len = inline_scan(self, NULL);
2083 2083 if (len == -1)
2084 2084 goto bail;
2085 2085 self->raw_length = len;
2086 2086 self->length = len;
2087 2087 } else {
2088 2088 if (size % v1_hdrsize) {
2089 2089 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2090 2090 goto bail;
2091 2091 }
2092 2092 self->raw_length = size / v1_hdrsize;
2093 2093 self->length = self->raw_length;
2094 2094 }
2095 2095
2096 2096 return 0;
2097 2097 bail:
2098 2098 return -1;
2099 2099 }
2100 2100
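/*
 * The index object doubles as its own nodemap: the "nodemap" property
 * below simply returns self, and node lookups go through the trie.
 */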
2101 2101 static PyObject *index_nodemap(indexObject *self)
2102 2102 {
2103 2103 Py_INCREF(self);
2104 2104 return (PyObject *)self;
2105 2105 }
2106 2106
2107 2107 static void _index_clearcaches(indexObject *self)
2108 2108 {
2109 2109 if (self->cache) {
2110 2110 Py_ssize_t i;
2111 2111
2112 2112 for (i = 0; i < self->raw_length; i++)
2113 2113 Py_CLEAR(self->cache[i]);
2114 2114 free(self->cache);
2115 2115 self->cache = NULL;
2116 2116 }
2117 2117 if (self->offsets) {
2118 2118 PyMem_Free((void *)self->offsets);
2119 2119 self->offsets = NULL;
2120 2120 }
2121 2121 if (self->nt != NULL) {
2122 2122 nt_dealloc(self->nt);
2123 2123 }
2124 2124 self->nt = NULL;
2125 2125 Py_CLEAR(self->headrevs);
2126 2126 }
2127 2127
2128 2128 static PyObject *index_clearcaches(indexObject *self)
2129 2129 {
2130 2130 _index_clearcaches(self);
2131 2131 self->ntrev = -1;
2132 2132 self->ntlookups = self->ntmisses = 0;
2133 2133 Py_RETURN_NONE;
2134 2134 }
2135 2135
2136 2136 static void index_dealloc(indexObject *self)
2137 2137 {
2138 2138 _index_clearcaches(self);
2139 2139 Py_XDECREF(self->filteredrevs);
2140 2140 if (self->buf.buf) {
2141 2141 PyBuffer_Release(&self->buf);
2142 2142 memset(&self->buf, 0, sizeof(self->buf));
2143 2143 }
2144 2144 Py_XDECREF(self->data);
2145 2145 Py_XDECREF(self->added);
2146 2146 Py_XDECREF(self->nt);
2147 2147 PyObject_Del(self);
2148 2148 }
2149 2149
2150 2150 static PySequenceMethods index_sequence_methods = {
2151 2151 (lenfunc)index_length, /* sq_length */
2152 2152 0, /* sq_concat */
2153 2153 0, /* sq_repeat */
2154 2154 (ssizeargfunc)index_get, /* sq_item */
2155 2155 0, /* sq_slice */
2156 2156 0, /* sq_ass_item */
2157 2157 0, /* sq_ass_slice */
2158 2158 (objobjproc)index_contains, /* sq_contains */
2159 2159 };
2160 2160
2161 2161 static PyMappingMethods index_mapping_methods = {
2162 2162 (lenfunc)index_length, /* mp_length */
2163 2163 (binaryfunc)index_getitem, /* mp_subscript */
2164 2164 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2165 2165 };
2166 2166
2167 2167 static PyMethodDef index_methods[] = {
2168 2168 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2169 2169 "return the gca set of the given revs"},
2170 2170 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2171 2171 METH_VARARGS,
2172 2172 "return the heads of the common ancestors of the given revs"},
2173 2173 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2174 2174 "clear the index caches"},
2175 2175 {"get", (PyCFunction)index_m_get, METH_VARARGS,
2176 2176 "get an index entry"},
2177 2177 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
2178 2178 METH_VARARGS, "compute phases"},
2179 2179 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2180 2180 "reachableroots"},
2181 2181 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2182 2182 "get head revisions"}, /* Can do filtering since 3.2 */
2183 2183 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2184 2184 "get filtered head revisions"}, /* Can always do filtering */
2185 2185 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2186 2186 "determine revisions with deltas to reconstruct fulltext"},
2187 2187 {"append", (PyCFunction)index_append, METH_O,
2188 2188 "append an index entry"},
2189 2189 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2190 2190 "match a potentially ambiguous node ID"},
2191 2191 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2192 2192 "find length of shortest hex nodeid of a binary ID"},
2193 2193 {"stats", (PyCFunction)index_stats, METH_NOARGS,
2194 2194 "stats for the index"},
2195 2195 {NULL} /* Sentinel */
2196 2196 };
2197 2197
2198 2198 static PyGetSetDef index_getset[] = {
2199 2199 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2200 2200 {NULL} /* Sentinel */
2201 2201 };
2202 2202
2203 2203 static PyTypeObject indexType = {
2204 2204 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2205 2205 "parsers.index", /* tp_name */
2206 2206 sizeof(indexObject), /* tp_basicsize */
2207 2207 0, /* tp_itemsize */
2208 2208 (destructor)index_dealloc, /* tp_dealloc */
2209 2209 0, /* tp_print */
2210 2210 0, /* tp_getattr */
2211 2211 0, /* tp_setattr */
2212 2212 0, /* tp_compare */
2213 2213 0, /* tp_repr */
2214 2214 0, /* tp_as_number */
2215 2215 &index_sequence_methods, /* tp_as_sequence */
2216 2216 &index_mapping_methods, /* tp_as_mapping */
2217 2217 0, /* tp_hash */
2218 2218 0, /* tp_call */
2219 2219 0, /* tp_str */
2220 2220 0, /* tp_getattro */
2221 2221 0, /* tp_setattro */
2222 2222 0, /* tp_as_buffer */
2223 2223 Py_TPFLAGS_DEFAULT, /* tp_flags */
2224 2224 "revlog index", /* tp_doc */
2225 2225 0, /* tp_traverse */
2226 2226 0, /* tp_clear */
2227 2227 0, /* tp_richcompare */
2228 2228 0, /* tp_weaklistoffset */
2229 2229 0, /* tp_iter */
2230 2230 0, /* tp_iternext */
2231 2231 index_methods, /* tp_methods */
2232 2232 0, /* tp_members */
2233 2233 index_getset, /* tp_getset */
2234 2234 0, /* tp_base */
2235 2235 0, /* tp_dict */
2236 2236 0, /* tp_descr_get */
2237 2237 0, /* tp_descr_set */
2238 2238 0, /* tp_dictoffset */
2239 2239 (initproc)index_init, /* tp_init */
2240 2240 0, /* tp_alloc */
2241 2241 };
2242 2242
2243 2243 /*
2244 2244 * returns a tuple of the form (index, cache) with elements as
2245 2245 * follows:
2246 2246 *
2247 2247 * index: an index object that lazily parses RevlogNG records
2248 2248 * cache: if data is inlined, a tuple (0, index_file_content), else None
2249 2249 * index_file_content could be a string or a buffer
2250 2250 *
2251 2251 * added complications are for backwards compatibility
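 *
 * Expected Python-level use (illustrative):
 *     index, cache = parsers.parse_index2(data, inline)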
2252 2252 */
2253 2253 PyObject *parse_index2(PyObject *self, PyObject *args)
2254 2254 {
2255 2255 PyObject *tuple = NULL, *cache = NULL;
2256 2256 indexObject *idx;
2257 2257 int ret;
2258 2258
2259 2259 idx = PyObject_New(indexObject, &indexType);
2260 2260 if (idx == NULL)
2261 2261 goto bail;
2262 2262
2263 2263 ret = index_init(idx, args);
2264 2264 if (ret == -1)
2265 2265 goto bail;
2266 2266
2267 2267 if (idx->inlined) {
2268 2268 cache = Py_BuildValue("iO", 0, idx->data);
2269 2269 if (cache == NULL)
2270 2270 goto bail;
2271 2271 } else {
2272 2272 cache = Py_None;
2273 2273 Py_INCREF(cache);
2274 2274 }
2275 2275
2276 2276 tuple = Py_BuildValue("NN", idx, cache);
2277 2277 if (!tuple)
2278 2278 goto bail;
2279 2279 return tuple;
2280 2280
2281 2281 bail:
2282 2282 Py_XDECREF(idx);
2283 2283 Py_XDECREF(cache);
2284 2284 Py_XDECREF(tuple);
2285 2285 return NULL;
2286 2286 }
2287 2287
2288 2288 void revlog_module_init(PyObject *mod)
2289 2289 {
2290 2290 indexType.tp_new = PyType_GenericNew;
2291 2291 if (PyType_Ready(&indexType) < 0)
2292 2292 return;
2293 2293 Py_INCREF(&indexType);
2294 2294 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2295 2295
2296 2296 nodetreeType.tp_new = PyType_GenericNew;
2297 2297 if (PyType_Ready(&nodetreeType) < 0)
2298 2298 return;
2299 2299 Py_INCREF(&nodetreeType);
2300 2300 PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);
2301 2301
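	/* Sentinel tuple for the null revision: zero offset/lengths, -1 for
	 * base, link and both parents, and the 20-byte null node id. */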
2302 2302 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
2303 2303 -1, -1, -1, -1, nullid, 20);
2304 2304 if (nullentry)
2305 2305 PyObject_GC_UnTrack(nullentry);
2306 2306 }