/*
 * Provenance: Mercurial changeset r45736:3264d58e (default branch),
 * "phases: fix clang-format error" by Yuya Nishihara.
 * Extracted from a diff view (hunk header: @@ -1,2955 +1,2956 @@).
 */
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #define PY_SSIZE_T_CLEAN
11 11 #include <Python.h>
12 12 #include <assert.h>
13 13 #include <ctype.h>
14 14 #include <limits.h>
15 15 #include <stddef.h>
16 16 #include <stdlib.h>
17 17 #include <string.h>
18 18
19 19 #include "bitmanipulation.h"
20 20 #include "charencode.h"
21 21 #include "revlog.h"
22 22 #include "util.h"
23 23
24 24 #ifdef IS_PY3K
25 25 /* The mapping of Python types is meant to be temporary to get Python
26 26 * 3 to compile. We should remove this once Python 3 support is fully
27 27 * supported and proper types are used in the extensions themselves. */
28 28 #define PyInt_Check PyLong_Check
29 29 #define PyInt_FromLong PyLong_FromLong
30 30 #define PyInt_FromSsize_t PyLong_FromSsize_t
31 31 #define PyInt_AsLong PyLong_AsLong
32 32 #endif
33 33
typedef struct indexObjectStruct indexObject;

/* One trie node: 16 child slots, one per hex nibble of a node hash. */
typedef struct {
	int children[16];
} nodetreenode;

/* C-level API exposed to other extension modules (via a capsule,
 * presumably — TODO confirm against the module init code). */
typedef struct {
	int abi_version;
	Py_ssize_t (*index_length)(const indexObject *);
	const char *(*index_node)(indexObject *, Py_ssize_t);
	int (*index_parents)(PyObject *, int, int *);
} Revlog_CAPI;

/*
 * A base-16 trie for fast node->rev mapping.
 *
 * Positive value is index of the next node in the trie
 * Negative value is a leaf: -(rev + 2)
 * Zero is empty
 */
typedef struct {
	indexObject *index;  /* back-pointer to the owning index */
	nodetreenode *nodes; /* dynamically grown array of trie nodes */
	unsigned length;     /* # nodes in use */
	unsigned capacity;   /* # nodes allocated */
	int depth;           /* maximum depth of tree */
	int splits;          /* # splits performed */
} nodetree;

/* Python object wrapping a standalone nodetree. */
typedef struct {
	PyObject_HEAD /* ; */
	    nodetree nt;
} nodetreeObject;

/*
 * This class has two behaviors.
 *
 * When used in a list-like way (with integer keys), we decode an
 * entry in a RevlogNG index file on demand. We have limited support for
 * integer-keyed insert and delete, only at elements right before the
 * end.
 *
 * With string keys, we lazily perform a reverse mapping from node to
 * rev, using a base-16 trie.
 */
struct indexObjectStruct {
	PyObject_HEAD
	/* Type-specific fields go here. */
	PyObject *data;         /* raw bytes of index */
	Py_buffer buf;          /* buffer of data */
	PyObject **cache;       /* cached tuples */
	const char **offsets;   /* populated on demand */
	Py_ssize_t raw_length;  /* original number of elements */
	Py_ssize_t length;      /* current number of elements */
	PyObject *added;        /* populated on demand */
	PyObject *headrevs;     /* cache, invalidated on changes */
	PyObject *filteredrevs; /* filtered revs set */
	nodetree nt;            /* base-16 trie */
	int ntinitialized;      /* 0 or 1 */
	int ntrev;              /* last rev scanned */
	int ntlookups;          /* # lookups */
	int ntmisses;           /* # lookups that miss the cache */
	int inlined;            /* nonzero for an inline (.i) revlog */
};
98 98
99 99 static Py_ssize_t index_length(const indexObject *self)
100 100 {
101 101 if (self->added == NULL)
102 102 return self->length;
103 103 return self->length + PyList_GET_SIZE(self->added);
104 104 }
105 105
106 106 static PyObject *nullentry = NULL;
107 107 static const char nullid[20] = {0};
108 108 static const Py_ssize_t nullrev = -1;
109 109
110 110 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
111 111
112 112 static int index_find_node(indexObject *self, const char *node,
113 113 Py_ssize_t nodelen);
114 114
115 115 #if LONG_MAX == 0x7fffffffL
116 116 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
117 117 #else
118 118 static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
119 119 #endif
120 120
121 121 /* A RevlogNG v1 index entry is 64 bytes long. */
122 122 static const long v1_hdrsize = 64;
123 123
/*
 * Set mercurial.error.RevlogError as the pending Python exception.
 * If the class cannot be resolved, a SystemError (or the import error)
 * is left pending instead. The exception message is fixed; callers are
 * expected to ignore the value.
 */
static void raise_revlog_error(void)
{
	PyObject *mod = NULL, *dict = NULL, *errclass = NULL;

	mod = PyImport_ImportModule("mercurial.error");
	if (mod == NULL) {
		goto cleanup;
	}

	dict = PyModule_GetDict(mod);
	if (dict == NULL) {
		goto cleanup;
	}
	/* PyModule_GetDict returns a borrowed reference; take our own so
	 * the cleanup path can decref unconditionally. */
	Py_INCREF(dict);

	/* PyDict_GetItemString returns a borrowed reference — no decref. */
	errclass = PyDict_GetItemString(dict, "RevlogError");
	if (errclass == NULL) {
		PyErr_SetString(PyExc_SystemError,
		                "could not find RevlogError");
		goto cleanup;
	}

	/* value of exception is ignored by callers */
	PyErr_SetString(errclass, "RevlogError");

cleanup:
	Py_XDECREF(dict);
	Py_XDECREF(mod);
}
153 153
/*
 * Return a pointer to the beginning of a RevlogNG record.
 *
 * For inline revlogs, entries are not at uniform offsets, so a table of
 * per-revision offsets is built lazily (via inline_scan) on first use.
 * Returns NULL with a Python exception set on failure.
 */
static const char *index_deref(indexObject *self, Py_ssize_t pos)
{
	if (self->inlined && pos > 0) {
		if (self->offsets == NULL) {
			Py_ssize_t ret;
			self->offsets = PyMem_Malloc(self->raw_length *
			                             sizeof(*self->offsets));
			if (self->offsets == NULL)
				return (const char *)PyErr_NoMemory();
			ret = inline_scan(self, self->offsets);
			if (ret == -1) {
				return NULL;
			};
		}
		return self->offsets[pos];
	}

	/* Non-inline index: fixed-size 64-byte records. */
	return (const char *)(self->buf.buf) + pos * v1_hdrsize;
}
176 176
/*
 * Get parents of the given rev.
 *
 * The specified rev must be valid and must not be nullrev. A returned
 * parent revision may be nullrev, but is guaranteed to be in valid range.
 *
 * Returns 0 on success, -1 with an exception set on failure.
 */
static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps,
                                    int maxrev)
{
	if (rev >= self->length) {
		/* Pending entry: parents live in tuple slots 5 and 6. */
		long tmp;
		PyObject *tuple =
		    PyList_GET_ITEM(self->added, rev - self->length);
		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 5), &tmp)) {
			return -1;
		}
		ps[0] = (int)tmp;
		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 6), &tmp)) {
			return -1;
		}
		ps[1] = (int)tmp;
	} else {
		/* On-disk entry: big-endian 32-bit parents at byte offsets
		 * 24 and 28.
		 * NOTE(review): index_deref can return NULL (OOM / scan
		 * failure); this path assumes it cannot fail for a valid
		 * rev — confirm. */
		const char *data = index_deref(self, rev);
		ps[0] = getbe32(data + 24);
		ps[1] = getbe32(data + 28);
	}
	/* If index file is corrupted, ps[] may point to invalid revisions. So
	 * there is a risk of buffer overflow to trust them unconditionally. */
	if (ps[0] < -1 || ps[0] > maxrev || ps[1] < -1 || ps[1] > maxrev) {
		PyErr_SetString(PyExc_ValueError, "parent out of range");
		return -1;
	}
	return 0;
}
211 211
212 212 /*
213 213 * Get parents of the given rev.
214 214 *
215 215 * If the specified rev is out of range, IndexError will be raised. If the
216 216 * revlog entry is corrupted, ValueError may be raised.
217 217 *
218 218 * Returns 0 on success or -1 on failure.
219 219 */
220 220 static int HgRevlogIndex_GetParents(PyObject *op, int rev, int *ps)
221 221 {
222 222 int tiprev;
223 223 if (!op || !HgRevlogIndex_Check(op) || !ps) {
224 224 PyErr_BadInternalCall();
225 225 return -1;
226 226 }
227 227 tiprev = (int)index_length((indexObject *)op) - 1;
228 228 if (rev < -1 || rev > tiprev) {
229 229 PyErr_Format(PyExc_IndexError, "rev out of range: %d", rev);
230 230 return -1;
231 231 } else if (rev == -1) {
232 232 ps[0] = ps[1] = -1;
233 233 return 0;
234 234 } else {
235 235 return index_get_parents((indexObject *)op, rev, ps, tiprev);
236 236 }
237 237 }
238 238
/*
 * Return the start offset of revision `rev` in the data file, i.e. the
 * stored 48-bit offset (offset_flags >> 16). Returns -1 with an
 * exception set on error; nullrev starts at 0.
 */
static inline int64_t index_get_start(indexObject *self, Py_ssize_t rev)
{
	uint64_t offset;
	if (rev == nullrev) {
		return 0;
	}
	if (rev >= self->length) {
		/* Pending entry: offset+flags live in tuple slot 0. */
		PyObject *tuple;
		PyObject *pylong;
		PY_LONG_LONG tmp;
		tuple = PyList_GET_ITEM(self->added, rev - self->length);
		pylong = PyTuple_GET_ITEM(tuple, 0);
		tmp = PyLong_AsLongLong(pylong);
		if (tmp == -1 && PyErr_Occurred()) {
			return -1;
		}
		if (tmp < 0) {
			PyErr_Format(PyExc_OverflowError,
			             "revlog entry size out of bound (%lld)",
			             (long long)tmp);
			return -1;
		}
		offset = (uint64_t)tmp;
	} else {
		const char *data = index_deref(self, rev);
		offset = getbe32(data + 4);
		if (rev == 0) {
			/* mask out version number for the first entry */
			offset &= 0xFFFF;
		} else {
			uint32_t offset_high = getbe32(data);
			offset |= ((uint64_t)offset_high) << 32;
		}
	}
	/* Low 16 bits are flags; the rest is the byte offset. */
	return (int64_t)(offset >> 16);
}
275 275
/*
 * Return the compressed data length of revision `rev` (0 for nullrev),
 * or -1 with an exception set on error/overflow.
 */
static inline int index_get_length(indexObject *self, Py_ssize_t rev)
{
	if (rev == nullrev) {
		return 0;
	}
	if (rev >= self->length) {
		/* Pending entry: compressed length lives in tuple slot 1. */
		PyObject *tuple;
		PyObject *pylong;
		long ret;
		tuple = PyList_GET_ITEM(self->added, rev - self->length);
		pylong = PyTuple_GET_ITEM(tuple, 1);
		ret = PyInt_AsLong(pylong);
		if (ret == -1 && PyErr_Occurred()) {
			return -1;
		}
		if (ret < 0 || ret > (long)INT_MAX) {
			PyErr_Format(PyExc_OverflowError,
			             "revlog entry size out of bound (%ld)",
			             ret);
			return -1;
		}
		return (int)ret;
	} else {
		/* On-disk entry: big-endian 32-bit int at byte offset 8.
		 * A value with the top bit set would read as negative. */
		const char *data = index_deref(self, rev);
		int tmp = (int)getbe32(data + 8);
		if (tmp < 0) {
			PyErr_Format(PyExc_OverflowError,
			             "revlog entry size out of bound (%d)",
			             tmp);
			return -1;
		}
		return tmp;
	}
}
310 310
/*
 * RevlogNG format (all in big endian, data may be inlined):
 * 6 bytes: offset
 * 2 bytes: flags
 * 4 bytes: compressed length
 * 4 bytes: uncompressed length
 * 4 bytes: base revision
 * 4 bytes: link revision
 * 4 bytes: parent 1 revision
 * 4 bytes: parent 2 revision
 * 32 bytes: nodeid (only 20 bytes used)
 */
/*
 * Return the index entry at `pos` as an 8-tuple (new reference).
 * Decoded on-disk entries are cached; out-of-range positions raise
 * IndexError. Returns NULL with an exception set on failure.
 */
static PyObject *index_get(indexObject *self, Py_ssize_t pos)
{
	uint64_t offset_flags;
	int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
	const char *c_node_id;
	const char *data;
	Py_ssize_t length = index_length(self);
	PyObject *entry;

	/* The null revision has a fixed, shared entry. */
	if (pos == nullrev) {
		Py_INCREF(nullentry);
		return nullentry;
	}

	if (pos < 0 || pos >= length) {
		PyErr_SetString(PyExc_IndexError, "revlog index out of range");
		return NULL;
	}

	/* Pending entries are already stored as tuples. */
	if (pos >= self->length) {
		PyObject *obj;
		obj = PyList_GET_ITEM(self->added, pos - self->length);
		Py_INCREF(obj);
		return obj;
	}

	/* Lazily allocate the decoded-tuple cache on first miss. */
	if (self->cache) {
		if (self->cache[pos]) {
			Py_INCREF(self->cache[pos]);
			return self->cache[pos];
		}
	} else {
		self->cache = calloc(self->raw_length, sizeof(PyObject *));
		if (self->cache == NULL)
			return PyErr_NoMemory();
	}

	data = index_deref(self, pos);
	if (data == NULL)
		return NULL;

	offset_flags = getbe32(data + 4);
	if (pos == 0) /* mask out version number for the first entry */
		offset_flags &= 0xFFFF;
	else {
		uint32_t offset_high = getbe32(data);
		offset_flags |= ((uint64_t)offset_high) << 32;
	}

	comp_len = getbe32(data + 8);
	uncomp_len = getbe32(data + 12);
	base_rev = getbe32(data + 16);
	link_rev = getbe32(data + 20);
	parent_1 = getbe32(data + 24);
	parent_2 = getbe32(data + 28);
	c_node_id = data + 32;

	entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
	                      base_rev, link_rev, parent_1, parent_2, c_node_id,
	                      (Py_ssize_t)20);

	if (entry) {
		/* One reference is held by the cache, one is returned.
		 * NOTE(review): the UnTrack presumably avoids GC overhead
		 * for a tuple of scalars/bytes — confirm intent. */
		PyObject_GC_UnTrack(entry);
		Py_INCREF(entry);
	}

	self->cache[pos] = entry;

	return entry;
}
393 393
394 394 /*
395 395 * Return the 20-byte SHA of the node corresponding to the given rev.
396 396 */
397 397 static const char *index_node(indexObject *self, Py_ssize_t pos)
398 398 {
399 399 Py_ssize_t length = index_length(self);
400 400 const char *data;
401 401
402 402 if (pos == nullrev)
403 403 return nullid;
404 404
405 405 if (pos >= length)
406 406 return NULL;
407 407
408 408 if (pos >= self->length) {
409 409 PyObject *tuple, *str;
410 410 tuple = PyList_GET_ITEM(self->added, pos - self->length);
411 411 str = PyTuple_GetItem(tuple, 7);
412 412 return str ? PyBytes_AS_STRING(str) : NULL;
413 413 }
414 414
415 415 data = index_deref(self, pos);
416 416 return data ? data + 32 : NULL;
417 417 }
418 418
419 419 /*
420 420 * Return the 20-byte SHA of the node corresponding to the given rev. The
421 421 * rev is assumed to be existing. If not, an exception is set.
422 422 */
423 423 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
424 424 {
425 425 const char *node = index_node(self, pos);
426 426 if (node == NULL) {
427 427 PyErr_Format(PyExc_IndexError, "could not access rev %d",
428 428 (int)pos);
429 429 }
430 430 return node;
431 431 }
432 432
433 433 static int nt_insert(nodetree *self, const char *node, int rev);
434 434
435 435 static int node_check(PyObject *obj, char **node)
436 436 {
437 437 Py_ssize_t nodelen;
438 438 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
439 439 return -1;
440 440 if (nodelen == 20)
441 441 return 0;
442 442 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
443 443 return -1;
444 444 }
445 445
/*
 * Append a new 8-tuple revlog entry to the in-memory `added` list.
 * Returns None, or NULL with an exception set on error.
 */
static PyObject *index_append(indexObject *self, PyObject *obj)
{
	char *node;
	Py_ssize_t len;

	if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
		PyErr_SetString(PyExc_TypeError, "8-tuple required");
		return NULL;
	}

	/* Slot 7 must hold a 20-byte node hash. */
	if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
		return NULL;

	/* Current length is the rev number the new entry will get. */
	len = index_length(self);

	if (self->added == NULL) {
		self->added = PyList_New(0);
		if (self->added == NULL)
			return NULL;
	}

	if (PyList_Append(self->added, obj) == -1)
		return NULL;

	/* Keep the node->rev trie in sync if it has been built. */
	if (self->ntinitialized)
		nt_insert(&self->nt, node, (int)len);

	/* Any cached head computation is now stale. */
	Py_CLEAR(self->headrevs);
	Py_RETURN_NONE;
}
476 476
/*
 * Return a dict of diagnostic statistics about the index and its node
 * trie (entry counts, lookup/miss counters, trie geometry), or NULL
 * with an exception set on error.
 */
static PyObject *index_stats(indexObject *self)
{
	PyObject *obj = PyDict_New();
	PyObject *s = NULL;
	PyObject *t = NULL;

	if (obj == NULL)
		return NULL;

/* Add one Py_ssize_t-valued field of self to the result dict under the
 * byte-string key __d. Uses the shared s/t temporaries and the bail
 * label for cleanup. */
#define istat(__n, __d)                                                        \
	do {                                                                   \
		s = PyBytes_FromString(__d);                                   \
		t = PyInt_FromSsize_t(self->__n);                              \
		if (!s || !t)                                                  \
			goto bail;                                             \
		if (PyDict_SetItem(obj, s, t) == -1)                           \
			goto bail;                                             \
		Py_CLEAR(s);                                                   \
		Py_CLEAR(t);                                                   \
	} while (0)

	if (self->added) {
		Py_ssize_t len = PyList_GET_SIZE(self->added);
		s = PyBytes_FromString("index entries added");
		t = PyInt_FromSsize_t(len);
		if (!s || !t)
			goto bail;
		if (PyDict_SetItem(obj, s, t) == -1)
			goto bail;
		Py_CLEAR(s);
		Py_CLEAR(t);
	}

	if (self->raw_length != self->length)
		istat(raw_length, "revs on disk");
	istat(length, "revs in memory");
	istat(ntlookups, "node trie lookups");
	istat(ntmisses, "node trie misses");
	istat(ntrev, "node trie last rev scanned");
	if (self->ntinitialized) {
		istat(nt.capacity, "node trie capacity");
		istat(nt.depth, "node trie depth");
		istat(nt.length, "node trie count");
		istat(nt.splits, "node trie splits");
	}

#undef istat

	return obj;

bail:
	Py_XDECREF(obj);
	Py_XDECREF(s);
	Py_XDECREF(t);
	return NULL;
}
533 533
534 534 /*
535 535 * When we cache a list, we want to be sure the caller can't mutate
536 536 * the cached copy.
537 537 */
538 538 static PyObject *list_copy(PyObject *list)
539 539 {
540 540 Py_ssize_t len = PyList_GET_SIZE(list);
541 541 PyObject *newlist = PyList_New(len);
542 542 Py_ssize_t i;
543 543
544 544 if (newlist == NULL)
545 545 return NULL;
546 546
547 547 for (i = 0; i < len; i++) {
548 548 PyObject *obj = PyList_GET_ITEM(list, i);
549 549 Py_INCREF(obj);
550 550 PyList_SET_ITEM(newlist, i, obj);
551 551 }
552 552
553 553 return newlist;
554 554 }
555 555
556 556 static int check_filter(PyObject *filter, Py_ssize_t arg)
557 557 {
558 558 if (filter) {
559 559 PyObject *arglist, *result;
560 560 int isfiltered;
561 561
562 562 arglist = Py_BuildValue("(n)", arg);
563 563 if (!arglist) {
564 564 return -1;
565 565 }
566 566
567 567 result = PyEval_CallObject(filter, arglist);
568 568 Py_DECREF(arglist);
569 569 if (!result) {
570 570 return -1;
571 571 }
572 572
573 573 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
574 574 * same as this function, so we can just return it directly.*/
575 575 isfiltered = PyObject_IsTrue(result);
576 576 Py_DECREF(result);
577 577 return isfiltered;
578 578 } else {
579 579 return 0;
580 580 }
581 581 }
582 582
583 583 static inline void set_phase_from_parents(char *phases, int parent_1,
584 584 int parent_2, Py_ssize_t i)
585 585 {
586 586 if (parent_1 >= 0 && phases[parent_1] > phases[i])
587 587 phases[i] = phases[parent_1];
588 588 if (parent_2 >= 0 && phases[parent_2] > phases[i])
589 589 phases[i] = phases[parent_2];
590 590 }
591 591
/*
 * reachableroots2(minroot, heads, roots, includepath) -> list of revs
 *
 * Walk parents from `heads` (never descending below `minroot`) and
 * collect every member of `roots` that is reached. When `includepath`
 * is True, additionally include every rev lying on a path between a
 * reachable root and a head. Returns a new list, or NULL with an
 * exception set.
 */
static PyObject *reachableroots2(indexObject *self, PyObject *args)
{

	/* Input */
	long minroot;
	PyObject *includepatharg = NULL;
	int includepath = 0;
	/* heads and roots are lists */
	PyObject *heads = NULL;
	PyObject *roots = NULL;
	PyObject *reachable = NULL;

	PyObject *val;
	Py_ssize_t len = index_length(self);
	long revnum;
	Py_ssize_t k;
	Py_ssize_t i;
	Py_ssize_t l;
	int r;
	int parents[2];

	/* Internal data structure:
	 * tovisit: array of length len+1 (all revs + nullrev), filled upto
	 * lentovisit
	 *
	 * revstates: array of length len+1 (all revs + nullrev) */
	int *tovisit = NULL;
	long lentovisit = 0;
	enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
	char *revstates = NULL;

	/* Get arguments */
	if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
	                      &PyList_Type, &roots, &PyBool_Type,
	                      &includepatharg))
		goto bail;

	if (includepatharg == Py_True)
		includepath = 1;

	/* Initialize return set */
	reachable = PyList_New(0);
	if (reachable == NULL)
		goto bail;

	/* Initialize internal datastructures */
	tovisit = (int *)malloc((len + 1) * sizeof(int));
	if (tovisit == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	revstates = (char *)calloc(len + 1, 1);
	if (revstates == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	/* Mark roots. revstates is indexed by rev+1 so nullrev fits at 0. */
	l = PyList_GET_SIZE(roots);
	for (i = 0; i < l; i++) {
		revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
		if (revnum == -1 && PyErr_Occurred())
			goto bail;
		/* If root is out of range, e.g. wdir(), it must be unreachable
		 * from heads. So we can just ignore it. */
		if (revnum + 1 < 0 || revnum + 1 >= len + 1)
			continue;
		revstates[revnum + 1] |= RS_ROOT;
	}

	/* Populate tovisit with all the heads */
	l = PyList_GET_SIZE(heads);
	for (i = 0; i < l; i++) {
		revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
		if (revnum == -1 && PyErr_Occurred())
			goto bail;
		if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
			PyErr_SetString(PyExc_IndexError, "head out of range");
			goto bail;
		}
		if (!(revstates[revnum + 1] & RS_SEEN)) {
			tovisit[lentovisit++] = (int)revnum;
			revstates[revnum + 1] |= RS_SEEN;
		}
	}

	/* Visit the tovisit list and find the reachable roots */
	k = 0;
	while (k < lentovisit) {
		/* Add the node to reachable if it is a root*/
		revnum = tovisit[k++];
		if (revstates[revnum + 1] & RS_ROOT) {
			revstates[revnum + 1] |= RS_REACHABLE;
			val = PyInt_FromLong(revnum);
			if (val == NULL)
				goto bail;
			r = PyList_Append(reachable, val);
			Py_DECREF(val);
			if (r < 0)
				goto bail;
			/* Without includepath the walk stops at roots. */
			if (includepath == 0)
				continue;
		}

		/* Add its parents to the list of nodes to visit */
		if (revnum == nullrev)
			continue;
		r = index_get_parents(self, revnum, parents, (int)len - 1);
		if (r < 0)
			goto bail;
		for (i = 0; i < 2; i++) {
			if (!(revstates[parents[i] + 1] & RS_SEEN) &&
			    parents[i] >= minroot) {
				tovisit[lentovisit++] = parents[i];
				revstates[parents[i] + 1] |= RS_SEEN;
			}
		}
	}

	/* Find all the nodes in between the roots we found and the heads
	 * and add them to the reachable set */
	if (includepath == 1) {
		long minidx = minroot;
		if (minidx < 0)
			minidx = 0;
		/* Revs are scanned in increasing order, so a rev is added
		 * once any of its parents is known reachable. */
		for (i = minidx; i < len; i++) {
			if (!(revstates[i + 1] & RS_SEEN))
				continue;
			r = index_get_parents(self, i, parents, (int)len - 1);
			/* Corrupted index file, error is set from
			 * index_get_parents */
			if (r < 0)
				goto bail;
			if (((revstates[parents[0] + 1] |
			      revstates[parents[1] + 1]) &
			     RS_REACHABLE) &&
			    !(revstates[i + 1] & RS_REACHABLE)) {
				revstates[i + 1] |= RS_REACHABLE;
				val = PyInt_FromSsize_t(i);
				if (val == NULL)
					goto bail;
				r = PyList_Append(reachable, val);
				Py_DECREF(val);
				if (r < 0)
					goto bail;
			}
		}
	}

	free(revstates);
	free(tovisit);
	return reachable;
bail:
	Py_XDECREF(reachable);
	free(revstates);
	free(tovisit);
	return NULL;
}
750 750
/*
 * Mark every revision whose node appears in the set `roots` with
 * `phase` in the phases array, and return the smallest such revision
 * (-1 if none). Returns -2 on error; note that not every -2 path sets
 * a Python exception (an unknown/out-of-range node returns -2 with
 * whatever error state index_find_node left).
 */
static int add_roots_get_min(indexObject *self, PyObject *roots, char *phases,
                             char phase)
{
	Py_ssize_t len = index_length(self);
	PyObject *item;
	PyObject *iterator;
	int rev, minrev = -1;
	char *node;

	if (!PySet_Check(roots)) {
		PyErr_SetString(PyExc_TypeError,
		                "roots must be a set of nodes");
		return -2;
	}
	iterator = PyObject_GetIter(roots);
	if (iterator == NULL)
		return -2;
	while ((item = PyIter_Next(iterator))) {
		if (node_check(item, &node) == -1)
			goto failed;
		rev = index_find_node(self, node, 20);
		/* null is implicitly public, so negative is invalid */
		if (rev < 0 || rev >= len)
			goto failed;
		phases[rev] = phase;
		if (minrev == -1 || minrev > rev)
			minrev = rev;
		Py_DECREF(item);
	}
	/* NOTE(review): if PyIter_Next returned NULL because of an error
	 * (rather than exhaustion), minrev is still returned with an
	 * exception pending — confirm callers tolerate this. */
	Py_DECREF(iterator);
	return minrev;
failed:
	Py_DECREF(iterator);
	Py_DECREF(item);
	return -2;
}
787 787
788 788 static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
789 789 {
790 790 /* 0: public (untracked), 1: draft, 2: secret, 32: archive,
791 791 96: internal */
792 792 static const char trackedphases[] = {1, 2, 32, 96};
793 793 PyObject *ret = NULL;
794 794 PyObject *roots = Py_None;
795 795 PyObject *pyphase = NULL;
796 796 PyObject *pyrev = NULL;
797 797 PyObject *phaseroots = NULL;
798 798 PyObject *phasessize = NULL;
799 799 PyObject *phasesets[4] = {NULL, NULL, NULL, NULL};
800 800 Py_ssize_t len = index_length(self);
801 801 char *phases = NULL;
802 802 int minphaserev = -1, rev, i;
803 803 const int numphases = (int)(sizeof(phasesets) / sizeof(phasesets[0]));
804 804
805 805 if (!PyArg_ParseTuple(args, "O", &roots))
806 806 return NULL;
807 807 if (roots == NULL || !PyDict_Check(roots)) {
808 808 PyErr_SetString(PyExc_TypeError, "roots must be a dictionary");
809 809 return NULL;
810 810 }
811 811
812 812 phases = calloc(len, 1);
813 813 if (phases == NULL) {
814 814 PyErr_NoMemory();
815 815 return NULL;
816 816 }
817 817
818 818 for (i = 0; i < numphases; ++i) {
819 819 pyphase = PyInt_FromLong(trackedphases[i]);
820 820 if (pyphase == NULL)
821 821 goto release;
822 822 phaseroots = PyDict_GetItem(roots, pyphase);
823 823 Py_DECREF(pyphase);
824 824 if (phaseroots == NULL)
825 825 continue;
826 rev = add_roots_get_min(self, phaseroots, phases, trackedphases[i]);
826 rev = add_roots_get_min(self, phaseroots, phases,
827 trackedphases[i]);
827 828 phaseroots = NULL;
828 829 if (rev == -2)
829 830 goto release;
830 831 if (rev != -1 && (minphaserev == -1 || rev < minphaserev))
831 832 minphaserev = rev;
832 833 }
833 834
834 835 for (i = 0; i < numphases; ++i) {
835 836 phasesets[i] = PySet_New(NULL);
836 837 if (phasesets[i] == NULL)
837 838 goto release;
838 839 }
839 840
840 841 if (minphaserev == -1)
841 842 minphaserev = len;
842 843 for (rev = minphaserev; rev < len; ++rev) {
843 844 int parents[2];
844 845 /*
845 846 * The parent lookup could be skipped for phaseroots, but
846 847 * phase --force would historically not recompute them
847 848 * correctly, leaving descendents with a lower phase around.
848 849 * As such, unconditionally recompute the phase.
849 850 */
850 851 if (index_get_parents(self, rev, parents, (int)len - 1) < 0)
851 852 goto release;
852 853 set_phase_from_parents(phases, parents[0], parents[1], rev);
853 854 switch (phases[rev]) {
854 855 case 0:
855 856 continue;
856 857 case 1:
857 858 pyphase = phasesets[0];
858 859 break;
859 860 case 2:
860 861 pyphase = phasesets[1];
861 862 break;
862 863 case 32:
863 864 pyphase = phasesets[2];
864 865 break;
865 866 case 96:
866 867 pyphase = phasesets[3];
867 868 break;
868 869 default:
869 870 goto release;
870 871 }
871 872 pyrev = PyInt_FromLong(rev);
872 873 if (pyrev == NULL)
873 874 goto release;
874 875 if (PySet_Add(pyphase, pyrev) == -1) {
875 876 Py_DECREF(pyrev);
876 877 goto release;
877 878 }
878 879 Py_DECREF(pyrev);
879 880 }
880 881 phaseroots = _dict_new_presized(numphases);
881 882 if (phaseroots == NULL)
882 883 goto release;
883 884 for (i = 0; i < numphases; ++i) {
884 885 pyphase = PyInt_FromLong(trackedphases[i]);
885 886 if (pyphase == NULL)
886 887 goto release;
887 888 if (PyDict_SetItem(phaseroots, pyphase, phasesets[i]) == -1) {
888 889 Py_DECREF(pyphase);
889 890 goto release;
890 891 }
891 892 Py_DECREF(phasesets[i]);
892 893 phasesets[i] = NULL;
893 894 }
894 895 phasessize = PyInt_FromSsize_t(len);
895 896 if (phasessize == NULL)
896 897 goto release;
897 898
898 899 ret = PyTuple_Pack(2, phasessize, phaseroots);
899 900 Py_DECREF(phasessize);
900 901 Py_DECREF(phaseroots);
901 902 return ret;
902 903
903 904 release:
904 905 for (i = 0; i < numphases; ++i)
905 906 Py_XDECREF(phasesets[i]);
906 907 Py_XDECREF(phaseroots);
907 908
908 909 free(phases);
909 910 return NULL;
910 911 }
911 912
/*
 * index_headrevs([filteredrevs]) -> list of head revisions
 *
 * A head is a revision with no unfiltered children. The result is
 * cached on the index (invalidated by index_append and by a change of
 * filter object); callers always receive a copy of the cached list.
 */
static PyObject *index_headrevs(indexObject *self, PyObject *args)
{
	Py_ssize_t i, j, len;
	char *nothead = NULL;
	PyObject *heads = NULL;
	PyObject *filter = NULL;
	PyObject *filteredrevs = Py_None;

	if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
		return NULL;
	}

	/* Cache hit only when the very same filter object is passed. */
	if (self->headrevs && filteredrevs == self->filteredrevs)
		return list_copy(self->headrevs);

	Py_DECREF(self->filteredrevs);
	self->filteredrevs = filteredrevs;
	Py_INCREF(filteredrevs);

	if (filteredrevs != Py_None) {
		filter = PyObject_GetAttrString(filteredrevs, "__contains__");
		if (!filter) {
			PyErr_SetString(
			    PyExc_TypeError,
			    "filteredrevs has no attribute __contains__");
			goto bail;
		}
	}

	len = index_length(self);
	heads = PyList_New(0);
	if (heads == NULL)
		goto bail;
	if (len == 0) {
		/* Empty repo: nullrev is the only head. (This local
		 * `nullid` shadows the file-level node-hash constant of
		 * the same name.) */
		PyObject *nullid = PyInt_FromLong(-1);
		if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
			Py_XDECREF(nullid);
			goto bail;
		}
		goto done;
	}

	/* nothead[i] == 1 once rev i is known not to be a head: either
	 * it is filtered, or an unfiltered child of it has been seen. */
	nothead = calloc(len, 1);
	if (nothead == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	for (i = len - 1; i >= 0; i--) {
		int isfiltered;
		int parents[2];

		/* If nothead[i] == 1, it means we've seen an unfiltered child
		 * of this node already, and therefore this node is not
		 * filtered. So we can skip the expensive check_filter step.
		 */
		if (nothead[i] != 1) {
			isfiltered = check_filter(filter, i);
			if (isfiltered == -1) {
				PyErr_SetString(PyExc_TypeError,
				                "unable to check filter");
				goto bail;
			}

			if (isfiltered) {
				nothead[i] = 1;
				continue;
			}
		}

		if (index_get_parents(self, i, parents, (int)len - 1) < 0)
			goto bail;
		for (j = 0; j < 2; j++) {
			if (parents[j] >= 0)
				nothead[parents[j]] = 1;
		}
	}

	for (i = 0; i < len; i++) {
		PyObject *head;

		if (nothead[i])
			continue;
		head = PyInt_FromSsize_t(i);
		if (head == NULL || PyList_Append(heads, head) == -1) {
			Py_XDECREF(head);
			goto bail;
		}
	}

done:
	self->headrevs = heads;
	Py_XDECREF(filter);
	free(nothead);
	return list_copy(self->headrevs);
bail:
	Py_XDECREF(filter);
	Py_XDECREF(heads);
	free(nothead);
	return NULL;
}
1013 1014
/**
 * Obtain the base revision index entry.
 *
 * Callers must ensure that rev >= 0 or illegal memory access may occur.
 *
 * Returns the base revision (-1 for a full snapshot), or -2 with an
 * exception set on error or detected corruption.
 */
static inline int index_baserev(indexObject *self, int rev)
{
	const char *data;
	int result;

	if (rev >= self->length) {
		/* Pending entry: base revision lives in tuple slot 3. */
		PyObject *tuple =
		    PyList_GET_ITEM(self->added, rev - self->length);
		long ret;
		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 3), &ret)) {
			return -2;
		}
		result = (int)ret;
	} else {
		data = index_deref(self, rev);
		if (data == NULL) {
			return -2;
		}

		/* On-disk entry: big-endian 32-bit int at byte offset 16. */
		result = getbe32(data + 16);
	}
	/* Guard against index corruption: a base must lie in [-1, rev]. */
	if (result > rev) {
		PyErr_Format(
		    PyExc_ValueError,
		    "corrupted revlog, revision base above revision: %d, %d",
		    rev, result);
		return -2;
	}
	if (result < -1) {
		PyErr_Format(
		    PyExc_ValueError,
		    "corrupted revlog, revision base out of range: %d, %d", rev,
		    result);
		return -2;
	}
	return result;
}
1056 1057
1057 1058 /**
1058 1059 * Find if a revision is a snapshot or not
1059 1060 *
1060 1061 * Only relevant for sparse-revlog case.
1061 1062 * Callers must ensure that rev is in a valid range.
1062 1063 */
static int index_issnapshotrev(indexObject *self, Py_ssize_t rev)
{
	int ps[2];
	Py_ssize_t base;
	/* Walk the delta chain towards its root: a revision is a snapshot
	 * iff no link of its chain is a delta against one of its parents. */
	while (rev >= 0) {
		base = (Py_ssize_t)index_baserev(self, rev);
		if (base == rev) {
			/* self-based delta encodes a full snapshot */
			base = -1;
		}
		if (base == -2) {
			assert(PyErr_Occurred());
			return -1;
		}
		if (base == -1) {
			/* reached a full text: everything above was
			 * intermediate snapshots */
			return 1;
		}
		if (index_get_parents(self, rev, ps, (int)rev) < 0) {
			assert(PyErr_Occurred());
			return -1;
		};
		if (base == ps[0] || base == ps[1]) {
			/* delta against a parent: a plain delta, not a
			 * snapshot */
			return 0;
		}
		rev = base;
	}
	return rev == -1;
}
1090 1091
1091 1092 static PyObject *index_issnapshot(indexObject *self, PyObject *value)
1092 1093 {
1093 1094 long rev;
1094 1095 int issnap;
1095 1096 Py_ssize_t length = index_length(self);
1096 1097
1097 1098 if (!pylong_to_long(value, &rev)) {
1098 1099 return NULL;
1099 1100 }
1100 1101 if (rev < -1 || rev >= length) {
1101 1102 PyErr_Format(PyExc_ValueError, "revlog index out of range: %ld",
1102 1103 rev);
1103 1104 return NULL;
1104 1105 };
1105 1106 issnap = index_issnapshotrev(self, (Py_ssize_t)rev);
1106 1107 if (issnap < 0) {
1107 1108 return NULL;
1108 1109 };
1109 1110 return PyBool_FromLong((long)issnap);
1110 1111 }
1111 1112
1112 1113 static PyObject *index_findsnapshots(indexObject *self, PyObject *args)
1113 1114 {
1114 1115 Py_ssize_t start_rev;
1115 1116 PyObject *cache;
1116 1117 Py_ssize_t base;
1117 1118 Py_ssize_t rev;
1118 1119 PyObject *key = NULL;
1119 1120 PyObject *value = NULL;
1120 1121 const Py_ssize_t length = index_length(self);
1121 1122 if (!PyArg_ParseTuple(args, "O!n", &PyDict_Type, &cache, &start_rev)) {
1122 1123 return NULL;
1123 1124 }
1124 1125 for (rev = start_rev; rev < length; rev++) {
1125 1126 int issnap;
1126 1127 PyObject *allvalues = NULL;
1127 1128 issnap = index_issnapshotrev(self, rev);
1128 1129 if (issnap < 0) {
1129 1130 goto bail;
1130 1131 }
1131 1132 if (issnap == 0) {
1132 1133 continue;
1133 1134 }
1134 1135 base = (Py_ssize_t)index_baserev(self, rev);
1135 1136 if (base == rev) {
1136 1137 base = -1;
1137 1138 }
1138 1139 if (base == -2) {
1139 1140 assert(PyErr_Occurred());
1140 1141 goto bail;
1141 1142 }
1142 1143 key = PyInt_FromSsize_t(base);
1143 1144 allvalues = PyDict_GetItem(cache, key);
1144 1145 if (allvalues == NULL && PyErr_Occurred()) {
1145 1146 goto bail;
1146 1147 }
1147 1148 if (allvalues == NULL) {
1148 1149 int r;
1149 1150 allvalues = PyList_New(0);
1150 1151 if (!allvalues) {
1151 1152 goto bail;
1152 1153 }
1153 1154 r = PyDict_SetItem(cache, key, allvalues);
1154 1155 Py_DECREF(allvalues);
1155 1156 if (r < 0) {
1156 1157 goto bail;
1157 1158 }
1158 1159 }
1159 1160 value = PyInt_FromSsize_t(rev);
1160 1161 if (PyList_Append(allvalues, value)) {
1161 1162 goto bail;
1162 1163 }
1163 1164 Py_CLEAR(key);
1164 1165 Py_CLEAR(value);
1165 1166 }
1166 1167 Py_RETURN_NONE;
1167 1168 bail:
1168 1169 Py_XDECREF(key);
1169 1170 Py_XDECREF(value);
1170 1171 return NULL;
1171 1172 }
1172 1173
1173 1174 static PyObject *index_deltachain(indexObject *self, PyObject *args)
1174 1175 {
1175 1176 int rev, generaldelta;
1176 1177 PyObject *stoparg;
1177 1178 int stoprev, iterrev, baserev = -1;
1178 1179 int stopped;
1179 1180 PyObject *chain = NULL, *result = NULL;
1180 1181 const Py_ssize_t length = index_length(self);
1181 1182
1182 1183 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
1183 1184 return NULL;
1184 1185 }
1185 1186
1186 1187 if (PyInt_Check(stoparg)) {
1187 1188 stoprev = (int)PyInt_AsLong(stoparg);
1188 1189 if (stoprev == -1 && PyErr_Occurred()) {
1189 1190 return NULL;
1190 1191 }
1191 1192 } else if (stoparg == Py_None) {
1192 1193 stoprev = -2;
1193 1194 } else {
1194 1195 PyErr_SetString(PyExc_ValueError,
1195 1196 "stoprev must be integer or None");
1196 1197 return NULL;
1197 1198 }
1198 1199
1199 1200 if (rev < 0 || rev >= length) {
1200 1201 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1201 1202 return NULL;
1202 1203 }
1203 1204
1204 1205 chain = PyList_New(0);
1205 1206 if (chain == NULL) {
1206 1207 return NULL;
1207 1208 }
1208 1209
1209 1210 baserev = index_baserev(self, rev);
1210 1211
1211 1212 /* This should never happen. */
1212 1213 if (baserev <= -2) {
1213 1214 /* Error should be set by index_deref() */
1214 1215 assert(PyErr_Occurred());
1215 1216 goto bail;
1216 1217 }
1217 1218
1218 1219 iterrev = rev;
1219 1220
1220 1221 while (iterrev != baserev && iterrev != stoprev) {
1221 1222 PyObject *value = PyInt_FromLong(iterrev);
1222 1223 if (value == NULL) {
1223 1224 goto bail;
1224 1225 }
1225 1226 if (PyList_Append(chain, value)) {
1226 1227 Py_DECREF(value);
1227 1228 goto bail;
1228 1229 }
1229 1230 Py_DECREF(value);
1230 1231
1231 1232 if (generaldelta) {
1232 1233 iterrev = baserev;
1233 1234 } else {
1234 1235 iterrev--;
1235 1236 }
1236 1237
1237 1238 if (iterrev < 0) {
1238 1239 break;
1239 1240 }
1240 1241
1241 1242 if (iterrev >= length) {
1242 1243 PyErr_SetString(PyExc_IndexError,
1243 1244 "revision outside index");
1244 1245 return NULL;
1245 1246 }
1246 1247
1247 1248 baserev = index_baserev(self, iterrev);
1248 1249
1249 1250 /* This should never happen. */
1250 1251 if (baserev <= -2) {
1251 1252 /* Error should be set by index_deref() */
1252 1253 assert(PyErr_Occurred());
1253 1254 goto bail;
1254 1255 }
1255 1256 }
1256 1257
1257 1258 if (iterrev == stoprev) {
1258 1259 stopped = 1;
1259 1260 } else {
1260 1261 PyObject *value = PyInt_FromLong(iterrev);
1261 1262 if (value == NULL) {
1262 1263 goto bail;
1263 1264 }
1264 1265 if (PyList_Append(chain, value)) {
1265 1266 Py_DECREF(value);
1266 1267 goto bail;
1267 1268 }
1268 1269 Py_DECREF(value);
1269 1270
1270 1271 stopped = 0;
1271 1272 }
1272 1273
1273 1274 if (PyList_Reverse(chain)) {
1274 1275 goto bail;
1275 1276 }
1276 1277
1277 1278 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
1278 1279 Py_DECREF(chain);
1279 1280 return result;
1280 1281
1281 1282 bail:
1282 1283 Py_DECREF(chain);
1283 1284 return NULL;
1284 1285 }
1285 1286
static inline int64_t
index_segment_span(indexObject *self, Py_ssize_t start_rev, Py_ssize_t end_rev)
{
	/* Number of bytes the on-disk segment [start_rev, end_rev] spans:
	 * from the first byte of start_rev to the last byte of end_rev.
	 * Returns -1 on error (exception set). */
	int64_t start_offset;
	int64_t end_offset;
	int end_size;
	start_offset = index_get_start(self, start_rev);
	if (start_offset < 0) {
		return -1;
	}
	end_offset = index_get_start(self, end_rev);
	if (end_offset < 0) {
		return -1;
	}
	end_size = index_get_length(self, end_rev);
	if (end_size < 0) {
		return -1;
	}
	/* offsets must be monotonic or the index is corrupted */
	if (end_offset < start_offset) {
		PyErr_Format(PyExc_ValueError,
		             "corrupted revlog index: inconsistent offset "
		             "between revisions (%zd) and (%zd)",
		             start_rev, end_rev);
		return -1;
	}
	return (end_offset - start_offset) + (int64_t)end_size;
}
1313 1314
1314 1315 /* returns endidx so that revs[startidx:endidx] has no empty trailing revs */
1315 1316 static Py_ssize_t trim_endidx(indexObject *self, const Py_ssize_t *revs,
1316 1317 Py_ssize_t startidx, Py_ssize_t endidx)
1317 1318 {
1318 1319 int length;
1319 1320 while (endidx > 1 && endidx > startidx) {
1320 1321 length = index_get_length(self, revs[endidx - 1]);
1321 1322 if (length < 0) {
1322 1323 return -1;
1323 1324 }
1324 1325 if (length != 0) {
1325 1326 break;
1326 1327 }
1327 1328 endidx -= 1;
1328 1329 }
1329 1330 return endidx;
1330 1331 }
1331 1332
struct Gap {
	int64_t size;   /* width of the unread hole, in bytes */
	Py_ssize_t idx; /* position in the rev chain right after the gap */
};
1336 1337
1337 1338 static int gap_compare(const void *left, const void *right)
1338 1339 {
1339 1340 const struct Gap *l_left = ((const struct Gap *)left);
1340 1341 const struct Gap *l_right = ((const struct Gap *)right);
1341 1342 if (l_left->size < l_right->size) {
1342 1343 return -1;
1343 1344 } else if (l_left->size > l_right->size) {
1344 1345 return 1;
1345 1346 }
1346 1347 return 0;
1347 1348 }
1348 1349 static int Py_ssize_t_compare(const void *left, const void *right)
1349 1350 {
1350 1351 const Py_ssize_t l_left = *(const Py_ssize_t *)left;
1351 1352 const Py_ssize_t l_right = *(const Py_ssize_t *)right;
1352 1353 if (l_left < l_right) {
1353 1354 return -1;
1354 1355 } else if (l_left > l_right) {
1355 1356 return 1;
1356 1357 }
1357 1358 return 0;
1358 1359 }
1359 1360
static PyObject *index_slicechunktodensity(indexObject *self, PyObject *args)
{
	/* slicechunktodensity(revs, targetdensity, mingapsize):
	 * split the delta chain `revs` on its largest on-disk gaps until
	 * the ratio of useful payload over bytes read reaches
	 * `targetdensity`; gaps smaller than `mingapsize` are ignored.
	 * Returns a sequence of lists of revisions. */
	/* method arguments */
	PyObject *list_revs = NULL; /* revisions in the chain */
	double targetdensity = 0;   /* min density to achieve */
	Py_ssize_t mingapsize = 0;  /* threshold to ignore gaps */

	/* other core variables */
	Py_ssize_t idxlen = index_length(self);
	Py_ssize_t i;            /* used for various iteration */
	PyObject *result = NULL; /* the final return of the function */

	/* generic information about the delta chain being slice */
	Py_ssize_t num_revs = 0;    /* size of the full delta chain */
	Py_ssize_t *revs = NULL;    /* native array of revision in the chain */
	int64_t chainpayload = 0;   /* sum of all delta in the chain */
	int64_t deltachainspan = 0; /* distance from first byte to last byte */

	/* variable used for slicing the delta chain */
	int64_t readdata = 0; /* amount of data currently planned to be read */
	double density = 0;   /* ration of payload data compared to read ones */
	int64_t previous_end;
	struct Gap *gaps = NULL; /* array of notable gap in the chain */
	Py_ssize_t num_gaps =
	    0; /* total number of notable gap recorded so far */
	Py_ssize_t *selected_indices = NULL; /* indices of gap skipped over */
	Py_ssize_t num_selected = 0;         /* number of gaps skipped */
	PyObject *chunk = NULL;              /* individual slice */
	PyObject *allchunks = NULL;          /* all slices */
	Py_ssize_t previdx;

	/* parsing argument */
	if (!PyArg_ParseTuple(args, "O!dn", &PyList_Type, &list_revs,
	                      &targetdensity, &mingapsize)) {
		goto bail;
	}

	/* If the delta chain contains a single element, we do not need slicing
	 */
	num_revs = PyList_GET_SIZE(list_revs);
	if (num_revs <= 1) {
		result = PyTuple_Pack(1, list_revs);
		goto done;
	}

	/* Turn the python list into a native integer array (for efficiency) */
	revs = (Py_ssize_t *)calloc(num_revs, sizeof(Py_ssize_t));
	if (revs == NULL) {
		PyErr_NoMemory();
		goto bail;
	}
	for (i = 0; i < num_revs; i++) {
		Py_ssize_t revnum = PyInt_AsLong(PyList_GET_ITEM(list_revs, i));
		if (revnum == -1 && PyErr_Occurred()) {
			goto bail;
		}
		if (revnum < nullrev || revnum >= idxlen) {
			PyErr_Format(PyExc_IndexError,
			             "index out of range: %zd", revnum);
			goto bail;
		}
		revs[i] = revnum;
	}

	/* Compute and check various property of the unsliced delta chain */
	deltachainspan = index_segment_span(self, revs[0], revs[num_revs - 1]);
	if (deltachainspan < 0) {
		goto bail;
	}

	if (deltachainspan <= mingapsize) {
		result = PyTuple_Pack(1, list_revs);
		goto done;
	}
	chainpayload = 0;
	for (i = 0; i < num_revs; i++) {
		int tmp = index_get_length(self, revs[i]);
		if (tmp < 0) {
			goto bail;
		}
		chainpayload += tmp;
	}

	readdata = deltachainspan;
	density = 1.0;

	if (0 < deltachainspan) {
		density = (double)chainpayload / (double)deltachainspan;
	}

	if (density >= targetdensity) {
		/* dense enough already: return the chain as a single slice */
		result = PyTuple_Pack(1, list_revs);
		goto done;
	}

	/* if chain is too sparse, look for relevant gaps */
	gaps = (struct Gap *)calloc(num_revs, sizeof(struct Gap));
	if (gaps == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	previous_end = -1;
	for (i = 0; i < num_revs; i++) {
		int64_t revstart;
		int revsize;
		revstart = index_get_start(self, revs[i]);
		if (revstart < 0) {
			goto bail;
		};
		revsize = index_get_length(self, revs[i]);
		if (revsize < 0) {
			goto bail;
		};
		if (revsize == 0) {
			/* empty revisions do not move the read cursor */
			continue;
		}
		if (previous_end >= 0) {
			int64_t gapsize = revstart - previous_end;
			if (gapsize > mingapsize) {
				gaps[num_gaps].size = gapsize;
				gaps[num_gaps].idx = i;
				num_gaps += 1;
			}
		}
		previous_end = revstart + revsize;
	}
	if (num_gaps == 0) {
		result = PyTuple_Pack(1, list_revs);
		goto done;
	}
	qsort(gaps, num_gaps, sizeof(struct Gap), &gap_compare);

	/* Slice the largest gap first, they improve the density the most */
	selected_indices =
	    (Py_ssize_t *)malloc((num_gaps + 1) * sizeof(Py_ssize_t));
	if (selected_indices == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	for (i = num_gaps - 1; i >= 0; i--) {
		selected_indices[num_selected] = gaps[i].idx;
		readdata -= gaps[i].size;
		num_selected += 1;
		if (readdata <= 0) {
			density = 1.0;
		} else {
			density = (double)chainpayload / (double)readdata;
		}
		if (density >= targetdensity) {
			break;
		}
	}
	/* restore chain order before cutting it into slices */
	qsort(selected_indices, num_selected, sizeof(Py_ssize_t),
	      &Py_ssize_t_compare);

	/* create the resulting slice */
	allchunks = PyList_New(0);
	if (allchunks == NULL) {
		goto bail;
	}
	previdx = 0;
	/* sentinel so the final slice reaches the end of the chain */
	selected_indices[num_selected] = num_revs;
	for (i = 0; i <= num_selected; i++) {
		Py_ssize_t idx = selected_indices[i];
		Py_ssize_t endidx = trim_endidx(self, revs, previdx, idx);
		if (endidx < 0) {
			goto bail;
		}
		if (previdx < endidx) {
			chunk = PyList_GetSlice(list_revs, previdx, endidx);
			if (chunk == NULL) {
				goto bail;
			}
			if (PyList_Append(allchunks, chunk) == -1) {
				goto bail;
			}
			Py_DECREF(chunk);
			chunk = NULL;
		}
		previdx = idx;
	}
	result = allchunks;
	goto done;

bail:
	Py_XDECREF(allchunks);
	Py_XDECREF(chunk);
done:
	free(revs);
	free(gaps);
	free(selected_indices);
	return result;
}
1555 1556
1556 1557 static inline int nt_level(const char *node, Py_ssize_t level)
1557 1558 {
1558 1559 int v = node[level >> 1];
1559 1560 if (!(level & 1))
1560 1561 v >>= 4;
1561 1562 return v & 0xf;
1562 1563 }
1563 1564
1564 1565 /*
1565 1566 * Return values:
1566 1567 *
1567 1568 * -4: match is ambiguous (multiple candidates)
1568 1569 * -2: not found
1569 1570 * rest: valid rev
1570 1571 */
static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
                   int hex)
{
	/* `hex` selects the input encoding: hex string (one nybble per
	 * char) vs binary node (two nybbles per byte). */
	int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
	int level, maxlevel, off;

	/* cheap first-byte test avoids memcmp for most lookups */
	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
		return -1;

	if (hex)
		maxlevel = nodelen > 40 ? 40 : (int)nodelen;
	else
		maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);

	for (level = off = 0; level < maxlevel; level++) {
		int k = getnybble(node, level);
		nodetreenode *n = &self->nodes[off];
		int v = n->children[k];

		if (v < 0) {
			/* leaf: decode the revision and check that the
			 * remaining nybbles of the query match its node */
			const char *n;
			Py_ssize_t i;

			v = -(v + 2);
			n = index_node(self->index, v);
			if (n == NULL)
				return -2;
			for (i = level; i < maxlevel; i++)
				if (getnybble(node, i) != nt_level(n, i))
					return -2;
			return v;
		}
		if (v == 0)
			return -2;
		/* interior node: descend */
		off = v;
	}
	/* multiple matches against an ambiguous prefix */
	return -4;
}
1610 1611
static int nt_new(nodetree *self)
{
	/* Hand out one fresh zeroed node from the pool, doubling the
	 * backing array when exhausted.  Returns the new node's index,
	 * or -1 on error (exception set). */
	if (self->length == self->capacity) {
		unsigned newcapacity;
		nodetreenode *newnodes;
		newcapacity = self->capacity * 2;
		if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
			PyErr_SetString(PyExc_MemoryError,
			                "overflow in nt_new");
			return -1;
		}
		newnodes =
		    realloc(self->nodes, newcapacity * sizeof(nodetreenode));
		if (newnodes == NULL) {
			PyErr_SetString(PyExc_MemoryError, "out of memory");
			return -1;
		}
		self->capacity = newcapacity;
		self->nodes = newnodes;
		/* zero only the freshly grown tail of the array */
		memset(&self->nodes[self->length], 0,
		       sizeof(nodetreenode) * (self->capacity - self->length));
	}
	return self->length++;
}
1635 1636
static int nt_insert(nodetree *self, const char *node, int rev)
{
	/* Insert a 20-byte binary `node` mapping to `rev` into the trie.
	 * Returns 0 on success, -1 on error (exception set). */
	int level = 0;
	int off = 0;

	while (level < 40) {
		int k = nt_level(node, level);
		nodetreenode *n;
		int v;

		n = &self->nodes[off];
		v = n->children[k];

		if (v == 0) {
			/* free slot: store the leaf right here */
			n->children[k] = -rev - 2;
			return 0;
		}
		if (v < 0) {
			/* slot holds another leaf: either same node
			 * (overwrite) or split, pushing the old leaf one
			 * level down */
			const char *oldnode =
			    index_node_existing(self->index, -(v + 2));
			int noff;

			if (oldnode == NULL)
				return -1;
			if (!memcmp(oldnode, node, 20)) {
				n->children[k] = -rev - 2;
				return 0;
			}
			noff = nt_new(self);
			if (noff == -1)
				return -1;
			/* self->nodes may have been changed by realloc */
			self->nodes[off].children[k] = noff;
			off = noff;
			n = &self->nodes[off];
			n->children[nt_level(oldnode, ++level)] = v;
			if (level > self->depth)
				self->depth = level;
			self->splits += 1;
		} else {
			/* interior node: keep walking down */
			level += 1;
			off = v;
		}
	}

	/* more than 40 levels would mean a node longer than 20 bytes */
	return -1;
}
1683 1684
1684 1685 static PyObject *ntobj_insert(nodetreeObject *self, PyObject *args)
1685 1686 {
1686 1687 Py_ssize_t rev;
1687 1688 const char *node;
1688 1689 Py_ssize_t length;
1689 1690 if (!PyArg_ParseTuple(args, "n", &rev))
1690 1691 return NULL;
1691 1692 length = index_length(self->nt.index);
1692 1693 if (rev < 0 || rev >= length) {
1693 1694 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1694 1695 return NULL;
1695 1696 }
1696 1697 node = index_node_existing(self->nt.index, rev);
1697 1698 if (nt_insert(&self->nt, node, (int)rev) == -1)
1698 1699 return NULL;
1699 1700 Py_RETURN_NONE;
1700 1701 }
1701 1702
1702 1703 static int nt_delete_node(nodetree *self, const char *node)
1703 1704 {
1704 1705 /* rev==-2 happens to get encoded as 0, which is interpreted as not set
1705 1706 */
1706 1707 return nt_insert(self, node, -2);
1707 1708 }
1708 1709
static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
{
	/* Set up an empty trie sized for roughly `capacity` revisions.
	 * Returns 0 on success, -1 on error (exception set); the caller
	 * owns the reference to `index`. */
	/* Initialize before overflow-checking to avoid nt_dealloc() crash. */
	self->nodes = NULL;

	self->index = index;
	/* The input capacity is in terms of revisions, while the field is in
	 * terms of nodetree nodes. */
	self->capacity = (capacity < 4 ? 4 : capacity / 2);
	self->depth = 0;
	self->splits = 0;
	if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
		PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
		return -1;
	}
	self->nodes = calloc(self->capacity, sizeof(nodetreenode));
	if (self->nodes == NULL) {
		PyErr_NoMemory();
		return -1;
	}
	/* node 0 is the (initially empty) root */
	self->length = 1;
	return 0;
}
1732 1733
static int ntobj_init(nodetreeObject *self, PyObject *args)
{
	/* tp_init for parsers.nodetree(index, capacity).  Takes a strong
	 * reference on `index` (released in ntobj_dealloc). */
	PyObject *index;
	unsigned capacity;
	if (!PyArg_ParseTuple(args, "O!I", &HgRevlogIndex_Type, &index,
	                      &capacity))
		return -1;
	Py_INCREF(index);
	return nt_init(&self->nt, (indexObject *)index, capacity);
}
1743 1744
1744 1745 static int nt_partialmatch(nodetree *self, const char *node, Py_ssize_t nodelen)
1745 1746 {
1746 1747 return nt_find(self, node, nodelen, 1);
1747 1748 }
1748 1749
1749 1750 /*
1750 1751 * Find the length of the shortest unique prefix of node.
1751 1752 *
1752 1753 * Return values:
1753 1754 *
1754 1755 * -3: error (exception set)
1755 1756 * -2: not found (no exception set)
1756 1757 * rest: length of shortest prefix
1757 1758 */
static int nt_shortest(nodetree *self, const char *node)
{
	/* Walk the trie along `node` until we hit a leaf: the depth
	 * reached there is the shortest unique prefix length. */
	int level, off;

	for (level = off = 0; level < 40; level++) {
		int k, v;
		nodetreenode *n = &self->nodes[off];
		k = nt_level(node, level);
		v = n->children[k];
		if (v < 0) {
			const char *n;
			v = -(v + 2);
			n = index_node_existing(self->index, v);
			if (n == NULL)
				return -3;
			if (memcmp(node, n, 20) != 0)
				/*
				 * Found a unique prefix, but it wasn't for the
				 * requested node (i.e the requested node does
				 * not exist).
				 */
				return -2;
			return level + 1;
		}
		if (v == 0)
			return -2;
		off = v;
	}
	/*
	 * The node was still not unique after 40 hex digits, so this won't
	 * happen. Also, if we get here, then there's a programming error in
	 * this file that made us insert a node longer than 40 hex digits.
	 */
	PyErr_SetString(PyExc_Exception, "broken node tree");
	return -3;
}
1794 1795
1795 1796 static PyObject *ntobj_shortest(nodetreeObject *self, PyObject *args)
1796 1797 {
1797 1798 PyObject *val;
1798 1799 char *node;
1799 1800 int length;
1800 1801
1801 1802 if (!PyArg_ParseTuple(args, "O", &val))
1802 1803 return NULL;
1803 1804 if (node_check(val, &node) == -1)
1804 1805 return NULL;
1805 1806
1806 1807 length = nt_shortest(&self->nt, node);
1807 1808 if (length == -3)
1808 1809 return NULL;
1809 1810 if (length == -2) {
1810 1811 raise_revlog_error();
1811 1812 return NULL;
1812 1813 }
1813 1814 return PyInt_FromLong(length);
1814 1815 }
1815 1816
1816 1817 static void nt_dealloc(nodetree *self)
1817 1818 {
1818 1819 free(self->nodes);
1819 1820 self->nodes = NULL;
1820 1821 }
1821 1822
1822 1823 static void ntobj_dealloc(nodetreeObject *self)
1823 1824 {
1824 1825 Py_XDECREF(self->nt.index);
1825 1826 nt_dealloc(&self->nt);
1826 1827 PyObject_Del(self);
1827 1828 }
1828 1829
/* Methods exposed on the Python-level parsers.nodetree object. */
static PyMethodDef ntobj_methods[] = {
    {"insert", (PyCFunction)ntobj_insert, METH_VARARGS,
     "insert an index entry"},
    {"shortest", (PyCFunction)ntobj_shortest, METH_VARARGS,
     "find length of shortest hex nodeid of a binary ID"},
    {NULL} /* Sentinel */
};
1836 1837
/* Type object for the stand-alone parsers.nodetree wrapper; slot order
 * follows the PyTypeObject layout and must not be rearranged. */
static PyTypeObject nodetreeType = {
    PyVarObject_HEAD_INIT(NULL, 0) /* header */
    "parsers.nodetree",            /* tp_name */
    sizeof(nodetreeObject),        /* tp_basicsize */
    0,                             /* tp_itemsize */
    (destructor)ntobj_dealloc,     /* tp_dealloc */
    0,                             /* tp_print */
    0,                             /* tp_getattr */
    0,                             /* tp_setattr */
    0,                             /* tp_compare */
    0,                             /* tp_repr */
    0,                             /* tp_as_number */
    0,                             /* tp_as_sequence */
    0,                             /* tp_as_mapping */
    0,                             /* tp_hash */
    0,                             /* tp_call */
    0,                             /* tp_str */
    0,                             /* tp_getattro */
    0,                             /* tp_setattro */
    0,                             /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,            /* tp_flags */
    "nodetree",                    /* tp_doc */
    0,                             /* tp_traverse */
    0,                             /* tp_clear */
    0,                             /* tp_richcompare */
    0,                             /* tp_weaklistoffset */
    0,                             /* tp_iter */
    0,                             /* tp_iternext */
    ntobj_methods,                 /* tp_methods */
    0,                             /* tp_members */
    0,                             /* tp_getset */
    0,                             /* tp_base */
    0,                             /* tp_dict */
    0,                             /* tp_descr_get */
    0,                             /* tp_descr_set */
    0,                             /* tp_dictoffset */
    (initproc)ntobj_init,          /* tp_init */
    0,                             /* tp_alloc */
};
1876 1877
static int index_init_nt(indexObject *self)
{
	/* Lazily create the index's radix tree and seed it with the null
	 * node.  Returns 0 on success, -1 on error (exception set). */
	if (!self->ntinitialized) {
		if (nt_init(&self->nt, self, (int)self->raw_length) == -1) {
			nt_dealloc(&self->nt);
			return -1;
		}
		if (nt_insert(&self->nt, nullid, -1) == -1) {
			nt_dealloc(&self->nt);
			return -1;
		}
		self->ntinitialized = 1;
		/* revisions below ntrev are not yet cached in the tree */
		self->ntrev = (int)index_length(self);
		self->ntlookups = 1;
		self->ntmisses = 0;
	}
	return 0;
}
1895 1896
1896 1897 /*
1897 1898 * Return values:
1898 1899 *
1899 1900 * -3: error (exception set)
1900 1901 * -2: not found (no exception set)
1901 1902 * rest: valid rev
1902 1903 */
static int index_find_node(indexObject *self, const char *node,
                           Py_ssize_t nodelen)
{
	/* Resolve a binary node (or prefix) to a revision, consulting the
	 * radix tree first and falling back to an index scan. */
	int rev;

	if (index_init_nt(self) == -1)
		return -3;

	self->ntlookups++;
	rev = nt_find(&self->nt, node, nodelen, 0);
	if (rev >= -1)
		return rev;

	/*
	 * For the first handful of lookups, we scan the entire index,
	 * and cache only the matching nodes. This optimizes for cases
	 * like "hg tip", where only a few nodes are accessed.
	 *
	 * After that, we cache every node we visit, using a single
	 * scan amortized over multiple lookups. This gives the best
	 * bulk performance, e.g. for "hg log".
	 */
	if (self->ntmisses++ < 4) {
		for (rev = self->ntrev - 1; rev >= 0; rev--) {
			const char *n = index_node_existing(self, rev);
			if (n == NULL)
				return -3;
			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
				if (nt_insert(&self->nt, n, rev) == -1)
					return -3;
				break;
			}
		}
	} else {
		for (rev = self->ntrev - 1; rev >= 0; rev--) {
			const char *n = index_node_existing(self, rev);
			if (n == NULL)
				return -3;
			if (nt_insert(&self->nt, n, rev) == -1) {
				/* remember how far the cache got filled */
				self->ntrev = rev + 1;
				return -3;
			}
			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
				break;
			}
		}
		self->ntrev = rev;
	}

	if (rev >= 0)
		return rev;
	return -2;
}
1956 1957
1957 1958 static PyObject *index_getitem(indexObject *self, PyObject *value)
1958 1959 {
1959 1960 char *node;
1960 1961 int rev;
1961 1962
1962 1963 if (PyInt_Check(value)) {
1963 1964 long idx;
1964 1965 if (!pylong_to_long(value, &idx)) {
1965 1966 return NULL;
1966 1967 }
1967 1968 return index_get(self, idx);
1968 1969 }
1969 1970
1970 1971 if (node_check(value, &node) == -1)
1971 1972 return NULL;
1972 1973 rev = index_find_node(self, node, 20);
1973 1974 if (rev >= -1)
1974 1975 return PyInt_FromLong(rev);
1975 1976 if (rev == -2)
1976 1977 raise_revlog_error();
1977 1978 return NULL;
1978 1979 }
1979 1980
1980 1981 /*
1981 1982 * Fully populate the radix tree.
1982 1983 */
1983 1984 static int index_populate_nt(indexObject *self)
1984 1985 {
1985 1986 int rev;
1986 1987 if (self->ntrev > 0) {
1987 1988 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1988 1989 const char *n = index_node_existing(self, rev);
1989 1990 if (n == NULL)
1990 1991 return -1;
1991 1992 if (nt_insert(&self->nt, n, rev) == -1)
1992 1993 return -1;
1993 1994 }
1994 1995 self->ntrev = -1;
1995 1996 }
1996 1997 return 0;
1997 1998 }
1998 1999
static PyObject *index_partialmatch(indexObject *self, PyObject *args)
{
	/* index.partialmatch(hexprefix): full binary node matching the hex
	 * prefix, None when absent, or a revlog error on ambiguity. */
	const char *fullnode;
	Py_ssize_t nodelen;
	char *node;
	int rev, i;

	if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
		return NULL;

	if (nodelen < 1) {
		PyErr_SetString(PyExc_ValueError, "key too short");
		return NULL;
	}

	if (nodelen > 40) {
		PyErr_SetString(PyExc_ValueError, "key too long");
		return NULL;
	}

	/* hexdigit() sets an exception on a bad character */
	for (i = 0; i < nodelen; i++)
		hexdigit(node, i);
	if (PyErr_Occurred()) {
		/* input contains non-hex characters */
		PyErr_Clear();
		Py_RETURN_NONE;
	}

	/* a prefix lookup needs the fully populated tree */
	if (index_init_nt(self) == -1)
		return NULL;
	if (index_populate_nt(self) == -1)
		return NULL;
	rev = nt_partialmatch(&self->nt, node, nodelen);

	switch (rev) {
	case -4:
		raise_revlog_error();
		return NULL;
	case -2:
		Py_RETURN_NONE;
	case -1:
		return PyBytes_FromStringAndSize(nullid, 20);
	}

	fullnode = index_node_existing(self, rev);
	if (fullnode == NULL) {
		return NULL;
	}
	return PyBytes_FromStringAndSize(fullnode, 20);
}
2049 2050
2050 2051 static PyObject *index_shortest(indexObject *self, PyObject *args)
2051 2052 {
2052 2053 PyObject *val;
2053 2054 char *node;
2054 2055 int length;
2055 2056
2056 2057 if (!PyArg_ParseTuple(args, "O", &val))
2057 2058 return NULL;
2058 2059 if (node_check(val, &node) == -1)
2059 2060 return NULL;
2060 2061
2061 2062 self->ntlookups++;
2062 2063 if (index_init_nt(self) == -1)
2063 2064 return NULL;
2064 2065 if (index_populate_nt(self) == -1)
2065 2066 return NULL;
2066 2067 length = nt_shortest(&self->nt, node);
2067 2068 if (length == -3)
2068 2069 return NULL;
2069 2070 if (length == -2) {
2070 2071 raise_revlog_error();
2071 2072 return NULL;
2072 2073 }
2073 2074 return PyInt_FromLong(length);
2074 2075 }
2075 2076
2076 2077 static PyObject *index_m_get(indexObject *self, PyObject *args)
2077 2078 {
2078 2079 PyObject *val;
2079 2080 char *node;
2080 2081 int rev;
2081 2082
2082 2083 if (!PyArg_ParseTuple(args, "O", &val))
2083 2084 return NULL;
2084 2085 if (node_check(val, &node) == -1)
2085 2086 return NULL;
2086 2087 rev = index_find_node(self, node, 20);
2087 2088 if (rev == -3)
2088 2089 return NULL;
2089 2090 if (rev == -2)
2090 2091 Py_RETURN_NONE;
2091 2092 return PyInt_FromLong(rev);
2092 2093 }
2093 2094
2094 2095 static int index_contains(indexObject *self, PyObject *value)
2095 2096 {
2096 2097 char *node;
2097 2098
2098 2099 if (PyInt_Check(value)) {
2099 2100 long rev;
2100 2101 if (!pylong_to_long(value, &rev)) {
2101 2102 return -1;
2102 2103 }
2103 2104 return rev >= -1 && rev < index_length(self);
2104 2105 }
2105 2106
2106 2107 if (node_check(value, &node) == -1)
2107 2108 return -1;
2108 2109
2109 2110 switch (index_find_node(self, node, 20)) {
2110 2111 case -3:
2111 2112 return -1;
2112 2113 case -2:
2113 2114 return 0;
2114 2115 default:
2115 2116 return 1;
2116 2117 }
2117 2118 }
2118 2119
2119 2120 static PyObject *index_m_has_node(indexObject *self, PyObject *args)
2120 2121 {
2121 2122 int ret = index_contains(self, args);
2122 2123 if (ret < 0)
2123 2124 return NULL;
2124 2125 return PyBool_FromLong((long)ret);
2125 2126 }
2126 2127
2127 2128 static PyObject *index_m_rev(indexObject *self, PyObject *val)
2128 2129 {
2129 2130 char *node;
2130 2131 int rev;
2131 2132
2132 2133 if (node_check(val, &node) == -1)
2133 2134 return NULL;
2134 2135 rev = index_find_node(self, node, 20);
2135 2136 if (rev >= -1)
2136 2137 return PyInt_FromLong(rev);
2137 2138 if (rev == -2)
2138 2139 raise_revlog_error();
2139 2140 return NULL;
2140 2141 }
2141 2142
2142 2143 typedef uint64_t bitmask;
2143 2144
2144 2145 /*
2145 2146 * Given a disjoint set of revs, return all candidates for the
2146 2147 * greatest common ancestor. In revset notation, this is the set
2147 2148 * "heads(::a and ::b and ...)"
2148 2149 */
2149 2150 static PyObject *find_gca_candidates(indexObject *self, const int *revs,
2150 2151 int revcount)
2151 2152 {
2152 2153 const bitmask allseen = (1ull << revcount) - 1;
2153 2154 const bitmask poison = 1ull << revcount;
2154 2155 PyObject *gca = PyList_New(0);
2155 2156 int i, v, interesting;
2156 2157 int maxrev = -1;
2157 2158 bitmask sp;
2158 2159 bitmask *seen;
2159 2160
2160 2161 if (gca == NULL)
2161 2162 return PyErr_NoMemory();
2162 2163
2163 2164 for (i = 0; i < revcount; i++) {
2164 2165 if (revs[i] > maxrev)
2165 2166 maxrev = revs[i];
2166 2167 }
2167 2168
2168 2169 seen = calloc(sizeof(*seen), maxrev + 1);
2169 2170 if (seen == NULL) {
2170 2171 Py_DECREF(gca);
2171 2172 return PyErr_NoMemory();
2172 2173 }
2173 2174
2174 2175 for (i = 0; i < revcount; i++)
2175 2176 seen[revs[i]] = 1ull << i;
2176 2177
2177 2178 interesting = revcount;
2178 2179
2179 2180 for (v = maxrev; v >= 0 && interesting; v--) {
2180 2181 bitmask sv = seen[v];
2181 2182 int parents[2];
2182 2183
2183 2184 if (!sv)
2184 2185 continue;
2185 2186
2186 2187 if (sv < poison) {
2187 2188 interesting -= 1;
2188 2189 if (sv == allseen) {
2189 2190 PyObject *obj = PyInt_FromLong(v);
2190 2191 if (obj == NULL)
2191 2192 goto bail;
2192 2193 if (PyList_Append(gca, obj) == -1) {
2193 2194 Py_DECREF(obj);
2194 2195 goto bail;
2195 2196 }
2196 2197 sv |= poison;
2197 2198 for (i = 0; i < revcount; i++) {
2198 2199 if (revs[i] == v)
2199 2200 goto done;
2200 2201 }
2201 2202 }
2202 2203 }
2203 2204 if (index_get_parents(self, v, parents, maxrev) < 0)
2204 2205 goto bail;
2205 2206
2206 2207 for (i = 0; i < 2; i++) {
2207 2208 int p = parents[i];
2208 2209 if (p == -1)
2209 2210 continue;
2210 2211 sp = seen[p];
2211 2212 if (sv < poison) {
2212 2213 if (sp == 0) {
2213 2214 seen[p] = sv;
2214 2215 interesting++;
2215 2216 } else if (sp != sv)
2216 2217 seen[p] |= sv;
2217 2218 } else {
2218 2219 if (sp && sp < poison)
2219 2220 interesting--;
2220 2221 seen[p] = sv;
2221 2222 }
2222 2223 }
2223 2224 }
2224 2225
2225 2226 done:
2226 2227 free(seen);
2227 2228 return gca;
2228 2229 bail:
2229 2230 free(seen);
2230 2231 Py_XDECREF(gca);
2231 2232 return NULL;
2232 2233 }
2233 2234
2234 2235 /*
2235 2236 * Given a disjoint set of revs, return the subset with the longest
2236 2237 * path to the root.
2237 2238 */
2238 2239 static PyObject *find_deepest(indexObject *self, PyObject *revs)
2239 2240 {
2240 2241 const Py_ssize_t revcount = PyList_GET_SIZE(revs);
2241 2242 static const Py_ssize_t capacity = 24;
2242 2243 int *depth, *interesting = NULL;
2243 2244 int i, j, v, ninteresting;
2244 2245 PyObject *dict = NULL, *keys = NULL;
2245 2246 long *seen = NULL;
2246 2247 int maxrev = -1;
2247 2248 long final;
2248 2249
2249 2250 if (revcount > capacity) {
2250 2251 PyErr_Format(PyExc_OverflowError,
2251 2252 "bitset size (%ld) > capacity (%ld)",
2252 2253 (long)revcount, (long)capacity);
2253 2254 return NULL;
2254 2255 }
2255 2256
2256 2257 for (i = 0; i < revcount; i++) {
2257 2258 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2258 2259 if (n > maxrev)
2259 2260 maxrev = n;
2260 2261 }
2261 2262
2262 2263 depth = calloc(sizeof(*depth), maxrev + 1);
2263 2264 if (depth == NULL)
2264 2265 return PyErr_NoMemory();
2265 2266
2266 2267 seen = calloc(sizeof(*seen), maxrev + 1);
2267 2268 if (seen == NULL) {
2268 2269 PyErr_NoMemory();
2269 2270 goto bail;
2270 2271 }
2271 2272
2272 2273 interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
2273 2274 if (interesting == NULL) {
2274 2275 PyErr_NoMemory();
2275 2276 goto bail;
2276 2277 }
2277 2278
2278 2279 if (PyList_Sort(revs) == -1)
2279 2280 goto bail;
2280 2281
2281 2282 for (i = 0; i < revcount; i++) {
2282 2283 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2283 2284 long b = 1l << i;
2284 2285 depth[n] = 1;
2285 2286 seen[n] = b;
2286 2287 interesting[b] = 1;
2287 2288 }
2288 2289
2289 2290 /* invariant: ninteresting is the number of non-zero entries in
2290 2291 * interesting. */
2291 2292 ninteresting = (int)revcount;
2292 2293
2293 2294 for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
2294 2295 int dv = depth[v];
2295 2296 int parents[2];
2296 2297 long sv;
2297 2298
2298 2299 if (dv == 0)
2299 2300 continue;
2300 2301
2301 2302 sv = seen[v];
2302 2303 if (index_get_parents(self, v, parents, maxrev) < 0)
2303 2304 goto bail;
2304 2305
2305 2306 for (i = 0; i < 2; i++) {
2306 2307 int p = parents[i];
2307 2308 long sp;
2308 2309 int dp;
2309 2310
2310 2311 if (p == -1)
2311 2312 continue;
2312 2313
2313 2314 dp = depth[p];
2314 2315 sp = seen[p];
2315 2316 if (dp <= dv) {
2316 2317 depth[p] = dv + 1;
2317 2318 if (sp != sv) {
2318 2319 interesting[sv] += 1;
2319 2320 seen[p] = sv;
2320 2321 if (sp) {
2321 2322 interesting[sp] -= 1;
2322 2323 if (interesting[sp] == 0)
2323 2324 ninteresting -= 1;
2324 2325 }
2325 2326 }
2326 2327 } else if (dv == dp - 1) {
2327 2328 long nsp = sp | sv;
2328 2329 if (nsp == sp)
2329 2330 continue;
2330 2331 seen[p] = nsp;
2331 2332 interesting[sp] -= 1;
2332 2333 if (interesting[sp] == 0)
2333 2334 ninteresting -= 1;
2334 2335 if (interesting[nsp] == 0)
2335 2336 ninteresting += 1;
2336 2337 interesting[nsp] += 1;
2337 2338 }
2338 2339 }
2339 2340 interesting[sv] -= 1;
2340 2341 if (interesting[sv] == 0)
2341 2342 ninteresting -= 1;
2342 2343 }
2343 2344
2344 2345 final = 0;
2345 2346 j = ninteresting;
2346 2347 for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
2347 2348 if (interesting[i] == 0)
2348 2349 continue;
2349 2350 final |= i;
2350 2351 j -= 1;
2351 2352 }
2352 2353 if (final == 0) {
2353 2354 keys = PyList_New(0);
2354 2355 goto bail;
2355 2356 }
2356 2357
2357 2358 dict = PyDict_New();
2358 2359 if (dict == NULL)
2359 2360 goto bail;
2360 2361
2361 2362 for (i = 0; i < revcount; i++) {
2362 2363 PyObject *key;
2363 2364
2364 2365 if ((final & (1 << i)) == 0)
2365 2366 continue;
2366 2367
2367 2368 key = PyList_GET_ITEM(revs, i);
2368 2369 Py_INCREF(key);
2369 2370 Py_INCREF(Py_None);
2370 2371 if (PyDict_SetItem(dict, key, Py_None) == -1) {
2371 2372 Py_DECREF(key);
2372 2373 Py_DECREF(Py_None);
2373 2374 goto bail;
2374 2375 }
2375 2376 }
2376 2377
2377 2378 keys = PyDict_Keys(dict);
2378 2379
2379 2380 bail:
2380 2381 free(depth);
2381 2382 free(seen);
2382 2383 free(interesting);
2383 2384 Py_XDECREF(dict);
2384 2385
2385 2386 return keys;
2386 2387 }
2387 2388
/*
 * Given a (possibly overlapping) set of revs, return all the
 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
 */
static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
{
	PyObject *ret = NULL;
	Py_ssize_t argcount, i, len;
	bitmask repeat = 0; /* 64-bit bloom filter over rev & 0x3f */
	int revcount = 0;   /* number of deduplicated revs collected */
	int *revs;

	argcount = PySequence_Length(args);
	revs = PyMem_Malloc(argcount * sizeof(*revs));
	if (argcount > 0 && revs == NULL)
		return PyErr_NoMemory();
	len = index_length(self);

	/* validate and deduplicate the arguments into revs[] */
	for (i = 0; i < argcount; i++) {
		static const int capacity = 24;
		PyObject *obj = PySequence_GetItem(args, i);
		bitmask x;
		long val;

		if (!PyInt_Check(obj)) {
			PyErr_SetString(PyExc_TypeError,
			                "arguments must all be ints");
			Py_DECREF(obj);
			goto bail;
		}
		val = PyInt_AsLong(obj);
		Py_DECREF(obj);
		/* -1 is nullrev: the only common ancestor is null, so the
		 * head set is empty.
		 * NOTE(review): PyInt_AsLong also returns -1 on error
		 * (e.g. OverflowError), which would be silently treated as
		 * nullrev here — confirm callers pass in-range ints. */
		if (val == -1) {
			ret = PyList_New(0);
			goto done;
		}
		if (val < 0 || val >= len) {
			PyErr_SetString(PyExc_IndexError, "index out of range");
			goto bail;
		}
		/* this cheesy bloom filter lets us avoid some more
		 * expensive duplicate checks in the common set-is-disjoint
		 * case */
		x = 1ull << (val & 0x3f);
		if (repeat & x) {
			int k;
			for (k = 0; k < revcount; k++) {
				if (val == revs[k])
					goto duplicate;
			}
		} else
			repeat |= x;
		if (revcount >= capacity) {
			PyErr_Format(PyExc_OverflowError,
			             "bitset size (%d) > capacity (%d)",
			             revcount, capacity);
			goto bail;
		}
		revs[revcount++] = (int)val;
	duplicate:;
	}

	/* trivial cases: zero or one distinct rev */
	if (revcount == 0) {
		ret = PyList_New(0);
		goto done;
	}
	if (revcount == 1) {
		PyObject *obj;
		ret = PyList_New(1);
		if (ret == NULL)
			goto bail;
		obj = PyInt_FromLong(revs[0]);
		if (obj == NULL)
			goto bail;
		PyList_SET_ITEM(ret, 0, obj); /* steals the reference */
		goto done;
	}

	ret = find_gca_candidates(self, revs, revcount);
	if (ret == NULL)
		goto bail;

done:
	PyMem_Free(revs);
	return ret;

bail:
	PyMem_Free(revs);
	Py_XDECREF(ret);
	return NULL;
}
2479 2480
2480 2481 /*
2481 2482 * Given a (possibly overlapping) set of revs, return the greatest
2482 2483 * common ancestors: those with the longest path to the root.
2483 2484 */
2484 2485 static PyObject *index_ancestors(indexObject *self, PyObject *args)
2485 2486 {
2486 2487 PyObject *ret;
2487 2488 PyObject *gca = index_commonancestorsheads(self, args);
2488 2489 if (gca == NULL)
2489 2490 return NULL;
2490 2491
2491 2492 if (PyList_GET_SIZE(gca) <= 1) {
2492 2493 return gca;
2493 2494 }
2494 2495
2495 2496 ret = find_deepest(self, gca);
2496 2497 Py_DECREF(gca);
2497 2498 return ret;
2498 2499 }
2499 2500
2500 2501 /*
2501 2502 * Invalidate any trie entries introduced by added revs.
2502 2503 */
2503 2504 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
2504 2505 {
2505 2506 Py_ssize_t i, len = PyList_GET_SIZE(self->added);
2506 2507
2507 2508 for (i = start; i < len; i++) {
2508 2509 PyObject *tuple = PyList_GET_ITEM(self->added, i);
2509 2510 PyObject *node = PyTuple_GET_ITEM(tuple, 7);
2510 2511
2511 2512 nt_delete_node(&self->nt, PyBytes_AS_STRING(node));
2512 2513 }
2513 2514
2514 2515 if (start == 0)
2515 2516 Py_CLEAR(self->added);
2516 2517 }
2517 2518
/*
 * Delete a numeric range of revs, which must be at the end of the
 * range.
 *
 * Only step-1 slices reaching the end of the index are allowed;
 * anything else raises ValueError/IndexError. Returns 0 on success,
 * -1 on error.
 */
static int index_slice_del(indexObject *self, PyObject *item)
{
	Py_ssize_t start, stop, step, slicelength;
	/* +1 so the nullrev sentinel at the end is part of the range */
	Py_ssize_t length = index_length(self) + 1;
	int ret = 0;

/* Argument changed from PySliceObject* to PyObject* in Python 3. */
#ifdef IS_PY3K
	if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
	                         &slicelength) < 0)
#else
	if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
	                         &step, &slicelength) < 0)
#endif
		return -1;

	if (slicelength <= 0)
		return 0;

	if ((step < 0 && start < stop) || (step > 0 && start > stop))
		stop = start;

	/* normalize a negative-step slice into an equivalent forward one */
	if (step < 0) {
		stop = start + 1;
		start = stop + step * (slicelength - 1) - 1;
		step = -step;
	}

	if (step != 1) {
		PyErr_SetString(PyExc_ValueError,
		                "revlog index delete requires step size of 1");
		return -1;
	}

	if (stop != length - 1) {
		PyErr_SetString(PyExc_IndexError,
		                "revlog index deletion indices are invalid");
		return -1;
	}

	/* deletion reaches into the on-disk portion of the index */
	if (start < self->length) {
		if (self->ntinitialized) {
			Py_ssize_t i;

			/* drop every truncated node from the nodetree */
			for (i = start; i < self->length; i++) {
				const char *node = index_node_existing(self, i);
				if (node == NULL)
					return -1;

				nt_delete_node(&self->nt, node);
			}
			if (self->added)
				index_invalidate_added(self, 0);
			if (self->ntrev > start)
				self->ntrev = (int)start;
		} else if (self->added) {
			Py_CLEAR(self->added);
		}

		self->length = start;
		if (start < self->raw_length) {
			/* clear cached entries past the new end */
			if (self->cache) {
				Py_ssize_t i;
				for (i = start; i < self->raw_length; i++)
					Py_CLEAR(self->cache[i]);
			}
			self->raw_length = start;
		}
		goto done;
	}

	/* deletion only affects in-memory (added) entries */
	if (self->ntinitialized) {
		index_invalidate_added(self, start - self->length);
		if (self->ntrev > start)
			self->ntrev = (int)start;
	}
	if (self->added)
		ret = PyList_SetSlice(self->added, start - self->length,
		                      PyList_GET_SIZE(self->added), NULL);
done:
	Py_CLEAR(self->headrevs); /* cached head list is now stale */
	return ret;
}
2605 2606
2606 2607 /*
2607 2608 * Supported ops:
2608 2609 *
2609 2610 * slice deletion
2610 2611 * string assignment (extend node->rev mapping)
2611 2612 * string deletion (shrink node->rev mapping)
2612 2613 */
2613 2614 static int index_assign_subscript(indexObject *self, PyObject *item,
2614 2615 PyObject *value)
2615 2616 {
2616 2617 char *node;
2617 2618 long rev;
2618 2619
2619 2620 if (PySlice_Check(item) && value == NULL)
2620 2621 return index_slice_del(self, item);
2621 2622
2622 2623 if (node_check(item, &node) == -1)
2623 2624 return -1;
2624 2625
2625 2626 if (value == NULL)
2626 2627 return self->ntinitialized ? nt_delete_node(&self->nt, node)
2627 2628 : 0;
2628 2629 rev = PyInt_AsLong(value);
2629 2630 if (rev > INT_MAX || rev < 0) {
2630 2631 if (!PyErr_Occurred())
2631 2632 PyErr_SetString(PyExc_ValueError, "rev out of range");
2632 2633 return -1;
2633 2634 }
2634 2635
2635 2636 if (index_init_nt(self) == -1)
2636 2637 return -1;
2637 2638 return nt_insert(&self->nt, node, (int)rev);
2638 2639 }
2639 2640
2640 2641 /*
2641 2642 * Find all RevlogNG entries in an index that has inline data. Update
2642 2643 * the optional "offsets" table with those entries.
2643 2644 */
2644 2645 static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
2645 2646 {
2646 2647 const char *data = (const char *)self->buf.buf;
2647 2648 Py_ssize_t pos = 0;
2648 2649 Py_ssize_t end = self->buf.len;
2649 2650 long incr = v1_hdrsize;
2650 2651 Py_ssize_t len = 0;
2651 2652
2652 2653 while (pos + v1_hdrsize <= end && pos >= 0) {
2653 2654 uint32_t comp_len;
2654 2655 /* 3rd element of header is length of compressed inline data */
2655 2656 comp_len = getbe32(data + pos + 8);
2656 2657 incr = v1_hdrsize + comp_len;
2657 2658 if (offsets)
2658 2659 offsets[len] = data + pos;
2659 2660 len++;
2660 2661 pos += incr;
2661 2662 }
2662 2663
2663 2664 if (pos != end) {
2664 2665 if (!PyErr_Occurred())
2665 2666 PyErr_SetString(PyExc_ValueError, "corrupt index file");
2666 2667 return -1;
2667 2668 }
2668 2669
2669 2670 return len;
2670 2671 }
2671 2672
/* tp_init: parse the (data, inlined) arguments and set up the index.
 * Returns 0 on success, -1 (with an exception set) on failure. */
static int index_init(indexObject *self, PyObject *args)
{
	PyObject *data_obj, *inlined_obj;
	Py_ssize_t size;

	/* Initialize before argument-checking to avoid index_dealloc() crash.
	 */
	self->raw_length = 0;
	self->added = NULL;
	self->cache = NULL;
	self->data = NULL;
	memset(&self->buf, 0, sizeof(self->buf));
	self->headrevs = NULL;
	self->filteredrevs = Py_None;
	Py_INCREF(Py_None);
	self->ntinitialized = 0;
	self->offsets = NULL;

	if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
		return -1;
	if (!PyObject_CheckBuffer(data_obj)) {
		PyErr_SetString(PyExc_TypeError,
		                "data does not support buffer interface");
		return -1;
	}

	/* self->buf borrows a view into data_obj; released in dealloc */
	if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
		return -1;
	size = self->buf.len;

	self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
	self->data = data_obj;

	self->ntlookups = self->ntmisses = 0;
	self->ntrev = -1;
	Py_INCREF(self->data);

	if (self->inlined) {
		/* entry boundaries are variable: scan to count them */
		Py_ssize_t len = inline_scan(self, NULL);
		if (len == -1)
			goto bail;
		self->raw_length = len;
		self->length = len;
	} else {
		/* fixed-size records: the length must divide evenly */
		if (size % v1_hdrsize) {
			PyErr_SetString(PyExc_ValueError, "corrupt index file");
			goto bail;
		}
		self->raw_length = size / v1_hdrsize;
		self->length = self->raw_length;
	}

	return 0;
bail:
	return -1;
}
2728 2729
/* "nodemap" getter: the index object doubles as its own nodemap, so
 * return a new reference to self. */
static PyObject *index_nodemap(indexObject *self)
{
	Py_INCREF(self);
	return (PyObject *)self;
}
2734 2735
2735 2736 static void _index_clearcaches(indexObject *self)
2736 2737 {
2737 2738 if (self->cache) {
2738 2739 Py_ssize_t i;
2739 2740
2740 2741 for (i = 0; i < self->raw_length; i++)
2741 2742 Py_CLEAR(self->cache[i]);
2742 2743 free(self->cache);
2743 2744 self->cache = NULL;
2744 2745 }
2745 2746 if (self->offsets) {
2746 2747 PyMem_Free((void *)self->offsets);
2747 2748 self->offsets = NULL;
2748 2749 }
2749 2750 if (self->ntinitialized) {
2750 2751 nt_dealloc(&self->nt);
2751 2752 }
2752 2753 self->ntinitialized = 0;
2753 2754 Py_CLEAR(self->headrevs);
2754 2755 }
2755 2756
2756 2757 static PyObject *index_clearcaches(indexObject *self)
2757 2758 {
2758 2759 _index_clearcaches(self);
2759 2760 self->ntrev = -1;
2760 2761 self->ntlookups = self->ntmisses = 0;
2761 2762 Py_RETURN_NONE;
2762 2763 }
2763 2764
/* tp_dealloc: drop caches first, then release the buffer view and the
 * remaining owned references before freeing the object itself. */
static void index_dealloc(indexObject *self)
{
	_index_clearcaches(self);
	Py_XDECREF(self->filteredrevs);
	if (self->buf.buf) {
		PyBuffer_Release(&self->buf);
		memset(&self->buf, 0, sizeof(self->buf));
	}
	Py_XDECREF(self->data);
	Py_XDECREF(self->added);
	PyObject_Del(self);
}
2776 2777
/* Sequence protocol: len(), indexing, and "in" (rev or node). */
static PySequenceMethods index_sequence_methods = {
    (lenfunc)index_length,      /* sq_length */
    0,                          /* sq_concat */
    0,                          /* sq_repeat */
    (ssizeargfunc)index_get,    /* sq_item */
    0,                          /* sq_slice */
    0,                          /* sq_ass_item */
    0,                          /* sq_ass_slice */
    (objobjproc)index_contains, /* sq_contains */
};
2787 2788
/* Mapping protocol: subscripting plus slice deletion / node
 * assignment via index_assign_subscript. */
static PyMappingMethods index_mapping_methods = {
    (lenfunc)index_length,                 /* mp_length */
    (binaryfunc)index_getitem,             /* mp_subscript */
    (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
};
2793 2794
/* Method table for parsers.index. Note that "get" and "get_rev" share
 * the same implementation (index_m_get). */
static PyMethodDef index_methods[] = {
    {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
     "return the gca set of the given revs"},
    {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
     METH_VARARGS,
     "return the heads of the common ancestors of the given revs"},
    {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
     "clear the index caches"},
    {"get", (PyCFunction)index_m_get, METH_VARARGS, "get an index entry"},
    {"get_rev", (PyCFunction)index_m_get, METH_VARARGS,
     "return `rev` associated with a node or None"},
    {"has_node", (PyCFunction)index_m_has_node, METH_O,
     "return True if the node exist in the index"},
    {"rev", (PyCFunction)index_m_rev, METH_O,
     "return `rev` associated with a node or raise RevlogError"},
    {"computephasesmapsets", (PyCFunction)compute_phases_map_sets, METH_VARARGS,
     "compute phases"},
    {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
     "reachableroots"},
    {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
     "get head revisions"}, /* Can do filtering since 3.2 */
    {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
     "get filtered head revisions"}, /* Can always do filtering */
    {"issnapshot", (PyCFunction)index_issnapshot, METH_O,
     "True if the object is a snapshot"},
    {"findsnapshots", (PyCFunction)index_findsnapshots, METH_VARARGS,
     "Gather snapshot data in a cache dict"},
    {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
     "determine revisions with deltas to reconstruct fulltext"},
    {"slicechunktodensity", (PyCFunction)index_slicechunktodensity,
     METH_VARARGS, "determine revisions with deltas to reconstruct fulltext"},
    {"append", (PyCFunction)index_append, METH_O, "append an index entry"},
    {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
     "match a potentially ambiguous node ID"},
    {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
     "find length of shortest hex nodeid of a binary ID"},
    {"stats", (PyCFunction)index_stats, METH_NOARGS, "stats for the index"},
    {NULL} /* Sentinel */
};
2833 2834
/* Attribute table: "nodemap" resolves to the index object itself. */
static PyGetSetDef index_getset[] = {
    {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
    {NULL} /* Sentinel */
};
2838 2839
/* Type object for parsers.index; registered in revlog_module_init. */
PyTypeObject HgRevlogIndex_Type = {
    PyVarObject_HEAD_INIT(NULL, 0) /* header */
    "parsers.index",               /* tp_name */
    sizeof(indexObject),           /* tp_basicsize */
    0,                             /* tp_itemsize */
    (destructor)index_dealloc,     /* tp_dealloc */
    0,                             /* tp_print */
    0,                             /* tp_getattr */
    0,                             /* tp_setattr */
    0,                             /* tp_compare */
    0,                             /* tp_repr */
    0,                             /* tp_as_number */
    &index_sequence_methods,       /* tp_as_sequence */
    &index_mapping_methods,        /* tp_as_mapping */
    0,                             /* tp_hash */
    0,                             /* tp_call */
    0,                             /* tp_str */
    0,                             /* tp_getattro */
    0,                             /* tp_setattro */
    0,                             /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,            /* tp_flags */
    "revlog index",                /* tp_doc */
    0,                             /* tp_traverse */
    0,                             /* tp_clear */
    0,                             /* tp_richcompare */
    0,                             /* tp_weaklistoffset */
    0,                             /* tp_iter */
    0,                             /* tp_iternext */
    index_methods,                 /* tp_methods */
    0,                             /* tp_members */
    index_getset,                  /* tp_getset */
    0,                             /* tp_base */
    0,                             /* tp_dict */
    0,                             /* tp_descr_get */
    0,                             /* tp_descr_set */
    0,                             /* tp_dictoffset */
    (initproc)index_init,          /* tp_init */
    0,                             /* tp_alloc */
};
2878 2879
/*
 * returns a tuple of the form (index, index, cache) with elements as
 * follows:
 *
 * index: an index object that lazily parses RevlogNG records
 * cache: if data is inlined, a tuple (0, index_file_content), else None
 * index_file_content could be a string, or a buffer
 *
 * added complications are for backwards compatibility
 */
PyObject *parse_index2(PyObject *self, PyObject *args)
{
	PyObject *cache = NULL;
	indexObject *idx;
	int ret;

	idx = PyObject_New(indexObject, &HgRevlogIndex_Type);
	if (idx == NULL)
		goto bail;

	ret = index_init(idx, args);
	if (ret == -1)
		goto bail;

	if (idx->inlined) {
		/* "iO" takes a new reference to idx->data for the tuple */
		cache = Py_BuildValue("iO", 0, idx->data);
		if (cache == NULL)
			goto bail;
	} else {
		cache = Py_None;
		Py_INCREF(cache);
	}

	/* "NN" steals both references, so no cleanup needed on success */
	return Py_BuildValue("NN", idx, cache);

bail:
	Py_XDECREF(idx);
	Py_XDECREF(cache);
	return NULL;
}
2919 2920
/* C API table exported to other extension modules via a capsule. */
static Revlog_CAPI CAPI = {
    /* increment the abi_version field upon each change in the Revlog_CAPI
       struct or in the ABI of the listed functions */
    2,
    index_length,
    index_node,
    HgRevlogIndex_GetParents,
};
2928 2929
/* Register the index and nodetree types, the shared null entry, and
 * the revlog C-API capsule on the given module. Errors are silently
 * ignored, matching the rest of module initialization. */
void revlog_module_init(PyObject *mod)
{
	PyObject *caps = NULL;
	HgRevlogIndex_Type.tp_new = PyType_GenericNew;
	if (PyType_Ready(&HgRevlogIndex_Type) < 0)
		return;
	/* PyModule_AddObject steals a reference on success, so INCREF
	 * first to keep the static type alive */
	Py_INCREF(&HgRevlogIndex_Type);
	PyModule_AddObject(mod, "index", (PyObject *)&HgRevlogIndex_Type);

	nodetreeType.tp_new = PyType_GenericNew;
	if (PyType_Ready(&nodetreeType) < 0)
		return;
	Py_INCREF(&nodetreeType);
	PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);

	/* shared immortal tuple representing the null revision */
	if (!nullentry) {
		nullentry =
		    Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1,
		                  -1, -1, -1, nullid, (Py_ssize_t)20);
	}
	if (nullentry)
		PyObject_GC_UnTrack(nullentry);

	caps = PyCapsule_New(&CAPI, "mercurial.cext.parsers.revlog_CAPI", NULL);
	if (caps != NULL)
		PyModule_AddObject(mod, "revlog_CAPI", caps);
}
General Comments 0
You need to be logged in to leave comments. Login now