##// END OF EJS Templates
diffs: replace compare controller with new html based diffs:...
dan -
r1030:158ce501 default
parent child Browse files
Show More
@@ -0,0 +1,14 b''
1 Copyright 2006 Google Inc.
2 http://code.google.com/p/google-diff-match-patch/
3
4 Licensed under the Apache License, Version 2.0 (the "License");
5 you may not use this file except in compliance with the License.
6 You may obtain a copy of the License at
7
8 http://www.apache.org/licenses/LICENSE-2.0
9
10 Unless required by applicable law or agreed to in writing, software
11 distributed under the License is distributed on an "AS IS" BASIS,
12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 See the License for the specific language governing permissions and
14 limitations under the License.
This diff has been collapsed as it changes many lines, (1919 lines changed) Show them Hide them
@@ -0,0 +1,1919 b''
1 #!/usr/bin/python2.4
2
3 from __future__ import division
4
5 """Diff Match and Patch
6
7 Copyright 2006 Google Inc.
8 http://code.google.com/p/google-diff-match-patch/
9
10 Licensed under the Apache License, Version 2.0 (the "License");
11 you may not use this file except in compliance with the License.
12 You may obtain a copy of the License at
13
14 http://www.apache.org/licenses/LICENSE-2.0
15
16 Unless required by applicable law or agreed to in writing, software
17 distributed under the License is distributed on an "AS IS" BASIS,
18 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 See the License for the specific language governing permissions and
20 limitations under the License.
21 """
22
23 """Functions for diff, match and patch.
24
25 Computes the difference between two texts to create a patch.
26 Applies the patch onto another text, allowing for errors.
27 """
28
29 __author__ = 'fraser@google.com (Neil Fraser)'
30
31 import math
32 import re
33 import sys
34 import time
35 import urllib
36
37 class diff_match_patch:
38 """Class containing the diff, match and patch methods.
39
40 Also contains the behaviour settings.
41 """
42
def __init__(self):
  """Inits a diff_match_patch object with default settings.
  Redefine these in your program to override the defaults.

  All attributes set here are public tuning knobs; nothing else is
  initialised, so the object holds no other per-instance state.
  """

  # Number of seconds to map a diff before giving up (0 for infinity).
  self.Diff_Timeout = 1.0
  # Cost of an empty edit operation in terms of edit characters.
  self.Diff_EditCost = 4
  # At what point is no match declared (0.0 = perfection, 1.0 = very loose).
  self.Match_Threshold = 0.5
  # How far to search for a match (0 = exact location, 1000+ = broad match).
  # A match this many characters away from the expected location will add
  # 1.0 to the score (0.0 is a perfect match).
  self.Match_Distance = 1000
  # When deleting a large block of text (over ~64 characters), how close do
  # the contents have to be to match the expected contents. (0.0 = perfection,
  # 1.0 = very loose).  Note that Match_Threshold controls how closely the
  # end points of a delete need to match.
  self.Patch_DeleteThreshold = 0.5
  # Chunk size for context length.
  self.Patch_Margin = 4

  # The number of bits in an int.
  # Python has no maximum, thus to disable patch splitting set to 0.
  # However to avoid long patches in certain pathological cases, use 32.
  # Multiple short patches (using native ints) are much faster than long ones.
  self.Match_MaxBits = 32
71
# DIFF FUNCTIONS

# The data structure representing a diff is an array of tuples:
# [(DIFF_DELETE, "Hello"), (DIFF_INSERT, "Goodbye"), (DIFF_EQUAL, " world.")]
# which means: delete "Hello", add "Goodbye" and keep " world."
# These three constants are class attributes shared by every instance.
DIFF_DELETE = -1
DIFF_INSERT = 1
DIFF_EQUAL = 0
def diff_main(self, text1, text2, checklines=True, deadline=None):
  """Find the differences between two texts.  Simplifies the problem by
  stripping any common prefix or suffix off the texts before diffing.

  Args:
    text1: Old string to be diffed.
    text2: New string to be diffed.
    checklines: Optional speedup flag.  If present and false, then don't run
      a line-level diff first to identify the changed areas.
      Defaults to true, which does a faster, slightly less optimal diff.
    deadline: Optional time when the diff should be complete by.  Used
      internally for recursive calls.  Users should set Diff_Timeout instead.

  Returns:
    Array of changes.

  Raises:
    ValueError: If either input is None.
  """
  # Check for null inputs first so we fail fast before doing any other
  # work (the original computed the deadline before validating inputs).
  if text1 is None or text2 is None:
    raise ValueError("Null inputs. (diff_main)")

  # Set a deadline by which time the diff must be complete.
  if deadline is None:
    # Unlike in most languages, Python counts time in seconds.
    if self.Diff_Timeout <= 0:
      deadline = sys.maxint
    else:
      deadline = time.time() + self.Diff_Timeout

  # Check for equality (speedup).
  if text1 == text2:
    if text1:
      return [(self.DIFF_EQUAL, text1)]
    return []

  # Trim off common prefix (speedup).
  commonlength = self.diff_commonPrefix(text1, text2)
  commonprefix = text1[:commonlength]
  text1 = text1[commonlength:]
  text2 = text2[commonlength:]

  # Trim off common suffix (speedup).
  commonlength = self.diff_commonSuffix(text1, text2)
  if commonlength == 0:
    commonsuffix = ''
  else:
    commonsuffix = text1[-commonlength:]
    text1 = text1[:-commonlength]
    text2 = text2[:-commonlength]

  # Compute the diff on the middle block.
  diffs = self.diff_compute(text1, text2, checklines, deadline)

  # Restore the prefix and suffix.
  if commonprefix:
    diffs[:0] = [(self.DIFF_EQUAL, commonprefix)]
  if commonsuffix:
    diffs.append((self.DIFF_EQUAL, commonsuffix))
  self.diff_cleanupMerge(diffs)
  return diffs
140
def diff_compute(self, text1, text2, checklines, deadline):
  """Find the differences between two texts.  Assumes that the texts do not
  have any common prefix or suffix.

  Args:
    text1: Old string to be diffed.
    text2: New string to be diffed.
    checklines: Speedup flag.  If false, then don't run a line-level diff
      first to identify the changed areas.
      If true, then run a faster, slightly less optimal diff.
    deadline: Time when the diff should be complete by.

  Returns:
    Array of changes.
  """
  # Empty-input shortcuts: everything is a pure insertion or deletion.
  if not text1:
    return [(self.DIFF_INSERT, text2)]
  if not text2:
    return [(self.DIFF_DELETE, text1)]

  # Remember which input is longer; it decides the edit operation below.
  reversed_diff = len(text1) > len(text2)
  if reversed_diff:
    longtext, shorttext = text1, text2
  else:
    longtext, shorttext = text2, text1

  location = longtext.find(shorttext)
  if location != -1:
    # The shorter text sits inside the longer one (speedup).  The extra
    # material is an insertion when text2 is longer, a deletion otherwise.
    if reversed_diff:
      edit_op = self.DIFF_DELETE
    else:
      edit_op = self.DIFF_INSERT
    return [(edit_op, longtext[:location]),
            (self.DIFF_EQUAL, shorttext),
            (edit_op, longtext[location + len(shorttext):])]

  if len(shorttext) == 1:
    # Single character string.
    # After the previous speedup, the character can't be an equality.
    return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]

  # Check to see if the problem can be split in two around a half-match.
  hm = self.diff_halfMatch(text1, text2)
  if hm:
    # A half-match was found; diff each side independently and rejoin.
    text1_a, text1_b, text2_a, text2_b, mid_common = hm
    diffs_a = self.diff_main(text1_a, text2_a, checklines, deadline)
    diffs_b = self.diff_main(text1_b, text2_b, checklines, deadline)
    return diffs_a + [(self.DIFF_EQUAL, mid_common)] + diffs_b

  if checklines and len(text1) > 100 and len(text2) > 100:
    return self.diff_lineMode(text1, text2, deadline)
  return self.diff_bisect(text1, text2, deadline)
199
def diff_lineMode(self, text1, text2, deadline):
  """Do a quick line-level diff on both strings, then rediff the parts for
  greater accuracy.
  This speedup can produce non-minimal diffs.

  Args:
    text1: Old string to be diffed.
    text2: New string to be diffed.
    deadline: Time when the diff should be complete by.

  Returns:
    Array of changes.
  """

  # Scan the text on a line-by-line basis first.
  (text1, text2, linearray) = self.diff_linesToChars(text1, text2)

  # Each character now stands for a whole line, so this diff runs on much
  # shorter strings.  checklines=False prevents infinite recursion.
  diffs = self.diff_main(text1, text2, False, deadline)

  # Convert the diff back to original text.
  self.diff_charsToLines(diffs, linearray)
  # Eliminate freak matches (e.g. blank lines)
  self.diff_cleanupSemantic(diffs)

  # Rediff any replacement blocks, this time character-by-character.
  # Add a dummy entry at the end.
  diffs.append((self.DIFF_EQUAL, ''))
  pointer = 0
  count_delete = 0  # consecutive DIFF_DELETE entries seen so far
  count_insert = 0  # consecutive DIFF_INSERT entries seen so far
  text_delete = ''  # concatenated text of those deletions
  text_insert = ''  # concatenated text of those insertions
  while pointer < len(diffs):
    if diffs[pointer][0] == self.DIFF_INSERT:
      count_insert += 1
      text_insert += diffs[pointer][1]
    elif diffs[pointer][0] == self.DIFF_DELETE:
      count_delete += 1
      text_delete += diffs[pointer][1]
    elif diffs[pointer][0] == self.DIFF_EQUAL:
      # Upon reaching an equality, check for prior redundancies.
      if count_delete >= 1 and count_insert >= 1:
        # Delete the offending records and add the merged ones.
        # The accumulated delete/insert pair is re-diffed char-by-char and
        # spliced back in place of the original coarse records.
        a = self.diff_main(text_delete, text_insert, False, deadline)
        diffs[pointer - count_delete - count_insert : pointer] = a
        pointer = pointer - count_delete - count_insert + len(a)
      count_insert = 0
      count_delete = 0
      text_delete = ''
      text_insert = ''

    pointer += 1

  diffs.pop()  # Remove the dummy entry at the end.

  return diffs
256
def diff_bisect(self, text1, text2, deadline):
  """Find the 'middle snake' of a diff, split the problem in two
  and return the recursively constructed diff.
  See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.

  Args:
    text1: Old string to be diffed.
    text2: New string to be diffed.
    deadline: Time at which to bail if not yet complete.

  Returns:
    Array of diff tuples.
  """

  # Cache the text lengths to prevent multiple calls.
  text1_length = len(text1)
  text2_length = len(text2)
  max_d = (text1_length + text2_length + 1) // 2
  v_offset = max_d
  v_length = 2 * max_d
  # v1/v2 record, per diagonal k (shifted by v_offset so negative k still
  # indexes the list), the furthest x reached; -1 marks an unvisited diagonal.
  v1 = [-1] * v_length
  v1[v_offset + 1] = 0
  v2 = v1[:]
  delta = text1_length - text2_length
  # If the total number of characters is odd, then the front path will
  # collide with the reverse path.
  front = (delta % 2 != 0)
  # Offsets for start and end of k loop.
  # Prevents mapping of space beyond the grid.
  k1start = 0
  k1end = 0
  k2start = 0
  k2end = 0
  for d in xrange(max_d):
    # Bail out if deadline is reached.
    if time.time() > deadline:
      break

    # Walk the front path one step.
    for k1 in xrange(-d + k1start, d + 1 - k1end, 2):
      k1_offset = v_offset + k1
      # Decide whether to extend from the diagonal above or below.
      if k1 == -d or (k1 != d and
          v1[k1_offset - 1] < v1[k1_offset + 1]):
        x1 = v1[k1_offset + 1]
      else:
        x1 = v1[k1_offset - 1] + 1
      y1 = x1 - k1
      # Follow the 'snake': advance while characters match.
      while (x1 < text1_length and y1 < text2_length and
             text1[x1] == text2[y1]):
        x1 += 1
        y1 += 1
      v1[k1_offset] = x1
      if x1 > text1_length:
        # Ran off the right of the graph.
        k1end += 2
      elif y1 > text2_length:
        # Ran off the bottom of the graph.
        k1start += 2
      elif front:
        k2_offset = v_offset + delta - k1
        if k2_offset >= 0 and k2_offset < v_length and v2[k2_offset] != -1:
          # Mirror x2 onto top-left coordinate system.
          x2 = text1_length - v2[k2_offset]
          if x1 >= x2:
            # Overlap detected.
            return self.diff_bisectSplit(text1, text2, x1, y1, deadline)

    # Walk the reverse path one step.
    for k2 in xrange(-d + k2start, d + 1 - k2end, 2):
      k2_offset = v_offset + k2
      if k2 == -d or (k2 != d and
          v2[k2_offset - 1] < v2[k2_offset + 1]):
        x2 = v2[k2_offset + 1]
      else:
        x2 = v2[k2_offset - 1] + 1
      y2 = x2 - k2
      # Reverse snake: compare from the ends of both strings inwards.
      while (x2 < text1_length and y2 < text2_length and
             text1[-x2 - 1] == text2[-y2 - 1]):
        x2 += 1
        y2 += 1
      v2[k2_offset] = x2
      if x2 > text1_length:
        # Ran off the left of the graph.
        k2end += 2
      elif y2 > text2_length:
        # Ran off the top of the graph.
        k2start += 2
      elif not front:
        k1_offset = v_offset + delta - k2
        if k1_offset >= 0 and k1_offset < v_length and v1[k1_offset] != -1:
          x1 = v1[k1_offset]
          y1 = v_offset + x1 - k1_offset
          # Mirror x2 onto top-left coordinate system.
          x2 = text1_length - x2
          if x1 >= x2:
            # Overlap detected.
            return self.diff_bisectSplit(text1, text2, x1, y1, deadline)

  # Diff took too long and hit the deadline or
  # number of diffs equals number of characters, no commonality at all.
  return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]
358
def diff_bisectSplit(self, text1, text2, x, y, deadline):
  """Given the location of the 'middle snake', split the diff in two parts
  and recurse.

  Args:
    text1: Old string to be diffed.
    text2: New string to be diffed.
    x: Index of split point in text1.
    y: Index of split point in text2.
    deadline: Time at which to bail if not yet complete.

  Returns:
    Array of diff tuples.
  """
  # Slice each input at the snake location.
  head1, head2 = text1[:x], text2[:y]
  tail1, tail2 = text1[x:], text2[y:]

  # Diff the two halves independently and stitch the results together.
  head_diffs = self.diff_main(head1, head2, False, deadline)
  tail_diffs = self.diff_main(tail1, tail2, False, deadline)
  return head_diffs + tail_diffs
383
def diff_linesToChars(self, text1, text2):
  """Split two texts into an array of strings.  Reduce the texts to a string
  of hashes where each Unicode character represents one line.

  Args:
    text1: First string.
    text2: Second string.

  Returns:
    Three element tuple, containing the encoded text1, the encoded text2 and
    the array of unique strings.  The zeroth element of the array of unique
    strings is intentionally blank.
  """
  lineArray = []  # e.g. lineArray[4] == "Hello\n"
  lineHash = {}  # e.g. lineHash["Hello\n"] == 4

  # "\x00" is a valid character, but various debuggers don't like it.
  # So we'll insert a junk entry to avoid generating a null character.
  lineArray.append('')

  def diff_linesToCharsMunge(text):
    """Split a text into an array of strings.  Reduce the texts to a string
    of hashes where each Unicode character represents one line.
    Modifies linearray and linehash through being a closure.

    Args:
      text: String to encode.

    Returns:
      Encoded string.
    """
    chars = []
    # Walk the text, pulling out a substring for each line.
    # text.split('\n') would temporarily double our memory footprint.
    # Modifying text would create many large strings to garbage collect.
    lineStart = 0
    lineEnd = -1
    while lineEnd < len(text) - 1:
      lineEnd = text.find('\n', lineStart)
      if lineEnd == -1:
        # No trailing newline: the final line runs to the end of the text.
        lineEnd = len(text) - 1
      line = text[lineStart:lineEnd + 1]
      lineStart = lineEnd + 1

      if line in lineHash:
        chars.append(unichr(lineHash[line]))
      else:
        # First occurrence of this line: assign it the next code point.
        lineArray.append(line)
        lineHash[line] = len(lineArray) - 1
        chars.append(unichr(len(lineArray) - 1))
    return "".join(chars)

  chars1 = diff_linesToCharsMunge(text1)
  chars2 = diff_linesToCharsMunge(text2)
  return (chars1, chars2, lineArray)
439
def diff_charsToLines(self, diffs, lineArray):
  """Rehydrate the text in a diff from a string of line hashes to real lines
  of text.  Modifies diffs in place.

  Args:
    diffs: Array of diff tuples (op, encoded_text); each character of
      encoded_text is an index into lineArray (see diff_linesToChars).
    lineArray: Array of unique strings.
  """
  # enumerate replaces the range(len(...)) anti-idiom; replacing items
  # while enumerating a list is safe since the length never changes.
  for index, (op, data) in enumerate(diffs):
    text = []
    for char in data:
      # Each character's code point indexes the original line.
      text.append(lineArray[ord(char)])
    diffs[index] = (op, "".join(text))
453
def diff_commonPrefix(self, text1, text2):
  """Determine the common prefix of two strings.

  Args:
    text1: First string.
    text2: Second string.

  Returns:
    The number of characters common to the start of each string.
  """
  # Trivial cases: an empty input or an immediate first-character mismatch.
  if not text1 or not text2 or text1[0] != text2[0]:
    return 0
  # Binary search over the prefix length.
  # Performance analysis: http://neil.fraser.name/news/2007/10/09/
  lo = 0
  hi = min(len(text1), len(text2))
  mid = hi
  start = 0
  while lo < mid:
    if text1[start:mid] == text2[start:mid]:
      # The slice matches: everything up to mid is common prefix, and
      # future comparisons can skip the part already verified.
      lo = mid
      start = lo
    else:
      hi = mid
    mid = (hi - lo) // 2 + lo
  return mid
481
def diff_commonSuffix(self, text1, text2):
  """Determine the common suffix of two strings.

  Args:
    text1: First string.
    text2: Second string.

  Returns:
    The number of characters common to the end of each string.
  """
  # Trivial cases: an empty input or an immediate last-character mismatch.
  if not text1 or not text2 or text1[-1] != text2[-1]:
    return 0
  # Binary search over the suffix length, mirroring diff_commonPrefix.
  # Performance analysis: http://neil.fraser.name/news/2007/10/09/
  lo = 0
  hi = min(len(text1), len(text2))
  mid = hi
  end = 0
  while lo < mid:
    if (text1[-mid:len(text1) - end] ==
        text2[-mid:len(text2) - end]):
      # The tail slice matches: skip the verified part in later compares.
      lo = mid
      end = lo
    else:
      hi = mid
    mid = (hi - lo) // 2 + lo
  return mid
510
def diff_commonOverlap(self, text1, text2):
  """Determine if the suffix of one string is the prefix of another.

  Args:
    text1 First string.
    text2 Second string.

  Returns:
    The number of characters common to the end of the first
    string and the start of the second string.
  """
  # Cache lengths; either empty string means no overlap is possible.
  len1 = len(text1)
  len2 = len(text2)
  if len1 == 0 or len2 == 0:
    return 0
  # Truncate the longer string so both are the same length.
  if len1 > len2:
    text1 = text1[-len2:]
  elif len1 < len2:
    text2 = text2[:len1]
  text_length = min(len1, len2)
  # Quick check for the worst case.
  if text1 == text2:
    return text_length

  # Start by looking for a single character match
  # and increase length until no match is found.
  # Performance analysis: http://neil.fraser.name/news/2010/11/04/
  best = 0
  length = 1
  while True:
    pattern = text1[-length:]
    found = text2.find(pattern)
    if found == -1:
      return best
    length += found
    if found == 0 or text1[-length:] == text2[:length]:
      # The current tail of text1 really is a prefix of text2.
      best = length
    length += 1
552
def diff_halfMatch(self, text1, text2):
  """Do the two texts share a substring which is at least half the length of
  the longer text?
  This speedup can produce non-minimal diffs.

  Args:
    text1: First string.
    text2: Second string.

  Returns:
    Five element Array, containing the prefix of text1, the suffix of text1,
    the prefix of text2, the suffix of text2 and the common middle.  Or None
    if there was no match.
  """
  if self.Diff_Timeout <= 0:
    # Don't risk returning a non-optimal diff if we have unlimited time.
    return None
  if len(text1) > len(text2):
    (longtext, shorttext) = (text1, text2)
  else:
    (shorttext, longtext) = (text1, text2)
  if len(longtext) < 4 or len(shorttext) * 2 < len(longtext):
    return None  # Pointless.

  def diff_halfMatchI(longtext, shorttext, i):
    """Does a substring of shorttext exist within longtext such that the
    substring is at least half the length of longtext?
    Closure, but does not reference any external variables.

    Args:
      longtext: Longer string.
      shorttext: Shorter string.
      i: Start index of quarter length substring within longtext.

    Returns:
      Five element Array, containing the prefix of longtext, the suffix of
      longtext, the prefix of shorttext, the suffix of shorttext and the
      common middle.  Or None if there was no match.
    """
    # A quarter-length substring of longtext serves as the search seed.
    seed = longtext[i:i + len(longtext) // 4]
    best_common = ''
    j = shorttext.find(seed)
    while j != -1:
      # Grow the match outwards around each occurrence of the seed.
      prefixLength = self.diff_commonPrefix(longtext[i:], shorttext[j:])
      suffixLength = self.diff_commonSuffix(longtext[:i], shorttext[:j])
      if len(best_common) < suffixLength + prefixLength:
        best_common = (shorttext[j - suffixLength:j] +
                       shorttext[j:j + prefixLength])
        best_longtext_a = longtext[:i - suffixLength]
        best_longtext_b = longtext[i + prefixLength:]
        best_shorttext_a = shorttext[:j - suffixLength]
        best_shorttext_b = shorttext[j + prefixLength:]
      j = shorttext.find(seed, j + 1)

    if len(best_common) * 2 >= len(longtext):
      # A non-empty best_common implies the assignment branch above ran,
      # so the best_* locals are guaranteed to be bound here.
      return (best_longtext_a, best_longtext_b,
              best_shorttext_a, best_shorttext_b, best_common)
    else:
      return None

  # First check if the second quarter is the seed for a half-match.
  hm1 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 3) // 4)
  # Check again based on the third quarter.
  hm2 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 1) // 2)
  if not hm1 and not hm2:
    return None
  elif not hm2:
    hm = hm1
  elif not hm1:
    hm = hm2
  else:
    # Both matched.  Select the longest.
    if len(hm1[4]) > len(hm2[4]):
      hm = hm1
    else:
      hm = hm2

  # A half-match was found, sort out the return data.
  # The closure works in longtext/shorttext order; swap back if needed.
  if len(text1) > len(text2):
    (text1_a, text1_b, text2_a, text2_b, mid_common) = hm
  else:
    (text2_a, text2_b, text1_a, text1_b, mid_common) = hm
  return (text1_a, text1_b, text2_a, text2_b, mid_common)
636
def diff_cleanupSemantic(self, diffs):
  """Reduce the number of edits by eliminating semantically trivial
  equalities.  Modifies diffs in place.

  Args:
    diffs: Array of diff tuples.
  """
  changes = False
  equalities = []  # Stack of indices where equalities are found.
  lastequality = None  # Always equal to diffs[equalities[-1]][1]
  pointer = 0  # Index of current position.
  # Number of chars that changed prior to the equality.
  length_insertions1, length_deletions1 = 0, 0
  # Number of chars that changed after the equality.
  length_insertions2, length_deletions2 = 0, 0
  while pointer < len(diffs):
    if diffs[pointer][0] == self.DIFF_EQUAL:  # Equality found.
      equalities.append(pointer)
      # Counts accumulated so far now describe the region *before* this
      # equality; start fresh counts for the region after it.
      length_insertions1, length_insertions2 = length_insertions2, 0
      length_deletions1, length_deletions2 = length_deletions2, 0
      lastequality = diffs[pointer][1]
    else:  # An insertion or deletion.
      if diffs[pointer][0] == self.DIFF_INSERT:
        length_insertions2 += len(diffs[pointer][1])
      else:
        length_deletions2 += len(diffs[pointer][1])
      # Eliminate an equality that is smaller or equal to the edits on both
      # sides of it.
      if (lastequality and (len(lastequality) <=
          max(length_insertions1, length_deletions1)) and
          (len(lastequality) <= max(length_insertions2, length_deletions2))):
        # Duplicate record.
        diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
        # Change second copy to insert.
        diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
                                     diffs[equalities[-1] + 1][1])
        # Throw away the equality we just deleted.
        equalities.pop()
        # Throw away the previous equality (it needs to be reevaluated).
        if len(equalities):
          equalities.pop()
        if len(equalities):
          pointer = equalities[-1]
        else:
          # Stack empty: restart the scan (the += 1 below moves us to 0).
          pointer = -1
        # Reset the counters.
        length_insertions1, length_deletions1 = 0, 0
        length_insertions2, length_deletions2 = 0, 0
        lastequality = None
        changes = True
    pointer += 1

  # Normalize the diff.
  if changes:
    self.diff_cleanupMerge(diffs)
  self.diff_cleanupSemanticLossless(diffs)

  # Find any overlaps between deletions and insertions.
  # e.g: <del>abcxxx</del><ins>xxxdef</ins>
  #   -> <del>abc</del>xxx<ins>def</ins>
  # e.g: <del>xxxabc</del><ins>defxxx</ins>
  #   -> <ins>def</ins>xxx<del>abc</del>
  # Only extract an overlap if it is as big as the edit ahead or behind it.
  pointer = 1
  while pointer < len(diffs):
    if (diffs[pointer - 1][0] == self.DIFF_DELETE and
        diffs[pointer][0] == self.DIFF_INSERT):
      deletion = diffs[pointer - 1][1]
      insertion = diffs[pointer][1]
      overlap_length1 = self.diff_commonOverlap(deletion, insertion)
      overlap_length2 = self.diff_commonOverlap(insertion, deletion)
      if overlap_length1 >= overlap_length2:
        if (overlap_length1 >= len(deletion) / 2.0 or
            overlap_length1 >= len(insertion) / 2.0):
          # Overlap found.  Insert an equality and trim the surrounding edits.
          diffs.insert(pointer, (self.DIFF_EQUAL,
                                 insertion[:overlap_length1]))
          diffs[pointer - 1] = (self.DIFF_DELETE,
                                deletion[:len(deletion) - overlap_length1])
          diffs[pointer + 1] = (self.DIFF_INSERT,
                                insertion[overlap_length1:])
          pointer += 1
      else:
        if (overlap_length2 >= len(deletion) / 2.0 or
            overlap_length2 >= len(insertion) / 2.0):
          # Reverse overlap found.
          # Insert an equality and swap and trim the surrounding edits.
          diffs.insert(pointer, (self.DIFF_EQUAL, deletion[:overlap_length2]))
          diffs[pointer - 1] = (self.DIFF_INSERT,
                                insertion[:len(insertion) - overlap_length2])
          diffs[pointer + 1] = (self.DIFF_DELETE, deletion[overlap_length2:])
          pointer += 1
      pointer += 1
    pointer += 1
731
def diff_cleanupSemanticLossless(self, diffs):
  """Look for single edits surrounded on both sides by equalities
  which can be shifted sideways to align the edit to a word boundary.
  e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
  Modifies diffs in place.

  Args:
    diffs: Array of diff tuples.
  """

  def diff_cleanupSemanticScore(one, two):
    """Given two strings, compute a score representing whether the
    internal boundary falls on logical boundaries.
    Scores range from 6 (best) to 0 (worst).
    Closure, but does not reference any external variables.

    Args:
      one: First string.
      two: Second string.

    Returns:
      The score.
    """
    if not one or not two:
      # Edges are the best.
      return 6

    # Each port of this function behaves slightly differently due to
    # subtle differences in each language's definition of things like
    # 'whitespace'.  Since this function's purpose is largely cosmetic,
    # the choice has been made to use each language's native features
    # rather than force total conformity.
    char1 = one[-1]  # last character before the boundary
    char2 = two[0]  # first character after the boundary
    nonAlphaNumeric1 = not char1.isalnum()
    nonAlphaNumeric2 = not char2.isalnum()
    whitespace1 = nonAlphaNumeric1 and char1.isspace()
    whitespace2 = nonAlphaNumeric2 and char2.isspace()
    lineBreak1 = whitespace1 and (char1 == "\r" or char1 == "\n")
    lineBreak2 = whitespace2 and (char2 == "\r" or char2 == "\n")
    blankLine1 = lineBreak1 and self.BLANKLINEEND.search(one)
    blankLine2 = lineBreak2 and self.BLANKLINESTART.match(two)

    if blankLine1 or blankLine2:
      # Five points for blank lines.
      return 5
    elif lineBreak1 or lineBreak2:
      # Four points for line breaks.
      return 4
    elif nonAlphaNumeric1 and not whitespace1 and whitespace2:
      # Three points for end of sentences.
      return 3
    elif whitespace1 or whitespace2:
      # Two points for whitespace.
      return 2
    elif nonAlphaNumeric1 or nonAlphaNumeric2:
      # One point for non-alphanumeric.
      return 1
    return 0

  pointer = 1
  # Intentionally ignore the first and last element (don't need checking).
  while pointer < len(diffs) - 1:
    if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
        diffs[pointer + 1][0] == self.DIFF_EQUAL):
      # This is a single edit surrounded by equalities.
      equality1 = diffs[pointer - 1][1]
      edit = diffs[pointer][1]
      equality2 = diffs[pointer + 1][1]

      # First, shift the edit as far left as possible.
      commonOffset = self.diff_commonSuffix(equality1, edit)
      if commonOffset:
        commonString = edit[-commonOffset:]
        equality1 = equality1[:-commonOffset]
        edit = commonString + edit[:-commonOffset]
        equality2 = commonString + equality2

      # Second, step character by character right, looking for the best fit.
      bestEquality1 = equality1
      bestEdit = edit
      bestEquality2 = equality2
      bestScore = (diff_cleanupSemanticScore(equality1, edit) +
                   diff_cleanupSemanticScore(edit, equality2))
      while edit and equality2 and edit[0] == equality2[0]:
        # Slide the edit window one character to the right.
        equality1 += edit[0]
        edit = edit[1:] + equality2[0]
        equality2 = equality2[1:]
        score = (diff_cleanupSemanticScore(equality1, edit) +
                 diff_cleanupSemanticScore(edit, equality2))
        # The >= encourages trailing rather than leading whitespace on edits.
        if score >= bestScore:
          bestScore = score
          bestEquality1 = equality1
          bestEdit = edit
          bestEquality2 = equality2

      if diffs[pointer - 1][1] != bestEquality1:
        # We have an improvement, save it back to the diff.
        if bestEquality1:
          diffs[pointer - 1] = (diffs[pointer - 1][0], bestEquality1)
        else:
          # The preceding equality was fully absorbed; drop it.
          del diffs[pointer - 1]
          pointer -= 1
        diffs[pointer] = (diffs[pointer][0], bestEdit)
        if bestEquality2:
          diffs[pointer + 1] = (diffs[pointer + 1][0], bestEquality2)
        else:
          # The following equality was fully absorbed; drop it.
          del diffs[pointer + 1]
          pointer -= 1
    pointer += 1
842
# Define some regex patterns for matching boundaries.
# BLANKLINEEND matches a blank line at the end of a string and
# BLANKLINESTART matches one at the start; both are used by
# diff_cleanupSemanticLossless for its blank-line score.  The trailing
# semicolons are harmless leftovers from the Java/JavaScript ports.
BLANKLINEEND = re.compile(r"\n\r?\n$");
BLANKLINESTART = re.compile(r"^\r?\n\r?\n");
846
def diff_cleanupEfficiency(self, diffs):
  """Reduce the number of edits by eliminating operationally trivial
  equalities.  Modifies diffs in place.

  Args:
    diffs: Array of diff tuples.
  """
  changes = False
  equalities = []  # Stack of indices where equalities are found.
  lastequality = None  # Always equal to diffs[equalities[-1]][1]
  pointer = 0  # Index of current position.
  pre_ins = False  # Is there an insertion operation before the last equality.
  pre_del = False  # Is there a deletion operation before the last equality.
  post_ins = False  # Is there an insertion operation after the last equality.
  post_del = False  # Is there a deletion operation after the last equality.
  while pointer < len(diffs):
    if diffs[pointer][0] == self.DIFF_EQUAL:  # Equality found.
      if (len(diffs[pointer][1]) < self.Diff_EditCost and
          (post_ins or post_del)):
        # Candidate found.
        equalities.append(pointer)
        pre_ins = post_ins
        pre_del = post_del
        lastequality = diffs[pointer][1]
      else:
        # Not a candidate, and can never become one.
        equalities = []
        lastequality = None

      post_ins = post_del = False
    else:  # An insertion or deletion.
      if diffs[pointer][0] == self.DIFF_DELETE:
        post_del = True
      else:
        post_ins = True

      # Five types to be split:
      # <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
      # <ins>A</ins>X<ins>C</ins><del>D</del>
      # <ins>A</ins><del>B</del>X<ins>C</ins>
      # <ins>A</del>X<ins>C</ins><del>D</del>
      # <ins>A</ins><del>B</del>X<del>C</del>

      # (The booleans are summed below: True counts as 1, so the second
      # clause means "exactly three of the four surrounding edit types".)
      if lastequality and ((pre_ins and pre_del and post_ins and post_del) or
                           ((len(lastequality) < self.Diff_EditCost / 2) and
                            (pre_ins + pre_del + post_ins + post_del) == 3)):
        # Duplicate record.
        diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
        # Change second copy to insert.
        diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
                                     diffs[equalities[-1] + 1][1])
        equalities.pop()  # Throw away the equality we just deleted.
        lastequality = None
        if pre_ins and pre_del:
          # No changes made which could affect previous entry, keep going.
          post_ins = post_del = True
          equalities = []
        else:
          if len(equalities):
            equalities.pop()  # Throw away the previous equality.
          if len(equalities):
            pointer = equalities[-1]
          else:
            # Stack empty: restart the scan (the += 1 below moves us to 0).
            pointer = -1
          post_ins = post_del = False
        changes = True
    pointer += 1

  if changes:
    self.diff_cleanupMerge(diffs)
917
  def diff_cleanupMerge(self, diffs):
    """Reorder and merge like edit sections.  Merge equalities.
    Any edit section can move as long as it doesn't cross an equality.
    Mutates the diffs list in place; returns nothing.

    Args:
      diffs: Array of diff tuples.
    """
    diffs.append((self.DIFF_EQUAL, ''))  # Add a dummy entry at the end.
    pointer = 0
    count_delete = 0   # Number of consecutive DELETE entries accumulated.
    count_insert = 0   # Number of consecutive INSERT entries accumulated.
    text_delete = ''   # Concatenated text of the accumulated deletions.
    text_insert = ''   # Concatenated text of the accumulated insertions.
    while pointer < len(diffs):
      if diffs[pointer][0] == self.DIFF_INSERT:
        count_insert += 1
        text_insert += diffs[pointer][1]
        pointer += 1
      elif diffs[pointer][0] == self.DIFF_DELETE:
        count_delete += 1
        text_delete += diffs[pointer][1]
        pointer += 1
      elif diffs[pointer][0] == self.DIFF_EQUAL:
        # Upon reaching an equality, check for prior redundancies.
        if count_delete + count_insert > 1:
          if count_delete != 0 and count_insert != 0:
            # Factor out any common prefixies.
            commonlength = self.diff_commonPrefix(text_insert, text_delete)
            if commonlength != 0:
              x = pointer - count_delete - count_insert - 1
              if x >= 0 and diffs[x][0] == self.DIFF_EQUAL:
                # Grow the preceding equality with the shared prefix.
                diffs[x] = (diffs[x][0], diffs[x][1] +
                            text_insert[:commonlength])
              else:
                # No preceding equality: create one at the head of the list.
                diffs.insert(0, (self.DIFF_EQUAL, text_insert[:commonlength]))
                pointer += 1
              text_insert = text_insert[commonlength:]
              text_delete = text_delete[commonlength:]
            # Factor out any common suffixies.
            commonlength = self.diff_commonSuffix(text_insert, text_delete)
            if commonlength != 0:
              # Prepend the shared suffix onto the equality just reached.
              diffs[pointer] = (diffs[pointer][0], text_insert[-commonlength:] +
                                diffs[pointer][1])
              text_insert = text_insert[:-commonlength]
              text_delete = text_delete[:-commonlength]
          # Delete the offending records and add the merged ones.
          if count_delete == 0:
            diffs[pointer - count_insert : pointer] = [
                (self.DIFF_INSERT, text_insert)]
          elif count_insert == 0:
            diffs[pointer - count_delete : pointer] = [
                (self.DIFF_DELETE, text_delete)]
          else:
            diffs[pointer - count_delete - count_insert : pointer] = [
                (self.DIFF_DELETE, text_delete),
                (self.DIFF_INSERT, text_insert)]
          # Reposition pointer just past the record(s) we spliced in.
          pointer = pointer - count_delete - count_insert + 1
          if count_delete != 0:
            pointer += 1
          if count_insert != 0:
            pointer += 1
        elif pointer != 0 and diffs[pointer - 1][0] == self.DIFF_EQUAL:
          # Merge this equality with the previous one.
          diffs[pointer - 1] = (diffs[pointer - 1][0],
                                diffs[pointer - 1][1] + diffs[pointer][1])
          del diffs[pointer]
        else:
          pointer += 1

        # Reset accumulators after processing each equality.
        count_insert = 0
        count_delete = 0
        text_delete = ''
        text_insert = ''

    if diffs[-1][1] == '':
      diffs.pop()  # Remove the dummy entry at the end.

    # Second pass: look for single edits surrounded on both sides by equalities
    # which can be shifted sideways to eliminate an equality.
    # e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
    changes = False
    pointer = 1
    # Intentionally ignore the first and last element (don't need checking).
    while pointer < len(diffs) - 1:
      if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
          diffs[pointer + 1][0] == self.DIFF_EQUAL):
        # This is a single edit surrounded by equalities.
        if diffs[pointer][1].endswith(diffs[pointer - 1][1]):
          # Shift the edit over the previous equality.
          diffs[pointer] = (diffs[pointer][0],
                            diffs[pointer - 1][1] +
                            diffs[pointer][1][:-len(diffs[pointer - 1][1])])
          diffs[pointer + 1] = (diffs[pointer + 1][0],
                                diffs[pointer - 1][1] + diffs[pointer + 1][1])
          del diffs[pointer - 1]
          changes = True
        elif diffs[pointer][1].startswith(diffs[pointer + 1][1]):
          # Shift the edit over the next equality.
          diffs[pointer - 1] = (diffs[pointer - 1][0],
                                diffs[pointer - 1][1] + diffs[pointer + 1][1])
          diffs[pointer] = (diffs[pointer][0],
                            diffs[pointer][1][len(diffs[pointer + 1][1]):] +
                            diffs[pointer + 1][1])
          del diffs[pointer + 1]
          changes = True
      pointer += 1

    # If shifts were made, the diff needs reordering and another shift sweep.
    if changes:
      self.diff_cleanupMerge(diffs)
1028
1029 def diff_xIndex(self, diffs, loc):
1030 """loc is a location in text1, compute and return the equivalent location
1031 in text2. e.g. "The cat" vs "The big cat", 1->1, 5->8
1032
1033 Args:
1034 diffs: Array of diff tuples.
1035 loc: Location within text1.
1036
1037 Returns:
1038 Location within text2.
1039 """
1040 chars1 = 0
1041 chars2 = 0
1042 last_chars1 = 0
1043 last_chars2 = 0
1044 for x in xrange(len(diffs)):
1045 (op, text) = diffs[x]
1046 if op != self.DIFF_INSERT: # Equality or deletion.
1047 chars1 += len(text)
1048 if op != self.DIFF_DELETE: # Equality or insertion.
1049 chars2 += len(text)
1050 if chars1 > loc: # Overshot the location.
1051 break
1052 last_chars1 = chars1
1053 last_chars2 = chars2
1054
1055 if len(diffs) != x and diffs[x][0] == self.DIFF_DELETE:
1056 # The location was deleted.
1057 return last_chars2
1058 # Add the remaining len(character).
1059 return last_chars2 + (loc - last_chars1)
1060
1061 def diff_prettyHtml(self, diffs):
1062 """Convert a diff array into a pretty HTML report.
1063
1064 Args:
1065 diffs: Array of diff tuples.
1066
1067 Returns:
1068 HTML representation.
1069 """
1070 html = []
1071 for (op, data) in diffs:
1072 text = (data.replace("&", "&amp;").replace("<", "&lt;")
1073 .replace(">", "&gt;").replace("\n", "&para;<br>"))
1074 if op == self.DIFF_INSERT:
1075 html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
1076 elif op == self.DIFF_DELETE:
1077 html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
1078 elif op == self.DIFF_EQUAL:
1079 html.append("<span>%s</span>" % text)
1080 return "".join(html)
1081
1082 def diff_text1(self, diffs):
1083 """Compute and return the source text (all equalities and deletions).
1084
1085 Args:
1086 diffs: Array of diff tuples.
1087
1088 Returns:
1089 Source text.
1090 """
1091 text = []
1092 for (op, data) in diffs:
1093 if op != self.DIFF_INSERT:
1094 text.append(data)
1095 return "".join(text)
1096
1097 def diff_text2(self, diffs):
1098 """Compute and return the destination text (all equalities and insertions).
1099
1100 Args:
1101 diffs: Array of diff tuples.
1102
1103 Returns:
1104 Destination text.
1105 """
1106 text = []
1107 for (op, data) in diffs:
1108 if op != self.DIFF_DELETE:
1109 text.append(data)
1110 return "".join(text)
1111
1112 def diff_levenshtein(self, diffs):
1113 """Compute the Levenshtein distance; the number of inserted, deleted or
1114 substituted characters.
1115
1116 Args:
1117 diffs: Array of diff tuples.
1118
1119 Returns:
1120 Number of changes.
1121 """
1122 levenshtein = 0
1123 insertions = 0
1124 deletions = 0
1125 for (op, data) in diffs:
1126 if op == self.DIFF_INSERT:
1127 insertions += len(data)
1128 elif op == self.DIFF_DELETE:
1129 deletions += len(data)
1130 elif op == self.DIFF_EQUAL:
1131 # A deletion and an insertion is one substitution.
1132 levenshtein += max(insertions, deletions)
1133 insertions = 0
1134 deletions = 0
1135 levenshtein += max(insertions, deletions)
1136 return levenshtein
1137
  def diff_toDelta(self, diffs):
    """Crush the diff into an encoded string which describes the operations
    required to transform text1 into text2.
    E.g. =3\t-2\t+ing  -> Keep 3 chars, delete 2 chars, insert 'ing'.
    Operations are tab-separated.  Inserted text is escaped using %xx notation.

    Args:
      diffs: Array of diff tuples.

    Returns:
      Delta text.
    """
    text = []
    for (op, data) in diffs:
      if op == self.DIFF_INSERT:
        # High ascii will raise UnicodeDecodeError.  Use Unicode instead.
        # (Python 2 only: urllib.quote over utf-8 encoded bytes.)
        data = data.encode("utf-8")
        # The listed characters are left unescaped; presumably chosen to
        # match the escaping of the other dmp ports -- verify upstream.
        text.append("+" + urllib.quote(data, "!~*'();/?:@&=+$,# "))
      elif op == self.DIFF_DELETE:
        # Deletions and equalities only carry a character count; the text
        # itself is recoverable from text1 (see diff_fromDelta).
        text.append("-%d" % len(data))
      elif op == self.DIFF_EQUAL:
        text.append("=%d" % len(data))
    return "\t".join(text)
1161
  def diff_fromDelta(self, text1, delta):
    """Given the original text1, and an encoded string which describes the
    operations required to transform text1 into text2, compute the full diff.

    Args:
      text1: Source string for the diff.
      delta: Delta text.

    Returns:
      Array of diff tuples.

    Raises:
      ValueError: If invalid input.
    """
    if type(delta) == unicode:
      # Deltas should be composed of a subset of ascii chars, Unicode not
      # required.  If this encode raises UnicodeEncodeError, delta is invalid.
      # (Python 2 only: unicode/str distinction.)
      delta = delta.encode("ascii")
    diffs = []
    pointer = 0  # Cursor in text1
    tokens = delta.split("\t")
    for token in tokens:
      if token == "":
        # Blank tokens are ok (from a trailing \t).
        continue
      # Each token begins with a one character parameter which specifies the
      # operation of this token (delete, insert, equality).
      param = token[1:]
      if token[0] == "+":
        # Inserted text travels inside the delta itself, %xx-escaped.
        param = urllib.unquote(param).decode("utf-8")
        diffs.append((self.DIFF_INSERT, param))
      elif token[0] == "-" or token[0] == "=":
        # Delete/equal tokens carry only a count; the text is sliced back
        # out of text1 at the current cursor.
        try:
          n = int(param)
        except ValueError:
          raise ValueError("Invalid number in diff_fromDelta: " + param)
        if n < 0:
          raise ValueError("Negative number in diff_fromDelta: " + param)
        text = text1[pointer : pointer + n]
        pointer += n
        if token[0] == "=":
          diffs.append((self.DIFF_EQUAL, text))
        else:
          diffs.append((self.DIFF_DELETE, text))
      else:
        # Anything else is an error.
        raise ValueError("Invalid diff operation in diff_fromDelta: " +
            token[0])
    # A valid delta must consume text1 exactly.
    if pointer != len(text1):
      raise ValueError(
          "Delta length (%d) does not equal source text length (%d)." %
          (pointer, len(text1)))
    return diffs
1215
1216 # MATCH FUNCTIONS
1217
1218 def match_main(self, text, pattern, loc):
1219 """Locate the best instance of 'pattern' in 'text' near 'loc'.
1220
1221 Args:
1222 text: The text to search.
1223 pattern: The pattern to search for.
1224 loc: The location to search around.
1225
1226 Returns:
1227 Best match index or -1.
1228 """
1229 # Check for null inputs.
1230 if text == None or pattern == None:
1231 raise ValueError("Null inputs. (match_main)")
1232
1233 loc = max(0, min(loc, len(text)))
1234 if text == pattern:
1235 # Shortcut (potentially not guaranteed by the algorithm)
1236 return 0
1237 elif not text:
1238 # Nothing to match.
1239 return -1
1240 elif text[loc:loc + len(pattern)] == pattern:
1241 # Perfect match at the perfect spot! (Includes case of null pattern)
1242 return loc
1243 else:
1244 # Do a fuzzy compare.
1245 match = self.match_bitap(text, pattern, loc)
1246 return match
1247
  def match_bitap(self, text, pattern, loc):
    """Locate the best instance of 'pattern' in 'text' near 'loc' using the
    Bitap algorithm.

    Scans at increasing error levels; at each level a binary search bounds
    how far from 'loc' a match may stray while still beating the current
    score threshold.

    Args:
      text: The text to search.
      pattern: The pattern to search for.
      loc: The location to search around.

    Returns:
      Best match index or -1.
    """
    # Python doesn't have a maxint limit, so ignore this check.
    #if self.Match_MaxBits != 0 and len(pattern) > self.Match_MaxBits:
    #  raise ValueError("Pattern too long for this application.")

    # Initialise the alphabet (per-character bitmasks of pattern positions).
    s = self.match_alphabet(pattern)

    def match_bitapScore(e, x):
      """Compute and return the score for a match with e errors and x location.
      Accesses loc and pattern through being a closure.

      Args:
        e: Number of errors in match.
        x: Location of match.

      Returns:
        Overall score for match (0.0 = good, 1.0 = bad).
      """
      accuracy = float(e) / len(pattern)
      proximity = abs(loc - x)
      if not self.Match_Distance:
        # Dodge divide by zero error.
        return proximity and 1.0 or accuracy
      return accuracy + (proximity / float(self.Match_Distance))

    # Highest score beyond which we give up.
    score_threshold = self.Match_Threshold
    # Is there a nearby exact match? (speedup)
    best_loc = text.find(pattern, loc)
    if best_loc != -1:
      score_threshold = min(match_bitapScore(0, best_loc), score_threshold)
      # What about in the other direction? (speedup)
      best_loc = text.rfind(pattern, loc + len(pattern))
      if best_loc != -1:
        score_threshold = min(match_bitapScore(0, best_loc), score_threshold)

    # Initialise the bit arrays.  matchmask has a 1 at the bit position that
    # indicates a complete pattern match.
    matchmask = 1 << (len(pattern) - 1)
    best_loc = -1

    bin_max = len(pattern) + len(text)
    # Empty initialization added to appease pychecker.
    last_rd = None
    for d in xrange(len(pattern)):
      # Scan for the best match each iteration allows for one more error.
      # Run a binary search to determine how far from 'loc' we can stray at
      # this error level.
      bin_min = 0
      bin_mid = bin_max
      while bin_min < bin_mid:
        if match_bitapScore(d, loc + bin_mid) <= score_threshold:
          bin_min = bin_mid
        else:
          bin_max = bin_mid
        bin_mid = (bin_max - bin_min) // 2 + bin_min

      # Use the result from this iteration as the maximum for the next.
      bin_max = bin_mid
      start = max(1, loc - bin_mid + 1)
      finish = min(loc + bin_mid, len(text)) + len(pattern)

      # rd[j] holds the bitap state after consuming text up to position j-1
      # (scanned right-to-left).
      rd = [0] * (finish + 2)
      rd[finish + 1] = (1 << d) - 1
      for j in xrange(finish, start - 1, -1):
        if len(text) <= j - 1:
          # Out of range.
          charMatch = 0
        else:
          charMatch = s.get(text[j - 1], 0)
        if d == 0:  # First pass: exact match.
          rd[j] = ((rd[j + 1] << 1) | 1) & charMatch
        else:  # Subsequent passes: fuzzy match.
          rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | (
              ((last_rd[j + 1] | last_rd[j]) << 1) | 1) | last_rd[j + 1]
        if rd[j] & matchmask:
          score = match_bitapScore(d, j - 1)
          # This match will almost certainly be better than any existing match.
          # But check anyway.
          if score <= score_threshold:
            # Told you so.
            score_threshold = score
            best_loc = j - 1
            if best_loc > loc:
              # When passing loc, don't exceed our current distance from loc.
              start = max(1, 2 * loc - best_loc)
            else:
              # Already passed loc, downhill from here on in.
              break
      # No hope for a (better) match at greater error levels.
      if match_bitapScore(d + 1, loc) > score_threshold:
        break
      last_rd = rd
    return best_loc
1353
1354 def match_alphabet(self, pattern):
1355 """Initialise the alphabet for the Bitap algorithm.
1356
1357 Args:
1358 pattern: The text to encode.
1359
1360 Returns:
1361 Hash of character locations.
1362 """
1363 s = {}
1364 for char in pattern:
1365 s[char] = 0
1366 for i in xrange(len(pattern)):
1367 s[pattern[i]] |= 1 << (len(pattern) - i - 1)
1368 return s
1369
1370 # PATCH FUNCTIONS
1371
1372 def patch_addContext(self, patch, text):
1373 """Increase the context until it is unique,
1374 but don't let the pattern expand beyond Match_MaxBits.
1375
1376 Args:
1377 patch: The patch to grow.
1378 text: Source text.
1379 """
1380 if len(text) == 0:
1381 return
1382 pattern = text[patch.start2 : patch.start2 + patch.length1]
1383 padding = 0
1384
1385 # Look for the first and last matches of pattern in text. If two different
1386 # matches are found, increase the pattern length.
1387 while (text.find(pattern) != text.rfind(pattern) and (self.Match_MaxBits ==
1388 0 or len(pattern) < self.Match_MaxBits - self.Patch_Margin -
1389 self.Patch_Margin)):
1390 padding += self.Patch_Margin
1391 pattern = text[max(0, patch.start2 - padding) :
1392 patch.start2 + patch.length1 + padding]
1393 # Add one chunk for good luck.
1394 padding += self.Patch_Margin
1395
1396 # Add the prefix.
1397 prefix = text[max(0, patch.start2 - padding) : patch.start2]
1398 if prefix:
1399 patch.diffs[:0] = [(self.DIFF_EQUAL, prefix)]
1400 # Add the suffix.
1401 suffix = text[patch.start2 + patch.length1 :
1402 patch.start2 + patch.length1 + padding]
1403 if suffix:
1404 patch.diffs.append((self.DIFF_EQUAL, suffix))
1405
1406 # Roll back the start points.
1407 patch.start1 -= len(prefix)
1408 patch.start2 -= len(prefix)
1409 # Extend lengths.
1410 patch.length1 += len(prefix) + len(suffix)
1411 patch.length2 += len(prefix) + len(suffix)
1412
  def patch_make(self, a, b=None, c=None):
    """Compute a list of patches to turn text1 into text2.
    Use diffs if provided, otherwise compute it ourselves.
    There are four ways to call this function, depending on what data is
    available to the caller:
    Method 1:
    a = text1, b = text2
    Method 2:
    a = diffs
    Method 3 (optimal):
    a = text1, b = diffs
    Method 4 (deprecated, use method 3):
    a = text1, b = text2, c = diffs

    Args:
      a: text1 (methods 1,3,4) or Array of diff tuples for text1 to
          text2 (method 2).
      b: text2 (methods 1,4) or Array of diff tuples for text1 to
          text2 (method 3) or undefined (method 2).
      c: Array of diff tuples for text1 to text2 (method 4) or
          undefined (methods 1,2,3).

    Returns:
      Array of Patch objects.

    Raises:
      ValueError: If the argument combination matches none of the methods.
    """
    text1 = None
    diffs = None
    # Note that texts may arrive as 'str' or 'unicode'.
    if isinstance(a, basestring) and isinstance(b, basestring) and c is None:
      # Method 1: text1, text2
      # Compute diffs from text1 and text2.
      text1 = a
      diffs = self.diff_main(text1, b, True)
      if len(diffs) > 2:
        self.diff_cleanupSemantic(diffs)
        self.diff_cleanupEfficiency(diffs)
    elif isinstance(a, list) and b is None and c is None:
      # Method 2: diffs
      # Compute text1 from diffs.
      diffs = a
      text1 = self.diff_text1(diffs)
    elif isinstance(a, basestring) and isinstance(b, list) and c is None:
      # Method 3: text1, diffs
      text1 = a
      diffs = b
    elif (isinstance(a, basestring) and isinstance(b, basestring) and
          isinstance(c, list)):
      # Method 4: text1, text2, diffs
      # text2 is not used.
      text1 = a
      diffs = c
    else:
      raise ValueError("Unknown call format to patch_make.")

    if not diffs:
      return []  # Get rid of the None case.
    patches = []
    patch = patch_obj()
    char_count1 = 0  # Number of characters into the text1 string.
    char_count2 = 0  # Number of characters into the text2 string.
    prepatch_text = text1  # Recreate the patches to determine context info.
    postpatch_text = text1
    for x in xrange(len(diffs)):
      (diff_type, diff_text) = diffs[x]
      if len(patch.diffs) == 0 and diff_type != self.DIFF_EQUAL:
        # A new patch starts here.
        patch.start1 = char_count1
        patch.start2 = char_count2
      if diff_type == self.DIFF_INSERT:
        # Insertion
        patch.diffs.append(diffs[x])
        patch.length2 += len(diff_text)
        postpatch_text = (postpatch_text[:char_count2] + diff_text +
                          postpatch_text[char_count2:])
      elif diff_type == self.DIFF_DELETE:
        # Deletion.
        patch.length1 += len(diff_text)
        patch.diffs.append(diffs[x])
        postpatch_text = (postpatch_text[:char_count2] +
                          postpatch_text[char_count2 + len(diff_text):])
      elif (diff_type == self.DIFF_EQUAL and
            len(diff_text) <= 2 * self.Patch_Margin and
            len(patch.diffs) != 0 and len(diffs) != x + 1):
        # Small equality inside a patch.
        patch.diffs.append(diffs[x])
        patch.length1 += len(diff_text)
        patch.length2 += len(diff_text)

      if (diff_type == self.DIFF_EQUAL and
          len(diff_text) >= 2 * self.Patch_Margin):
        # Time for a new patch.
        if len(patch.diffs) != 0:
          self.patch_addContext(patch, prepatch_text)
          patches.append(patch)
          patch = patch_obj()
          # Unlike Unidiff, our patch lists have a rolling context.
          # http://code.google.com/p/google-diff-match-patch/wiki/Unidiff
          # Update prepatch text & pos to reflect the application of the
          # just completed patch.
          prepatch_text = postpatch_text
          char_count1 = char_count2

      # Update the current character count.
      if diff_type != self.DIFF_INSERT:
        char_count1 += len(diff_text)
      if diff_type != self.DIFF_DELETE:
        char_count2 += len(diff_text)

    # Pick up the leftover patch if not empty.
    if len(patch.diffs) != 0:
      self.patch_addContext(patch, prepatch_text)
      patches.append(patch)
    return patches
1526
1527 def patch_deepCopy(self, patches):
1528 """Given an array of patches, return another array that is identical.
1529
1530 Args:
1531 patches: Array of Patch objects.
1532
1533 Returns:
1534 Array of Patch objects.
1535 """
1536 patchesCopy = []
1537 for patch in patches:
1538 patchCopy = patch_obj()
1539 # No need to deep copy the tuples since they are immutable.
1540 patchCopy.diffs = patch.diffs[:]
1541 patchCopy.start1 = patch.start1
1542 patchCopy.start2 = patch.start2
1543 patchCopy.length1 = patch.length1
1544 patchCopy.length2 = patch.length2
1545 patchesCopy.append(patchCopy)
1546 return patchesCopy
1547
  def patch_apply(self, patches, text):
    """Merge a set of patches onto the text.  Return a patched text, as well
    as a list of true/false values indicating which patches were applied.

    Args:
      patches: Array of Patch objects.
      text: Old text.

    Returns:
      Two element Array, containing the new text and an array of boolean values.
    """
    if not patches:
      return (text, [])

    # Deep copy the patches so that no changes are made to originals.
    patches = self.patch_deepCopy(patches)

    # Pad both ends so patches at the very edges still have context to match.
    nullPadding = self.patch_addPadding(patches)
    text = nullPadding + text + nullPadding
    self.patch_splitMax(patches)

    # delta keeps track of the offset between the expected and actual location
    # of the previous patch.  If there are patches expected at positions 10 and
    # 20, but the first patch was found at 12, delta is 2 and the second patch
    # has an effective expected position of 22.
    delta = 0
    results = []
    for patch in patches:
      expected_loc = patch.start2 + delta
      text1 = self.diff_text1(patch.diffs)
      end_loc = -1
      if len(text1) > self.Match_MaxBits:
        # patch_splitMax will only provide an oversized pattern in the case of
        # a monster delete.  Match the leading and trailing slices separately.
        start_loc = self.match_main(text, text1[:self.Match_MaxBits],
                                    expected_loc)
        if start_loc != -1:
          end_loc = self.match_main(text, text1[-self.Match_MaxBits:],
              expected_loc + len(text1) - self.Match_MaxBits)
          if end_loc == -1 or start_loc >= end_loc:
            # Can't find valid trailing context.  Drop this patch.
            start_loc = -1
      else:
        start_loc = self.match_main(text, text1, expected_loc)
      if start_loc == -1:
        # No match found.  :(
        results.append(False)
        # Subtract the delta for this failed patch from subsequent patches.
        delta -= patch.length2 - patch.length1
      else:
        # Found a match.  :)
        results.append(True)
        delta = start_loc - expected_loc
        if end_loc == -1:
          text2 = text[start_loc : start_loc + len(text1)]
        else:
          text2 = text[start_loc : end_loc + self.Match_MaxBits]
        if text1 == text2:
          # Perfect match, just shove the replacement text in.
          text = (text[:start_loc] + self.diff_text2(patch.diffs) +
                  text[start_loc + len(text1):])
        else:
          # Imperfect match.
          # Run a diff to get a framework of equivalent indices.
          diffs = self.diff_main(text1, text2, False)
          if (len(text1) > self.Match_MaxBits and
              self.diff_levenshtein(diffs) / float(len(text1)) >
              self.Patch_DeleteThreshold):
            # The end points match, but the content is unacceptably bad.
            results[-1] = False
          else:
            self.diff_cleanupSemanticLossless(diffs)
            index1 = 0
            # Replay each edit of the patch at its translated position.
            for (op, data) in patch.diffs:
              if op != self.DIFF_EQUAL:
                index2 = self.diff_xIndex(diffs, index1)
              if op == self.DIFF_INSERT:  # Insertion
                text = text[:start_loc + index2] + data + text[start_loc +
                                                               index2:]
              elif op == self.DIFF_DELETE:  # Deletion
                text = text[:start_loc + index2] + text[start_loc +
                    self.diff_xIndex(diffs, index1 + len(data)):]
              if op != self.DIFF_DELETE:
                index1 += len(data)
    # Strip the padding off.
    text = text[len(nullPadding):-len(nullPadding)]
    return (text, results)
1635
1636 def patch_addPadding(self, patches):
1637 """Add some padding on text start and end so that edges can match
1638 something. Intended to be called only from within patch_apply.
1639
1640 Args:
1641 patches: Array of Patch objects.
1642
1643 Returns:
1644 The padding string added to each side.
1645 """
1646 paddingLength = self.Patch_Margin
1647 nullPadding = ""
1648 for x in xrange(1, paddingLength + 1):
1649 nullPadding += chr(x)
1650
1651 # Bump all the patches forward.
1652 for patch in patches:
1653 patch.start1 += paddingLength
1654 patch.start2 += paddingLength
1655
1656 # Add some padding on start of first diff.
1657 patch = patches[0]
1658 diffs = patch.diffs
1659 if not diffs or diffs[0][0] != self.DIFF_EQUAL:
1660 # Add nullPadding equality.
1661 diffs.insert(0, (self.DIFF_EQUAL, nullPadding))
1662 patch.start1 -= paddingLength # Should be 0.
1663 patch.start2 -= paddingLength # Should be 0.
1664 patch.length1 += paddingLength
1665 patch.length2 += paddingLength
1666 elif paddingLength > len(diffs[0][1]):
1667 # Grow first equality.
1668 extraLength = paddingLength - len(diffs[0][1])
1669 newText = nullPadding[len(diffs[0][1]):] + diffs[0][1]
1670 diffs[0] = (diffs[0][0], newText)
1671 patch.start1 -= extraLength
1672 patch.start2 -= extraLength
1673 patch.length1 += extraLength
1674 patch.length2 += extraLength
1675
1676 # Add some padding on end of last diff.
1677 patch = patches[-1]
1678 diffs = patch.diffs
1679 if not diffs or diffs[-1][0] != self.DIFF_EQUAL:
1680 # Add nullPadding equality.
1681 diffs.append((self.DIFF_EQUAL, nullPadding))
1682 patch.length1 += paddingLength
1683 patch.length2 += paddingLength
1684 elif paddingLength > len(diffs[-1][1]):
1685 # Grow last equality.
1686 extraLength = paddingLength - len(diffs[-1][1])
1687 newText = diffs[-1][1] + nullPadding[:extraLength]
1688 diffs[-1] = (diffs[-1][0], newText)
1689 patch.length1 += extraLength
1690 patch.length2 += extraLength
1691
1692 return nullPadding
1693
  def patch_splitMax(self, patches):
    """Look through the patches and break up any which are longer than the
    maximum limit of the match algorithm.
    Intended to be called only from within patch_apply.

    Args:
      patches: Array of Patch objects.
    """
    patch_size = self.Match_MaxBits
    if patch_size == 0:
      # Python has the option of not splitting strings due to its ability
      # to handle integers of arbitrary precision.
      return
    # NOTE(review): 'patches' is mutated (del/insert) while iterating, and the
    # manual 'x -= 1' / 'x += 1' adjustments are overwritten by the iterator on
    # the next pass of the for loop -- verify against upstream before relying
    # on the iteration order here.
    for x in xrange(len(patches)):
      if patches[x].length1 <= patch_size:
        continue
      bigpatch = patches[x]
      # Remove the big old patch.
      del patches[x]
      x -= 1
      start1 = bigpatch.start1
      start2 = bigpatch.start2
      precontext = ''
      while len(bigpatch.diffs) != 0:
        # Create one of several smaller patches.
        patch = patch_obj()
        empty = True
        patch.start1 = start1 - len(precontext)
        patch.start2 = start2 - len(precontext)
        if precontext:
          # Carry over trailing context from the previous small patch.
          patch.length1 = patch.length2 = len(precontext)
          patch.diffs.append((self.DIFF_EQUAL, precontext))

        while (len(bigpatch.diffs) != 0 and
               patch.length1 < patch_size - self.Patch_Margin):
          (diff_type, diff_text) = bigpatch.diffs[0]
          if diff_type == self.DIFF_INSERT:
            # Insertions are harmless.
            patch.length2 += len(diff_text)
            start2 += len(diff_text)
            patch.diffs.append(bigpatch.diffs.pop(0))
            empty = False
          elif (diff_type == self.DIFF_DELETE and len(patch.diffs) == 1 and
              patch.diffs[0][0] == self.DIFF_EQUAL and
              len(diff_text) > 2 * patch_size):
            # This is a large deletion.  Let it pass in one chunk.
            patch.length1 += len(diff_text)
            start1 += len(diff_text)
            empty = False
            patch.diffs.append((diff_type, diff_text))
            del bigpatch.diffs[0]
          else:
            # Deletion or equality.  Only take as much as we can stomach.
            diff_text = diff_text[:patch_size - patch.length1 -
                                  self.Patch_Margin]
            patch.length1 += len(diff_text)
            start1 += len(diff_text)
            if diff_type == self.DIFF_EQUAL:
              patch.length2 += len(diff_text)
              start2 += len(diff_text)
            else:
              empty = False

            patch.diffs.append((diff_type, diff_text))
            if diff_text == bigpatch.diffs[0][1]:
              # Consumed the whole entry.
              del bigpatch.diffs[0]
            else:
              # Consumed a prefix; keep the remainder for the next patch.
              bigpatch.diffs[0] = (bigpatch.diffs[0][0],
                                   bigpatch.diffs[0][1][len(diff_text):])

        # Compute the head context for the next patch.
        precontext = self.diff_text2(patch.diffs)
        precontext = precontext[-self.Patch_Margin:]
        # Append the end context for this patch.
        postcontext = self.diff_text1(bigpatch.diffs)[:self.Patch_Margin]
        if postcontext:
          patch.length1 += len(postcontext)
          patch.length2 += len(postcontext)
          if len(patch.diffs) != 0 and patch.diffs[-1][0] == self.DIFF_EQUAL:
            patch.diffs[-1] = (self.DIFF_EQUAL, patch.diffs[-1][1] +
                               postcontext)
          else:
            patch.diffs.append((self.DIFF_EQUAL, postcontext))

        if not empty:
          x += 1
          patches.insert(x, patch)
1781
1782 def patch_toText(self, patches):
1783 """Take a list of patches and return a textual representation.
1784
1785 Args:
1786 patches: Array of Patch objects.
1787
1788 Returns:
1789 Text representation of patches.
1790 """
1791 text = []
1792 for patch in patches:
1793 text.append(str(patch))
1794 return "".join(text)
1795
  def patch_fromText(self, textline):
    """Parse a textual representation of patches and return a list of patch
    objects.

    Args:
      textline: Text representation of patches.

    Returns:
      Array of Patch objects.

    Raises:
      ValueError: If invalid input.
    """
    if type(textline) == unicode:
      # Patches should be composed of a subset of ascii chars, Unicode not
      # required.  If this encode raises UnicodeEncodeError, patch is invalid.
      # (Python 2 only: unicode/str distinction.)
      textline = textline.encode("ascii")
    patches = []
    if not textline:
      return patches
    text = textline.split('\n')
    while len(text) != 0:
      # Parse a "@@ -start1,length1 +start2,length2 @@" hunk header; the
      # length parts are optional.
      m = re.match("^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$", text[0])
      if not m:
        raise ValueError("Invalid patch string: " + text[0])
      patch = patch_obj()
      patches.append(patch)
      patch.start1 = int(m.group(1))
      if m.group(2) == '':
        # No length given means a single-line hunk (1-based -> 0-based).
        patch.start1 -= 1
        patch.length1 = 1
      elif m.group(2) == '0':
        patch.length1 = 0
      else:
        patch.start1 -= 1
        patch.length1 = int(m.group(2))

      patch.start2 = int(m.group(3))
      if m.group(4) == '':
        patch.start2 -= 1
        patch.length2 = 1
      elif m.group(4) == '0':
        patch.length2 = 0
      else:
        patch.start2 -= 1
        patch.length2 = int(m.group(4))

      del text[0]

      # Consume the body lines until the next hunk header.
      while len(text) != 0:
        if text[0]:
          sign = text[0][0]
        else:
          sign = ''
        line = urllib.unquote(text[0][1:])
        line = line.decode("utf-8")
        if sign == '+':
          # Insertion.
          patch.diffs.append((self.DIFF_INSERT, line))
        elif sign == '-':
          # Deletion.
          patch.diffs.append((self.DIFF_DELETE, line))
        elif sign == ' ':
          # Minor equality.
          patch.diffs.append((self.DIFF_EQUAL, line))
        elif sign == '@':
          # Start of next patch.
          break
        elif sign == '':
          # Blank line?  Whatever.
          pass
        else:
          # WTF?
          raise ValueError("Invalid patch mode: '%s'\n%s" % (sign, line))
        del text[0]
    return patches
1872
1873
class patch_obj:
  """Class representing one patch operation.
  """

  def __init__(self):
    """Initializes with an empty list of diffs.
    """
    self.diffs = []      # List of (op, text) diff tuples making up the patch.
    self.start1 = None   # Start offset of the patch within text1.
    self.start2 = None   # Start offset of the patch within text2.
    self.length1 = 0     # Number of text1 characters the patch spans.
    self.length2 = 0     # Number of text2 characters the patch spans.

  def __str__(self):
    """Emulate GNU diff's format.
    Header: @@ -382,8 +481,9 @@
    Indices are printed as 1-based, not 0-based.

    Returns:
      The GNU diff string.
    """
    # GNU convention: length omitted when it is 1, and a ",0" hunk keeps the
    # 0-based start.
    if self.length1 == 0:
      coords1 = str(self.start1) + ",0"
    elif self.length1 == 1:
      coords1 = str(self.start1 + 1)
    else:
      coords1 = str(self.start1 + 1) + "," + str(self.length1)
    if self.length2 == 0:
      coords2 = str(self.start2) + ",0"
    elif self.length2 == 1:
      coords2 = str(self.start2 + 1)
    else:
      coords2 = str(self.start2 + 1) + "," + str(self.length2)
    text = ["@@ -", coords1, " +", coords2, " @@\n"]
    # Escape the body of the patch with %xx notation.
    for (op, data) in self.diffs:
      if op == diff_match_patch.DIFF_INSERT:
        text.append("+")
      elif op == diff_match_patch.DIFF_DELETE:
        text.append("-")
      elif op == diff_match_patch.DIFF_EQUAL:
        text.append(" ")
      # High ascii will raise UnicodeDecodeError.  Use Unicode instead.
      # (Python 2 only: urllib.quote over utf-8 encoded bytes.)
      data = data.encode("utf-8")
      text.append(urllib.quote(data, "!~*'();/?:@&=+$,# ") + "\n")
    return "".join(text)
@@ -0,0 +1,398 b''
## Stable per-line anchor id: "<sanitized filename>_<o|n>_<line number>".
<%def name="diff_line_anchor(filename, line, type)"><%
    return '_'.join([h.safeid(filename), type, '%i' % line])
%></%def>
4
## CSS class for a single diff line, keyed on the line's action marker.
<%def name="action_class(action)"><%
    if action == '-':
        return 'cb-deletion'
    if action == '+':
        return 'cb-addition'
    if action == ' ':
        return 'cb-context'
    return 'cb-empty'
%></%def>
12
## CSS suffix for a whole-file operation message row. NOTE(review):
## DEL_FILENODE / BIN_FILENODE are expected to come from the template
## context -- confirm callers provide them.
<%def name="op_class(op_id)"><%
    css_by_op = {
        DEL_FILENODE: 'deletion',  # file deleted
        BIN_FILENODE: 'warning',   # binary diff hidden
    }
    return css_by_op.get(op_id, 'addition')
%></%def>
19
## Rebuild the current url, overriding/adding the given query args on top
## of the request's existing GET parameters.
<%def name="link_for(**kw)"><%
    query_params = request.GET.mixed()
    query_params.update(kw)
    return h.url('', **query_params)
%></%def>
25
## Render a whole diffset (all changed files) in either 'sideside' or
## 'unified' mode, with css-only expand/collapse per file.
##
## NOTE(review): DEL_FILENODE / BIN_FILENODE are referenced below but only
## imported locally inside diff_ops(); presumably they are injected into
## the template context by the caller -- confirm.
<%def name="render_diffset(diffset,

  # collapse all file diff entries when there are more than this amount of files in the diff
  collapse_when_files_over=20,

  # collapse lines in the diff when more than this amount of lines changed in the file diff
  lines_changed_limit=500,
)">
<%
# TODO: dan: move this to an argument - and set a cookie so that it is saved
# default option for future requests
diff_mode = request.GET.get('diffmode', 'sideside')
if diff_mode not in ('sideside', 'unified'):
    diff_mode = 'sideside'

# auto-collapse every file entry when the diff touches many files
collapse_all = len(diffset.files) > collapse_when_files_over
%>

## side-by-side needs extra horizontal room
%if diff_mode == 'sideside':
<style>
.wrapper {
    max-width: 1600px !important;
}
</style>
%endif

% if diffset.limited_diff:
<div class="alert alert-warning">
  ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
</div>
% endif

<div class="cs_files">
  <div class="cs_files_title">
    %if diffset.files:
      <div class="pull-right">
        <div class="btn-group">
          ## diff-mode toggle; the active mode gets the primary style.
          ## fix: trailing "or ''" so the inactive button does not render a
          ## literal "False" css class (matches the "and X or ''" idiom
          ## used elsewhere in this template)
          <a
            class="btn ${diff_mode == 'sideside' and 'btn-primary' or ''} tooltip"
            title="${_('View side by side')}"
            href="${link_for(diffmode='sideside')}">
            <span>${_('Side by Side')}</span>
          </a>
          <a
            class="btn ${diff_mode == 'unified' and 'btn-primary' or ''} tooltip"
            title="${_('View unified')}" href="${link_for(diffmode='unified')}">
            <span>${_('Unified')}</span>
          </a>
        </div>
      </div>
      <div class="pull-left">
        <div class="btn-group">
          ## collapse state lives in the per-file checkboxes below; these
          ## buttons just flip all of them at once
          <a
            class="btn"
            href="#"
            onclick="$('input[class=diff-collapse-state]').prop('checked', false); return false">${_('Expand All')}</a>
          <a
            class="btn"
            href="#"
            onclick="$('input[class=diff-collapse-state]').prop('checked', true); return false">${_('Collapse All')}</a>
        </div>
      </div>
    %endif
    <h2 style="padding: 5px; text-align: center;">
    %if diffset.limited_diff:
      ## line stats are incomplete on a truncated diff; show file count only
      ${ungettext('%(num)s file changed', '%(num)s files changed', diffset.changed_files) % {'num': diffset.changed_files}}
    %else:
      ${ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted',
      '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}}
    %endif
    </h2>
  </div>

  %if not diffset.files:
    <p class="empty_data">${_('No files')}</p>
  %endif

  <div class="filediffs">
  %for i, filediff in enumerate(diffset.files):
  <%
  # collapse individual file diffs that changed a huge number of lines
  lines_changed = filediff['patch']['stats']['added'] + filediff['patch']['stats']['deleted']
  over_lines_changed_limit = lines_changed > lines_changed_limit
  %>
  ## hidden checkbox drives the css-only collapse of this file entry
  <input ${collapse_all and 'checked' or ''} class="diff-collapse-state" id="diff-collapse-${i}" type="checkbox">
  <div
    class="diff"
    data-f-path="${filediff['patch']['filename']}"
    id="a_${h.FID('', filediff['patch']['filename'])}">
    <label for="diff-collapse-${i}" class="diff-heading">
      <div class="diff-collapse-indicator"></div>
      ${diff_ops(filediff)}
    </label>
    ${diff_menu(filediff)}
    <table class="cb cb-diff-${diff_mode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
      ## no hunks: show the per-file operation messages instead
      %if not filediff.hunks:
        %for op_id, op_text in filediff['patch']['stats']['ops'].items():
        <tr>
          <td class="cb-text cb-${op_class(op_id)}" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
            %if op_id == DEL_FILENODE:
              ${_('File was deleted')}
            %elif op_id == BIN_FILENODE:
              ${_('Binary file hidden')}
            %else:
              ${op_text}
            %endif
          </td>
        </tr>
        %endfor
      %endif
      %if over_lines_changed_limit:
      <tr class="cb-warning cb-collapser">
        <td class="cb-text" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
          ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
          <a href="#" class="cb-expand"
             onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
          </a>
          <a href="#" class="cb-collapse"
             onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
          </a>
        </td>
      </tr>
      %endif
      %if filediff.patch['is_limited_diff']:
      <tr class="cb-warning cb-collapser">
        <td class="cb-text" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
          ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
        </td>
      </tr>
      %endif
      %for hunk in filediff.hunks:
      <tr class="cb-hunk">
        <td ${diff_mode == 'unified' and 'colspan=2' or ''}>
          ## TODO: dan: add ajax loading of more context here
          ## <a href="#">
          <i class="icon-more"></i>
          ## </a>
        </td>
        <td ${diff_mode == 'sideside' and 'colspan=3' or ''}>
          @@
          -${hunk.source_start},${hunk.source_length}
          +${hunk.target_start},${hunk.target_length}
          ${hunk.section_header}
        </td>
      </tr>
      %if diff_mode == 'unified':
        ${render_hunk_lines_unified(hunk)}
      %elif diff_mode == 'sideside':
        ${render_hunk_lines_sideside(hunk)}
      %else:
        <tr class="cb-line">
          <td>unknown diff mode</td>
        </tr>
      %endif
      %endfor
    </table>
  </div>
  %endfor
  </div>
</div>
</%def>
186
## Render the heading "pills" for one file diff: the file path (with
## rename/delete markers) plus badges for the operations applied and the
## added/deleted line counts.
<%def name="diff_ops(filediff)">
<%
stats = filediff['patch']['stats']
from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
    MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
%>
<span class="diff-pill">
  ## both paths set: the file existed before and after this diff
  %if filediff.source_file_path and filediff.target_file_path:
    %if filediff.source_file_path != filediff.target_file_path: # file was renamed
      <strong>${filediff.target_file_path}</strong> ⬅ <del>${filediff.source_file_path}</del>
    %else:
      ## file was modified
      <strong>${filediff.source_file_path}</strong>
    %endif
  %else:
    %if filediff.source_file_path:
      ## file was deleted
      <strong>${filediff.source_file_path}</strong>
    %else:
      ## file was added
      <strong>${filediff.target_file_path}</strong>
    %endif
  %endif
</span>
## left-hand badges: operation kinds detected in the patch stats
<span class="diff-pill-group" style="float: left">
  %if filediff.patch['is_limited_diff']:
    <span class="diff-pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
  %endif
  %if RENAMED_FILENODE in stats['ops']:
    <span class="diff-pill" op="renamed">renamed</span>
  %endif

  %if NEW_FILENODE in stats['ops']:
    <span class="diff-pill" op="created">created</span>
    ## mode 120xxx marks a symlink; otherwise show the plain file mode
    %if filediff['target_mode'].startswith('120'):
      <span class="diff-pill" op="symlink">symlink</span>
    %else:
      <span class="diff-pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
    %endif
  %endif

  %if DEL_FILENODE in stats['ops']:
    <span class="diff-pill" op="removed">removed</span>
  %endif

  %if CHMOD_FILENODE in stats['ops']:
    <span class="diff-pill" op="mode">
      ${nice_mode(filediff['source_mode'])} ➡ ${nice_mode(filediff['target_mode'])}
    </span>
  %endif
</span>

## permalink anchor to this file's section of the page
<a class="diff-pill diff-anchor" href="#a_${h.FID('', filediff.patch['filename'])}">¶</a>

## right-hand badges: binary marker and line counters
<span class="diff-pill-group" style="float: right">
  %if BIN_FILENODE in stats['ops']:
    <span class="diff-pill" op="binary">binary</span>
    %if MOD_FILENODE in stats['ops']:
      <span class="diff-pill" op="modified">modified</span>
    %endif
  %endif
  %if stats['deleted']:
    <span class="diff-pill" op="deleted">-${stats['deleted']}</span>
  %endif
  %if stats['added']:
    <span class="diff-pill" op="added">+${stats['added']}</span>
  %endif
</span>

</%def>
257
<%def name="nice_mode(filemode)">
  ## Strip the leading "100" (regular file) from a git mode string, so
  ## 100644 renders as 644; other modes (e.g. 120000 symlink) render
  ## unchanged. NOTE(review): relies on truthiness -- a bare '100' mode
  ## would fall back to the raw string; presumably never happens.
  ${filemode.startswith('100') and filemode[3:] or filemode}
</%def>
261
## Per-file action links: view the file before/after this diff, and raw /
## download views of the diff itself. Links for states that do not exist
## (file added -> no "before"; file deleted -> no "after") are rendered as
## disabled anchors carrying an explanatory tooltip.
<%def name="diff_menu(filediff)">
  <div class="diff-menu">
    %if filediff.diffset.source_ref:
      ## operation D(eleted)/M(odified): the file existed at the source ref
      %if filediff.patch['operation'] in ['D', 'M']:
        <a
          class="tooltip"
          href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.source_file_path,revision=filediff.diffset.source_ref)}"
          title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
        >
          ${_('Show file before')}
        </a>
      %else:
        <a
          disabled
          class="tooltip"
          title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
        >
          ${_('Show file before')}
        </a>
      %endif
      ## operation A(dded)/M(odified): the file exists at the target ref
      %if filediff.patch['operation'] in ['A', 'M']:
        <a
          class="tooltip"
          href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.target_file_path,revision=filediff.diffset.target_ref)}"
          title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
        >
          ${_('Show file after')}
        </a>
      %else:
        <a
          disabled
          class="tooltip"
          title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
        >
          ${_('Show file after')}
        </a>
      %endif
      <a
        class="tooltip"
        title="${h.tooltip(_('Raw diff'))}"
        href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw')}"
      >
        ${_('Raw diff')}
      </a>
      <a
        class="tooltip"
        title="${h.tooltip(_('Download diff'))}"
        href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download')}"
      >
        ${_('Download diff')}
      </a>
    %endif
  </div>
</%def>
316
317
## Render one hunk as side-by-side rows: four cells per row -- old line
## number, old content, new line number, new content. Line-number cells
## get an anchor id ("o" for original side, "n" for modified side) only
## when that side actually has a line number.
<%def name="render_hunk_lines_sideside(hunk)">
  %for i, line in enumerate(hunk.sideside):
    <%
    # anchors are built per side; a side without a line number (pure
    # addition/deletion) gets no anchor
    old_line_anchor, new_line_anchor = None, None
    if line.original.lineno:
        old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, line.original.lineno, 'o')
    if line.modified.lineno:
        new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, line.modified.lineno, 'n')
    %>
    <tr class="cb-line">
      <td class="cb-lineno ${action_class(line.original.action)}"
          data-line-number="${line.original.lineno}"
          %if old_line_anchor:
          id="${old_line_anchor}"
          %endif
      >
        %if line.original.lineno:
          <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
        %endif
      </td>
      ## content rendered unescaped (| n): presumably pre-escaped /
      ## highlighted upstream -- confirm
      <td class="cb-content ${action_class(line.original.action)}"
          data-line-number="o${line.original.lineno}"
          ><span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
      </td>
      <td class="cb-lineno ${action_class(line.modified.action)}"
          data-line-number="${line.modified.lineno}"
          %if new_line_anchor:
          id="${new_line_anchor}"
          %endif
      >
        %if line.modified.lineno:
          <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
        %endif
      </td>
      <td class="cb-content ${action_class(line.modified.action)}"
          data-line-number="n${line.modified.lineno}"
          >
        <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
      </td>
    </tr>
  %endfor
</%def>
360
361
## Render one hunk as unified rows: three cells per row -- old line number,
## new line number, content. A line has an anchor on each side that carries
## a line number ("o" original / "n" modified).
<%def name="render_hunk_lines_unified(hunk)">
  %for old_line_no, new_line_no, action, content in hunk.unified:
    <%
    # a side without a line number (pure addition/deletion) gets no anchor
    old_line_anchor, new_line_anchor = None, None
    if old_line_no:
        old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, old_line_no, 'o')
    if new_line_no:
        new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, new_line_no, 'n')
    %>
    <tr class="cb-line">
      <td class="cb-lineno ${action_class(action)}"
          data-line-number="${old_line_no}"
          %if old_line_anchor:
          id="${old_line_anchor}"
          %endif
      >
        %if old_line_anchor:
          <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
        %endif
      </td>
      <td class="cb-lineno ${action_class(action)}"
          data-line-number="${new_line_no}"
          %if new_line_anchor:
          id="${new_line_anchor}"
          %endif
      >
        %if new_line_anchor:
          <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
        %endif
      </td>
      ## data-line-number prefers the new side when present; content is
      ## rendered unescaped (| n): presumably pre-escaped upstream -- confirm
      <td class="cb-content ${action_class(action)}"
          data-line-number="${new_line_no and 'n' or 'o'}${new_line_no or old_line_no}"
          ><span class="cb-code">${action} ${content or '' | n}</span>
      </td>
    </tr>
  %endfor
</%def>
@@ -1,694 +1,696 b''
1 This program is free software: you can redistribute it and/or modify
1 This program is free software: you can redistribute it and/or modify
2 it under the terms of the GNU Affero General Public License, version 3
2 it under the terms of the GNU Affero General Public License, version 3
3 (only), as published by the Free Software Foundation.
3 (only), as published by the Free Software Foundation.
4
4
5
5
6 This program incorporates work covered by the following copyright and
6 This program incorporates work covered by the following copyright and
7 permission notice:
7 permission notice:
8
8
9 Copyright (c) 2014-2016 - packaging
9 Copyright (c) 2014-2016 - packaging
10 file:
10 file:
11 Copyright (c) 2008-2011 - msgpack-python
11 Copyright (c) 2008-2011 - msgpack-python
12 file:licenses/msgpack_license.txt
12 file:licenses/msgpack_license.txt
13 Copyright (c) 2009 - tornado
13 Copyright (c) 2009 - tornado
14 file:licenses/tornado_license.txt
14 file:licenses/tornado_license.txt
15 Copyright (c) 2015 - pygments-markdown-lexer
15 Copyright (c) 2015 - pygments-markdown-lexer
16 file:licenses/pygments_markdown_lexer_license.txt
16 file:licenses/pygments_markdown_lexer_license.txt
17 Copyright 2006 - diff_match_patch
18 file:licenses/diff_match_patch_license.txt
17
19
18 All licensed under the Apache License, Version 2.0 (the "License");
20 All licensed under the Apache License, Version 2.0 (the "License");
19 you may not use this file except in compliance with the License.
21 you may not use this file except in compliance with the License.
20 You may obtain a copy of the License at
22 You may obtain a copy of the License at
21
23
22 http://www.apache.org/licenses/LICENSE-2.0
24 http://www.apache.org/licenses/LICENSE-2.0
23
25
24 Unless required by applicable law or agreed to in writing, software
26 Unless required by applicable law or agreed to in writing, software
25 distributed under the License is distributed on an "AS IS" BASIS,
27 distributed under the License is distributed on an "AS IS" BASIS,
26 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
28 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
27 See the License for the specific language governing permissions and
29 See the License for the specific language governing permissions and
28 limitations under the License.
30 limitations under the License.
29
31
30
32
31 Below is the full text of GNU Affero General Public License, version 3
33 Below is the full text of GNU Affero General Public License, version 3
32
34
33
35
34 GNU AFFERO GENERAL PUBLIC LICENSE
36 GNU AFFERO GENERAL PUBLIC LICENSE
35 Version 3, 19 November 2007
37 Version 3, 19 November 2007
36
38
37 Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
39 Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
38 Everyone is permitted to copy and distribute verbatim copies
40 Everyone is permitted to copy and distribute verbatim copies
39 of this license document, but changing it is not allowed.
41 of this license document, but changing it is not allowed.
40
42
41 Preamble
43 Preamble
42
44
43 The GNU Affero General Public License is a free, copyleft license for
45 The GNU Affero General Public License is a free, copyleft license for
44 software and other kinds of works, specifically designed to ensure
46 software and other kinds of works, specifically designed to ensure
45 cooperation with the community in the case of network server software.
47 cooperation with the community in the case of network server software.
46
48
47 The licenses for most software and other practical works are designed
49 The licenses for most software and other practical works are designed
48 to take away your freedom to share and change the works. By contrast,
50 to take away your freedom to share and change the works. By contrast,
49 our General Public Licenses are intended to guarantee your freedom to
51 our General Public Licenses are intended to guarantee your freedom to
50 share and change all versions of a program--to make sure it remains free
52 share and change all versions of a program--to make sure it remains free
51 software for all its users.
53 software for all its users.
52
54
53 When we speak of free software, we are referring to freedom, not
55 When we speak of free software, we are referring to freedom, not
54 price. Our General Public Licenses are designed to make sure that you
56 price. Our General Public Licenses are designed to make sure that you
55 have the freedom to distribute copies of free software (and charge for
57 have the freedom to distribute copies of free software (and charge for
56 them if you wish), that you receive source code or can get it if you
58 them if you wish), that you receive source code or can get it if you
57 want it, that you can change the software or use pieces of it in new
59 want it, that you can change the software or use pieces of it in new
58 free programs, and that you know you can do these things.
60 free programs, and that you know you can do these things.
59
61
60 Developers that use our General Public Licenses protect your rights
62 Developers that use our General Public Licenses protect your rights
61 with two steps: (1) assert copyright on the software, and (2) offer
63 with two steps: (1) assert copyright on the software, and (2) offer
62 you this License which gives you legal permission to copy, distribute
64 you this License which gives you legal permission to copy, distribute
63 and/or modify the software.
65 and/or modify the software.
64
66
65 A secondary benefit of defending all users' freedom is that
67 A secondary benefit of defending all users' freedom is that
66 improvements made in alternate versions of the program, if they
68 improvements made in alternate versions of the program, if they
67 receive widespread use, become available for other developers to
69 receive widespread use, become available for other developers to
68 incorporate. Many developers of free software are heartened and
70 incorporate. Many developers of free software are heartened and
69 encouraged by the resulting cooperation. However, in the case of
71 encouraged by the resulting cooperation. However, in the case of
70 software used on network servers, this result may fail to come about.
72 software used on network servers, this result may fail to come about.
71 The GNU General Public License permits making a modified version and
73 The GNU General Public License permits making a modified version and
72 letting the public access it on a server without ever releasing its
74 letting the public access it on a server without ever releasing its
73 source code to the public.
75 source code to the public.
74
76
75 The GNU Affero General Public License is designed specifically to
77 The GNU Affero General Public License is designed specifically to
76 ensure that, in such cases, the modified source code becomes available
78 ensure that, in such cases, the modified source code becomes available
77 to the community. It requires the operator of a network server to
79 to the community. It requires the operator of a network server to
78 provide the source code of the modified version running there to the
80 provide the source code of the modified version running there to the
79 users of that server. Therefore, public use of a modified version, on
81 users of that server. Therefore, public use of a modified version, on
80 a publicly accessible server, gives the public access to the source
82 a publicly accessible server, gives the public access to the source
81 code of the modified version.
83 code of the modified version.
82
84
83 An older license, called the Affero General Public License and
85 An older license, called the Affero General Public License and
84 published by Affero, was designed to accomplish similar goals. This is
86 published by Affero, was designed to accomplish similar goals. This is
85 a different license, not a version of the Affero GPL, but Affero has
87 a different license, not a version of the Affero GPL, but Affero has
86 released a new version of the Affero GPL which permits relicensing under
88 released a new version of the Affero GPL which permits relicensing under
87 this license.
89 this license.
88
90
89 The precise terms and conditions for copying, distribution and
91 The precise terms and conditions for copying, distribution and
90 modification follow.
92 modification follow.
91
93
92 TERMS AND CONDITIONS
94 TERMS AND CONDITIONS
93
95
94 0. Definitions.
96 0. Definitions.
95
97
96 "This License" refers to version 3 of the GNU Affero General Public License.
98 "This License" refers to version 3 of the GNU Affero General Public License.
97
99
98 "Copyright" also means copyright-like laws that apply to other kinds of
100 "Copyright" also means copyright-like laws that apply to other kinds of
99 works, such as semiconductor masks.
101 works, such as semiconductor masks.
100
102
101 "The Program" refers to any copyrightable work licensed under this
103 "The Program" refers to any copyrightable work licensed under this
102 License. Each licensee is addressed as "you". "Licensees" and
104 License. Each licensee is addressed as "you". "Licensees" and
103 "recipients" may be individuals or organizations.
105 "recipients" may be individuals or organizations.
104
106
105 To "modify" a work means to copy from or adapt all or part of the work
107 To "modify" a work means to copy from or adapt all or part of the work
106 in a fashion requiring copyright permission, other than the making of an
108 in a fashion requiring copyright permission, other than the making of an
107 exact copy. The resulting work is called a "modified version" of the
109 exact copy. The resulting work is called a "modified version" of the
108 earlier work or a work "based on" the earlier work.
110 earlier work or a work "based on" the earlier work.
109
111
110 A "covered work" means either the unmodified Program or a work based
112 A "covered work" means either the unmodified Program or a work based
111 on the Program.
113 on the Program.
112
114
113 To "propagate" a work means to do anything with it that, without
115 To "propagate" a work means to do anything with it that, without
114 permission, would make you directly or secondarily liable for
116 permission, would make you directly or secondarily liable for
115 infringement under applicable copyright law, except executing it on a
117 infringement under applicable copyright law, except executing it on a
116 computer or modifying a private copy. Propagation includes copying,
118 computer or modifying a private copy. Propagation includes copying,
117 distribution (with or without modification), making available to the
119 distribution (with or without modification), making available to the
118 public, and in some countries other activities as well.
120 public, and in some countries other activities as well.
119
121
120 To "convey" a work means any kind of propagation that enables other
122 To "convey" a work means any kind of propagation that enables other
121 parties to make or receive copies. Mere interaction with a user through
123 parties to make or receive copies. Mere interaction with a user through
122 a computer network, with no transfer of a copy, is not conveying.
124 a computer network, with no transfer of a copy, is not conveying.
123
125
124 An interactive user interface displays "Appropriate Legal Notices"
126 An interactive user interface displays "Appropriate Legal Notices"
125 to the extent that it includes a convenient and prominently visible
127 to the extent that it includes a convenient and prominently visible
126 feature that (1) displays an appropriate copyright notice, and (2)
128 feature that (1) displays an appropriate copyright notice, and (2)
127 tells the user that there is no warranty for the work (except to the
129 tells the user that there is no warranty for the work (except to the
128 extent that warranties are provided), that licensees may convey the
130 extent that warranties are provided), that licensees may convey the
129 work under this License, and how to view a copy of this License. If
131 work under this License, and how to view a copy of this License. If
130 the interface presents a list of user commands or options, such as a
132 the interface presents a list of user commands or options, such as a
131 menu, a prominent item in the list meets this criterion.
133 menu, a prominent item in the list meets this criterion.
132
134
133 1. Source Code.
135 1. Source Code.
134
136
135 The "source code" for a work means the preferred form of the work
137 The "source code" for a work means the preferred form of the work
136 for making modifications to it. "Object code" means any non-source
138 for making modifications to it. "Object code" means any non-source
137 form of a work.
139 form of a work.
138
140
139 A "Standard Interface" means an interface that either is an official
141 A "Standard Interface" means an interface that either is an official
140 standard defined by a recognized standards body, or, in the case of
142 standard defined by a recognized standards body, or, in the case of
141 interfaces specified for a particular programming language, one that
143 interfaces specified for a particular programming language, one that
142 is widely used among developers working in that language.
144 is widely used among developers working in that language.
143
145
144 The "System Libraries" of an executable work include anything, other
146 The "System Libraries" of an executable work include anything, other
145 than the work as a whole, that (a) is included in the normal form of
147 than the work as a whole, that (a) is included in the normal form of
146 packaging a Major Component, but which is not part of that Major
148 packaging a Major Component, but which is not part of that Major
147 Component, and (b) serves only to enable use of the work with that
149 Component, and (b) serves only to enable use of the work with that
148 Major Component, or to implement a Standard Interface for which an
150 Major Component, or to implement a Standard Interface for which an
149 implementation is available to the public in source code form. A
151 implementation is available to the public in source code form. A
150 "Major Component", in this context, means a major essential component
152 "Major Component", in this context, means a major essential component
151 (kernel, window system, and so on) of the specific operating system
153 (kernel, window system, and so on) of the specific operating system
152 (if any) on which the executable work runs, or a compiler used to
154 (if any) on which the executable work runs, or a compiler used to
153 produce the work, or an object code interpreter used to run it.
155 produce the work, or an object code interpreter used to run it.
154
156
155 The "Corresponding Source" for a work in object code form means all
157 The "Corresponding Source" for a work in object code form means all
156 the source code needed to generate, install, and (for an executable
158 the source code needed to generate, install, and (for an executable
157 work) run the object code and to modify the work, including scripts to
159 work) run the object code and to modify the work, including scripts to
158 control those activities. However, it does not include the work's
160 control those activities. However, it does not include the work's
159 System Libraries, or general-purpose tools or generally available free
161 System Libraries, or general-purpose tools or generally available free
160 programs which are used unmodified in performing those activities but
162 programs which are used unmodified in performing those activities but
161 which are not part of the work. For example, Corresponding Source
163 which are not part of the work. For example, Corresponding Source
162 includes interface definition files associated with source files for
164 includes interface definition files associated with source files for
163 the work, and the source code for shared libraries and dynamically
165 the work, and the source code for shared libraries and dynamically
164 linked subprograms that the work is specifically designed to require,
166 linked subprograms that the work is specifically designed to require,
165 such as by intimate data communication or control flow between those
167 such as by intimate data communication or control flow between those
166 subprograms and other parts of the work.
168 subprograms and other parts of the work.
167
169
168 The Corresponding Source need not include anything that users
170 The Corresponding Source need not include anything that users
169 can regenerate automatically from other parts of the Corresponding
171 can regenerate automatically from other parts of the Corresponding
170 Source.
172 Source.
171
173
172 The Corresponding Source for a work in source code form is that
174 The Corresponding Source for a work in source code form is that
173 same work.
175 same work.
174
176
175 2. Basic Permissions.
177 2. Basic Permissions.
176
178
177 All rights granted under this License are granted for the term of
179 All rights granted under this License are granted for the term of
178 copyright on the Program, and are irrevocable provided the stated
180 copyright on the Program, and are irrevocable provided the stated
179 conditions are met. This License explicitly affirms your unlimited
181 conditions are met. This License explicitly affirms your unlimited
180 permission to run the unmodified Program. The output from running a
182 permission to run the unmodified Program. The output from running a
181 covered work is covered by this License only if the output, given its
183 covered work is covered by this License only if the output, given its
182 content, constitutes a covered work. This License acknowledges your
184 content, constitutes a covered work. This License acknowledges your
183 rights of fair use or other equivalent, as provided by copyright law.
185 rights of fair use or other equivalent, as provided by copyright law.
184
186
185 You may make, run and propagate covered works that you do not
187 You may make, run and propagate covered works that you do not
186 convey, without conditions so long as your license otherwise remains
188 convey, without conditions so long as your license otherwise remains
187 in force. You may convey covered works to others for the sole purpose
189 in force. You may convey covered works to others for the sole purpose
188 of having them make modifications exclusively for you, or provide you
190 of having them make modifications exclusively for you, or provide you
189 with facilities for running those works, provided that you comply with
191 with facilities for running those works, provided that you comply with
190 the terms of this License in conveying all material for which you do
192 the terms of this License in conveying all material for which you do
191 not control copyright. Those thus making or running the covered works
193 not control copyright. Those thus making or running the covered works
192 for you must do so exclusively on your behalf, under your direction
194 for you must do so exclusively on your behalf, under your direction
193 and control, on terms that prohibit them from making any copies of
195 and control, on terms that prohibit them from making any copies of
194 your copyrighted material outside their relationship with you.
196 your copyrighted material outside their relationship with you.
195
197
196 Conveying under any other circumstances is permitted solely under
198 Conveying under any other circumstances is permitted solely under
197 the conditions stated below. Sublicensing is not allowed; section 10
199 the conditions stated below. Sublicensing is not allowed; section 10
198 makes it unnecessary.
200 makes it unnecessary.
199
201
200 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
202 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
201
203
202 No covered work shall be deemed part of an effective technological
204 No covered work shall be deemed part of an effective technological
203 measure under any applicable law fulfilling obligations under article
205 measure under any applicable law fulfilling obligations under article
204 11 of the WIPO copyright treaty adopted on 20 December 1996, or
206 11 of the WIPO copyright treaty adopted on 20 December 1996, or
205 similar laws prohibiting or restricting circumvention of such
207 similar laws prohibiting or restricting circumvention of such
206 measures.
208 measures.
207
209
208 When you convey a covered work, you waive any legal power to forbid
210 When you convey a covered work, you waive any legal power to forbid
209 circumvention of technological measures to the extent such circumvention
211 circumvention of technological measures to the extent such circumvention
210 is effected by exercising rights under this License with respect to
212 is effected by exercising rights under this License with respect to
211 the covered work, and you disclaim any intention to limit operation or
213 the covered work, and you disclaim any intention to limit operation or
212 modification of the work as a means of enforcing, against the work's
214 modification of the work as a means of enforcing, against the work's
213 users, your or third parties' legal rights to forbid circumvention of
215 users, your or third parties' legal rights to forbid circumvention of
214 technological measures.
216 technological measures.
215
217
216 4. Conveying Verbatim Copies.
218 4. Conveying Verbatim Copies.
217
219
218 You may convey verbatim copies of the Program's source code as you
220 You may convey verbatim copies of the Program's source code as you
219 receive it, in any medium, provided that you conspicuously and
221 receive it, in any medium, provided that you conspicuously and
220 appropriately publish on each copy an appropriate copyright notice;
222 appropriately publish on each copy an appropriate copyright notice;
221 keep intact all notices stating that this License and any
223 keep intact all notices stating that this License and any
222 non-permissive terms added in accord with section 7 apply to the code;
224 non-permissive terms added in accord with section 7 apply to the code;
223 keep intact all notices of the absence of any warranty; and give all
225 keep intact all notices of the absence of any warranty; and give all
224 recipients a copy of this License along with the Program.
226 recipients a copy of this License along with the Program.
225
227
226 You may charge any price or no price for each copy that you convey,
228 You may charge any price or no price for each copy that you convey,
227 and you may offer support or warranty protection for a fee.
229 and you may offer support or warranty protection for a fee.
228
230
229 5. Conveying Modified Source Versions.
231 5. Conveying Modified Source Versions.
230
232
231 You may convey a work based on the Program, or the modifications to
233 You may convey a work based on the Program, or the modifications to
232 produce it from the Program, in the form of source code under the
234 produce it from the Program, in the form of source code under the
233 terms of section 4, provided that you also meet all of these conditions:
235 terms of section 4, provided that you also meet all of these conditions:
234
236
235 a) The work must carry prominent notices stating that you modified
237 a) The work must carry prominent notices stating that you modified
236 it, and giving a relevant date.
238 it, and giving a relevant date.
237
239
238 b) The work must carry prominent notices stating that it is
240 b) The work must carry prominent notices stating that it is
239 released under this License and any conditions added under section
241 released under this License and any conditions added under section
240 7. This requirement modifies the requirement in section 4 to
242 7. This requirement modifies the requirement in section 4 to
241 "keep intact all notices".
243 "keep intact all notices".
242
244
243 c) You must license the entire work, as a whole, under this
245 c) You must license the entire work, as a whole, under this
244 License to anyone who comes into possession of a copy. This
246 License to anyone who comes into possession of a copy. This
245 License will therefore apply, along with any applicable section 7
247 License will therefore apply, along with any applicable section 7
246 additional terms, to the whole of the work, and all its parts,
248 additional terms, to the whole of the work, and all its parts,
247 regardless of how they are packaged. This License gives no
249 regardless of how they are packaged. This License gives no
248 permission to license the work in any other way, but it does not
250 permission to license the work in any other way, but it does not
249 invalidate such permission if you have separately received it.
251 invalidate such permission if you have separately received it.
250
252
251 d) If the work has interactive user interfaces, each must display
253 d) If the work has interactive user interfaces, each must display
252 Appropriate Legal Notices; however, if the Program has interactive
254 Appropriate Legal Notices; however, if the Program has interactive
253 interfaces that do not display Appropriate Legal Notices, your
255 interfaces that do not display Appropriate Legal Notices, your
254 work need not make them do so.
256 work need not make them do so.
255
257
256 A compilation of a covered work with other separate and independent
258 A compilation of a covered work with other separate and independent
257 works, which are not by their nature extensions of the covered work,
259 works, which are not by their nature extensions of the covered work,
258 and which are not combined with it such as to form a larger program,
260 and which are not combined with it such as to form a larger program,
259 in or on a volume of a storage or distribution medium, is called an
261 in or on a volume of a storage or distribution medium, is called an
260 "aggregate" if the compilation and its resulting copyright are not
262 "aggregate" if the compilation and its resulting copyright are not
261 used to limit the access or legal rights of the compilation's users
263 used to limit the access or legal rights of the compilation's users
262 beyond what the individual works permit. Inclusion of a covered work
264 beyond what the individual works permit. Inclusion of a covered work
263 in an aggregate does not cause this License to apply to the other
265 in an aggregate does not cause this License to apply to the other
264 parts of the aggregate.
266 parts of the aggregate.
265
267
266 6. Conveying Non-Source Forms.
268 6. Conveying Non-Source Forms.
267
269
268 You may convey a covered work in object code form under the terms
270 You may convey a covered work in object code form under the terms
269 of sections 4 and 5, provided that you also convey the
271 of sections 4 and 5, provided that you also convey the
270 machine-readable Corresponding Source under the terms of this License,
272 machine-readable Corresponding Source under the terms of this License,
271 in one of these ways:
273 in one of these ways:
272
274
273 a) Convey the object code in, or embodied in, a physical product
275 a) Convey the object code in, or embodied in, a physical product
274 (including a physical distribution medium), accompanied by the
276 (including a physical distribution medium), accompanied by the
275 Corresponding Source fixed on a durable physical medium
277 Corresponding Source fixed on a durable physical medium
276 customarily used for software interchange.
278 customarily used for software interchange.
277
279
278 b) Convey the object code in, or embodied in, a physical product
280 b) Convey the object code in, or embodied in, a physical product
279 (including a physical distribution medium), accompanied by a
281 (including a physical distribution medium), accompanied by a
280 written offer, valid for at least three years and valid for as
282 written offer, valid for at least three years and valid for as
281 long as you offer spare parts or customer support for that product
283 long as you offer spare parts or customer support for that product
282 model, to give anyone who possesses the object code either (1) a
284 model, to give anyone who possesses the object code either (1) a
283 copy of the Corresponding Source for all the software in the
285 copy of the Corresponding Source for all the software in the
284 product that is covered by this License, on a durable physical
286 product that is covered by this License, on a durable physical
285 medium customarily used for software interchange, for a price no
287 medium customarily used for software interchange, for a price no
286 more than your reasonable cost of physically performing this
288 more than your reasonable cost of physically performing this
287 conveying of source, or (2) access to copy the
289 conveying of source, or (2) access to copy the
288 Corresponding Source from a network server at no charge.
290 Corresponding Source from a network server at no charge.
289
291
290 c) Convey individual copies of the object code with a copy of the
292 c) Convey individual copies of the object code with a copy of the
291 written offer to provide the Corresponding Source. This
293 written offer to provide the Corresponding Source. This
292 alternative is allowed only occasionally and noncommercially, and
294 alternative is allowed only occasionally and noncommercially, and
293 only if you received the object code with such an offer, in accord
295 only if you received the object code with such an offer, in accord
294 with subsection 6b.
296 with subsection 6b.
295
297
296 d) Convey the object code by offering access from a designated
298 d) Convey the object code by offering access from a designated
297 place (gratis or for a charge), and offer equivalent access to the
299 place (gratis or for a charge), and offer equivalent access to the
298 Corresponding Source in the same way through the same place at no
300 Corresponding Source in the same way through the same place at no
299 further charge. You need not require recipients to copy the
301 further charge. You need not require recipients to copy the
300 Corresponding Source along with the object code. If the place to
302 Corresponding Source along with the object code. If the place to
301 copy the object code is a network server, the Corresponding Source
303 copy the object code is a network server, the Corresponding Source
302 may be on a different server (operated by you or a third party)
304 may be on a different server (operated by you or a third party)
303 that supports equivalent copying facilities, provided you maintain
305 that supports equivalent copying facilities, provided you maintain
304 clear directions next to the object code saying where to find the
306 clear directions next to the object code saying where to find the
305 Corresponding Source. Regardless of what server hosts the
307 Corresponding Source. Regardless of what server hosts the
306 Corresponding Source, you remain obligated to ensure that it is
308 Corresponding Source, you remain obligated to ensure that it is
307 available for as long as needed to satisfy these requirements.
309 available for as long as needed to satisfy these requirements.
308
310
309 e) Convey the object code using peer-to-peer transmission, provided
311 e) Convey the object code using peer-to-peer transmission, provided
310 you inform other peers where the object code and Corresponding
312 you inform other peers where the object code and Corresponding
311 Source of the work are being offered to the general public at no
313 Source of the work are being offered to the general public at no
312 charge under subsection 6d.
314 charge under subsection 6d.
313
315
314 A separable portion of the object code, whose source code is excluded
316 A separable portion of the object code, whose source code is excluded
315 from the Corresponding Source as a System Library, need not be
317 from the Corresponding Source as a System Library, need not be
316 included in conveying the object code work.
318 included in conveying the object code work.
317
319
318 A "User Product" is either (1) a "consumer product", which means any
320 A "User Product" is either (1) a "consumer product", which means any
319 tangible personal property which is normally used for personal, family,
321 tangible personal property which is normally used for personal, family,
320 or household purposes, or (2) anything designed or sold for incorporation
322 or household purposes, or (2) anything designed or sold for incorporation
321 into a dwelling. In determining whether a product is a consumer product,
323 into a dwelling. In determining whether a product is a consumer product,
322 doubtful cases shall be resolved in favor of coverage. For a particular
324 doubtful cases shall be resolved in favor of coverage. For a particular
323 product received by a particular user, "normally used" refers to a
325 product received by a particular user, "normally used" refers to a
324 typical or common use of that class of product, regardless of the status
326 typical or common use of that class of product, regardless of the status
325 of the particular user or of the way in which the particular user
327 of the particular user or of the way in which the particular user
326 actually uses, or expects or is expected to use, the product. A product
328 actually uses, or expects or is expected to use, the product. A product
327 is a consumer product regardless of whether the product has substantial
329 is a consumer product regardless of whether the product has substantial
328 commercial, industrial or non-consumer uses, unless such uses represent
330 commercial, industrial or non-consumer uses, unless such uses represent
329 the only significant mode of use of the product.
331 the only significant mode of use of the product.
330
332
331 "Installation Information" for a User Product means any methods,
333 "Installation Information" for a User Product means any methods,
332 procedures, authorization keys, or other information required to install
334 procedures, authorization keys, or other information required to install
333 and execute modified versions of a covered work in that User Product from
335 and execute modified versions of a covered work in that User Product from
334 a modified version of its Corresponding Source. The information must
336 a modified version of its Corresponding Source. The information must
335 suffice to ensure that the continued functioning of the modified object
337 suffice to ensure that the continued functioning of the modified object
336 code is in no case prevented or interfered with solely because
338 code is in no case prevented or interfered with solely because
337 modification has been made.
339 modification has been made.
338
340
339 If you convey an object code work under this section in, or with, or
341 If you convey an object code work under this section in, or with, or
340 specifically for use in, a User Product, and the conveying occurs as
342 specifically for use in, a User Product, and the conveying occurs as
341 part of a transaction in which the right of possession and use of the
343 part of a transaction in which the right of possession and use of the
342 User Product is transferred to the recipient in perpetuity or for a
344 User Product is transferred to the recipient in perpetuity or for a
343 fixed term (regardless of how the transaction is characterized), the
345 fixed term (regardless of how the transaction is characterized), the
344 Corresponding Source conveyed under this section must be accompanied
346 Corresponding Source conveyed under this section must be accompanied
345 by the Installation Information. But this requirement does not apply
347 by the Installation Information. But this requirement does not apply
346 if neither you nor any third party retains the ability to install
348 if neither you nor any third party retains the ability to install
347 modified object code on the User Product (for example, the work has
349 modified object code on the User Product (for example, the work has
348 been installed in ROM).
350 been installed in ROM).
349
351
350 The requirement to provide Installation Information does not include a
352 The requirement to provide Installation Information does not include a
351 requirement to continue to provide support service, warranty, or updates
353 requirement to continue to provide support service, warranty, or updates
352 for a work that has been modified or installed by the recipient, or for
354 for a work that has been modified or installed by the recipient, or for
353 the User Product in which it has been modified or installed. Access to a
355 the User Product in which it has been modified or installed. Access to a
354 network may be denied when the modification itself materially and
356 network may be denied when the modification itself materially and
355 adversely affects the operation of the network or violates the rules and
357 adversely affects the operation of the network or violates the rules and
356 protocols for communication across the network.
358 protocols for communication across the network.
357
359
358 Corresponding Source conveyed, and Installation Information provided,
360 Corresponding Source conveyed, and Installation Information provided,
359 in accord with this section must be in a format that is publicly
361 in accord with this section must be in a format that is publicly
360 documented (and with an implementation available to the public in
362 documented (and with an implementation available to the public in
361 source code form), and must require no special password or key for
363 source code form), and must require no special password or key for
362 unpacking, reading or copying.
364 unpacking, reading or copying.
363
365
364 7. Additional Terms.
366 7. Additional Terms.
365
367
366 "Additional permissions" are terms that supplement the terms of this
368 "Additional permissions" are terms that supplement the terms of this
367 License by making exceptions from one or more of its conditions.
369 License by making exceptions from one or more of its conditions.
368 Additional permissions that are applicable to the entire Program shall
370 Additional permissions that are applicable to the entire Program shall
369 be treated as though they were included in this License, to the extent
371 be treated as though they were included in this License, to the extent
370 that they are valid under applicable law. If additional permissions
372 that they are valid under applicable law. If additional permissions
371 apply only to part of the Program, that part may be used separately
373 apply only to part of the Program, that part may be used separately
372 under those permissions, but the entire Program remains governed by
374 under those permissions, but the entire Program remains governed by
373 this License without regard to the additional permissions.
375 this License without regard to the additional permissions.
374
376
375 When you convey a copy of a covered work, you may at your option
377 When you convey a copy of a covered work, you may at your option
376 remove any additional permissions from that copy, or from any part of
378 remove any additional permissions from that copy, or from any part of
377 it. (Additional permissions may be written to require their own
379 it. (Additional permissions may be written to require their own
378 removal in certain cases when you modify the work.) You may place
380 removal in certain cases when you modify the work.) You may place
379 additional permissions on material, added by you to a covered work,
381 additional permissions on material, added by you to a covered work,
380 for which you have or can give appropriate copyright permission.
382 for which you have or can give appropriate copyright permission.
381
383
382 Notwithstanding any other provision of this License, for material you
384 Notwithstanding any other provision of this License, for material you
383 add to a covered work, you may (if authorized by the copyright holders of
385 add to a covered work, you may (if authorized by the copyright holders of
384 that material) supplement the terms of this License with terms:
386 that material) supplement the terms of this License with terms:
385
387
386 a) Disclaiming warranty or limiting liability differently from the
388 a) Disclaiming warranty or limiting liability differently from the
387 terms of sections 15 and 16 of this License; or
389 terms of sections 15 and 16 of this License; or
388
390
389 b) Requiring preservation of specified reasonable legal notices or
391 b) Requiring preservation of specified reasonable legal notices or
390 author attributions in that material or in the Appropriate Legal
392 author attributions in that material or in the Appropriate Legal
391 Notices displayed by works containing it; or
393 Notices displayed by works containing it; or
392
394
393 c) Prohibiting misrepresentation of the origin of that material, or
395 c) Prohibiting misrepresentation of the origin of that material, or
394 requiring that modified versions of such material be marked in
396 requiring that modified versions of such material be marked in
395 reasonable ways as different from the original version; or
397 reasonable ways as different from the original version; or
396
398
397 d) Limiting the use for publicity purposes of names of licensors or
399 d) Limiting the use for publicity purposes of names of licensors or
398 authors of the material; or
400 authors of the material; or
399
401
400 e) Declining to grant rights under trademark law for use of some
402 e) Declining to grant rights under trademark law for use of some
401 trade names, trademarks, or service marks; or
403 trade names, trademarks, or service marks; or
402
404
403 f) Requiring indemnification of licensors and authors of that
405 f) Requiring indemnification of licensors and authors of that
404 material by anyone who conveys the material (or modified versions of
406 material by anyone who conveys the material (or modified versions of
405 it) with contractual assumptions of liability to the recipient, for
407 it) with contractual assumptions of liability to the recipient, for
406 any liability that these contractual assumptions directly impose on
408 any liability that these contractual assumptions directly impose on
407 those licensors and authors.
409 those licensors and authors.
408
410
409 All other non-permissive additional terms are considered "further
411 All other non-permissive additional terms are considered "further
410 restrictions" within the meaning of section 10. If the Program as you
412 restrictions" within the meaning of section 10. If the Program as you
411 received it, or any part of it, contains a notice stating that it is
413 received it, or any part of it, contains a notice stating that it is
412 governed by this License along with a term that is a further
414 governed by this License along with a term that is a further
413 restriction, you may remove that term. If a license document contains
415 restriction, you may remove that term. If a license document contains
414 a further restriction but permits relicensing or conveying under this
416 a further restriction but permits relicensing or conveying under this
415 License, you may add to a covered work material governed by the terms
417 License, you may add to a covered work material governed by the terms
416 of that license document, provided that the further restriction does
418 of that license document, provided that the further restriction does
417 not survive such relicensing or conveying.
419 not survive such relicensing or conveying.
418
420
419 If you add terms to a covered work in accord with this section, you
421 If you add terms to a covered work in accord with this section, you
420 must place, in the relevant source files, a statement of the
422 must place, in the relevant source files, a statement of the
421 additional terms that apply to those files, or a notice indicating
423 additional terms that apply to those files, or a notice indicating
422 where to find the applicable terms.
424 where to find the applicable terms.
423
425
424 Additional terms, permissive or non-permissive, may be stated in the
426 Additional terms, permissive or non-permissive, may be stated in the
425 form of a separately written license, or stated as exceptions;
427 form of a separately written license, or stated as exceptions;
426 the above requirements apply either way.
428 the above requirements apply either way.
427
429
428 8. Termination.
430 8. Termination.
429
431
430 You may not propagate or modify a covered work except as expressly
432 You may not propagate or modify a covered work except as expressly
431 provided under this License. Any attempt otherwise to propagate or
433 provided under this License. Any attempt otherwise to propagate or
432 modify it is void, and will automatically terminate your rights under
434 modify it is void, and will automatically terminate your rights under
433 this License (including any patent licenses granted under the third
435 this License (including any patent licenses granted under the third
434 paragraph of section 11).
436 paragraph of section 11).
435
437
436 However, if you cease all violation of this License, then your
438 However, if you cease all violation of this License, then your
437 license from a particular copyright holder is reinstated (a)
439 license from a particular copyright holder is reinstated (a)
438 provisionally, unless and until the copyright holder explicitly and
440 provisionally, unless and until the copyright holder explicitly and
439 finally terminates your license, and (b) permanently, if the copyright
441 finally terminates your license, and (b) permanently, if the copyright
440 holder fails to notify you of the violation by some reasonable means
442 holder fails to notify you of the violation by some reasonable means
441 prior to 60 days after the cessation.
443 prior to 60 days after the cessation.
442
444
443 Moreover, your license from a particular copyright holder is
445 Moreover, your license from a particular copyright holder is
444 reinstated permanently if the copyright holder notifies you of the
446 reinstated permanently if the copyright holder notifies you of the
445 violation by some reasonable means, this is the first time you have
447 violation by some reasonable means, this is the first time you have
446 received notice of violation of this License (for any work) from that
448 received notice of violation of this License (for any work) from that
447 copyright holder, and you cure the violation prior to 30 days after
449 copyright holder, and you cure the violation prior to 30 days after
448 your receipt of the notice.
450 your receipt of the notice.
449
451
450 Termination of your rights under this section does not terminate the
452 Termination of your rights under this section does not terminate the
451 licenses of parties who have received copies or rights from you under
453 licenses of parties who have received copies or rights from you under
452 this License. If your rights have been terminated and not permanently
454 this License. If your rights have been terminated and not permanently
453 reinstated, you do not qualify to receive new licenses for the same
455 reinstated, you do not qualify to receive new licenses for the same
454 material under section 10.
456 material under section 10.
455
457
456 9. Acceptance Not Required for Having Copies.
458 9. Acceptance Not Required for Having Copies.
457
459
458 You are not required to accept this License in order to receive or
460 You are not required to accept this License in order to receive or
459 run a copy of the Program. Ancillary propagation of a covered work
461 run a copy of the Program. Ancillary propagation of a covered work
460 occurring solely as a consequence of using peer-to-peer transmission
462 occurring solely as a consequence of using peer-to-peer transmission
461 to receive a copy likewise does not require acceptance. However,
463 to receive a copy likewise does not require acceptance. However,
462 nothing other than this License grants you permission to propagate or
464 nothing other than this License grants you permission to propagate or
463 modify any covered work. These actions infringe copyright if you do
465 modify any covered work. These actions infringe copyright if you do
464 not accept this License. Therefore, by modifying or propagating a
466 not accept this License. Therefore, by modifying or propagating a
465 covered work, you indicate your acceptance of this License to do so.
467 covered work, you indicate your acceptance of this License to do so.
466
468
467 10. Automatic Licensing of Downstream Recipients.
469 10. Automatic Licensing of Downstream Recipients.
468
470
469 Each time you convey a covered work, the recipient automatically
471 Each time you convey a covered work, the recipient automatically
470 receives a license from the original licensors, to run, modify and
472 receives a license from the original licensors, to run, modify and
471 propagate that work, subject to this License. You are not responsible
473 propagate that work, subject to this License. You are not responsible
472 for enforcing compliance by third parties with this License.
474 for enforcing compliance by third parties with this License.
473
475
474 An "entity transaction" is a transaction transferring control of an
476 An "entity transaction" is a transaction transferring control of an
475 organization, or substantially all assets of one, or subdividing an
477 organization, or substantially all assets of one, or subdividing an
476 organization, or merging organizations. If propagation of a covered
478 organization, or merging organizations. If propagation of a covered
477 work results from an entity transaction, each party to that
479 work results from an entity transaction, each party to that
478 transaction who receives a copy of the work also receives whatever
480 transaction who receives a copy of the work also receives whatever
479 licenses to the work the party's predecessor in interest had or could
481 licenses to the work the party's predecessor in interest had or could
480 give under the previous paragraph, plus a right to possession of the
482 give under the previous paragraph, plus a right to possession of the
481 Corresponding Source of the work from the predecessor in interest, if
483 Corresponding Source of the work from the predecessor in interest, if
482 the predecessor has it or can get it with reasonable efforts.
484 the predecessor has it or can get it with reasonable efforts.
483
485
484 You may not impose any further restrictions on the exercise of the
486 You may not impose any further restrictions on the exercise of the
485 rights granted or affirmed under this License. For example, you may
487 rights granted or affirmed under this License. For example, you may
486 not impose a license fee, royalty, or other charge for exercise of
488 not impose a license fee, royalty, or other charge for exercise of
487 rights granted under this License, and you may not initiate litigation
489 rights granted under this License, and you may not initiate litigation
488 (including a cross-claim or counterclaim in a lawsuit) alleging that
490 (including a cross-claim or counterclaim in a lawsuit) alleging that
489 any patent claim is infringed by making, using, selling, offering for
491 any patent claim is infringed by making, using, selling, offering for
490 sale, or importing the Program or any portion of it.
492 sale, or importing the Program or any portion of it.
491
493
492 11. Patents.
494 11. Patents.
493
495
494 A "contributor" is a copyright holder who authorizes use under this
496 A "contributor" is a copyright holder who authorizes use under this
495 License of the Program or a work on which the Program is based. The
497 License of the Program or a work on which the Program is based. The
496 work thus licensed is called the contributor's "contributor version".
498 work thus licensed is called the contributor's "contributor version".
497
499
498 A contributor's "essential patent claims" are all patent claims
500 A contributor's "essential patent claims" are all patent claims
499 owned or controlled by the contributor, whether already acquired or
501 owned or controlled by the contributor, whether already acquired or
500 hereafter acquired, that would be infringed by some manner, permitted
502 hereafter acquired, that would be infringed by some manner, permitted
501 by this License, of making, using, or selling its contributor version,
503 by this License, of making, using, or selling its contributor version,
502 but do not include claims that would be infringed only as a
504 but do not include claims that would be infringed only as a
503 consequence of further modification of the contributor version. For
505 consequence of further modification of the contributor version. For
504 purposes of this definition, "control" includes the right to grant
506 purposes of this definition, "control" includes the right to grant
505 patent sublicenses in a manner consistent with the requirements of
507 patent sublicenses in a manner consistent with the requirements of
506 this License.
508 this License.
507
509
508 Each contributor grants you a non-exclusive, worldwide, royalty-free
510 Each contributor grants you a non-exclusive, worldwide, royalty-free
509 patent license under the contributor's essential patent claims, to
511 patent license under the contributor's essential patent claims, to
510 make, use, sell, offer for sale, import and otherwise run, modify and
512 make, use, sell, offer for sale, import and otherwise run, modify and
511 propagate the contents of its contributor version.
513 propagate the contents of its contributor version.
512
514
513 In the following three paragraphs, a "patent license" is any express
515 In the following three paragraphs, a "patent license" is any express
514 agreement or commitment, however denominated, not to enforce a patent
516 agreement or commitment, however denominated, not to enforce a patent
515 (such as an express permission to practice a patent or covenant not to
517 (such as an express permission to practice a patent or covenant not to
516 sue for patent infringement). To "grant" such a patent license to a
518 sue for patent infringement). To "grant" such a patent license to a
517 party means to make such an agreement or commitment not to enforce a
519 party means to make such an agreement or commitment not to enforce a
518 patent against the party.
520 patent against the party.
519
521
520 If you convey a covered work, knowingly relying on a patent license,
522 If you convey a covered work, knowingly relying on a patent license,
521 and the Corresponding Source of the work is not available for anyone
523 and the Corresponding Source of the work is not available for anyone
522 to copy, free of charge and under the terms of this License, through a
524 to copy, free of charge and under the terms of this License, through a
523 publicly available network server or other readily accessible means,
525 publicly available network server or other readily accessible means,
524 then you must either (1) cause the Corresponding Source to be so
526 then you must either (1) cause the Corresponding Source to be so
525 available, or (2) arrange to deprive yourself of the benefit of the
527 available, or (2) arrange to deprive yourself of the benefit of the
526 patent license for this particular work, or (3) arrange, in a manner
528 patent license for this particular work, or (3) arrange, in a manner
527 consistent with the requirements of this License, to extend the patent
529 consistent with the requirements of this License, to extend the patent
528 license to downstream recipients. "Knowingly relying" means you have
530 license to downstream recipients. "Knowingly relying" means you have
529 actual knowledge that, but for the patent license, your conveying the
531 actual knowledge that, but for the patent license, your conveying the
530 covered work in a country, or your recipient's use of the covered work
532 covered work in a country, or your recipient's use of the covered work
531 in a country, would infringe one or more identifiable patents in that
533 in a country, would infringe one or more identifiable patents in that
532 country that you have reason to believe are valid.
534 country that you have reason to believe are valid.
533
535
534 If, pursuant to or in connection with a single transaction or
536 If, pursuant to or in connection with a single transaction or
535 arrangement, you convey, or propagate by procuring conveyance of, a
537 arrangement, you convey, or propagate by procuring conveyance of, a
536 covered work, and grant a patent license to some of the parties
538 covered work, and grant a patent license to some of the parties
537 receiving the covered work authorizing them to use, propagate, modify
539 receiving the covered work authorizing them to use, propagate, modify
538 or convey a specific copy of the covered work, then the patent license
540 or convey a specific copy of the covered work, then the patent license
539 you grant is automatically extended to all recipients of the covered
541 you grant is automatically extended to all recipients of the covered
540 work and works based on it.
542 work and works based on it.
541
543
542 A patent license is "discriminatory" if it does not include within
544 A patent license is "discriminatory" if it does not include within
543 the scope of its coverage, prohibits the exercise of, or is
545 the scope of its coverage, prohibits the exercise of, or is
544 conditioned on the non-exercise of one or more of the rights that are
546 conditioned on the non-exercise of one or more of the rights that are
545 specifically granted under this License. You may not convey a covered
547 specifically granted under this License. You may not convey a covered
546 work if you are a party to an arrangement with a third party that is
548 work if you are a party to an arrangement with a third party that is
547 in the business of distributing software, under which you make payment
549 in the business of distributing software, under which you make payment
548 to the third party based on the extent of your activity of conveying
550 to the third party based on the extent of your activity of conveying
549 the work, and under which the third party grants, to any of the
551 the work, and under which the third party grants, to any of the
550 parties who would receive the covered work from you, a discriminatory
552 parties who would receive the covered work from you, a discriminatory
551 patent license (a) in connection with copies of the covered work
553 patent license (a) in connection with copies of the covered work
552 conveyed by you (or copies made from those copies), or (b) primarily
554 conveyed by you (or copies made from those copies), or (b) primarily
553 for and in connection with specific products or compilations that
555 for and in connection with specific products or compilations that
554 contain the covered work, unless you entered into that arrangement,
556 contain the covered work, unless you entered into that arrangement,
555 or that patent license was granted, prior to 28 March 2007.
557 or that patent license was granted, prior to 28 March 2007.
556
558
557 Nothing in this License shall be construed as excluding or limiting
559 Nothing in this License shall be construed as excluding or limiting
558 any implied license or other defenses to infringement that may
560 any implied license or other defenses to infringement that may
559 otherwise be available to you under applicable patent law.
561 otherwise be available to you under applicable patent law.
560
562
561 12. No Surrender of Others' Freedom.
563 12. No Surrender of Others' Freedom.
562
564
563 If conditions are imposed on you (whether by court order, agreement or
565 If conditions are imposed on you (whether by court order, agreement or
564 otherwise) that contradict the conditions of this License, they do not
566 otherwise) that contradict the conditions of this License, they do not
565 excuse you from the conditions of this License. If you cannot convey a
567 excuse you from the conditions of this License. If you cannot convey a
566 covered work so as to satisfy simultaneously your obligations under this
568 covered work so as to satisfy simultaneously your obligations under this
567 License and any other pertinent obligations, then as a consequence you may
569 License and any other pertinent obligations, then as a consequence you may
568 not convey it at all. For example, if you agree to terms that obligate you
570 not convey it at all. For example, if you agree to terms that obligate you
569 to collect a royalty for further conveying from those to whom you convey
571 to collect a royalty for further conveying from those to whom you convey
570 the Program, the only way you could satisfy both those terms and this
572 the Program, the only way you could satisfy both those terms and this
571 License would be to refrain entirely from conveying the Program.
573 License would be to refrain entirely from conveying the Program.
572
574
573 13. Remote Network Interaction; Use with the GNU General Public License.
575 13. Remote Network Interaction; Use with the GNU General Public License.
574
576
575 Notwithstanding any other provision of this License, if you modify the
577 Notwithstanding any other provision of this License, if you modify the
576 Program, your modified version must prominently offer all users
578 Program, your modified version must prominently offer all users
577 interacting with it remotely through a computer network (if your version
579 interacting with it remotely through a computer network (if your version
578 supports such interaction) an opportunity to receive the Corresponding
580 supports such interaction) an opportunity to receive the Corresponding
579 Source of your version by providing access to the Corresponding Source
581 Source of your version by providing access to the Corresponding Source
580 from a network server at no charge, through some standard or customary
582 from a network server at no charge, through some standard or customary
581 means of facilitating copying of software. This Corresponding Source
583 means of facilitating copying of software. This Corresponding Source
582 shall include the Corresponding Source for any work covered by version 3
584 shall include the Corresponding Source for any work covered by version 3
583 of the GNU General Public License that is incorporated pursuant to the
585 of the GNU General Public License that is incorporated pursuant to the
584 following paragraph.
586 following paragraph.
585
587
586 Notwithstanding any other provision of this License, you have
588 Notwithstanding any other provision of this License, you have
587 permission to link or combine any covered work with a work licensed
589 permission to link or combine any covered work with a work licensed
588 under version 3 of the GNU General Public License into a single
590 under version 3 of the GNU General Public License into a single
589 combined work, and to convey the resulting work. The terms of this
591 combined work, and to convey the resulting work. The terms of this
590 License will continue to apply to the part which is the covered work,
592 License will continue to apply to the part which is the covered work,
591 but the work with which it is combined will remain governed by version
593 but the work with which it is combined will remain governed by version
592 3 of the GNU General Public License.
594 3 of the GNU General Public License.
593
595
594 14. Revised Versions of this License.
596 14. Revised Versions of this License.
595
597
596 The Free Software Foundation may publish revised and/or new versions of
598 The Free Software Foundation may publish revised and/or new versions of
597 the GNU Affero General Public License from time to time. Such new versions
599 the GNU Affero General Public License from time to time. Such new versions
598 will be similar in spirit to the present version, but may differ in detail to
600 will be similar in spirit to the present version, but may differ in detail to
599 address new problems or concerns.
601 address new problems or concerns.
600
602
601 Each version is given a distinguishing version number. If the
603 Each version is given a distinguishing version number. If the
602 Program specifies that a certain numbered version of the GNU Affero General
604 Program specifies that a certain numbered version of the GNU Affero General
603 Public License "or any later version" applies to it, you have the
605 Public License "or any later version" applies to it, you have the
604 option of following the terms and conditions either of that numbered
606 option of following the terms and conditions either of that numbered
605 version or of any later version published by the Free Software
607 version or of any later version published by the Free Software
606 Foundation. If the Program does not specify a version number of the
608 Foundation. If the Program does not specify a version number of the
607 GNU Affero General Public License, you may choose any version ever published
609 GNU Affero General Public License, you may choose any version ever published
608 by the Free Software Foundation.
610 by the Free Software Foundation.
609
611
610 If the Program specifies that a proxy can decide which future
612 If the Program specifies that a proxy can decide which future
611 versions of the GNU Affero General Public License can be used, that proxy's
613 versions of the GNU Affero General Public License can be used, that proxy's
612 public statement of acceptance of a version permanently authorizes you
614 public statement of acceptance of a version permanently authorizes you
613 to choose that version for the Program.
615 to choose that version for the Program.
614
616
615 Later license versions may give you additional or different
617 Later license versions may give you additional or different
616 permissions. However, no additional obligations are imposed on any
618 permissions. However, no additional obligations are imposed on any
617 author or copyright holder as a result of your choosing to follow a
619 author or copyright holder as a result of your choosing to follow a
618 later version.
620 later version.
619
621
620 15. Disclaimer of Warranty.
622 15. Disclaimer of Warranty.
621
623
622 THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
624 THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
623 APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
625 APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
624 HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
626 HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
625 OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
627 OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
626 THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
628 THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
627 PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
629 PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
628 IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
630 IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
629 ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
631 ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
630
632
631 16. Limitation of Liability.
633 16. Limitation of Liability.
632
634
633 IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
635 IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
634 WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
636 WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
635 THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
637 THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
636 GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
638 GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
637 USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
639 USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
638 DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
640 DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
639 PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
641 PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
640 EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
642 EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
641 SUCH DAMAGES.
643 SUCH DAMAGES.
642
644
643 17. Interpretation of Sections 15 and 16.
645 17. Interpretation of Sections 15 and 16.
644
646
645 If the disclaimer of warranty and limitation of liability provided
647 If the disclaimer of warranty and limitation of liability provided
646 above cannot be given local legal effect according to their terms,
648 above cannot be given local legal effect according to their terms,
647 reviewing courts shall apply local law that most closely approximates
649 reviewing courts shall apply local law that most closely approximates
648 an absolute waiver of all civil liability in connection with the
650 an absolute waiver of all civil liability in connection with the
649 Program, unless a warranty or assumption of liability accompanies a
651 Program, unless a warranty or assumption of liability accompanies a
650 copy of the Program in return for a fee.
652 copy of the Program in return for a fee.
651
653
652 END OF TERMS AND CONDITIONS
654 END OF TERMS AND CONDITIONS
653
655
654 How to Apply These Terms to Your New Programs
656 How to Apply These Terms to Your New Programs
655
657
656 If you develop a new program, and you want it to be of the greatest
658 If you develop a new program, and you want it to be of the greatest
657 possible use to the public, the best way to achieve this is to make it
659 possible use to the public, the best way to achieve this is to make it
658 free software which everyone can redistribute and change under these terms.
660 free software which everyone can redistribute and change under these terms.
659
661
660 To do so, attach the following notices to the program. It is safest
662 To do so, attach the following notices to the program. It is safest
661 to attach them to the start of each source file to most effectively
663 to attach them to the start of each source file to most effectively
662 state the exclusion of warranty; and each file should have at least
664 state the exclusion of warranty; and each file should have at least
663 the "copyright" line and a pointer to where the full notice is found.
665 the "copyright" line and a pointer to where the full notice is found.
664
666
665 <one line to give the program's name and a brief idea of what it does.>
667 <one line to give the program's name and a brief idea of what it does.>
666 Copyright (C) <year> <name of author>
668 Copyright (C) <year> <name of author>
667
669
668 This program is free software: you can redistribute it and/or modify
670 This program is free software: you can redistribute it and/or modify
669 it under the terms of the GNU Affero General Public License as published by
671 it under the terms of the GNU Affero General Public License as published by
670 the Free Software Foundation, either version 3 of the License, or
672 the Free Software Foundation, either version 3 of the License, or
671 (at your option) any later version.
673 (at your option) any later version.
672
674
673 This program is distributed in the hope that it will be useful,
675 This program is distributed in the hope that it will be useful,
674 but WITHOUT ANY WARRANTY; without even the implied warranty of
676 but WITHOUT ANY WARRANTY; without even the implied warranty of
675 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
677 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
676 GNU Affero General Public License for more details.
678 GNU Affero General Public License for more details.
677
679
678 You should have received a copy of the GNU Affero General Public License
680 You should have received a copy of the GNU Affero General Public License
679 along with this program. If not, see <http://www.gnu.org/licenses/>.
681 along with this program. If not, see <http://www.gnu.org/licenses/>.
680
682
681 Also add information on how to contact you by electronic and paper mail.
683 Also add information on how to contact you by electronic and paper mail.
682
684
683 If your software can interact with users remotely through a computer
685 If your software can interact with users remotely through a computer
684 network, you should also make sure that it provides a way for users to
686 network, you should also make sure that it provides a way for users to
685 get its source. For example, if your program is a web application, its
687 get its source. For example, if your program is a web application, its
686 interface could display a "Source" link that leads users to an archive
688 interface could display a "Source" link that leads users to an archive
687 of the code. There are many ways you could offer source, and different
689 of the code. There are many ways you could offer source, and different
688 solutions will be better for different programs; see section 13 for the
690 solutions will be better for different programs; see section 13 for the
689 specific requirements.
691 specific requirements.
690
692
691 You should also get your employer (if you work as a programmer) or school,
693 You should also get your employer (if you work as a programmer) or school,
692 if any, to sign a "copyright disclaimer" for the program, if necessary.
694 if any, to sign a "copyright disclaimer" for the program, if necessary.
693 For more information on this, and how to apply and follow the GNU AGPL, see
695 For more information on this, and how to apply and follow the GNU AGPL, see
694 <http://www.gnu.org/licenses/>.
696 <http://www.gnu.org/licenses/>.
@@ -1,267 +1,263 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Compare controller for showing differences between two commits/refs/tags etc.
22 Compare controller for showing differences between two commits/refs/tags etc.
23 """
23 """
24
24
25 import logging
25 import logging
26
26
27 from webob.exc import HTTPBadRequest
27 from webob.exc import HTTPBadRequest
28 from pylons import request, tmpl_context as c, url
28 from pylons import request, tmpl_context as c, url
29 from pylons.controllers.util import redirect
29 from pylons.controllers.util import redirect
30 from pylons.i18n.translation import _
30 from pylons.i18n.translation import _
31
31
32 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
32 from rhodecode.controllers.utils import parse_path_ref, get_commit_from_ref_name
33 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
34 from rhodecode.lib import diffs
34 from rhodecode.lib import diffs, codeblocks
35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
36 from rhodecode.lib.base import BaseRepoController, render
36 from rhodecode.lib.base import BaseRepoController, render
37 from rhodecode.lib.utils import safe_str
37 from rhodecode.lib.utils import safe_str
38 from rhodecode.lib.utils2 import safe_unicode, str2bool
38 from rhodecode.lib.utils2 import safe_unicode, str2bool
39 from rhodecode.lib.vcs.exceptions import (
39 from rhodecode.lib.vcs.exceptions import (
40 EmptyRepositoryError, RepositoryError, RepositoryRequirementError)
40 EmptyRepositoryError, RepositoryError, RepositoryRequirementError,
41 NodeDoesNotExistError)
41 from rhodecode.model.db import Repository, ChangesetStatus
42 from rhodecode.model.db import Repository, ChangesetStatus
42
43
43 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
44
45
45
46
46 class CompareController(BaseRepoController):
47 class CompareController(BaseRepoController):
47
48
48 def __before__(self):
49 def __before__(self):
49 super(CompareController, self).__before__()
50 super(CompareController, self).__before__()
50
51
51 def _get_commit_or_redirect(
52 def _get_commit_or_redirect(
52 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 self, ref, ref_type, repo, redirect_after=True, partial=False):
53 """
54 """
54 This is a safe way to get a commit. If an error occurs it
55 This is a safe way to get a commit. If an error occurs it
55 redirects to a commit with a proper message. If partial is set
56 redirects to a commit with a proper message. If partial is set
56 then it does not do redirect raise and throws an exception instead.
57 then it does not do redirect raise and throws an exception instead.
57 """
58 """
58 try:
59 try:
59 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 return get_commit_from_ref_name(repo, safe_str(ref), ref_type)
60 except EmptyRepositoryError:
61 except EmptyRepositoryError:
61 if not redirect_after:
62 if not redirect_after:
62 return repo.scm_instance().EMPTY_COMMIT
63 return repo.scm_instance().EMPTY_COMMIT
63 h.flash(h.literal(_('There are no commits yet')),
64 h.flash(h.literal(_('There are no commits yet')),
64 category='warning')
65 category='warning')
65 redirect(url('summary_home', repo_name=repo.repo_name))
66 redirect(url('summary_home', repo_name=repo.repo_name))
66
67
67 except RepositoryError as e:
68 except RepositoryError as e:
68 msg = safe_str(e)
69 msg = safe_str(e)
69 log.exception(msg)
70 log.exception(msg)
70 h.flash(msg, category='warning')
71 h.flash(msg, category='warning')
71 if not partial:
72 if not partial:
72 redirect(h.url('summary_home', repo_name=repo.repo_name))
73 redirect(h.url('summary_home', repo_name=repo.repo_name))
73 raise HTTPBadRequest()
74 raise HTTPBadRequest()
74
75
75 @LoginRequired()
76 @LoginRequired()
76 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
77 'repository.admin')
78 'repository.admin')
78 def index(self, repo_name):
79 def index(self, repo_name):
79 c.compare_home = True
80 c.compare_home = True
80 c.commit_ranges = []
81 c.commit_ranges = []
81 c.files = []
82 c.diffset = None
82 c.limited_diff = False
83 c.limited_diff = False
83 source_repo = c.rhodecode_db_repo.repo_name
84 source_repo = c.rhodecode_db_repo.repo_name
84 target_repo = request.GET.get('target_repo', source_repo)
85 target_repo = request.GET.get('target_repo', source_repo)
85 c.source_repo = Repository.get_by_repo_name(source_repo)
86 c.source_repo = Repository.get_by_repo_name(source_repo)
86 c.target_repo = Repository.get_by_repo_name(target_repo)
87 c.target_repo = Repository.get_by_repo_name(target_repo)
87 c.source_ref = c.target_ref = _('Select commit')
88 c.source_ref = c.target_ref = _('Select commit')
88 c.source_ref_type = ""
89 c.source_ref_type = ""
89 c.target_ref_type = ""
90 c.target_ref_type = ""
90 c.commit_statuses = ChangesetStatus.STATUSES
91 c.commit_statuses = ChangesetStatus.STATUSES
91 c.preview_mode = False
92 c.preview_mode = False
92 return render('compare/compare_diff.html')
93 return render('compare/compare_diff.html')
93
94
94 @LoginRequired()
95 @LoginRequired()
95 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
96 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
96 'repository.admin')
97 'repository.admin')
97 def compare(self, repo_name, source_ref_type, source_ref,
98 def compare(self, repo_name, source_ref_type, source_ref,
98 target_ref_type, target_ref):
99 target_ref_type, target_ref):
99 # source_ref will be evaluated in source_repo
100 # source_ref will be evaluated in source_repo
100 source_repo_name = c.rhodecode_db_repo.repo_name
101 source_repo_name = c.rhodecode_db_repo.repo_name
101 source_path, source_id = parse_path_ref(source_ref)
102 source_path, source_id = parse_path_ref(source_ref)
102
103
103 # target_ref will be evaluated in target_repo
104 # target_ref will be evaluated in target_repo
104 target_repo_name = request.GET.get('target_repo', source_repo_name)
105 target_repo_name = request.GET.get('target_repo', source_repo_name)
105 target_path, target_id = parse_path_ref(target_ref)
106 target_path, target_id = parse_path_ref(target_ref)
106
107
107 c.commit_statuses = ChangesetStatus.STATUSES
108 c.commit_statuses = ChangesetStatus.STATUSES
108
109
109 # if merge is True
110 # if merge is True
110 # Show what changes since the shared ancestor commit of target/source
111 # Show what changes since the shared ancestor commit of target/source
111 # the source would get if it was merged with target. Only commits
112 # the source would get if it was merged with target. Only commits
112 # which are in target but not in source will be shown.
113 # which are in target but not in source will be shown.
113 merge = str2bool(request.GET.get('merge'))
114 merge = str2bool(request.GET.get('merge'))
114 # if merge is False
115 # if merge is False
115 # Show a raw diff of source/target refs even if no ancestor exists
116 # Show a raw diff of source/target refs even if no ancestor exists
116
117
117
118
118 # c.fulldiff disables cut_off_limit
119 # c.fulldiff disables cut_off_limit
119 c.fulldiff = str2bool(request.GET.get('fulldiff'))
120 c.fulldiff = str2bool(request.GET.get('fulldiff'))
120
121
121 # if partial, returns just compare_commits.html (commits log)
122 # if partial, returns just compare_commits.html (commits log)
122 partial = request.is_xhr
123 partial = request.is_xhr
123
124
124 # swap url for compare_diff page
125 # swap url for compare_diff page
125 c.swap_url = h.url(
126 c.swap_url = h.url(
126 'compare_url',
127 'compare_url',
127 repo_name=target_repo_name,
128 repo_name=target_repo_name,
128 source_ref_type=target_ref_type,
129 source_ref_type=target_ref_type,
129 source_ref=target_ref,
130 source_ref=target_ref,
130 target_repo=source_repo_name,
131 target_repo=source_repo_name,
131 target_ref_type=source_ref_type,
132 target_ref_type=source_ref_type,
132 target_ref=source_ref,
133 target_ref=source_ref,
133 merge=merge and '1' or '')
134 merge=merge and '1' or '')
134
135
135 source_repo = Repository.get_by_repo_name(source_repo_name)
136 source_repo = Repository.get_by_repo_name(source_repo_name)
136 target_repo = Repository.get_by_repo_name(target_repo_name)
137 target_repo = Repository.get_by_repo_name(target_repo_name)
137
138
138 if source_repo is None:
139 if source_repo is None:
139 msg = _('Could not find the original repo: %(repo)s') % {
140 msg = _('Could not find the original repo: %(repo)s') % {
140 'repo': source_repo}
141 'repo': source_repo}
141
142
142 log.error(msg)
143 log.error(msg)
143 h.flash(msg, category='error')
144 h.flash(msg, category='error')
144 return redirect(url('compare_home', repo_name=c.repo_name))
145 return redirect(url('compare_home', repo_name=c.repo_name))
145
146
146 if target_repo is None:
147 if target_repo is None:
147 msg = _('Could not find the other repo: %(repo)s') % {
148 msg = _('Could not find the other repo: %(repo)s') % {
148 'repo': target_repo_name}
149 'repo': target_repo_name}
149 log.error(msg)
150 log.error(msg)
150 h.flash(msg, category='error')
151 h.flash(msg, category='error')
151 return redirect(url('compare_home', repo_name=c.repo_name))
152 return redirect(url('compare_home', repo_name=c.repo_name))
152
153
153 source_alias = source_repo.scm_instance().alias
154 source_alias = source_repo.scm_instance().alias
154 target_alias = target_repo.scm_instance().alias
155 target_alias = target_repo.scm_instance().alias
155 if source_alias != target_alias:
156 if source_alias != target_alias:
156 msg = _('The comparison of two different kinds of remote repos '
157 msg = _('The comparison of two different kinds of remote repos '
157 'is not available')
158 'is not available')
158 log.error(msg)
159 log.error(msg)
159 h.flash(msg, category='error')
160 h.flash(msg, category='error')
160 return redirect(url('compare_home', repo_name=c.repo_name))
161 return redirect(url('compare_home', repo_name=c.repo_name))
161
162
162 source_commit = self._get_commit_or_redirect(
163 source_commit = self._get_commit_or_redirect(
163 ref=source_id, ref_type=source_ref_type, repo=source_repo,
164 ref=source_id, ref_type=source_ref_type, repo=source_repo,
164 partial=partial)
165 partial=partial)
165 target_commit = self._get_commit_or_redirect(
166 target_commit = self._get_commit_or_redirect(
166 ref=target_id, ref_type=target_ref_type, repo=target_repo,
167 ref=target_id, ref_type=target_ref_type, repo=target_repo,
167 partial=partial)
168 partial=partial)
168
169
169 c.compare_home = False
170 c.compare_home = False
170 c.source_repo = source_repo
171 c.source_repo = source_repo
171 c.target_repo = target_repo
172 c.target_repo = target_repo
172 c.source_ref = source_ref
173 c.source_ref = source_ref
173 c.target_ref = target_ref
174 c.target_ref = target_ref
174 c.source_ref_type = source_ref_type
175 c.source_ref_type = source_ref_type
175 c.target_ref_type = target_ref_type
176 c.target_ref_type = target_ref_type
176
177
177 source_scm = source_repo.scm_instance()
178 source_scm = source_repo.scm_instance()
178 target_scm = target_repo.scm_instance()
179 target_scm = target_repo.scm_instance()
179
180
180 pre_load = ["author", "branch", "date", "message"]
181 pre_load = ["author", "branch", "date", "message"]
181 c.ancestor = None
182 c.ancestor = None
182 try:
183 try:
183 c.commit_ranges = source_scm.compare(
184 c.commit_ranges = source_scm.compare(
184 source_commit.raw_id, target_commit.raw_id,
185 source_commit.raw_id, target_commit.raw_id,
185 target_scm, merge, pre_load=pre_load)
186 target_scm, merge, pre_load=pre_load)
186 if merge:
187 if merge:
187 c.ancestor = source_scm.get_common_ancestor(
188 c.ancestor = source_scm.get_common_ancestor(
188 source_commit.raw_id, target_commit.raw_id, target_scm)
189 source_commit.raw_id, target_commit.raw_id, target_scm)
189 except RepositoryRequirementError:
190 except RepositoryRequirementError:
190 msg = _('Could not compare repos with different '
191 msg = _('Could not compare repos with different '
191 'large file settings')
192 'large file settings')
192 log.error(msg)
193 log.error(msg)
193 if partial:
194 if partial:
194 return msg
195 return msg
195 h.flash(msg, category='error')
196 h.flash(msg, category='error')
196 return redirect(url('compare_home', repo_name=c.repo_name))
197 return redirect(url('compare_home', repo_name=c.repo_name))
197
198
198 c.statuses = c.rhodecode_db_repo.statuses(
199 c.statuses = c.rhodecode_db_repo.statuses(
199 [x.raw_id for x in c.commit_ranges])
200 [x.raw_id for x in c.commit_ranges])
200
201
201 if partial: # for PR ajax commits loader
202 if partial: # for PR ajax commits loader
202 if not c.ancestor:
203 if not c.ancestor:
203 return '' # cannot merge if there is no ancestor
204 return '' # cannot merge if there is no ancestor
204 return render('compare/compare_commits.html')
205 return render('compare/compare_commits.html')
205
206
206 if c.ancestor:
207 if c.ancestor:
207 # case we want a simple diff without incoming commits,
208 # case we want a simple diff without incoming commits,
208 # previewing what will be merged.
209 # previewing what will be merged.
209 # Make the diff on target repo (which is known to have target_ref)
210 # Make the diff on target repo (which is known to have target_ref)
210 log.debug('Using ancestor %s as source_ref instead of %s'
211 log.debug('Using ancestor %s as source_ref instead of %s'
211 % (c.ancestor, source_ref))
212 % (c.ancestor, source_ref))
212 source_repo = target_repo
213 source_repo = target_repo
213 source_commit = target_repo.get_commit(commit_id=c.ancestor)
214 source_commit = target_repo.get_commit(commit_id=c.ancestor)
214
215
215 # diff_limit will cut off the whole diff if the limit is applied
216 # diff_limit will cut off the whole diff if the limit is applied
216 # otherwise it will just hide the big files from the front-end
217 # otherwise it will just hide the big files from the front-end
217 diff_limit = self.cut_off_limit_diff
218 diff_limit = self.cut_off_limit_diff
218 file_limit = self.cut_off_limit_file
219 file_limit = self.cut_off_limit_file
219
220
220 log.debug('calculating diff between '
221 log.debug('calculating diff between '
221 'source_ref:%s and target_ref:%s for repo `%s`',
222 'source_ref:%s and target_ref:%s for repo `%s`',
222 source_commit, target_commit,
223 source_commit, target_commit,
223 safe_unicode(source_repo.scm_instance().path))
224 safe_unicode(source_repo.scm_instance().path))
224
225
225 if source_commit.repository != target_commit.repository:
226 if source_commit.repository != target_commit.repository:
226 msg = _(
227 msg = _(
227 "Repositories unrelated. "
228 "Repositories unrelated. "
228 "Cannot compare commit %(commit1)s from repository %(repo1)s "
229 "Cannot compare commit %(commit1)s from repository %(repo1)s "
229 "with commit %(commit2)s from repository %(repo2)s.") % {
230 "with commit %(commit2)s from repository %(repo2)s.") % {
230 'commit1': h.show_id(source_commit),
231 'commit1': h.show_id(source_commit),
231 'repo1': source_repo.repo_name,
232 'repo1': source_repo.repo_name,
232 'commit2': h.show_id(target_commit),
233 'commit2': h.show_id(target_commit),
233 'repo2': target_repo.repo_name,
234 'repo2': target_repo.repo_name,
234 }
235 }
235 h.flash(msg, category='error')
236 h.flash(msg, category='error')
236 raise HTTPBadRequest()
237 raise HTTPBadRequest()
237
238
238 txtdiff = source_repo.scm_instance().get_diff(
239 txtdiff = source_repo.scm_instance().get_diff(
239 commit1=source_commit, commit2=target_commit,
240 commit1=source_commit, commit2=target_commit,
240 path1=source_path, path=target_path)
241 path1=source_path, path=target_path)
241 diff_processor = diffs.DiffProcessor(
242 diff_processor = diffs.DiffProcessor(
242 txtdiff, format='gitdiff', diff_limit=diff_limit,
243 txtdiff, format='newdiff', diff_limit=diff_limit,
243 file_limit=file_limit, show_full_diff=c.fulldiff)
244 file_limit=file_limit, show_full_diff=c.fulldiff)
244 _parsed = diff_processor.prepare()
245 _parsed = diff_processor.prepare()
245
246
246 c.limited_diff = False
247 def _node_getter(commit):
247 if isinstance(_parsed, diffs.LimitedDiffContainer):
248 """ Returns a function that returns a node for a commit or None """
248 c.limited_diff = True
249 def get_node(fname):
250 try:
251 return commit.get_node(fname)
252 except NodeDoesNotExistError:
253 return None
254 return get_node
249
255
250 c.files = []
256 c.diffset = codeblocks.DiffSet(
251 c.changes = {}
257 source_node_getter=_node_getter(source_commit),
252 c.lines_added = 0
258 target_node_getter=_node_getter(target_commit),
253 c.lines_deleted = 0
259 ).render_patchset(_parsed, source_ref, target_ref)
254 for f in _parsed:
255 st = f['stats']
256 if not st['binary']:
257 c.lines_added += st['added']
258 c.lines_deleted += st['deleted']
259 fid = h.FID('', f['filename'])
260 c.files.append([fid, f['operation'], f['filename'], f['stats'], f])
261 htmldiff = diff_processor.as_html(
262 enable_comments=False, parsed_lines=[f])
263 c.changes[fid] = [f['operation'], f['filename'], htmldiff, f]
264
260
265 c.preview_mode = merge
261 c.preview_mode = merge
266
262
267 return render('compare/compare_diff.html')
263 return render('compare/compare_diff.html')
@@ -1,214 +1,635 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2016 RhodeCode GmbH
3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import difflib
22 from itertools import groupby
23 from itertools import groupby
23
24
24 from pygments import lex
25 from pygments import lex
25 from pygments.formatters.html import _get_ttype_class as pygment_token_class
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
26 from rhodecode.lib.helpers import get_lexer_for_filenode, html_escape
27 from rhodecode.lib.helpers import (
28 get_lexer_for_filenode, get_lexer_safe, html_escape)
27 from rhodecode.lib.utils2 import AttributeDict
29 from rhodecode.lib.utils2 import AttributeDict
28 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.diff_match_patch import diff_match_patch
32 from rhodecode.lib.diffs import LimitedDiffContainer
29 from pygments.lexers import get_lexer_by_name
33 from pygments.lexers import get_lexer_by_name
30
34
31 plain_text_lexer = get_lexer_by_name(
35 plain_text_lexer = get_lexer_by_name(
32 'text', stripall=False, stripnl=False, ensurenl=False)
36 'text', stripall=False, stripnl=False, ensurenl=False)
33
37
34
38
35 log = logging.getLogger()
39 log = logging.getLogger()
36
40
37
41
38 def filenode_as_lines_tokens(filenode, lexer=None):
42 def filenode_as_lines_tokens(filenode, lexer=None):
39 lexer = lexer or get_lexer_for_filenode(filenode)
43 lexer = lexer or get_lexer_for_filenode(filenode)
40 log.debug('Generating file node pygment tokens for %s, %s', lexer, filenode)
44 log.debug('Generating file node pygment tokens for %s, %s', lexer, filenode)
41 tokens = tokenize_string(filenode.content, get_lexer_for_filenode(filenode))
45 tokens = tokenize_string(filenode.content, lexer)
42 lines = split_token_stream(tokens, split_string='\n')
46 lines = split_token_stream(tokens, split_string='\n')
43 rv = list(lines)
47 rv = list(lines)
44 return rv
48 return rv
45
49
46
50
47 def tokenize_string(content, lexer):
51 def tokenize_string(content, lexer):
48 """
52 """
49 Use pygments to tokenize some content based on a lexer
53 Use pygments to tokenize some content based on a lexer
50 ensuring all original new lines and whitespace is preserved
54 ensuring all original new lines and whitespace is preserved
51 """
55 """
52
56
53 lexer.stripall = False
57 lexer.stripall = False
54 lexer.stripnl = False
58 lexer.stripnl = False
55 lexer.ensurenl = False
59 lexer.ensurenl = False
56 for token_type, token_text in lex(content, lexer):
60 for token_type, token_text in lex(content, lexer):
57 yield pygment_token_class(token_type), token_text
61 yield pygment_token_class(token_type), token_text
58
62
59
63
60 def split_token_stream(tokens, split_string=u'\n'):
64 def split_token_stream(tokens, split_string=u'\n'):
61 """
65 """
62 Take a list of (TokenType, text) tuples and split them by a string
66 Take a list of (TokenType, text) tuples and split them by a string
63
67
64 >>> split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
68 >>> split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
65 [(TEXT, 'some'), (TEXT, 'text'),
69 [(TEXT, 'some'), (TEXT, 'text'),
66 (TEXT, 'more'), (TEXT, 'text')]
70 (TEXT, 'more'), (TEXT, 'text')]
67 """
71 """
68
72
69 buffer = []
73 buffer = []
70 for token_class, token_text in tokens:
74 for token_class, token_text in tokens:
71 parts = token_text.split(split_string)
75 parts = token_text.split(split_string)
72 for part in parts[:-1]:
76 for part in parts[:-1]:
73 buffer.append((token_class, part))
77 buffer.append((token_class, part))
74 yield buffer
78 yield buffer
75 buffer = []
79 buffer = []
76
80
77 buffer.append((token_class, parts[-1]))
81 buffer.append((token_class, parts[-1]))
78
82
79 if buffer:
83 if buffer:
80 yield buffer
84 yield buffer
81
85
82
86
83 def filenode_as_annotated_lines_tokens(filenode):
87 def filenode_as_annotated_lines_tokens(filenode):
84 """
88 """
85 Take a file node and return a list of annotations => lines, if no annotation
89 Take a file node and return a list of annotations => lines, if no annotation
86 is found, it will be None.
90 is found, it will be None.
87
91
88 eg:
92 eg:
89
93
90 [
94 [
91 (annotation1, [
95 (annotation1, [
92 (1, line1_tokens_list),
96 (1, line1_tokens_list),
93 (2, line2_tokens_list),
97 (2, line2_tokens_list),
94 ]),
98 ]),
95 (annotation2, [
99 (annotation2, [
96 (3, line1_tokens_list),
100 (3, line1_tokens_list),
97 ]),
101 ]),
98 (None, [
102 (None, [
99 (4, line1_tokens_list),
103 (4, line1_tokens_list),
100 ]),
104 ]),
101 (annotation1, [
105 (annotation1, [
102 (5, line1_tokens_list),
106 (5, line1_tokens_list),
103 (6, line2_tokens_list),
107 (6, line2_tokens_list),
104 ])
108 ])
105 ]
109 ]
106 """
110 """
107
111
108 commit_cache = {} # cache commit_getter lookups
112 commit_cache = {} # cache commit_getter lookups
109
113
110 def _get_annotation(commit_id, commit_getter):
114 def _get_annotation(commit_id, commit_getter):
111 if commit_id not in commit_cache:
115 if commit_id not in commit_cache:
112 commit_cache[commit_id] = commit_getter()
116 commit_cache[commit_id] = commit_getter()
113 return commit_cache[commit_id]
117 return commit_cache[commit_id]
114
118
115 annotation_lookup = {
119 annotation_lookup = {
116 line_no: _get_annotation(commit_id, commit_getter)
120 line_no: _get_annotation(commit_id, commit_getter)
117 for line_no, commit_id, commit_getter, line_content
121 for line_no, commit_id, commit_getter, line_content
118 in filenode.annotate
122 in filenode.annotate
119 }
123 }
120
124
121 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
125 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
122 for line_no, tokens
126 for line_no, tokens
123 in enumerate(filenode_as_lines_tokens(filenode), 1))
127 in enumerate(filenode_as_lines_tokens(filenode), 1))
124
128
125 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
129 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
126
130
127 for annotation, group in grouped_annotations_lines:
131 for annotation, group in grouped_annotations_lines:
128 yield (
132 yield (
129 annotation, [(line_no, tokens)
133 annotation, [(line_no, tokens)
130 for (_, line_no, tokens) in group]
134 for (_, line_no, tokens) in group]
131 )
135 )
132
136
133
137
134 def render_tokenstream(tokenstream):
138 def render_tokenstream(tokenstream):
135 result = []
139 result = []
136 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
140 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
137
141
138 if token_class:
142 if token_class:
139 result.append(u'<span class="%s">' % token_class)
143 result.append(u'<span class="%s">' % token_class)
140 else:
144 else:
141 result.append(u'<span>')
145 result.append(u'<span>')
142
146
143 for op_tag, token_text in token_ops_texts:
147 for op_tag, token_text in token_ops_texts:
144
148
145 if op_tag:
149 if op_tag:
146 result.append(u'<%s>' % op_tag)
150 result.append(u'<%s>' % op_tag)
147
151
148 escaped_text = html_escape(token_text)
152 escaped_text = html_escape(token_text)
149 escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
153
154 # TODO: dan: investigate showing hidden characters like space/nl/tab
155 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
156 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
157 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
150
158
151 result.append(escaped_text)
159 result.append(escaped_text)
152
160
153 if op_tag:
161 if op_tag:
154 result.append(u'</%s>' % op_tag)
162 result.append(u'</%s>' % op_tag)
155
163
156 result.append(u'</span>')
164 result.append(u'</span>')
157
165
158 html = ''.join(result)
166 html = ''.join(result)
159 return html
167 return html
160
168
161
169
162 def rollup_tokenstream(tokenstream):
170 def rollup_tokenstream(tokenstream):
163 """
171 """
164 Group a token stream of the format:
172 Group a token stream of the format:
165
173
166 ('class', 'op', 'text')
174 ('class', 'op', 'text')
167 or
175 or
168 ('class', 'text')
176 ('class', 'text')
169
177
170 into
178 into
171
179
172 [('class1',
180 [('class1',
173 [('op1', 'text'),
181 [('op1', 'text'),
174 ('op2', 'text')]),
182 ('op2', 'text')]),
175 ('class2',
183 ('class2',
176 [('op3', 'text')])]
184 [('op3', 'text')])]
177
185
178 This is used to get the minimal tags necessary when
186 This is used to get the minimal tags necessary when
179 rendering to html eg for a token stream ie.
187 rendering to html eg for a token stream ie.
180
188
181 <span class="A"><ins>he</ins>llo</span>
189 <span class="A"><ins>he</ins>llo</span>
182 vs
190 vs
183 <span class="A"><ins>he</ins></span><span class="A">llo</span>
191 <span class="A"><ins>he</ins></span><span class="A">llo</span>
184
192
185 If a 2 tuple is passed in, the output op will be an empty string.
193 If a 2 tuple is passed in, the output op will be an empty string.
186
194
187 eg:
195 eg:
188
196
189 >>> rollup_tokenstream([('classA', '', 'h'),
197 >>> rollup_tokenstream([('classA', '', 'h'),
190 ('classA', 'del', 'ell'),
198 ('classA', 'del', 'ell'),
191 ('classA', '', 'o'),
199 ('classA', '', 'o'),
192 ('classB', '', ' '),
200 ('classB', '', ' '),
193 ('classA', '', 'the'),
201 ('classA', '', 'the'),
194 ('classA', '', 're'),
202 ('classA', '', 're'),
195 ])
203 ])
196
204
197 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
205 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
198 ('classB', [('', ' ')],
206 ('classB', [('', ' ')],
199 ('classA', [('', 'there')]]
207 ('classA', [('', 'there')]]
200
208
201 """
209 """
202 if tokenstream and len(tokenstream[0]) == 2:
210 if tokenstream and len(tokenstream[0]) == 2:
203 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
211 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
204
212
205 result = []
213 result = []
206 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
214 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
207 ops = []
215 ops = []
208 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
216 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
209 text_buffer = []
217 text_buffer = []
210 for t_class, t_op, t_text in token_text_list:
218 for t_class, t_op, t_text in token_text_list:
211 text_buffer.append(t_text)
219 text_buffer.append(t_text)
212 ops.append((token_op, ''.join(text_buffer)))
220 ops.append((token_op, ''.join(text_buffer)))
213 result.append((token_class, ops))
221 result.append((token_class, ops))
214 return result
222 return result
223
224
225 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
226 """
227 Converts a list of (token_class, token_text) tuples to a list of
228 (token_class, token_op, token_text) tuples where token_op is one of
229 ('ins', 'del', '')
230
231 :param old_tokens: list of (token_class, token_text) tuples of old line
232 :param new_tokens: list of (token_class, token_text) tuples of new line
233 :param use_diff_match_patch: boolean, will use google's diff match patch
234 library which has options to 'smooth' out the character by character
235 differences making nicer ins/del blocks
236 """
237
238 old_tokens_result = []
239 new_tokens_result = []
240
241 similarity = difflib.SequenceMatcher(None,
242 ''.join(token_text for token_class, token_text in old_tokens),
243 ''.join(token_text for token_class, token_text in new_tokens)
244 ).ratio()
245
246 if similarity < 0.6: # return, the blocks are too different
247 for token_class, token_text in old_tokens:
248 old_tokens_result.append((token_class, '', token_text))
249 for token_class, token_text in new_tokens:
250 new_tokens_result.append((token_class, '', token_text))
251 return old_tokens_result, new_tokens_result, similarity
252
253 token_sequence_matcher = difflib.SequenceMatcher(None,
254 [x[1] for x in old_tokens],
255 [x[1] for x in new_tokens])
256
257 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
258 # check the differences by token block types first to give a more
259 # nicer "block" level replacement vs character diffs
260
261 if tag == 'equal':
262 for token_class, token_text in old_tokens[o1:o2]:
263 old_tokens_result.append((token_class, '', token_text))
264 for token_class, token_text in new_tokens[n1:n2]:
265 new_tokens_result.append((token_class, '', token_text))
266 elif tag == 'delete':
267 for token_class, token_text in old_tokens[o1:o2]:
268 old_tokens_result.append((token_class, 'del', token_text))
269 elif tag == 'insert':
270 for token_class, token_text in new_tokens[n1:n2]:
271 new_tokens_result.append((token_class, 'ins', token_text))
272 elif tag == 'replace':
273 # if same type token blocks must be replaced, do a diff on the
274 # characters in the token blocks to show individual changes
275
276 old_char_tokens = []
277 new_char_tokens = []
278 for token_class, token_text in old_tokens[o1:o2]:
279 for char in token_text:
280 old_char_tokens.append((token_class, char))
281
282 for token_class, token_text in new_tokens[n1:n2]:
283 for char in token_text:
284 new_char_tokens.append((token_class, char))
285
286 old_string = ''.join([token_text for
287 token_class, token_text in old_char_tokens])
288 new_string = ''.join([token_text for
289 token_class, token_text in new_char_tokens])
290
291 char_sequence = difflib.SequenceMatcher(
292 None, old_string, new_string)
293 copcodes = char_sequence.get_opcodes()
294 obuffer, nbuffer = [], []
295
296 if use_diff_match_patch:
297 dmp = diff_match_patch()
298 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
299 reps = dmp.diff_main(old_string, new_string)
300 dmp.diff_cleanupEfficiency(reps)
301
302 a, b = 0, 0
303 for op, rep in reps:
304 l = len(rep)
305 if op == 0:
306 for i, c in enumerate(rep):
307 obuffer.append((old_char_tokens[a+i][0], '', c))
308 nbuffer.append((new_char_tokens[b+i][0], '', c))
309 a += l
310 b += l
311 elif op == -1:
312 for i, c in enumerate(rep):
313 obuffer.append((old_char_tokens[a+i][0], 'del', c))
314 a += l
315 elif op == 1:
316 for i, c in enumerate(rep):
317 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
318 b += l
319 else:
320 for ctag, co1, co2, cn1, cn2 in copcodes:
321 if ctag == 'equal':
322 for token_class, token_text in old_char_tokens[co1:co2]:
323 obuffer.append((token_class, '', token_text))
324 for token_class, token_text in new_char_tokens[cn1:cn2]:
325 nbuffer.append((token_class, '', token_text))
326 elif ctag == 'delete':
327 for token_class, token_text in old_char_tokens[co1:co2]:
328 obuffer.append((token_class, 'del', token_text))
329 elif ctag == 'insert':
330 for token_class, token_text in new_char_tokens[cn1:cn2]:
331 nbuffer.append((token_class, 'ins', token_text))
332 elif ctag == 'replace':
333 for token_class, token_text in old_char_tokens[co1:co2]:
334 obuffer.append((token_class, 'del', token_text))
335 for token_class, token_text in new_char_tokens[cn1:cn2]:
336 nbuffer.append((token_class, 'ins', token_text))
337
338 old_tokens_result.extend(obuffer)
339 new_tokens_result.extend(nbuffer)
340
341 return old_tokens_result, new_tokens_result, similarity
342
343
344 class DiffSet(object):
345 """
346 An object for parsing the diff result from diffs.DiffProcessor and
347 adding highlighting, side by side/unified renderings and line diffs
348 """
349
350 HL_REAL = 'REAL' # highlights using original file, slow
351 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
352 # in the case of multiline code
353 HL_NONE = 'NONE' # no highlighting, fastest
354
355 def __init__(self, highlight_mode=HL_REAL,
356 source_node_getter=lambda filename: None,
357 target_node_getter=lambda filename: None,
358 source_nodes=None, target_nodes=None,
359 max_file_size_limit=150 * 1024, # files over this size will
360 # use fast highlighting
361 ):
362
363 self.highlight_mode = highlight_mode
364 self.highlighted_filenodes = {}
365 self.source_node_getter = source_node_getter
366 self.target_node_getter = target_node_getter
367 self.source_nodes = source_nodes or {}
368 self.target_nodes = target_nodes or {}
369
370
371 self.max_file_size_limit = max_file_size_limit
372
373 def render_patchset(self, patchset, source_ref=None, target_ref=None):
374 diffset = AttributeDict(dict(
375 lines_added=0,
376 lines_deleted=0,
377 changed_files=0,
378 files=[],
379 limited_diff=isinstance(patchset, LimitedDiffContainer),
380 source_ref=source_ref,
381 target_ref=target_ref,
382 ))
383 for patch in patchset:
384 filediff = self.render_patch(patch)
385 filediff.diffset = diffset
386 diffset.files.append(filediff)
387 diffset.changed_files += 1
388 if not patch['stats']['binary']:
389 diffset.lines_added += patch['stats']['added']
390 diffset.lines_deleted += patch['stats']['deleted']
391
392 return diffset
393
394 _lexer_cache = {}
395 def _get_lexer_for_filename(self, filename):
396 # cached because we might need to call it twice for source/target
397 if filename not in self._lexer_cache:
398 self._lexer_cache[filename] = get_lexer_safe(filepath=filename)
399 return self._lexer_cache[filename]
400
401 def render_patch(self, patch):
402 log.debug('rendering diff for %r' % patch['filename'])
403
404 source_filename = patch['original_filename']
405 target_filename = patch['filename']
406
407 source_lexer = plain_text_lexer
408 target_lexer = plain_text_lexer
409
410 if not patch['stats']['binary']:
411 if self.highlight_mode == self.HL_REAL:
412 if (source_filename and patch['operation'] in ('D', 'M')
413 and source_filename not in self.source_nodes):
414 self.source_nodes[source_filename] = (
415 self.source_node_getter(source_filename))
416
417 if (target_filename and patch['operation'] in ('A', 'M')
418 and target_filename not in self.target_nodes):
419 self.target_nodes[target_filename] = (
420 self.target_node_getter(target_filename))
421
422 elif self.highlight_mode == self.HL_FAST:
423 source_lexer = self._get_lexer_for_filename(source_filename)
424 target_lexer = self._get_lexer_for_filename(target_filename)
425
426 source_file = self.source_nodes.get(source_filename, source_filename)
427 target_file = self.target_nodes.get(target_filename, target_filename)
428
429 source_filenode, target_filenode = None, None
430
431 # TODO: dan: FileNode.lexer works on the content of the file - which
432 # can be slow - issue #4289 explains a lexer clean up - which once
433 # done can allow caching a lexer for a filenode to avoid the file lookup
434 if isinstance(source_file, FileNode):
435 source_filenode = source_file
436 source_lexer = source_file.lexer
437 if isinstance(target_file, FileNode):
438 target_filenode = target_file
439 target_lexer = target_file.lexer
440
441 source_file_path, target_file_path = None, None
442
443 if source_filename != '/dev/null':
444 source_file_path = source_filename
445 if target_filename != '/dev/null':
446 target_file_path = target_filename
447
448 source_file_type = source_lexer.name
449 target_file_type = target_lexer.name
450
451 op_hunks = patch['chunks'][0]
452 hunks = patch['chunks'][1:]
453
454 filediff = AttributeDict({
455 'source_file_path': source_file_path,
456 'target_file_path': target_file_path,
457 'source_filenode': source_filenode,
458 'target_filenode': target_filenode,
459 'hunks': [],
460 'source_file_type': target_file_type,
461 'target_file_type': source_file_type,
462 'patch': patch,
463 'source_mode': patch['stats']['old_mode'],
464 'target_mode': patch['stats']['new_mode'],
465 'limited_diff': isinstance(patch, LimitedDiffContainer),
466 'diffset': self,
467 })
468
469 for hunk in hunks:
470 hunkbit = self.parse_hunk(hunk, source_file, target_file)
471 hunkbit.filediff = filediff
472 filediff.hunks.append(hunkbit)
473 return filediff
474
475 def parse_hunk(self, hunk, source_file, target_file):
476 result = AttributeDict(dict(
477 source_start=hunk['source_start'],
478 source_length=hunk['source_length'],
479 target_start=hunk['target_start'],
480 target_length=hunk['target_length'],
481 section_header=hunk['section_header'],
482 lines=[],
483 ))
484 before, after = [], []
485
486 for line in hunk['lines']:
487 if line['action'] == 'unmod':
488 result.lines.extend(
489 self.parse_lines(before, after, source_file, target_file))
490 after.append(line)
491 before.append(line)
492 elif line['action'] == 'add':
493 after.append(line)
494 elif line['action'] == 'del':
495 before.append(line)
496 elif line['action'] == 'context-old':
497 before.append(line)
498 elif line['action'] == 'context-new':
499 after.append(line)
500
501 result.lines.extend(
502 self.parse_lines(before, after, source_file, target_file))
503 result.unified = self.as_unified(result.lines)
504 result.sideside = result.lines
505 return result
506
507 def parse_lines(self, before_lines, after_lines, source_file, target_file):
508 # TODO: dan: investigate doing the diff comparison and fast highlighting
509 # on the entire before and after buffered block lines rather than by
510 # line, this means we can get better 'fast' highlighting if the context
511 # allows it - eg.
512 # line 4: """
513 # line 5: this gets highlighted as a string
514 # line 6: """
515
516 lines = []
517 while before_lines or after_lines:
518 before, after = None, None
519 before_tokens, after_tokens = None, None
520
521 if before_lines:
522 before = before_lines.pop(0)
523 if after_lines:
524 after = after_lines.pop(0)
525
526 original = AttributeDict()
527 modified = AttributeDict()
528
529 if before:
530 before_tokens = self.get_line_tokens(
531 line_text=before['line'], line_number=before['old_lineno'],
532 file=source_file)
533 original.lineno = before['old_lineno']
534 original.content = before['line']
535 original.action = self.action_to_op(before['action'])
536
537 if after:
538 after_tokens = self.get_line_tokens(
539 line_text=after['line'], line_number=after['new_lineno'],
540 file=target_file)
541 modified.lineno = after['new_lineno']
542 modified.content = after['line']
543 modified.action = self.action_to_op(after['action'])
544
545
546 # diff the lines
547 if before_tokens and after_tokens:
548 o_tokens, m_tokens, similarity = tokens_diff(before_tokens, after_tokens)
549 original.content = render_tokenstream(o_tokens)
550 modified.content = render_tokenstream(m_tokens)
551 elif before_tokens:
552 original.content = render_tokenstream(
553 [(x[0], '', x[1]) for x in before_tokens])
554 elif after_tokens:
555 modified.content = render_tokenstream(
556 [(x[0], '', x[1]) for x in after_tokens])
557
558 lines.append(AttributeDict({
559 'original': original,
560 'modified': modified,
561 }))
562
563 return lines
564
565 def get_line_tokens(self, line_text, line_number, file=None):
566 filenode = None
567 filename = None
568
569 if isinstance(file, basestring):
570 filename = file
571 elif isinstance(file, FileNode):
572 filenode = file
573 filename = file.unicode_path
574
575 if self.highlight_mode == self.HL_REAL and filenode:
576 if line_number and file.size < self.max_file_size_limit:
577 return self.get_tokenized_filenode_line(file, line_number)
578
579 if self.highlight_mode in (self.HL_REAL, self.HL_FAST) and filename:
580 lexer = self._get_lexer_for_filename(filename)
581 return list(tokenize_string(line_text, lexer))
582
583 return list(tokenize_string(line_text, plain_text_lexer))
584
585 def get_tokenized_filenode_line(self, filenode, line_number):
586
587 if filenode not in self.highlighted_filenodes:
588 tokenized_lines = filenode_as_lines_tokens(filenode, filenode.lexer)
589 self.highlighted_filenodes[filenode] = tokenized_lines
590 return self.highlighted_filenodes[filenode][line_number - 1]
591
592 def action_to_op(self, action):
593 return {
594 'add': '+',
595 'del': '-',
596 'unmod': ' ',
597 'context-old': ' ',
598 'context-new': ' ',
599 }.get(action, action)
600
601 def as_unified(self, lines):
602 """ Return a generator that yields the lines of a diff in unified order """
603 def generator():
604 buf = []
605 for line in lines:
606
607 if buf and not line.original or line.original.action == ' ':
608 for b in buf:
609 yield b
610 buf = []
611
612 if line.original:
613 if line.original.action == ' ':
614 yield (line.original.lineno, line.modified.lineno,
615 line.original.action, line.original.content)
616 continue
617
618 if line.original.action == '-':
619 yield (line.original.lineno, None,
620 line.original.action, line.original.content)
621
622 if line.modified.action == '+':
623 buf.append((
624 None, line.modified.lineno,
625 line.modified.action, line.modified.content))
626 continue
627
628 if line.modified:
629 yield (None, line.modified.lineno,
630 line.modified.action, line.modified.content)
631
632 for b in buf:
633 yield b
634
635 return generator()
@@ -1,886 +1,1161 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2016 RhodeCode GmbH
3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of diffing helpers, previously part of vcs
23 Set of diffing helpers, previously part of vcs
24 """
24 """
25
25
26 import collections
26 import collections
27 import re
27 import re
28 import difflib
28 import difflib
29 import logging
29 import logging
30
30
31 from itertools import tee, imap
31 from itertools import tee, imap
32
32
33 from pylons.i18n.translation import _
33 from pylons.i18n.translation import _
34
34
35 from rhodecode.lib.vcs.exceptions import VCSError
35 from rhodecode.lib.vcs.exceptions import VCSError
36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib.helpers import escape
38 from rhodecode.lib.helpers import escape
39 from rhodecode.lib.utils2 import safe_unicode
39 from rhodecode.lib.utils2 import safe_unicode
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43 # define max context, a file with more than this numbers of lines is unusable
43 # define max context, a file with more than this numbers of lines is unusable
44 # in browser anyway
44 # in browser anyway
45 MAX_CONTEXT = 1024 * 1014
45 MAX_CONTEXT = 1024 * 1014
46
46
47
47
48 class OPS(object):
48 class OPS(object):
49 ADD = 'A'
49 ADD = 'A'
50 MOD = 'M'
50 MOD = 'M'
51 DEL = 'D'
51 DEL = 'D'
52
52
53
53
54 def wrap_to_table(str_):
54 def wrap_to_table(str_):
55 return '''<table class="code-difftable">
55 return '''<table class="code-difftable">
56 <tr class="line no-comment">
56 <tr class="line no-comment">
57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
58 <td></td>
58 <td></td>
59 <td class="lineno new"></td>
59 <td class="lineno new"></td>
60 <td class="code no-comment"><pre>%s</pre></td>
60 <td class="code no-comment"><pre>%s</pre></td>
61 </tr>
61 </tr>
62 </table>''' % (_('Click to comment'), str_)
62 </table>''' % (_('Click to comment'), str_)
63
63
64
64
65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
66 show_full_diff=False, ignore_whitespace=True, line_context=3,
66 show_full_diff=False, ignore_whitespace=True, line_context=3,
67 enable_comments=False):
67 enable_comments=False):
68 """
68 """
69 returns a wrapped diff into a table, checks for cut_off_limit for file and
69 returns a wrapped diff into a table, checks for cut_off_limit for file and
70 whole diff and presents proper message
70 whole diff and presents proper message
71 """
71 """
72
72
73 if filenode_old is None:
73 if filenode_old is None:
74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
75
75
76 if filenode_old.is_binary or filenode_new.is_binary:
76 if filenode_old.is_binary or filenode_new.is_binary:
77 diff = wrap_to_table(_('Binary file'))
77 diff = wrap_to_table(_('Binary file'))
78 stats = None
78 stats = None
79 size = 0
79 size = 0
80 data = None
80 data = None
81
81
82 elif diff_limit != -1 and (diff_limit is None or
82 elif diff_limit != -1 and (diff_limit is None or
83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
84
84
85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
86 ignore_whitespace=ignore_whitespace,
86 ignore_whitespace=ignore_whitespace,
87 context=line_context)
87 context=line_context)
88 diff_processor = DiffProcessor(
88 diff_processor = DiffProcessor(
89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
90 file_limit=file_limit, show_full_diff=show_full_diff)
90 file_limit=file_limit, show_full_diff=show_full_diff)
91 _parsed = diff_processor.prepare()
91 _parsed = diff_processor.prepare()
92
92
93 diff = diff_processor.as_html(enable_comments=enable_comments)
93 diff = diff_processor.as_html(enable_comments=enable_comments)
94 stats = _parsed[0]['stats'] if _parsed else None
94 stats = _parsed[0]['stats'] if _parsed else None
95 size = len(diff or '')
95 size = len(diff or '')
96 data = _parsed[0] if _parsed else None
96 data = _parsed[0] if _parsed else None
97 else:
97 else:
98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
99 'diff menu to display this diff'))
99 'diff menu to display this diff'))
100 stats = None
100 stats = None
101 size = 0
101 size = 0
102 data = None
102 data = None
103 if not diff:
103 if not diff:
104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
105 [filenode_new, filenode_old])
105 [filenode_new, filenode_old])
106 if submodules:
106 if submodules:
107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
108 else:
108 else:
109 diff = wrap_to_table(_('No changes detected'))
109 diff = wrap_to_table(_('No changes detected'))
110
110
111 cs1 = filenode_old.commit.raw_id
111 cs1 = filenode_old.commit.raw_id
112 cs2 = filenode_new.commit.raw_id
112 cs2 = filenode_new.commit.raw_id
113
113
114 return size, cs1, cs2, diff, stats, data
114 return size, cs1, cs2, diff, stats, data
115
115
116
116
117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
118 """
118 """
119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
120
120
121 :param ignore_whitespace: ignore whitespaces in diff
121 :param ignore_whitespace: ignore whitespaces in diff
122 """
122 """
123 # make sure we pass in default context
123 # make sure we pass in default context
124 context = context or 3
124 context = context or 3
125 # protect against IntOverflow when passing HUGE context
125 # protect against IntOverflow when passing HUGE context
126 if context > MAX_CONTEXT:
126 if context > MAX_CONTEXT:
127 context = MAX_CONTEXT
127 context = MAX_CONTEXT
128
128
129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
130 [filenode_new, filenode_old])
130 [filenode_new, filenode_old])
131 if submodules:
131 if submodules:
132 return ''
132 return ''
133
133
134 for filenode in (filenode_old, filenode_new):
134 for filenode in (filenode_old, filenode_new):
135 if not isinstance(filenode, FileNode):
135 if not isinstance(filenode, FileNode):
136 raise VCSError(
136 raise VCSError(
137 "Given object should be FileNode object, not %s"
137 "Given object should be FileNode object, not %s"
138 % filenode.__class__)
138 % filenode.__class__)
139
139
140 repo = filenode_new.commit.repository
140 repo = filenode_new.commit.repository
141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
142 new_commit = filenode_new.commit
142 new_commit = filenode_new.commit
143
143
144 vcs_gitdiff = repo.get_diff(
144 vcs_gitdiff = repo.get_diff(
145 old_commit, new_commit, filenode_new.path,
145 old_commit, new_commit, filenode_new.path,
146 ignore_whitespace, context, path1=filenode_old.path)
146 ignore_whitespace, context, path1=filenode_old.path)
147 return vcs_gitdiff
147 return vcs_gitdiff
148
148
149 NEW_FILENODE = 1
149 NEW_FILENODE = 1
150 DEL_FILENODE = 2
150 DEL_FILENODE = 2
151 MOD_FILENODE = 3
151 MOD_FILENODE = 3
152 RENAMED_FILENODE = 4
152 RENAMED_FILENODE = 4
153 COPIED_FILENODE = 5
153 COPIED_FILENODE = 5
154 CHMOD_FILENODE = 6
154 CHMOD_FILENODE = 6
155 BIN_FILENODE = 7
155 BIN_FILENODE = 7
156
156
157
157
158 class LimitedDiffContainer(object):
158 class LimitedDiffContainer(object):
159
159
160 def __init__(self, diff_limit, cur_diff_size, diff):
160 def __init__(self, diff_limit, cur_diff_size, diff):
161 self.diff = diff
161 self.diff = diff
162 self.diff_limit = diff_limit
162 self.diff_limit = diff_limit
163 self.cur_diff_size = cur_diff_size
163 self.cur_diff_size = cur_diff_size
164
164
165 def __getitem__(self, key):
165 def __getitem__(self, key):
166 return self.diff.__getitem__(key)
166 return self.diff.__getitem__(key)
167
167
168 def __iter__(self):
168 def __iter__(self):
169 for l in self.diff:
169 for l in self.diff:
170 yield l
170 yield l
171
171
172
172
173 class Action(object):
173 class Action(object):
174 """
174 """
175 Contains constants for the action value of the lines in a parsed diff.
175 Contains constants for the action value of the lines in a parsed diff.
176 """
176 """
177
177
178 ADD = 'add'
178 ADD = 'add'
179 DELETE = 'del'
179 DELETE = 'del'
180 UNMODIFIED = 'unmod'
180 UNMODIFIED = 'unmod'
181
181
182 CONTEXT = 'context'
182 CONTEXT = 'context'
183 CONTEXT_OLD = 'context-old'
184 CONTEXT_NEW = 'context-new'
183
185
184
186
185 class DiffProcessor(object):
187 class DiffProcessor(object):
186 """
188 """
187 Give it a unified or git diff and it returns a list of the files that were
189 Give it a unified or git diff and it returns a list of the files that were
188 mentioned in the diff together with a dict of meta information that
190 mentioned in the diff together with a dict of meta information that
189 can be used to render it in a HTML template.
191 can be used to render it in a HTML template.
190
192
191 .. note:: Unicode handling
193 .. note:: Unicode handling
192
194
193 The original diffs are a byte sequence and can contain filenames
195 The original diffs are a byte sequence and can contain filenames
194 in mixed encodings. This class generally returns `unicode` objects
196 in mixed encodings. This class generally returns `unicode` objects
195 since the result is intended for presentation to the user.
197 since the result is intended for presentation to the user.
196
198
197 """
199 """
198 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
200 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
199 _newline_marker = re.compile(r'^\\ No newline at end of file')
201 _newline_marker = re.compile(r'^\\ No newline at end of file')
200
202
201 # used for inline highlighter word split
203 # used for inline highlighter word split
202 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
204 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
203
205
204 def __init__(self, diff, format='gitdiff', diff_limit=None,
206 def __init__(self, diff, format='gitdiff', diff_limit=None,
205 file_limit=None, show_full_diff=True):
207 file_limit=None, show_full_diff=True):
206 """
208 """
207 :param diff: A `Diff` object representing a diff from a vcs backend
209 :param diff: A `Diff` object representing a diff from a vcs backend
208 :param format: format of diff passed, `udiff` or `gitdiff`
210 :param format: format of diff passed, `udiff` or `gitdiff`
209 :param diff_limit: define the size of diff that is considered "big"
211 :param diff_limit: define the size of diff that is considered "big"
210 based on that parameter cut off will be triggered, set to None
212 based on that parameter cut off will be triggered, set to None
211 to show full diff
213 to show full diff
212 """
214 """
213 self._diff = diff
215 self._diff = diff
214 self._format = format
216 self._format = format
215 self.adds = 0
217 self.adds = 0
216 self.removes = 0
218 self.removes = 0
217 # calculate diff size
219 # calculate diff size
218 self.diff_limit = diff_limit
220 self.diff_limit = diff_limit
219 self.file_limit = file_limit
221 self.file_limit = file_limit
220 self.show_full_diff = show_full_diff
222 self.show_full_diff = show_full_diff
221 self.cur_diff_size = 0
223 self.cur_diff_size = 0
222 self.parsed = False
224 self.parsed = False
223 self.parsed_diff = []
225 self.parsed_diff = []
224
226
225 if format == 'gitdiff':
227 if format == 'gitdiff':
226 self.differ = self._highlight_line_difflib
228 self.differ = self._highlight_line_difflib
227 self._parser = self._parse_gitdiff
229 self._parser = self._parse_gitdiff
228 else:
230 else:
229 self.differ = self._highlight_line_udiff
231 self.differ = self._highlight_line_udiff
230 self._parser = self._parse_udiff
232 self._parser = self._new_parse_gitdiff
231
233
232 def _copy_iterator(self):
234 def _copy_iterator(self):
233 """
235 """
234 make a fresh copy of generator, we should not iterate thru
236 make a fresh copy of generator, we should not iterate thru
235 an original as it's needed for repeating operations on
237 an original as it's needed for repeating operations on
236 this instance of DiffProcessor
238 this instance of DiffProcessor
237 """
239 """
238 self.__udiff, iterator_copy = tee(self.__udiff)
240 self.__udiff, iterator_copy = tee(self.__udiff)
239 return iterator_copy
241 return iterator_copy
240
242
241 def _escaper(self, string):
243 def _escaper(self, string):
242 """
244 """
243 Escaper for diff escapes special chars and checks the diff limit
245 Escaper for diff escapes special chars and checks the diff limit
244
246
245 :param string:
247 :param string:
246 """
248 """
247
249
248 self.cur_diff_size += len(string)
250 self.cur_diff_size += len(string)
249
251
250 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
252 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
251 raise DiffLimitExceeded('Diff Limit Exceeded')
253 raise DiffLimitExceeded('Diff Limit Exceeded')
252
254
253 return safe_unicode(string)\
255 return safe_unicode(string)\
254 .replace('&', '&amp;')\
256 .replace('&', '&amp;')\
255 .replace('<', '&lt;')\
257 .replace('<', '&lt;')\
256 .replace('>', '&gt;')
258 .replace('>', '&gt;')
257
259
258 def _line_counter(self, l):
260 def _line_counter(self, l):
259 """
261 """
260 Checks each line and bumps total adds/removes for this diff
262 Checks each line and bumps total adds/removes for this diff
261
263
262 :param l:
264 :param l:
263 """
265 """
264 if l.startswith('+') and not l.startswith('+++'):
266 if l.startswith('+') and not l.startswith('+++'):
265 self.adds += 1
267 self.adds += 1
266 elif l.startswith('-') and not l.startswith('---'):
268 elif l.startswith('-') and not l.startswith('---'):
267 self.removes += 1
269 self.removes += 1
268 return safe_unicode(l)
270 return safe_unicode(l)
269
271
270 def _highlight_line_difflib(self, line, next_):
272 def _highlight_line_difflib(self, line, next_):
271 """
273 """
272 Highlight inline changes in both lines.
274 Highlight inline changes in both lines.
273 """
275 """
274
276
275 if line['action'] == Action.DELETE:
277 if line['action'] == Action.DELETE:
276 old, new = line, next_
278 old, new = line, next_
277 else:
279 else:
278 old, new = next_, line
280 old, new = next_, line
279
281
280 oldwords = self._token_re.split(old['line'])
282 oldwords = self._token_re.split(old['line'])
281 newwords = self._token_re.split(new['line'])
283 newwords = self._token_re.split(new['line'])
282 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
284 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
283
285
284 oldfragments, newfragments = [], []
286 oldfragments, newfragments = [], []
285 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
287 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
286 oldfrag = ''.join(oldwords[i1:i2])
288 oldfrag = ''.join(oldwords[i1:i2])
287 newfrag = ''.join(newwords[j1:j2])
289 newfrag = ''.join(newwords[j1:j2])
288 if tag != 'equal':
290 if tag != 'equal':
289 if oldfrag:
291 if oldfrag:
290 oldfrag = '<del>%s</del>' % oldfrag
292 oldfrag = '<del>%s</del>' % oldfrag
291 if newfrag:
293 if newfrag:
292 newfrag = '<ins>%s</ins>' % newfrag
294 newfrag = '<ins>%s</ins>' % newfrag
293 oldfragments.append(oldfrag)
295 oldfragments.append(oldfrag)
294 newfragments.append(newfrag)
296 newfragments.append(newfrag)
295
297
296 old['line'] = "".join(oldfragments)
298 old['line'] = "".join(oldfragments)
297 new['line'] = "".join(newfragments)
299 new['line'] = "".join(newfragments)
298
300
299 def _highlight_line_udiff(self, line, next_):
301 def _highlight_line_udiff(self, line, next_):
300 """
302 """
301 Highlight inline changes in both lines.
303 Highlight inline changes in both lines.
302 """
304 """
303 start = 0
305 start = 0
304 limit = min(len(line['line']), len(next_['line']))
306 limit = min(len(line['line']), len(next_['line']))
305 while start < limit and line['line'][start] == next_['line'][start]:
307 while start < limit and line['line'][start] == next_['line'][start]:
306 start += 1
308 start += 1
307 end = -1
309 end = -1
308 limit -= start
310 limit -= start
309 while -end <= limit and line['line'][end] == next_['line'][end]:
311 while -end <= limit and line['line'][end] == next_['line'][end]:
310 end -= 1
312 end -= 1
311 end += 1
313 end += 1
312 if start or end:
314 if start or end:
313 def do(l):
315 def do(l):
314 last = end + len(l['line'])
316 last = end + len(l['line'])
315 if l['action'] == Action.ADD:
317 if l['action'] == Action.ADD:
316 tag = 'ins'
318 tag = 'ins'
317 else:
319 else:
318 tag = 'del'
320 tag = 'del'
319 l['line'] = '%s<%s>%s</%s>%s' % (
321 l['line'] = '%s<%s>%s</%s>%s' % (
320 l['line'][:start],
322 l['line'][:start],
321 tag,
323 tag,
322 l['line'][start:last],
324 l['line'][start:last],
323 tag,
325 tag,
324 l['line'][last:]
326 l['line'][last:]
325 )
327 )
326 do(line)
328 do(line)
327 do(next_)
329 do(next_)
328
330
329 def _clean_line(self, line, command):
331 def _clean_line(self, line, command):
330 if command in ['+', '-', ' ']:
332 if command in ['+', '-', ' ']:
331 # only modify the line if it's actually a diff thing
333 # only modify the line if it's actually a diff thing
332 line = line[1:]
334 line = line[1:]
333 return line
335 return line
334
336
335 def _parse_gitdiff(self, inline_diff=True):
337 def _parse_gitdiff(self, inline_diff=True):
336 _files = []
338 _files = []
337 diff_container = lambda arg: arg
339 diff_container = lambda arg: arg
338
340
339 for chunk in self._diff.chunks():
341 for chunk in self._diff.chunks():
340 head = chunk.header
342 head = chunk.header
341
343
342 diff = imap(self._escaper, chunk.diff.splitlines(1))
344 diff = imap(self._escaper, chunk.diff.splitlines(1))
343 raw_diff = chunk.raw
345 raw_diff = chunk.raw
344 limited_diff = False
346 limited_diff = False
345 exceeds_limit = False
347 exceeds_limit = False
346
348
347 op = None
349 op = None
348 stats = {
350 stats = {
349 'added': 0,
351 'added': 0,
350 'deleted': 0,
352 'deleted': 0,
351 'binary': False,
353 'binary': False,
352 'ops': {},
354 'ops': {},
353 }
355 }
354
356
355 if head['deleted_file_mode']:
357 if head['deleted_file_mode']:
356 op = OPS.DEL
358 op = OPS.DEL
357 stats['binary'] = True
359 stats['binary'] = True
358 stats['ops'][DEL_FILENODE] = 'deleted file'
360 stats['ops'][DEL_FILENODE] = 'deleted file'
359
361
360 elif head['new_file_mode']:
362 elif head['new_file_mode']:
361 op = OPS.ADD
363 op = OPS.ADD
362 stats['binary'] = True
364 stats['binary'] = True
363 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
365 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
364 else: # modify operation, can be copy, rename or chmod
366 else: # modify operation, can be copy, rename or chmod
365
367
366 # CHMOD
368 # CHMOD
367 if head['new_mode'] and head['old_mode']:
369 if head['new_mode'] and head['old_mode']:
368 op = OPS.MOD
370 op = OPS.MOD
369 stats['binary'] = True
371 stats['binary'] = True
370 stats['ops'][CHMOD_FILENODE] = (
372 stats['ops'][CHMOD_FILENODE] = (
371 'modified file chmod %s => %s' % (
373 'modified file chmod %s => %s' % (
372 head['old_mode'], head['new_mode']))
374 head['old_mode'], head['new_mode']))
373 # RENAME
375 # RENAME
374 if head['rename_from'] != head['rename_to']:
376 if head['rename_from'] != head['rename_to']:
375 op = OPS.MOD
377 op = OPS.MOD
376 stats['binary'] = True
378 stats['binary'] = True
377 stats['ops'][RENAMED_FILENODE] = (
379 stats['ops'][RENAMED_FILENODE] = (
378 'file renamed from %s to %s' % (
380 'file renamed from %s to %s' % (
379 head['rename_from'], head['rename_to']))
381 head['rename_from'], head['rename_to']))
380 # COPY
382 # COPY
381 if head.get('copy_from') and head.get('copy_to'):
383 if head.get('copy_from') and head.get('copy_to'):
382 op = OPS.MOD
384 op = OPS.MOD
383 stats['binary'] = True
385 stats['binary'] = True
384 stats['ops'][COPIED_FILENODE] = (
386 stats['ops'][COPIED_FILENODE] = (
385 'file copied from %s to %s' % (
387 'file copied from %s to %s' % (
386 head['copy_from'], head['copy_to']))
388 head['copy_from'], head['copy_to']))
387
389
388 # If our new parsed headers didn't match anything fallback to
390 # If our new parsed headers didn't match anything fallback to
389 # old style detection
391 # old style detection
390 if op is None:
392 if op is None:
391 if not head['a_file'] and head['b_file']:
393 if not head['a_file'] and head['b_file']:
392 op = OPS.ADD
394 op = OPS.ADD
393 stats['binary'] = True
395 stats['binary'] = True
394 stats['ops'][NEW_FILENODE] = 'new file'
396 stats['ops'][NEW_FILENODE] = 'new file'
395
397
396 elif head['a_file'] and not head['b_file']:
398 elif head['a_file'] and not head['b_file']:
397 op = OPS.DEL
399 op = OPS.DEL
398 stats['binary'] = True
400 stats['binary'] = True
399 stats['ops'][DEL_FILENODE] = 'deleted file'
401 stats['ops'][DEL_FILENODE] = 'deleted file'
400
402
401 # it's not ADD not DELETE
403 # it's not ADD not DELETE
402 if op is None:
404 if op is None:
403 op = OPS.MOD
405 op = OPS.MOD
404 stats['binary'] = True
406 stats['binary'] = True
405 stats['ops'][MOD_FILENODE] = 'modified file'
407 stats['ops'][MOD_FILENODE] = 'modified file'
406
408
407 # a real non-binary diff
409 # a real non-binary diff
408 if head['a_file'] or head['b_file']:
410 if head['a_file'] or head['b_file']:
409 try:
411 try:
410 raw_diff, chunks, _stats = self._parse_lines(diff)
412 raw_diff, chunks, _stats = self._parse_lines(diff)
411 stats['binary'] = False
413 stats['binary'] = False
412 stats['added'] = _stats[0]
414 stats['added'] = _stats[0]
413 stats['deleted'] = _stats[1]
415 stats['deleted'] = _stats[1]
414 # explicit mark that it's a modified file
416 # explicit mark that it's a modified file
415 if op == OPS.MOD:
417 if op == OPS.MOD:
416 stats['ops'][MOD_FILENODE] = 'modified file'
418 stats['ops'][MOD_FILENODE] = 'modified file'
417 exceeds_limit = len(raw_diff) > self.file_limit
419 exceeds_limit = len(raw_diff) > self.file_limit
418
420
419 # changed from _escaper function so we validate size of
421 # changed from _escaper function so we validate size of
420 # each file instead of the whole diff
422 # each file instead of the whole diff
421 # diff will hide big files but still show small ones
423 # diff will hide big files but still show small ones
422 # from my tests, big files are fairly safe to be parsed
424 # from my tests, big files are fairly safe to be parsed
423 # but the browser is the bottleneck
425 # but the browser is the bottleneck
424 if not self.show_full_diff and exceeds_limit:
426 if not self.show_full_diff and exceeds_limit:
425 raise DiffLimitExceeded('File Limit Exceeded')
427 raise DiffLimitExceeded('File Limit Exceeded')
426
428
427 except DiffLimitExceeded:
429 except DiffLimitExceeded:
428 diff_container = lambda _diff: \
430 diff_container = lambda _diff: \
429 LimitedDiffContainer(
431 LimitedDiffContainer(
430 self.diff_limit, self.cur_diff_size, _diff)
432 self.diff_limit, self.cur_diff_size, _diff)
431
433
432 exceeds_limit = len(raw_diff) > self.file_limit
434 exceeds_limit = len(raw_diff) > self.file_limit
433 limited_diff = True
435 limited_diff = True
434 chunks = []
436 chunks = []
435
437
436 else: # GIT format binary patch, or possibly empty diff
438 else: # GIT format binary patch, or possibly empty diff
437 if head['bin_patch']:
439 if head['bin_patch']:
438 # we have operation already extracted, but we mark simply
440 # we have operation already extracted, but we mark simply
439 # it's a diff we wont show for binary files
441 # it's a diff we wont show for binary files
440 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
442 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
441 chunks = []
443 chunks = []
442
444
443 if chunks and not self.show_full_diff and op == OPS.DEL:
445 if chunks and not self.show_full_diff and op == OPS.DEL:
444 # if not full diff mode show deleted file contents
446 # if not full diff mode show deleted file contents
445 # TODO: anderson: if the view is not too big, there is no way
447 # TODO: anderson: if the view is not too big, there is no way
446 # to see the content of the file
448 # to see the content of the file
447 chunks = []
449 chunks = []
448
450
449 chunks.insert(0, [{
451 chunks.insert(0, [{
450 'old_lineno': '',
452 'old_lineno': '',
451 'new_lineno': '',
453 'new_lineno': '',
452 'action': Action.CONTEXT,
454 'action': Action.CONTEXT,
453 'line': msg,
455 'line': msg,
454 } for _op, msg in stats['ops'].iteritems()
456 } for _op, msg in stats['ops'].iteritems()
455 if _op not in [MOD_FILENODE]])
457 if _op not in [MOD_FILENODE]])
456
458
457 _files.append({
459 _files.append({
458 'filename': safe_unicode(head['b_path']),
460 'filename': safe_unicode(head['b_path']),
459 'old_revision': head['a_blob_id'],
461 'old_revision': head['a_blob_id'],
460 'new_revision': head['b_blob_id'],
462 'new_revision': head['b_blob_id'],
461 'chunks': chunks,
463 'chunks': chunks,
462 'raw_diff': safe_unicode(raw_diff),
464 'raw_diff': safe_unicode(raw_diff),
463 'operation': op,
465 'operation': op,
464 'stats': stats,
466 'stats': stats,
465 'exceeds_limit': exceeds_limit,
467 'exceeds_limit': exceeds_limit,
466 'is_limited_diff': limited_diff,
468 'is_limited_diff': limited_diff,
467 })
469 })
468
470
469 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
471 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
470 OPS.DEL: 2}.get(info['operation'])
472 OPS.DEL: 2}.get(info['operation'])
471
473
472 if not inline_diff:
474 if not inline_diff:
473 return diff_container(sorted(_files, key=sorter))
475 return diff_container(sorted(_files, key=sorter))
474
476
475 # highlight inline changes
477 # highlight inline changes
476 for diff_data in _files:
478 for diff_data in _files:
477 for chunk in diff_data['chunks']:
479 for chunk in diff_data['chunks']:
478 lineiter = iter(chunk)
480 lineiter = iter(chunk)
479 try:
481 try:
480 while 1:
482 while 1:
481 line = lineiter.next()
483 line = lineiter.next()
482 if line['action'] not in (
484 if line['action'] not in (
483 Action.UNMODIFIED, Action.CONTEXT):
485 Action.UNMODIFIED, Action.CONTEXT):
484 nextline = lineiter.next()
486 nextline = lineiter.next()
485 if nextline['action'] in ['unmod', 'context'] or \
487 if nextline['action'] in ['unmod', 'context'] or \
486 nextline['action'] == line['action']:
488 nextline['action'] == line['action']:
487 continue
489 continue
488 self.differ(line, nextline)
490 self.differ(line, nextline)
489 except StopIteration:
491 except StopIteration:
490 pass
492 pass
491
493
492 return diff_container(sorted(_files, key=sorter))
494 return diff_container(sorted(_files, key=sorter))
493
495
494 def _parse_udiff(self, inline_diff=True):
496
495 raise NotImplementedError()
497 # FIXME: NEWDIFFS: dan: this replaces the old _escaper function
498 def _process_line(self, string):
499 """
500 Process a diff line, checks the diff limit
501
502 :param string:
503 """
504
505 self.cur_diff_size += len(string)
506
507 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
508 raise DiffLimitExceeded('Diff Limit Exceeded')
509
510 return safe_unicode(string)
511
512 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
513 def _new_parse_gitdiff(self, inline_diff=True):
514 _files = []
515 diff_container = lambda arg: arg
516 for chunk in self._diff.chunks():
517 head = chunk.header
518 log.debug('parsing diff %r' % head)
519
520 diff = imap(self._process_line, chunk.diff.splitlines(1))
521 raw_diff = chunk.raw
522 limited_diff = False
523 exceeds_limit = False
524 # if 'empty_file_to_modify_and_rename' in head['a_path']:
525 # 1/0
526 op = None
527 stats = {
528 'added': 0,
529 'deleted': 0,
530 'binary': False,
531 'old_mode': None,
532 'new_mode': None,
533 'ops': {},
534 }
535 if head['old_mode']:
536 stats['old_mode'] = head['old_mode']
537 if head['new_mode']:
538 stats['new_mode'] = head['new_mode']
539 if head['b_mode']:
540 stats['new_mode'] = head['b_mode']
541
542 if head['deleted_file_mode']:
543 op = OPS.DEL
544 stats['binary'] = True
545 stats['ops'][DEL_FILENODE] = 'deleted file'
546
547 elif head['new_file_mode']:
548 op = OPS.ADD
549 stats['binary'] = True
550 stats['old_mode'] = None
551 stats['new_mode'] = head['new_file_mode']
552 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
553 else: # modify operation, can be copy, rename or chmod
554
555 # CHMOD
556 if head['new_mode'] and head['old_mode']:
557 op = OPS.MOD
558 stats['binary'] = True
559 stats['ops'][CHMOD_FILENODE] = (
560 'modified file chmod %s => %s' % (
561 head['old_mode'], head['new_mode']))
562
563 # RENAME
564 if head['rename_from'] != head['rename_to']:
565 op = OPS.MOD
566 stats['binary'] = True
567 stats['renamed'] = (head['rename_from'], head['rename_to'])
568 stats['ops'][RENAMED_FILENODE] = (
569 'file renamed from %s to %s' % (
570 head['rename_from'], head['rename_to']))
571 # COPY
572 if head.get('copy_from') and head.get('copy_to'):
573 op = OPS.MOD
574 stats['binary'] = True
575 stats['copied'] = (head['copy_from'], head['copy_to'])
576 stats['ops'][COPIED_FILENODE] = (
577 'file copied from %s to %s' % (
578 head['copy_from'], head['copy_to']))
496
579
580 # If our new parsed headers didn't match anything fallback to
581 # old style detection
582 if op is None:
583 if not head['a_file'] and head['b_file']:
584 op = OPS.ADD
585 stats['binary'] = True
586 stats['new_file'] = True
587 stats['ops'][NEW_FILENODE] = 'new file'
588
589 elif head['a_file'] and not head['b_file']:
590 op = OPS.DEL
591 stats['binary'] = True
592 stats['ops'][DEL_FILENODE] = 'deleted file'
593
594 # it's not ADD not DELETE
595 if op is None:
596 op = OPS.MOD
597 stats['binary'] = True
598 stats['ops'][MOD_FILENODE] = 'modified file'
599
600 # a real non-binary diff
601 if head['a_file'] or head['b_file']:
602 try:
603 raw_diff, chunks, _stats = self._new_parse_lines(diff)
604 stats['binary'] = False
605 stats['added'] = _stats[0]
606 stats['deleted'] = _stats[1]
607 # explicit mark that it's a modified file
608 if op == OPS.MOD:
609 stats['ops'][MOD_FILENODE] = 'modified file'
610 exceeds_limit = len(raw_diff) > self.file_limit
611
612 # changed from _escaper function so we validate size of
613 # each file instead of the whole diff
614 # diff will hide big files but still show small ones
615 # from my tests, big files are fairly safe to be parsed
616 # but the browser is the bottleneck
617 if not self.show_full_diff and exceeds_limit:
618 raise DiffLimitExceeded('File Limit Exceeded')
619
620 except DiffLimitExceeded:
621 diff_container = lambda _diff: \
622 LimitedDiffContainer(
623 self.diff_limit, self.cur_diff_size, _diff)
624
625 exceeds_limit = len(raw_diff) > self.file_limit
626 limited_diff = True
627 chunks = []
628
629 else: # GIT format binary patch, or possibly empty diff
630 if head['bin_patch']:
631 # we have operation already extracted, but we mark simply
632 # it's a diff we wont show for binary files
633 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
634 chunks = []
635
636 if chunks and not self.show_full_diff and op == OPS.DEL:
637 # if not full diff mode show deleted file contents
638 # TODO: anderson: if the view is not too big, there is no way
639 # to see the content of the file
640 chunks = []
641
642 chunks.insert(0, [{
643 'old_lineno': '',
644 'new_lineno': '',
645 'action': Action.CONTEXT,
646 'line': msg,
647 } for _op, msg in stats['ops'].iteritems()
648 if _op not in [MOD_FILENODE]])
649
650 original_filename = safe_unicode(head['a_path'])
651 _files.append({
652 'original_filename': original_filename,
653 'filename': safe_unicode(head['b_path']),
654 'old_revision': head['a_blob_id'],
655 'new_revision': head['b_blob_id'],
656 'chunks': chunks,
657 'raw_diff': safe_unicode(raw_diff),
658 'operation': op,
659 'stats': stats,
660 'exceeds_limit': exceeds_limit,
661 'is_limited_diff': limited_diff,
662 })
663
664
665 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
666 OPS.DEL: 2}.get(info['operation'])
667
668 return diff_container(sorted(_files, key=sorter))
669
670 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
497 def _parse_lines(self, diff):
671 def _parse_lines(self, diff):
498 """
672 """
499 Parse the diff an return data for the template.
673 Parse the diff an return data for the template.
500 """
674 """
501
675
502 lineiter = iter(diff)
676 lineiter = iter(diff)
503 stats = [0, 0]
677 stats = [0, 0]
504 chunks = []
678 chunks = []
505 raw_diff = []
679 raw_diff = []
506
680
507 try:
681 try:
508 line = lineiter.next()
682 line = lineiter.next()
509
683
510 while line:
684 while line:
511 raw_diff.append(line)
685 raw_diff.append(line)
512 lines = []
686 lines = []
513 chunks.append(lines)
687 chunks.append(lines)
514
688
515 match = self._chunk_re.match(line)
689 match = self._chunk_re.match(line)
516
690
517 if not match:
691 if not match:
518 break
692 break
519
693
520 gr = match.groups()
694 gr = match.groups()
521 (old_line, old_end,
695 (old_line, old_end,
522 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
696 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
523 old_line -= 1
697 old_line -= 1
524 new_line -= 1
698 new_line -= 1
525
699
526 context = len(gr) == 5
700 context = len(gr) == 5
527 old_end += old_line
701 old_end += old_line
528 new_end += new_line
702 new_end += new_line
529
703
530 if context:
704 if context:
531 # skip context only if it's first line
705 # skip context only if it's first line
532 if int(gr[0]) > 1:
706 if int(gr[0]) > 1:
533 lines.append({
707 lines.append({
534 'old_lineno': '...',
708 'old_lineno': '...',
535 'new_lineno': '...',
709 'new_lineno': '...',
536 'action': Action.CONTEXT,
710 'action': Action.CONTEXT,
537 'line': line,
711 'line': line,
538 })
712 })
539
713
540 line = lineiter.next()
714 line = lineiter.next()
541
715
542 while old_line < old_end or new_line < new_end:
716 while old_line < old_end or new_line < new_end:
543 command = ' '
717 command = ' '
544 if line:
718 if line:
545 command = line[0]
719 command = line[0]
546
720
547 affects_old = affects_new = False
721 affects_old = affects_new = False
548
722
549 # ignore those if we don't expect them
723 # ignore those if we don't expect them
550 if command in '#@':
724 if command in '#@':
551 continue
725 continue
552 elif command == '+':
726 elif command == '+':
553 affects_new = True
727 affects_new = True
554 action = Action.ADD
728 action = Action.ADD
555 stats[0] += 1
729 stats[0] += 1
556 elif command == '-':
730 elif command == '-':
557 affects_old = True
731 affects_old = True
558 action = Action.DELETE
732 action = Action.DELETE
559 stats[1] += 1
733 stats[1] += 1
560 else:
734 else:
561 affects_old = affects_new = True
735 affects_old = affects_new = True
562 action = Action.UNMODIFIED
736 action = Action.UNMODIFIED
563
737
564 if not self._newline_marker.match(line):
738 if not self._newline_marker.match(line):
565 old_line += affects_old
739 old_line += affects_old
566 new_line += affects_new
740 new_line += affects_new
567 lines.append({
741 lines.append({
568 'old_lineno': affects_old and old_line or '',
742 'old_lineno': affects_old and old_line or '',
569 'new_lineno': affects_new and new_line or '',
743 'new_lineno': affects_new and new_line or '',
570 'action': action,
744 'action': action,
571 'line': self._clean_line(line, command)
745 'line': self._clean_line(line, command)
572 })
746 })
573 raw_diff.append(line)
747 raw_diff.append(line)
574
748
575 line = lineiter.next()
749 line = lineiter.next()
576
750
577 if self._newline_marker.match(line):
751 if self._newline_marker.match(line):
578 # we need to append to lines, since this is not
752 # we need to append to lines, since this is not
579 # counted in the line specs of diff
753 # counted in the line specs of diff
580 lines.append({
754 lines.append({
581 'old_lineno': '...',
755 'old_lineno': '...',
582 'new_lineno': '...',
756 'new_lineno': '...',
583 'action': Action.CONTEXT,
757 'action': Action.CONTEXT,
584 'line': self._clean_line(line, command)
758 'line': self._clean_line(line, command)
585 })
759 })
586
760
587 except StopIteration:
761 except StopIteration:
588 pass
762 pass
589 return ''.join(raw_diff), chunks, stats
763 return ''.join(raw_diff), chunks, stats
590
764
765 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
766 def _new_parse_lines(self, diff):
767 """
768 Parse the diff an return data for the template.
769 """
770
771 lineiter = iter(diff)
772 stats = [0, 0]
773 chunks = []
774 raw_diff = []
775
776 try:
777 line = lineiter.next()
778
779 while line:
780 raw_diff.append(line)
781 match = self._chunk_re.match(line)
782
783 if not match:
784 break
785
786 gr = match.groups()
787 (old_line, old_end,
788 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
789
790 lines = []
791 hunk = {
792 'section_header': gr[-1],
793 'source_start': old_line,
794 'source_length': old_end,
795 'target_start': new_line,
796 'target_length': new_end,
797 'lines': lines,
798 }
799 chunks.append(hunk)
800
801 old_line -= 1
802 new_line -= 1
803
804 context = len(gr) == 5
805 old_end += old_line
806 new_end += new_line
807
808 line = lineiter.next()
809
810 while old_line < old_end or new_line < new_end:
811 command = ' '
812 if line:
813 command = line[0]
814
815 affects_old = affects_new = False
816
817 # ignore those if we don't expect them
818 if command in '#@':
819 continue
820 elif command == '+':
821 affects_new = True
822 action = Action.ADD
823 stats[0] += 1
824 elif command == '-':
825 affects_old = True
826 action = Action.DELETE
827 stats[1] += 1
828 else:
829 affects_old = affects_new = True
830 action = Action.UNMODIFIED
831
832 if not self._newline_marker.match(line):
833 old_line += affects_old
834 new_line += affects_new
835 lines.append({
836 'old_lineno': affects_old and old_line or '',
837 'new_lineno': affects_new and new_line or '',
838 'action': action,
839 'line': self._clean_line(line, command)
840 })
841 raw_diff.append(line)
842
843 line = lineiter.next()
844
845 if self._newline_marker.match(line):
846 # we need to append to lines, since this is not
847 # counted in the line specs of diff
848 if affects_old:
849 action = Action.CONTEXT_OLD
850 elif affects_new:
851 action = Action.CONTEXT_NEW
852 else:
853 raise Exception('invalid context for no newline')
854
855 lines.append({
856 'old_lineno': None,
857 'new_lineno': None,
858 'action': action,
859 'line': self._clean_line(line, command)
860 })
861
862 except StopIteration:
863 pass
864 return ''.join(raw_diff), chunks, stats
865
591 def _safe_id(self, idstring):
866 def _safe_id(self, idstring):
592 """Make a string safe for including in an id attribute.
867 """Make a string safe for including in an id attribute.
593
868
594 The HTML spec says that id attributes 'must begin with
869 The HTML spec says that id attributes 'must begin with
595 a letter ([A-Za-z]) and may be followed by any number
870 a letter ([A-Za-z]) and may be followed by any number
596 of letters, digits ([0-9]), hyphens ("-"), underscores
871 of letters, digits ([0-9]), hyphens ("-"), underscores
597 ("_"), colons (":"), and periods (".")'. These regexps
872 ("_"), colons (":"), and periods (".")'. These regexps
598 are slightly over-zealous, in that they remove colons
873 are slightly over-zealous, in that they remove colons
599 and periods unnecessarily.
874 and periods unnecessarily.
600
875
601 Whitespace is transformed into underscores, and then
876 Whitespace is transformed into underscores, and then
602 anything which is not a hyphen or a character that
877 anything which is not a hyphen or a character that
603 matches \w (alphanumerics and underscore) is removed.
878 matches \w (alphanumerics and underscore) is removed.
604
879
605 """
880 """
606 # Transform all whitespace to underscore
881 # Transform all whitespace to underscore
607 idstring = re.sub(r'\s', "_", '%s' % idstring)
882 idstring = re.sub(r'\s', "_", '%s' % idstring)
608 # Remove everything that is not a hyphen or a member of \w
883 # Remove everything that is not a hyphen or a member of \w
609 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
884 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
610 return idstring
885 return idstring
611
886
612 def prepare(self, inline_diff=True):
887 def prepare(self, inline_diff=True):
613 """
888 """
614 Prepare the passed udiff for HTML rendering.
889 Prepare the passed udiff for HTML rendering.
615
890
616 :return: A list of dicts with diff information.
891 :return: A list of dicts with diff information.
617 """
892 """
618 parsed = self._parser(inline_diff=inline_diff)
893 parsed = self._parser(inline_diff=inline_diff)
619 self.parsed = True
894 self.parsed = True
620 self.parsed_diff = parsed
895 self.parsed_diff = parsed
621 return parsed
896 return parsed
622
897
623 def as_raw(self, diff_lines=None):
898 def as_raw(self, diff_lines=None):
624 """
899 """
625 Returns raw diff as a byte string
900 Returns raw diff as a byte string
626 """
901 """
627 return self._diff.raw
902 return self._diff.raw
628
903
629 def as_html(self, table_class='code-difftable', line_class='line',
904 def as_html(self, table_class='code-difftable', line_class='line',
630 old_lineno_class='lineno old', new_lineno_class='lineno new',
905 old_lineno_class='lineno old', new_lineno_class='lineno new',
631 code_class='code', enable_comments=False, parsed_lines=None):
906 code_class='code', enable_comments=False, parsed_lines=None):
632 """
907 """
633 Return given diff as html table with customized css classes
908 Return given diff as html table with customized css classes
634 """
909 """
635 def _link_to_if(condition, label, url):
910 def _link_to_if(condition, label, url):
636 """
911 """
637 Generates a link if condition is meet or just the label if not.
912 Generates a link if condition is meet or just the label if not.
638 """
913 """
639
914
640 if condition:
915 if condition:
641 return '''<a href="%(url)s" class="tooltip"
916 return '''<a href="%(url)s" class="tooltip"
642 title="%(title)s">%(label)s</a>''' % {
917 title="%(title)s">%(label)s</a>''' % {
643 'title': _('Click to select line'),
918 'title': _('Click to select line'),
644 'url': url,
919 'url': url,
645 'label': label
920 'label': label
646 }
921 }
647 else:
922 else:
648 return label
923 return label
649 if not self.parsed:
924 if not self.parsed:
650 self.prepare()
925 self.prepare()
651
926
652 diff_lines = self.parsed_diff
927 diff_lines = self.parsed_diff
653 if parsed_lines:
928 if parsed_lines:
654 diff_lines = parsed_lines
929 diff_lines = parsed_lines
655
930
656 _html_empty = True
931 _html_empty = True
657 _html = []
932 _html = []
658 _html.append('''<table class="%(table_class)s">\n''' % {
933 _html.append('''<table class="%(table_class)s">\n''' % {
659 'table_class': table_class
934 'table_class': table_class
660 })
935 })
661
936
662 for diff in diff_lines:
937 for diff in diff_lines:
663 for line in diff['chunks']:
938 for line in diff['chunks']:
664 _html_empty = False
939 _html_empty = False
665 for change in line:
940 for change in line:
666 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
941 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
667 'lc': line_class,
942 'lc': line_class,
668 'action': change['action']
943 'action': change['action']
669 })
944 })
670 anchor_old_id = ''
945 anchor_old_id = ''
671 anchor_new_id = ''
946 anchor_new_id = ''
672 anchor_old = "%(filename)s_o%(oldline_no)s" % {
947 anchor_old = "%(filename)s_o%(oldline_no)s" % {
673 'filename': self._safe_id(diff['filename']),
948 'filename': self._safe_id(diff['filename']),
674 'oldline_no': change['old_lineno']
949 'oldline_no': change['old_lineno']
675 }
950 }
676 anchor_new = "%(filename)s_n%(oldline_no)s" % {
951 anchor_new = "%(filename)s_n%(oldline_no)s" % {
677 'filename': self._safe_id(diff['filename']),
952 'filename': self._safe_id(diff['filename']),
678 'oldline_no': change['new_lineno']
953 'oldline_no': change['new_lineno']
679 }
954 }
680 cond_old = (change['old_lineno'] != '...' and
955 cond_old = (change['old_lineno'] != '...' and
681 change['old_lineno'])
956 change['old_lineno'])
682 cond_new = (change['new_lineno'] != '...' and
957 cond_new = (change['new_lineno'] != '...' and
683 change['new_lineno'])
958 change['new_lineno'])
684 if cond_old:
959 if cond_old:
685 anchor_old_id = 'id="%s"' % anchor_old
960 anchor_old_id = 'id="%s"' % anchor_old
686 if cond_new:
961 if cond_new:
687 anchor_new_id = 'id="%s"' % anchor_new
962 anchor_new_id = 'id="%s"' % anchor_new
688
963
689 if change['action'] != Action.CONTEXT:
964 if change['action'] != Action.CONTEXT:
690 anchor_link = True
965 anchor_link = True
691 else:
966 else:
692 anchor_link = False
967 anchor_link = False
693
968
694 ###########################################################
969 ###########################################################
695 # COMMENT ICONS
970 # COMMENT ICONS
696 ###########################################################
971 ###########################################################
697 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
972 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
698
973
699 if enable_comments and change['action'] != Action.CONTEXT:
974 if enable_comments and change['action'] != Action.CONTEXT:
700 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
975 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
701
976
702 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
977 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
703
978
704 ###########################################################
979 ###########################################################
705 # OLD LINE NUMBER
980 # OLD LINE NUMBER
706 ###########################################################
981 ###########################################################
707 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
982 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
708 'a_id': anchor_old_id,
983 'a_id': anchor_old_id,
709 'olc': old_lineno_class
984 'olc': old_lineno_class
710 })
985 })
711
986
712 _html.append('''%(link)s''' % {
987 _html.append('''%(link)s''' % {
713 'link': _link_to_if(anchor_link, change['old_lineno'],
988 'link': _link_to_if(anchor_link, change['old_lineno'],
714 '#%s' % anchor_old)
989 '#%s' % anchor_old)
715 })
990 })
716 _html.append('''</td>\n''')
991 _html.append('''</td>\n''')
717 ###########################################################
992 ###########################################################
718 # NEW LINE NUMBER
993 # NEW LINE NUMBER
719 ###########################################################
994 ###########################################################
720
995
721 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
996 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
722 'a_id': anchor_new_id,
997 'a_id': anchor_new_id,
723 'nlc': new_lineno_class
998 'nlc': new_lineno_class
724 })
999 })
725
1000
726 _html.append('''%(link)s''' % {
1001 _html.append('''%(link)s''' % {
727 'link': _link_to_if(anchor_link, change['new_lineno'],
1002 'link': _link_to_if(anchor_link, change['new_lineno'],
728 '#%s' % anchor_new)
1003 '#%s' % anchor_new)
729 })
1004 })
730 _html.append('''</td>\n''')
1005 _html.append('''</td>\n''')
731 ###########################################################
1006 ###########################################################
732 # CODE
1007 # CODE
733 ###########################################################
1008 ###########################################################
734 code_classes = [code_class]
1009 code_classes = [code_class]
735 if (not enable_comments or
1010 if (not enable_comments or
736 change['action'] == Action.CONTEXT):
1011 change['action'] == Action.CONTEXT):
737 code_classes.append('no-comment')
1012 code_classes.append('no-comment')
738 _html.append('\t<td class="%s">' % ' '.join(code_classes))
1013 _html.append('\t<td class="%s">' % ' '.join(code_classes))
739 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
1014 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
740 'code': change['line']
1015 'code': change['line']
741 })
1016 })
742
1017
743 _html.append('''\t</td>''')
1018 _html.append('''\t</td>''')
744 _html.append('''\n</tr>\n''')
1019 _html.append('''\n</tr>\n''')
745 _html.append('''</table>''')
1020 _html.append('''</table>''')
746 if _html_empty:
1021 if _html_empty:
747 return None
1022 return None
748 return ''.join(_html)
1023 return ''.join(_html)
749
1024
750 def stat(self):
1025 def stat(self):
751 """
1026 """
752 Returns tuple of added, and removed lines for this instance
1027 Returns tuple of added, and removed lines for this instance
753 """
1028 """
754 return self.adds, self.removes
1029 return self.adds, self.removes
755
1030
756 def get_context_of_line(
1031 def get_context_of_line(
757 self, path, diff_line=None, context_before=3, context_after=3):
1032 self, path, diff_line=None, context_before=3, context_after=3):
758 """
1033 """
759 Returns the context lines for the specified diff line.
1034 Returns the context lines for the specified diff line.
760
1035
761 :type diff_line: :class:`DiffLineNumber`
1036 :type diff_line: :class:`DiffLineNumber`
762 """
1037 """
763 assert self.parsed, "DiffProcessor is not initialized."
1038 assert self.parsed, "DiffProcessor is not initialized."
764
1039
765 if None not in diff_line:
1040 if None not in diff_line:
766 raise ValueError(
1041 raise ValueError(
767 "Cannot specify both line numbers: {}".format(diff_line))
1042 "Cannot specify both line numbers: {}".format(diff_line))
768
1043
769 file_diff = self._get_file_diff(path)
1044 file_diff = self._get_file_diff(path)
770 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
1045 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
771
1046
772 first_line_to_include = max(idx - context_before, 0)
1047 first_line_to_include = max(idx - context_before, 0)
773 first_line_after_context = idx + context_after + 1
1048 first_line_after_context = idx + context_after + 1
774 context_lines = chunk[first_line_to_include:first_line_after_context]
1049 context_lines = chunk[first_line_to_include:first_line_after_context]
775
1050
776 line_contents = [
1051 line_contents = [
777 _context_line(line) for line in context_lines
1052 _context_line(line) for line in context_lines
778 if _is_diff_content(line)]
1053 if _is_diff_content(line)]
779 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
1054 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
780 # Once they are fixed, we can drop this line here.
1055 # Once they are fixed, we can drop this line here.
781 if line_contents:
1056 if line_contents:
782 line_contents[-1] = (
1057 line_contents[-1] = (
783 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
1058 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
784 return line_contents
1059 return line_contents
785
1060
786 def find_context(self, path, context, offset=0):
1061 def find_context(self, path, context, offset=0):
787 """
1062 """
788 Finds the given `context` inside of the diff.
1063 Finds the given `context` inside of the diff.
789
1064
790 Use the parameter `offset` to specify which offset the target line has
1065 Use the parameter `offset` to specify which offset the target line has
791 inside of the given `context`. This way the correct diff line will be
1066 inside of the given `context`. This way the correct diff line will be
792 returned.
1067 returned.
793
1068
794 :param offset: Shall be used to specify the offset of the main line
1069 :param offset: Shall be used to specify the offset of the main line
795 within the given `context`.
1070 within the given `context`.
796 """
1071 """
797 if offset < 0 or offset >= len(context):
1072 if offset < 0 or offset >= len(context):
798 raise ValueError(
1073 raise ValueError(
799 "Only positive values up to the length of the context "
1074 "Only positive values up to the length of the context "
800 "minus one are allowed.")
1075 "minus one are allowed.")
801
1076
802 matches = []
1077 matches = []
803 file_diff = self._get_file_diff(path)
1078 file_diff = self._get_file_diff(path)
804
1079
805 for chunk in file_diff['chunks']:
1080 for chunk in file_diff['chunks']:
806 context_iter = iter(context)
1081 context_iter = iter(context)
807 for line_idx, line in enumerate(chunk):
1082 for line_idx, line in enumerate(chunk):
808 try:
1083 try:
809 if _context_line(line) == context_iter.next():
1084 if _context_line(line) == context_iter.next():
810 continue
1085 continue
811 except StopIteration:
1086 except StopIteration:
812 matches.append((line_idx, chunk))
1087 matches.append((line_idx, chunk))
813 context_iter = iter(context)
1088 context_iter = iter(context)
814
1089
815 # Increment position and triger StopIteration
1090 # Increment position and triger StopIteration
816 # if we had a match at the end
1091 # if we had a match at the end
817 line_idx += 1
1092 line_idx += 1
818 try:
1093 try:
819 context_iter.next()
1094 context_iter.next()
820 except StopIteration:
1095 except StopIteration:
821 matches.append((line_idx, chunk))
1096 matches.append((line_idx, chunk))
822
1097
823 effective_offset = len(context) - offset
1098 effective_offset = len(context) - offset
824 found_at_diff_lines = [
1099 found_at_diff_lines = [
825 _line_to_diff_line_number(chunk[idx - effective_offset])
1100 _line_to_diff_line_number(chunk[idx - effective_offset])
826 for idx, chunk in matches]
1101 for idx, chunk in matches]
827
1102
828 return found_at_diff_lines
1103 return found_at_diff_lines
829
1104
830 def _get_file_diff(self, path):
1105 def _get_file_diff(self, path):
831 for file_diff in self.parsed_diff:
1106 for file_diff in self.parsed_diff:
832 if file_diff['filename'] == path:
1107 if file_diff['filename'] == path:
833 break
1108 break
834 else:
1109 else:
835 raise FileNotInDiffException("File {} not in diff".format(path))
1110 raise FileNotInDiffException("File {} not in diff".format(path))
836 return file_diff
1111 return file_diff
837
1112
838 def _find_chunk_line_index(self, file_diff, diff_line):
1113 def _find_chunk_line_index(self, file_diff, diff_line):
839 for chunk in file_diff['chunks']:
1114 for chunk in file_diff['chunks']:
840 for idx, line in enumerate(chunk):
1115 for idx, line in enumerate(chunk):
841 if line['old_lineno'] == diff_line.old:
1116 if line['old_lineno'] == diff_line.old:
842 return chunk, idx
1117 return chunk, idx
843 if line['new_lineno'] == diff_line.new:
1118 if line['new_lineno'] == diff_line.new:
844 return chunk, idx
1119 return chunk, idx
845 raise LineNotInDiffException(
1120 raise LineNotInDiffException(
846 "The line {} is not part of the diff.".format(diff_line))
1121 "The line {} is not part of the diff.".format(diff_line))
847
1122
848
1123
849 def _is_diff_content(line):
1124 def _is_diff_content(line):
850 return line['action'] in (
1125 return line['action'] in (
851 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1126 Action.UNMODIFIED, Action.ADD, Action.DELETE)
852
1127
853
1128
854 def _context_line(line):
1129 def _context_line(line):
855 return (line['action'], line['line'])
1130 return (line['action'], line['line'])
856
1131
857
1132
858 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1133 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
859
1134
860
1135
861 def _line_to_diff_line_number(line):
1136 def _line_to_diff_line_number(line):
862 new_line_no = line['new_lineno'] or None
1137 new_line_no = line['new_lineno'] or None
863 old_line_no = line['old_lineno'] or None
1138 old_line_no = line['old_lineno'] or None
864 return DiffLineNumber(old=old_line_no, new=new_line_no)
1139 return DiffLineNumber(old=old_line_no, new=new_line_no)
865
1140
866
1141
867 class FileNotInDiffException(Exception):
1142 class FileNotInDiffException(Exception):
868 """
1143 """
869 Raised when the context for a missing file is requested.
1144 Raised when the context for a missing file is requested.
870
1145
871 If you request the context for a line in a file which is not part of the
1146 If you request the context for a line in a file which is not part of the
872 given diff, then this exception is raised.
1147 given diff, then this exception is raised.
873 """
1148 """
874
1149
875
1150
876 class LineNotInDiffException(Exception):
1151 class LineNotInDiffException(Exception):
877 """
1152 """
878 Raised when the context for a missing line is requested.
1153 Raised when the context for a missing line is requested.
879
1154
880 If you request the context for a line in a file and this line is not
1155 If you request the context for a line in a file and this line is not
881 part of the given diff, then this exception is raised.
1156 part of the given diff, then this exception is raised.
882 """
1157 """
883
1158
884
1159
885 class DiffLimitExceeded(Exception):
1160 class DiffLimitExceeded(Exception):
886 pass
1161 pass
@@ -1,47 +1,49 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT diff module
22 GIT diff module
23 """
23 """
24
24
25 import re
25 import re
26
26
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class GitDiff(base.Diff):
30 class GitDiff(base.Diff):
31
31
32 _header_re = re.compile(r"""
32 _header_re = re.compile(r"""
33 #^diff[ ]--git
33 #^diff[ ]--git
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
35 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
36 ^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
37 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
38 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
35 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
39 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
36 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
37 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
38 (?:^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
39 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
40 (?:^copy[ ]from[ ](?P<copy_from>[^\r\n]+)\n
41 ^copy[ ]to[ ](?P<copy_to>[^\r\n]+)(?:\n|$))?
40 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
42 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
41 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
43 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
42 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
44 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
43 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
45 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
44 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
46 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
45 (?:^---[ ](a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
47 (?:^---[ ](a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
46 (?:^\+\+\+[ ](b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
48 (?:^\+\+\+[ ](b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
47 """, re.VERBOSE | re.MULTILINE)
49 """, re.VERBOSE | re.MULTILINE)
@@ -1,381 +1,389 b''
1
1
2
2
3 //BUTTONS
3 //BUTTONS
4 button,
4 button,
5 .btn,
5 .btn,
6 input[type="button"] {
6 input[type="button"] {
7 -webkit-appearance: none;
7 -webkit-appearance: none;
8 display: inline-block;
8 display: inline-block;
9 margin: 0 @padding/3 0 0;
9 margin: 0 @padding/3 0 0;
10 padding: @button-padding;
10 padding: @button-padding;
11 text-align: center;
11 text-align: center;
12 font-size: @basefontsize;
12 font-size: @basefontsize;
13 line-height: 1em;
13 line-height: 1em;
14 font-family: @text-light;
14 font-family: @text-light;
15 text-decoration: none;
15 text-decoration: none;
16 text-shadow: none;
16 text-shadow: none;
17 color: @grey4;
17 color: @grey4;
18 background-color: white;
18 background-color: white;
19 background-image: none;
19 background-image: none;
20 border: none;
20 border: none;
21 .border ( @border-thickness-buttons, @grey4 );
21 .border ( @border-thickness-buttons, @grey4 );
22 .border-radius (@border-radius);
22 .border-radius (@border-radius);
23 cursor: pointer;
23 cursor: pointer;
24 white-space: nowrap;
24 white-space: nowrap;
25 -webkit-transition: background .3s,color .3s;
25 -webkit-transition: background .3s,color .3s;
26 -moz-transition: background .3s,color .3s;
26 -moz-transition: background .3s,color .3s;
27 -o-transition: background .3s,color .3s;
27 -o-transition: background .3s,color .3s;
28 transition: background .3s,color .3s;
28 transition: background .3s,color .3s;
29
29
30 a {
30 a {
31 display: block;
31 display: block;
32 margin: 0;
32 margin: 0;
33 padding: 0;
33 padding: 0;
34 color: inherit;
34 color: inherit;
35 text-decoration: none;
35 text-decoration: none;
36
36
37 &:hover {
37 &:hover {
38 text-decoration: none;
38 text-decoration: none;
39 }
39 }
40 }
40 }
41
41
42 &:focus,
42 &:focus,
43 &:active {
43 &:active {
44 outline:none;
44 outline:none;
45 }
45 }
46 &:hover {
46 &:hover {
47 color: white;
47 color: white;
48 background-color: @grey4;
48 background-color: @grey4;
49 }
49 }
50
50
51 .icon-remove-sign {
51 .icon-remove-sign {
52 display: none;
52 display: none;
53 }
53 }
54
54
55 //disabled buttons
55 //disabled buttons
56 //last; overrides any other styles
56 //last; overrides any other styles
57 &:disabled {
57 &:disabled {
58 opacity: .7;
58 opacity: .7;
59 cursor: auto;
59 cursor: auto;
60 background-color: white;
60 background-color: white;
61 color: @grey4;
61 color: @grey4;
62 text-shadow: none;
62 text-shadow: none;
63 }
63 }
64
64
65 }
65 }
66
66
67
67
68 .btn-default {
68 .btn-default {
69 .border ( @border-thickness-buttons, @rcblue );
69 .border ( @border-thickness-buttons, @rcblue );
70 background-image: none;
70 background-image: none;
71 color: @rcblue;
71 color: @rcblue;
72
72
73 a {
73 a {
74 color: @rcblue;
74 color: @rcblue;
75 }
75 }
76
76
77 &:hover,
77 &:hover,
78 &.active {
78 &.active {
79 color: white;
79 color: white;
80 background-color: @rcdarkblue;
80 background-color: @rcdarkblue;
81 .border ( @border-thickness, @rcdarkblue );
81 .border ( @border-thickness, @rcdarkblue );
82
82
83 a {
83 a {
84 color: white;
84 color: white;
85 }
85 }
86 }
86 }
87 &:disabled {
87 &:disabled {
88 .border ( @border-thickness-buttons, @grey4 );
88 .border ( @border-thickness-buttons, @grey4 );
89 background-color: none;
89 background-color: none;
90 }
90 }
91 }
91 }
92
92
93 .btn-primary,
93 .btn-primary,
94 .btn-small, /* TODO: anderson: remove .btn-small to not mix with the new btn-sm */
94 .btn-small, /* TODO: anderson: remove .btn-small to not mix with the new btn-sm */
95 .btn-success {
95 .btn-success {
96 .border ( @border-thickness, @rcblue );
96 .border ( @border-thickness, @rcblue );
97 background-color: @rcblue;
97 background-color: @rcblue;
98 color: white;
98 color: white;
99
99
100 a {
100 a {
101 color: white;
101 color: white;
102 }
102 }
103
103
104 &:hover,
104 &:hover,
105 &.active {
105 &.active {
106 .border ( @border-thickness, @rcdarkblue );
106 .border ( @border-thickness, @rcdarkblue );
107 color: white;
107 color: white;
108 background-color: @rcdarkblue;
108 background-color: @rcdarkblue;
109
109
110 a {
110 a {
111 color: white;
111 color: white;
112 }
112 }
113 }
113 }
114 &:disabled {
114 &:disabled {
115 background-color: @rcblue;
115 background-color: @rcblue;
116 }
116 }
117 }
117 }
118
118
119 .btn-secondary {
119 .btn-secondary {
120 &:extend(.btn-default);
120 &:extend(.btn-default);
121
121
122 background-color: white;
122 background-color: white;
123
123
124 &:focus {
124 &:focus {
125 outline: 0;
125 outline: 0;
126 }
126 }
127
127
128 &:hover {
128 &:hover {
129 &:extend(.btn-default:hover);
129 &:extend(.btn-default:hover);
130 }
130 }
131
131
132 &.btn-link {
132 &.btn-link {
133 &:extend(.btn-link);
133 &:extend(.btn-link);
134 color: @rcblue;
134 color: @rcblue;
135 }
135 }
136
136
137 &:disabled {
137 &:disabled {
138 color: @rcblue;
138 color: @rcblue;
139 background-color: white;
139 background-color: white;
140 }
140 }
141 }
141 }
142
142
143 .btn-warning,
143 .btn-warning,
144 .btn-danger,
144 .btn-danger,
145 .revoke_perm,
145 .revoke_perm,
146 .btn-x,
146 .btn-x,
147 .form .action_button.btn-x {
147 .form .action_button.btn-x {
148 .border ( @border-thickness, @alert2 );
148 .border ( @border-thickness, @alert2 );
149 background-color: white;
149 background-color: white;
150 color: @alert2;
150 color: @alert2;
151
151
152 a {
152 a {
153 color: @alert2;
153 color: @alert2;
154 }
154 }
155
155
156 &:hover,
156 &:hover,
157 &.active {
157 &.active {
158 .border ( @border-thickness, @alert2 );
158 .border ( @border-thickness, @alert2 );
159 color: white;
159 color: white;
160 background-color: @alert2;
160 background-color: @alert2;
161
161
162 a {
162 a {
163 color: white;
163 color: white;
164 }
164 }
165 }
165 }
166
166
167 i {
167 i {
168 display:none;
168 display:none;
169 }
169 }
170
170
171 &:disabled {
171 &:disabled {
172 background-color: white;
172 background-color: white;
173 color: @alert2;
173 color: @alert2;
174 }
174 }
175 }
175 }
176
176
177 .btn-sm,
177 .btn-sm,
178 .btn-mini,
178 .btn-mini,
179 .field-sm .btn {
179 .field-sm .btn {
180 padding: @padding/3;
180 padding: @padding/3;
181 }
181 }
182
182
183 .btn-xs {
183 .btn-xs {
184 padding: @padding/4;
184 padding: @padding/4;
185 }
185 }
186
186
187 .btn-lg {
187 .btn-lg {
188 padding: @padding * 1.2;
188 padding: @padding * 1.2;
189 }
189 }
190
190
191 .btn-group {
192 display: inline-block;
193 .btn {
194 float: left;
195 margin: 0 0 0 -1px;
196 }
197 }
198
191 .btn-link {
199 .btn-link {
192 background: transparent;
200 background: transparent;
193 border: none;
201 border: none;
194 padding: 0;
202 padding: 0;
195 color: @rcblue;
203 color: @rcblue;
196
204
197 &:hover {
205 &:hover {
198 background: transparent;
206 background: transparent;
199 border: none;
207 border: none;
200 color: @rcdarkblue;
208 color: @rcdarkblue;
201 }
209 }
202
210
203 &:disabled {
211 &:disabled {
204 color: @grey4;
212 color: @grey4;
205 }
213 }
206
214
207 // TODO: johbo: Check if we can avoid this, indicates that the structure
215 // TODO: johbo: Check if we can avoid this, indicates that the structure
208 // is not yet good.
216 // is not yet good.
209 // lisa: The button CSS reflects the button HTML; both need a cleanup.
217 // lisa: The button CSS reflects the button HTML; both need a cleanup.
210 &.btn-danger {
218 &.btn-danger {
211 color: @alert2;
219 color: @alert2;
212
220
213 &:hover {
221 &:hover {
214 color: darken(@alert2,30%);
222 color: darken(@alert2,30%);
215 }
223 }
216
224
217 &:disabled {
225 &:disabled {
218 color: @alert2;
226 color: @alert2;
219 }
227 }
220 }
228 }
221 }
229 }
222
230
223 .btn-social {
231 .btn-social {
224 &:extend(.btn-default);
232 &:extend(.btn-default);
225 margin: 5px 5px 5px 0px;
233 margin: 5px 5px 5px 0px;
226 min-width: 150px;
234 min-width: 150px;
227 }
235 }
228
236
229 // TODO: johbo: check these exceptions
237 // TODO: johbo: check these exceptions
230
238
231 .links {
239 .links {
232
240
233 .btn + .btn {
241 .btn + .btn {
234 margin-top: @padding;
242 margin-top: @padding;
235 }
243 }
236 }
244 }
237
245
238
246
239 .action_button {
247 .action_button {
240 display:inline;
248 display:inline;
241 margin: 0;
249 margin: 0;
242 padding: 0 1em 0 0;
250 padding: 0 1em 0 0;
243 font-size: inherit;
251 font-size: inherit;
244 color: @rcblue;
252 color: @rcblue;
245 border: none;
253 border: none;
246 .border-radius (0);
254 .border-radius (0);
247 background-color: transparent;
255 background-color: transparent;
248
256
249 &:last-child {
257 &:last-child {
250 border: none;
258 border: none;
251 }
259 }
252
260
253 &:hover {
261 &:hover {
254 color: @rcdarkblue;
262 color: @rcdarkblue;
255 background-color: transparent;
263 background-color: transparent;
256 border: none;
264 border: none;
257 }
265 }
258 }
266 }
259 .grid_delete {
267 .grid_delete {
260 .action_button {
268 .action_button {
261 border: none;
269 border: none;
262 }
270 }
263 }
271 }
264
272
265
273
266 // TODO: johbo: Form button tweaks, check if we can use the classes instead
274 // TODO: johbo: Form button tweaks, check if we can use the classes instead
267 input[type="submit"] {
275 input[type="submit"] {
268 &:extend(.btn-primary);
276 &:extend(.btn-primary);
269
277
270 &:focus {
278 &:focus {
271 outline: 0;
279 outline: 0;
272 }
280 }
273
281
274 &:hover {
282 &:hover {
275 &:extend(.btn-primary:hover);
283 &:extend(.btn-primary:hover);
276 }
284 }
277
285
278 &.btn-link {
286 &.btn-link {
279 &:extend(.btn-link);
287 &:extend(.btn-link);
280 color: @rcblue;
288 color: @rcblue;
281
289
282 &:disabled {
290 &:disabled {
283 color: @rcblue;
291 color: @rcblue;
284 background-color: transparent;
292 background-color: transparent;
285 }
293 }
286 }
294 }
287
295
288 &:disabled {
296 &:disabled {
289 .border ( @border-thickness-buttons, @rcblue );
297 .border ( @border-thickness-buttons, @rcblue );
290 background-color: @rcblue;
298 background-color: @rcblue;
291 color: white;
299 color: white;
292 }
300 }
293 }
301 }
294
302
295 input[type="reset"] {
303 input[type="reset"] {
296 &:extend(.btn-default);
304 &:extend(.btn-default);
297
305
298 // TODO: johbo: Check if this tweak can be avoided.
306 // TODO: johbo: Check if this tweak can be avoided.
299 background: transparent;
307 background: transparent;
300
308
301 &:focus {
309 &:focus {
302 outline: 0;
310 outline: 0;
303 }
311 }
304
312
305 &:hover {
313 &:hover {
306 &:extend(.btn-default:hover);
314 &:extend(.btn-default:hover);
307 }
315 }
308
316
309 &.btn-link {
317 &.btn-link {
310 &:extend(.btn-link);
318 &:extend(.btn-link);
311 color: @rcblue;
319 color: @rcblue;
312
320
313 &:disabled {
321 &:disabled {
314 border: none;
322 border: none;
315 }
323 }
316 }
324 }
317
325
318 &:disabled {
326 &:disabled {
319 .border ( @border-thickness-buttons, @rcblue );
327 .border ( @border-thickness-buttons, @rcblue );
320 background-color: white;
328 background-color: white;
321 color: @rcblue;
329 color: @rcblue;
322 }
330 }
323 }
331 }
324
332
325 input[type="submit"],
333 input[type="submit"],
326 input[type="reset"] {
334 input[type="reset"] {
327 &.btn-danger {
335 &.btn-danger {
328 &:extend(.btn-danger);
336 &:extend(.btn-danger);
329
337
330 &:focus {
338 &:focus {
331 outline: 0;
339 outline: 0;
332 }
340 }
333
341
334 &:hover {
342 &:hover {
335 &:extend(.btn-danger:hover);
343 &:extend(.btn-danger:hover);
336 }
344 }
337
345
338 &.btn-link {
346 &.btn-link {
339 &:extend(.btn-link);
347 &:extend(.btn-link);
340 color: @alert2;
348 color: @alert2;
341
349
342 &:hover {
350 &:hover {
343 color: darken(@alert2,30%);
351 color: darken(@alert2,30%);
344 }
352 }
345 }
353 }
346
354
347 &:disabled {
355 &:disabled {
348 color: @alert2;
356 color: @alert2;
349 background-color: white;
357 background-color: white;
350 }
358 }
351 }
359 }
352 &.btn-danger-action {
360 &.btn-danger-action {
353 .border ( @border-thickness, @alert2 );
361 .border ( @border-thickness, @alert2 );
354 background-color: @alert2;
362 background-color: @alert2;
355 color: white;
363 color: white;
356
364
357 a {
365 a {
358 color: white;
366 color: white;
359 }
367 }
360
368
361 &:hover {
369 &:hover {
362 background-color: darken(@alert2,20%);
370 background-color: darken(@alert2,20%);
363 }
371 }
364
372
365 &.active {
373 &.active {
366 .border ( @border-thickness, @alert2 );
374 .border ( @border-thickness, @alert2 );
367 color: white;
375 color: white;
368 background-color: @alert2;
376 background-color: @alert2;
369
377
370 a {
378 a {
371 color: white;
379 color: white;
372 }
380 }
373 }
381 }
374
382
375 &:disabled {
383 &:disabled {
376 background-color: white;
384 background-color: white;
377 color: @alert2;
385 color: @alert2;
378 }
386 }
379 }
387 }
380 }
388 }
381
389
@@ -1,753 +1,1005 b''
1 // Default styles
1 // Default styles
2
2
3 .diff-collapse {
3 .diff-collapse {
4 margin: @padding 0;
4 margin: @padding 0;
5 text-align: right;
5 text-align: right;
6 }
6 }
7
7
8 .diff-container {
8 .diff-container {
9 margin-bottom: @space;
9 margin-bottom: @space;
10
10
11 .diffblock {
11 .diffblock {
12 margin-bottom: @space;
12 margin-bottom: @space;
13 }
13 }
14
14
15 &.hidden {
15 &.hidden {
16 display: none;
16 display: none;
17 overflow: hidden;
17 overflow: hidden;
18 }
18 }
19 }
19 }
20
20
21 .compare_view_files {
21 .compare_view_files {
22
22
23 .diff-container {
23 .diff-container {
24
24
25 .diffblock {
25 .diffblock {
26 margin-bottom: 0;
26 margin-bottom: 0;
27 }
27 }
28 }
28 }
29 }
29 }
30
30
31 div.diffblock .sidebyside {
31 div.diffblock .sidebyside {
32 background: #ffffff;
32 background: #ffffff;
33 }
33 }
34
34
35 div.diffblock {
35 div.diffblock {
36 overflow-x: auto;
36 overflow-x: auto;
37 overflow-y: hidden;
37 overflow-y: hidden;
38 clear: both;
38 clear: both;
39 padding: 0px;
39 padding: 0px;
40 background: @grey6;
40 background: @grey6;
41 border: @border-thickness solid @grey5;
41 border: @border-thickness solid @grey5;
42 -webkit-border-radius: @border-radius @border-radius 0px 0px;
42 -webkit-border-radius: @border-radius @border-radius 0px 0px;
43 border-radius: @border-radius @border-radius 0px 0px;
43 border-radius: @border-radius @border-radius 0px 0px;
44
44
45
45
46 .comments-number {
46 .comments-number {
47 float: right;
47 float: right;
48 }
48 }
49
49
50 // BEGIN CODE-HEADER STYLES
50 // BEGIN CODE-HEADER STYLES
51
51
52 .code-header {
52 .code-header {
53 background: @grey6;
53 background: @grey6;
54 padding: 10px 0 10px 0;
54 padding: 10px 0 10px 0;
55 height: auto;
55 height: auto;
56 width: 100%;
56 width: 100%;
57
57
58 .hash {
58 .hash {
59 float: left;
59 float: left;
60 padding: 2px 0 0 2px;
60 padding: 2px 0 0 2px;
61 }
61 }
62
62
63 .date {
63 .date {
64 float: left;
64 float: left;
65 text-transform: uppercase;
65 text-transform: uppercase;
66 padding: 4px 0px 0px 2px;
66 padding: 4px 0px 0px 2px;
67 }
67 }
68
68
69 div {
69 div {
70 margin-left: 4px;
70 margin-left: 4px;
71 }
71 }
72
72
73 div.compare_header {
73 div.compare_header {
74 min-height: 40px;
74 min-height: 40px;
75 margin: 0;
75 margin: 0;
76 padding: 0 @padding;
76 padding: 0 @padding;
77
77
78 .drop-menu {
78 .drop-menu {
79 float:left;
79 float:left;
80 display: block;
80 display: block;
81 margin:0 0 @padding 0;
81 margin:0 0 @padding 0;
82 }
82 }
83
83
84 .compare-label {
84 .compare-label {
85 float: left;
85 float: left;
86 clear: both;
86 clear: both;
87 display: inline-block;
87 display: inline-block;
88 min-width: 5em;
88 min-width: 5em;
89 margin: 0;
89 margin: 0;
90 padding: @button-padding @button-padding @button-padding 0;
90 padding: @button-padding @button-padding @button-padding 0;
91 font-family: @text-semibold;
91 font-family: @text-semibold;
92 }
92 }
93
93
94 .compare-buttons {
94 .compare-buttons {
95 float: left;
95 float: left;
96 margin: 0;
96 margin: 0;
97 padding: 0 0 @padding;
97 padding: 0 0 @padding;
98
98
99 .btn {
99 .btn {
100 margin: 0 @padding 0 0;
100 margin: 0 @padding 0 0;
101 }
101 }
102 }
102 }
103 }
103 }
104
104
105 }
105 }
106
106
107 .parents {
107 .parents {
108 float: left;
108 float: left;
109 width: 100px;
109 width: 100px;
110 font-weight: 400;
110 font-weight: 400;
111 vertical-align: middle;
111 vertical-align: middle;
112 padding: 0px 2px 0px 2px;
112 padding: 0px 2px 0px 2px;
113 background-color: @grey6;
113 background-color: @grey6;
114
114
115 #parent_link {
115 #parent_link {
116 margin: 00px 2px;
116 margin: 00px 2px;
117
117
118 &.double {
118 &.double {
119 margin: 0px 2px;
119 margin: 0px 2px;
120 }
120 }
121
121
122 &.disabled{
122 &.disabled{
123 margin-right: @padding;
123 margin-right: @padding;
124 }
124 }
125 }
125 }
126 }
126 }
127
127
128 .children {
128 .children {
129 float: right;
129 float: right;
130 width: 100px;
130 width: 100px;
131 font-weight: 400;
131 font-weight: 400;
132 vertical-align: middle;
132 vertical-align: middle;
133 text-align: right;
133 text-align: right;
134 padding: 0px 2px 0px 2px;
134 padding: 0px 2px 0px 2px;
135 background-color: @grey6;
135 background-color: @grey6;
136
136
137 #child_link {
137 #child_link {
138 margin: 0px 2px;
138 margin: 0px 2px;
139
139
140 &.double {
140 &.double {
141 margin: 0px 2px;
141 margin: 0px 2px;
142 }
142 }
143
143
144 &.disabled{
144 &.disabled{
145 margin-right: @padding;
145 margin-right: @padding;
146 }
146 }
147 }
147 }
148 }
148 }
149
149
150 .changeset_header {
150 .changeset_header {
151 height: 16px;
151 height: 16px;
152
152
153 & > div{
153 & > div{
154 margin-right: @padding;
154 margin-right: @padding;
155 }
155 }
156 }
156 }
157
157
158 .changeset_file {
158 .changeset_file {
159 text-align: left;
159 text-align: left;
160 float: left;
160 float: left;
161 padding: 0;
161 padding: 0;
162
162
163 a{
163 a{
164 display: inline-block;
164 display: inline-block;
165 margin-right: 0.5em;
165 margin-right: 0.5em;
166 }
166 }
167
167
168 #selected_mode{
168 #selected_mode{
169 margin-left: 0;
169 margin-left: 0;
170 }
170 }
171 }
171 }
172
172
173 .diff-menu-wrapper {
173 .diff-menu-wrapper {
174 float: left;
174 float: left;
175 }
175 }
176
176
177 .diff-menu {
177 .diff-menu {
178 position: absolute;
178 position: absolute;
179 background: none repeat scroll 0 0 #FFFFFF;
179 background: none repeat scroll 0 0 #FFFFFF;
180 border-color: #003367 @grey3 @grey3;
180 border-color: #003367 @grey3 @grey3;
181 border-right: 1px solid @grey3;
181 border-right: 1px solid @grey3;
182 border-style: solid solid solid;
182 border-style: solid solid solid;
183 border-width: @border-thickness;
183 border-width: @border-thickness;
184 box-shadow: 2px 8px 4px rgba(0, 0, 0, 0.2);
184 box-shadow: 2px 8px 4px rgba(0, 0, 0, 0.2);
185 margin-top: 5px;
185 margin-top: 5px;
186 margin-left: 1px;
186 margin-left: 1px;
187 }
187 }
188
188
189 .diff-actions, .editor-actions {
189 .diff-actions, .editor-actions {
190 float: left;
190 float: left;
191
191
192 input{
192 input{
193 margin: 0 0.5em 0 0;
193 margin: 0 0.5em 0 0;
194 }
194 }
195 }
195 }
196
196
197 // END CODE-HEADER STYLES
197 // END CODE-HEADER STYLES
198
198
199 // BEGIN CODE-BODY STYLES
199 // BEGIN CODE-BODY STYLES
200
200
201 .code-body {
201 .code-body {
202 background: white;
202 background: white;
203 padding: 0;
203 padding: 0;
204 background-color: #ffffff;
204 background-color: #ffffff;
205 position: relative;
205 position: relative;
206 max-width: none;
206 max-width: none;
207 box-sizing: border-box;
207 box-sizing: border-box;
208 // TODO: johbo: Parent has overflow: auto, this forces the child here
208 // TODO: johbo: Parent has overflow: auto, this forces the child here
209 // to have the intended size and to scroll. Should be simplified.
209 // to have the intended size and to scroll. Should be simplified.
210 width: 100%;
210 width: 100%;
211 overflow-x: auto;
211 overflow-x: auto;
212 }
212 }
213
213
214 pre.raw {
214 pre.raw {
215 background: white;
215 background: white;
216 color: @grey1;
216 color: @grey1;
217 }
217 }
218 // END CODE-BODY STYLES
218 // END CODE-BODY STYLES
219
219
220 }
220 }
221
221
222
222
223 table.code-difftable {
223 table.code-difftable {
224 border-collapse: collapse;
224 border-collapse: collapse;
225 width: 99%;
225 width: 99%;
226 border-radius: 0px !important;
226 border-radius: 0px !important;
227
227
228 td {
228 td {
229 padding: 0 !important;
229 padding: 0 !important;
230 background: none !important;
230 background: none !important;
231 border: 0 !important;
231 border: 0 !important;
232 }
232 }
233
233
234 .context {
234 .context {
235 background: none repeat scroll 0 0 #DDE7EF;
235 background: none repeat scroll 0 0 #DDE7EF;
236 }
236 }
237
237
238 .add {
238 .add {
239 background: none repeat scroll 0 0 #DDFFDD;
239 background: none repeat scroll 0 0 #DDFFDD;
240
240
241 ins {
241 ins {
242 background: none repeat scroll 0 0 #AAFFAA;
242 background: none repeat scroll 0 0 #AAFFAA;
243 text-decoration: none;
243 text-decoration: none;
244 }
244 }
245 }
245 }
246
246
247 .del {
247 .del {
248 background: none repeat scroll 0 0 #FFDDDD;
248 background: none repeat scroll 0 0 #FFDDDD;
249
249
250 del {
250 del {
251 background: none repeat scroll 0 0 #FFAAAA;
251 background: none repeat scroll 0 0 #FFAAAA;
252 text-decoration: none;
252 text-decoration: none;
253 }
253 }
254 }
254 }
255
255
256 /** LINE NUMBERS **/
256 /** LINE NUMBERS **/
257 .lineno {
257 .lineno {
258 padding-left: 2px !important;
258 padding-left: 2px !important;
259 padding-right: 2px;
259 padding-right: 2px;
260 text-align: right;
260 text-align: right;
261 width: 32px;
261 width: 32px;
262 -moz-user-select: none;
262 -moz-user-select: none;
263 -webkit-user-select: none;
263 -webkit-user-select: none;
264 border-right: @border-thickness solid @grey5 !important;
264 border-right: @border-thickness solid @grey5 !important;
265 border-left: 0px solid #CCC !important;
265 border-left: 0px solid #CCC !important;
266 border-top: 0px solid #CCC !important;
266 border-top: 0px solid #CCC !important;
267 border-bottom: none !important;
267 border-bottom: none !important;
268
268
269 a {
269 a {
270 &:extend(pre);
270 &:extend(pre);
271 text-align: right;
271 text-align: right;
272 padding-right: 2px;
272 padding-right: 2px;
273 cursor: pointer;
273 cursor: pointer;
274 display: block;
274 display: block;
275 width: 32px;
275 width: 32px;
276 }
276 }
277 }
277 }
278
278
279 .context {
279 .context {
280 cursor: auto;
280 cursor: auto;
281 &:extend(pre);
281 &:extend(pre);
282 }
282 }
283
283
284 .lineno-inline {
284 .lineno-inline {
285 background: none repeat scroll 0 0 #FFF !important;
285 background: none repeat scroll 0 0 #FFF !important;
286 padding-left: 2px;
286 padding-left: 2px;
287 padding-right: 2px;
287 padding-right: 2px;
288 text-align: right;
288 text-align: right;
289 width: 30px;
289 width: 30px;
290 -moz-user-select: none;
290 -moz-user-select: none;
291 -webkit-user-select: none;
291 -webkit-user-select: none;
292 }
292 }
293
293
294 /** CODE **/
294 /** CODE **/
295 .code {
295 .code {
296 display: block;
296 display: block;
297 width: 100%;
297 width: 100%;
298
298
299 td {
299 td {
300 margin: 0;
300 margin: 0;
301 padding: 0;
301 padding: 0;
302 }
302 }
303
303
304 pre {
304 pre {
305 margin: 0;
305 margin: 0;
306 padding: 0;
306 padding: 0;
307 margin-left: .5em;
307 margin-left: .5em;
308 }
308 }
309 }
309 }
310 }
310 }
311
311
312
312
313 // Comments
313 // Comments
314
314
315 div.comment:target {
315 div.comment:target {
316 border-left: 6px solid @comment-highlight-color;
316 border-left: 6px solid @comment-highlight-color;
317 padding-left: 3px;
317 padding-left: 3px;
318 margin-left: -9px;
318 margin-left: -9px;
319 }
319 }
320
320
321 //TODO: anderson: can't get an absolute number out of anything, so had to put the
321 //TODO: anderson: can't get an absolute number out of anything, so had to put the
322 //current values that might change. But to make it clear I put as a calculation
322 //current values that might change. But to make it clear I put as a calculation
323 @comment-max-width: 1065px;
323 @comment-max-width: 1065px;
324 @pr-extra-margin: 34px;
324 @pr-extra-margin: 34px;
325 @pr-border-spacing: 4px;
325 @pr-border-spacing: 4px;
326 @pr-comment-width: @comment-max-width - @pr-extra-margin - @pr-border-spacing;
326 @pr-comment-width: @comment-max-width - @pr-extra-margin - @pr-border-spacing;
327
327
328 // Pull Request
328 // Pull Request
329 .cs_files .code-difftable {
329 .cs_files .code-difftable {
330 border: @border-thickness solid @grey5; //borders only on PRs
330 border: @border-thickness solid @grey5; //borders only on PRs
331
331
332 .comment-inline-form,
332 .comment-inline-form,
333 div.comment {
333 div.comment {
334 width: @pr-comment-width;
334 width: @pr-comment-width;
335 }
335 }
336 }
336 }
337
337
338 // Changeset
338 // Changeset
339 .code-difftable {
339 .code-difftable {
340 .comment-inline-form,
340 .comment-inline-form,
341 div.comment {
341 div.comment {
342 width: @comment-max-width;
342 width: @comment-max-width;
343 }
343 }
344 }
344 }
345
345
346 //Style page
346 //Style page
347 @style-extra-margin: @sidebar-width + (@sidebarpadding * 3) + @padding;
347 @style-extra-margin: @sidebar-width + (@sidebarpadding * 3) + @padding;
348 #style-page .code-difftable{
348 #style-page .code-difftable{
349 .comment-inline-form,
349 .comment-inline-form,
350 div.comment {
350 div.comment {
351 width: @comment-max-width - @style-extra-margin;
351 width: @comment-max-width - @style-extra-margin;
352 }
352 }
353 }
353 }
354
354
355 #context-bar > h2 {
355 #context-bar > h2 {
356 font-size: 20px;
356 font-size: 20px;
357 }
357 }
358
358
359 #context-bar > h2> a {
359 #context-bar > h2> a {
360 font-size: 20px;
360 font-size: 20px;
361 }
361 }
362 // end of defaults
362 // end of defaults
363
363
364 .file_diff_buttons {
364 .file_diff_buttons {
365 padding: 0 0 @padding;
365 padding: 0 0 @padding;
366
366
367 .drop-menu {
367 .drop-menu {
368 float: left;
368 float: left;
369 margin: 0 @padding 0 0;
369 margin: 0 @padding 0 0;
370 }
370 }
371 .btn {
371 .btn {
372 margin: 0 @padding 0 0;
372 margin: 0 @padding 0 0;
373 }
373 }
374 }
374 }
375
375
376 .code-body.textarea.editor {
376 .code-body.textarea.editor {
377 max-width: none;
377 max-width: none;
378 padding: 15px;
378 padding: 15px;
379 }
379 }
380
380
381 td.injected_diff{
381 td.injected_diff{
382 max-width: 1178px;
382 max-width: 1178px;
383 overflow-x: auto;
383 overflow-x: auto;
384 overflow-y: hidden;
384 overflow-y: hidden;
385
385
386 div.diff-container,
386 div.diff-container,
387 div.diffblock{
387 div.diffblock{
388 max-width: 100%;
388 max-width: 100%;
389 }
389 }
390
390
391 div.code-body {
391 div.code-body {
392 max-width: 1124px;
392 max-width: 1124px;
393 overflow-x: auto;
393 overflow-x: auto;
394 overflow-y: hidden;
394 overflow-y: hidden;
395 padding: 0;
395 padding: 0;
396 }
396 }
397 div.diffblock {
397 div.diffblock {
398 border: none;
398 border: none;
399 }
399 }
400
400
401 &.inline-form {
401 &.inline-form {
402 width: 99%
402 width: 99%
403 }
403 }
404 }
404 }
405
405
406
406
407 table.code-difftable {
407 table.code-difftable {
408 width: 100%;
408 width: 100%;
409 }
409 }
410
410
411 /** PYGMENTS COLORING **/
411 /** PYGMENTS COLORING **/
412 div.codeblock {
412 div.codeblock {
413
413
414 // TODO: johbo: Added interim to get rid of the margin around
414 // TODO: johbo: Added interim to get rid of the margin around
415 // Select2 widgets. This needs further cleanup.
415 // Select2 widgets. This needs further cleanup.
416 margin-top: @padding;
416 margin-top: @padding;
417
417
418 overflow: auto;
418 overflow: auto;
419 padding: 0px;
419 padding: 0px;
420 border: @border-thickness solid @grey5;
420 border: @border-thickness solid @grey5;
421 background: @grey6;
421 background: @grey6;
422 .border-radius(@border-radius);
422 .border-radius(@border-radius);
423
423
424 #remove_gist {
424 #remove_gist {
425 float: right;
425 float: right;
426 }
426 }
427
427
428 .author {
428 .author {
429 clear: both;
429 clear: both;
430 vertical-align: middle;
430 vertical-align: middle;
431 font-family: @text-bold;
431 font-family: @text-bold;
432 }
432 }
433
433
434 .btn-mini {
434 .btn-mini {
435 float: left;
435 float: left;
436 margin: 0 5px 0 0;
436 margin: 0 5px 0 0;
437 }
437 }
438
438
439 .code-header {
439 .code-header {
440 padding: @padding;
440 padding: @padding;
441 border-bottom: @border-thickness solid @grey5;
441 border-bottom: @border-thickness solid @grey5;
442
442
443 .rc-user {
443 .rc-user {
444 min-width: 0;
444 min-width: 0;
445 margin-right: .5em;
445 margin-right: .5em;
446 }
446 }
447
447
448 .stats {
448 .stats {
449 clear: both;
449 clear: both;
450 margin: 0 0 @padding 0;
450 margin: 0 0 @padding 0;
451 padding: 0;
451 padding: 0;
452 .left {
452 .left {
453 float: left;
453 float: left;
454 clear: left;
454 clear: left;
455 max-width: 75%;
455 max-width: 75%;
456 margin: 0 0 @padding 0;
456 margin: 0 0 @padding 0;
457
457
458 &.item {
458 &.item {
459 margin-right: @padding;
459 margin-right: @padding;
460 &.last { border-right: none; }
460 &.last { border-right: none; }
461 }
461 }
462 }
462 }
463 .buttons { float: right; }
463 .buttons { float: right; }
464 .author {
464 .author {
465 height: 25px; margin-left: 15px; font-weight: bold;
465 height: 25px; margin-left: 15px; font-weight: bold;
466 }
466 }
467 }
467 }
468
468
469 .commit {
469 .commit {
470 margin: 5px 0 0 26px;
470 margin: 5px 0 0 26px;
471 font-weight: normal;
471 font-weight: normal;
472 white-space: pre-wrap;
472 white-space: pre-wrap;
473 }
473 }
474 }
474 }
475
475
476 .message {
476 .message {
477 position: relative;
477 position: relative;
478 margin: @padding;
478 margin: @padding;
479
479
480 .codeblock-label {
480 .codeblock-label {
481 margin: 0 0 1em 0;
481 margin: 0 0 1em 0;
482 }
482 }
483 }
483 }
484
484
485 .code-body {
485 .code-body {
486 padding: @padding;
486 padding: @padding;
487 background-color: #ffffff;
487 background-color: #ffffff;
488 min-width: 100%;
488 min-width: 100%;
489 box-sizing: border-box;
489 box-sizing: border-box;
490 // TODO: johbo: Parent has overflow: auto, this forces the child here
490 // TODO: johbo: Parent has overflow: auto, this forces the child here
491 // to have the intended size and to scroll. Should be simplified.
491 // to have the intended size and to scroll. Should be simplified.
492 width: 100%;
492 width: 100%;
493 overflow-x: auto;
493 overflow-x: auto;
494 }
494 }
495 }
495 }
496
496
497 .code-highlighttable,
497 .code-highlighttable,
498 div.codeblock {
498 div.codeblock {
499
499
500 &.readme {
500 &.readme {
501 background-color: white;
501 background-color: white;
502 }
502 }
503
503
504 .markdown-block table {
504 .markdown-block table {
505 border-collapse: collapse;
505 border-collapse: collapse;
506
506
507 th,
507 th,
508 td {
508 td {
509 padding: .5em;
509 padding: .5em;
510 border: @border-thickness solid @border-default-color;
510 border: @border-thickness solid @border-default-color;
511 }
511 }
512 }
512 }
513
513
514 table {
514 table {
515 border: 0px;
515 border: 0px;
516 margin: 0;
516 margin: 0;
517 letter-spacing: normal;
517 letter-spacing: normal;
518
518
519
519
520 td {
520 td {
521 border: 0px;
521 border: 0px;
522 vertical-align: top;
522 vertical-align: top;
523 }
523 }
524 }
524 }
525 }
525 }
526
526
527 div.codeblock .code-header .search-path { padding: 0 0 0 10px; }
527 div.codeblock .code-header .search-path { padding: 0 0 0 10px; }
528 div.search-code-body {
528 div.search-code-body {
529 background-color: #ffffff; padding: 5px 0 5px 10px;
529 background-color: #ffffff; padding: 5px 0 5px 10px;
530 pre {
530 pre {
531 .match { background-color: #faffa6;}
531 .match { background-color: #faffa6;}
532 .break { display: block; width: 100%; background-color: #DDE7EF; color: #747474; }
532 .break { display: block; width: 100%; background-color: #DDE7EF; color: #747474; }
533 }
533 }
534 .code-highlighttable {
534 .code-highlighttable {
535 border-collapse: collapse;
535 border-collapse: collapse;
536
536
537 tr:hover {
537 tr:hover {
538 background: #fafafa;
538 background: #fafafa;
539 }
539 }
540 td.code {
540 td.code {
541 padding-left: 10px;
541 padding-left: 10px;
542 }
542 }
543 td.line {
543 td.line {
544 border-right: 1px solid #ccc !important;
544 border-right: 1px solid #ccc !important;
545 padding-right: 10px;
545 padding-right: 10px;
546 text-align: right;
546 text-align: right;
547 font-family: "Lucida Console",Monaco,monospace;
547 font-family: "Lucida Console",Monaco,monospace;
548 span {
548 span {
549 white-space: pre-wrap;
549 white-space: pre-wrap;
550 color: #666666;
550 color: #666666;
551 }
551 }
552 }
552 }
553 }
553 }
554 }
554 }
555
555
556 div.annotatediv { margin-left: 2px; margin-right: 4px; }
556 div.annotatediv { margin-left: 2px; margin-right: 4px; }
557 .code-highlight {
557 .code-highlight {
558 margin: 0; padding: 0; border-left: @border-thickness solid @grey5;
558 margin: 0; padding: 0; border-left: @border-thickness solid @grey5;
559 pre, .linenodiv pre { padding: 0 5px; margin: 0; }
559 pre, .linenodiv pre { padding: 0 5px; margin: 0; }
560 pre div:target {background-color: @comment-highlight-color !important;}
560 pre div:target {background-color: @comment-highlight-color !important;}
561 }
561 }
562
562
563 .linenos a { text-decoration: none; }
563 .linenos a { text-decoration: none; }
564
564
565 .CodeMirror-selected { background: @rchighlightblue; }
565 .CodeMirror-selected { background: @rchighlightblue; }
566 .CodeMirror-focused .CodeMirror-selected { background: @rchighlightblue; }
566 .CodeMirror-focused .CodeMirror-selected { background: @rchighlightblue; }
567 .CodeMirror ::selection { background: @rchighlightblue; }
567 .CodeMirror ::selection { background: @rchighlightblue; }
568 .CodeMirror ::-moz-selection { background: @rchighlightblue; }
568 .CodeMirror ::-moz-selection { background: @rchighlightblue; }
569
569
570 .code { display: block; border:0px !important; }
570 .code { display: block; border:0px !important; }
571 .code-highlight, /* TODO: dan: merge codehilite into code-highlight */
571 .code-highlight, /* TODO: dan: merge codehilite into code-highlight */
572 .codehilite {
572 .codehilite {
573 .hll { background-color: #ffffcc }
573 .hll { background-color: #ffffcc }
574 .c { color: #408080; font-style: italic } /* Comment */
574 .c { color: #408080; font-style: italic } /* Comment */
575 .err, .codehilite .err { border: @border-thickness solid #FF0000 } /* Error */
575 .err, .codehilite .err { border: @border-thickness solid #FF0000 } /* Error */
576 .k { color: #008000; font-weight: bold } /* Keyword */
576 .k { color: #008000; font-weight: bold } /* Keyword */
577 .o { color: #666666 } /* Operator */
577 .o { color: #666666 } /* Operator */
578 .cm { color: #408080; font-style: italic } /* Comment.Multiline */
578 .cm { color: #408080; font-style: italic } /* Comment.Multiline */
579 .cp { color: #BC7A00 } /* Comment.Preproc */
579 .cp { color: #BC7A00 } /* Comment.Preproc */
580 .c1 { color: #408080; font-style: italic } /* Comment.Single */
580 .c1 { color: #408080; font-style: italic } /* Comment.Single */
581 .cs { color: #408080; font-style: italic } /* Comment.Special */
581 .cs { color: #408080; font-style: italic } /* Comment.Special */
582 .gd { color: #A00000 } /* Generic.Deleted */
582 .gd { color: #A00000 } /* Generic.Deleted */
583 .ge { font-style: italic } /* Generic.Emph */
583 .ge { font-style: italic } /* Generic.Emph */
584 .gr { color: #FF0000 } /* Generic.Error */
584 .gr { color: #FF0000 } /* Generic.Error */
585 .gh { color: #000080; font-weight: bold } /* Generic.Heading */
585 .gh { color: #000080; font-weight: bold } /* Generic.Heading */
586 .gi { color: #00A000 } /* Generic.Inserted */
586 .gi { color: #00A000 } /* Generic.Inserted */
587 .go { color: #808080 } /* Generic.Output */
587 .go { color: #808080 } /* Generic.Output */
588 .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
588 .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
589 .gs { font-weight: bold } /* Generic.Strong */
589 .gs { font-weight: bold } /* Generic.Strong */
590 .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
590 .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
591 .gt { color: #0040D0 } /* Generic.Traceback */
591 .gt { color: #0040D0 } /* Generic.Traceback */
592 .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
592 .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
593 .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
593 .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
594 .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
594 .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
595 .kp { color: #008000 } /* Keyword.Pseudo */
595 .kp { color: #008000 } /* Keyword.Pseudo */
596 .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
596 .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
597 .kt { color: #B00040 } /* Keyword.Type */
597 .kt { color: #B00040 } /* Keyword.Type */
598 .m { color: #666666 } /* Literal.Number */
598 .m { color: #666666 } /* Literal.Number */
599 .s { color: #BA2121 } /* Literal.String */
599 .s { color: #BA2121 } /* Literal.String */
600 .na { color: #7D9029 } /* Name.Attribute */
600 .na { color: #7D9029 } /* Name.Attribute */
601 .nb { color: #008000 } /* Name.Builtin */
601 .nb { color: #008000 } /* Name.Builtin */
602 .nc { color: #0000FF; font-weight: bold } /* Name.Class */
602 .nc { color: #0000FF; font-weight: bold } /* Name.Class */
603 .no { color: #880000 } /* Name.Constant */
603 .no { color: #880000 } /* Name.Constant */
604 .nd { color: #AA22FF } /* Name.Decorator */
604 .nd { color: #AA22FF } /* Name.Decorator */
605 .ni { color: #999999; font-weight: bold } /* Name.Entity */
605 .ni { color: #999999; font-weight: bold } /* Name.Entity */
606 .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
606 .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
607 .nf { color: #0000FF } /* Name.Function */
607 .nf { color: #0000FF } /* Name.Function */
608 .nl { color: #A0A000 } /* Name.Label */
608 .nl { color: #A0A000 } /* Name.Label */
609 .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
609 .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
610 .nt { color: #008000; font-weight: bold } /* Name.Tag */
610 .nt { color: #008000; font-weight: bold } /* Name.Tag */
611 .nv { color: #19177C } /* Name.Variable */
611 .nv { color: #19177C } /* Name.Variable */
612 .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
612 .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
613 .w { color: #bbbbbb } /* Text.Whitespace */
613 .w { color: #bbbbbb } /* Text.Whitespace */
614 .mf { color: #666666 } /* Literal.Number.Float */
614 .mf { color: #666666 } /* Literal.Number.Float */
615 .mh { color: #666666 } /* Literal.Number.Hex */
615 .mh { color: #666666 } /* Literal.Number.Hex */
616 .mi { color: #666666 } /* Literal.Number.Integer */
616 .mi { color: #666666 } /* Literal.Number.Integer */
617 .mo { color: #666666 } /* Literal.Number.Oct */
617 .mo { color: #666666 } /* Literal.Number.Oct */
618 .sb { color: #BA2121 } /* Literal.String.Backtick */
618 .sb { color: #BA2121 } /* Literal.String.Backtick */
619 .sc { color: #BA2121 } /* Literal.String.Char */
619 .sc { color: #BA2121 } /* Literal.String.Char */
620 .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
620 .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
621 .s2 { color: #BA2121 } /* Literal.String.Double */
621 .s2 { color: #BA2121 } /* Literal.String.Double */
622 .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
622 .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
623 .sh { color: #BA2121 } /* Literal.String.Heredoc */
623 .sh { color: #BA2121 } /* Literal.String.Heredoc */
624 .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
624 .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
625 .sx { color: #008000 } /* Literal.String.Other */
625 .sx { color: #008000 } /* Literal.String.Other */
626 .sr { color: #BB6688 } /* Literal.String.Regex */
626 .sr { color: #BB6688 } /* Literal.String.Regex */
627 .s1 { color: #BA2121 } /* Literal.String.Single */
627 .s1 { color: #BA2121 } /* Literal.String.Single */
628 .ss { color: #19177C } /* Literal.String.Symbol */
628 .ss { color: #19177C } /* Literal.String.Symbol */
629 .bp { color: #008000 } /* Name.Builtin.Pseudo */
629 .bp { color: #008000 } /* Name.Builtin.Pseudo */
630 .vc { color: #19177C } /* Name.Variable.Class */
630 .vc { color: #19177C } /* Name.Variable.Class */
631 .vg { color: #19177C } /* Name.Variable.Global */
631 .vg { color: #19177C } /* Name.Variable.Global */
632 .vi { color: #19177C } /* Name.Variable.Instance */
632 .vi { color: #19177C } /* Name.Variable.Instance */
633 .il { color: #666666 } /* Literal.Number.Integer.Long */
633 .il { color: #666666 } /* Literal.Number.Integer.Long */
634 }
634 }
635
635
636 /* customized pre blocks for markdown/rst */
636 /* customized pre blocks for markdown/rst */
637 pre.literal-block, .codehilite pre{
637 pre.literal-block, .codehilite pre{
638 padding: @padding;
638 padding: @padding;
639 border: 1px solid @grey6;
639 border: 1px solid @grey6;
640 .border-radius(@border-radius);
640 .border-radius(@border-radius);
641 background-color: @grey7;
641 background-color: @grey7;
642 }
642 }
643
643
644
644
645 /* START NEW CODE BLOCK CSS */
645 /* START NEW CODE BLOCK CSS */
646
646
647 @cb-line-height: 18px;
647 @cb-line-height: 18px;
648 @cb-line-code-padding: 10px;
648 @cb-line-code-padding: 10px;
649 @cb-text-padding: 5px;
649
650
651 @diff-pill-padding: 2px 7px;
652
653 input.diff-collapse-state {
654 display: none;
655
656 &:checked + .diff { /* file diff is collapsed */
657 .cb {
658 display: none
659 }
660 .diff-collapse-indicator {
661 border-width: 9px 0 9px 15.6px;
662 border-color: transparent transparent transparent #ccc;
663 }
664 .diff-menu {
665 display: none;
666 }
667 margin: -1px 0 0 0;
668 }
669
670 &+ .diff { /* file diff is expanded */
671 .diff-collapse-indicator {
672 border-width: 15.6px 9px 0 9px;
673 border-color: #ccc transparent transparent transparent;
674 }
675 .diff-menu {
676 display: block;
677 }
678 margin: 20px 0;
679 }
680 }
681 .diff {
682 border: 1px solid @grey5;
683
684 /* START OVERRIDES */
685 .code-highlight {
686 border: none; // TODO: remove this border from the global
687 // .code-highlight, it doesn't belong there
688 }
689 label {
690 margin: 0; // TODO: remove this margin definition from global label
691 // it doesn't belong there - if margin on labels
692 // are needed for a form they should be defined
693 // in the form's class
694 }
695 /* END OVERRIDES */
696
697 * {
698 box-sizing: border-box;
699 }
700 .diff-anchor {
701 visibility: hidden;
702 }
703 &:hover {
704 .diff-anchor {
705 visibility: visible;
706 }
707 }
708
709 .diff-collapse-indicator {
710 width: 0;
711 height: 0;
712 border-style: solid;
713 float: left;
714 margin: 2px 2px 0 0;
715 cursor: pointer;
716 }
717
718 .diff-heading {
719 background: @grey7;
720 cursor: pointer;
721 display: block;
722 padding: 5px 10px;
723 }
724 .diff-heading:after {
725 content: "";
726 display: table;
727 clear: both;
728 }
729 .diff-heading:hover {
730 background: #e1e9f4 !important;
731 }
732
733 .diff-menu {
734 float: right;
735 a, button {
736 padding: 5px;
737 display: block;
738 float: left
739 }
740 }
741 .diff-pill {
742 display: block;
743 float: left;
744 padding: @diff-pill-padding;
745 }
746 .diff-pill-group {
747 .diff-pill {
748 opacity: .8;
749 &:first-child {
750 border-radius: @border-radius 0 0 @border-radius;
751 }
752 &:last-child {
753 border-radius: 0 @border-radius @border-radius 0;
754 }
755 &:only-child {
756 border-radius: @border-radius;
757 }
758 }
759 }
760 .diff-pill {
761 &[op="name"] {
762 background: none;
763 color: @grey2;
764 opacity: 1;
765 color: white;
766 }
767 &[op="limited"] {
768 background: @grey2;
769 color: white;
770 }
771 &[op="binary"] {
772 background: @color7;
773 color: white;
774 }
775 &[op="modified"] {
776 background: @alert1;
777 color: white;
778 }
779 &[op="renamed"] {
780 background: @color4;
781 color: white;
782 }
783 &[op="mode"] {
784 background: @grey3;
785 color: white;
786 }
787 &[op="symlink"] {
788 background: @color8;
789 color: white;
790 }
791
792 &[op="added"] { /* added lines */
793 background: @alert1;
794 color: white;
795 }
796 &[op="deleted"] { /* deleted lines */
797 background: @alert2;
798 color: white;
799 }
800
801 &[op="created"] { /* created file */
802 background: @alert1;
803 color: white;
804 }
805 &[op="removed"] { /* deleted file */
806 background: @color5;
807 color: white;
808 }
809 }
810
811 .diff-collapse-button, .diff-expand-button {
812 cursor: pointer;
813 }
814 .diff-collapse-button {
815 display: inline;
816 }
817 .diff-expand-button {
818 display: none;
819 }
820 .diff-collapsed .diff-collapse-button {
821 display: none;
822 }
823 .diff-collapsed .diff-expand-button {
824 display: inline;
825 }
826 }
650 table.cb {
827 table.cb {
651 width: 100%;
828 width: 100%;
652 border-collapse: collapse;
829 border-collapse: collapse;
653 margin-bottom: 10px;
654
830
655 * {
831 .cb-text {
656 box-sizing: border-box;
832 padding: @cb-text-padding;
833 }
834 .cb-hunk {
835 padding: @cb-text-padding;
836 }
837 .cb-expand {
838 display: none;
839 }
840 .cb-collapse {
841 display: inline;
842 }
843 &.cb-collapsed {
844 .cb-line {
845 display: none;
846 }
847 .cb-expand {
848 display: inline;
849 }
850 .cb-collapse {
851 display: none;
852 }
657 }
853 }
658
854
659 /* intentionally general selector since .cb-line-selected must override it
855 /* intentionally general selector since .cb-line-selected must override it
660 and they both use !important since the td itself may have a random color
856 and they both use !important since the td itself may have a random color
661 generated by annotation blocks. TLDR: if you change it, make sure
857 generated by annotation blocks. TLDR: if you change it, make sure
662 annotated block selection and line selection in file view still work */
858 annotated block selection and line selection in file view still work */
663 .cb-line-fresh .cb-content {
859 .cb-line-fresh .cb-content {
664 background: white !important;
860 background: white !important;
665 }
861 }
862 .cb-warning {
863 background: #fff4dd;
864 }
666
865
667 tr.cb-annotate {
866 &.cb-diff-sideside {
867 td {
868 &.cb-content {
869 width: 50%;
870 }
871 }
872 }
873
874 tr {
875 &.cb-annotate {
668 border-top: 1px solid #eee;
876 border-top: 1px solid #eee;
669
877
670 &+ .cb-line {
878 &+ .cb-line {
671 border-top: 1px solid #eee;
879 border-top: 1px solid #eee;
672 }
880 }
673
881
674 &:first-child {
882 &:first-child {
675 border-top: none;
883 border-top: none;
676 &+ .cb-line {
884 &+ .cb-line {
677 border-top: none;
885 border-top: none;
678 }
886 }
679 }
887 }
680 }
888 }
681
889
890 &.cb-hunk {
891 font-family: @font-family-monospace;
892 color: rgba(0, 0, 0, 0.3);
893
894 td {
895 &:first-child {
896 background: #edf2f9;
897 }
898 &:last-child {
899 background: #f4f7fb;
900 }
901 }
902 }
903 }
904
682 td {
905 td {
683 vertical-align: top;
906 vertical-align: top;
684 padding: 0;
907 padding: 0;
685
908
686 &.cb-content {
909 &.cb-content {
687 font-size: 12.35px;
910 font-size: 12.35px;
688
911
912 &.cb-line-selected .cb-code {
913 background: @comment-highlight-color !important;
914 }
915
689 span.cb-code {
916 span.cb-code {
690 line-height: @cb-line-height;
917 line-height: @cb-line-height;
691 padding-left: @cb-line-code-padding;
918 padding-left: @cb-line-code-padding;
919 padding-right: @cb-line-code-padding;
692 display: block;
920 display: block;
693 white-space: pre-wrap;
921 white-space: pre-wrap;
694 font-family: @font-family-monospace;
922 font-family: @font-family-monospace;
695 word-break: break-word;
923 word-break: break-word;
696 }
924 }
697 }
925 }
698
926
699 &.cb-lineno {
927 &.cb-lineno {
700 padding: 0;
928 padding: 0;
701 width: 50px;
929 width: 50px;
702 color: rgba(0, 0, 0, 0.3);
930 color: rgba(0, 0, 0, 0.3);
703 text-align: right;
931 text-align: right;
704 border-right: 1px solid #eee;
932 border-right: 1px solid #eee;
705 font-family: @font-family-monospace;
933 font-family: @font-family-monospace;
706
934
707 a::before {
935 a::before {
708 content: attr(data-line-no);
936 content: attr(data-line-no);
709 }
937 }
710 &.cb-line-selected a {
938 &.cb-line-selected a {
711 background: @comment-highlight-color !important;
939 background: @comment-highlight-color !important;
712 }
940 }
713
941
714 a {
942 a {
715 display: block;
943 display: block;
716 padding-right: @cb-line-code-padding;
944 padding-right: @cb-line-code-padding;
945 padding-left: @cb-line-code-padding;
717 line-height: @cb-line-height;
946 line-height: @cb-line-height;
718 color: rgba(0, 0, 0, 0.3);
947 color: rgba(0, 0, 0, 0.3);
719 }
948 }
720 }
949 }
721
950
722 &.cb-content {
951 &.cb-empty {
723 &.cb-line-selected .cb-code {
952 background: @grey7;
724 background: @comment-highlight-color !important;
953 }
954
955 ins {
956 color: black;
957 background: #a6f3a6;
958 text-decoration: none;
959 }
960 del {
961 color: black;
962 background: #f8cbcb;
963 text-decoration: none;
964 }
965 &.cb-addition {
966 background: #ecffec;
967
968 &.blob-lineno {
969 background: #ddffdd;
970 }
971 }
972 &.cb-deletion {
973 background: #ffecec;
974
975 &.blob-lineno {
976 background: #ffdddd;
725 }
977 }
726 }
978 }
727
979
728 &.cb-annotate-info {
980 &.cb-annotate-info {
729 width: 320px;
981 width: 320px;
730 min-width: 320px;
982 min-width: 320px;
731 max-width: 320px;
983 max-width: 320px;
732 padding: 5px 2px;
984 padding: 5px 2px;
733 font-size: 13px;
985 font-size: 13px;
734
986
735 strong.cb-annotate-message {
987 strong.cb-annotate-message {
736 padding: 5px 0;
988 padding: 5px 0;
737 white-space: pre-line;
989 white-space: pre-line;
738 display: inline-block;
990 display: inline-block;
739 }
991 }
740 .rc-user {
992 .rc-user {
741 float: none;
993 float: none;
742 padding: 0 6px 0 17px;
994 padding: 0 6px 0 17px;
743 min-width: auto;
995 min-width: auto;
744 min-height: auto;
996 min-height: auto;
745 }
997 }
746 }
998 }
747
999
748 &.cb-annotate-revision {
1000 &.cb-annotate-revision {
749 cursor: pointer;
1001 cursor: pointer;
750 text-align: right;
1002 text-align: right;
751 }
1003 }
752 }
1004 }
753 }
1005 }
@@ -1,476 +1,492 b''
1 // # Copyright (C) 2010-2016 RhodeCode GmbH
1 // # Copyright (C) 2010-2016 RhodeCode GmbH
2 // #
2 // #
3 // # This program is free software: you can redistribute it and/or modify
3 // # This program is free software: you can redistribute it and/or modify
4 // # it under the terms of the GNU Affero General Public License, version 3
4 // # it under the terms of the GNU Affero General Public License, version 3
5 // # (only), as published by the Free Software Foundation.
5 // # (only), as published by the Free Software Foundation.
6 // #
6 // #
7 // # This program is distributed in the hope that it will be useful,
7 // # This program is distributed in the hope that it will be useful,
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 // # GNU General Public License for more details.
10 // # GNU General Public License for more details.
11 // #
11 // #
12 // # You should have received a copy of the GNU Affero General Public License
12 // # You should have received a copy of the GNU Affero General Public License
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 // #
14 // #
15 // # This program is dual-licensed. If you wish to learn more about the
15 // # This program is dual-licensed. If you wish to learn more about the
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 /**
19 /**
20 RhodeCode JS Files
20 RhodeCode JS Files
21 **/
21 **/
22
22
23 if (typeof console == "undefined" || typeof console.log == "undefined"){
23 if (typeof console == "undefined" || typeof console.log == "undefined"){
24 console = { log: function() {} }
24 console = { log: function() {} }
25 }
25 }
26
26
27 // TODO: move the following function to submodules
27 // TODO: move the following function to submodules
28
28
29 /**
29 /**
30 * show more
30 * show more
31 */
31 */
32 var show_more_event = function(){
32 var show_more_event = function(){
33 $('table .show_more').click(function(e) {
33 $('table .show_more').click(function(e) {
34 var cid = e.target.id.substring(1);
34 var cid = e.target.id.substring(1);
35 var button = $(this);
35 var button = $(this);
36 if (button.hasClass('open')) {
36 if (button.hasClass('open')) {
37 $('#'+cid).hide();
37 $('#'+cid).hide();
38 button.removeClass('open');
38 button.removeClass('open');
39 } else {
39 } else {
40 $('#'+cid).show();
40 $('#'+cid).show();
41 button.addClass('open one');
41 button.addClass('open one');
42 }
42 }
43 });
43 });
44 };
44 };
45
45
46 var compare_radio_buttons = function(repo_name, compare_ref_type){
46 var compare_radio_buttons = function(repo_name, compare_ref_type){
47 $('#compare_action').on('click', function(e){
47 $('#compare_action').on('click', function(e){
48 e.preventDefault();
48 e.preventDefault();
49
49
50 var source = $('input[name=compare_source]:checked').val();
50 var source = $('input[name=compare_source]:checked').val();
51 var target = $('input[name=compare_target]:checked').val();
51 var target = $('input[name=compare_target]:checked').val();
52 if(source && target){
52 if(source && target){
53 var url_data = {
53 var url_data = {
54 repo_name: repo_name,
54 repo_name: repo_name,
55 source_ref: source,
55 source_ref: source,
56 source_ref_type: compare_ref_type,
56 source_ref_type: compare_ref_type,
57 target_ref: target,
57 target_ref: target,
58 target_ref_type: compare_ref_type,
58 target_ref_type: compare_ref_type,
59 merge: 1
59 merge: 1
60 };
60 };
61 window.location = pyroutes.url('compare_url', url_data);
61 window.location = pyroutes.url('compare_url', url_data);
62 }
62 }
63 });
63 });
64 $('.compare-radio-button').on('click', function(e){
64 $('.compare-radio-button').on('click', function(e){
65 var source = $('input[name=compare_source]:checked').val();
65 var source = $('input[name=compare_source]:checked').val();
66 var target = $('input[name=compare_target]:checked').val();
66 var target = $('input[name=compare_target]:checked').val();
67 if(source && target){
67 if(source && target){
68 $('#compare_action').removeAttr("disabled");
68 $('#compare_action').removeAttr("disabled");
69 $('#compare_action').removeClass("disabled");
69 $('#compare_action').removeClass("disabled");
70 }
70 }
71 })
71 })
72 };
72 };
73
73
74 var showRepoSize = function(target, repo_name, commit_id, callback) {
74 var showRepoSize = function(target, repo_name, commit_id, callback) {
75 var container = $('#' + target);
75 var container = $('#' + target);
76 var url = pyroutes.url('repo_stats',
76 var url = pyroutes.url('repo_stats',
77 {"repo_name": repo_name, "commit_id": commit_id});
77 {"repo_name": repo_name, "commit_id": commit_id});
78
78
79 if (!container.hasClass('loaded')) {
79 if (!container.hasClass('loaded')) {
80 $.ajax({url: url})
80 $.ajax({url: url})
81 .complete(function (data) {
81 .complete(function (data) {
82 var responseJSON = data.responseJSON;
82 var responseJSON = data.responseJSON;
83 container.addClass('loaded');
83 container.addClass('loaded');
84 container.html(responseJSON.size);
84 container.html(responseJSON.size);
85 callback(responseJSON.code_stats)
85 callback(responseJSON.code_stats)
86 })
86 })
87 .fail(function (data) {
87 .fail(function (data) {
88 console.log('failed to load repo stats');
88 console.log('failed to load repo stats');
89 });
89 });
90 }
90 }
91
91
92 };
92 };
93
93
94 var showRepoStats = function(target, data){
94 var showRepoStats = function(target, data){
95 var container = $('#' + target);
95 var container = $('#' + target);
96
96
97 if (container.hasClass('loaded')) {
97 if (container.hasClass('loaded')) {
98 return
98 return
99 }
99 }
100
100
101 var total = 0;
101 var total = 0;
102 var no_data = true;
102 var no_data = true;
103 var tbl = document.createElement('table');
103 var tbl = document.createElement('table');
104 tbl.setAttribute('class', 'trending_language_tbl');
104 tbl.setAttribute('class', 'trending_language_tbl');
105
105
106 $.each(data, function(key, val){
106 $.each(data, function(key, val){
107 total += val.count;
107 total += val.count;
108 });
108 });
109
109
110 var sortedStats = [];
110 var sortedStats = [];
111 for (var obj in data){
111 for (var obj in data){
112 sortedStats.push([obj, data[obj]])
112 sortedStats.push([obj, data[obj]])
113 }
113 }
114 var sortedData = sortedStats.sort(function (a, b) {
114 var sortedData = sortedStats.sort(function (a, b) {
115 return b[1].count - a[1].count
115 return b[1].count - a[1].count
116 });
116 });
117 var cnt = 0;
117 var cnt = 0;
118 $.each(sortedData, function(idx, val){
118 $.each(sortedData, function(idx, val){
119 cnt += 1;
119 cnt += 1;
120 no_data = false;
120 no_data = false;
121
121
122 var hide = cnt > 2;
122 var hide = cnt > 2;
123 var tr = document.createElement('tr');
123 var tr = document.createElement('tr');
124 if (hide) {
124 if (hide) {
125 tr.setAttribute('style', 'display:none');
125 tr.setAttribute('style', 'display:none');
126 tr.setAttribute('class', 'stats_hidden');
126 tr.setAttribute('class', 'stats_hidden');
127 }
127 }
128
128
129 var key = val[0];
129 var key = val[0];
130 var obj = {"desc": val[1].desc, "count": val[1].count};
130 var obj = {"desc": val[1].desc, "count": val[1].count};
131
131
132 var percentage = Math.round((obj.count / total * 100), 2);
132 var percentage = Math.round((obj.count / total * 100), 2);
133
133
134 var td1 = document.createElement('td');
134 var td1 = document.createElement('td');
135 td1.width = 300;
135 td1.width = 300;
136 var trending_language_label = document.createElement('div');
136 var trending_language_label = document.createElement('div');
137 trending_language_label.innerHTML = obj.desc + " (.{0})".format(key);
137 trending_language_label.innerHTML = obj.desc + " (.{0})".format(key);
138 td1.appendChild(trending_language_label);
138 td1.appendChild(trending_language_label);
139
139
140 var td2 = document.createElement('td');
140 var td2 = document.createElement('td');
141 var trending_language = document.createElement('div');
141 var trending_language = document.createElement('div');
142 var nr_files = obj.count +" "+ _ngettext('file', 'files', obj.count);
142 var nr_files = obj.count +" "+ _ngettext('file', 'files', obj.count);
143
143
144 trending_language.title = key + " " + nr_files;
144 trending_language.title = key + " " + nr_files;
145
145
146 trending_language.innerHTML = "<span>" + percentage + "% " + nr_files
146 trending_language.innerHTML = "<span>" + percentage + "% " + nr_files
147 + "</span><b>" + percentage + "% " + nr_files + "</b>";
147 + "</span><b>" + percentage + "% " + nr_files + "</b>";
148
148
149 trending_language.setAttribute("class", 'trending_language');
149 trending_language.setAttribute("class", 'trending_language');
150 $('b', trending_language)[0].style.width = percentage + "%";
150 $('b', trending_language)[0].style.width = percentage + "%";
151 td2.appendChild(trending_language);
151 td2.appendChild(trending_language);
152
152
153 tr.appendChild(td1);
153 tr.appendChild(td1);
154 tr.appendChild(td2);
154 tr.appendChild(td2);
155 tbl.appendChild(tr);
155 tbl.appendChild(tr);
156 if (cnt == 3) {
156 if (cnt == 3) {
157 var show_more = document.createElement('tr');
157 var show_more = document.createElement('tr');
158 var td = document.createElement('td');
158 var td = document.createElement('td');
159 lnk = document.createElement('a');
159 lnk = document.createElement('a');
160
160
161 lnk.href = '#';
161 lnk.href = '#';
162 lnk.innerHTML = _gettext('Show more');
162 lnk.innerHTML = _gettext('Show more');
163 lnk.id = 'code_stats_show_more';
163 lnk.id = 'code_stats_show_more';
164 td.appendChild(lnk);
164 td.appendChild(lnk);
165
165
166 show_more.appendChild(td);
166 show_more.appendChild(td);
167 show_more.appendChild(document.createElement('td'));
167 show_more.appendChild(document.createElement('td'));
168 tbl.appendChild(show_more);
168 tbl.appendChild(show_more);
169 }
169 }
170 });
170 });
171
171
172 $(container).html(tbl);
172 $(container).html(tbl);
173 $(container).addClass('loaded');
173 $(container).addClass('loaded');
174
174
175 $('#code_stats_show_more').on('click', function (e) {
175 $('#code_stats_show_more').on('click', function (e) {
176 e.preventDefault();
176 e.preventDefault();
177 $('.stats_hidden').each(function (idx) {
177 $('.stats_hidden').each(function (idx) {
178 $(this).css("display", "");
178 $(this).css("display", "");
179 });
179 });
180 $('#code_stats_show_more').hide();
180 $('#code_stats_show_more').hide();
181 });
181 });
182
182
183 };
183 };
184
184
185
185
186 // Toggle Collapsable Content
186 // Toggle Collapsable Content
187 function collapsableContent() {
187 function collapsableContent() {
188
188
189 $('.collapsable-content').not('.no-hide').hide();
189 $('.collapsable-content').not('.no-hide').hide();
190
190
191 $('.btn-collapse').unbind(); //in case we've been here before
191 $('.btn-collapse').unbind(); //in case we've been here before
192 $('.btn-collapse').click(function() {
192 $('.btn-collapse').click(function() {
193 var button = $(this);
193 var button = $(this);
194 var togglename = $(this).data("toggle");
194 var togglename = $(this).data("toggle");
195 $('.collapsable-content[data-toggle='+togglename+']').toggle();
195 $('.collapsable-content[data-toggle='+togglename+']').toggle();
196 if ($(this).html()=="Show Less")
196 if ($(this).html()=="Show Less")
197 $(this).html("Show More");
197 $(this).html("Show More");
198 else
198 else
199 $(this).html("Show Less");
199 $(this).html("Show Less");
200 });
200 });
201 };
201 };
202
202
203 var timeagoActivate = function() {
203 var timeagoActivate = function() {
204 $("time.timeago").timeago();
204 $("time.timeago").timeago();
205 };
205 };
206
206
207 // Formatting values in a Select2 dropdown of commit references
207 // Formatting values in a Select2 dropdown of commit references
208 var formatSelect2SelectionRefs = function(commit_ref){
208 var formatSelect2SelectionRefs = function(commit_ref){
209 var tmpl = '';
209 var tmpl = '';
210 if (!commit_ref.text || commit_ref.type === 'sha'){
210 if (!commit_ref.text || commit_ref.type === 'sha'){
211 return commit_ref.text;
211 return commit_ref.text;
212 }
212 }
213 if (commit_ref.type === 'branch'){
213 if (commit_ref.type === 'branch'){
214 tmpl = tmpl.concat('<i class="icon-branch"></i> ');
214 tmpl = tmpl.concat('<i class="icon-branch"></i> ');
215 } else if (commit_ref.type === 'tag'){
215 } else if (commit_ref.type === 'tag'){
216 tmpl = tmpl.concat('<i class="icon-tag"></i> ');
216 tmpl = tmpl.concat('<i class="icon-tag"></i> ');
217 } else if (commit_ref.type === 'book'){
217 } else if (commit_ref.type === 'book'){
218 tmpl = tmpl.concat('<i class="icon-bookmark"></i> ');
218 tmpl = tmpl.concat('<i class="icon-bookmark"></i> ');
219 }
219 }
220 return tmpl.concat(commit_ref.text);
220 return tmpl.concat(commit_ref.text);
221 };
221 };
222
222
223 // takes a given html element and scrolls it down offset pixels
223 // takes a given html element and scrolls it down offset pixels
224 function offsetScroll(element, offset){
224 function offsetScroll(element, offset) {
225 setTimeout(function(){
225 setTimeout(function() {
226 var location = element.offset().top;
226 var location = element.offset().top;
227 // some browsers use body, some use html
227 // some browsers use body, some use html
228 $('html, body').animate({ scrollTop: (location - offset) });
228 $('html, body').animate({ scrollTop: (location - offset) });
229 }, 100);
229 }, 100);
230 }
230 }
231
231
232 // scroll an element `percent`% from the top of page in `time` ms
233 function scrollToElement(element, percent, time) {
234 percent = (percent === undefined ? 25 : percent);
235 time = (time === undefined ? 100 : time);
236
237 var $element = $(element);
238 var elOffset = $element.offset().top;
239 var elHeight = $element.height();
240 var windowHeight = $(window).height();
241 var offset = elOffset;
242 if (elHeight < windowHeight) {
243 offset = elOffset - ((windowHeight / (100 / percent)) - (elHeight / 2));
244 }
245 setTimeout(function() {
246 $('html, body').animate({ scrollTop: offset});
247 }, time);
248 }
249
232 /**
250 /**
233 * global hooks after DOM is loaded
251 * global hooks after DOM is loaded
234 */
252 */
235 $(document).ready(function() {
253 $(document).ready(function() {
236 firefoxAnchorFix();
254 firefoxAnchorFix();
237
255
238 $('.navigation a.menulink').on('click', function(e){
256 $('.navigation a.menulink').on('click', function(e){
239 var menuitem = $(this).parent('li');
257 var menuitem = $(this).parent('li');
240 if (menuitem.hasClass('open')) {
258 if (menuitem.hasClass('open')) {
241 menuitem.removeClass('open');
259 menuitem.removeClass('open');
242 } else {
260 } else {
243 menuitem.addClass('open');
261 menuitem.addClass('open');
244 $(document).on('click', function(event) {
262 $(document).on('click', function(event) {
245 if (!$(event.target).closest(menuitem).length) {
263 if (!$(event.target).closest(menuitem).length) {
246 menuitem.removeClass('open');
264 menuitem.removeClass('open');
247 }
265 }
248 });
266 });
249 }
267 }
250 });
268 });
251 $('.compare_view_files').on(
269 $('.compare_view_files').on(
252 'mouseenter mouseleave', 'tr.line .lineno a',function(event) {
270 'mouseenter mouseleave', 'tr.line .lineno a',function(event) {
253 if (event.type === "mouseenter") {
271 if (event.type === "mouseenter") {
254 $(this).parents('tr.line').addClass('hover');
272 $(this).parents('tr.line').addClass('hover');
255 } else {
273 } else {
256 $(this).parents('tr.line').removeClass('hover');
274 $(this).parents('tr.line').removeClass('hover');
257 }
275 }
258 });
276 });
259
277
260 $('.compare_view_files').on(
278 $('.compare_view_files').on(
261 'mouseenter mouseleave', 'tr.line .add-comment-line a',function(event){
279 'mouseenter mouseleave', 'tr.line .add-comment-line a',function(event){
262 if (event.type === "mouseenter") {
280 if (event.type === "mouseenter") {
263 $(this).parents('tr.line').addClass('commenting');
281 $(this).parents('tr.line').addClass('commenting');
264 } else {
282 } else {
265 $(this).parents('tr.line').removeClass('commenting');
283 $(this).parents('tr.line').removeClass('commenting');
266 }
284 }
267 });
285 });
268
286
269 $('body').on( /* TODO: replace the $('.compare_view_files').on('click') below
287 $('body').on( /* TODO: replace the $('.compare_view_files').on('click') below
270 when new diffs are integrated */
288 when new diffs are integrated */
271 'click', '.cb-lineno a', function(event) {
289 'click', '.cb-lineno a', function(event) {
272
290
273 if ($(this).attr('data-line-no') !== ""){
291 if ($(this).attr('data-line-no') !== ""){
274 $('.cb-line-selected').removeClass('cb-line-selected');
292 $('.cb-line-selected').removeClass('cb-line-selected');
275 var td = $(this).parent();
293 var td = $(this).parent();
276 td.addClass('cb-line-selected'); // line number td
294 td.addClass('cb-line-selected'); // line number td
277 td.next().addClass('cb-line-selected'); // line content td
295 td.next().addClass('cb-line-selected'); // line content td
278
296
279 // Replace URL without jumping to it if browser supports.
297 // Replace URL without jumping to it if browser supports.
280 // Default otherwise
298 // Default otherwise
281 if (history.pushState) {
299 if (history.pushState) {
282 var new_location = location.href.rstrip('#');
300 var new_location = location.href.rstrip('#');
283 if (location.hash) {
301 if (location.hash) {
284 new_location = new_location.replace(location.hash, "");
302 new_location = new_location.replace(location.hash, "");
285 }
303 }
286
304
287 // Make new anchor url
305 // Make new anchor url
288 new_location = new_location + $(this).attr('href');
306 new_location = new_location + $(this).attr('href');
289 history.pushState(true, document.title, new_location);
307 history.pushState(true, document.title, new_location);
290
308
291 return false;
309 return false;
292 }
310 }
293 }
311 }
294 });
312 });
295
313
296 $('.compare_view_files').on( /* TODO: replace this with .cb function above
314 $('.compare_view_files').on( /* TODO: replace this with .cb function above
297 when new diffs are integrated */
315 when new diffs are integrated */
298 'click', 'tr.line .lineno a',function(event) {
316 'click', 'tr.line .lineno a',function(event) {
299 if ($(this).text() != ""){
317 if ($(this).text() != ""){
300 $('tr.line').removeClass('selected');
318 $('tr.line').removeClass('selected');
301 $(this).parents("tr.line").addClass('selected');
319 $(this).parents("tr.line").addClass('selected');
302
320
303 // Replace URL without jumping to it if browser supports.
321 // Replace URL without jumping to it if browser supports.
304 // Default otherwise
322 // Default otherwise
305 if (history.pushState) {
323 if (history.pushState) {
306 var new_location = location.href;
324 var new_location = location.href;
307 if (location.hash){
325 if (location.hash){
308 new_location = new_location.replace(location.hash, "");
326 new_location = new_location.replace(location.hash, "");
309 }
327 }
310
328
311 // Make new anchor url
329 // Make new anchor url
312 var new_location = new_location+$(this).attr('href');
330 var new_location = new_location+$(this).attr('href');
313 history.pushState(true, document.title, new_location);
331 history.pushState(true, document.title, new_location);
314
332
315 return false;
333 return false;
316 }
334 }
317 }
335 }
318 });
336 });
319
337
320 $('.compare_view_files').on(
338 $('.compare_view_files').on(
321 'click', 'tr.line .add-comment-line a',function(event) {
339 'click', 'tr.line .add-comment-line a',function(event) {
322 var tr = $(event.currentTarget).parents('tr.line')[0];
340 var tr = $(event.currentTarget).parents('tr.line')[0];
323 injectInlineForm(tr);
341 injectInlineForm(tr);
324 return false;
342 return false;
325 });
343 });
326
344
327 $('.collapse_file').on('click', function(e) {
345 $('.collapse_file').on('click', function(e) {
328 e.stopPropagation();
346 e.stopPropagation();
329 if ($(e.target).is('a')) { return; }
347 if ($(e.target).is('a')) { return; }
330 var node = $(e.delegateTarget).first();
348 var node = $(e.delegateTarget).first();
331 var icon = $($(node.children().first()).children().first());
349 var icon = $($(node.children().first()).children().first());
332 var id = node.attr('fid');
350 var id = node.attr('fid');
333 var target = $('#'+id);
351 var target = $('#'+id);
334 var tr = $('#tr_'+id);
352 var tr = $('#tr_'+id);
335 var diff = $('#diff_'+id);
353 var diff = $('#diff_'+id);
336 if(node.hasClass('expand_file')){
354 if(node.hasClass('expand_file')){
337 node.removeClass('expand_file');
355 node.removeClass('expand_file');
338 icon.removeClass('expand_file_icon');
356 icon.removeClass('expand_file_icon');
339 node.addClass('collapse_file');
357 node.addClass('collapse_file');
340 icon.addClass('collapse_file_icon');
358 icon.addClass('collapse_file_icon');
341 diff.show();
359 diff.show();
342 tr.show();
360 tr.show();
343 target.show();
361 target.show();
344 } else {
362 } else {
345 node.removeClass('collapse_file');
363 node.removeClass('collapse_file');
346 icon.removeClass('collapse_file_icon');
364 icon.removeClass('collapse_file_icon');
347 node.addClass('expand_file');
365 node.addClass('expand_file');
348 icon.addClass('expand_file_icon');
366 icon.addClass('expand_file_icon');
349 diff.hide();
367 diff.hide();
350 tr.hide();
368 tr.hide();
351 target.hide();
369 target.hide();
352 }
370 }
353 });
371 });
354
372
355 $('#expand_all_files').click(function() {
373 $('#expand_all_files').click(function() {
356 $('.expand_file').each(function() {
374 $('.expand_file').each(function() {
357 var node = $(this);
375 var node = $(this);
358 var icon = $($(node.children().first()).children().first());
376 var icon = $($(node.children().first()).children().first());
359 var id = $(this).attr('fid');
377 var id = $(this).attr('fid');
360 var target = $('#'+id);
378 var target = $('#'+id);
361 var tr = $('#tr_'+id);
379 var tr = $('#tr_'+id);
362 var diff = $('#diff_'+id);
380 var diff = $('#diff_'+id);
363 node.removeClass('expand_file');
381 node.removeClass('expand_file');
364 icon.removeClass('expand_file_icon');
382 icon.removeClass('expand_file_icon');
365 node.addClass('collapse_file');
383 node.addClass('collapse_file');
366 icon.addClass('collapse_file_icon');
384 icon.addClass('collapse_file_icon');
367 diff.show();
385 diff.show();
368 tr.show();
386 tr.show();
369 target.show();
387 target.show();
370 });
388 });
371 });
389 });
372
390
373 $('#collapse_all_files').click(function() {
391 $('#collapse_all_files').click(function() {
374 $('.collapse_file').each(function() {
392 $('.collapse_file').each(function() {
375 var node = $(this);
393 var node = $(this);
376 var icon = $($(node.children().first()).children().first());
394 var icon = $($(node.children().first()).children().first());
377 var id = $(this).attr('fid');
395 var id = $(this).attr('fid');
378 var target = $('#'+id);
396 var target = $('#'+id);
379 var tr = $('#tr_'+id);
397 var tr = $('#tr_'+id);
380 var diff = $('#diff_'+id);
398 var diff = $('#diff_'+id);
381 node.removeClass('collapse_file');
399 node.removeClass('collapse_file');
382 icon.removeClass('collapse_file_icon');
400 icon.removeClass('collapse_file_icon');
383 node.addClass('expand_file');
401 node.addClass('expand_file');
384 icon.addClass('expand_file_icon');
402 icon.addClass('expand_file_icon');
385 diff.hide();
403 diff.hide();
386 tr.hide();
404 tr.hide();
387 target.hide();
405 target.hide();
388 });
406 });
389 });
407 });
390
408
391 // Mouse over behavior for comments and line selection
409 // Mouse over behavior for comments and line selection
392
410
393 // Select the line that comes from the url anchor
411 // Select the line that comes from the url anchor
394 // At the time of development, Chrome didn't seem to support jquery's :target
412 // At the time of development, Chrome didn't seem to support jquery's :target
395 // element, so I had to scroll manually
413 // element, so I had to scroll manually
396
414
397 if (location.hash) { /* TODO: dan: remove this and replace with code block
415 if (location.hash) { /* TODO: dan: remove this and replace with code block
398 below when new diffs are ready */
416 below when new diffs are ready */
399 var result = splitDelimitedHash(location.hash);
417 var result = splitDelimitedHash(location.hash);
400 var loc = result.loc;
418 var loc = result.loc;
401 if (loc.length > 1){
419 if (loc.length > 1){
402 var lineno = $(loc+'.lineno');
420 var lineno = $(loc+'.lineno');
403 if (lineno.length > 0){
421 if (lineno.length > 0){
404 var tr = lineno.parents('tr.line');
422 var tr = lineno.parents('tr.line');
405 tr.addClass('selected');
423 tr.addClass('selected');
406
424
407 tr[0].scrollIntoView();
425 tr[0].scrollIntoView();
408
426
409 $.Topic('/ui/plugins/code/anchor_focus').prepareOrPublish({
427 $.Topic('/ui/plugins/code/anchor_focus').prepareOrPublish({
410 tr: tr,
428 tr: tr,
411 remainder: result.remainder});
429 remainder: result.remainder});
412 }
430 }
413 }
431 }
414 }
432 }
415
433
416 if (location.hash) { /* TODO: dan: use this to replace the code block above
434 if (location.hash) { /* TODO: dan: use this to replace the code block above
417 when new diffs are ready */
435 when new diffs are ready */
418 var result = splitDelimitedHash(location.hash);
436 var result = splitDelimitedHash(location.hash);
419 var loc = result.loc;
437 var loc = result.loc;
420 if (loc.length > 1) {
438 if (loc.length > 1) {
439
440 var highlightable_line_tds = [];
441
442 // source code line format
421 var page_highlights = loc.substring(
443 var page_highlights = loc.substring(
422 loc.indexOf('#') + 1).split('L');
444 loc.indexOf('#') + 1).split('L');
423
445
424 if (page_highlights.length > 1) {
446 if (page_highlights.length > 1) {
425 var highlight_ranges = page_highlights[1].split(",");
447 var highlight_ranges = page_highlights[1].split(",");
426 var h_lines = [];
448 var h_lines = [];
427 for (var pos in highlight_ranges) {
449 for (var pos in highlight_ranges) {
428 var _range = highlight_ranges[pos].split('-');
450 var _range = highlight_ranges[pos].split('-');
429 if (_range.length === 2) {
451 if (_range.length === 2) {
430 var start = parseInt(_range[0]);
452 var start = parseInt(_range[0]);
431 var end = parseInt(_range[1]);
453 var end = parseInt(_range[1]);
432 if (start < end) {
454 if (start < end) {
433 for (var i = start; i <= end; i++) {
455 for (var i = start; i <= end; i++) {
434 h_lines.push(i);
456 h_lines.push(i);
435 }
457 }
436 }
458 }
437 }
459 }
438 else {
460 else {
439 h_lines.push(parseInt(highlight_ranges[pos]));
461 h_lines.push(parseInt(highlight_ranges[pos]));
440 }
462 }
441 }
463 }
442 for (pos in h_lines) {
464 for (pos in h_lines) {
443 var line_td = $('td.cb-lineno#L' + h_lines[pos]);
465 var line_td = $('td.cb-lineno#L' + h_lines[pos]);
444 if (line_td.length) {
466 if (line_td.length) {
445 line_td.addClass('cb-line-selected'); // line number td
467 highlightable_line_tds.push(line_td);
446 line_td.next().addClass('cb-line-selected'); // line content
468 }
447 }
469 }
448 }
470 }
449 var first_line_td = $('td.cb-lineno#L' + h_lines[0]);
450 if (first_line_td.length) {
451 var elOffset = first_line_td.offset().top;
452 var elHeight = first_line_td.height();
453 var windowHeight = $(window).height();
454 var offset;
455
471
456 if (elHeight < windowHeight) {
472 // now check a direct id reference (diff page)
457 offset = elOffset - ((windowHeight / 4) - (elHeight / 2));
473 if ($(loc).length && $(loc).hasClass('cb-lineno')) {
474 highlightable_line_tds.push($(loc));
458 }
475 }
459 else {
476 $.each(highlightable_line_tds, function (i, $td) {
460 offset = elOffset;
477 $td.addClass('cb-line-selected'); // line number td
461 }
478 $td.next().addClass('cb-line-selected'); // line content
462 $(function() { // let browser scroll to hash first, then
463 // scroll the line to the middle of page
464 setTimeout(function() {
465 $('html, body').animate({ scrollTop: offset });
466 }, 100);
467 });
479 });
480
481 if (highlightable_line_tds.length) {
482 var $first_line_td = highlightable_line_tds[0];
483 scrollToElement($first_line_td);
468 $.Topic('/ui/plugins/code/anchor_focus').prepareOrPublish({
484 $.Topic('/ui/plugins/code/anchor_focus').prepareOrPublish({
469 lineno: first_line_td,
485 lineno: $first_line_td,
470 remainder: result.remainder});
486 remainder: result.remainder
471 }
487 });
472 }
488 }
473 }
489 }
474 }
490 }
475 collapsableContent();
491 collapsableContent();
476 });
492 });
@@ -1,322 +1,258 b''
1 ## -*- coding: utf-8 -*-
1 ## -*- coding: utf-8 -*-
2 <%inherit file="/base/base.html"/>
2 <%inherit file="/base/base.html"/>
3 <%namespace name="cbdiffs" file="/codeblocks/diffs.html"/>
3
4
4 <%def name="title()">
5 <%def name="title()">
5 %if c.compare_home:
6 %if c.compare_home:
6 ${_('%s Compare') % c.repo_name}
7 ${_('%s Compare') % c.repo_name}
7 %else:
8 %else:
8 ${_('%s Compare') % c.repo_name} - ${'%s@%s' % (c.source_repo.repo_name, c.source_ref)} &gt; ${'%s@%s' % (c.target_repo.repo_name, c.target_ref)}
9 ${_('%s Compare') % c.repo_name} - ${'%s@%s' % (c.source_repo.repo_name, c.source_ref)} &gt; ${'%s@%s' % (c.target_repo.repo_name, c.target_ref)}
9 %endif
10 %endif
10 %if c.rhodecode_name:
11 %if c.rhodecode_name:
11 &middot; ${h.branding(c.rhodecode_name)}
12 &middot; ${h.branding(c.rhodecode_name)}
12 %endif
13 %endif
13 </%def>
14 </%def>
14
15
15 <%def name="breadcrumbs_links()">
16 <%def name="breadcrumbs_links()">
16 ${ungettext('%s commit','%s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
17 ${ungettext('%s commit','%s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
17 </%def>
18 </%def>
18
19
19 <%def name="menu_bar_nav()">
20 <%def name="menu_bar_nav()">
20 ${self.menu_items(active='repositories')}
21 ${self.menu_items(active='repositories')}
21 </%def>
22 </%def>
22
23
23 <%def name="menu_bar_subnav()">
24 <%def name="menu_bar_subnav()">
24 ${self.repo_menu(active='compare')}
25 ${self.repo_menu(active='compare')}
25 </%def>
26 </%def>
26
27
27 <%def name="main()">
28 <%def name="main()">
28 <script type="text/javascript">
29 <script type="text/javascript">
29 // set fake commitId on this commit-range page
30 // set fake commitId on this commit-range page
30 templateContext.commit_data.commit_id = "${h.EmptyCommit().raw_id}";
31 templateContext.commit_data.commit_id = "${h.EmptyCommit().raw_id}";
31 </script>
32 </script>
32
33
33 <div class="box">
34 <div class="box">
34 <div class="title">
35 <div class="title">
35 ${self.repo_page_title(c.rhodecode_db_repo)}
36 ${self.repo_page_title(c.rhodecode_db_repo)}
36 <div class="breadcrumbs">
37 <div class="breadcrumbs">
37 ${_('Compare Commits')}
38 ${_('Compare Commits')}
38 </div>
39 </div>
39 </div>
40 </div>
40
41
41 <div class="table">
42 <div class="table">
42 <div id="codeblock" class="diffblock">
43 <div id="codeblock" class="diffblock">
43 <div class="code-header" >
44 <div class="code-header" >
44 <div class="compare_header">
45 <div class="compare_header">
45 ## The hidden elements are replaced with a select2 widget
46 ## The hidden elements are replaced with a select2 widget
46 <div class="compare-label">${_('Target')}</div>${h.hidden('compare_source')}
47 <div class="compare-label">${_('Target')}</div>${h.hidden('compare_source')}
47 <div class="compare-label">${_('Source')}</div>${h.hidden('compare_target')}
48 <div class="compare-label">${_('Source')}</div>${h.hidden('compare_target')}
48
49
49 %if not c.preview_mode:
50 %if not c.preview_mode:
50 <div class="compare-label"></div>
51 <div class="compare-label"></div>
51 <div class="compare-buttons">
52 <div class="compare-buttons">
52 %if not c.compare_home:
53 %if not c.compare_home:
53 <a id="btn-swap" class="btn btn-primary" href="${c.swap_url}"><i class="icon-refresh"></i> ${_('Swap')}</a>
54 <a id="btn-swap" class="btn btn-primary" href="${c.swap_url}"><i class="icon-refresh"></i> ${_('Swap')}</a>
54 %endif
55 %endif
55 <div id="compare_revs" class="btn btn-primary"><i class ="icon-loop"></i> ${_('Compare Commits')}</div>
56 <div id="compare_revs" class="btn btn-primary"><i class ="icon-loop"></i> ${_('Compare Commits')}</div>
56 %if c.files:
57 %if c.diffset and c.diffset.files:
57 <div id="compare_changeset_status_toggle" class="btn btn-primary">${_('Comment')}</div>
58 <div id="compare_changeset_status_toggle" class="btn btn-primary">${_('Comment')}</div>
58 %endif
59 %endif
59 </div>
60 </div>
60 %endif
61 %endif
61 </div>
62 </div>
62 </div>
63 </div>
63 </div>
64 </div>
64 ## use JS script to load it quickly before potentially large diffs render long time
65 ## use JS script to load it quickly before potentially large diffs render long time
65 ## this prevents from situation when large diffs block rendering of select2 fields
66 ## this prevents from situation when large diffs block rendering of select2 fields
66 <script type="text/javascript">
67 <script type="text/javascript">
67
68
68 var cache = {};
69 var cache = {};
69
70
70 var formatSelection = function(repoName){
71 var formatSelection = function(repoName){
71 return function(data, container, escapeMarkup) {
72 return function(data, container, escapeMarkup) {
72 var selection = data ? this.text(data) : "";
73 var selection = data ? this.text(data) : "";
73 return escapeMarkup('{0}@{1}'.format(repoName, selection));
74 return escapeMarkup('{0}@{1}'.format(repoName, selection));
74 }
75 }
75 };
76 };
76
77
77 var feedCompareData = function(query, cachedValue){
78 var feedCompareData = function(query, cachedValue){
78 var data = {results: []};
79 var data = {results: []};
79 //filter results
80 //filter results
80 $.each(cachedValue.results, function() {
81 $.each(cachedValue.results, function() {
81 var section = this.text;
82 var section = this.text;
82 var children = [];
83 var children = [];
83 $.each(this.children, function() {
84 $.each(this.children, function() {
84 if (query.term.length === 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0) {
85 if (query.term.length === 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0) {
85 children.push({
86 children.push({
86 'id': this.id,
87 'id': this.id,
87 'text': this.text,
88 'text': this.text,
88 'type': this.type
89 'type': this.type
89 })
90 })
90 }
91 }
91 });
92 });
92 data.results.push({
93 data.results.push({
93 'text': section,
94 'text': section,
94 'children': children
95 'children': children
95 })
96 })
96 });
97 });
97 //push the typed in changeset
98 //push the typed in changeset
98 data.results.push({
99 data.results.push({
99 'text': _gettext('specify commit'),
100 'text': _gettext('specify commit'),
100 'children': [{
101 'children': [{
101 'id': query.term,
102 'id': query.term,
102 'text': query.term,
103 'text': query.term,
103 'type': 'rev'
104 'type': 'rev'
104 }]
105 }]
105 });
106 });
106 query.callback(data);
107 query.callback(data);
107 };
108 };
108
109
109 var loadCompareData = function(repoName, query, cache){
110 var loadCompareData = function(repoName, query, cache){
110 $.ajax({
111 $.ajax({
111 url: pyroutes.url('repo_refs_data', {'repo_name': repoName}),
112 url: pyroutes.url('repo_refs_data', {'repo_name': repoName}),
112 data: {},
113 data: {},
113 dataType: 'json',
114 dataType: 'json',
114 type: 'GET',
115 type: 'GET',
115 success: function(data) {
116 success: function(data) {
116 cache[repoName] = data;
117 cache[repoName] = data;
117 query.callback({results: data.results});
118 query.callback({results: data.results});
118 }
119 }
119 })
120 })
120 };
121 };
121
122
122 var enable_fields = ${"false" if c.preview_mode else "true"};
123 var enable_fields = ${"false" if c.preview_mode else "true"};
123 $("#compare_source").select2({
124 $("#compare_source").select2({
124 placeholder: "${'%s@%s' % (c.source_repo.repo_name, c.source_ref)}",
125 placeholder: "${'%s@%s' % (c.source_repo.repo_name, c.source_ref)}",
125 containerCssClass: "drop-menu",
126 containerCssClass: "drop-menu",
126 dropdownCssClass: "drop-menu-dropdown",
127 dropdownCssClass: "drop-menu-dropdown",
127 formatSelection: formatSelection("${c.source_repo.repo_name}"),
128 formatSelection: formatSelection("${c.source_repo.repo_name}"),
128 dropdownAutoWidth: true,
129 dropdownAutoWidth: true,
129 query: function(query) {
130 query: function(query) {
130 var repoName = '${c.source_repo.repo_name}';
131 var repoName = '${c.source_repo.repo_name}';
131 var cachedValue = cache[repoName];
132 var cachedValue = cache[repoName];
132
133
133 if (cachedValue){
134 if (cachedValue){
134 feedCompareData(query, cachedValue);
135 feedCompareData(query, cachedValue);
135 }
136 }
136 else {
137 else {
137 loadCompareData(repoName, query, cache);
138 loadCompareData(repoName, query, cache);
138 }
139 }
139 }
140 }
140 }).select2("enable", enable_fields);
141 }).select2("enable", enable_fields);
141
142
142 $("#compare_target").select2({
143 $("#compare_target").select2({
143 placeholder: "${'%s@%s' % (c.target_repo.repo_name, c.target_ref)}",
144 placeholder: "${'%s@%s' % (c.target_repo.repo_name, c.target_ref)}",
144 dropdownAutoWidth: true,
145 dropdownAutoWidth: true,
145 containerCssClass: "drop-menu",
146 containerCssClass: "drop-menu",
146 dropdownCssClass: "drop-menu-dropdown",
147 dropdownCssClass: "drop-menu-dropdown",
147 formatSelection: formatSelection("${c.target_repo.repo_name}"),
148 formatSelection: formatSelection("${c.target_repo.repo_name}"),
148 query: function(query) {
149 query: function(query) {
149 var repoName = '${c.target_repo.repo_name}';
150 var repoName = '${c.target_repo.repo_name}';
150 var cachedValue = cache[repoName];
151 var cachedValue = cache[repoName];
151
152
152 if (cachedValue){
153 if (cachedValue){
153 feedCompareData(query, cachedValue);
154 feedCompareData(query, cachedValue);
154 }
155 }
155 else {
156 else {
156 loadCompareData(repoName, query, cache);
157 loadCompareData(repoName, query, cache);
157 }
158 }
158 }
159 }
159 }).select2("enable", enable_fields);
160 }).select2("enable", enable_fields);
160 var initial_compare_source = {id: "${c.source_ref}", type:"${c.source_ref_type}"};
161 var initial_compare_source = {id: "${c.source_ref}", type:"${c.source_ref_type}"};
161 var initial_compare_target = {id: "${c.target_ref}", type:"${c.target_ref_type}"};
162 var initial_compare_target = {id: "${c.target_ref}", type:"${c.target_ref_type}"};
162
163
163 $('#compare_revs').on('click', function(e) {
164 $('#compare_revs').on('click', function(e) {
164 var source = $('#compare_source').select2('data') || initial_compare_source;
165 var source = $('#compare_source').select2('data') || initial_compare_source;
165 var target = $('#compare_target').select2('data') || initial_compare_target;
166 var target = $('#compare_target').select2('data') || initial_compare_target;
166 if (source && target) {
167 if (source && target) {
167 var url_data = {
168 var url_data = {
168 repo_name: "${c.repo_name}",
169 repo_name: "${c.repo_name}",
169 source_ref: source.id,
170 source_ref: source.id,
170 source_ref_type: source.type,
171 source_ref_type: source.type,
171 target_ref: target.id,
172 target_ref: target.id,
172 target_ref_type: target.type
173 target_ref_type: target.type
173 };
174 };
174 window.location = pyroutes.url('compare_url', url_data);
175 window.location = pyroutes.url('compare_url', url_data);
175 }
176 }
176 });
177 });
177 $('#compare_changeset_status_toggle').on('click', function(e) {
178 $('#compare_changeset_status_toggle').on('click', function(e) {
178 $('#compare_changeset_status').toggle();
179 $('#compare_changeset_status').toggle();
179 });
180 });
180
181
181 </script>
182 </script>
182
183
183 ## changeset status form
184 ## changeset status form
184 <%namespace name="comment" file="/changeset/changeset_file_comment.html"/>
185 <%namespace name="comment" file="/changeset/changeset_file_comment.html"/>
185 ## main comment form and it status
186 ## main comment form and it status
186 <%
187 <%
187 def revs(_revs):
188 def revs(_revs):
188 form_inputs = []
189 form_inputs = []
189 for cs in _revs:
190 for cs in _revs:
190 tmpl = '<input type="hidden" data-commit-id="%(cid)s" name="commit_ids" value="%(cid)s">' % {'cid': cs.raw_id}
191 tmpl = '<input type="hidden" data-commit-id="%(cid)s" name="commit_ids" value="%(cid)s">' % {'cid': cs.raw_id}
191 form_inputs.append(tmpl)
192 form_inputs.append(tmpl)
192 return form_inputs
193 return form_inputs
193 %>
194 %>
194 <div id="compare_changeset_status" style="display: none;">
195 <div id="compare_changeset_status" style="display: none;">
195 ${comment.comments(h.url('changeset_comment', repo_name=c.repo_name, revision='0'*16), None, is_compare=True, form_extras=revs(c.commit_ranges))}
196 ${comment.comments(h.url('changeset_comment', repo_name=c.repo_name, revision='0'*16), None, is_compare=True, form_extras=revs(c.commit_ranges))}
196 <script type="text/javascript">
197 <script type="text/javascript">
197
198
198 mainCommentForm.setHandleFormSubmit(function(o) {
199 mainCommentForm.setHandleFormSubmit(function(o) {
199 var text = mainCommentForm.cm.getValue();
200 var text = mainCommentForm.cm.getValue();
200 var status = mainCommentForm.getCommentStatus();
201 var status = mainCommentForm.getCommentStatus();
201
202
202 if (text === "" && !status) {
203 if (text === "" && !status) {
203 return;
204 return;
204 }
205 }
205
206
206 // we can pick which commits we want to make the comment by
207 // we can pick which commits we want to make the comment by
207 // selecting them via click on preview pane, this will alter the hidden inputs
208 // selecting them via click on preview pane, this will alter the hidden inputs
208 var cherryPicked = $('#changeset_compare_view_content .compare_select.hl').length > 0;
209 var cherryPicked = $('#changeset_compare_view_content .compare_select.hl').length > 0;
209
210
210 var commitIds = [];
211 var commitIds = [];
211 $('#changeset_compare_view_content .compare_select').each(function(el) {
212 $('#changeset_compare_view_content .compare_select').each(function(el) {
212 var commitId = this.id.replace('row-', '');
213 var commitId = this.id.replace('row-', '');
213 if ($(this).hasClass('hl') || !cherryPicked) {
214 if ($(this).hasClass('hl') || !cherryPicked) {
214 $("input[data-commit-id='{0}']".format(commitId)).val(commitId)
215 $("input[data-commit-id='{0}']".format(commitId)).val(commitId)
215 commitIds.push(commitId);
216 commitIds.push(commitId);
216 } else {
217 } else {
217 $("input[data-commit-id='{0}']".format(commitId)).val('')
218 $("input[data-commit-id='{0}']".format(commitId)).val('')
218 }
219 }
219 });
220 });
220
221
221 mainCommentForm.setActionButtonsDisabled(true);
222 mainCommentForm.setActionButtonsDisabled(true);
222 mainCommentForm.cm.setOption("readOnly", true);
223 mainCommentForm.cm.setOption("readOnly", true);
223 var postData = {
224 var postData = {
224 'text': text,
225 'text': text,
225 'changeset_status': status,
226 'changeset_status': status,
226 'commit_ids': commitIds,
227 'commit_ids': commitIds,
227 'csrf_token': CSRF_TOKEN
228 'csrf_token': CSRF_TOKEN
228 };
229 };
229
230
230 var submitSuccessCallback = function(o) {
231 var submitSuccessCallback = function(o) {
231 location.reload(true);
232 location.reload(true);
232 };
233 };
233 var submitFailCallback = function(){
234 var submitFailCallback = function(){
234 mainCommentForm.resetCommentFormState(text)
235 mainCommentForm.resetCommentFormState(text)
235 };
236 };
236 mainCommentForm.submitAjaxPOST(
237 mainCommentForm.submitAjaxPOST(
237 mainCommentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
238 mainCommentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
238 });
239 });
239 </script>
240 </script>
240
241
241 </div>
242 </div>
242
243
243 %if c.compare_home:
244 %if c.compare_home:
244 <div id="changeset_compare_view_content">
245 <div id="changeset_compare_view_content">
245 <div class="help-block">${_('Compare commits, branches, bookmarks or tags.')}</div>
246 <div class="help-block">${_('Compare commits, branches, bookmarks or tags.')}</div>
246 </div>
247 </div>
247 %else:
248 %else:
248 <div id="changeset_compare_view_content">
249 <div id="changeset_compare_view_content">
249 ##CS
250 ##CS
250 <%include file="compare_commits.html"/>
251 <%include file="compare_commits.html"/>
251
252 ${cbdiffs.render_diffset(c.diffset)}
252 ## FILES
253 <div class="cs_files_title">
254 <span class="cs_files_expand">
255 <span id="expand_all_files">${_('Expand All')}</span> | <span id="collapse_all_files">${_('Collapse All')}</span>
256 </span>
257 <h2>
258 ${diff_block.diff_summary_text(len(c.files), c.lines_added, c.lines_deleted, c.limited_diff)}
259 </h2>
260 </div>
261 <div class="cs_files">
262 %if not c.files:
263 <p class="empty_data">${_('No files')}</p>
264 %endif
265 <table class="compare_view_files">
266 <%namespace name="diff_block" file="/changeset/diff_block.html"/>
267 %for FID, change, path, stats, file in c.files:
268 <tr class="cs_${change} collapse_file" fid="${FID}">
269 <td class="cs_icon_td">
270 <span class="collapse_file_icon" fid="${FID}"></span>
271 </td>
272 <td class="cs_icon_td">
273 <div class="flag_status not_reviewed hidden"></div>
274 </td>
275 <td class="cs_${change}" id="a_${FID}">
276 <div class="node">
277 <a href="#a_${FID}">
278 <i class="icon-file-${change.lower()}"></i>
279 ${h.safe_unicode(path)}
280 </a>
281 </div>
282 </td>
283 <td>
284 <div class="changes pull-right">${h.fancy_file_stats(stats)}</div>
285 <div class="comment-bubble pull-right" data-path="${path}">
286 <i class="icon-comment"></i>
287 </div>
288 </td>
289 </tr>
290 <tr fid="${FID}" id="diff_${FID}" class="diff_links">
291 <td></td>
292 <td></td>
293 <td class="cs_${change}">
294 %if c.target_repo.repo_name == c.repo_name:
295 ${diff_block.diff_menu(c.repo_name, h.safe_unicode(path), c.source_ref, c.target_ref, change, file)}
296 %else:
297 ## this is slightly different case later, since the target repo can have this
298 ## file in target state than the source repo
299 ${diff_block.diff_menu(c.target_repo.repo_name, h.safe_unicode(path), c.source_ref, c.target_ref, change, file)}
300 %endif
301 </td>
302 <td class="td-actions rc-form">
303 </td>
304 </tr>
305 <tr id="tr_${FID}">
306 <td></td>
307 <td></td>
308 <td class="injected_diff" colspan="2">
309 ${diff_block.diff_block_simple([c.changes[FID]])}
310 </td>
311 </tr>
312 %endfor
313 </table>
314 % if c.limited_diff:
315 ${diff_block.changeset_message()}
316 % endif
317 </div>
253 </div>
318 %endif
254 %endif
319 </div>
255 </div>
320 </div>
256 </div>
321 </div>
257 </div>
322 </%def>
258 </%def>
@@ -1,286 +1,299 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.helpers import _shorten_commit_id
23 from rhodecode.lib.helpers import _shorten_commit_id
24 from rhodecode.tests import url
24 from rhodecode.tests import url
25
25
26
26
27 @pytest.mark.usefixtures("app")
27 @pytest.mark.usefixtures("app")
28 class TestChangesetController(object):
28 class TestChangesetController(object):
29
29
30 def test_index(self, backend):
30 def test_index(self, backend):
31 commit_id = self.commit_id[backend.alias]
31 commit_id = self.commit_id[backend.alias]
32 response = self.app.get(url(
32 response = self.app.get(url(
33 controller='changeset', action='index',
33 controller='changeset', action='index',
34 repo_name=backend.repo_name, revision=commit_id))
34 repo_name=backend.repo_name, revision=commit_id))
35 response.mustcontain('Added a symlink')
35 response.mustcontain('Added a symlink')
36 response.mustcontain(commit_id)
36 response.mustcontain(commit_id)
37 response.mustcontain('No newline at end of file')
37 response.mustcontain('No newline at end of file')
38
38
39 def test_index_raw(self, backend):
39 def test_index_raw(self, backend):
40 commit_id = self.commit_id[backend.alias]
40 commit_id = self.commit_id[backend.alias]
41 response = self.app.get(url(
41 response = self.app.get(url(
42 controller='changeset', action='changeset_raw',
42 controller='changeset', action='changeset_raw',
43 repo_name=backend.repo_name, revision=commit_id))
43 repo_name=backend.repo_name, revision=commit_id))
44 assert response.body == self.diffs[backend.alias]
44 assert response.body == self.diffs[backend.alias]
45
45
46 def test_index_raw_patch(self, backend):
46 def test_index_raw_patch(self, backend):
47 response = self.app.get(url(
47 response = self.app.get(url(
48 controller='changeset', action='changeset_patch',
48 controller='changeset', action='changeset_patch',
49 repo_name=backend.repo_name,
49 repo_name=backend.repo_name,
50 revision=self.commit_id[backend.alias]))
50 revision=self.commit_id[backend.alias]))
51 assert response.body == self.patches[backend.alias]
51 assert response.body == self.patches[backend.alias]
52
52
53 def test_index_changeset_download(self, backend):
53 def test_index_changeset_download(self, backend):
54 response = self.app.get(url(
54 response = self.app.get(url(
55 controller='changeset', action='changeset_download',
55 controller='changeset', action='changeset_download',
56 repo_name=backend.repo_name,
56 repo_name=backend.repo_name,
57 revision=self.commit_id[backend.alias]))
57 revision=self.commit_id[backend.alias]))
58 assert response.body == self.diffs[backend.alias]
58 assert response.body == self.diffs[backend.alias]
59
59
60 def test_single_commit_page_different_ops(self, backend):
60 def test_single_commit_page_different_ops(self, backend):
61 commit_id = {
61 commit_id = {
62 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
62 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
63 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
63 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
64 'svn': '337',
64 'svn': '337',
65 }
65 }
66 commit_id = commit_id[backend.alias]
66 commit_id = commit_id[backend.alias]
67 response = self.app.get(url(
67 response = self.app.get(url(
68 controller='changeset', action='index',
68 controller='changeset', action='index',
69 repo_name=backend.repo_name, revision=commit_id))
69 repo_name=backend.repo_name, revision=commit_id))
70
70
71 response.mustcontain(_shorten_commit_id(commit_id))
71 response.mustcontain(_shorten_commit_id(commit_id))
72 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
72 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
73
73
74 # files op files
74 # files op files
75 response.mustcontain('File no longer present at commit: %s' %
75 response.mustcontain('File no longer present at commit: %s' %
76 _shorten_commit_id(commit_id))
76 _shorten_commit_id(commit_id))
77
77
78 # svn uses a different filename
78 # svn uses a different filename
79 if backend.alias == 'svn':
79 if backend.alias == 'svn':
80 response.mustcontain('new file 10644')
80 response.mustcontain('new file 10644')
81 else:
81 else:
82 response.mustcontain('new file 100644')
82 response.mustcontain('new file 100644')
83 response.mustcontain('Changed theme to ADC theme') # commit msg
83 response.mustcontain('Changed theme to ADC theme') # commit msg
84
84
85 self._check_diff_menus(response, right_menu=True)
85 self._check_diff_menus(response, right_menu=True)
86
86
87 def test_commit_range_page_different_ops(self, backend):
87 def test_commit_range_page_different_ops(self, backend):
88 commit_id_range = {
88 commit_id_range = {
89 'hg': (
89 'hg': (
90 '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
90 '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
91 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
91 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
92 'git': (
92 'git': (
93 '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
93 '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
94 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
94 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
95 'svn': (
95 'svn': (
96 '335',
96 '335',
97 '337'),
97 '337'),
98 }
98 }
99 commit_ids = commit_id_range[backend.alias]
99 commit_ids = commit_id_range[backend.alias]
100 commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
100 commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
101 response = self.app.get(url(
101 response = self.app.get(url(
102 controller='changeset', action='index',
102 controller='changeset', action='index',
103 repo_name=backend.repo_name, revision=commit_id))
103 repo_name=backend.repo_name, revision=commit_id))
104
104
105 response.mustcontain(_shorten_commit_id(commit_ids[0]))
105 response.mustcontain(_shorten_commit_id(commit_ids[0]))
106 response.mustcontain(_shorten_commit_id(commit_ids[1]))
106 response.mustcontain(_shorten_commit_id(commit_ids[1]))
107
107
108 # svn is special
108 # svn is special
109 if backend.alias == 'svn':
109 if backend.alias == 'svn':
110 response.mustcontain('new file 10644')
110 response.mustcontain('new file 10644')
111 response.mustcontain('34 files changed: 1184 inserted, 311 deleted')
111 response.mustcontain('34 files changed: 1184 inserted, 311 deleted')
112 else:
112 else:
113 response.mustcontain('new file 100644')
113 response.mustcontain('new file 100644')
114 response.mustcontain('33 files changed: 1165 inserted, 308 deleted')
114 response.mustcontain('33 files changed: 1165 inserted, 308 deleted')
115
115
116 # files op files
116 # files op files
117 response.mustcontain('File no longer present at commit: %s' %
117 response.mustcontain('File no longer present at commit: %s' %
118 _shorten_commit_id(commit_ids[1]))
118 _shorten_commit_id(commit_ids[1]))
119 response.mustcontain('Added docstrings to vcs.cli') # commit msg
119 response.mustcontain('Added docstrings to vcs.cli') # commit msg
120 response.mustcontain('Changed theme to ADC theme') # commit msg
120 response.mustcontain('Changed theme to ADC theme') # commit msg
121
121
122 self._check_diff_menus(response)
122 self._check_diff_menus(response)
123
123
124 def test_combined_compare_commit_page_different_ops(self, backend):
124 def test_combined_compare_commit_page_different_ops(self, backend):
125 commit_id_range = {
125 commit_id_range = {
126 'hg': (
126 'hg': (
127 '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
127 '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
128 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
128 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
129 'git': (
129 'git': (
130 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
130 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
131 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
131 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
132 'svn': (
132 'svn': (
133 '335',
133 '335',
134 '337'),
134 '337'),
135 }
135 }
136 commit_ids = commit_id_range[backend.alias]
136 commit_ids = commit_id_range[backend.alias]
137 response = self.app.get(url(
137 response = self.app.get(url(
138 controller='compare', action='compare',
138 controller='compare', action='compare',
139 repo_name=backend.repo_name,
139 repo_name=backend.repo_name,
140 source_ref_type='rev', source_ref=commit_ids[0],
140 source_ref_type='rev', source_ref=commit_ids[0],
141 target_ref_type='rev', target_ref=commit_ids[1], ))
141 target_ref_type='rev', target_ref=commit_ids[1], ))
142
142
143 response.mustcontain(_shorten_commit_id(commit_ids[0]))
143 response.mustcontain(_shorten_commit_id(commit_ids[0]))
144 response.mustcontain(_shorten_commit_id(commit_ids[1]))
144 response.mustcontain(_shorten_commit_id(commit_ids[1]))
145
145
146 # files op files
146 # files op files
147 response.mustcontain('File no longer present at commit: %s' %
147 response.mustcontain('File no longer present at commit: %s' %
148 _shorten_commit_id(commit_ids[1]))
148 _shorten_commit_id(commit_ids[1]))
149
149
150 # svn is special
150 # svn is special
151 if backend.alias == 'svn':
151 if backend.alias == 'svn':
152 response.mustcontain('new file 10644')
152 response.mustcontain('new file 10644')
153 response.mustcontain('32 files changed: 1179 inserted, 310 deleted')
153 response.mustcontain('32 files changed: 1179 inserted, 310 deleted')
154 else:
154 else:
155 response.mustcontain('new file 100644')
155 response.mustcontain('new file 100644')
156 response.mustcontain('32 files changed: 1165 inserted, 308 deleted')
156 response.mustcontain('32 files changed: 1165 inserted, 308 deleted')
157
157
158 response.mustcontain('Added docstrings to vcs.cli') # commit msg
158 response.mustcontain('Added docstrings to vcs.cli') # commit msg
159 response.mustcontain('Changed theme to ADC theme') # commit msg
159 response.mustcontain('Changed theme to ADC theme') # commit msg
160
160
161 self._check_diff_menus(response)
161 self._check_new_diff_menus(response)
162
162
163 def test_changeset_range(self, backend):
163 def test_changeset_range(self, backend):
164 self._check_changeset_range(
164 self._check_changeset_range(
165 backend, self.commit_id_range, self.commit_id_range_result)
165 backend, self.commit_id_range, self.commit_id_range_result)
166
166
167 def test_changeset_range_with_initial_commit(self, backend):
167 def test_changeset_range_with_initial_commit(self, backend):
168 commit_id_range = {
168 commit_id_range = {
169 'hg': (
169 'hg': (
170 'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
170 'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
171 '...6cba7170863a2411822803fa77a0a264f1310b35'),
171 '...6cba7170863a2411822803fa77a0a264f1310b35'),
172 'git': (
172 'git': (
173 'c1214f7e79e02fc37156ff215cd71275450cffc3'
173 'c1214f7e79e02fc37156ff215cd71275450cffc3'
174 '...fa6600f6848800641328adbf7811fd2372c02ab2'),
174 '...fa6600f6848800641328adbf7811fd2372c02ab2'),
175 'svn': '1...3',
175 'svn': '1...3',
176 }
176 }
177 commit_id_range_result = {
177 commit_id_range_result = {
178 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
178 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
179 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
179 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
180 'svn': ['1', '2', '3'],
180 'svn': ['1', '2', '3'],
181 }
181 }
182 self._check_changeset_range(
182 self._check_changeset_range(
183 backend, commit_id_range, commit_id_range_result)
183 backend, commit_id_range, commit_id_range_result)
184
184
185 def _check_changeset_range(
185 def _check_changeset_range(
186 self, backend, commit_id_ranges, commit_id_range_result):
186 self, backend, commit_id_ranges, commit_id_range_result):
187 response = self.app.get(
187 response = self.app.get(
188 url(controller='changeset', action='index',
188 url(controller='changeset', action='index',
189 repo_name=backend.repo_name,
189 repo_name=backend.repo_name,
190 revision=commit_id_ranges[backend.alias]))
190 revision=commit_id_ranges[backend.alias]))
191 expected_result = commit_id_range_result[backend.alias]
191 expected_result = commit_id_range_result[backend.alias]
192 response.mustcontain('{} commits'.format(len(expected_result)))
192 response.mustcontain('{} commits'.format(len(expected_result)))
193 for commit_id in expected_result:
193 for commit_id in expected_result:
194 response.mustcontain(commit_id)
194 response.mustcontain(commit_id)
195
195
196 commit_id = {
196 commit_id = {
197 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
197 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
198 'svn': '393',
198 'svn': '393',
199 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
199 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
200 }
200 }
201
201
202 commit_id_range = {
202 commit_id_range = {
203 'hg': (
203 'hg': (
204 'a53d9201d4bc278910d416d94941b7ea007ecd52'
204 'a53d9201d4bc278910d416d94941b7ea007ecd52'
205 '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
205 '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
206 'git': (
206 'git': (
207 '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
207 '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
208 '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
208 '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
209 'svn': '391...393',
209 'svn': '391...393',
210 }
210 }
211
211
212 commit_id_range_result = {
212 commit_id_range_result = {
213 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
213 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
214 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
214 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
215 'svn': ['391', '392', '393'],
215 'svn': ['391', '392', '393'],
216 }
216 }
217
217
218 diffs = {
218 diffs = {
219 'hg': r"""diff --git a/README b/README
219 'hg': r"""diff --git a/README b/README
220 new file mode 120000
220 new file mode 120000
221 --- /dev/null
221 --- /dev/null
222 +++ b/README
222 +++ b/README
223 @@ -0,0 +1,1 @@
223 @@ -0,0 +1,1 @@
224 +README.rst
224 +README.rst
225 \ No newline at end of file
225 \ No newline at end of file
226 """,
226 """,
227 'git': r"""diff --git a/README b/README
227 'git': r"""diff --git a/README b/README
228 new file mode 120000
228 new file mode 120000
229 index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4
229 index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4
230 --- /dev/null
230 --- /dev/null
231 +++ b/README
231 +++ b/README
232 @@ -0,0 +1 @@
232 @@ -0,0 +1 @@
233 +README.rst
233 +README.rst
234 \ No newline at end of file
234 \ No newline at end of file
235 """,
235 """,
236 'svn': """Index: README
236 'svn': """Index: README
237 ===================================================================
237 ===================================================================
238 diff --git a/README b/README
238 diff --git a/README b/README
239 new file mode 10644
239 new file mode 10644
240 --- /dev/null\t(revision 0)
240 --- /dev/null\t(revision 0)
241 +++ b/README\t(revision 393)
241 +++ b/README\t(revision 393)
242 @@ -0,0 +1 @@
242 @@ -0,0 +1 @@
243 +link README.rst
243 +link README.rst
244 \\ No newline at end of file
244 \\ No newline at end of file
245 """,
245 """,
246 }
246 }
247
247
248 patches = {
248 patches = {
249 'hg': r"""# HG changeset patch
249 'hg': r"""# HG changeset patch
250 # User Marcin Kuzminski <marcin@python-works.com>
250 # User Marcin Kuzminski <marcin@python-works.com>
251 # Date 2014-01-07 12:21:40
251 # Date 2014-01-07 12:21:40
252 # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
252 # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
253 # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e
253 # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e
254
254
255 Added a symlink
255 Added a symlink
256
256
257 """ + diffs['hg'],
257 """ + diffs['hg'],
258 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
258 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
259 From: Marcin Kuzminski <marcin@python-works.com>
259 From: Marcin Kuzminski <marcin@python-works.com>
260 Date: 2014-01-07 12:22:20
260 Date: 2014-01-07 12:22:20
261 Subject: [PATCH] Added a symlink
261 Subject: [PATCH] Added a symlink
262
262
263 ---
263 ---
264
264
265 """ + diffs['git'],
265 """ + diffs['git'],
266 'svn': r"""# SVN changeset patch
266 'svn': r"""# SVN changeset patch
267 # User marcin
267 # User marcin
268 # Date 2014-09-02 12:25:22.071142
268 # Date 2014-09-02 12:25:22.071142
269 # Revision 393
269 # Revision 393
270
270
271 Added a symlink
271 Added a symlink
272
272
273 """ + diffs['svn'],
273 """ + diffs['svn'],
274 }
274 }
275
275
276 def _check_diff_menus(self, response, right_menu=False):
276 def _check_diff_menus(self, response, right_menu=False,):
277 # diff menus
277 # diff menus
278 for elem in ['Show File', 'Unified Diff', 'Side-by-side Diff',
278 for elem in ['Show File', 'Unified Diff', 'Side-by-side Diff',
279 'Raw Diff', 'Download Diff']:
279 'Raw Diff', 'Download Diff']:
280 response.mustcontain(elem)
280 response.mustcontain(elem)
281
281
282 # right pane diff menus
282 # right pane diff menus
283 if right_menu:
283 if right_menu:
284 for elem in ['Ignore whitespace', 'Increase context',
284 for elem in ['Ignore whitespace', 'Increase context',
285 'Hide comments']:
285 'Hide comments']:
286 response.mustcontain(elem)
286 response.mustcontain(elem)
287
288
289 def _check_new_diff_menus(self, response, right_menu=False,):
290 # diff menus
291 for elem in ['Show file before', 'Show file after',
292 'Raw diff', 'Download diff']:
293 response.mustcontain(elem)
294
295 # right pane diff menus
296 if right_menu:
297 for elem in ['Ignore whitespace', 'Increase context',
298 'Hide comments']:
299 response.mustcontain(elem)
@@ -1,691 +1,695 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import lxml.html
23
24
24 from rhodecode.lib.vcs.backends.base import EmptyCommit
25 from rhodecode.lib.vcs.backends.base import EmptyCommit
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.model.db import Repository
27 from rhodecode.model.db import Repository
27 from rhodecode.model.scm import ScmModel
28 from rhodecode.model.scm import ScmModel
28 from rhodecode.tests import url, TEST_USER_ADMIN_LOGIN, assert_session_flash
29 from rhodecode.tests import url, TEST_USER_ADMIN_LOGIN, assert_session_flash
29 from rhodecode.tests.utils import AssertResponse
30 from rhodecode.tests.utils import AssertResponse
30
31
31
32
32 @pytest.mark.usefixtures("autologin_user", "app")
33 @pytest.mark.usefixtures("autologin_user", "app")
33 class TestCompareController:
34 class TestCompareController:
34
35
35 @pytest.mark.xfail_backends("svn", reason="Requires pull")
36 @pytest.mark.xfail_backends("svn", reason="Requires pull")
36 def test_compare_remote_with_different_commit_indexes(self, backend):
37 def test_compare_remote_with_different_commit_indexes(self, backend):
37 # Preparing the following repository structure:
38 # Preparing the following repository structure:
38 #
39 #
39 # Origin repository has two commits:
40 # Origin repository has two commits:
40 #
41 #
41 # 0 1
42 # 0 1
42 # A -- D
43 # A -- D
43 #
44 #
44 # The fork of it has a few more commits and "D" has a commit index
45 # The fork of it has a few more commits and "D" has a commit index
45 # which does not exist in origin.
46 # which does not exist in origin.
46 #
47 #
47 # 0 1 2 3 4
48 # 0 1 2 3 4
48 # A -- -- -- D -- E
49 # A -- -- -- D -- E
49 # \- B -- C
50 # \- B -- C
50 #
51 #
51
52
52 fork = backend.create_repo()
53 fork = backend.create_repo()
53
54
54 # prepare fork
55 # prepare fork
55 commit0 = _commit_change(
56 commit0 = _commit_change(
56 fork.repo_name, filename='file1', content='A',
57 fork.repo_name, filename='file1', content='A',
57 message='A', vcs_type=backend.alias, parent=None, newfile=True)
58 message='A', vcs_type=backend.alias, parent=None, newfile=True)
58
59
59 commit1 = _commit_change(
60 commit1 = _commit_change(
60 fork.repo_name, filename='file1', content='B',
61 fork.repo_name, filename='file1', content='B',
61 message='B, child of A', vcs_type=backend.alias, parent=commit0)
62 message='B, child of A', vcs_type=backend.alias, parent=commit0)
62
63
63 _commit_change( # commit 2
64 _commit_change( # commit 2
64 fork.repo_name, filename='file1', content='C',
65 fork.repo_name, filename='file1', content='C',
65 message='C, child of B', vcs_type=backend.alias, parent=commit1)
66 message='C, child of B', vcs_type=backend.alias, parent=commit1)
66
67
67 commit3 = _commit_change(
68 commit3 = _commit_change(
68 fork.repo_name, filename='file1', content='D',
69 fork.repo_name, filename='file1', content='D',
69 message='D, child of A', vcs_type=backend.alias, parent=commit0)
70 message='D, child of A', vcs_type=backend.alias, parent=commit0)
70
71
71 commit4 = _commit_change(
72 commit4 = _commit_change(
72 fork.repo_name, filename='file1', content='E',
73 fork.repo_name, filename='file1', content='E',
73 message='E, child of D', vcs_type=backend.alias, parent=commit3)
74 message='E, child of D', vcs_type=backend.alias, parent=commit3)
74
75
75 # prepare origin repository, taking just the history up to D
76 # prepare origin repository, taking just the history up to D
76 origin = backend.create_repo()
77 origin = backend.create_repo()
77
78
78 origin_repo = origin.scm_instance(cache=False)
79 origin_repo = origin.scm_instance(cache=False)
79 origin_repo.config.clear_section('hooks')
80 origin_repo.config.clear_section('hooks')
80 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
81 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
81
82
82 # Verify test fixture setup
83 # Verify test fixture setup
83 # This does not work for git
84 # This does not work for git
84 if backend.alias != 'git':
85 if backend.alias != 'git':
85 assert 5 == len(fork.scm_instance().commit_ids)
86 assert 5 == len(fork.scm_instance().commit_ids)
86 assert 2 == len(origin_repo.commit_ids)
87 assert 2 == len(origin_repo.commit_ids)
87
88
88 # Comparing the revisions
89 # Comparing the revisions
89 response = self.app.get(
90 response = self.app.get(
90 url('compare_url',
91 url('compare_url',
91 repo_name=origin.repo_name,
92 repo_name=origin.repo_name,
92 source_ref_type="rev",
93 source_ref_type="rev",
93 source_ref=commit3.raw_id,
94 source_ref=commit3.raw_id,
94 target_repo=fork.repo_name,
95 target_repo=fork.repo_name,
95 target_ref_type="rev",
96 target_ref_type="rev",
96 target_ref=commit4.raw_id,
97 target_ref=commit4.raw_id,
97 merge='1',))
98 merge='1',))
98
99
99 compare_page = ComparePage(response)
100 compare_page = ComparePage(response)
100 compare_page.contains_commits([commit4])
101 compare_page.contains_commits([commit4])
101
102
102 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
103 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
103 def test_compare_forks_on_branch_extra_commits(self, backend):
104 def test_compare_forks_on_branch_extra_commits(self, backend):
104 repo1 = backend.create_repo()
105 repo1 = backend.create_repo()
105
106
106 # commit something !
107 # commit something !
107 commit0 = _commit_change(
108 commit0 = _commit_change(
108 repo1.repo_name, filename='file1', content='line1\n',
109 repo1.repo_name, filename='file1', content='line1\n',
109 message='commit1', vcs_type=backend.alias, parent=None,
110 message='commit1', vcs_type=backend.alias, parent=None,
110 newfile=True)
111 newfile=True)
111
112
112 # fork this repo
113 # fork this repo
113 repo2 = backend.create_fork()
114 repo2 = backend.create_fork()
114
115
115 # add two extra commit into fork
116 # add two extra commit into fork
116 commit1 = _commit_change(
117 commit1 = _commit_change(
117 repo2.repo_name, filename='file1', content='line1\nline2\n',
118 repo2.repo_name, filename='file1', content='line1\nline2\n',
118 message='commit2', vcs_type=backend.alias, parent=commit0)
119 message='commit2', vcs_type=backend.alias, parent=commit0)
119
120
120 commit2 = _commit_change(
121 commit2 = _commit_change(
121 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
122 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
122 message='commit3', vcs_type=backend.alias, parent=commit1)
123 message='commit3', vcs_type=backend.alias, parent=commit1)
123
124
124 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
125 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
125 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
126 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
126
127
127 response = self.app.get(
128 response = self.app.get(
128 url('compare_url',
129 url('compare_url',
129 repo_name=repo1.repo_name,
130 repo_name=repo1.repo_name,
130 source_ref_type="branch",
131 source_ref_type="branch",
131 source_ref=commit_id2,
132 source_ref=commit_id2,
132 target_repo=repo2.repo_name,
133 target_repo=repo2.repo_name,
133 target_ref_type="branch",
134 target_ref_type="branch",
134 target_ref=commit_id1,
135 target_ref=commit_id1,
135 merge='1',))
136 merge='1',))
136
137
137 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
138 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
138 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
139 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
139
140
140 compare_page = ComparePage(response)
141 compare_page = ComparePage(response)
141 compare_page.contains_change_summary(1, 2, 0)
142 compare_page.contains_change_summary(1, 2, 0)
142 compare_page.contains_commits([commit1, commit2])
143 compare_page.contains_commits([commit1, commit2])
143 compare_page.contains_file_links_and_anchors([
144 compare_page.contains_file_links_and_anchors([
144 ('file1', 'a_c--826e8142e6ba'),
145 ('file1', 'a_c--826e8142e6ba'),
145 ])
146 ])
146
147
147 # Swap is removed when comparing branches since it's a PR feature and
148 # Swap is removed when comparing branches since it's a PR feature and
148 # it is then a preview mode
149 # it is then a preview mode
149 compare_page.swap_is_hidden()
150 compare_page.swap_is_hidden()
150 compare_page.target_source_are_disabled()
151 compare_page.target_source_are_disabled()
151
152
152 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
153 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
153 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(
154 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(
154 self, backend):
155 self, backend):
155 repo1 = backend.create_repo()
156 repo1 = backend.create_repo()
156
157
157 # commit something !
158 # commit something !
158 commit0 = _commit_change(
159 commit0 = _commit_change(
159 repo1.repo_name, filename='file1', content='line1\n',
160 repo1.repo_name, filename='file1', content='line1\n',
160 message='commit1', vcs_type=backend.alias, parent=None,
161 message='commit1', vcs_type=backend.alias, parent=None,
161 newfile=True)
162 newfile=True)
162
163
163 # fork this repo
164 # fork this repo
164 repo2 = backend.create_fork()
165 repo2 = backend.create_fork()
165
166
166 # now commit something to origin repo
167 # now commit something to origin repo
167 _commit_change(
168 _commit_change(
168 repo1.repo_name, filename='file2', content='line1file2\n',
169 repo1.repo_name, filename='file2', content='line1file2\n',
169 message='commit2', vcs_type=backend.alias, parent=commit0,
170 message='commit2', vcs_type=backend.alias, parent=commit0,
170 newfile=True)
171 newfile=True)
171
172
172 # add two extra commit into fork
173 # add two extra commit into fork
173 commit1 = _commit_change(
174 commit1 = _commit_change(
174 repo2.repo_name, filename='file1', content='line1\nline2\n',
175 repo2.repo_name, filename='file1', content='line1\nline2\n',
175 message='commit2', vcs_type=backend.alias, parent=commit0)
176 message='commit2', vcs_type=backend.alias, parent=commit0)
176
177
177 commit2 = _commit_change(
178 commit2 = _commit_change(
178 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
179 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
179 message='commit3', vcs_type=backend.alias, parent=commit1)
180 message='commit3', vcs_type=backend.alias, parent=commit1)
180
181
181 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
182 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
182 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
183 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
183
184
184 response = self.app.get(
185 response = self.app.get(
185 url('compare_url',
186 url('compare_url',
186 repo_name=repo1.repo_name,
187 repo_name=repo1.repo_name,
187 source_ref_type="branch",
188 source_ref_type="branch",
188 source_ref=commit_id2,
189 source_ref=commit_id2,
189 target_repo=repo2.repo_name,
190 target_repo=repo2.repo_name,
190 target_ref_type="branch",
191 target_ref_type="branch",
191 target_ref=commit_id1,
192 target_ref=commit_id1,
192 merge='1'))
193 merge='1'))
193
194
194 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
195 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
195 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
196 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
196
197
197 compare_page = ComparePage(response)
198 compare_page = ComparePage(response)
198 compare_page.contains_change_summary(1, 2, 0)
199 compare_page.contains_change_summary(1, 2, 0)
199 compare_page.contains_commits([commit1, commit2])
200 compare_page.contains_commits([commit1, commit2])
200 compare_page.contains_file_links_and_anchors([
201 compare_page.contains_file_links_and_anchors([
201 ('file1', 'a_c--826e8142e6ba'),
202 ('file1', 'a_c--826e8142e6ba'),
202 ])
203 ])
203
204
204 # Swap is removed when comparing branches since it's a PR feature and
205 # Swap is removed when comparing branches since it's a PR feature and
205 # it is then a preview mode
206 # it is then a preview mode
206 compare_page.swap_is_hidden()
207 compare_page.swap_is_hidden()
207 compare_page.target_source_are_disabled()
208 compare_page.target_source_are_disabled()
208
209
209 @pytest.mark.xfail_backends("svn", "git")
210 @pytest.mark.xfail_backends("svn", "git")
210 def test_compare_of_unrelated_forks(self, backend):
211 def test_compare_of_unrelated_forks(self, backend):
211 # TODO: johbo: Fails for git due to some other issue it seems
212 # TODO: johbo: Fails for git due to some other issue it seems
212 orig = backend.create_repo(number_of_commits=1)
213 orig = backend.create_repo(number_of_commits=1)
213 fork = backend.create_repo(number_of_commits=1)
214 fork = backend.create_repo(number_of_commits=1)
214
215
215 response = self.app.get(
216 response = self.app.get(
216 url('compare_url',
217 url('compare_url',
217 repo_name=orig.repo_name,
218 repo_name=orig.repo_name,
218 action="compare",
219 action="compare",
219 source_ref_type="rev",
220 source_ref_type="rev",
220 source_ref="tip",
221 source_ref="tip",
221 target_ref_type="rev",
222 target_ref_type="rev",
222 target_ref="tip",
223 target_ref="tip",
223 merge='1',
224 merge='1',
224 target_repo=fork.repo_name),
225 target_repo=fork.repo_name),
225 status=400)
226 status=400)
226
227
227 response.mustcontain("Repositories unrelated.")
228 response.mustcontain("Repositories unrelated.")
228
229
229 @pytest.mark.xfail_backends("svn")
230 @pytest.mark.xfail_backends("svn")
230 def test_compare_cherry_pick_commits_from_bottom(self, backend):
231 def test_compare_cherry_pick_commits_from_bottom(self, backend):
231
232
232 # repo1:
233 # repo1:
233 # commit0:
234 # commit0:
234 # commit1:
235 # commit1:
235 # repo1-fork- in which we will cherry pick bottom commits
236 # repo1-fork- in which we will cherry pick bottom commits
236 # commit0:
237 # commit0:
237 # commit1:
238 # commit1:
238 # commit2: x
239 # commit2: x
239 # commit3: x
240 # commit3: x
240 # commit4: x
241 # commit4: x
241 # commit5:
242 # commit5:
242 # make repo1, and commit1+commit2
243 # make repo1, and commit1+commit2
243
244
244 repo1 = backend.create_repo()
245 repo1 = backend.create_repo()
245
246
246 # commit something !
247 # commit something !
247 commit0 = _commit_change(
248 commit0 = _commit_change(
248 repo1.repo_name, filename='file1', content='line1\n',
249 repo1.repo_name, filename='file1', content='line1\n',
249 message='commit1', vcs_type=backend.alias, parent=None,
250 message='commit1', vcs_type=backend.alias, parent=None,
250 newfile=True)
251 newfile=True)
251 commit1 = _commit_change(
252 commit1 = _commit_change(
252 repo1.repo_name, filename='file1', content='line1\nline2\n',
253 repo1.repo_name, filename='file1', content='line1\nline2\n',
253 message='commit2', vcs_type=backend.alias, parent=commit0)
254 message='commit2', vcs_type=backend.alias, parent=commit0)
254
255
255 # fork this repo
256 # fork this repo
256 repo2 = backend.create_fork()
257 repo2 = backend.create_fork()
257
258
258 # now make commit3-6
259 # now make commit3-6
259 commit2 = _commit_change(
260 commit2 = _commit_change(
260 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
261 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
261 message='commit3', vcs_type=backend.alias, parent=commit1)
262 message='commit3', vcs_type=backend.alias, parent=commit1)
262 commit3 = _commit_change(
263 commit3 = _commit_change(
263 repo1.repo_name, filename='file1',
264 repo1.repo_name, filename='file1',
264 content='line1\nline2\nline3\nline4\n', message='commit4',
265 content='line1\nline2\nline3\nline4\n', message='commit4',
265 vcs_type=backend.alias, parent=commit2)
266 vcs_type=backend.alias, parent=commit2)
266 commit4 = _commit_change(
267 commit4 = _commit_change(
267 repo1.repo_name, filename='file1',
268 repo1.repo_name, filename='file1',
268 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
269 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
269 vcs_type=backend.alias, parent=commit3)
270 vcs_type=backend.alias, parent=commit3)
270 _commit_change( # commit 5
271 _commit_change( # commit 5
271 repo1.repo_name, filename='file1',
272 repo1.repo_name, filename='file1',
272 content='line1\nline2\nline3\nline4\nline5\nline6\n',
273 content='line1\nline2\nline3\nline4\nline5\nline6\n',
273 message='commit6', vcs_type=backend.alias, parent=commit4)
274 message='commit6', vcs_type=backend.alias, parent=commit4)
274
275
275 response = self.app.get(
276 response = self.app.get(
276 url('compare_url',
277 url('compare_url',
277 repo_name=repo2.repo_name,
278 repo_name=repo2.repo_name,
278 source_ref_type="rev",
279 source_ref_type="rev",
279 # parent of commit2, in target repo2
280 # parent of commit2, in target repo2
280 source_ref=commit1.raw_id,
281 source_ref=commit1.raw_id,
281 target_repo=repo1.repo_name,
282 target_repo=repo1.repo_name,
282 target_ref_type="rev",
283 target_ref_type="rev",
283 target_ref=commit4.raw_id,
284 target_ref=commit4.raw_id,
284 merge='1',))
285 merge='1',))
285 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
286 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
286 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
287 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
287
288
288 # files
289 # files
289 compare_page = ComparePage(response)
290 compare_page = ComparePage(response)
290 compare_page.contains_change_summary(1, 3, 0)
291 compare_page.contains_change_summary(1, 3, 0)
291 compare_page.contains_commits([commit2, commit3, commit4])
292 compare_page.contains_commits([commit2, commit3, commit4])
292 compare_page.contains_file_links_and_anchors([
293 compare_page.contains_file_links_and_anchors([
293 ('file1', 'a_c--826e8142e6ba'),
294 ('file1', 'a_c--826e8142e6ba'),
294 ])
295 ])
295
296
296 @pytest.mark.xfail_backends("svn")
297 @pytest.mark.xfail_backends("svn")
297 def test_compare_cherry_pick_commits_from_top(self, backend):
298 def test_compare_cherry_pick_commits_from_top(self, backend):
298 # repo1:
299 # repo1:
299 # commit0:
300 # commit0:
300 # commit1:
301 # commit1:
301 # repo1-fork- in which we will cherry pick bottom commits
302 # repo1-fork- in which we will cherry pick bottom commits
302 # commit0:
303 # commit0:
303 # commit1:
304 # commit1:
304 # commit2:
305 # commit2:
305 # commit3: x
306 # commit3: x
306 # commit4: x
307 # commit4: x
307 # commit5: x
308 # commit5: x
308
309
309 # make repo1, and commit1+commit2
310 # make repo1, and commit1+commit2
310 repo1 = backend.create_repo()
311 repo1 = backend.create_repo()
311
312
312 # commit something !
313 # commit something !
313 commit0 = _commit_change(
314 commit0 = _commit_change(
314 repo1.repo_name, filename='file1', content='line1\n',
315 repo1.repo_name, filename='file1', content='line1\n',
315 message='commit1', vcs_type=backend.alias, parent=None,
316 message='commit1', vcs_type=backend.alias, parent=None,
316 newfile=True)
317 newfile=True)
317 commit1 = _commit_change(
318 commit1 = _commit_change(
318 repo1.repo_name, filename='file1', content='line1\nline2\n',
319 repo1.repo_name, filename='file1', content='line1\nline2\n',
319 message='commit2', vcs_type=backend.alias, parent=commit0)
320 message='commit2', vcs_type=backend.alias, parent=commit0)
320
321
321 # fork this repo
322 # fork this repo
322 backend.create_fork()
323 backend.create_fork()
323
324
324 # now make commit3-6
325 # now make commit3-6
325 commit2 = _commit_change(
326 commit2 = _commit_change(
326 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
327 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
327 message='commit3', vcs_type=backend.alias, parent=commit1)
328 message='commit3', vcs_type=backend.alias, parent=commit1)
328 commit3 = _commit_change(
329 commit3 = _commit_change(
329 repo1.repo_name, filename='file1',
330 repo1.repo_name, filename='file1',
330 content='line1\nline2\nline3\nline4\n', message='commit4',
331 content='line1\nline2\nline3\nline4\n', message='commit4',
331 vcs_type=backend.alias, parent=commit2)
332 vcs_type=backend.alias, parent=commit2)
332 commit4 = _commit_change(
333 commit4 = _commit_change(
333 repo1.repo_name, filename='file1',
334 repo1.repo_name, filename='file1',
334 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
335 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
335 vcs_type=backend.alias, parent=commit3)
336 vcs_type=backend.alias, parent=commit3)
336 commit5 = _commit_change(
337 commit5 = _commit_change(
337 repo1.repo_name, filename='file1',
338 repo1.repo_name, filename='file1',
338 content='line1\nline2\nline3\nline4\nline5\nline6\n',
339 content='line1\nline2\nline3\nline4\nline5\nline6\n',
339 message='commit6', vcs_type=backend.alias, parent=commit4)
340 message='commit6', vcs_type=backend.alias, parent=commit4)
340
341
341 response = self.app.get(
342 response = self.app.get(
342 url('compare_url',
343 url('compare_url',
343 repo_name=repo1.repo_name,
344 repo_name=repo1.repo_name,
344 source_ref_type="rev",
345 source_ref_type="rev",
345 # parent of commit3, not in source repo2
346 # parent of commit3, not in source repo2
346 source_ref=commit2.raw_id,
347 source_ref=commit2.raw_id,
347 target_ref_type="rev",
348 target_ref_type="rev",
348 target_ref=commit5.raw_id,
349 target_ref=commit5.raw_id,
349 merge='1',))
350 merge='1',))
350
351
351 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353
354
354 compare_page = ComparePage(response)
355 compare_page = ComparePage(response)
355 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_commits([commit3, commit4, commit5])
357 compare_page.contains_commits([commit3, commit4, commit5])
357
358
358 # files
359 # files
359 compare_page.contains_file_links_and_anchors([
360 compare_page.contains_file_links_and_anchors([
360 ('file1', 'a_c--826e8142e6ba'),
361 ('file1', 'a_c--826e8142e6ba'),
361 ])
362 ])
362
363
363 @pytest.mark.xfail_backends("svn")
364 @pytest.mark.xfail_backends("svn")
364 def test_compare_remote_branches(self, backend):
365 def test_compare_remote_branches(self, backend):
365 repo1 = backend.repo
366 repo1 = backend.repo
366 repo2 = backend.create_fork()
367 repo2 = backend.create_fork()
367
368
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370
371
371 response = self.app.get(
372 response = self.app.get(
372 url('compare_url',
373 url('compare_url',
373 repo_name=repo1.repo_name,
374 repo_name=repo1.repo_name,
374 source_ref_type="rev",
375 source_ref_type="rev",
375 source_ref=commit_id1,
376 source_ref=commit_id1,
376 target_ref_type="rev",
377 target_ref_type="rev",
377 target_ref=commit_id2,
378 target_ref=commit_id2,
378 target_repo=repo2.repo_name,
379 target_repo=repo2.repo_name,
379 merge='1',))
380 merge='1',))
380
381
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383
384
384 compare_page = ComparePage(response)
385 compare_page = ComparePage(response)
385
386
386 # outgoing commits between those commits
387 # outgoing commits between those commits
387 compare_page.contains_commits(
388 compare_page.contains_commits(
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389
390
390 # files
391 # files
391 compare_page.contains_file_links_and_anchors([
392 compare_page.contains_file_links_and_anchors([
392 ('vcs/backends/hg.py', 'a_c--9c390eb52cd6'),
393 ('vcs/backends/hg.py', 'a_c--9c390eb52cd6'),
393 ('vcs/backends/__init__.py', 'a_c--41b41c1f2796'),
394 ('vcs/backends/__init__.py', 'a_c--41b41c1f2796'),
394 ('vcs/backends/base.py', 'a_c--2f574d260608'),
395 ('vcs/backends/base.py', 'a_c--2f574d260608'),
395 ])
396 ])
396
397
397 @pytest.mark.xfail_backends("svn")
398 @pytest.mark.xfail_backends("svn")
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 repo1 = backend.create_repo()
400 repo1 = backend.create_repo()
400 r1_name = repo1.repo_name
401 r1_name = repo1.repo_name
401
402
402 commit0 = _commit_change(
403 commit0 = _commit_change(
403 repo=r1_name, filename='file1',
404 repo=r1_name, filename='file1',
404 content='line1', message='commit1', vcs_type=backend.alias,
405 content='line1', message='commit1', vcs_type=backend.alias,
405 newfile=True)
406 newfile=True)
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407
408
408 # fork the repo1
409 # fork the repo1
409 repo2 = backend.create_fork()
410 repo2 = backend.create_fork()
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411
412
412 self.r2_id = repo2.repo_id
413 self.r2_id = repo2.repo_id
413 r2_name = repo2.repo_name
414 r2_name = repo2.repo_name
414
415
415 commit1 = _commit_change(
416 commit1 = _commit_change(
416 repo=r2_name, filename='file1-fork',
417 repo=r2_name, filename='file1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
418 content='file1-line1-from-fork', message='commit1-fork',
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 newfile=True)
420 newfile=True)
420
421
421 commit2 = _commit_change(
422 commit2 = _commit_change(
422 repo=r2_name, filename='file2-fork',
423 repo=r2_name, filename='file2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
424 content='file2-line1-from-fork', message='commit2-fork',
424 vcs_type=backend.alias, parent=commit1,
425 vcs_type=backend.alias, parent=commit1,
425 newfile=True)
426 newfile=True)
426
427
427 _commit_change( # commit 3
428 _commit_change( # commit 3
428 repo=r2_name, filename='file3-fork',
429 repo=r2_name, filename='file3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
430 content='file3-line1-from-fork', message='commit3-fork',
430 vcs_type=backend.alias, parent=commit2, newfile=True)
431 vcs_type=backend.alias, parent=commit2, newfile=True)
431
432
432 # compare !
433 # compare !
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435
436
436 response = self.app.get(
437 response = self.app.get(
437 url('compare_url',
438 url('compare_url',
438 repo_name=r2_name,
439 repo_name=r2_name,
439 source_ref_type="branch",
440 source_ref_type="branch",
440 source_ref=commit_id1,
441 source_ref=commit_id1,
441 target_ref_type="branch",
442 target_ref_type="branch",
442 target_ref=commit_id2,
443 target_ref=commit_id2,
443 target_repo=r1_name,
444 target_repo=r1_name,
444 merge='1',))
445 merge='1',))
445
446
446 response.mustcontain('%s@%s' % (r2_name, commit_id1))
447 response.mustcontain('%s@%s' % (r2_name, commit_id1))
447 response.mustcontain('%s@%s' % (r1_name, commit_id2))
448 response.mustcontain('%s@%s' % (r1_name, commit_id2))
448 response.mustcontain('No files')
449 response.mustcontain('No files')
449 response.mustcontain('No Commits')
450 response.mustcontain('No Commits')
450
451
451 commit0 = _commit_change(
452 commit0 = _commit_change(
452 repo=r1_name, filename='file2',
453 repo=r1_name, filename='file2',
453 content='line1-added-after-fork', message='commit2-parent',
454 content='line1-added-after-fork', message='commit2-parent',
454 vcs_type=backend.alias, parent=None, newfile=True)
455 vcs_type=backend.alias, parent=None, newfile=True)
455
456
456 # compare !
457 # compare !
457 response = self.app.get(
458 response = self.app.get(
458 url('compare_url',
459 url('compare_url',
459 repo_name=r2_name,
460 repo_name=r2_name,
460 source_ref_type="branch",
461 source_ref_type="branch",
461 source_ref=commit_id1,
462 source_ref=commit_id1,
462 target_ref_type="branch",
463 target_ref_type="branch",
463 target_ref=commit_id2,
464 target_ref=commit_id2,
464 target_repo=r1_name,
465 target_repo=r1_name,
465 merge='1',))
466 merge='1',))
466
467
467 response.mustcontain('%s@%s' % (r2_name, commit_id1))
468 response.mustcontain('%s@%s' % (r2_name, commit_id1))
468 response.mustcontain('%s@%s' % (r1_name, commit_id2))
469 response.mustcontain('%s@%s' % (r1_name, commit_id2))
469
470
470 response.mustcontain("""commit2-parent""")
471 response.mustcontain("""commit2-parent""")
471 response.mustcontain("""line1-added-after-fork""")
472 response.mustcontain("""line1-added-after-fork""")
472 compare_page = ComparePage(response)
473 compare_page = ComparePage(response)
473 compare_page.contains_change_summary(1, 1, 0)
474 compare_page.contains_change_summary(1, 1, 0)
474
475
475 @pytest.mark.xfail_backends("svn")
476 @pytest.mark.xfail_backends("svn")
476 def test_compare_commits(self, backend):
477 def test_compare_commits(self, backend):
477 commit0 = backend.repo.get_commit(commit_idx=0)
478 commit0 = backend.repo.get_commit(commit_idx=0)
478 commit1 = backend.repo.get_commit(commit_idx=1)
479 commit1 = backend.repo.get_commit(commit_idx=1)
479
480
480 response = self.app.get(
481 response = self.app.get(
481 url('compare_url',
482 url('compare_url',
482 repo_name=backend.repo_name,
483 repo_name=backend.repo_name,
483 source_ref_type="rev",
484 source_ref_type="rev",
484 source_ref=commit0.raw_id,
485 source_ref=commit0.raw_id,
485 target_ref_type="rev",
486 target_ref_type="rev",
486 target_ref=commit1.raw_id,
487 target_ref=commit1.raw_id,
487 merge='1',),
488 merge='1',),
488 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
489 extra_environ={'HTTP_X_PARTIAL_XHR': '1'},)
489
490
490 # outgoing commits between those commits
491 # outgoing commits between those commits
491 compare_page = ComparePage(response)
492 compare_page = ComparePage(response)
492 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
493 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
493
494
494 def test_errors_when_comparing_unknown_repo(self, backend):
495 def test_errors_when_comparing_unknown_repo(self, backend):
495 repo = backend.repo
496 repo = backend.repo
496 badrepo = 'badrepo'
497 badrepo = 'badrepo'
497
498
498 response = self.app.get(
499 response = self.app.get(
499 url('compare_url',
500 url('compare_url',
500 repo_name=repo.repo_name,
501 repo_name=repo.repo_name,
501 source_ref_type="rev",
502 source_ref_type="rev",
502 source_ref='tip',
503 source_ref='tip',
503 target_ref_type="rev",
504 target_ref_type="rev",
504 target_ref='tip',
505 target_ref='tip',
505 target_repo=badrepo,
506 target_repo=badrepo,
506 merge='1',),
507 merge='1',),
507 status=302)
508 status=302)
508 redirected = response.follow()
509 redirected = response.follow()
509 redirected.mustcontain('Could not find the other repo: %s' % badrepo)
510 redirected.mustcontain('Could not find the other repo: %s' % badrepo)
510
511
511 def test_compare_not_in_preview_mode(self, backend_stub):
512 def test_compare_not_in_preview_mode(self, backend_stub):
512 commit0 = backend_stub.repo.get_commit(commit_idx=0)
513 commit0 = backend_stub.repo.get_commit(commit_idx=0)
513 commit1 = backend_stub.repo.get_commit(commit_idx=1)
514 commit1 = backend_stub.repo.get_commit(commit_idx=1)
514
515
515 response = self.app.get(url('compare_url',
516 response = self.app.get(url('compare_url',
516 repo_name=backend_stub.repo_name,
517 repo_name=backend_stub.repo_name,
517 source_ref_type="rev",
518 source_ref_type="rev",
518 source_ref=commit0.raw_id,
519 source_ref=commit0.raw_id,
519 target_ref_type="rev",
520 target_ref_type="rev",
520 target_ref=commit1.raw_id,
521 target_ref=commit1.raw_id,
521 ),)
522 ),)
522
523
523 # outgoing commits between those commits
524 # outgoing commits between those commits
524 compare_page = ComparePage(response)
525 compare_page = ComparePage(response)
525 compare_page.swap_is_visible()
526 compare_page.swap_is_visible()
526 compare_page.target_source_are_enabled()
527 compare_page.target_source_are_enabled()
527
528
528 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
529 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
529 orig = backend_hg.create_repo(number_of_commits=1)
530 orig = backend_hg.create_repo(number_of_commits=1)
530 fork = backend_hg.create_fork()
531 fork = backend_hg.create_fork()
531
532
532 settings_util.create_repo_rhodecode_ui(
533 settings_util.create_repo_rhodecode_ui(
533 orig, 'extensions', value='', key='largefiles', active=False)
534 orig, 'extensions', value='', key='largefiles', active=False)
534 settings_util.create_repo_rhodecode_ui(
535 settings_util.create_repo_rhodecode_ui(
535 fork, 'extensions', value='', key='largefiles', active=True)
536 fork, 'extensions', value='', key='largefiles', active=True)
536
537
537 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
538 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
538 'MercurialRepository.compare')
539 'MercurialRepository.compare')
539 with mock.patch(compare_module) as compare_mock:
540 with mock.patch(compare_module) as compare_mock:
540 compare_mock.side_effect = RepositoryRequirementError()
541 compare_mock.side_effect = RepositoryRequirementError()
541
542
542 response = self.app.get(
543 response = self.app.get(
543 url('compare_url',
544 url('compare_url',
544 repo_name=orig.repo_name,
545 repo_name=orig.repo_name,
545 action="compare",
546 action="compare",
546 source_ref_type="rev",
547 source_ref_type="rev",
547 source_ref="tip",
548 source_ref="tip",
548 target_ref_type="rev",
549 target_ref_type="rev",
549 target_ref="tip",
550 target_ref="tip",
550 merge='1',
551 merge='1',
551 target_repo=fork.repo_name),
552 target_repo=fork.repo_name),
552 status=302)
553 status=302)
553
554
554 assert_session_flash(
555 assert_session_flash(
555 response,
556 response,
556 'Could not compare repos with different large file settings')
557 'Could not compare repos with different large file settings')
557
558
558
559
559 @pytest.mark.usefixtures("autologin_user")
560 @pytest.mark.usefixtures("autologin_user")
560 class TestCompareControllerSvn:
561 class TestCompareControllerSvn:
561
562
562 def test_supports_references_with_path(self, app, backend_svn):
563 def test_supports_references_with_path(self, app, backend_svn):
563 repo = backend_svn['svn-simple-layout']
564 repo = backend_svn['svn-simple-layout']
564 commit_id = repo.get_commit(commit_idx=-1).raw_id
565 commit_id = repo.get_commit(commit_idx=-1).raw_id
565 response = app.get(
566 response = app.get(
566 url('compare_url',
567 url('compare_url',
567 repo_name=repo.repo_name,
568 repo_name=repo.repo_name,
568 source_ref_type="tag",
569 source_ref_type="tag",
569 source_ref="%s@%s" % ('tags/v0.1', commit_id),
570 source_ref="%s@%s" % ('tags/v0.1', commit_id),
570 target_ref_type="tag",
571 target_ref_type="tag",
571 target_ref="%s@%s" % ('tags/v0.2', commit_id),
572 target_ref="%s@%s" % ('tags/v0.2', commit_id),
572 merge='1',),
573 merge='1',),
573 status=200)
574 status=200)
574
575
575 # Expecting no commits, since both paths are at the same revision
576 # Expecting no commits, since both paths are at the same revision
576 response.mustcontain('No Commits')
577 response.mustcontain('No Commits')
577
578
578 # Should find only one file changed when comparing those two tags
579 # Should find only one file changed when comparing those two tags
579 response.mustcontain('example.py')
580 response.mustcontain('example.py')
580 compare_page = ComparePage(response)
581 compare_page = ComparePage(response)
581 compare_page.contains_change_summary(1, 5, 1)
582 compare_page.contains_change_summary(1, 5, 1)
582
583
583 def test_shows_commits_if_different_ids(self, app, backend_svn):
584 def test_shows_commits_if_different_ids(self, app, backend_svn):
584 repo = backend_svn['svn-simple-layout']
585 repo = backend_svn['svn-simple-layout']
585 source_id = repo.get_commit(commit_idx=-6).raw_id
586 source_id = repo.get_commit(commit_idx=-6).raw_id
586 target_id = repo.get_commit(commit_idx=-1).raw_id
587 target_id = repo.get_commit(commit_idx=-1).raw_id
587 response = app.get(
588 response = app.get(
588 url('compare_url',
589 url('compare_url',
589 repo_name=repo.repo_name,
590 repo_name=repo.repo_name,
590 source_ref_type="tag",
591 source_ref_type="tag",
591 source_ref="%s@%s" % ('tags/v0.1', source_id),
592 source_ref="%s@%s" % ('tags/v0.1', source_id),
592 target_ref_type="tag",
593 target_ref_type="tag",
593 target_ref="%s@%s" % ('tags/v0.2', target_id),
594 target_ref="%s@%s" % ('tags/v0.2', target_id),
594 merge='1',),
595 merge='1',),
595 status=200)
596 status=200)
596
597
597 # It should show commits
598 # It should show commits
598 assert 'No Commits' not in response.body
599 assert 'No Commits' not in response.body
599
600
600 # Should find only one file changed when comparing those two tags
601 # Should find only one file changed when comparing those two tags
601 response.mustcontain('example.py')
602 response.mustcontain('example.py')
602 compare_page = ComparePage(response)
603 compare_page = ComparePage(response)
603 compare_page.contains_change_summary(1, 5, 1)
604 compare_page.contains_change_summary(1, 5, 1)
604
605
605
606
606 class ComparePage(AssertResponse):
607 class ComparePage(AssertResponse):
607 """
608 """
608 Abstracts the page template from the tests
609 Abstracts the page template from the tests
609 """
610 """
610
611
611 def contains_file_links_and_anchors(self, files):
612 def contains_file_links_and_anchors(self, files):
613 doc = lxml.html.fromstring(self.response.body)
612 for filename, file_id in files:
614 for filename, file_id in files:
613 self.contains_one_link(filename, '#' + file_id)
614 self.contains_one_anchor(file_id)
615 self.contains_one_anchor(file_id)
616 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
617 assert len(diffblock) == 1
618 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
615
619
616 def contains_change_summary(self, files_changed, inserted, deleted):
620 def contains_change_summary(self, files_changed, inserted, deleted):
617 template = (
621 template = (
618 "{files_changed} file{plural} changed: "
622 "{files_changed} file{plural} changed: "
619 "{inserted} inserted, {deleted} deleted")
623 "{inserted} inserted, {deleted} deleted")
620 self.response.mustcontain(template.format(
624 self.response.mustcontain(template.format(
621 files_changed=files_changed,
625 files_changed=files_changed,
622 plural="s" if files_changed > 1 else "",
626 plural="s" if files_changed > 1 else "",
623 inserted=inserted,
627 inserted=inserted,
624 deleted=deleted))
628 deleted=deleted))
625
629
626 def contains_commits(self, commits, ancestors=None):
630 def contains_commits(self, commits, ancestors=None):
627 response = self.response
631 response = self.response
628
632
629 for commit in commits:
633 for commit in commits:
630 # Expecting to see the commit message in an element which
634 # Expecting to see the commit message in an element which
631 # has the ID "c-{commit.raw_id}"
635 # has the ID "c-{commit.raw_id}"
632 self.element_contains('#c-' + commit.raw_id, commit.message)
636 self.element_contains('#c-' + commit.raw_id, commit.message)
633 self.contains_one_link(
637 self.contains_one_link(
634 'r%s:%s' % (commit.idx, commit.short_id),
638 'r%s:%s' % (commit.idx, commit.short_id),
635 self._commit_url(commit))
639 self._commit_url(commit))
636 if ancestors:
640 if ancestors:
637 response.mustcontain('Ancestor')
641 response.mustcontain('Ancestor')
638 for ancestor in ancestors:
642 for ancestor in ancestors:
639 self.contains_one_link(
643 self.contains_one_link(
640 ancestor.short_id, self._commit_url(ancestor))
644 ancestor.short_id, self._commit_url(ancestor))
641
645
642 def _commit_url(self, commit):
646 def _commit_url(self, commit):
643 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
647 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
644
648
645 def swap_is_hidden(self):
649 def swap_is_hidden(self):
646 assert '<a id="btn-swap"' not in self.response.text
650 assert '<a id="btn-swap"' not in self.response.text
647
651
648 def swap_is_visible(self):
652 def swap_is_visible(self):
649 assert '<a id="btn-swap"' in self.response.text
653 assert '<a id="btn-swap"' in self.response.text
650
654
651 def target_source_are_disabled(self):
655 def target_source_are_disabled(self):
652 response = self.response
656 response = self.response
653 response.mustcontain("var enable_fields = false;")
657 response.mustcontain("var enable_fields = false;")
654 response.mustcontain('.select2("enable", enable_fields)')
658 response.mustcontain('.select2("enable", enable_fields)')
655
659
656 def target_source_are_enabled(self):
660 def target_source_are_enabled(self):
657 response = self.response
661 response = self.response
658 response.mustcontain("var enable_fields = true;")
662 response.mustcontain("var enable_fields = true;")
659
663
660
664
661 def _commit_change(
665 def _commit_change(
662 repo, filename, content, message, vcs_type, parent=None,
666 repo, filename, content, message, vcs_type, parent=None,
663 newfile=False):
667 newfile=False):
664 repo = Repository.get_by_repo_name(repo)
668 repo = Repository.get_by_repo_name(repo)
665 _commit = parent
669 _commit = parent
666 if not parent:
670 if not parent:
667 _commit = EmptyCommit(alias=vcs_type)
671 _commit = EmptyCommit(alias=vcs_type)
668
672
669 if newfile:
673 if newfile:
670 nodes = {
674 nodes = {
671 filename: {
675 filename: {
672 'content': content
676 'content': content
673 }
677 }
674 }
678 }
675 commit = ScmModel().create_nodes(
679 commit = ScmModel().create_nodes(
676 user=TEST_USER_ADMIN_LOGIN, repo=repo,
680 user=TEST_USER_ADMIN_LOGIN, repo=repo,
677 message=message,
681 message=message,
678 nodes=nodes,
682 nodes=nodes,
679 parent_commit=_commit,
683 parent_commit=_commit,
680 author=TEST_USER_ADMIN_LOGIN,
684 author=TEST_USER_ADMIN_LOGIN,
681 )
685 )
682 else:
686 else:
683 commit = ScmModel().commit_change(
687 commit = ScmModel().commit_change(
684 repo=repo.scm_instance(), repo_name=repo.repo_name,
688 repo=repo.scm_instance(), repo_name=repo.repo_name,
685 commit=parent, user=TEST_USER_ADMIN_LOGIN,
689 commit=parent, user=TEST_USER_ADMIN_LOGIN,
686 author=TEST_USER_ADMIN_LOGIN,
690 author=TEST_USER_ADMIN_LOGIN,
687 message=message,
691 message=message,
688 content=content,
692 content=content,
689 f_path=filename
693 f_path=filename
690 )
694 )
691 return commit
695 return commit
@@ -1,330 +1,330 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.codeblocks import (
23 from rhodecode.lib.codeblocks import (
24 tokenize_string, split_token_stream, rollup_tokenstream,
24 tokenize_string, split_token_stream, rollup_tokenstream,
25 render_tokenstream)
25 render_tokenstream)
26 from pygments.lexers import get_lexer_by_name
26 from pygments.lexers import get_lexer_by_name
27
27
28
28
29 class TestTokenizeString(object):
29 class TestTokenizeString(object):
30
30
31 python_code = '''
31 python_code = '''
32 import this
32 import this
33
33
34 var = 6
34 var = 6
35 print "this"
35 print "this"
36
36
37 '''
37 '''
38
38
39 def test_tokenize_as_python(self):
39 def test_tokenize_as_python(self):
40 lexer = get_lexer_by_name('python')
40 lexer = get_lexer_by_name('python')
41 tokens = list(tokenize_string(self.python_code, lexer))
41 tokens = list(tokenize_string(self.python_code, lexer))
42
42
43 assert tokens == [
43 assert tokens == [
44 ('', u'\n'),
44 ('', u'\n'),
45 ('', u' '),
45 ('', u' '),
46 ('kn', u'import'),
46 ('kn', u'import'),
47 ('', u' '),
47 ('', u' '),
48 ('nn', u'this'),
48 ('nn', u'this'),
49 ('', u'\n'),
49 ('', u'\n'),
50 ('', u'\n'),
50 ('', u'\n'),
51 ('', u' '),
51 ('', u' '),
52 ('n', u'var'),
52 ('n', u'var'),
53 ('', u' '),
53 ('', u' '),
54 ('o', u'='),
54 ('o', u'='),
55 ('', u' '),
55 ('', u' '),
56 ('mi', u'6'),
56 ('mi', u'6'),
57 ('', u'\n'),
57 ('', u'\n'),
58 ('', u' '),
58 ('', u' '),
59 ('k', u'print'),
59 ('k', u'print'),
60 ('', u' '),
60 ('', u' '),
61 ('s2', u'"'),
61 ('s2', u'"'),
62 ('s2', u'this'),
62 ('s2', u'this'),
63 ('s2', u'"'),
63 ('s2', u'"'),
64 ('', u'\n'),
64 ('', u'\n'),
65 ('', u'\n'),
65 ('', u'\n'),
66 ('', u' ')
66 ('', u' ')
67 ]
67 ]
68
68
69 def test_tokenize_as_text(self):
69 def test_tokenize_as_text(self):
70 lexer = get_lexer_by_name('text')
70 lexer = get_lexer_by_name('text')
71 tokens = list(tokenize_string(self.python_code, lexer))
71 tokens = list(tokenize_string(self.python_code, lexer))
72
72
73 assert tokens == [
73 assert tokens == [
74 ('',
74 ('',
75 u'\n import this\n\n var = 6\n print "this"\n\n ')
75 u'\n import this\n\n var = 6\n print "this"\n\n ')
76 ]
76 ]
77
77
78
78
79 class TestSplitTokenStream(object):
79 class TestSplitTokenStream(object):
80
80
81 def test_split_token_stream(self):
81 def test_split_token_stream(self):
82 lines = list(split_token_stream(
82 lines = list(split_token_stream(
83 [('type1', 'some\ntext'), ('type2', 'more\n')]))
83 [('type1', 'some\ntext'), ('type2', 'more\n')]))
84
84
85 assert lines == [
85 assert lines == [
86 [('type1', u'some')],
86 [('type1', u'some')],
87 [('type1', u'text'), ('type2', u'more')],
87 [('type1', u'text'), ('type2', u'more')],
88 [('type2', u'')],
88 [('type2', u'')],
89 ]
89 ]
90
90
91 def test_split_token_stream_other_char(self):
91 def test_split_token_stream_other_char(self):
92 lines = list(split_token_stream(
92 lines = list(split_token_stream(
93 [('type1', 'some\ntext'), ('type2', 'more\n')],
93 [('type1', 'some\ntext'), ('type2', 'more\n')],
94 split_string='m'))
94 split_string='m'))
95
95
96 assert lines == [
96 assert lines == [
97 [('type1', 'so')],
97 [('type1', 'so')],
98 [('type1', 'e\ntext'), ('type2', '')],
98 [('type1', 'e\ntext'), ('type2', '')],
99 [('type2', 'ore\n')],
99 [('type2', 'ore\n')],
100 ]
100 ]
101
101
102 def test_split_token_stream_without_char(self):
102 def test_split_token_stream_without_char(self):
103 lines = list(split_token_stream(
103 lines = list(split_token_stream(
104 [('type1', 'some\ntext'), ('type2', 'more\n')],
104 [('type1', 'some\ntext'), ('type2', 'more\n')],
105 split_string='z'))
105 split_string='z'))
106
106
107 assert lines == [
107 assert lines == [
108 [('type1', 'some\ntext'), ('type2', 'more\n')]
108 [('type1', 'some\ntext'), ('type2', 'more\n')]
109 ]
109 ]
110
110
111 def test_split_token_stream_single(self):
111 def test_split_token_stream_single(self):
112 lines = list(split_token_stream(
112 lines = list(split_token_stream(
113 [('type1', '\n')], split_string='\n'))
113 [('type1', '\n')], split_string='\n'))
114
114
115 assert lines == [
115 assert lines == [
116 [('type1', '')],
116 [('type1', '')],
117 [('type1', '')],
117 [('type1', '')],
118 ]
118 ]
119
119
120 def test_split_token_stream_single_repeat(self):
120 def test_split_token_stream_single_repeat(self):
121 lines = list(split_token_stream(
121 lines = list(split_token_stream(
122 [('type1', '\n\n\n')], split_string='\n'))
122 [('type1', '\n\n\n')], split_string='\n'))
123
123
124 assert lines == [
124 assert lines == [
125 [('type1', '')],
125 [('type1', '')],
126 [('type1', '')],
126 [('type1', '')],
127 [('type1', '')],
127 [('type1', '')],
128 [('type1', '')],
128 [('type1', '')],
129 ]
129 ]
130
130
131 def test_split_token_stream_multiple_repeat(self):
131 def test_split_token_stream_multiple_repeat(self):
132 lines = list(split_token_stream(
132 lines = list(split_token_stream(
133 [('type1', '\n\n'), ('type2', '\n\n')], split_string='\n'))
133 [('type1', '\n\n'), ('type2', '\n\n')], split_string='\n'))
134
134
135 assert lines == [
135 assert lines == [
136 [('type1', '')],
136 [('type1', '')],
137 [('type1', '')],
137 [('type1', '')],
138 [('type1', ''), ('type2', '')],
138 [('type1', ''), ('type2', '')],
139 [('type2', '')],
139 [('type2', '')],
140 [('type2', '')],
140 [('type2', '')],
141 ]
141 ]
142
142
143
143
144 class TestRollupTokens(object):
144 class TestRollupTokens(object):
145
145
146 @pytest.mark.parametrize('tokenstream,output', [
146 @pytest.mark.parametrize('tokenstream,output', [
147 ([],
147 ([],
148 []),
148 []),
149 ([('A', 'hell'), ('A', 'o')], [
149 ([('A', 'hell'), ('A', 'o')], [
150 ('A', [
150 ('A', [
151 ('', 'hello')]),
151 ('', 'hello')]),
152 ]),
152 ]),
153 ([('A', 'hell'), ('B', 'o')], [
153 ([('A', 'hell'), ('B', 'o')], [
154 ('A', [
154 ('A', [
155 ('', 'hell')]),
155 ('', 'hell')]),
156 ('B', [
156 ('B', [
157 ('', 'o')]),
157 ('', 'o')]),
158 ]),
158 ]),
159 ([('A', 'hel'), ('A', 'lo'), ('B', ' '), ('A', 'there')], [
159 ([('A', 'hel'), ('A', 'lo'), ('B', ' '), ('A', 'there')], [
160 ('A', [
160 ('A', [
161 ('', 'hello')]),
161 ('', 'hello')]),
162 ('B', [
162 ('B', [
163 ('', ' ')]),
163 ('', ' ')]),
164 ('A', [
164 ('A', [
165 ('', 'there')]),
165 ('', 'there')]),
166 ]),
166 ]),
167 ])
167 ])
168 def test_rollup_tokenstream_without_ops(self, tokenstream, output):
168 def test_rollup_tokenstream_without_ops(self, tokenstream, output):
169 assert list(rollup_tokenstream(tokenstream)) == output
169 assert list(rollup_tokenstream(tokenstream)) == output
170
170
171 @pytest.mark.parametrize('tokenstream,output', [
171 @pytest.mark.parametrize('tokenstream,output', [
172 ([],
172 ([],
173 []),
173 []),
174 ([('A', '', 'hell'), ('A', '', 'o')], [
174 ([('A', '', 'hell'), ('A', '', 'o')], [
175 ('A', [
175 ('A', [
176 ('', 'hello')]),
176 ('', 'hello')]),
177 ]),
177 ]),
178 ([('A', '', 'hell'), ('B', '', 'o')], [
178 ([('A', '', 'hell'), ('B', '', 'o')], [
179 ('A', [
179 ('A', [
180 ('', 'hell')]),
180 ('', 'hell')]),
181 ('B', [
181 ('B', [
182 ('', 'o')]),
182 ('', 'o')]),
183 ]),
183 ]),
184 ([('A', '', 'h'), ('B', '', 'e'), ('C', '', 'y')], [
184 ([('A', '', 'h'), ('B', '', 'e'), ('C', '', 'y')], [
185 ('A', [
185 ('A', [
186 ('', 'h')]),
186 ('', 'h')]),
187 ('B', [
187 ('B', [
188 ('', 'e')]),
188 ('', 'e')]),
189 ('C', [
189 ('C', [
190 ('', 'y')]),
190 ('', 'y')]),
191 ]),
191 ]),
192 ([('A', '', 'h'), ('A', '', 'e'), ('C', '', 'y')], [
192 ([('A', '', 'h'), ('A', '', 'e'), ('C', '', 'y')], [
193 ('A', [
193 ('A', [
194 ('', 'he')]),
194 ('', 'he')]),
195 ('C', [
195 ('C', [
196 ('', 'y')]),
196 ('', 'y')]),
197 ]),
197 ]),
198 ([('A', 'ins', 'h'), ('A', 'ins', 'e')], [
198 ([('A', 'ins', 'h'), ('A', 'ins', 'e')], [
199 ('A', [
199 ('A', [
200 ('ins', 'he')
200 ('ins', 'he')
201 ]),
201 ]),
202 ]),
202 ]),
203 ([('A', 'ins', 'h'), ('A', 'del', 'e')], [
203 ([('A', 'ins', 'h'), ('A', 'del', 'e')], [
204 ('A', [
204 ('A', [
205 ('ins', 'h'),
205 ('ins', 'h'),
206 ('del', 'e')
206 ('del', 'e')
207 ]),
207 ]),
208 ]),
208 ]),
209 ([('A', 'ins', 'h'), ('B', 'del', 'e'), ('B', 'del', 'y')], [
209 ([('A', 'ins', 'h'), ('B', 'del', 'e'), ('B', 'del', 'y')], [
210 ('A', [
210 ('A', [
211 ('ins', 'h'),
211 ('ins', 'h'),
212 ]),
212 ]),
213 ('B', [
213 ('B', [
214 ('del', 'ey'),
214 ('del', 'ey'),
215 ]),
215 ]),
216 ]),
216 ]),
217 ([('A', 'ins', 'h'), ('A', 'del', 'e'), ('B', 'del', 'y')], [
217 ([('A', 'ins', 'h'), ('A', 'del', 'e'), ('B', 'del', 'y')], [
218 ('A', [
218 ('A', [
219 ('ins', 'h'),
219 ('ins', 'h'),
220 ('del', 'e'),
220 ('del', 'e'),
221 ]),
221 ]),
222 ('B', [
222 ('B', [
223 ('del', 'y'),
223 ('del', 'y'),
224 ]),
224 ]),
225 ]),
225 ]),
226 ([('A', '', 'some'), ('A', 'ins', 'new'), ('A', '', 'name')], [
226 ([('A', '', 'some'), ('A', 'ins', 'new'), ('A', '', 'name')], [
227 ('A', [
227 ('A', [
228 ('', 'some'),
228 ('', 'some'),
229 ('ins', 'new'),
229 ('ins', 'new'),
230 ('', 'name'),
230 ('', 'name'),
231 ]),
231 ]),
232 ]),
232 ]),
233 ])
233 ])
234 def test_rollup_tokenstream_with_ops(self, tokenstream, output):
234 def test_rollup_tokenstream_with_ops(self, tokenstream, output):
235 assert list(rollup_tokenstream(tokenstream)) == output
235 assert list(rollup_tokenstream(tokenstream)) == output
236
236
237
237
238 class TestRenderTokenStream(object):
238 class TestRenderTokenStream(object):
239
239
240 @pytest.mark.parametrize('tokenstream,output', [
240 @pytest.mark.parametrize('tokenstream,output', [
241 (
241 (
242 [],
242 [],
243 '',
243 '',
244 ),
244 ),
245 (
245 (
246 [('', '', u'')],
246 [('', '', u'')],
247 '<span></span>',
247 '<span></span>',
248 ),
248 ),
249 (
249 (
250 [('', '', u'text')],
250 [('', '', u'text')],
251 '<span>text</span>',
251 '<span>text</span>',
252 ),
252 ),
253 (
253 (
254 [('A', '', u'')],
254 [('A', '', u'')],
255 '<span class="A"></span>',
255 '<span class="A"></span>',
256 ),
256 ),
257 (
257 (
258 [('A', '', u'hello')],
258 [('A', '', u'hello')],
259 '<span class="A">hello</span>',
259 '<span class="A">hello</span>',
260 ),
260 ),
261 (
261 (
262 [('A', '', u'hel'), ('A', '', u'lo')],
262 [('A', '', u'hel'), ('A', '', u'lo')],
263 '<span class="A">hello</span>',
263 '<span class="A">hello</span>',
264 ),
264 ),
265 (
265 (
266 [('A', '', u'two\n'), ('A', '', u'lines')],
266 [('A', '', u'two\n'), ('A', '', u'lines')],
267 '<span class="A">two<nl>\n</nl>lines</span>',
267 '<span class="A">two\nlines</span>',
268 ),
268 ),
269 (
269 (
270 [('A', '', u'\nthree\n'), ('A', '', u'lines')],
270 [('A', '', u'\nthree\n'), ('A', '', u'lines')],
271 '<span class="A"><nl>\n</nl>three<nl>\n</nl>lines</span>',
271 '<span class="A">\nthree\nlines</span>',
272 ),
272 ),
273 (
273 (
274 [('', '', u'\n'), ('A', '', u'line')],
274 [('', '', u'\n'), ('A', '', u'line')],
275 '<span><nl>\n</nl></span><span class="A">line</span>',
275 '<span>\n</span><span class="A">line</span>',
276 ),
276 ),
277 (
277 (
278 [('', 'ins', u'\n'), ('A', '', u'line')],
278 [('', 'ins', u'\n'), ('A', '', u'line')],
279 '<span><ins><nl>\n</nl></ins></span><span class="A">line</span>',
279 '<span><ins>\n</ins></span><span class="A">line</span>',
280 ),
280 ),
281 (
281 (
282 [('A', '', u'hel'), ('A', 'ins', u'lo')],
282 [('A', '', u'hel'), ('A', 'ins', u'lo')],
283 '<span class="A">hel<ins>lo</ins></span>',
283 '<span class="A">hel<ins>lo</ins></span>',
284 ),
284 ),
285 (
285 (
286 [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'ins', u'o')],
286 [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'ins', u'o')],
287 '<span class="A">hel<ins>lo</ins></span>',
287 '<span class="A">hel<ins>lo</ins></span>',
288 ),
288 ),
289 (
289 (
290 [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'del', u'o')],
290 [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'del', u'o')],
291 '<span class="A">hel<ins>l</ins><del>o</del></span>',
291 '<span class="A">hel<ins>l</ins><del>o</del></span>',
292 ),
292 ),
293 (
293 (
294 [('A', '', u'hel'), ('B', '', u'lo')],
294 [('A', '', u'hel'), ('B', '', u'lo')],
295 '<span class="A">hel</span><span class="B">lo</span>',
295 '<span class="A">hel</span><span class="B">lo</span>',
296 ),
296 ),
297 (
297 (
298 [('A', '', u'hel'), ('B', 'ins', u'lo')],
298 [('A', '', u'hel'), ('B', 'ins', u'lo')],
299 '<span class="A">hel</span><span class="B"><ins>lo</ins></span>',
299 '<span class="A">hel</span><span class="B"><ins>lo</ins></span>',
300 ),
300 ),
301 ])
301 ])
302 def test_render_tokenstream_with_ops(self, tokenstream, output):
302 def test_render_tokenstream_with_ops(self, tokenstream, output):
303 html = render_tokenstream(tokenstream)
303 html = render_tokenstream(tokenstream)
304 assert html == output
304 assert html == output
305
305
306 @pytest.mark.parametrize('tokenstream,output', [
306 @pytest.mark.parametrize('tokenstream,output', [
307 (
307 (
308 [('A', u'hel'), ('A', u'lo')],
308 [('A', u'hel'), ('A', u'lo')],
309 '<span class="A">hello</span>',
309 '<span class="A">hello</span>',
310 ),
310 ),
311 (
311 (
312 [('A', u'hel'), ('A', u'l'), ('A', u'o')],
312 [('A', u'hel'), ('A', u'l'), ('A', u'o')],
313 '<span class="A">hello</span>',
313 '<span class="A">hello</span>',
314 ),
314 ),
315 (
315 (
316 [('A', u'hel'), ('A', u'l'), ('A', u'o')],
316 [('A', u'hel'), ('A', u'l'), ('A', u'o')],
317 '<span class="A">hello</span>',
317 '<span class="A">hello</span>',
318 ),
318 ),
319 (
319 (
320 [('A', u'hel'), ('B', u'lo')],
320 [('A', u'hel'), ('B', u'lo')],
321 '<span class="A">hel</span><span class="B">lo</span>',
321 '<span class="A">hel</span><span class="B">lo</span>',
322 ),
322 ),
323 (
323 (
324 [('A', u'hel'), ('B', u'lo')],
324 [('A', u'hel'), ('B', u'lo')],
325 '<span class="A">hel</span><span class="B">lo</span>',
325 '<span class="A">hel</span><span class="B">lo</span>',
326 ),
326 ),
327 ])
327 ])
328 def test_render_tokenstream_without_ops(self, tokenstream, output):
328 def test_render_tokenstream_without_ops(self, tokenstream, output):
329 html = render_tokenstream(tokenstream)
329 html = render_tokenstream(tokenstream)
330 assert html == output
330 assert html == output
General Comments 0
You need to be logged in to leave comments. Login now