Matt Mackall - r2298:4be9a79b merge default

@@ -1,3653 +1,3654 @@
1 1 #!/usr/bin/env wish
2 2
3 3 # Copyright (C) 2005 Paul Mackerras. All rights reserved.
4 4 # This program is free software; it may be used, copied, modified
5 5 # and distributed under the terms of the GNU General Public Licence,
6 6 # either version 2, or (at your option) any later version.
7 7
8 8 proc gitdir {} {
9 9 global env
10 10 if {[info exists env(GIT_DIR)]} {
11 11 return $env(GIT_DIR)
12 12 } else {
13 13 return ".hg"
14 14 }
15 15 }
16 16
17 17 proc getcommits {rargs} {
18 18 global commits commfd phase canv mainfont env
19 19 global startmsecs nextupdate ncmupdate
20 20 global ctext maincursor textcursor leftover
21 21
22 22 # check that we can find a .git directory somewhere...
23 23 set gitdir [gitdir]
24 24 if {![file isdirectory $gitdir]} {
25 25 error_popup "Cannot find the git directory \"$gitdir\"."
26 26 exit 1
27 27 }
28 28 set commits {}
29 29 set phase getcommits
30 30 set startmsecs [clock clicks -milliseconds]
31 31 set nextupdate [expr $startmsecs + 100]
32 32 set ncmupdate 1
33 33 if [catch {
34 34 set parse_args [concat --default HEAD $rargs]
35 35 set parsed_args [split [eval exec hg debug-rev-parse $parse_args] "\n"]
36 36 }] {
37 37 # if git-rev-parse failed for some reason...
38 38 if {$rargs == {}} {
39 39 set rargs HEAD
40 40 }
41 41 set parsed_args $rargs
42 42 }
43 43 if [catch {
44 44 set commfd [open "|hg debug-rev-list --header --topo-order --parents $parsed_args" r]
45 45 } err] {
46 46 puts stderr "Error executing hg debug-rev-list: $err"
47 47 exit 1
48 48 }
49 49 set leftover {}
50 50 fconfigure $commfd -blocking 0 -translation lf
51 51 fileevent $commfd readable [list getcommitlines $commfd]
52 52 $canv delete all
53 53 $canv create text 3 3 -anchor nw -text "Reading commits..." \
54 54 -font $mainfont -tags textitems
55 55 . config -cursor watch
56 56 settextcursor watch
57 57 }
58 58
59 59 proc getcommitlines {commfd} {
60 60 global commits parents cdate children
61 61 global commitlisted phase commitinfo nextupdate
62 62 global stopped redisplaying leftover
63 63
64 64 set stuff [read $commfd]
65 65 if {$stuff == {}} {
66 66 if {![eof $commfd]} return
67 67 # set it blocking so we wait for the process to terminate
68 68 fconfigure $commfd -blocking 1
69 69 if {![catch {close $commfd} err]} {
70 70 after idle finishcommits
71 71 return
72 72 }
73 73 if {[string range $err 0 4] == "usage"} {
74 74 set err \
75 75 {Gitk: error reading commits: bad arguments to git-rev-list.
76 76 (Note: arguments to gitk are passed to git-rev-list
77 77 to allow selection of commits to be displayed.)}
78 78 } else {
79 79 set err "Error reading commits: $err"
80 80 }
81 81 error_popup $err
82 82 exit 1
83 83 }
84 84 set start 0
85 85 while 1 {
86 86 set i [string first "\0" $stuff $start]
87 87 if {$i < 0} {
88 88 append leftover [string range $stuff $start end]
89 89 return
90 90 }
91 91 set cmit [string range $stuff $start [expr {$i - 1}]]
92 92 if {$start == 0} {
93 93 set cmit "$leftover$cmit"
94 94 set leftover {}
95 95 }
96 96 set start [expr {$i + 1}]
97 97 set j [string first "\n" $cmit]
98 98 set ok 0
99 99 if {$j >= 0} {
100 100 set ids [string range $cmit 0 [expr {$j - 1}]]
101 101 set ok 1
102 102 foreach id $ids {
103 103 if {![regexp {^[0-9a-f]{40}$} $id]} {
104 104 set ok 0
105 105 break
106 106 }
107 107 }
108 108 }
109 109 if {!$ok} {
110 110 set shortcmit $cmit
111 111 if {[string length $shortcmit] > 80} {
112 112 set shortcmit "[string range $shortcmit 0 80]..."
113 113 }
114 114 error_popup "Can't parse hg debug-rev-list output: {$shortcmit}"
115 115 exit 1
116 116 }
117 117 set id [lindex $ids 0]
118 118 set olds [lrange $ids 1 end]
119 119 set cmit [string range $cmit [expr {$j + 1}] end]
120 120 lappend commits $id
121 121 set commitlisted($id) 1
122 122 parsecommit $id $cmit 1 [lrange $ids 1 end]
123 123 drawcommit $id
124 124 if {[clock clicks -milliseconds] >= $nextupdate} {
125 125 doupdate 1
126 126 }
127 127 while {$redisplaying} {
128 128 set redisplaying 0
129 129 if {$stopped == 1} {
130 130 set stopped 0
131 131 set phase "getcommits"
132 132 foreach id $commits {
133 133 drawcommit $id
134 134 if {$stopped} break
135 135 if {[clock clicks -milliseconds] >= $nextupdate} {
136 136 doupdate 1
137 137 }
138 138 }
139 139 }
140 140 }
141 141 }
142 142 }
143 143
144 144 proc doupdate {reading} {
145 145 global commfd nextupdate numcommits ncmupdate
146 146
147 147 if {$reading} {
148 148 fileevent $commfd readable {}
149 149 }
150 150 update
151 151 set nextupdate [expr {[clock clicks -milliseconds] + 100}]
152 152 if {$numcommits < 100} {
153 153 set ncmupdate [expr {$numcommits + 1}]
154 154 } elseif {$numcommits < 10000} {
155 155 set ncmupdate [expr {$numcommits + 10}]
156 156 } else {
157 157 set ncmupdate [expr {$numcommits + 100}]
158 158 }
159 159 if {$reading} {
160 160 fileevent $commfd readable [list getcommitlines $commfd]
161 161 }
162 162 }
163 163
164 164 proc readcommit {id} {
165 165 if [catch {set contents [exec hg debug-cat-file commit $id]}] return
166 166 parsecommit $id $contents 0 {}
167 167 }
168 168
169 169 proc parsecommit {id contents listed olds} {
170 170 global commitinfo children nchildren parents nparents cdate ncleft
171 171
172 172 set inhdr 1
173 173 set comment {}
174 174 set headline {}
175 175 set auname {}
176 176 set audate {}
177 177 set comname {}
178 178 set comdate {}
179 179 if {![info exists nchildren($id)]} {
180 180 set children($id) {}
181 181 set nchildren($id) 0
182 182 set ncleft($id) 0
183 183 }
184 184 set parents($id) $olds
185 185 set nparents($id) [llength $olds]
186 186 foreach p $olds {
187 187 if {![info exists nchildren($p)]} {
188 188 set children($p) [list $id]
189 189 set nchildren($p) 1
190 190 set ncleft($p) 1
191 191 } elseif {[lsearch -exact $children($p) $id] < 0} {
192 192 lappend children($p) $id
193 193 incr nchildren($p)
194 194 incr ncleft($p)
195 195 }
196 196 }
197 197 foreach line [split $contents "\n"] {
198 198 if {$inhdr} {
199 set line [split $line]
199 200 if {$line == {}} {
200 201 set inhdr 0
201 202 } else {
202 203 set tag [lindex $line 0]
203 204 if {$tag == "author"} {
204 205 set x [expr {[llength $line] - 2}]
205 206 set audate [lindex $line $x]
206 set auname [lrange $line 1 [expr {$x - 1}]]
207 set auname [join [lrange $line 1 [expr {$x - 1}]]]
207 208 } elseif {$tag == "committer"} {
208 209 set x [expr {[llength $line] - 2}]
209 210 set comdate [lindex $line $x]
210 set comname [lrange $line 1 [expr {$x - 1}]]
211 set comname [join [lrange $line 1 [expr {$x - 1}]]]
211 212 }
212 213 }
213 214 } else {
214 215 if {$comment == {}} {
215 216 set headline [string trim $line]
216 217 } else {
217 218 append comment "\n"
218 219 }
219 220 if {!$listed} {
220 221 # git-rev-list indents the comment by 4 spaces;
221 222 # if we got this via git-cat-file, add the indentation
222 223 append comment " "
223 224 }
224 225 append comment $line
225 226 }
226 227 }
227 228 if {$audate != {}} {
228 229 set audate [clock format $audate -format "%Y-%m-%d %H:%M:%S"]
229 230 }
230 231 if {$comdate != {}} {
231 232 set cdate($id) $comdate
232 233 set comdate [clock format $comdate -format "%Y-%m-%d %H:%M:%S"]
233 234 }
234 235 set commitinfo($id) [list $headline $auname $audate \
235 236 $comname $comdate $comment]
236 237 }
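# A minimal sketch (hypothetical header line) of what the [join] added in the
# hunk above changes: [split] turns the raw header into a proper Tcl list,
# [lrange] then returns a *list* of name words, and [join] flattens that list
# back into a plain display string. With the bare list value, an empty element
# (e.g. from a doubled space) would render as literal braces:
#   set line [split "author Matt  Mackall <mpm@selenic.com> 1119224549 -0700"]
#   set x [expr {[llength $line] - 2}]
#   puts [lrange $line 1 [expr {$x - 1}]]        ;# -> Matt {} Mackall <mpm@selenic.com>
#   puts [join [lrange $line 1 [expr {$x - 1}]]] ;# -> Matt  Mackall <mpm@selenic.com>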
237 238
238 239 proc readrefs {} {
239 240 global tagids idtags headids idheads tagcontents
240 241
241 242 set tags [exec hg tags]
242 243 set lines [split $tags '\n']
243 244 foreach f $lines {
244 245 set f [regexp -all -inline {\S+} $f]
245 246 set direct [lindex $f 0]
246 247 set full [lindex $f 1]
247 248 set sha [split $full ':']
248 249 set tag [lindex $sha 1]
249 250 lappend tagids($direct) $tag
250 251 lappend idtags($tag) $direct
251 252 }
252 253 }
253 254
254 255 proc readotherrefs {base dname excl} {
255 256 global otherrefids idotherrefs
256 257
257 258 set git [gitdir]
258 259 set files [glob -nocomplain -types f [file join $git $base *]]
259 260 foreach f $files {
260 261 catch {
261 262 set fd [open $f r]
262 263 set line [read $fd 40]
263 264 if {[regexp {^[0-9a-f]{40}} $line id]} {
264 265 set name "$dname[file tail $f]"
265 266 set otherrefids($name) $id
266 267 lappend idotherrefs($id) $name
267 268 }
268 269 close $fd
269 270 }
270 271 }
271 272 set dirs [glob -nocomplain -types d [file join $git $base *]]
272 273 foreach d $dirs {
273 274 set dir [file tail $d]
274 275 if {[lsearch -exact $excl $dir] >= 0} continue
275 276 readotherrefs [file join $base $dir] "$dname$dir/" {}
276 277 }
277 278 }
278 279
279 280 proc error_popup msg {
280 281 set w .error
281 282 toplevel $w
282 283 wm transient $w .
283 284 message $w.m -text $msg -justify center -aspect 400
284 285 pack $w.m -side top -fill x -padx 20 -pady 20
285 286 button $w.ok -text OK -command "destroy $w"
286 287 pack $w.ok -side bottom -fill x
287 288 bind $w <Visibility> "grab $w; focus $w"
288 289 tkwait window $w
289 290 }
290 291
291 292 proc makewindow {} {
292 293 global canv canv2 canv3 linespc charspc ctext cflist textfont
293 294 global findtype findtypemenu findloc findstring fstring geometry
294 295 global entries sha1entry sha1string sha1but
295 296 global maincursor textcursor curtextcursor
296 297 global rowctxmenu gaudydiff mergemax
297 298
298 299 menu .bar
299 300 .bar add cascade -label "File" -menu .bar.file
300 301 menu .bar.file
301 302 .bar.file add command -label "Reread references" -command rereadrefs
302 303 .bar.file add command -label "Quit" -command doquit
303 304 menu .bar.help
304 305 .bar add cascade -label "Help" -menu .bar.help
305 306 .bar.help add command -label "About gitk" -command about
306 307 . configure -menu .bar
307 308
308 309 if {![info exists geometry(canv1)]} {
309 310 set geometry(canv1) [expr 45 * $charspc]
310 311 set geometry(canv2) [expr 30 * $charspc]
311 312 set geometry(canv3) [expr 15 * $charspc]
312 313 set geometry(canvh) [expr 25 * $linespc + 4]
313 314 set geometry(ctextw) 80
314 315 set geometry(ctexth) 30
315 316 set geometry(cflistw) 30
316 317 }
317 318 panedwindow .ctop -orient vertical
318 319 if {[info exists geometry(width)]} {
319 320 .ctop conf -width $geometry(width) -height $geometry(height)
320 321 set texth [expr {$geometry(height) - $geometry(canvh) - 56}]
321 322 set geometry(ctexth) [expr {($texth - 8) /
322 323 [font metrics $textfont -linespace]}]
323 324 }
324 325 frame .ctop.top
325 326 frame .ctop.top.bar
326 327 pack .ctop.top.bar -side bottom -fill x
327 328 set cscroll .ctop.top.csb
328 329 scrollbar $cscroll -command {allcanvs yview} -highlightthickness 0
329 330 pack $cscroll -side right -fill y
330 331 panedwindow .ctop.top.clist -orient horizontal -sashpad 0 -handlesize 4
331 332 pack .ctop.top.clist -side top -fill both -expand 1
332 333 .ctop add .ctop.top
333 334 set canv .ctop.top.clist.canv
334 335 canvas $canv -height $geometry(canvh) -width $geometry(canv1) \
335 336 -bg white -bd 0 \
336 337 -yscrollincr $linespc -yscrollcommand "$cscroll set"
337 338 .ctop.top.clist add $canv
338 339 set canv2 .ctop.top.clist.canv2
339 340 canvas $canv2 -height $geometry(canvh) -width $geometry(canv2) \
340 341 -bg white -bd 0 -yscrollincr $linespc
341 342 .ctop.top.clist add $canv2
342 343 set canv3 .ctop.top.clist.canv3
343 344 canvas $canv3 -height $geometry(canvh) -width $geometry(canv3) \
344 345 -bg white -bd 0 -yscrollincr $linespc
345 346 .ctop.top.clist add $canv3
346 347 bind .ctop.top.clist <Configure> {resizeclistpanes %W %w}
347 348
348 349 set sha1entry .ctop.top.bar.sha1
349 350 set entries $sha1entry
350 351 set sha1but .ctop.top.bar.sha1label
351 352 button $sha1but -text "SHA1 ID: " -state disabled -relief flat \
352 353 -command gotocommit -width 8
353 354 $sha1but conf -disabledforeground [$sha1but cget -foreground]
354 355 pack .ctop.top.bar.sha1label -side left
355 356 entry $sha1entry -width 40 -font $textfont -textvariable sha1string
356 357 trace add variable sha1string write sha1change
357 358 pack $sha1entry -side left -pady 2
358 359
359 360 image create bitmap bm-left -data {
360 361 #define left_width 16
361 362 #define left_height 16
362 363 static unsigned char left_bits[] = {
363 364 0x00, 0x00, 0xc0, 0x01, 0xe0, 0x00, 0x70, 0x00, 0x38, 0x00, 0x1c, 0x00,
364 365 0x0e, 0x00, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x0e, 0x00, 0x1c, 0x00,
365 366 0x38, 0x00, 0x70, 0x00, 0xe0, 0x00, 0xc0, 0x01};
366 367 }
367 368 image create bitmap bm-right -data {
368 369 #define right_width 16
369 370 #define right_height 16
370 371 static unsigned char right_bits[] = {
371 372 0x00, 0x00, 0xc0, 0x01, 0x80, 0x03, 0x00, 0x07, 0x00, 0x0e, 0x00, 0x1c,
372 373 0x00, 0x38, 0xff, 0x7f, 0xff, 0x7f, 0xff, 0x7f, 0x00, 0x38, 0x00, 0x1c,
373 374 0x00, 0x0e, 0x00, 0x07, 0x80, 0x03, 0xc0, 0x01};
374 375 }
375 376 button .ctop.top.bar.leftbut -image bm-left -command goback \
376 377 -state disabled -width 26
377 378 pack .ctop.top.bar.leftbut -side left -fill y
378 379 button .ctop.top.bar.rightbut -image bm-right -command goforw \
379 380 -state disabled -width 26
380 381 pack .ctop.top.bar.rightbut -side left -fill y
381 382
382 383 button .ctop.top.bar.findbut -text "Find" -command dofind
383 384 pack .ctop.top.bar.findbut -side left
384 385 set findstring {}
385 386 set fstring .ctop.top.bar.findstring
386 387 lappend entries $fstring
387 388 entry $fstring -width 30 -font $textfont -textvariable findstring
388 389 pack $fstring -side left -expand 1 -fill x
389 390 set findtype Exact
390 391 set findtypemenu [tk_optionMenu .ctop.top.bar.findtype \
391 392 findtype Exact IgnCase Regexp]
392 393 set findloc "All fields"
393 394 tk_optionMenu .ctop.top.bar.findloc findloc "All fields" Headline \
394 395 Comments Author Committer Files Pickaxe
395 396 pack .ctop.top.bar.findloc -side right
396 397 pack .ctop.top.bar.findtype -side right
397 398 # for making sure type==Exact whenever loc==Pickaxe
398 399 trace add variable findloc write findlocchange
399 400
400 401 panedwindow .ctop.cdet -orient horizontal
401 402 .ctop add .ctop.cdet
402 403 frame .ctop.cdet.left
403 404 set ctext .ctop.cdet.left.ctext
404 405 text $ctext -bg white -state disabled -font $textfont \
405 406 -width $geometry(ctextw) -height $geometry(ctexth) \
406 407 -yscrollcommand ".ctop.cdet.left.sb set" \
407 408 -xscrollcommand ".ctop.cdet.left.hb set" -wrap none
408 409 scrollbar .ctop.cdet.left.sb -command "$ctext yview"
409 410 scrollbar .ctop.cdet.left.hb -orient horizontal -command "$ctext xview"
410 411 pack .ctop.cdet.left.sb -side right -fill y
411 412 pack .ctop.cdet.left.hb -side bottom -fill x
412 413 pack $ctext -side left -fill both -expand 1
413 414 .ctop.cdet add .ctop.cdet.left
414 415
415 416 $ctext tag conf filesep -font [concat $textfont bold] -back "#aaaaaa"
416 417 if {$gaudydiff} {
417 418 $ctext tag conf hunksep -back blue -fore white
418 419 $ctext tag conf d0 -back "#ff8080"
419 420 $ctext tag conf d1 -back green
420 421 } else {
421 422 $ctext tag conf hunksep -fore blue
422 423 $ctext tag conf d0 -fore red
423 424 $ctext tag conf d1 -fore "#00a000"
424 425 $ctext tag conf m0 -fore red
425 426 $ctext tag conf m1 -fore blue
426 427 $ctext tag conf m2 -fore green
427 428 $ctext tag conf m3 -fore purple
428 429 $ctext tag conf m4 -fore brown
429 430 $ctext tag conf mmax -fore darkgrey
430 431 set mergemax 5
431 432 $ctext tag conf mresult -font [concat $textfont bold]
432 433 $ctext tag conf msep -font [concat $textfont bold]
433 434 $ctext tag conf found -back yellow
434 435 }
435 436
436 437 frame .ctop.cdet.right
437 438 set cflist .ctop.cdet.right.cfiles
438 439 listbox $cflist -bg white -selectmode extended -width $geometry(cflistw) \
439 440 -yscrollcommand ".ctop.cdet.right.sb set"
440 441 scrollbar .ctop.cdet.right.sb -command "$cflist yview"
441 442 pack .ctop.cdet.right.sb -side right -fill y
442 443 pack $cflist -side left -fill both -expand 1
443 444 .ctop.cdet add .ctop.cdet.right
444 445 bind .ctop.cdet <Configure> {resizecdetpanes %W %w}
445 446
446 447 pack .ctop -side top -fill both -expand 1
447 448
448 449 bindall <1> {selcanvline %W %x %y}
449 450 #bindall <B1-Motion> {selcanvline %W %x %y}
450 451 bindall <ButtonRelease-4> "allcanvs yview scroll -5 units"
451 452 bindall <ButtonRelease-5> "allcanvs yview scroll 5 units"
452 453 bindall <2> "allcanvs scan mark 0 %y"
453 454 bindall <B2-Motion> "allcanvs scan dragto 0 %y"
454 455 bind . <Key-Up> "selnextline -1"
455 456 bind . <Key-Down> "selnextline 1"
456 457 bind . <Key-Prior> "allcanvs yview scroll -1 pages"
457 458 bind . <Key-Next> "allcanvs yview scroll 1 pages"
458 459 bindkey <Key-Delete> "$ctext yview scroll -1 pages"
459 460 bindkey <Key-BackSpace> "$ctext yview scroll -1 pages"
460 461 bindkey <Key-space> "$ctext yview scroll 1 pages"
461 462 bindkey p "selnextline -1"
462 463 bindkey n "selnextline 1"
463 464 bindkey b "$ctext yview scroll -1 pages"
464 465 bindkey d "$ctext yview scroll 18 units"
465 466 bindkey u "$ctext yview scroll -18 units"
466 467 bindkey / {findnext 1}
467 468 bindkey <Key-Return> {findnext 0}
468 469 bindkey ? findprev
469 470 bindkey f nextfile
470 471 bind . <Control-q> doquit
471 472 bind . <Control-w> doquit
472 473 bind . <Control-f> dofind
473 474 bind . <Control-g> {findnext 0}
474 475 bind . <Control-r> findprev
475 476 bind . <Control-equal> {incrfont 1}
476 477 bind . <Control-KP_Add> {incrfont 1}
477 478 bind . <Control-minus> {incrfont -1}
478 479 bind . <Control-KP_Subtract> {incrfont -1}
479 480 bind $cflist <<ListboxSelect>> listboxsel
480 481 bind . <Destroy> {savestuff %W}
481 482 bind . <Button-1> "click %W"
482 483 bind $fstring <Key-Return> dofind
483 484 bind $sha1entry <Key-Return> gotocommit
484 485 bind $sha1entry <<PasteSelection>> clearsha1
485 486
486 487 set maincursor [. cget -cursor]
487 488 set textcursor [$ctext cget -cursor]
488 489 set curtextcursor $textcursor
489 490
490 491 set rowctxmenu .rowctxmenu
491 492 menu $rowctxmenu -tearoff 0
492 493 $rowctxmenu add command -label "Diff this -> selected" \
493 494 -command {diffvssel 0}
494 495 $rowctxmenu add command -label "Diff selected -> this" \
495 496 -command {diffvssel 1}
496 497 $rowctxmenu add command -label "Make patch" -command mkpatch
497 498 $rowctxmenu add command -label "Create tag" -command mktag
498 499 $rowctxmenu add command -label "Write commit to file" -command writecommit
499 500 }
500 501
501 502 # when we make a key binding for the toplevel, make sure
502 503 # it doesn't get triggered when that key is pressed in the
503 504 # find string entry widget.
504 505 proc bindkey {ev script} {
505 506 global entries
506 507 bind . $ev $script
507 508 set escript [bind Entry $ev]
508 509 if {$escript == {}} {
509 510 set escript [bind Entry <Key>]
510 511 }
511 512 foreach e $entries {
512 513 bind $e $ev "$escript; break"
513 514 }
514 515 }
515 516
516 517 # set the focus back to the toplevel for any click outside
517 518 # the entry widgets
518 519 proc click {w} {
519 520 global entries
520 521 foreach e $entries {
521 522 if {$w == $e} return
522 523 }
523 524 focus .
524 525 }
525 526
526 527 proc savestuff {w} {
527 528 global canv canv2 canv3 ctext cflist mainfont textfont
528 529 global stuffsaved findmergefiles gaudydiff maxgraphpct
529 530 global maxwidth
530 531
531 532 if {$stuffsaved} return
532 533 if {![winfo viewable .]} return
533 534 catch {
534 535 set f [open "~/.gitk-new" w]
535 536 puts $f [list set mainfont $mainfont]
536 537 puts $f [list set textfont $textfont]
537 538 puts $f [list set findmergefiles $findmergefiles]
538 539 puts $f [list set gaudydiff $gaudydiff]
539 540 puts $f [list set maxgraphpct $maxgraphpct]
540 541 puts $f [list set maxwidth $maxwidth]
541 542 puts $f "set geometry(width) [winfo width .ctop]"
542 543 puts $f "set geometry(height) [winfo height .ctop]"
543 544 puts $f "set geometry(canv1) [expr [winfo width $canv]-2]"
544 545 puts $f "set geometry(canv2) [expr [winfo width $canv2]-2]"
545 546 puts $f "set geometry(canv3) [expr [winfo width $canv3]-2]"
546 547 puts $f "set geometry(canvh) [expr [winfo height $canv]-2]"
547 548 set wid [expr {([winfo width $ctext] - 8) \
548 549 / [font measure $textfont "0"]}]
549 550 puts $f "set geometry(ctextw) $wid"
550 551 set wid [expr {([winfo width $cflist] - 11) \
551 552 / [font measure [$cflist cget -font] "0"]}]
552 553 puts $f "set geometry(cflistw) $wid"
553 554 close $f
554 555 file rename -force "~/.gitk-new" "~/.gitk"
555 556 }
556 557 set stuffsaved 1
557 558 }
558 559
559 560 proc resizeclistpanes {win w} {
560 561 global oldwidth
561 562 if [info exists oldwidth($win)] {
562 563 set s0 [$win sash coord 0]
563 564 set s1 [$win sash coord 1]
564 565 if {$w < 60} {
565 566 set sash0 [expr {int($w/2 - 2)}]
566 567 set sash1 [expr {int($w*5/6 - 2)}]
567 568 } else {
568 569 set factor [expr {1.0 * $w / $oldwidth($win)}]
569 570 set sash0 [expr {int($factor * [lindex $s0 0])}]
570 571 set sash1 [expr {int($factor * [lindex $s1 0])}]
571 572 if {$sash0 < 30} {
572 573 set sash0 30
573 574 }
574 575 if {$sash1 < $sash0 + 20} {
575 576 set sash1 [expr $sash0 + 20]
576 577 }
577 578 if {$sash1 > $w - 10} {
578 579 set sash1 [expr $w - 10]
579 580 if {$sash0 > $sash1 - 20} {
580 581 set sash0 [expr $sash1 - 20]
581 582 }
582 583 }
583 584 }
584 585 $win sash place 0 $sash0 [lindex $s0 1]
585 586 $win sash place 1 $sash1 [lindex $s1 1]
586 587 }
587 588 set oldwidth($win) $w
588 589 }
589 590
590 591 proc resizecdetpanes {win w} {
591 592 global oldwidth
592 593 if [info exists oldwidth($win)] {
593 594 set s0 [$win sash coord 0]
594 595 if {$w < 60} {
595 596 set sash0 [expr {int($w*3/4 - 2)}]
596 597 } else {
597 598 set factor [expr {1.0 * $w / $oldwidth($win)}]
598 599 set sash0 [expr {int($factor * [lindex $s0 0])}]
599 600 if {$sash0 < 45} {
600 601 set sash0 45
601 602 }
602 603 if {$sash0 > $w - 15} {
603 604 set sash0 [expr $w - 15]
604 605 }
605 606 }
606 607 $win sash place 0 $sash0 [lindex $s0 1]
607 608 }
608 609 set oldwidth($win) $w
609 610 }
610 611
611 612 proc allcanvs args {
612 613 global canv canv2 canv3
613 614 eval $canv $args
614 615 eval $canv2 $args
615 616 eval $canv3 $args
616 617 }
617 618
618 619 proc bindall {event action} {
619 620 global canv canv2 canv3
620 621 bind $canv $event $action
621 622 bind $canv2 $event $action
622 623 bind $canv3 $event $action
623 624 }
624 625
625 626 proc about {} {
626 627 set w .about
627 628 if {[winfo exists $w]} {
628 629 raise $w
629 630 return
630 631 }
631 632 toplevel $w
632 633 wm title $w "About gitk"
633 634 message $w.m -text {
634 635 Gitk version 1.2
635 636
636 637 Copyright © 2005 Paul Mackerras
637 638
638 639 Use and redistribute under the terms of the GNU General Public License} \
639 640 -justify center -aspect 400
640 641 pack $w.m -side top -fill x -padx 20 -pady 20
641 642 button $w.ok -text Close -command "destroy $w"
642 643 pack $w.ok -side bottom
643 644 }
644 645
645 646 proc assigncolor {id} {
646 647 global commitinfo colormap commcolors colors nextcolor
647 648 global parents nparents children nchildren
648 649 global cornercrossings crossings
649 650
650 651 if [info exists colormap($id)] return
651 652 set ncolors [llength $colors]
652 653 if {$nparents($id) <= 1 && $nchildren($id) == 1} {
653 654 set child [lindex $children($id) 0]
654 655 if {[info exists colormap($child)]
655 656 && $nparents($child) == 1} {
656 657 set colormap($id) $colormap($child)
657 658 return
658 659 }
659 660 }
660 661 set badcolors {}
661 662 if {[info exists cornercrossings($id)]} {
662 663 foreach x $cornercrossings($id) {
663 664 if {[info exists colormap($x)]
664 665 && [lsearch -exact $badcolors $colormap($x)] < 0} {
665 666 lappend badcolors $colormap($x)
666 667 }
667 668 }
668 669 if {[llength $badcolors] >= $ncolors} {
669 670 set badcolors {}
670 671 }
671 672 }
672 673 set origbad $badcolors
673 674 if {[llength $badcolors] < $ncolors - 1} {
674 675 if {[info exists crossings($id)]} {
675 676 foreach x $crossings($id) {
676 677 if {[info exists colormap($x)]
677 678 && [lsearch -exact $badcolors $colormap($x)] < 0} {
678 679 lappend badcolors $colormap($x)
679 680 }
680 681 }
681 682 if {[llength $badcolors] >= $ncolors} {
682 683 set badcolors $origbad
683 684 }
684 685 }
685 686 set origbad $badcolors
686 687 }
687 688 if {[llength $badcolors] < $ncolors - 1} {
688 689 foreach child $children($id) {
689 690 if {[info exists colormap($child)]
690 691 && [lsearch -exact $badcolors $colormap($child)] < 0} {
691 692 lappend badcolors $colormap($child)
692 693 }
693 694 if {[info exists parents($child)]} {
694 695 foreach p $parents($child) {
695 696 if {[info exists colormap($p)]
696 697 && [lsearch -exact $badcolors $colormap($p)] < 0} {
697 698 lappend badcolors $colormap($p)
698 699 }
699 700 }
700 701 }
701 702 }
702 703 if {[llength $badcolors] >= $ncolors} {
703 704 set badcolors $origbad
704 705 }
705 706 }
706 707 for {set i 0} {$i <= $ncolors} {incr i} {
707 708 set c [lindex $colors $nextcolor]
708 709 if {[incr nextcolor] >= $ncolors} {
709 710 set nextcolor 0
710 711 }
711 712 if {[lsearch -exact $badcolors $c]} break
712 713 }
713 714 set colormap($id) $c
714 715 }
715 716
716 717 proc initgraph {} {
717 718 global canvy canvy0 lineno numcommits nextcolor linespc
718 719 global mainline mainlinearrow sidelines
719 720 global nchildren ncleft
720 721 global displist nhyperspace
721 722
722 723 allcanvs delete all
723 724 set nextcolor 0
724 725 set canvy $canvy0
725 726 set lineno -1
726 727 set numcommits 0
727 728 catch {unset mainline}
728 729 catch {unset mainlinearrow}
729 730 catch {unset sidelines}
730 731 foreach id [array names nchildren] {
731 732 set ncleft($id) $nchildren($id)
732 733 }
733 734 set displist {}
734 735 set nhyperspace 0
735 736 }
736 737
737 738 proc bindline {t id} {
738 739 global canv
739 740
740 741 $canv bind $t <Enter> "lineenter %x %y $id"
741 742 $canv bind $t <Motion> "linemotion %x %y $id"
742 743 $canv bind $t <Leave> "lineleave $id"
743 744 $canv bind $t <Button-1> "lineclick %x %y $id 1"
744 745 }
745 746
746 747 proc drawlines {id xtra} {
747 748 global mainline mainlinearrow sidelines lthickness colormap canv
748 749
749 750 $canv delete lines.$id
750 751 if {[info exists mainline($id)]} {
751 752 set t [$canv create line $mainline($id) \
752 753 -width [expr {($xtra + 1) * $lthickness}] \
753 754 -fill $colormap($id) -tags lines.$id \
754 755 -arrow $mainlinearrow($id)]
755 756 $canv lower $t
756 757 bindline $t $id
757 758 }
758 759 if {[info exists sidelines($id)]} {
759 760 foreach ls $sidelines($id) {
760 761 set coords [lindex $ls 0]
761 762 set thick [lindex $ls 1]
762 763 set arrow [lindex $ls 2]
763 764 set t [$canv create line $coords -fill $colormap($id) \
764 765 -width [expr {($thick + $xtra) * $lthickness}] \
765 766 -arrow $arrow -tags lines.$id]
766 767 $canv lower $t
767 768 bindline $t $id
768 769 }
769 770 }
770 771 }
771 772
772 773 # level here is an index in displist
773 774 proc drawcommitline {level} {
774 775 global parents children nparents displist
775 776 global canv canv2 canv3 mainfont namefont canvy linespc
776 777 global lineid linehtag linentag linedtag commitinfo
777 778 global colormap numcommits currentparents dupparents
778 779 global idtags idline idheads idotherrefs
779 780 global lineno lthickness mainline mainlinearrow sidelines
780 781 global commitlisted rowtextx idpos lastuse displist
781 782 global oldnlines olddlevel olddisplist
782 783
783 784 incr numcommits
784 785 incr lineno
785 786 set id [lindex $displist $level]
786 787 set lastuse($id) $lineno
787 788 set lineid($lineno) $id
788 789 set idline($id) $lineno
789 790 set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
790 791 if {![info exists commitinfo($id)]} {
791 792 readcommit $id
792 793 if {![info exists commitinfo($id)]} {
793 794 set commitinfo($id) {"No commit information available"}
794 795 set nparents($id) 0
795 796 }
796 797 }
797 798 assigncolor $id
798 799 set currentparents {}
799 800 set dupparents {}
800 801 if {[info exists commitlisted($id)] && [info exists parents($id)]} {
801 802 foreach p $parents($id) {
802 803 if {[lsearch -exact $currentparents $p] < 0} {
803 804 lappend currentparents $p
804 805 } else {
805 806 # remember that this parent was listed twice
806 807 lappend dupparents $p
807 808 }
808 809 }
809 810 }
810 811 set x [xcoord $level $level $lineno]
811 812 set y1 $canvy
812 813 set canvy [expr $canvy + $linespc]
813 814 allcanvs conf -scrollregion \
814 815 [list 0 0 0 [expr $y1 + 0.5 * $linespc + 2]]
815 816 if {[info exists mainline($id)]} {
816 817 lappend mainline($id) $x $y1
817 818 if {$mainlinearrow($id) ne "none"} {
818 819 set mainline($id) [trimdiagstart $mainline($id)]
819 820 }
820 821 }
821 822 drawlines $id 0
822 823 set orad [expr {$linespc / 3}]
823 824 set t [$canv create oval [expr $x - $orad] [expr $y1 - $orad] \
824 825 [expr $x + $orad - 1] [expr $y1 + $orad - 1] \
825 826 -fill $ofill -outline black -width 1]
826 827 $canv raise $t
827 828 $canv bind $t <1> {selcanvline {} %x %y}
828 829 set xt [xcoord [llength $displist] $level $lineno]
829 830 if {[llength $currentparents] > 2} {
830 831 set xt [expr {$xt + ([llength $currentparents] - 2) * $linespc}]
831 832 }
832 833 set rowtextx($lineno) $xt
833 834 set idpos($id) [list $x $xt $y1]
834 835 if {[info exists idtags($id)] || [info exists idheads($id)]
835 836 || [info exists idotherrefs($id)]} {
836 837 set xt [drawtags $id $x $xt $y1]
837 838 }
838 839 set headline [lindex $commitinfo($id) 0]
839 840 set name [lindex $commitinfo($id) 1]
840 841 set date [lindex $commitinfo($id) 2]
841 842 set linehtag($lineno) [$canv create text $xt $y1 -anchor w \
842 843 -text $headline -font $mainfont ]
843 844 $canv bind $linehtag($lineno) <Button-3> "rowmenu %X %Y $id"
844 845 set linentag($lineno) [$canv2 create text 3 $y1 -anchor w \
845 846 -text $name -font $namefont]
846 847 set linedtag($lineno) [$canv3 create text 3 $y1 -anchor w \
847 848 -text $date -font $mainfont]
848 849
849 850 set olddlevel $level
850 851 set olddisplist $displist
851 852 set oldnlines [llength $displist]
852 853 }
853 854
854 855 proc drawtags {id x xt y1} {
855 856 global idtags idheads idotherrefs
856 857 global linespc lthickness
857 858 global canv mainfont idline rowtextx
858 859
859 860 set marks {}
860 861 set ntags 0
861 862 set nheads 0
862 863 if {[info exists idtags($id)]} {
863 864 set marks $idtags($id)
864 865 set ntags [llength $marks]
865 866 }
866 867 if {[info exists idheads($id)]} {
867 868 set marks [concat $marks $idheads($id)]
868 869 set nheads [llength $idheads($id)]
869 870 }
870 871 if {[info exists idotherrefs($id)]} {
871 872 set marks [concat $marks $idotherrefs($id)]
872 873 }
873 874 if {$marks eq {}} {
874 875 return $xt
875 876 }
876 877
877 878 set delta [expr {int(0.5 * ($linespc - $lthickness))}]
878 879 set yt [expr $y1 - 0.5 * $linespc]
879 880 set yb [expr $yt + $linespc - 1]
880 881 set xvals {}
881 882 set wvals {}
882 883 foreach tag $marks {
883 884 set wid [font measure $mainfont $tag]
884 885 lappend xvals $xt
885 886 lappend wvals $wid
886 887 set xt [expr {$xt + $delta + $wid + $lthickness + $linespc}]
887 888 }
888 889 set t [$canv create line $x $y1 [lindex $xvals end] $y1 \
889 890 -width $lthickness -fill black -tags tag.$id]
890 891 $canv lower $t
891 892 foreach tag $marks x $xvals wid $wvals {
892 893 set xl [expr $x + $delta]
893 894 set xr [expr $x + $delta + $wid + $lthickness]
894 895 if {[incr ntags -1] >= 0} {
895 896 # draw a tag
896 897 set t [$canv create polygon $x [expr $yt + $delta] $xl $yt \
897 898 $xr $yt $xr $yb $xl $yb $x [expr $yb - $delta] \
898 899 -width 1 -outline black -fill yellow -tags tag.$id]
899 900 $canv bind $t <1> [list showtag $tag 1]
900 901 set rowtextx($idline($id)) [expr {$xr + $linespc}]
901 902 } else {
902 903 # draw a head or other ref
903 904 if {[incr nheads -1] >= 0} {
904 905 set col green
905 906 } else {
906 907 set col "#ddddff"
907 908 }
908 909 set xl [expr $xl - $delta/2]
909 910 $canv create polygon $x $yt $xr $yt $xr $yb $x $yb \
910 911 -width 1 -outline black -fill $col -tags tag.$id
911 912 }
912 913 set t [$canv create text $xl $y1 -anchor w -text $tag \
913 914 -font $mainfont -tags tag.$id]
914 915 if {$ntags >= 0} {
915 916 $canv bind $t <1> [list showtag $tag 1]
916 917 }
917 918 }
918 919 return $xt
919 920 }
920 921
921 922 proc notecrossings {id lo hi corner} {
922 923 global olddisplist crossings cornercrossings
923 924
924 925 for {set i $lo} {[incr i] < $hi} {} {
925 926 set p [lindex $olddisplist $i]
926 927 if {$p == {}} continue
927 928 if {$i == $corner} {
928 929 if {![info exists cornercrossings($id)]
929 930 || [lsearch -exact $cornercrossings($id) $p] < 0} {
930 931 lappend cornercrossings($id) $p
931 932 }
932 933 if {![info exists cornercrossings($p)]
933 934 || [lsearch -exact $cornercrossings($p) $id] < 0} {
934 935 lappend cornercrossings($p) $id
935 936 }
936 937 } else {
937 938 if {![info exists crossings($id)]
938 939 || [lsearch -exact $crossings($id) $p] < 0} {
939 940 lappend crossings($id) $p
940 941 }
941 942 if {![info exists crossings($p)]
942 943 || [lsearch -exact $crossings($p) $id] < 0} {
943 944 lappend crossings($p) $id
944 945 }
945 946 }
946 947 }
947 948 }
948 949
949 950 proc xcoord {i level ln} {
950 951 global canvx0 xspc1 xspc2
951 952
952 953 set x [expr {$canvx0 + $i * $xspc1($ln)}]
953 954 if {$i > 0 && $i == $level} {
954 955 set x [expr {$x + 0.5 * ($xspc2 - $xspc1($ln))}]
955 956 } elseif {$i > $level} {
956 957 set x [expr {$x + $xspc2 - $xspc1($ln)}]
957 958 }
958 959 return $x
959 960 }
960 961
961 962 # it seems Tk can't draw arrows on the end of diagonal line segments...
962 963 proc trimdiagend {line} {
963 964 while {[llength $line] > 4} {
964 965 set x1 [lindex $line end-3]
965 966 set y1 [lindex $line end-2]
966 967 set x2 [lindex $line end-1]
967 968 set y2 [lindex $line end]
968 969 if {($x1 == $x2) != ($y1 == $y2)} break
969 970 set line [lreplace $line end-1 end]
970 971 }
971 972 return $line
972 973 }
973 974
974 975 proc trimdiagstart {line} {
975 976 while {[llength $line] > 4} {
976 977 set x1 [lindex $line 0]
977 978 set y1 [lindex $line 1]
978 979 set x2 [lindex $line 2]
979 980 set y2 [lindex $line 3]
980 981 if {($x1 == $x2) != ($y1 == $y2)} break
981 982 set line [lreplace $line 0 1]
982 983 }
983 984 return $line
984 985 }
985 986
986 987 proc drawslants {id needonscreen nohs} {
987 988 global canv mainline mainlinearrow sidelines
988 989 global canvx0 canvy xspc1 xspc2 lthickness
989 990 global currentparents dupparents
990 991 global lthickness linespc canvy colormap lineno geometry
991 992 global maxgraphpct maxwidth
992 993 global displist onscreen lastuse
993 994 global parents commitlisted
994 995 global oldnlines olddlevel olddisplist
995 996 global nhyperspace numcommits nnewparents
996 997
997 998 if {$lineno < 0} {
998 999 lappend displist $id
999 1000 set onscreen($id) 1
1000 1001 return 0
1001 1002 }
1002 1003
1003 1004 set y1 [expr {$canvy - $linespc}]
1004 1005 set y2 $canvy
1005 1006
1006 1007 # work out what we need to get back on screen
1007 1008 set reins {}
1008 1009 if {$onscreen($id) < 0} {
1009 1010 # next to do isn't displayed, better get it on screen...
1010 1011 lappend reins [list $id 0]
1011 1012 }
1013 1014 # make sure all the previous commit's parents are on the screen
1013 1014 foreach p $currentparents {
1014 1015 if {$onscreen($p) < 0} {
1015 1016 lappend reins [list $p 0]
1016 1017 }
1017 1018 }
1018 1019 # bring back anything requested by caller
1019 1020 if {$needonscreen ne {}} {
1020 1021 lappend reins $needonscreen
1021 1022 }
1022 1023
1023 1024 # try the shortcut
1024 1025 if {$currentparents == $id && $onscreen($id) == 0 && $reins eq {}} {
1025 1026 set dlevel $olddlevel
1026 1027 set x [xcoord $dlevel $dlevel $lineno]
1027 1028 set mainline($id) [list $x $y1]
1028 1029 set mainlinearrow($id) none
1029 1030 set lastuse($id) $lineno
1030 1031 set displist [lreplace $displist $dlevel $dlevel $id]
1031 1032 set onscreen($id) 1
1032 1033 set xspc1([expr {$lineno + 1}]) $xspc1($lineno)
1033 1034 return $dlevel
1034 1035 }
1035 1036
1036 1037 # update displist
1037 1038 set displist [lreplace $displist $olddlevel $olddlevel]
1038 1039 set j $olddlevel
1039 1040 foreach p $currentparents {
1040 1041 set lastuse($p) $lineno
1041 1042 if {$onscreen($p) == 0} {
1042 1043 set displist [linsert $displist $j $p]
1043 1044 set onscreen($p) 1
1044 1045 incr j
1045 1046 }
1046 1047 }
1047 1048 if {$onscreen($id) == 0} {
1048 1049 lappend displist $id
1049 1050 set onscreen($id) 1
1050 1051 }
1051 1052
1052 1053 # remove the null entry if present
1053 1054 set nullentry [lsearch -exact $displist {}]
1054 1055 if {$nullentry >= 0} {
1055 1056 set displist [lreplace $displist $nullentry $nullentry]
1056 1057 }
1057 1058
1058 1059 # bring back the ones we need now (if we did it earlier
1059 1060 # it would change displist and invalidate olddlevel)
1060 1061 foreach pi $reins {
1061 1062 # test again in case of duplicates in reins
1062 1063 set p [lindex $pi 0]
1063 1064 if {$onscreen($p) < 0} {
1064 1065 set onscreen($p) 1
1065 1066 set lastuse($p) $lineno
1066 1067 set displist [linsert $displist [lindex $pi 1] $p]
1067 1068 incr nhyperspace -1
1068 1069 }
1069 1070 }
1070 1071
1071 1072 set lastuse($id) $lineno
1072 1073
1073 1074 # see if we need to make any lines jump off into hyperspace
1074 1075 set displ [llength $displist]
1075 1076 if {$displ > $maxwidth} {
1076 1077 set ages {}
1077 1078 foreach x $displist {
1078 1079 lappend ages [list $lastuse($x) $x]
1079 1080 }
1080 1081 set ages [lsort -integer -index 0 $ages]
1081 1082 set k 0
1082 1083 while {$displ > $maxwidth} {
1083 1084 set use [lindex $ages $k 0]
1084 1085 set victim [lindex $ages $k 1]
1085 1086 if {$use >= $lineno - 5} break
1086 1087 incr k
1087 1088 if {[lsearch -exact $nohs $victim] >= 0} continue
1088 1089 set i [lsearch -exact $displist $victim]
1089 1090 set displist [lreplace $displist $i $i]
1090 1091 set onscreen($victim) -1
1091 1092 incr nhyperspace
1092 1093 incr displ -1
1093 1094 if {$i < $nullentry} {
1094 1095 incr nullentry -1
1095 1096 }
1096 1097 set x [lindex $mainline($victim) end-1]
1097 1098 lappend mainline($victim) $x $y1
1098 1099 set line [trimdiagend $mainline($victim)]
1099 1100 set arrow "last"
1100 1101 if {$mainlinearrow($victim) ne "none"} {
1101 1102 set line [trimdiagstart $line]
1102 1103 set arrow "both"
1103 1104 }
1104 1105 lappend sidelines($victim) [list $line 1 $arrow]
1105 1106 unset mainline($victim)
1106 1107 }
1107 1108 }
1108 1109
1109 1110 set dlevel [lsearch -exact $displist $id]
1110 1111
1111 1112 # If we are reducing, put in a null entry
1112 1113 if {$displ < $oldnlines} {
1113 1114 # does the next line look like a merge?
1114 1115 # i.e. does it have > 1 new parent?
1115 1116 if {$nnewparents($id) > 1} {
1116 1117 set i [expr {$dlevel + 1}]
1117 1118 } elseif {$nnewparents([lindex $olddisplist $olddlevel]) == 0} {
1118 1119 set i $olddlevel
1119 1120 if {$nullentry >= 0 && $nullentry < $i} {
1120 1121 incr i -1
1121 1122 }
1122 1123 } elseif {$nullentry >= 0} {
1123 1124 set i $nullentry
1124 1125 while {$i < $displ
1125 1126 && [lindex $olddisplist $i] == [lindex $displist $i]} {
1126 1127 incr i
1127 1128 }
1128 1129 } else {
1129 1130 set i $olddlevel
1130 1131 if {$dlevel >= $i} {
1131 1132 incr i
1132 1133 }
1133 1134 }
1134 1135 if {$i < $displ} {
1135 1136 set displist [linsert $displist $i {}]
1136 1137 incr displ
1137 1138 if {$dlevel >= $i} {
1138 1139 incr dlevel
1139 1140 }
1140 1141 }
1141 1142 }
1142 1143
1143 1144 # decide on the line spacing for the next line
1144 1145 set lj [expr {$lineno + 1}]
1145 1146 set maxw [expr {$maxgraphpct * $geometry(canv1) / 100}]
1146 1147 if {$displ <= 1 || $canvx0 + $displ * $xspc2 <= $maxw} {
1147 1148 set xspc1($lj) $xspc2
1148 1149 } else {
1149 1150 set xspc1($lj) [expr {($maxw - $canvx0 - $xspc2) / ($displ - 1)}]
1150 1151 if {$xspc1($lj) < $lthickness} {
1151 1152 set xspc1($lj) $lthickness
1152 1153 }
1153 1154 }
1154 1155
1155 1156 foreach idi $reins {
1156 1157 set id [lindex $idi 0]
1157 1158 set j [lsearch -exact $displist $id]
1158 1159 set xj [xcoord $j $dlevel $lj]
1159 1160 set mainline($id) [list $xj $y2]
1160 1161 set mainlinearrow($id) first
1161 1162 }
1162 1163
1163 1164 set i -1
1164 1165 foreach id $olddisplist {
1165 1166 incr i
1166 1167 if {$id == {}} continue
1167 1168 if {$onscreen($id) <= 0} continue
1168 1169 set xi [xcoord $i $olddlevel $lineno]
1169 1170 if {$i == $olddlevel} {
1170 1171 foreach p $currentparents {
1171 1172 set j [lsearch -exact $displist $p]
1172 1173 set coords [list $xi $y1]
1173 1174 set xj [xcoord $j $dlevel $lj]
1174 1175 if {$xj < $xi - $linespc} {
1175 1176 lappend coords [expr {$xj + $linespc}] $y1
1176 1177 notecrossings $p $j $i [expr {$j + 1}]
1177 1178 } elseif {$xj > $xi + $linespc} {
1178 1179 lappend coords [expr {$xj - $linespc}] $y1
1179 1180 notecrossings $p $i $j [expr {$j - 1}]
1180 1181 }
1181 1182 if {[lsearch -exact $dupparents $p] >= 0} {
1182 1183 # draw a double-width line to indicate the doubled parent
1183 1184 lappend coords $xj $y2
1184 1185 lappend sidelines($p) [list $coords 2 none]
1185 1186 if {![info exists mainline($p)]} {
1186 1187 set mainline($p) [list $xj $y2]
1187 1188 set mainlinearrow($p) none
1188 1189 }
1189 1190 } else {
1190 1191 # normal case, no parent duplicated
1191 1192 set yb $y2
1192 1193 set dx [expr {abs($xi - $xj)}]
1193 1194 if {0 && $dx < $linespc} {
1194 1195 set yb [expr {$y1 + $dx}]
1195 1196 }
1196 1197 if {![info exists mainline($p)]} {
1197 1198 if {$xi != $xj} {
1198 1199 lappend coords $xj $yb
1199 1200 }
1200 1201 set mainline($p) $coords
1201 1202 set mainlinearrow($p) none
1202 1203 } else {
1203 1204 lappend coords $xj $yb
1204 1205 if {$yb < $y2} {
1205 1206 lappend coords $xj $y2
1206 1207 }
1207 1208 lappend sidelines($p) [list $coords 1 none]
1208 1209 }
1209 1210 }
1210 1211 }
1211 1212 } else {
1212 1213 set j $i
1213 1214 if {[lindex $displist $i] != $id} {
1214 1215 set j [lsearch -exact $displist $id]
1215 1216 }
1216 1217 if {$j != $i || $xspc1($lineno) != $xspc1($lj)
1217 1218 || ($olddlevel < $i && $i < $dlevel)
1218 1219 || ($dlevel < $i && $i < $olddlevel)} {
1219 1220 set xj [xcoord $j $dlevel $lj]
1220 1221 lappend mainline($id) $xi $y1 $xj $y2
1221 1222 }
1222 1223 }
1223 1224 }
1224 1225 return $dlevel
1225 1226 }
1226 1227
1227 1228 # search for x in a list of lists
1228 1229 proc llsearch {llist x} {
1229 1230 set i 0
1230 1231 foreach l $llist {
1231 1232 if {$l == $x || [lsearch -exact $l $x] >= 0} {
1232 1233 return $i
1233 1234 }
1234 1235 incr i
1235 1236 }
1236 1237 return -1
1237 1238 }
1238 1239
1239 1240 proc drawmore {reading} {
1240 1241 global displayorder numcommits ncmupdate nextupdate
1241 1242 global stopped nhyperspace parents commitlisted
1242 1243 global maxwidth onscreen displist currentparents olddlevel
1243 1244
1244 1245 set n [llength $displayorder]
1245 1246 while {$numcommits < $n} {
1246 1247 set id [lindex $displayorder $numcommits]
1247 1248 set ctxend [expr {$numcommits + 10}]
1248 1249 if {!$reading && $ctxend > $n} {
1249 1250 set ctxend $n
1250 1251 }
1251 1252 set dlist {}
1252 1253 if {$numcommits > 0} {
1253 1254 set dlist [lreplace $displist $olddlevel $olddlevel]
1254 1255 set i $olddlevel
1255 1256 foreach p $currentparents {
1256 1257 if {$onscreen($p) == 0} {
1257 1258 set dlist [linsert $dlist $i $p]
1258 1259 incr i
1259 1260 }
1260 1261 }
1261 1262 }
1262 1263 set nohs {}
1263 1264 set reins {}
1264 1265 set isfat [expr {[llength $dlist] > $maxwidth}]
1265 1266 if {$nhyperspace > 0 || $isfat} {
1266 1267 if {$ctxend > $n} break
1267 1268 # work out what to bring back and
1268 1269 # what we don't want to send into hyperspace
1269 1270 set room 1
1270 1271 for {set k $numcommits} {$k < $ctxend} {incr k} {
1271 1272 set x [lindex $displayorder $k]
1272 1273 set i [llsearch $dlist $x]
1273 1274 if {$i < 0} {
1274 1275 set i [llength $dlist]
1275 1276 lappend dlist $x
1276 1277 }
1277 1278 if {[lsearch -exact $nohs $x] < 0} {
1278 1279 lappend nohs $x
1279 1280 }
1280 1281 if {$reins eq {} && $onscreen($x) < 0 && $room} {
1281 1282 set reins [list $x $i]
1282 1283 }
1283 1284 set newp {}
1284 1285 if {[info exists commitlisted($x)]} {
1285 1286 set right 0
1286 1287 foreach p $parents($x) {
1287 1288 if {[llsearch $dlist $p] < 0} {
1288 1289 lappend newp $p
1289 1290 if {[lsearch -exact $nohs $p] < 0} {
1290 1291 lappend nohs $p
1291 1292 }
1292 1293 if {$reins eq {} && $onscreen($p) < 0 && $room} {
1293 1294 set reins [list $p [expr {$i + $right}]]
1294 1295 }
1295 1296 }
1296 1297 set right 1
1297 1298 }
1298 1299 }
1299 1300 set l [lindex $dlist $i]
1300 1301 if {[llength $l] == 1} {
1301 1302 set l $newp
1302 1303 } else {
1303 1304 set j [lsearch -exact $l $x]
1304 1305 set l [concat [lreplace $l $j $j] $newp]
1305 1306 }
1306 1307 set dlist [lreplace $dlist $i $i $l]
1307 1308 if {$room && $isfat && [llength $newp] <= 1} {
1308 1309 set room 0
1309 1310 }
1310 1311 }
1311 1312 }
1312 1313
1313 1314 set dlevel [drawslants $id $reins $nohs]
1314 1315 drawcommitline $dlevel
1315 1316 if {[clock clicks -milliseconds] >= $nextupdate
1316 1317 && $numcommits >= $ncmupdate} {
1317 1318 doupdate $reading
1318 1319 if {$stopped} break
1319 1320 }
1320 1321 }
1321 1322 }
1322 1323
1323 1324 # level here is an index in todo
1324 1325 proc updatetodo {level noshortcut} {
1325 1326 global ncleft todo nnewparents
1326 1327 global commitlisted parents onscreen
1327 1328
1328 1329 set id [lindex $todo $level]
1329 1330 set olds {}
1330 1331 if {[info exists commitlisted($id)]} {
1331 1332 foreach p $parents($id) {
1332 1333 if {[lsearch -exact $olds $p] < 0} {
1333 1334 lappend olds $p
1334 1335 }
1335 1336 }
1336 1337 }
1337 1338 if {!$noshortcut && [llength $olds] == 1} {
1338 1339 set p [lindex $olds 0]
1339 1340 if {$ncleft($p) == 1 && [lsearch -exact $todo $p] < 0} {
1340 1341 set ncleft($p) 0
1341 1342 set todo [lreplace $todo $level $level $p]
1342 1343 set onscreen($p) 0
1343 1344 set nnewparents($id) 1
1344 1345 return 0
1345 1346 }
1346 1347 }
1347 1348
1348 1349 set todo [lreplace $todo $level $level]
1349 1350 set i $level
1350 1351 set n 0
1351 1352 foreach p $olds {
1352 1353 incr ncleft($p) -1
1353 1354 set k [lsearch -exact $todo $p]
1354 1355 if {$k < 0} {
1355 1356 set todo [linsert $todo $i $p]
1356 1357 set onscreen($p) 0
1357 1358 incr i
1358 1359 incr n
1359 1360 }
1360 1361 }
1361 1362 set nnewparents($id) $n
1362 1363
1363 1364 return 1
1364 1365 }
1365 1366
1366 1367 proc decidenext {{noread 0}} {
1367 1368 global ncleft todo
1368 1369 global datemode cdate
1369 1370 global commitinfo
1370 1371
1371 1372 # choose which one to do next time around
1372 1373 set todol [llength $todo]
1373 1374 set level -1
1374 1375 set latest {}
1375 1376 for {set k $todol} {[incr k -1] >= 0} {} {
1376 1377 set p [lindex $todo $k]
1377 1378 if {$ncleft($p) == 0} {
1378 1379 if {$datemode} {
1379 1380 if {![info exists commitinfo($p)]} {
1380 1381 if {$noread} {
1381 1382 return {}
1382 1383 }
1383 1384 readcommit $p
1384 1385 }
1385 1386 if {$latest == {} || $cdate($p) > $latest} {
1386 1387 set level $k
1387 1388 set latest $cdate($p)
1388 1389 }
1389 1390 } else {
1390 1391 set level $k
1391 1392 break
1392 1393 }
1393 1394 }
1394 1395 }
1395 1396 if {$level < 0} {
1396 1397 if {$todo != {}} {
1397 1398 puts "ERROR: none of the pending commits can be done yet:"
1398 1399 foreach p $todo {
1399 1400 puts " $p ($ncleft($p))"
1400 1401 }
1401 1402 }
1402 1403 return -1
1403 1404 }
1404 1405
1405 1406 return $level
1406 1407 }
1407 1408
1408 1409 proc drawcommit {id} {
1409 1410 global phase todo nchildren datemode nextupdate
1410 1411 global numcommits ncmupdate displayorder todo onscreen
1411 1412
1412 1413 if {$phase != "incrdraw"} {
1413 1414 set phase incrdraw
1414 1415 set displayorder {}
1415 1416 set todo {}
1416 1417 initgraph
1417 1418 }
1418 1419 if {$nchildren($id) == 0} {
1419 1420 lappend todo $id
1420 1421 set onscreen($id) 0
1421 1422 }
1422 1423 set level [decidenext 1]
1423 1424 if {$level == {} || $id != [lindex $todo $level]} {
1424 1425 return
1425 1426 }
1426 1427 while 1 {
1427 1428 lappend displayorder [lindex $todo $level]
1428 1429 if {[updatetodo $level $datemode]} {
1429 1430 set level [decidenext 1]
1430 1431 if {$level == {}} break
1431 1432 }
1432 1433 set id [lindex $todo $level]
1433 1434 if {![info exists commitlisted($id)]} {
1434 1435 break
1435 1436 }
1436 1437 }
1437 1438 drawmore 1
1438 1439 }
1439 1440
1440 1441 proc finishcommits {} {
1441 1442 global phase
1442 1443 global canv mainfont ctext maincursor textcursor
1443 1444
1444 1445 if {$phase != "incrdraw"} {
1445 1446 $canv delete all
1446 1447 $canv create text 3 3 -anchor nw -text "No commits selected" \
1447 1448 -font $mainfont -tags textitems
1448 1449 set phase {}
1449 1450 } else {
1450 1451 drawrest
1451 1452 }
1452 1453 . config -cursor $maincursor
1453 1454 settextcursor $textcursor
1454 1455 }
1455 1456
1456 1457 # Don't change the text pane cursor if it is currently the hand cursor,
1457 1458 # showing that we are over a sha1 ID link.
1458 1459 proc settextcursor {c} {
1459 1460 global ctext curtextcursor
1460 1461
1461 1462 if {[$ctext cget -cursor] == $curtextcursor} {
1462 1463 $ctext config -cursor $c
1463 1464 }
1464 1465 set curtextcursor $c
1465 1466 }
1466 1467
1467 1468 proc drawgraph {} {
1468 1469 global nextupdate startmsecs ncmupdate
1469 1470 global displayorder onscreen
1470 1471
1471 1472 if {$displayorder == {}} return
1472 1473 set startmsecs [clock clicks -milliseconds]
1473 1474 set nextupdate [expr $startmsecs + 100]
1474 1475 set ncmupdate 1
1475 1476 initgraph
1476 1477 foreach id $displayorder {
1477 1478 set onscreen($id) 0
1478 1479 }
1479 1480 drawmore 0
1480 1481 }
1481 1482
1482 1483 proc drawrest {} {
1483 1484 global phase stopped redisplaying selectedline
1484 1485 global datemode todo displayorder
1485 1486 global numcommits ncmupdate
1486 1487 global nextupdate startmsecs
1487 1488
1488 1489 set level [decidenext]
1489 1490 if {$level >= 0} {
1490 1491 set phase drawgraph
1491 1492 while 1 {
1492 1493 lappend displayorder [lindex $todo $level]
1493 1494 set hard [updatetodo $level $datemode]
1494 1495 if {$hard} {
1495 1496 set level [decidenext]
1496 1497 if {$level < 0} break
1497 1498 }
1498 1499 }
1499 1500 drawmore 0
1500 1501 }
1501 1502 set phase {}
1502 1503 set drawmsecs [expr [clock clicks -milliseconds] - $startmsecs]
1503 1504 #puts "overall $drawmsecs ms for $numcommits commits"
1504 1505 if {$redisplaying} {
1505 1506 if {$stopped == 0 && [info exists selectedline]} {
1506 1507 selectline $selectedline 0
1507 1508 }
1508 1509 if {$stopped == 1} {
1509 1510 set stopped 0
1510 1511 after idle drawgraph
1511 1512 } else {
1512 1513 set redisplaying 0
1513 1514 }
1514 1515 }
1515 1516 }
1516 1517
1517 1518 proc findmatches {f} {
1518 1519 global findtype foundstring foundstrlen
1519 1520 if {$findtype == "Regexp"} {
1520 1521 set matches [regexp -indices -all -inline $foundstring $f]
1521 1522 } else {
1522 1523 if {$findtype == "IgnCase"} {
1523 1524 set str [string tolower $f]
1524 1525 } else {
1525 1526 set str $f
1526 1527 }
1527 1528 set matches {}
1528 1529 set i 0
1529 1530 while {[set j [string first $foundstring $str $i]] >= 0} {
1530 1531 lappend matches [list $j [expr $j+$foundstrlen-1]]
1531 1532 set i [expr $j + $foundstrlen]
1532 1533 }
1533 1534 }
1534 1535 return $matches
1535 1536 }
1536 1537
1537 1538 proc dofind {} {
1538 1539 global findtype findloc findstring markedmatches commitinfo
1539 1540 global numcommits lineid linehtag linentag linedtag
1540 1541 global mainfont namefont canv canv2 canv3 selectedline
1541 1542 global matchinglines foundstring foundstrlen
1542 1543
1543 1544 stopfindproc
1544 1545 unmarkmatches
1545 1546 focus .
1546 1547 set matchinglines {}
1547 1548 if {$findloc == "Pickaxe"} {
1548 1549 findpatches
1549 1550 return
1550 1551 }
1551 1552 if {$findtype == "IgnCase"} {
1552 1553 set foundstring [string tolower $findstring]
1553 1554 } else {
1554 1555 set foundstring $findstring
1555 1556 }
1556 1557 set foundstrlen [string length $findstring]
1557 1558 if {$foundstrlen == 0} return
1558 1559 if {$findloc == "Files"} {
1559 1560 findfiles
1560 1561 return
1561 1562 }
1562 1563 if {![info exists selectedline]} {
1563 1564 set oldsel -1
1564 1565 } else {
1565 1566 set oldsel $selectedline
1566 1567 }
1567 1568 set didsel 0
1568 1569 set fldtypes {Headline Author Date Committer CDate Comment}
1569 1570 for {set l 0} {$l < $numcommits} {incr l} {
1570 1571 set id $lineid($l)
1571 1572 set info $commitinfo($id)
1572 1573 set doesmatch 0
1573 1574 foreach f $info ty $fldtypes {
1574 1575 if {$findloc != "All fields" && $findloc != $ty} {
1575 1576 continue
1576 1577 }
1577 1578 set matches [findmatches $f]
1578 1579 if {$matches == {}} continue
1579 1580 set doesmatch 1
1580 1581 if {$ty == "Headline"} {
1581 1582 markmatches $canv $l $f $linehtag($l) $matches $mainfont
1582 1583 } elseif {$ty == "Author"} {
1583 1584 markmatches $canv2 $l $f $linentag($l) $matches $namefont
1584 1585 } elseif {$ty == "Date"} {
1585 1586 markmatches $canv3 $l $f $linedtag($l) $matches $mainfont
1586 1587 }
1587 1588 }
1588 1589 if {$doesmatch} {
1589 1590 lappend matchinglines $l
1590 1591 if {!$didsel && $l > $oldsel} {
1591 1592 findselectline $l
1592 1593 set didsel 1
1593 1594 }
1594 1595 }
1595 1596 }
1596 1597 if {$matchinglines == {}} {
1597 1598 bell
1598 1599 } elseif {!$didsel} {
1599 1600 findselectline [lindex $matchinglines 0]
1600 1601 }
1601 1602 }
1602 1603
1603 1604 proc findselectline {l} {
1604 1605 global findloc commentend ctext
1605 1606 selectline $l 1
1606 1607 if {$findloc == "All fields" || $findloc == "Comments"} {
1607 1608 # highlight the matches in the comments
1608 1609 set f [$ctext get 1.0 $commentend]
1609 1610 set matches [findmatches $f]
1610 1611 foreach match $matches {
1611 1612 set start [lindex $match 0]
1612 1613 set end [expr [lindex $match 1] + 1]
1613 1614 $ctext tag add found "1.0 + $start c" "1.0 + $end c"
1614 1615 }
1615 1616 }
1616 1617 }
1617 1618
1618 1619 proc findnext {restart} {
1619 1620 global matchinglines selectedline
1620 1621 if {![info exists matchinglines]} {
1621 1622 if {$restart} {
1622 1623 dofind
1623 1624 }
1624 1625 return
1625 1626 }
1626 1627 if {![info exists selectedline]} return
1627 1628 foreach l $matchinglines {
1628 1629 if {$l > $selectedline} {
1629 1630 findselectline $l
1630 1631 return
1631 1632 }
1632 1633 }
1633 1634 bell
1634 1635 }
1635 1636
1636 1637 proc findprev {} {
1637 1638 global matchinglines selectedline
1638 1639 if {![info exists matchinglines]} {
1639 1640 dofind
1640 1641 return
1641 1642 }
1642 1643 if {![info exists selectedline]} return
1643 1644 set prev {}
1644 1645 foreach l $matchinglines {
1645 1646 if {$l >= $selectedline} break
1646 1647 set prev $l
1647 1648 }
1648 1649 if {$prev != {}} {
1649 1650 findselectline $prev
1650 1651 } else {
1651 1652 bell
1652 1653 }
1653 1654 }
1654 1655
1655 1656 proc findlocchange {name ix op} {
1656 1657 global findloc findtype findtypemenu
1657 1658 if {$findloc == "Pickaxe"} {
1658 1659 set findtype Exact
1659 1660 set state disabled
1660 1661 } else {
1661 1662 set state normal
1662 1663 }
1663 1664 $findtypemenu entryconf 1 -state $state
1664 1665 $findtypemenu entryconf 2 -state $state
1665 1666 }
1666 1667
1667 1668 proc stopfindproc {{done 0}} {
1668 1669 global findprocpid findprocfile findids
1669 1670 global ctext findoldcursor phase maincursor textcursor
1670 1671 global findinprogress
1671 1672
1672 1673 catch {unset findids}
1673 1674 if {[info exists findprocpid]} {
1674 1675 if {!$done} {
1675 1676 catch {exec kill $findprocpid}
1676 1677 }
1677 1678 catch {close $findprocfile}
1678 1679 unset findprocpid
1679 1680 }
1680 1681 if {[info exists findinprogress]} {
1681 1682 unset findinprogress
1682 1683 if {$phase != "incrdraw"} {
1683 1684 . config -cursor $maincursor
1684 1685 settextcursor $textcursor
1685 1686 }
1686 1687 }
1687 1688 }
1688 1689
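# "Pickaxe" search: look for commits whose patch involves the find
# string.  All candidate ids, starting just after the selected row
# and wrapping around, are written to the stdin of
# "hg debug-diff-tree --stdin -s -r -S<string>"; readfindproc expects
# one 40-character hex id back for each commit that matches.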
1689 1690 proc findpatches {} {
1690 1691 global findstring selectedline numcommits
1691 1692 global findprocpid findprocfile
1692 1693 global finddidsel ctext lineid findinprogress
1693 1694 global findinsertpos
1694 1695
1695 1696 if {$numcommits == 0} return
1696 1697
1697 1698 # make a list of all the ids to search, starting at the one
1698 1699 # after the selected line (if any)
1699 1700 if {[info exists selectedline]} {
1700 1701 set l $selectedline
1701 1702 } else {
1702 1703 set l -1
1703 1704 }
1704 1705 set inputids {}
1705 1706 for {set i 0} {$i < $numcommits} {incr i} {
1706 1707 if {[incr l] >= $numcommits} {
1707 1708 set l 0
1708 1709 }
1709 1710 append inputids $lineid($l) "\n"
1710 1711 }
1711 1712
1712 1713 if {[catch {
1713 1714 set f [open [list | hg debug-diff-tree --stdin -s -r -S$findstring \
1714 1715 << $inputids] r]
1715 1716 } err]} {
1716 1717 error_popup "Error starting search process: $err"
1717 1718 return
1718 1719 }
1719 1720
1720 1721 set findinsertpos end
1721 1722 set findprocfile $f
1722 1723 set findprocpid [pid $f]
1723 1724 fconfigure $f -blocking 0
1724 1725 fileevent $f readable readfindproc
1725 1726 set finddidsel 0
1726 1727 . config -cursor watch
1727 1728 settextcursor watch
1728 1729 set findinprogress 1
1729 1730 }
1730 1731
1731 1732 proc readfindproc {} {
1732 1733 global findprocfile finddidsel
1733 1734 global idline matchinglines findinsertpos
1734 1735
1735 1736 set n [gets $findprocfile line]
1736 1737 if {$n < 0} {
1737 1738 if {[eof $findprocfile]} {
1738 1739 stopfindproc 1
1739 1740 if {!$finddidsel} {
1740 1741 bell
1741 1742 }
1742 1743 }
1743 1744 return
1744 1745 }
1745 1746 if {![regexp {^[0-9a-f]{40}} $line id]} {
1746 1747 error_popup "Can't parse hg debug-diff-tree output: $line"
1747 1748 stopfindproc
1748 1749 return
1749 1750 }
1750 1751 if {![info exists idline($id)]} {
1751 1752 puts stderr "spurious id: $id"
1752 1753 return
1753 1754 }
1754 1755 set l $idline($id)
1755 1756 insertmatch $l $id
1756 1757 }
1757 1758
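# Record line l as a match: keep matchinglines sorted by row even
# when the search wraps past the bottom of the list, put a yellow
# highlight behind the headline, and select this row if no match has
# been selected yet for the current search.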
1758 1759 proc insertmatch {l id} {
1759 1760 global matchinglines findinsertpos finddidsel
1760 1761
1761 1762 if {$findinsertpos == "end"} {
1762 1763 if {$matchinglines != {} && $l < [lindex $matchinglines 0]} {
1763 1764 set matchinglines [linsert $matchinglines 0 $l]
1764 1765 set findinsertpos 1
1765 1766 } else {
1766 1767 lappend matchinglines $l
1767 1768 }
1768 1769 } else {
1769 1770 set matchinglines [linsert $matchinglines $findinsertpos $l]
1770 1771 incr findinsertpos
1771 1772 }
1772 1773 markheadline $l $id
1773 1774 if {!$finddidsel} {
1774 1775 findselectline $l
1775 1776 set finddidsel 1
1776 1777 }
1777 1778 }
1778 1779
1779 1780 proc findfiles {} {
1780 1781 global selectedline numcommits lineid ctext
1781 1782 global ffileline finddidsel parents nparents
1782 1783 global findinprogress findstartline findinsertpos
1783 1784 global treediffs fdiffids fdiffsneeded fdiffpos
1784 1785 global findmergefiles
1785 1786
1786 1787 if {$numcommits == 0} return
1787 1788
1788 1789 if {[info exists selectedline]} {
1789 1790 set l [expr {$selectedline + 1}]
1790 1791 } else {
1791 1792 set l 0
1792 1793 }
1793 1794 set ffileline $l
1794 1795 set findstartline $l
1795 1796 set diffsneeded {}
1796 1797 set fdiffsneeded {}
1797 1798 while 1 {
1798 1799 set id $lineid($l)
1799 1800 if {$findmergefiles || $nparents($id) == 1} {
1800 1801 foreach p $parents($id) {
1801 1802 if {![info exists treediffs([list $id $p])]} {
1802 1803 append diffsneeded "$id $p\n"
1803 1804 lappend fdiffsneeded [list $id $p]
1804 1805 }
1805 1806 }
1806 1807 }
1807 1808 if {[incr l] >= $numcommits} {
1808 1809 set l 0
1809 1810 }
1810 1811 if {$l == $findstartline} break
1811 1812 }
1812 1813
1813 1814 # start off an hg debug-diff-tree process if needed
1814 1815 if {$diffsneeded ne {}} {
1815 1816 if {[catch {
1816 1817 set df [open [list | hg debug-diff-tree -r --stdin << $diffsneeded] r]
1817 1818 } err ]} {
1818 1819 error_popup "Error starting search process: $err"
1819 1820 return
1820 1821 }
1821 1822 catch {unset fdiffids}
1822 1823 set fdiffpos 0
1823 1824 fconfigure $df -blocking 0
1824 1825 fileevent $df readable [list readfilediffs $df]
1825 1826 }
1826 1827
1827 1828 set finddidsel 0
1828 1829 set findinsertpos end
1829 1830 set id $lineid($l)
1830 1831 set p [lindex $parents($id) 0]
1831 1832 . config -cursor watch
1832 1833 settextcursor watch
1833 1834 set findinprogress 1
1834 1835 findcont [list $id $p]
1835 1836 update
1836 1837 }
1837 1838
1838 1839 proc readfilediffs {df} {
1839 1840 global findids fdiffids fdiffs
1840 1841
1841 1842 set n [gets $df line]
1842 1843 if {$n < 0} {
1843 1844 if {[eof $df]} {
1844 1845 donefilediff
1845 1846 if {[catch {close $df} err]} {
1846 1847 stopfindproc
1847 1848 bell
1848 1849 error_popup "Error in hg debug-diff-tree: $err"
1849 1850 } elseif {[info exists findids]} {
1850 1851 set ids $findids
1851 1852 stopfindproc
1852 1853 bell
1853 1854 error_popup "Couldn't find diffs for {$ids}"
1854 1855 }
1855 1856 }
1856 1857 return
1857 1858 }
1858 1859 if {[regexp {^([0-9a-f]{40}) \(from ([0-9a-f]{40})\)} $line match id p]} {
1859 1860 # start of a new string of diffs
1860 1861 donefilediff
1861 1862 set fdiffids [list $id $p]
1862 1863 set fdiffs {}
1863 1864 } elseif {[string match ":*" $line]} {
1864 1865 lappend fdiffs [lindex $line 5]
1865 1866 }
1866 1867 }
1867 1868
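# Finish the set of file diffs accumulated for one (id, parent) pair:
# pairs earlier in fdiffsneeded that produced no output get an empty
# file list recorded in treediffs, the collected file names are
# stored for this pair, and findcont is restarted if it was waiting
# on one of these pairs.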
1868 1869 proc donefilediff {} {
1869 1870 global fdiffids fdiffs treediffs findids
1870 1871 global fdiffsneeded fdiffpos
1871 1872
1872 1873 if {[info exists fdiffids]} {
1873 1874 while {[lindex $fdiffsneeded $fdiffpos] ne $fdiffids
1874 1875 && $fdiffpos < [llength $fdiffsneeded]} {
1875 1876 # hg debug-diff-tree doesn't output anything for a commit
1876 1877 # which doesn't change anything
1877 1878 set nullids [lindex $fdiffsneeded $fdiffpos]
1878 1879 set treediffs($nullids) {}
1879 1880 if {[info exists findids] && $nullids eq $findids} {
1880 1881 unset findids
1881 1882 findcont $nullids
1882 1883 }
1883 1884 incr fdiffpos
1884 1885 }
1885 1886 incr fdiffpos
1886 1887
1887 1888 if {![info exists treediffs($fdiffids)]} {
1888 1889 set treediffs($fdiffids) $fdiffs
1889 1890 }
1890 1891 if {[info exists findids] && $fdiffids eq $findids} {
1891 1892 unset findids
1892 1893 findcont $fdiffids
1893 1894 }
1894 1895 }
1895 1896 }
1896 1897
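# Continue the file-name search from the (id, parent) pair in $ids.
# A commit matches when one of its changed file names matches the
# find string.  If the tree diff for a pair isn't known yet, stash
# the position in findids/ffileline and return; donefilediff calls
# this again once the diff arrives.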
1897 1898 proc findcont {ids} {
1898 1899 global findids treediffs parents nparents
1899 1900 global ffileline findstartline finddidsel
1900 1901 global lineid numcommits matchinglines findinprogress
1901 1902 global findmergefiles
1902 1903
1903 1904 set id [lindex $ids 0]
1904 1905 set p [lindex $ids 1]
1905 1906 set pi [lsearch -exact $parents($id) $p]
1906 1907 set l $ffileline
1907 1908 while 1 {
1908 1909 if {$findmergefiles || $nparents($id) == 1} {
1909 1910 if {![info exists treediffs($ids)]} {
1910 1911 set findids $ids
1911 1912 set ffileline $l
1912 1913 return
1913 1914 }
1914 1915 set doesmatch 0
1915 1916 foreach f $treediffs($ids) {
1916 1917 set x [findmatches $f]
1917 1918 if {$x != {}} {
1918 1919 set doesmatch 1
1919 1920 break
1920 1921 }
1921 1922 }
1922 1923 if {$doesmatch} {
1923 1924 insertmatch $l $id
1924 1925 set pi $nparents($id)
1925 1926 }
1926 1927 } else {
1927 1928 set pi $nparents($id)
1928 1929 }
1929 1930 if {[incr pi] >= $nparents($id)} {
1930 1931 set pi 0
1931 1932 if {[incr l] >= $numcommits} {
1932 1933 set l 0
1933 1934 }
1934 1935 if {$l == $findstartline} break
1935 1936 set id $lineid($l)
1936 1937 }
1937 1938 set p [lindex $parents($id) $pi]
1938 1939 set ids [list $id $p]
1939 1940 }
1940 1941 stopfindproc
1941 1942 if {!$finddidsel} {
1942 1943 bell
1943 1944 }
1944 1945 }
1945 1946
1946 1947 # mark a commit as matching by putting a yellow background
1947 1948 # behind the headline
1948 1949 proc markheadline {l id} {
1949 1950 global canv mainfont linehtag commitinfo
1950 1951
1951 1952 set bbox [$canv bbox $linehtag($l)]
1952 1953 set t [$canv create rect $bbox -outline {} -tags matches -fill yellow]
1953 1954 $canv lower $t
1954 1955 }
1955 1956
1956 1957 # mark the bits of a headline, author or date that match a find string
1957 1958 proc markmatches {canv l str tag matches font} {
1958 1959 set bbox [$canv bbox $tag]
1959 1960 set x0 [lindex $bbox 0]
1960 1961 set y0 [lindex $bbox 1]
1961 1962 set y1 [lindex $bbox 3]
1962 1963 foreach match $matches {
1963 1964 set start [lindex $match 0]
1964 1965 set end [lindex $match 1]
1965 1966 if {$start > $end} continue
1966 1967 set xoff [font measure $font [string range $str 0 [expr $start-1]]]
1967 1968 set xlen [font measure $font [string range $str 0 [expr $end]]]
1968 1969 set t [$canv create rect [expr $x0+$xoff] $y0 [expr $x0+$xlen+2] $y1 \
1969 1970 -outline {} -tags matches -fill yellow]
1970 1971 $canv lower $t
1971 1972 }
1972 1973 }
1973 1974
1974 1975 proc unmarkmatches {} {
1975 1976 global matchinglines findids
1976 1977 allcanvs delete matches
1977 1978 catch {unset matchinglines}
1978 1979 catch {unset findids}
1979 1980 }
1980 1981
1981 1982 proc selcanvline {w x y} {
1982 1983 global canv canvy0 ctext linespc
1983 1984 global lineid linehtag linentag linedtag rowtextx
1984 1985 set ymax [lindex [$canv cget -scrollregion] 3]
1985 1986 if {$ymax == {}} return
1986 1987 set yfrac [lindex [$canv yview] 0]
1987 1988 set y [expr {$y + $yfrac * $ymax}]
1988 1989 set l [expr {int(($y - $canvy0) / $linespc + 0.5)}]
1989 1990 if {$l < 0} {
1990 1991 set l 0
1991 1992 }
1992 1993 if {$w eq $canv} {
1993 1994 if {![info exists rowtextx($l)] || $x < $rowtextx($l)} return
1994 1995 }
1995 1996 unmarkmatches
1996 1997 selectline $l 1
1997 1998 }
1998 1999
1999 2000 proc commit_descriptor {p} {
2000 2001 global commitinfo
2001 2002 set l "..."
2002 2003 if {[info exists commitinfo($p)]} {
2003 2004 set l [lindex $commitinfo($p) 0]
2004 2005 }
2005 2006 return "$p ($l)"
2006 2007 }
2007 2008
2008 2009 # append some text to the ctext widget, and make any SHA1 ID
2009 2010 # that we know about be a clickable link.
2010 2011 proc appendwithlinks {text} {
2011 2012 global ctext idline linknum
2012 2013
2013 2014 set start [$ctext index "end - 1c"]
2014 2015 $ctext insert end $text
2015 2016 $ctext insert end "\n"
2016 2017 set links [regexp -indices -all -inline {[0-9a-f]{40}} $text]
2017 2018 foreach l $links {
2018 2019 set s [lindex $l 0]
2019 2020 set e [lindex $l 1]
2020 2021 set linkid [string range $text $s $e]
2021 2022 if {![info exists idline($linkid)]} continue
2022 2023 incr e
2023 2024 $ctext tag add link "$start + $s c" "$start + $e c"
2024 2025 $ctext tag add link$linknum "$start + $s c" "$start + $e c"
2025 2026 $ctext tag bind link$linknum <1> [list selectline $idline($linkid) 1]
2026 2027 incr linknum
2027 2028 }
2028 2029 $ctext tag conf link -foreground blue -underline 1
2029 2030 $ctext tag bind link <Enter> { %W configure -cursor hand2 }
2030 2031 $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
2031 2032 }
2032 2033
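# Select row l: draw the selection background on all three canvases,
# scroll it into view, optionally record the selection in the
# back/forward history, and fill the details pane with the author,
# committer, tags, parents, children and comment before starting the
# diff display for the commit.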
2033 2034 proc selectline {l isnew} {
2034 2035 global canv canv2 canv3 ctext commitinfo selectedline
2035 2036 global lineid linehtag linentag linedtag
2036 2037 global canvy0 linespc parents nparents children
2037 2038 global cflist currentid sha1entry
2038 2039 global commentend idtags idline linknum
2039 2040
2040 2041 $canv delete hover
2041 2042 normalline
2042 2043 if {![info exists lineid($l)] || ![info exists linehtag($l)]} return
2043 2044 $canv delete secsel
2044 2045 set t [eval $canv create rect [$canv bbox $linehtag($l)] -outline {{}} \
2045 2046 -tags secsel -fill [$canv cget -selectbackground]]
2046 2047 $canv lower $t
2047 2048 $canv2 delete secsel
2048 2049 set t [eval $canv2 create rect [$canv2 bbox $linentag($l)] -outline {{}} \
2049 2050 -tags secsel -fill [$canv2 cget -selectbackground]]
2050 2051 $canv2 lower $t
2051 2052 $canv3 delete secsel
2052 2053 set t [eval $canv3 create rect [$canv3 bbox $linedtag($l)] -outline {{}} \
2053 2054 -tags secsel -fill [$canv3 cget -selectbackground]]
2054 2055 $canv3 lower $t
2055 2056 set y [expr {$canvy0 + $l * $linespc}]
2056 2057 set ymax [lindex [$canv cget -scrollregion] 3]
2057 2058 set ytop [expr {$y - $linespc - 1}]
2058 2059 set ybot [expr {$y + $linespc + 1}]
2059 2060 set wnow [$canv yview]
2060 2061 set wtop [expr [lindex $wnow 0] * $ymax]
2061 2062 set wbot [expr [lindex $wnow 1] * $ymax]
2062 2063 set wh [expr {$wbot - $wtop}]
2063 2064 set newtop $wtop
2064 2065 if {$ytop < $wtop} {
2065 2066 if {$ybot < $wtop} {
2066 2067 set newtop [expr {$y - $wh / 2.0}]
2067 2068 } else {
2068 2069 set newtop $ytop
2069 2070 if {$newtop > $wtop - $linespc} {
2070 2071 set newtop [expr {$wtop - $linespc}]
2071 2072 }
2072 2073 }
2073 2074 } elseif {$ybot > $wbot} {
2074 2075 if {$ytop > $wbot} {
2075 2076 set newtop [expr {$y - $wh / 2.0}]
2076 2077 } else {
2077 2078 set newtop [expr {$ybot - $wh}]
2078 2079 if {$newtop < $wtop + $linespc} {
2079 2080 set newtop [expr {$wtop + $linespc}]
2080 2081 }
2081 2082 }
2082 2083 }
2083 2084 if {$newtop != $wtop} {
2084 2085 if {$newtop < 0} {
2085 2086 set newtop 0
2086 2087 }
2087 2088 allcanvs yview moveto [expr $newtop * 1.0 / $ymax]
2088 2089 }
2089 2090
2090 2091 if {$isnew} {
2091 2092 addtohistory [list selectline $l 0]
2092 2093 }
2093 2094
2094 2095 set selectedline $l
2095 2096
2096 2097 set id $lineid($l)
2097 2098 set currentid $id
2098 2099 $sha1entry delete 0 end
2099 2100 $sha1entry insert 0 $id
2100 2101 $sha1entry selection from 0
2101 2102 $sha1entry selection to end
2102 2103
2103 2104 $ctext conf -state normal
2104 2105 $ctext delete 0.0 end
2105 2106 set linknum 0
2106 2107 $ctext mark set fmark.0 0.0
2107 2108 $ctext mark gravity fmark.0 left
2108 2109 set info $commitinfo($id)
2109 2110 $ctext insert end "Author: [lindex $info 1] [lindex $info 2]\n"
2110 2111 $ctext insert end "Committer: [lindex $info 3] [lindex $info 4]\n"
2111 2112 if {[info exists idtags($id)]} {
2112 2113 $ctext insert end "Tags:"
2113 2114 foreach tag $idtags($id) {
2114 2115 $ctext insert end " $tag"
2115 2116 }
2116 2117 $ctext insert end "\n"
2117 2118 }
2118 2119
2119 2120 set comment {}
2120 2121 if {[info exists parents($id)]} {
2121 2122 foreach p $parents($id) {
2122 2123 append comment "Parent: [commit_descriptor $p]\n"
2123 2124 }
2124 2125 }
2125 2126 if {[info exists children($id)]} {
2126 2127 foreach c $children($id) {
2127 2128 append comment "Child: [commit_descriptor $c]\n"
2128 2129 }
2129 2130 }
2130 2131 append comment "\n"
2131 2132 append comment [lindex $info 5]
2132 2133
2133 2134 # make anything that looks like a SHA1 ID be a clickable link
2134 2135 appendwithlinks $comment
2135 2136
2136 2137 $ctext tag delete Comments
2137 2138 $ctext tag remove found 1.0 end
2138 2139 $ctext conf -state disabled
2139 2140 set commentend [$ctext index "end - 1c"]
2140 2141
2141 2142 $cflist delete 0 end
2142 2143 $cflist insert end "Comments"
2143 2144 if {$nparents($id) == 1} {
2144 2145 startdiff [concat $id $parents($id)]
2145 2146 } elseif {$nparents($id) > 1} {
2146 2147 mergediff $id
2147 2148 }
2148 2149 }
2149 2150
2150 2151 proc selnextline {dir} {
2151 2152 global selectedline
2152 2153 if {![info exists selectedline]} return
2153 2154 set l [expr $selectedline + $dir]
2154 2155 unmarkmatches
2155 2156 selectline $l 1
2156 2157 }
2157 2158
2158 2159 proc unselectline {} {
2159 2160 global selectedline
2160 2161
2161 2162 catch {unset selectedline}
2162 2163 allcanvs delete secsel
2163 2164 }
2164 2165
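# Back/forward history.  Each entry is a command, such as
# [selectline ...] or [lineclick ...], that recreates a view;
# addtohistory drops anything beyond the current position, and
# goback/goforw simply re-evaluate the stored command and enable or
# disable the two arrow buttons as appropriate.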
2165 2166 proc addtohistory {cmd} {
2166 2167 global history historyindex
2167 2168
2168 2169 if {$historyindex > 0
2169 2170 && [lindex $history [expr {$historyindex - 1}]] == $cmd} {
2170 2171 return
2171 2172 }
2172 2173
2173 2174 if {$historyindex < [llength $history]} {
2174 2175 set history [lreplace $history $historyindex end $cmd]
2175 2176 } else {
2176 2177 lappend history $cmd
2177 2178 }
2178 2179 incr historyindex
2179 2180 if {$historyindex > 1} {
2180 2181 .ctop.top.bar.leftbut conf -state normal
2181 2182 } else {
2182 2183 .ctop.top.bar.leftbut conf -state disabled
2183 2184 }
2184 2185 .ctop.top.bar.rightbut conf -state disabled
2185 2186 }
2186 2187
2187 2188 proc goback {} {
2188 2189 global history historyindex
2189 2190
2190 2191 if {$historyindex > 1} {
2191 2192 incr historyindex -1
2192 2193 set cmd [lindex $history [expr {$historyindex - 1}]]
2193 2194 eval $cmd
2194 2195 .ctop.top.bar.rightbut conf -state normal
2195 2196 }
2196 2197 if {$historyindex <= 1} {
2197 2198 .ctop.top.bar.leftbut conf -state disabled
2198 2199 }
2199 2200 }
2200 2201
2201 2202 proc goforw {} {
2202 2203 global history historyindex
2203 2204
2204 2205 if {$historyindex < [llength $history]} {
2205 2206 set cmd [lindex $history $historyindex]
2206 2207 incr historyindex
2207 2208 eval $cmd
2208 2209 .ctop.top.bar.leftbut conf -state normal
2209 2210 }
2210 2211 if {$historyindex >= [llength $history]} {
2211 2212 .ctop.top.bar.rightbut conf -state disabled
2212 2213 }
2213 2214 }
2214 2215
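# Display of merge commits: contmergediff gathers the tree diffs of
# the merge against each parent and of each parent against their
# common ancestor (found with hg debug-merge-base), showmergediff
# then runs one "hg debug-diff-tree -p" per parent, and the resulting
# hunks are interleaved in the details pane by processhunks and
# processgroup.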
2215 2216 proc mergediff {id} {
2216 2217 global parents diffmergeid diffmergegca mergefilelist diffpindex
2217 2218
2218 2219 set diffmergeid $id
2219 2220 set diffpindex -1
2220 2221 set diffmergegca [findgca $parents($id)]
2221 2222 if {[info exists mergefilelist($id)]} {
2222 2223 if {$mergefilelist($id) ne {}} {
2223 2224 showmergediff
2224 2225 }
2225 2226 } else {
2226 2227 contmergediff {}
2227 2228 }
2228 2229 }
2229 2230
2230 2231 proc findgca {ids} {
2231 2232 set gca {}
2232 2233 foreach id $ids {
2233 2234 if {$gca eq {}} {
2234 2235 set gca $id
2235 2236 } else {
2236 2237 if {[catch {
2237 2238 set gca [exec hg debug-merge-base $gca $id]
2238 2239 } err]} {
2239 2240 return {}
2240 2241 }
2241 2242 }
2242 2243 }
2243 2244 return $gca
2244 2245 }
2245 2246
2246 2247 proc contmergediff {ids} {
2247 2248 global diffmergeid diffpindex parents nparents diffmergegca
2248 2249 global treediffs mergefilelist diffids treepending
2249 2250
2250 2251 # diff the child against each of the parents, and diff
2251 2252 # each of the parents against the GCA.
2252 2253 while 1 {
2253 2254 if {[lindex $ids 0] == $diffmergeid && $diffmergegca ne {}} {
2254 2255 set ids [list [lindex $ids 1] $diffmergegca]
2255 2256 } else {
2256 2257 if {[incr diffpindex] >= $nparents($diffmergeid)} break
2257 2258 set p [lindex $parents($diffmergeid) $diffpindex]
2258 2259 set ids [list $diffmergeid $p]
2259 2260 }
2260 2261 if {![info exists treediffs($ids)]} {
2261 2262 set diffids $ids
2262 2263 if {![info exists treepending]} {
2263 2264 gettreediffs $ids
2264 2265 }
2265 2266 return
2266 2267 }
2267 2268 }
2268 2269
2269 2270 # If a file in some parent is different from the child and also
2270 2271 # different from the GCA, then it's interesting.
2271 2272 # If we don't have a GCA, then a file is interesting if it is
2272 2273 # different from the child in all the parents.
2273 2274 if {$diffmergegca ne {}} {
2274 2275 set files {}
2275 2276 foreach p $parents($diffmergeid) {
2276 2277 set gcadiffs $treediffs([list $p $diffmergegca])
2277 2278 foreach f $treediffs([list $diffmergeid $p]) {
2278 2279 if {[lsearch -exact $files $f] < 0
2279 2280 && [lsearch -exact $gcadiffs $f] >= 0} {
2280 2281 lappend files $f
2281 2282 }
2282 2283 }
2283 2284 }
2284 2285 set files [lsort $files]
2285 2286 } else {
2286 2287 set p [lindex $parents($diffmergeid) 0]
2287 2288 set files $treediffs([list $diffmergeid $p])
2288 2289 for {set i 1} {$i < $nparents($diffmergeid) && $files ne {}} {incr i} {
2289 2290 set p [lindex $parents($diffmergeid) $i]
2290 2291 set df $treediffs([list $diffmergeid $p])
2291 2292 set nf {}
2292 2293 foreach f $files {
2293 2294 if {[lsearch -exact $df $f] >= 0} {
2294 2295 lappend nf $f
2295 2296 }
2296 2297 }
2297 2298 set files $nf
2298 2299 }
2299 2300 }
2300 2301
2301 2302 set mergefilelist($diffmergeid) $files
2302 2303 if {$files ne {}} {
2303 2304 showmergediff
2304 2305 }
2305 2306 }
2306 2307
2307 2308 proc showmergediff {} {
2308 2309 global cflist diffmergeid mergefilelist parents
2309 2310 global diffopts diffinhunk currentfile currenthunk filelines env
2310 2311 global diffblocked groupfilelast mergefds groupfilenum grouphunks
2311 2312
2312 2313 set files $mergefilelist($diffmergeid)
2313 2314 foreach f $files {
2314 2315 $cflist insert end $f
2315 2316 }
2316 2317 set env(GIT_DIFF_OPTS) $diffopts
2317 2318 set flist {}
2318 2319 catch {unset currentfile}
2319 2320 catch {unset currenthunk}
2320 2321 catch {unset filelines}
2321 2322 catch {unset groupfilenum}
2322 2323 catch {unset grouphunks}
2323 2324 set groupfilelast -1
2324 2325 foreach p $parents($diffmergeid) {
2325 2326 set cmd [list | hg debug-diff-tree -p $p $diffmergeid]
2326 2327 set cmd [concat $cmd $mergefilelist($diffmergeid)]
2327 2328 if {[catch {set f [open $cmd r]} err]} {
2328 2329 error_popup "Error getting diffs: $err"
2329 2330 foreach f $flist {
2330 2331 catch {close $f}
2331 2332 }
2332 2333 return
2333 2334 }
2334 2335 lappend flist $f
2335 2336 set ids [list $diffmergeid $p]
2336 2337 set mergefds($ids) $f
2337 2338 set diffinhunk($ids) 0
2338 2339 set diffblocked($ids) 0
2339 2340 fconfigure $f -blocking 0
2340 2341 fileevent $f readable [list getmergediffline $f $ids $diffmergeid]
2341 2342 }
2342 2343 }
2343 2344
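# Incrementally parse the diff stream for one parent of the merge.
# Hunk lines are collected into filelines and difflcounts; when a
# hunk ends it is stored in currenthunk($ids) and handed to
# processhunks, and reading from this parent is suspended until the
# hunk has been consumed.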
2344 2345 proc getmergediffline {f ids id} {
2345 2346 global diffmergeid diffinhunk diffoldlines diffnewlines
2346 2347 global currentfile currenthunk
2347 2348 global diffoldstart diffnewstart diffoldlno diffnewlno
2348 2349 global diffblocked mergefilelist
2349 2350 global noldlines nnewlines difflcounts filelines
2350 2351
2351 2352 set n [gets $f line]
2352 2353 if {$n < 0} {
2353 2354 if {![eof $f]} return
2354 2355 }
2355 2356
2356 2357 if {!([info exists diffmergeid] && $diffmergeid == $id)} {
2357 2358 if {$n < 0} {
2358 2359 close $f
2359 2360 }
2360 2361 return
2361 2362 }
2362 2363
2363 2364 if {$diffinhunk($ids) != 0} {
2364 2365 set fi $currentfile($ids)
2365 2366 if {$n > 0 && [regexp {^[-+ \\]} $line match]} {
2366 2367 # continuing an existing hunk
2367 2368 set line [string range $line 1 end]
2368 2369 set p [lindex $ids 1]
2369 2370 if {$match eq "-" || $match eq " "} {
2370 2371 set filelines($p,$fi,$diffoldlno($ids)) $line
2371 2372 incr diffoldlno($ids)
2372 2373 }
2373 2374 if {$match eq "+" || $match eq " "} {
2374 2375 set filelines($id,$fi,$diffnewlno($ids)) $line
2375 2376 incr diffnewlno($ids)
2376 2377 }
2377 2378 if {$match eq " "} {
2378 2379 if {$diffinhunk($ids) == 2} {
2379 2380 lappend difflcounts($ids) \
2380 2381 [list $noldlines($ids) $nnewlines($ids)]
2381 2382 set noldlines($ids) 0
2382 2383 set diffinhunk($ids) 1
2383 2384 }
2384 2385 incr noldlines($ids)
2385 2386 } elseif {$match eq "-" || $match eq "+"} {
2386 2387 if {$diffinhunk($ids) == 1} {
2387 2388 lappend difflcounts($ids) [list $noldlines($ids)]
2388 2389 set noldlines($ids) 0
2389 2390 set nnewlines($ids) 0
2390 2391 set diffinhunk($ids) 2
2391 2392 }
2392 2393 if {$match eq "-"} {
2393 2394 incr noldlines($ids)
2394 2395 } else {
2395 2396 incr nnewlines($ids)
2396 2397 }
2397 2398 }
2398 2399 # a "\ No newline at end of file" marker falls through here and is ignored
2399 2400 return
2400 2401 }
2401 2402 # end of a hunk
2402 2403 if {$diffinhunk($ids) == 1 && $noldlines($ids) != 0} {
2403 2404 lappend difflcounts($ids) [list $noldlines($ids)]
2404 2405 } elseif {$diffinhunk($ids) == 2
2405 2406 && ($noldlines($ids) != 0 || $nnewlines($ids) != 0)} {
2406 2407 lappend difflcounts($ids) [list $noldlines($ids) $nnewlines($ids)]
2407 2408 }
2408 2409 set currenthunk($ids) [list $currentfile($ids) \
2409 2410 $diffoldstart($ids) $diffnewstart($ids) \
2410 2411 $diffoldlno($ids) $diffnewlno($ids) \
2411 2412 $difflcounts($ids)]
2412 2413 set diffinhunk($ids) 0
2413 2414 # -1 = need to block, 0 = unblocked, 1 = is blocked
2414 2415 set diffblocked($ids) -1
2415 2416 processhunks
2416 2417 if {$diffblocked($ids) == -1} {
2417 2418 fileevent $f readable {}
2418 2419 set diffblocked($ids) 1
2419 2420 }
2420 2421 }
2421 2422
2422 2423 if {$n < 0} {
2423 2424 # eof
2424 2425 if {!$diffblocked($ids)} {
2425 2426 close $f
2426 2427 set currentfile($ids) [llength $mergefilelist($diffmergeid)]
2427 2428 set currenthunk($ids) [list $currentfile($ids) 0 0 0 0 {}]
2428 2429 processhunks
2429 2430 }
2430 2431 } elseif {[regexp {^diff --git a/(.*) b/} $line match fname]} {
2431 2432 # start of a new file
2432 2433 set currentfile($ids) \
2433 2434 [lsearch -exact $mergefilelist($diffmergeid) $fname]
2434 2435 } elseif {[regexp {^@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@(.*)} \
2435 2436 $line match f1l f1c f2l f2c rest]} {
2436 2437 if {[info exists currentfile($ids)] && $currentfile($ids) >= 0} {
2437 2438 # start of a new hunk
2438 2439 if {$f1l == 0 && $f1c == 0} {
2439 2440 set f1l 1
2440 2441 }
2441 2442 if {$f2l == 0 && $f2c == 0} {
2442 2443 set f2l 1
2443 2444 }
2444 2445 set diffinhunk($ids) 1
2445 2446 set diffoldstart($ids) $f1l
2446 2447 set diffnewstart($ids) $f2l
2447 2448 set diffoldlno($ids) $f1l
2448 2449 set diffnewlno($ids) $f2l
2449 2450 set difflcounts($ids) {}
2450 2451 set noldlines($ids) 0
2451 2452 set nnewlines($ids) 0
2452 2453 }
2453 2454 }
2454 2455 }
2455 2456
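# Interleave the pending hunks from all the parents: repeatedly take
# the hunk that starts earliest (lowest file index, then lowest line
# number in the merge result), add it to the current group while it
# overlaps, and pass each completed group to processgroup.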
2456 2457 proc processhunks {} {
2457 2458 global diffmergeid parents nparents currenthunk
2458 2459 global mergefilelist diffblocked mergefds
2459 2460 global grouphunks grouplinestart grouplineend groupfilenum
2460 2461
2461 2462 set nfiles [llength $mergefilelist($diffmergeid)]
2462 2463 while 1 {
2463 2464 set fi $nfiles
2464 2465 set lno 0
2465 2466 # look for the earliest hunk
2466 2467 foreach p $parents($diffmergeid) {
2467 2468 set ids [list $diffmergeid $p]
2468 2469 if {![info exists currenthunk($ids)]} return
2469 2470 set i [lindex $currenthunk($ids) 0]
2470 2471 set l [lindex $currenthunk($ids) 2]
2471 2472 if {$i < $fi || ($i == $fi && $l < $lno)} {
2472 2473 set fi $i
2473 2474 set lno $l
2474 2475 set pi $p
2475 2476 }
2476 2477 }
2477 2478
2478 2479 if {$fi < $nfiles} {
2479 2480 set ids [list $diffmergeid $pi]
2480 2481 set hunk $currenthunk($ids)
2481 2482 unset currenthunk($ids)
2482 2483 if {$diffblocked($ids) > 0} {
2483 2484 fileevent $mergefds($ids) readable \
2484 2485 [list getmergediffline $mergefds($ids) $ids $diffmergeid]
2485 2486 }
2486 2487 set diffblocked($ids) 0
2487 2488
2488 2489 if {[info exists groupfilenum] && $groupfilenum == $fi
2489 2490 && $lno <= $grouplineend} {
2490 2491 # add this hunk to the pending group
2491 2492 lappend grouphunks($pi) $hunk
2492 2493 set endln [lindex $hunk 4]
2493 2494 if {$endln > $grouplineend} {
2494 2495 set grouplineend $endln
2495 2496 }
2496 2497 continue
2497 2498 }
2498 2499 }
2499 2500
2500 2501 # succeeding stuff doesn't belong in this group, so
2501 2502 # process the group now
2502 2503 if {[info exists groupfilenum]} {
2503 2504 processgroup
2504 2505 unset groupfilenum
2505 2506 unset grouphunks
2506 2507 }
2507 2508
2508 2509 if {$fi >= $nfiles} break
2509 2510
2510 2511 # start a new group
2511 2512 set groupfilenum $fi
2512 2513 set grouphunks($pi) [list $hunk]
2513 2514 set grouplinestart $lno
2514 2515 set grouplineend [lindex $hunk 4]
2515 2516 }
2516 2517 }
2517 2518
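# Render one group of overlapping hunks.  The header line carries a
# "-start,len" range for each parent followed by the "+start,len"
# range of the merge result; inserted lines are tagged m0, m1, ...
# (or mmax) for the individual parents and mresult for the result so
# that each source can be displayed distinctly.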
2518 2519 proc processgroup {} {
2519 2520 global groupfilelast groupfilenum difffilestart
2520 2521 global mergefilelist diffmergeid ctext filelines
2521 2522 global parents diffmergeid diffoffset
2522 2523 global grouphunks grouplinestart grouplineend nparents
2523 2524 global mergemax
2524 2525
2525 2526 $ctext conf -state normal
2526 2527 set id $diffmergeid
2527 2528 set f $groupfilenum
2528 2529 if {$groupfilelast != $f} {
2529 2530 $ctext insert end "\n"
2530 2531 set here [$ctext index "end - 1c"]
2531 2532 set difffilestart($f) $here
2532 2533 set mark fmark.[expr {$f + 1}]
2533 2534 $ctext mark set $mark $here
2534 2535 $ctext mark gravity $mark left
2535 2536 set header [lindex $mergefilelist($id) $f]
2536 2537 set l [expr {(78 - [string length $header]) / 2}]
2537 2538 set pad [string range "----------------------------------------" 1 $l]
2538 2539 $ctext insert end "$pad $header $pad\n" filesep
2539 2540 set groupfilelast $f
2540 2541 foreach p $parents($id) {
2541 2542 set diffoffset($p) 0
2542 2543 }
2543 2544 }
2544 2545
2545 2546 $ctext insert end "@@" msep
2546 2547 set nlines [expr {$grouplineend - $grouplinestart}]
2547 2548 set events {}
2548 2549 set pnum 0
2549 2550 foreach p $parents($id) {
2550 2551 set startline [expr {$grouplinestart + $diffoffset($p)}]
2551 2552 set ol $startline
2552 2553 set nl $grouplinestart
2553 2554 if {[info exists grouphunks($p)]} {
2554 2555 foreach h $grouphunks($p) {
2555 2556 set l [lindex $h 2]
2556 2557 if {$nl < $l} {
2557 2558 for {} {$nl < $l} {incr nl} {
2558 2559 set filelines($p,$f,$ol) $filelines($id,$f,$nl)
2559 2560 incr ol
2560 2561 }
2561 2562 }
2562 2563 foreach chunk [lindex $h 5] {
2563 2564 if {[llength $chunk] == 2} {
2564 2565 set olc [lindex $chunk 0]
2565 2566 set nlc [lindex $chunk 1]
2566 2567 set nnl [expr {$nl + $nlc}]
2567 2568 lappend events [list $nl $nnl $pnum $olc $nlc]
2568 2569 incr ol $olc
2569 2570 set nl $nnl
2570 2571 } else {
2571 2572 incr ol [lindex $chunk 0]
2572 2573 incr nl [lindex $chunk 0]
2573 2574 }
2574 2575 }
2575 2576 }
2576 2577 }
2577 2578 if {$nl < $grouplineend} {
2578 2579 for {} {$nl < $grouplineend} {incr nl} {
2579 2580 set filelines($p,$f,$ol) $filelines($id,$f,$nl)
2580 2581 incr ol
2581 2582 }
2582 2583 }
2583 2584 set nlines [expr {$ol - $startline}]
2584 2585 $ctext insert end " -$startline,$nlines" msep
2585 2586 incr pnum
2586 2587 }
2587 2588
2588 2589 set nlines [expr {$grouplineend - $grouplinestart}]
2589 2590 $ctext insert end " +$grouplinestart,$nlines @@\n" msep
2590 2591
2591 2592 set events [lsort -integer -index 0 $events]
2592 2593 set nevents [llength $events]
2593 2594 set nmerge $nparents($diffmergeid)
2594 2595 set l $grouplinestart
2595 2596 for {set i 0} {$i < $nevents} {set i $j} {
2596 2597 set nl [lindex $events $i 0]
2597 2598 while {$l < $nl} {
2598 2599 $ctext insert end " $filelines($id,$f,$l)\n"
2599 2600 incr l
2600 2601 }
2601 2602 set e [lindex $events $i]
2602 2603 set enl [lindex $e 1]
2603 2604 set j $i
2604 2605 set active {}
2605 2606 while 1 {
2606 2607 set pnum [lindex $e 2]
2607 2608 set olc [lindex $e 3]
2608 2609 set nlc [lindex $e 4]
2609 2610 if {![info exists delta($pnum)]} {
2610 2611 set delta($pnum) [expr {$olc - $nlc}]
2611 2612 lappend active $pnum
2612 2613 } else {
2613 2614 incr delta($pnum) [expr {$olc - $nlc}]
2614 2615 }
2615 2616 if {[incr j] >= $nevents} break
2616 2617 set e [lindex $events $j]
2617 2618 if {[lindex $e 0] >= $enl} break
2618 2619 if {[lindex $e 1] > $enl} {
2619 2620 set enl [lindex $e 1]
2620 2621 }
2621 2622 }
2622 2623 set nlc [expr {$enl - $l}]
2623 2624 set ncol mresult
2624 2625 set bestpn -1
2625 2626 if {[llength $active] == $nmerge - 1} {
2626 2627 # no diff for one of the parents, i.e. it's identical
2627 2628 for {set pnum 0} {$pnum < $nmerge} {incr pnum} {
2628 2629 if {![info exists delta($pnum)]} {
2629 2630 if {$pnum < $mergemax} {
2630 2631 lappend ncol m$pnum
2631 2632 } else {
2632 2633 lappend ncol mmax
2633 2634 }
2634 2635 break
2635 2636 }
2636 2637 }
2637 2638 } elseif {[llength $active] == $nmerge} {
2638 2639 # all parents are different, see if one is very similar
2639 2640 set bestsim 30
2640 2641 for {set pnum 0} {$pnum < $nmerge} {incr pnum} {
2641 2642 set sim [similarity $pnum $l $nlc $f \
2642 2643 [lrange $events $i [expr {$j-1}]]]
2643 2644 if {$sim > $bestsim} {
2644 2645 set bestsim $sim
2645 2646 set bestpn $pnum
2646 2647 }
2647 2648 }
2648 2649 if {$bestpn >= 0} {
2649 2650 lappend ncol m$bestpn
2650 2651 }
2651 2652 }
2652 2653 set pnum -1
2653 2654 foreach p $parents($id) {
2654 2655 incr pnum
2655 2656 if {![info exists delta($pnum)] || $pnum == $bestpn} continue
2656 2657 set olc [expr {$nlc + $delta($pnum)}]
2657 2658 set ol [expr {$l + $diffoffset($p)}]
2658 2659 incr diffoffset($p) $delta($pnum)
2659 2660 unset delta($pnum)
2660 2661 for {} {$olc > 0} {incr olc -1} {
2661 2662 $ctext insert end "-$filelines($p,$f,$ol)\n" m$pnum
2662 2663 incr ol
2663 2664 }
2664 2665 }
2665 2666 set endl [expr {$l + $nlc}]
2666 2667 if {$bestpn >= 0} {
2667 2668 # show this pretty much as a normal diff
2668 2669 set p [lindex $parents($id) $bestpn]
2669 2670 set ol [expr {$l + $diffoffset($p)}]
2670 2671 incr diffoffset($p) $delta($bestpn)
2671 2672 unset delta($bestpn)
2672 2673 for {set k $i} {$k < $j} {incr k} {
2673 2674 set e [lindex $events $k]
2674 2675 if {[lindex $e 2] != $bestpn} continue
2675 2676 set nl [lindex $e 0]
2676 2677 set ol [expr {$ol + $nl - $l}]
2677 2678 for {} {$l < $nl} {incr l} {
2678 2679 $ctext insert end "+$filelines($id,$f,$l)\n" $ncol
2679 2680 }
2680 2681 set c [lindex $e 3]
2681 2682 for {} {$c > 0} {incr c -1} {
2682 2683 $ctext insert end "-$filelines($p,$f,$ol)\n" m$bestpn
2683 2684 incr ol
2684 2685 }
2685 2686 set nl [lindex $e 1]
2686 2687 for {} {$l < $nl} {incr l} {
2687 2688 $ctext insert end "+$filelines($id,$f,$l)\n" mresult
2688 2689 }
2689 2690 }
2690 2691 }
2691 2692 for {} {$l < $endl} {incr l} {
2692 2693 $ctext insert end "+$filelines($id,$f,$l)\n" $ncol
2693 2694 }
2694 2695 }
2695 2696 while {$l < $grouplineend} {
2696 2697 $ctext insert end " $filelines($id,$f,$l)\n"
2697 2698 incr l
2698 2699 }
2699 2700 $ctext conf -state disabled
2700 2701 }
2701 2702
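# Score how close parent pnum is to the merge result over one group
# of events.  same and diff count the characters of unchanged and
# changed lines respectively, and the result is
# 200 * same / (2 * same + diff), so 100 means identical; processgroup
# uses the best parent scoring above 30 to show the group as an
# ordinary two-way diff.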
2702 2703 proc similarity {pnum l nlc f events} {
2703 2704 global diffmergeid parents diffoffset filelines
2704 2705
2705 2706 set id $diffmergeid
2706 2707 set p [lindex $parents($id) $pnum]
2707 2708 set ol [expr {$l + $diffoffset($p)}]
2708 2709 set endl [expr {$l + $nlc}]
2709 2710 set same 0
2710 2711 set diff 0
2711 2712 foreach e $events {
2712 2713 if {[lindex $e 2] != $pnum} continue
2713 2714 set nl [lindex $e 0]
2714 2715 set ol [expr {$ol + $nl - $l}]
2715 2716 for {} {$l < $nl} {incr l} {
2716 2717 incr same [string length $filelines($id,$f,$l)]
2717 2718 incr same
2718 2719 }
2719 2720 set oc [lindex $e 3]
2720 2721 for {} {$oc > 0} {incr oc -1} {
2721 2722 incr diff [string length $filelines($p,$f,$ol)]
2722 2723 incr diff
2723 2724 incr ol
2724 2725 }
2725 2726 set nl [lindex $e 1]
2726 2727 for {} {$l < $nl} {incr l} {
2727 2728 incr diff [string length $filelines($id,$f,$l)]
2728 2729 incr diff
2729 2730 }
2730 2731 }
2731 2732 for {} {$l < $endl} {incr l} {
2732 2733 incr same [string length $filelines($id,$f,$l)]
2733 2734 incr same
2734 2735 }
2735 2736 if {$same == 0} {
2736 2737 return 0
2737 2738 }
2738 2739 return [expr {200 * $same / (2 * $same + $diff)}]
2739 2740 }
2740 2741
2741 2742 proc startdiff {ids} {
2742 2743 global treediffs diffids treepending diffmergeid
2743 2744
2744 2745 set diffids $ids
2745 2746 catch {unset diffmergeid}
2746 2747 if {![info exists treediffs($ids)]} {
2747 2748 if {![info exists treepending]} {
2748 2749 gettreediffs $ids
2749 2750 }
2750 2751 } else {
2751 2752 addtocflist $ids
2752 2753 }
2753 2754 }
2754 2755
2755 2756 proc addtocflist {ids} {
2756 2757 global treediffs cflist
2757 2758 foreach f $treediffs($ids) {
2758 2759 $cflist insert end $f
2759 2760 }
2760 2761 getblobdiffs $ids
2761 2762 }
2762 2763
2763 2764 proc gettreediffs {ids} {
2764 2765 global treediff parents treepending
2765 2766 set treepending $ids
2766 2767 set treediff {}
2767 2768 set id [lindex $ids 0]
2768 2769 set p [lindex $ids 1]
2769 2770 if [catch {set gdtf [open "|hg debug-diff-tree -r $p $id" r]}] return
2770 2771 fconfigure $gdtf -blocking 0
2771 2772 fileevent $gdtf readable [list gettreediffline $gdtf $ids]
2772 2773 }
2773 2774
2774 2775 proc gettreediffline {gdtf ids} {
2775 2776 global treediff treediffs treepending diffids diffmergeid
2776 2777
2777 2778 set n [gets $gdtf line]
2778 2779 if {$n < 0} {
2779 2780 if {![eof $gdtf]} return
2780 2781 close $gdtf
2781 2782 set treediffs($ids) $treediff
2782 2783 unset treepending
2783 2784 if {$ids != $diffids} {
2784 2785 gettreediffs $diffids
2785 2786 } else {
2786 2787 if {[info exists diffmergeid]} {
2787 2788 contmergediff $ids
2788 2789 } else {
2789 2790 addtocflist $ids
2790 2791 }
2791 2792 }
2792 2793 return
2793 2794 }
2794 2795 set file [lindex $line 5]
2795 2796 lappend treediff $file
2796 2797 }
2797 2798
2798 2799 proc getblobdiffs {ids} {
2799 2800 global diffopts blobdifffd diffids env curdifftag curtagstart
2800 2801 global difffilestart nextupdate diffinhdr treediffs
2801 2802
2802 2803 set id [lindex $ids 0]
2803 2804 set p [lindex $ids 1]
2804 2805 set env(GIT_DIFF_OPTS) $diffopts
2805 2806 set cmd [list | hg debug-diff-tree -r -p -C $p $id]
2806 2807 if {[catch {set bdf [open $cmd r]} err]} {
2807 2808 puts "error getting diffs: $err"
2808 2809 return
2809 2810 }
2810 2811 set diffinhdr 0
2811 2812 fconfigure $bdf -blocking 0
2812 2813 set blobdifffd($ids) $bdf
2813 2814 set curdifftag Comments
2814 2815 set curtagstart 0.0
2815 2816 catch {unset difffilestart}
2816 2817 fileevent $bdf readable [list getblobdiffline $bdf $diffids]
2817 2818 set nextupdate [expr {[clock clicks -milliseconds] + 100}]
2818 2819 }
2819 2820
2820 2821 proc getblobdiffline {bdf ids} {
2821 2822 global diffids blobdifffd ctext curdifftag curtagstart
2822 2823 global diffnexthead diffnextnote difffilestart
2823 2824 global nextupdate diffinhdr treediffs
2824 2825 global gaudydiff
2825 2826
2826 2827 set n [gets $bdf line]
2827 2828 if {$n < 0} {
2828 2829 if {[eof $bdf]} {
2829 2830 close $bdf
2830 2831 if {$ids == $diffids && $bdf == $blobdifffd($ids)} {
2831 2832 $ctext tag add $curdifftag $curtagstart end
2832 2833 }
2833 2834 }
2834 2835 return
2835 2836 }
2836 2837 if {$ids != $diffids || $bdf != $blobdifffd($ids)} {
2837 2838 return
2838 2839 }
2839 2840 $ctext conf -state normal
2840 2841 if {[regexp {^diff --git a/(.*) b/(.*)} $line match fname newname]} {
2841 2842 # start of a new file
2842 2843 $ctext insert end "\n"
2843 2844 $ctext tag add $curdifftag $curtagstart end
2844 2845 set curtagstart [$ctext index "end - 1c"]
2845 2846 set header $newname
2846 2847 set here [$ctext index "end - 1c"]
2847 2848 set i [lsearch -exact $treediffs($diffids) $fname]
2848 2849 if {$i >= 0} {
2849 2850 set difffilestart($i) $here
2850 2851 incr i
2851 2852 $ctext mark set fmark.$i $here
2852 2853 $ctext mark gravity fmark.$i left
2853 2854 }
2854 2855 if {$newname != $fname} {
2855 2856 set i [lsearch -exact $treediffs($diffids) $newname]
2856 2857 if {$i >= 0} {
2857 2858 set difffilestart($i) $here
2858 2859 incr i
2859 2860 $ctext mark set fmark.$i $here
2860 2861 $ctext mark gravity fmark.$i left
2861 2862 }
2862 2863 }
2863 2864 set curdifftag "f:$fname"
2864 2865 $ctext tag delete $curdifftag
2865 2866 set l [expr {(78 - [string length $header]) / 2}]
2866 2867 set pad [string range "----------------------------------------" 1 $l]
2867 2868 $ctext insert end "$pad $header $pad\n" filesep
2868 2869 set diffinhdr 1
2869 2870 } elseif {[regexp {^(---|\+\+\+)} $line]} {
2870 2871 set diffinhdr 0
2871 2872 } elseif {[regexp {^@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@(.*)} \
2872 2873 $line match f1l f1c f2l f2c rest]} {
2873 2874 if {$gaudydiff} {
2874 2875 $ctext insert end "\t" hunksep
2875 2876 $ctext insert end " $f1l " d0 " $f2l " d1
2876 2877 $ctext insert end " $rest \n" hunksep
2877 2878 } else {
2878 2879 $ctext insert end "$line\n" hunksep
2879 2880 }
2880 2881 set diffinhdr 0
2881 2882 } else {
2882 2883 set x [string range $line 0 0]
2883 2884 if {$x == "-" || $x == "+"} {
2884 2885 set tag [expr {$x == "+"}]
2885 2886 if {$gaudydiff} {
2886 2887 set line [string range $line 1 end]
2887 2888 }
2888 2889 $ctext insert end "$line\n" d$tag
2889 2890 } elseif {$x == " "} {
2890 2891 if {$gaudydiff} {
2891 2892 set line [string range $line 1 end]
2892 2893 }
2893 2894 $ctext insert end "$line\n"
2894 2895 } elseif {$diffinhdr || $x == "\\"} {
2895 2896 # e.g. "\ No newline at end of file"
2896 2897 $ctext insert end "$line\n" filesep
2897 2898 } else {
2898 2899 # Something else we don't recognize
2899 2900 if {$curdifftag != "Comments"} {
2900 2901 $ctext insert end "\n"
2901 2902 $ctext tag add $curdifftag $curtagstart end
2902 2903 set curtagstart [$ctext index "end - 1c"]
2903 2904 set curdifftag Comments
2904 2905 }
2905 2906 $ctext insert end "$line\n" filesep
2906 2907 }
2907 2908 }
2908 2909 $ctext conf -state disabled
2909 2910 if {[clock clicks -milliseconds] >= $nextupdate} {
2910 2911 incr nextupdate 100
2911 2912 fileevent $bdf readable {}
2912 2913 update
2913 2914 fileevent $bdf readable "getblobdiffline $bdf {$ids}"
2914 2915 }
2915 2916 }
2916 2917
2917 2918 proc nextfile {} {
2918 2919 global difffilestart ctext
2919 2920 set here [$ctext index @0,0]
2920 2921 for {set i 0} {[info exists difffilestart($i)]} {incr i} {
2921 2922 if {[$ctext compare $difffilestart($i) > $here]} {
2922 2923 if {![info exists pos]
2923 2924 || [$ctext compare $difffilestart($i) < $pos]} {
2924 2925 set pos $difffilestart($i)
2925 2926 }
2926 2927 }
2927 2928 }
2928 2929 if {[info exists pos]} {
2929 2930 $ctext yview $pos
2930 2931 }
2931 2932 }
2932 2933
2933 2934 proc listboxsel {} {
2934 2935 global ctext cflist currentid
2935 2936 if {![info exists currentid]} return
2936 2937 set sel [lsort [$cflist curselection]]
2937 2938 if {$sel eq {}} return
2938 2939 set first [lindex $sel 0]
2939 2940 catch {$ctext yview fmark.$first}
2940 2941 }
2941 2942
2942 2943 proc setcoords {} {
2943 2944 global linespc charspc canvx0 canvy0 mainfont
2944 2945 global xspc1 xspc2 lthickness
2945 2946
2946 2947 set linespc [font metrics $mainfont -linespace]
2947 2948 set charspc [font measure $mainfont "m"]
2948 2949 set canvy0 [expr 3 + 0.5 * $linespc]
2949 2950 set canvx0 [expr 3 + 0.5 * $linespc]
2950 2951 set lthickness [expr {int($linespc / 9) + 1}]
2951 2952 set xspc1(0) $linespc
2952 2953 set xspc2 $linespc
2953 2954 }
2954 2955
2955 2956 proc redisplay {} {
2956 2957 global stopped redisplaying phase
2957 2958 if {$stopped > 1} return
2958 2959 if {$phase == "getcommits"} return
2959 2960 set redisplaying 1
2960 2961 if {$phase == "drawgraph" || $phase == "incrdraw"} {
2961 2962 set stopped 1
2962 2963 } else {
2963 2964 drawgraph
2964 2965 }
2965 2966 }
2966 2967
2967 2968 proc incrfont {inc} {
2968 2969 global mainfont namefont textfont ctext canv phase
2969 2970 global stopped entries
2970 2971 unmarkmatches
2971 2972 set mainfont [lreplace $mainfont 1 1 [expr {[lindex $mainfont 1] + $inc}]]
2972 2973 set namefont [lreplace $namefont 1 1 [expr {[lindex $namefont 1] + $inc}]]
2973 2974 set textfont [lreplace $textfont 1 1 [expr {[lindex $textfont 1] + $inc}]]
2974 2975 setcoords
2975 2976 $ctext conf -font $textfont
2976 2977 $ctext tag conf filesep -font [concat $textfont bold]
2977 2978 foreach e $entries {
2978 2979 $e conf -font $mainfont
2979 2980 }
2980 2981 if {$phase == "getcommits"} {
2981 2982 $canv itemconf textitems -font $mainfont
2982 2983 }
2983 2984 redisplay
2984 2985 }
2985 2986
2986 2987 proc clearsha1 {} {
2987 2988 global sha1entry sha1string
2988 2989 if {[string length $sha1string] == 40} {
2989 2990 $sha1entry delete 0 end
2990 2991 }
2991 2992 }
2992 2993
2993 2994 proc sha1change {n1 n2 op} {
2994 2995 global sha1string currentid sha1but
2995 2996 if {$sha1string == {}
2996 2997 || ([info exists currentid] && $sha1string == $currentid)} {
2997 2998 set state disabled
2998 2999 } else {
2999 3000 set state normal
3000 3001 }
3001 3002 if {[$sha1but cget -state] == $state} return
3002 3003 if {$state == "normal"} {
3003 3004 $sha1but conf -state normal -relief raised -text "Goto: "
3004 3005 } else {
3005 3006 $sha1but conf -state disabled -relief flat -text "SHA1 ID: "
3006 3007 }
3007 3008 }
3008 3009
3009 3010 proc gotocommit {} {
3010 3011 global sha1string currentid idline tagids
3011 3012 global lineid numcommits
3012 3013
3013 3014 if {$sha1string == {}
3014 3015 || ([info exists currentid] && $sha1string == $currentid)} return
3015 3016 if {[info exists tagids($sha1string)]} {
3016 3017 set id $tagids($sha1string)
3017 3018 } else {
3018 3019 set id [string tolower $sha1string]
3019 3020 if {[regexp {^[0-9a-f]{4,39}$} $id]} {
3020 3021 set matches {}
3021 3022 for {set l 0} {$l < $numcommits} {incr l} {
3022 3023 if {[string match $id* $lineid($l)]} {
3023 3024 lappend matches $lineid($l)
3024 3025 }
3025 3026 }
3026 3027 if {$matches ne {}} {
3027 3028 if {[llength $matches] > 1} {
3028 3029 error_popup "Short SHA1 id $id is ambiguous"
3029 3030 return
3030 3031 }
3031 3032 set id [lindex $matches 0]
3032 3033 }
3033 3034 }
3034 3035 }
3035 3036 if {[info exists idline($id)]} {
3036 3037 selectline $idline($id) 1
3037 3038 return
3038 3039 }
3039 3040 if {[regexp {^[0-9a-fA-F]{4,}$} $sha1string]} {
3040 3041 set type "SHA1 id"
3041 3042 } else {
3042 3043 set type "Tag"
3043 3044 }
3044 3045 error_popup "$type $sha1string is not known"
3045 3046 }
3046 3047
3047 3048 proc lineenter {x y id} {
3048 3049 global hoverx hovery hoverid hovertimer
3049 3050 global commitinfo canv
3050 3051
3051 3052 if {![info exists commitinfo($id)]} return
3052 3053 set hoverx $x
3053 3054 set hovery $y
3054 3055 set hoverid $id
3055 3056 if {[info exists hovertimer]} {
3056 3057 after cancel $hovertimer
3057 3058 }
3058 3059 set hovertimer [after 500 linehover]
3059 3060 $canv delete hover
3060 3061 }
3061 3062
3062 3063 proc linemotion {x y id} {
3063 3064 global hoverx hovery hoverid hovertimer
3064 3065
3065 3066 if {[info exists hoverid] && $id == $hoverid} {
3066 3067 set hoverx $x
3067 3068 set hovery $y
3068 3069 if {[info exists hovertimer]} {
3069 3070 after cancel $hovertimer
3070 3071 }
3071 3072 set hovertimer [after 500 linehover]
3072 3073 }
3073 3074 }
3074 3075
3075 3076 proc lineleave {id} {
3076 3077 global hoverid hovertimer canv
3077 3078
3078 3079 if {[info exists hoverid] && $id == $hoverid} {
3079 3080 $canv delete hover
3080 3081 if {[info exists hovertimer]} {
3081 3082 after cancel $hovertimer
3082 3083 unset hovertimer
3083 3084 }
3084 3085 unset hoverid
3085 3086 }
3086 3087 }
3087 3088
3088 3089 proc linehover {} {
3089 3090 global hoverx hovery hoverid hovertimer
3090 3091 global canv linespc lthickness
3091 3092 global commitinfo mainfont
3092 3093
3093 3094 set text [lindex $commitinfo($hoverid) 0]
3094 3095 set ymax [lindex [$canv cget -scrollregion] 3]
3095 3096 if {$ymax == {}} return
3096 3097 set yfrac [lindex [$canv yview] 0]
3097 3098 set x [expr {$hoverx + 2 * $linespc}]
3098 3099 set y [expr {$hovery + $yfrac * $ymax - $linespc / 2}]
3099 3100 set x0 [expr {$x - 2 * $lthickness}]
3100 3101 set y0 [expr {$y - 2 * $lthickness}]
3101 3102 set x1 [expr {$x + [font measure $mainfont $text] + 2 * $lthickness}]
3102 3103 set y1 [expr {$y + $linespc + 2 * $lthickness}]
3103 3104 set t [$canv create rectangle $x0 $y0 $x1 $y1 \
3104 3105 -fill \#ffff80 -outline black -width 1 -tags hover]
3105 3106 $canv raise $t
3106 3107 set t [$canv create text $x $y -anchor nw -text $text -tags hover]
3107 3108 $canv raise $t
3108 3109 }
3109 3110
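# clickisonarrow reports whether a click landed within a few pixels
# of an arrowed end of a commit's graph line, returning "up" or
# "down"; arrowjump then scrolls the graph canvas so that the
# continuation of that line is roughly centred in the view.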
3110 3111 proc clickisonarrow {id y} {
3111 3112 global mainline mainlinearrow sidelines lthickness
3112 3113
3113 3114 set thresh [expr {2 * $lthickness + 6}]
3114 3115 if {[info exists mainline($id)]} {
3115 3116 if {$mainlinearrow($id) ne "none"} {
3116 3117 if {abs([lindex $mainline($id) 1] - $y) < $thresh} {
3117 3118 return "up"
3118 3119 }
3119 3120 }
3120 3121 }
3121 3122 if {[info exists sidelines($id)]} {
3122 3123 foreach ls $sidelines($id) {
3123 3124 set coords [lindex $ls 0]
3124 3125 set arrow [lindex $ls 2]
3125 3126 if {$arrow eq "first" || $arrow eq "both"} {
3126 3127 if {abs([lindex $coords 1] - $y) < $thresh} {
3127 3128 return "up"
3128 3129 }
3129 3130 }
3130 3131 if {$arrow eq "last" || $arrow eq "both"} {
3131 3132 if {abs([lindex $coords end] - $y) < $thresh} {
3132 3133 return "down"
3133 3134 }
3134 3135 }
3135 3136 }
3136 3137 }
3137 3138 return {}
3138 3139 }
3139 3140
3140 3141 proc arrowjump {id dirn y} {
3141 3142 global mainline sidelines canv
3142 3143
3143 3144 set yt {}
3144 3145 if {$dirn eq "down"} {
3145 3146 if {[info exists mainline($id)]} {
3146 3147 set y1 [lindex $mainline($id) 1]
3147 3148 if {$y1 > $y} {
3148 3149 set yt $y1
3149 3150 }
3150 3151 }
3151 3152 if {[info exists sidelines($id)]} {
3152 3153 foreach ls $sidelines($id) {
3153 3154 set y1 [lindex $ls 0 1]
3154 3155 if {$y1 > $y && ($yt eq {} || $y1 < $yt)} {
3155 3156 set yt $y1
3156 3157 }
3157 3158 }
3158 3159 }
3159 3160 } else {
3160 3161 if {[info exists sidelines($id)]} {
3161 3162 foreach ls $sidelines($id) {
3162 3163 set y1 [lindex $ls 0 end]
3163 3164 if {$y1 < $y && ($yt eq {} || $y1 > $yt)} {
3164 3165 set yt $y1
3165 3166 }
3166 3167 }
3167 3168 }
3168 3169 }
3169 3170 if {$yt eq {}} return
3170 3171 set ymax [lindex [$canv cget -scrollregion] 3]
3171 3172 if {$ymax eq {} || $ymax <= 0} return
3172 3173 set view [$canv yview]
3173 3174 set yspan [expr {[lindex $view 1] - [lindex $view 0]}]
3174 3175 set yfrac [expr {$yt / $ymax - $yspan / 2}]
3175 3176 if {$yfrac < 0} {
3176 3177 set yfrac 0
3177 3178 }
3178 3179 $canv yview moveto $yfrac
3179 3180 }
3180 3181
3181 3182 proc lineclick {x y id isnew} {
3182 3183 global ctext commitinfo children cflist canv thickerline
3183 3184
3184 3185 unmarkmatches
3185 3186 unselectline
3186 3187 normalline
3187 3188 $canv delete hover
3188 3189 # draw this line thicker than normal
3189 3190 drawlines $id 1
3190 3191 set thickerline $id
3191 3192 if {$isnew} {
3192 3193 set ymax [lindex [$canv cget -scrollregion] 3]
3193 3194 if {$ymax eq {}} return
3194 3195 set yfrac [lindex [$canv yview] 0]
3195 3196 set y [expr {$y + $yfrac * $ymax}]
3196 3197 }
3197 3198 set dirn [clickisonarrow $id $y]
3198 3199 if {$dirn ne {}} {
3199 3200 arrowjump $id $dirn $y
3200 3201 return
3201 3202 }
3202 3203
3203 3204 if {$isnew} {
3204 3205 addtohistory [list lineclick $x $y $id 0]
3205 3206 }
3206 3207 # fill the details pane with info about this line
3207 3208 $ctext conf -state normal
3208 3209 $ctext delete 0.0 end
3209 3210 $ctext tag conf link -foreground blue -underline 1
3210 3211 $ctext tag bind link <Enter> { %W configure -cursor hand2 }
3211 3212 $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
3212 3213 $ctext insert end "Parent:\t"
3213 3214 $ctext insert end $id [list link link0]
3214 3215 $ctext tag bind link0 <1> [list selbyid $id]
3215 3216 set info $commitinfo($id)
3216 3217 $ctext insert end "\n\t[lindex $info 0]\n"
3217 3218 $ctext insert end "\tAuthor:\t[lindex $info 1]\n"
3218 3219 $ctext insert end "\tDate:\t[lindex $info 2]\n"
3219 3220 if {[info exists children($id)]} {
3220 3221 $ctext insert end "\nChildren:"
3221 3222 set i 0
3222 3223 foreach child $children($id) {
3223 3224 incr i
3224 3225 set info $commitinfo($child)
3225 3226 $ctext insert end "\n\t"
3226 3227 $ctext insert end $child [list link link$i]
3227 3228 $ctext tag bind link$i <1> [list selbyid $child]
3228 3229 $ctext insert end "\n\t[lindex $info 0]"
3229 3230 $ctext insert end "\n\tAuthor:\t[lindex $info 1]"
3230 3231 $ctext insert end "\n\tDate:\t[lindex $info 2]\n"
3231 3232 }
3232 3233 }
3233 3234 $ctext conf -state disabled
3234 3235
3235 3236 $cflist delete 0 end
3236 3237 }
3237 3238
3238 3239 proc normalline {} {
3239 3240 global thickerline
3240 3241 if {[info exists thickerline]} {
3241 3242 drawlines $thickerline 0
3242 3243 unset thickerline
3243 3244 }
3244 3245 }
3245 3246
3246 3247 proc selbyid {id} {
3247 3248 global idline
3248 3249 if {[info exists idline($id)]} {
3249 3250 selectline $idline($id) 1
3250 3251 }
3251 3252 }
3252 3253
3253 3254 proc mstime {} {
3254 3255 global startmstime
3255 3256 if {![info exists startmstime]} {
3256 3257 set startmstime [clock clicks -milliseconds]
3257 3258 }
3258 3259 return [format "%.3f" [expr {([clock clicks -milliseconds] - $startmstime) / 1000.0}]]
3259 3260 }
3260 3261
3261 3262 proc rowmenu {x y id} {
3262 3263 global rowctxmenu idline selectedline rowmenuid
3263 3264
3264 3265 if {![info exists selectedline] || $idline($id) eq $selectedline} {
3265 3266 set state disabled
3266 3267 } else {
3267 3268 set state normal
3268 3269 }
3269 3270 $rowctxmenu entryconfigure 0 -state $state
3270 3271 $rowctxmenu entryconfigure 1 -state $state
3271 3272 $rowctxmenu entryconfigure 2 -state $state
3272 3273 set rowmenuid $id
3273 3274 tk_popup $rowctxmenu $x $y
3274 3275 }
3275 3276
3276 3277 proc diffvssel {dirn} {
3277 3278 global rowmenuid selectedline lineid
3278 3279
3279 3280 if {![info exists selectedline]} return
3280 3281 if {$dirn} {
3281 3282 set oldid $lineid($selectedline)
3282 3283 set newid $rowmenuid
3283 3284 } else {
3284 3285 set oldid $rowmenuid
3285 3286 set newid $lineid($selectedline)
3286 3287 }
3287 3288 addtohistory [list doseldiff $oldid $newid]
3288 3289 doseldiff $oldid $newid
3289 3290 }
3290 3291
3291 3292 proc doseldiff {oldid newid} {
3292 3293 global ctext cflist
3293 3294 global commitinfo
3294 3295
3295 3296 $ctext conf -state normal
3296 3297 $ctext delete 0.0 end
3297 3298 $ctext mark set fmark.0 0.0
3298 3299 $ctext mark gravity fmark.0 left
3299 3300 $cflist delete 0 end
3300 3301 $cflist insert end "Top"
3301 3302 $ctext insert end "From "
3302 3303 $ctext tag conf link -foreground blue -underline 1
3303 3304 $ctext tag bind link <Enter> { %W configure -cursor hand2 }
3304 3305 $ctext tag bind link <Leave> { %W configure -cursor $curtextcursor }
3305 3306 $ctext tag bind link0 <1> [list selbyid $oldid]
3306 3307 $ctext insert end $oldid [list link link0]
3307 3308 $ctext insert end "\n "
3308 3309 $ctext insert end [lindex $commitinfo($oldid) 0]
3309 3310 $ctext insert end "\n\nTo "
3310 3311 $ctext tag bind link1 <1> [list selbyid $newid]
3311 3312 $ctext insert end $newid [list link link1]
3312 3313 $ctext insert end "\n "
3313 3314 $ctext insert end [lindex $commitinfo($newid) 0]
3314 3315 $ctext insert end "\n"
3315 3316 $ctext conf -state disabled
3316 3317 $ctext tag delete Comments
3317 3318 $ctext tag remove found 1.0 end
3318 3319 startdiff [list $newid $oldid]
3319 3320 }
3320 3321
3321 3322 proc mkpatch {} {
3322 3323 global rowmenuid currentid commitinfo patchtop patchnum
3323 3324
3324 3325 if {![info exists currentid]} return
3325 3326 set oldid $currentid
3326 3327 set oldhead [lindex $commitinfo($oldid) 0]
3327 3328 set newid $rowmenuid
3328 3329 set newhead [lindex $commitinfo($newid) 0]
3329 3330 set top .patch
3330 3331 set patchtop $top
3331 3332 catch {destroy $top}
3332 3333 toplevel $top
3333 3334 label $top.title -text "Generate patch"
3334 3335 grid $top.title - -pady 10
3335 3336 label $top.from -text "From:"
3336 3337 entry $top.fromsha1 -width 40 -relief flat
3337 3338 $top.fromsha1 insert 0 $oldid
3338 3339 $top.fromsha1 conf -state readonly
3339 3340 grid $top.from $top.fromsha1 -sticky w
3340 3341 entry $top.fromhead -width 60 -relief flat
3341 3342 $top.fromhead insert 0 $oldhead
3342 3343 $top.fromhead conf -state readonly
3343 3344 grid x $top.fromhead -sticky w
3344 3345 label $top.to -text "To:"
3345 3346 entry $top.tosha1 -width 40 -relief flat
3346 3347 $top.tosha1 insert 0 $newid
3347 3348 $top.tosha1 conf -state readonly
3348 3349 grid $top.to $top.tosha1 -sticky w
3349 3350 entry $top.tohead -width 60 -relief flat
3350 3351 $top.tohead insert 0 $newhead
3351 3352 $top.tohead conf -state readonly
3352 3353 grid x $top.tohead -sticky w
3353 3354 button $top.rev -text "Reverse" -command mkpatchrev -padx 5
3354 3355 grid $top.rev x -pady 10
3355 3356 label $top.flab -text "Output file:"
3356 3357 entry $top.fname -width 60
3357 3358 $top.fname insert 0 [file normalize "patch$patchnum.patch"]
3358 3359 incr patchnum
3359 3360 grid $top.flab $top.fname -sticky w
3360 3361 frame $top.buts
3361 3362 button $top.buts.gen -text "Generate" -command mkpatchgo
3362 3363 button $top.buts.can -text "Cancel" -command mkpatchcan
3363 3364 grid $top.buts.gen $top.buts.can
3364 3365 grid columnconfigure $top.buts 0 -weight 1 -uniform a
3365 3366 grid columnconfigure $top.buts 1 -weight 1 -uniform a
3366 3367 grid $top.buts - -pady 10 -sticky ew
3367 3368 focus $top.fname
3368 3369 }
3369 3370
3370 3371 proc mkpatchrev {} {
3371 3372 global patchtop
3372 3373
3373 3374 set oldid [$patchtop.fromsha1 get]
3374 3375 set oldhead [$patchtop.fromhead get]
3375 3376 set newid [$patchtop.tosha1 get]
3376 3377 set newhead [$patchtop.tohead get]
3377 3378 foreach e [list fromsha1 fromhead tosha1 tohead] \
3378 3379 v [list $newid $newhead $oldid $oldhead] {
3379 3380 $patchtop.$e conf -state normal
3380 3381 $patchtop.$e delete 0 end
3381 3382 $patchtop.$e insert 0 $v
3382 3383 $patchtop.$e conf -state readonly
3383 3384 }
3384 3385 }
3385 3386
3386 3387 proc mkpatchgo {} {
3387 3388 global patchtop
3388 3389
3389 3390 set oldid [$patchtop.fromsha1 get]
3390 3391 set newid [$patchtop.tosha1 get]
3391 3392 set fname [$patchtop.fname get]
3392 3393 if {[catch {exec hg debug-diff-tree -p $oldid $newid >$fname &} err]} {
3393 3394 error_popup "Error creating patch: $err"
3394 3395 }
3395 3396 catch {destroy $patchtop}
3396 3397 unset patchtop
3397 3398 }
3398 3399
3399 3400 proc mkpatchcan {} {
3400 3401 global patchtop
3401 3402
3402 3403 catch {destroy $patchtop}
3403 3404 unset patchtop
3404 3405 }
3405 3406
3406 3407 proc mktag {} {
3407 3408 global rowmenuid mktagtop commitinfo
3408 3409
3409 3410 set top .maketag
3410 3411 set mktagtop $top
3411 3412 catch {destroy $top}
3412 3413 toplevel $top
3413 3414 label $top.title -text "Create tag"
3414 3415 grid $top.title - -pady 10
3415 3416 label $top.id -text "ID:"
3416 3417 entry $top.sha1 -width 40 -relief flat
3417 3418 $top.sha1 insert 0 $rowmenuid
3418 3419 $top.sha1 conf -state readonly
3419 3420 grid $top.id $top.sha1 -sticky w
3420 3421 entry $top.head -width 60 -relief flat
3421 3422 $top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
3422 3423 $top.head conf -state readonly
3423 3424 grid x $top.head -sticky w
3424 3425 label $top.tlab -text "Tag name:"
3425 3426 entry $top.tag -width 60
3426 3427 grid $top.tlab $top.tag -sticky w
3427 3428 frame $top.buts
3428 3429 button $top.buts.gen -text "Create" -command mktaggo
3429 3430 button $top.buts.can -text "Cancel" -command mktagcan
3430 3431 grid $top.buts.gen $top.buts.can
3431 3432 grid columnconfigure $top.buts 0 -weight 1 -uniform a
3432 3433 grid columnconfigure $top.buts 1 -weight 1 -uniform a
3433 3434 grid $top.buts - -pady 10 -sticky ew
3434 3435 focus $top.tag
3435 3436 }
3436 3437
3437 3438 proc domktag {} {
3438 3439 global mktagtop env tagids idtags
3439 3440
3440 3441 set id [$mktagtop.sha1 get]
3441 3442 set tag [$mktagtop.tag get]
3442 3443 if {$tag == {}} {
3443 3444 error_popup "No tag name specified"
3444 3445 return
3445 3446 }
3446 3447 if {[info exists tagids($tag)]} {
3447 3448 error_popup "Tag \"$tag\" already exists"
3448 3449 return
3449 3450 }
3450 3451 if {[catch {
3451 3452 set out [exec hg tag -r $id $tag]
3452 3453 } err]} {
3453 3454 error_popup "Error creating tag: $err"
3454 3455 return
3455 3456 }
3456 3457
3457 3458 set tagids($tag) $id
3458 3459 lappend idtags($id) $tag
3459 3460 redrawtags $id
3460 3461 }
3461 3462
3462 3463 proc redrawtags {id} {
3463 3464 global canv linehtag idline idpos selectedline
3464 3465
3465 3466 if {![info exists idline($id)]} return
3466 3467 $canv delete tag.$id
3467 3468 set xt [eval drawtags $id $idpos($id)]
3468 3469 $canv coords $linehtag($idline($id)) $xt [lindex $idpos($id) 2]
3469 3470 if {[info exists selectedline] && $selectedline == $idline($id)} {
3470 3471 selectline $selectedline 0
3471 3472 }
3472 3473 }
3473 3474
3474 3475 proc mktagcan {} {
3475 3476 global mktagtop
3476 3477
3477 3478 catch {destroy $mktagtop}
3478 3479 unset mktagtop
3479 3480 }
3480 3481
3481 3482 proc mktaggo {} {
3482 3483 domktag
3483 3484 mktagcan
3484 3485 }
3485 3486
3486 3487 proc writecommit {} {
3487 3488 global rowmenuid wrcomtop commitinfo wrcomcmd
3488 3489
3489 3490 set top .writecommit
3490 3491 set wrcomtop $top
3491 3492 catch {destroy $top}
3492 3493 toplevel $top
3493 3494 label $top.title -text "Write commit to file"
3494 3495 grid $top.title - -pady 10
3495 3496 label $top.id -text "ID:"
3496 3497 entry $top.sha1 -width 40 -relief flat
3497 3498 $top.sha1 insert 0 $rowmenuid
3498 3499 $top.sha1 conf -state readonly
3499 3500 grid $top.id $top.sha1 -sticky w
3500 3501 entry $top.head -width 60 -relief flat
3501 3502 $top.head insert 0 [lindex $commitinfo($rowmenuid) 0]
3502 3503 $top.head conf -state readonly
3503 3504 grid x $top.head -sticky w
3504 3505 label $top.clab -text "Command:"
3505 3506 entry $top.cmd -width 60 -textvariable wrcomcmd
3506 3507 grid $top.clab $top.cmd -sticky w -pady 10
3507 3508 label $top.flab -text "Output file:"
3508 3509 entry $top.fname -width 60
3509 3510 $top.fname insert 0 [file normalize "commit-[string range $rowmenuid 0 6]"]
3510 3511 grid $top.flab $top.fname -sticky w
3511 3512 frame $top.buts
3512 3513 button $top.buts.gen -text "Write" -command wrcomgo
3513 3514 button $top.buts.can -text "Cancel" -command wrcomcan
3514 3515 grid $top.buts.gen $top.buts.can
3515 3516 grid columnconfigure $top.buts 0 -weight 1 -uniform a
3516 3517 grid columnconfigure $top.buts 1 -weight 1 -uniform a
3517 3518 grid $top.buts - -pady 10 -sticky ew
3518 3519 focus $top.fname
3519 3520 }
3520 3521
3521 3522 proc wrcomgo {} {
3522 3523 global wrcomtop
3523 3524
3524 3525 set id [$wrcomtop.sha1 get]
3525 3526 set cmd "echo $id | [$wrcomtop.cmd get]"
3526 3527 set fname [$wrcomtop.fname get]
3527 3528 if {[catch {exec sh -c $cmd >$fname &} err]} {
3528 3529 error_popup "Error writing commit: $err"
3529 3530 }
3530 3531 catch {destroy $wrcomtop}
3531 3532 unset wrcomtop
3532 3533 }
3533 3534
3534 3535 proc wrcomcan {} {
3535 3536 global wrcomtop
3536 3537
3537 3538 catch {destroy $wrcomtop}
3538 3539 unset wrcomtop
3539 3540 }
3540 3541
3541 3542 proc listrefs {id} {
3542 3543 global idtags idheads idotherrefs
3543 3544
3544 3545 set x {}
3545 3546 if {[info exists idtags($id)]} {
3546 3547 set x $idtags($id)
3547 3548 }
3548 3549 set y {}
3549 3550 if {[info exists idheads($id)]} {
3550 3551 set y $idheads($id)
3551 3552 }
3552 3553 set z {}
3553 3554 if {[info exists idotherrefs($id)]} {
3554 3555 set z $idotherrefs($id)
3555 3556 }
3556 3557 return [list $x $y $z]
3557 3558 }
3558 3559
3559 3560 proc rereadrefs {} {
3560 3561 global idtags idheads idotherrefs
3561 3562 global tagids headids otherrefids
3562 3563
3563 3564 set refids [concat [array names idtags] \
3564 3565 [array names idheads] [array names idotherrefs]]
3565 3566 foreach id $refids {
3566 3567 if {![info exists ref($id)]} {
3567 3568 set ref($id) [listrefs $id]
3568 3569 }
3569 3570 }
3570 3571 foreach v {tagids idtags headids idheads otherrefids idotherrefs} {
3571 3572 catch {unset $v}
3572 3573 }
3573 3574 readrefs
3574 3575 set refids [lsort -unique [concat $refids [array names idtags] \
3575 3576 [array names idheads] [array names idotherrefs]]]
3576 3577 foreach id $refids {
3577 3578 set v [listrefs $id]
3578 3579 if {![info exists ref($id)] || $ref($id) != $v} {
3579 3580 redrawtags $id
3580 3581 }
3581 3582 }
3582 3583 }
3583 3584
3584 3585 proc showtag {tag isnew} {
3585 3586 global ctext cflist tagcontents tagids linknum
3586 3587
3587 3588 if {$isnew} {
3588 3589 addtohistory [list showtag $tag 0]
3589 3590 }
3590 3591 $ctext conf -state normal
3591 3592 $ctext delete 0.0 end
3592 3593 set linknum 0
3593 3594 if {[info exists tagcontents($tag)]} {
3594 3595 set text $tagcontents($tag)
3595 3596 } else {
3596 3597 set text "Tag: $tag\nId: $tagids($tag)"
3597 3598 }
3598 3599 appendwithlinks $text
3599 3600 $ctext conf -state disabled
3600 3601 $cflist delete 0 end
3601 3602 }
3602 3603
3603 3604 proc doquit {} {
3604 3605 global stopped
3605 3606 set stopped 100
3606 3607 destroy .
3607 3608 }
3608 3609
3609 3610 # defaults...
3610 3611 set datemode 0
3611 3612 set boldnames 0
3612 3613 set diffopts "-U 5 -p"
3613 3614 set wrcomcmd "hg debug-diff-tree --stdin -p --pretty"
3614 3615
3615 3616 set mainfont {Helvetica 9}
3616 3617 set textfont {Courier 9}
3617 3618 set findmergefiles 0
3618 3619 set gaudydiff 0
3619 3620 set maxgraphpct 50
3620 3621 set maxwidth 16
3621 3622
3622 3623 set colors {green red blue magenta darkgrey brown orange}
3623 3624
3624 3625 catch {source ~/.gitk}
3625 3626
3626 3627 set namefont $mainfont
3627 3628 if {$boldnames} {
3628 3629 lappend namefont bold
3629 3630 }
3630 3631
3631 3632 set revtreeargs {}
3632 3633 foreach arg $argv {
3633 3634 switch -regexp -- $arg {
3634 3635 "^$" { }
3635 3636 "^-b" { set boldnames 1 }
3636 3637 "^-d" { set datemode 1 }
3637 3638 default {
3638 3639 lappend revtreeargs $arg
3639 3640 }
3640 3641 }
3641 3642 }
3642 3643
3643 3644 set history {}
3644 3645 set historyindex 0
3645 3646
3646 3647 set stopped 0
3647 3648 set redisplaying 0
3648 3649 set stuffsaved 0
3649 3650 set patchnum 0
3650 3651 setcoords
3651 3652 makewindow
3652 3653 readrefs
3653 3654 getcommits $revtreeargs
@@ -1,136 +1,136 @@
1 1 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
2 2 <html>
3 3 <head>
4 4 <title>Mercurial for Windows</title>
5 5 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" >
6 6 <style type="text/css">
7 7 <!--
8 8 .indented
9 9 {
10 10 padding-left: 10pt;
11 11 }
12 12 -->
13 13 </style>
14 14 </head>
15 15
16 16 <body>
17 <h1>Mercurial version 0.8.1 for Windows</h1>
17 <h1>Mercurial version 0.9 for Windows</h1>
18 18
19 19 <p>Welcome to Mercurial for Windows!</p>
20 20
21 21 <p>Mercurial is a command-line application. You must run it from
22 22 the Windows command prompt (or if you're hard core, a <a
23 23 href="http://www.mingw.org/">MinGW</a> shell).</p>
24 24
25 25 <div class="indented"><p><i>Note: the standard <a
26 26 href="http://www.mingw.org/">MinGW</a> msys startup script uses
27 27 rxvt, which has problems setting up standard input and output.
28 28 Running bash directly works correctly.</i></p></div>
29 29
30 30 <p>For documentation, please visit the <a
31 31 href="http://www.selenic.com/mercurial">Mercurial web site</a>.</p>
32 32
33 33 <p>By default, Mercurial installs to <tt>C:\Mercurial</tt>. The
34 34 Mercurial command is called <tt>hg.exe</tt>. To run this
35 35 command, the install directory must be in your search path.</p>
36 36
37 37 <h2>Setting your search path temporarily</h2>
38 38
39 39 <p>To set your search path temporarily, type the following into a
40 40 command prompt window:</p>
41 41
42 42 <pre>
43 43 set PATH=C:\Mercurial;%PATH%
44 44 </pre>
45 45
46 46 <h2>Setting your search path permanently</h2>
47 47
48 48 <p>To set your search path permanently, perform the following
49 49 steps. These instructions are for Windows NT, 2000 and XP.</p>
50 50
51 51 <ol>
52 52 <li>Open the Control Panel. Under Windows XP, select the
53 53 "Classic View".</li>
54 54
55 55 <li>Double-click on the "System" control panel.</li>
56 56
57 57 <li>Click on the "Advanced" tab.</li>
58 58
59 59 <li>Click on "Environment Variables". You'll find this near the
60 60 bottom of the window.</li>
61 61
62 62 <li>Under "System variables", you will see "Path". Double-click
63 63 it.</li>
64 64
65 65 <li>Edit "Variable value". Each path element is separated by a
66 66 semicolon (";") character. Append a semicolon to the end of the
67 67 list, followed by the path where you installed Mercurial
68 68 (e.g. <tt>C:\Mercurial</tt>).</li>
69 69
70 70 <li>Click on the various "OK" buttons until you've completely
71 71 exited from the System control panel.</li>
72 72
73 73 <li>Log out and log back in, or restart your system.</li>
74 74
75 75 <li>The next time you run the Windows command prompt, you will be
76 76 able to run the <tt>hg</tt> command without any special
77 77 help.</li>
78 78 </ol>
79 79
80 80 <h1>Testing Mercurial after you've installed it</h1>
81 81
82 82 <p>The easiest way to check that Mercurial is installed properly is to
83 83 just type the following at the command prompt:</p>
84 84
85 85 <pre>
86 86 hg
87 87 </pre>
88 88
89 89 <p>This command should print a useful help message. If it does,
90 90 other Mercurial commands should work fine for you.</p>
91 91
92 92 <h1>Reporting problems</h1>
93 93
94 94 <p>Before you report any problems, please consult the <a
95 95 href="http://www.selenic.com/mercurial">Mercurial web site</a> and
96 96 see if your question is already in our list of <a
97 97 href="http://www.selenic.com/mercurial/wiki/index.cgi/FAQ">Frequently
98 98 Answered Questions</a> (the "FAQ").
99 99
100 100 <p>If you cannot find an answer to your question, please feel
101 101 free to send mail to the Mercurial mailing list, at <a
102 102 href="mailto:mercurial@selenic.com">mercurial@selenic.com</a>.
103 103 <b>Remember</b>, the more useful information you include in your
104 104 report, the easier it will be for us to help you!</p>
105 105
106 106 <p>If you are IRC-savvy, that's usually the fastest way to get
107 107 help. Go to <tt>#mercurial</tt> on
108 108 <tt>irc.freenode.net</tt>.</p>
109 109
110 110 <h1>Author and copyright information</h1>
111 111
112 112 <p>Mercurial was written by <a href="http://www.selenic.com">Matt
113 113 Mackall</a>, and is maintained by Matt and a team of
114 114 volunteers.</p>
115 115
116 116 <p>The Windows installer was written by <a
117 117 href="http://www.serpentine.com/blog">Bryan
118 118 O'Sullivan</a>.</p>
119 119
120 120 <p>Mercurial is Copyright 2005, 2006 Matt Mackall and others. See the
121 121 <tt>Contributors.txt</tt> file for a list of contributors.</p>
122 122
123 123 <p>Mercurial is free software; you can redistribute it and/or
124 124 modify it under the terms of the <a
125 125 href="http://www.gnu.org/copyleft/gpl.html">GNU General Public
126 126 License</a> as published by the Free Software Foundation; either
127 127 version 2 of the License, or (at your option) any later
128 128 version.</p>
129 129
130 130 <p>Mercurial is distributed in the hope that it will be useful,
131 131 but <b>without any warranty</b>; without even the implied
132 132 warranty of <b>merchantability</b> or <b>fitness for a
133 133 particular purpose</b>. See the GNU General Public License for
134 134 more details.</p>
135 135 </body>
136 136 </html>
@@ -1,57 +1,57 @@
1 1 ; Script generated by the Inno Setup Script Wizard.
2 2 ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
3 3
4 4 [Setup]
5 5 AppCopyright=Copyright 2005, 2006 Matt Mackall and others
6 6 AppName=Mercurial
7 AppVerName=Mercurial version 0.8.1
7 AppVerName=Mercurial version 0.9
8 8 InfoAfterFile=contrib/win32/postinstall.txt
9 9 LicenseFile=COPYING
10 10 ShowLanguageDialog=yes
11 11 AppPublisher=Matt Mackall and others
12 12 AppPublisherURL=http://www.selenic.com/mercurial
13 13 AppSupportURL=http://www.selenic.com/mercurial
14 14 AppUpdatesURL=http://www.selenic.com/mercurial
15 15 AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
16 16 AppContact=mercurial@selenic.com
17 OutputBaseFilename=Mercurial-0.8.1
17 OutputBaseFilename=Mercurial-0.9
18 18 DefaultDirName={sd}\Mercurial
19 19 SourceDir=C:\hg\hg-release
20 VersionInfoVersion=0.8.1
20 VersionInfoVersion=0.9
21 21 VersionInfoDescription=Mercurial distributed SCM
22 22 VersionInfoCopyright=Copyright 2005, 2006 Matt Mackall and others
23 23 VersionInfoCompany=Matt Mackall and others
24 24 InternalCompressLevel=max
25 25 SolidCompression=true
26 26 SetupIconFile=contrib\favicon.ico
27 27 AllowNoIcons=true
28 28 DefaultGroupName=Mercurial
29 29
30 30 [Files]
31 31 Source: ..\..\msys\1.0\bin\patch.exe; DestDir: {app}
32 32 Source: contrib\mercurial.el; DestDir: {app}/Contrib
33 33 Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
34 34 Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Flags: confirmoverwrite
35 35 Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
36 36 Source: dist\hg.exe; DestDir: {app}
37 37 Source: dist\library.zip; DestDir: {app}
38 38 Source: dist\mfc71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
39 39 Source: dist\msvcr71.dll; DestDir: {sys}; Flags: sharedfile uninsnosharedfileprompt
40 40 Source: dist\w9xpopen.exe; DestDir: {app}
41 41 Source: doc\*.txt; DestDir: {app}\Docs
42 42 Source: templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
43 43 Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
44 44 Source: COPYING; DestDir: {app}; DestName: Copying.txt
45 45 Source: comparison.txt; DestDir: {app}\Docs; DestName: Comparison.txt
46 46 Source: notes.txt; DestDir: {app}\Docs; DestName: DesignNotes.txt
47 47
48 48 [INI]
49 49 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: http://www.selenic.com/mercurial/
50 50
51 51 [UninstallDelete]
52 52 Type: files; Name: {app}\Mercurial.url
53 53
54 54 [Icons]
55 55 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
56 56 Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.txt
57 57 Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url
@@ -1,75 +1,112 @@
1 1 Welcome to Mercurial for Windows!
2 2 ---------------------------------
3 3
4 4 For configuration and usage directions, please read the ReadMe.html
5 5 file that comes with this package.
6 6
7
8 7 Release Notes
9 8 -------------
10 9
10 2006-05-10 v0.9
11
12 * Major changes between Mercurial 0.8.1 and 0.9:
13
14 - The repository file format has been improved.
15 - This has resulted in an average 40% reduction in disk space usage.
16 - The new format (called RevlogNG) is now the default.
17 - Mercurial works perfectly with both the old and new repository
18 file formats. It can transfer changes transparently between
19 repositories of either format.
20 - To use the new repository format, simply use `hg clone --pull` to
21 clone an existing repository.
22 - Note: Versions 0.8.1 and earlier of Mercurial cannot read
23 RevlogNG repositories directly, but they can `clone`, `pull`
24 from, and `push` to servers that are serving RevlogNG
25 repositories.
26 - Memory usage has been improved by over 50% for many common operations.
27 - Substantial performance improvements on large repositories.
28 - New commands:
29 - 'archive' - generate a directory tree snapshot, tarball, or zip
30 file of a revision
31 - Deprecated commands:
32 - 'addremove' - replaced by 'add' and 'remove --after'
33 - 'forget' - replaced by 'revert'
34 - 'undo' - replaced by 'rollback'
35 - New extensions:
36 - Bugzilla integration hook
37 - Email notification hook
38 - Nested repositories are now supported. Mercurial will not recurse
39 into a subdirectory that contains a '.hg' directory. It is treated
40 as a separate repository.
41 - The standalone web server, 'hg serve', is now threaded, so it can
42 talk to multiple clients at a time.
43 - The web server can now display a "message of the day".
44 - Support added for hooks written in Python.
45 - Many improvements and clarifications to built-in help.
46
47
11 48 2006-04-07 v0.8.1
12 49
13 50 * Major changes from 0.8 to 0.8.1:
14 51
15 52 - new extensions:
16 53 mq (manage a queue of patches, like quilt only better)
17 54 email (send changes as series of email patches)
18 55 - new command: merge (replaces "update -m")
19 56 - improved commands: log (--limit option added), pull/push ("-r" works
20 57 on specific revisions), revert (rewritten, much better)
21 58 - comprehensive hook support
22 59 - output templating added, supporting e.g. GNU changelog style
23 60 - Windows, Mac OS X: prebuilt binary packages, better support
24 61 - many reliability, performance, and memory usage improvements
25 62
26 63
27 64 2006-01-29 v0.8
28 65
29 66 * Upgrade notes:
30 67
31 68 - diff and status command are now repo-wide by default
32 69 (use 'hg diff .' for the old behavior)
33 70 - GPG signing is now done with the gpg extension
34 71 - the --text option for commit, rawcommit, and tag has been removed
35 72 - the copy/rename --parents option has been removed
36 73
37 74 * Major changes from 0.7 to 0.8:
38 75
39 76 - faster status, diff, and commit
40 77 - reduced memory usage for push and pull
41 78 - improved extension API
42 79 - new bisect, gpg, hgk, and win32text extensions
43 80 - short URLs, binary file handling, and optional gitweb skin for hgweb
44 81 - numerous new command options including log --keyword and pull --rev
45 82 - improved hooks and file filtering
46 83
47 84
48 85 2005-09-21 v0.7 with modifications
49 86
50 87 * New INI files have been added to control Mercurial's behaviour:
51 88
52 89 System-wide - C:\Mercurial\Mercurial.ini
53 90 Per-user - C:\Documents and Settings\USERNAME\Mercurial.ini
54 91
55 92 A default version of the system-wide INI file is installed with
56 93 Mercurial. No per-user INI file is installed, but it will be
57 94 honoured if you create one.
58 95
59 96 * Windows line endings are now handled automatically and correctly by
60 97 the update and commit commands. See the INI file for how to
61 98 customise this behaviour.
62 99
63 100 * NOTE: Much of the rest of the Mercurial code does not handle Windows
64 101 line endings properly. Accordingly, the output of the diff command,
65 102 for example, will appear huge until I fix this.
66 103
67 104 * Packaged text files now have correct Windows line endings.
68 105
69 106
70 107 2005-09-21 v0.7 with modifications
71 108
72 109 * This is the first standalone release of Mercurial for Windows.
73 110
74 111 * I believe it to be mostly functional, with one exception: there is
75 112 no support yet for DOS <-> Unix line ending conversion.
@@ -1,381 +1,412 @@
1 1 HGRC(5)
2 2 =======
3 3 Bryan O'Sullivan <bos@serpentine.com>
4 4
5 5 NAME
6 6 ----
7 7 hgrc - configuration files for Mercurial
8 8
9 9 SYNOPSIS
10 10 --------
11 11
12 12 The Mercurial system uses a set of configuration files to control
13 13 aspects of its behaviour.
14 14
15 15 FILES
16 16 -----
17 17
18 18 Mercurial reads configuration data from several files, if they exist.
19 19 The names of these files depend on the system on which Mercurial is
20 20 installed.
21 21
22 22 (Unix) <install-root>/etc/mercurial/hgrc.d/*.rc::
23 23 (Unix) <install-root>/etc/mercurial/hgrc::
24 24 Per-installation configuration files, searched for in the
25 25 directory where Mercurial is installed. For example, if installed
26 26 in /shared/tools, Mercurial will look in
27 27 /shared/tools/etc/mercurial/hgrc. Options in these files apply to
28 28 all Mercurial commands executed by any user in any directory.
29 29
30 30 (Unix) /etc/mercurial/hgrc.d/*.rc::
31 31 (Unix) /etc/mercurial/hgrc::
32 32 (Windows) C:\Mercurial\Mercurial.ini::
33 33 Per-system configuration files, for the system on which Mercurial
34 34 is running. Options in these files apply to all Mercurial
35 35 commands executed by any user in any directory. Options in these
36 36 files override per-installation options.
37 37
38 38 (Unix) $HOME/.hgrc::
39 39 (Windows) C:\Documents and Settings\USERNAME\Mercurial.ini::
40 40 Per-user configuration file, for the user running Mercurial.
41 41 Options in this file apply to all Mercurial commands executed by
42 42 any user in any directory. Options in this file override
43 43 per-installation and per-system options.
44 44
45 45 (Unix, Windows) <repo>/.hg/hgrc::
46 46 Per-repository configuration options that only apply in a
47 47 particular repository. This file is not version-controlled, and
48 48 will not get transferred during a "clone" operation. Options in
49 49 this file override options in all other configuration files.
50 50
51 51 SYNTAX
52 52 ------
53 53
54 54 A configuration file consists of sections, led by a "[section]" header
55 55 and followed by "name: value" entries; "name=value" is also accepted.
56 56
57 57 [spam]
58 58 eggs=ham
59 59 green=
60 60 eggs
61 61
62 62 Each line contains one entry. If the lines that follow are indented,
63 63 they are treated as continuations of that entry.
64 64
65 65 Leading whitespace is removed from values. Empty lines are skipped.
66 66
67 67 The optional values can contain format strings which refer to other
68 68 values in the same section, or values in a special DEFAULT section.
69 69
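  For illustration, a value referring to another entry might look like
  this -- a minimal sketch, assuming "%(name)s"-style references as in
  Python's ConfigParser (the entry names are made up):

    [spam]
    eggs = ham
    meal = %(eggs)s and toast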
70 70 Lines beginning with "#" or ";" are ignored and may be used to provide
71 71 comments.
72 72
73 73 SECTIONS
74 74 --------
75 75
76 76 This section describes the different sections that may appear in a
77 77 Mercurial "hgrc" file, the purpose of each section, its possible
78 78 keys, and their possible values.
79 79
80 80 decode/encode::
81 81 Filters for transforming files on checkout/checkin. This would
82 82 typically be used for newline processing or other
83 83 localization/canonicalization of files.
84 84
85 85 Filters consist of a filter pattern followed by a filter command.
86 86 Filter patterns are globs by default, rooted at the repository
87 87 root. For example, to match any file ending in ".txt" in the root
88 88 directory only, use the pattern "*.txt". To match any file ending
89 89 in ".c" anywhere in the repository, use the pattern "**.c".
90 90
91 91 The filter command can start with a specifier, either "pipe:" or
92 92 "tempfile:". If no specifier is given, "pipe:" is used by default.
93 93
94 94 A "pipe:" command must accept data on stdin and return the
95 95 transformed data on stdout.
96 96
97 97 Pipe example:
98 98
99 99 [encode]
100 100 # uncompress gzip files on checkin to improve delta compression
101 101 # note: not necessarily a good idea, just an example
102 102 *.gz = pipe: gunzip
103 103
104 104 [decode]
105 105 # recompress gzip files when writing them to the working dir (we
106 106 # can safely omit "pipe:", because it's the default)
107 107 *.gz = gzip
108 108
109 109 A "tempfile:" command is a template. The string INFILE is replaced
110 110 with the name of a temporary file that contains the data to be
111 111 filtered by the command. The string OUTFILE is replaced with the
112 112 name of an empty temporary file, where the filtered data must be
113 113 written by the command.
114 114
115 115 NOTE: the tempfile mechanism is recommended for Windows systems,
116 116 where the standard shell I/O redirection operators often have
117 117 strange effects. In particular, if you are doing line ending
118 118 conversion on Windows using the popular dos2unix and unix2dos
119 119 programs, you *must* use the tempfile mechanism, as using pipes will
120 120 corrupt the contents of your files.
121 121
122 122 Tempfile example:
123 123
124 124 [encode]
125 125 # convert files to unix line ending conventions on checkin
126 126 **.txt = tempfile: dos2unix -n INFILE OUTFILE
127 127
128 128 [decode]
129 129 # convert files to windows line ending conventions when writing
130 130 # them to the working dir
131 131 **.txt = tempfile: unix2dos -n INFILE OUTFILE
132 132
133 133 email::
134 134 Settings for extensions that send email messages.
135 135 from;;
136 136 Optional. Email address to use in "From" header and SMTP envelope
137 137 of outgoing messages.
138 method;;
139 Optional. Method to use to send email messages. If value is
140 "smtp" (default), use SMTP (see section "[smtp]" for
141 configuration). Otherwise, use as name of program to run that
142 acts like sendmail (takes "-f" option for sender, list of
143 recipients on command line, message on stdin). Normally, setting
144 this to "sendmail" or "/usr/sbin/sendmail" is enough to use
145 sendmail to send messages.
146
147 Email example:
148
149 [email]
150 from = Joseph User <joe.user@example.com>
151 method = /usr/sbin/sendmail
138 152
139 153 extensions::
140 154 Mercurial has an extension mechanism for adding new features. To
141 155 enable an extension, create an entry for it in this section.
142 156
143 157 If you know that the extension is already in Python's search path,
144 158 you can give the name of the module, followed by "=", with nothing
145 159 after the "=".
146 160
147 161 Otherwise, give a name that you choose, followed by "=", followed by
148 162 the path to the ".py" file (including the file name extension) that
149 163 defines the extension.
150 164
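  A minimal sketch of an [extensions] section (the extension names and
  the path below are purely illustrative):

    [extensions]
    # module already on Python's search path: nothing after the "="
    someextension =
    # extension loaded from an explicit .py file
    myfeature = /home/user/hgext/myfeature.py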
151 165 hooks::
152 166 Commands or Python functions that get automatically executed by
153 167 various actions such as starting or finishing a commit. Multiple
154 168 hooks can be run for the same action by appending a suffix to the
155 169 action. Overriding a site-wide hook can be done by changing its
156 170 value or setting it to an empty string.
157 171
158 172 Example .hg/hgrc:
159 173
160 174 [hooks]
161 175 # do not use the site-wide hook
162 176 incoming =
163 177 incoming.email = /my/email/hook
164 178 incoming.autobuild = /my/build/hook
165 179
166 180 Most hooks are run with environment variables set that give added
167 181 useful information. For each hook below, the environment variables
168 182 it is passed are listed with names of the form "$HG_foo".
169 183
170 184 changegroup;;
171 185 Run after a changegroup has been added via push, pull or
172 186 unbundle. ID of the first new changeset is in $HG_NODE.
173 187 commit;;
174 188 Run after a changeset has been created in the local repository.
175 189 ID of the newly created changeset is in $HG_NODE. Parent
176 190 changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
177 191 incoming;;
178 192 Run after a changeset has been pulled, pushed, or unbundled into
179 193 the local repository. The ID of the newly arrived changeset is in
180 194 $HG_NODE.
181 195 outgoing;;
182 196 Run after sending changes from local repository to another. ID of
183 197 first changeset sent is in $HG_NODE. Source of operation is in
184 198 $HG_SOURCE; see "preoutgoing" hook for description.
185 199 prechangegroup;;
186 200 Run before a changegroup is added via push, pull or unbundle.
187 201 Exit status 0 allows the changegroup to proceed. Non-zero status
188 202 will cause the push, pull or unbundle to fail.
189 203 precommit;;
190 204 Run before starting a local commit. Exit status 0 allows the
191 205 commit to proceed. Non-zero status will cause the commit to fail.
192 206 Parent changeset IDs are in $HG_PARENT1 and $HG_PARENT2.
193 207 preoutgoing;;
194 208 Run before computing changes to send from the local repository to
195 209 another. Non-zero status will cause failure. This lets you
196 210 prevent pull over http or ssh. Also prevents against local pull,
197 211 push (outbound) or bundle commands, but not effective, since you
198 212 can just copy files instead then. Source of operation is in
199 213 $HG_SOURCE. If "serve", operation is happening on behalf of
200 214 remote ssh or http repository. If "push", "pull" or "bundle",
201 215 operation is happening on behalf of repository on same system.
202 216 pretag;;
203 217 Run before creating a tag. Exit status 0 allows the tag to be
204 218 created. Non-zero status will cause the tag to fail. ID of
205 219 changeset to tag is in $HG_NODE. Name of tag is in $HG_TAG. Tag
206 220 is local if $HG_LOCAL=1, in repo if $HG_LOCAL=0.
207 221 pretxnchangegroup;;
208 222 Run after a changegroup has been added via push, pull or unbundle,
209 223 but before the transaction has been committed. Changegroup is
210 224 visible to hook program. This lets you validate incoming changes
211 225 before accepting them. Passed the ID of the first new changeset
212 226 in $HG_NODE. Exit status 0 allows the transaction to commit.
213 227 Non-zero status will cause the transaction to be rolled back and
214 228 the push, pull or unbundle will fail.
215 229 pretxncommit;;
216 230 Run after a changeset has been created but the transaction not yet
217 231 committed. Changeset is visible to hook program. This lets you
218 232 validate commit message and changes. Exit status 0 allows the
219 233 commit to proceed. Non-zero status will cause the transaction to
220 234 be rolled back. ID of changeset is in $HG_NODE. Parent changeset
221 235 IDs are in $HG_PARENT1 and $HG_PARENT2.
236 preupdate;;
237 Run before updating the working directory. Exit status 0 allows
238 the update to proceed. Non-zero status will prevent the update.
239 Changeset ID of first new parent is in $HG_PARENT1. If merge, ID
240 of second new parent is in $HG_PARENT2.
222 241 tag;;
223 242 Run after a tag is created. ID of tagged changeset is in
224 243 $HG_NODE. Name of tag is in $HG_TAG. Tag is local if
225 244 $HG_LOCAL=1, in repo if $HG_LOCAL=0.
245 update;;
246 Run after updating the working directory. Changeset ID of first
247 new parent is in $HG_PARENT1. If merge, ID of second new parent
248 is in $HG_PARENT2. If update succeeded, $HG_ERROR=0. If update
249 failed (e.g. because conflicts not resolved), $HG_ERROR=1.
226 250
227 In earlier releases, the names of hook environment variables did not
228 have a "HG_" prefix. These unprefixed names are still provided in
229 the environment for backwards compatibility, but their use is
230 deprecated, and they will be removed in a future release.
251 Note: In earlier releases, the names of hook environment variables
252 did not have a "HG_" prefix. The old unprefixed names are no longer
253 provided in the environment.
231 254
232 255 The syntax for Python hooks is as follows:
233 256
234 257 hookname = python:modulename.submodule.callable
235 258
236 259 Python hooks are run within the Mercurial process. Each hook is
237 260 called with at least three keyword arguments: a ui object (keyword
238 261 "ui"), a repository object (keyword "repo"), and a "hooktype"
239 262 keyword that tells what kind of hook is used. Arguments listed as
240 263 environment variables above are passed as keyword arguments, with no
241 264 "HG_" prefix, and names in lower case.
242 265
243 266 A Python hook must return a "true" value to succeed. Returning a
244 267 "false" value or raising an exception is treated as failure of the
245 268 hook.
246 269
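  A minimal sketch of such a Python hook (the module and function
  names below are made up for illustration; they are not part of
  Mercurial):

    # myhooks.py -- enabled from an hgrc with, for example:
    #   [hooks]
    #   pretxncommit.lognode = python:myhooks.lognode
    def lognode(ui, repo, hooktype, node=None, **kwargs):
        # Report the changeset being committed. Returning a true value
        # lets the transaction proceed; a false value or an exception
        # would cause it to be rolled back.
        ui.status("pretxncommit hook saw changeset %s\n" % node)
        return True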
247 270 http_proxy::
248 271 Used to access web-based Mercurial repositories through a HTTP
249 272 proxy.
250 273 host;;
251 274 Host name and (optional) port of the proxy server, for example
252 275 "myproxy:8000".
253 276 no;;
254 277 Optional. Comma-separated list of host names that should bypass
255 278 the proxy.
256 279 passwd;;
257 280 Optional. Password to authenticate with at the proxy server.
258 281 user;;
259 282 Optional. User name to authenticate with at the proxy server.
260 283
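  Proxy example (host names and credentials are illustrative):

    [http_proxy]
    host = myproxy:8000
    no = localhost,intranet.example.com
    user = proxyuser
    passwd = secret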
261 284 smtp::
262 285 Configuration for extensions that need to send email messages.
263 286 host;;
264 287 Optional. Host name of mail server. Default: "mail".
265 288 port;;
266 289 Optional. Port to connect to on mail server. Default: 25.
267 290 tls;;
268 291 Optional. Whether to connect to mail server using TLS. True or
269 292 False. Default: False.
270 293 username;;
271 294 Optional. User name to authenticate to SMTP server with.
272 295 If username is specified, password must also be specified.
273 296 Default: none.
274 297 password;;
275 298 Optional. Password to authenticate to SMTP server with.
276 299 If username is specified, password must also be specified.
277 300 Default: none.
278 301
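  SMTP example (host and credentials are illustrative):

    [smtp]
    host = mail.example.com
    port = 25
    tls = False
    username = joe.user
    password = secret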
279 302 paths::
280 303 Assigns symbolic names to repositories. The left side is the
281 304 symbolic name, and the right gives the directory or URL that is the
282 location of the repository.
305 location of the repository. Default paths can be declared by
306 setting the following entries.
307 default;;
308 Directory or URL to use when pulling if no source is specified.
309 Default is set to repository from which the current repository
310 was cloned.
311 default-push;;
312 Optional. Directory or URL to use when pushing if no destination
313 is specified.
283 314
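  Paths example (the names and URLs are illustrative):

    [paths]
    default = http://hg.example.com/main
    default-push = ssh://hg@example.com/main
    # an additional symbolic name for another repository
    myfork = http://hg.example.com/myfork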
284 315 ui::
285 316 User interface controls.
286 317 debug;;
287 318 Print debugging information. True or False. Default is False.
288 319 editor;;
289 320 The editor to use during a commit. Default is $EDITOR or "vi".
290 321 ignore;;
291 322 A file to read per-user ignore patterns from. This file should be in
292 323 the same format as a repository-wide .hgignore file. This option
293 324 supports hook syntax, so if you want to specify multiple ignore
294 325 files, you can do so by setting something like
295 326 "ignore.other = ~/.hgignore2". For details of the ignore file
296 327 format, see the hgignore(5) man page.
297 328 interactive;;
298 329 Allow prompting the user. True or False. Default is True.
299 330 logtemplate;;
300 331 Template string for commands that print changesets.
301 332 style;;
302 333 Name of style to use for command output.
303 334 merge;;
304 335 The conflict resolution program to use during a manual merge.
305 336 Default is "hgmerge".
306 337 quiet;;
307 338 Reduce the amount of output printed. True or False. Default is False.
308 339 remotecmd;;
309 340 remote command to use for clone/push/pull operations. Default is 'hg'.
310 341 ssh;;
311 342 command to use for SSH connections. Default is 'ssh'.
312 343 timeout;;
313 344 The timeout used when a lock is held (in seconds), a negative value
314 345 means no timeout. Default is 600.
315 346 username;;
316 347 The committer of a changeset created when running "commit".
317 348 Typically a person's name and email address, e.g. "Fred Widget
318 349 <fred@example.com>". Default is $EMAIL or username@hostname, unless
319 350 username is set to an empty string, which enforces specifying the
320 351 username manually.
321 352 verbose;;
322 353 Increase the amount of output printed. True or False. Default is False.
323 354
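  A small [ui] example, using only the keys documented above (the
  values are illustrative):

    [ui]
    username = Fred Widget <fred@example.com>
    editor = vim
    merge = kdiff3
    verbose = True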
324 355
325 356 web::
326 357 Web interface configuration.
327 358 accesslog;;
328 359 Where to output the access log. Default is stdout.
329 360 address;;
330 361 Interface address to bind to. Default is all.
331 362 allowbz2;;
332 363 Whether to allow .tar.bz2 downloading of repo revisions. Default is false.
333 364 allowgz;;
334 365 Whether to allow .tar.gz downloading of repo revisions. Default is false.
335 366 allowpull;;
336 367 Whether to allow pulling from the repository. Default is true.
337 368 allowzip;;
338 369 Whether to allow .zip downloading of repo revisions. Default is false.
339 370 This feature creates temporary files.
340 371 baseurl;;
341 372 Base URL to use when publishing URLs in other locations, so
342 373 third-party tools like email notification hooks can construct URLs.
343 374 Example: "http://hgserver/repos/"
344 375 description;;
345 376 Textual description of the repository's purpose or contents.
346 377 Default is "unknown".
347 378 errorlog;;
348 379 Where to output the error log. Default is stderr.
349 380 ipv6;;
350 381 Whether to use IPv6. Default is false.
351 382 name;;
352 383 Repository name to use in the web interface. Default is current
353 384 working directory.
354 385 maxchanges;;
355 386 Maximum number of changes to list on the changelog. Default is 10.
356 387 maxfiles;;
357 388 Maximum number of files to list per changeset. Default is 10.
358 389 port;;
359 390 Port to listen on. Default is 8000.
360 391 style;;
361 392 Which template map style to use.
362 393 templates;;
363 394 Where to find the HTML templates. Default is install path.
364 395
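  A small [web] example (the values are illustrative):

    [web]
    name = myproject
    description = Example project repository
    port = 8000
    allowzip = true
    maxchanges = 20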
365 396
366 397 AUTHOR
367 398 ------
368 399 Bryan O'Sullivan <bos@serpentine.com>.
369 400
370 401 Mercurial was written by Matt Mackall <mpm@selenic.com>.
371 402
372 403 SEE ALSO
373 404 --------
374 405 hg(1), hgignore(5)
375 406
376 407 COPYING
377 408 -------
378 409 This manual page is copyright 2005 Bryan O'Sullivan.
379 410 Mercurial is copyright 2005, 2006 Matt Mackall.
380 411 Free use of this software is granted under the terms of the GNU General
381 412 Public License (GPL).
@@ -1,1305 +1,1306 @@
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.demandload import *
9 9 demandload(globals(), "os sys re struct traceback errno bz2")
10 10 from mercurial.i18n import gettext as _
11 11 from mercurial import ui, hg, revlog, commands, util
12 12
13 13 versionstr = "0.45"
14 14
15 15 repomap = {}
16 16
17 17 commands.norepo += " qversion"
18 18 class queue:
19 19 def __init__(self, ui, path, patchdir=None):
20 20 self.basepath = path
21 21 if patchdir:
22 22 self.path = patchdir
23 23 else:
24 24 self.path = os.path.join(path, "patches")
25 25 self.opener = util.opener(self.path)
26 26 self.ui = ui
27 27 self.applied = []
28 28 self.full_series = []
29 29 self.applied_dirty = 0
30 30 self.series_dirty = 0
31 31 self.series_path = "series"
32 32 self.status_path = "status"
33 33
34 34 if os.path.exists(os.path.join(self.path, self.series_path)):
35 35 self.full_series = self.opener(self.series_path).read().splitlines()
36 36 self.read_series(self.full_series)
37 37
38 38 if os.path.exists(os.path.join(self.path, self.status_path)):
39 39 self.applied = self.opener(self.status_path).read().splitlines()
40 40
41 41 def find_series(self, patch):
42 42 pre = re.compile("(\s*)([^#]+)")
43 43 index = 0
44 44 for l in self.full_series:
45 45 m = pre.match(l)
46 46 if m:
47 47 s = m.group(2)
48 48 s = s.rstrip()
49 49 if s == patch:
50 50 return index
51 51 index += 1
52 52 return None
53 53
54 54 def read_series(self, list):
55 55 def matcher(list):
56 56 pre = re.compile("(\s*)([^#]+)")
57 57 for l in list:
58 58 m = pre.match(l)
59 59 if m:
60 60 s = m.group(2)
61 61 s = s.rstrip()
62 62 if len(s) > 0:
63 63 yield s
64 64 self.series = []
65 65 self.series = [ x for x in matcher(list) ]
66 66
67 67 def save_dirty(self):
68 68 if self.applied_dirty:
69 69 if len(self.applied) > 0:
70 70 nl = "\n"
71 71 else:
72 72 nl = ""
73 73 f = self.opener(self.status_path, "w")
74 74 f.write("\n".join(self.applied) + nl)
75 75 if self.series_dirty:
76 76 if len(self.full_series) > 0:
77 77 nl = "\n"
78 78 else:
79 79 nl = ""
80 80 f = self.opener(self.series_path, "w")
81 81 f.write("\n".join(self.full_series) + nl)
82 82
83 83 def readheaders(self, patch):
84 84 def eatdiff(lines):
85 85 while lines:
86 86 l = lines[-1]
87 87 if (l.startswith("diff -") or
88 88 l.startswith("Index:") or
89 89 l.startswith("===========")):
90 90 del lines[-1]
91 91 else:
92 92 break
93 93 def eatempty(lines):
94 94 while lines:
95 95 l = lines[-1]
96 96 if re.match('\s*$', l):
97 97 del lines[-1]
98 98 else:
99 99 break
100 100
101 101 pf = os.path.join(self.path, patch)
102 102 message = []
103 103 comments = []
104 104 user = None
105 105 format = None
106 106 subject = None
107 107 diffstart = 0
108 108
109 109 for line in file(pf):
110 110 line = line.rstrip()
111 111 if diffstart:
112 112 if line.startswith('+++ '):
113 113 diffstart = 2
114 114 break
115 115 if line.startswith("--- "):
116 116 diffstart = 1
117 117 continue
118 118 elif format == "hgpatch":
119 119 # parse values when importing the result of an hg export
120 120 if line.startswith("# User "):
121 121 user = line[7:]
122 122 elif not line.startswith("# ") and line:
123 123 message.append(line)
124 124 format = None
125 125 elif line == '# HG changeset patch':
126 126 format = "hgpatch"
127 127 elif (format != "tagdone" and (line.startswith("Subject: ") or
128 128 line.startswith("subject: "))):
129 129 subject = line[9:]
130 130 format = "tag"
131 131 elif (format != "tagdone" and (line.startswith("From: ") or
132 132 line.startswith("from: "))):
133 133 user = line[6:]
134 134 format = "tag"
135 135 elif format == "tag" and line == "":
136 136 # when looking for tags (subject: from: etc) they
137 137 # end once you find a blank line in the source
138 138 format = "tagdone"
139 139 else:
140 140 message.append(line)
141 141 comments.append(line)
142 142
143 143 eatdiff(message)
144 144 eatdiff(comments)
145 145 eatempty(message)
146 146 eatempty(comments)
147 147
148 148 # make sure message isn't empty
149 149 if format and format.startswith("tag") and subject:
150 150 message.insert(0, "")
151 151 message.insert(0, subject)
152 152 return (message, comments, user, diffstart > 1)
153 153
154 154 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
155 155 # first try just applying the patch
156 156 (err, n) = self.apply(repo, [ patch ], update_status=False,
157 157 strict=True, merge=rev, wlock=wlock)
158 158
159 159 if err == 0:
160 160 return (err, n)
161 161
162 162 if n is None:
163 163 self.ui.warn("apply failed for patch %s\n" % patch)
164 164 sys.exit(1)
165 165
166 166 self.ui.warn("patch didn't work out, merging %s\n" % patch)
167 167
168 168 # apply failed, strip away that rev and merge.
169 169 repo.update(head, allow=False, force=True, wlock=wlock)
170 170 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
171 171
172 172 c = repo.changelog.read(rev)
173 173 ret = repo.update(rev, allow=True, wlock=wlock)
174 174 if ret:
175 175 self.ui.warn("update returned %d\n" % ret)
176 176 sys.exit(1)
177 177 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
178 178 if n == None:
179 179 self.ui.warn("repo commit failed\n")
180 180 sys.exit(1)
181 181 try:
182 182 message, comments, user, patchfound = mergeq.readheaders(patch)
183 183 except:
184 184 self.ui.warn("Unable to read %s\n" % patch)
185 185 sys.exit(1)
186 186
187 187 patchf = self.opener(patch, "w")
188 188 if comments:
189 189 comments = "\n".join(comments) + '\n\n'
190 190 patchf.write(comments)
191 191 commands.dodiff(patchf, self.ui, repo, head, n)
192 192 patchf.close()
193 193 return (0, n)
194 194
195 195 def qparents(self, repo, rev=None):
196 196 if rev is None:
197 197 (p1, p2) = repo.dirstate.parents()
198 198 if p2 == revlog.nullid:
199 199 return p1
200 200 if len(self.applied) == 0:
201 201 return None
202 202 (top, patch) = self.applied[-1].split(':')
203 203 top = revlog.bin(top)
204 204 return top
205 205 pp = repo.changelog.parents(rev)
206 206 if pp[1] != revlog.nullid:
207 207 arevs = [ x.split(':')[0] for x in self.applied ]
208 208 p0 = revlog.hex(pp[0])
209 209 p1 = revlog.hex(pp[1])
210 210 if p0 in arevs:
211 211 return pp[0]
212 212 if p1 in arevs:
213 213 return pp[1]
214 214 return None
215 215 return pp[0]
216 216
217 217 def mergepatch(self, repo, mergeq, series, wlock):
218 218 if len(self.applied) == 0:
219 219 # each of the patches merged in will have two parents. This
220 220 # can confuse the qrefresh, qdiff, and strip code because it
221 221 # needs to know which parent is actually in the patch queue.
222 222 # so, we insert a merge marker with only one parent. This way
223 223 # the first patch in the queue is never a merge patch
224 224 #
225 225 pname = ".hg.patches.merge.marker"
226 226 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
227 227 wlock=wlock)
228 228 self.applied.append(revlog.hex(n) + ":" + pname)
229 229 self.applied_dirty = 1
230 230
231 231 head = self.qparents(repo)
232 232
233 233 for patch in series:
234 234 patch = mergeq.lookup(patch)
235 235 if not patch:
236 236 self.ui.warn("patch %s does not exist\n" % patch)
237 237 return (1, None)
238 238
239 239 info = mergeq.isapplied(patch)
240 240 if not info:
241 241 self.ui.warn("patch %s is not applied\n" % patch)
242 242 return (1, None)
243 243 rev = revlog.bin(info[1])
244 244 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
245 245 if head:
246 246 self.applied.append(revlog.hex(head) + ":" + patch)
247 247 self.applied_dirty = 1
248 248 if err:
249 249 return (err, head)
250 250 return (0, head)
251 251
252 252 def apply(self, repo, series, list=False, update_status=True,
253 253 strict=False, patchdir=None, merge=None, wlock=None):
254 254 # TODO unify with commands.py
255 255 if not patchdir:
256 256 patchdir = self.path
257 257 pwd = os.getcwd()
258 258 os.chdir(repo.root)
259 259 err = 0
260 260 if not wlock:
261 261 wlock = repo.wlock()
262 262 lock = repo.lock()
263 263 tr = repo.transaction()
264 264 n = None
265 265 for patch in series:
266 266 self.ui.warn("applying %s\n" % patch)
267 267 pf = os.path.join(patchdir, patch)
268 268
269 269 try:
270 270 message, comments, user, patchfound = self.readheaders(patch)
271 271 except:
272 272 self.ui.warn("Unable to read %s\n" % pf)
273 273 err = 1
274 274 break
275 275
276 276 if not message:
277 277 message = "imported patch %s\n" % patch
278 278 else:
279 279 if list:
280 280 message.append("\nimported patch %s" % patch)
281 281 message = '\n'.join(message)
282 282
283 283 try:
284 f = os.popen("patch -p1 --no-backup-if-mismatch < '%s'" % (pf))
284 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
285 f = os.popen("%s -p1 --no-backup-if-mismatch < '%s'" % (pp, pf))
285 286 except:
286 287 self.ui.warn("patch failed, unable to continue (try -v)\n")
287 288 err = 1
288 289 break
289 290 files = []
290 291 fuzz = False
291 292 for l in f:
292 293 l = l.rstrip('\r\n');
293 294 if self.ui.verbose:
294 295 self.ui.warn(l + "\n")
295 296 if l[:14] == 'patching file ':
296 297 pf = os.path.normpath(l[14:])
297 298 # when patch finds a space in the file name, it puts
298 299 # single quotes around the filename. strip them off
299 300 if pf[0] == "'" and pf[-1] == "'":
300 301 pf = pf[1:-1]
301 302 if pf not in files:
302 303 files.append(pf)
303 304 printed_file = False
304 305 file_str = l
305 306 elif l.find('with fuzz') >= 0:
306 307 if not printed_file:
307 308 self.ui.warn(file_str + '\n')
308 309 printed_file = True
309 310 self.ui.warn(l + '\n')
310 311 fuzz = True
311 312 elif l.find('saving rejects to file') >= 0:
312 313 self.ui.warn(l + '\n')
313 314 elif l.find('FAILED') >= 0:
314 315 if not printed_file:
315 316 self.ui.warn(file_str + '\n')
316 317 printed_file = True
317 318 self.ui.warn(l + '\n')
318 319 patcherr = f.close()
319 320
320 321 if merge and len(files) > 0:
321 322 # Mark as merged and update dirstate parent info
322 323 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
323 324 p1, p2 = repo.dirstate.parents()
324 325 repo.dirstate.setparents(p1, merge)
325 326 if len(files) > 0:
326 327 commands.addremove_lock(self.ui, repo, files,
327 328 opts={}, wlock=wlock)
328 329 n = repo.commit(files, message, user, force=1, lock=lock,
329 330 wlock=wlock)
330 331
331 332 if n == None:
332 333 self.ui.warn("repo commit failed\n")
333 334 sys.exit(1)
334 335
335 336 if update_status:
336 337 self.applied.append(revlog.hex(n) + ":" + patch)
337 338
338 339 if patcherr:
339 340 if not patchfound:
340 341 self.ui.warn("patch %s is empty\n" % patch)
341 342 err = 0
342 343 else:
343 344 self.ui.warn("patch failed, rejects left in working dir\n")
344 345 err = 1
345 346 break
346 347
347 348 if fuzz and strict:
348 349 self.ui.warn("fuzz found when applying patch, stopping\n")
349 350 err = 1
350 351 break
351 352 tr.close()
352 353 os.chdir(pwd)
353 354 return (err, n)
354 355
355 356 def delete(self, repo, patch):
356 357 patch = self.lookup(patch)
357 358 info = self.isapplied(patch)
358 359 if info:
359 360 self.ui.warn("cannot delete applied patch %s\n" % patch)
360 361 sys.exit(1)
361 362 if patch not in self.series:
362 363 self.ui.warn("patch %s not in series file\n" % patch)
363 364 sys.exit(1)
364 365 i = self.find_series(patch)
365 366 del self.full_series[i]
366 367 self.read_series(self.full_series)
367 368 self.series_dirty = 1
368 369
369 370 def check_toppatch(self, repo):
370 371 if len(self.applied) > 0:
371 372 (top, patch) = self.applied[-1].split(':')
372 373 top = revlog.bin(top)
373 374 pp = repo.dirstate.parents()
374 375 if top not in pp:
375 376 self.ui.warn("queue top not at dirstate parents. top %s dirstate %s %s\n" %( revlog.short(top), revlog.short(pp[0]), revlog.short(pp[1])))
376 377 sys.exit(1)
377 378 return top
378 379 return None
379 380 def check_localchanges(self, repo):
380 381 (c, a, r, d, u) = repo.changes(None, None)
381 382 if c or a or d or r:
382 383 self.ui.write("Local changes found, refresh first\n")
383 384 sys.exit(1)
384 385 def new(self, repo, patch, msg=None, force=None):
385 386 if not force:
386 387 self.check_localchanges(repo)
387 388 self.check_toppatch(repo)
388 389 wlock = repo.wlock()
389 390 insert = self.series_end()
390 391 if msg:
391 392 n = repo.commit([], "[mq]: %s" % msg, force=True, wlock=wlock)
392 393 else:
393 394 n = repo.commit([],
394 395 "New patch: %s" % patch, force=True, wlock=wlock)
395 396 if n == None:
396 397 self.ui.warn("repo commit failed\n")
397 398 sys.exit(1)
398 399 self.full_series[insert:insert] = [patch]
399 400 self.applied.append(revlog.hex(n) + ":" + patch)
400 401 self.read_series(self.full_series)
401 402 self.series_dirty = 1
402 403 self.applied_dirty = 1
403 404 p = self.opener(patch, "w")
404 405 if msg:
405 406 msg = msg + "\n"
406 407 p.write(msg)
407 408 p.close()
408 409 wlock = None
409 410 r = self.qrepo()
410 411 if r: r.add([patch])
411 412
412 413 def strip(self, repo, rev, update=True, backup="all", wlock=None):
413 414 def limitheads(chlog, stop):
414 415 """return the list of all nodes that have no children"""
415 416 p = {}
416 417 h = []
417 418 stoprev = 0
418 419 if stop in chlog.nodemap:
419 420 stoprev = chlog.rev(stop)
420 421
421 422 for r in range(chlog.count() - 1, -1, -1):
422 423 n = chlog.node(r)
423 424 if n not in p:
424 425 h.append(n)
425 426 if n == stop:
426 427 break
427 428 if r < stoprev:
428 429 break
429 430 for pn in chlog.parents(n):
430 431 p[pn] = 1
431 432 return h
432 433
433 434 def bundle(cg):
434 435 backupdir = repo.join("strip-backup")
435 436 if not os.path.isdir(backupdir):
436 437 os.mkdir(backupdir)
437 438 name = os.path.join(backupdir, "%s" % revlog.short(rev))
438 439 name = savename(name)
439 440 self.ui.warn("saving bundle to %s\n" % name)
440 441 # TODO, exclusive open
441 442 f = open(name, "wb")
442 443 try:
443 444 f.write("HG10")
444 445 z = bz2.BZ2Compressor(9)
445 446 while 1:
446 447 chunk = cg.read(4096)
447 448 if not chunk:
448 449 break
449 450 f.write(z.compress(chunk))
450 451 f.write(z.flush())
451 452 except:
452 453 os.unlink(name)
453 454 raise
454 455 f.close()
455 456 return name
456 457
457 458 def stripall(rev, revnum):
458 459 cl = repo.changelog
459 460 c = cl.read(rev)
460 461 mm = repo.manifest.read(c[0])
461 462 seen = {}
462 463
463 464 for x in xrange(revnum, cl.count()):
464 465 c = cl.read(cl.node(x))
465 466 for f in c[3]:
466 467 if f in seen:
467 468 continue
468 469 seen[f] = 1
469 470 if f in mm:
470 471 filerev = mm[f]
471 472 else:
472 473 filerev = 0
473 474 seen[f] = filerev
474 475 # we go in two steps here so the strip loop happens in a
475 476 # sensible order. When stripping many files, this helps keep
476 477 # our disk access patterns under control.
477 478 list = seen.keys()
478 479 list.sort()
479 480 for f in list:
480 481 ff = repo.file(f)
481 482 filerev = seen[f]
482 483 if filerev != 0:
483 484 if filerev in ff.nodemap:
484 485 filerev = ff.rev(filerev)
485 486 else:
486 487 filerev = 0
487 488 ff.strip(filerev, revnum)
488 489
489 490 if not wlock:
490 491 wlock = repo.wlock()
491 492 lock = repo.lock()
492 493 chlog = repo.changelog
493 494 # TODO delete the undo files, and handle undo of merge sets
494 495 pp = chlog.parents(rev)
495 496 revnum = chlog.rev(rev)
496 497
497 498 if update:
498 499 urev = self.qparents(repo, rev)
499 500 repo.update(urev, allow=False, force=True, wlock=wlock)
500 501 repo.dirstate.write()
501 502
502 503 # save is a list of all the branches we are truncating away
503 504 # that we actually want to keep. changegroup will be used
504 505 # to preserve them and add them back after the truncate
505 506 saveheads = []
506 507 savebases = {}
507 508
508 509 tip = chlog.tip()
509 510 heads = limitheads(chlog, rev)
510 511 seen = {}
511 512
512 513 # search through all the heads, finding those where the revision
513 514 # we want to strip away is an ancestor. Also look for merges
514 515 # that might be turned into new heads by the strip.
515 516 while heads:
516 517 h = heads.pop()
517 518 n = h
518 519 while True:
519 520 seen[n] = 1
520 521 pp = chlog.parents(n)
521 522 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
522 523 if pp[1] not in seen:
523 524 heads.append(pp[1])
524 525 if pp[0] == revlog.nullid:
525 526 break
526 527 if chlog.rev(pp[0]) < revnum:
527 528 break
528 529 n = pp[0]
529 530 if n == rev:
530 531 break
531 532 r = chlog.reachable(h, rev)
532 533 if rev not in r:
533 534 saveheads.append(h)
534 535 for x in r:
535 536 if chlog.rev(x) > revnum:
536 537 savebases[x] = 1
537 538
538 539 # create a changegroup for all the branches we need to keep
539 540 if backup == "all":
540 541 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
541 542 bundle(backupch)
542 543 if saveheads:
543 544 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
544 545 chgrpfile = bundle(backupch)
545 546
546 547 stripall(rev, revnum)
547 548
548 549 change = chlog.read(rev)
549 550 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
550 551 chlog.strip(revnum, revnum)
551 552 if saveheads:
552 553 self.ui.status("adding branch\n")
553 554 commands.unbundle(self.ui, repo, chgrpfile, update=False)
554 555 if backup != "strip":
555 556 os.unlink(chgrpfile)
556 557
557 558 def isapplied(self, patch):
558 559 """returns (index, rev, patch)"""
559 560 for i in xrange(len(self.applied)):
560 561 p = self.applied[i]
561 562 a = p.split(':')
562 563 if a[1] == patch:
563 564 return (i, a[0], a[1])
564 565 return None
565 566
566 567 def lookup(self, patch):
567 568 if patch == None:
568 569 return None
569 570 if patch in self.series:
570 571 return patch
571 572 if not os.path.isfile(os.path.join(self.path, patch)):
572 573 try:
573 574 sno = int(patch)
574 575 except(ValueError, OverflowError):
575 576 self.ui.warn("patch %s not in series\n" % patch)
576 577 sys.exit(1)
577 578 if sno >= len(self.series):
578 579 self.ui.warn("patch number %d is out of range\n" % sno)
579 580 sys.exit(1)
580 581 patch = self.series[sno]
581 582 else:
582 583 self.ui.warn("patch %s not in series\n" % patch)
583 584 sys.exit(1)
584 585 return patch
585 586
586 587 def push(self, repo, patch=None, force=False, list=False,
587 588 mergeq=None, wlock=None):
588 589 if not wlock:
589 590 wlock = repo.wlock()
590 591 patch = self.lookup(patch)
591 592 if patch and self.isapplied(patch):
592 593 self.ui.warn("patch %s is already applied\n" % patch)
593 594 sys.exit(1)
594 595 if self.series_end() == len(self.series):
595 596 self.ui.warn("File series fully applied\n")
596 597 sys.exit(1)
597 598 if not force:
598 599 self.check_localchanges(repo)
599 600
600 601 self.applied_dirty = 1;
601 602 start = self.series_end()
602 603 if start > 0:
603 604 self.check_toppatch(repo)
604 605 if not patch:
605 606 patch = self.series[start]
606 607 end = start + 1
607 608 else:
608 609 end = self.series.index(patch, start) + 1
609 610 s = self.series[start:end]
610 611 if mergeq:
611 612 ret = self.mergepatch(repo, mergeq, s, wlock)
612 613 else:
613 614 ret = self.apply(repo, s, list, wlock=wlock)
614 615 top = self.applied[-1].split(':')[1]
615 616 if ret[0]:
616 617 self.ui.write("Errors during apply, please fix and refresh %s\n" %
617 618 top)
618 619 else:
619 620 self.ui.write("Now at: %s\n" % top)
620 621 return ret[0]
621 622
622 623 def pop(self, repo, patch=None, force=False, update=True, wlock=None):
623 624 def getfile(f, rev):
624 625 t = repo.file(f).read(rev)
625 626 try:
626 627 repo.wfile(f, "w").write(t)
627 628 except IOError:
628 629 try:
629 630 os.makedirs(os.path.dirname(repo.wjoin(f)))
630 631 except OSError, err:
631 632 if err.errno != errno.EEXIST: raise
632 633 repo.wfile(f, "w").write(t)
633 634
634 635 if not wlock:
635 636 wlock = repo.wlock()
636 637 if patch:
637 638 # index, rev, patch
638 639 info = self.isapplied(patch)
639 640 if not info:
640 641 patch = self.lookup(patch)
641 642 info = self.isapplied(patch)
642 643 if not info:
643 644 self.ui.warn("patch %s is not applied\n" % patch)
644 645 sys.exit(1)
645 646 if len(self.applied) == 0:
646 647 self.ui.warn("No patches applied\n")
647 648 sys.exit(1)
648 649
649 650 if not update:
650 651 parents = repo.dirstate.parents()
651 652 rr = [ revlog.bin(x.split(':')[0]) for x in self.applied ]
652 653 for p in parents:
653 654 if p in rr:
654 655 self.ui.warn("qpop: forcing dirstate update\n")
655 656 update = True
656 657
657 658 if not force and update:
658 659 self.check_localchanges(repo)
659 660
660 661 self.applied_dirty = 1;
661 662 end = len(self.applied)
662 663 if not patch:
663 664 info = [len(self.applied) - 1] + self.applied[-1].split(':')
664 665 start = info[0]
665 666 rev = revlog.bin(info[1])
666 667
667 668 # we know there are no local changes, so we can make a simplified
668 669 # form of hg.update.
669 670 if update:
670 671 top = self.check_toppatch(repo)
671 672 qp = self.qparents(repo, rev)
672 673 changes = repo.changelog.read(qp)
673 674 mf1 = repo.manifest.readflags(changes[0])
674 675 mmap = repo.manifest.read(changes[0])
675 676 (c, a, r, d, u) = repo.changes(qp, top)
676 677 if d:
677 678 raise util.Abort("deletions found between repo revs")
678 679 for f in c:
679 680 getfile(f, mmap[f])
680 681 for f in r:
681 682 getfile(f, mmap[f])
682 683 util.set_exec(repo.wjoin(f), mf1[f])
683 684 repo.dirstate.update(c + r, 'n')
684 685 for f in a:
685 686 try: os.unlink(repo.wjoin(f))
686 687 except: raise
687 688 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
688 689 except: pass
689 690 if a:
690 691 repo.dirstate.forget(a)
691 692 repo.dirstate.setparents(qp, revlog.nullid)
692 693 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
693 694 del self.applied[start:end]
694 695 if len(self.applied):
695 696 self.ui.write("Now at: %s\n" % self.applied[-1].split(':')[1])
696 697 else:
697 698 self.ui.write("Patch queue now empty\n")
698 699
699 700 def diff(self, repo, files):
700 701 top = self.check_toppatch(repo)
701 702 if not top:
702 703 self.ui.write("No patches applied\n")
703 704 return
704 705 qp = self.qparents(repo, top)
705 706 commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
706 707
707 708 def refresh(self, repo, short=False):
708 709 if len(self.applied) == 0:
709 710 self.ui.write("No patches applied\n")
710 711 return
711 712 wlock = repo.wlock()
712 713 self.check_toppatch(repo)
713 714 qp = self.qparents(repo)
714 715 (top, patch) = self.applied[-1].split(':')
715 716 top = revlog.bin(top)
716 717 cparents = repo.changelog.parents(top)
717 718 patchparent = self.qparents(repo, top)
718 719 message, comments, user, patchfound = self.readheaders(patch)
719 720
720 721 patchf = self.opener(patch, "w")
721 722 if comments:
722 723 comments = "\n".join(comments) + '\n\n'
723 724 patchf.write(comments)
724 725
725 726 tip = repo.changelog.tip()
726 727 if top == tip:
727 728 # if the top of our patch queue is also the tip, there is an
728 729 # optimization here. We update the dirstate in place and strip
729 730 # off the tip commit. Then just commit the current directory
730 731 # tree. We can also send repo.commit the list of files
731 732 # changed to speed up the diff
732 733 #
733 734 # in short mode, we only diff the files included in the
734 735 # patch already
735 736 #
736 737 # this should really read:
737 738 #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
738 739 # but we do it backwards to take advantage of manifest/chlog
739 740 # caching against the next repo.changes call
740 741 #
741 742 (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
742 743 if short:
743 744 filelist = cc + aa + dd
744 745 else:
745 746 filelist = None
746 747 (c, a, r, d, u) = repo.changes(None, None, filelist)
747 748
748 749 # we might end up with files that were added between tip and
749 750 # the dirstate parent, but then changed in the local dirstate.
750 751 # in this case, we want them to only show up in the added section
751 752 for x in c:
752 753 if x not in aa:
753 754 cc.append(x)
754 755 # we might end up with files added by the local dirstate that
755 756 # were deleted by the patch. In this case, they should only
756 757 # show up in the changed section.
757 758 for x in a:
758 759 if x in dd:
759 760 del dd[dd.index(x)]
760 761 cc.append(x)
761 762 else:
762 763 aa.append(x)
763 764 # make sure any files deleted in the local dirstate
764 765 # are not in the add or change column of the patch
765 766 forget = []
766 767 for x in d + r:
767 768 if x in aa:
768 769 del aa[aa.index(x)]
769 770 forget.append(x)
770 771 continue
771 772 elif x in cc:
772 773 del cc[cc.index(x)]
773 774 dd.append(x)
774 775
775 776 c = list(util.unique(cc))
776 777 r = list(util.unique(dd))
777 778 a = list(util.unique(aa))
778 779 filelist = list(util.unique(c + r + a ))
779 780 commands.dodiff(patchf, self.ui, repo, patchparent, None,
780 781 filelist, changes=(c, a, r, [], u))
781 782 patchf.close()
782 783
783 784 changes = repo.changelog.read(tip)
784 785 repo.dirstate.setparents(*cparents)
785 786 repo.dirstate.update(a, 'a')
786 787 repo.dirstate.update(r, 'r')
787 788 repo.dirstate.update(c, 'n')
788 789 repo.dirstate.forget(forget)
789 790
790 791 if not message:
791 792 message = "patch queue: %s\n" % patch
792 793 else:
793 794 message = "\n".join(message)
794 795 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
795 796 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
796 797 self.applied[-1] = revlog.hex(n) + ':' + patch
797 798 self.applied_dirty = 1
798 799 else:
799 800 commands.dodiff(patchf, self.ui, repo, patchparent, None)
800 801 patchf.close()
801 802 self.pop(repo, force=True, wlock=wlock)
802 803 self.push(repo, force=True, wlock=wlock)
803 804
804 805 def init(self, repo, create=False):
805 806 if os.path.isdir(self.path):
806 807 raise util.Abort("patch queue directory already exists")
807 808 os.mkdir(self.path)
808 809 if create:
809 810 return self.qrepo(create=True)
810 811
811 812 def unapplied(self, repo, patch=None):
812 813 if patch and patch not in self.series:
813 814 self.ui.warn("%s not in the series file\n" % patch)
814 815 sys.exit(1)
815 816 if not patch:
816 817 start = self.series_end()
817 818 else:
818 819 start = self.series.index(patch) + 1
819 820 for p in self.series[start:]:
820 821 self.ui.write("%s\n" % p)
821 822
822 823 def qseries(self, repo, missing=None):
823 824 start = self.series_end()
824 825 if not missing:
825 826 for p in self.series[:start]:
826 827 if self.ui.verbose:
827 828 self.ui.write("%d A " % self.series.index(p))
828 829 self.ui.write("%s\n" % p)
829 830 for p in self.series[start:]:
830 831 if self.ui.verbose:
831 832 self.ui.write("%d U " % self.series.index(p))
832 833 self.ui.write("%s\n" % p)
833 834 else:
834 835 list = []
835 836 for root, dirs, files in os.walk(self.path):
836 837 d = root[len(self.path) + 1:]
837 838 for f in files:
838 839 fl = os.path.join(d, f)
839 840 if (fl not in self.series and
840 841 fl not in (self.status_path, self.series_path)
841 842 and not fl.startswith('.')):
842 843 list.append(fl)
843 844 list.sort()
844 845 if list:
845 846 for x in list:
846 847 if self.ui.verbose:
847 848 self.ui.write("D ")
848 849 self.ui.write("%s\n" % x)
849 850
850 851 def issaveline(self, l):
851 852 name = l.split(':')[1]
852 853 if name == '.hg.patches.save.line':
853 854 return True
854 855
855 856 def qrepo(self, create=False):
856 857 if create or os.path.isdir(os.path.join(self.path, ".hg")):
857 858 return hg.repository(self.ui, path=self.path, create=create)
858 859
859 860 def restore(self, repo, rev, delete=None, qupdate=None):
860 861 c = repo.changelog.read(rev)
861 862 desc = c[4].strip()
862 863 lines = desc.splitlines()
863 864 i = 0
864 865 datastart = None
865 866 series = []
866 867 applied = []
867 868 qpp = None
868 869 for i in xrange(0, len(lines)):
869 870 if lines[i] == 'Patch Data:':
870 871 datastart = i + 1
871 872 elif lines[i].startswith('Dirstate:'):
872 873 l = lines[i].rstrip()
873 874 l = l[10:].split(' ')
874 875 qpp = [ hg.bin(x) for x in l ]
875 876 elif datastart != None:
876 877 l = lines[i].rstrip()
877 878 index = l.index(':')
878 879 id = l[:index]
879 880 file = l[index + 1:]
880 881 if id:
881 882 applied.append(l)
882 883 series.append(file)
883 884 if datastart == None:
884 885 self.ui.warn("No saved patch data found\n")
885 886 return 1
886 887 self.ui.warn("restoring status: %s\n" % lines[0])
887 888 self.full_series = series
888 889 self.applied = applied
889 890 self.read_series(self.full_series)
890 891 self.series_dirty = 1
891 892 self.applied_dirty = 1
892 893 heads = repo.changelog.heads()
893 894 if delete:
894 895 if rev not in heads:
895 896 self.ui.warn("save entry has children, leaving it alone\n")
896 897 else:
897 898 self.ui.warn("removing save entry %s\n" % hg.short(rev))
898 899 pp = repo.dirstate.parents()
899 900 if rev in pp:
900 901 update = True
901 902 else:
902 903 update = False
903 904 self.strip(repo, rev, update=update, backup='strip')
904 905 if qpp:
905 906 self.ui.warn("saved queue repository parents: %s %s\n" %
906 907 (hg.short(qpp[0]), hg.short(qpp[1])))
907 908 if qupdate:
908 909 print "queue directory updating"
909 910 r = self.qrepo()
910 911 if not r:
911 912 self.ui.warn("Unable to load queue repository\n")
912 913 return 1
913 914 r.update(qpp[0], allow=False, force=True)
914 915
915 916 def save(self, repo, msg=None):
916 917 if len(self.applied) == 0:
917 918 self.ui.warn("save: no patches applied, exiting\n")
918 919 return 1
919 920 if self.issaveline(self.applied[-1]):
920 921 self.ui.warn("status is already saved\n")
921 922 return 1
922 923
923 924 ar = [ ':' + x for x in self.full_series ]
924 925 if not msg:
925 926 msg = "hg patches saved state"
926 927 else:
927 928 msg = "hg patches: " + msg.rstrip('\r\n')
928 929 r = self.qrepo()
929 930 if r:
930 931 pp = r.dirstate.parents()
931 932 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
932 933 msg += "\n\nPatch Data:\n"
933 934 text = msg + "\n".join(self.applied) + '\n' + (ar and "\n".join(ar)
934 935 + '\n' or "")
935 936 n = repo.commit(None, text, user=None, force=1)
936 937 if not n:
937 938 self.ui.warn("repo commit failed\n")
938 939 return 1
939 940 self.applied.append(revlog.hex(n) + ":" + '.hg.patches.save.line')
940 941 self.applied_dirty = 1
941 942
942 943 def series_end(self):
943 944 end = 0
944 945 if len(self.applied) > 0:
945 946 (top, p) = self.applied[-1].split(':')
946 947 try:
947 948 end = self.series.index(p)
948 949 except ValueError:
949 950 return 0
950 951 return end + 1
951 952 return end
952 953
953 954 def qapplied(self, repo, patch=None):
954 955 if patch and patch not in self.series:
955 956 self.ui.warn("%s not in the series file\n" % patch)
956 957 sys.exit(1)
957 958 if not patch:
958 959 end = len(self.applied)
959 960 else:
960 961 end = self.series.index(patch) + 1
961 962 for x in xrange(end):
962 963 p = self.appliedname(x)
963 964 self.ui.write("%s\n" % p)
964 965
965 966 def appliedname(self, index):
966 967 p = self.applied[index]
967 968 if not self.ui.verbose:
968 969 p = p.split(':')[1]
969 970 return p
970 971
971 972 def top(self, repo):
972 973 if len(self.applied):
973 974 p = self.appliedname(-1)
974 975 self.ui.write(p + '\n')
975 976 else:
976 977 self.ui.write("No patches applied\n")
977 978
978 979 def next(self, repo):
979 980 end = self.series_end()
980 981 if end == len(self.series):
981 982 self.ui.write("All patches applied\n")
982 983 else:
983 984 self.ui.write(self.series[end] + '\n')
984 985
985 986 def prev(self, repo):
986 987 if len(self.applied) > 1:
987 988 p = self.appliedname(-2)
988 989 self.ui.write(p + '\n')
989 990 elif len(self.applied) == 1:
990 991 self.ui.write("Only one patch applied\n")
991 992 else:
992 993 self.ui.write("No patches applied\n")
993 994
994 995 def qimport(self, repo, files, patch=None, existing=None, force=None):
995 996 if len(files) > 1 and patch:
996 997 self.ui.warn("-n option not valid when importing multiple files\n")
997 998 sys.exit(1)
998 999 i = 0
999 1000 for filename in files:
1000 1001 if existing:
1001 1002 if not patch:
1002 1003 patch = filename
1003 1004 if not os.path.isfile(os.path.join(self.path, patch)):
1004 1005 self.ui.warn("patch %s does not exist\n" % patch)
1005 1006 sys.exit(1)
1006 1007 else:
1007 1008 try:
1008 1009 text = file(filename).read()
1009 1010 except IOError:
1010 1011 self.ui.warn("Unable to read %s\n" % filename)
1011 1012 sys.exit(1)
1012 1013 if not patch:
1013 1014 patch = os.path.split(filename)[1]
1014 1015 if not force and os.path.isfile(os.path.join(self.path, patch)):
1015 1016 self.ui.warn("patch %s already exists\n" % patch)
1016 1017 sys.exit(1)
1017 1018 patchf = self.opener(patch, "w")
1018 1019 patchf.write(text)
1019 1020 if patch in self.series:
1020 1021 self.ui.warn("patch %s is already in the series file\n" % patch)
1021 1022 sys.exit(1)
1022 1023 index = self.series_end() + i
1023 1024 self.full_series[index:index] = [patch]
1024 1025 self.read_series(self.full_series)
1025 1026 self.ui.warn("adding %s to series file\n" % patch)
1026 1027 i += 1
1027 1028 patch = None
1028 1029 self.series_dirty = 1
1029 1030
1030 1031 def delete(ui, repo, patch, **opts):
1031 1032 """remove a patch from the series file"""
1032 1033 q = repomap[repo]
1033 1034 q.delete(repo, patch)
1034 1035 q.save_dirty()
1035 1036 return 0
1036 1037
1037 1038 def applied(ui, repo, patch=None, **opts):
1038 1039 """print the patches already applied"""
1039 1040 repomap[repo].qapplied(repo, patch)
1040 1041 return 0
1041 1042
1042 1043 def unapplied(ui, repo, patch=None, **opts):
1043 1044 """print the patches not yet applied"""
1044 1045 repomap[repo].unapplied(repo, patch)
1045 1046 return 0
1046 1047
1047 1048 def qimport(ui, repo, *filename, **opts):
1048 1049 """import a patch"""
1049 1050 q = repomap[repo]
1050 1051 q.qimport(repo, filename, patch=opts['name'],
1051 1052 existing=opts['existing'], force=opts['force'])
1052 1053 q.save_dirty()
1053 1054 return 0
1054 1055
1055 1056 def init(ui, repo, **opts):
1056 1057 """init a new queue repository"""
1057 1058 q = repomap[repo]
1058 1059 r = q.init(repo, create=opts['create_repo'])
1059 1060 q.save_dirty()
1060 1061 if r:
1061 1062 fp = r.wopener('.hgignore', 'w')
1062 1063 print >> fp, 'syntax: glob'
1063 1064 print >> fp, 'status'
1064 1065 fp.close()
1065 1066 r.wopener('series', 'w').close()
1066 1067 r.add(['.hgignore', 'series'])
1067 1068 return 0
1068 1069
1069 1070 def commit(ui, repo, *pats, **opts):
1070 1071 q = repomap[repo]
1071 1072 r = q.qrepo()
1072 1073 if not r: raise util.Abort('no queue repository')
1073 1074 commands.commit(r.ui, r, *pats, **opts)
1074 1075
1075 1076 def series(ui, repo, **opts):
1076 1077 """print the entire series file"""
1077 1078 repomap[repo].qseries(repo, missing=opts['missing'])
1078 1079 return 0
1079 1080
1080 1081 def top(ui, repo, **opts):
1081 1082 """print the name of the current patch"""
1082 1083 repomap[repo].top(repo)
1083 1084 return 0
1084 1085
1085 1086 def next(ui, repo, **opts):
1086 1087 """print the name of the next patch"""
1087 1088 repomap[repo].next(repo)
1088 1089 return 0
1089 1090
1090 1091 def prev(ui, repo, **opts):
1091 1092 """print the name of the previous patch"""
1092 1093 repomap[repo].prev(repo)
1093 1094 return 0
1094 1095
1095 1096 def new(ui, repo, patch, **opts):
1096 1097 """create a new patch"""
1097 1098 q = repomap[repo]
1098 1099 q.new(repo, patch, msg=opts['message'], force=opts['force'])
1099 1100 q.save_dirty()
1100 1101 return 0
1101 1102
1102 1103 def refresh(ui, repo, **opts):
1103 1104 """update the current patch"""
1104 1105 q = repomap[repo]
1105 1106 q.refresh(repo, short=opts['short'])
1106 1107 q.save_dirty()
1107 1108 return 0
1108 1109
1109 1110 def diff(ui, repo, *files, **opts):
1110 1111 """diff of the current patch"""
1111 1112 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1112 1113 repomap[repo].diff(repo, list(files))
1113 1114 return 0
1114 1115
1115 1116 def lastsavename(path):
1116 1117 (dir, base) = os.path.split(path)
1117 1118 names = os.listdir(dir)
1118 1119 namere = re.compile("%s.([0-9]+)" % base)
1119 1120 max = None
1120 1121 maxname = None
1121 1122 for f in names:
1122 1123 m = namere.match(f)
1123 1124 if m:
1124 1125 index = int(m.group(1))
1125 1126 if max == None or index > max:
1126 1127 max = index
1127 1128 maxname = f
1128 1129 if maxname:
1129 1130 return (os.path.join(dir, maxname), max)
1130 1131 return (None, None)
1131 1132
1132 1133 def savename(path):
1133 1134 (last, index) = lastsavename(path)
1134 1135 if last is None:
1135 1136 index = 0
1136 1137 newpath = path + ".%d" % (index + 1)
1137 1138 return newpath
1138 1139
1139 1140 def push(ui, repo, patch=None, **opts):
1140 1141 """push the next patch onto the stack"""
1141 1142 q = repomap[repo]
1142 1143 mergeq = None
1143 1144
1144 1145 if opts['all']:
1145 1146 patch = q.series[-1]
1146 1147 if opts['merge']:
1147 1148 if opts['name']:
1148 1149 newpath = opts['name']
1149 1150 else:
1150 1151 newpath, i = lastsavename(q.path)
1151 1152 if not newpath:
1152 1153 ui.warn("no saved queues found, please use -n\n")
1153 1154 return 1
1154 1155 mergeq = queue(ui, repo.join(""), newpath)
1155 1156 ui.warn("merging with queue at: %s\n" % mergeq.path)
1156 1157 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1157 1158 mergeq=mergeq)
1158 1159 q.save_dirty()
1159 1160 return ret
1160 1161
1161 1162 def pop(ui, repo, patch=None, **opts):
1162 1163 """pop the current patch off the stack"""
1163 1164 localupdate = True
1164 1165 if opts['name']:
1165 1166 q = queue(ui, repo.join(""), repo.join(opts['name']))
1166 1167 ui.warn('using patch queue: %s\n' % q.path)
1167 1168 localupdate = False
1168 1169 else:
1169 1170 q = repomap[repo]
1170 1171 if opts['all'] and len(q.applied) > 0:
1171 1172 patch = q.applied[0].split(':')[1]
1172 1173 q.pop(repo, patch, force=opts['force'], update=localupdate)
1173 1174 q.save_dirty()
1174 1175 return 0
1175 1176
1176 1177 def restore(ui, repo, rev, **opts):
1177 1178 """restore the queue state saved by a rev"""
1178 1179 rev = repo.lookup(rev)
1179 1180 q = repomap[repo]
1180 1181 q.restore(repo, rev, delete=opts['delete'],
1181 1182 qupdate=opts['update'])
1182 1183 q.save_dirty()
1183 1184 return 0
1184 1185
1185 1186 def save(ui, repo, **opts):
1186 1187 """save current queue state"""
1187 1188 q = repomap[repo]
1188 1189 ret = q.save(repo, msg=opts['message'])
1189 1190 if ret:
1190 1191 return ret
1191 1192 q.save_dirty()
1192 1193 if opts['copy']:
1193 1194 path = q.path
1194 1195 if opts['name']:
1195 1196 newpath = os.path.join(q.basepath, opts['name'])
1196 1197 if os.path.exists(newpath):
1197 1198 if not os.path.isdir(newpath):
1198 1199 ui.warn("destination %s exists and is not a directory\n" %
1199 1200 newpath)
1200 1201 sys.exit(1)
1201 1202 if not opts['force']:
1202 1203 ui.warn("destination %s exists, use -f to force\n" %
1203 1204 newpath)
1204 1205 sys.exit(1)
1205 1206 else:
1206 1207 newpath = savename(path)
1207 1208 ui.warn("copy %s to %s\n" % (path, newpath))
1208 1209 util.copyfiles(path, newpath)
1209 1210 if opts['empty']:
1210 1211 try:
1211 1212 os.unlink(os.path.join(q.path, q.status_path))
1212 1213 except:
1213 1214 pass
1214 1215 return 0
1215 1216
1216 1217 def strip(ui, repo, rev, **opts):
1217 1218 """strip a revision and all later revs on the same branch"""
1218 1219 rev = repo.lookup(rev)
1219 1220 backup = 'all'
1220 1221 if opts['backup']:
1221 1222 backup = 'strip'
1222 1223 elif opts['nobackup']:
1223 1224 backup = 'none'
1224 1225 repomap[repo].strip(repo, rev, backup=backup)
1225 1226 return 0
1226 1227
1227 1228 def version(ui, q=None):
1228 1229 """print the version number"""
1229 1230 ui.write("mq version %s\n" % versionstr)
1230 1231 return 0
1231 1232
1232 1233 def reposetup(ui, repo):
1233 1234 repomap[repo] = queue(ui, repo.join(""))
1234 1235
1235 1236 cmdtable = {
1236 1237 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1237 1238 "qcommit|qci":
1238 1239 (commit,
1239 1240 commands.table["^commit|ci"][1],
1240 1241 'hg qcommit [OPTION]... [FILE]...'),
1241 1242 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1242 1243 "qdelete": (delete, [], 'hg qdelete PATCH'),
1243 1244 "^qimport":
1244 1245 (qimport,
1245 1246 [('e', 'existing', None, 'import file in patch dir'),
1246 1247 ('n', 'name', '', 'patch file name'),
1247 1248 ('f', 'force', None, 'overwrite existing files')],
1248 1249 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1249 1250 "^qinit":
1250 1251 (init,
1251 1252 [('c', 'create-repo', None, 'create patch repository')],
1252 1253 'hg qinit [-c]'),
1253 1254 "qnew":
1254 1255 (new,
1255 1256 [('m', 'message', '', 'commit message'),
1256 1257 ('f', 'force', None, 'force')],
1257 1258 'hg qnew [-m TEXT] [-f] PATCH'),
1258 1259 "qnext": (next, [], 'hg qnext'),
1259 1260 "qprev": (prev, [], 'hg qprev'),
1260 1261 "^qpop":
1261 1262 (pop,
1262 1263 [('a', 'all', None, 'pop all patches'),
1263 1264 ('n', 'name', '', 'queue name to pop'),
1264 1265 ('f', 'force', None, 'forget any local changes')],
1265 1266 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1266 1267 "^qpush":
1267 1268 (push,
1268 1269 [('f', 'force', None, 'apply if the patch has rejects'),
1269 1270 ('l', 'list', None, 'list patch name in commit text'),
1270 1271 ('a', 'all', None, 'apply all patches'),
1271 1272 ('m', 'merge', None, 'merge from another queue'),
1272 1273 ('n', 'name', '', 'merge queue name')],
1273 1274 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1274 1275 "^qrefresh":
1275 1276 (refresh,
1276 1277 [('s', 'short', None, 'short refresh')],
1277 1278 'hg qrefresh [-s]'),
1278 1279 "qrestore":
1279 1280 (restore,
1280 1281 [('d', 'delete', None, 'delete save entry'),
1281 1282 ('u', 'update', None, 'update queue working dir')],
1282 1283 'hg qrestore [-d] [-u] REV'),
1283 1284 "qsave":
1284 1285 (save,
1285 1286 [('m', 'message', '', 'commit message'),
1286 1287 ('c', 'copy', None, 'copy patch directory'),
1287 1288 ('n', 'name', '', 'copy directory name'),
1288 1289 ('e', 'empty', None, 'clear queue status file'),
1289 1290 ('f', 'force', None, 'force copy')],
1290 1291 'hg qsave [-m TEXT] [-c] [-n NAME] [-e] [-f]'),
1291 1292 "qseries":
1292 1293 (series,
1293 1294 [('m', 'missing', None, 'print patches not in series')],
1294 1295 'hg qseries [-m]'),
1295 1296 "^strip":
1296 1297 (strip,
1297 1298 [('f', 'force', None, 'force multi-head removal'),
1298 1299 ('b', 'backup', None, 'bundle unrelated changesets'),
1299 1300 ('n', 'nobackup', None, 'no backups')],
1300 1301 'hg strip [-f] [-b] [-n] REV'),
1301 1302 "qtop": (top, [], 'hg qtop'),
1302 1303 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1303 1304 "qversion": (version, [], 'hg qversion')
1304 1305 }
1305 1306
@@ -1,267 +1,267 @@
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets come in at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # sources = serve # notify if source of incoming changes in this list
44 44 # # (serve == ssh or http, push, pull, bundle)
45 45 # [email]
46 46 # from = user@host.com # email address to send as if none given
47 47 # [web]
48 48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 49 #
50 50 # notify config file has same format as regular hgrc. it has two
51 51 # sections so you can express subscriptions in whatever way is handier
52 52 # for you.
53 53 #
54 54 # [usersubs]
55 55 # # key is subscriber email, value is ","-separated list of glob patterns
56 56 # user@host = pattern
57 57 #
58 58 # [reposubs]
59 59 # # key is glob pattern, value is ","-separated list of subscriber emails
60 60 # pattern = user@host
61 61 #
62 62 # glob patterns are matched against path to repo root.
63 63 #
64 64 # if you like, you can put the notify config file in a repo that users
65 65 # can push changes to; they can then manage their own subscriptions.
66 66
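As an illustrative aside (not part of notify.py): the [usersubs] and [reposubs] globs described above are matched against the path of the repository root with fnmatch, which is what notifier.subscribers() does further down. A minimal sketch, with made-up paths and addresses:

    import fnmatch

    repo_root = '/srv/hg/projects/widget'                   # hypothetical
    usersubs = {'alice@example.com': '/srv/hg/projects/*'}  # user -> patterns
    reposubs = {'/srv/hg/*': 'bob@example.com, carol@example.com'}

    subs = set()
    for user, pats in usersubs.items():
        for pat in pats.split(','):
            if fnmatch.fnmatch(repo_root, pat.strip()):
                subs.add(user)
    for pat, users in reposubs.items():
        if fnmatch.fnmatch(repo_root, pat):
            subs.update(u.strip() for u in users.split(','))

    print(sorted(subs))   # all three addresses get notified for this repo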
67 67 from mercurial.demandload import *
68 68 from mercurial.i18n import gettext as _
69 69 from mercurial.node import *
70 70 demandload(globals(), 'email.Parser mercurial:commands,templater,util')
71 71 demandload(globals(), 'fnmatch socket time')
72 72
73 73 # template for single changeset can include email headers.
74 74 single_template = '''
75 75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 76 From: {author}
77 77
78 78 changeset {node|short} in {root}
79 79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 80 description:
81 81 \t{desc|tabindent|strip}
82 82 '''.lstrip()
83 83
84 84 # template for multiple changesets should not contain email headers,
85 85 # because only first set of headers will be used and result will look
86 86 # strange.
87 87 multiple_template = '''
88 88 changeset {node|short} in {root}
89 89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 90 summary: {desc|firstline}
91 91 '''
92 92
93 93 deftemplates = {
94 94 'changegroup': multiple_template,
95 95 }
96 96
97 97 class notifier(object):
98 98 '''email notification class.'''
99 99
100 100 def __init__(self, ui, repo, hooktype):
101 101 self.ui = ui
102 102 self.ui.readconfig(self.ui.config('notify', 'config'))
103 103 self.repo = repo
104 104 self.stripcount = int(self.ui.config('notify', 'strip', 0))
105 105 self.root = self.strip(self.repo.root)
106 106 self.domain = self.ui.config('notify', 'domain')
107 107 self.sio = templater.stringio()
108 108 self.subs = self.subscribers()
109 109
110 110 mapfile = self.ui.config('notify', 'style')
111 111 template = (self.ui.config('notify', hooktype) or
112 112 self.ui.config('notify', 'template'))
113 113 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
114 114 self.sio)
115 115 if not mapfile and not template:
116 116 template = deftemplates.get(hooktype) or single_template
117 117 if template:
118 118 template = templater.parsestring(template, quoted=False)
119 119 self.t.use_template(template)
120 120
121 121 def strip(self, path):
122 122 '''strip leading slashes from local path, turn into web-safe path.'''
123 123
124 124 path = util.pconvert(path)
125 125 count = self.stripcount
126 126 while path and count >= 0:
127 127 c = path.find('/')
128 128 if c == -1:
129 129 break
130 130 path = path[c+1:]
131 131 count -= 1
132 132 return path
133 133
134 134 def fixmail(self, addr):
135 135 '''try to clean up email addresses.'''
136 136
137 137 addr = templater.email(addr.strip())
138 138 a = addr.find('@localhost')
139 139 if a != -1:
140 140 addr = addr[:a]
141 141 if '@' not in addr:
142 142 return addr + '@' + self.domain
143 143 return addr
144 144
145 145 def subscribers(self):
146 146 '''return list of email addresses of subscribers to this repo.'''
147 147
148 148 subs = {}
149 149 for user, pats in self.ui.configitems('usersubs'):
150 150 for pat in pats.split(','):
151 151 if fnmatch.fnmatch(self.repo.root, pat.strip()):
152 152 subs[self.fixmail(user)] = 1
153 153 for pat, users in self.ui.configitems('reposubs'):
154 154 if fnmatch.fnmatch(self.repo.root, pat):
155 155 for user in users.split(','):
156 156 subs[self.fixmail(user)] = 1
157 157 subs = subs.keys()
158 158 subs.sort()
159 159 return subs
160 160
161 161 def url(self, path=None):
162 162 return self.ui.config('web', 'baseurl') + (path or self.root)
163 163
164 164 def node(self, node):
165 165 '''format one changeset.'''
166 166
167 167 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
168 168 baseurl=self.ui.config('web', 'baseurl'),
169 169 root=self.repo.root,
170 170 webroot=self.root)
171 171
172 172 def skipsource(self, source):
173 173 '''true if incoming changes from this source should be skipped.'''
174 174 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
175 175 return source not in ok_sources
176 176
177 177 def send(self, node, count):
178 178 '''send message.'''
179 179
180 180 p = email.Parser.Parser()
181 181 self.sio.seek(0)
182 182 msg = p.parse(self.sio)
183 183
184 184 def fix_subject():
185 185 '''try to make subject line exist and be useful.'''
186 186
187 187 subject = msg['Subject']
188 188 if not subject:
189 189 if count > 1:
190 190 subject = _('%s: %d new changesets') % (self.root, count)
191 191 else:
192 192 changes = self.repo.changelog.read(node)
193 193 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
194 194 subject = '%s: %s' % (self.root, s)
195 195 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
196 196 if maxsubject and len(subject) > maxsubject:
197 197 subject = subject[:maxsubject-3] + '...'
198 198 del msg['Subject']
199 199 msg['Subject'] = subject
200 200
201 201 def fix_sender():
202 202 '''try to make message have proper sender.'''
203 203
204 204 sender = msg['From']
205 205 if not sender:
206 206 sender = self.ui.config('email', 'from') or self.ui.username()
207 207 if '@' not in sender or '@localhost' in sender:
208 208 sender = self.fixmail(sender)
209 209 del msg['From']
210 210 msg['From'] = sender
211 211
212 212 fix_subject()
213 213 fix_sender()
214 214
215 215 msg['X-Hg-Notification'] = 'changeset ' + short(node)
216 216 if not msg['Message-Id']:
217 217 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
218 218 (short(node), int(time.time()),
219 219 hash(self.repo.root), socket.getfqdn()))
220 220 msg['To'] = ', '.join(self.subs)
221 221
222 222 msgtext = msg.as_string(0)
223 223 if self.ui.configbool('notify', 'test', True):
224 224 self.ui.write(msgtext)
225 225 if not msgtext.endswith('\n'):
226 226 self.ui.write('\n')
227 227 else:
228 228 mail = self.ui.sendmail()
229 229 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
230 230
231 def diff(self, node):
231 def diff(self, node, ref):
232 232 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
233 233 if maxdiff == 0:
234 234 return
235 235 fp = templater.stringio()
236 236 prev = self.repo.changelog.parents(node)[0]
237 commands.dodiff(fp, self.ui, self.repo, prev,
238 self.repo.changelog.tip())
237 commands.dodiff(fp, self.ui, self.repo, prev, ref)
239 238 difflines = fp.getvalue().splitlines(1)
240 239 if maxdiff > 0 and len(difflines) > maxdiff:
241 240 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
242 241 (len(difflines), maxdiff))
243 242 difflines = difflines[:maxdiff]
244 243 elif difflines:
245 244 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
246 245 self.sio.write(*difflines)
247 246
248 247 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
249 248 '''send email notifications to interested subscribers.
250 249
251 250 if used as changegroup hook, send one email for all changesets in
252 251 changegroup. else send one email per changeset.'''
253 252 n = notifier(ui, repo, hooktype)
254 253 if not n.subs or n.skipsource(source):
255 254 return
256 255 node = bin(node)
257 256 if hooktype == 'changegroup':
258 257 start = repo.changelog.rev(node)
259 258 end = repo.changelog.count()
260 259 count = end - start
261 260 for rev in xrange(start, end):
262 261 n.node(repo.changelog.node(rev))
262 n.diff(node, repo.changelog.tip())
263 263 else:
264 264 count = 1
265 265 n.node(node)
266 n.diff(node)
266 n.diff(node, node)
267 267 n.send(node, count)
@@ -1,272 +1,270 @@
1 1 # Command for sending a collection of Mercurial changesets as a series
2 2 # of patch emails.
3 3 #
4 4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 5 # which describes the series as a whole.
6 6 #
7 7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 8 # the first line of the changeset description as the subject text.
9 9 # The message contains two or three body parts:
10 10 #
11 11 # The remainder of the changeset description.
12 12 #
13 13 # [Optional] If the diffstat program is installed, the result of
14 14 # running diffstat on the patch.
15 15 #
16 16 # The patch itself, as generated by "hg export".
17 17 #
18 18 # Each message refers to all of its predecessors using the In-Reply-To
19 19 # and References headers, so they will show up as a sequence in
20 20 # threaded mail and news readers, and in mail archives.
21 21 #
22 22 # For each changeset, you will be prompted with a diffstat summary and
23 23 # the changeset summary, so you can be sure you are sending the right
24 24 # changes.
25 25 #
26 26 # It is best to run this script with the "-n" (test only) flag before
27 27 # firing it up "for real", in which case it will use your pager to
28 28 # display each of the messages that it would send.
29 29 #
30 30 # The "-m" (mbox) option will create an mbox file instead of sending
31 31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
32 32 # and finally sent with "formail -s sendmail -bm -t < mbox".
33 33 #
34 34 # To configure other defaults, add a section like this to your hgrc
35 35 # file:
36 36 #
37 37 # [email]
38 38 # from = My Name <my@email>
39 39 # to = recipient1, recipient2, ...
40 40 # cc = cc1, cc2, ...
41 41
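A small aside on the threading described above (a standalone sketch, not patchbomb's own code; the node ids are made up): each mail gets a Message-Id derived from its changeset node, and every mail after the first carries an In-Reply-To pointing at the first one, so the whole series is grouped in threaded readers:

    import socket, time

    start_time = int(time.time())

    def genmsgid(id):
        # same shape as the genmsgid helper defined later in this file
        return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())

    nodes = ['0123456789abcdef0123456789abcdef01234567',
             'fedcba9876543210fedcba9876543210fedcba98']   # hypothetical

    parent = None
    for i, node in enumerate(nodes):
        headers = {'Subject': '[PATCH %d of %d] ...' % (i + 1, len(nodes)),
                   'Message-Id': genmsgid(node)}
        if parent:
            headers['In-Reply-To'] = parent   # thread under the first mail
        else:
            parent = headers['Message-Id']    # first mail anchors the thread
        print(headers)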
42 42 from mercurial.demandload import *
43 43 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
44 44 mercurial:commands,hg,ui
45 45 os errno popen2 socket sys tempfile time''')
46 46 from mercurial.i18n import gettext as _
47 47
48 48 try:
49 49 # readline gives raw_input editing capabilities, but is not
50 50 # present on windows
51 51 import readline
52 52 except ImportError: pass
53 53
54 54 def diffstat(patch):
55 55 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
56 56 try:
57 57 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
58 58 try:
59 59 for line in patch: print >> p.tochild, line
60 60 p.tochild.close()
61 61 if p.wait(): return
62 62 fp = os.fdopen(fd, 'r')
63 63 stat = []
64 64 for line in fp: stat.append(line.lstrip())
65 65 last = stat.pop()
66 66 stat.insert(0, last)
67 67 stat = ''.join(stat)
68 68 if stat.startswith('0 files'): raise ValueError
69 69 return stat
70 70 except: raise
71 71 finally:
72 72 try: os.unlink(name)
73 73 except: pass
74 74
75 75 def patchbomb(ui, repo, *revs, **opts):
76 76 '''send changesets as a series of patch emails
77 77
78 78 The series starts with a "[PATCH 0 of N]" introduction, which
79 79 describes the series as a whole.
80 80
81 81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 82 the first line of the changeset description as the subject text.
83 83 The message contains two or three body parts. First, the rest of
84 84 the changeset description. Next, (optionally) if the diffstat
85 85 program is installed, the result of running diffstat on the patch.
86 86 Finally, the patch itself, as generated by "hg export".'''
87 87 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
88 88 if default: prompt += ' [%s]' % default
89 89 prompt += rest
90 90 while True:
91 91 r = raw_input(prompt)
92 92 if r: return r
93 93 if default is not None: return default
94 94 if empty_ok: return r
95 95 ui.warn(_('Please enter a valid value.\n'))
96 96
97 97 def confirm(s):
98 98 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
99 99 raise ValueError
100 100
101 101 def cdiffstat(summary, patch):
102 102 s = diffstat(patch)
103 103 if s:
104 104 if summary:
105 105 ui.write(summary, '\n')
106 106 ui.write(s, '\n')
107 107 confirm(_('Does the diffstat above look okay'))
108 108 return s
109 109
110 110 def makepatch(patch, idx, total):
111 111 desc = []
112 112 node = None
113 113 body = ''
114 114 for line in patch:
115 115 if line.startswith('#'):
116 116 if line.startswith('# Node ID'): node = line.split()[-1]
117 117 continue
118 118 if line.startswith('diff -r'): break
119 119 desc.append(line)
120 120 if not node: raise ValueError
121 121
122 122 #body = ('\n'.join(desc[1:]).strip() or
123 123 # 'Patch subject is complete summary.')
124 124 #body += '\n\n\n'
125 125
126 126 if opts['plain']:
127 127 while patch and patch[0].startswith('# '): patch.pop(0)
128 128 if patch: patch.pop(0)
129 129 while patch and not patch[0].strip(): patch.pop(0)
130 130 if opts['diffstat']:
131 131 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
132 132 body += '\n'.join(patch)
133 133 msg = email.MIMEText.MIMEText(body)
134 134 if total == 1:
135 135 subj = '[PATCH] ' + desc[0].strip()
136 136 else:
137 137 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
138 138 if subj.endswith('.'): subj = subj[:-1]
139 139 msg['Subject'] = subj
140 140 msg['X-Mercurial-Node'] = node
141 141 return msg
142 142
143 143 start_time = int(time.time())
144 144
145 145 def genmsgid(id):
146 146 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
147 147
148 148 patches = []
149 149
150 150 class exportee:
151 151 def __init__(self, container):
152 152 self.lines = []
153 153 self.container = container
154 154 self.name = 'email'
155 155
156 156 def write(self, data):
157 157 self.lines.append(data)
158 158
159 159 def close(self):
160 160 self.container.append(''.join(self.lines).split('\n'))
161 161 self.lines = []
162 162
163 163 commands.export(ui, repo, *revs, **{'output': exportee(patches),
164 164 'switch_parent': False,
165 165 'text': None})
166 166
167 167 jumbo = []
168 168 msgs = []
169 169
170 170 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
171 171
172 172 for p, i in zip(patches, range(len(patches))):
173 173 jumbo.extend(p)
174 174 msgs.append(makepatch(p, i + 1, len(patches)))
175 175
176 176 sender = (opts['from'] or ui.config('email', 'from') or
177 177 ui.config('patchbomb', 'from') or
178 178 prompt('From', ui.username()))
179 179
180 180 def getaddrs(opt, prpt, default = None):
181 181 addrs = opts[opt] or (ui.config('email', opt) or
182 182 ui.config('patchbomb', opt) or
183 183 prompt(prpt, default = default)).split(',')
184 184 return [a.strip() for a in addrs if a.strip()]
185 185 to = getaddrs('to', 'To')
186 186 cc = getaddrs('cc', 'Cc', '')
187 187
188 188 if len(patches) > 1:
189 189 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
190 190
191 191 msg = email.MIMEMultipart.MIMEMultipart()
192 192 msg['Subject'] = '[PATCH 0 of %d] %s' % (
193 193 len(patches),
194 194 opts['subject'] or
195 195 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
196 196
197 197 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
198 198
199 199 body = []
200 200
201 201 while True:
202 202 try: l = raw_input()
203 203 except EOFError: break
204 204 if l == '.': break
205 205 body.append(l)
206 206
207 207 msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n'))
208 208
209 209 if opts['diffstat']:
210 210 d = cdiffstat(_('Final summary:\n'), jumbo)
211 211 if d: msg.attach(email.MIMEText.MIMEText(d))
212 212
213 213 msgs.insert(0, msg)
214 214
215 215 ui.write('\n')
216 216
217 217 if not opts['test'] and not opts['mbox']:
218 218 mail = ui.sendmail()
219 219 parent = None
220 220 tz = time.strftime('%z')
221 221 sender_addr = email.Utils.parseaddr(sender)[1]
222 222 for m in msgs:
223 223 try:
224 224 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
225 225 except TypeError:
226 226 m['Message-Id'] = genmsgid('patchbomb')
227 227 if parent:
228 228 m['In-Reply-To'] = parent
229 229 else:
230 230 parent = m['Message-Id']
231 231 m['Date'] = time.strftime('%a, %e %b %Y %T ', time.localtime(start_time)) + tz
232 232 start_time += 1
233 233 m['From'] = sender
234 234 m['To'] = ', '.join(to)
235 235 if cc: m['Cc'] = ', '.join(cc)
236 236 if opts['test']:
237 237 ui.status('Displaying ', m['Subject'], ' ...\n')
238 238 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
239 239 try:
240 240 fp.write(m.as_string(0))
241 241 fp.write('\n')
242 242 except IOError, inst:
243 243 if inst.errno != errno.EPIPE:
244 244 raise
245 245 fp.close()
246 246 elif opts['mbox']:
247 247 ui.status('Writing ', m['Subject'], ' ...\n')
248 248 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
249 249 date = time.asctime(time.localtime(start_time))
250 250 fp.write('From %s %s\n' % (sender_addr, date))
251 251 fp.write(m.as_string(0))
252 252 fp.write('\n\n')
253 253 fp.close()
254 254 else:
255 255 ui.status('Sending ', m['Subject'], ' ...\n')
256 256 mail.sendmail(sender, to + cc, m.as_string(0))
257 if not opts['test'] and not opts['mbox']:
258 mail.close()
259 257
260 258 cmdtable = {
261 259 'email':
262 260 (patchbomb,
263 261 [('c', 'cc', [], 'email addresses of copy recipients'),
264 262 ('d', 'diffstat', None, 'add diffstat output to messages'),
265 263 ('f', 'from', '', 'email address of sender'),
266 264 ('', 'plain', None, 'omit hg patch header'),
267 265 ('n', 'test', None, 'print messages that would be sent'),
268 266 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
269 267 ('s', 'subject', '', 'subject of introductory message'),
270 268 ('t', 'to', [], 'email addresses of recipients')],
271 269 "hg email [OPTION]... [REV]...")
272 270 }
@@ -1,207 +1,232 @@
1 1 """
2 2 bundlerepo.py - repository class for viewing uncompressed bundles
3 3
4 4 This provides a read-only repository interface to bundles as if
5 5 they were part of the actual repository.
6 6
7 7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import gettext as _
15 15 from demandload import demandload
16 demandload(globals(), "changegroup util os struct")
16 demandload(globals(), "changegroup util os struct bz2 tempfile")
17 17
18 18 import localrepo, changelog, manifest, filelog, revlog
19 19
20 20 class bundlerevlog(revlog.revlog):
21 21 def __init__(self, opener, indexfile, datafile, bundlefile,
22 22 linkmapper=None):
23 23 # How it works:
24 24 # to retrieve a revision, we need to know the offset of
25 25 # the revision in the bundlefile (an opened file).
26 26 #
27 27 # We store this offset in the index (start), to differentiate a
28 28 # rev in the bundle and from a rev in the revlog, we check
29 29 # len(index[r]). If the tuple is bigger than 7, it is a bundle
30 30 # (it is bigger since we store the node to which the delta is)
31 31 #
32 32 revlog.revlog.__init__(self, opener, indexfile, datafile)
33 33 self.bundlefile = bundlefile
34 34 self.basemap = {}
35 35 def chunkpositer():
36 36 for chunk in changegroup.chunkiter(bundlefile):
37 37 pos = bundlefile.tell()
38 38 yield chunk, pos - len(chunk)
39 39 n = self.count()
40 40 prev = None
41 41 for chunk, start in chunkpositer():
42 42 size = len(chunk)
43 43 if size < 80:
44 44 raise util.Abort("invalid changegroup")
45 45 start += 80
46 46 size -= 80
47 47 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
48 48 if node in self.nodemap:
49 49 prev = node
50 50 continue
51 51 for p in (p1, p2):
52 52 if not p in self.nodemap:
53 raise RevlogError(_("unknown parent %s") % short(p1))
53 raise revlog.RevlogError(_("unknown parent %s") % short(p1))
54 54 if linkmapper is None:
55 55 link = n
56 56 else:
57 57 link = linkmapper(cs)
58 58
59 59 if not prev:
60 60 prev = p1
61 61 # start, size, base is not used, link, p1, p2, delta ref
62 62 if self.version == 0:
63 63 e = (start, size, None, link, p1, p2, node)
64 64 else:
65 65 e = (self.offset_type(start, 0), size, -1, None, link,
66 66 self.rev(p1), self.rev(p2), node)
67 67 self.basemap[n] = prev
68 68 self.index.append(e)
69 69 self.nodemap[node] = n
70 70 prev = node
71 71 n += 1
72 72
73 73 def bundle(self, rev):
74 74 """is rev from the bundle"""
75 75 if rev < 0:
76 76 return False
77 77 return rev in self.basemap
78 78 def bundlebase(self, rev): return self.basemap[rev]
79 def chunk(self, rev, df=None):
79 def chunk(self, rev, df=None, cachelen=4096):
80 80 # Warning: in case of bundle, the diff is against bundlebase,
81 81 # not against rev - 1
82 82 # XXX: could use some caching
83 83 if not self.bundle(rev):
84 return revlog.revlog.chunk(self, rev)
84 return revlog.revlog.chunk(self, rev, df, cachelen)
85 85 self.bundlefile.seek(self.start(rev))
86 86 return self.bundlefile.read(self.length(rev))
87 87
88 88 def revdiff(self, rev1, rev2):
89 89 """return or calculate a delta between two revisions"""
90 90 if self.bundle(rev1) and self.bundle(rev2):
91 91 # hot path for bundle
92 92 revb = self.rev(self.bundlebase(rev2))
93 93 if revb == rev1:
94 94 return self.chunk(rev2)
95 95 elif not self.bundle(rev1) and not self.bundle(rev2):
96 96 return revlog.revlog.chunk(self, rev1, rev2)
97 97
98 98 return self.diff(self.revision(self.node(rev1)),
99 99 self.revision(self.node(rev2)))
100 100
101 101 def revision(self, node):
102 102 """return an uncompressed revision of a given"""
103 103 if node == nullid: return ""
104 104
105 105 text = None
106 106 chain = []
107 107 iter_node = node
108 108 rev = self.rev(iter_node)
109 109 # reconstruct the revision if it is from a changegroup
110 110 while self.bundle(rev):
111 111 if self.cache and self.cache[0] == iter_node:
112 112 text = self.cache[2]
113 113 break
114 114 chain.append(rev)
115 115 iter_node = self.bundlebase(rev)
116 116 rev = self.rev(iter_node)
117 117 if text is None:
118 118 text = revlog.revlog.revision(self, iter_node)
119 119
120 120 while chain:
121 121 delta = self.chunk(chain.pop())
122 122 text = self.patches(text, [delta])
123 123
124 124 p1, p2 = self.parents(node)
125 125 if node != revlog.hash(text, p1, p2):
126 raise RevlogError(_("integrity check failed on %s:%d")
127 % (self.datafile, self.rev(node)))
126 raise revlog.RevlogError(_("integrity check failed on %s:%d")
127 % (self.datafile, self.rev(node)))
128 128
129 129 self.cache = (node, self.rev(node), text)
130 130 return text
131 131
132 132 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
133 133 raise NotImplementedError
134 134 def addgroup(self, revs, linkmapper, transaction, unique=0):
135 135 raise NotImplementedError
136 136 def strip(self, rev, minlink):
137 137 raise NotImplementedError
138 138 def checksize(self):
139 139 raise NotImplementedError
140 140
141 141 class bundlechangelog(bundlerevlog, changelog.changelog):
142 142 def __init__(self, opener, bundlefile):
143 143 changelog.changelog.__init__(self, opener)
144 144 bundlerevlog.__init__(self, opener, "00changelog.i", "00changelog.d",
145 145 bundlefile)
146 146
147 147 class bundlemanifest(bundlerevlog, manifest.manifest):
148 148 def __init__(self, opener, bundlefile, linkmapper):
149 149 manifest.manifest.__init__(self, opener)
150 150 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
151 151 bundlefile, linkmapper)
152 152
153 153 class bundlefilelog(bundlerevlog, filelog.filelog):
154 154 def __init__(self, opener, path, bundlefile, linkmapper):
155 155 filelog.filelog.__init__(self, opener, path)
156 156 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
157 157 bundlefile, linkmapper)
158 158
159 159 class bundlerepository(localrepo.localrepository):
160 160 def __init__(self, ui, path, bundlename):
161 161 localrepo.localrepository.__init__(self, ui, path)
162 f = open(bundlename, "rb")
163 s = util.fstat(f)
164 self.bundlefile = f
162 self.tempfile = None
163 self.bundlefile = open(bundlename, "rb")
165 164 header = self.bundlefile.read(6)
166 165 if not header.startswith("HG"):
167 166 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
168 167 elif not header.startswith("HG10"):
169 168 raise util.Abort(_("%s: unknown bundle version") % bundlename)
170 169 elif header == "HG10BZ":
171 raise util.Abort(_("%s: compressed bundle not supported")
172 % bundlename)
170 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
171 suffix=".hg10un", dir=self.path)
172 self.tempfile = temp
173 fptemp = os.fdopen(fdtemp, 'wb')
174 def generator(f):
175 zd = bz2.BZ2Decompressor()
176 zd.decompress("BZ")
177 for chunk in f:
178 yield zd.decompress(chunk)
179 gen = generator(util.filechunkiter(self.bundlefile, 4096))
180
181 try:
182 fptemp.write("HG10UN")
183 for chunk in gen:
184 fptemp.write(chunk)
185 finally:
186 fptemp.close()
187 self.bundlefile.close()
188
189 self.bundlefile = open(self.tempfile, "rb")
190 # seek right after the header
191 self.bundlefile.seek(6)
173 192 elif header == "HG10UN":
174 # uncompressed bundle supported
193 # nothing to do
175 194 pass
176 195 else:
177 196 raise util.Abort(_("%s: unknown bundle compression type")
178 197 % bundlename)
179 198 self.changelog = bundlechangelog(self.opener, self.bundlefile)
180 199 self.manifest = bundlemanifest(self.opener, self.bundlefile,
181 200 self.changelog.rev)
182 201 # dict with the mapping 'filename' -> position in the bundle
183 202 self.bundlefilespos = {}
184 203 while 1:
185 204 f = changegroup.getchunk(self.bundlefile)
186 205 if not f:
187 206 break
188 207 self.bundlefilespos[f] = self.bundlefile.tell()
189 208 for c in changegroup.chunkiter(self.bundlefile):
190 209 pass
191 210
192 211 def dev(self):
193 212 return -1
194 213
195 214 def file(self, f):
196 215 if f[0] == '/':
197 216 f = f[1:]
198 217 if f in self.bundlefilespos:
199 218 self.bundlefile.seek(self.bundlefilespos[f])
200 219 return bundlefilelog(self.opener, f, self.bundlefile,
201 220 self.changelog.rev)
202 221 else:
203 222 return filelog.filelog(self.opener, f)
204 223
205 224 def close(self):
206 225 """Close assigned bundle file immediately."""
207 226 self.bundlefile.close()
227
228 def __del__(self):
229 if not self.bundlefile.closed:
230 self.bundlefile.close()
231 if self.tempfile is not None:
232 os.unlink(self.tempfile)
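The HG10BZ handling above relies on a small trick worth spelling out: the writer (see the bundle() helper in the mq diff earlier) emits the four bytes "HG10" followed by a raw bz2 stream, whose first two bytes are "BZ", so the six-byte header reads "HG10BZ"; the reader then primes a fresh BZ2Decompressor with "BZ" before feeding it the rest of the file. A self-contained sketch of the round trip, assuming nothing beyond the standard library:

    import bz2

    payload = b'changegroup data' * 10

    # writer side: "HG10" + bz2 stream (the stream itself starts with "BZh")
    bundle = b'HG10' + bz2.compress(payload)
    assert bundle[:6] == b'HG10BZ'

    # reader side: re-inject the "BZ" swallowed by the 6-byte header check
    zd = bz2.BZ2Decompressor()
    zd.decompress(b'BZ')
    restored = zd.decompress(bundle[6:])
    assert restored == payload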
@@ -1,3431 +1,3453 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival changegroup")
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted, unknown = repo.changes()
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def filterfiles(filters, files):
28 28 l = [x for x in files if x in filters]
29 29
30 30 for t in filters:
31 31 if t and t[-1] != "/":
32 32 t += "/"
33 33 l += [x for x in files if x.startswith(t)]
34 34 return l
35 35
36 36 def relpath(repo, args):
37 37 cwd = repo.getcwd()
38 38 if cwd:
39 39 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 40 return args
41 41
42 42 def matchpats(repo, pats=[], opts={}, head=''):
43 43 cwd = repo.getcwd()
44 44 if not pats and cwd:
45 45 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
46 46 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
47 47 cwd = ''
48 48 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
49 49 opts.get('exclude'), head)
50 50
51 51 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
52 52 files, matchfn, anypats = matchpats(repo, pats, opts, head)
53 53 exact = dict(zip(files, files))
54 54 def walk():
55 55 for src, fn in repo.walk(node=node, files=files, match=matchfn,
56 56 badmatch=badmatch):
57 57 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
58 58 return files, matchfn, walk()
59 59
60 60 def walk(repo, pats, opts, node=None, head='', badmatch=None):
61 61 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
62 62 for r in results:
63 63 yield r
64 64
65 65 def walkchangerevs(ui, repo, pats, opts):
66 66 '''Iterate over files and the revs they changed in.
67 67
68 68 Callers most commonly need to iterate backwards over the history
69 69 it is interested in. Doing so has awful (quadratic-looking)
70 70 performance, so we use iterators in a "windowed" way.
71 71
72 72 We walk a window of revisions in the desired order. Within the
73 73 window, we first walk forwards to gather data, then in the desired
74 74 order (usually backwards) to display it.
75 75
76 76 This function returns an (iterator, getchange, matchfn) tuple. The
77 77 getchange function returns the changelog entry for a numeric
78 78 revision. The iterator yields 3-tuples. They will be of one of
79 79 the following forms:
80 80
81 81 "window", incrementing, lastrev: stepping through a window,
82 82 positive if walking forwards through revs, last rev in the
83 83 sequence iterated over - use to reset state for the current window
84 84
85 85 "add", rev, fns: out-of-order traversal of the given file names
86 86 fns, which changed during revision rev - use to gather data for
87 87 possible display
88 88
89 89 "iter", rev, None: in-order traversal of the revs earlier iterated
90 90 over with "add" - use to display data'''
91 91
92 92 def increasing_windows(start, end, windowsize=8, sizelimit=512):
93 93 if start < end:
94 94 while start < end:
95 95 yield start, min(windowsize, end-start)
96 96 start += windowsize
97 97 if windowsize < sizelimit:
98 98 windowsize *= 2
99 99 else:
100 100 while start > end:
101 101 yield start, min(windowsize, start-end-1)
102 102 start -= windowsize
103 103 if windowsize < sizelimit:
104 104 windowsize *= 2
105 105
106 106
107 107 files, matchfn, anypats = matchpats(repo, pats, opts)
108 108
109 109 if repo.changelog.count() == 0:
110 110 return [], False, matchfn
111 111
112 112 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
113 113 wanted = {}
114 114 slowpath = anypats
115 115 fncache = {}
116 116
117 117 chcache = {}
118 118 def getchange(rev):
119 119 ch = chcache.get(rev)
120 120 if ch is None:
121 121 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
122 122 return ch
123 123
124 124 if not slowpath and not files:
125 125 # No files, no patterns. Display all revs.
126 126 wanted = dict(zip(revs, revs))
127 127 if not slowpath:
128 128 # Only files, no patterns. Check the history of each file.
129 129 def filerevgen(filelog):
130 130 for i, window in increasing_windows(filelog.count()-1, -1):
131 131 revs = []
132 132 for j in xrange(i - window, i + 1):
133 133 revs.append(filelog.linkrev(filelog.node(j)))
134 134 revs.reverse()
135 135 for rev in revs:
136 136 yield rev
137 137
138 138 minrev, maxrev = min(revs), max(revs)
139 139 for file_ in files:
140 140 filelog = repo.file(file_)
141 141 # A zero count may be a directory or deleted file, so
142 142 # try to find matching entries on the slow path.
143 143 if filelog.count() == 0:
144 144 slowpath = True
145 145 break
146 146 for rev in filerevgen(filelog):
147 147 if rev <= maxrev:
148 148 if rev < minrev:
149 149 break
150 150 fncache.setdefault(rev, [])
151 151 fncache[rev].append(file_)
152 152 wanted[rev] = 1
153 153 if slowpath:
154 154 # The slow path checks files modified in every changeset.
155 155 def changerevgen():
156 156 for i, window in increasing_windows(repo.changelog.count()-1, -1):
157 157 for j in xrange(i - window, i + 1):
158 158 yield j, getchange(j)[3]
159 159
160 160 for rev, changefiles in changerevgen():
161 161 matches = filter(matchfn, changefiles)
162 162 if matches:
163 163 fncache[rev] = matches
164 164 wanted[rev] = 1
165 165
166 166 def iterate():
167 167 for i, window in increasing_windows(0, len(revs)):
168 168 yield 'window', revs[0] < revs[-1], revs[-1]
169 169 nrevs = [rev for rev in revs[i:i+window]
170 170 if rev in wanted]
171 171 srevs = list(nrevs)
172 172 srevs.sort()
173 173 for rev in srevs:
174 174 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
175 175 yield 'add', rev, fns
176 176 for rev in nrevs:
177 177 yield 'iter', rev, None
178 178 return iterate(), getchange, matchfn
179 179
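# Editor's note (illustrative, not part of the command table): a minimal,
# self-contained sketch of the "windowed" iteration described in the
# walkchangerevs() docstring above.  The generator mirrors the forward
# branch of increasing_windows() so the window-doubling behaviour can be
# seen in isolation; grep() further down in this file is a full consumer
# of the 'window'/'add'/'iter' protocol.
def _demo_increasing_windows(start, end, windowsize=8, sizelimit=512):
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2
# list(_demo_increasing_windows(0, 25)) == [(0, 8), (8, 16), (24, 1)]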
180 180 revrangesep = ':'
181 181
182 182 def revrange(ui, repo, revs, revlog=None):
183 183 """Yield revision as strings from a list of revision specifications."""
184 184 if revlog is None:
185 185 revlog = repo.changelog
186 186 revcount = revlog.count()
187 187 def fix(val, defval):
188 188 if not val:
189 189 return defval
190 190 try:
191 191 num = int(val)
192 192 if str(num) != val:
193 193 raise ValueError
194 194 if num < 0:
195 195 num += revcount
196 196 if num < 0:
197 197 num = 0
198 198 elif num >= revcount:
199 199 raise ValueError
200 200 except ValueError:
201 201 try:
202 202 num = repo.changelog.rev(repo.lookup(val))
203 203 except KeyError:
204 204 try:
205 205 num = revlog.rev(revlog.lookup(val))
206 206 except KeyError:
207 207 raise util.Abort(_('invalid revision identifier %s'), val)
208 208 return num
209 209 seen = {}
210 210 for spec in revs:
211 211 if spec.find(revrangesep) >= 0:
212 212 start, end = spec.split(revrangesep, 1)
213 213 start = fix(start, 0)
214 214 end = fix(end, revcount - 1)
215 215 step = start > end and -1 or 1
216 216 for rev in xrange(start, end+step, step):
217 217 if rev in seen:
218 218 continue
219 219 seen[rev] = 1
220 220 yield str(rev)
221 221 else:
222 222 rev = fix(spec, None)
223 223 if rev in seen:
224 224 continue
225 225 seen[rev] = 1
226 226 yield str(rev)
227 227
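# Editor's note (illustrative sketch): how revrange() above expands range
# specifications.  This simplified stand-in assumes explicit integer bounds
# and no symbolic names; the real function also resolves tags, hashes and
# negative offsets via fix().
def _demo_revrange(specs, sep=':'):
    seen = {}
    for spec in specs:
        if sep in spec:
            start, end = [int(x) for x in spec.split(sep, 1)]
            step = start > end and -1 or 1
            for rev in xrange(start, end + step, step):
                if rev not in seen:
                    seen[rev] = 1
                    yield str(rev)
        elif spec not in seen:
            seen[spec] = 1
            yield spec
# list(_demo_revrange(['3:5', '7'])) == ['3', '4', '5', '7']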
228 228 def make_filename(repo, r, pat, node=None,
229 229 total=None, seqno=None, revwidth=None, pathname=None):
230 230 node_expander = {
231 231 'H': lambda: hex(node),
232 232 'R': lambda: str(r.rev(node)),
233 233 'h': lambda: short(node),
234 234 }
235 235 expander = {
236 236 '%': lambda: '%',
237 237 'b': lambda: os.path.basename(repo.root),
238 238 }
239 239
240 240 try:
241 241 if node:
242 242 expander.update(node_expander)
243 243 if node and revwidth is not None:
244 244 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
245 245 if total is not None:
246 246 expander['N'] = lambda: str(total)
247 247 if seqno is not None:
248 248 expander['n'] = lambda: str(seqno)
249 249 if total is not None and seqno is not None:
250 250 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
251 251 if pathname is not None:
252 252 expander['s'] = lambda: os.path.basename(pathname)
253 253 expander['d'] = lambda: os.path.dirname(pathname) or '.'
254 254 expander['p'] = lambda: pathname
255 255
256 256 newname = []
257 257 patlen = len(pat)
258 258 i = 0
259 259 while i < patlen:
260 260 c = pat[i]
261 261 if c == '%':
262 262 i += 1
263 263 c = pat[i]
264 264 c = expander[c]()
265 265 newname.append(c)
266 266 i += 1
267 267 return ''.join(newname)
268 268 except KeyError, inst:
269 269 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
270 270 inst.args[0])
271 271
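# Editor's note (illustrative sketch): the '%' expansion loop used by
# make_filename() above, isolated with a literal expander table.  In the
# real function the table is built from the repository, node and sequence
# numbers, and unknown format specs raise util.Abort.
def _demo_expand(pat, expander):
    newname = []
    i, patlen = 0, len(pat)
    while i < patlen:
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()
        newname.append(c)
        i += 1
    return ''.join(newname)
# _demo_expand('%b-r%R.patch', {'b': lambda: 'hg', 'R': lambda: '42'})
# returns 'hg-r42.patch'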
272 272 def make_file(repo, r, pat, node=None,
273 273 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
274 274 if not pat or pat == '-':
275 275 return 'w' in mode and sys.stdout or sys.stdin
276 276 if hasattr(pat, 'write') and 'w' in mode:
277 277 return pat
278 278 if hasattr(pat, 'read') and 'r' in mode:
279 279 return pat
280 280 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
281 281 pathname),
282 282 mode)
283 283
284 284 def write_bundle(cg, filename=None, compress=True):
285 285 """Write a bundle file and return its filename.
286 286
287 287 Existing files will not be overwritten.
288 288 If no filename is specified, a temporary file is created.
289 289 bz2 compression can be turned off.
290 290 The bundle file will be deleted in case of errors.
291 291 """
292 292 class nocompress(object):
293 293 def compress(self, x):
294 294 return x
295 295 def flush(self):
296 296 return ""
297 297
298 298 fh = None
299 299 cleanup = None
300 300 try:
301 301 if filename:
302 302 if os.path.exists(filename):
303 303 raise util.Abort(_("file '%s' already exists"), filename)
304 304 fh = open(filename, "wb")
305 305 else:
306 306 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
307 307 fh = os.fdopen(fd, "wb")
308 308 cleanup = filename
309 309
310 310 if compress:
311 311 fh.write("HG10")
312 312 z = bz2.BZ2Compressor(9)
313 313 else:
314 314 fh.write("HG10UN")
315 315 z = nocompress()
316 316 # parse the changegroup data, otherwise we will block
317 317 # in case of sshrepo because we don't know the end of the stream
318 318
319 319 # an empty chunkiter is the end of the changegroup
320 320 empty = False
321 321 while not empty:
322 322 empty = True
323 323 for chunk in changegroup.chunkiter(cg):
324 324 empty = False
325 325 fh.write(z.compress(changegroup.genchunk(chunk)))
326 326 fh.write(z.compress(changegroup.closechunk()))
327 327 fh.write(z.flush())
328 328 cleanup = None
329 329 return filename
330 330 finally:
331 331 if fh is not None:
332 332 fh.close()
333 333 if cleanup is not None:
334 334 os.unlink(cleanup)
335 335
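# Editor's note (illustrative sketch): the header/compressor choice made by
# write_bundle() above.  "HG10" marks a bz2-compressed bundle, "HG10UN" an
# uncompressed one; everything written after the header is the changegroup
# stream passed through the chosen compressor.
def _demo_bundle_header(fh, compress=True):
    import bz2
    if compress:
        fh.write("HG10")
        return bz2.BZ2Compressor(9)
    fh.write("HG10UN")
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""
    return nocompress()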
336 336 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
337 337 changes=None, text=False, opts={}):
338 338 if not node1:
339 339 node1 = repo.dirstate.parents()[0]
340 340 # reading the data for node1 early allows it to play nicely
341 341 # with repo.changes and the revlog cache.
342 342 change = repo.changelog.read(node1)
343 343 mmap = repo.manifest.read(change[0])
344 344 date1 = util.datestr(change[2])
345 345
346 346 if not changes:
347 347 changes = repo.changes(node1, node2, files, match=match)
348 348 modified, added, removed, deleted, unknown = changes
349 349 if files:
350 350 modified, added, removed = map(lambda x: filterfiles(files, x),
351 351 (modified, added, removed))
352 352
353 353 if not modified and not added and not removed:
354 354 return
355 355
356 356 if node2:
357 357 change = repo.changelog.read(node2)
358 358 mmap2 = repo.manifest.read(change[0])
359 359 date2 = util.datestr(change[2])
360 360 def read(f):
361 361 return repo.file(f).read(mmap2[f])
362 362 else:
363 363 date2 = util.datestr()
364 364 def read(f):
365 365 return repo.wread(f)
366 366
367 367 if ui.quiet:
368 368 r = None
369 369 else:
370 370 hexfunc = ui.verbose and hex or short
371 371 r = [hexfunc(node) for node in [node1, node2] if node]
372 372
373 373 diffopts = ui.diffopts()
374 374 showfunc = opts.get('show_function') or diffopts['showfunc']
375 375 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
376 376 for f in modified:
377 377 to = None
378 378 if f in mmap:
379 379 to = repo.file(f).read(mmap[f])
380 380 tn = read(f)
381 381 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
382 382 showfunc=showfunc, ignorews=ignorews))
383 383 for f in added:
384 384 to = None
385 385 tn = read(f)
386 386 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
387 387 showfunc=showfunc, ignorews=ignorews))
388 388 for f in removed:
389 389 to = repo.file(f).read(mmap[f])
390 390 tn = None
391 391 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
392 392 showfunc=showfunc, ignorews=ignorews))
393 393
394 394 def trimuser(ui, name, rev, revcache):
395 395 """trim the name of the user who committed a change"""
396 396 user = revcache.get(rev)
397 397 if user is None:
398 398 user = revcache[rev] = ui.shortuser(name)
399 399 return user
400 400
401 401 class changeset_printer(object):
402 402 '''show changeset information when templating not requested.'''
403 403
404 404 def __init__(self, ui, repo):
405 405 self.ui = ui
406 406 self.repo = repo
407 407
408 408 def show(self, rev=0, changenode=None, brinfo=None):
409 409 '''show a single changeset or file revision'''
410 410 log = self.repo.changelog
411 411 if changenode is None:
412 412 changenode = log.node(rev)
413 413 elif not rev:
414 414 rev = log.rev(changenode)
415 415
416 416 if self.ui.quiet:
417 417 self.ui.write("%d:%s\n" % (rev, short(changenode)))
418 418 return
419 419
420 420 changes = log.read(changenode)
421 421 date = util.datestr(changes[2])
422 422
423 423 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
424 424 for p in log.parents(changenode)
425 425 if self.ui.debugflag or p != nullid]
426 426 if (not self.ui.debugflag and len(parents) == 1 and
427 427 parents[0][0] == rev-1):
428 428 parents = []
429 429
430 430 if self.ui.verbose:
431 431 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
432 432 else:
433 433 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
434 434
435 435 for tag in self.repo.nodetags(changenode):
436 436 self.ui.status(_("tag: %s\n") % tag)
437 437 for parent in parents:
438 438 self.ui.write(_("parent: %d:%s\n") % parent)
439 439
440 440 if brinfo and changenode in brinfo:
441 441 br = brinfo[changenode]
442 442 self.ui.write(_("branch: %s\n") % " ".join(br))
443 443
444 444 self.ui.debug(_("manifest: %d:%s\n") %
445 445 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
446 446 self.ui.status(_("user: %s\n") % changes[1])
447 447 self.ui.status(_("date: %s\n") % date)
448 448
449 449 if self.ui.debugflag:
450 450 files = self.repo.changes(log.parents(changenode)[0], changenode)
451 451 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
452 452 files):
453 453 if value:
454 454 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
455 455 else:
456 456 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
457 457
458 458 description = changes[4].strip()
459 459 if description:
460 460 if self.ui.verbose:
461 461 self.ui.status(_("description:\n"))
462 462 self.ui.status(description)
463 463 self.ui.status("\n\n")
464 464 else:
465 465 self.ui.status(_("summary: %s\n") %
466 466 description.splitlines()[0])
467 467 self.ui.status("\n")
468 468
469 469 def show_changeset(ui, repo, opts):
470 470 '''show one changeset, using a template or the regular display. the
471 471 caller can pass in 'style' and 'template' options in opts.'''
472 472
473 473 tmpl = opts.get('template')
474 474 if tmpl:
475 475 tmpl = templater.parsestring(tmpl, quoted=False)
476 476 else:
477 477 tmpl = ui.config('ui', 'logtemplate')
478 478 if tmpl: tmpl = templater.parsestring(tmpl)
479 479 mapfile = opts.get('style') or ui.config('ui', 'style')
480 480 if tmpl or mapfile:
481 481 if mapfile:
482 482 if not os.path.isfile(mapfile):
483 483 mapname = templater.templatepath('map-cmdline.' + mapfile)
484 484 if not mapname: mapname = templater.templatepath(mapfile)
485 485 if mapname: mapfile = mapname
486 486 try:
487 487 t = templater.changeset_templater(ui, repo, mapfile)
488 488 except SyntaxError, inst:
489 489 raise util.Abort(inst.args[0])
490 490 if tmpl: t.use_template(tmpl)
491 491 return t
492 492 return changeset_printer(ui, repo)
493 493
494 494 def show_version(ui):
495 495 """output version and copyright information"""
496 496 ui.write(_("Mercurial Distributed SCM (version %s)\n")
497 497 % version.get_version())
498 498 ui.status(_(
499 499 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
500 500 "This is free software; see the source for copying conditions. "
501 501 "There is NO\nwarranty; "
502 502 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
503 503 ))
504 504
505 505 def help_(ui, cmd=None, with_version=False):
506 506 """show help for a given command or all commands"""
507 507 option_lists = []
508 508 if cmd and cmd != 'shortlist':
509 509 if with_version:
510 510 show_version(ui)
511 511 ui.write('\n')
512 512 aliases, i = find(cmd)
513 513 # synopsis
514 514 ui.write("%s\n\n" % i[2])
515 515
516 516 # description
517 517 doc = i[0].__doc__
518 518 if not doc:
519 519 doc = _("(No help text available)")
520 520 if ui.quiet:
521 521 doc = doc.splitlines(0)[0]
522 522 ui.write("%s\n" % doc.rstrip())
523 523
524 524 if not ui.quiet:
525 525 # aliases
526 526 if len(aliases) > 1:
527 527 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
528 528
529 529 # options
530 530 if i[1]:
531 531 option_lists.append(("options", i[1]))
532 532
533 533 else:
534 534 # program name
535 535 if ui.verbose or with_version:
536 536 show_version(ui)
537 537 else:
538 538 ui.status(_("Mercurial Distributed SCM\n"))
539 539 ui.status('\n')
540 540
541 541 # list of commands
542 542 if cmd == "shortlist":
543 543 ui.status(_('basic commands (use "hg help" '
544 544 'for the full list or option "-v" for details):\n\n'))
545 545 elif ui.verbose:
546 546 ui.status(_('list of commands:\n\n'))
547 547 else:
548 548 ui.status(_('list of commands (use "hg help -v" '
549 549 'to show aliases and global options):\n\n'))
550 550
551 551 h = {}
552 552 cmds = {}
553 553 for c, e in table.items():
554 554 f = c.split("|")[0]
555 555 if cmd == "shortlist" and not f.startswith("^"):
556 556 continue
557 557 f = f.lstrip("^")
558 558 if not ui.debugflag and f.startswith("debug"):
559 559 continue
560 560 doc = e[0].__doc__
561 561 if not doc:
562 562 doc = _("(No help text available)")
563 563 h[f] = doc.splitlines(0)[0].rstrip()
564 564 cmds[f] = c.lstrip("^")
565 565
566 566 fns = h.keys()
567 567 fns.sort()
568 568 m = max(map(len, fns))
569 569 for f in fns:
570 570 if ui.verbose:
571 571 commands = cmds[f].replace("|",", ")
572 572 ui.write(" %s:\n %s\n"%(commands, h[f]))
573 573 else:
574 574 ui.write(' %-*s %s\n' % (m, f, h[f]))
575 575
576 576 # global options
577 577 if ui.verbose:
578 578 option_lists.append(("global options", globalopts))
579 579
580 580 # list all option lists
581 581 opt_output = []
582 582 for title, options in option_lists:
583 583 opt_output.append(("\n%s:\n" % title, None))
584 584 for shortopt, longopt, default, desc in options:
585 585 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
586 586 longopt and " --%s" % longopt),
587 587 "%s%s" % (desc,
588 588 default
589 589 and _(" (default: %s)") % default
590 590 or "")))
591 591
592 592 if opt_output:
593 593 opts_len = max([len(line[0]) for line in opt_output if line[1]])
594 594 for first, second in opt_output:
595 595 if second:
596 596 ui.write(" %-*s %s\n" % (opts_len, first, second))
597 597 else:
598 598 ui.write("%s\n" % first)
599 599
600 600 # Commands start here, listed alphabetically
601 601
602 602 def add(ui, repo, *pats, **opts):
603 603 """add the specified files on the next commit
604 604
605 605 Schedule files to be version controlled and added to the repository.
606 606
607 607 The files will be added to the repository at the next commit.
608 608
609 609 If no names are given, add all files in the repository.
610 610 """
611 611
612 612 names = []
613 613 for src, abs, rel, exact in walk(repo, pats, opts):
614 614 if exact:
615 615 if ui.verbose:
616 616 ui.status(_('adding %s\n') % rel)
617 617 names.append(abs)
618 618 elif repo.dirstate.state(abs) == '?':
619 619 ui.status(_('adding %s\n') % rel)
620 620 names.append(abs)
621 621 repo.add(names)
622 622
623 623 def addremove(ui, repo, *pats, **opts):
624 624 """add all new files, delete all missing files (DEPRECATED)
625 625
626 626 (DEPRECATED)
627 627 Add all new files and remove all missing files from the repository.
628 628
629 629 New files are ignored if they match any of the patterns in .hgignore. As
630 630 with add, these changes take effect at the next commit.
631 631
632 632 This command is now deprecated and will be removed in a future
633 633 release. Please use add and remove --after instead.
634 634 """
635 635 ui.warn(_('(the addremove command is deprecated; use add and remove '
636 636 '--after instead)\n'))
637 637 return addremove_lock(ui, repo, pats, opts)
638 638
639 639 def addremove_lock(ui, repo, pats, opts, wlock=None):
640 640 add, remove = [], []
641 641 for src, abs, rel, exact in walk(repo, pats, opts):
642 642 if src == 'f' and repo.dirstate.state(abs) == '?':
643 643 add.append(abs)
644 644 if ui.verbose or not exact:
645 645 ui.status(_('adding %s\n') % ((pats and rel) or abs))
646 646 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
647 647 remove.append(abs)
648 648 if ui.verbose or not exact:
649 649 ui.status(_('removing %s\n') % ((pats and rel) or abs))
650 650 repo.add(add, wlock=wlock)
651 651 repo.remove(remove, wlock=wlock)
652 652
653 653 def annotate(ui, repo, *pats, **opts):
654 654 """show changeset information per file line
655 655
656 656 List changes in files, showing the revision id responsible for each line.
657 657
658 658 This command is useful for discovering who made a change or when a change
659 659 took place.
660 660
661 661 Without the -a option, annotate will avoid processing files it
662 662 detects as binary. With -a, annotate will generate an annotation
663 663 anyway, probably with undesirable results.
664 664 """
665 665 def getnode(rev):
666 666 return short(repo.changelog.node(rev))
667 667
668 668 ucache = {}
669 669 def getname(rev):
670 670 cl = repo.changelog.read(repo.changelog.node(rev))
671 671 return trimuser(ui, cl[1], rev, ucache)
672 672
673 673 dcache = {}
674 674 def getdate(rev):
675 675 datestr = dcache.get(rev)
676 676 if datestr is None:
677 677 cl = repo.changelog.read(repo.changelog.node(rev))
678 678 datestr = dcache[rev] = util.datestr(cl[2])
679 679 return datestr
680 680
681 681 if not pats:
682 682 raise util.Abort(_('at least one file name or pattern required'))
683 683
684 684 opmap = [['user', getname], ['number', str], ['changeset', getnode],
685 685 ['date', getdate]]
686 686 if not opts['user'] and not opts['changeset'] and not opts['date']:
687 687 opts['number'] = 1
688 688
689 689 if opts['rev']:
690 690 node = repo.changelog.lookup(opts['rev'])
691 691 else:
692 692 node = repo.dirstate.parents()[0]
693 693 change = repo.changelog.read(node)
694 694 mmap = repo.manifest.read(change[0])
695 695
696 696 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
697 697 f = repo.file(abs)
698 698 if not opts['text'] and util.binary(f.read(mmap[abs])):
699 699 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
700 700 continue
701 701
702 702 lines = f.annotate(mmap[abs])
703 703 pieces = []
704 704
705 705 for o, f in opmap:
706 706 if opts[o]:
707 707 l = [f(n) for n, dummy in lines]
708 708 if l:
709 709 m = max(map(len, l))
710 710 pieces.append(["%*s" % (m, x) for x in l])
711 711
712 712 if pieces:
713 713 for p, l in zip(zip(*pieces), lines):
714 714 ui.write("%s: %s" % (" ".join(p), l[1]))
715 715
716 716 def archive(ui, repo, dest, **opts):
717 717 '''create unversioned archive of a repository revision
718 718
719 719 By default, the revision used is the parent of the working
720 720 directory; use "-r" to specify a different revision.
721 721
722 722 To specify the type of archive to create, use "-t". Valid
723 723 types are:
724 724
725 725 "files" (default): a directory full of files
726 726 "tar": tar archive, uncompressed
727 727 "tbz2": tar archive, compressed using bzip2
728 728 "tgz": tar archive, compressed using gzip
729 729 "uzip": zip archive, uncompressed
730 730 "zip": zip archive, compressed using deflate
731 731
732 732 The exact name of the destination archive or directory is given
733 733 using a format string; see "hg help export" for details.
734 734
735 735 Each member added to an archive file has a directory prefix
736 736 prepended. Use "-p" to specify a format string for the prefix.
737 737 The default is the basename of the archive, with suffixes removed.
738 738 '''
739 739
740 740 if opts['rev']:
741 741 node = repo.lookup(opts['rev'])
742 742 else:
743 743 node, p2 = repo.dirstate.parents()
744 744 if p2 != nullid:
745 745 raise util.Abort(_('uncommitted merge - please provide a '
746 746 'specific revision'))
747 747
748 748 dest = make_filename(repo, repo.changelog, dest, node)
749 749 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
750 750 if os.path.realpath(dest) == repo.root:
751 751 raise util.Abort(_('repository root cannot be destination'))
752 752 dummy, matchfn, dummy = matchpats(repo, [], opts)
753 753 archival.archive(repo, dest, node, opts.get('type') or 'files',
754 754 not opts['no_decode'], matchfn, prefix)
755 755
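# Editor's note: example invocations of the archive command documented above
# (revision numbers and paths are hypothetical):
#   hg archive ../snapshot                  # directory of plain files
#   hg archive -t tgz -r 5 ../snap-%h.tgz   # gzipped tarball of rev 5, named
#                                           # after the short changeset hash
#   hg archive -t zip -p project-1.0 ../project-1.0.zip
# -t, -r and -p select the type, revision and member prefix described above.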
756 756 def backout(ui, repo, rev, **opts):
757 757 '''reverse effect of earlier changeset
758 758
759 Commit the backed out changes as a new changeset.
759 Commit the backed out changes as a new changeset. The new
760 changeset is a child of the backed out changeset.
760 761
761 762 If you back out a changeset other than the tip, a new head is
762 created. The --merge option remembers the parent of the working
763 directory before starting the backout, then merges the new head
764 with it afterwards, to save you from doing this by hand. The
765 result of this merge is not committed, as for a normal merge.'''
763 created. This head is the parent of the working directory. If
764 you back out an old changeset, your working directory will appear
765 old after the backout. You should merge the backout changeset
766 with another head.
767
768 The --merge option remembers the parent of the working directory
769 before starting the backout, then merges the new head with that
770 changeset afterwards. This saves you from doing the merge by
771 hand. The result of this merge is not committed, as for a normal
772 merge.'''
766 773
767 774 bail_if_changed(repo)
768 775 op1, op2 = repo.dirstate.parents()
769 776 if op2 != nullid:
770 777 raise util.Abort(_('outstanding uncommitted merge'))
771 778 node = repo.lookup(rev)
772 779 parent, p2 = repo.changelog.parents(node)
773 780 if parent == nullid:
774 781 raise util.Abort(_('cannot back out a change with no parents'))
775 782 if p2 != nullid:
776 783 raise util.Abort(_('cannot back out a merge'))
777 784 repo.update(node, force=True, show_stats=False)
778 785 revert_opts = opts.copy()
779 786 revert_opts['rev'] = hex(parent)
780 787 revert(ui, repo, **revert_opts)
781 788 commit_opts = opts.copy()
782 789 commit_opts['addremove'] = False
783 790 if not commit_opts['message'] and not commit_opts['logfile']:
784 791 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
792 commit_opts['force_editor'] = True
785 793 commit(ui, repo, **commit_opts)
786 794 def nice(node):
787 795 return '%d:%s' % (repo.changelog.rev(node), short(node))
788 796 ui.status(_('changeset %s backs out changeset %s\n') %
789 797 (nice(repo.changelog.tip()), nice(node)))
790 798 if opts['merge'] and op1 != node:
791 799 ui.status(_('merging with changeset %s\n') % nice(op1))
792 update(ui, repo, hex(op1), **opts)
800 doupdate(ui, repo, hex(op1), **opts)
793 801
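# Editor's note: typical backout usage as described in the docstring above
# (the revision number is hypothetical):
#   hg backout 17            # commit a new changeset reversing revision 17
#   hg backout --merge 17    # same, then merge the backout head with the
#                            # previous working directory parent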
794 802 def bundle(ui, repo, fname, dest="default-push", **opts):
795 803 """create a changegroup file
796 804
797 805 Generate a compressed changegroup file collecting all changesets
798 806 not found in the other repository.
799 807
800 808 This file can then be transferred using conventional means and
801 809 applied to another repository with the unbundle command. This is
802 810 useful when native push and pull are not available or when
803 811 exporting an entire repository is undesirable. The standard file
804 812 extension is ".hg".
805 813
806 814 Unlike import/export, this exactly preserves all changeset
807 815 contents including permissions, rename data, and revision history.
808 816 """
809 817 dest = ui.expandpath(dest)
810 818 other = hg.repository(ui, dest)
811 819 o = repo.findoutgoing(other, force=opts['force'])
812 820 cg = repo.changegroup(o, 'bundle')
813 821 write_bundle(cg, fname)
814 822
815 823 def cat(ui, repo, file1, *pats, **opts):
816 824 """output the latest or given revisions of files
817 825
818 826 Print the specified files as they were at the given revision.
819 827 If no revision is given then the tip is used.
820 828
821 829 Output may be to a file, in which case the name of the file is
822 830 given using a format string. The formatting rules are the same as
823 831 for the export command, with the following additions:
824 832
825 833 %s basename of file being printed
826 834 %d dirname of file being printed, or '.' if in repo root
827 835 %p root-relative path name of file being printed
828 836 """
829 837 mf = {}
830 838 rev = opts['rev']
831 839 if rev:
832 840 node = repo.lookup(rev)
833 841 else:
834 842 node = repo.changelog.tip()
835 843 change = repo.changelog.read(node)
836 844 mf = repo.manifest.read(change[0])
837 845 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
838 846 r = repo.file(abs)
839 847 n = mf[abs]
840 848 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
841 849 fp.write(r.read(n))
842 850
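# Editor's note: example of the cat format additions documented above, with a
# hypothetical file name.  '%d/%s.orig' expands to 'src/module.py.orig' for
# the repository file 'src/module.py':
#   hg cat --output '%d/%s.orig' --rev 2 src/module.py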
843 851 def clone(ui, source, dest=None, **opts):
844 852 """make a copy of an existing repository
845 853
846 854 Create a copy of an existing repository in a new directory.
847 855
848 856 If no destination directory name is specified, it defaults to the
849 857 basename of the source.
850 858
851 859 The location of the source is added to the new repository's
852 860 .hg/hgrc file, as the default to be used for future pulls.
853 861
854 862 For efficiency, hardlinks are used for cloning whenever the source
855 863 and destination are on the same filesystem. Some filesystems,
856 864 such as AFS, implement hardlinking incorrectly, but do not report
857 865 errors. In these cases, use the --pull option to avoid
858 866 hardlinking.
859 867
860 868 See pull for valid source format details.
861 869 """
862 870 if dest is None:
863 871 dest = os.path.basename(os.path.normpath(source))
864 872
865 873 if os.path.exists(dest):
866 874 raise util.Abort(_("destination '%s' already exists"), dest)
867 875
868 876 dest = os.path.realpath(dest)
869 877
870 878 class Dircleanup(object):
871 879 def __init__(self, dir_):
872 880 self.rmtree = shutil.rmtree
873 881 self.dir_ = dir_
874 882 os.mkdir(dir_)
875 883 def close(self):
876 884 self.dir_ = None
877 885 def __del__(self):
878 886 if self.dir_:
879 887 self.rmtree(self.dir_, True)
880 888
881 889 if opts['ssh']:
882 890 ui.setconfig("ui", "ssh", opts['ssh'])
883 891 if opts['remotecmd']:
884 892 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
885 893
886 894 source = ui.expandpath(source)
887 895
888 896 d = Dircleanup(dest)
889 897 abspath = source
890 898 other = hg.repository(ui, source)
891 899
892 900 copy = False
893 901 if other.dev() != -1:
894 902 abspath = os.path.abspath(source)
895 903 if not opts['pull'] and not opts['rev']:
896 904 copy = True
897 905
898 906 if copy:
899 907 try:
900 908 # we use a lock here because if we race with commit, we
901 909 # can end up with extra data in the cloned revlogs that's
902 910 # not pointed to by changesets, thus causing verify to
903 911 # fail
904 912 l1 = other.lock()
905 913 except lock.LockException:
906 914 copy = False
907 915
908 916 if copy:
909 917 # we lock here to avoid premature writing to the target
910 918 os.mkdir(os.path.join(dest, ".hg"))
911 919 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
912 920
913 921 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
914 922 for f in files.split():
915 923 src = os.path.join(source, ".hg", f)
916 924 dst = os.path.join(dest, ".hg", f)
917 925 try:
918 926 util.copyfiles(src, dst)
919 927 except OSError, inst:
920 928 if inst.errno != errno.ENOENT:
921 929 raise
922 930
923 931 repo = hg.repository(ui, dest)
924 932
925 933 else:
926 934 revs = None
927 935 if opts['rev']:
928 936 if not other.local():
929 937 error = _("clone -r not supported yet for remote repositories.")
930 938 raise util.Abort(error)
931 939 else:
932 940 revs = [other.lookup(rev) for rev in opts['rev']]
933 941 repo = hg.repository(ui, dest, create=1)
934 942 repo.pull(other, heads = revs)
935 943
936 944 f = repo.opener("hgrc", "w", text=True)
937 945 f.write("[paths]\n")
938 946 f.write("default = %s\n" % abspath)
939 947 f.close()
940 948
941 949 if not opts['noupdate']:
942 update(repo.ui, repo)
950 doupdate(repo.ui, repo)
943 951
944 952 d.close()
945 953
946 954 def commit(ui, repo, *pats, **opts):
947 955 """commit the specified files or all outstanding changes
948 956
949 957 Commit changes to the given files into the repository.
950 958
951 959 If a list of files is omitted, all changes reported by "hg status"
952 960 will be committed.
953 961
954 962 If no commit message is specified, the editor configured in your hgrc
955 963 or in the EDITOR environment variable is started so you can enter a message.
956 964 """
957 965 message = opts['message']
958 966 logfile = opts['logfile']
959 967
960 968 if message and logfile:
961 969 raise util.Abort(_('options --message and --logfile are mutually '
962 970 'exclusive'))
963 971 if not message and logfile:
964 972 try:
965 973 if logfile == '-':
966 974 message = sys.stdin.read()
967 975 else:
968 976 message = open(logfile).read()
969 977 except IOError, inst:
970 978 raise util.Abort(_("can't read commit message '%s': %s") %
971 979 (logfile, inst.strerror))
972 980
973 981 if opts['addremove']:
974 982 addremove_lock(ui, repo, pats, opts)
975 983 fns, match, anypats = matchpats(repo, pats, opts)
976 984 if pats:
977 985 modified, added, removed, deleted, unknown = (
978 986 repo.changes(files=fns, match=match))
979 987 files = modified + added + removed
980 988 else:
981 989 files = []
982 990 try:
983 repo.commit(files, message, opts['user'], opts['date'], match)
991 repo.commit(files, message, opts['user'], opts['date'], match,
992 force_editor=opts.get('force_editor'))
984 993 except ValueError, inst:
985 994 raise util.Abort(str(inst))
986 995
987 996 def docopy(ui, repo, pats, opts, wlock):
988 997 # called with the repo lock held
989 998 cwd = repo.getcwd()
990 999 errors = 0
991 1000 copied = []
992 1001 targets = {}
993 1002
994 1003 def okaytocopy(abs, rel, exact):
995 1004 reasons = {'?': _('is not managed'),
996 1005 'a': _('has been marked for add'),
997 1006 'r': _('has been marked for remove')}
998 1007 state = repo.dirstate.state(abs)
999 1008 reason = reasons.get(state)
1000 1009 if reason:
1001 1010 if state == 'a':
1002 1011 origsrc = repo.dirstate.copied(abs)
1003 1012 if origsrc is not None:
1004 1013 return origsrc
1005 1014 if exact:
1006 1015 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1007 1016 else:
1008 1017 return abs
1009 1018
1010 1019 def copy(origsrc, abssrc, relsrc, target, exact):
1011 1020 abstarget = util.canonpath(repo.root, cwd, target)
1012 1021 reltarget = util.pathto(cwd, abstarget)
1013 1022 prevsrc = targets.get(abstarget)
1014 1023 if prevsrc is not None:
1015 1024 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1016 1025 (reltarget, abssrc, prevsrc))
1017 1026 return
1018 1027 if (not opts['after'] and os.path.exists(reltarget) or
1019 1028 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1020 1029 if not opts['force']:
1021 1030 ui.warn(_('%s: not overwriting - file exists\n') %
1022 1031 reltarget)
1023 1032 return
1024 1033 if not opts['after']:
1025 1034 os.unlink(reltarget)
1026 1035 if opts['after']:
1027 1036 if not os.path.exists(reltarget):
1028 1037 return
1029 1038 else:
1030 1039 targetdir = os.path.dirname(reltarget) or '.'
1031 1040 if not os.path.isdir(targetdir):
1032 1041 os.makedirs(targetdir)
1033 1042 try:
1034 1043 restore = repo.dirstate.state(abstarget) == 'r'
1035 1044 if restore:
1036 1045 repo.undelete([abstarget], wlock)
1037 1046 try:
1038 1047 shutil.copyfile(relsrc, reltarget)
1039 1048 shutil.copymode(relsrc, reltarget)
1040 1049 restore = False
1041 1050 finally:
1042 1051 if restore:
1043 1052 repo.remove([abstarget], wlock)
1044 1053 except shutil.Error, inst:
1045 1054 raise util.Abort(str(inst))
1046 1055 except IOError, inst:
1047 1056 if inst.errno == errno.ENOENT:
1048 1057 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1049 1058 else:
1050 1059 ui.warn(_('%s: cannot copy - %s\n') %
1051 1060 (relsrc, inst.strerror))
1052 1061 errors += 1
1053 1062 return
1054 1063 if ui.verbose or not exact:
1055 1064 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1056 1065 targets[abstarget] = abssrc
1057 1066 if abstarget != origsrc:
1058 1067 repo.copy(origsrc, abstarget, wlock)
1059 1068 copied.append((abssrc, relsrc, exact))
1060 1069
1061 1070 def targetpathfn(pat, dest, srcs):
1062 1071 if os.path.isdir(pat):
1063 1072 abspfx = util.canonpath(repo.root, cwd, pat)
1064 1073 if destdirexists:
1065 1074 striplen = len(os.path.split(abspfx)[0])
1066 1075 else:
1067 1076 striplen = len(abspfx)
1068 1077 if striplen:
1069 1078 striplen += len(os.sep)
1070 1079 res = lambda p: os.path.join(dest, p[striplen:])
1071 1080 elif destdirexists:
1072 1081 res = lambda p: os.path.join(dest, os.path.basename(p))
1073 1082 else:
1074 1083 res = lambda p: dest
1075 1084 return res
1076 1085
1077 1086 def targetpathafterfn(pat, dest, srcs):
1078 1087 if util.patkind(pat, None)[0]:
1079 1088 # a mercurial pattern
1080 1089 res = lambda p: os.path.join(dest, os.path.basename(p))
1081 1090 else:
1082 1091 abspfx = util.canonpath(repo.root, cwd, pat)
1083 1092 if len(abspfx) < len(srcs[0][0]):
1084 1093 # A directory. Either the target path contains the last
1085 1094 # component of the source path or it does not.
1086 1095 def evalpath(striplen):
1087 1096 score = 0
1088 1097 for s in srcs:
1089 1098 t = os.path.join(dest, s[0][striplen:])
1090 1099 if os.path.exists(t):
1091 1100 score += 1
1092 1101 return score
1093 1102
1094 1103 striplen = len(abspfx)
1095 1104 if striplen:
1096 1105 striplen += len(os.sep)
1097 1106 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1098 1107 score = evalpath(striplen)
1099 1108 striplen1 = len(os.path.split(abspfx)[0])
1100 1109 if striplen1:
1101 1110 striplen1 += len(os.sep)
1102 1111 if evalpath(striplen1) > score:
1103 1112 striplen = striplen1
1104 1113 res = lambda p: os.path.join(dest, p[striplen:])
1105 1114 else:
1106 1115 # a file
1107 1116 if destdirexists:
1108 1117 res = lambda p: os.path.join(dest, os.path.basename(p))
1109 1118 else:
1110 1119 res = lambda p: dest
1111 1120 return res
1112 1121
1113 1122
1114 1123 pats = list(pats)
1115 1124 if not pats:
1116 1125 raise util.Abort(_('no source or destination specified'))
1117 1126 if len(pats) == 1:
1118 1127 raise util.Abort(_('no destination specified'))
1119 1128 dest = pats.pop()
1120 1129 destdirexists = os.path.isdir(dest)
1121 1130 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1122 1131 raise util.Abort(_('with multiple sources, destination must be an '
1123 1132 'existing directory'))
1124 1133 if opts['after']:
1125 1134 tfn = targetpathafterfn
1126 1135 else:
1127 1136 tfn = targetpathfn
1128 1137 copylist = []
1129 1138 for pat in pats:
1130 1139 srcs = []
1131 1140 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1132 1141 origsrc = okaytocopy(abssrc, relsrc, exact)
1133 1142 if origsrc:
1134 1143 srcs.append((origsrc, abssrc, relsrc, exact))
1135 1144 if not srcs:
1136 1145 continue
1137 1146 copylist.append((tfn(pat, dest, srcs), srcs))
1138 1147 if not copylist:
1139 1148 raise util.Abort(_('no files to copy'))
1140 1149
1141 1150 for targetpath, srcs in copylist:
1142 1151 for origsrc, abssrc, relsrc, exact in srcs:
1143 1152 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1144 1153
1145 1154 if errors:
1146 1155 ui.warn(_('(consider using --after)\n'))
1147 1156 return errors, copied
1148 1157
1149 1158 def copy(ui, repo, *pats, **opts):
1150 1159 """mark files as copied for the next commit
1151 1160
1152 1161 Mark dest as having copies of source files. If dest is a
1153 1162 directory, copies are put in that directory. If dest is a file,
1154 1163 there can only be one source.
1155 1164
1156 1165 By default, this command copies the contents of files as they
1157 1166 stand in the working directory. If invoked with --after, the
1158 1167 operation is recorded, but no copying is performed.
1159 1168
1160 1169 This command takes effect in the next commit.
1161 1170
1162 1171 NOTE: This command should be treated as experimental. While it
1163 1172 should properly record copied files, this information is not yet
1164 1173 fully used by merge, nor fully reported by log.
1165 1174 """
1166 1175 wlock = repo.wlock(0)
1167 1176 errs, copied = docopy(ui, repo, pats, opts, wlock)
1168 1177 return errs
1169 1178
1170 1179 def debugancestor(ui, index, rev1, rev2):
1171 1180 """find the ancestor revision of two revisions in a given index"""
1172 1181 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1173 1182 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1174 1183 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1175 1184
1176 1185 def debugcomplete(ui, cmd='', **opts):
1177 1186 """returns the completion list associated with the given command"""
1178 1187
1179 1188 if opts['options']:
1180 1189 options = []
1181 1190 otables = [globalopts]
1182 1191 if cmd:
1183 1192 aliases, entry = find(cmd)
1184 1193 otables.append(entry[1])
1185 1194 for t in otables:
1186 1195 for o in t:
1187 1196 if o[0]:
1188 1197 options.append('-%s' % o[0])
1189 1198 options.append('--%s' % o[1])
1190 1199 ui.write("%s\n" % "\n".join(options))
1191 1200 return
1192 1201
1193 1202 clist = findpossible(cmd).keys()
1194 1203 clist.sort()
1195 1204 ui.write("%s\n" % "\n".join(clist))
1196 1205
1197 1206 def debugrebuildstate(ui, repo, rev=None):
1198 1207 """rebuild the dirstate as it would look like for the given revision"""
1199 1208 if not rev:
1200 1209 rev = repo.changelog.tip()
1201 1210 else:
1202 1211 rev = repo.lookup(rev)
1203 1212 change = repo.changelog.read(rev)
1204 1213 n = change[0]
1205 1214 files = repo.manifest.readflags(n)
1206 1215 wlock = repo.wlock()
1207 1216 repo.dirstate.rebuild(rev, files.iteritems())
1208 1217
1209 1218 def debugcheckstate(ui, repo):
1210 1219 """validate the correctness of the current dirstate"""
1211 1220 parent1, parent2 = repo.dirstate.parents()
1212 1221 repo.dirstate.read()
1213 1222 dc = repo.dirstate.map
1214 1223 keys = dc.keys()
1215 1224 keys.sort()
1216 1225 m1n = repo.changelog.read(parent1)[0]
1217 1226 m2n = repo.changelog.read(parent2)[0]
1218 1227 m1 = repo.manifest.read(m1n)
1219 1228 m2 = repo.manifest.read(m2n)
1220 1229 errors = 0
1221 1230 for f in dc:
1222 1231 state = repo.dirstate.state(f)
1223 1232 if state in "nr" and f not in m1:
1224 1233 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1225 1234 errors += 1
1226 1235 if state in "a" and f in m1:
1227 1236 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1228 1237 errors += 1
1229 1238 if state in "m" and f not in m1 and f not in m2:
1230 1239 ui.warn(_("%s in state %s, but not in either manifest\n") %
1231 1240 (f, state))
1232 1241 errors += 1
1233 1242 for f in m1:
1234 1243 state = repo.dirstate.state(f)
1235 1244 if state not in "nrm":
1236 1245 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1237 1246 errors += 1
1238 1247 if errors:
1239 1248 error = _(".hg/dirstate inconsistent with current parent's manifest")
1240 1249 raise util.Abort(error)
1241 1250
1242 1251 def debugconfig(ui, repo):
1243 1252 """show combined config settings from all hgrc files"""
1244 1253 for section, name, value in ui.walkconfig():
1245 1254 ui.write('%s.%s=%s\n' % (section, name, value))
1246 1255
1247 1256 def debugsetparents(ui, repo, rev1, rev2=None):
1248 1257 """manually set the parents of the current working directory
1249 1258
1250 1259 This is useful for writing repository conversion tools, but should
1251 1260 be used with care.
1252 1261 """
1253 1262
1254 1263 if not rev2:
1255 1264 rev2 = hex(nullid)
1256 1265
1257 1266 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1258 1267
1259 1268 def debugstate(ui, repo):
1260 1269 """show the contents of the current dirstate"""
1261 1270 repo.dirstate.read()
1262 1271 dc = repo.dirstate.map
1263 1272 keys = dc.keys()
1264 1273 keys.sort()
1265 1274 for file_ in keys:
1266 1275 ui.write("%c %3o %10d %s %s\n"
1267 1276 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1268 1277 time.strftime("%x %X",
1269 1278 time.localtime(dc[file_][3])), file_))
1270 1279 for f in repo.dirstate.copies:
1271 1280 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1272 1281
1273 1282 def debugdata(ui, file_, rev):
1274 1283 """dump the contents of an data file revision"""
1275 1284 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1276 1285 file_[:-2] + ".i", file_, 0)
1277 1286 try:
1278 1287 ui.write(r.revision(r.lookup(rev)))
1279 1288 except KeyError:
1280 1289 raise util.Abort(_('invalid revision identifier %s'), rev)
1281 1290
1282 1291 def debugindex(ui, file_):
1283 1292 """dump the contents of an index file"""
1284 1293 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1285 1294 ui.write(" rev offset length base linkrev" +
1286 1295 " nodeid p1 p2\n")
1287 1296 for i in range(r.count()):
1288 1297 node = r.node(i)
1289 1298 pp = r.parents(node)
1290 1299 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1291 1300 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1292 1301 short(node), short(pp[0]), short(pp[1])))
1293 1302
1294 1303 def debugindexdot(ui, file_):
1295 1304 """dump an index DAG as a .dot file"""
1296 1305 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1297 1306 ui.write("digraph G {\n")
1298 1307 for i in range(r.count()):
1299 e = r.index[i]
1300 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1301 if e[5] != nullid:
1302 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1308 node = r.node(i)
1309 pp = r.parents(node)
1310 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1311 if pp[1] != nullid:
1312 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1303 1313 ui.write("}\n")
1304 1314
1305 1315 def debugrename(ui, repo, file, rev=None):
1306 1316 """dump rename information"""
1307 1317 r = repo.file(relpath(repo, [file])[0])
1308 1318 if rev:
1309 1319 try:
1310 1320 # assume all revision numbers are for changesets
1311 1321 n = repo.lookup(rev)
1312 1322 change = repo.changelog.read(n)
1313 1323 m = repo.manifest.read(change[0])
1314 1324 n = m[relpath(repo, [file])[0]]
1315 1325 except (hg.RepoError, KeyError):
1316 1326 n = r.lookup(rev)
1317 1327 else:
1318 1328 n = r.tip()
1319 1329 m = r.renamed(n)
1320 1330 if m:
1321 1331 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1322 1332 else:
1323 1333 ui.write(_("not renamed\n"))
1324 1334
1325 1335 def debugwalk(ui, repo, *pats, **opts):
1326 1336 """show how files match on given patterns"""
1327 1337 items = list(walk(repo, pats, opts))
1328 1338 if not items:
1329 1339 return
1330 1340 fmt = '%%s %%-%ds %%-%ds %%s' % (
1331 1341 max([len(abs) for (src, abs, rel, exact) in items]),
1332 1342 max([len(rel) for (src, abs, rel, exact) in items]))
1333 1343 for src, abs, rel, exact in items:
1334 1344 line = fmt % (src, abs, rel, exact and 'exact' or '')
1335 1345 ui.write("%s\n" % line.rstrip())
1336 1346
1337 1347 def diff(ui, repo, *pats, **opts):
1338 1348 """diff repository (or selected files)
1339 1349
1340 1350 Show differences between revisions for the specified files.
1341 1351
1342 1352 Differences between files are shown using the unified diff format.
1343 1353
1344 1354 When two revision arguments are given, then changes are shown
1345 1355 between those revisions. If only one revision is specified then
1346 1356 that revision is compared to the working directory, and, when no
1347 1357 revisions are specified, the working directory files are compared
1348 1358 to its parent.
1349 1359
1350 1360 Without the -a option, diff will avoid generating diffs of files
1351 1361 it detects as binary. With -a, diff will generate a diff anyway,
1352 1362 probably with undesirable results.
1353 1363 """
1354 1364 node1, node2 = None, None
1355 1365 revs = [repo.lookup(x) for x in opts['rev']]
1356 1366
1357 1367 if len(revs) > 0:
1358 1368 node1 = revs[0]
1359 1369 if len(revs) > 1:
1360 1370 node2 = revs[1]
1361 1371 if len(revs) > 2:
1362 1372 raise util.Abort(_("too many revisions to diff"))
1363 1373
1364 1374 fns, matchfn, anypats = matchpats(repo, pats, opts)
1365 1375
1366 1376 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1367 1377 text=opts['text'], opts=opts)
1368 1378
1369 1379 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1370 1380 node = repo.lookup(changeset)
1371 1381 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1372 1382 if opts['switch_parent']:
1373 1383 parents.reverse()
1374 1384 prev = (parents and parents[0]) or nullid
1375 1385 change = repo.changelog.read(node)
1376 1386
1377 1387 fp = make_file(repo, repo.changelog, opts['output'],
1378 1388 node=node, total=total, seqno=seqno,
1379 1389 revwidth=revwidth)
1380 1390 if fp != sys.stdout:
1381 1391 ui.note("%s\n" % fp.name)
1382 1392
1383 1393 fp.write("# HG changeset patch\n")
1384 1394 fp.write("# User %s\n" % change[1])
1385 1395 fp.write("# Node ID %s\n" % hex(node))
1386 1396 fp.write("# Parent %s\n" % hex(prev))
1387 1397 if len(parents) > 1:
1388 1398 fp.write("# Parent %s\n" % hex(parents[1]))
1389 1399 fp.write(change[4].rstrip())
1390 1400 fp.write("\n\n")
1391 1401
1392 1402 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1393 1403 if fp != sys.stdout:
1394 1404 fp.close()
1395 1405
1396 1406 def export(ui, repo, *changesets, **opts):
1397 1407 """dump the header and diffs for one or more changesets
1398 1408
1399 1409 Print the changeset header and diffs for one or more revisions.
1400 1410
1401 1411 The information shown in the changeset header is: author,
1402 1412 changeset hash, parent and commit comment.
1403 1413
1404 1414 Output may be to a file, in which case the name of the file is
1405 1415 given using a format string. The formatting rules are as follows:
1406 1416
1407 1417 %% literal "%" character
1408 1418 %H changeset hash (40 bytes of hexadecimal)
1409 1419 %N number of patches being generated
1410 1420 %R changeset revision number
1411 1421 %b basename of the exporting repository
1412 1422 %h short-form changeset hash (12 bytes of hexadecimal)
1413 1423 %n zero-padded sequence number, starting at 1
1414 1424 %r zero-padded changeset revision number
1415 1425
1416 1426 Without the -a option, export will avoid generating diffs of files
1417 1427 it detects as binary. With -a, export will generate a diff anyway,
1418 1428 probably with undesirable results.
1419 1429
1420 1430 With the --switch-parent option, the diff will be against the second
1421 1431 parent. It can be useful to review a merge.
1422 1432 """
1423 1433 if not changesets:
1424 1434 raise util.Abort(_("export requires at least one changeset"))
1425 1435 seqno = 0
1426 1436 revs = list(revrange(ui, repo, changesets))
1427 1437 total = len(revs)
1428 1438 revwidth = max(map(len, revs))
1429 1439 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1430 1440 ui.note(msg)
1431 1441 for cset in revs:
1432 1442 seqno += 1
1433 1443 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1434 1444
1435 1445 def forget(ui, repo, *pats, **opts):
1436 1446 """don't add the specified files on the next commit (DEPRECATED)
1437 1447
1438 1448 (DEPRECATED)
1439 1449 Undo an 'hg add' scheduled for the next commit.
1440 1450
1441 1451 This command is now deprecated and will be removed in a future
1442 1452 release. Please use revert instead.
1443 1453 """
1444 1454 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1445 1455 forget = []
1446 1456 for src, abs, rel, exact in walk(repo, pats, opts):
1447 1457 if repo.dirstate.state(abs) == 'a':
1448 1458 forget.append(abs)
1449 1459 if ui.verbose or not exact:
1450 1460 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1451 1461 repo.forget(forget)
1452 1462
1453 1463 def grep(ui, repo, pattern, *pats, **opts):
1454 1464 """search for a pattern in specified files and revisions
1455 1465
1456 1466 Search revisions of files for a regular expression.
1457 1467
1458 1468 This command behaves differently than Unix grep. It only accepts
1459 1469 Python/Perl regexps. It searches repository history, not the
1460 1470 working directory. It always prints the revision number in which
1461 1471 a match appears.
1462 1472
1463 1473 By default, grep only prints output for the first revision of a
1464 1474 file in which it finds a match. To get it to print every revision
1465 1475 that contains a change in match status ("-" for a match that
1466 1476 becomes a non-match, or "+" for a non-match that becomes a match),
1467 1477 use the --all flag.
1468 1478 """
1469 1479 reflags = 0
1470 1480 if opts['ignore_case']:
1471 1481 reflags |= re.I
1472 1482 regexp = re.compile(pattern, reflags)
1473 1483 sep, eol = ':', '\n'
1474 1484 if opts['print0']:
1475 1485 sep = eol = '\0'
1476 1486
1477 1487 fcache = {}
1478 1488 def getfile(fn):
1479 1489 if fn not in fcache:
1480 1490 fcache[fn] = repo.file(fn)
1481 1491 return fcache[fn]
1482 1492
1483 1493 def matchlines(body):
1484 1494 begin = 0
1485 1495 linenum = 0
1486 1496 while True:
1487 1497 match = regexp.search(body, begin)
1488 1498 if not match:
1489 1499 break
1490 1500 mstart, mend = match.span()
1491 1501 linenum += body.count('\n', begin, mstart) + 1
1492 1502 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1493 1503 lend = body.find('\n', mend)
1494 1504 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1495 1505 begin = lend + 1
1496 1506
1497 1507 class linestate(object):
1498 1508 def __init__(self, line, linenum, colstart, colend):
1499 1509 self.line = line
1500 1510 self.linenum = linenum
1501 1511 self.colstart = colstart
1502 1512 self.colend = colend
1503 1513 def __eq__(self, other):
1504 1514 return self.line == other.line
1505 1515 def __hash__(self):
1506 1516 return hash(self.line)
1507 1517
1508 1518 matches = {}
1509 1519 def grepbody(fn, rev, body):
1510 1520 matches[rev].setdefault(fn, {})
1511 1521 m = matches[rev][fn]
1512 1522 for lnum, cstart, cend, line in matchlines(body):
1513 1523 s = linestate(line, lnum, cstart, cend)
1514 1524 m[s] = s
1515 1525
1516 1526 # FIXME: prev isn't used, why ?
1517 1527 prev = {}
1518 1528 ucache = {}
1519 1529 def display(fn, rev, states, prevstates):
1520 1530 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1521 1531 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1522 1532 counts = {'-': 0, '+': 0}
1523 1533 filerevmatches = {}
1524 1534 for l in diff:
1525 1535 if incrementing or not opts['all']:
1526 1536 change = ((l in prevstates) and '-') or '+'
1527 1537 r = rev
1528 1538 else:
1529 1539 change = ((l in states) and '-') or '+'
1530 1540 r = prev[fn]
1531 1541 cols = [fn, str(rev)]
1532 1542 if opts['line_number']:
1533 1543 cols.append(str(l.linenum))
1534 1544 if opts['all']:
1535 1545 cols.append(change)
1536 1546 if opts['user']:
1537 1547 cols.append(trimuser(ui, getchange(rev)[1], rev,
1538 1548 ucache))
1539 1549 if opts['files_with_matches']:
1540 1550 c = (fn, rev)
1541 1551 if c in filerevmatches:
1542 1552 continue
1543 1553 filerevmatches[c] = 1
1544 1554 else:
1545 1555 cols.append(l.line)
1546 1556 ui.write(sep.join(cols), eol)
1547 1557 counts[change] += 1
1548 1558 return counts['+'], counts['-']
1549 1559
1550 1560 fstate = {}
1551 1561 skip = {}
1552 1562 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1553 1563 count = 0
1554 1564 incrementing = False
1555 1565 for st, rev, fns in changeiter:
1556 1566 if st == 'window':
1557 1567 incrementing = rev
1558 1568 matches.clear()
1559 1569 elif st == 'add':
1560 1570 change = repo.changelog.read(repo.lookup(str(rev)))
1561 1571 mf = repo.manifest.read(change[0])
1562 1572 matches[rev] = {}
1563 1573 for fn in fns:
1564 1574 if fn in skip:
1565 1575 continue
1566 1576 fstate.setdefault(fn, {})
1567 1577 try:
1568 1578 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1569 1579 except KeyError:
1570 1580 pass
1571 1581 elif st == 'iter':
1572 1582 states = matches[rev].items()
1573 1583 states.sort()
1574 1584 for fn, m in states:
1575 1585 if fn in skip:
1576 1586 continue
1577 1587 if incrementing or not opts['all'] or fstate[fn]:
1578 1588 pos, neg = display(fn, rev, m, fstate[fn])
1579 1589 count += pos + neg
1580 1590 if pos and not opts['all']:
1581 1591 skip[fn] = True
1582 1592 fstate[fn] = m
1583 1593 prev[fn] = rev
1584 1594
1585 1595 if not incrementing:
1586 1596 fstate = fstate.items()
1587 1597 fstate.sort()
1588 1598 for fn, state in fstate:
1589 1599 if fn in skip:
1590 1600 continue
1591 1601 display(fn, rev, {}, state)
1592 1602 return (count == 0 and 1) or 0
1593 1603
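# Editor's note (illustrative sketch): the matchlines() helper defined inside
# grep() above, reproduced standalone to show how regexp match offsets become
# (line number, start column, end column, line text) tuples.  re is already
# demand-loaded at the top of this file; it is imported again here only so
# the sketch stands alone.
import re
def _demo_matchlines(body, regexp):
    begin = 0
    linenum = 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1
# list(_demo_matchlines("foo\nbar baz\n", re.compile("baz")))
# == [(2, 4, 7, 'bar baz')]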
1594 1604 def heads(ui, repo, **opts):
1595 1605 """show current repository heads
1596 1606
1597 1607 Show all repository head changesets.
1598 1608
1599 1609 Repository "heads" are changesets that don't have children
1600 1610 changesets. They are where development generally takes place and
1601 1611 are the usual targets for update and merge operations.
1602 1612 """
1603 1613 if opts['rev']:
1604 1614 heads = repo.heads(repo.lookup(opts['rev']))
1605 1615 else:
1606 1616 heads = repo.heads()
1607 1617 br = None
1608 1618 if opts['branches']:
1609 1619 br = repo.branchlookup(heads)
1610 1620 displayer = show_changeset(ui, repo, opts)
1611 1621 for n in heads:
1612 1622 displayer.show(changenode=n, brinfo=br)
1613 1623
1614 1624 def identify(ui, repo):
1615 1625 """print information about the working copy
1616 1626
1617 1627 Print a short summary of the current state of the repo.
1618 1628
1619 1629 This summary identifies the repository state using one or two parent
1620 1630 hash identifiers, followed by a "+" if there are uncommitted changes
1621 1631 in the working directory, followed by a list of tags for this revision.
1622 1632 """
1623 1633 parents = [p for p in repo.dirstate.parents() if p != nullid]
1624 1634 if not parents:
1625 1635 ui.write(_("unknown\n"))
1626 1636 return
1627 1637
1628 1638 hexfunc = ui.verbose and hex or short
1629 1639 modified, added, removed, deleted, unknown = repo.changes()
1630 1640 output = ["%s%s" %
1631 1641 ('+'.join([hexfunc(parent) for parent in parents]),
1632 1642 (modified or added or removed or deleted) and "+" or "")]
1633 1643
1634 1644 if not ui.quiet:
1635 1645 # multiple tags for a single parent separated by '/'
1636 1646 parenttags = ['/'.join(tags)
1637 1647 for tags in map(repo.nodetags, parents) if tags]
1638 1648 # tags for multiple parents separated by ' + '
1639 1649 if parenttags:
1640 1650 output.append(' + '.join(parenttags))
1641 1651
1642 1652 ui.write("%s\n" % ' '.join(output))
1643 1653
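# Editor's illustrative note (not part of the original commit): following the
# docstring and the formatting above, "hg identify" prints something like
#   b9f1a6e3c2d4+ tip
# i.e. the short parent hash, a "+" when the working directory has
# uncommitted changes, and the revision's tags (the hash here is made up).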
1644 1654 def import_(ui, repo, patch1, *patches, **opts):
1645 1655 """import an ordered set of patches
1646 1656
1647 1657 Import a list of patches and commit them individually.
1648 1658
1649 1659 If there are outstanding changes in the working directory, import
1650 1660 will abort unless given the -f flag.
1651 1661
1652 1662 If a patch looks like a mail message (its first line starts with
1653 1663 "From " or looks like an RFC822 header), it will not be applied
1654 1664 unless the -f option is used. The importer neither parses nor
1655 1665 discards mail headers, so use -f only to override the "mailness"
1656 1666 safety check, not to import a real mail message.
1657 1667 """
1658 1668 patches = (patch1,) + patches
1659 1669
1660 1670 if not opts['force']:
1661 1671 bail_if_changed(repo)
1662 1672
1663 1673 d = opts["base"]
1664 1674 strip = opts["strip"]
1665 1675
1666 1676 mailre = re.compile(r'(?:From |[\w-]+:)')
1667 1677
1668 1678 # attempt to detect the start of a patch
1669 1679 # (this heuristic is borrowed from quilt)
1670 1680 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1671 1681 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1672 1682 '(---|\*\*\*)[ \t])')
1673 1683
1674 1684 for patch in patches:
1675 1685 ui.status(_("applying %s\n") % patch)
1676 1686 pf = os.path.join(d, patch)
1677 1687
1678 1688 message = []
1679 1689 user = None
1680 1690 hgpatch = False
1681 1691 for line in file(pf):
1682 1692 line = line.rstrip()
1683 1693 if (not message and not hgpatch and
1684 1694 mailre.match(line) and not opts['force']):
1685 1695 if len(line) > 35:
1686 1696 line = line[:32] + '...'
1687 1697 raise util.Abort(_('first line looks like a '
1688 1698 'mail header: ') + line)
1689 1699 if diffre.match(line):
1690 1700 break
1691 1701 elif hgpatch:
1692 1702 # parse values when importing the result of an hg export
1693 1703 if line.startswith("# User "):
1694 1704 user = line[7:]
1695 1705 ui.debug(_('User: %s\n') % user)
1696 1706 elif not line.startswith("# ") and line:
1697 1707 message.append(line)
1698 1708 hgpatch = False
1699 1709 elif line == '# HG changeset patch':
1700 1710 hgpatch = True
1701 1711 message = [] # We may have collected garbage
1702 1712 else:
1703 1713 message.append(line)
1704 1714
1705 1715 # make sure message isn't empty
1706 1716 if not message:
1707 1717 message = _("imported patch %s\n") % patch
1708 1718 else:
1709 1719 message = "%s\n" % '\n'.join(message)
1710 1720 ui.debug(_('message:\n%s\n') % message)
1711 1721
1712 1722 files = util.patch(strip, pf, ui)
1713 1723
1714 1724 if len(files) > 0:
1715 1725 addremove_lock(ui, repo, files, {})
1716 1726 repo.commit(files, message, user)
1717 1727
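# Editor's illustrative note (not part of the original commit): the parsing
# loop above recognizes the header that "hg export" writes, roughly:
#   # HG changeset patch
#   # User Jane Doe <jane@example.com>   (hypothetical user)
#   commit message line(s)
#   diff ... (start of the patch proper)
# Anything collected before "# HG changeset patch" is discarded as garbage.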
1718 1728 def incoming(ui, repo, source="default", **opts):
1719 1729 """show new changesets found in source
1720 1730
1721 1731 Show new changesets found in the specified path/URL or the default
1722 1732 pull location. These are the changesets that would be pulled if a pull
1723 1733 was requested.
1724 1734
1725 1735 For remote repositories, using --bundle avoids downloading the changesets
1726 1736 twice if the incoming is followed by a pull.
1727 1737
1728 1738 See pull for valid source format details.
1729 1739 """
1730 1740 source = ui.expandpath(source)
1731 1741 if opts['ssh']:
1732 1742 ui.setconfig("ui", "ssh", opts['ssh'])
1733 1743 if opts['remotecmd']:
1734 1744 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1735 1745
1736 1746 other = hg.repository(ui, source)
1737 1747 incoming = repo.findincoming(other, force=opts["force"])
1738 1748 if not incoming:
1739 1749 ui.status(_("no changes found\n"))
1740 1750 return
1741 1751
1742 1752 cleanup = None
1743 1753 try:
1744 1754 fname = opts["bundle"]
1745 1755 if fname or not other.local():
1746 1756 # create a bundle (uncompressed if other repo is not local)
1747 1757 cg = other.changegroup(incoming, "incoming")
1748 1758 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1749 1759 # keep written bundle?
1750 1760 if opts["bundle"]:
1751 1761 cleanup = None
1752 1762 if not other.local():
1753 1763 # use the created uncompressed bundlerepo
1754 1764 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1755 1765
1756 1766 o = other.changelog.nodesbetween(incoming)[0]
1757 1767 if opts['newest_first']:
1758 1768 o.reverse()
1759 1769 displayer = show_changeset(ui, other, opts)
1760 1770 for n in o:
1761 1771 parents = [p for p in other.changelog.parents(n) if p != nullid]
1762 1772 if opts['no_merges'] and len(parents) == 2:
1763 1773 continue
1764 1774 displayer.show(changenode=n)
1765 1775 if opts['patch']:
1766 1776 prev = (parents and parents[0]) or nullid
1767 1777 dodiff(ui, ui, other, prev, n)
1768 1778 ui.write("\n")
1769 1779 finally:
1770 1780 if hasattr(other, 'close'):
1771 1781 other.close()
1772 1782 if cleanup:
1773 1783 os.unlink(cleanup)
1774 1784
1775 1785 def init(ui, dest="."):
1776 1786 """create a new repository in the given directory
1777 1787
1778 1788 Initialize a new repository in the given directory. If the given
1779 1789 directory does not exist, it is created.
1780 1790
1781 1791 If no directory is given, the current directory is used.
1782 1792 """
1783 1793 if not os.path.exists(dest):
1784 1794 os.mkdir(dest)
1785 1795 hg.repository(ui, dest, create=1)
1786 1796
1787 1797 def locate(ui, repo, *pats, **opts):
1788 1798 """locate files matching specific patterns
1789 1799
1790 1800 Print all files under Mercurial control whose names match the
1791 1801 given patterns.
1792 1802
1793 1803 This command searches the current directory and its
1794 1804 subdirectories. To search an entire repository, move to the root
1795 1805 of the repository.
1796 1806
1797 1807 If no patterns are given to match, this command prints all file
1798 1808 names.
1799 1809
1800 1810 If you want to feed the output of this command into the "xargs"
1801 1811 command, use the "-0" option to both this command and "xargs".
1802 1812 This will avoid the problem of "xargs" treating single filenames
1803 1813 that contain white space as multiple filenames.
1804 1814 """
1805 1815 end = opts['print0'] and '\0' or '\n'
1806 1816 rev = opts['rev']
1807 1817 if rev:
1808 1818 node = repo.lookup(rev)
1809 1819 else:
1810 1820 node = None
1811 1821
1812 1822 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1813 1823 head='(?:.*/|)'):
1814 1824 if not node and repo.dirstate.state(abs) == '?':
1815 1825 continue
1816 1826 if opts['fullpath']:
1817 1827 ui.write(os.path.join(repo.root, abs), end)
1818 1828 else:
1819 1829 ui.write(((pats and rel) or abs), end)
1820 1830
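# Editor's illustrative note (not part of the original commit): as the
# docstring suggests, pairing --print0 with "xargs -0" handles file names
# that contain spaces, e.g. (pattern and downstream command are hypothetical):
#   hg locate -0 '*.py' | xargs -0 wc -l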
1821 1831 def log(ui, repo, *pats, **opts):
1822 1832 """show revision history of entire repository or files
1823 1833
1824 1834 Print the revision history of the specified files or the entire project.
1825 1835
1826 1836 By default this command outputs: changeset id and hash, tags,
1827 1837 non-trivial parents, user, date and time, and a summary for each
1828 1838 commit. When the -v/--verbose switch is used, the list of changed
1829 1839 files and full commit message is shown.
1830 1840 """
1831 1841 class dui(object):
1832 1842 # Implement and delegate some ui protocol. Save hunks of
1833 1843 # output for later display in the desired order.
1834 1844 def __init__(self, ui):
1835 1845 self.ui = ui
1836 1846 self.hunk = {}
1837 1847 self.header = {}
1838 1848 def bump(self, rev):
1839 1849 self.rev = rev
1840 1850 self.hunk[rev] = []
1841 1851 self.header[rev] = []
1842 1852 def note(self, *args):
1843 1853 if self.verbose:
1844 1854 self.write(*args)
1845 1855 def status(self, *args):
1846 1856 if not self.quiet:
1847 1857 self.write(*args)
1848 1858 def write(self, *args):
1849 1859 self.hunk[self.rev].append(args)
1850 1860 def write_header(self, *args):
1851 1861 self.header[self.rev].append(args)
1852 1862 def debug(self, *args):
1853 1863 if self.debugflag:
1854 1864 self.write(*args)
1855 1865 def __getattr__(self, key):
1856 1866 return getattr(self.ui, key)
1857 1867
1858 1868 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1859 1869
1860 1870 if opts['limit']:
1861 1871 try:
1862 1872 limit = int(opts['limit'])
1863 1873 except ValueError:
1864 1874 raise util.Abort(_('limit must be a positive integer'))
1865 1875 if limit <= 0: raise util.Abort(_('limit must be positive'))
1866 1876 else:
1867 1877 limit = sys.maxint
1868 1878 count = 0
1869 1879
1870 1880 displayer = show_changeset(ui, repo, opts)
1871 1881 for st, rev, fns in changeiter:
1872 1882 if st == 'window':
1873 1883 du = dui(ui)
1874 1884 displayer.ui = du
1875 1885 elif st == 'add':
1876 1886 du.bump(rev)
1877 1887 changenode = repo.changelog.node(rev)
1878 1888 parents = [p for p in repo.changelog.parents(changenode)
1879 1889 if p != nullid]
1880 1890 if opts['no_merges'] and len(parents) == 2:
1881 1891 continue
1882 1892 if opts['only_merges'] and len(parents) != 2:
1883 1893 continue
1884 1894
1885 1895 if opts['keyword']:
1886 1896 changes = getchange(rev)
1887 1897 miss = 0
1888 1898 for k in [kw.lower() for kw in opts['keyword']]:
1889 1899 if not (k in changes[1].lower() or
1890 1900 k in changes[4].lower() or
1891 1901 k in " ".join(changes[3][:20]).lower()):
1892 1902 miss = 1
1893 1903 break
1894 1904 if miss:
1895 1905 continue
1896 1906
1897 1907 br = None
1898 1908 if opts['branches']:
1899 1909 br = repo.branchlookup([repo.changelog.node(rev)])
1900 1910
1901 1911 displayer.show(rev, brinfo=br)
1902 1912 if opts['patch']:
1903 1913 prev = (parents and parents[0]) or nullid
1904 1914 dodiff(du, du, repo, prev, changenode, match=matchfn)
1905 1915 du.write("\n\n")
1906 1916 elif st == 'iter':
1907 1917 if count == limit: break
1908 1918 if du.header[rev]:
1909 1919 for args in du.header[rev]:
1910 1920 ui.write_header(*args)
1911 1921 if du.hunk[rev]:
1912 1922 count += 1
1913 1923 for args in du.hunk[rev]:
1914 1924 ui.write(*args)
1915 1925
1916 1926 def manifest(ui, repo, rev=None):
1917 1927 """output the latest or given revision of the project manifest
1918 1928
1919 1929 Print a list of version controlled files for the given revision.
1920 1930
1921 1931 The manifest is the list of files being version controlled. If no revision
1922 1932 is given then the tip is used.
1923 1933 """
1924 1934 if rev:
1925 1935 try:
1926 1936 # assume all revision numbers are for changesets
1927 1937 n = repo.lookup(rev)
1928 1938 change = repo.changelog.read(n)
1929 1939 n = change[0]
1930 1940 except hg.RepoError:
1931 1941 n = repo.manifest.lookup(rev)
1932 1942 else:
1933 1943 n = repo.manifest.tip()
1934 1944 m = repo.manifest.read(n)
1935 1945 mf = repo.manifest.readflags(n)
1936 1946 files = m.keys()
1937 1947 files.sort()
1938 1948
1939 1949 for f in files:
1940 1950 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1941 1951
1942 1952 def merge(ui, repo, node=None, **opts):
1943 1953 """Merge working directory with another revision
1944 1954
1945 1955 Merge the contents of the current working directory and the
1946 1956 requested revision. Files that changed between either parent are
1947 1957 marked as changed for the next commit and a commit must be
1948 1958 performed before any further updates are allowed.
1949 1959 """
1950 return update(ui, repo, node=node, merge=True, **opts)
1960 return doupdate(ui, repo, node=node, merge=True, **opts)
1951 1961
1952 1962 def outgoing(ui, repo, dest="default-push", **opts):
1953 1963 """show changesets not found in destination
1954 1964
1955 1965 Show changesets not found in the specified destination repository or
1956 1966 the default push location. These are the changesets that would be pushed
1957 1967 if a push was requested.
1958 1968
1959 1969 See pull for valid destination format details.
1960 1970 """
1961 1971 dest = ui.expandpath(dest)
1962 1972 if opts['ssh']:
1963 1973 ui.setconfig("ui", "ssh", opts['ssh'])
1964 1974 if opts['remotecmd']:
1965 1975 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1966 1976
1967 1977 other = hg.repository(ui, dest)
1968 1978 o = repo.findoutgoing(other, force=opts['force'])
1969 1979 if not o:
1970 1980 ui.status(_("no changes found\n"))
1971 1981 return
1972 1982 o = repo.changelog.nodesbetween(o)[0]
1973 1983 if opts['newest_first']:
1974 1984 o.reverse()
1975 1985 displayer = show_changeset(ui, repo, opts)
1976 1986 for n in o:
1977 1987 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1978 1988 if opts['no_merges'] and len(parents) == 2:
1979 1989 continue
1980 1990 displayer.show(changenode=n)
1981 1991 if opts['patch']:
1982 1992 prev = (parents and parents[0]) or nullid
1983 1993 dodiff(ui, ui, repo, prev, n)
1984 1994 ui.write("\n")
1985 1995
1986 1996 def parents(ui, repo, rev=None, branches=None, **opts):
1987 1997 """show the parents of the working dir or revision
1988 1998
1989 1999 Print the working directory's parent revisions.
1990 2000 """
1991 2001 if rev:
1992 2002 p = repo.changelog.parents(repo.lookup(rev))
1993 2003 else:
1994 2004 p = repo.dirstate.parents()
1995 2005
1996 2006 br = None
1997 2007 if branches is not None:
1998 2008 br = repo.branchlookup(p)
1999 2009 displayer = show_changeset(ui, repo, opts)
2000 2010 for n in p:
2001 2011 if n != nullid:
2002 2012 displayer.show(changenode=n, brinfo=br)
2003 2013
2004 2014 def paths(ui, repo, search=None):
2005 2015 """show definition of symbolic path names
2006 2016
2007 2017 Show definition of symbolic path name NAME. If no name is given, show
2008 2018 definition of available names.
2009 2019
2010 2020 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2011 2021 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2012 2022 """
2013 2023 if search:
2014 2024 for name, path in ui.configitems("paths"):
2015 2025 if name == search:
2016 2026 ui.write("%s\n" % path)
2017 2027 return
2018 2028 ui.warn(_("not found!\n"))
2019 2029 return 1
2020 2030 else:
2021 2031 for name, path in ui.configitems("paths"):
2022 2032 ui.write("%s = %s\n" % (name, path))
2023 2033
2024 2034 def postincoming(ui, repo, modheads, optupdate):
2025 2035 if modheads == 0:
2026 2036 return
2027 2037 if optupdate:
2028 2038 if modheads == 1:
2029 return update(ui, repo)
2039 return doupdate(ui, repo)
2030 2040 else:
2031 2041 ui.status(_("not updating, since new heads added\n"))
2032 2042 if modheads > 1:
2033 2043 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2034 2044 else:
2035 2045 ui.status(_("(run 'hg update' to get a working copy)\n"))
2036 2046
2037 2047 def pull(ui, repo, source="default", **opts):
2038 2048 """pull changes from the specified source
2039 2049
2040 2050 Pull changes from a remote repository to a local one.
2041 2051
2042 2052 This finds all changes from the repository at the specified path
2043 2053 or URL and adds them to the local repository. By default, this
2044 2054 does not update the copy of the project in the working directory.
2045 2055
2046 2056 Valid URLs are of the form:
2047 2057
2048 2058 local/filesystem/path
2049 2059 http://[user@]host[:port][/path]
2050 2060 https://[user@]host[:port][/path]
2051 2061 ssh://[user@]host[:port][/path]
2052 2062
2053 2063 Some notes about using SSH with Mercurial:
2054 2064 - SSH requires an accessible shell account on the destination machine
2055 2065 and a copy of hg in the remote path or specified with remotecmd.
2056 2066 - /path is relative to the remote user's home directory by default.
2057 2067 Use two slashes at the start of a path to specify an absolute path.
2058 2068 - Mercurial doesn't use its own compression via SSH; the right thing
2059 2069 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2060 2070 Host *.mylocalnetwork.example.com
2061 2071 Compression off
2062 2072 Host *
2063 2073 Compression on
2064 2074 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2065 2075 with the --ssh command line option.
2066 2076 """
2067 2077 source = ui.expandpath(source)
2068 2078 ui.status(_('pulling from %s\n') % (source))
2069 2079
2070 2080 if opts['ssh']:
2071 2081 ui.setconfig("ui", "ssh", opts['ssh'])
2072 2082 if opts['remotecmd']:
2073 2083 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2074 2084
2075 2085 other = hg.repository(ui, source)
2076 2086 revs = None
2077 2087 if opts['rev'] and not other.local():
2078 2088 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2079 2089 elif opts['rev']:
2080 2090 revs = [other.lookup(rev) for rev in opts['rev']]
2081 2091 modheads = repo.pull(other, heads=revs, force=opts['force'])
2082 2092 return postincoming(ui, repo, modheads, opts['update'])
2083 2093
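# Editor's illustrative note (not part of the original commit): per the URL
# notes in the docstring above, an ssh path is relative to the remote user's
# home directory unless it starts with two slashes, e.g. (hostname made up):
#   hg pull ssh://user@hg.example.com/relative/path
#   hg pull ssh://user@hg.example.com//absolute/path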
2084 2094 def push(ui, repo, dest="default-push", **opts):
2085 2095 """push changes to the specified destination
2086 2096
2087 2097 Push changes from the local repository to the given destination.
2088 2098
2089 2099 This is the symmetrical operation for pull. It helps to move
2090 2100 changes from the current repository to a different one. If the
2091 2101 destination is local this is identical to a pull in that directory
2092 2102 from the current one.
2093 2103
2094 2104 By default, push will refuse to run if it detects the result would
2095 2105 increase the number of remote heads. This generally indicates the
2096 2106 the client has forgotten to sync and merge before pushing.
2097 2107
2098 2108 Valid URLs are of the form:
2099 2109
2100 2110 local/filesystem/path
2101 2111 ssh://[user@]host[:port][/path]
2102 2112
2103 2113 Look at the help text for the pull command for important details
2104 2114 about ssh:// URLs.
2105 2115 """
2106 2116 dest = ui.expandpath(dest)
2107 2117 ui.status('pushing to %s\n' % (dest))
2108 2118
2109 2119 if opts['ssh']:
2110 2120 ui.setconfig("ui", "ssh", opts['ssh'])
2111 2121 if opts['remotecmd']:
2112 2122 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2113 2123
2114 2124 other = hg.repository(ui, dest)
2115 2125 revs = None
2116 2126 if opts['rev']:
2117 2127 revs = [repo.lookup(rev) for rev in opts['rev']]
2118 2128 r = repo.push(other, opts['force'], revs=revs)
2119 2129 return r == 0
2120 2130
2121 2131 def rawcommit(ui, repo, *flist, **rc):
2122 2132 """raw commit interface (DEPRECATED)
2123 2133
2124 2134 (DEPRECATED)
2125 2135 Lowlevel commit, for use in helper scripts.
2126 2136
2127 2137 This command is not intended to be used by normal users, as it is
2128 2138 primarily useful for importing from other SCMs.
2129 2139
2130 2140 This command is now deprecated and will be removed in a future
2131 2141 release, please use debugsetparents and commit instead.
2132 2142 """
2133 2143
2134 2144 ui.warn(_("(the rawcommit command is deprecated)\n"))
2135 2145
2136 2146 message = rc['message']
2137 2147 if not message and rc['logfile']:
2138 2148 try:
2139 2149 message = open(rc['logfile']).read()
2140 2150 except IOError:
2141 2151 pass
2142 2152 if not message and not rc['logfile']:
2143 2153 raise util.Abort(_("missing commit message"))
2144 2154
2145 2155 files = relpath(repo, list(flist))
2146 2156 if rc['files']:
2147 2157 files += open(rc['files']).read().splitlines()
2148 2158
2149 2159 rc['parent'] = map(repo.lookup, rc['parent'])
2150 2160
2151 2161 try:
2152 2162 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2153 2163 except ValueError, inst:
2154 2164 raise util.Abort(str(inst))
2155 2165
2156 2166 def recover(ui, repo):
2157 2167 """roll back an interrupted transaction
2158 2168
2159 2169 Recover from an interrupted commit or pull.
2160 2170
2161 2171 This command tries to fix the repository status after an interrupted
2162 2172 operation. It should only be necessary when Mercurial suggests it.
2163 2173 """
2164 2174 if repo.recover():
2165 2175 return repo.verify()
2166 2176 return 1
2167 2177
2168 2178 def remove(ui, repo, *pats, **opts):
2169 2179 """remove the specified files on the next commit
2170 2180
2171 2181 Schedule the indicated files for removal from the repository.
2172 2182
2173 2183 This command schedules the files to be removed at the next commit.
2174 2184 This only removes files from the current branch, not from the
2175 2185 entire project history. If the files still exist in the working
2176 2186 directory, they will be deleted from it. If invoked with --after,
2177 2187 files that have been manually deleted are marked as removed.
2178 2188 """
2179 2189 names = []
2180 2190 if not opts['after'] and not pats:
2181 2191 raise util.Abort(_('no files specified'))
2182 2192 def okaytoremove(abs, rel, exact):
2183 2193 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2184 2194 reason = None
2185 2195 if not deleted and opts['after']:
2186 2196 reason = _('is still present')
2187 2197 elif modified and not opts['force']:
2188 2198 reason = _('is modified')
2189 2199 elif added:
2190 2200 reason = _('has been marked for add')
2191 2201 elif unknown:
2192 2202 reason = _('is not managed')
2193 2203 elif removed:
2194 2204 return False
2195 2205 if reason:
2196 2206 if exact:
2197 2207 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2198 2208 else:
2199 2209 return True
2200 2210 for src, abs, rel, exact in walk(repo, pats, opts):
2201 2211 if okaytoremove(abs, rel, exact):
2202 2212 if ui.verbose or not exact:
2203 2213 ui.status(_('removing %s\n') % rel)
2204 2214 names.append(abs)
2205 2215 repo.remove(names, unlink=not opts['after'])
2206 2216
2207 2217 def rename(ui, repo, *pats, **opts):
2208 2218 """rename files; equivalent of copy + remove
2209 2219
2210 2220 Mark dest as copies of sources; mark sources for deletion. If
2211 2221 dest is a directory, copies are put in that directory. If dest is
2212 2222 a file, there can only be one source.
2213 2223
2214 2224 By default, this command copies the contents of files as they
2215 2225 stand in the working directory. If invoked with --after, the
2216 2226 operation is recorded, but no copying is performed.
2217 2227
2218 2228 This command takes effect in the next commit.
2219 2229
2220 2230 NOTE: This command should be treated as experimental. While it
2221 2231 should properly record renamed files, this information is not yet
2222 2232 fully used by merge, nor fully reported by log.
2223 2233 """
2224 2234 wlock = repo.wlock(0)
2225 2235 errs, copied = docopy(ui, repo, pats, opts, wlock)
2226 2236 names = []
2227 2237 for abs, rel, exact in copied:
2228 2238 if ui.verbose or not exact:
2229 2239 ui.status(_('removing %s\n') % rel)
2230 2240 names.append(abs)
2231 2241 repo.remove(names, True, wlock)
2232 2242 return errs
2233 2243
2234 2244 def revert(ui, repo, *pats, **opts):
2235 2245 """revert files or dirs to their states as of some revision
2236 2246
2237 2247 With no revision specified, revert the named files or directories
2238 2248 to the contents they had in the parent of the working directory.
2239 2249 This restores the contents of the affected files to an unmodified
2240 2250 state. If the working directory has two parents, you must
2241 2251 explicitly specify the revision to revert to.
2242 2252
2243 2253 Modified files are saved with a .orig suffix before reverting.
2244 2254 To disable these backups, use --no-backup.
2245 2255
2246 2256 Using the -r option, revert the given files or directories to
2247 2257 their contents as of a specific revision. This can be helpful to "roll
2248 2258 back" some or all of a change that should not have been committed.
2249 2259
2250 2260 Revert modifies the working directory. It does not commit any
2251 2261 changes, or change the parent of the working directory. If you
2252 2262 revert to a revision other than the parent of the working
2253 2263 directory, the reverted files will thus appear modified
2254 2264 afterwards.
2255 2265
2256 2266 If a file has been deleted, it is recreated. If the executable
2257 2267 mode of a file was changed, it is reset.
2258 2268
2259 2269 If names are given, all files matching the names are reverted.
2260 2270
2261 2271 If no arguments are given, all files in the repository are reverted.
2262 2272 """
2263 2273 parent, p2 = repo.dirstate.parents()
2264 2274 if opts['rev']:
2265 2275 node = repo.lookup(opts['rev'])
2266 2276 elif p2 != nullid:
2267 2277 raise util.Abort(_('working dir has two parents; '
2268 2278 'you must specify the revision to revert to'))
2269 2279 else:
2270 2280 node = parent
2281 pmf = None
2271 2282 mf = repo.manifest.read(repo.changelog.read(node)[0])
2272 2283
2273 2284 wlock = repo.wlock()
2274 2285
2275 2286 # need all matching names in dirstate and manifest of target rev,
2276 2287 # so have to walk both. do not print errors if files exist in one
2277 2288 # but not other.
2278 2289
2279 2290 names = {}
2280 2291 target_only = {}
2281 2292
2282 2293 # walk dirstate.
2283 2294
2284 2295 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2285 2296 names[abs] = (rel, exact)
2286 2297 if src == 'b':
2287 2298 target_only[abs] = True
2288 2299
2289 2300 # walk target manifest.
2290 2301
2291 2302 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2292 2303 badmatch=names.has_key):
2293 2304 if abs in names: continue
2294 2305 names[abs] = (rel, exact)
2295 2306 target_only[abs] = True
2296 2307
2297 2308 changes = repo.changes(match=names.has_key, wlock=wlock)
2298 2309 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2299 2310
2300 2311 revert = ([], _('reverting %s\n'))
2301 2312 add = ([], _('adding %s\n'))
2302 2313 remove = ([], _('removing %s\n'))
2303 2314 forget = ([], _('forgetting %s\n'))
2304 2315 undelete = ([], _('undeleting %s\n'))
2305 2316 update = {}
2306 2317
2307 2318 disptable = (
2308 2319 # dispatch table:
2309 2320 # file state
2310 2321 # action if in target manifest
2311 2322 # action if not in target manifest
2312 2323 # make backup if in target manifest
2313 2324 # make backup if not in target manifest
2314 2325 (modified, revert, remove, True, True),
2315 2326 (added, revert, forget, True, False),
2316 2327 (removed, undelete, None, False, False),
2317 2328 (deleted, revert, remove, False, False),
2318 2329 (unknown, add, None, True, False),
2319 2330 (target_only, add, None, False, False),
2320 2331 )
2321 2332
2322 2333 entries = names.items()
2323 2334 entries.sort()
2324 2335
2325 2336 for abs, (rel, exact) in entries:
2326 2337 in_mf = abs in mf
2327 2338 def handle(xlist, dobackup):
2328 2339 xlist[0].append(abs)
2329 2340 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2330 2341 bakname = "%s.orig" % rel
2331 2342 ui.note(_('saving current version of %s as %s\n') %
2332 2343 (rel, bakname))
2333 2344 shutil.copyfile(rel, bakname)
2334 2345 shutil.copymode(rel, bakname)
2335 2346 if ui.verbose or not exact:
2336 2347 ui.status(xlist[1] % rel)
2337 2348 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2338 2349 if abs not in table: continue
2339 2350 # file has changed in dirstate
2340 2351 if in_mf:
2341 2352 handle(hitlist, backuphit)
2342 2353 elif misslist is not None:
2343 2354 handle(misslist, backupmiss)
2344 2355 else:
2345 2356 if exact: ui.warn(_('file not managed: %s\n') % rel)
2346 2357 break
2347 2358 else:
2348 2359 # file has not changed in dirstate
2349 2360 if node == parent:
2350 2361 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2351 2362 continue
2352 2363 if not in_mf:
2353 handle(remove, False)
2364 if pmf is None:
2365 # only need parent manifest in this unlikely case,
2366 # so do not read by default
2367 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2368 if abs in pmf:
2369 handle(remove, False)
2354 2370 update[abs] = True
2355 2371
2356 2372 repo.dirstate.forget(forget[0])
2357 2373 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2358 2374 show_stats=False)
2359 2375 repo.dirstate.update(add[0], 'a')
2360 2376 repo.dirstate.update(undelete[0], 'n')
2361 2377 repo.dirstate.update(remove[0], 'r')
2362 2378 return r
2363 2379
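# Editor's illustrative note (not part of the original commit): typical
# invocations, using only options from the command table below (file names
# are hypothetical):
#   hg revert foo.c              # back to the working dir's parent
#   hg revert -r 10 foo.c        # back to revision 10; foo.c then shows as modified
#   hg revert --no-backup foo.c  # skip saving the old contents as foo.c.orig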
2364 2380 def rollback(ui, repo):
2365 2381 """roll back the last transaction in this repository
2366 2382
2367 2383 Roll back the last transaction in this repository, restoring the
2368 2384 project to its state prior to the transaction.
2369 2385
2370 2386 Transactions are used to encapsulate the effects of all commands
2371 2387 that create new changesets or propagate existing changesets into a
2372 2388 repository. For example, the following commands are transactional,
2373 2389 and their effects can be rolled back:
2374 2390
2375 2391 commit
2376 2392 import
2377 2393 pull
2378 2394 push (with this repository as destination)
2379 2395 unbundle
2380 2396
2381 2397 This command should be used with care. There is only one level of
2382 2398 rollback, and there is no way to undo a rollback.
2383 2399
2384 2400 This command is not intended for use on public repositories. Once
2385 2401 changes are visible for pull by other users, rolling a transaction
2386 2402 back locally is ineffective (someone else may already have pulled
2387 2403 the changes). Furthermore, a race is possible with readers of the
2388 2404 repository; for example an in-progress pull from the repository
2389 2405 may fail if a rollback is performed.
2390 2406 """
2391 2407 repo.undo()
2392 2408
2393 2409 def root(ui, repo):
2394 2410 """print the root (top) of the current working dir
2395 2411
2396 2412 Print the root directory of the current repository.
2397 2413 """
2398 2414 ui.write(repo.root + "\n")
2399 2415
2400 2416 def serve(ui, repo, **opts):
2401 2417 """export the repository via HTTP
2402 2418
2403 2419 Start a local HTTP repository browser and pull server.
2404 2420
2405 2421 By default, the server logs accesses to stdout and errors to
2406 2422 stderr. Use the "-A" and "-E" options to log to files.
2407 2423 """
2408 2424
2409 2425 if opts["stdio"]:
2410 2426 if repo is None:
2411 2427 raise hg.RepoError(_('no repo found'))
2412 2428 fin, fout = sys.stdin, sys.stdout
2413 2429 sys.stdout = sys.stderr
2414 2430
2415 2431 # Prevent insertion/deletion of CRs
2416 2432 util.set_binary(fin)
2417 2433 util.set_binary(fout)
2418 2434
2419 2435 def getarg():
2420 2436 argline = fin.readline()[:-1]
2421 2437 arg, l = argline.split()
2422 2438 val = fin.read(int(l))
2423 2439 return arg, val
2424 2440 def respond(v):
2425 2441 fout.write("%d\n" % len(v))
2426 2442 fout.write(v)
2427 2443 fout.flush()
2428 2444
2429 2445 lock = None
2430 2446
2431 2447 while 1:
2432 2448 cmd = fin.readline()[:-1]
2433 2449 if cmd == '':
2434 2450 return
2435 2451 if cmd == "heads":
2436 2452 h = repo.heads()
2437 2453 respond(" ".join(map(hex, h)) + "\n")
2438 2454 if cmd == "lock":
2439 2455 lock = repo.lock()
2440 2456 respond("")
2441 2457 if cmd == "unlock":
2442 2458 if lock:
2443 2459 lock.release()
2444 2460 lock = None
2445 2461 respond("")
2446 2462 elif cmd == "branches":
2447 2463 arg, nodes = getarg()
2448 2464 nodes = map(bin, nodes.split(" "))
2449 2465 r = []
2450 2466 for b in repo.branches(nodes):
2451 2467 r.append(" ".join(map(hex, b)) + "\n")
2452 2468 respond("".join(r))
2453 2469 elif cmd == "between":
2454 2470 arg, pairs = getarg()
2455 2471 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2456 2472 r = []
2457 2473 for b in repo.between(pairs):
2458 2474 r.append(" ".join(map(hex, b)) + "\n")
2459 2475 respond("".join(r))
2460 2476 elif cmd == "changegroup":
2461 2477 nodes = []
2462 2478 arg, roots = getarg()
2463 2479 nodes = map(bin, roots.split(" "))
2464 2480
2465 2481 cg = repo.changegroup(nodes, 'serve')
2466 2482 while 1:
2467 2483 d = cg.read(4096)
2468 2484 if not d:
2469 2485 break
2470 2486 fout.write(d)
2471 2487
2472 2488 fout.flush()
2473 2489
2474 2490 elif cmd == "addchangegroup":
2475 2491 if not lock:
2476 2492 respond("not locked")
2477 2493 continue
2478 2494 respond("")
2479 2495
2480 2496 r = repo.addchangegroup(fin, 'serve')
2481 2497 respond(str(r))
2482 2498
2483 2499 optlist = ("name templates style address port ipv6"
2484 2500 " accesslog errorlog webdir_conf")
2485 2501 for o in optlist.split():
2486 2502 if opts[o]:
2487 2503 ui.setconfig("web", o, opts[o])
2488 2504
2489 2505 if repo is None and not ui.config("web", "webdir_conf"):
2490 2506 raise hg.RepoError(_('no repo found'))
2491 2507
2492 2508 if opts['daemon'] and not opts['daemon_pipefds']:
2493 2509 rfd, wfd = os.pipe()
2494 2510 args = sys.argv[:]
2495 2511 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2496 2512 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2497 2513 args[0], args)
2498 2514 os.close(wfd)
2499 2515 os.read(rfd, 1)
2500 2516 os._exit(0)
2501 2517
2502 2518 try:
2503 2519 httpd = hgweb.create_server(ui, repo)
2504 2520 except socket.error, inst:
2505 2521 raise util.Abort(_('cannot start server: ') + inst.args[1])
2506 2522
2507 2523 if ui.verbose:
2508 2524 addr, port = httpd.socket.getsockname()
2509 2525 if addr == '0.0.0.0':
2510 2526 addr = socket.gethostname()
2511 2527 else:
2512 2528 try:
2513 2529 addr = socket.gethostbyaddr(addr)[0]
2514 2530 except socket.error:
2515 2531 pass
2516 2532 if port != 80:
2517 2533 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2518 2534 else:
2519 2535 ui.status(_('listening at http://%s/\n') % addr)
2520 2536
2521 2537 if opts['pid_file']:
2522 2538 fp = open(opts['pid_file'], 'w')
2523 2539 fp.write(str(os.getpid()))
2524 2540 fp.close()
2525 2541
2526 2542 if opts['daemon_pipefds']:
2527 2543 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2528 2544 os.close(rfd)
2529 2545 os.write(wfd, 'y')
2530 2546 os.close(wfd)
2531 2547 sys.stdout.flush()
2532 2548 sys.stderr.flush()
2533 2549 fd = os.open(util.nulldev, os.O_RDWR)
2534 2550 if fd != 0: os.dup2(fd, 0)
2535 2551 if fd != 1: os.dup2(fd, 1)
2536 2552 if fd != 2: os.dup2(fd, 2)
2537 2553 if fd not in (0, 1, 2): os.close(fd)
2538 2554
2539 2555 httpd.serve_forever()
2540 2556
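# Editor's illustrative note (not part of the original commit): in --stdio
# mode the loop above speaks a simple framed protocol: each request is a
# command name on its own line, optionally followed by "argname length\n"
# and exactly <length> bytes of argument data read by getarg(); every
# respond() call writes the payload length, a newline, then the payload.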
2541 2557 def status(ui, repo, *pats, **opts):
2542 2558 """show changed files in the working directory
2543 2559
2544 2560 Show changed files in the repository. If names are
2545 2561 given, only files that match are shown.
2546 2562
2547 2563 The codes used to show the status of files are:
2548 2564 M = modified
2549 2565 A = added
2550 2566 R = removed
2551 2567 ! = deleted, but still tracked
2552 2568 ? = not tracked
2553 2569 I = ignored (not shown by default)
2554 2570 """
2555 2571
2556 2572 show_ignored = opts['ignored'] and True or False
2557 2573 files, matchfn, anypats = matchpats(repo, pats, opts)
2558 2574 cwd = (pats and repo.getcwd()) or ''
2559 2575 modified, added, removed, deleted, unknown, ignored = [
2560 2576 [util.pathto(cwd, x) for x in n]
2561 2577 for n in repo.changes(files=files, match=matchfn,
2562 2578 show_ignored=show_ignored)]
2563 2579
2564 2580 changetypes = [('modified', 'M', modified),
2565 2581 ('added', 'A', added),
2566 2582 ('removed', 'R', removed),
2567 2583 ('deleted', '!', deleted),
2568 2584 ('unknown', '?', unknown),
2569 2585 ('ignored', 'I', ignored)]
2570 2586
2571 2587 end = opts['print0'] and '\0' or '\n'
2572 2588
2573 2589 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2574 2590 or changetypes):
2575 2591 if opts['no_status']:
2576 2592 format = "%%s%s" % end
2577 2593 else:
2578 2594 format = "%s %%s%s" % (char, end)
2579 2595
2580 2596 for f in changes:
2581 2597 ui.write(format % f)
2582 2598
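# Editor's illustrative note (not part of the original commit): with the
# default format string above each output line is "<code> <name>", e.g.
#   M hacked.c
#   ? scratch.txt
# (file names are made up); -n/--no-status drops the leading status code.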
2583 2599 def tag(ui, repo, name, rev_=None, **opts):
2584 2600 """add a tag for the current tip or a given revision
2585 2601
2586 2602 Name a particular revision using <name>.
2587 2603
2588 2604 Tags are used to name particular revisions of the repository and are
2589 2605 very useful to compare different revisions, to go back to significant
2590 2606 earlier versions or to mark branch points as releases, etc.
2591 2607
2592 2608 If no revision is given, the tip is used.
2593 2609
2594 2610 To facilitate version control, distribution, and merging of tags,
2595 2611 they are stored as a file named ".hgtags" which is managed
2596 2612 similarly to other project files and can be hand-edited if
2597 2613 necessary. The file '.hg/localtags' is used for local tags (not
2598 2614 shared among repositories).
2599 2615 """
2600 2616 if name == "tip":
2601 2617 raise util.Abort(_("the name 'tip' is reserved"))
2602 2618 if rev_ is not None:
2603 2619 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2604 2620 "please use 'hg tag [-r REV] NAME' instead\n"))
2605 2621 if opts['rev']:
2606 2622 raise util.Abort(_("use only one form to specify the revision"))
2607 2623 if opts['rev']:
2608 2624 rev_ = opts['rev']
2609 2625 if rev_:
2610 2626 r = hex(repo.lookup(rev_))
2611 2627 else:
2612 2628 r = hex(repo.changelog.tip())
2613 2629
2614 2630 disallowed = (revrangesep, '\r', '\n')
2615 2631 for c in disallowed:
2616 2632 if name.find(c) >= 0:
2617 2633 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2618 2634
2619 2635 repo.hook('pretag', throw=True, node=r, tag=name,
2620 2636 local=int(not not opts['local']))
2621 2637
2622 2638 if opts['local']:
2623 2639 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2624 2640 repo.hook('tag', node=r, tag=name, local=1)
2625 2641 return
2626 2642
2627 2643 for x in repo.changes():
2628 2644 if ".hgtags" in x:
2629 2645 raise util.Abort(_("working copy of .hgtags is changed "
2630 2646 "(please commit .hgtags manually)"))
2631 2647
2632 2648 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2633 2649 if repo.dirstate.state(".hgtags") == '?':
2634 2650 repo.add([".hgtags"])
2635 2651
2636 2652 message = (opts['message'] or
2637 2653 _("Added tag %s for changeset %s") % (name, r))
2638 2654 try:
2639 2655 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2640 2656 repo.hook('tag', node=r, tag=name, local=0)
2641 2657 except ValueError, inst:
2642 2658 raise util.Abort(str(inst))
2643 2659
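# Editor's illustrative note (not part of the original commit): the write()
# calls above show the on-disk tag format: one "<40-hex changeset id> <tag name>"
# pair per line, appended to .hgtags (or to .hg/localtags with --local),
# e.g. (the hash is made up):
#   0123456789abcdef0123456789abcdef01234567 release-1.0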
2644 2660 def tags(ui, repo):
2645 2661 """list repository tags
2646 2662
2647 2663 List the repository tags.
2648 2664
2649 2665 This lists both regular and local tags.
2650 2666 """
2651 2667
2652 2668 l = repo.tagslist()
2653 2669 l.reverse()
2654 2670 for t, n in l:
2655 2671 try:
2656 2672 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2657 2673 except KeyError:
2658 2674 r = " ?:?"
2659 2675 if ui.quiet:
2660 2676 ui.write("%s\n" % t)
2661 2677 else:
2662 2678 ui.write("%-30s %s\n" % (t, r))
2663 2679
2664 2680 def tip(ui, repo, **opts):
2665 2681 """show the tip revision
2666 2682
2667 2683 Show the tip revision.
2668 2684 """
2669 2685 n = repo.changelog.tip()
2670 2686 br = None
2671 2687 if opts['branches']:
2672 2688 br = repo.branchlookup([n])
2673 2689 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2674 2690 if opts['patch']:
2675 2691 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2676 2692
2677 2693 def unbundle(ui, repo, fname, **opts):
2678 2694 """apply a changegroup file
2679 2695
2680 2696 Apply a compressed changegroup file generated by the bundle
2681 2697 command.
2682 2698 """
2683 2699 f = urllib.urlopen(fname)
2684 2700
2685 2701 header = f.read(6)
2686 2702 if not header.startswith("HG"):
2687 2703 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2688 2704 elif not header.startswith("HG10"):
2689 2705 raise util.Abort(_("%s: unknown bundle version") % fname)
2690 2706 elif header == "HG10BZ":
2691 2707 def generator(f):
2692 2708 zd = bz2.BZ2Decompressor()
2693 2709 zd.decompress("BZ")
2694 2710 for chunk in f:
2695 2711 yield zd.decompress(chunk)
2696 2712 elif header == "HG10UN":
2697 2713 def generator(f):
2698 2714 for chunk in f:
2699 2715 yield chunk
2700 2716 else:
2701 2717 raise util.Abort(_("%s: unknown bundle compression type")
2702 2718 % fname)
2703 2719 gen = generator(util.filechunkiter(f, 4096))
2704 2720 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2705 2721 return postincoming(ui, repo, modheads, opts['update'])
2706 2722
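# Editor's illustrative note (not part of the original commit): per the
# header checks above, a bundle file begins with six identifying bytes:
# "HG10BZ" for a bzip2-compressed changegroup (the "BZ" prefix is re-fed to
# the decompressor) and "HG10UN" for an uncompressed one; anything else
# starting with "HG" is rejected as an unknown version or compression type.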
2707 2723 def undo(ui, repo):
2708 2724 """undo the last commit or pull (DEPRECATED)
2709 2725
2710 2726 (DEPRECATED)
2711 2727 This command is now deprecated and will be removed in a future
2712 2728 release. Please use the rollback command instead. For usage
2713 2729 instructions, see the rollback command.
2714 2730 """
2715 2731 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2716 2732 repo.undo()
2717 2733
2718 2734 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2719 2735 branch=None, **opts):
2720 2736 """update or merge working directory
2721 2737
2722 2738 Update the working directory to the specified revision.
2723 2739
2724 2740 If there are no outstanding changes in the working directory and
2725 2741 there is a linear relationship between the current version and the
2726 2742 requested version, the result is the requested version.
2727 2743
2728 Otherwise the result is a merge between the contents of the
2729 current working directory and the requested version. Files that
2730 changed between either parent are marked as changed for the next
2731 commit and a commit must be performed before any further updates
2732 are allowed.
2744 To merge the working directory with another revision, use the
2745 merge command.
2733 2746
2734 2747 By default, update will refuse to run if doing so would require
2735 2748 merging or discarding local changes.
2736 2749 """
2750 if merge:
2751 ui.warn(_('(the -m/--merge option is deprecated; '
2752 'use the merge command instead)\n'))
2753 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2754
2755 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2756 branch=None, **opts):
2737 2757 if branch:
2738 2758 br = repo.branchlookup(branch=branch)
2739 2759 found = []
2740 2760 for x in br:
2741 2761 if branch in br[x]:
2742 2762 found.append(x)
2743 2763 if len(found) > 1:
2744 2764 ui.warn(_("Found multiple heads for %s\n") % branch)
2745 2765 for x in found:
2746 2766 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2747 2767 return 1
2748 2768 if len(found) == 1:
2749 2769 node = found[0]
2750 2770 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2751 2771 else:
2752 2772 ui.warn(_("branch %s not found\n") % (branch))
2753 2773 return 1
2754 2774 else:
2755 2775 node = node and repo.lookup(node) or repo.changelog.tip()
2756 2776 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2757 2777
2758 2778 def verify(ui, repo):
2759 2779 """verify the integrity of the repository
2760 2780
2761 2781 Verify the integrity of the current repository.
2762 2782
2763 2783 This will perform an extensive check of the repository's
2764 2784 integrity, validating the hashes and checksums of each entry in
2765 2785 the changelog, manifest, and tracked files, as well as the
2766 2786 integrity of their crosslinks and indices.
2767 2787 """
2768 2788 return repo.verify()
2769 2789
2770 2790 # Command options and aliases are listed here, alphabetically
2771 2791
2772 2792 table = {
2773 2793 "^add":
2774 2794 (add,
2775 2795 [('I', 'include', [], _('include names matching the given patterns')),
2776 2796 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2777 2797 _('hg add [OPTION]... [FILE]...')),
2778 2798 "debugaddremove|addremove":
2779 2799 (addremove,
2780 2800 [('I', 'include', [], _('include names matching the given patterns')),
2781 2801 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2782 2802 _('hg addremove [OPTION]... [FILE]...')),
2783 2803 "^annotate":
2784 2804 (annotate,
2785 2805 [('r', 'rev', '', _('annotate the specified revision')),
2786 2806 ('a', 'text', None, _('treat all files as text')),
2787 2807 ('u', 'user', None, _('list the author')),
2788 2808 ('d', 'date', None, _('list the date')),
2789 2809 ('n', 'number', None, _('list the revision number (default)')),
2790 2810 ('c', 'changeset', None, _('list the changeset')),
2791 2811 ('I', 'include', [], _('include names matching the given patterns')),
2792 2812 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2793 2813 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2794 2814 "archive":
2795 2815 (archive,
2796 2816 [('', 'no-decode', None, _('do not pass files through decoders')),
2797 2817 ('p', 'prefix', '', _('directory prefix for files in archive')),
2798 2818 ('r', 'rev', '', _('revision to distribute')),
2799 2819 ('t', 'type', '', _('type of distribution to create')),
2800 2820 ('I', 'include', [], _('include names matching the given patterns')),
2801 2821 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2802 2822 _('hg archive [OPTION]... DEST')),
2803 2823 "backout":
2804 2824 (backout,
2805 2825 [('', 'merge', None,
2806 2826 _('merge with old dirstate parent after backout')),
2807 2827 ('m', 'message', '', _('use <text> as commit message')),
2808 2828 ('l', 'logfile', '', _('read commit message from <file>')),
2809 2829 ('d', 'date', '', _('record datecode as commit date')),
2810 2830 ('u', 'user', '', _('record user as committer')),
2811 2831 ('I', 'include', [], _('include names matching the given patterns')),
2812 2832 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2813 2833 _('hg backout [OPTION]... REV')),
2814 2834 "bundle":
2815 2835 (bundle,
2816 2836 [('f', 'force', None,
2817 2837 _('run even when remote repository is unrelated'))],
2818 2838 _('hg bundle FILE DEST')),
2819 2839 "cat":
2820 2840 (cat,
2821 2841 [('o', 'output', '', _('print output to file with formatted name')),
2822 2842 ('r', 'rev', '', _('print the given revision')),
2823 2843 ('I', 'include', [], _('include names matching the given patterns')),
2824 2844 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2825 2845 _('hg cat [OPTION]... FILE...')),
2826 2846 "^clone":
2827 2847 (clone,
2828 2848 [('U', 'noupdate', None, _('do not update the new working directory')),
2829 2849 ('r', 'rev', [],
2830 2850 _('a changeset you would like to have after cloning')),
2831 2851 ('', 'pull', None, _('use pull protocol to copy metadata')),
2832 2852 ('e', 'ssh', '', _('specify ssh command to use')),
2833 2853 ('', 'remotecmd', '',
2834 2854 _('specify hg command to run on the remote side'))],
2835 2855 _('hg clone [OPTION]... SOURCE [DEST]')),
2836 2856 "^commit|ci":
2837 2857 (commit,
2838 2858 [('A', 'addremove', None,
2839 2859 _('mark new/missing files as added/removed before committing')),
2840 2860 ('m', 'message', '', _('use <text> as commit message')),
2841 2861 ('l', 'logfile', '', _('read the commit message from <file>')),
2842 2862 ('d', 'date', '', _('record datecode as commit date')),
2843 2863 ('u', 'user', '', _('record user as committer')),
2844 2864 ('I', 'include', [], _('include names matching the given patterns')),
2845 2865 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2846 2866 _('hg commit [OPTION]... [FILE]...')),
2847 2867 "copy|cp":
2848 2868 (copy,
2849 2869 [('A', 'after', None, _('record a copy that has already occurred')),
2850 2870 ('f', 'force', None,
2851 2871 _('forcibly copy over an existing managed file')),
2852 2872 ('I', 'include', [], _('include names matching the given patterns')),
2853 2873 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2854 2874 _('hg copy [OPTION]... [SOURCE]... DEST')),
2855 2875 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2856 2876 "debugcomplete":
2857 2877 (debugcomplete,
2858 2878 [('o', 'options', None, _('show the command options'))],
2859 2879 _('debugcomplete [-o] CMD')),
2860 2880 "debugrebuildstate":
2861 2881 (debugrebuildstate,
2862 2882 [('r', 'rev', '', _('revision to rebuild to'))],
2863 2883 _('debugrebuildstate [-r REV] [REV]')),
2864 2884 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2865 2885 "debugconfig": (debugconfig, [], _('debugconfig')),
2866 2886 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2867 2887 "debugstate": (debugstate, [], _('debugstate')),
2868 2888 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2869 2889 "debugindex": (debugindex, [], _('debugindex FILE')),
2870 2890 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2871 2891 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2872 2892 "debugwalk":
2873 2893 (debugwalk,
2874 2894 [('I', 'include', [], _('include names matching the given patterns')),
2875 2895 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2876 2896 _('debugwalk [OPTION]... [FILE]...')),
2877 2897 "^diff":
2878 2898 (diff,
2879 2899 [('r', 'rev', [], _('revision')),
2880 2900 ('a', 'text', None, _('treat all files as text')),
2881 2901 ('p', 'show-function', None,
2882 2902 _('show which function each change is in')),
2883 2903 ('w', 'ignore-all-space', None,
2884 2904 _('ignore white space when comparing lines')),
2885 2905 ('I', 'include', [], _('include names matching the given patterns')),
2886 2906 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2887 2907 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2888 2908 "^export":
2889 2909 (export,
2890 2910 [('o', 'output', '', _('print output to file with formatted name')),
2891 2911 ('a', 'text', None, _('treat all files as text')),
2892 2912 ('', 'switch-parent', None, _('diff against the second parent'))],
2893 2913 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2894 2914 "debugforget|forget":
2895 2915 (forget,
2896 2916 [('I', 'include', [], _('include names matching the given patterns')),
2897 2917 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2898 2918 _('hg forget [OPTION]... FILE...')),
2899 2919 "grep":
2900 2920 (grep,
2901 2921 [('0', 'print0', None, _('end fields with NUL')),
2902 2922 ('', 'all', None, _('print all revisions that match')),
2903 2923 ('i', 'ignore-case', None, _('ignore case when matching')),
2904 2924 ('l', 'files-with-matches', None,
2905 2925 _('print only filenames and revs that match')),
2906 2926 ('n', 'line-number', None, _('print matching line numbers')),
2907 2927 ('r', 'rev', [], _('search in given revision range')),
2908 2928 ('u', 'user', None, _('print user who committed change')),
2909 2929 ('I', 'include', [], _('include names matching the given patterns')),
2910 2930 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2911 2931 _('hg grep [OPTION]... PATTERN [FILE]...')),
2912 2932 "heads":
2913 2933 (heads,
2914 2934 [('b', 'branches', None, _('show branches')),
2915 2935 ('', 'style', '', _('display using template map file')),
2916 2936 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2917 2937 ('', 'template', '', _('display with template'))],
2918 2938 _('hg heads [-b] [-r <rev>]')),
2919 2939 "help": (help_, [], _('hg help [COMMAND]')),
2920 2940 "identify|id": (identify, [], _('hg identify')),
2921 2941 "import|patch":
2922 2942 (import_,
2923 2943 [('p', 'strip', 1,
2924 2944 _('directory strip option for patch. This has the same\n'
2925 2945 'meaning as the corresponding patch option')),
2926 2946 ('b', 'base', '', _('base path')),
2927 2947 ('f', 'force', None,
2928 2948 _('skip check for outstanding uncommitted changes'))],
2929 2949 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2930 2950 "incoming|in": (incoming,
2931 2951 [('M', 'no-merges', None, _('do not show merges')),
2932 2952 ('f', 'force', None,
2933 2953 _('run even when remote repository is unrelated')),
2934 2954 ('', 'style', '', _('display using template map file')),
2935 2955 ('n', 'newest-first', None, _('show newest record first')),
2936 2956 ('', 'bundle', '', _('file to store the bundles into')),
2937 2957 ('p', 'patch', None, _('show patch')),
2938 2958 ('', 'template', '', _('display with template')),
2939 2959 ('e', 'ssh', '', _('specify ssh command to use')),
2940 2960 ('', 'remotecmd', '',
2941 2961 _('specify hg command to run on the remote side'))],
2942 2962 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2943 2963 "^init": (init, [], _('hg init [DEST]')),
2944 2964 "locate":
2945 2965 (locate,
2946 2966 [('r', 'rev', '', _('search the repository as it stood at rev')),
2947 2967 ('0', 'print0', None,
2948 2968 _('end filenames with NUL, for use with xargs')),
2949 2969 ('f', 'fullpath', None,
2950 2970 _('print complete paths from the filesystem root')),
2951 2971 ('I', 'include', [], _('include names matching the given patterns')),
2952 2972 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2953 2973 _('hg locate [OPTION]... [PATTERN]...')),
2954 2974 "^log|history":
2955 2975 (log,
2956 2976 [('b', 'branches', None, _('show branches')),
2957 2977 ('k', 'keyword', [], _('search for a keyword')),
2958 2978 ('l', 'limit', '', _('limit number of changes displayed')),
2959 2979 ('r', 'rev', [], _('show the specified revision or range')),
2960 2980 ('M', 'no-merges', None, _('do not show merges')),
2961 2981 ('', 'style', '', _('display using template map file')),
2962 2982 ('m', 'only-merges', None, _('show only merges')),
2963 2983 ('p', 'patch', None, _('show patch')),
2964 2984 ('', 'template', '', _('display with template')),
2965 2985 ('I', 'include', [], _('include names matching the given patterns')),
2966 2986 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2967 2987 _('hg log [OPTION]... [FILE]')),
2968 2988 "manifest": (manifest, [], _('hg manifest [REV]')),
2969 2989 "merge":
2970 2990 (merge,
2971 2991 [('b', 'branch', '', _('merge with head of a specific branch')),
2972 2992 ('f', 'force', None, _('force a merge with outstanding changes'))],
2973 2993 _('hg merge [-b TAG] [-f] [REV]')),
2974 2994 "outgoing|out": (outgoing,
2975 2995 [('M', 'no-merges', None, _('do not show merges')),
2976 2996 ('f', 'force', None,
2977 2997 _('run even when remote repository is unrelated')),
2978 2998 ('p', 'patch', None, _('show patch')),
2979 2999 ('', 'style', '', _('display using template map file')),
2980 3000 ('n', 'newest-first', None, _('show newest record first')),
2981 3001 ('', 'template', '', _('display with template')),
2982 3002 ('e', 'ssh', '', _('specify ssh command to use')),
2983 3003 ('', 'remotecmd', '',
2984 3004 _('specify hg command to run on the remote side'))],
2985 3005 _('hg outgoing [-M] [-p] [-n] [DEST]')),
2986 3006 "^parents":
2987 3007 (parents,
2988 3008 [('b', 'branches', None, _('show branches')),
2989 3009 ('', 'style', '', _('display using template map file')),
2990 3010 ('', 'template', '', _('display with template'))],
2991 3011 _('hg parents [-b] [REV]')),
2992 3012 "paths": (paths, [], _('hg paths [NAME]')),
2993 3013 "^pull":
2994 3014 (pull,
2995 3015 [('u', 'update', None,
2996 3016 _('update the working directory to tip after pull')),
2997 3017 ('e', 'ssh', '', _('specify ssh command to use')),
2998 3018 ('f', 'force', None,
2999 3019 _('run even when remote repository is unrelated')),
3000 3020 ('r', 'rev', [], _('a specific revision you would like to pull')),
3001 3021 ('', 'remotecmd', '',
3002 3022 _('specify hg command to run on the remote side'))],
3003 3023 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3004 3024 "^push":
3005 3025 (push,
3006 3026 [('f', 'force', None, _('force push')),
3007 3027 ('e', 'ssh', '', _('specify ssh command to use')),
3008 3028 ('r', 'rev', [], _('a specific revision you would like to push')),
3009 3029 ('', 'remotecmd', '',
3010 3030 _('specify hg command to run on the remote side'))],
3011 3031 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3012 3032 "debugrawcommit|rawcommit":
3013 3033 (rawcommit,
3014 3034 [('p', 'parent', [], _('parent')),
3015 3035 ('d', 'date', '', _('date code')),
3016 3036 ('u', 'user', '', _('user')),
3017 3037 ('F', 'files', '', _('file list')),
3018 3038 ('m', 'message', '', _('commit message')),
3019 3039 ('l', 'logfile', '', _('commit message file'))],
3020 3040 _('hg debugrawcommit [OPTION]... [FILE]...')),
3021 3041 "recover": (recover, [], _('hg recover')),
3022 3042 "^remove|rm":
3023 3043 (remove,
3024 3044 [('A', 'after', None, _('record remove that has already occurred')),
3025 3045 ('f', 'force', None, _('remove file even if modified')),
3026 3046 ('I', 'include', [], _('include names matching the given patterns')),
3027 3047 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3028 3048 _('hg remove [OPTION]... FILE...')),
3029 3049 "rename|mv":
3030 3050 (rename,
3031 3051 [('A', 'after', None, _('record a rename that has already occurred')),
3032 3052 ('f', 'force', None,
3033 3053 _('forcibly copy over an existing managed file')),
3034 3054 ('I', 'include', [], _('include names matching the given patterns')),
3035 3055 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3036 3056 _('hg rename [OPTION]... SOURCE... DEST')),
3037 3057 "^revert":
3038 3058 (revert,
3039 3059 [('r', 'rev', '', _('revision to revert to')),
3040 3060 ('', 'no-backup', None, _('do not save backup copies of files')),
3041 3061 ('I', 'include', [], _('include names matching given patterns')),
3042 3062 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3043 3063 _('hg revert [-r REV] [NAME]...')),
3044 3064 "rollback": (rollback, [], _('hg rollback')),
3045 3065 "root": (root, [], _('hg root')),
3046 3066 "^serve":
3047 3067 (serve,
3048 3068 [('A', 'accesslog', '', _('name of access log file to write to')),
3049 3069 ('d', 'daemon', None, _('run server in background')),
3050 3070 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3051 3071 ('E', 'errorlog', '', _('name of error log file to write to')),
3052 3072 ('p', 'port', 0, _('port to use (default: 8000)')),
3053 3073 ('a', 'address', '', _('address to use')),
3054 3074 ('n', 'name', '',
3055 3075 _('name to show in web pages (default: working dir)')),
3056 3076 ('', 'webdir-conf', '', _('name of the webdir config file'
3057 3077 ' (serve more than one repo)')),
3058 3078 ('', 'pid-file', '', _('name of file to write process ID to')),
3059 3079 ('', 'stdio', None, _('for remote clients')),
3060 3080 ('t', 'templates', '', _('web templates to use')),
3061 3081 ('', 'style', '', _('template style to use')),
3062 3082 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3063 3083 _('hg serve [OPTION]...')),
3064 3084 "^status|st":
3065 3085 (status,
3066 3086 [('m', 'modified', None, _('show only modified files')),
3067 3087 ('a', 'added', None, _('show only added files')),
3068 3088 ('r', 'removed', None, _('show only removed files')),
3069 3089 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3070 3090 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3071 3091 ('i', 'ignored', None, _('show ignored files')),
3072 3092 ('n', 'no-status', None, _('hide status prefix')),
3073 3093 ('0', 'print0', None,
3074 3094 _('end filenames with NUL, for use with xargs')),
3075 3095 ('I', 'include', [], _('include names matching the given patterns')),
3076 3096 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3077 3097 _('hg status [OPTION]... [FILE]...')),
3078 3098 "tag":
3079 3099 (tag,
3080 3100 [('l', 'local', None, _('make the tag local')),
3081 3101 ('m', 'message', '', _('message for tag commit log entry')),
3082 3102 ('d', 'date', '', _('record datecode as commit date')),
3083 3103 ('u', 'user', '', _('record user as committer')),
3084 3104 ('r', 'rev', '', _('revision to tag'))],
3085 3105 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3086 3106 "tags": (tags, [], _('hg tags')),
3087 3107 "tip":
3088 3108 (tip,
3089 3109 [('b', 'branches', None, _('show branches')),
3090 3110 ('', 'style', '', _('display using template map file')),
3091 3111 ('p', 'patch', None, _('show patch')),
3092 3112 ('', 'template', '', _('display with template'))],
3093 3113 _('hg tip [-b] [-p]')),
3094 3114 "unbundle":
3095 3115 (unbundle,
3096 3116 [('u', 'update', None,
3097 3117 _('update the working directory to tip after unbundle'))],
3098 3118 _('hg unbundle [-u] FILE')),
3099 3119 "debugundo|undo": (undo, [], _('hg undo')),
3100 3120 "^update|up|checkout|co":
3101 3121 (update,
3102 3122 [('b', 'branch', '', _('checkout the head of a specific branch')),
3103 ('m', 'merge', None, _('allow merging of branches')),
3123 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3104 3124 ('C', 'clean', None, _('overwrite locally modified files')),
3105 3125 ('f', 'force', None, _('force a merge with outstanding changes'))],
3106 3126 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3107 3127 "verify": (verify, [], _('hg verify')),
3108 3128 "version": (show_version, [], _('hg version')),
3109 3129 }
3110 3130
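Each entry above maps a name spec (a leading '^' puts the command in the short help list, '|' separates aliases) to a (function, options, usage) tuple, where every option is (short flag, long name, default, help text). A minimal sketch of one entry in that shape (the command, option and strings are invented):

    _ = lambda s: s   # stand-in for the gettext wrapper used in the real table

    def smile(ui, repo, *pats, **opts):
        """hypothetical command body; entries with a repo are called as func(ui, repo, ...)"""
        ui.write(_("smiling at %d pattern(s) in %s\n") % (len(pats), repo.root))

    toy_entry = {
        "^smile|sm":
            (smile,
             [('W', 'wide', None, _('smile widely'))],
             _('hg smile [-W] [PATTERN]...')),
    }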
3111 3131 globalopts = [
3112 3132 ('R', 'repository', '',
3113 3133 _('repository root directory or symbolic path name')),
3114 3134 ('', 'cwd', '', _('change working directory')),
3115 3135 ('y', 'noninteractive', None,
3116 3136 _('do not prompt, assume \'yes\' for any required answers')),
3117 3137 ('q', 'quiet', None, _('suppress output')),
3118 3138 ('v', 'verbose', None, _('enable additional output')),
3139 ('', 'config', [], _('set/override config option')),
3119 3140 ('', 'debug', None, _('enable debugging output')),
3120 3141 ('', 'debugger', None, _('start debugger')),
3121 3142 ('', 'traceback', None, _('print traceback on exception')),
3122 3143 ('', 'time', None, _('time how long the command takes')),
3123 3144 ('', 'profile', None, _('print command execution profile')),
3124 3145 ('', 'version', None, _('output version information and exit')),
3125 3146 ('h', 'help', None, _('display help and exit')),
3126 3147 ]
3127 3148
3128 3149 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3129 3150 " debugindex debugindexdot")
3130 3151 optionalrepo = ("paths serve debugconfig")
3131 3152
3132 3153 def findpossible(cmd):
3133 3154 """
3134 3155 Return cmd -> (aliases, command table entry)
3135 3156 for each matching command.
3136 3157 Return debug commands (or their aliases) only if no normal command matches.
3137 3158 """
3138 3159 choice = {}
3139 3160 debugchoice = {}
3140 3161 for e in table.keys():
3141 3162 aliases = e.lstrip("^").split("|")
3142 3163 found = None
3143 3164 if cmd in aliases:
3144 3165 found = cmd
3145 3166 else:
3146 3167 for a in aliases:
3147 3168 if a.startswith(cmd):
3148 3169 found = a
3149 3170 break
3150 3171 if found is not None:
3151 3172 if aliases[0].startswith("debug"):
3152 3173 debugchoice[found] = (aliases, table[e])
3153 3174 else:
3154 3175 choice[found] = (aliases, table[e])
3155 3176
3156 3177 if not choice and debugchoice:
3157 3178 choice = debugchoice
3158 3179
3159 3180 return choice
3160 3181
3161 3182 def find(cmd):
3162 3183 """Return (aliases, command table entry) for command string."""
3163 3184 choice = findpossible(cmd)
3164 3185
3165 3186 if choice.has_key(cmd):
3166 3187 return choice[cmd]
3167 3188
3168 3189 if len(choice) > 1:
3169 3190 clist = choice.keys()
3170 3191 clist.sort()
3171 3192 raise AmbiguousCommand(cmd, clist)
3172 3193
3173 3194 if choice:
3174 3195 return choice.values()[0]
3175 3196
3176 3197 raise UnknownCommand(cmd)
3177 3198
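findpossible() and find() implement command abbreviation: an exact alias always wins, a unique prefix resolves to its command, and several prefix matches raise AmbiguousCommand. The same rule restated as a standalone sketch (the alias map is invented):

    def resolve(cmd, aliasmap):
        # aliasmap: canonical name -> list of accepted aliases
        matches = set()
        for canonical, names in aliasmap.items():
            if cmd in names:
                return canonical              # an exact alias match wins outright
            for name in names:
                if name.startswith(cmd):
                    matches.add(canonical)    # remember every prefix match
        if len(matches) == 1:
            return matches.pop()
        if matches:
            raise ValueError("ambiguous: " + ", ".join(sorted(matches)))
        raise KeyError(cmd)

    aliasmap = {"status": ["status", "st"], "serve": ["serve"], "summary": ["summary"]}
    assert resolve("st", aliasmap) == "status"
    assert resolve("se", aliasmap) == "serve"
    # resolve("s", aliasmap) raises ValueError: serve, status and summary all match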
3178 3199 def catchterm(*args):
3179 3200 raise util.SignalInterrupt
3180 3201
3181 3202 def run():
3182 3203 sys.exit(dispatch(sys.argv[1:]))
3183 3204
3184 3205 class ParseError(Exception):
3185 3206 """Exception raised on errors in parsing the command line."""
3186 3207
3187 3208 def parse(ui, args):
3188 3209 options = {}
3189 3210 cmdoptions = {}
3190 3211
3191 3212 try:
3192 3213 args = fancyopts.fancyopts(args, globalopts, options)
3193 3214 except fancyopts.getopt.GetoptError, inst:
3194 3215 raise ParseError(None, inst)
3195 3216
3196 3217 if args:
3197 3218 cmd, args = args[0], args[1:]
3198 3219 aliases, i = find(cmd)
3199 3220 cmd = aliases[0]
3200 3221 defaults = ui.config("defaults", cmd)
3201 3222 if defaults:
3202 3223 args = defaults.split() + args
3203 3224 c = list(i[1])
3204 3225 else:
3205 3226 cmd = None
3206 3227 c = []
3207 3228
3208 3229 # combine global options into local
3209 3230 for o in globalopts:
3210 3231 c.append((o[0], o[1], options[o[1]], o[3]))
3211 3232
3212 3233 try:
3213 3234 args = fancyopts.fancyopts(args, c, cmdoptions)
3214 3235 except fancyopts.getopt.GetoptError, inst:
3215 3236 raise ParseError(cmd, inst)
3216 3237
3217 3238 # separate global options back out
3218 3239 for o in globalopts:
3219 3240 n = o[1]
3220 3241 options[n] = cmdoptions[n]
3221 3242 del cmdoptions[n]
3222 3243
3223 3244 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3224 3245
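Because parse() prepends ui.config('defaults', cmd) to the arguments before option parsing, a [defaults] section in an hgrc behaves exactly as if its options had been typed on the command line. An illustrative fragment (the values are made up):

    [defaults]
    log = -l 20 --no-merges
    serve = -p 8080 --daemon

With that in place, running 'hg log' is parsed as 'hg log -l 20 --no-merges'.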
3225 3246 def dispatch(args):
3226 3247 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3227 3248 num = getattr(signal, name, None)
3228 3249 if num: signal.signal(num, catchterm)
3229 3250
3230 3251 try:
3231 3252 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3232 3253 except util.Abort, inst:
3233 3254 sys.stderr.write(_("abort: %s\n") % inst)
3234 3255 return -1
3235 3256
3236 3257 external = []
3237 3258 for x in u.extensions():
3238 3259 try:
3239 3260 if x[1]:
3240 3261 mod = imp.load_source(x[0], x[1])
3241 3262 else:
3242 3263 def importh(name):
3243 3264 mod = __import__(name)
3244 3265 components = name.split('.')
3245 3266 for comp in components[1:]:
3246 3267 mod = getattr(mod, comp)
3247 3268 return mod
3248 3269 try:
3249 3270 mod = importh("hgext." + x[0])
3250 3271 except ImportError:
3251 3272 mod = importh(x[0])
3252 3273 external.append(mod)
3253 3274 except Exception, inst:
3254 3275 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3255 3276 if u.traceback:
3256 3277 traceback.print_exc()
3257 3278 return 1
3258 3279 continue
3259 3280
3260 3281 for x in external:
3261 3282 cmdtable = getattr(x, 'cmdtable', {})
3262 3283 for t in cmdtable:
3263 3284 if t in table:
3264 3285 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3265 3286 table.update(cmdtable)
3266 3287
3267 3288 try:
3268 3289 cmd, func, args, options, cmdoptions = parse(u, args)
3269 3290 if options["time"]:
3270 3291 def get_times():
3271 3292 t = os.times()
3272 3293 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3273 3294 t = (t[0], t[1], t[2], t[3], time.clock())
3274 3295 return t
3275 3296 s = get_times()
3276 3297 def print_time():
3277 3298 t = get_times()
3278 3299 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3279 3300 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3280 3301 atexit.register(print_time)
3281 3302
3282 3303 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3283 not options["noninteractive"], options["traceback"])
3304 not options["noninteractive"], options["traceback"],
3305 options["config"])
3284 3306
3285 3307 # enter the debugger before command execution
3286 3308 if options['debugger']:
3287 3309 pdb.set_trace()
3288 3310
3289 3311 try:
3290 3312 if options['cwd']:
3291 3313 try:
3292 3314 os.chdir(options['cwd'])
3293 3315 except OSError, inst:
3294 3316 raise util.Abort('%s: %s' %
3295 3317 (options['cwd'], inst.strerror))
3296 3318
3297 3319 path = u.expandpath(options["repository"]) or ""
3298 3320 repo = path and hg.repository(u, path=path) or None
3299 3321
3300 3322 if options['help']:
3301 3323 return help_(u, cmd, options['version'])
3302 3324 elif options['version']:
3303 3325 return show_version(u)
3304 3326 elif not cmd:
3305 3327 return help_(u, 'shortlist')
3306 3328
3307 3329 if cmd not in norepo.split():
3308 3330 try:
3309 3331 if not repo:
3310 3332 repo = hg.repository(u, path=path)
3311 3333 u = repo.ui
3312 3334 for x in external:
3313 3335 if hasattr(x, 'reposetup'):
3314 3336 x.reposetup(u, repo)
3315 3337 except hg.RepoError:
3316 3338 if cmd not in optionalrepo.split():
3317 3339 raise
3318 3340 d = lambda: func(u, repo, *args, **cmdoptions)
3319 3341 else:
3320 3342 d = lambda: func(u, *args, **cmdoptions)
3321 3343
3322 3344 try:
3323 3345 if options['profile']:
3324 3346 import hotshot, hotshot.stats
3325 3347 prof = hotshot.Profile("hg.prof")
3326 3348 try:
3327 3349 try:
3328 3350 return prof.runcall(d)
3329 3351 except:
3330 3352 try:
3331 3353 u.warn(_('exception raised - generating '
3332 3354 'profile anyway\n'))
3333 3355 except:
3334 3356 pass
3335 3357 raise
3336 3358 finally:
3337 3359 prof.close()
3338 3360 stats = hotshot.stats.load("hg.prof")
3339 3361 stats.strip_dirs()
3340 3362 stats.sort_stats('time', 'calls')
3341 3363 stats.print_stats(40)
3342 3364 else:
3343 3365 return d()
3344 3366 finally:
3345 3367 u.flush()
3346 3368 except:
3347 3369 # enter the debugger when we hit an exception
3348 3370 if options['debugger']:
3349 3371 pdb.post_mortem(sys.exc_info()[2])
3350 3372 if u.traceback:
3351 3373 traceback.print_exc()
3352 3374 raise
3353 3375 except ParseError, inst:
3354 3376 if inst.args[0]:
3355 3377 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3356 3378 help_(u, inst.args[0])
3357 3379 else:
3358 3380 u.warn(_("hg: %s\n") % inst.args[1])
3359 3381 help_(u, 'shortlist')
3360 3382 except AmbiguousCommand, inst:
3361 3383 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3362 3384 (inst.args[0], " ".join(inst.args[1])))
3363 3385 except UnknownCommand, inst:
3364 3386 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3365 3387 help_(u, 'shortlist')
3366 3388 except hg.RepoError, inst:
3367 3389 u.warn(_("abort: %s!\n") % inst)
3368 3390 except lock.LockHeld, inst:
3369 3391 if inst.errno == errno.ETIMEDOUT:
3370 3392 reason = _('timed out waiting for lock held by %s') % inst.locker
3371 3393 else:
3372 3394 reason = _('lock held by %s') % inst.locker
3373 3395 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3374 3396 except lock.LockUnavailable, inst:
3375 3397 u.warn(_("abort: could not lock %s: %s\n") %
3376 3398 (inst.desc or inst.filename, inst.strerror))
3377 3399 except revlog.RevlogError, inst:
3378 3400 u.warn(_("abort: "), inst, "!\n")
3379 3401 except util.SignalInterrupt:
3380 3402 u.warn(_("killed!\n"))
3381 3403 except KeyboardInterrupt:
3382 3404 try:
3383 3405 u.warn(_("interrupted!\n"))
3384 3406 except IOError, inst:
3385 3407 if inst.errno == errno.EPIPE:
3386 3408 if u.debugflag:
3387 3409 u.warn(_("\nbroken pipe\n"))
3388 3410 else:
3389 3411 raise
3390 3412 except IOError, inst:
3391 3413 if hasattr(inst, "code"):
3392 3414 u.warn(_("abort: %s\n") % inst)
3393 3415 elif hasattr(inst, "reason"):
3394 3416 u.warn(_("abort: error: %s\n") % inst.reason[1])
3395 3417 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3396 3418 if u.debugflag:
3397 3419 u.warn(_("broken pipe\n"))
3398 3420 elif getattr(inst, "strerror", None):
3399 3421 if getattr(inst, "filename", None):
3400 3422 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3401 3423 else:
3402 3424 u.warn(_("abort: %s\n") % inst.strerror)
3403 3425 else:
3404 3426 raise
3405 3427 except OSError, inst:
3406 3428 if hasattr(inst, "filename"):
3407 3429 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3408 3430 else:
3409 3431 u.warn(_("abort: %s\n") % inst.strerror)
3410 3432 except util.Abort, inst:
3411 3433 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3412 3434 except TypeError, inst:
3413 3435 # was this an argument error?
3414 3436 tb = traceback.extract_tb(sys.exc_info()[2])
3415 3437 if len(tb) > 2: # no
3416 3438 raise
3417 3439 u.debug(inst, "\n")
3418 3440 u.warn(_("%s: invalid arguments\n") % cmd)
3419 3441 help_(u, cmd)
3420 3442 except SystemExit, inst:
3421 3443 # Commands shouldn't sys.exit directly, but give a return code.
3422 3444 # Just in case, catch this and pass the exit code to the caller.
3423 3445 return inst.code
3424 3446 except:
3425 3447 u.warn(_("** unknown exception encountered, details follow\n"))
3426 3448 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3427 3449 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3428 3450 % version.get_version())
3429 3451 raise
3430 3452
3431 3453 return -1
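dispatch() loads every [extensions] entry, either from an explicit path or by importing hgext.<name> and then <name>, merges its cmdtable into the main table (warning when a command is overridden), and later calls an optional reposetup(ui, repo) hook once the repository is open. A minimal sketch of such a module (file name and command are invented):

    # save as, say, hello.py and enable it with:
    #   [extensions]
    #   hello = /path/to/hello.py
    def hello(ui, repo, **opts):
        """print a greeting and the repository root"""
        ui.write("hello from %s\n" % repo.root)

    cmdtable = {
        "hello": (hello, [], "hg hello"),
    }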
@@ -1,1142 +1,1142 b''
1 1 # hgweb.py - web interface to a mercurial repository
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, cgi, sys
10 10 import mimetypes
11 11 from demandload import demandload
12 12 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
13 13 demandload(globals(), "tempfile StringIO BaseHTTPServer util SocketServer")
14 14 demandload(globals(), "archival mimetypes templater urllib")
15 15 from node import *
16 16 from i18n import gettext as _
17 17
18 18 def splitURI(uri):
19 19 """ Return path and query split from uri
20 20
21 21 As in a CGI environment, the path is unquoted and the query is
22 22 not.
23 23 """
24 24 if '?' in uri:
25 25 path, query = uri.split('?', 1)
26 26 else:
27 27 path, query = uri, ''
28 28 return urllib.unquote(path), query
29 29
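For illustration (the URL is invented): only the path component comes back unquoted, while the query keeps its escapes, matching the CGI convention described in the docstring.

    assert splitURI("/hg/my%20repo?cmd=changeset&node=tip") == \
           ("/hg/my repo", "cmd=changeset&node=tip")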
30 30 def up(p):
31 31 if p[0] != "/":
32 32 p = "/" + p
33 33 if p[-1] == "/":
34 34 p = p[:-1]
35 35 up = os.path.dirname(p)
36 36 if up == "/":
37 37 return "/"
38 38 return up + "/"
39 39
40 40 def get_mtime(repo_path):
41 41 hg_path = os.path.join(repo_path, ".hg")
42 42 cl_path = os.path.join(hg_path, "00changelog.i")
43 43 if os.path.exists(os.path.join(cl_path)):
44 44 return os.stat(cl_path).st_mtime
45 45 else:
46 46 return os.stat(hg_path).st_mtime
47 47
48 48 def staticfile(directory, fname):
49 49 """return a file inside directory with guessed content-type header
50 50
51 51 fname always uses '/' as directory separator and isn't allowed to
52 52 contain unusual path components.
53 53 Content-type is guessed using the mimetypes module.
54 54 Return an empty string if fname is illegal or file not found.
55 55
56 56 """
57 57 parts = fname.split('/')
58 58 path = directory
59 59 for part in parts:
60 60 if (part in ('', os.curdir, os.pardir) or
61 61 os.sep in part or os.altsep is not None and os.altsep in part):
62 62 return ""
63 63 path = os.path.join(path, part)
64 64 try:
65 65 os.stat(path)
66 66 ct = mimetypes.guess_type(path)[0] or "text/plain"
67 67 return "Content-type: %s\n\n%s" % (ct, file(path).read())
68 68 except (TypeError, OSError):
69 69 # illegal fname or unreadable file
70 70 return ""
71 71
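staticfile() refuses to serve anything whose path components could climb out of the template directory. The per-component check, restated as a standalone sketch with a few cases:

    import os

    def is_safe_component(part):
        # reject empty, '.', '..', or anything containing a path separator
        if part in ('', os.curdir, os.pardir):
            return False
        if os.sep in part or (os.altsep is not None and os.altsep in part):
            return False
        return True

    assert is_safe_component("style.css")
    assert not is_safe_component("..")    # parent-directory escape
    assert not is_safe_component("")      # empty component, e.g. from 'a//b'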
72 72 class hgrequest(object):
73 73 def __init__(self, inp=None, out=None, env=None):
74 74 self.inp = inp or sys.stdin
75 75 self.out = out or sys.stdout
76 76 self.env = env or os.environ
77 77 self.form = cgi.parse(self.inp, self.env, keep_blank_values=1)
78 78
79 79 def write(self, *things):
80 80 for thing in things:
81 81 if hasattr(thing, "__iter__"):
82 82 for part in thing:
83 83 self.write(part)
84 84 else:
85 85 try:
86 86 self.out.write(str(thing))
87 87 except socket.error, inst:
88 88 if inst[0] != errno.ECONNRESET:
89 89 raise
90 90
91 91 def header(self, headers=[('Content-type','text/html')]):
92 92 for header in headers:
93 93 self.out.write("%s: %s\r\n" % header)
94 94 self.out.write("\r\n")
95 95
96 96 def httphdr(self, type, file="", size=0):
97 97
98 98 headers = [('Content-type', type)]
99 99 if file:
100 100 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
101 101 if size > 0:
102 102 headers.append(('Content-length', str(size)))
103 103 self.header(headers)
104 104
105 105 class hgweb(object):
106 106 def __init__(self, repo, name=None):
107 107 if type(repo) == type(""):
108 108 self.repo = hg.repository(ui.ui(), repo)
109 109 else:
110 110 self.repo = repo
111 111
112 112 self.mtime = -1
113 113 self.reponame = name
114 114 self.archives = 'zip', 'gz', 'bz2'
115 115
116 116 def refresh(self):
117 117 mtime = get_mtime(self.repo.root)
118 118 if mtime != self.mtime:
119 119 self.mtime = mtime
120 120 self.repo = hg.repository(self.repo.ui, self.repo.root)
121 121 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
122 122 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
123 123 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
124 124
125 125 def archivelist(self, nodeid):
126 126 for i in self.archives:
127 127 if self.repo.ui.configbool("web", "allow" + i, False):
128 128 yield {"type" : i, "node" : nodeid, "url": ""}
129 129
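refresh() and archivelist() read their limits and switches from the [web] section on every request, so edits to the repository hgrc take effect without restarting the server. An illustrative fragment (the values are invented):

    [web]
    # changesets shown per changelog page
    maxchanges = 25
    # files listed per changeset
    maxfiles = 15
    # refuse pulls over this interface
    allowpull = no
    # offer .zip downloads; allowgz and allowbz2 work the same way
    allowzip = yes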
130 130 def listfiles(self, files, mf):
131 131 for f in files[:self.maxfiles]:
132 132 yield self.t("filenodelink", node=hex(mf[f]), file=f)
133 133 if len(files) > self.maxfiles:
134 134 yield self.t("fileellipses")
135 135
136 136 def listfilediffs(self, files, changeset):
137 137 for f in files[:self.maxfiles]:
138 138 yield self.t("filedifflink", node=hex(changeset), file=f)
139 139 if len(files) > self.maxfiles:
140 140 yield self.t("fileellipses")
141 141
142 142 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
143 143 if not rev:
144 144 rev = lambda x: ""
145 145 siblings = [s for s in siblings if s != nullid]
146 146 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
147 147 return
148 148 for s in siblings:
149 149 yield dict(node=hex(s), rev=rev(s), **args)
150 150
151 151 def renamelink(self, fl, node):
152 152 r = fl.renamed(node)
153 153 if r:
154 154 return [dict(file=r[0], node=hex(r[1]))]
155 155 return []
156 156
157 157 def showtag(self, t1, node=nullid, **args):
158 158 for t in self.repo.nodetags(node):
159 159 yield self.t(t1, tag=t, **args)
160 160
161 161 def diff(self, node1, node2, files):
162 162 def filterfiles(filters, files):
163 163 l = [x for x in files if x in filters]
164 164
165 165 for t in filters:
166 166 if t and t[-1] != os.sep:
167 167 t += os.sep
168 168 l += [x for x in files if x.startswith(t)]
169 169 return l
170 170
171 171 parity = [0]
172 172 def diffblock(diff, f, fn):
173 173 yield self.t("diffblock",
174 174 lines=prettyprintlines(diff),
175 175 parity=parity[0],
176 176 file=f,
177 177 filenode=hex(fn or nullid))
178 178 parity[0] = 1 - parity[0]
179 179
180 180 def prettyprintlines(diff):
181 181 for l in diff.splitlines(1):
182 182 if l.startswith('+'):
183 183 yield self.t("difflineplus", line=l)
184 184 elif l.startswith('-'):
185 185 yield self.t("difflineminus", line=l)
186 186 elif l.startswith('@'):
187 187 yield self.t("difflineat", line=l)
188 188 else:
189 189 yield self.t("diffline", line=l)
190 190
191 191 r = self.repo
192 192 cl = r.changelog
193 193 mf = r.manifest
194 194 change1 = cl.read(node1)
195 195 change2 = cl.read(node2)
196 196 mmap1 = mf.read(change1[0])
197 197 mmap2 = mf.read(change2[0])
198 198 date1 = util.datestr(change1[2])
199 199 date2 = util.datestr(change2[2])
200 200
201 201 modified, added, removed, deleted, unknown = r.changes(node1, node2)
202 202 if files:
203 203 modified, added, removed = map(lambda x: filterfiles(files, x),
204 204 (modified, added, removed))
205 205
206 206 diffopts = self.repo.ui.diffopts()
207 207 showfunc = diffopts['showfunc']
208 208 ignorews = diffopts['ignorews']
209 209 for f in modified:
210 210 to = r.file(f).read(mmap1[f])
211 211 tn = r.file(f).read(mmap2[f])
212 212 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
213 213 showfunc=showfunc, ignorews=ignorews), f, tn)
214 214 for f in added:
215 215 to = None
216 216 tn = r.file(f).read(mmap2[f])
217 217 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
218 218 showfunc=showfunc, ignorews=ignorews), f, tn)
219 219 for f in removed:
220 220 to = r.file(f).read(mmap1[f])
221 221 tn = None
222 222 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
223 223 showfunc=showfunc, ignorews=ignorews), f, tn)
224 224
225 225 def changelog(self, pos):
226 226 def changenav(**map):
227 227 def seq(factor, maxchanges=None):
228 228 if maxchanges:
229 229 yield maxchanges
230 230 if maxchanges >= 20 and maxchanges <= 40:
231 231 yield 50
232 232 else:
233 233 yield 1 * factor
234 234 yield 3 * factor
235 235 for f in seq(factor * 10):
236 236 yield f
237 237
238 238 l = []
239 239 last = 0
240 240 for f in seq(1, self.maxchanges):
241 241 if f < self.maxchanges or f <= last:
242 242 continue
243 243 if f > count:
244 244 break
245 245 last = f
246 246 r = "%d" % f
247 247 if pos + f < count:
248 248 l.append(("+" + r, pos + f))
249 249 if pos - f >= 0:
250 250 l.insert(0, ("-" + r, pos - f))
251 251
252 252 yield {"rev": 0, "label": "(0)"}
253 253
254 254 for label, rev in l:
255 255 yield {"label": label, "rev": rev}
256 256
257 257 yield {"label": "tip", "rev": "tip"}
258 258
259 259 def changelist(**map):
260 260 parity = (start - end) & 1
261 261 cl = self.repo.changelog
262 262 l = [] # build a list in forward order for efficiency
263 263 for i in range(start, end):
264 264 n = cl.node(i)
265 265 changes = cl.read(n)
266 266 hn = hex(n)
267 267
268 268 l.insert(0, {"parity": parity,
269 269 "author": changes[1],
270 270 "parent": self.siblings(cl.parents(n), cl.rev,
271 271 cl.rev(n) - 1),
272 272 "child": self.siblings(cl.children(n), cl.rev,
273 273 cl.rev(n) + 1),
274 274 "changelogtag": self.showtag("changelogtag",n),
275 275 "manifest": hex(changes[0]),
276 276 "desc": changes[4],
277 277 "date": changes[2],
278 278 "files": self.listfilediffs(changes[3], n),
279 279 "rev": i,
280 280 "node": hn})
281 281 parity = 1 - parity
282 282
283 283 for e in l:
284 284 yield e
285 285
286 286 cl = self.repo.changelog
287 287 mf = cl.read(cl.tip())[0]
288 288 count = cl.count()
289 289 start = max(0, pos - self.maxchanges + 1)
290 290 end = min(count, start + self.maxchanges)
291 291 pos = end - 1
292 292
293 293 yield self.t('changelog',
294 294 changenav=changenav,
295 295 manifest=hex(mf),
296 296 rev=pos, changesets=count, entries=changelist,
297 297 archives=self.archivelist("tip"))
298 298
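changenav() derives the '-N'/'+N' paging links from a sequence seeded with maxchanges and then growing roughly 1x, 3x, 10x, 30x, and so on. The effect is easier to see in this simplified restatement (the numbers are invented):

    def nav_offsets(maxchanges, count):
        # the sequence changenav() walks, reduced to the offsets it considers
        def seq(factor, first=None):
            if first:
                yield first
                if 20 <= first <= 40:
                    yield 50
            else:
                yield 1 * factor
                yield 3 * factor
            for f in seq(factor * 10):
                yield f
        offsets, last = [], 0
        for f in seq(1, maxchanges):
            if f < maxchanges or f <= last:
                continue
            if f > count:
                break
            last = f
            offsets.append(f)
        return offsets

    assert nav_offsets(10, 1000) == [10, 30, 100, 300, 1000]

Each surviving offset f then becomes a '-f' and/or '+f' link when pos - f and pos + f stay inside the changelog.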
299 299 def search(self, query):
300 300
301 301 def changelist(**map):
302 302 cl = self.repo.changelog
303 303 count = 0
304 304 qw = query.lower().split()
305 305
306 306 def revgen():
307 307 for i in range(cl.count() - 1, 0, -100):
308 308 l = []
309 309 for j in range(max(0, i - 100), i):
310 310 n = cl.node(j)
311 311 changes = cl.read(n)
312 312 l.append((n, j, changes))
313 313 l.reverse()
314 314 for e in l:
315 315 yield e
316 316
317 317 for n, i, changes in revgen():
318 318 miss = 0
319 319 for q in qw:
320 320 if not (q in changes[1].lower() or
321 321 q in changes[4].lower() or
322 322 q in " ".join(changes[3][:20]).lower()):
323 323 miss = 1
324 324 break
325 325 if miss:
326 326 continue
327 327
328 328 count += 1
329 329 hn = hex(n)
330 330
331 331 yield self.t('searchentry',
332 332 parity=count & 1,
333 333 author=changes[1],
334 334 parent=self.siblings(cl.parents(n), cl.rev),
335 335 child=self.siblings(cl.children(n), cl.rev),
336 336 changelogtag=self.showtag("changelogtag",n),
337 337 manifest=hex(changes[0]),
338 338 desc=changes[4],
339 339 date=changes[2],
340 340 files=self.listfilediffs(changes[3], n),
341 341 rev=i,
342 342 node=hn)
343 343
344 344 if count >= self.maxchanges:
345 345 break
346 346
347 347 cl = self.repo.changelog
348 348 mf = cl.read(cl.tip())[0]
349 349
350 350 yield self.t('search',
351 351 query=query,
352 352 manifest=hex(mf),
353 353 entries=changelist)
354 354
355 355 def changeset(self, nodeid):
356 356 cl = self.repo.changelog
357 357 n = self.repo.lookup(nodeid)
358 358 nodeid = hex(n)
359 359 changes = cl.read(n)
360 360 p1 = cl.parents(n)[0]
361 361
362 362 files = []
363 363 mf = self.repo.manifest.read(changes[0])
364 364 for f in changes[3]:
365 365 files.append(self.t("filenodelink",
366 366 filenode=hex(mf.get(f, nullid)), file=f))
367 367
368 368 def diff(**map):
369 369 yield self.diff(p1, n, None)
370 370
371 371 yield self.t('changeset',
372 372 diff=diff,
373 373 rev=cl.rev(n),
374 374 node=nodeid,
375 375 parent=self.siblings(cl.parents(n), cl.rev),
376 376 child=self.siblings(cl.children(n), cl.rev),
377 377 changesettag=self.showtag("changesettag",n),
378 378 manifest=hex(changes[0]),
379 379 author=changes[1],
380 380 desc=changes[4],
381 381 date=changes[2],
382 382 files=files,
383 383 archives=self.archivelist(nodeid))
384 384
385 385 def filelog(self, f, filenode):
386 386 cl = self.repo.changelog
387 387 fl = self.repo.file(f)
388 388 filenode = hex(fl.lookup(filenode))
389 389 count = fl.count()
390 390
391 391 def entries(**map):
392 392 l = []
393 393 parity = (count - 1) & 1
394 394
395 395 for i in range(count):
396 396 n = fl.node(i)
397 397 lr = fl.linkrev(n)
398 398 cn = cl.node(lr)
399 399 cs = cl.read(cl.node(lr))
400 400
401 401 l.insert(0, {"parity": parity,
402 402 "filenode": hex(n),
403 403 "filerev": i,
404 404 "file": f,
405 405 "node": hex(cn),
406 406 "author": cs[1],
407 407 "date": cs[2],
408 408 "rename": self.renamelink(fl, n),
409 409 "parent": self.siblings(fl.parents(n),
410 410 fl.rev, file=f),
411 411 "child": self.siblings(fl.children(n),
412 412 fl.rev, file=f),
413 413 "desc": cs[4]})
414 414 parity = 1 - parity
415 415
416 416 for e in l:
417 417 yield e
418 418
419 419 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
420 420
421 421 def filerevision(self, f, node):
422 422 fl = self.repo.file(f)
423 423 n = fl.lookup(node)
424 424 node = hex(n)
425 425 text = fl.read(n)
426 426 changerev = fl.linkrev(n)
427 427 cl = self.repo.changelog
428 428 cn = cl.node(changerev)
429 429 cs = cl.read(cn)
430 430 mfn = cs[0]
431 431
432 432 mt = mimetypes.guess_type(f)[0]
433 433 rawtext = text
434 434 if util.binary(text):
435 435 mt = mt or 'application/octet-stream'
436 436 text = "(binary:%s)" % mt
437 437 mt = mt or 'text/plain'
438 438
439 439 def lines():
440 440 for l, t in enumerate(text.splitlines(1)):
441 441 yield {"line": t,
442 442 "linenumber": "% 6d" % (l + 1),
443 443 "parity": l & 1}
444 444
445 445 yield self.t("filerevision",
446 446 file=f,
447 447 filenode=node,
448 448 path=up(f),
449 449 text=lines(),
450 450 raw=rawtext,
451 451 mimetype=mt,
452 452 rev=changerev,
453 453 node=hex(cn),
454 454 manifest=hex(mfn),
455 455 author=cs[1],
456 456 date=cs[2],
457 457 parent=self.siblings(fl.parents(n), fl.rev, file=f),
458 458 child=self.siblings(fl.children(n), fl.rev, file=f),
459 459 rename=self.renamelink(fl, n),
460 460 permissions=self.repo.manifest.readflags(mfn)[f])
461 461
462 462 def fileannotate(self, f, node):
463 463 bcache = {}
464 464 ncache = {}
465 465 fl = self.repo.file(f)
466 466 n = fl.lookup(node)
467 467 node = hex(n)
468 468 changerev = fl.linkrev(n)
469 469
470 470 cl = self.repo.changelog
471 471 cn = cl.node(changerev)
472 472 cs = cl.read(cn)
473 473 mfn = cs[0]
474 474
475 475 def annotate(**map):
476 476 parity = 1
477 477 last = None
478 478 for r, l in fl.annotate(n):
479 479 try:
480 480 cnode = ncache[r]
481 481 except KeyError:
482 482 cnode = ncache[r] = self.repo.changelog.node(r)
483 483
484 484 try:
485 485 name = bcache[r]
486 486 except KeyError:
487 487 cl = self.repo.changelog.read(cnode)
488 488 bcache[r] = name = self.repo.ui.shortuser(cl[1])
489 489
490 490 if last != cnode:
491 491 parity = 1 - parity
492 492 last = cnode
493 493
494 494 yield {"parity": parity,
495 495 "node": hex(cnode),
496 496 "rev": r,
497 497 "author": name,
498 498 "file": f,
499 499 "line": l}
500 500
501 501 yield self.t("fileannotate",
502 502 file=f,
503 503 filenode=node,
504 504 annotate=annotate,
505 505 path=up(f),
506 506 rev=changerev,
507 507 node=hex(cn),
508 508 manifest=hex(mfn),
509 509 author=cs[1],
510 510 date=cs[2],
511 511 rename=self.renamelink(fl, n),
512 512 parent=self.siblings(fl.parents(n), fl.rev, file=f),
513 513 child=self.siblings(fl.children(n), fl.rev, file=f),
514 514 permissions=self.repo.manifest.readflags(mfn)[f])
515 515
516 516 def manifest(self, mnode, path):
517 517 man = self.repo.manifest
518 518 mn = man.lookup(mnode)
519 519 mnode = hex(mn)
520 520 mf = man.read(mn)
521 521 rev = man.rev(mn)
522 522 node = self.repo.changelog.node(rev)
523 523 mff = man.readflags(mn)
524 524
525 525 files = {}
526 526
527 527 p = path[1:]
528 528 if p and p[-1] != "/":
529 529 p += "/"
530 530 l = len(p)
531 531
532 532 for f,n in mf.items():
533 533 if f[:l] != p:
534 534 continue
535 535 remain = f[l:]
536 536 if "/" in remain:
537 537 short = remain[:remain.find("/") + 1] # keep the trailing "/" to mark a directory entry
538 538 files[short] = (f, None)
539 539 else:
540 540 short = os.path.basename(remain)
541 541 files[short] = (f, n)
542 542
543 543 def filelist(**map):
544 544 parity = 0
545 545 fl = files.keys()
546 546 fl.sort()
547 547 for f in fl:
548 548 full, fnode = files[f]
549 549 if not fnode:
550 550 continue
551 551
552 552 yield {"file": full,
553 553 "manifest": mnode,
554 554 "filenode": hex(fnode),
555 555 "parity": parity,
556 556 "basename": f,
557 557 "permissions": mff[full]}
558 558 parity = 1 - parity
559 559
560 560 def dirlist(**map):
561 561 parity = 0
562 562 fl = files.keys()
563 563 fl.sort()
564 564 for f in fl:
565 565 full, fnode = files[f]
566 566 if fnode:
567 567 continue
568 568
569 569 yield {"parity": parity,
570 570 "path": os.path.join(path, f),
571 571 "manifest": mnode,
572 572 "basename": f[:-1]}
573 573 parity = 1 - parity
574 574
575 575 yield self.t("manifest",
576 576 manifest=mnode,
577 577 rev=rev,
578 578 node=hex(node),
579 579 path=path,
580 580 up=up(path),
581 581 fentries=filelist,
582 582 dentries=dirlist,
583 583 archives=self.archivelist(hex(node)))
584 584
585 585 def tags(self):
586 586 cl = self.repo.changelog
587 587 mf = cl.read(cl.tip())[0]
588 588
589 589 i = self.repo.tagslist()
590 590 i.reverse()
591 591
592 592 def entries(notip=False, **map):
593 593 parity = 0
594 594 for k,n in i:
595 595 if notip and k == "tip": continue
596 596 yield {"parity": parity,
597 597 "tag": k,
598 598 "tagmanifest": hex(cl.read(n)[0]),
599 599 "date": cl.read(n)[2],
600 600 "node": hex(n)}
601 601 parity = 1 - parity
602 602
603 603 yield self.t("tags",
604 604 manifest=hex(mf),
605 605 entries=lambda **x: entries(False, **x),
606 606 entriesnotip=lambda **x: entries(True, **x))
607 607
608 608 def summary(self):
609 609 cl = self.repo.changelog
610 610 mf = cl.read(cl.tip())[0]
611 611
612 612 i = self.repo.tagslist()
613 613 i.reverse()
614 614
615 615 def tagentries(**map):
616 616 parity = 0
617 617 count = 0
618 618 for k,n in i:
619 619 if k == "tip": # skip tip
620 620 continue
621 621
622 622 count += 1
623 623 if count > 10: # limit to 10 tags
624 624 break
625 625
626 626 c = cl.read(n)
627 627 m = c[0]
628 628 t = c[2]
629 629
630 630 yield self.t("tagentry",
631 631 parity = parity,
632 632 tag = k,
633 633 node = hex(n),
634 634 date = t,
635 635 tagmanifest = hex(m))
636 636 parity = 1 - parity
637 637
638 638 def changelist(**map):
639 639 parity = 0
640 640 cl = self.repo.changelog
641 641 l = [] # build a list in forward order for efficiency
642 642 for i in range(start, end):
643 643 n = cl.node(i)
644 644 changes = cl.read(n)
645 645 hn = hex(n)
646 646 t = changes[2]
647 647
648 648 l.insert(0, self.t(
649 649 'shortlogentry',
650 650 parity = parity,
651 651 author = changes[1],
652 652 manifest = hex(changes[0]),
653 653 desc = changes[4],
654 654 date = t,
655 655 rev = i,
656 656 node = hn))
657 657 parity = 1 - parity
658 658
659 659 yield l
660 660
661 661 cl = self.repo.changelog
662 662 mf = cl.read(cl.tip())[0]
663 663 count = cl.count()
664 664 start = max(0, count - self.maxchanges)
665 665 end = min(count, start + self.maxchanges)
666 666 pos = end - 1
667 667
668 668 yield self.t("summary",
669 669 desc = self.repo.ui.config("web", "description", "unknown"),
670 670 owner = (self.repo.ui.config("ui", "username") or # preferred
671 671 self.repo.ui.config("web", "contact") or # deprecated
672 672 self.repo.ui.config("web", "author", "unknown")), # also
673 673 lastchange = (0, 0), # FIXME
674 674 manifest = hex(mf),
675 675 tags = tagentries,
676 676 shortlog = changelist)
677 677
678 678 def filediff(self, file, changeset):
679 679 cl = self.repo.changelog
680 680 n = self.repo.lookup(changeset)
681 681 changeset = hex(n)
682 682 p1 = cl.parents(n)[0]
683 683 cs = cl.read(n)
684 684 mf = self.repo.manifest.read(cs[0])
685 685
686 686 def diff(**map):
687 yield self.diff(p1, n, file)
687 yield self.diff(p1, n, [file])
688 688
689 689 yield self.t("filediff",
690 690 file=file,
691 691 filenode=hex(mf.get(file, nullid)),
692 692 node=changeset,
693 693 rev=self.repo.changelog.rev(n),
694 694 parent=self.siblings(cl.parents(n), cl.rev),
695 695 child=self.siblings(cl.children(n), cl.rev),
696 696 diff=diff)
697 697
698 698 archive_specs = {
699 699 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', 'x-bzip2'),
700 700 'gz': ('application/x-tar', 'tgz', '.tar.gz', 'x-gzip'),
701 701 'zip': ('application/zip', 'zip', '.zip', None),
702 702 }
703 703
704 704 def archive(self, req, cnode, type):
705 705 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
706 706 name = "%s-%s" % (reponame, short(cnode))
707 707 mimetype, artype, extension, encoding = self.archive_specs[type]
708 708 headers = [('Content-type', mimetype),
709 709 ('Content-disposition', 'attachment; filename=%s%s' %
710 710 (name, extension))]
711 711 if encoding:
712 712 headers.append(('Content-encoding', encoding))
713 713 req.header(headers)
714 714 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
715 715
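archive() streams a tarball or zip built by the archival module, taking the content type, archive type, file extension and encoding from archive_specs; a format is only offered when the matching [web] allow<type> switch is on. An illustrative setup (host and names invented):

    [web]
    allowbz2 = yes

With that switch on, http://example.com/hg/repo?cmd=archive&node=tip&type=bz2 (or the shorter ?ca=tip&type=bz2 form) answers with Content-type application/x-tar, Content-encoding x-bzip2 and an attachment named <repo>-<short node>.tar.bz2, exactly as archive_specs['bz2'] specifies.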
716 716 # add tags to things
717 717 # tags -> list of changesets corresponding to tags
718 718 # find tag, changeset, file
719 719
720 720 def run(self, req=hgrequest()):
721 721 def clean(path):
722 722 p = util.normpath(path)
723 723 if p[:2] == "..":
724 724 raise "suspicious path"
725 725 return p
726 726
727 727 def header(**map):
728 728 yield self.t("header", **map)
729 729
730 730 def footer(**map):
731 731 yield self.t("footer",
732 732 motd=self.repo.ui.config("web", "motd", ""),
733 733 **map)
734 734
735 735 def expand_form(form):
736 736 shortcuts = {
737 737 'cl': [('cmd', ['changelog']), ('rev', None)],
738 738 'cs': [('cmd', ['changeset']), ('node', None)],
739 739 'f': [('cmd', ['file']), ('filenode', None)],
740 740 'fl': [('cmd', ['filelog']), ('filenode', None)],
741 741 'fd': [('cmd', ['filediff']), ('node', None)],
742 742 'fa': [('cmd', ['annotate']), ('filenode', None)],
743 743 'mf': [('cmd', ['manifest']), ('manifest', None)],
744 744 'ca': [('cmd', ['archive']), ('node', None)],
745 745 'tags': [('cmd', ['tags'])],
746 746 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
747 747 'static': [('cmd', ['static']), ('file', None)]
748 748 }
749 749
750 750 for k in shortcuts.iterkeys():
751 751 if form.has_key(k):
752 752 for name, value in shortcuts[k]:
753 753 if value is None:
754 754 value = form[k]
755 755 form[name] = value
756 756 del form[k]
757 757
758 758 self.refresh()
759 759
760 760 expand_form(req.form)
761 761
762 762 t = self.repo.ui.config("web", "templates", templater.templatepath())
763 763 static = self.repo.ui.config("web", "static", os.path.join(t,"static"))
764 764 m = os.path.join(t, "map")
765 765 style = self.repo.ui.config("web", "style", "")
766 766 if req.form.has_key('style'):
767 767 style = req.form['style'][0]
768 768 if style:
769 769 b = os.path.basename("map-" + style)
770 770 p = os.path.join(t, b)
771 771 if os.path.isfile(p):
772 772 m = p
773 773
774 774 port = req.env["SERVER_PORT"]
775 775 port = port != "80" and (":" + port) or ""
776 776 uri = req.env["REQUEST_URI"]
777 777 if "?" in uri:
778 778 uri = uri.split("?")[0]
779 779 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
780 780 if not self.reponame:
781 781 self.reponame = (self.repo.ui.config("web", "name")
782 782 or uri.strip('/') or self.repo.root)
783 783
784 784 self.t = templater.templater(m, templater.common_filters,
785 785 defaults={"url": url,
786 786 "repo": self.reponame,
787 787 "header": header,
788 788 "footer": footer,
789 789 })
790 790
791 791 if not req.form.has_key('cmd'):
792 792 req.form['cmd'] = [self.t.cache['default'],]
793 793
794 794 cmd = req.form['cmd'][0]
795 795 if cmd == 'changelog':
796 796 hi = self.repo.changelog.count() - 1
797 797 if req.form.has_key('rev'):
798 798 hi = req.form['rev'][0]
799 799 try:
800 800 hi = self.repo.changelog.rev(self.repo.lookup(hi))
801 801 except hg.RepoError:
802 802 req.write(self.search(hi)) # XXX redirect to 404 page?
803 803 return
804 804
805 805 req.write(self.changelog(hi))
806 806
807 807 elif cmd == 'changeset':
808 808 req.write(self.changeset(req.form['node'][0]))
809 809
810 810 elif cmd == 'manifest':
811 811 req.write(self.manifest(req.form['manifest'][0],
812 812 clean(req.form['path'][0])))
813 813
814 814 elif cmd == 'tags':
815 815 req.write(self.tags())
816 816
817 817 elif cmd == 'summary':
818 818 req.write(self.summary())
819 819
820 820 elif cmd == 'filediff':
821 821 req.write(self.filediff(clean(req.form['file'][0]),
822 822 req.form['node'][0]))
823 823
824 824 elif cmd == 'file':
825 825 req.write(self.filerevision(clean(req.form['file'][0]),
826 826 req.form['filenode'][0]))
827 827
828 828 elif cmd == 'annotate':
829 829 req.write(self.fileannotate(clean(req.form['file'][0]),
830 830 req.form['filenode'][0]))
831 831
832 832 elif cmd == 'filelog':
833 833 req.write(self.filelog(clean(req.form['file'][0]),
834 834 req.form['filenode'][0]))
835 835
836 836 elif cmd == 'heads':
837 837 req.httphdr("application/mercurial-0.1")
838 838 h = self.repo.heads()
839 839 req.write(" ".join(map(hex, h)) + "\n")
840 840
841 841 elif cmd == 'branches':
842 842 req.httphdr("application/mercurial-0.1")
843 843 nodes = []
844 844 if req.form.has_key('nodes'):
845 845 nodes = map(bin, req.form['nodes'][0].split(" "))
846 846 for b in self.repo.branches(nodes):
847 847 req.write(" ".join(map(hex, b)) + "\n")
848 848
849 849 elif cmd == 'between':
850 850 req.httphdr("application/mercurial-0.1")
851 851 nodes = []
852 852 if req.form.has_key('pairs'):
853 853 pairs = [map(bin, p.split("-"))
854 854 for p in req.form['pairs'][0].split(" ")]
855 855 for b in self.repo.between(pairs):
856 856 req.write(" ".join(map(hex, b)) + "\n")
857 857
858 858 elif cmd == 'changegroup':
859 859 req.httphdr("application/mercurial-0.1")
860 860 nodes = []
861 861 if not self.allowpull:
862 862 return
863 863
864 864 if req.form.has_key('roots'):
865 865 nodes = map(bin, req.form['roots'][0].split(" "))
866 866
867 867 z = zlib.compressobj()
868 868 f = self.repo.changegroup(nodes, 'serve')
869 869 while 1:
870 870 chunk = f.read(4096)
871 871 if not chunk:
872 872 break
873 873 req.write(z.compress(chunk))
874 874
875 875 req.write(z.flush())
876 876
877 877 elif cmd == 'archive':
878 878 changeset = self.repo.lookup(req.form['node'][0])
879 879 type = req.form['type'][0]
880 880 if (type in self.archives and
881 881 self.repo.ui.configbool("web", "allow" + type, False)):
882 882 self.archive(req, changeset, type)
883 883 return
884 884
885 885 req.write(self.t("error"))
886 886
887 887 elif cmd == 'static':
888 888 fname = req.form['file'][0]
889 889 req.write(staticfile(static, fname)
890 890 or self.t("error", error="%r not found" % fname))
891 891
892 892 else:
893 893 req.write(self.t("error"))
894 894
895 895 def create_server(ui, repo):
896 896 use_threads = True
897 897
898 898 def openlog(opt, default):
899 899 if opt and opt != '-':
900 900 return open(opt, 'w')
901 901 return default
902 902
903 903 address = ui.config("web", "address", "")
904 904 port = int(ui.config("web", "port", 8000))
905 905 use_ipv6 = ui.configbool("web", "ipv6")
906 906 webdir_conf = ui.config("web", "webdir_conf")
907 907 accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
908 908 errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)
909 909
910 910 if use_threads:
911 911 try:
912 912 from threading import activeCount
913 913 except ImportError:
914 914 use_threads = False
915 915
916 916 if use_threads:
917 917 _mixin = SocketServer.ThreadingMixIn
918 918 else:
919 919 if hasattr(os, "fork"):
920 920 _mixin = SocketServer.ForkingMixIn
921 921 else:
922 922 class _mixin: pass
923 923
924 924 class MercurialHTTPServer(_mixin, BaseHTTPServer.HTTPServer):
925 925 pass
926 926
927 927 class IPv6HTTPServer(MercurialHTTPServer):
928 928 address_family = getattr(socket, 'AF_INET6', None)
929 929
930 930 def __init__(self, *args, **kwargs):
931 931 if self.address_family is None:
932 932 raise hg.RepoError(_('IPv6 not available on this system'))
933 933 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
934 934
935 935 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
936 936
937 937 def log_error(self, format, *args):
938 938 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
939 939 self.log_date_time_string(),
940 940 format % args))
941 941
942 942 def log_message(self, format, *args):
943 943 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
944 944 self.log_date_time_string(),
945 945 format % args))
946 946
947 947 def do_POST(self):
948 948 try:
949 949 self.do_hgweb()
950 950 except socket.error, inst:
951 951 if inst[0] != errno.EPIPE:
952 952 raise
953 953
954 954 def do_GET(self):
955 955 self.do_POST()
956 956
957 957 def do_hgweb(self):
958 958 path_info, query = splitURI(self.path)
959 959
960 960 env = {}
961 961 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
962 962 env['REQUEST_METHOD'] = self.command
963 963 env['SERVER_NAME'] = self.server.server_name
964 964 env['SERVER_PORT'] = str(self.server.server_port)
965 965 env['REQUEST_URI'] = "/"
966 966 env['PATH_INFO'] = path_info
967 967 if query:
968 968 env['QUERY_STRING'] = query
969 969 host = self.address_string()
970 970 if host != self.client_address[0]:
971 971 env['REMOTE_HOST'] = host
972 972 env['REMOTE_ADDR'] = self.client_address[0]
973 973
974 974 if self.headers.typeheader is None:
975 975 env['CONTENT_TYPE'] = self.headers.type
976 976 else:
977 977 env['CONTENT_TYPE'] = self.headers.typeheader
978 978 length = self.headers.getheader('content-length')
979 979 if length:
980 980 env['CONTENT_LENGTH'] = length
981 981 accept = []
982 982 for line in self.headers.getallmatchingheaders('accept'):
983 983 if line[:1] in "\t\n\r ":
984 984 accept.append(line.strip())
985 985 else:
986 986 accept = accept + line[7:].split(',')
987 987 env['HTTP_ACCEPT'] = ','.join(accept)
988 988
989 989 req = hgrequest(self.rfile, self.wfile, env)
990 990 self.send_response(200, "Script output follows")
991 991
992 992 if webdir_conf:
993 993 hgwebobj = hgwebdir(webdir_conf)
994 994 elif repo is not None:
995 995 hgwebobj = hgweb(repo.__class__(repo.ui, repo.origroot))
996 996 else:
997 997 raise hg.RepoError(_('no repo found'))
998 998 hgwebobj.run(req)
999 999
1000 1000
1001 1001 if use_ipv6:
1002 1002 return IPv6HTTPServer((address, port), hgwebhandler)
1003 1003 else:
1004 1004 return MercurialHTTPServer((address, port), hgwebhandler)
1005 1005
1006 1006 # This is a stopgap
1007 1007 class hgwebdir(object):
1008 1008 def __init__(self, config):
1009 1009 def cleannames(items):
1010 1010 return [(name.strip(os.sep), path) for name, path in items]
1011 1011
1012 1012 self.motd = ""
1013 1013 self.repos_sorted = ('name', False)
1014 1014 if isinstance(config, (list, tuple)):
1015 1015 self.repos = cleannames(config)
1016 1016 self.repos_sorted = ('', False)
1017 1017 elif isinstance(config, dict):
1018 1018 self.repos = cleannames(config.items())
1019 1019 self.repos.sort()
1020 1020 else:
1021 1021 cp = ConfigParser.SafeConfigParser()
1022 1022 cp.read(config)
1023 1023 self.repos = []
1024 1024 if cp.has_section('web') and cp.has_option('web', 'motd'):
1025 1025 self.motd = cp.get('web', 'motd')
1026 1026 if cp.has_section('paths'):
1027 1027 self.repos.extend(cleannames(cp.items('paths')))
1028 1028 if cp.has_section('collections'):
1029 1029 for prefix, root in cp.items('collections'):
1030 1030 for path in util.walkrepos(root):
1031 1031 repo = os.path.normpath(path)
1032 1032 name = repo
1033 1033 if name.startswith(prefix):
1034 1034 name = name[len(prefix):]
1035 1035 self.repos.append((name.lstrip(os.sep), repo))
1036 1036 self.repos.sort()
1037 1037
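hgwebdir accepts a list of (name, path) pairs, a dict, or the name of a config file; in the file form it honours an optional [web] motd plus [paths] entries (published name = repository root) and [collections] entries, whose prefix is stripped from every repository discovered under the given root. A sketch of such a file (the paths are invented):

    [web]
    motd = anonymous pulls only, please

    [paths]
    hg = /home/hg/repos/hg
    crew = /home/hg/repos/crew

    [collections]
    /home/hg/projects = /home/hg/projects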
1038 1038 def run(self, req=hgrequest()):
1039 1039 def header(**map):
1040 1040 yield tmpl("header", **map)
1041 1041
1042 1042 def footer(**map):
1043 1043 yield tmpl("footer", motd=self.motd, **map)
1044 1044
1045 1045 m = os.path.join(templater.templatepath(), "map")
1046 1046 tmpl = templater.templater(m, templater.common_filters,
1047 1047 defaults={"header": header,
1048 1048 "footer": footer})
1049 1049
1050 1050 def archivelist(ui, nodeid, url):
1051 1051 for i in ['zip', 'gz', 'bz2']:
1052 1052 if ui.configbool("web", "allow" + i, False):
1053 1053 yield {"type" : i, "node": nodeid, "url": url}
1054 1054
1055 1055 def entries(sortcolumn="", descending=False, **map):
1056 1056 rows = []
1057 1057 parity = 0
1058 1058 for name, path in self.repos:
1059 1059 u = ui.ui()
1060 1060 try:
1061 1061 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
1062 1062 except IOError:
1063 1063 pass
1064 1064 get = u.config
1065 1065
1066 1066 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
1067 1067 .replace("//", "/"))
1068 1068
1069 1069 # update time with local timezone
1070 1070 try:
1071 1071 d = (get_mtime(path), util.makedate()[1])
1072 1072 except OSError:
1073 1073 continue
1074 1074
1075 1075 contact = (get("ui", "username") or # preferred
1076 1076 get("web", "contact") or # deprecated
1077 1077 get("web", "author", "")) # also
1078 1078 description = get("web", "description", "")
1079 1079 name = get("web", "name", name)
1080 1080 row = dict(contact=contact or "unknown",
1081 1081 contact_sort=contact.upper() or "unknown",
1082 1082 name=name,
1083 1083 name_sort=name,
1084 1084 url=url,
1085 1085 description=description or "unknown",
1086 1086 description_sort=description.upper() or "unknown",
1087 1087 lastchange=d,
1088 1088 lastchange_sort=d[1]-d[0],
1089 1089 archives=archivelist(u, "tip", url))
1090 1090 if (not sortcolumn
1091 1091 or (sortcolumn, descending) == self.repos_sorted):
1092 1092 # fast path for unsorted output
1093 1093 row['parity'] = parity
1094 1094 parity = 1 - parity
1095 1095 yield row
1096 1096 else:
1097 1097 rows.append((row["%s_sort" % sortcolumn], row))
1098 1098 if rows:
1099 1099 rows.sort()
1100 1100 if descending:
1101 1101 rows.reverse()
1102 1102 for key, row in rows:
1103 1103 row['parity'] = parity
1104 1104 parity = 1 - parity
1105 1105 yield row
1106 1106
1107 1107 virtual = req.env.get("PATH_INFO", "").strip('/')
1108 1108 if virtual:
1109 1109 real = dict(self.repos).get(virtual)
1110 1110 if real:
1111 1111 try:
1112 1112 hgweb(real).run(req)
1113 1113 except IOError, inst:
1114 1114 req.write(tmpl("error", error=inst.strerror))
1115 1115 except hg.RepoError, inst:
1116 1116 req.write(tmpl("error", error=str(inst)))
1117 1117 else:
1118 1118 req.write(tmpl("notfound", repo=virtual))
1119 1119 else:
1120 1120 if req.form.has_key('static'):
1121 1121 static = os.path.join(templater.templatepath(), "static")
1122 1122 fname = req.form['static'][0]
1123 1123 req.write(staticfile(static, fname)
1124 1124 or tmpl("error", error="%r not found" % fname))
1125 1125 else:
1126 1126 sortable = ["name", "description", "contact", "lastchange"]
1127 1127 sortcolumn, descending = self.repos_sorted
1128 1128 if req.form.has_key('sort'):
1129 1129 sortcolumn = req.form['sort'][0]
1130 1130 descending = sortcolumn.startswith('-')
1131 1131 if descending:
1132 1132 sortcolumn = sortcolumn[1:]
1133 1133 if sortcolumn not in sortable:
1134 1134 sortcolumn = ""
1135 1135
1136 1136 sort = [("sort_%s" % column,
1137 1137 "%s%s" % ((not descending and column == sortcolumn)
1138 1138 and "-" or "", column))
1139 1139 for column in sortable]
1140 1140 req.write(tmpl("index", entries=entries,
1141 1141 sortcolumn=sortcolumn, descending=descending,
1142 1142 **dict(sort)))
@@ -1,142 +1,172 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from remoterepo import *
10 10 from i18n import gettext as _
11 11 from demandload import *
12 12 demandload(globals(), "hg os urllib urllib2 urlparse zlib util httplib")
13 13
14 class passwordmgr(urllib2.HTTPPasswordMgr):
15 def __init__(self, ui):
16 urllib2.HTTPPasswordMgr.__init__(self)
17 self.ui = ui
18
19 def find_user_password(self, realm, authuri):
20 authinfo = urllib2.HTTPPasswordMgr.find_user_password(
21 self, realm, authuri)
22 if authinfo != (None, None):
23 return authinfo
24
25 self.ui.write(_("http authorization required\n"))
26 self.ui.status(_("realm: %s\n") % realm)
27 user = self.ui.prompt(_("user:"), default=None)
28 passwd = self.ui.getpass()
29
30 self.add_password(realm, authuri, user, passwd)
31 return (user, passwd)
32
14 33 class httprepository(remoterepository):
15 34 def __init__(self, ui, path):
16 35 # fix missing / after hostname
17 36 s = urlparse.urlsplit(path)
18 37 partial = s[2]
19 38 if not partial: partial = "/"
20 39 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
21 40 self.ui = ui
22 41 no_list = [ "localhost", "127.0.0.1" ]
23 42 host = ui.config("http_proxy", "host")
24 43 if host is None:
25 44 host = os.environ.get("http_proxy")
26 45 if host and host.startswith('http://'):
27 46 host = host[7:]
28 47 user = ui.config("http_proxy", "user")
29 48 passwd = ui.config("http_proxy", "passwd")
30 49 no = ui.config("http_proxy", "no")
31 50 if no is None:
32 51 no = os.environ.get("no_proxy")
33 52 if no:
34 53 no_list = no_list + no.split(",")
35 54
36 55 no_proxy = 0
37 56 for h in no_list:
38 57 if (path.startswith("http://" + h + "/") or
39 58 path.startswith("http://" + h + ":") or
40 59 path == "http://" + h):
41 60 no_proxy = 1
42 61
43 62 # Note: urllib2 takes proxy values from the environment and those will
44 63 # take precedence
45 64 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
46 65 try:
47 66 if os.environ.has_key(env):
48 67 del os.environ[env]
49 68 except OSError:
50 69 pass
51 70
52 71 proxy_handler = urllib2.BaseHandler()
53 72 if host and not no_proxy:
54 73 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
55 74
56 authinfo = None
75 proxyauthinfo = None
57 76 if user and passwd:
58 77 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
59 78 passmgr.add_password(None, host, user, passwd)
60 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
79 proxyauthinfo = urllib2.ProxyBasicAuthHandler(passmgr)
61 80
62 opener = urllib2.build_opener(proxy_handler, authinfo)
81 if ui.interactive:
82 passmgr = passwordmgr(ui)
83 opener = urllib2.build_opener(
84 proxy_handler, proxyauthinfo,
85 urllib2.HTTPBasicAuthHandler(passmgr),
86 urllib2.HTTPDigestAuthHandler(passmgr))
87 else:
88 opener = urllib2.build_opener(proxy_handler, proxyauthinfo)
89
63 90 # 1.0 here is the _protocol_ version
64 91 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
65 92 urllib2.install_opener(opener)
66 93
67 94 def dev(self):
68 95 return -1
69 96
70 97 def lock(self):
71 98 raise util.Abort(_('operation not supported over http'))
72 99
73 100 def do_cmd(self, cmd, **args):
74 101 self.ui.debug(_("sending %s command\n") % cmd)
75 102 q = {"cmd": cmd}
76 103 q.update(args)
77 104 qs = urllib.urlencode(q)
78 105 cu = "%s?%s" % (self.url, qs)
79 resp = urllib2.urlopen(cu)
106 try:
107 resp = urllib2.urlopen(cu)
108 except httplib.HTTPException, inst:
109 raise IOError(None, _('http error while sending %s command') % cmd)
80 110 proto = resp.headers['content-type']
81 111
82 112 # accept old "text/plain" and "application/hg-changegroup" for now
83 113 if not proto.startswith('application/mercurial') and \
84 114 not proto.startswith('text/plain') and \
85 115 not proto.startswith('application/hg-changegroup'):
86 116 raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
87 117 self.url)
88 118
89 119 if proto.startswith('application/mercurial'):
90 120 version = proto[22:]
91 121 if float(version) > 0.1:
92 122 raise hg.RepoError(_("'%s' uses newer protocol %s") %
93 123 (self.url, version))
94 124
95 125 return resp
96 126
97 127 def heads(self):
98 128 d = self.do_cmd("heads").read()
99 129 try:
100 130 return map(bin, d[:-1].split(" "))
101 131 except:
102 132 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
103 133 raise
104 134
105 135 def branches(self, nodes):
106 136 n = " ".join(map(hex, nodes))
107 137 d = self.do_cmd("branches", nodes=n).read()
108 138 try:
109 139 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
110 140 return br
111 141 except:
112 142 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
113 143 raise
114 144
115 145 def between(self, pairs):
116 146 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
117 147 d = self.do_cmd("between", pairs=n).read()
118 148 try:
119 149 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
120 150 return p
121 151 except:
122 152 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
123 153 raise
124 154
125 155 def changegroup(self, nodes, kind):
126 156 n = " ".join(map(hex, nodes))
127 157 f = self.do_cmd("changegroup", roots=n)
128 158 bytes = 0
129 159
130 160 def zgenerator(f):
131 161 zd = zlib.decompressobj()
132 162 try:
133 163 for chnk in f:
134 164 yield zd.decompress(chnk)
135 165 except httplib.HTTPException, inst:
136 166 raise IOError(None, _('connection ended unexpectedly'))
137 167 yield zd.flush()
138 168
139 169 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
140 170
141 171 class httpsrepository(httprepository):
142 172 pass
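
As a rough aside on the httprepository constructor above: the urllib2 opener it installs combines a proxy handler with, in the new revision, Basic and Digest handlers backed by the interactive password manager, so both the proxy and the target server can ask for credentials. A minimal self-contained sketch of that wiring follows; the host, user and password values are placeholders for illustration, not anything read from hgrc or the environment.

    import urllib2

    # Placeholder configuration -- the real code reads these from
    # ui.config("http_proxy", ...) and the process environment.
    proxyhost = "proxy.example.com:3128"
    proxyuser, proxypasswd = "alice", "secret"

    handlers = [urllib2.ProxyHandler({"http": "http://" + proxyhost})]

    # Answer 407 challenges from the proxy with the configured credentials.
    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passmgr.add_password(None, proxyhost, proxyuser, proxypasswd)
    handlers.append(urllib2.ProxyBasicAuthHandler(passmgr))

    # Let the target server itself request Basic or Digest authentication.
    srvpassmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    handlers.append(urllib2.HTTPBasicAuthHandler(srvpassmgr))
    handlers.append(urllib2.HTTPDigestAuthHandler(srvpassmgr))

    opener = urllib2.build_opener(*handlers)
    opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
    urllib2.install_opener(opener)
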
@@ -1,2089 +1,2099 @@
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "appendfile changegroup")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "revlog traceback")
16 16
17 17 class localrepository(object):
18 18 def __del__(self):
19 19 self.transhandle = None
20 20 def __init__(self, parentui, path=None, create=0):
21 21 if not path:
22 22 p = os.getcwd()
23 23 while not os.path.isdir(os.path.join(p, ".hg")):
24 24 oldp = p
25 25 p = os.path.dirname(p)
26 26 if p == oldp:
27 27 raise repo.RepoError(_("no repo found"))
28 28 path = p
29 29 self.path = os.path.join(path, ".hg")
30 30
31 31 if not create and not os.path.isdir(self.path):
32 32 raise repo.RepoError(_("repository %s not found") % path)
33 33
34 34 self.root = os.path.abspath(path)
35 35 self.origroot = path
36 36 self.ui = ui.ui(parentui=parentui)
37 37 self.opener = util.opener(self.path)
38 38 self.wopener = util.opener(self.root)
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 except IOError:
43 43 pass
44 44
45 45 v = self.ui.revlogopts
46 46 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 48 fl = v.get('flags', None)
49 49 flags = 0
50 50 if fl != None:
51 51 for x in fl.split():
52 52 flags |= revlog.flagstr(x)
53 53 elif self.revlogv1:
54 54 flags = revlog.REVLOG_DEFAULT_FLAGS
55 55
56 56 v = self.revlogversion | flags
57 57 self.manifest = manifest.manifest(self.opener, v)
58 58 self.changelog = changelog.changelog(self.opener, v)
59 59
60 60 # the changelog might not have the inline index flag
61 61 # on. If the format of the changelog is the same as found in
62 62 # .hgrc, apply any flags found in the .hgrc as well.
63 63 # Otherwise, just use the version from the changelog
64 64 v = self.changelog.version
65 65 if v == self.revlogversion:
66 66 v |= flags
67 67 self.revlogversion = v
68 68
69 69 self.tagscache = None
70 70 self.nodetagscache = None
71 71 self.encodepats = None
72 72 self.decodepats = None
73 73 self.transhandle = None
74 74
75 75 if create:
76 76 os.mkdir(self.path)
77 77 os.mkdir(self.join("data"))
78 78
79 79 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
80 80
81 81 def hook(self, name, throw=False, **args):
82 82 def callhook(hname, funcname):
83 83 '''call python hook. hook is callable object, looked up as
84 84 name in python module. if callable returns "true", hook
85 85 fails, else passes. if hook raises exception, treated as
86 86 hook failure. exception propagates if throw is "true".
87 87
88 88 reason for "true" meaning "hook failed" is so that
89 89 unmodified commands (e.g. mercurial.commands.update) can
90 90 be run as hooks without wrappers to convert return values.'''
91 91
92 92 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
93 93 d = funcname.rfind('.')
94 94 if d == -1:
95 95 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
96 96 % (hname, funcname))
97 97 modname = funcname[:d]
98 98 try:
99 99 obj = __import__(modname)
100 100 except ImportError:
101 101 raise util.Abort(_('%s hook is invalid '
102 102 '(import of "%s" failed)') %
103 103 (hname, modname))
104 104 try:
105 105 for p in funcname.split('.')[1:]:
106 106 obj = getattr(obj, p)
107 107 except AttributeError, err:
108 108 raise util.Abort(_('%s hook is invalid '
109 109 '("%s" is not defined)') %
110 110 (hname, funcname))
111 111 if not callable(obj):
112 112 raise util.Abort(_('%s hook is invalid '
113 113 '("%s" is not callable)') %
114 114 (hname, funcname))
115 115 try:
116 116 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
117 117 except (KeyboardInterrupt, util.SignalInterrupt):
118 118 raise
119 119 except Exception, exc:
120 120 if isinstance(exc, util.Abort):
121 121 self.ui.warn(_('error: %s hook failed: %s\n') %
122 122 (hname, exc.args[0] % exc.args[1:]))
123 123 else:
124 124 self.ui.warn(_('error: %s hook raised an exception: '
125 125 '%s\n') % (hname, exc))
126 126 if throw:
127 127 raise
128 128 if self.ui.traceback:
129 129 traceback.print_exc()
130 130 return True
131 131 if r:
132 132 if throw:
133 133 raise util.Abort(_('%s hook failed') % hname)
134 134 self.ui.warn(_('warning: %s hook failed\n') % hname)
135 135 return r
136 136
137 137 def runhook(name, cmd):
138 138 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
139 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
140 [(k.upper(), v) for k, v in args.iteritems()])
139 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
141 140 r = util.system(cmd, environ=env, cwd=self.root)
142 141 if r:
143 142 desc, r = util.explain_exit(r)
144 143 if throw:
145 144 raise util.Abort(_('%s hook %s') % (name, desc))
146 145 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
147 146 return r
148 147
149 148 r = False
150 149 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
151 150 if hname.split(".", 1)[0] == name and cmd]
152 151 hooks.sort()
153 152 for hname, cmd in hooks:
154 153 if cmd.startswith('python:'):
155 154 r = callhook(hname, cmd[7:].strip()) or r
156 155 else:
157 156 r = runhook(hname, cmd) or r
158 157 return r
159 158
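
For orientation, the lookup above means an entry in the [hooks] section whose value starts with "python:" is imported as module.function and called with ui, repo, hooktype and whatever keyword arguments the caller supplied (for pretxncommit these are node, parent1 and parent2, per the commit code below); a false return means the hook passed. A minimal hypothetical hook — the module name, hook name and hgrc entry are invented for illustration, and the module must be importable from the Python path:

    # hgrc:
    #   [hooks]
    #   pretxncommit.report = python:myhooks.report
    #
    # myhooks.py
    def report(ui, repo, hooktype, node=None, parent1=None, parent2=None, **kwargs):
        ui.note("%s hook saw changeset %s\n" % (hooktype, node))
        return False        # false/None means success; a true value fails the hook
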
160 159 def tags(self):
161 160 '''return a mapping of tag to node'''
162 161 if not self.tagscache:
163 162 self.tagscache = {}
164 163
165 164 def parsetag(line, context):
166 165 if not line:
167 166 return
168 167 s = l.split(" ", 1)
169 168 if len(s) != 2:
170 169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
171 170 return
172 171 node, key = s
173 172 try:
174 173 bin_n = bin(node)
175 174 except TypeError:
176 175 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
177 176 return
178 177 if bin_n not in self.changelog.nodemap:
179 178 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
180 179 return
181 180 self.tagscache[key.strip()] = bin_n
182 181
183 182 # read each head of the tags file, ending with the tip
184 183 # and add each tag found to the map, with "newer" ones
185 184 # taking precedence
186 185 fl = self.file(".hgtags")
187 186 h = fl.heads()
188 187 h.reverse()
189 188 for r in h:
190 189 count = 0
191 190 for l in fl.read(r).splitlines():
192 191 count += 1
193 192 parsetag(l, ".hgtags:%d" % count)
194 193
195 194 try:
196 195 f = self.opener("localtags")
197 196 count = 0
198 197 for l in f:
199 198 count += 1
200 199 parsetag(l, "localtags:%d" % count)
201 200 except IOError:
202 201 pass
203 202
204 203 self.tagscache['tip'] = self.changelog.tip()
205 204
206 205 return self.tagscache
207 206
208 207 def tagslist(self):
209 208 '''return a list of tags ordered by revision'''
210 209 l = []
211 210 for t, n in self.tags().items():
212 211 try:
213 212 r = self.changelog.rev(n)
214 213 except:
215 214 r = -2 # sort to the beginning of the list if unknown
216 215 l.append((r, t, n))
217 216 l.sort()
218 217 return [(t, n) for r, t, n in l]
219 218
220 219 def nodetags(self, node):
221 220 '''return the tags associated with a node'''
222 221 if not self.nodetagscache:
223 222 self.nodetagscache = {}
224 223 for t, n in self.tags().items():
225 224 self.nodetagscache.setdefault(n, []).append(t)
226 225 return self.nodetagscache.get(node, [])
227 226
228 227 def lookup(self, key):
229 228 try:
230 229 return self.tags()[key]
231 230 except KeyError:
232 231 try:
233 232 return self.changelog.lookup(key)
234 233 except:
235 234 raise repo.RepoError(_("unknown revision '%s'") % key)
236 235
237 236 def dev(self):
238 237 return os.stat(self.path).st_dev
239 238
240 239 def local(self):
241 240 return True
242 241
243 242 def join(self, f):
244 243 return os.path.join(self.path, f)
245 244
246 245 def wjoin(self, f):
247 246 return os.path.join(self.root, f)
248 247
249 248 def file(self, f):
250 249 if f[0] == '/':
251 250 f = f[1:]
252 251 return filelog.filelog(self.opener, f, self.revlogversion)
253 252
254 253 def getcwd(self):
255 254 return self.dirstate.getcwd()
256 255
257 256 def wfile(self, f, mode='r'):
258 257 return self.wopener(f, mode)
259 258
260 259 def wread(self, filename):
261 260 if self.encodepats == None:
262 261 l = []
263 262 for pat, cmd in self.ui.configitems("encode"):
264 263 mf = util.matcher(self.root, "", [pat], [], [])[1]
265 264 l.append((mf, cmd))
266 265 self.encodepats = l
267 266
268 267 data = self.wopener(filename, 'r').read()
269 268
270 269 for mf, cmd in self.encodepats:
271 270 if mf(filename):
272 271 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
273 272 data = util.filter(data, cmd)
274 273 break
275 274
276 275 return data
277 276
278 277 def wwrite(self, filename, data, fd=None):
279 278 if self.decodepats == None:
280 279 l = []
281 280 for pat, cmd in self.ui.configitems("decode"):
282 281 mf = util.matcher(self.root, "", [pat], [], [])[1]
283 282 l.append((mf, cmd))
284 283 self.decodepats = l
285 284
286 285 for mf, cmd in self.decodepats:
287 286 if mf(filename):
288 287 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
289 288 data = util.filter(data, cmd)
290 289 break
291 290
292 291 if fd:
293 292 return fd.write(data)
294 293 return self.wopener(filename, 'w').write(data)
295 294
296 295 def transaction(self):
297 296 tr = self.transhandle
298 297 if tr != None and tr.running():
299 298 return tr.nest()
300 299
301 300 # save dirstate for undo
302 301 try:
303 302 ds = self.opener("dirstate").read()
304 303 except IOError:
305 304 ds = ""
306 305 self.opener("journal.dirstate", "w").write(ds)
307 306
308 307 tr = transaction.transaction(self.ui.warn, self.opener,
309 308 self.join("journal"),
310 309 aftertrans(self.path))
311 310 self.transhandle = tr
312 311 return tr
313 312
314 313 def recover(self):
315 314 l = self.lock()
316 315 if os.path.exists(self.join("journal")):
317 316 self.ui.status(_("rolling back interrupted transaction\n"))
318 317 transaction.rollback(self.opener, self.join("journal"))
319 318 self.reload()
320 319 return True
321 320 else:
322 321 self.ui.warn(_("no interrupted transaction available\n"))
323 322 return False
324 323
325 324 def undo(self, wlock=None):
326 325 if not wlock:
327 326 wlock = self.wlock()
328 327 l = self.lock()
329 328 if os.path.exists(self.join("undo")):
330 329 self.ui.status(_("rolling back last transaction\n"))
331 330 transaction.rollback(self.opener, self.join("undo"))
332 331 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
333 332 self.reload()
334 333 self.wreload()
335 334 else:
336 335 self.ui.warn(_("no undo information available\n"))
337 336
338 337 def wreload(self):
339 338 self.dirstate.read()
340 339
341 340 def reload(self):
342 341 self.changelog.load()
343 342 self.manifest.load()
344 343 self.tagscache = None
345 344 self.nodetagscache = None
346 345
347 346 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
348 347 desc=None):
349 348 try:
350 349 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
351 350 except lock.LockHeld, inst:
352 351 if not wait:
353 352 raise
354 353 self.ui.warn(_("waiting for lock on %s held by %s\n") %
355 354 (desc, inst.args[0]))
356 355 # default to 600 seconds timeout
357 356 l = lock.lock(self.join(lockname),
358 357 int(self.ui.config("ui", "timeout") or 600),
359 358 releasefn, desc=desc)
360 359 if acquirefn:
361 360 acquirefn()
362 361 return l
363 362
364 363 def lock(self, wait=1):
365 364 return self.do_lock("lock", wait, acquirefn=self.reload,
366 365 desc=_('repository %s') % self.origroot)
367 366
368 367 def wlock(self, wait=1):
369 368 return self.do_lock("wlock", wait, self.dirstate.write,
370 369 self.wreload,
371 370 desc=_('working directory of %s') % self.origroot)
372 371
373 372 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
374 373 "determine whether a new filenode is needed"
375 374 fp1 = manifest1.get(filename, nullid)
376 375 fp2 = manifest2.get(filename, nullid)
377 376
378 377 if fp2 != nullid:
379 378 # is one parent an ancestor of the other?
380 379 fpa = filelog.ancestor(fp1, fp2)
381 380 if fpa == fp1:
382 381 fp1, fp2 = fp2, nullid
383 382 elif fpa == fp2:
384 383 fp2 = nullid
385 384
386 385 # is the file unmodified from the parent? report existing entry
387 386 if fp2 == nullid and text == filelog.read(fp1):
388 387 return (fp1, None, None)
389 388
390 389 return (None, fp1, fp2)
391 390
392 391 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
393 392 orig_parent = self.dirstate.parents()[0] or nullid
394 393 p1 = p1 or self.dirstate.parents()[0] or nullid
395 394 p2 = p2 or self.dirstate.parents()[1] or nullid
396 395 c1 = self.changelog.read(p1)
397 396 c2 = self.changelog.read(p2)
398 397 m1 = self.manifest.read(c1[0])
399 398 mf1 = self.manifest.readflags(c1[0])
400 399 m2 = self.manifest.read(c2[0])
401 400 changed = []
402 401
403 402 if orig_parent == p1:
404 403 update_dirstate = 1
405 404 else:
406 405 update_dirstate = 0
407 406
408 407 if not wlock:
409 408 wlock = self.wlock()
410 409 l = self.lock()
411 410 tr = self.transaction()
412 411 mm = m1.copy()
413 412 mfm = mf1.copy()
414 413 linkrev = self.changelog.count()
415 414 for f in files:
416 415 try:
417 416 t = self.wread(f)
418 417 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
419 418 r = self.file(f)
420 419 mfm[f] = tm
421 420
422 421 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
423 422 if entry:
424 423 mm[f] = entry
425 424 continue
426 425
427 426 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
428 427 changed.append(f)
429 428 if update_dirstate:
430 429 self.dirstate.update([f], "n")
431 430 except IOError:
432 431 try:
433 432 del mm[f]
434 433 del mfm[f]
435 434 if update_dirstate:
436 435 self.dirstate.forget([f])
437 436 except:
438 437 # deleted from p2?
439 438 pass
440 439
441 440 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
442 441 user = user or self.ui.username()
443 442 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
444 443 tr.close()
445 444 if update_dirstate:
446 445 self.dirstate.setparents(n, nullid)
447 446
448 447 def commit(self, files=None, text="", user=None, date=None,
449 match=util.always, force=False, lock=None, wlock=None):
448 match=util.always, force=False, lock=None, wlock=None,
449 force_editor=False):
450 450 commit = []
451 451 remove = []
452 452 changed = []
453 453
454 454 if files:
455 455 for f in files:
456 456 s = self.dirstate.state(f)
457 457 if s in 'nmai':
458 458 commit.append(f)
459 459 elif s == 'r':
460 460 remove.append(f)
461 461 else:
462 462 self.ui.warn(_("%s not tracked!\n") % f)
463 463 else:
464 464 modified, added, removed, deleted, unknown = self.changes(match=match)
465 465 commit = modified + added
466 466 remove = removed
467 467
468 468 p1, p2 = self.dirstate.parents()
469 469 c1 = self.changelog.read(p1)
470 470 c2 = self.changelog.read(p2)
471 471 m1 = self.manifest.read(c1[0])
472 472 mf1 = self.manifest.readflags(c1[0])
473 473 m2 = self.manifest.read(c2[0])
474 474
475 475 if not commit and not remove and not force and p2 == nullid:
476 476 self.ui.status(_("nothing changed\n"))
477 477 return None
478 478
479 479 xp1 = hex(p1)
480 480 if p2 == nullid: xp2 = ''
481 481 else: xp2 = hex(p2)
482 482
483 483 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
484 484
485 485 if not wlock:
486 486 wlock = self.wlock()
487 487 if not lock:
488 488 lock = self.lock()
489 489 tr = self.transaction()
490 490
491 491 # check in files
492 492 new = {}
493 493 linkrev = self.changelog.count()
494 494 commit.sort()
495 495 for f in commit:
496 496 self.ui.note(f + "\n")
497 497 try:
498 498 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
499 499 t = self.wread(f)
500 500 except IOError:
501 501 self.ui.warn(_("trouble committing %s!\n") % f)
502 502 raise
503 503
504 504 r = self.file(f)
505 505
506 506 meta = {}
507 507 cp = self.dirstate.copied(f)
508 508 if cp:
509 509 meta["copy"] = cp
510 510 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
511 511 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
512 512 fp1, fp2 = nullid, nullid
513 513 else:
514 514 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
515 515 if entry:
516 516 new[f] = entry
517 517 continue
518 518
519 519 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
520 520 # remember what we've added so that we can later calculate
521 521 # the files to pull from a set of changesets
522 522 changed.append(f)
523 523
524 524 # update manifest
525 525 m1 = m1.copy()
526 526 m1.update(new)
527 527 for f in remove:
528 528 if f in m1:
529 529 del m1[f]
530 530 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
531 531 (new, remove))
532 532
533 533 # add changeset
534 534 new = new.keys()
535 535 new.sort()
536 536
537 537 user = user or self.ui.username()
538 if not text:
539 edittext = [""]
538 if not text or force_editor:
539 edittext = []
540 if text:
541 edittext.append(text)
542 edittext.append("")
540 543 if p2 != nullid:
541 544 edittext.append("HG: branch merge")
542 545 edittext.extend(["HG: changed %s" % f for f in changed])
543 546 edittext.extend(["HG: removed %s" % f for f in remove])
544 547 if not changed and not remove:
545 548 edittext.append("HG: no files changed")
546 549 edittext.append("")
547 550 # run editor in the repository root
548 551 olddir = os.getcwd()
549 552 os.chdir(self.root)
550 553 edittext = self.ui.edit("\n".join(edittext), user)
551 554 os.chdir(olddir)
552 555 if not edittext.rstrip():
553 556 return None
554 557 text = edittext
555 558
556 559 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
557 560 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
558 561 parent2=xp2)
559 562 tr.close()
560 563
561 564 self.dirstate.setparents(n)
562 565 self.dirstate.update(new, "n")
563 566 self.dirstate.forget(remove)
564 567
565 568 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
566 569 return n
567 570
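
When no message is supplied (or force_editor is set), the buffer handed to ui.edit is assembled from the lines above: the existing text if any, a blank line, "HG: branch merge" for merges, one "HG: changed"/"HG: removed" line per file (or "HG: no files changed"), and a trailing blank line. For a hypothetical merge that changed foo.c and removed old.c, the editor would open on roughly:

    
    HG: branch merge
    HG: changed foo.c
    HG: removed old.c

(The filenames are made up; only the "HG:" line format comes from the code above.)
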
568 571 def walk(self, node=None, files=[], match=util.always, badmatch=None):
569 572 if node:
570 573 fdict = dict.fromkeys(files)
571 574 for fn in self.manifest.read(self.changelog.read(node)[0]):
572 575 fdict.pop(fn, None)
573 576 if match(fn):
574 577 yield 'm', fn
575 578 for fn in fdict:
576 579 if badmatch and badmatch(fn):
577 580 if match(fn):
578 581 yield 'b', fn
579 582 else:
580 583 self.ui.warn(_('%s: No such file in rev %s\n') % (
581 584 util.pathto(self.getcwd(), fn), short(node)))
582 585 else:
583 586 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
584 587 yield src, fn
585 588
586 589 def changes(self, node1=None, node2=None, files=[], match=util.always,
587 590 wlock=None, show_ignored=None):
588 591 """return changes between two nodes or node and working directory
589 592
590 593 If node1 is None, use the first dirstate parent instead.
591 594 If node2 is None, compare node1 with working directory.
592 595 """
593 596
594 597 def fcmp(fn, mf):
595 598 t1 = self.wread(fn)
596 599 t2 = self.file(fn).read(mf.get(fn, nullid))
597 600 return cmp(t1, t2)
598 601
599 602 def mfmatches(node):
600 603 change = self.changelog.read(node)
601 604 mf = dict(self.manifest.read(change[0]))
602 605 for fn in mf.keys():
603 606 if not match(fn):
604 607 del mf[fn]
605 608 return mf
606 609
607 610 if node1:
608 611 # read the manifest from node1 before the manifest from node2,
609 612 # so that we'll hit the manifest cache if we're going through
610 613 # all the revisions in parent->child order.
611 614 mf1 = mfmatches(node1)
612 615
613 616 # are we comparing the working directory?
614 617 if not node2:
615 618 if not wlock:
616 619 try:
617 620 wlock = self.wlock(wait=0)
618 621 except lock.LockException:
619 622 wlock = None
620 623 lookup, modified, added, removed, deleted, unknown, ignored = (
621 624 self.dirstate.changes(files, match, show_ignored))
622 625
623 626 # are we comparing working dir against its parent?
624 627 if not node1:
625 628 if lookup:
626 629 # do a full compare of any files that might have changed
627 630 mf2 = mfmatches(self.dirstate.parents()[0])
628 631 for f in lookup:
629 632 if fcmp(f, mf2):
630 633 modified.append(f)
631 634 elif wlock is not None:
632 635 self.dirstate.update([f], "n")
633 636 else:
634 637 # we are comparing working dir against non-parent
635 638 # generate a pseudo-manifest for the working dir
636 639 mf2 = mfmatches(self.dirstate.parents()[0])
637 640 for f in lookup + modified + added:
638 641 mf2[f] = ""
639 642 for f in removed:
640 643 if f in mf2:
641 644 del mf2[f]
642 645 else:
643 646 # we are comparing two revisions
644 647 deleted, unknown, ignored = [], [], []
645 648 mf2 = mfmatches(node2)
646 649
647 650 if node1:
648 651 # flush lists from dirstate before comparing manifests
649 652 modified, added = [], []
650 653
651 654 for fn in mf2:
652 655 if mf1.has_key(fn):
653 656 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
654 657 modified.append(fn)
655 658 del mf1[fn]
656 659 else:
657 660 added.append(fn)
658 661
659 662 removed = mf1.keys()
660 663
661 664 # sort and return results:
662 665 for l in modified, added, removed, deleted, unknown, ignored:
663 666 l.sort()
664 667 if show_ignored is None:
665 668 return (modified, added, removed, deleted, unknown)
666 669 else:
667 670 return (modified, added, removed, deleted, unknown, ignored)
668 671
669 672 def add(self, list, wlock=None):
670 673 if not wlock:
671 674 wlock = self.wlock()
672 675 for f in list:
673 676 p = self.wjoin(f)
674 677 if not os.path.exists(p):
675 678 self.ui.warn(_("%s does not exist!\n") % f)
676 679 elif not os.path.isfile(p):
677 680 self.ui.warn(_("%s not added: only files supported currently\n")
678 681 % f)
679 682 elif self.dirstate.state(f) in 'an':
680 683 self.ui.warn(_("%s already tracked!\n") % f)
681 684 else:
682 685 self.dirstate.update([f], "a")
683 686
684 687 def forget(self, list, wlock=None):
685 688 if not wlock:
686 689 wlock = self.wlock()
687 690 for f in list:
688 691 if self.dirstate.state(f) not in 'ai':
689 692 self.ui.warn(_("%s not added!\n") % f)
690 693 else:
691 694 self.dirstate.forget([f])
692 695
693 696 def remove(self, list, unlink=False, wlock=None):
694 697 if unlink:
695 698 for f in list:
696 699 try:
697 700 util.unlink(self.wjoin(f))
698 701 except OSError, inst:
699 702 if inst.errno != errno.ENOENT:
700 703 raise
701 704 if not wlock:
702 705 wlock = self.wlock()
703 706 for f in list:
704 707 p = self.wjoin(f)
705 708 if os.path.exists(p):
706 709 self.ui.warn(_("%s still exists!\n") % f)
707 710 elif self.dirstate.state(f) == 'a':
708 711 self.dirstate.forget([f])
709 712 elif f not in self.dirstate:
710 713 self.ui.warn(_("%s not tracked!\n") % f)
711 714 else:
712 715 self.dirstate.update([f], "r")
713 716
714 717 def undelete(self, list, wlock=None):
715 718 p = self.dirstate.parents()[0]
716 719 mn = self.changelog.read(p)[0]
717 720 mf = self.manifest.readflags(mn)
718 721 m = self.manifest.read(mn)
719 722 if not wlock:
720 723 wlock = self.wlock()
721 724 for f in list:
722 725 if self.dirstate.state(f) not in "r":
723 726 self.ui.warn("%s not removed!\n" % f)
724 727 else:
725 728 t = self.file(f).read(m[f])
726 729 self.wwrite(f, t)
727 730 util.set_exec(self.wjoin(f), mf[f])
728 731 self.dirstate.update([f], "n")
729 732
730 733 def copy(self, source, dest, wlock=None):
731 734 p = self.wjoin(dest)
732 735 if not os.path.exists(p):
733 736 self.ui.warn(_("%s does not exist!\n") % dest)
734 737 elif not os.path.isfile(p):
735 738 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
736 739 else:
737 740 if not wlock:
738 741 wlock = self.wlock()
739 742 if self.dirstate.state(dest) == '?':
740 743 self.dirstate.update([dest], "a")
741 744 self.dirstate.copy(source, dest)
742 745
743 746 def heads(self, start=None):
744 747 heads = self.changelog.heads(start)
745 748 # sort the output in rev descending order
746 749 heads = [(-self.changelog.rev(h), h) for h in heads]
747 750 heads.sort()
748 751 return [n for (r, n) in heads]
749 752
750 753 # branchlookup returns a dict giving a list of branches for
751 754 # each head. A branch is defined as the tag of a node or
752 755 # the branch of the node's parents. If a node has multiple
753 756 # branch tags, tags are eliminated if they are visible from other
754 757 # branch tags.
755 758 #
756 759 # So, for this graph: a->b->c->d->e
757 760 # \ /
758 761 # aa -----/
759 762 # a has tag 2.6.12
760 763 # d has tag 2.6.13
761 764 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
762 765 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
763 766 # from the list.
764 767 #
765 768 # It is possible that more than one head will have the same branch tag.
766 769 # callers need to check the result for multiple heads under the same
767 770 # branch tag if that is a problem for them (ie checkout of a specific
768 771 # branch).
769 772 #
770 773 # passing in a specific branch will limit the depth of the search
771 774 # through the parents. It won't limit the branches returned in the
772 775 # result though.
773 776 def branchlookup(self, heads=None, branch=None):
774 777 if not heads:
775 778 heads = self.heads()
776 779 headt = [ h for h in heads ]
777 780 chlog = self.changelog
778 781 branches = {}
779 782 merges = []
780 783 seenmerge = {}
781 784
782 785 # traverse the tree once for each head, recording in the branches
783 786 # dict which tags are visible from this head. The branches
784 787 # dict also records which tags are visible from each tag
785 788 # while we traverse.
786 789 while headt or merges:
787 790 if merges:
788 791 n, found = merges.pop()
789 792 visit = [n]
790 793 else:
791 794 h = headt.pop()
792 795 visit = [h]
793 796 found = [h]
794 797 seen = {}
795 798 while visit:
796 799 n = visit.pop()
797 800 if n in seen:
798 801 continue
799 802 pp = chlog.parents(n)
800 803 tags = self.nodetags(n)
801 804 if tags:
802 805 for x in tags:
803 806 if x == 'tip':
804 807 continue
805 808 for f in found:
806 809 branches.setdefault(f, {})[n] = 1
807 810 branches.setdefault(n, {})[n] = 1
808 811 break
809 812 if n not in found:
810 813 found.append(n)
811 814 if branch in tags:
812 815 continue
813 816 seen[n] = 1
814 817 if pp[1] != nullid and n not in seenmerge:
815 818 merges.append((pp[1], [x for x in found]))
816 819 seenmerge[n] = 1
817 820 if pp[0] != nullid:
818 821 visit.append(pp[0])
819 822 # traverse the branches dict, eliminating branch tags from each
820 823 # head that are visible from another branch tag for that head.
821 824 out = {}
822 825 viscache = {}
823 826 for h in heads:
824 827 def visible(node):
825 828 if node in viscache:
826 829 return viscache[node]
827 830 ret = {}
828 831 visit = [node]
829 832 while visit:
830 833 x = visit.pop()
831 834 if x in viscache:
832 835 ret.update(viscache[x])
833 836 elif x not in ret:
834 837 ret[x] = 1
835 838 if x in branches:
836 839 visit[len(visit):] = branches[x].keys()
837 840 viscache[node] = ret
838 841 return ret
839 842 if h not in branches:
840 843 continue
841 844 # O(n^2), but somewhat limited. This only searches the
842 845 # tags visible from a specific head, not all the tags in the
843 846 # whole repo.
844 847 for b in branches[h]:
845 848 vis = False
846 849 for bb in branches[h].keys():
847 850 if b != bb:
848 851 if b in visible(bb):
849 852 vis = True
850 853 break
851 854 if not vis:
852 855 l = out.setdefault(h, [])
853 856 l[len(l):] = self.nodetags(b)
854 857 return out
855 858
856 859 def branches(self, nodes):
857 860 if not nodes:
858 861 nodes = [self.changelog.tip()]
859 862 b = []
860 863 for n in nodes:
861 864 t = n
862 865 while n:
863 866 p = self.changelog.parents(n)
864 867 if p[1] != nullid or p[0] == nullid:
865 868 b.append((t, n, p[0], p[1]))
866 869 break
867 870 n = p[0]
868 871 return b
869 872
870 873 def between(self, pairs):
871 874 r = []
872 875
873 876 for top, bottom in pairs:
874 877 n, l, i = top, [], 0
875 878 f = 1
876 879
877 880 while n != bottom:
878 881 p = self.changelog.parents(n)[0]
879 882 if i == f:
880 883 l.append(n)
881 884 f = f * 2
882 885 n = p
883 886 i += 1
884 887
885 888 r.append(l)
886 889
887 890 return r
888 891
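
The loop above thins each reported segment exponentially: walking first parents down from the top node, a node is recorded after 1, 2, 4, 8, ... steps, which is what lets the binary search in findincoming below narrow a long unknown range with only a few between() round trips. A toy sketch of just the recorded distances (illustrative only, not part of the repository code):

    def sample_distances(length):
        # distances from the top at which the loop above records a node
        pts, f = [], 1
        for i in range(1, length + 1):
            if i == f:
                pts.append(i)
                f *= 2
        return pts

    # sample_distances(100) -> [1, 2, 4, 8, 16, 32, 64]
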
889 892 def findincoming(self, remote, base=None, heads=None, force=False):
890 893 m = self.changelog.nodemap
891 894 search = []
892 895 fetch = {}
893 896 seen = {}
894 897 seenbranch = {}
895 898 if base == None:
896 899 base = {}
897 900
898 901 if not heads:
899 902 heads = remote.heads()
900 903
901 904 if self.changelog.tip() == nullid:
902 905 if heads != [nullid]:
903 906 return [nullid]
904 907 return []
905 908
906 909 # assume we're closer to the tip than the root
907 910 # and start by examining the heads
908 911 self.ui.status(_("searching for changes\n"))
909 912
910 913 unknown = []
911 914 for h in heads:
912 915 if h not in m:
913 916 unknown.append(h)
914 917 else:
915 918 base[h] = 1
916 919
917 920 if not unknown:
918 921 return []
919 922
920 923 rep = {}
921 924 reqcnt = 0
922 925
923 926 # search through remote branches
924 927 # a 'branch' here is a linear segment of history, with four parts:
925 928 # head, root, first parent, second parent
926 929 # (a branch always has two parents (or none) by definition)
927 930 unknown = remote.branches(unknown)
928 931 while unknown:
929 932 r = []
930 933 while unknown:
931 934 n = unknown.pop(0)
932 935 if n[0] in seen:
933 936 continue
934 937
935 938 self.ui.debug(_("examining %s:%s\n")
936 939 % (short(n[0]), short(n[1])))
937 940 if n[0] == nullid:
938 941 break
939 942 if n in seenbranch:
940 943 self.ui.debug(_("branch already found\n"))
941 944 continue
942 945 if n[1] and n[1] in m: # do we know the base?
943 946 self.ui.debug(_("found incomplete branch %s:%s\n")
944 947 % (short(n[0]), short(n[1])))
945 948 search.append(n) # schedule branch range for scanning
946 949 seenbranch[n] = 1
947 950 else:
948 951 if n[1] not in seen and n[1] not in fetch:
949 952 if n[2] in m and n[3] in m:
950 953 self.ui.debug(_("found new changeset %s\n") %
951 954 short(n[1]))
952 955 fetch[n[1]] = 1 # earliest unknown
953 956 base[n[2]] = 1 # latest known
954 957 continue
955 958
956 959 for a in n[2:4]:
957 960 if a not in rep:
958 961 r.append(a)
959 962 rep[a] = 1
960 963
961 964 seen[n[0]] = 1
962 965
963 966 if r:
964 967 reqcnt += 1
965 968 self.ui.debug(_("request %d: %s\n") %
966 969 (reqcnt, " ".join(map(short, r))))
967 970 for p in range(0, len(r), 10):
968 971 for b in remote.branches(r[p:p+10]):
969 972 self.ui.debug(_("received %s:%s\n") %
970 973 (short(b[0]), short(b[1])))
971 974 if b[0] in m:
972 975 self.ui.debug(_("found base node %s\n")
973 976 % short(b[0]))
974 977 base[b[0]] = 1
975 978 elif b[0] not in seen:
976 979 unknown.append(b)
977 980
978 981 # do binary search on the branches we found
979 982 while search:
980 983 n = search.pop(0)
981 984 reqcnt += 1
982 985 l = remote.between([(n[0], n[1])])[0]
983 986 l.append(n[1])
984 987 p = n[0]
985 988 f = 1
986 989 for i in l:
987 990 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
988 991 if i in m:
989 992 if f <= 2:
990 993 self.ui.debug(_("found new branch changeset %s\n") %
991 994 short(p))
992 995 fetch[p] = 1
993 996 base[i] = 1
994 997 else:
995 998 self.ui.debug(_("narrowed branch search to %s:%s\n")
996 999 % (short(p), short(i)))
997 1000 search.append((p, i))
998 1001 break
999 1002 p, f = i, f * 2
1000 1003
1001 1004 # sanity check our fetch list
1002 1005 for f in fetch.keys():
1003 1006 if f in m:
1004 1007 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1005 1008
1006 1009 if base.keys() == [nullid]:
1007 1010 if force:
1008 1011 self.ui.warn(_("warning: repository is unrelated\n"))
1009 1012 else:
1010 1013 raise util.Abort(_("repository is unrelated"))
1011 1014
1012 1015 self.ui.note(_("found new changesets starting at ") +
1013 1016 " ".join([short(f) for f in fetch]) + "\n")
1014 1017
1015 1018 self.ui.debug(_("%d total queries\n") % reqcnt)
1016 1019
1017 1020 return fetch.keys()
1018 1021
1019 1022 def findoutgoing(self, remote, base=None, heads=None, force=False):
1020 1023 """Return list of nodes that are roots of subsets not in remote
1021 1024
1022 1025 If base dict is specified, assume that these nodes and their parents
1023 1026 exist on the remote side.
1024 1027 If a list of heads is specified, return only nodes which are heads
1025 1028 or ancestors of these heads, and return a second element which
1026 1029 contains all remote heads which get new children.
1027 1030 """
1028 1031 if base == None:
1029 1032 base = {}
1030 1033 self.findincoming(remote, base, heads, force=force)
1031 1034
1032 1035 self.ui.debug(_("common changesets up to ")
1033 1036 + " ".join(map(short, base.keys())) + "\n")
1034 1037
1035 1038 remain = dict.fromkeys(self.changelog.nodemap)
1036 1039
1037 1040 # prune everything remote has from the tree
1038 1041 del remain[nullid]
1039 1042 remove = base.keys()
1040 1043 while remove:
1041 1044 n = remove.pop(0)
1042 1045 if n in remain:
1043 1046 del remain[n]
1044 1047 for p in self.changelog.parents(n):
1045 1048 remove.append(p)
1046 1049
1047 1050 # find every node whose parents have been pruned
1048 1051 subset = []
1049 1052 # find every remote head that will get new children
1050 1053 updated_heads = {}
1051 1054 for n in remain:
1052 1055 p1, p2 = self.changelog.parents(n)
1053 1056 if p1 not in remain and p2 not in remain:
1054 1057 subset.append(n)
1055 1058 if heads:
1056 1059 if p1 in heads:
1057 1060 updated_heads[p1] = True
1058 1061 if p2 in heads:
1059 1062 updated_heads[p2] = True
1060 1063
1061 1064 # this is the set of all roots we have to push
1062 1065 if heads:
1063 1066 return subset, updated_heads.keys()
1064 1067 else:
1065 1068 return subset
1066 1069
1067 1070 def pull(self, remote, heads=None, force=False):
1068 1071 l = self.lock()
1069 1072
1070 1073 fetch = self.findincoming(remote, force=force)
1071 1074 if fetch == [nullid]:
1072 1075 self.ui.status(_("requesting all changes\n"))
1073 1076
1074 1077 if not fetch:
1075 1078 self.ui.status(_("no changes found\n"))
1076 1079 return 0
1077 1080
1078 1081 if heads is None:
1079 1082 cg = remote.changegroup(fetch, 'pull')
1080 1083 else:
1081 1084 cg = remote.changegroupsubset(fetch, heads, 'pull')
1082 1085 return self.addchangegroup(cg, 'pull')
1083 1086
1084 1087 def push(self, remote, force=False, revs=None):
1085 1088 lock = remote.lock()
1086 1089
1087 1090 base = {}
1088 1091 remote_heads = remote.heads()
1089 1092 inc = self.findincoming(remote, base, remote_heads, force=force)
1090 1093 if not force and inc:
1091 1094 self.ui.warn(_("abort: unsynced remote changes!\n"))
1092 1095 self.ui.status(_("(did you forget to sync?"
1093 1096 " use push -f to force)\n"))
1094 1097 return 1
1095 1098
1096 1099 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1097 1100 if revs is not None:
1098 1101 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1099 1102 else:
1100 1103 bases, heads = update, self.changelog.heads()
1101 1104
1102 1105 if not bases:
1103 1106 self.ui.status(_("no changes found\n"))
1104 1107 return 1
1105 1108 elif not force:
1106 1109 # FIXME we don't properly detect creation of new heads
1107 1110 # in the push -r case, assume the user knows what he's doing
1108 1111 if not revs and len(remote_heads) < len(heads) \
1109 1112 and remote_heads != [nullid]:
1110 1113 self.ui.warn(_("abort: push creates new remote branches!\n"))
1111 1114 self.ui.status(_("(did you forget to merge?"
1112 1115 " use push -f to force)\n"))
1113 1116 return 1
1114 1117
1115 1118 if revs is None:
1116 1119 cg = self.changegroup(update, 'push')
1117 1120 else:
1118 1121 cg = self.changegroupsubset(update, revs, 'push')
1119 1122 return remote.addchangegroup(cg, 'push')
1120 1123
1121 1124 def changegroupsubset(self, bases, heads, source):
1122 1125 """This function generates a changegroup consisting of all the nodes
1123 1126 that are descendants of any of the bases, and ancestors of any of
1124 1127 the heads.
1125 1128
1126 1129 It is fairly complex as determining which filenodes and which
1127 1130 manifest nodes need to be included for the changeset to be complete
1128 1131 is non-trivial.
1129 1132
1130 1133 Another wrinkle is doing the reverse, figuring out which changeset in
1131 1134 the changegroup a particular filenode or manifestnode belongs to."""
1132 1135
1133 1136 self.hook('preoutgoing', throw=True, source=source)
1134 1137
1135 1138 # Set up some initial variables
1136 1139 # Make it easy to refer to self.changelog
1137 1140 cl = self.changelog
1138 1141 # msng is short for missing - compute the list of changesets in this
1139 1142 # changegroup.
1140 1143 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1141 1144 # Some bases may turn out to be superfluous, and some heads may be
1142 1145 # too. nodesbetween will return the minimal set of bases and heads
1143 1146 # necessary to re-create the changegroup.
1144 1147
1145 1148 # Known heads are the list of heads that it is assumed the recipient
1146 1149 # of this changegroup will know about.
1147 1150 knownheads = {}
1148 1151 # We assume that all parents of bases are known heads.
1149 1152 for n in bases:
1150 1153 for p in cl.parents(n):
1151 1154 if p != nullid:
1152 1155 knownheads[p] = 1
1153 1156 knownheads = knownheads.keys()
1154 1157 if knownheads:
1155 1158 # Now that we know what heads are known, we can compute which
1156 1159 # changesets are known. The recipient must know about all
1157 1160 # changesets required to reach the known heads from the null
1158 1161 # changeset.
1159 1162 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1160 1163 junk = None
1161 1164 # Transform the list into an ersatz set.
1162 1165 has_cl_set = dict.fromkeys(has_cl_set)
1163 1166 else:
1164 1167 # If there were no known heads, the recipient cannot be assumed to
1165 1168 # know about any changesets.
1166 1169 has_cl_set = {}
1167 1170
1168 1171 # Make it easy to refer to self.manifest
1169 1172 mnfst = self.manifest
1170 1173 # We don't know which manifests are missing yet
1171 1174 msng_mnfst_set = {}
1172 1175 # Nor do we know which filenodes are missing.
1173 1176 msng_filenode_set = {}
1174 1177
1175 1178 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1176 1179 junk = None
1177 1180
1178 1181 # A changeset always belongs to itself, so the changenode lookup
1179 1182 # function for a changenode is identity.
1180 1183 def identity(x):
1181 1184 return x
1182 1185
1183 1186 # A function generating function. Sets up an environment for the
1184 1187 # inner function.
1185 1188 def cmp_by_rev_func(revlog):
1186 1189 # Compare two nodes by their revision number in the environment's
1187 1190 # revision history. Since the revision number both represents the
1188 1191 # most efficient order to read the nodes in, and represents a
1189 1192 # topological sorting of the nodes, this function is often useful.
1190 1193 def cmp_by_rev(a, b):
1191 1194 return cmp(revlog.rev(a), revlog.rev(b))
1192 1195 return cmp_by_rev
1193 1196
1194 1197 # If we determine that a particular file or manifest node must be a
1195 1198 # node that the recipient of the changegroup will already have, we can
1196 1199 # also assume the recipient will have all the parents. This function
1197 1200 # prunes them from the set of missing nodes.
1198 1201 def prune_parents(revlog, hasset, msngset):
1199 1202 haslst = hasset.keys()
1200 1203 haslst.sort(cmp_by_rev_func(revlog))
1201 1204 for node in haslst:
1202 1205 parentlst = [p for p in revlog.parents(node) if p != nullid]
1203 1206 while parentlst:
1204 1207 n = parentlst.pop()
1205 1208 if n not in hasset:
1206 1209 hasset[n] = 1
1207 1210 p = [p for p in revlog.parents(n) if p != nullid]
1208 1211 parentlst.extend(p)
1209 1212 for n in hasset:
1210 1213 msngset.pop(n, None)
1211 1214
1212 1215 # This is a function generating function used to set up an environment
1213 1216 # for the inner function to execute in.
1214 1217 def manifest_and_file_collector(changedfileset):
1215 1218 # This is an information gathering function that collects
1216 1219 # information from each changeset node that goes out as part of
1217 1220 # the changegroup. The information gathered is a list of which
1218 1221 # manifest nodes are potentially required (the recipient may
1219 1222 # already have them) and total list of all files which were
1220 1223 # changed in any changeset in the changegroup.
1221 1224 #
1222 1225 # We also remember, for each manifest, the first changenode we saw
1223 1226 # it referenced by, so we can later determine which changenode 'owns'
1224 1227 # the manifest.
1225 1228 def collect_manifests_and_files(clnode):
1226 1229 c = cl.read(clnode)
1227 1230 for f in c[3]:
1228 1231 # This is to make sure we only have one instance of each
1229 1232 # filename string for each filename.
1230 1233 changedfileset.setdefault(f, f)
1231 1234 msng_mnfst_set.setdefault(c[0], clnode)
1232 1235 return collect_manifests_and_files
1233 1236
1234 1237 # Figure out which manifest nodes (of the ones we think might be part
1235 1238 # of the changegroup) the recipient must know about and remove them
1236 1239 # from the changegroup.
1237 1240 def prune_manifests():
1238 1241 has_mnfst_set = {}
1239 1242 for n in msng_mnfst_set:
1240 1243 # If a 'missing' manifest thinks it belongs to a changenode
1241 1244 # the recipient is assumed to have, obviously the recipient
1242 1245 # must have that manifest.
1243 1246 linknode = cl.node(mnfst.linkrev(n))
1244 1247 if linknode in has_cl_set:
1245 1248 has_mnfst_set[n] = 1
1246 1249 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1247 1250
1248 1251 # Use the information collected in collect_manifests_and_files to say
1249 1252 # which changenode any manifestnode belongs to.
1250 1253 def lookup_manifest_link(mnfstnode):
1251 1254 return msng_mnfst_set[mnfstnode]
1252 1255
1253 1256 # A function generating function that sets up the initial environment
1255 1258 # for the inner function.
1255 1258 def filenode_collector(changedfiles):
1256 1259 next_rev = [0]
1257 1260 # This gathers information from each manifestnode included in the
1258 1261 # changegroup about which filenodes the manifest node references
1259 1262 # so we can include those in the changegroup too.
1260 1263 #
1261 1264 # It also remembers which changenode each filenode belongs to. It
1262 1265 # does this by assuming a filenode belongs to the changenode
1263 1266 # that the first manifest referencing it belongs to.
1264 1267 def collect_msng_filenodes(mnfstnode):
1265 1268 r = mnfst.rev(mnfstnode)
1266 1269 if r == next_rev[0]:
1267 1270 # If the last rev we looked at was the one just previous,
1268 1271 # we only need to see a diff.
1269 1272 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1270 1273 # For each line in the delta
1271 1274 for dline in delta.splitlines():
1272 1275 # get the filename and filenode for that line
1273 1276 f, fnode = dline.split('\0')
1274 1277 fnode = bin(fnode[:40])
1275 1278 f = changedfiles.get(f, None)
1276 1279 # And if the file is in the list of files we care
1277 1280 # about.
1278 1281 if f is not None:
1279 1282 # Get the changenode this manifest belongs to
1280 1283 clnode = msng_mnfst_set[mnfstnode]
1281 1284 # Create the set of filenodes for the file if
1282 1285 # there isn't one already.
1283 1286 ndset = msng_filenode_set.setdefault(f, {})
1284 1287 # And set the filenode's changelog node to the
1285 1288 # manifest's if it hasn't been set already.
1286 1289 ndset.setdefault(fnode, clnode)
1287 1290 else:
1288 1291 # Otherwise we need a full manifest.
1289 1292 m = mnfst.read(mnfstnode)
1290 1293 # For every file we care about.
1291 1294 for f in changedfiles:
1292 1295 fnode = m.get(f, None)
1293 1296 # If it's in the manifest
1294 1297 if fnode is not None:
1295 1298 # See comments above.
1296 1299 clnode = msng_mnfst_set[mnfstnode]
1297 1300 ndset = msng_filenode_set.setdefault(f, {})
1298 1301 ndset.setdefault(fnode, clnode)
1299 1302 # Remember the revision we hope to see next.
1300 1303 next_rev[0] = r + 1
1301 1304 return collect_msng_filenodes
1302 1305
1303 1306 # We have a list of filenodes we think we need for a file, let's remove
1304 1307 # all those we know the recipient must have.
1305 1308 def prune_filenodes(f, filerevlog):
1306 1309 msngset = msng_filenode_set[f]
1307 1310 hasset = {}
1308 1311 # If a 'missing' filenode thinks it belongs to a changenode we
1309 1312 # assume the recipient must have, then the recipient must have
1310 1313 # that filenode.
1311 1314 for n in msngset:
1312 1315 clnode = cl.node(filerevlog.linkrev(n))
1313 1316 if clnode in has_cl_set:
1314 1317 hasset[n] = 1
1315 1318 prune_parents(filerevlog, hasset, msngset)
1316 1319
1317 1320 # A function generating function that sets up a context for the
1318 1321 # inner function.
1319 1322 def lookup_filenode_link_func(fname):
1320 1323 msngset = msng_filenode_set[fname]
1321 1324 # Lookup the changenode the filenode belongs to.
1322 1325 def lookup_filenode_link(fnode):
1323 1326 return msngset[fnode]
1324 1327 return lookup_filenode_link
1325 1328
1326 1329 # Now that we have all these utility functions to help out and
1327 1330 # logically divide up the task, generate the group.
1328 1331 def gengroup():
1329 1332 # The set of changed files starts empty.
1330 1333 changedfiles = {}
1331 1334 # Create a changenode group generator that will call our functions
1332 1335 # back to lookup the owning changenode and collect information.
1333 1336 group = cl.group(msng_cl_lst, identity,
1334 1337 manifest_and_file_collector(changedfiles))
1335 1338 for chnk in group:
1336 1339 yield chnk
1337 1340
1338 1341 # The list of manifests has been collected by the generator
1339 1342 # calling our functions back.
1340 1343 prune_manifests()
1341 1344 msng_mnfst_lst = msng_mnfst_set.keys()
1342 1345 # Sort the manifestnodes by revision number.
1343 1346 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1344 1347 # Create a generator for the manifestnodes that calls our lookup
1345 1348 # and data collection functions back.
1346 1349 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1347 1350 filenode_collector(changedfiles))
1348 1351 for chnk in group:
1349 1352 yield chnk
1350 1353
1351 1354 # These are no longer needed, dereference and toss the memory for
1352 1355 # them.
1353 1356 msng_mnfst_lst = None
1354 1357 msng_mnfst_set.clear()
1355 1358
1356 1359 changedfiles = changedfiles.keys()
1357 1360 changedfiles.sort()
1358 1361 # Go through all our files in order sorted by name.
1359 1362 for fname in changedfiles:
1360 1363 filerevlog = self.file(fname)
1361 1364 # Toss out the filenodes that the recipient isn't really
1362 1365 # missing.
1363 1366 if msng_filenode_set.has_key(fname):
1364 1367 prune_filenodes(fname, filerevlog)
1365 1368 msng_filenode_lst = msng_filenode_set[fname].keys()
1366 1369 else:
1367 1370 msng_filenode_lst = []
1368 1371 # If any filenodes are left, generate the group for them,
1369 1372 # otherwise don't bother.
1370 1373 if len(msng_filenode_lst) > 0:
1371 1374 yield changegroup.genchunk(fname)
1372 1375 # Sort the filenodes by their revision #
1373 1376 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1374 1377 # Create a group generator and only pass in a changenode
1375 1378 # lookup function as we need to collect no information
1376 1379 # from filenodes.
1377 1380 group = filerevlog.group(msng_filenode_lst,
1378 1381 lookup_filenode_link_func(fname))
1379 1382 for chnk in group:
1380 1383 yield chnk
1381 1384 if msng_filenode_set.has_key(fname):
1382 1385 # Don't need this anymore, toss it to free memory.
1383 1386 del msng_filenode_set[fname]
1384 1387 # Signal that no more groups are left.
1385 1388 yield changegroup.closechunk()
1386 1389
1387 1390 if msng_cl_lst:
1388 1391 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1389 1392
1390 1393 return util.chunkbuffer(gengroup())
1391 1394
1392 1395 def changegroup(self, basenodes, source):
1393 1396 """Generate a changegroup of all nodes that we have that a recipient
1394 1397 doesn't.
1395 1398
1396 1399 This is much easier than the previous function as we can assume that
1397 1400 the recipient has any changenode we aren't sending them."""
1398 1401
1399 1402 self.hook('preoutgoing', throw=True, source=source)
1400 1403
1401 1404 cl = self.changelog
1402 1405 nodes = cl.nodesbetween(basenodes, None)[0]
1403 1406 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1404 1407
1405 1408 def identity(x):
1406 1409 return x
1407 1410
1408 1411 def gennodelst(revlog):
1409 1412 for r in xrange(0, revlog.count()):
1410 1413 n = revlog.node(r)
1411 1414 if revlog.linkrev(n) in revset:
1412 1415 yield n
1413 1416
1414 1417 def changed_file_collector(changedfileset):
1415 1418 def collect_changed_files(clnode):
1416 1419 c = cl.read(clnode)
1417 1420 for fname in c[3]:
1418 1421 changedfileset[fname] = 1
1419 1422 return collect_changed_files
1420 1423
1421 1424 def lookuprevlink_func(revlog):
1422 1425 def lookuprevlink(n):
1423 1426 return cl.node(revlog.linkrev(n))
1424 1427 return lookuprevlink
1425 1428
1426 1429 def gengroup():
1427 1430 # construct a list of all changed files
1428 1431 changedfiles = {}
1429 1432
1430 1433 for chnk in cl.group(nodes, identity,
1431 1434 changed_file_collector(changedfiles)):
1432 1435 yield chnk
1433 1436 changedfiles = changedfiles.keys()
1434 1437 changedfiles.sort()
1435 1438
1436 1439 mnfst = self.manifest
1437 1440 nodeiter = gennodelst(mnfst)
1438 1441 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1439 1442 yield chnk
1440 1443
1441 1444 for fname in changedfiles:
1442 1445 filerevlog = self.file(fname)
1443 1446 nodeiter = gennodelst(filerevlog)
1444 1447 nodeiter = list(nodeiter)
1445 1448 if nodeiter:
1446 1449 yield changegroup.genchunk(fname)
1447 1450 lookup = lookuprevlink_func(filerevlog)
1448 1451 for chnk in filerevlog.group(nodeiter, lookup):
1449 1452 yield chnk
1450 1453
1451 1454 yield changegroup.closechunk()
1452 1455
1453 1456 if nodes:
1454 1457 self.hook('outgoing', node=hex(nodes[0]), source=source)
1455 1458
1456 1459 return util.chunkbuffer(gengroup())
1457 1460
1458 1461 def addchangegroup(self, source, srctype):
1459 1462 """add changegroup to repo.
1460 1463 returns number of heads modified or added + 1."""
1461 1464
1462 1465 def csmap(x):
1463 1466 self.ui.debug(_("add changeset %s\n") % short(x))
1464 1467 return cl.count()
1465 1468
1466 1469 def revmap(x):
1467 1470 return cl.rev(x)
1468 1471
1469 1472 if not source:
1470 1473 return 0
1471 1474
1472 1475 self.hook('prechangegroup', throw=True, source=srctype)
1473 1476
1474 1477 changesets = files = revisions = 0
1475 1478
1476 1479 tr = self.transaction()
1477 1480
1478 1481 # write changelog and manifest data to temp files so
1479 1482 # concurrent readers will not see inconsistent view
1480 1483 cl = None
1481 1484 try:
1482 1485 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1483 1486
1484 1487 oldheads = len(cl.heads())
1485 1488
1486 1489 # pull off the changeset group
1487 1490 self.ui.status(_("adding changesets\n"))
1488 1491 co = cl.tip()
1489 1492 chunkiter = changegroup.chunkiter(source)
1490 1493 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1491 1494 cnr, cor = map(cl.rev, (cn, co))
1492 1495 if cn == nullid:
1493 1496 cnr = cor
1494 1497 changesets = cnr - cor
1495 1498
1496 1499 mf = None
1497 1500 try:
1498 1501 mf = appendfile.appendmanifest(self.opener,
1499 1502 self.manifest.version)
1500 1503
1501 1504 # pull off the manifest group
1502 1505 self.ui.status(_("adding manifests\n"))
1503 1506 mm = mf.tip()
1504 1507 chunkiter = changegroup.chunkiter(source)
1505 1508 mo = mf.addgroup(chunkiter, revmap, tr)
1506 1509
1507 1510 # process the files
1508 1511 self.ui.status(_("adding file changes\n"))
1509 1512 while 1:
1510 1513 f = changegroup.getchunk(source)
1511 1514 if not f:
1512 1515 break
1513 1516 self.ui.debug(_("adding %s revisions\n") % f)
1514 1517 fl = self.file(f)
1515 1518 o = fl.count()
1516 1519 chunkiter = changegroup.chunkiter(source)
1517 1520 n = fl.addgroup(chunkiter, revmap, tr)
1518 1521 revisions += fl.count() - o
1519 1522 files += 1
1520 1523
1521 1524 # write order here is important so concurrent readers will see
1522 1525 # consistent view of repo
1523 1526 mf.writedata()
1524 1527 finally:
1525 1528 if mf:
1526 1529 mf.cleanup()
1527 1530 cl.writedata()
1528 1531 finally:
1529 1532 if cl:
1530 1533 cl.cleanup()
1531 1534
1532 1535 # make changelog and manifest see real files again
1533 1536 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1534 1537 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1535 1538 self.changelog.checkinlinesize(tr)
1536 1539 self.manifest.checkinlinesize(tr)
1537 1540
1538 1541 newheads = len(self.changelog.heads())
1539 1542 heads = ""
1540 1543 if oldheads and newheads > oldheads:
1541 1544 heads = _(" (+%d heads)") % (newheads - oldheads)
1542 1545
1543 1546 self.ui.status(_("added %d changesets"
1544 1547 " with %d changes to %d files%s\n")
1545 1548 % (changesets, revisions, files, heads))
1546 1549
1547 self.hook('pretxnchangegroup', throw=True,
1548 node=hex(self.changelog.node(cor+1)), source=srctype)
1550 if changesets > 0:
1551 self.hook('pretxnchangegroup', throw=True,
1552 node=hex(self.changelog.node(cor+1)), source=srctype)
1549 1553
1550 1554 tr.close()
1551 1555
1552 1556 if changesets > 0:
1553 1557 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1554 1558 source=srctype)
1555 1559
1556 1560 for i in range(cor + 1, cnr + 1):
1557 1561 self.hook("incoming", node=hex(self.changelog.node(i)),
1558 1562 source=srctype)
1559 1563
1560 1564 return newheads - oldheads + 1
1561 1565
1562 1566 def update(self, node, allow=False, force=False, choose=None,
1563 1567 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1564 1568 pl = self.dirstate.parents()
1565 1569 if not force and pl[1] != nullid:
1566 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1567 return 1
1570 raise util.Abort(_("outstanding uncommitted merges"))
1568 1571
1569 1572 err = False
1570 1573
1571 1574 p1, p2 = pl[0], node
1572 1575 pa = self.changelog.ancestor(p1, p2)
1573 1576 m1n = self.changelog.read(p1)[0]
1574 1577 m2n = self.changelog.read(p2)[0]
1575 1578 man = self.manifest.ancestor(m1n, m2n)
1576 1579 m1 = self.manifest.read(m1n)
1577 1580 mf1 = self.manifest.readflags(m1n)
1578 1581 m2 = self.manifest.read(m2n).copy()
1579 1582 mf2 = self.manifest.readflags(m2n)
1580 1583 ma = self.manifest.read(man)
1581 1584 mfa = self.manifest.readflags(man)
1582 1585
1583 1586 modified, added, removed, deleted, unknown = self.changes()
1584 1587
1585 1588 # is this a jump, or a merge? i.e. is there a linear path
1586 1589 # from p1 to p2?
1587 1590 linear_path = (pa == p1 or pa == p2)
1588 1591
1589 1592 if allow and linear_path:
1590 1593 raise util.Abort(_("there is nothing to merge, "
1591 1594 "just use 'hg update'"))
1592 1595 if allow and not forcemerge:
1593 1596 if modified or added or removed:
1594 1597 raise util.Abort(_("outstanding uncommitted changes"))
1598
1595 1599 if not forcemerge and not force:
1596 1600 for f in unknown:
1597 1601 if f in m2:
1598 1602 t1 = self.wread(f)
1599 1603 t2 = self.file(f).read(m2[f])
1600 1604 if cmp(t1, t2) != 0:
1601 1605 raise util.Abort(_("'%s' already exists in the working"
1602 1606 " dir and differs from remote") % f)
1603 1607
1604 1608 # resolve the manifest to determine which files
1605 1609 # we care about merging
1606 1610 self.ui.note(_("resolving manifests\n"))
1607 1611 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1608 1612 (force, allow, moddirstate, linear_path))
1609 1613 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1610 1614 (short(man), short(m1n), short(m2n)))
1611 1615
1612 1616 merge = {}
1613 1617 get = {}
1614 1618 remove = []
1615 1619
1616 1620 # construct a working dir manifest
1617 1621 mw = m1.copy()
1618 1622 mfw = mf1.copy()
1619 1623 umap = dict.fromkeys(unknown)
1620 1624
1621 1625 for f in added + modified + unknown:
1622 1626 mw[f] = ""
1623 1627 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1624 1628
1625 1629 if moddirstate and not wlock:
1626 1630 wlock = self.wlock()
1627 1631
1628 1632 for f in deleted + removed:
1629 1633 if f in mw:
1630 1634 del mw[f]
1631 1635
1632 1636 # If we're jumping between revisions (as opposed to merging),
1633 1637 # and if neither the working directory nor the target rev has
1634 1638 # the file, then we need to remove it from the dirstate, to
1635 1639 # prevent the dirstate from listing the file when it is no
1636 1640 # longer in the manifest.
1637 1641 if moddirstate and linear_path and f not in m2:
1638 1642 self.dirstate.forget((f,))
1639 1643
1640 1644 # Compare manifests
1641 1645 for f, n in mw.iteritems():
1642 1646 if choose and not choose(f):
1643 1647 continue
1644 1648 if f in m2:
1645 1649 s = 0
1646 1650
1647 1651 # is the wfile new since m1, and match m2?
1648 1652 if f not in m1:
1649 1653 t1 = self.wread(f)
1650 1654 t2 = self.file(f).read(m2[f])
1651 1655 if cmp(t1, t2) == 0:
1652 1656 n = m2[f]
1653 1657 del t1, t2
1654 1658
1655 1659 # are files different?
1656 1660 if n != m2[f]:
1657 1661 a = ma.get(f, nullid)
1658 1662 # are both different from the ancestor?
1659 1663 if n != a and m2[f] != a:
1660 1664 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1661 1665 # merge executable bits
1662 1666 # "if we changed or they changed, change in merge"
1663 1667 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1664 1668 mode = ((a^b) | (a^c)) ^ a
1665 1669 merge[f] = (m1.get(f, nullid), m2[f], mode)
1666 1670 s = 1
1667 1671 # are we clobbering?
1668 1672 # is remote's version newer?
1669 1673 # or are we going back in time?
1670 1674 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1671 1675 self.ui.debug(_(" remote %s is newer, get\n") % f)
1672 1676 get[f] = m2[f]
1673 1677 s = 1
1674 1678 elif f in umap or f in added:
1675 1679 # this unknown file is the same as the checkout
1676 1680 # we need to reset the dirstate if the file was added
1677 1681 get[f] = m2[f]
1678 1682
1679 1683 if not s and mfw[f] != mf2[f]:
1680 1684 if force:
1681 1685 self.ui.debug(_(" updating permissions for %s\n") % f)
1682 1686 util.set_exec(self.wjoin(f), mf2[f])
1683 1687 else:
1684 1688 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1685 1689 mode = ((a^b) | (a^c)) ^ a
1686 1690 if mode != b:
1687 1691 self.ui.debug(_(" updating permissions for %s\n")
1688 1692 % f)
1689 1693 util.set_exec(self.wjoin(f), mode)
1690 1694 del m2[f]
1691 1695 elif f in ma:
1692 1696 if n != ma[f]:
1693 1697 r = _("d")
1694 1698 if not force and (linear_path or allow):
1695 1699 r = self.ui.prompt(
1696 1700 (_(" local changed %s which remote deleted\n") % f) +
1697 1701 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1698 1702 if r == _("d"):
1699 1703 remove.append(f)
1700 1704 else:
1701 1705 self.ui.debug(_("other deleted %s\n") % f)
1702 1706 remove.append(f) # other deleted it
1703 1707 else:
1704 1708 # file is created on branch or in working directory
1705 1709 if force and f not in umap:
1706 1710 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1707 1711 remove.append(f)
1708 1712 elif n == m1.get(f, nullid): # same as parent
1709 1713 if p2 == pa: # going backwards?
1710 1714 self.ui.debug(_("remote deleted %s\n") % f)
1711 1715 remove.append(f)
1712 1716 else:
1713 1717 self.ui.debug(_("local modified %s, keeping\n") % f)
1714 1718 else:
1715 1719 self.ui.debug(_("working dir created %s, keeping\n") % f)
1716 1720
1717 1721 for f, n in m2.iteritems():
1718 1722 if choose and not choose(f):
1719 1723 continue
1720 1724 if f[0] == "/":
1721 1725 continue
1722 1726 if f in ma and n != ma[f]:
1723 1727 r = _("k")
1724 1728 if not force and (linear_path or allow):
1725 1729 r = self.ui.prompt(
1726 1730 (_("remote changed %s which local deleted\n") % f) +
1727 1731 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1728 1732 if r == _("k"):
1729 1733 get[f] = n
1730 1734 elif f not in ma:
1731 1735 self.ui.debug(_("remote created %s\n") % f)
1732 1736 get[f] = n
1733 1737 else:
1734 1738 if force or p2 == pa: # going backwards?
1735 1739 self.ui.debug(_("local deleted %s, recreating\n") % f)
1736 1740 get[f] = n
1737 1741 else:
1738 1742 self.ui.debug(_("local deleted %s\n") % f)
1739 1743
1740 1744 del mw, m1, m2, ma
1741 1745
1742 1746 if force:
1743 1747 for f in merge:
1744 1748 get[f] = merge[f][1]
1745 1749 merge = {}
1746 1750
1747 1751 if linear_path or force:
1748 1752 # we don't need to do any magic, just jump to the new rev
1749 1753 branch_merge = False
1750 1754 p1, p2 = p2, nullid
1751 1755 else:
1752 1756 if not allow:
1753 1757 self.ui.status(_("this update spans a branch"
1754 1758 " affecting the following files:\n"))
1755 1759 fl = merge.keys() + get.keys()
1756 1760 fl.sort()
1757 1761 for f in fl:
1758 1762 cf = ""
1759 1763 if f in merge:
1760 1764 cf = _(" (resolve)")
1761 1765 self.ui.status(" %s%s\n" % (f, cf))
1762 1766 self.ui.warn(_("aborting update spanning branches!\n"))
1763 1767 self.ui.status(_("(use 'hg merge' to merge across branches"
1764 1768 " or 'hg update -C' to lose changes)\n"))
1765 1769 return 1
1766 1770 branch_merge = True
1767 1771
1772 xp1 = hex(p1)
1773 xp2 = hex(p2)
1774 if p2 == nullid: xxp2 = ''
1775 else: xxp2 = xp2
1776
1777 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1778
1768 1779 # get the files we don't need to change
1769 1780 files = get.keys()
1770 1781 files.sort()
1771 1782 for f in files:
1772 1783 if f[0] == "/":
1773 1784 continue
1774 1785 self.ui.note(_("getting %s\n") % f)
1775 1786 t = self.file(f).read(get[f])
1776 1787 self.wwrite(f, t)
1777 1788 util.set_exec(self.wjoin(f), mf2[f])
1778 1789 if moddirstate:
1779 1790 if branch_merge:
1780 1791 self.dirstate.update([f], 'n', st_mtime=-1)
1781 1792 else:
1782 1793 self.dirstate.update([f], 'n')
1783 1794
1784 1795 # merge the tricky bits
1785 1796 failedmerge = []
1786 1797 files = merge.keys()
1787 1798 files.sort()
1788 xp1 = hex(p1)
1789 xp2 = hex(p2)
1790 1799 for f in files:
1791 1800 self.ui.status(_("merging %s\n") % f)
1792 1801 my, other, flag = merge[f]
1793 1802 ret = self.merge3(f, my, other, xp1, xp2)
1794 1803 if ret:
1795 1804 err = True
1796 1805 failedmerge.append(f)
1797 1806 util.set_exec(self.wjoin(f), flag)
1798 1807 if moddirstate:
1799 1808 if branch_merge:
1800 1809 # We've done a branch merge, mark this file as merged
1801 1810 # so that we properly record the merger later
1802 1811 self.dirstate.update([f], 'm')
1803 1812 else:
1804 1813 # We've update-merged a locally modified file, so
1805 1814 # we set the dirstate to emulate a normal checkout
1806 1815 # of that file some time in the past. Thus our
1807 1816 # merge will appear as a normal local file
1808 1817 # modification.
1809 1818 f_len = len(self.file(f).read(other))
1810 1819 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1811 1820
1812 1821 remove.sort()
1813 1822 for f in remove:
1814 1823 self.ui.note(_("removing %s\n") % f)
1815 1824 util.audit_path(f)
1816 1825 try:
1817 1826 util.unlink(self.wjoin(f))
1818 1827 except OSError, inst:
1819 1828 if inst.errno != errno.ENOENT:
1820 1829 self.ui.warn(_("update failed to remove %s: %s!\n") %
1821 1830 (f, inst.strerror))
1822 1831 if moddirstate:
1823 1832 if branch_merge:
1824 1833 self.dirstate.update(remove, 'r')
1825 1834 else:
1826 1835 self.dirstate.forget(remove)
1827 1836
1828 1837 if moddirstate:
1829 1838 self.dirstate.setparents(p1, p2)
1830 1839
1831 1840 if show_stats:
1832 1841 stats = ((len(get), _("updated")),
1833 1842 (len(merge) - len(failedmerge), _("merged")),
1834 1843 (len(remove), _("removed")),
1835 1844 (len(failedmerge), _("unresolved")))
1836 1845 note = ", ".join([_("%d files %s") % s for s in stats])
1837 1846 self.ui.status("%s\n" % note)
1838 1847 if moddirstate:
1839 1848 if branch_merge:
1840 1849 if failedmerge:
1841 1850 self.ui.status(_("There are unresolved merges,"
1842 1851 " you can redo the full merge using:\n"
1843 1852 " hg update -C %s\n"
1844 1853 " hg merge %s\n"
1845 1854 % (self.changelog.rev(p1),
1846 1855 self.changelog.rev(p2))))
1847 1856 else:
1848 1857 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1849 1858 elif failedmerge:
1850 1859 self.ui.status(_("There are unresolved merges with"
1851 1860 " locally modified files.\n"))
1852 1861
1862 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1853 1863 return err
1854 1864
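The executable-bit merge in update() relies on the expression mode = ((a^b) | (a^c)) ^ a, glossed in the code as "if we changed or they changed, change in merge": a is the ancestor's bit, b the working directory's, c the remote's, and whichever side differs from the ancestor wins (if both differ, a single bit can only have flipped to the same value on both sides). A small standalone check of that rule, illustrative only:

def merge_exec_bit(a, b, c):
    """Merge one executable bit: a=ancestor, b=local, c=remote (each 0 or 1)."""
    return ((a ^ b) | (a ^ c)) ^ a

# Exhaustive check of the intended rule: keep the local bit unless only the
# remote side changed it relative to the ancestor.
for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            expected = c if (b == a and c != a) else b
            assert merge_exec_bit(a, b, c) == expected, (a, b, c)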
1855 1865 def merge3(self, fn, my, other, p1, p2):
1856 1866 """perform a 3-way merge in the working directory"""
1857 1867
1858 1868 def temp(prefix, node):
1859 1869 pre = "%s~%s." % (os.path.basename(fn), prefix)
1860 1870 (fd, name) = tempfile.mkstemp(prefix=pre)
1861 1871 f = os.fdopen(fd, "wb")
1862 1872 self.wwrite(fn, fl.read(node), f)
1863 1873 f.close()
1864 1874 return name
1865 1875
1866 1876 fl = self.file(fn)
1867 1877 base = fl.ancestor(my, other)
1868 1878 a = self.wjoin(fn)
1869 1879 b = temp("base", base)
1870 1880 c = temp("other", other)
1871 1881
1872 1882 self.ui.note(_("resolving %s\n") % fn)
1873 1883 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1874 1884 (fn, short(my), short(other), short(base)))
1875 1885
1876 1886 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1877 1887 or "hgmerge")
1878 1888 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1879 1889 environ={'HG_FILE': fn,
1880 1890 'HG_MY_NODE': p1,
1881 1891 'HG_OTHER_NODE': p2,
1882 1892 'HG_FILE_MY_NODE': hex(my),
1883 1893 'HG_FILE_OTHER_NODE': hex(other),
1884 1894 'HG_FILE_BASE_NODE': hex(base)})
1885 1895 if r:
1886 1896 self.ui.warn(_("merging %s failed!\n") % fn)
1887 1897
1888 1898 os.unlink(b)
1889 1899 os.unlink(c)
1890 1900 return r
1891 1901
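merge3() hands the actual three-way merge to an external program: the working copy of the file and two temporary files holding the base and other revisions are passed as arguments, the HG_* variables are exported into the environment, and a non-zero exit status marks the file as unresolved. A hedged sketch of what a minimal HGMERGE-compatible program might look like (this script is illustrative and is not part of Mercurial; the code above falls back to the hgmerge script by default):

#!/usr/bin/env python
# Invoked as: prog <local> <base> <other>. It must leave the merge result in
# <local> and exit non-zero if the merge could not be completed cleanly.
import sys

def read(path):
    fp = open(path, "rb")
    try:
        return fp.read()
    finally:
        fp.close()

def main(local, base, other):
    l, b, o = read(local), read(base), read(other)
    if l == b:                      # only the other side changed: take theirs
        fp = open(local, "wb")
        fp.write(o)
        fp.close()
        return 0
    if o == b or o == l:            # only we changed, or both sides agree
        return 0
    return 1                        # genuine conflict: report failure

if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:4]))

Pointing the HGMERGE environment variable (or ui.merge in hgrc) at such a program is what the util.system call above picks up.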
1892 1902 def verify(self):
1893 1903 filelinkrevs = {}
1894 1904 filenodes = {}
1895 1905 changesets = revisions = files = 0
1896 1906 errors = [0]
1897 1907 warnings = [0]
1898 1908 neededmanifests = {}
1899 1909
1900 1910 def err(msg):
1901 1911 self.ui.warn(msg + "\n")
1902 1912 errors[0] += 1
1903 1913
1904 1914 def warn(msg):
1905 1915 self.ui.warn(msg + "\n")
1906 1916 warnings[0] += 1
1907 1917
1908 1918 def checksize(obj, name):
1909 1919 d = obj.checksize()
1910 1920 if d[0]:
1911 1921 err(_("%s data length off by %d bytes") % (name, d[0]))
1912 1922 if d[1]:
1913 1923 err(_("%s index contains %d extra bytes") % (name, d[1]))
1914 1924
1915 1925 def checkversion(obj, name):
1916 1926 if obj.version != revlog.REVLOGV0:
1917 1927 if not revlogv1:
1918 1928 warn(_("warning: `%s' uses revlog format 1") % name)
1919 1929 elif revlogv1:
1920 1930 warn(_("warning: `%s' uses revlog format 0") % name)
1921 1931
1922 1932 revlogv1 = self.revlogversion != revlog.REVLOGV0
1923 1933 if self.ui.verbose or revlogv1 != self.revlogv1:
1924 1934 self.ui.status(_("repository uses revlog format %d\n") %
1925 1935 (revlogv1 and 1 or 0))
1926 1936
1927 1937 seen = {}
1928 1938 self.ui.status(_("checking changesets\n"))
1929 1939 checksize(self.changelog, "changelog")
1930 1940
1931 1941 for i in range(self.changelog.count()):
1932 1942 changesets += 1
1933 1943 n = self.changelog.node(i)
1934 1944 l = self.changelog.linkrev(n)
1935 1945 if l != i:
1936 1946 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1937 1947 if n in seen:
1938 1948 err(_("duplicate changeset at revision %d") % i)
1939 1949 seen[n] = 1
1940 1950
1941 1951 for p in self.changelog.parents(n):
1942 1952 if p not in self.changelog.nodemap:
1943 1953 err(_("changeset %s has unknown parent %s") %
1944 1954 (short(n), short(p)))
1945 1955 try:
1946 1956 changes = self.changelog.read(n)
1947 1957 except KeyboardInterrupt:
1948 1958 self.ui.warn(_("interrupted"))
1949 1959 raise
1950 1960 except Exception, inst:
1951 1961 err(_("unpacking changeset %s: %s") % (short(n), inst))
1952 1962 continue
1953 1963
1954 1964 neededmanifests[changes[0]] = n
1955 1965
1956 1966 for f in changes[3]:
1957 1967 filelinkrevs.setdefault(f, []).append(i)
1958 1968
1959 1969 seen = {}
1960 1970 self.ui.status(_("checking manifests\n"))
1961 1971 checkversion(self.manifest, "manifest")
1962 1972 checksize(self.manifest, "manifest")
1963 1973
1964 1974 for i in range(self.manifest.count()):
1965 1975 n = self.manifest.node(i)
1966 1976 l = self.manifest.linkrev(n)
1967 1977
1968 1978 if l < 0 or l >= self.changelog.count():
1969 1979 err(_("bad manifest link (%d) at revision %d") % (l, i))
1970 1980
1971 1981 if n in neededmanifests:
1972 1982 del neededmanifests[n]
1973 1983
1974 1984 if n in seen:
1975 1985 err(_("duplicate manifest at revision %d") % i)
1976 1986
1977 1987 seen[n] = 1
1978 1988
1979 1989 for p in self.manifest.parents(n):
1980 1990 if p not in self.manifest.nodemap:
1981 1991 err(_("manifest %s has unknown parent %s") %
1982 1992 (short(n), short(p)))
1983 1993
1984 1994 try:
1985 1995 delta = mdiff.patchtext(self.manifest.delta(n))
1986 1996 except KeyboardInterrupt:
1987 1997 self.ui.warn(_("interrupted"))
1988 1998 raise
1989 1999 except Exception, inst:
1990 2000 err(_("unpacking manifest %s: %s") % (short(n), inst))
1991 2001 continue
1992 2002
1993 2003 try:
1994 2004 ff = [ l.split('\0') for l in delta.splitlines() ]
1995 2005 for f, fn in ff:
1996 2006 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1997 2007 except (ValueError, TypeError), inst:
1998 2008 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1999 2009
2000 2010 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2001 2011
2002 2012 for m, c in neededmanifests.items():
2003 2013 err(_("Changeset %s refers to unknown manifest %s") %
2004 2014 (short(m), short(c)))
2005 2015 del neededmanifests
2006 2016
2007 2017 for f in filenodes:
2008 2018 if f not in filelinkrevs:
2009 2019 err(_("file %s in manifest but not in changesets") % f)
2010 2020
2011 2021 for f in filelinkrevs:
2012 2022 if f not in filenodes:
2013 2023 err(_("file %s in changeset but not in manifest") % f)
2014 2024
2015 2025 self.ui.status(_("checking files\n"))
2016 2026 ff = filenodes.keys()
2017 2027 ff.sort()
2018 2028 for f in ff:
2019 2029 if f == "/dev/null":
2020 2030 continue
2021 2031 files += 1
2022 2032 if not f:
2023 2033 err(_("file without name in manifest %s") % short(n))
2024 2034 continue
2025 2035 fl = self.file(f)
2026 2036 checkversion(fl, f)
2027 2037 checksize(fl, f)
2028 2038
2029 2039 nodes = {nullid: 1}
2030 2040 seen = {}
2031 2041 for i in range(fl.count()):
2032 2042 revisions += 1
2033 2043 n = fl.node(i)
2034 2044
2035 2045 if n in seen:
2036 2046 err(_("%s: duplicate revision %d") % (f, i))
2037 2047 if n not in filenodes[f]:
2038 2048 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2039 2049 else:
2040 2050 del filenodes[f][n]
2041 2051
2042 2052 flr = fl.linkrev(n)
2043 2053 if flr not in filelinkrevs.get(f, []):
2044 2054 err(_("%s:%s points to unexpected changeset %d")
2045 2055 % (f, short(n), flr))
2046 2056 else:
2047 2057 filelinkrevs[f].remove(flr)
2048 2058
2049 2059 # verify contents
2050 2060 try:
2051 2061 t = fl.read(n)
2052 2062 except KeyboardInterrupt:
2053 2063 self.ui.warn(_("interrupted"))
2054 2064 raise
2055 2065 except Exception, inst:
2056 2066 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2057 2067
2058 2068 # verify parents
2059 2069 (p1, p2) = fl.parents(n)
2060 2070 if p1 not in nodes:
2061 2071 err(_("file %s:%s unknown parent 1 %s") %
2062 2072 (f, short(n), short(p1)))
2063 2073 if p2 not in nodes:
2064 2074 err(_("file %s:%s unknown parent 2 %s") %
2065 2075 (f, short(n), short(p2)))
2066 2076 nodes[n] = 1
2067 2077
2068 2078 # cross-check
2069 2079 for node in filenodes[f]:
2070 2080 err(_("node %s in manifests not in %s") % (hex(node), f))
2071 2081
2072 2082 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2073 2083 (files, changesets, revisions))
2074 2084
2075 2085 if warnings[0]:
2076 2086 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2077 2087 if errors[0]:
2078 2088 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2079 2089 return 1
2080 2090
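The heart of verify() is a two-way link check between changesets and filelogs: every changeset lists the files it touched, every file revision carries a linkrev back to a changeset, and both directions must agree. A compact sketch of that cross-check on plain Python data structures (hypothetical shapes, not the revlog API):

def crosscheck(changesets, filelogs):
    """changesets: list where entry i is the set of files changed by rev i.
    filelogs: dict mapping filename -> list of linkrevs, one per revision.
    Returns a list of error strings; empty means consistent."""
    errors = []
    expected = {}                       # filename -> revs claiming to touch it
    for rev, files in enumerate(changesets):
        for f in files:
            expected.setdefault(f, set()).add(rev)
    for f, linkrevs in filelogs.items():
        if f not in expected:
            errors.append("%s in filelogs but not in any changeset" % f)
            continue
        for lr in linkrevs:
            if lr not in expected[f]:
                errors.append("%s: linkrev %d points at a changeset that"
                              " does not list it" % (f, lr))
    for f in expected:
        if f not in filelogs:
            errors.append("%s in changesets but has no filelog" % f)
    return errors

# Example: rev 0 touches a and b, rev 1 touches only a, but b's filelog
# wrongly claims a revision linked to rev 1.
print(crosscheck([set(["a", "b"]), set(["a"])],
                 {"a": [0, 1], "b": [0, 1]}))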
2081 2091 # used to avoid circular references so destructors work
2082 2092 def aftertrans(base):
2083 2093 p = base
2084 2094 def a():
2085 2095 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2086 2096 util.rename(os.path.join(p, "journal.dirstate"),
2087 2097 os.path.join(p, "undo.dirstate"))
2088 2098 return a
2089 2099
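aftertrans() deliberately returns a plain closure that captures only the repository path. The comment explains why: the transaction machinery stores this callback, and if the callback instead held a reference back to an object that itself keeps the transaction alive, the resulting reference cycle could keep destructors from running when the transaction is dropped. A toy illustration of the distinction (simplified classes, not Mercurial's transaction API):

import os

class Transaction(object):
    """Toy stand-in for a transaction that runs a callback when it closes."""
    def __init__(self, after):
        self.after = after
    def close(self):
        self.after()

def aftertrans(base):
    # captures only the path string, so the transaction never references
    # the repository object and no cycle is formed
    def a():
        os.rename(os.path.join(base, "journal"), os.path.join(base, "undo"))
    return a

# By contrast, passing a bound method of the repository as the callback would
# make the transaction point at the repo; if the repo also keeps the
# transaction, the two form a cycle that only Python's cycle collector can
# reclaim, delaying the cleanup the destructors were meant to perform.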