Show More
The requested changes are too big and content was truncated. Show full diff
@@ -0,0 +1,9 b'' | |||
|
@echo off
rem Double-click this file to (re)build Mercurial for Windows in place.
rem Useful for testing and development.
cd ..\..
rem Remove stale build products so the rebuild starts clean.
del /Q mercurial\*.pyd
del /Q mercurial\*.pyc
rmdir /Q /S mercurial\locale
rem Build extensions in place (-i) and the translation catalogs.
python setup.py build_py -c -d . build_ext -i build_mo
pause
@@ -0,0 +1,176 b'' | |||
|
1 | # Mercurial bookmark support code | |
|
2 | # | |
|
3 | # Copyright 2008 David Soria Parra <dsp@php.net> | |
|
4 | # | |
|
5 | # This software may be used and distributed according to the terms of the | |
|
6 | # GNU General Public License version 2 or any later version. | |
|
7 | ||
|
8 | from mercurial.i18n import _ | |
|
9 | from mercurial.node import nullid, nullrev, bin, hex, short | |
|
10 | from mercurial import encoding, util | |
|
11 | import os | |
|
12 | ||
|
def valid(mark):
    '''Return True if mark contains none of the forbidden characters

    Colon, NUL, newline and carriage return would corrupt the
    one-bookmark-per-line storage format, so they are rejected.
    '''
    bad = ':\0\n\r'
    for ch in mark:
        if ch in bad:
            return False
    return True
|
18 | ||
|
def read(repo):
    '''Parse .hg/bookmarks file and return a dictionary

    Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
    in the .hg/bookmarks file.
    Read the file and return a (name=>nodeid) dictionary
    '''
    bookmarks = {}
    try:
        for line in repo.opener('bookmarks'):
            try:
                sha, refspec = line.strip().split(' ', 1)
                refspec = encoding.tolocal(refspec)
                bookmarks[refspec] = repo.changelog.lookup(sha)
            except (ValueError, KeyError):
                # best effort: skip malformed lines and bookmarks that
                # point at unknown revisions instead of discarding the
                # whole file (the old bare except also hid KeyboardInterrupt)
                pass
    except IOError:
        # no bookmarks file: treat as an empty set of bookmarks
        pass
    return bookmarks
|
35 | ||
|
def readcurrent(repo):
    '''Get the current bookmark

    If we use gittish branches we have a current bookmark that
    we are on. This function returns the name of the bookmark. It
    is stored in .hg/bookmarks.current
    '''
    # open directly (EAFP) instead of racing an os.path.exists() check
    try:
        fp = repo.opener('bookmarks.current')
    except IOError:
        return None
    try:
        # No readline() in posixfile_nt, reading everything is cheap
        mark = encoding.tolocal((fp.readlines() or [''])[0])
    finally:
        # close even if reading/decoding raises (old code leaked the fd)
        fp.close()
    # an empty file means "no current bookmark"
    return mark or None
|
52 | ||
|
def write(repo):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    refs = repo._bookmarks

    # back up the previous state so rollback can restore it
    try:
        bms = repo.opener('bookmarks').read()
    except IOError:
        bms = ''
    undofile = repo.opener('undo.bookmarks', 'w')
    undofile.write(bms)
    undofile.close()

    if repo._bookmarkcurrent not in refs:
        setcurrent(repo, None)
    for mark in refs:
        if not valid(mark):
            # translate the format string first, then interpolate, so the
            # gettext msgid stays stable (the %-operator was inside _())
            raise util.Abort(_("bookmark '%s' contains illegal character")
                             % mark)

    wlock = repo.wlock()
    try:
        # atomictemp: readers never see a half-written bookmarks file
        bkfile = repo.opener('bookmarks', 'w', atomictemp=True)
        for refspec, node in refs.iteritems():
            bkfile.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
        bkfile.rename()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            os.utime(repo.sjoin('00changelog.i'), None)
        except OSError:
            pass

    finally:
        wlock.release()
|
93 | ||
|
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        return

    refs = repo._bookmarks

    # do not update if we do update to a rev equal to the current bookmark
    if (mark and mark not in refs and
        current and refs[current] == repo.changectx('.').node()):
        return
    if mark not in refs:
        mark = ''
    if not valid(mark):
        # translate the format string first, then interpolate, so the
        # gettext msgid stays stable (the %-operator was inside _())
        raise util.Abort(_("bookmark '%s' contains illegal character")
                         % mark)

    wlock = repo.wlock()
    try:
        # atomictemp: never expose a half-written bookmarks.current
        curfile = repo.opener('bookmarks.current', 'w', atomictemp=True)
        curfile.write(mark)
        curfile.rename()
    finally:
        wlock.release()
    repo._bookmarkcurrent = mark
|
124 | ||
|
def update(repo, parents, node):
    '''Advance the current bookmark after a commit/update

    If the bookmark we are currently on points at one of the parents of
    the new node, move it to node and persist the change.
    '''
    marks = repo._bookmarks
    # do not shadow this function's own name with a local flag, and use
    # .get() so a stale current bookmark missing from the bookmark store
    # cannot raise KeyError
    cur = repo._bookmarkcurrent
    if cur and marks.get(cur) in parents:
        marks[cur] = node
        write(repo)
|
134 | ||
|
def listbookmarks(repo):
    '''Return a {name: hex nodeid} dict of all bookmarks on repo

    We may try to list bookmarks on a repo type that does not
    support it (e.g., statichttprepository); such repos simply
    report no bookmarks.
    '''
    if not hasattr(repo, '_bookmarks'):
        return {}
    return dict((mark, hex(node))
                for mark, node in repo._bookmarks.iteritems())
|
145 | ||
|
def pushbookmark(repo, key, old, new):
    '''Move bookmark key from hex node old to new; return success

    The caller must present the bookmark's current value (old) for the
    compare-and-set to succeed.  An empty new value deletes the
    bookmark.  Returns True on success, False on a stale old value or
    an unknown new revision.
    '''
    wlock = repo.wlock()
    try:
        marks = repo._bookmarks
        if hex(marks.get(key, '')) != old:
            # somebody moved the bookmark since the caller looked
            return False
        if new == '':
            del marks[key]
        elif new in repo:
            marks[key] = repo[new].node()
        else:
            # target revision unknown to this repo
            return False
        write(repo)
        return True
    finally:
        wlock.release()
|
162 | ||
|
def diff(ui, repo, remote):
    '''Print bookmarks that exist on remote but not locally

    Returns 0 if at least one changed bookmark was found and printed,
    1 otherwise (suitable as a command exit status).
    '''
    ui.status(_("searching for changed bookmarks\n"))

    lmarks = repo.listkeys('bookmarks')
    rmarks = remote.listkeys('bookmarks')

    # do not shadow this function's own name with the result list
    changed = sorted(set(rmarks) - set(lmarks))
    for k in changed:
        ui.write(" %-25s %s\n" % (k, rmarks[k][:12]))

    if not changed:
        ui.status(_("no changed bookmarks found\n"))
        return 1
    return 0
@@ -0,0 +1,29 b'' | |||
|
# Fake CGI environment for exercising hgweb/hgwebdir as if invoked by
# an Apache CGI gateway.  Each variable uses the portable Bourne-shell
# two-step form (assign, then export).
DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT
GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE
HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT
HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET
HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING
HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE
HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL
HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION
HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST
HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE
HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT
PATH_INFO="/"; export PATH_INFO
PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED
QUERY_STRING=""; export QUERY_STRING
REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR
REMOTE_PORT="44703"; export REMOTE_PORT
REQUEST_METHOD="GET"; export REQUEST_METHOD
REQUEST_URI="/test/"; export REQUEST_URI
SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME
SCRIPT_NAME="/test"; export SCRIPT_NAME
SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI
SCRIPT_URL="/test/"; export SCRIPT_URL
SERVER_ADDR="127.0.0.1"; export SERVER_ADDR
SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN
SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME
SERVER_PORT="80"; export SERVER_PORT
SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL
SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE
SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE
@@ -0,0 +1,10 b'' | |||
|
#!/usr/bin/env python

# Filter output by the progress extension to make it readable in tests:
# a progress bar redraws the current line with carriage returns, so each
# run of \r followed by a character is turned into a fresh line starting
# with that character.

import sys, re

def _tonewline(m):
    # keep only the character that followed the \r run, on its own line
    return '\n' + m.group()[-1:]

for line in sys.stdin:
    sys.stdout.write(re.sub(r'\r+[^\n]', _tonewline, line))
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100755 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: new file 100644 | |
The requested commit or file is too big and content was truncated. Show full diff |
@@ -9,6 +9,8 b' syntax: glob' | |||
|
9 | 9 | *.so |
|
10 | 10 | *.pyd |
|
11 | 11 | *.pyc |
|
12 | *.pyo | |
|
13 | *$py.class | |
|
12 | 14 | *.swp |
|
13 | 15 | *.prof |
|
14 | 16 | \#*\# |
@@ -45,7 +45,7 b' doc:' | |||
|
45 | 45 | clean: |
|
46 | 46 | -$(PYTHON) setup.py clean --all # ignore errors from this command |
|
47 | 47 | find . \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';' |
|
48 |
rm -f MANIFEST |
|
|
48 | rm -f MANIFEST tests/*.err | |
|
49 | 49 | rm -rf build mercurial/locale |
|
50 | 50 | $(MAKE) -C doc clean |
|
51 | 51 |
@@ -241,7 +241,7 b' shopt -s extglob' | |||
|
241 | 241 | _hg_tags |
|
242 | 242 | _hg_branches |
|
243 | 243 | ;; |
|
244 | commit) | |
|
244 | commit|record) | |
|
245 | 245 | _hg_status "mar" |
|
246 | 246 | ;; |
|
247 | 247 | remove) |
@@ -8,6 +8,7 b'' | |||
|
8 | 8 | # GNU General Public License version 2 or any later version. |
|
9 | 9 | |
|
10 | 10 | import re, glob, os, sys |
|
11 | import keyword | |
|
11 | 12 | import optparse |
|
12 | 13 | |
|
13 | 14 | def repquote(m): |
@@ -64,6 +65,7 b' testpats = [' | |||
|
64 | 65 | ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"), |
|
65 | 66 | (r'^source\b', "don't use 'source', use '.'"), |
|
66 | 67 | (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"), |
|
68 | (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"), | |
|
67 | 69 | ] |
|
68 | 70 | |
|
69 | 71 | testfilters = [ |
@@ -117,8 +119,8 b' pypats = [' | |||
|
117 | 119 | (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+', |
|
118 | 120 | "linebreak after :"), |
|
119 | 121 | (r'class\s[^(]:', "old-style class, use class foo(object)"), |
|
120 | (r'^\s+del\(', "del isn't a function"), | |
|
121 | (r'^\s+except\(', "except isn't a function"), | |
|
122 | (r'\b(%s)\(' % '|'.join(keyword.kwlist), | |
|
123 | "Python keyword is not a function"), | |
|
122 | 124 | (r',]', "unneeded trailing ',' in list"), |
|
123 | 125 | # (r'class\s[A-Z][^\(]*\((?!Exception)', |
|
124 | 126 | # "don't capitalize non-exception classes"), |
@@ -127,11 +129,15 b' pypats = [' | |||
|
127 | 129 | (r'[\x80-\xff]', "non-ASCII character literal"), |
|
128 | 130 | (r'("\')\.format\(', "str.format() not available in Python 2.4"), |
|
129 | 131 | (r'^\s*with\s+', "with not available in Python 2.4"), |
|
132 | (r'^\s*except.* as .*:', "except as not available in Python 2.4"), | |
|
133 | (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"), | |
|
130 | 134 | (r'(?<!def)\s+(any|all|format)\(', |
|
131 | 135 | "any/all/format not available in Python 2.4"), |
|
132 | 136 | (r'(?<!def)\s+(callable)\(', |
|
133 | 137 | "callable not available in Python 3, use hasattr(f, '__call__')"), |
|
134 | 138 | (r'if\s.*\selse', "if ... else form not available in Python 2.4"), |
|
139 | (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist), | |
|
140 | "gratuitous whitespace after Python keyword"), | |
|
135 | 141 | (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"), |
|
136 | 142 | # (r'\s\s=', "gratuitous whitespace before ="), |
|
137 | 143 | (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S', |
@@ -145,6 +151,9 b' pypats = [' | |||
|
145 | 151 | (r'raise Exception', "don't raise generic exceptions"), |
|
146 | 152 | (r'ui\.(status|progress|write|note|warn)\([\'\"]x', |
|
147 | 153 | "warning: unwrapped ui message"), |
|
154 | (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), | |
|
155 | (r' [=!]=\s+(True|False|None)', | |
|
156 | "comparison with singleton, use 'is' or 'is not' instead"), | |
|
148 | 157 | ] |
|
149 | 158 | |
|
150 | 159 | pyfilters = [ |
@@ -239,7 +248,9 b' def checkfile(f, logfunc=_defaultlogger.' | |||
|
239 | 248 | fc = 0 |
|
240 | 249 | if not re.match(match, f): |
|
241 | 250 | continue |
|
242 |
|
|
|
251 | fp = open(f) | |
|
252 | pre = post = fp.read() | |
|
253 | fp.close() | |
|
243 | 254 | if "no-" + "check-code" in pre: |
|
244 | 255 | break |
|
245 | 256 | for p, r in filters: |
@@ -482,7 +482,7 b' proc makewindow {} {' | |||
|
482 | 482 | .bar.file add command -label "Quit" -command doquit |
|
483 | 483 | menu .bar.help |
|
484 | 484 | .bar add cascade -label "Help" -menu .bar.help |
|
485 |
.bar.help add command -label "About g |
|
|
485 | .bar.help add command -label "About hgk" -command about | |
|
486 | 486 | . configure -menu .bar |
|
487 | 487 | |
|
488 | 488 | if {![info exists geometry(canv1)]} { |
@@ -867,9 +867,9 b' proc about {} {' | |||
|
867 | 867 | return |
|
868 | 868 | } |
|
869 | 869 | toplevel $w |
|
870 |
wm title $w "About g |
|
|
870 | wm title $w "About hgk" | |
|
871 | 871 | message $w.m -text { |
|
872 |
|
|
|
872 | Hgk version 1.2 | |
|
873 | 873 | |
|
874 | 874 | Copyright � 2005 Paul Mackerras |
|
875 | 875 |
@@ -80,11 +80,12 b' def perfmanifest(ui, repo):' | |||
|
80 | 80 | timer(d) |
|
81 | 81 | |
|
82 | 82 | def perfindex(ui, repo): |
|
83 |
import mercurial. |
|
|
83 | import mercurial.revlog | |
|
84 | mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg | |
|
85 | n = repo["tip"].node() | |
|
84 | 86 | def d(): |
|
85 | t = repo.changelog.tip() | |
|
86 | repo.changelog = mercurial.changelog.changelog(repo.sopener) | |
|
87 | repo.changelog._loadindexmap() | |
|
87 | repo.invalidate() | |
|
88 | repo[n] | |
|
88 | 89 | timer(d) |
|
89 | 90 | |
|
90 | 91 | def perfstartup(ui, repo): |
@@ -16,23 +16,14 b'' | |||
|
16 | 16 | <File Name="mercurial.parsers.pyd" /> |
|
17 | 17 | <File Name="pyexpat.pyd" /> |
|
18 | 18 | <File Name="python26.dll" /> |
|
19 | <File Name="pythoncom26.dll" /> | |
|
20 | <File Name="pywintypes26.dll" /> | |
|
21 | 19 | <File Name="bz2.pyd" /> |
|
22 | 20 | <File Name="select.pyd" /> |
|
23 | 21 | <File Name="unicodedata.pyd" /> |
|
24 |
<File Name=" |
|
|
25 | <File Name="win32com.shell.shell.pyd" /> | |
|
26 | <File Name="win32console.pyd" /> | |
|
27 | <File Name="win32file.pyd" /> | |
|
28 | <File Name="win32gui.pyd" /> | |
|
29 | <File Name="win32pipe.pyd" /> | |
|
30 | <File Name="win32process.pyd" /> | |
|
22 | <File Name="_ctypes.pyd" /> | |
|
31 | 23 | <File Name="_elementtree.pyd" /> |
|
32 | 24 | <File Name="_hashlib.pyd" /> |
|
33 | 25 | <File Name="_socket.pyd" /> |
|
34 | 26 | <File Name="_ssl.pyd" /> |
|
35 | <File Name="_win32sysloader.pyd" /> | |
|
36 | 27 | </Component> |
|
37 | 28 | </DirectoryRef> |
|
38 | 29 | </Fragment> |
@@ -9,7 +9,7 b'' | |||
|
9 | 9 | <?define contrib.vim.guid = {BB04903A-652D-4C4F-9590-2BD07A2304F2} ?> |
|
10 | 10 | |
|
11 | 11 | <!-- dist.wxs --> |
|
12 |
<?define dist.guid = { |
|
|
12 | <?define dist.guid = {C3B634A4-1B05-4A40-94A9-38EE853CF693} ?> | |
|
13 | 13 | |
|
14 | 14 | <!-- doc.wxs --> |
|
15 | 15 | <?define doc.hg.1.html.guid = {AAAA3FDA-EDC5-4220-B59D-D342722358A2} ?> |
@@ -1,11 +1,13 b'' | |||
|
1 | 1 | SOURCES=$(wildcard *.[0-9].txt) |
|
2 | 2 | MAN=$(SOURCES:%.txt=%) |
|
3 | 3 | HTML=$(SOURCES:%.txt=%.html) |
|
4 |
GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py |
|
|
4 | GENDOC=gendoc.py ../mercurial/commands.py ../mercurial/help.py \ | |
|
5 | ../mercurial/help/*.txt ../hgext/*.py ../hgext/*/__init__.py | |
|
5 | 6 | PREFIX=/usr/local |
|
6 | 7 | MANDIR=$(PREFIX)/share/man |
|
7 | 8 | INSTALL=install -c -m 644 |
|
8 | 9 | PYTHON=python |
|
10 | RSTARGS= | |
|
9 | 11 | |
|
10 | 12 | export LANGUAGE=C |
|
11 | 13 | export LC_ALL=C |
@@ -24,11 +26,11 b' hg.1.gendoc.txt: $(GENDOC)' | |||
|
24 | 26 | mv $@.tmp $@ |
|
25 | 27 | |
|
26 | 28 | %: %.txt common.txt |
|
27 | $(PYTHON) runrst hgmanpage --halt warning \ | |
|
29 | $(PYTHON) runrst hgmanpage $(RSTARGS) --halt warning \ | |
|
28 | 30 | --strip-elements-with-class htmlonly $*.txt $* |
|
29 | 31 | |
|
30 | 32 | %.html: %.txt common.txt |
|
31 | $(PYTHON) runrst html --halt warning \ | |
|
33 | $(PYTHON) runrst html $(RSTARGS) --halt warning \ | |
|
32 | 34 | --link-stylesheet --stylesheet-path style.css $*.txt $*.html |
|
33 | 35 | |
|
34 | 36 | MANIFEST: man html |
@@ -40,7 +40,7 b' def get_opts(opts):' | |||
|
40 | 40 | if longopt: |
|
41 | 41 | allopts.append("--%s" % longopt) |
|
42 | 42 | desc += default and _(" (default: %s)") % default or "" |
|
43 | yield(", ".join(allopts), desc) | |
|
43 | yield (", ".join(allopts), desc) | |
|
44 | 44 | |
|
45 | 45 | def get_cmd(cmd, cmdtable): |
|
46 | 46 | d = {} |
@@ -143,7 +143,7 b' def commandprinter(ui, cmdtable, section' | |||
|
143 | 143 | opt_output = list(d['opts']) |
|
144 | 144 | if opt_output: |
|
145 | 145 | opts_len = max([len(line[0]) for line in opt_output]) |
|
146 |
ui.write(_(" |
|
|
146 | ui.write(_("Options:\n\n")) | |
|
147 | 147 | for optstr, desc in opt_output: |
|
148 | 148 | if desc: |
|
149 | 149 | s = "%-*s %s" % (opts_len, optstr, desc) |
@@ -330,8 +330,8 b' to the aliases of the commands defined.' | |||
|
330 | 330 | ``diff`` |
|
331 | 331 | """""""" |
|
332 | 332 | |
|
333 |
Settings used when displaying diffs. |
|
|
334 | defaults to False. | |
|
333 | Settings used when displaying diffs. Everything except for ``unified`` is a | |
|
334 | Boolean and defaults to False. | |
|
335 | 335 | |
|
336 | 336 | ``git`` |
|
337 | 337 | Use git extended diff format. |
@@ -345,6 +345,8 b' defaults to False.' | |||
|
345 | 345 | Ignore changes in the amount of white space. |
|
346 | 346 | ``ignoreblanklines`` |
|
347 | 347 | Ignore changes whose lines are all blank. |
|
348 | ``unified`` | |
|
349 | Number of lines of context to show. | |
|
348 | 350 | |
|
349 | 351 | ``email`` |
|
350 | 352 | """"""""" |
@@ -727,8 +729,8 b' Configuration for extensions that need t' | |||
|
727 | 729 | ``port`` |
|
728 | 730 | Optional. Port to connect to on mail server. Default: 25. |
|
729 | 731 | ``tls`` |
|
730 |
Optional. |
|
|
731 |
|
|
|
732 | Optional. Method to enable TLS when connecting to mail server: starttls, | |
|
733 | smtps or none. Default: none. | |
|
732 | 734 | ``username`` |
|
733 | 735 | Optional. User name for authenticating with the SMTP server. |
|
734 | 736 | Default: none. |
@@ -876,6 +878,11 b' User interface controls.' | |||
|
876 | 878 | be prompted to enter a username. If no username is entered, the |
|
877 | 879 | default ``USER@HOST`` is used instead. |
|
878 | 880 | Default is False. |
|
881 | ``commitsubrepos`` | |
|
882 | Whether to commit modified subrepositories when committing the | |
|
883 | parent repository. If False and one subrepository has uncommitted | |
|
884 | changes, abort the commit. | |
|
885 | Default is True. | |
|
879 | 886 | ``debug`` |
|
880 | 887 | Print debugging information. True or False. Default is False. |
|
881 | 888 | ``editor`` |
@@ -92,6 +92,7 b' from mercurial.i18n import _' | |||
|
92 | 92 | 'cyan_background': 46, 'white_background': 47} |
|
93 | 93 | |
|
94 | 94 | _styles = {'grep.match': 'red bold', |
|
95 | 'bookmarks.current': 'green', | |
|
95 | 96 | 'branches.active': 'none', |
|
96 | 97 | 'branches.closed': 'black bold', |
|
97 | 98 | 'branches.current': 'green', |
@@ -59,10 +59,10 b' def convert(ui, src, dest=None, revmapfi' | |||
|
59 | 59 | --sourcesort try to preserve source revisions order, only |
|
60 | 60 | supported by Mercurial sources. |
|
61 | 61 | |
|
62 |
If |
|
|
63 |
(<dest>/.hg/shamap by default). The |
|
|
64 |
that maps each source commit ID to the destination ID |
|
|
65 | revision, like so:: | |
|
62 | If ``REVMAP`` isn't given, it will be put in a default location | |
|
63 | (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple | |
|
64 | text file that maps each source commit ID to the destination ID | |
|
65 | for that revision, like so:: | |
|
66 | 66 | |
|
67 | 67 | <source ID> <destination ID> |
|
68 | 68 | |
@@ -138,15 +138,19 b' def convert(ui, src, dest=None, revmapfi' | |||
|
138 | 138 | Mercurial Source |
|
139 | 139 | '''''''''''''''' |
|
140 | 140 | |
|
141 | --config convert.hg.ignoreerrors=False (boolean) | |
|
142 | ignore integrity errors when reading. Use it to fix Mercurial | |
|
143 | repositories with missing revlogs, by converting from and to | |
|
144 | Mercurial. | |
|
145 | --config convert.hg.saverev=False (boolean) | |
|
146 | store original revision ID in changeset (forces target IDs to | |
|
147 | change) | |
|
148 | --config convert.hg.startrev=0 (hg revision identifier) | |
|
149 | convert start revision and its descendants | |
|
141 | The Mercurial source recognizes the following configuration | |
|
142 | options, which you can set on the command line with ``--config``: | |
|
143 | ||
|
144 | :convert.hg.ignoreerrors: ignore integrity errors when reading. | |
|
145 | Use it to fix Mercurial repositories with missing revlogs, by | |
|
146 | converting from and to Mercurial. Default is False. | |
|
147 | ||
|
148 | :convert.hg.saverev: store original. revision ID in changeset | |
|
149 | (forces target IDs to change). It takes and boolean argument | |
|
150 | and defaults to False. | |
|
151 | ||
|
152 | :convert.hg.startrev: convert start revision and its descendants. | |
|
153 | It takes a hg revision identifier and defaults to 0. | |
|
150 | 154 | |
|
151 | 155 | CVS Source |
|
152 | 156 | '''''''''' |
@@ -154,42 +158,46 b' def convert(ui, src, dest=None, revmapfi' | |||
|
154 | 158 | CVS source will use a sandbox (i.e. a checked-out copy) from CVS |
|
155 | 159 | to indicate the starting point of what will be converted. Direct |
|
156 | 160 | access to the repository files is not needed, unless of course the |
|
157 |
repository is :local:. The conversion uses the top level |
|
|
158 |
in the sandbox to find the CVS repository, and then uses |
|
|
159 |
commands to find files to convert. This means that unless |
|
|
160 | filemap is given, all files under the starting directory will be | |
|
161 | repository is ``:local:``. The conversion uses the top level | |
|
162 | directory in the sandbox to find the CVS repository, and then uses | |
|
163 | CVS rlog commands to find files to convert. This means that unless | |
|
164 | a filemap is given, all files under the starting directory will be | |
|
161 | 165 | converted, and that any directory reorganization in the CVS |
|
162 | 166 | sandbox is ignored. |
|
163 | 167 | |
|
164 | The options shown are the defaults. | |
|
168 | The following options can be used with ``--config``: | |
|
169 | ||
|
170 | :convert.cvsps.cache: Set to False to disable remote log caching, | |
|
171 | for testing and debugging purposes. Default is True. | |
|
172 | ||
|
173 | :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is | |
|
174 | allowed between commits with identical user and log message in | |
|
175 | a single changeset. When very large files were checked in as | |
|
176 | part of a changeset then the default may not be long enough. | |
|
177 | The default is 60. | |
|
165 | 178 | |
|
166 | --config convert.cvsps.cache=True (boolean) | |
|
167 | Set to False to disable remote log caching, for testing and | |
|
168 | debugging purposes. | |
|
169 | --config convert.cvsps.fuzz=60 (integer) | |
|
170 | Specify the maximum time (in seconds) that is allowed between | |
|
171 | commits with identical user and log message in a single | |
|
172 | changeset. When very large files were checked in as part of a | |
|
173 | changeset then the default may not be long enough. | |
|
174 | --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}' | |
|
175 | Specify a regular expression to which commit log messages are | |
|
176 | matched. If a match occurs, then the conversion process will | |
|
177 | insert a dummy revision merging the branch on which this log | |
|
178 | message occurs to the branch indicated in the regex. | |
|
179 | --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}' | |
|
180 | Specify a regular expression to which commit log messages are | |
|
181 | matched. If a match occurs, then the conversion process will | |
|
182 | add the most recent revision on the branch indicated in the | |
|
183 | regex as the second parent of the changeset. | |
|
184 | --config hook.cvslog | |
|
185 | Specify a Python function to be called at the end of gathering | |
|
186 |
|
|
|
187 |
|
|
|
188 | --config hook.cvschangesets | |
|
189 | Specify a Python function to be called after the changesets | |
|
190 | are calculated from the the CVS log. The function is passed | |
|
191 | a list with the changeset entries, and can modify the changesets | |
|
192 | in-place, or add or delete them. | |
|
179 | :convert.cvsps.mergeto: Specify a regular expression to which | |
|
180 | commit log messages are matched. If a match occurs, then the | |
|
181 | conversion process will insert a dummy revision merging the | |
|
182 | branch on which this log message occurs to the branch | |
|
183 | indicated in the regex. Default is ``{{mergetobranch | |
|
184 | ([-\\w]+)}}`` | |
|
185 | ||
|
186 | :convert.cvsps.mergefrom: Specify a regular expression to which | |
|
187 | commit log messages are matched. If a match occurs, then the | |
|
188 | conversion process will add the most recent revision on the | |
|
189 | branch indicated in the regex as the second parent of the | |
|
190 | changeset. Default is ``{{mergefrombranch ([-\\w]+)}}`` | |
|
191 | ||
|
192 | :hook.cvslog: Specify a Python function to be called at the end of | |
|
193 | gathering the CVS log. The function is passed a list with the | |
|
194 | log entries, and can modify the entries in-place, or add or | |
|
195 | delete them. | |
|
196 | ||
|
197 | :hook.cvschangesets: Specify a Python function to be called after | |
|
198 | the changesets are calculated from the the CVS log. The | |
|
199 | function is passed a list with the changeset entries, and can | |
|
200 | modify the changesets in-place, or add or delete them. | |
|
193 | 201 | |
|
194 | 202 | An additional "debugcvsps" Mercurial command allows the builtin |
|
195 | 203 | changeset merging code to be run without doing a conversion. Its |
@@ -200,29 +208,33 b' def convert(ui, src, dest=None, revmapfi' | |||
|
200 | 208 | ''''''''''''''''' |
|
201 | 209 | |
|
202 | 210 | Subversion source detects classical trunk/branches/tags layouts. |
|
203 |
By default, the supplied |
|
|
204 |
converted as a single branch. If |
|
|
205 |
replaces the default branch. If |
|
|
206 | its subdirectories are listed as possible branches. If | |
|
207 |
|
|
|
208 |
converted branches. Default |
|
|
209 | can be overridden with following options. Set them to paths | |
|
211 | By default, the supplied ``svn://repo/path/`` source URL is | |
|
212 | converted as a single branch. If ``svn://repo/path/trunk`` exists | |
|
213 | it replaces the default branch. If ``svn://repo/path/branches`` | |
|
214 | exists, its subdirectories are listed as possible branches. If | |
|
215 | ``svn://repo/path/tags`` exists, it is looked for tags referencing | |
|
216 | converted branches. Default ``trunk``, ``branches`` and ``tags`` | |
|
217 | values can be overridden with following options. Set them to paths | |
|
210 | 218 | relative to the source URL, or leave them blank to disable auto |
|
211 | 219 | detection. |
|
212 | 220 | |
|
213 | --config convert.svn.branches=branches (directory name) | |
|
214 | specify the directory containing branches | |
|
215 | --config convert.svn.tags=tags (directory name) | |
|
216 | specify the directory containing tags | |
|
217 | --config convert.svn.trunk=trunk (directory name) | |
|
218 | specify the name of the trunk branch | |
|
221 | The following options can be set with ``--config``: | |
|
222 | ||
|
223 | :convert.svn.branches: specify the directory containing branches. | |
|
224 | The defaults is ``branches``. | |
|
225 | ||
|
226 | :convert.svn.tags: specify the directory containing tags. The | |
|
227 | default is ``tags``. | |
|
228 | ||
|
229 | :convert.svn.trunk: specify the name of the trunk branch The | |
|
230 | defauls is ``trunk``. | |
|
219 | 231 | |
|
220 | 232 | Source history can be retrieved starting at a specific revision, |
|
221 | 233 | instead of being integrally converted. Only single branch |
|
222 | 234 | conversions are supported. |
|
223 | 235 | |
|
224 |
|
|
|
225 | specify start Subversion revision. | |
|
236 | :convert.svn.startrev: specify start Subversion revision number. | |
|
237 | The default is 0. | |
|
226 | 238 | |
|
227 | 239 | Perforce Source |
|
228 | 240 | ''''''''''''''' |
@@ -232,24 +244,27 b' def convert(ui, src, dest=None, revmapfi' | |||
|
232 | 244 | source to a flat Mercurial repository, ignoring labels, branches |
|
233 | 245 | and integrations. Note that when a depot path is given you then |
|
234 | 246 | usually should specify a target directory, because otherwise the |
|
235 | target may be named ...-hg. | |
|
247 | target may be named ``...-hg``. | |
|
236 | 248 | |
|
237 | 249 | It is possible to limit the amount of source history to be |
|
238 |
converted by specifying an initial Perforce revision |
|
|
250 | converted by specifying an initial Perforce revision: | |
|
239 | 251 | |
|
240 | --config convert.p4.startrev=0 (perforce changelist number) | |
|
241 | specify initial Perforce revision. | |
|
252 | :convert.p4.startrev: specify initial Perforce revision, a | |
|
253 | Perforce changelist number). | |
|
242 | 254 | |
|
243 | 255 | Mercurial Destination |
|
244 | 256 | ''''''''''''''''''''' |
|
245 | 257 | |
|
246 | --config convert.hg.clonebranches=False (boolean) | |
|
247 | dispatch source branches in separate clones. | |
|
248 | --config convert.hg.tagsbranch=default (branch name) | |
|
249 | tag revisions branch name | |
|
250 | --config convert.hg.usebranchnames=True (boolean) | |
|
251 | preserve branch names | |
|
258 | The following options are supported: | |
|
259 | ||
|
260 | :convert.hg.clonebranches: dispatch source branches in separate | |
|
261 | clones. The default is False. | |
|
252 | 262 | |
|
263 | :convert.hg.tagsbranch: branch name for tag revisions, defaults to | |
|
264 | ``default``. | |
|
265 | ||
|
266 | :convert.hg.usebranchnames: preserve branch names. The default is | |
|
267 | True | |
|
253 | 268 | """ |
|
254 | 269 | return convcmd.convert(ui, src, dest, revmapfile, **opts) |
|
255 | 270 |
@@ -914,7 +914,7 b' class svn_source(converter_source):' | |||
|
914 | 914 | arg = encodeargs(args) |
|
915 | 915 | hgexe = util.hgexecutable() |
|
916 | 916 | cmd = '%s debugsvnlog' % util.shellquote(hgexe) |
|
917 | stdin, stdout = util.popen2(cmd) | |
|
917 | stdin, stdout = util.popen2(util.quotecommand(cmd)) | |
|
918 | 918 | stdin.write(arg) |
|
919 | 919 | try: |
|
920 | 920 | stdin.close() |
@@ -67,6 +67,11 b' behavior. There are two settings:' | |||
|
67 | 67 | Such files are normally not touched under the assumption that they |
|
68 | 68 | have mixed EOLs on purpose. |
|
69 | 69 | |
|
70 | The extension provides ``cleverencode:`` and ``cleverdecode:`` filters | |
|
71 | like the deprecated win32text extension does. This means that you can | |
|
72 | disable win32text and enable eol and your filters will still work. You | |
|
73 | only need to these filters until you have prepared a ``.hgeol`` file. | |
|
74 | ||
|
70 | 75 | The ``win32text.forbid*`` hooks provided by the win32text extension |
|
71 | 76 | have been unified into a single hook named ``eol.hook``. The hook will |
|
72 | 77 | lookup the expected line endings from the ``.hgeol`` file, which means |
@@ -115,6 +120,9 b' filters = {' | |||
|
115 | 120 | 'to-lf': tolf, |
|
116 | 121 | 'to-crlf': tocrlf, |
|
117 | 122 | 'is-binary': isbinary, |
|
123 | # The following provide backwards compatibility with win32text | |
|
124 | 'cleverencode:': tolf, | |
|
125 | 'cleverdecode:': tocrlf | |
|
118 | 126 | } |
|
119 | 127 | |
|
120 | 128 |
@@ -244,7 +244,9 b' def sign(ui, repo, *revs, **opts):' | |||
|
244 | 244 | "(please commit .hgsigs manually " |
|
245 | 245 | "or use --force)")) |
|
246 | 246 | |
|
247 |
repo.wfile(".hgsigs", "ab") |
|
|
247 | sigsfile = repo.wfile(".hgsigs", "ab") | |
|
248 | sigsfile.write(sigmessage) | |
|
249 | sigsfile.close() | |
|
248 | 250 | |
|
249 | 251 | if '.hgsigs' not in repo.dirstate: |
|
250 | 252 | repo[None].add([".hgsigs"]) |
@@ -181,14 +181,14 b' def revtree(ui, args, repo, full="tree",' | |||
|
181 | 181 | if i + x >= count: |
|
182 | 182 | l[chunk - x:] = [0] * (chunk - x) |
|
183 | 183 | break |
|
184 |
if full |
|
|
184 | if full is not None: | |
|
185 | 185 | l[x] = repo[i + x] |
|
186 | 186 | l[x].changeset() # force reading |
|
187 | 187 | else: |
|
188 | 188 | l[x] = 1 |
|
189 | 189 | for x in xrange(chunk - 1, -1, -1): |
|
190 | 190 | if l[x] != 0: |
|
191 |
yield (i + x, full |
|
|
191 | yield (i + x, full is not None and l[x] or None) | |
|
192 | 192 | if i == 0: |
|
193 | 193 | break |
|
194 | 194 |
@@ -26,7 +26,10 b" procfs_path = '/proc/sys/fs/inotify'" | |||
|
26 | 26 | def _read_procfs_value(name): |
|
27 | 27 | def read_value(): |
|
28 | 28 | try: |
|
29 |
|
|
|
29 | fp = open(procfs_path + '/' + name) | |
|
30 | r = int(fp.read()) | |
|
31 | fp.close() | |
|
32 | return r | |
|
30 | 33 | except OSError: |
|
31 | 34 | return None |
|
32 | 35 |
@@ -70,9 +70,8 b' The default template mappings (view with' | |||
|
70 | 70 | replaced with customized keywords and templates. Again, run |
|
71 | 71 | :hg:`kwdemo` to control the results of your configuration changes. |
|
72 | 72 | |
|
73 |
Before changing/disabling active keywords, run :hg:`kwshrink` |
|
|
74 |
t |
|
|
75 | history. | |
|
73 | Before changing/disabling active keywords, you must run :hg:`kwshrink` | |
|
74 | to avoid storing expanded keywords in the change history. | |
|
76 | 75 | |
|
77 | 76 | To force expansion after enabling it, or a configuration change, run |
|
78 | 77 | :hg:`kwexpand`. |
@@ -101,6 +100,14 b" restricted = 'merge kwexpand kwshrink re" | |||
|
101 | 100 | # names of extensions using dorecord |
|
102 | 101 | recordextensions = 'record' |
|
103 | 102 | |
|
103 | colortable = { | |
|
104 | 'kwfiles.enabled': 'green bold', | |
|
105 | 'kwfiles.deleted': 'cyan bold underline', | |
|
106 | 'kwfiles.enabledunknown': 'green', | |
|
107 | 'kwfiles.ignored': 'bold', | |
|
108 | 'kwfiles.ignoredunknown': 'none' | |
|
109 | } | |
|
110 | ||
|
104 | 111 | # date like in cvs' $Date |
|
105 | 112 | utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S') |
|
106 | 113 | # date like in svn's $Date |
@@ -111,7 +118,6 b' svnutcdate = lambda x: util.datestr((x[0' | |||
|
111 | 118 | # make keyword tools accessible |
|
112 | 119 | kwtools = {'templater': None, 'hgcmd': ''} |
|
113 | 120 | |
|
114 | ||
|
115 | 121 | def _defaultkwmaps(ui): |
|
116 | 122 | '''Returns default keywordmaps according to keywordset configuration.''' |
|
117 | 123 | templates = { |
@@ -170,14 +176,25 b' class kwtemplater(object):' | |||
|
170 | 176 | for k, v in kwmaps) |
|
171 | 177 | else: |
|
172 | 178 | self.templates = _defaultkwmaps(self.ui) |
|
173 | escaped = '|'.join(map(re.escape, self.templates.keys())) | |
|
174 | self.re_kw = re.compile(r'\$(%s)\$' % escaped) | |
|
175 | self.re_kwexp = re.compile(r'\$(%s): [^$\n\r]*? \$' % escaped) | |
|
176 | ||
|
177 | 179 | templatefilters.filters.update({'utcdate': utcdate, |
|
178 | 180 | 'svnisodate': svnisodate, |
|
179 | 181 | 'svnutcdate': svnutcdate}) |
|
180 | 182 | |
|
183 | @util.propertycache | |
|
184 | def escape(self): | |
|
185 | '''Returns bar-separated and escaped keywords.''' | |
|
186 | return '|'.join(map(re.escape, self.templates.keys())) | |
|
187 | ||
|
188 | @util.propertycache | |
|
189 | def rekw(self): | |
|
190 | '''Returns regex for unexpanded keywords.''' | |
|
191 | return re.compile(r'\$(%s)\$' % self.escape) | |
|
192 | ||
|
193 | @util.propertycache | |
|
194 | def rekwexp(self): | |
|
195 | '''Returns regex for expanded keywords.''' | |
|
196 | return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape) | |
|
197 | ||
|
181 | 198 | def substitute(self, data, path, ctx, subfunc): |
|
182 | 199 | '''Replaces keywords in data with expanded template.''' |
|
183 | 200 | def kwsub(mobj): |
@@ -191,11 +208,15 b' class kwtemplater(object):' | |||
|
191 | 208 | return '$%s: %s $' % (kw, ekw) |
|
192 | 209 | return subfunc(kwsub, data) |
|
193 | 210 | |
|
211 | def linkctx(self, path, fileid): | |
|
212 | '''Similar to filelog.linkrev, but returns a changectx.''' | |
|
213 | return self.repo.filectx(path, fileid=fileid).changectx() | |
|
214 | ||
|
194 | 215 | def expand(self, path, node, data): |
|
195 | 216 | '''Returns data with keywords expanded.''' |
|
196 | 217 | if not self.restrict and self.match(path) and not util.binary(data): |
|
197 |
ctx = self. |
|
|
198 |
return self.substitute(data, path, ctx, self.re |
|
|
218 | ctx = self.linkctx(path, node) | |
|
219 | return self.substitute(data, path, ctx, self.rekw.sub) | |
|
199 | 220 | return data |
|
200 | 221 | |
|
201 | 222 | def iskwfile(self, cand, ctx): |
@@ -212,8 +233,8 b' class kwtemplater(object):' | |||
|
212 | 233 | kwcmd = self.restrict and lookup # kwexpand/kwshrink |
|
213 | 234 | if self.restrict or expand and lookup: |
|
214 | 235 | mf = ctx.manifest() |
|
215 |
|
|
|
216 |
|
|
|
236 | lctx = ctx | |
|
237 | re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp | |
|
217 | 238 | msg = (expand and _('overwriting %s expanding keywords\n') |
|
218 | 239 | or _('overwriting %s shrinking keywords\n')) |
|
219 | 240 | for f in candidates: |
@@ -225,12 +246,12 b' class kwtemplater(object):' | |||
|
225 | 246 | continue |
|
226 | 247 | if expand: |
|
227 | 248 | if lookup: |
|
228 |
|
|
|
229 |
data, found = self.substitute(data, f, |
|
|
249 | lctx = self.linkctx(f, mf[f]) | |
|
250 | data, found = self.substitute(data, f, lctx, re_kw.subn) | |
|
230 | 251 | elif self.restrict: |
|
231 |
found = |
|
|
252 | found = re_kw.search(data) | |
|
232 | 253 | else: |
|
233 | data, found = _shrinktext(data, subn) | |
|
254 | data, found = _shrinktext(data, re_kw.subn) | |
|
234 | 255 | if found: |
|
235 | 256 | self.ui.note(msg % f) |
|
236 | 257 | self.repo.wwrite(f, data, ctx.flags(f)) |
@@ -242,7 +263,7 b' class kwtemplater(object):' | |||
|
242 | 263 | def shrink(self, fname, text): |
|
243 | 264 | '''Returns text with all keyword substitutions removed.''' |
|
244 | 265 | if self.match(fname) and not util.binary(text): |
|
245 |
return _shrinktext(text, self.re |
|
|
266 | return _shrinktext(text, self.rekwexp.sub) | |
|
246 | 267 | return text |
|
247 | 268 | |
|
248 | 269 | def shrinklines(self, fname, lines): |
@@ -250,7 +271,7 b' class kwtemplater(object):' | |||
|
250 | 271 | if self.match(fname): |
|
251 | 272 | text = ''.join(lines) |
|
252 | 273 | if not util.binary(text): |
|
253 |
return _shrinktext(text, self.re |
|
|
274 | return _shrinktext(text, self.rekwexp.sub).splitlines(True) | |
|
254 | 275 | return lines |
|
255 | 276 | |
|
256 | 277 | def wread(self, fname, data): |
@@ -334,6 +355,9 b' def demo(ui, repo, *args, **opts):' | |||
|
334 | 355 | ui.note(_('creating temporary repository at %s\n') % tmpdir) |
|
335 | 356 | repo = localrepo.localrepository(ui, tmpdir, True) |
|
336 | 357 | ui.setconfig('keyword', fn, '') |
|
358 | svn = ui.configbool('keywordset', 'svn') | |
|
359 | # explicitly set keywordset for demo output | |
|
360 | ui.setconfig('keywordset', 'svn', svn) | |
|
337 | 361 | |
|
338 | 362 | uikwmaps = ui.configitems('keywordmaps') |
|
339 | 363 | if args or opts.get('rcfile'): |
@@ -341,7 +365,10 b' def demo(ui, repo, *args, **opts):' | |||
|
341 | 365 | if uikwmaps: |
|
342 | 366 | ui.status(_('\textending current template maps\n')) |
|
343 | 367 | if opts.get('default') or not uikwmaps: |
|
344 | ui.status(_('\toverriding default template maps\n')) | |
|
368 | if svn: | |
|
369 | ui.status(_('\toverriding default svn keywordset\n')) | |
|
370 | else: | |
|
371 | ui.status(_('\toverriding default cvs keywordset\n')) | |
|
345 | 372 | if opts.get('rcfile'): |
|
346 | 373 | ui.readconfig(opts.get('rcfile')) |
|
347 | 374 | if args: |
@@ -353,7 +380,10 b' def demo(ui, repo, *args, **opts):' | |||
|
353 | 380 | ui.readconfig(repo.join('hgrc')) |
|
354 | 381 | kwmaps = dict(ui.configitems('keywordmaps')) |
|
355 | 382 | elif opts.get('default'): |
|
356 | ui.status(_('\n\tconfiguration using default keyword template maps\n')) | |
|
383 | if svn: | |
|
384 | ui.status(_('\n\tconfiguration using default svn keywordset\n')) | |
|
385 | else: | |
|
386 | ui.status(_('\n\tconfiguration using default cvs keywordset\n')) | |
|
357 | 387 | kwmaps = _defaultkwmaps(ui) |
|
358 | 388 | if uikwmaps: |
|
359 | 389 | ui.status(_('\tdisabling current template maps\n')) |
@@ -367,6 +397,7 b' def demo(ui, repo, *args, **opts):' | |||
|
367 | 397 | reposetup(ui, repo) |
|
368 | 398 | ui.write('[extensions]\nkeyword =\n') |
|
369 | 399 | demoitems('keyword', ui.configitems('keyword')) |
|
400 | demoitems('keywordset', ui.configitems('keywordset')) | |
|
370 | 401 | demoitems('keywordmaps', kwmaps.iteritems()) |
|
371 | 402 | keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n' |
|
372 | 403 | repo.wopener(fn, 'w').write(keywords) |
@@ -424,24 +455,26 b' def files(ui, repo, *pats, **opts):' | |||
|
424 | 455 | files = sorted(modified + added + clean) |
|
425 | 456 | wctx = repo[None] |
|
426 | 457 | kwfiles = kwt.iskwfile(files, wctx) |
|
458 | kwdeleted = kwt.iskwfile(deleted, wctx) | |
|
427 | 459 | kwunknown = kwt.iskwfile(unknown, wctx) |
|
428 | 460 | if not opts.get('ignore') or opts.get('all'): |
|
429 | showfiles = kwfiles, kwunknown | |
|
461 | showfiles = kwfiles, kwdeleted, kwunknown | |
|
430 | 462 | else: |
|
431 | showfiles = [], [] | |
|
463 | showfiles = [], [], [] | |
|
432 | 464 | if opts.get('all') or opts.get('ignore'): |
|
433 | 465 | showfiles += ([f for f in files if f not in kwfiles], |
|
434 | 466 | [f for f in unknown if f not in kwunknown]) |
|
435 | for char, filenames in zip('KkIi', showfiles): | |
|
467 | kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split() | |
|
468 | kwstates = zip('K!kIi', showfiles, kwlabels) | |
|
469 | for char, filenames, kwstate in kwstates: | |
|
436 | 470 | fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n' |
|
437 | 471 | for f in filenames: |
|
438 | ui.write(fmt % repo.pathto(f, cwd)) | |
|
472 | ui.write(fmt % repo.pathto(f, cwd), label='kwfiles.' + kwstate) | |
|
439 | 473 | |
|
440 | 474 | def shrink(ui, repo, *pats, **opts): |
|
441 | 475 | '''revert expanded keywords in the working directory |
|
442 | 476 | |
|
443 |
|
|
|
444 | problems with :hg:`import` or :hg:`merge`. | |
|
477 | Must be run before changing/disabling active keywords. | |
|
445 | 478 | |
|
446 | 479 | kwshrink refuses to run if given files contain local changes. |
|
447 | 480 | ''' |
@@ -603,8 +636,6 b' def reposetup(ui, repo):' | |||
|
603 | 636 | finally: |
|
604 | 637 | wlock.release() |
|
605 | 638 | |
|
606 | repo.__class__ = kwrepo | |
|
607 | ||
|
608 | 639 | def kwfilectx_cmp(orig, self, fctx): |
|
609 | 640 | # keyword affects data size, comparing wdir and filelog size does |
|
610 | 641 | # not make sense |
@@ -628,6 +659,8 b' def reposetup(ui, repo):' | |||
|
628 | 659 | except KeyError: |
|
629 | 660 | pass |
|
630 | 661 | |
|
662 | repo.__class__ = kwrepo | |
|
663 | ||
|
631 | 664 | cmdtable = { |
|
632 | 665 | 'kwdemo': |
|
633 | 666 | (demo, |
@@ -86,6 +86,8 b' class patchheader(object):' | |||
|
86 | 86 | parent = None |
|
87 | 87 | format = None |
|
88 | 88 | subject = None |
|
89 | branch = None | |
|
90 | nodeid = None | |
|
89 | 91 | diffstart = 0 |
|
90 | 92 | |
|
91 | 93 | for line in file(pf): |
@@ -106,6 +108,10 b' class patchheader(object):' | |||
|
106 | 108 | date = line[7:] |
|
107 | 109 | elif line.startswith("# Parent "): |
|
108 | 110 | parent = line[9:] |
|
111 | elif line.startswith("# Branch "): | |
|
112 | branch = line[9:] | |
|
113 | elif line.startswith("# Node ID "): | |
|
114 | nodeid = line[10:] | |
|
109 | 115 | elif not line.startswith("# ") and line: |
|
110 | 116 | message.append(line) |
|
111 | 117 | format = None |
@@ -134,6 +140,9 b' class patchheader(object):' | |||
|
134 | 140 | |
|
135 | 141 | eatdiff(message) |
|
136 | 142 | eatdiff(comments) |
|
143 | # Remember the exact starting line of the patch diffs before consuming | |
|
144 | # empty lines, for external use by TortoiseHg and others | |
|
145 | self.diffstartline = len(comments) | |
|
137 | 146 | eatempty(message) |
|
138 | 147 | eatempty(comments) |
|
139 | 148 | |
@@ -147,6 +156,9 b' class patchheader(object):' | |||
|
147 | 156 | self.user = user |
|
148 | 157 | self.date = date |
|
149 | 158 | self.parent = parent |
|
159 | # nodeid and branch are for external use by TortoiseHg and others | |
|
160 | self.nodeid = nodeid | |
|
161 | self.branch = branch | |
|
150 | 162 | self.haspatch = diffstart > 1 |
|
151 | 163 | self.plainmode = plainmode |
|
152 | 164 | |
@@ -239,6 +251,7 b' class queue(object):' | |||
|
239 | 251 | try: |
|
240 | 252 | fh = open(os.path.join(path, 'patches.queue')) |
|
241 | 253 | cur = fh.read().rstrip() |
|
254 | fh.close() | |
|
242 | 255 | if not cur: |
|
243 | 256 | curpath = os.path.join(path, 'patches') |
|
244 | 257 | else: |
@@ -793,6 +806,19 b' class queue(object):' | |||
|
793 | 806 | return top, patch |
|
794 | 807 | return None, None |
|
795 | 808 | |
|
809 | def check_substate(self, repo): | |
|
810 | '''return list of subrepos at a different revision than substate. | |
|
811 | Abort if any subrepos have uncommitted changes.''' | |
|
812 | inclsubs = [] | |
|
813 | wctx = repo[None] | |
|
814 | for s in wctx.substate: | |
|
815 | if wctx.sub(s).dirty(True): | |
|
816 | raise util.Abort( | |
|
817 | _("uncommitted changes in subrepository %s") % s) | |
|
818 | elif wctx.sub(s).dirty(): | |
|
819 | inclsubs.append(s) | |
|
820 | return inclsubs | |
|
821 | ||
|
796 | 822 | def check_localchanges(self, repo, force=False, refresh=True): |
|
797 | 823 | m, a, r, d = repo.status()[:4] |
|
798 | 824 | if (m or a or r or d) and not force: |
@@ -826,16 +852,23 b' class queue(object):' | |||
|
826 | 852 | % patchfn) |
|
827 | 853 | else: |
|
828 | 854 | raise util.Abort(_('patch "%s" already exists') % patchfn) |
|
855 | ||
|
856 | inclsubs = self.check_substate(repo) | |
|
857 | if inclsubs: | |
|
858 | inclsubs.append('.hgsubstate') | |
|
829 | 859 | if opts.get('include') or opts.get('exclude') or pats: |
|
860 | if inclsubs: | |
|
861 | pats = list(pats or []) + inclsubs | |
|
830 | 862 | match = cmdutil.match(repo, pats, opts) |
|
831 | 863 | # detect missing files in pats |
|
832 | 864 | def badfn(f, msg): |
|
833 | raise util.Abort('%s: %s' % (f, msg)) | |
|
865 | if f != '.hgsubstate': # .hgsubstate is auto-created | |
|
866 | raise util.Abort('%s: %s' % (f, msg)) | |
|
834 | 867 | match.bad = badfn |
|
835 | 868 | m, a, r, d = repo.status(match=match)[:4] |
|
836 | 869 | else: |
|
837 | 870 | m, a, r, d = self.check_localchanges(repo, force=True) |
|
838 | match = cmdutil.matchfiles(repo, m + a + r) | |
|
871 | match = cmdutil.matchfiles(repo, m + a + r + inclsubs) | |
|
839 | 872 | if len(repo[None].parents()) > 1: |
|
840 | 873 | raise util.Abort(_('cannot manage merge changesets')) |
|
841 | 874 | commitfiles = m + a + r |
@@ -1006,7 +1039,7 b' class queue(object):' | |||
|
1006 | 1039 | raise util.Abort(_("patch %s not in series") % patch) |
|
1007 | 1040 | |
|
1008 | 1041 | def push(self, repo, patch=None, force=False, list=False, |
|
1009 | mergeq=None, all=False, move=False): | |
|
1042 | mergeq=None, all=False, move=False, exact=False): | |
|
1010 | 1043 | diffopts = self.diffopts() |
|
1011 | 1044 | wlock = repo.wlock() |
|
1012 | 1045 | try: |
@@ -1015,7 +1048,7 b' class queue(object):' | |||
|
1015 | 1048 | heads += ls |
|
1016 | 1049 | if not heads: |
|
1017 | 1050 | heads = [nullid] |
|
1018 | if repo.dirstate.parents()[0] not in heads: | |
|
1051 | if repo.dirstate.parents()[0] not in heads and not exact: | |
|
1019 | 1052 | self.ui.status(_("(working directory not at a head)\n")) |
|
1020 | 1053 | |
|
1021 | 1054 | if not self.series: |
@@ -1064,9 +1097,21 b' class queue(object):' | |||
|
1064 | 1097 | if not force: |
|
1065 | 1098 | self.check_localchanges(repo) |
|
1066 | 1099 | |
|
1100 | if exact: | |
|
1101 | if move: | |
|
1102 | raise util.Abort(_("cannot use --exact and --move together")) | |
|
1103 | if self.applied: | |
|
1104 | raise util.Abort(_("cannot push --exact with applied patches")) | |
|
1105 | root = self.series[start] | |
|
1106 | target = patchheader(self.join(root), self.plainmode).parent | |
|
1107 | if not target: | |
|
1108 | raise util.Abort(_("%s does not have a parent recorded" % root)) | |
|
1109 | if not repo[target] == repo['.']: | |
|
1110 | hg.update(repo, target) | |
|
1111 | ||
|
1067 | 1112 | if move: |
|
1068 | 1113 | if not patch: |
|
1069 |
raise |
|
|
1114 | raise util.Abort(_("please specify the patch to move")) | |
|
1070 | 1115 | for i, rpn in enumerate(self.full_series[start:]): |
|
1071 | 1116 | # strip markers for patch guards |
|
1072 | 1117 | if self.guard_re.split(rpn, 1)[0] == patch: |
@@ -1104,7 +1149,7 b' class queue(object):' | |||
|
1104 | 1149 | for f in all_files: |
|
1105 | 1150 | if f not in repo.dirstate: |
|
1106 | 1151 | try: |
|
1107 | util.unlink(repo.wjoin(f)) | |
|
1152 | util.unlinkpath(repo.wjoin(f)) | |
|
1108 | 1153 | except OSError, inst: |
|
1109 | 1154 | if inst.errno != errno.ENOENT: |
|
1110 | 1155 | raise |
@@ -1198,7 +1243,7 b' class queue(object):' | |||
|
1198 | 1243 | raise util.Abort(_("deletions found between repo revs")) |
|
1199 | 1244 | for f in a: |
|
1200 | 1245 | try: |
|
1201 | util.unlink(repo.wjoin(f)) | |
|
1246 | util.unlinkpath(repo.wjoin(f)) | |
|
1202 | 1247 | except OSError, e: |
|
1203 | 1248 | if e.errno != errno.ENOENT: |
|
1204 | 1249 | raise |
@@ -1249,6 +1294,8 b' class queue(object):' | |||
|
1249 | 1294 | if repo.changelog.heads(top) != [top]: |
|
1250 | 1295 | raise util.Abort(_("cannot refresh a revision with children")) |
|
1251 | 1296 | |
|
1297 | inclsubs = self.check_substate(repo) | |
|
1298 | ||
|
1252 | 1299 | cparents = repo.changelog.parents(top) |
|
1253 | 1300 | patchparent = self.qparents(repo, top) |
|
1254 | 1301 | ph = patchheader(self.join(patchfn), self.plainmode) |
@@ -1272,10 +1319,10 b' class queue(object):' | |||
|
1272 | 1319 | # and then commit. |
|
1273 | 1320 | # |
|
1274 | 1321 | # this should really read: |
|
1275 |
# mm, dd, aa |
|
|
1322 | # mm, dd, aa = repo.status(top, patchparent)[:3] | |
|
1276 | 1323 | # but we do it backwards to take advantage of manifest/chlog |
|
1277 | 1324 | # caching against the next repo.status call |
|
1278 |
mm, aa, dd |
|
|
1325 | mm, aa, dd = repo.status(patchparent, top)[:3] | |
|
1279 | 1326 | changes = repo.changelog.read(top) |
|
1280 | 1327 | man = repo.manifest.read(changes[0]) |
|
1281 | 1328 | aaa = aa[:] |
@@ -1291,49 +1338,43 b' class queue(object):' | |||
|
1291 | 1338 | else: |
|
1292 | 1339 | match = cmdutil.matchall(repo) |
|
1293 | 1340 | m, a, r, d = repo.status(match=match)[:4] |
|
1341 | mm = set(mm) | |
|
1342 | aa = set(aa) | |
|
1343 | dd = set(dd) | |
|
1294 | 1344 | |
|
1295 | 1345 | # we might end up with files that were added between |
|
1296 | 1346 | # qtip and the dirstate parent, but then changed in the |
|
1297 | 1347 | # local dirstate. in this case, we want them to only |
|
1298 | 1348 | # show up in the added section |
|
1299 | 1349 | for x in m: |
|
1300 | if x == '.hgsub' or x == '.hgsubstate': | |
|
1301 | self.ui.warn(_('warning: not refreshing %s\n') % x) | |
|
1302 | continue | |
|
1303 | 1350 | if x not in aa: |
|
1304 |
mm.a |
|
|
1351 | mm.add(x) | |
|
1305 | 1352 | # we might end up with files added by the local dirstate that |
|
1306 | 1353 | # were deleted by the patch. In this case, they should only |
|
1307 | 1354 | # show up in the changed section. |
|
1308 | 1355 | for x in a: |
|
1309 | if x == '.hgsub' or x == '.hgsubstate': | |
|
1310 | self.ui.warn(_('warning: not adding %s\n') % x) | |
|
1311 | continue | |
|
1312 | 1356 | if x in dd: |
|
1313 |
|
|
|
1314 |
mm.a |
|
|
1357 | dd.remove(x) | |
|
1358 | mm.add(x) | |
|
1315 | 1359 | else: |
|
1316 |
aa.a |
|
|
1360 | aa.add(x) | |
|
1317 | 1361 | # make sure any files deleted in the local dirstate |
|
1318 | 1362 | # are not in the add or change column of the patch |
|
1319 | 1363 | forget = [] |
|
1320 | 1364 | for x in d + r: |
|
1321 | if x == '.hgsub' or x == '.hgsubstate': | |
|
1322 | self.ui.warn(_('warning: not removing %s\n') % x) | |
|
1323 | continue | |
|
1324 | 1365 | if x in aa: |
|
1325 |
|
|
|
1366 | aa.remove(x) | |
|
1326 | 1367 | forget.append(x) |
|
1327 | 1368 | continue |
|
1328 |
el |
|
|
1329 |
|
|
|
1330 |
dd.a |
|
|
1331 | ||
|
1332 |
m = list( |
|
|
1333 |
r = list( |
|
|
1334 |
a = list( |
|
|
1369 | else: | |
|
1370 | mm.discard(x) | |
|
1371 | dd.add(x) | |
|
1372 | ||
|
1373 | m = list(mm) | |
|
1374 | r = list(dd) | |
|
1375 | a = list(aa) | |
|
1335 | 1376 | c = [filter(matchfn, l) for l in (m, a, r)] |
|
1336 | match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2])) | |
|
1377 | match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs)) | |
|
1337 | 1378 | chunks = patch.diff(repo, patchparent, match=match, |
|
1338 | 1379 | changes=c, opts=diffopts) |
|
1339 | 1380 | for chunk in chunks: |
@@ -1531,7 +1572,7 b' class queue(object):' | |||
|
1531 | 1572 | l = line.rstrip() |
|
1532 | 1573 | l = l[10:].split(' ') |
|
1533 | 1574 | qpp = [bin(x) for x in l] |
|
1534 |
elif datastart |
|
|
1575 | elif datastart is not None: | |
|
1535 | 1576 | l = line.rstrip() |
|
1536 | 1577 | n, name = l.split(':', 1) |
|
1537 | 1578 | if n: |
@@ -1741,7 +1782,9 b' class queue(object):' | |||
|
1741 | 1782 | _('need --name to import a patch from -')) |
|
1742 | 1783 | text = sys.stdin.read() |
|
1743 | 1784 | else: |
|
1744 |
|
|
|
1785 | fp = url.open(self.ui, filename) | |
|
1786 | text = fp.read() | |
|
1787 | fp.close() | |
|
1745 | 1788 | except (OSError, IOError): |
|
1746 | 1789 | raise util.Abort(_("unable to read file %s") % filename) |
|
1747 | 1790 | if not patchname: |
@@ -1750,6 +1793,7 b' class queue(object):' | |||
|
1750 | 1793 | checkfile(patchname) |
|
1751 | 1794 | patchf = self.opener(patchname, "w") |
|
1752 | 1795 | patchf.write(text) |
|
1796 | patchf.close() | |
|
1753 | 1797 | if not force: |
|
1754 | 1798 | checkseries(patchname) |
|
1755 | 1799 | if patchname not in self.series: |
@@ -1761,6 +1805,8 b' class queue(object):' | |||
|
1761 | 1805 | self.added.append(patchname) |
|
1762 | 1806 | patchname = None |
|
1763 | 1807 | |
|
1808 | self.removeundo(repo) | |
|
1809 | ||
|
1764 | 1810 | def delete(ui, repo, *patches, **opts): |
|
1765 | 1811 | """remove patches from queue |
|
1766 | 1812 | |
@@ -2346,7 +2392,8 b' def push(ui, repo, patch=None, **opts):' | |||
|
2346 | 2392 | mergeq = queue(ui, repo.join(""), newpath) |
|
2347 | 2393 | ui.warn(_("merging with queue at: %s\n") % mergeq.path) |
|
2348 | 2394 | ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'), |
|
2349 |
mergeq=mergeq, all=opts.get('all'), move=opts.get('move') |
|
|
2395 | mergeq=mergeq, all=opts.get('all'), move=opts.get('move'), | |
|
2396 | exact=opts.get('exact')) | |
|
2350 | 2397 | return ret |
|
2351 | 2398 | |
|
2352 | 2399 | def pop(ui, repo, patch=None, **opts): |
@@ -2746,6 +2793,7 b' def qqueue(ui, repo, name=None, **opts):' | |||
|
2746 | 2793 | try: |
|
2747 | 2794 | fh = repo.opener(_allqueues, 'r') |
|
2748 | 2795 | queues = [queue.strip() for queue in fh if queue.strip()] |
|
2796 | fh.close() | |
|
2749 | 2797 | if current not in queues: |
|
2750 | 2798 | queues.append(current) |
|
2751 | 2799 | except IOError: |
@@ -2880,7 +2928,7 b' def reposetup(ui, repo):' | |||
|
2880 | 2928 | return super(mqrepo, self).commit(text, user, date, match, force, |
|
2881 | 2929 | editor, extra) |
|
2882 | 2930 | |
|
2883 |
def push(self, |
|
|
2931 | def checkpush(self, force, revs): | |
|
2884 | 2932 | if self.mq.applied and not force: |
|
2885 | 2933 | haspatches = True |
|
2886 | 2934 | if revs: |
@@ -2891,7 +2939,7 b' def reposetup(ui, repo):' | |||
|
2891 | 2939 | haspatches = bool([n for n in revs if n in applied]) |
|
2892 | 2940 | if haspatches: |
|
2893 | 2941 | raise util.Abort(_('source has mq patches applied')) |
|
2894 |
|
|
|
2942 | super(mqrepo, self).checkpush(force, revs) | |
|
2895 | 2943 | |
|
2896 | 2944 | def _findtags(self): |
|
2897 | 2945 | '''augment tags from base class with patch tags''' |
@@ -2903,7 +2951,7 b' def reposetup(ui, repo):' | |||
|
2903 | 2951 | |
|
2904 | 2952 | mqtags = [(patch.node, patch.name) for patch in q.applied] |
|
2905 | 2953 | |
|
2906 |
if mqtags[-1][0] not in self |
|
|
2954 | if mqtags[-1][0] not in self: | |
|
2907 | 2955 | self.ui.warn(_('mq status file refers to unknown node %s\n') |
|
2908 | 2956 | % short(mqtags[-1][0])) |
|
2909 | 2957 | return result |
@@ -2928,7 +2976,7 b' def reposetup(ui, repo):' | |||
|
2928 | 2976 | |
|
2929 | 2977 | cl = self.changelog |
|
2930 | 2978 | qbasenode = q.applied[0].node |
|
2931 |
if qbasenode not in |
|
|
2979 | if qbasenode not in self: | |
|
2932 | 2980 | self.ui.warn(_('mq status file refers to unknown node %s\n') |
|
2933 | 2981 | % short(qbasenode)) |
|
2934 | 2982 | return super(mqrepo, self)._branchtags(partial, lrev) |
@@ -3122,6 +3170,7 b' cmdtable = {' | |||
|
3122 | 3170 | "^qpush": |
|
3123 | 3171 | (push, |
|
3124 | 3172 | [('f', 'force', None, _('apply on top of local changes')), |
|
3173 | ('e', 'exact', None, _('apply the target patch to its recorded parent')), | |
|
3125 | 3174 | ('l', 'list', None, _('list patch name in commit text')), |
|
3126 | 3175 | ('a', 'all', None, _('apply all patches')), |
|
3127 | 3176 | ('m', 'merge', None, _('merge from another queue (DEPRECATED)')), |
@@ -193,6 +193,9 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
193 | 193 | PAGER environment variable is set, your pager will be fired up once |
|
194 | 194 | for each patchbomb message, so you can verify everything is alright. |
|
195 | 195 | |
|
196 | In case email sending fails, you will find a backup of your series | |
|
197 | introductory message in ``.hg/last-email.txt``. | |
|
198 | ||
|
196 | 199 | Examples:: |
|
197 | 200 | |
|
198 | 201 | hg email -r 3000 # send patch 3000 only |
@@ -258,7 +261,10 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
258 | 261 | tmpfn = os.path.join(tmpdir, 'bundle') |
|
259 | 262 | try: |
|
260 | 263 | commands.bundle(ui, repo, tmpfn, dest, **opts) |
|
261 |
|
|
|
264 | fp = open(tmpfn, 'rb') | |
|
265 | data = fp.read() | |
|
266 | fp.close() | |
|
267 | return data | |
|
262 | 268 | finally: |
|
263 | 269 | try: |
|
264 | 270 | os.unlink(tmpfn) |
@@ -309,6 +315,10 b' def patchbomb(ui, repo, *revs, **opts):' | |||
|
309 | 315 | ui.write(_('\nWrite the introductory message for the ' |
|
310 | 316 | 'patch series.\n\n')) |
|
311 | 317 | body = ui.edit(body, sender) |
|
318 | # Save serie description in case sendmail fails | |
|
319 | msgfile = repo.opener('last-email.txt', 'wb') | |
|
320 | msgfile.write(body) | |
|
321 | msgfile.close() | |
|
312 | 322 | return body |
|
313 | 323 | |
|
314 | 324 | def getpatchmsgs(patches, patchnames=None): |
@@ -28,7 +28,7 b' The following settings are available::' | |||
|
28 | 28 | [progress] |
|
29 | 29 | delay = 3 # number of seconds (float) before showing the progress bar |
|
30 | 30 | refresh = 0.1 # time in seconds between refreshes of the progress bar |
|
31 | format = topic bar number # format of the progress bar | |
|
31 | format = topic bar number estimate # format of the progress bar | |
|
32 | 32 | width = <none> # if set, the maximum width of the progress information |
|
33 | 33 | # (that is, min(width, term width) will be used) |
|
34 | 34 | clear-complete = True # clear the progress bar after it's done |
@@ -36,15 +36,17 b' The following settings are available::' | |||
|
36 | 36 | assume-tty = False # if true, ALWAYS show a progress bar, unless |
|
37 | 37 | # disable is given |
|
38 | 38 | |
|
39 |
Valid entries for the format field are topic, bar, number, unit, |
|
|
40 |
item. item defaults to the last 20 characters of the |
|
|
41 |
can be changed by adding either ``-<num>`` which would |
|
|
42 |
num characters, or ``+<num>`` for the first num |
|
|
39 | Valid entries for the format field are topic, bar, number, unit, | |
|
40 | estimate, and item. item defaults to the last 20 characters of the | |
|
41 | item, but this can be changed by adding either ``-<num>`` which would | |
|
42 | take the last num characters, or ``+<num>`` for the first num | |
|
43 | characters. | |
|
43 | 44 | """ |
|
44 | 45 | |
|
45 | 46 | import sys |
|
46 | 47 | import time |
|
47 | 48 | |
|
49 | from mercurial.i18n import _ | |
|
48 | 50 | from mercurial import util |
|
49 | 51 | |
|
50 | 52 | def spacejoin(*args): |
@@ -54,6 +56,43 b' def shouldprint(ui):' | |||
|
54 | 56 | return (getattr(sys.stderr, 'isatty', None) and |
|
55 | 57 | (sys.stderr.isatty() or ui.configbool('progress', 'assume-tty'))) |
|
56 | 58 | |
|
59 | def fmtremaining(seconds): | |
|
60 | if seconds < 60: | |
|
61 | # i18n: format XX seconds as "XXs" | |
|
62 | return _("%02ds") % (seconds) | |
|
63 | minutes = seconds // 60 | |
|
64 | if minutes < 60: | |
|
65 | seconds -= minutes * 60 | |
|
66 | # i18n: format X minutes and YY seconds as "XmYYs" | |
|
67 | return _("%dm%02ds") % (minutes, seconds) | |
|
68 | # we're going to ignore seconds in this case | |
|
69 | minutes += 1 | |
|
70 | hours = minutes // 60 | |
|
71 | minutes -= hours * 60 | |
|
72 | if hours < 30: | |
|
73 | # i18n: format X hours and YY minutes as "XhYYm" | |
|
74 | return _("%dh%02dm") % (hours, minutes) | |
|
75 | # we're going to ignore minutes in this case | |
|
76 | hours += 1 | |
|
77 | days = hours // 24 | |
|
78 | hours -= days * 24 | |
|
79 | if days < 15: | |
|
80 | # i18n: format X days and YY hours as "XdYYh" | |
|
81 | return _("%dd%02dh") % (days, hours) | |
|
82 | # we're going to ignore hours in this case | |
|
83 | days += 1 | |
|
84 | weeks = days // 7 | |
|
85 | days -= weeks * 7 | |
|
86 | if weeks < 55: | |
|
87 | # i18n: format X weeks and YY days as "XwYYd" | |
|
88 | return _("%dw%02dd") % (weeks, days) | |
|
89 | # we're going to ignore days and treat a year as 52 weeks | |
|
90 | weeks += 1 | |
|
91 | years = weeks // 52 | |
|
92 | weeks -= years * 52 | |
|
93 | # i18n: format X years and YY weeks as "XyYYw" | |
|
94 | return _("%dy%02dw") % (years, weeks) | |
|
95 | ||
|
57 | 96 | class progbar(object): |
|
58 | 97 | def __init__(self, ui): |
|
59 | 98 | self.ui = ui |
@@ -61,6 +100,9 b' class progbar(object):' | |||
|
61 | 100 | |
|
62 | 101 | def resetstate(self): |
|
63 | 102 | self.topics = [] |
|
103 | self.topicstates = {} | |
|
104 | self.starttimes = {} | |
|
105 | self.startvals = {} | |
|
64 | 106 | self.printed = False |
|
65 | 107 | self.lastprint = time.time() + float(self.ui.config( |
|
66 | 108 | 'progress', 'delay', default=3)) |
@@ -69,9 +111,9 b' class progbar(object):' | |||
|
69 | 111 | 'progress', 'refresh', default=0.1)) |
|
70 | 112 | self.order = self.ui.configlist( |
|
71 | 113 | 'progress', 'format', |
|
72 | default=['topic', 'bar', 'number']) | |
|
114 | default=['topic', 'bar', 'number', 'estimate']) | |
|
73 | 115 | |
|
74 | def show(self, topic, pos, item, unit, total): | |
|
116 | def show(self, now, topic, pos, item, unit, total): | |
|
75 | 117 | if not shouldprint(self.ui): |
|
76 | 118 | return |
|
77 | 119 | termwidth = self.width() |
@@ -108,10 +150,12 b' class progbar(object):' | |||
|
108 | 150 | needprogress = True |
|
109 | 151 | elif indicator == 'unit' and unit: |
|
110 | 152 | add = unit |
|
153 | elif indicator == 'estimate': | |
|
154 | add = self.estimate(topic, pos, total, now) | |
|
111 | 155 | if not needprogress: |
|
112 | 156 | head = spacejoin(head, add) |
|
113 | 157 | else: |
|
114 |
tail = spacejoin( |
|
|
158 | tail = spacejoin(tail, add) | |
|
115 | 159 | if needprogress: |
|
116 | 160 | used = 0 |
|
117 | 161 | if head: |
@@ -159,19 +203,44 b' class progbar(object):' | |||
|
159 | 203 | tw = self.ui.termwidth() |
|
160 | 204 | return min(int(self.ui.config('progress', 'width', default=tw)), tw) |
|
161 | 205 | |
|
206 | def estimate(self, topic, pos, total, now): | |
|
207 | if total is None: | |
|
208 | return '' | |
|
209 | initialpos = self.startvals[topic] | |
|
210 | target = total - initialpos | |
|
211 | delta = pos - initialpos | |
|
212 | if delta > 0: | |
|
213 | elapsed = now - self.starttimes[topic] | |
|
214 | if elapsed > float( | |
|
215 | self.ui.config('progress', 'estimate', default=2)): | |
|
216 | seconds = (elapsed * (target - delta)) // delta + 1 | |
|
217 | return fmtremaining(seconds) | |
|
218 | return '' | |
|
219 | ||
|
162 | 220 | def progress(self, topic, pos, item='', unit='', total=None): |
|
221 | now = time.time() | |
|
163 | 222 | if pos is None: |
|
164 | if self.topics and self.topics[-1] == topic and self.printed: | |
|
223 | self.starttimes.pop(topic, None) | |
|
224 | self.startvals.pop(topic, None) | |
|
225 | self.topicstates.pop(topic, None) | |
|
226 | # reset the progress bar if this is the outermost topic | |
|
227 | if self.topics and self.topics[0] == topic and self.printed: | |
|
165 | 228 | self.complete() |
|
166 | 229 | self.resetstate() |
|
230 | # truncate the list of topics assuming all topics within | |
|
231 | # this one are also closed | |
|
232 | if topic in self.topics: | |
|
233 | self.topics = self.topics[:self.topics.index(topic)] | |
|
167 | 234 | else: |
|
168 | 235 | if topic not in self.topics: |
|
236 | self.starttimes[topic] = now | |
|
237 | self.startvals[topic] = pos | |
|
169 | 238 | self.topics.append(topic) |
|
170 | now = time.time() | |
|
171 |
if |
|
|
172 | and topic == self.topics[-1]): | |
|
239 | self.topicstates[topic] = pos, item, unit, total | |
|
240 | if now - self.lastprint >= self.refresh and self.topics: | |
|
173 | 241 | self.lastprint = now |
|
174 | self.show(topic, pos, item, unit, total) | |
|
242 | current = self.topics[-1] | |
|
243 | self.show(now, topic, *self.topicstates[topic]) | |
|
175 | 244 | |
|
176 | 245 | def uisetup(ui): |
|
177 | 246 | class progressui(ui.__class__): |
@@ -215,7 +215,7 b' def rebase(ui, repo, **opts):' | |||
|
215 | 215 | clearstatus(repo) |
|
216 | 216 | ui.note(_("rebase completed\n")) |
|
217 | 217 | if os.path.exists(repo.sjoin('undo')): |
|
218 | util.unlink(repo.sjoin('undo')) | |
|
218 | util.unlinkpath(repo.sjoin('undo')) | |
|
219 | 219 | if skipped: |
|
220 | 220 | ui.note(_("%d revisions have been skipped\n") % len(skipped)) |
|
221 | 221 | finally: |
@@ -393,7 +393,7 b' def storestatus(repo, originalwd, target' | |||
|
393 | 393 | def clearstatus(repo): |
|
394 | 394 | 'Remove the status files' |
|
395 | 395 | if os.path.exists(repo.join("rebasestate")): |
|
396 | util.unlink(repo.join("rebasestate")) | |
|
396 | util.unlinkpath(repo.join("rebasestate")) | |
|
397 | 397 | |
|
398 | 398 | def restorestatus(repo): |
|
399 | 399 | 'Restore a previously stored status' |
@@ -10,7 +10,7 b'' | |||
|
10 | 10 | from mercurial.i18n import gettext, _ |
|
11 | 11 | from mercurial import cmdutil, commands, extensions, hg, mdiff, patch |
|
12 | 12 | from mercurial import util |
|
13 | import copy, cStringIO, errno, os, re, tempfile | |
|
13 | import copy, cStringIO, errno, os, re, shutil, tempfile | |
|
14 | 14 | |
|
15 | 15 | lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') |
|
16 | 16 | |
@@ -42,7 +42,7 b' def scanpatch(fp):' | |||
|
42 | 42 | line = lr.readline() |
|
43 | 43 | if not line: |
|
44 | 44 | break |
|
45 | if line.startswith('diff --git a/'): | |
|
45 | if line.startswith('diff --git a/') or line.startswith('diff -r '): | |
|
46 | 46 | def notheader(line): |
|
47 | 47 | s = line.split(None, 1) |
|
48 | 48 | return not s or s[0] not in ('---', 'diff') |
@@ -70,7 +70,8 b' class header(object):' | |||
|
70 | 70 | |
|
71 | 71 | XXX shoudn't we move this to mercurial/patch.py ? |
|
72 | 72 | """ |
|
73 | diff_re = re.compile('diff --git a/(.*) b/(.*)$') | |
|
73 | diffgit_re = re.compile('diff --git a/(.*) b/(.*)$') | |
|
74 | diff_re = re.compile('diff -r .* (.*)$') | |
|
74 | 75 | allhunks_re = re.compile('(?:index|new file|deleted file) ') |
|
75 | 76 | pretty_re = re.compile('(?:new file|deleted file) ') |
|
76 | 77 | special_re = re.compile('(?:index|new|deleted|copy|rename) ') |
@@ -80,9 +81,7 b' class header(object):' | |||
|
80 | 81 | self.hunks = [] |
|
81 | 82 | |
|
82 | 83 | def binary(self): |
|
83 | for h in self.header: | |
|
84 | if h.startswith('index '): | |
|
85 | return True | |
|
84 | return util.any(h.startswith('index ') for h in self.header) | |
|
86 | 85 | |
|
87 | 86 | def pretty(self, fp): |
|
88 | 87 | for h in self.header: |
@@ -105,15 +104,17 b' class header(object):' | |||
|
105 | 104 | fp.write(''.join(self.header)) |
|
106 | 105 | |
|
107 | 106 | def allhunks(self): |
|
108 | for h in self.header: | |
|
109 | if self.allhunks_re.match(h): | |
|
110 | return True | |
|
107 | return util.any(self.allhunks_re.match(h) for h in self.header) | |
|
111 | 108 | |
|
112 | 109 | def files(self): |
|
113 |
|
|
|
114 | if fromfile == tofile: | |
|
115 | return [fromfile] | |
|
116 |
|
|
|
110 | match = self.diffgit_re.match(self.header[0]) | |
|
111 | if match: | |
|
112 | fromfile, tofile = match.groups() | |
|
113 | if fromfile == tofile: | |
|
114 | return [fromfile] | |
|
115 | return [fromfile, tofile] | |
|
116 | else: | |
|
117 | return self.diff_re.match(self.header[0]).groups() | |
|
117 | 118 | |
|
118 | 119 | def filename(self): |
|
119 | 120 | return self.files()[-1] |
@@ -122,9 +123,7 b' class header(object):' | |||
|
122 | 123 | return '<header %s>' % (' '.join(map(repr, self.files()))) |
|
123 | 124 | |
|
124 | 125 | def special(self): |
|
125 | for h in self.header: | |
|
126 | if self.special_re.match(h): | |
|
127 | return True | |
|
126 | return util.any(self.special_re.match(h) for h in self.header) | |
|
128 | 127 | |
|
129 | 128 | def countchanges(hunk): |
|
130 | 129 | """hunk -> (n+,n-)""" |
@@ -173,7 +172,7 b' class hunk(object):' | |||
|
173 | 172 | return '<hunk %r@%d>' % (self.filename(), self.fromline) |
|
174 | 173 | |
|
175 | 174 | def parsepatch(fp): |
|
176 | """patch -> [] of hunks """ | |
|
175 | """patch -> [] of headers -> [] of hunks """ | |
|
177 | 176 | class parser(object): |
|
178 | 177 | """patch parsing state machine""" |
|
179 | 178 | def __init__(self): |
@@ -184,7 +183,7 b' def parsepatch(fp):' | |||
|
184 | 183 | self.context = [] |
|
185 | 184 | self.before = [] |
|
186 | 185 | self.hunk = [] |
|
187 |
self. |
|
|
186 | self.headers = [] | |
|
188 | 187 | |
|
189 | 188 | def addrange(self, limits): |
|
190 | 189 | fromstart, fromend, tostart, toend, proc = limits |
@@ -197,7 +196,6 b' def parsepatch(fp):' | |||
|
197 | 196 | h = hunk(self.header, self.fromline, self.toline, self.proc, |
|
198 | 197 | self.before, self.hunk, context) |
|
199 | 198 | self.header.hunks.append(h) |
|
200 | self.stream.append(h) | |
|
201 | 199 | self.fromline += len(self.before) + h.removed |
|
202 | 200 | self.toline += len(self.before) + h.added |
|
203 | 201 | self.before = [] |
@@ -214,12 +212,12 b' def parsepatch(fp):' | |||
|
214 | 212 | def newfile(self, hdr): |
|
215 | 213 | self.addcontext([]) |
|
216 | 214 | h = header(hdr) |
|
217 |
self. |
|
|
215 | self.headers.append(h) | |
|
218 | 216 | self.header = h |
|
219 | 217 | |
|
220 | 218 | def finished(self): |
|
221 | 219 | self.addcontext([]) |
|
222 |
return self. |
|
|
220 | return self.headers | |
|
223 | 221 | |
|
224 | 222 | transitions = { |
|
225 | 223 | 'file': {'context': addcontext, |
@@ -248,27 +246,10 b' def parsepatch(fp):' | |||
|
248 | 246 | state = newstate |
|
249 | 247 | return p.finished() |
|
250 | 248 | |
|
251 |
def filterpatch(ui, |
|
|
249 | def filterpatch(ui, headers): | |
|
252 | 250 | """Interactively filter patch chunks into applied-only chunks""" |
|
253 | chunks = list(chunks) | |
|
254 | chunks.reverse() | |
|
255 | seen = set() | |
|
256 | def consumefile(): | |
|
257 | """fetch next portion from chunks until a 'header' is seen | |
|
258 | NB: header == new-file mark | |
|
259 | """ | |
|
260 | consumed = [] | |
|
261 | while chunks: | |
|
262 | if isinstance(chunks[-1], header): | |
|
263 | break | |
|
264 | else: | |
|
265 | consumed.append(chunks.pop()) | |
|
266 | return consumed | |
|
267 | 251 | |
|
268 | resp_all = [None] # this two are changed from inside prompt, | |
|
269 | resp_file = [None] # so can't be usual variables | |
|
270 | applied = {} # 'filename' -> [] of chunks | |
|
271 | def prompt(query): | |
|
252 | def prompt(skipfile, skipall, query): | |
|
272 | 253 | """prompt query, and process base inputs |
|
273 | 254 | |
|
274 | 255 | - y/n for the rest of file |
@@ -276,13 +257,12 b' def filterpatch(ui, chunks):' | |||
|
276 | 257 | - ? (help) |
|
277 | 258 | - q (quit) |
|
278 | 259 | |
|
279 |
Return |
|
|
280 | appropriate. | |
|
260 | Return True/False and possibly updated skipfile and skipall. | |
|
281 | 261 | """ |
|
282 |
if |
|
|
283 |
return |
|
|
284 |
if |
|
|
285 |
return |
|
|
262 | if skipall is not None: | |
|
263 | return skipall, skipfile, skipall | |
|
264 | if skipfile is not None: | |
|
265 | return skipfile, skipfile, skipall | |
|
286 | 266 | while True: |
|
287 | 267 | resps = _('[Ynsfdaq?]') |
|
288 | 268 | choices = (_('&Yes, record this change'), |
@@ -307,47 +287,48 b' def filterpatch(ui, chunks):' | |||
|
307 | 287 | elif r == 1: # no |
|
308 | 288 | ret = False |
|
309 | 289 | elif r == 2: # Skip |
|
310 |
ret = |
|
|
290 | ret = skipfile = False | |
|
311 | 291 | elif r == 3: # file (Record remaining) |
|
312 |
ret = |
|
|
292 | ret = skipfile = True | |
|
313 | 293 | elif r == 4: # done, skip remaining |
|
314 |
ret = |
|
|
294 | ret = skipall = False | |
|
315 | 295 | elif r == 5: # all |
|
316 |
ret = |
|
|
296 | ret = skipall = True | |
|
317 | 297 | elif r == 6: # quit |
|
318 | 298 | raise util.Abort(_('user quit')) |
|
319 | return ret | |
|
320 | pos, total = 0, len(chunks) - 1 | |
|
321 | while chunks: | |
|
322 | pos = total - len(chunks) + 1 | |
|
323 | chunk = chunks.pop() | |
|
324 | if isinstance(chunk, header): | |
|
325 | # new-file mark | |
|
326 | resp_file = [None] | |
|
327 | fixoffset = 0 | |
|
328 | hdr = ''.join(chunk.header) | |
|
329 | if hdr in seen: | |
|
330 | consumefile() | |
|
331 |
|
|
|
332 |
|
|
|
333 |
|
|
|
299 | return ret, skipfile, skipall | |
|
300 | ||
|
301 | seen = set() | |
|
302 | applied = {} # 'filename' -> [] of chunks | |
|
303 | skipfile, skipall = None, None | |
|
304 | pos, total = 1, sum(len(h.hunks) for h in headers) | |
|
305 | for h in headers: | |
|
306 | pos += len(h.hunks) | |
|
307 | skipfile = None | |
|
308 | fixoffset = 0 | |
|
309 | hdr = ''.join(h.header) | |
|
310 | if hdr in seen: | |
|
311 | continue | |
|
312 | seen.add(hdr) | |
|
313 | if skipall is None: | |
|
314 | h.pretty(ui) | |
|
315 | msg = (_('examine changes to %s?') % | |
|
316 | _(' and ').join(map(repr, h.files()))) | |
|
317 | r, skipfile, skipall = prompt(skipfile, skipall, msg) | |
|
318 | if not r: | |
|
319 | continue | |
|
320 | applied[h.filename()] = [h] | |
|
321 | if h.allhunks(): | |
|
322 | applied[h.filename()] += h.hunks | |
|
323 | continue | |
|
324 | for i, chunk in enumerate(h.hunks): | |
|
325 | if skipfile is None and skipall is None: | |
|
334 | 326 | chunk.pretty(ui) |
|
335 | r = prompt(_('examine changes to %s?') % | |
|
336 | _(' and ').join(map(repr, chunk.files()))) | |
|
337 | if r: | |
|
338 | applied[chunk.filename()] = [chunk] | |
|
339 | if chunk.allhunks(): | |
|
340 | applied[chunk.filename()] += consumefile() | |
|
341 | else: | |
|
342 | consumefile() | |
|
343 | else: | |
|
344 | # new hunk | |
|
345 | if resp_file[0] is None and resp_all[0] is None: | |
|
346 | chunk.pretty(ui) | |
|
347 | r = total == 1 and prompt(_('record this change to %r?') % | |
|
348 | chunk.filename()) \ | |
|
349 | or prompt(_('record change %d/%d to %r?') % | |
|
350 | (pos, total, chunk.filename())) | |
|
327 | msg = (total == 1 | |
|
328 | and (_('record this change to %r?') % chunk.filename()) | |
|
329 | or (_('record change %d/%d to %r?') % | |
|
330 | (pos - len(h.hunks) + i, total, chunk.filename()))) | |
|
331 | r, skipfile, skipall = prompt(skipfile, skipall, msg) | |
|
351 | 332 | if r: |
|
352 | 333 | if fixoffset: |
|
353 | 334 | chunk = copy.copy(chunk) |
@@ -403,8 +384,6 b' def qrecord(ui, repo, patch, *pats, **op' | |||
|
403 | 384 | def committomq(ui, repo, *pats, **opts): |
|
404 | 385 | mq.new(ui, repo, patch, *pats, **opts) |
|
405 | 386 | |
|
406 | opts = opts.copy() | |
|
407 | opts['force'] = True # always 'qnew -f' | |
|
408 | 387 | dorecord(ui, repo, committomq, *pats, **opts) |
|
409 | 388 | |
|
410 | 389 | |
@@ -415,21 +394,22 b' def dorecord(ui, repo, commitfunc, *pats' | |||
|
415 | 394 | def recordfunc(ui, repo, message, match, opts): |
|
416 | 395 | """This is generic record driver. |
|
417 | 396 | |
|
418 |
Its job is to interactively filter local changes, and |
|
|
419 |
prepare working dir into a state |
|
|
420 |
non-interactive commit command such as |
|
|
397 | Its job is to interactively filter local changes, and | |
|
398 | accordingly prepare working directory into a state in which the | |
|
399 | job can be delegated to a non-interactive commit command such as | |
|
400 | 'commit' or 'qrefresh'. | |
|
421 | 401 | |
|
422 |
After the actual job is done by non-interactive command, |
|
|
423 |
|
|
|
402 | After the actual job is done by non-interactive command, the | |
|
403 | working directory is restored to its original state. | |
|
424 | 404 | |
|
425 |
In the end we'll record interesting changes, and everything else |
|
|
426 |
left in place, so the user can continue |
|
|
405 | In the end we'll record interesting changes, and everything else | |
|
406 | will be left in place, so the user can continue working. | |
|
427 | 407 | """ |
|
428 | 408 | |
|
429 | 409 | merge = len(repo[None].parents()) > 1 |
|
430 | 410 | if merge: |
|
431 | 411 | raise util.Abort(_('cannot partially commit a merge ' |
|
432 | '(use hg commit instead)')) | |
|
412 | '(use "hg commit" instead)')) | |
|
433 | 413 | |
|
434 | 414 | changes = repo.status(match=match)[:3] |
|
435 | 415 | diffopts = mdiff.diffopts(git=True, nodates=True) |
@@ -475,6 +455,7 b' def dorecord(ui, repo, commitfunc, *pats' | |||
|
475 | 455 | os.close(fd) |
|
476 | 456 | ui.debug('backup %r as %r\n' % (f, tmpname)) |
|
477 | 457 | util.copyfile(repo.wjoin(f), tmpname) |
|
458 | shutil.copystat(repo.wjoin(f), tmpname) | |
|
478 | 459 | backups[f] = tmpname |
|
479 | 460 | |
|
480 | 461 | fp = cStringIO.StringIO() |
@@ -502,11 +483,13 b' def dorecord(ui, repo, commitfunc, *pats' | |||
|
502 | 483 | raise util.Abort(str(err)) |
|
503 | 484 | del fp |
|
504 | 485 | |
|
505 |
# 4. We prepared working directory according to filtered |
|
|
506 |
# Now is the time to delegate the job to |
|
|
486 | # 4. We prepared working directory according to filtered | |
|
487 | # patch. Now is the time to delegate the job to | |
|
488 | # commit/qrefresh or the like! | |
|
507 | 489 | |
|
508 |
# it is important to first chdir to repo root -- we'll call |
|
|
509 |
# highlevel command with list of pathnames relative to |
|
|
490 | # it is important to first chdir to repo root -- we'll call | |
|
491 | # a highlevel command with list of pathnames relative to | |
|
492 | # repo root | |
|
510 | 493 | cwd = os.getcwd() |
|
511 | 494 | os.chdir(repo.root) |
|
512 | 495 | try: |
@@ -521,6 +504,14 b' def dorecord(ui, repo, commitfunc, *pats' | |||
|
521 | 504 | for realname, tmpname in backups.iteritems(): |
|
522 | 505 | ui.debug('restoring %r to %r\n' % (tmpname, realname)) |
|
523 | 506 | util.copyfile(tmpname, repo.wjoin(realname)) |
|
507 | # Our calls to copystat() here and above are a | |
|
508 | # hack to trick any editors that have f open that | |
|
509 | # we haven't modified them. | |
|
510 | # | |
|
511 | # Also note that this racy as an editor could | |
|
512 | # notice the file's mtime before we've finished | |
|
513 | # writing it. | |
|
514 | shutil.copystat(tmpname, repo.wjoin(realname)) | |
|
524 | 515 | os.unlink(tmpname) |
|
525 | 516 | os.rmdir(backupdir) |
|
526 | 517 | except OSError: |
@@ -540,11 +531,7 b' def dorecord(ui, repo, commitfunc, *pats' | |||
|
540 | 531 | |
|
541 | 532 | cmdtable = { |
|
542 | 533 | "record": |
|
543 | (record, | |
|
544 | ||
|
545 | # add commit options | |
|
546 | commands.table['^commit|ci'][1], | |
|
547 | ||
|
534 | (record, commands.table['^commit|ci'][1], # same options as commit | |
|
548 | 535 | _('hg record [OPTION]... [FILE]...')), |
|
549 | 536 | } |
|
550 | 537 | |
@@ -557,11 +544,7 b' def uisetup(ui):' | |||
|
557 | 544 | |
|
558 | 545 | qcmdtable = { |
|
559 | 546 | "qrecord": |
|
560 | (qrecord, | |
|
561 | ||
|
562 | # add qnew options, except '--force' | |
|
563 | [opt for opt in mq.cmdtable['^qnew'][1] if opt[1] != 'force'], | |
|
564 | ||
|
547 | (qrecord, mq.cmdtable['^qnew'][1], # same options as qnew | |
|
565 | 548 | _('hg qrecord [OPTION]... PATCH [FILE]...')), |
|
566 | 549 | } |
|
567 | 550 |
@@ -401,7 +401,7 b' class transplanter(object):' | |||
|
401 | 401 | |
|
402 | 402 | def hasnode(repo, node): |
|
403 | 403 | try: |
|
404 |
return repo.changelog.rev(node) |
|
|
404 | return repo.changelog.rev(node) is not None | |
|
405 | 405 | except error.RevlogError: |
|
406 | 406 | return False |
|
407 | 407 |
This diff has been collapsed as it changes many lines, (579 lines changed) Show them Hide them | |||
@@ -17,8 +17,8 b' msgid ""' | |||
|
17 | 17 | msgstr "" |
|
18 | 18 | "Project-Id-Version: Mercurial\n" |
|
19 | 19 | "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n" |
|
20 |
"POT-Creation-Date: 201 |
|
|
21 |
"PO-Revision-Date: 201 |
|
|
20 | "POT-Creation-Date: 2011-01-04 12:03+0100\n" | |
|
21 | "PO-Revision-Date: 2011-01-04 12:15+0100\n" | |
|
22 | 22 | "Last-Translator: <mg@lazybytes.net>\n" |
|
23 | 23 | "Language-Team: Danish\n" |
|
24 | 24 | "Language: Danish\n" |
@@ -1142,10 +1142,10 b' msgid ""' | |||
|
1142 | 1142 | msgstr "" |
|
1143 | 1143 | |
|
1144 | 1144 | msgid "" |
|
1145 |
" If |
|
|
1146 |
" (<dest>/.hg/shamap by default). The |
|
|
1147 |
" that maps each source commit ID to the destination ID |
|
|
1148 | " revision, like so::" | |
|
1145 | " If ``REVMAP`` isn't given, it will be put in a default location\n" | |
|
1146 | " (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple\n" | |
|
1147 | " text file that maps each source commit ID to the destination ID\n" | |
|
1148 | " for that revision, like so::" | |
|
1149 | 1149 | msgstr "" |
|
1150 | 1150 | |
|
1151 | 1151 | msgid " <source ID> <destination ID>" |
@@ -1251,15 +1251,25 b' msgid ""' | |||
|
1251 | 1251 | msgstr "" |
|
1252 | 1252 | |
|
1253 | 1253 | msgid "" |
|
1254 | " --config convert.hg.ignoreerrors=False (boolean)\n" | |
|
1255 | " ignore integrity errors when reading. Use it to fix Mercurial\n" | |
|
1256 | " repositories with missing revlogs, by converting from and to\n" | |
|
1257 | " Mercurial.\n" | |
|
1258 | " --config convert.hg.saverev=False (boolean)\n" | |
|
1259 | " store original revision ID in changeset (forces target IDs to\n" | |
|
1260 | " change)\n" | |
|
1261 | " --config convert.hg.startrev=0 (hg revision identifier)\n" | |
|
1262 | " convert start revision and its descendants" | |
|
1254 | " The Mercurial source recognizes the following configuration\n" | |
|
1255 | " options, which you can set on the command line with ``--config``:" | |
|
1256 | msgstr "" | |
|
1257 | ||
|
1258 | msgid "" | |
|
1259 | " :convert.hg.ignoreerrors: ignore integrity errors when reading.\n" | |
|
1260 | " Use it to fix Mercurial repositories with missing revlogs, by\n" | |
|
1261 | " converting from and to Mercurial. Default is False." | |
|
1262 | msgstr "" | |
|
1263 | ||
|
1264 | msgid "" | |
|
1265 | " :convert.hg.saverev: store original. revision ID in changeset\n" | |
|
1266 | " (forces target IDs to change). It takes and boolean argument\n" | |
|
1267 | " and defaults to False." | |
|
1268 | msgstr "" | |
|
1269 | ||
|
1270 | msgid "" | |
|
1271 | " :convert.hg.startrev: convert start revision and its descendants.\n" | |
|
1272 | " It takes a hg revision identifier and defaults to 0." | |
|
1263 | 1273 | msgstr "" |
|
1264 | 1274 | |
|
1265 | 1275 | msgid "" |
@@ -1271,45 +1281,59 b' msgid ""' | |||
|
1271 | 1281 | " CVS source will use a sandbox (i.e. a checked-out copy) from CVS\n" |
|
1272 | 1282 | " to indicate the starting point of what will be converted. Direct\n" |
|
1273 | 1283 | " access to the repository files is not needed, unless of course the\n" |
|
1274 |
" repository is :local:. The conversion uses the top level |
|
|
1275 |
" in the sandbox to find the CVS repository, and then uses |
|
|
1276 |
" commands to find files to convert. This means that unless |
|
|
1277 | " filemap is given, all files under the starting directory will be\n" | |
|
1284 | " repository is ``:local:``. The conversion uses the top level\n" | |
|
1285 | " directory in the sandbox to find the CVS repository, and then uses\n" | |
|
1286 | " CVS rlog commands to find files to convert. This means that unless\n" | |
|
1287 | " a filemap is given, all files under the starting directory will be\n" | |
|
1278 | 1288 | " converted, and that any directory reorganization in the CVS\n" |
|
1279 | 1289 | " sandbox is ignored." |
|
1280 | 1290 | msgstr "" |
|
1281 | 1291 | |
|
1282 | msgid " The options shown are the defaults." | |
|
1283 | msgstr "" | |
|
1284 | ||
|
1285 | msgid "" | |
|
1286 | " --config convert.cvsps.cache=True (boolean)\n" | |
|
1287 | " Set to False to disable remote log caching, for testing and\n" | |
|
1288 | " debugging purposes.\n" | |
|
1289 | " --config convert.cvsps.fuzz=60 (integer)\n" | |
|
1290 | " Specify the maximum time (in seconds) that is allowed between\n" | |
|
1291 | " commits with identical user and log message in a single\n" | |
|
1292 | " changeset. When very large files were checked in as part of a\n" | |
|
1293 | " changeset then the default may not be long enough.\n" | |
|
1294 | " --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'\n" | |
|
1295 | " Specify a regular expression to which commit log messages are\n" | |
|
1296 | " matched. If a match occurs, then the conversion process will\n" | |
|
1297 | " insert a dummy revision merging the branch on which this log\n" | |
|
1298 | " message occurs to the branch indicated in the regex.\n" | |
|
1299 | " --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'\n" | |
|
1300 | " Specify a regular expression to which commit log messages are\n" | |
|
1301 | " matched. If a match occurs, then the conversion process will\n" | |
|
1302 | " add the most recent revision on the branch indicated in the\n" | |
|
1303 | " regex as the second parent of the changeset.\n" | |
|
1304 | " --config hook.cvslog\n" | |
|
1305 | " Specify a Python function to be called at the end of gathering\n" | |
|
1306 | " the CVS log. The function is passed a list with the log entries,\n" | |
|
1307 | " and can modify the entries in-place, or add or delete them.\n" | |
|
1308 | " --config hook.cvschangesets\n" | |
|
1309 | " Specify a Python function to be called after the changesets\n" | |
|
1310 | " are calculated from the the CVS log. The function is passed\n" | |
|
1311 | " a list with the changeset entries, and can modify the changesets\n" | |
|
1312 | " in-place, or add or delete them." | |
|
1292 | msgid " The following options can be used with ``--config``:" | |
|
1293 | msgstr "" | |
|
1294 | ||
|
1295 | msgid "" | |
|
1296 | " :convert.cvsps.cache: Set to False to disable remote log caching,\n" | |
|
1297 | " for testing and debugging purposes. Default is True." | |
|
1298 | msgstr "" | |
|
1299 | ||
|
1300 | msgid "" | |
|
1301 | " :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is\n" | |
|
1302 | " allowed between commits with identical user and log message in\n" | |
|
1303 | " a single changeset. When very large files were checked in as\n" | |
|
1304 | " part of a changeset then the default may not be long enough.\n" | |
|
1305 | " The default is 60." | |
|
1306 | msgstr "" | |
|
1307 | ||
|
1308 | msgid "" | |
|
1309 | " :convert.cvsps.mergeto: Specify a regular expression to which\n" | |
|
1310 | " commit log messages are matched. If a match occurs, then the\n" | |
|
1311 | " conversion process will insert a dummy revision merging the\n" | |
|
1312 | " branch on which this log message occurs to the branch\n" | |
|
1313 | " indicated in the regex. Default is ``{{mergetobranch\n" | |
|
1314 | " ([-\\w]+)}}``" | |
|
1315 | msgstr "" | |
|
1316 | ||
|
1317 | msgid "" | |
|
1318 | " :convert.cvsps.mergefrom: Specify a regular expression to which\n" | |
|
1319 | " commit log messages are matched. If a match occurs, then the\n" | |
|
1320 | " conversion process will add the most recent revision on the\n" | |
|
1321 | " branch indicated in the regex as the second parent of the\n" | |
|
1322 | " changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``" | |
|
1323 | msgstr "" | |
|
1324 | ||
|
1325 | msgid "" | |
|
1326 | " :hook.cvslog: Specify a Python function to be called at the end of\n" | |
|
1327 | " gathering the CVS log. The function is passed a list with the\n" | |
|
1328 | " log entries, and can modify the entries in-place, or add or\n" | |
|
1329 | " delete them." | |
|
1330 | msgstr "" | |
|
1331 | ||
|
1332 | msgid "" | |
|
1333 | " :hook.cvschangesets: Specify a Python function to be called after\n" | |
|
1334 | " the changesets are calculated from the the CVS log. The\n" | |
|
1335 | " function is passed a list with the changeset entries, and can\n" | |
|
1336 | " modify the changesets in-place, or add or delete them." | |
|
1313 | 1337 | msgstr "" |
|
1314 | 1338 | |
|
1315 | 1339 | msgid "" |
@@ -1326,24 +1350,33 b' msgstr ""' | |||
|
1326 | 1350 | |
|
1327 | 1351 | msgid "" |
|
1328 | 1352 | " Subversion source detects classical trunk/branches/tags layouts.\n" |
|
1329 |
" By default, the supplied |
|
|
1330 |
" converted as a single branch. If |
|
|
1331 |
" replaces the default branch. If |
|
|
1332 | " its subdirectories are listed as possible branches. If\n" | |
|
1333 |
" |
|
|
1334 |
" converted branches. Default |
|
|
1335 | " can be overridden with following options. Set them to paths\n" | |
|
1353 | " By default, the supplied ``svn://repo/path/`` source URL is\n" | |
|
1354 | " converted as a single branch. If ``svn://repo/path/trunk`` exists\n" | |
|
1355 | " it replaces the default branch. If ``svn://repo/path/branches``\n" | |
|
1356 | " exists, its subdirectories are listed as possible branches. If\n" | |
|
1357 | " ``svn://repo/path/tags`` exists, it is looked for tags referencing\n" | |
|
1358 | " converted branches. Default ``trunk``, ``branches`` and ``tags``\n" | |
|
1359 | " values can be overridden with following options. Set them to paths\n" | |
|
1336 | 1360 | " relative to the source URL, or leave them blank to disable auto\n" |
|
1337 | 1361 | " detection." |
|
1338 | 1362 | msgstr "" |
|
1339 | 1363 | |
|
1340 | msgid "" | |
|
1341 | " --config convert.svn.branches=branches (directory name)\n" | |
|
1342 | " specify the directory containing branches\n" | |
|
1343 | " --config convert.svn.tags=tags (directory name)\n" | |
|
1344 |
" |
|
|
1345 | " --config convert.svn.trunk=trunk (directory name)\n" | |
|
1346 | " specify the name of the trunk branch" | |
|
1364 | msgid " The following options can be set with ``--config``:" | |
|
1365 | msgstr "" | |
|
1366 | ||
|
1367 | msgid "" | |
|
1368 | " :convert.svn.branches: specify the directory containing branches.\n" | |
|
1369 | " The defaults is ``branches``." | |
|
1370 | msgstr "" | |
|
1371 | ||
|
1372 | msgid "" | |
|
1373 | " :convert.svn.tags: specify the directory containing tags. The\n" | |
|
1374 | " default is ``tags``." | |
|
1375 | msgstr "" | |
|
1376 | ||
|
1377 | msgid "" | |
|
1378 | " :convert.svn.trunk: specify the name of the trunk branch The\n" | |
|
1379 | " defauls is ``trunk``." | |
|
1347 | 1380 | msgstr "" |
|
1348 | 1381 | |
|
1349 | 1382 | msgid "" |
@@ -1353,8 +1386,8 b' msgid ""' | |||
|
1353 | 1386 | msgstr "" |
|
1354 | 1387 | |
|
1355 | 1388 | msgid "" |
|
1356 |
" |
|
|
1357 | " specify start Subversion revision." | |
|
1389 | " :convert.svn.startrev: specify start Subversion revision number.\n" | |
|
1390 | " The default is 0." | |
|
1358 | 1391 | msgstr "" |
|
1359 | 1392 | |
|
1360 | 1393 | msgid "" |
@@ -1368,17 +1401,17 b' msgid ""' | |||
|
1368 | 1401 | " source to a flat Mercurial repository, ignoring labels, branches\n" |
|
1369 | 1402 | " and integrations. Note that when a depot path is given you then\n" |
|
1370 | 1403 | " usually should specify a target directory, because otherwise the\n" |
|
1371 | " target may be named ...-hg." | |
|
1404 | " target may be named ``...-hg``." | |
|
1372 | 1405 | msgstr "" |
|
1373 | 1406 | |
|
1374 | 1407 | msgid "" |
|
1375 | 1408 | " It is possible to limit the amount of source history to be\n" |
|
1376 |
" converted by specifying an initial Perforce revision |
|
|
1377 | msgstr "" | |
|
1378 | ||
|
1379 | msgid "" | |
|
1380 | " --config convert.p4.startrev=0 (perforce changelist number)\n" | |
|
1381 | " specify initial Perforce revision." | |
|
1409 | " converted by specifying an initial Perforce revision:" | |
|
1410 | msgstr "" | |
|
1411 | ||
|
1412 | msgid "" | |
|
1413 | " :convert.p4.startrev: specify initial Perforce revision, a\n" | |
|
1414 | " Perforce changelist number)." | |
|
1382 | 1415 | msgstr "" |
|
1383 | 1416 | |
|
1384 | 1417 | msgid "" |
@@ -1386,17 +1419,24 b' msgid ""' | |||
|
1386 | 1419 | " '''''''''''''''''''''" |
|
1387 | 1420 | msgstr "" |
|
1388 | 1421 | |
|
1389 | msgid "" | |
|
1390 | " --config convert.hg.clonebranches=False (boolean)\n" | |
|
1391 | " dispatch source branches in separate clones.\n" | |
|
1392 | " --config convert.hg.tagsbranch=default (branch name)\n" | |
|
1393 | " tag revisions branch name\n" | |
|
1394 | " --config convert.hg.usebranchnames=True (boolean)\n" | |
|
1395 | " preserve branch names" | |
|
1396 | msgstr "" | |
|
1397 | ||
|
1398 | msgid " " | |
|
1399 | msgstr " " | |
|
1422 | msgid " The following options are supported:" | |
|
1423 | msgstr "" | |
|
1424 | ||
|
1425 | msgid "" | |
|
1426 | " :convert.hg.clonebranches: dispatch source branches in separate\n" | |
|
1427 | " clones. The default is False." | |
|
1428 | msgstr "" | |
|
1429 | ||
|
1430 | msgid "" | |
|
1431 | " :convert.hg.tagsbranch: branch name for tag revisions, defaults to\n" | |
|
1432 | " ``default``." | |
|
1433 | msgstr "" | |
|
1434 | ||
|
1435 | msgid "" | |
|
1436 | " :convert.hg.usebranchnames: preserve branch names. The default is\n" | |
|
1437 | " True\n" | |
|
1438 | " " | |
|
1439 | msgstr "" | |
|
1400 | 1440 | |
|
1401 | 1441 | msgid "create changeset information from CVS" |
|
1402 | 1442 | msgstr "" |
@@ -1962,32 +2002,35 b' msgstr ""' | |||
|
1962 | 2002 | "``[repository]``." |
|
1963 | 2003 | |
|
1964 | 2004 | msgid "" |
|
1965 |
"The ``[patterns]`` section specifies |
|
|
1966 | "working directory. The format is specified by a file pattern. The\n" | |
|
1967 | "first match is used, so put more specific patterns first. The\n" | |
|
1968 | "available line endings are ``LF``, ``CRLF``, and ``BIN``." | |
|
1969 | msgstr "" | |
|
1970 | "Sektionen ``[patterns]`` angiver hvilken type linieskift der skal\n" | |
|
1971 | "bruges i arbejdskataloget. Typen er angivet ved et filmønster. Den\n" | |
|
1972 | "første træffer bliver brugt, så skriv mere specifikke mønstre først.\n" | |
|
1973 | "De mulige linieskifttyper er ``LF``, ``CRLF`` og ``BIN``." | |
|
2005 | "The ``[patterns]`` section specifies how line endings should be\n" | |
|
2006 | "converted between the working copy and the repository. The format is\n" | |
|
2007 | "specified by a file pattern. The first match is used, so put more\n" | |
|
2008 | "specific patterns first. The available line endings are ``LF``,\n" | |
|
2009 | "``CRLF``, and ``BIN``." | |
|
2010 | msgstr "" | |
|
2011 | "Sektionen ``[patterns]`` angiver hvordan linieskift skal konverteres\n" | |
|
2012 | "mellem arbejdskataloget og depotet. Formatet angives med et\n" | |
|
2013 | "filmønster. Den første træffer bliver brugt, så skriv mere specifikke\n" | |
|
2014 | "mønstre først. De mulige linieskifttyper er ``LF``, ``CRLF`` og\n" | |
|
2015 | "``BIN``." | |
|
1974 | 2016 | |
|
1975 | 2017 | msgid "" |
|
1976 | 2018 | "Files with the declared format of ``CRLF`` or ``LF`` are always\n" |
|
1977 | "checked out in that format and files declared to be binary (``BIN``)\n" | |
|
1978 | "are left unchanged. Additionally, ``native`` is an alias for the\n" | |
|
1979 | "platform's default line ending: ``LF`` on Unix (including Mac OS X)\n" | |
|
1980 | "and ``CRLF`` on Windows. Note that ``BIN`` (do nothing to line\n" | |
|
1981 | "endings) is Mercurial's default behaviour; it is only needed if you\n" | |
|
1982 | "need to override a later, more general pattern." | |
|
1983 | msgstr "" | |
|
1984 | "Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud i\n" | |
|
1985 | "dette format og filer deklareret som binære (``BIN``) bliver ikke\n" | |
|
1986 | "ændret. Desuden er ``native`` et alias for platforms normale\n" | |
|
1987 | "linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på Windows.\n" | |
|
1988 | "Bemærk at ``BIN`` (gør ingenting ved linieskift) er Mercurials\n" | |
|
1989 | "standardopførsel; det er kun nødvendigt at bruge den hvis du skal\n" | |
|
1990 | "overskrive et senere og mere generelt mønster." | |
|
2019 | "checked out and stored in the repository in that format and files\n" | |
|
2020 | "declared to be binary (``BIN``) are left unchanged. Additionally,\n" | |
|
2021 | "``native`` is an alias for checking out in the platform's default line\n" | |
|
2022 | "ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on\n" | |
|
2023 | "Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's\n" | |
|
2024 | "default behaviour; it is only needed if you need to override a later,\n" | |
|
2025 | "more general pattern." | |
|
2026 | msgstr "" | |
|
2027 | "Filer deklareret som ``CRLF`` eller ``LF`` bliver altid hentet ud og\n" | |
|
2028 | "gemt i depotet i dette format og filer deklareret som binære (``BIN``)\n" | |
|
2029 | "bliver ikke ændret. Desuden er ``native`` et alias for platformens\n" | |
|
2030 | "normale linieskift: ``LF`` på Unix (samt Mac OS X) og ``CRLF`` på\n" | |
|
2031 | "Windows. Bemærk at ``BIN`` (gør ingenting ved linieskift) er\n" | |
|
2032 | "Mercurials standardopførsel; det er kun nødvendigt at bruge den hvis\n" | |
|
2033 | "du skal overskrive et senere og mere generelt mønster." | |
|
1991 | 2034 | |
|
1992 | 2035 | msgid "" |
|
1993 | 2036 | "The optional ``[repository]`` section specifies the line endings to\n" |
@@ -2036,6 +2079,12 b' msgstr ""' | |||
|
2036 | 2079 | " native = LF" |
|
2037 | 2080 | |
|
2038 | 2081 | msgid "" |
|
2082 | ".. note::\n" | |
|
2083 | " The rules will first apply when files are touched in the working\n" | |
|
2084 | " copy, e.g. by updating to null and back to tip to touch all files." | |
|
2085 | msgstr "" | |
|
2086 | ||
|
2087 | msgid "" | |
|
2039 | 2088 | "The extension uses an optional ``[eol]`` section in your hgrc file\n" |
|
2040 | 2089 | "(not the ``.hgeol`` file) for settings that control the overall\n" |
|
2041 | 2090 | "behavior. There are two settings:" |
@@ -2070,6 +2119,13 b' msgstr ""' | |||
|
2070 | 2119 | " antagelse af at de har miksede linieskift med vilje." |
|
2071 | 2120 | |
|
2072 | 2121 | msgid "" |
|
2122 | "The extension provides ``cleverencode:`` and ``cleverdecode:`` filters\n" | |
|
2123 | "like the deprecated win32text extension does. This means that you can\n" | |
|
2124 | "disable win32text and enable eol and your filters will still work. You\n" | |
|
2125 | "only need to these filters until you have prepared a ``.hgeol`` file." | |
|
2126 | msgstr "" | |
|
2127 | ||
|
2128 | msgid "" | |
|
2073 | 2129 | "The ``win32text.forbid*`` hooks provided by the win32text extension\n" |
|
2074 | 2130 | "have been unified into a single hook named ``eol.hook``. The hook will\n" |
|
2075 | 2131 | "lookup the expected line endings from the ``.hgeol`` file, which means\n" |
@@ -3392,6 +3448,10 b' msgstr "ingen rettelser anvendt"' | |||
|
3392 | 3448 | msgid "working directory revision is not qtip" |
|
3393 | 3449 | msgstr "arbejdskatalogets revision er ikke qtip" |
|
3394 | 3450 | |
|
3451 | #, python-format | |
|
3452 | msgid "uncommitted changes in subrepository %s" | |
|
3453 | msgstr "udeponerede ændringer i underdepot %s" | |
|
3454 | ||
|
3395 | 3455 | msgid "local changes found, refresh first" |
|
3396 | 3456 | msgstr "lokale ændringer fundet, genopfrisk først" |
|
3397 | 3457 | |
@@ -3460,6 +3520,16 b' msgstr "alle rettelser er i \xc3\xb8jeblikket anvendt\\n"' | |||
|
3460 | 3520 | msgid "patch series already fully applied\n" |
|
3461 | 3521 | msgstr "serien af rettelser er allerede anvendt fuldt ud\n" |
|
3462 | 3522 | |
|
3523 | msgid "cannot use --exact and --move together" | |
|
3524 | msgstr "kan ikke bruge --exact og --move sammen" | |
|
3525 | ||
|
3526 | msgid "cannot push --exact with applied patches" | |
|
3527 | msgstr "kan push --exact med anvendte rettelser" | |
|
3528 | ||
|
3529 | #, python-format | |
|
3530 | msgid "%s does not have a parent recorded" | |
|
3531 | msgstr "%s har ikke gemt nogen forælder" | |
|
3532 | ||
|
3463 | 3533 | msgid "please specify the patch to move" |
|
3464 | 3534 | msgstr "angiv venligst lappen der skal flyttes" |
|
3465 | 3535 | |
@@ -3508,18 +3578,6 b' msgstr "k\xc3\xb8en af rettelser er nu tom\\n"' | |||
|
3508 | 3578 | msgid "cannot refresh a revision with children" |
|
3509 | 3579 | msgstr "kan ikke genopfriske en revision som har børn" |
|
3510 | 3580 | |
|
3511 | #, python-format | |
|
3512 | msgid "warning: not refreshing %s\n" | |
|
3513 | msgstr "advarsel: genopfrisker ikke %s\n" | |
|
3514 | ||
|
3515 | #, python-format | |
|
3516 | msgid "warning: not adding %s\n" | |
|
3517 | msgstr "advarsel: tilføjer ikke %s\n" | |
|
3518 | ||
|
3519 | #, python-format | |
|
3520 | msgid "warning: not removing %s\n" | |
|
3521 | msgstr "advarsel: fjerner ikke %s\n" | |
|
3522 | ||
|
3523 | 3581 | msgid "" |
|
3524 | 3582 | "refresh interrupted while patch was popped! (revert --all, qpush to " |
|
3525 | 3583 | "recover)\n" |
@@ -4486,6 +4544,9 b' msgstr "hg qpop [-a] [-f] [RETTELSE | IN' | |||
|
4486 | 4544 | msgid "apply on top of local changes" |
|
4487 | 4545 | msgstr "anvend ovenpå lokale ændringer" |
|
4488 | 4546 | |
|
4547 | msgid "apply the target patch to its recorded parent" | |
|
4548 | msgstr "" | |
|
4549 | ||
|
4489 | 4550 | msgid "list patch name in commit text" |
|
4490 | 4551 | msgstr "" |
|
4491 | 4552 | |
@@ -4987,6 +5048,11 b' msgid ""' | |||
|
4987 | 5048 | msgstr "" |
|
4988 | 5049 | |
|
4989 | 5050 | msgid "" |
|
5051 | " In case email sending fails, you will find a backup of your series\n" | |
|
5052 | " introductory message in ``.hg/last-email.txt``." | |
|
5053 | msgstr "" | |
|
5054 | ||
|
5055 | msgid "" | |
|
4990 | 5056 | " hg email -r 3000 # send patch 3000 only\n" |
|
4991 | 5057 | " hg email -r 3000 -r 3001 # send patches 3000 and 3001\n" |
|
4992 | 5058 | " hg email -r 3000:3005 # send patches 3000 through 3005\n" |
@@ -5177,7 +5243,7 b' msgid ""' | |||
|
5177 | 5243 | " [progress]\n" |
|
5178 | 5244 | " delay = 3 # number of seconds (float) before showing the progress bar\n" |
|
5179 | 5245 | " refresh = 0.1 # time in seconds between refreshes of the progress bar\n" |
|
5180 | " format = topic bar number # format of the progress bar\n" | |
|
5246 | " format = topic bar number estimate # format of the progress bar\n" | |
|
5181 | 5247 | " width = <none> # if set, the maximum width of the progress information\n" |
|
5182 | 5248 | " # (that is, min(width, term width) will be used)\n" |
|
5183 | 5249 | " clear-complete = True # clear the progress bar after it's done\n" |
@@ -5187,10 +5253,26 b' msgid ""' | |||
|
5187 | 5253 | msgstr "" |
|
5188 | 5254 | |
|
5189 | 5255 | msgid "" |
|
5190 |
"Valid entries for the format field are topic, bar, number, unit, |
|
|
5191 |
"item. item defaults to the last 20 characters of the |
|
|
5192 |
"can be changed by adding either ``-<num>`` which would |
|
|
5193 |
"num characters, or ``+<num>`` for the first num |
|
|
5256 | "Valid entries for the format field are topic, bar, number, unit,\n" | |
|
5257 | "estimate, and item. item defaults to the last 20 characters of the\n" | |
|
5258 | "item, but this can be changed by adding either ``-<num>`` which would\n" | |
|
5259 | "take the last num characters, or ``+<num>`` for the first num\n" | |
|
5260 | "characters.\n" | |
|
5261 | msgstr "" | |
|
5262 | ||
|
5263 | #. i18n: format XX seconds as "XXs" | |
|
5264 | #, python-format | |
|
5265 | msgid "%02ds" | |
|
5266 | msgstr "" | |
|
5267 | ||
|
5268 | #. i18n: format X minutes and YY seconds as "XmYYs" | |
|
5269 | #, python-format | |
|
5270 | msgid "%dm%02ds" | |
|
5271 | msgstr "" | |
|
5272 | ||
|
5273 | #. i18n: format X hours and YY minutes as "XhYYm" | |
|
5274 | #, python-format | |
|
5275 | msgid "%dh%02dm" | |
|
5194 | 5276 | msgstr "" |
|
5195 | 5277 | |
|
5196 | 5278 | msgid "command to delete untracked files from the working directory" |
@@ -5382,7 +5464,8 b' msgid "changesets"' | |||
|
5382 | 5464 | msgstr "ændringer" |
|
5383 | 5465 | |
|
5384 | 5466 | msgid "unresolved conflicts (see hg resolve, then hg rebase --continue)" |
|
5385 | msgstr "uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" | |
|
5467 | msgstr "" | |
|
5468 | "uløste konflikter (se først hg resolve og dernæst hg rebase --continue)" | |
|
5386 | 5469 | |
|
5387 | 5470 | #, python-format |
|
5388 | 5471 | msgid "no changes, revision %d skipped\n" |
@@ -5578,8 +5661,8 b' msgstr "\'mq\' udvidelsen er ikke indl\xc3\xa6st"' | |||
|
5578 | 5661 | msgid "running non-interactively, use commit instead" |
|
5579 | 5662 | msgstr "kører ikke interaktivt, brug commit i stedet" |
|
5580 | 5663 | |
|
5581 | msgid "cannot partially commit a merge (use hg commit instead)" | |
|
5582 | msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet hg commit)" | |
|
5664 | msgid "cannot partially commit a merge (use \"hg commit\" instead)" | |
|
5665 | msgstr "kan ikke deponere en sammenføjning partielt (brug i stedet \"hg commit\")" | |
|
5583 | 5666 | |
|
5584 | 5667 | msgid "no changes to record\n" |
|
5585 | 5668 | msgstr "ingen ændringer at optage\n" |
@@ -6176,6 +6259,9 b' msgstr "kan ikke give pr\xc3\xa6fix ved arkivering til filer"' | |||
|
6176 | 6259 | msgid "unknown archive type '%s'" |
|
6177 | 6260 | msgstr "ukendt depottype '%s'" |
|
6178 | 6261 | |
|
6262 | msgid "archiving" | |
|
6263 | msgstr "arkiverer" | |
|
6264 | ||
|
6179 | 6265 | msgid "invalid changegroup" |
|
6180 | 6266 | msgstr "ugyldig changegroup" |
|
6181 | 6267 | |
@@ -8502,6 +8588,9 b' msgstr ""' | |||
|
8502 | 8588 | msgid "you can't specify a revision and a date" |
|
8503 | 8589 | msgstr "du kan ikke specificeret en revision og en dato" |
|
8504 | 8590 | |
|
8591 | msgid "uncommitted merge - use \"hg update\", see \"hg help revert\"" | |
|
8592 | msgstr "" | |
|
8593 | ||
|
8505 | 8594 | msgid "no files or directories specified; use --all to revert the whole repo" |
|
8506 | 8595 | msgstr "" |
|
8507 | 8596 | "ingen filer eller mapper specificeret; brug --all for at føre hele repo'et " |
@@ -8813,7 +8902,8 b' msgstr ""' | |||
|
8813 | 8902 | msgid "" |
|
8814 | 8903 | " Tags are used to name particular revisions of the repository and are\n" |
|
8815 | 8904 | " very useful to compare different revisions, to go back to significant\n" |
|
8816 | " earlier versions or to mark branch points as releases, etc." | |
|
8905 | " earlier versions or to mark branch points as releases, etc. Changing\n" | |
|
8906 | " an existing tag is normally disallowed; use -f/--force to override." | |
|
8817 | 8907 | msgstr "" |
|
8818 | 8908 | |
|
8819 | 8909 | msgid "" |
@@ -8823,10 +8913,18 b' msgstr ""' | |||
|
8823 | 8913 | |
|
8824 | 8914 | msgid "" |
|
8825 | 8915 | " To facilitate version control, distribution, and merging of tags,\n" |
|
8826 | " they are stored as a file named \".hgtags\" which is managed\n" | |
|
8827 |
" |
|
|
8828 | " necessary. The file '.hg/localtags' is used for local tags (not\n" | |
|
8829 | " shared among repositories)." | |
|
8916 | " they are stored as a file named \".hgtags\" which is managed similarly\n" | |
|
8917 | " to other project files and can be hand-edited if necessary. This\n" | |
|
8918 | " also means that tagging creates a new commit. The file\n" | |
|
8919 | " \".hg/localtags\" is used for local tags (not shared among\n" | |
|
8920 | " repositories)." | |
|
8921 | msgstr "" | |
|
8922 | ||
|
8923 | msgid "" | |
|
8924 | " Tag commits are usually made at the head of a branch. If the parent\n" | |
|
8925 | " of the working directory is not a branch head, :hg:`tag` aborts; use\n" | |
|
8926 | " -f/--force to force the tag commit to be based on a non-head\n" | |
|
8927 | " changeset." | |
|
8830 | 8928 | msgstr "" |
|
8831 | 8929 | |
|
8832 | 8930 | msgid "" |
@@ -8859,6 +8957,12 b' msgstr "m\xc3\xa6rkaten \'%s\' er ikke en lokal m\xc3\xa6rkat"' | |||
|
8859 | 8957 | msgid "tag '%s' already exists (use -f to force)" |
|
8860 | 8958 | msgstr "mærkaten '%s' eksisterer allerede (brug -f for at gennemtvinge)" |
|
8861 | 8959 | |
|
8960 | msgid "uncommitted merge" | |
|
8961 | msgstr "udeponeret sammenføjning" | |
|
8962 | ||
|
8963 | msgid "not at a branch head (use -f to force)" | |
|
8964 | msgstr "ej ved et grenhoved (brug -f for at gennemtvinge)" | |
|
8965 | ||
|
8862 | 8966 | msgid "list repository tags" |
|
8863 | 8967 | msgstr "vis depotmærkater" |
|
8864 | 8968 | |
@@ -9752,8 +9856,8 b' msgstr "vis forskelle fra revision"' | |||
|
9752 | 9856 | msgid "list the changed files of a revision" |
|
9753 | 9857 | msgstr "vis de ændrede filer i en revision" |
|
9754 | 9858 | |
|
9755 |
msgid "r |
|
|
9756 | msgstr "erstat eksisterende mærkat" | |
|
9859 | msgid "force tag" | |
|
9860 | msgstr "gennemtving markering" | |
|
9757 | 9861 | |
|
9758 | 9862 | msgid "make the tag local" |
|
9759 | 9863 | msgstr "gør mærkaten lokal" |
@@ -10133,6 +10237,10 b' msgid "*** failed to import extension %s' | |||
|
10133 | 10237 | msgstr "*** import af udvidelse %s fejlede: %s\n" |
|
10134 | 10238 | |
|
10135 | 10239 | #, python-format |
|
10240 | msgid "warning: error finding commands in %s\n" | |
|
10241 | msgstr "advarsel: fejl ved søgning efter kommandoer i %s\n" | |
|
10242 | ||
|
10243 | #, python-format | |
|
10136 | 10244 | msgid "couldn't find merge tool %s\n" |
|
10137 | 10245 | msgstr "kunne ikke finde sammenføjningsværktøj %s\n" |
|
10138 | 10246 | |
@@ -11514,6 +11622,13 b' msgid ""' | |||
|
11514 | 11622 | "Regexp pattern matching is anchored at the root of the repository." |
|
11515 | 11623 | msgstr "" |
|
11516 | 11624 | |
|
11625 | msgid "" | |
|
11626 | "To read name patterns from a file, use ``listfile:`` or ``listfile0:``.\n" | |
|
11627 | "The latter expects null delimited patterns while the former expects line\n" | |
|
11628 | "feeds. Each string read from the file is itself treated as a file\n" | |
|
11629 | "pattern." | |
|
11630 | msgstr "" | |
|
11631 | ||
|
11517 | 11632 | msgid "Plain examples::" |
|
11518 | 11633 | msgstr "" |
|
11519 | 11634 | |
@@ -11539,8 +11654,15 b' msgstr ""' | |||
|
11539 | 11654 | msgid "Regexp examples::" |
|
11540 | 11655 | msgstr "" |
|
11541 | 11656 | |
|
11542 | msgid "" | |
|
11543 | " re:.*\\.c$ any name ending in \".c\", anywhere in the repository\n" | |
|
11657 | msgid " re:.*\\.c$ any name ending in \".c\", anywhere in the repository" | |
|
11658 | msgstr "" | |
|
11659 | ||
|
11660 | msgid "File examples::" | |
|
11661 | msgstr "Fillisteeksempler::" | |
|
11662 | ||
|
11663 | msgid "" | |
|
11664 | " listfile:list.txt read list from list.txt with one file pattern per line\n" | |
|
11665 | " listfile0:list.txt read list from list.txt with null byte delimiters\n" | |
|
11544 | 11666 | msgstr "" |
|
11545 | 11667 | |
|
11546 | 11668 | msgid "Mercurial supports several ways to specify individual revisions." |
@@ -11959,8 +12081,9 b' msgid ":author: String. The unmodified a' | |||
|
11959 | 12081 | msgstr "" |
|
11960 | 12082 | |
|
11961 | 12083 | msgid "" |
|
11962 |
":branches: |
|
|
11963 |
" committed. Will be empty if the branch name was |
|
|
12084 | ":branches: List of strings. The name of the branch on which the\n" | |
|
12085 | " changeset was committed. Will be empty if the branch name was\n" | |
|
12086 | " default." | |
|
11964 | 12087 | msgstr "" |
|
11965 | 12088 | |
|
11966 | 12089 | msgid ":children: List of strings. The children of the changeset." |
@@ -12344,6 +12467,10 b' msgid "(branch merge, don\'t forget to co' | |||
|
12344 | 12467 | msgstr "(grensammenføjning, glem ikke at deponere)\n" |
|
12345 | 12468 | |
|
12346 | 12469 | #, python-format |
|
12470 | msgid "config file %s not found!" | |
|
12471 | msgstr "konfigurationsfilen %s blev ikke fundet!" | |
|
12472 | ||
|
12473 | #, python-format | |
|
12347 | 12474 | msgid "error reading %s/.hg/hgrc: %s\n" |
|
12348 | 12475 | msgstr "fejl ved læsning af %s/.hg/hgrc: %s\n" |
|
12349 | 12476 | |
@@ -12490,6 +12617,10 b' msgid ".hg/sharedpath points to nonexist' | |||
|
12490 | 12617 | msgstr ".hg/sharedpath peger på et ikke-eksisterende katalog %s" |
|
12491 | 12618 | |
|
12492 | 12619 | #, python-format |
|
12620 | msgid "warning: ignoring unknown working parent %s!\n" | |
|
12621 | msgstr "advarsel: ignorerer ukendt forælder %s til arbejdsbiblioteket!\n" | |
|
12622 | ||
|
12623 | #, python-format | |
|
12493 | 12624 | msgid "%r cannot be used in a tag name" |
|
12494 | 12625 | msgstr "%r kan ikke bruges i et mærkatnavnet" |
|
12495 | 12626 | |
@@ -12596,34 +12727,28 b' msgstr ""' | |||
|
12596 | 12727 | msgid "%d changesets found\n" |
|
12597 | 12728 | msgstr "fandt %d ændringer\n" |
|
12598 | 12729 | |
|
12599 |
msgid "bundling |
|
|
12600 |
msgstr "bundter |
|
|
12601 | ||
|
12602 |
msgid " |
|
|
12603 | msgstr "" | |
|
12604 | ||
|
12605 | msgid "bundling manifests" | |
|
12606 | msgstr "bundter manifester" | |
|
12730 | msgid "bundling" | |
|
12731 | msgstr "bundter" | |
|
12732 | ||
|
12733 | msgid "manifests" | |
|
12734 | msgstr "manifester" | |
|
12607 | 12735 | |
|
12608 | 12736 | #, python-format |
|
12609 | 12737 | msgid "empty or missing revlog for %s" |
|
12610 | 12738 | msgstr "tom eller manglende revlog for %s" |
|
12611 | 12739 | |
|
12612 | msgid "bundling files" | |
|
12613 | msgstr "bundter filer" | |
|
12614 | ||
|
12615 | 12740 | msgid "adding changesets\n" |
|
12616 | 12741 | msgstr "tilføjer ændringer\n" |
|
12617 | 12742 | |
|
12743 | msgid "chunks" | |
|
12744 | msgstr "" | |
|
12745 | ||
|
12618 | 12746 | msgid "received changelog group is empty" |
|
12619 | 12747 | msgstr "modtagen changelog-gruppe er tom" |
|
12620 | 12748 | |
|
12621 | 12749 | msgid "adding manifests\n" |
|
12622 | 12750 | msgstr "tilføjer manifester\n" |
|
12623 | 12751 | |
|
12624 | msgid "manifests" | |
|
12625 | msgstr "manifester" | |
|
12626 | ||
|
12627 | 12752 | msgid "adding file changes\n" |
|
12628 | 12753 | msgstr "tilføjer filændringer\n" |
|
12629 | 12754 | |
@@ -12665,6 +12790,12 b' msgstr "%d filer at overf\xc3\xb8re, %s data\\n"' | |||
|
12665 | 12790 | msgid "transferred %s in %.1f seconds (%s/sec)\n" |
|
12666 | 12791 | msgstr "overførte %s i %.1f sekunder (%s/sek)\n" |
|
12667 | 12792 | |
|
12793 | msgid "can't use TLS: Python SSL support not installed" | |
|
12794 | msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" | |
|
12795 | ||
|
12796 | msgid "(using smtps)\n" | |
|
12797 | msgstr "(bruger smtps)\n" | |
|
12798 | ||
|
12668 | 12799 | msgid "smtp.host not configured - cannot send mail" |
|
12669 | 12800 | msgstr "" |
|
12670 | 12801 | |
@@ -12672,11 +12803,8 b' msgstr ""' | |||
|
12672 | 12803 | msgid "sending mail: smtp host %s, port %s\n" |
|
12673 | 12804 | msgstr "sender mail: smtp host %s, port %s\n" |
|
12674 | 12805 | |
|
12675 | msgid "can't use TLS: Python SSL support not installed" | |
|
12676 | msgstr "kan ikke bruge TLS: Python SSL support er ikke installeret" | |
|
12677 | ||
|
12678 | msgid "(using tls)\n" | |
|
12679 | msgstr "(bruger tsl)\n" | |
|
12806 | msgid "(using starttls)\n" | |
|
12807 | msgstr "(bruger starttls)\n" | |
|
12680 | 12808 | |
|
12681 | 12809 | #, python-format |
|
12682 | 12810 | msgid "(authenticating to mail server as %s)\n" |
@@ -12717,6 +12845,10 b' msgid "invalid pattern"' | |||
|
12717 | 12845 | msgstr "ugyldig mønster" |
|
12718 | 12846 | |
|
12719 | 12847 | #, python-format |
|
12848 | msgid "unable to read file list (%s)" | |
|
12849 | msgstr "kan ikke læse filliste (%s)" | |
|
12850 | ||
|
12851 | #, python-format | |
|
12720 | 12852 | msgid "diff context lines count must be an integer, not %r" |
|
12721 | 12853 | msgstr "" |
|
12722 | 12854 | |
@@ -13008,10 +13140,10 b' msgstr "manglende parameter"' | |||
|
13008 | 13140 | |
|
13009 | 13141 | #, python-format |
|
13010 | 13142 | msgid "can't use %s here" |
|
13011 | msgstr "" | |
|
13143 | msgstr "kan ikke bruge %s her" | |
|
13012 | 13144 | |
|
13013 | 13145 | msgid "can't use a list in this context" |
|
13014 | msgstr "" | |
|
13146 | msgstr "en liste kan ikke bruges i denne konteks" | |
|
13015 | 13147 | |
|
13016 | 13148 | #, python-format |
|
13017 | 13149 | msgid "not a function: %s" |
@@ -13028,7 +13160,7 b' msgstr "id kr\xc3\xa6ver et argument"' | |||
|
13028 | 13160 | |
|
13029 | 13161 | #. i18n: "id" is a keyword |
|
13030 | 13162 | msgid "id requires a string" |
|
13031 | msgstr "" | |
|
13163 | msgstr "id kræver en streng" | |
|
13032 | 13164 | |
|
13033 | 13165 | msgid "" |
|
13034 | 13166 | "``rev(number)``\n" |
@@ -13037,29 +13169,30 b' msgstr ""' | |||
|
13037 | 13169 | |
|
13038 | 13170 | #. i18n: "rev" is a keyword |
|
13039 | 13171 | msgid "rev requires one argument" |
|
13040 | msgstr "" | |
|
13172 | msgstr "rev kræver et argument" | |
|
13041 | 13173 | |
|
13042 | 13174 | #. i18n: "rev" is a keyword |
|
13043 | 13175 | msgid "rev requires a number" |
|
13044 | msgstr "" | |
|
13176 | msgstr "rev kræver et tal" | |
|
13045 | 13177 | |
|
13046 | 13178 | #. i18n: "rev" is a keyword |
|
13047 | 13179 | msgid "rev expects a number" |
|
13048 | 13180 | msgstr "rev forventer et revisionsnummer" |
|
13049 | 13181 | |
|
13050 | 13182 | msgid "" |
|
13051 | "``p1(set)``\n" | |
|
13052 | " First parent of changesets in set." | |
|
13053 | msgstr "" | |
|
13054 | ||
|
13055 | msgid "" | |
|
13056 | "``p2(set)``\n" | |
|
13057 | " Second parent of changesets in set." | |
|
13058 | msgstr "" | |
|
13059 | ||
|
13060 | msgid "" | |
|
13061 | "``parents(set)``\n" | |
|
13062 |
" The set of all parents for all changesets in set |
|
|
13183 | "``p1([set])``\n" | |
|
13184 | " First parent of changesets in set, or the working directory." | |
|
13185 | msgstr "" | |
|
13186 | ||
|
13187 | msgid "" | |
|
13188 | "``p2([set])``\n" | |
|
13189 | " Second parent of changesets in set, or the working directory." | |
|
13190 | msgstr "" | |
|
13191 | ||
|
13192 | msgid "" | |
|
13193 | "``parents([set])``\n" | |
|
13194 | " The set of all parents for all changesets in set, or the working " | |
|
13195 | "directory." | |
|
13063 | 13196 | msgstr "" |
|
13064 | 13197 | |
|
13065 | 13198 | msgid "" |
@@ -13322,20 +13455,23 b' msgid ""' | |||
|
13322 | 13455 | "``tag(name)``\n" |
|
13323 | 13456 | " The specified tag by name, or all tagged revisions if no name is given." |
|
13324 | 13457 | msgstr "" |
|
13458 | "``tag(navn)``\n" | |
|
13459 | " Den navngivne mærkat eller alle revisioner med en mærkat hvis der\n" | |
|
13460 | " ikke angives noget navn." | |
|
13325 | 13461 | |
|
13326 | 13462 | #. i18n: "tag" is a keyword |
|
13327 | 13463 | msgid "tag takes one or no arguments" |
|
13328 | msgstr "" | |
|
13464 | msgstr "tag tager et eller to argumenter" | |
|
13329 | 13465 | |
|
13330 | 13466 | #. i18n: "tag" is a keyword |
|
13331 | 13467 | msgid "the argument to tag must be a string" |
|
13332 | msgstr "" | |
|
13468 | msgstr "argumentet til tag skal være en streng" | |
|
13333 | 13469 | |
|
13334 | 13470 | msgid "can't negate that" |
|
13335 | 13471 | msgstr "" |
|
13336 | 13472 | |
|
13337 | 13473 | msgid "not a symbol" |
|
13338 | msgstr "" | |
|
13474 | msgstr "ikke et symbol" | |
|
13339 | 13475 | |
|
13340 | 13476 | msgid "empty query" |
|
13341 | 13477 | msgstr "tomt forespørgsel" |
@@ -13435,6 +13571,10 b' msgid "unknown subrepo type %s"' | |||
|
13435 | 13571 | msgstr "ukendt underdepottype %s" |
|
13436 | 13572 | |
|
13437 | 13573 | #, python-format |
|
13574 | msgid "archiving (%s)" | |
|
13575 | msgstr "arkiverer (%s)" | |
|
13576 | ||
|
13577 | #, python-format | |
|
13438 | 13578 | msgid "warning: error \"%s\" in subrepository \"%s\"\n" |
|
13439 | 13579 | msgstr "advarsel: fejl \"%s\" i underdepot \"%s\"\n" |
|
13440 | 13580 | |
@@ -13458,6 +13598,39 b' msgid "not removing repo %s because it h' | |||
|
13458 | 13598 | msgstr "fjerner ikke depotet %s fordi det er ændret.\n" |
|
13459 | 13599 | |
|
13460 | 13600 | #, python-format |
|
13601 | msgid "cloning subrepo %s\n" | |
|
13602 | msgstr "kloner underdepot %s\n" | |
|
13603 | ||
|
13604 | #, python-format | |
|
13605 | msgid "pulling subrepo %s\n" | |
|
13606 | msgstr "hiver underdepot %s\n" | |
|
13607 | ||
|
13608 | #, python-format | |
|
13609 | msgid "revision %s does not exist in subrepo %s\n" | |
|
13610 | msgstr "revision %s findes ikke i underdepot %s\n" | |
|
13611 | ||
|
13612 | #, python-format | |
|
13613 | msgid "checking out detached HEAD in subrepo %s\n" | |
|
13614 | msgstr "" | |
|
13615 | ||
|
13616 | msgid "check out a git branch if you intend to make changes\n" | |
|
13617 | msgstr "" | |
|
13618 | ||
|
13619 | #, python-format | |
|
13620 | msgid "unrelated git branch checked out in subrepo %s\n" | |
|
13621 | msgstr "" | |
|
13622 | ||
|
13623 | #, python-format | |
|
13624 | msgid "pushing branch %s of subrepo %s\n" | |
|
13625 | msgstr "skubber gren %s af underdepot %s\n" | |
|
13626 | ||
|
13627 | #, python-format | |
|
13628 | msgid "" | |
|
13629 | "no branch checked out in subrepo %s\n" | |
|
13630 | "cannot push revision %s" | |
|
13631 | msgstr "" | |
|
13632 | ||
|
13633 | #, python-format | |
|
13461 | 13634 | msgid "%s, line %s: %s\n" |
|
13462 | 13635 | msgstr "%s, linie %s: %s\n" |
|
13463 | 13636 | |
@@ -13471,22 +13644,36 b' msgstr "knude \'%s\' er ikke korrekt forme' | |||
|
13471 | 13644 | msgid ".hg/tags.cache is corrupt, rebuilding it\n" |
|
13472 | 13645 | msgstr "" |
|
13473 | 13646 | |
|
13647 | #, python-format | |
|
13648 | msgid "unknown method '%s'" | |
|
13649 | msgstr "ukendt metode '%s'" | |
|
13650 | ||
|
13651 | msgid "expected a symbol" | |
|
13652 | msgstr "forventede et symbol" | |
|
13653 | ||
|
13654 | #, python-format | |
|
13655 | msgid "unknown function '%s'" | |
|
13656 | msgstr "ukendt funktion '%s'" | |
|
13657 | ||
|
13658 | msgid "expected template specifier" | |
|
13659 | msgstr "" | |
|
13660 | ||
|
13661 | #, python-format | |
|
13662 | msgid "filter %s expects one argument" | |
|
13663 | msgstr "filter %s kræver et argument" | |
|
13664 | ||
|
13474 | 13665 | msgid "unmatched quotes" |
|
13475 | 13666 | msgstr "" |
|
13476 | 13667 | |
|
13477 | 13668 | #, python-format |
|
13478 | msgid "error expanding '%s%%%s'" | |
|
13479 | msgstr "fejl ved ekspansion af '%s%%%s'" | |
|
13480 | ||
|
13481 | #, python-format | |
|
13482 | msgid "unknown filter '%s'" | |
|
13483 | msgstr "ukendt filter '%s'" | |
|
13484 | ||
|
13485 | #, python-format | |
|
13486 | 13669 | msgid "style not found: %s" |
|
13487 | 13670 | msgstr "" |
|
13488 | 13671 | |
|
13489 | 13672 | #, python-format |
|
13673 | msgid "\"%s\" not in template map" | |
|
13674 | msgstr "\"%s\" er ikke i skabelon-fil" | |
|
13675 | ||
|
13676 | #, python-format | |
|
13490 | 13677 | msgid "template file %s: %s" |
|
13491 | 13678 | msgstr "skabelon-fil %s: %s" |
|
13492 | 13679 | |
@@ -13577,6 +13764,9 b' msgstr "http godkendelse: bruger %s, kod' | |||
|
13577 | 13764 | msgid "ignoring invalid [auth] key '%s'\n" |
|
13578 | 13765 | msgstr "ignorerer ugyldig [auth] nøgle '%s'\n" |
|
13579 | 13766 | |
|
13767 | msgid "kb" | |
|
13768 | msgstr "" | |
|
13769 | ||
|
13580 | 13770 | msgid "certificate checking requires Python 2.6" |
|
13581 | 13771 | msgstr "" |
|
13582 | 13772 | |
@@ -13588,10 +13778,15 b' msgid "certificate is for %s"' | |||
|
13588 | 13778 | msgstr "certifikatet er for %s" |
|
13589 | 13779 | |
|
13590 | 13780 | msgid "no commonName found in certificate" |
|
13591 | msgstr "" | |
|
13781 | msgstr "fandt ikke noget commonName i certifikatet" | |
|
13592 | 13782 | |
|
13593 | 13783 | #, python-format |
|
13594 | 13784 | msgid "%s certificate error: %s" |
|
13785 | msgstr "%s certifikatfejl: %s" | |
|
13786 | ||
|
13787 | #, python-format | |
|
13788 | msgid "" | |
|
13789 | "warning: %s certificate not verified (check web.cacerts config setting)\n" | |
|
13595 | 13790 | msgstr "" |
|
13596 | 13791 | |
|
13597 | 13792 | #, python-format |
@@ -13599,6 +13794,10 b' msgid "command \'%s\' failed: %s"' | |||
|
13599 | 13794 | msgstr "kommandoen '%s' fejlede: %s" |
|
13600 | 13795 | |
|
13601 | 13796 | #, python-format |
|
13797 | msgid "path ends in directory separator: %s" | |
|
13798 | msgstr "" | |
|
13799 | ||
|
13800 | #, python-format | |
|
13602 | 13801 | msgid "path contains illegal component: %s" |
|
13603 | 13802 | msgstr "stien indeholder ugyldig komponent: %s" |
|
13604 | 13803 | |
@@ -13686,7 +13885,7 b' msgstr "%.0f byte"' | |||
|
13686 | 13885 | |
|
13687 | 13886 | #, python-format |
|
13688 | 13887 | msgid "no port number associated with service '%s'" |
|
13689 | msgstr "" | |
|
13888 | msgstr "der er ikke knyttet noget portnummer til servicen '%s'" | |
|
13690 | 13889 | |
|
13691 | 13890 | msgid "cannot verify bundle or remote repos" |
|
13692 | 13891 | msgstr "kan ikke verificere bundt eller fjerndepoter" |
@@ -13743,7 +13942,7 b' msgid "duplicate revision %d (%d)"' | |||
|
13743 | 13942 | msgstr "duplikeret revision %d (%d)" |
|
13744 | 13943 | |
|
13745 | 13944 | msgid "abandoned transaction found - run hg recover\n" |
|
13746 | msgstr "" | |
|
13945 | msgstr "fandt efterladt transaktion - kør hg recover\n" | |
|
13747 | 13946 | |
|
13748 | 13947 | #, python-format |
|
13749 | 13948 | msgid "repository uses revlog format %d\n" |
@@ -13777,7 +13976,7 b' msgid "crosschecking files in changesets' | |||
|
13777 | 13976 | msgstr "krydstjekker filer i ændringer og manifester\n" |
|
13778 | 13977 | |
|
13779 | 13978 | msgid "crosschecking" |
|
13780 | msgstr "" | |
|
13979 | msgstr "krydstjekker" | |
|
13781 | 13980 | |
|
13782 | 13981 | #, python-format |
|
13783 | 13982 | msgid "changeset refers to unknown manifest %s" |
@@ -13805,7 +14004,7 b' msgstr "manglende revlog!"' | |||
|
13805 | 14004 | |
|
13806 | 14005 | #, python-format |
|
13807 | 14006 | msgid "%s not in manifests" |
|
13808 | msgstr "" | |
|
14007 | msgstr "%s findes ikke i manifestet" | |
|
13809 | 14008 | |
|
13810 | 14009 | #, python-format |
|
13811 | 14010 | msgid "unpacked size is %s, %s expected" |
@@ -105,7 +105,7 b' def pofile(fpath, **kwargs):' | |||
|
105 | 105 | ... finally: |
|
106 | 106 | ... os.unlink(tmpf) |
|
107 | 107 | """ |
|
108 |
if kwargs.get('autodetect_encoding', True) |
|
|
108 | if kwargs.get('autodetect_encoding', True): | |
|
109 | 109 | enc = detect_encoding(fpath) |
|
110 | 110 | else: |
|
111 | 111 | enc = kwargs.get('encoding', default_encoding) |
@@ -159,7 +159,7 b' def mofile(fpath, **kwargs):' | |||
|
159 | 159 | ... finally: |
|
160 | 160 | ... os.unlink(tmpf) |
|
161 | 161 | """ |
|
162 |
if kwargs.get('autodetect_encoding', True) |
|
|
162 | if kwargs.get('autodetect_encoding', True): | |
|
163 | 163 | enc = detect_encoding(fpath, True) |
|
164 | 164 | else: |
|
165 | 165 | enc = kwargs.get('encoding', default_encoding) |
@@ -8,7 +8,7 b'' | |||
|
8 | 8 | from i18n import _ |
|
9 | 9 | from node import hex |
|
10 | 10 | import cmdutil |
|
11 | import util | |
|
11 | import util, encoding | |
|
12 | 12 | import cStringIO, os, stat, tarfile, time, zipfile |
|
13 | 13 | import zlib, gzip |
|
14 | 14 | |
@@ -84,6 +84,7 b' class tarit(object):' | |||
|
84 | 84 | |
|
85 | 85 | def __init__(self, dest, mtime, kind=''): |
|
86 | 86 | self.mtime = mtime |
|
87 | self.fileobj = None | |
|
87 | 88 | |
|
88 | 89 | def taropen(name, mode, fileobj=None): |
|
89 | 90 | if kind == 'gz': |
@@ -93,8 +94,10 b' class tarit(object):' | |||
|
93 | 94 | gzfileobj = self.GzipFileWithTime(name, mode + 'b', |
|
94 | 95 | zlib.Z_BEST_COMPRESSION, |
|
95 | 96 | fileobj, timestamp=mtime) |
|
97 | self.fileobj = gzfileobj | |
|
96 | 98 | return tarfile.TarFile.taropen(name, mode, gzfileobj) |
|
97 | 99 | else: |
|
100 | self.fileobj = fileobj | |
|
98 | 101 | return tarfile.open(name, mode + kind, fileobj) |
|
99 | 102 | |
|
100 | 103 | if isinstance(dest, str): |
@@ -120,6 +123,8 b' class tarit(object):' | |||
|
120 | 123 | |
|
121 | 124 | def done(self): |
|
122 | 125 | self.z.close() |
|
126 | if self.fileobj: | |
|
127 | self.fileobj.close() | |
|
123 | 128 | |
|
124 | 129 | class tellable(object): |
|
125 | 130 | '''provide tell method for zipfile.ZipFile when writing to http |
@@ -245,7 +250,7 b' def archive(repo, dest, node, kind, deco' | |||
|
245 | 250 | if repo.ui.configbool("ui", "archivemeta", True): |
|
246 | 251 | def metadata(): |
|
247 | 252 | base = 'repo: %s\nnode: %s\nbranch: %s\n' % ( |
|
248 | repo[0].hex(), hex(node), ctx.branch()) | |
|
253 | repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch())) | |
|
249 | 254 | |
|
250 | 255 | tags = ''.join('tag: %s\n' % t for t in ctx.tags() |
|
251 | 256 | if repo.tagtype(t) == 'global') |
@@ -262,13 +267,18 b' def archive(repo, dest, node, kind, deco' | |||
|
262 | 267 | |
|
263 | 268 | write('.hg_archival.txt', 0644, False, metadata) |
|
264 | 269 | |
|
265 | for f in ctx: | |
|
270 | total = len(ctx.manifest()) | |
|
271 | repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total) | |
|
272 | for i, f in enumerate(ctx): | |
|
266 | 273 | ff = ctx.flags(f) |
|
267 | 274 | write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data) |
|
275 | repo.ui.progress(_('archiving'), i + 1, item=f, | |
|
276 | unit=_('files'), total=total) | |
|
277 | repo.ui.progress(_('archiving'), None) | |
|
268 | 278 | |
|
269 | 279 | if subrepos: |
|
270 | 280 | for subpath in ctx.substate: |
|
271 | 281 | sub = ctx.sub(subpath) |
|
272 | sub.archive(archiver, prefix) | |
|
282 | sub.archive(repo.ui, archiver, prefix) | |
|
273 | 283 | |
|
274 | 284 | archiver.done() |
@@ -57,12 +57,10 b' struct pos {' | |||
|
57 | 57 | int pos, len; |
|
58 | 58 | }; |
|
59 | 59 | |
|
60 | struct hunk; | |
|
60 | 61 | struct hunk { |
|
61 | 62 | int a1, a2, b1, b2; |
|
62 | }; | |
|
63 | ||
|
64 | struct hunklist { | |
|
65 | struct hunk *base, *head; | |
|
63 | struct hunk *next; | |
|
66 | 64 | }; |
|
67 | 65 | |
|
68 | 66 | int splitlines(const char *a, int len, struct line **lr) |
@@ -223,8 +221,8 b' static int longest_match(struct line *a,' | |||
|
223 | 221 | return mk + mb; |
|
224 | 222 | } |
|
225 | 223 | |
|
226 |
static |
|
|
227 |
int a1, int a2, int b1, int b2, struct hunk |
|
|
224 | static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos, | |
|
225 | int a1, int a2, int b1, int b2, struct hunk *l) | |
|
228 | 226 | { |
|
229 | 227 | int i, j, k; |
|
230 | 228 | |
@@ -232,51 +230,66 b' static void recurse(struct line *a, stru' | |||
|
232 | 230 | /* find the longest match in this chunk */ |
|
233 | 231 | k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); |
|
234 | 232 | if (!k) |
|
235 | return; | |
|
233 | return l; | |
|
236 | 234 | |
|
237 | 235 | /* and recurse on the remaining chunks on either side */ |
|
238 | recurse(a, b, pos, a1, i, b1, j, l); | |
|
239 | l->head->a1 = i; | |
|
240 | l->head->a2 = i + k; | |
|
241 | l->head->b1 = j; | |
|
242 | l->head->b2 = j + k; | |
|
243 | l->head++; | |
|
244 | /* tail-recursion didn't happen, so doing equivalent iteration */ | |
|
236 | l = recurse(a, b, pos, a1, i, b1, j, l); | |
|
237 | if (!l) | |
|
238 | return NULL; | |
|
239 | ||
|
240 | l->next = (struct hunk *)malloc(sizeof(struct hunk)); | |
|
241 | if (!l->next) | |
|
242 | return NULL; | |
|
243 | ||
|
244 | l = l->next; | |
|
245 | l->a1 = i; | |
|
246 | l->a2 = i + k; | |
|
247 | l->b1 = j; | |
|
248 | l->b2 = j + k; | |
|
249 | l->next = NULL; | |
|
250 | ||
|
251 | /* tail-recursion didn't happen, so do equivalent iteration */ | |
|
245 | 252 | a1 = i + k; |
|
246 | 253 | b1 = j + k; |
|
247 | 254 | } |
|
248 | 255 | } |
|
249 | 256 | |
|
250 |
static |
|
|
257 | static int diff(struct line *a, int an, struct line *b, int bn, | |
|
258 | struct hunk *base) | |
|
251 | 259 | { |
|
252 | struct hunklist l; | |
|
253 | 260 | struct hunk *curr; |
|
254 | 261 | struct pos *pos; |
|
255 | int t; | |
|
262 | int t, count = 0; | |
|
256 | 263 | |
|
257 | 264 | /* allocate and fill arrays */ |
|
258 | 265 | t = equatelines(a, an, b, bn); |
|
259 | 266 | pos = (struct pos *)calloc(bn ? bn : 1, sizeof(struct pos)); |
|
260 | /* we can't have more matches than lines in the shorter file */ | |
|
261 | l.head = l.base = (struct hunk *)malloc(sizeof(struct hunk) * | |
|
262 | ((an<bn ? an:bn) + 1)); | |
|
267 | ||
|
268 | if (pos && t) { | |
|
269 | /* generate the matching block list */ | |
|
270 | ||
|
271 | curr = recurse(a, b, pos, 0, an, 0, bn, base); | |
|
272 | if (!curr) | |
|
273 | return -1; | |
|
263 | 274 | |
|
264 | if (pos && l.base && t) { | |
|
265 | /* generate the matching block list */ | |
|
266 | recurse(a, b, pos, 0, an, 0, bn, &l); | |
|
267 | l.head->a1 = l.head->a2 = an; | |
|
268 | l.head->b1 = l.head->b2 = bn; | |
|
269 | l.head++; | |
|
275 | /* sentinel end hunk */ | |
|
276 | curr->next = (struct hunk *)malloc(sizeof(struct hunk)); | |
|
277 | if (!curr->next) | |
|
278 | return -1; | |
|
279 | curr = curr->next; | |
|
280 | curr->a1 = curr->a2 = an; | |
|
281 | curr->b1 = curr->b2 = bn; | |
|
282 | curr->next = NULL; | |
|
270 | 283 | } |
|
271 | 284 | |
|
272 | 285 | free(pos); |
|
273 | 286 | |
|
274 | 287 | /* normalize the hunk list, try to push each hunk towards the end */ |
|
275 |
for (curr = |
|
|
276 |
struct hunk *next = curr |
|
|
288 | for (curr = base->next; curr; curr = curr->next) { | |
|
289 | struct hunk *next = curr->next; | |
|
277 | 290 | int shift = 0; |
|
278 | 291 | |
|
279 |
if (next |
|
|
292 | if (!next) | |
|
280 | 293 | break; |
|
281 | 294 | |
|
282 | 295 | if (curr->a2 == next->a1) |
@@ -297,16 +310,26 b' static struct hunklist diff(struct line ' | |||
|
297 | 310 | next->a1 += shift; |
|
298 | 311 | } |
|
299 | 312 | |
|
300 | return l; | |
|
313 | for (curr = base->next; curr; curr = curr->next) | |
|
314 | count++; | |
|
315 | return count; | |
|
316 | } | |
|
317 | ||
|
318 | static void freehunks(struct hunk *l) | |
|
319 | { | |
|
320 | struct hunk *n; | |
|
321 | for (; l; l = n) { | |
|
322 | n = l->next; | |
|
323 | free(l); | |
|
324 | } | |
|
301 | 325 | } |
|
302 | 326 | |
|
303 | 327 | static PyObject *blocks(PyObject *self, PyObject *args) |
|
304 | 328 | { |
|
305 | 329 | PyObject *sa, *sb, *rl = NULL, *m; |
|
306 | 330 | struct line *a, *b; |
|
307 |
struct hunk |
|
|
308 | struct hunk *h; | |
|
309 | int an, bn, pos = 0; | |
|
331 | struct hunk l, *h; | |
|
332 | int an, bn, count, pos = 0; | |
|
310 | 333 | |
|
311 | 334 | if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) |
|
312 | 335 | return NULL; |
@@ -317,12 +340,16 b' static PyObject *blocks(PyObject *self, ' | |||
|
317 | 340 | if (!a || !b) |
|
318 | 341 | goto nomem; |
|
319 | 342 | |
|
320 | l = diff(a, an, b, bn); | |
|
321 | rl = PyList_New(l.head - l.base); | |
|
322 | if (!l.head || !rl) | |
|
343 | l.next = NULL; | |
|
344 | count = diff(a, an, b, bn, &l); | |
|
345 | if (count < 0) | |
|
323 | 346 | goto nomem; |
|
324 | 347 | |
|
325 | for (h = l.base; h != l.head; h++) { | |
|
348 | rl = PyList_New(count); | |
|
349 | if (!rl) | |
|
350 | goto nomem; | |
|
351 | ||
|
352 | for (h = l.next; h; h = h->next) { | |
|
326 | 353 | m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); |
|
327 | 354 | PyList_SetItem(rl, pos, m); |
|
328 | 355 | pos++; |
@@ -331,7 +358,7 b' static PyObject *blocks(PyObject *self, ' | |||
|
331 | 358 | nomem: |
|
332 | 359 | free(a); |
|
333 | 360 | free(b); |
|
334 |
free(l. |
|
|
361 | freehunks(l.next); | |
|
335 | 362 | return rl ? rl : PyErr_NoMemory(); |
|
336 | 363 | } |
|
337 | 364 | |
@@ -340,10 +367,9 b' static PyObject *bdiff(PyObject *self, P' | |||
|
340 | 367 | char *sa, *sb; |
|
341 | 368 | PyObject *result = NULL; |
|
342 | 369 | struct line *al, *bl; |
|
343 |
struct hunk |
|
|
344 | struct hunk *h; | |
|
370 | struct hunk l, *h; | |
|
345 | 371 | char encode[12], *rb; |
|
346 | int an, bn, len = 0, la, lb; | |
|
372 | int an, bn, len = 0, la, lb, count; | |
|
347 | 373 | |
|
348 | 374 | if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb)) |
|
349 | 375 | return NULL; |
@@ -353,13 +379,14 b' static PyObject *bdiff(PyObject *self, P' | |||
|
353 | 379 | if (!al || !bl) |
|
354 | 380 | goto nomem; |
|
355 | 381 | |
|
356 | l = diff(al, an, bl, bn); | |
|
357 | if (!l.head) | |
|
382 | l.next = NULL; | |
|
383 | count = diff(al, an, bl, bn, &l); | |
|
384 | if (count < 0) | |
|
358 | 385 | goto nomem; |
|
359 | 386 | |
|
360 | 387 | /* calculate length of output */ |
|
361 | 388 | la = lb = 0; |
|
362 |
for (h = l. |
|
|
389 | for (h = l.next; h; h = h->next) { | |
|
363 | 390 | if (h->a1 != la || h->b1 != lb) |
|
364 | 391 | len += 12 + bl[h->b1].l - bl[lb].l; |
|
365 | 392 | la = h->a2; |
@@ -375,7 +402,7 b' static PyObject *bdiff(PyObject *self, P' | |||
|
375 | 402 | rb = PyBytes_AsString(result); |
|
376 | 403 | la = lb = 0; |
|
377 | 404 | |
|
378 |
for (h = l. |
|
|
405 | for (h = l.next; h; h = h->next) { | |
|
379 | 406 | if (h->a1 != la || h->b1 != lb) { |
|
380 | 407 | len = bl[h->b1].l - bl[lb].l; |
|
381 | 408 | *(uint32_t *)(encode) = htonl(al[la].l - al->l); |
@@ -392,7 +419,7 b' static PyObject *bdiff(PyObject *self, P' | |||
|
392 | 419 | nomem: |
|
393 | 420 | free(al); |
|
394 | 421 | free(bl); |
|
395 |
free(l. |
|
|
422 | freehunks(l.next); | |
|
396 | 423 | return result ? result : PyErr_NoMemory(); |
|
397 | 424 | } |
|
398 | 425 |
@@ -174,7 +174,7 b' class bundlerepository(localrepo.localre' | |||
|
174 | 174 | self._url = 'bundle:' + bundlename |
|
175 | 175 | |
|
176 | 176 | self.tempfile = None |
|
177 |
f = |
|
|
177 | f = util.posixfile(bundlename, "rb") | |
|
178 | 178 | self.bundle = changegroup.readbundle(f, bundlename) |
|
179 | 179 | if self.bundle.compressed(): |
|
180 | 180 | fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", |
@@ -192,7 +192,7 b' class bundlerepository(localrepo.localre' | |||
|
192 | 192 | finally: |
|
193 | 193 | fptemp.close() |
|
194 | 194 | |
|
195 |
f = |
|
|
195 | f = util.posixfile(self.tempfile, "rb") | |
|
196 | 196 | self.bundle = changegroup.readbundle(f, bundlename) |
|
197 | 197 | |
|
198 | 198 | # dict with the mapping 'filename' -> position in the bundle |
@@ -251,11 +251,6 b' class bundlerepository(localrepo.localre' | |||
|
251 | 251 | self.bundle.close() |
|
252 | 252 | if self.tempfile is not None: |
|
253 | 253 | os.unlink(self.tempfile) |
|
254 | ||
|
255 | def __del__(self): | |
|
256 | del self.bundle | |
|
257 | if self.tempfile is not None: | |
|
258 | os.unlink(self.tempfile) | |
|
259 | 254 | if self._tempparent: |
|
260 | 255 | shutil.rmtree(self._tempparent, True) |
|
261 | 256 |
@@ -147,6 +147,11 b' def revrange(repo, revs):' | |||
|
147 | 147 | # attempt to parse old-style ranges first to deal with |
|
148 | 148 | # things like old-tag which contain query metacharacters |
|
149 | 149 | try: |
|
150 | if isinstance(spec, int): | |
|
151 | seen.add(spec) | |
|
152 | l.append(spec) | |
|
153 | continue | |
|
154 | ||
|
150 | 155 | if revrangesep in spec: |
|
151 | 156 | start, end = spec.split(revrangesep, 1) |
|
152 | 157 | start = revfix(repo, start, 0) |
@@ -228,7 +233,8 b' def make_file(repo, pat, node=None,' | |||
|
228 | 233 | writable = 'w' in mode or 'a' in mode |
|
229 | 234 | |
|
230 | 235 | if not pat or pat == '-': |
|
231 |
|
|
|
236 | fp = writable and sys.stdout or sys.stdin | |
|
237 | return os.fdopen(os.dup(fp.fileno()), mode) | |
|
232 | 238 | if hasattr(pat, 'write') and writable: |
|
233 | 239 | return pat |
|
234 | 240 | if hasattr(pat, 'read') and 'r' in mode: |
@@ -673,7 +679,9 b" def export(repo, revs, template='hg-%h.p" | |||
|
673 | 679 | parents.reverse() |
|
674 | 680 | prev = (parents and parents[0]) or nullid |
|
675 | 681 | |
|
682 | shouldclose = False | |
|
676 | 683 | if not fp: |
|
684 | shouldclose = True | |
|
677 | 685 | fp = make_file(repo, template, node, total=total, seqno=seqno, |
|
678 | 686 | revwidth=revwidth, mode='ab') |
|
679 | 687 | if fp != sys.stdout and hasattr(fp, 'name'): |
@@ -694,6 +702,9 b" def export(repo, revs, template='hg-%h.p" | |||
|
694 | 702 | for chunk in patch.diff(repo, prev, node, opts=opts): |
|
695 | 703 | fp.write(chunk) |
|
696 | 704 | |
|
705 | if shouldclose: | |
|
706 | fp.close() | |
|
707 | ||
|
697 | 708 | for seqno, rev in enumerate(revs): |
|
698 | 709 | single(rev, seqno + 1, fp) |
|
699 | 710 | |
@@ -796,9 +807,11 b' class changeset_printer(object):' | |||
|
796 | 807 | branch = ctx.branch() |
|
797 | 808 | # don't show the default branch name |
|
798 | 809 | if branch != 'default': |
|
799 | branch = encoding.tolocal(branch) | |
|
800 | 810 | self.ui.write(_("branch: %s\n") % branch, |
|
801 | 811 | label='log.branch') |
|
812 | for bookmark in self.repo.nodebookmarks(changenode): | |
|
813 | self.ui.write(_("bookmark: %s\n") % bookmark, | |
|
814 | label='log.bookmark') | |
|
802 | 815 | for tag in self.repo.nodetags(changenode): |
|
803 | 816 | self.ui.write(_("tag: %s\n") % tag, |
|
804 | 817 | label='log.tag') |
@@ -1352,8 +1365,7 b' def commitforceeditor(repo, ctx, subs):' | |||
|
1352 | 1365 | if ctx.p2(): |
|
1353 | 1366 | edittext.append(_("HG: branch merge")) |
|
1354 | 1367 | if ctx.branch(): |
|
1355 | edittext.append(_("HG: branch '%s'") | |
|
1356 | % encoding.tolocal(ctx.branch())) | |
|
1368 | edittext.append(_("HG: branch '%s'") % ctx.branch()) | |
|
1357 | 1369 | edittext.extend([_("HG: subrepo %s") % s for s in subs]) |
|
1358 | 1370 | edittext.extend([_("HG: added %s") % f for f in added]) |
|
1359 | 1371 | edittext.extend([_("HG: changed %s") % f for f in modified]) |
@@ -9,7 +9,7 b' from node import hex, nullid, nullrev, s' | |||
|
9 | 9 | from lock import release |
|
10 | 10 | from i18n import _, gettext |
|
11 | 11 | import os, re, sys, difflib, time, tempfile |
|
12 | import hg, util, revlog, extensions, copies, error | |
|
12 | import hg, util, revlog, extensions, copies, error, bookmarks | |
|
13 | 13 | import patch, help, mdiff, url, encoding, templatekw, discovery |
|
14 | 14 | import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server |
|
15 | 15 | import merge as mergemod |
@@ -58,7 +58,7 b' def addremove(ui, repo, *pats, **opts):' | |||
|
58 | 58 | repository. |
|
59 | 59 | |
|
60 | 60 | New files are ignored if they match any of the patterns in |
|
61 | .hgignore. As with add, these changes take effect at the next | |
|
61 | ``.hgignore``. As with add, these changes take effect at the next | |
|
62 | 62 | commit. |
|
63 | 63 | |
|
64 | 64 | Use the -s/--similarity option to detect renamed files. With a |
@@ -126,7 +126,7 b' def annotate(ui, repo, *pats, **opts):' | |||
|
126 | 126 | lastfunc = funcmap[-1] |
|
127 | 127 | funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) |
|
128 | 128 | |
|
129 |
ctx = repo |
|
|
129 | ctx = cmdutil.revsingle(repo, opts.get('rev')) | |
|
130 | 130 | m = cmdutil.match(repo, pats, opts) |
|
131 | 131 | follow = not opts.get('no_follow') |
|
132 | 132 | for abs in ctx.walk(m): |
@@ -178,7 +178,7 b' def archive(ui, repo, dest, **opts):' | |||
|
178 | 178 | Returns 0 on success. |
|
179 | 179 | ''' |
|
180 | 180 | |
|
181 |
ctx = repo |
|
|
181 | ctx = cmdutil.revsingle(repo, opts.get('rev')) | |
|
182 | 182 | if not ctx: |
|
183 | 183 | raise util.Abort(_('no working directory: please specify a revision')) |
|
184 | 184 | node = ctx.node() |
@@ -239,7 +239,7 b' def backout(ui, repo, node=None, rev=Non' | |||
|
239 | 239 | opts['date'] = util.parsedate(date) |
|
240 | 240 | |
|
241 | 241 | cmdutil.bail_if_changed(repo) |
|
242 | node = repo.lookup(rev) | |
|
242 | node = cmdutil.revsingle(repo, rev).node() | |
|
243 | 243 | |
|
244 | 244 | op1, op2 = repo.dirstate.parents() |
|
245 | 245 | a = repo.changelog.ancestor(op1, node) |
@@ -404,7 +404,8 b' def bisect(ui, repo, rev=None, extra=Non' | |||
|
404 | 404 | raise util.Abort(_("%s killed") % command) |
|
405 | 405 | else: |
|
406 | 406 | transition = "bad" |
|
407 |
ctx = repo |
|
|
407 | ctx = cmdutil.revsingle(repo, rev) | |
|
408 | rev = None # clear for future iterations | |
|
408 | 409 | state[transition].append(ctx.node()) |
|
409 | 410 | ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition)) |
|
410 | 411 | check_state(state, interactive=False) |
@@ -456,6 +457,95 b' def bisect(ui, repo, rev=None, extra=Non' | |||
|
456 | 457 | cmdutil.bail_if_changed(repo) |
|
457 | 458 | return hg.clean(repo, node) |
|
458 | 459 | |
|
460 | def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None): | |
|
461 | '''track a line of development with movable markers | |
|
462 | ||
|
463 | Bookmarks are pointers to certain commits that move when | |
|
464 | committing. Bookmarks are local. They can be renamed, copied and | |
|
465 | deleted. It is possible to use bookmark names in :hg:`merge` and | |
|
466 | :hg:`update` to merge and update respectively to a given bookmark. | |
|
467 | ||
|
468 | You can use :hg:`bookmark NAME` to set a bookmark on the working | |
|
469 | directory's parent revision with the given name. If you specify | |
|
470 | a revision using -r REV (where REV may be an existing bookmark), | |
|
471 | the bookmark is assigned to that revision. | |
|
472 | ||
|
473 | Bookmarks can be pushed and pulled between repositories (see :hg:`help | |
|
474 | push` and :hg:`help pull`). This requires both the local and remote | |
|
475 | repositories to support bookmarks. For versions prior to 1.8, this means | |
|
476 | the bookmarks extension must be enabled. | |
|
477 | ''' | |
|
478 | hexfn = ui.debugflag and hex or short | |
|
479 | marks = repo._bookmarks | |
|
480 | cur = repo.changectx('.').node() | |
|
481 | ||
|
482 | if rename: | |
|
483 | if rename not in marks: | |
|
484 | raise util.Abort(_("a bookmark of this name does not exist")) | |
|
485 | if mark in marks and not force: | |
|
486 | raise util.Abort(_("a bookmark of the same name already exists")) | |
|
487 | if mark is None: | |
|
488 | raise util.Abort(_("new bookmark name required")) | |
|
489 | marks[mark] = marks[rename] | |
|
490 | del marks[rename] | |
|
491 | if repo._bookmarkcurrent == rename: | |
|
492 | bookmarks.setcurrent(repo, mark) | |
|
493 | bookmarks.write(repo) | |
|
494 | return | |
|
495 | ||
|
496 | if delete: | |
|
497 | if mark is None: | |
|
498 | raise util.Abort(_("bookmark name required")) | |
|
499 | if mark not in marks: | |
|
500 | raise util.Abort(_("a bookmark of this name does not exist")) | |
|
501 | if mark == repo._bookmarkcurrent: | |
|
502 | bookmarks.setcurrent(repo, None) | |
|
503 | del marks[mark] | |
|
504 | bookmarks.write(repo) | |
|
505 | return | |
|
506 | ||
|
507 | if mark is not None: | |
|
508 | if "\n" in mark: | |
|
509 | raise util.Abort(_("bookmark name cannot contain newlines")) | |
|
510 | mark = mark.strip() | |
|
511 | if not mark: | |
|
512 | raise util.Abort(_("bookmark names cannot consist entirely of " | |
|
513 | "whitespace")) | |
|
514 | if mark in marks and not force: | |
|
515 | raise util.Abort(_("a bookmark of the same name already exists")) | |
|
516 | if ((mark in repo.branchtags() or mark == repo.dirstate.branch()) | |
|
517 | and not force): | |
|
518 | raise util.Abort( | |
|
519 | _("a bookmark cannot have the name of an existing branch")) | |
|
520 | if rev: | |
|
521 | marks[mark] = repo.lookup(rev) | |
|
522 | else: | |
|
523 | marks[mark] = repo.changectx('.').node() | |
|
524 | bookmarks.setcurrent(repo, mark) | |
|
525 | bookmarks.write(repo) | |
|
526 | return | |
|
527 | ||
|
528 | if mark is None: | |
|
529 | if rev: | |
|
530 | raise util.Abort(_("bookmark name required")) | |
|
531 | if len(marks) == 0: | |
|
532 | ui.status(_("no bookmarks set\n")) | |
|
533 | else: | |
|
534 | for bmark, n in sorted(marks.iteritems()): | |
|
535 | current = repo._bookmarkcurrent | |
|
536 | if bmark == current and n == cur: | |
|
537 | prefix, label = '*', 'bookmarks.current' | |
|
538 | else: | |
|
539 | prefix, label = ' ', '' | |
|
540 | ||
|
541 | if ui.quiet: | |
|
542 | ui.write("%s\n" % bmark, label=label) | |
|
543 | else: | |
|
544 | ui.write(" %s %-25s %d:%s\n" % ( | |
|
545 | prefix, bmark, repo.changelog.rev(n), hexfn(n)), | |
|
546 | label=label) | |
|
547 | return | |
|
548 | ||
|
459 | 549 | def branch(ui, repo, label=None, **opts): |
|
460 | 550 | """set or show the current branch name |
|
461 | 551 | |
@@ -483,15 +573,14 b' def branch(ui, repo, label=None, **opts)' | |||
|
483 | 573 | repo.dirstate.setbranch(label) |
|
484 | 574 | ui.status(_('reset working directory to branch %s\n') % label) |
|
485 | 575 | elif label: |
|
486 | utflabel = encoding.fromlocal(label) | |
|
487 | if not opts.get('force') and utflabel in repo.branchtags(): | |
|
576 | if not opts.get('force') and label in repo.branchtags(): | |
|
488 | 577 | if label not in [p.branch() for p in repo.parents()]: |
|
489 | 578 | raise util.Abort(_('a branch of the same name already exists' |
|
490 | 579 | " (use 'hg update' to switch to it)")) |
|
491 |
repo.dirstate.setbranch( |
|
|
580 | repo.dirstate.setbranch(label) | |
|
492 | 581 | ui.status(_('marked working directory as branch %s\n') % label) |
|
493 | 582 | else: |
|
494 |
ui.write("%s\n" % |
|
|
583 | ui.write("%s\n" % repo.dirstate.branch()) | |
|
495 | 584 | |
|
496 | 585 | def branches(ui, repo, active=False, closed=False): |
|
497 | 586 | """list repository named branches |
@@ -520,9 +609,8 b' def branches(ui, repo, active=False, clo' | |||
|
520 | 609 | |
|
521 | 610 | for isactive, node, tag in branches: |
|
522 | 611 | if (not active) or isactive: |
|
523 | encodedtag = encoding.tolocal(tag) | |
|
524 | 612 | if ui.quiet: |
|
525 |
ui.write("%s\n" % |
|
|
613 | ui.write("%s\n" % tag) | |
|
526 | 614 | else: |
|
527 | 615 | hn = repo.lookup(node) |
|
528 | 616 | if isactive: |
@@ -538,10 +626,10 b' def branches(ui, repo, active=False, clo' | |||
|
538 | 626 | notice = _(' (inactive)') |
|
539 | 627 | if tag == repo.dirstate.branch(): |
|
540 | 628 | label = 'branches.current' |
|
541 |
rev = str(node).rjust(31 - encoding.colwidth( |
|
|
629 | rev = str(node).rjust(31 - encoding.colwidth(tag)) | |
|
542 | 630 | rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset') |
|
543 |
|
|
|
544 |
ui.write("%s %s%s\n" % ( |
|
|
631 | tag = ui.label(tag, label) | |
|
632 | ui.write("%s %s%s\n" % (tag, rev, notice)) | |
|
545 | 633 | |
|
546 | 634 | def bundle(ui, repo, fname, dest=None, **opts): |
|
547 | 635 | """create a changegroup file |
@@ -568,11 +656,14 b' def bundle(ui, repo, fname, dest=None, *' | |||
|
568 | 656 | |
|
569 | 657 | Returns 0 on success, 1 if no changes found. |
|
570 | 658 | """ |
|
571 |
revs = |
|
|
659 | revs = None | |
|
660 | if 'rev' in opts: | |
|
661 | revs = cmdutil.revrange(repo, opts['rev']) | |
|
662 | ||
|
572 | 663 | if opts.get('all'): |
|
573 | 664 | base = ['null'] |
|
574 | 665 | else: |
|
575 | base = opts.get('base') | |
|
666 | base = cmdutil.revrange(repo, opts.get('base')) | |
|
576 | 667 | if base: |
|
577 | 668 | if dest: |
|
578 | 669 | raise util.Abort(_("--base is incompatible with specifying " |
@@ -654,6 +745,7 b' def cat(ui, repo, file1, *pats, **opts):' | |||
|
654 | 745 | if opts.get('decode'): |
|
655 | 746 | data = repo.wwritedata(abs, data) |
|
656 | 747 | fp.write(data) |
|
748 | fp.close() | |
|
657 | 749 | err = 0 |
|
658 | 750 | return err |
|
659 | 751 | |
@@ -666,12 +758,12 b' def clone(ui, source, dest=None, **opts)' | |||
|
666 | 758 | basename of the source. |
|
667 | 759 | |
|
668 | 760 | The location of the source is added to the new repository's |
|
669 | .hg/hgrc file, as the default to be used for future pulls. | |
|
761 | ``.hg/hgrc`` file, as the default to be used for future pulls. | |
|
670 | 762 | |
|
671 | 763 | See :hg:`help urls` for valid source format details. |
|
672 | 764 | |
|
673 | 765 | It is possible to specify an ``ssh://`` URL as the destination, but no |
|
674 | .hg/hgrc and working directory will be created on the remote side. | |
|
766 | ``.hg/hgrc`` and working directory will be created on the remote side. | |
|
675 | 767 | Please see :hg:`help urls` for important details about ``ssh://`` URLs. |
|
676 | 768 | |
|
677 | 769 | A set of changesets (tags, or branch names) to pull may be specified |
@@ -737,7 +829,7 b' def commit(ui, repo, *pats, **opts):' | |||
|
737 | 829 | """commit the specified files or all outstanding changes |
|
738 | 830 | |
|
739 | 831 | Commit changes to the given files into the repository. Unlike a |
|
740 |
centralized |
|
|
832 | centralized SCM, this operation is a local operation. See | |
|
741 | 833 | :hg:`push` for a way to actively distribute your changes. |
|
742 | 834 | |
|
743 | 835 | If a list of files is omitted, all changes reported by :hg:`status` |
@@ -1022,7 +1114,7 b' def debugfsinfo(ui, path = "."):' | |||
|
1022 | 1114 | |
|
1023 | 1115 | def debugrebuildstate(ui, repo, rev="tip"): |
|
1024 | 1116 | """rebuild the dirstate as it would look like for the given revision""" |
|
1025 |
ctx = repo |
|
|
1117 | ctx = cmdutil.revsingle(repo, rev) | |
|
1026 | 1118 | wlock = repo.wlock() |
|
1027 | 1119 | try: |
|
1028 | 1120 | repo.dirstate.rebuild(ctx.node(), ctx.manifest()) |
@@ -1112,7 +1204,7 b' def debugpushkey(ui, repopath, namespace' | |||
|
1112 | 1204 | key, old, new = keyinfo |
|
1113 | 1205 | r = target.pushkey(namespace, key, old, new) |
|
1114 | 1206 | ui.status(str(r) + '\n') |
|
1115 |
return not |
|
|
1207 | return not r | |
|
1116 | 1208 | else: |
|
1117 | 1209 | for k, v in target.listkeys(namespace).iteritems(): |
|
1118 | 1210 | ui.write("%s\t%s\n" % (k.encode('string-escape'), |
@@ -1136,12 +1228,12 b' def debugsetparents(ui, repo, rev1, rev2' | |||
|
1136 | 1228 | Returns 0 on success. |
|
1137 | 1229 | """ |
|
1138 | 1230 | |
|
1139 | if not rev2: | |
|
1140 | rev2 = hex(nullid) | |
|
1231 | r1 = cmdutil.revsingle(repo, rev1).node() | |
|
1232 | r2 = cmdutil.revsingle(repo, rev2, 'null').node() | |
|
1141 | 1233 | |
|
1142 | 1234 | wlock = repo.wlock() |
|
1143 | 1235 | try: |
|
1144 |
repo.dirstate.setparents(r |
|
|
1236 | repo.dirstate.setparents(r1, r2) | |
|
1145 | 1237 | finally: |
|
1146 | 1238 | wlock.release() |
|
1147 | 1239 | |
@@ -1170,9 +1262,8 b' def debugstate(ui, repo, nodates=None):' | |||
|
1170 | 1262 | ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) |
|
1171 | 1263 | |
|
1172 | 1264 | def debugsub(ui, repo, rev=None): |
|
1173 | if rev == '': | |
|
1174 | rev = None | |
|
1175 | for k, v in sorted(repo[rev].substate.items()): | |
|
1265 | ctx = cmdutil.revsingle(repo, rev, None) | |
|
1266 | for k, v in sorted(ctx.substate.items()): | |
|
1176 | 1267 | ui.write('path %s\n' % k) |
|
1177 | 1268 | ui.write(' source %s\n' % v[0]) |
|
1178 | 1269 | ui.write(' revision %s\n' % v[1]) |
@@ -1256,6 +1347,14 b' def debugdate(ui, date, range=None, **op' | |||
|
1256 | 1347 | m = util.matchdate(range) |
|
1257 | 1348 | ui.write("match: %s\n" % m(d[0])) |
|
1258 | 1349 | |
|
1350 | def debugignore(ui, repo, *values, **opts): | |
|
1351 | """display the combined ignore pattern""" | |
|
1352 | ignore = repo.dirstate._ignore | |
|
1353 | if hasattr(ignore, 'includepat'): | |
|
1354 | ui.write("%s\n" % ignore.includepat) | |
|
1355 | else: | |
|
1356 | raise util.Abort(_("no ignore patterns found")) | |
|
1357 | ||
|
1259 | 1358 | def debugindex(ui, repo, file_, **opts): |
|
1260 | 1359 | """dump the contents of an index file""" |
|
1261 | 1360 | r = None |
@@ -1431,7 +1530,7 b' def debuginstall(ui):' | |||
|
1431 | 1530 | def debugrename(ui, repo, file1, *pats, **opts): |
|
1432 | 1531 | """dump rename information""" |
|
1433 | 1532 | |
|
1434 |
ctx = repo |
|
|
1533 | ctx = cmdutil.revsingle(repo, opts.get('rev')) | |
|
1435 | 1534 | m = cmdutil.match(repo, (file1,) + pats, opts) |
|
1436 | 1535 | for abs in ctx.walk(m): |
|
1437 | 1536 | fctx = ctx[abs] |
@@ -1804,10 +1903,9 b' def heads(ui, repo, *branchrevs, **opts)' | |||
|
1804 | 1903 | Returns 0 if matching heads are found, 1 if not. |
|
1805 | 1904 | """ |
|
1806 | 1905 | |
|
1807 | if opts.get('rev'): | |
|
1808 | start = repo.lookup(opts['rev']) | |
|
1809 | else: | |
|
1810 | start = None | |
|
1906 | start = None | |
|
1907 | if 'rev' in opts: | |
|
1908 | start = cmdutil.revsingle(repo, opts['rev'], None).node() | |
|
1811 | 1909 | |
|
1812 | 1910 | if opts.get('topo'): |
|
1813 | 1911 | heads = [repo[h] for h in repo.heads(start)] |
@@ -1824,8 +1922,7 b' def heads(ui, repo, *branchrevs, **opts)' | |||
|
1824 | 1922 | heads += [repo[h] for h in ls if rev(h) in descendants] |
|
1825 | 1923 | |
|
1826 | 1924 | if branchrevs: |
|
1827 | decode, encode = encoding.fromlocal, encoding.tolocal | |
|
1828 | branches = set(repo[decode(br)].branch() for br in branchrevs) | |
|
1925 | branches = set(repo[br].branch() for br in branchrevs) | |
|
1829 | 1926 | heads = [h for h in heads if h.branch() in branches] |
|
1830 | 1927 | |
|
1831 | 1928 | if not opts.get('closed'): |
@@ -1838,7 +1935,7 b' def heads(ui, repo, *branchrevs, **opts)' | |||
|
1838 | 1935 | if branchrevs: |
|
1839 | 1936 | haveheads = set(h.branch() for h in heads) |
|
1840 | 1937 | if branches - haveheads: |
|
1841 |
headless = ', '.join( |
|
|
1938 | headless = ', '.join(b for b in branches - haveheads) | |
|
1842 | 1939 | msg = _('no open branch heads found on branches %s') |
|
1843 | 1940 | if opts.get('rev'): |
|
1844 | 1941 | msg += _(' (started at %s)' % opts['rev']) |
@@ -2031,7 +2128,7 b' def help_(ui, name=None, with_version=Fa' | |||
|
2031 | 2128 | 'extensions\n')) |
|
2032 | 2129 | |
|
2033 | 2130 | def helpextcmd(name): |
|
2034 | cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) | |
|
2131 | cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict')) | |
|
2035 | 2132 | doc = gettext(mod.__doc__).splitlines()[0] |
|
2036 | 2133 | |
|
2037 | 2134 | msg = help.listexts(_("'%s' is provided by the following " |
@@ -2196,14 +2293,14 b' def identify(ui, repo, source=None,' | |||
|
2196 | 2293 | output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), |
|
2197 | 2294 | (changed) and "+" or "")) |
|
2198 | 2295 | else: |
|
2199 |
ctx = repo |
|
|
2296 | ctx = cmdutil.revsingle(repo, rev) | |
|
2200 | 2297 | if default or id: |
|
2201 | 2298 | output = [hexfunc(ctx.node())] |
|
2202 | 2299 | if num: |
|
2203 | 2300 | output.append(str(ctx.rev())) |
|
2204 | 2301 | |
|
2205 | 2302 | if repo.local() and default and not ui.quiet: |
|
2206 |
b = |
|
|
2303 | b = ctx.branch() | |
|
2207 | 2304 | if b != 'default': |
|
2208 | 2305 | output.append("(%s)" % b) |
|
2209 | 2306 | |
@@ -2213,7 +2310,7 b' def identify(ui, repo, source=None,' | |||
|
2213 | 2310 | output.append(t) |
|
2214 | 2311 | |
|
2215 | 2312 | if branch: |
|
2216 |
output.append |
|
|
2313 | output.append(ctx.branch()) | |
|
2217 | 2314 | |
|
2218 | 2315 | if tags: |
|
2219 | 2316 | output.extend(ctx.tags()) |
@@ -2275,6 +2372,7 b' def import_(ui, repo, patch1, *patches, ' | |||
|
2275 | 2372 | d = opts["base"] |
|
2276 | 2373 | strip = opts["strip"] |
|
2277 | 2374 | wlock = lock = None |
|
2375 | msgs = [] | |
|
2278 | 2376 | |
|
2279 | 2377 | def tryone(ui, hunk): |
|
2280 | 2378 | tmpname, message, user, date, branch, nodeid, p1, p2 = \ |
@@ -2325,7 +2423,10 b' def import_(ui, repo, patch1, *patches, ' | |||
|
2325 | 2423 | finally: |
|
2326 | 2424 | files = cmdutil.updatedir(ui, repo, files, |
|
2327 | 2425 | similarity=sim / 100.0) |
|
2328 |
if |
|
|
2426 | if opts.get('no_commit'): | |
|
2427 | if message: | |
|
2428 | msgs.append(message) | |
|
2429 | else: | |
|
2329 | 2430 | if opts.get('exact'): |
|
2330 | 2431 | m = None |
|
2331 | 2432 | else: |
@@ -2374,6 +2475,8 b' def import_(ui, repo, patch1, *patches, ' | |||
|
2374 | 2475 | if not haspatch: |
|
2375 | 2476 | raise util.Abort(_('no diffs found')) |
|
2376 | 2477 | |
|
2478 | if msgs: | |
|
2479 | repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs)) | |
|
2377 | 2480 | finally: |
|
2378 | 2481 | release(lock, wlock) |
|
2379 | 2482 | |
@@ -2394,6 +2497,13 b' def incoming(ui, repo, source="default",' | |||
|
2394 | 2497 | if opts.get('bundle') and opts.get('subrepos'): |
|
2395 | 2498 | raise util.Abort(_('cannot combine --bundle and --subrepos')) |
|
2396 | 2499 | |
|
2500 | if opts.get('bookmarks'): | |
|
2501 | source, branches = hg.parseurl(ui.expandpath(source), | |
|
2502 | opts.get('branch')) | |
|
2503 | other = hg.repository(hg.remoteui(repo, opts), source) | |
|
2504 | ui.status(_('comparing with %s\n') % url.hidepassword(source)) | |
|
2505 | return bookmarks.diff(ui, repo, other) | |
|
2506 | ||
|
2397 | 2507 | ret = hg.incoming(ui, repo, source, opts) |
|
2398 | 2508 | return ret |
|
2399 | 2509 | |
@@ -2433,7 +2543,7 b' def locate(ui, repo, *pats, **opts):' | |||
|
2433 | 2543 | Returns 0 if a match is found, 1 otherwise. |
|
2434 | 2544 | """ |
|
2435 | 2545 | end = opts.get('print0') and '\0' or '\n' |
|
2436 |
rev = opts.get('rev') |
|
|
2546 | rev = cmdutil.revsingle(repo, opts.get('rev'), None).node() | |
|
2437 | 2547 | |
|
2438 | 2548 | ret = 1 |
|
2439 | 2549 | m = cmdutil.match(repo, pats, opts, default='relglob') |
@@ -2568,7 +2678,7 b' def manifest(ui, repo, node=None, rev=No' | |||
|
2568 | 2678 | node = rev |
|
2569 | 2679 | |
|
2570 | 2680 | decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} |
|
2571 |
ctx = repo |
|
|
2681 | ctx = cmdutil.revsingle(repo, node) | |
|
2572 | 2682 | for f in ctx: |
|
2573 | 2683 | if ui.debugflag: |
|
2574 | 2684 | ui.write("%40s " % hex(ctx.manifest()[f])) |
@@ -2611,7 +2721,7 b' def merge(ui, repo, node=None, **opts):' | |||
|
2611 | 2721 | node = opts.get('rev') |
|
2612 | 2722 | |
|
2613 | 2723 | if not node: |
|
2614 |
branch = repo |
|
|
2724 | branch = repo[None].branch() | |
|
2615 | 2725 | bheads = repo.branchheads(branch) |
|
2616 | 2726 | if len(bheads) > 2: |
|
2617 | 2727 | raise util.Abort(_( |
@@ -2637,6 +2747,8 b' def merge(ui, repo, node=None, **opts):' | |||
|
2637 | 2747 | raise util.Abort(_('working dir not at a head rev - ' |
|
2638 | 2748 | 'use "hg update" or merge with an explicit rev')) |
|
2639 | 2749 | node = parent == bheads[0] and bheads[-1] or bheads[0] |
|
2750 | else: | |
|
2751 | node = cmdutil.revsingle(repo, node).node() | |
|
2640 | 2752 | |
|
2641 | 2753 | if opts.get('preview'): |
|
2642 | 2754 | # find nodes that are ancestors of p2 but not of p1 |
@@ -2668,6 +2780,14 b' def outgoing(ui, repo, dest=None, **opts' | |||
|
2668 | 2780 | |
|
2669 | 2781 | Returns 0 if there are outgoing changes, 1 otherwise. |
|
2670 | 2782 | """ |
|
2783 | ||
|
2784 | if opts.get('bookmarks'): | |
|
2785 | dest = ui.expandpath(dest or 'default-push', dest or 'default') | |
|
2786 | dest, branches = hg.parseurl(dest, opts.get('branch')) | |
|
2787 | other = hg.repository(hg.remoteui(repo, opts), dest) | |
|
2788 | ui.status(_('comparing with %s\n') % url.hidepassword(dest)) | |
|
2789 | return bookmarks.diff(ui, other, repo) | |
|
2790 | ||
|
2671 | 2791 | ret = hg.outgoing(ui, repo, dest, opts) |
|
2672 | 2792 | return ret |
|
2673 | 2793 | |
@@ -2682,11 +2802,8 b' def parents(ui, repo, file_=None, **opts' | |||
|
2682 | 2802 | |
|
2683 | 2803 | Returns 0 on success. |
|
2684 | 2804 | """ |
|
2685 | rev = opts.get('rev') | |
|
2686 | if rev: | |
|
2687 | ctx = repo[rev] | |
|
2688 | else: | |
|
2689 | ctx = repo[None] | |
|
2805 | ||
|
2806 | ctx = cmdutil.revsingle(repo, opts.get('rev'), None) | |
|
2690 | 2807 | |
|
2691 | 2808 | if file_: |
|
2692 | 2809 | m = cmdutil.match(repo, (file_,), opts) |
@@ -2787,6 +2904,16 b' def pull(ui, repo, source="default", **o' | |||
|
2787 | 2904 | other = hg.repository(hg.remoteui(repo, opts), source) |
|
2788 | 2905 | ui.status(_('pulling from %s\n') % url.hidepassword(source)) |
|
2789 | 2906 | revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) |
|
2907 | ||
|
2908 | if opts.get('bookmark'): | |
|
2909 | if not revs: | |
|
2910 | revs = [] | |
|
2911 | rb = other.listkeys('bookmarks') | |
|
2912 | for b in opts['bookmark']: | |
|
2913 | if b not in rb: | |
|
2914 | raise util.Abort(_('remote bookmark %s not found!') % b) | |
|
2915 | revs.append(rb[b]) | |
|
2916 | ||
|
2790 | 2917 | if revs: |
|
2791 | 2918 | try: |
|
2792 | 2919 | revs = [other.lookup(rev) for rev in revs] |
@@ -2800,10 +2927,21 b' def pull(ui, repo, source="default", **o' | |||
|
2800 | 2927 | checkout = str(repo.changelog.rev(other.lookup(checkout))) |
|
2801 | 2928 | repo._subtoppath = source |
|
2802 | 2929 | try: |
|
2803 |
ret |
|
|
2930 | ret = postincoming(ui, repo, modheads, opts.get('update'), checkout) | |
|
2931 | ||
|
2804 | 2932 | finally: |
|
2805 | 2933 | del repo._subtoppath |
|
2806 | 2934 | |
|
2935 | # update specified bookmarks | |
|
2936 | if opts.get('bookmark'): | |
|
2937 | for b in opts['bookmark']: | |
|
2938 | # explicit pull overrides local bookmark if any | |
|
2939 | ui.status(_("importing bookmark %s\n") % b) | |
|
2940 | repo._bookmarks[b] = repo[rb[b]].node() | |
|
2941 | bookmarks.write(repo) | |
|
2942 | ||
|
2943 | return ret | |
|
2944 | ||
|
2807 | 2945 | def push(ui, repo, dest=None, **opts): |
|
2808 | 2946 | """push changes to the specified destination |
|
2809 | 2947 | |
@@ -2833,6 +2971,17 b' def push(ui, repo, dest=None, **opts):' | |||
|
2833 | 2971 | |
|
2834 | 2972 | Returns 0 if push was successful, 1 if nothing to push. |
|
2835 | 2973 | """ |
|
2974 | ||
|
2975 | if opts.get('bookmark'): | |
|
2976 | for b in opts['bookmark']: | |
|
2977 | # translate -B options to -r so changesets get pushed | |
|
2978 | if b in repo._bookmarks: | |
|
2979 | opts.setdefault('rev', []).append(b) | |
|
2980 | else: | |
|
2981 | # if we try to push a deleted bookmark, translate it to null | |
|
2982 | # this lets simultaneous -r, -b options continue working | |
|
2983 | opts.setdefault('rev', []).append("null") | |
|
2984 | ||
|
2836 | 2985 | dest = ui.expandpath(dest or 'default-push', dest or 'default') |
|
2837 | 2986 | dest, branches = hg.parseurl(dest, opts.get('branch')) |
|
2838 | 2987 | revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) |
@@ -2851,9 +3000,33 b' def push(ui, repo, dest=None, **opts):' | |||
|
2851 | 3000 | return False |
|
2852 | 3001 | finally: |
|
2853 | 3002 | del repo._subtoppath |
|
2854 | r = repo.push(other, opts.get('force'), revs=revs, | |
|
2855 | newbranch=opts.get('new_branch')) | |
|
2856 | return r == 0 | |
|
3003 | result = repo.push(other, opts.get('force'), revs=revs, | |
|
3004 | newbranch=opts.get('new_branch')) | |
|
3005 | ||
|
3006 | result = (result == 0) | |
|
3007 | ||
|
3008 | if opts.get('bookmark'): | |
|
3009 | rb = other.listkeys('bookmarks') | |
|
3010 | for b in opts['bookmark']: | |
|
3011 | # explicit push overrides remote bookmark if any | |
|
3012 | if b in repo._bookmarks: | |
|
3013 | ui.status(_("exporting bookmark %s\n") % b) | |
|
3014 | new = repo[b].hex() | |
|
3015 | elif b in rb: | |
|
3016 | ui.status(_("deleting remote bookmark %s\n") % b) | |
|
3017 | new = '' # delete | |
|
3018 | else: | |
|
3019 | ui.warn(_('bookmark %s does not exist on the local ' | |
|
3020 | 'or remote repository!\n') % b) | |
|
3021 | return 2 | |
|
3022 | old = rb.get(b, '') | |
|
3023 | r = other.pushkey('bookmarks', b, old, new) | |
|
3024 | if not r: | |
|
3025 | ui.warn(_('updating bookmark %s failed!\n') % b) | |
|
3026 | if not result: | |
|
3027 | result = 2 | |
|
3028 | ||
|
3029 | return result | |
|
2857 | 3030 | |
|
2858 | 3031 | def recover(ui, repo): |
|
2859 | 3032 | """roll back an interrupted transaction |
@@ -3094,15 +3267,16 b' def revert(ui, repo, *pats, **opts):' | |||
|
3094 | 3267 | raise util.Abort(_("you can't specify a revision and a date")) |
|
3095 | 3268 | opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) |
|
3096 | 3269 | |
|
3270 | parent, p2 = repo.dirstate.parents() | |
|
3271 | if not opts.get('rev') and p2 != nullid: | |
|
3272 | raise util.Abort(_('uncommitted merge - ' | |
|
3273 | 'use "hg update", see "hg help revert"')) | |
|
3274 | ||
|
3097 | 3275 | if not pats and not opts.get('all'): |
|
3098 | 3276 | raise util.Abort(_('no files or directories specified; ' |
|
3099 | 3277 | 'use --all to revert the whole repo')) |
|
3100 | 3278 | |
|
3101 | parent, p2 = repo.dirstate.parents() | |
|
3102 | if not opts.get('rev') and p2 != nullid: | |
|
3103 | raise util.Abort(_('uncommitted merge - please provide a ' | |
|
3104 | 'specific revision')) | |
|
3105 | ctx = repo[opts.get('rev')] | |
|
3279 | ctx = cmdutil.revsingle(repo, opts.get('rev')) | |
|
3106 | 3280 | node = ctx.node() |
|
3107 | 3281 | mf = ctx.manifest() |
|
3108 | 3282 | if node == parent: |
@@ -3241,7 +3415,7 b' def revert(ui, repo, *pats, **opts):' | |||
|
3241 | 3415 | continue |
|
3242 | 3416 | audit_path(f) |
|
3243 | 3417 | try: |
|
3244 | util.unlink(repo.wjoin(f)) | |
|
3418 | util.unlinkpath(repo.wjoin(f)) | |
|
3245 | 3419 | except OSError: |
|
3246 | 3420 | pass |
|
3247 | 3421 | repo.dirstate.remove(f) |
@@ -3722,7 +3896,7 b' def tag(ui, repo, name1, *names, **opts)' | |||
|
3722 | 3896 | bheads = repo.branchheads() |
|
3723 | 3897 | if not opts.get('force') and bheads and p1 not in bheads: |
|
3724 | 3898 | raise util.Abort(_('not at a branch head (use -f to force)')) |
|
3725 |
r = repo |
|
|
3899 | r = cmdutil.revsingle(repo, rev_).node() | |
|
3726 | 3900 | |
|
3727 | 3901 | if not message: |
|
3728 | 3902 | # we don't translate commit messages |
@@ -3856,6 +4030,8 b' def update(ui, repo, node=None, rev=None' | |||
|
3856 | 4030 | if not rev: |
|
3857 | 4031 | rev = node |
|
3858 | 4032 | |
|
4033 | # if we defined a bookmark, we have to remember the original bookmark name | |
|
4034 | brev = rev | |
|
3859 | 4035 | rev = cmdutil.revsingle(repo, rev, rev).rev() |
|
3860 | 4036 | |
|
3861 | 4037 | if check and clean: |
@@ -3873,9 +4049,14 b' def update(ui, repo, node=None, rev=None' | |||
|
3873 | 4049 | rev = cmdutil.finddate(ui, repo, date) |
|
3874 | 4050 | |
|
3875 | 4051 | if clean or check: |
|
3876 |
ret |
|
|
4052 | ret = hg.clean(repo, rev) | |
|
3877 | 4053 | else: |
|
3878 |
ret |
|
|
4054 | ret = hg.update(repo, rev) | |
|
4055 | ||
|
4056 | if brev in repo._bookmarks: | |
|
4057 | bookmarks.setcurrent(repo, brev) | |
|
4058 | ||
|
4059 | return ret | |
|
3879 | 4060 | |
|
3880 | 4061 | def verify(ui, repo): |
|
3881 | 4062 | """verify the integrity of the repository |
@@ -4066,6 +4247,13 b' table = {' | |||
|
4066 | 4247 | _('use command to check changeset state'), _('CMD')), |
|
4067 | 4248 | ('U', 'noupdate', False, _('do not update to target'))], |
|
4068 | 4249 | _("[-gbsr] [-U] [-c CMD] [REV]")), |
|
4250 | "bookmarks": | |
|
4251 | (bookmark, | |
|
4252 | [('f', 'force', False, _('force')), | |
|
4253 | ('r', 'rev', '', _('revision'), _('REV')), | |
|
4254 | ('d', 'delete', False, _('delete a given bookmark')), | |
|
4255 | ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))], | |
|
4256 | _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')), | |
|
4069 | 4257 | "branch": |
|
4070 | 4258 | (branch, |
|
4071 | 4259 | [('f', 'force', None, |
@@ -4165,6 +4353,7 b' table = {' | |||
|
4165 | 4353 | _('[-e] DATE [RANGE]')), |
|
4166 | 4354 | "debugdata": (debugdata, [], _('FILE REV')), |
|
4167 | 4355 | "debugfsinfo": (debugfsinfo, [], _('[PATH]')), |
|
4356 | "debugignore": (debugignore, [], ''), | |
|
4168 | 4357 | "debugindex": (debugindex, |
|
4169 | 4358 | [('f', 'format', 0, _('revlog format'), _('FORMAT'))], |
|
4170 | 4359 | _('FILE')), |
@@ -4282,6 +4471,7 b' table = {' | |||
|
4282 | 4471 | _('file to store the bundles into'), _('FILE')), |
|
4283 | 4472 | ('r', 'rev', [], |
|
4284 | 4473 | _('a remote changeset intended to be added'), _('REV')), |
|
4474 | ('B', 'bookmarks', False, _("compare bookmarks")), | |
|
4285 | 4475 | ('b', 'branch', [], |
|
4286 | 4476 | _('a specific branch you would like to pull'), _('BRANCH')), |
|
4287 | 4477 | ] + logopts + remoteopts + subrepoopts, |
@@ -4350,6 +4540,7 b' table = {' | |||
|
4350 | 4540 | _('a changeset intended to be included in the destination'), |
|
4351 | 4541 | _('REV')), |
|
4352 | 4542 | ('n', 'newest-first', None, _('show newest record first')), |
|
4543 | ('B', 'bookmarks', False, _("compare bookmarks")), | |
|
4353 | 4544 | ('b', 'branch', [], |
|
4354 | 4545 | _('a specific branch you would like to push'), _('BRANCH')), |
|
4355 | 4546 | ] + logopts + remoteopts + subrepoopts, |
@@ -4369,6 +4560,7 b' table = {' | |||
|
4369 | 4560 | _('run even when remote repository is unrelated')), |
|
4370 | 4561 | ('r', 'rev', [], |
|
4371 | 4562 | _('a remote changeset intended to be added'), _('REV')), |
|
4563 | ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')), | |
|
4372 | 4564 | ('b', 'branch', [], |
|
4373 | 4565 | _('a specific branch you would like to pull'), _('BRANCH')), |
|
4374 | 4566 | ] + remoteopts, |
@@ -4379,6 +4571,7 b' table = {' | |||
|
4379 | 4571 | ('r', 'rev', [], |
|
4380 | 4572 | _('a changeset intended to be included in the destination'), |
|
4381 | 4573 | _('REV')), |
|
4574 | ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')), | |
|
4382 | 4575 | ('b', 'branch', [], |
|
4383 | 4576 | _('a specific branch you would like to push'), _('BRANCH')), |
|
4384 | 4577 | ('', 'new-branch', False, _('allow pushing a new branch')), |
@@ -130,7 +130,7 b' class config(object):' | |||
|
130 | 130 | name = m.group(1) |
|
131 | 131 | if sections and section not in sections: |
|
132 | 132 | continue |
|
133 |
if self.get(section, name) |
|
|
133 | if self.get(section, name) is not None: | |
|
134 | 134 | del self._data[section][name] |
|
135 | 135 | continue |
|
136 | 136 |
@@ -7,7 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | from node import nullid, nullrev, short, hex |
|
9 | 9 | from i18n import _ |
|
10 | import ancestor, bdiff, error, util, subrepo, patch | |
|
10 | import ancestor, bdiff, error, util, subrepo, patch, encoding | |
|
11 | 11 | import os, errno, stat |
|
12 | 12 | |
|
13 | 13 | propertycache = util.propertycache |
@@ -109,11 +109,13 b' class changectx(object):' | |||
|
109 | 109 | def description(self): |
|
110 | 110 | return self._changeset[4] |
|
111 | 111 | def branch(self): |
|
112 | return self._changeset[5].get("branch") | |
|
112 | return encoding.tolocal(self._changeset[5].get("branch")) | |
|
113 | 113 | def extra(self): |
|
114 | 114 | return self._changeset[5] |
|
115 | 115 | def tags(self): |
|
116 | 116 | return self._repo.nodetags(self._node) |
|
117 | def bookmarks(self): | |
|
118 | return self._repo.nodebookmarks(self._node) | |
|
117 | 119 | |
|
118 | 120 | def parents(self): |
|
119 | 121 | """return contexts for each parent changeset""" |
@@ -179,7 +181,7 b' class changectx(object):' | |||
|
179 | 181 | """ |
|
180 | 182 | # deal with workingctxs |
|
181 | 183 | n2 = c2._node |
|
182 |
if n2 |
|
|
184 | if n2 is None: | |
|
183 | 185 | n2 = c2._parents[0]._node |
|
184 | 186 | n = self._repo.changelog.ancestor(self._node, n2) |
|
185 | 187 | return changectx(self._repo, n) |
@@ -591,9 +593,8 b' class workingctx(changectx):' | |||
|
591 | 593 | if extra: |
|
592 | 594 | self._extra = extra.copy() |
|
593 | 595 | if 'branch' not in self._extra: |
|
594 | branch = self._repo.dirstate.branch() | |
|
595 | 596 | try: |
|
596 | branch = branch.decode('UTF-8').encode('UTF-8') | |
|
597 | branch = encoding.fromlocal(self._repo.dirstate.branch()) | |
|
597 | 598 | except UnicodeDecodeError: |
|
598 | 599 | raise util.Abort(_('branch name not in UTF-8!')) |
|
599 | 600 | self._extra['branch'] = branch |
@@ -603,6 +604,9 b' class workingctx(changectx):' | |||
|
603 | 604 | def __str__(self): |
|
604 | 605 | return str(self._parents[0]) + "+" |
|
605 | 606 | |
|
607 | def __repr__(self): | |
|
608 | return "<workingctx %s>" % str(self) | |
|
609 | ||
|
606 | 610 | def __nonzero__(self): |
|
607 | 611 | return True |
|
608 | 612 | |
@@ -712,13 +716,14 b' class workingctx(changectx):' | |||
|
712 | 716 | assert self._clean is not None # must call status first |
|
713 | 717 | return self._clean |
|
714 | 718 | def branch(self): |
|
715 | return self._extra['branch'] | |
|
719 | return encoding.tolocal(self._extra['branch']) | |
|
716 | 720 | def extra(self): |
|
717 | 721 | return self._extra |
|
718 | 722 | |
|
719 | 723 | def tags(self): |
|
720 | 724 | t = [] |
|
721 |
|
|
|
725 | for p in self.parents(): | |
|
726 | t.extend(p.tags()) | |
|
722 | 727 | return t |
|
723 | 728 | |
|
724 | 729 | def children(self): |
@@ -827,7 +832,7 b' class workingctx(changectx):' | |||
|
827 | 832 | if unlink: |
|
828 | 833 | for f in list: |
|
829 | 834 | try: |
|
830 | util.unlink(self._repo.wjoin(f)) | |
|
835 | util.unlinkpath(self._repo.wjoin(f)) | |
|
831 | 836 | except OSError, inst: |
|
832 | 837 | if inst.errno != errno.ENOENT: |
|
833 | 838 | raise |
@@ -902,6 +907,9 b' class workingfilectx(filectx):' | |||
|
902 | 907 | def __str__(self): |
|
903 | 908 | return "%s@%s" % (self.path(), self._changectx) |
|
904 | 909 | |
|
910 | def __repr__(self): | |
|
911 | return "<workingfilectx %s>" % str(self) | |
|
912 | ||
|
905 | 913 | def data(self): |
|
906 | 914 | return self._repo.wread(self._path) |
|
907 | 915 | def renamed(self): |
@@ -1042,7 +1050,7 b' class memctx(object):' | |||
|
1042 | 1050 | def clean(self): |
|
1043 | 1051 | return self._status[6] |
|
1044 | 1052 | def branch(self): |
|
1045 | return self._extra['branch'] | |
|
1053 | return encoding.tolocal(self._extra['branch']) | |
|
1046 | 1054 | def extra(self): |
|
1047 | 1055 | return self._extra |
|
1048 | 1056 | def flags(self, f): |
@@ -78,10 +78,10 b' class _demandmod(object):' | |||
|
78 | 78 | self._load() |
|
79 | 79 | setattr(self._module, attr, val) |
|
80 | 80 | |
|
81 |
def _demandimport(name, globals=None, locals=None, fromlist=None, level= |
|
|
81 | def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1): | |
|
82 | 82 | if not locals or name in ignore or fromlist == ('*',): |
|
83 | 83 | # these cases we can't really delay |
|
84 |
if level |
|
|
84 | if level == -1: | |
|
85 | 85 | return _origimport(name, globals, locals, fromlist) |
|
86 | 86 | else: |
|
87 | 87 | return _origimport(name, globals, locals, fromlist, level) |
@@ -91,7 +91,10 b' def _demandimport(name, globals=None, lo' | |||
|
91 | 91 | base, rest = name.split('.', 1) |
|
92 | 92 | # email.__init__ loading email.mime |
|
93 | 93 | if globals and globals.get('__name__', None) == base: |
|
94 | return _origimport(name, globals, locals, fromlist) | |
|
94 | if level != -1: | |
|
95 | return _origimport(name, globals, locals, fromlist, level) | |
|
96 | else: | |
|
97 | return _origimport(name, globals, locals, fromlist) | |
|
95 | 98 | # if a is already demand-loaded, add b to its submodule list |
|
96 | 99 | if base in locals: |
|
97 | 100 | if isinstance(locals[base], _demandmod): |
@@ -99,7 +102,7 b' def _demandimport(name, globals=None, lo' | |||
|
99 | 102 | return locals[base] |
|
100 | 103 | return _demandmod(name, globals, locals) |
|
101 | 104 | else: |
|
102 |
if level |
|
|
105 | if level != -1: | |
|
103 | 106 | # from . import b,c,d or from .a import b,c,d |
|
104 | 107 | return _origimport(name, globals, locals, fromlist, level) |
|
105 | 108 | # from a import b,c,d |
@@ -111,7 +114,7 b' def _demandimport(name, globals=None, lo' | |||
|
111 | 114 | mod = getattr(mod, comp) |
|
112 | 115 | for x in fromlist: |
|
113 | 116 | # set requested submodules for demand load |
|
114 |
if not |
|
|
117 | if not hasattr(mod, x): | |
|
115 | 118 | setattr(mod, x, _demandmod(x, mod.__dict__, locals)) |
|
116 | 119 | return mod |
|
117 | 120 |
@@ -7,7 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | from node import nullid |
|
9 | 9 | from i18n import _ |
|
10 | import util, ignore, osutil, parsers | |
|
10 | import util, ignore, osutil, parsers, encoding | |
|
11 | 11 | import struct, os, stat, errno |
|
12 | 12 | import cStringIO |
|
13 | 13 | |
@@ -36,7 +36,7 b' def _decdirs(dirs, path):' | |||
|
36 | 36 | |
|
37 | 37 | class dirstate(object): |
|
38 | 38 | |
|
39 | def __init__(self, opener, ui, root): | |
|
39 | def __init__(self, opener, ui, root, validate): | |
|
40 | 40 | '''Create a new dirstate object. |
|
41 | 41 | |
|
42 | 42 | opener is an open()-like callable that can be used to open the |
@@ -44,6 +44,7 b' class dirstate(object):' | |||
|
44 | 44 | the dirstate. |
|
45 | 45 | ''' |
|
46 | 46 | self._opener = opener |
|
47 | self._validate = validate | |
|
47 | 48 | self._root = root |
|
48 | 49 | self._rootdir = os.path.join(root, '') |
|
49 | 50 | self._dirty = False |
@@ -79,7 +80,9 b' class dirstate(object):' | |||
|
79 | 80 | @propertycache |
|
80 | 81 | def _pl(self): |
|
81 | 82 | try: |
|
82 |
|
|
|
83 | fp = self._opener("dirstate") | |
|
84 | st = fp.read(40) | |
|
85 | fp.close() | |
|
83 | 86 | l = len(st) |
|
84 | 87 | if l == 40: |
|
85 | 88 | return st[:20], st[20:40] |
@@ -197,10 +200,10 b' class dirstate(object):' | |||
|
197 | 200 | yield x |
|
198 | 201 | |
|
199 | 202 | def parents(self): |
|
200 | return self._pl | |
|
203 | return [self._validate(p) for p in self._pl] | |
|
201 | 204 | |
|
202 | 205 | def branch(self): |
|
203 | return self._branch | |
|
206 | return encoding.tolocal(self._branch) | |
|
204 | 207 | |
|
205 | 208 | def setparents(self, p1, p2=nullid): |
|
206 | 209 | self._dirty = self._dirtypl = True |
@@ -209,8 +212,8 b' class dirstate(object):' | |||
|
209 | 212 | def setbranch(self, branch): |
|
210 | 213 | if branch in ['tip', '.', 'null']: |
|
211 | 214 | raise util.Abort(_('the name \'%s\' is reserved') % branch) |
|
212 | self._branch = branch | |
|
213 | self._opener("branch", "w").write(branch + '\n') | |
|
215 | self._branch = encoding.fromlocal(branch) | |
|
216 | self._opener("branch", "w").write(self._branch + '\n') | |
|
214 | 217 | |
|
215 | 218 | def _read(self): |
|
216 | 219 | self._map = {} |
@@ -229,7 +232,8 b' class dirstate(object):' | |||
|
229 | 232 | self._pl = p |
|
230 | 233 | |
|
231 | 234 | def invalidate(self): |
|
232 |
for a in "_map |
|
|
235 | for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs", | |
|
236 | "_ignore"): | |
|
233 | 237 | if a in self.__dict__: |
|
234 | 238 | delattr(self, a) |
|
235 | 239 | self._dirty = False |
@@ -220,8 +220,6 b' def prepush(repo, remote, force, revs, n' | |||
|
220 | 220 | # - a local outgoing head descended from update |
|
221 | 221 | # - a remote head that's known locally and not |
|
222 | 222 | # ancestral to an outgoing head |
|
223 | # | |
|
224 | # New named branches cannot be created without --force. | |
|
225 | 223 | |
|
226 | 224 | # 1. Create set of branches involved in the push. |
|
227 | 225 | branches = set(repo[n].branch() for n in outg) |
@@ -280,20 +278,30 b' def prepush(repo, remote, force, revs, n' | |||
|
280 | 278 | |
|
281 | 279 | # 5. Check for new heads. |
|
282 | 280 | # If there are more heads after the push than before, a suitable |
|
283 |
# |
|
|
281 | # error message, depending on unsynced status, is displayed. | |
|
282 | error = None | |
|
284 | 283 | for branch in branches: |
|
285 |
|
|
|
284 | newhs = set(newmap[branch]) | |
|
285 | oldhs = set(oldmap[branch]) | |
|
286 | if len(newhs) > len(oldhs): | |
|
287 | if error is None: | |
|
288 | if branch: | |
|
289 | error = _("push creates new remote heads " | |
|
290 | "on branch '%s'!") % branch | |
|
291 | else: | |
|
292 | error = _("push creates new remote heads!") | |
|
293 | if branch in unsynced: | |
|
294 | hint = _("you should pull and merge or " | |
|
295 | "use push -f to force") | |
|
296 | else: | |
|
297 | hint = _("did you forget to merge? " | |
|
298 | "use push -f to force") | |
|
286 | 299 | if branch: |
|
287 | msg = _("push creates new remote heads " | |
|
288 | "on branch '%s'!") % branch | |
|
289 | else: | |
|
290 | msg = _("push creates new remote heads!") | |
|
291 | ||
|
292 | if branch in unsynced: | |
|
293 | hint = _("you should pull and merge or use push -f to force") | |
|
294 | else: | |
|
295 | hint = _("did you forget to merge? use push -f to force") | |
|
296 | raise util.Abort(msg, hint=hint) | |
|
300 | repo.ui.debug("new remote heads on branch '%s'\n" % branch) | |
|
301 | for h in (newhs - oldhs): | |
|
302 | repo.ui.debug("new remote head %s\n" % short(h)) | |
|
303 | if error: | |
|
304 | raise util.Abort(error, hint=hint) | |
|
297 | 305 | |
|
298 | 306 | # 6. Check for unsynced changes on involved branches. |
|
299 | 307 | if unsynced: |
@@ -221,15 +221,20 b' class cmdalias(object):' | |||
|
221 | 221 | def fn(ui, *args): |
|
222 | 222 | env = {'HG_ARGS': ' '.join((self.name,) + args)} |
|
223 | 223 | def _checkvar(m): |
|
224 |
if |
|
|
224 | if m.groups()[0] == '$': | |
|
225 | return m.group() | |
|
226 | elif int(m.groups()[0]) <= len(args): | |
|
225 | 227 | return m.group() |
|
226 | 228 | else: |
|
229 | ui.debug(_("No argument found for substitution" | |
|
230 | "of %i variable in alias '%s' definition.") | |
|
231 | % (int(m.groups()[0]), self.name)) | |
|
227 | 232 | return '' |
|
228 | cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:]) | |
|
233 | cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:]) | |
|
229 | 234 | replace = dict((str(i + 1), arg) for i, arg in enumerate(args)) |
|
230 | 235 | replace['0'] = self.name |
|
231 | 236 | replace['@'] = ' '.join(args) |
|
232 | cmd = util.interpolate(r'\$', replace, cmd) | |
|
237 | cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True) | |
|
233 | 238 | return util.system(cmd, environ=env) |
|
234 | 239 | self.fn = fn |
|
235 | 240 | return |
@@ -290,7 +295,7 b' class cmdalias(object):' | |||
|
290 | 295 | ui.debug("alias '%s' shadows command '%s'\n" % |
|
291 | 296 | (self.name, self.cmdname)) |
|
292 | 297 | |
|
293 | if self.definition.startswith('!'): | |
|
298 | if hasattr(self, 'shell'): | |
|
294 | 299 | return self.fn(ui, *args, **opts) |
|
295 | 300 | else: |
|
296 | 301 | try: |
@@ -589,8 +594,12 b' def _dispatch(ui, args):' | |||
|
589 | 594 | msg = ' '.join(' ' in a and repr(a) or a for a in fullargs) |
|
590 | 595 | ui.log("command", msg + "\n") |
|
591 | 596 | d = lambda: util.checksignature(func)(ui, *args, **cmdoptions) |
|
592 | return runcommand(lui, repo, cmd, fullargs, ui, options, d, | |
|
593 | cmdpats, cmdoptions) | |
|
597 | try: | |
|
598 | return runcommand(lui, repo, cmd, fullargs, ui, options, d, | |
|
599 | cmdpats, cmdoptions) | |
|
600 | finally: | |
|
601 | if repo: | |
|
602 | repo.close() | |
|
594 | 603 | |
|
595 | 604 | def _runcommand(ui, options, cmd, cmdfunc): |
|
596 | 605 | def checkargs(): |
@@ -48,6 +48,16 b' except locale.Error:' | |||
|
48 | 48 | encodingmode = os.environ.get("HGENCODINGMODE", "strict") |
|
49 | 49 | fallbackencoding = 'ISO-8859-1' |
|
50 | 50 | |
|
51 | class localstr(str): | |
|
52 | '''This class allows strings that are unmodified to be | |
|
53 | round-tripped to the local encoding and back''' | |
|
54 | def __new__(cls, u, l): | |
|
55 | s = str.__new__(cls, l) | |
|
56 | s._utf8 = u | |
|
57 | return s | |
|
58 | def __hash__(self): | |
|
59 | return hash(self._utf8) # avoid collisions in local string space | |
|
60 | ||
|
51 | 61 | def tolocal(s): |
|
52 | 62 | """ |
|
53 | 63 | Convert a string from internal UTF-8 to local encoding |
@@ -57,17 +67,45 b' def tolocal(s):' | |||
|
57 | 67 | other character sets. We attempt to decode everything strictly |
|
58 | 68 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and |
|
59 | 69 | replace unknown characters. |
|
70 | ||
|
71 | The localstr class is used to cache the known UTF-8 encoding of | |
|
72 | strings next to their local representation to allow lossless | |
|
73 | round-trip conversion back to UTF-8. | |
|
74 | ||
|
75 | >>> u = 'foo: \\xc3\\xa4' # utf-8 | |
|
76 | >>> l = tolocal(u) | |
|
77 | >>> l | |
|
78 | 'foo: ?' | |
|
79 | >>> fromlocal(l) | |
|
80 | 'foo: \\xc3\\xa4' | |
|
81 | >>> u2 = 'foo: \\xc3\\xa1' | |
|
82 | >>> d = { l: 1, tolocal(u2): 2 } | |
|
83 | >>> d # no collision | |
|
84 | {'foo: ?': 1, 'foo: ?': 2} | |
|
85 | >>> 'foo: ?' in d | |
|
86 | False | |
|
87 | >>> l1 = 'foo: \\xe4' # historical latin1 fallback | |
|
88 | >>> l = tolocal(l1) | |
|
89 | >>> l | |
|
90 | 'foo: ?' | |
|
91 | >>> fromlocal(l) # magically in utf-8 | |
|
92 | 'foo: \\xc3\\xa4' | |
|
60 | 93 | """ |
|
94 | ||
|
61 | 95 | for e in ('UTF-8', fallbackencoding): |
|
62 | 96 | try: |
|
63 | 97 | u = s.decode(e) # attempt strict decoding |
|
64 | return u.encode(encoding, "replace") | |
|
98 | if e == 'UTF-8': | |
|
99 | return localstr(s, u.encode(encoding, "replace")) | |
|
100 | else: | |
|
101 | return localstr(u.encode('UTF-8'), | |
|
102 | u.encode(encoding, "replace")) | |
|
65 | 103 | except LookupError, k: |
|
66 | 104 | raise error.Abort("%s, please check your locale settings" % k) |
|
67 | 105 | except UnicodeDecodeError: |
|
68 | 106 | pass |
|
69 | 107 | u = s.decode("utf-8", "replace") # last ditch |
|
70 | return u.encode(encoding, "replace") | |
|
108 | return u.encode(encoding, "replace") # can't round-trip | |
|
71 | 109 | |
|
72 | 110 | def fromlocal(s): |
|
73 | 111 | """ |
@@ -79,6 +117,11 b' def fromlocal(s):' | |||
|
79 | 117 | 'replace', which replaces unknown characters with a special |
|
80 | 118 | Unicode character, and 'ignore', which drops the character. |
|
81 | 119 | """ |
|
120 | ||
|
121 | # can we do a lossless round-trip? | |
|
122 | if isinstance(s, localstr): | |
|
123 | return s._utf8 | |
|
124 | ||
|
82 | 125 | try: |
|
83 | 126 | return s.decode(encoding, encodingmode).encode("utf-8") |
|
84 | 127 | except UnicodeDecodeError, inst: |
@@ -11,6 +11,7 b' from i18n import _, gettext' | |||
|
11 | 11 | |
|
12 | 12 | _extensions = {} |
|
13 | 13 | _order = [] |
|
14 | _ignore = ['hbisect', 'bookmarks'] | |
|
14 | 15 | |
|
15 | 16 | def extensions(): |
|
16 | 17 | for name in _order: |
@@ -45,6 +46,8 b' def load(ui, name, path):' | |||
|
45 | 46 | shortname = name[6:] |
|
46 | 47 | else: |
|
47 | 48 | shortname = name |
|
49 | if shortname in _ignore: | |
|
50 | return None | |
|
48 | 51 | if shortname in _extensions: |
|
49 | 52 | return _extensions[shortname] |
|
50 | 53 | _extensions[shortname] = None |
@@ -248,7 +251,7 b' def disabledext(name):' | |||
|
248 | 251 | if name in paths: |
|
249 | 252 | return _disabledhelp(paths[name]) |
|
250 | 253 | |
|
251 | def disabledcmd(cmd, strict=False): | |
|
254 | def disabledcmd(ui, cmd, strict=False): | |
|
252 | 255 | '''import disabled extensions until cmd is found. |
|
253 | 256 | returns (cmdname, extname, doc)''' |
|
254 | 257 | |
@@ -266,6 +269,10 b' def disabledcmd(cmd, strict=False):' | |||
|
266 | 269 | getattr(mod, 'cmdtable', {}), strict) |
|
267 | 270 | except (error.AmbiguousCommand, error.UnknownCommand): |
|
268 | 271 | return |
|
272 | except Exception: | |
|
273 | ui.warn(_('warning: error finding commands in %s\n') % path) | |
|
274 | ui.traceback() | |
|
275 | return | |
|
269 | 276 | for c in aliases: |
|
270 | 277 | if c.startswith(cmd): |
|
271 | 278 | cmd = c |
@@ -7,6 +7,17 b'' | |||
|
7 | 7 | |
|
8 | 8 | import revlog |
|
9 | 9 | |
|
10 | def _parsemeta(text): | |
|
11 | if not text.startswith('\1\n'): | |
|
12 | return {} | |
|
13 | s = text.index('\1\n', 2) | |
|
14 | mt = text[2:s] | |
|
15 | m = {} | |
|
16 | for l in mt.splitlines(): | |
|
17 | k, v = l.split(": ", 1) | |
|
18 | m[k] = v | |
|
19 | return m | |
|
20 | ||
|
10 | 21 | class filelog(revlog.revlog): |
|
11 | 22 | def __init__(self, opener, path): |
|
12 | 23 | revlog.revlog.__init__(self, opener, |
@@ -19,18 +30,6 b' class filelog(revlog.revlog):' | |||
|
19 | 30 | s = t.index('\1\n', 2) |
|
20 | 31 | return t[s + 2:] |
|
21 | 32 | |
|
22 | def _readmeta(self, node): | |
|
23 | t = self.revision(node) | |
|
24 | if not t.startswith('\1\n'): | |
|
25 | return {} | |
|
26 | s = t.index('\1\n', 2) | |
|
27 | mt = t[2:s] | |
|
28 | m = {} | |
|
29 | for l in mt.splitlines(): | |
|
30 | k, v = l.split(": ", 1) | |
|
31 | m[k] = v | |
|
32 | return m | |
|
33 | ||
|
34 | 33 | def add(self, text, meta, transaction, link, p1=None, p2=None): |
|
35 | 34 | if meta or text.startswith('\1\n'): |
|
36 | 35 | mt = ["%s: %s\n" % (k, v) for k, v in sorted(meta.iteritems())] |
@@ -40,7 +39,8 b' class filelog(revlog.revlog):' | |||
|
40 | 39 | def renamed(self, node): |
|
41 | 40 | if self.parents(node)[0] != revlog.nullid: |
|
42 | 41 | return False |
|
43 |
|
|
|
42 | t = self.revision(node) | |
|
43 | m = _parsemeta(t) | |
|
44 | 44 | if m and "copy" in m: |
|
45 | 45 | return (m["copy"], revlog.bin(m["copyrev"])) |
|
46 | 46 | return False |
@@ -20,6 +20,11 b' across path separators and ``{a,b}`` to ' | |||
|
20 | 20 | To use a Perl/Python regular expression, start a name with ``re:``. |
|
21 | 21 | Regexp pattern matching is anchored at the root of the repository. |
|
22 | 22 | |
|
23 | To read name patterns from a file, use ``listfile:`` or ``listfile0:``. | |
|
24 | The latter expects null delimited patterns while the former expects line | |
|
25 | feeds. Each string read from the file is itself treated as a file | |
|
26 | pattern. | |
|
27 | ||
|
23 | 28 | Plain examples:: |
|
24 | 29 | |
|
25 | 30 | path:foo/bar a name bar in a directory named foo in the root |
@@ -39,3 +44,8 b' Glob examples::' | |||
|
39 | 44 | Regexp examples:: |
|
40 | 45 | |
|
41 | 46 | re:.*\.c$ any name ending in ".c", anywhere in the repository |
|
47 | ||
|
48 | File examples:: | |
|
49 | ||
|
50 | listfile:list.txt read list from list.txt with one file pattern per line | |
|
51 | listfile0:list.txt read list from list.txt with null byte delimiters |
@@ -78,7 +78,10 b' Interaction with Mercurial Commands' | |||
|
78 | 78 | :commit: commit creates a consistent snapshot of the state of the |
|
79 | 79 | entire project and its subrepositories. It does this by first |
|
80 | 80 | attempting to commit all modified subrepositories, then recording |
|
81 |
their state and finally committing it in the parent |
|
|
81 | their state and finally committing it in the parent | |
|
82 | repository. Mercurial can be made to abort if any subrepository | |
|
83 | content is modified by setting "ui.commitsubrepos=no" in a | |
|
84 | configuration file (see :hg:`help config`). | |
|
82 | 85 | |
|
83 | 86 | :diff: diff does not recurse in subrepos unless -S/--subrepos is |
|
84 | 87 | specified. Changes are displayed as usual, on the subrepositories |
@@ -4,7 +4,7 b' Valid URLs are of the form::' | |||
|
4 | 4 | file://local/filesystem/path[#revision] |
|
5 | 5 | http://[user[:pass]@]host[:port]/[path][#revision] |
|
6 | 6 | https://[user[:pass]@]host[:port]/[path][#revision] |
|
7 |
ssh://[user |
|
|
7 | ssh://[user@]host[:port]/[path][#revision] | |
|
8 | 8 | |
|
9 | 9 | Paths in the local filesystem can either point to Mercurial |
|
10 | 10 | repositories or to bundle files (as created by :hg:`bundle` or :hg:` |
@@ -32,24 +32,22 b' def addbranchrevs(lrepo, repo, branches,' | |||
|
32 | 32 | return revs, revs[0] |
|
33 | 33 | branchmap = repo.branchmap() |
|
34 | 34 | |
|
35 |
def primary(b |
|
|
36 |
if b |
|
|
35 | def primary(branch): | |
|
36 | if branch == '.': | |
|
37 | 37 | if not lrepo or not lrepo.local(): |
|
38 | 38 | raise util.Abort(_("dirstate branch not accessible")) |
|
39 |
b |
|
|
40 |
if b |
|
|
41 |
revs.extend(node.hex(r) for r in reversed(branchmap[b |
|
|
39 | branch = lrepo.dirstate.branch() | |
|
40 | if branch in branchmap: | |
|
41 | revs.extend(node.hex(r) for r in reversed(branchmap[branch])) | |
|
42 | 42 | return True |
|
43 | 43 | else: |
|
44 | 44 | return False |
|
45 | 45 | |
|
46 | 46 | for branch in branches: |
|
47 | butf8 = encoding.fromlocal(branch) | |
|
48 | if not primary(butf8): | |
|
47 | if not primary(branch): | |
|
49 | 48 | raise error.RepoLookupError(_("unknown branch '%s'") % branch) |
|
50 | 49 | if hashbranch: |
|
51 | butf8 = encoding.fromlocal(hashbranch) | |
|
52 | if not primary(butf8): | |
|
50 | if not primary(hashbranch): | |
|
53 | 51 | revs.append(hashbranch) |
|
54 | 52 | return revs, revs[0] |
|
55 | 53 | |
@@ -365,8 +363,7 b' def clone(ui, source, dest=None, pull=Fa' | |||
|
365 | 363 | except error.RepoLookupError: |
|
366 | 364 | continue |
|
367 | 365 | bn = dest_repo[uprev].branch() |
|
368 | dest_repo.ui.status(_("updating to branch %s\n") | |
|
369 | % encoding.tolocal(bn)) | |
|
366 | dest_repo.ui.status(_("updating to branch %s\n") % bn) | |
|
370 | 367 | _update(dest_repo, uprev) |
|
371 | 368 | |
|
372 | 369 | return src_repo, dest_repo |
@@ -398,7 +395,8 b' def clean(repo, node, show_stats=True):' | |||
|
398 | 395 | return stats[3] > 0 |
|
399 | 396 | |
|
400 | 397 | def merge(repo, node, force=None, remind=True): |
|
401 |
""" |
|
|
398 | """Branch merge with node, resolving changes. Return true if any | |
|
399 | unresolved conflicts.""" | |
|
402 | 400 | stats = mergemod.update(repo, node, True, force, False) |
|
403 | 401 | _showstats(repo, stats) |
|
404 | 402 | if stats[3]: |
@@ -119,7 +119,10 b' def staticfile(directory, fname, req):' | |||
|
119 | 119 | os.stat(path) |
|
120 | 120 | ct = mimetypes.guess_type(path)[0] or "text/plain" |
|
121 | 121 | req.respond(HTTP_OK, ct, length = os.path.getsize(path)) |
|
122 |
|
|
|
122 | fp = open(path, 'rb') | |
|
123 | data = fp.read() | |
|
124 | fp.close() | |
|
125 | return data | |
|
123 | 126 | except TypeError: |
|
124 | 127 | raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') |
|
125 | 128 | except OSError, err: |
@@ -33,14 +33,23 b' def findrepos(paths):' | |||
|
33 | 33 | repos.append((prefix, root)) |
|
34 | 34 | continue |
|
35 | 35 | roothead = os.path.normpath(os.path.abspath(roothead)) |
|
36 |
|
|
|
37 | path = os.path.normpath(path) | |
|
38 | name = util.pconvert(path[len(roothead):]).strip('/') | |
|
39 | if prefix: | |
|
40 | name = prefix + '/' + name | |
|
41 | repos.append((name, path)) | |
|
36 | paths = util.walkrepos(roothead, followsym=True, recurse=recurse) | |
|
37 | repos.extend(urlrepos(prefix, roothead, paths)) | |
|
42 | 38 | return repos |
|
43 | 39 | |
|
40 | def urlrepos(prefix, roothead, paths): | |
|
41 | """yield url paths and filesystem paths from a list of repo paths | |
|
42 | ||
|
43 | >>> list(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) | |
|
44 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] | |
|
45 | >>> list(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) | |
|
46 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] | |
|
47 | """ | |
|
48 | for path in paths: | |
|
49 | path = os.path.normpath(path) | |
|
50 | yield (prefix + '/' + | |
|
51 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path | |
|
52 | ||
|
44 | 53 | class hgwebdir(object): |
|
45 | 54 | refreshinterval = 20 |
|
46 | 55 |
@@ -550,7 +550,8 b' def annotate(web, req, tmpl):' | |||
|
550 | 550 | "targetline": targetline, |
|
551 | 551 | "line": l, |
|
552 | 552 | "lineid": "l%d" % (lineno + 1), |
|
553 |
"linenumber": "% 6d" % (lineno + 1) |
|
|
553 | "linenumber": "% 6d" % (lineno + 1), | |
|
554 | "revdate": f.date()} | |
|
554 | 555 | |
|
555 | 556 | return tmpl("fileannotate", |
|
556 | 557 | file=f, |
@@ -92,6 +92,12 b' def _exthook(ui, repo, name, cmd, args, ' | |||
|
92 | 92 | for k, v in args.iteritems(): |
|
93 | 93 | if hasattr(v, '__call__'): |
|
94 | 94 | v = v() |
|
95 | if isinstance(v, dict): | |
|
96 | # make the dictionary element order stable across Python | |
|
97 | # implementations | |
|
98 | v = ('{' + | |
|
99 | ', '.join('%r: %r' % i for i in sorted(v.iteritems())) + | |
|
100 | '}') | |
|
95 | 101 | env['HG_' + k.upper()] = v |
|
96 | 102 | |
|
97 | 103 | if repo: |
@@ -160,7 +160,7 b' class httprepository(wireproto.wirerepos' | |||
|
160 | 160 | break |
|
161 | 161 | |
|
162 | 162 | tempname = changegroup.writebundle(cg, None, type) |
|
163 | fp = url.httpsendfile(tempname, "rb") | |
|
163 | fp = url.httpsendfile(self.ui, tempname, "rb") | |
|
164 | 164 | headers = {'Content-Type': 'application/mercurial-0.1'} |
|
165 | 165 | |
|
166 | 166 | try: |
@@ -86,7 +86,8 b' def ignore(root, files, warn):' | |||
|
86 | 86 | (f, inst.strerror)) |
|
87 | 87 | |
|
88 | 88 | allpats = [] |
|
89 |
|
|
|
89 | for patlist in pats.values(): | |
|
90 | allpats.extend(patlist) | |
|
90 | 91 | if not allpats: |
|
91 | 92 | return util.never |
|
92 | 93 |
@@ -8,7 +8,7 b'' | |||
|
8 | 8 | from node import bin, hex, nullid, nullrev, short |
|
9 | 9 | from i18n import _ |
|
10 | 10 | import repo, changegroup, subrepo, discovery, pushkey |
|
11 | import changelog, dirstate, filelog, manifest, context | |
|
11 | import changelog, dirstate, filelog, manifest, context, bookmarks | |
|
12 | 12 | import lock, transaction, store, encoding |
|
13 | 13 | import util, extensions, hook, error |
|
14 | 14 | import match as matchmod |
@@ -105,7 +105,7 b' class localrepository(repo.repository):' | |||
|
105 | 105 | self._tags = None |
|
106 | 106 | self._tagtypes = None |
|
107 | 107 | |
|
108 |
self._branchcache = None |
|
|
108 | self._branchcache = None | |
|
109 | 109 | self._branchcachetip = None |
|
110 | 110 | self.nodetagscache = None |
|
111 | 111 | self.filterpats = {} |
@@ -161,6 +161,13 b' class localrepository(repo.repository):' | |||
|
161 | 161 | parts.pop() |
|
162 | 162 | return False |
|
163 | 163 | |
|
164 | @util.propertycache | |
|
165 | def _bookmarks(self): | |
|
166 | return bookmarks.read(self) | |
|
167 | ||
|
168 | @util.propertycache | |
|
169 | def _bookmarkcurrent(self): | |
|
170 | return bookmarks.readcurrent(self) | |
|
164 | 171 | |
|
165 | 172 | @propertycache |
|
166 | 173 | def changelog(self): |
@@ -178,7 +185,19 b' class localrepository(repo.repository):' | |||
|
178 | 185 | |
|
179 | 186 | @propertycache |
|
180 | 187 | def dirstate(self): |
|
181 | return dirstate.dirstate(self.opener, self.ui, self.root) | |
|
188 | warned = [0] | |
|
189 | def validate(node): | |
|
190 | try: | |
|
191 | r = self.changelog.rev(node) | |
|
192 | return node | |
|
193 | except error.LookupError: | |
|
194 | if not warned[0]: | |
|
195 | warned[0] = True | |
|
196 | self.ui.warn(_("warning: ignoring unknown" | |
|
197 | " working parent %s!\n") % short(node)) | |
|
198 | return nullid | |
|
199 | ||
|
200 | return dirstate.dirstate(self.opener, self.ui, self.root, validate) | |
|
182 | 201 | |
|
183 | 202 | def __getitem__(self, changeid): |
|
184 | 203 | if changeid is None: |
@@ -264,6 +283,8 b' class localrepository(repo.repository):' | |||
|
264 | 283 | # committed tags are stored in UTF-8 |
|
265 | 284 | writetags(fp, names, encoding.fromlocal, prevtags) |
|
266 | 285 | |
|
286 | fp.close() | |
|
287 | ||
|
267 | 288 | if '.hgtags' not in self.dirstate: |
|
268 | 289 | self[None].add(['.hgtags']) |
|
269 | 290 | |
@@ -379,6 +400,13 b' class localrepository(repo.repository):' | |||
|
379 | 400 | tags.sort() |
|
380 | 401 | return self.nodetagscache.get(node, []) |
|
381 | 402 | |
|
403 | def nodebookmarks(self, node): | |
|
404 | marks = [] | |
|
405 | for bookmark, n in self._bookmarks.iteritems(): | |
|
406 | if n == node: | |
|
407 | marks.append(bookmark) | |
|
408 | return sorted(marks) | |
|
409 | ||
|
382 | 410 | def _branchtags(self, partial, lrev): |
|
383 | 411 | # TODO: rename this function? |
|
384 | 412 | tiprev = len(self) - 1 |
@@ -424,11 +452,10 b' class localrepository(repo.repository):' | |||
|
424 | 452 | bt[bn] = tip |
|
425 | 453 | return bt |
|
426 | 454 | |
|
427 | ||
|
428 | 455 | def _readbranchcache(self): |
|
429 | 456 | partial = {} |
|
430 | 457 | try: |
|
431 |
f = self.opener("branchheads |
|
|
458 | f = self.opener("cache/branchheads") | |
|
432 | 459 | lines = f.read().split('\n') |
|
433 | 460 | f.close() |
|
434 | 461 | except (IOError, OSError): |
@@ -444,7 +471,8 b' class localrepository(repo.repository):' | |||
|
444 | 471 | if not l: |
|
445 | 472 | continue |
|
446 | 473 | node, label = l.split(" ", 1) |
|
447 | partial.setdefault(label.strip(), []).append(bin(node)) | |
|
474 | label = encoding.tolocal(label.strip()) | |
|
475 | partial.setdefault(label, []).append(bin(node)) | |
|
448 | 476 | except KeyboardInterrupt: |
|
449 | 477 | raise |
|
450 | 478 | except Exception, inst: |
@@ -455,11 +483,11 b' class localrepository(repo.repository):' | |||
|
455 | 483 | |
|
456 | 484 | def _writebranchcache(self, branches, tip, tiprev): |
|
457 | 485 | try: |
|
458 |
f = self.opener("branchheads |
|
|
486 | f = self.opener("cache/branchheads", "w", atomictemp=True) | |
|
459 | 487 | f.write("%s %s\n" % (hex(tip), tiprev)) |
|
460 | 488 | for label, nodes in branches.iteritems(): |
|
461 | 489 | for node in nodes: |
|
462 | f.write("%s %s\n" % (hex(node), label)) | |
|
490 | f.write("%s %s\n" % (hex(node), encoding.fromlocal(label))) | |
|
463 | 491 | f.rename() |
|
464 | 492 | except (IOError, OSError): |
|
465 | 493 | pass |
@@ -500,6 +528,8 b' class localrepository(repo.repository):' | |||
|
500 | 528 | n = self.changelog._match(key) |
|
501 | 529 | if n: |
|
502 | 530 | return n |
|
531 | if key in self._bookmarks: | |
|
532 | return self._bookmarks[key] | |
|
503 | 533 | if key in self.tags(): |
|
504 | 534 | return self.tags()[key] |
|
505 | 535 | if key in self.branchtags(): |
@@ -618,10 +648,6 b' class localrepository(repo.repository):' | |||
|
618 | 648 | |
|
619 | 649 | def wwrite(self, filename, data, flags): |
|
620 | 650 | data = self._filter(self._decodefilterpats, filename, data) |
|
621 | try: | |
|
622 | os.unlink(self.wjoin(filename)) | |
|
623 | except OSError: | |
|
624 | pass | |
|
625 | 651 | if 'l' in flags: |
|
626 | 652 | self.wopener.symlink(data, filename) |
|
627 | 653 | else: |
@@ -648,7 +674,8 b' class localrepository(repo.repository):' | |||
|
648 | 674 | except IOError: |
|
649 | 675 | ds = "" |
|
650 | 676 | self.opener("journal.dirstate", "w").write(ds) |
|
651 |
self.opener("journal.branch", "w").write( |
|
|
677 | self.opener("journal.branch", "w").write( | |
|
678 | encoding.fromlocal(self.dirstate.branch())) | |
|
652 | 679 | self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc)) |
|
653 | 680 | |
|
654 | 681 | renames = [(self.sjoin("journal"), self.sjoin("undo")), |
@@ -700,13 +727,16 b' class localrepository(repo.repository):' | |||
|
700 | 727 | transaction.rollback(self.sopener, self.sjoin("undo"), |
|
701 | 728 | self.ui.warn) |
|
702 | 729 | util.rename(self.join("undo.dirstate"), self.join("dirstate")) |
|
730 | if os.path.exists(self.join('undo.bookmarks')): | |
|
731 | util.rename(self.join('undo.bookmarks'), | |
|
732 | self.join('bookmarks')) | |
|
703 | 733 | try: |
|
704 | 734 | branch = self.opener("undo.branch").read() |
|
705 | 735 | self.dirstate.setbranch(branch) |
|
706 | 736 | except IOError: |
|
707 | 737 | self.ui.warn(_("Named branch could not be reset, " |
|
708 | 738 | "current branch still is: %s\n") |
|
709 |
% |
|
|
739 | % self.dirstate.branch()) | |
|
710 | 740 | self.invalidate() |
|
711 | 741 | self.dirstate.invalidate() |
|
712 | 742 | self.destroyed() |
@@ -724,7 +754,7 b' class localrepository(repo.repository):' | |||
|
724 | 754 | self._branchcachetip = None |
|
725 | 755 | |
|
726 | 756 | def invalidate(self): |
|
727 |
for a in "changelog |
|
|
757 | for a in ("changelog", "manifest", "_bookmarks", "_bookmarkscurrent"): | |
|
728 | 758 | if a in self.__dict__: |
|
729 | 759 | delattr(self, a) |
|
730 | 760 | self.invalidatecaches() |
@@ -753,8 +783,8 b' class localrepository(repo.repository):' | |||
|
753 | 783 | l.lock() |
|
754 | 784 | return l |
|
755 | 785 | |
|
756 |
l = self._lock(self.sjoin("lock"), wait, |
|
|
757 | _('repository %s') % self.origroot) | |
|
786 | l = self._lock(self.sjoin("lock"), wait, self.store.write, | |
|
787 | self.invalidate, _('repository %s') % self.origroot) | |
|
758 | 788 | self._lockref = weakref.ref(l) |
|
759 | 789 | return l |
|
760 | 790 | |
@@ -903,6 +933,12 b' class localrepository(repo.repository):' | |||
|
903 | 933 | if '.hgsubstate' not in changes[0]: |
|
904 | 934 | changes[0].insert(0, '.hgsubstate') |
|
905 | 935 | |
|
936 | if subs and not self.ui.configbool('ui', 'commitsubrepos', True): | |
|
937 | changedsubs = [s for s in subs if wctx.sub(s).dirty(True)] | |
|
938 | if changedsubs: | |
|
939 | raise util.Abort(_("uncommitted changes in subrepo %s") | |
|
940 | % changedsubs[0]) | |
|
941 | ||
|
906 | 942 | # make sure all explicit patterns are matched |
|
907 | 943 | if not force and match.files(): |
|
908 | 944 | matched = set(changes[0] + changes[1] + changes[2]) |
@@ -968,7 +1004,11 b' class localrepository(repo.repository):' | |||
|
968 | 1004 | _('note: commit message saved in %s\n') % msgfn) |
|
969 | 1005 | raise |
|
970 | 1006 | |
|
971 | # update dirstate and mergestate | |
|
1007 | # update bookmarks, dirstate and mergestate | |
|
1008 | parents = (p1, p2) | |
|
1009 | if p2 == nullid: | |
|
1010 | parents = (p1,) | |
|
1011 | bookmarks.update(self, parents, ret) | |
|
972 | 1012 | for f in changes[0] + changes[1]: |
|
973 | 1013 | self.dirstate.normal(f) |
|
974 | 1014 | for f in changes[2]: |
@@ -1202,14 +1242,14 b' class localrepository(repo.repository):' | |||
|
1202 | 1242 | self.ui.status(_("skipping missing subrepository: %s\n") |
|
1203 | 1243 | % subpath) |
|
1204 | 1244 | |
|
1205 |
|
|
|
1245 | for l in r: | |
|
1246 | l.sort() | |
|
1206 | 1247 | return r |
|
1207 | 1248 | |
|
1208 | 1249 | def heads(self, start=None): |
|
1209 | 1250 | heads = self.changelog.heads(start) |
|
1210 | 1251 | # sort the output in rev descending order |
|
1211 | heads = [(-self.changelog.rev(h), h) for h in heads] | |
|
1212 | return [n for (r, n) in sorted(heads)] | |
|
1252 | return sorted(heads, key=self.changelog.rev, reverse=True) | |
|
1213 | 1253 | |
|
1214 | 1254 | def branchheads(self, branch=None, start=None, closed=False): |
|
1215 | 1255 | '''return a (possibly filtered) list of heads for the given branch |
@@ -1276,26 +1316,57 b' class localrepository(repo.repository):' | |||
|
1276 | 1316 | common, fetch, rheads = tmp |
|
1277 | 1317 | if not fetch: |
|
1278 | 1318 | self.ui.status(_("no changes found\n")) |
|
1279 |
re |
|
|
1280 | ||
|
1281 | if heads is None and fetch == [nullid]: | |
|
1282 | self.ui.status(_("requesting all changes\n")) | |
|
1283 | elif heads is None and remote.capable('changegroupsubset'): | |
|
1284 | # issue1320, avoid a race if remote changed after discovery | |
|
1285 | heads = rheads | |
|
1319 | result = 0 | |
|
1320 | else: | |
|
1321 | if heads is None and fetch == [nullid]: | |
|
1322 | self.ui.status(_("requesting all changes\n")) | |
|
1323 | elif heads is None and remote.capable('changegroupsubset'): | |
|
1324 | # issue1320, avoid a race if remote changed after discovery | |
|
1325 | heads = rheads | |
|
1286 | 1326 | |
|
1287 | if heads is None: | |
|
1288 | cg = remote.changegroup(fetch, 'pull') | |
|
1289 | else: | |
|
1290 | if not remote.capable('changegroupsubset'): | |
|
1327 | if heads is None: | |
|
1328 | cg = remote.changegroup(fetch, 'pull') | |
|
1329 | elif not remote.capable('changegroupsubset'): | |
|
1291 | 1330 | raise util.Abort(_("partial pull cannot be done because " |
|
1292 | "other repository doesn't support " | |
|
1293 | "changegroupsubset.")) | |
|
1294 | cg = remote.changegroupsubset(fetch, heads, 'pull') | |
|
1295 | return self.addchangegroup(cg, 'pull', remote.url(), lock=lock) | |
|
1331 | "other repository doesn't support " | |
|
1332 | "changegroupsubset.")) | |
|
1333 | else: | |
|
1334 | cg = remote.changegroupsubset(fetch, heads, 'pull') | |
|
1335 | result = self.addchangegroup(cg, 'pull', remote.url(), | |
|
1336 | lock=lock) | |
|
1296 | 1337 | finally: |
|
1297 | 1338 | lock.release() |
|
1298 | 1339 | |
|
1340 | self.ui.debug("checking for updated bookmarks\n") | |
|
1341 | rb = remote.listkeys('bookmarks') | |
|
1342 | changed = False | |
|
1343 | for k in rb.keys(): | |
|
1344 | if k in self._bookmarks: | |
|
1345 | nr, nl = rb[k], self._bookmarks[k] | |
|
1346 | if nr in self: | |
|
1347 | cr = self[nr] | |
|
1348 | cl = self[nl] | |
|
1349 | if cl.rev() >= cr.rev(): | |
|
1350 | continue | |
|
1351 | if cr in cl.descendants(): | |
|
1352 | self._bookmarks[k] = cr.node() | |
|
1353 | changed = True | |
|
1354 | self.ui.status(_("updating bookmark %s\n") % k) | |
|
1355 | else: | |
|
1356 | self.ui.warn(_("not updating divergent" | |
|
1357 | " bookmark %s\n") % k) | |
|
1358 | if changed: | |
|
1359 | bookmarks.write(self) | |
|
1360 | ||
|
1361 | return result | |
|
1362 | ||
|
1363 | def checkpush(self, force, revs): | |
|
1364 | """Extensions can override this function if additional checks have | |
|
1365 | to be performed before pushing, or call it if they override push | |
|
1366 | command. | |
|
1367 | """ | |
|
1368 | pass | |
|
1369 | ||
|
1299 | 1370 | def push(self, remote, force=False, revs=None, newbranch=False): |
|
1300 | 1371 | '''Push outgoing changesets (limited by revs) from the current |
|
1301 | 1372 | repository to remote. Return an integer: |
@@ -1312,35 +1383,52 b' class localrepository(repo.repository):' | |||
|
1312 | 1383 | # unbundle assumes local user cannot lock remote repo (new ssh |
|
1313 | 1384 | # servers, http servers). |
|
1314 | 1385 | |
|
1386 | self.checkpush(force, revs) | |
|
1315 | 1387 | lock = None |
|
1316 | 1388 | unbundle = remote.capable('unbundle') |
|
1317 | 1389 | if not unbundle: |
|
1318 | 1390 | lock = remote.lock() |
|
1319 | 1391 | try: |
|
1320 |
ret = discovery.prepush(self, remote, force, revs, |
|
|
1321 | if ret[0] is None: | |
|
1322 | # and here we return 0 for "nothing to push" or 1 for | |
|
1323 | # "something to push but I refuse" | |
|
1324 | return ret[1] | |
|
1325 | ||
|
1326 | cg, remote_heads = ret | |
|
1327 | if unbundle: | |
|
1328 | # local repo finds heads on server, finds out what revs it must | |
|
1329 | # push. once revs transferred, if server finds it has | |
|
1330 | # different heads (someone else won commit/push race), server | |
|
1331 | # aborts. | |
|
1332 | if force: | |
|
1333 |
re |
|
|
1334 | # ssh: return remote's addchangegroup() | |
|
1335 | # http: return remote's addchangegroup() or 0 for error | |
|
1336 | return remote.unbundle(cg, remote_heads, 'push') | |
|
1337 | else: | |
|
1338 | # we return an integer indicating remote head count change | |
|
1339 | return remote.addchangegroup(cg, 'push', self.url(), lock=lock) | |
|
1392 | cg, remote_heads = discovery.prepush(self, remote, force, revs, | |
|
1393 | newbranch) | |
|
1394 | ret = remote_heads | |
|
1395 | if cg is not None: | |
|
1396 | if unbundle: | |
|
1397 | # local repo finds heads on server, finds out what | |
|
1398 | # revs it must push. once revs transferred, if server | |
|
1399 | # finds it has different heads (someone else won | |
|
1400 | # commit/push race), server aborts. | |
|
1401 | if force: | |
|
1402 | remote_heads = ['force'] | |
|
1403 | # ssh: return remote's addchangegroup() | |
|
1404 | # http: return remote's addchangegroup() or 0 for error | |
|
1405 | ret = remote.unbundle(cg, remote_heads, 'push') | |
|
1406 | else: | |
|
1407 | # we return an integer indicating remote head count change | |
|
1408 | ret = remote.addchangegroup(cg, 'push', self.url(), | |
|
1409 | lock=lock) | |
|
1340 | 1410 | finally: |
|
1341 | 1411 | if lock is not None: |
|
1342 | 1412 | lock.release() |
|
1343 | 1413 | |
|
1414 | self.ui.debug("checking for updated bookmarks\n") | |
|
1415 | rb = remote.listkeys('bookmarks') | |
|
1416 | for k in rb.keys(): | |
|
1417 | if k in self._bookmarks: | |
|
1418 | nr, nl = rb[k], hex(self._bookmarks[k]) | |
|
1419 | if nr in self: | |
|
1420 | cr = self[nr] | |
|
1421 | cl = self[nl] | |
|
1422 | if cl in cr.descendants(): | |
|
1423 | r = remote.pushkey('bookmarks', k, nr, nl) | |
|
1424 | if r: | |
|
1425 | self.ui.status(_("updating bookmark %s\n") % k) | |
|
1426 | else: | |
|
1427 | self.ui.warn(_('updating bookmark %s' | |
|
1428 | ' failed!\n') % k) | |
|
1429 | ||
|
1430 | return ret | |
|
1431 | ||
|
1344 | 1432 | def changegroupinfo(self, nodes, source): |
|
1345 | 1433 | if self.ui.verbose or source == 'bundle': |
|
1346 | 1434 | self.ui.status(_("%d changesets found\n") % len(nodes)) |
@@ -1404,9 +1492,6 b' class localrepository(repo.repository):' | |||
|
1404 | 1492 | # Nor do we know which filenodes are missing. |
|
1405 | 1493 | msng_filenode_set = {} |
|
1406 | 1494 | |
|
1407 | junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex | |
|
1408 | junk = None | |
|
1409 | ||
|
1410 | 1495 | # A changeset always belongs to itself, so the changenode lookup |
|
1411 | 1496 | # function for a changenode is identity. |
|
1412 | 1497 | def identity(x): |
@@ -1494,8 +1579,13 b' class localrepository(repo.repository):' | |||
|
1494 | 1579 | group = cl.group(msng_cl_lst, identity, collect) |
|
1495 | 1580 | for cnt, chnk in enumerate(group): |
|
1496 | 1581 | yield chnk |
|
1497 | self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) | |
|
1498 | self.ui.progress(_('bundling changes'), None) | |
|
1582 | # revlog.group yields three entries per node, so | |
|
1583 | # dividing by 3 gives an approximation of how many | |
|
1584 | # nodes have been processed. | |
|
1585 | self.ui.progress(_('bundling'), cnt / 3, | |
|
1586 | unit=_('changesets')) | |
|
1587 | changecount = cnt / 3 | |
|
1588 | self.ui.progress(_('bundling'), None) | |
|
1499 | 1589 | |
|
1500 | 1590 | prune(mnfst, msng_mnfst_set) |
|
1501 | 1591 | add_extra_nodes(1, msng_mnfst_set) |
@@ -1507,10 +1597,17 b' class localrepository(repo.repository):' | |||
|
1507 | 1597 | group = mnfst.group(msng_mnfst_lst, |
|
1508 | 1598 | lambda mnode: msng_mnfst_set[mnode], |
|
1509 | 1599 | filenode_collector(changedfiles)) |
|
1600 | efiles = {} | |
|
1510 | 1601 | for cnt, chnk in enumerate(group): |
|
1602 | if cnt % 3 == 1: | |
|
1603 | mnode = chnk[:20] | |
|
1604 | efiles.update(mnfst.readdelta(mnode)) | |
|
1511 | 1605 | yield chnk |
|
1512 | self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) | |
|
1513 |
self.ui.progress(_('bundling |
|
|
1606 | # see above comment for why we divide by 3 | |
|
1607 | self.ui.progress(_('bundling'), cnt / 3, | |
|
1608 | unit=_('manifests'), total=changecount) | |
|
1609 | self.ui.progress(_('bundling'), None) | |
|
1610 | efiles = len(efiles) | |
|
1514 | 1611 | |
|
1515 | 1612 | # These are no longer needed, dereference and toss the memory for |
|
1516 | 1613 | # them. |
@@ -1524,8 +1621,7 b' class localrepository(repo.repository):' | |||
|
1524 | 1621 | msng_filenode_set.setdefault(fname, {}) |
|
1525 | 1622 | changedfiles.add(fname) |
|
1526 | 1623 | # Go through all our files in order sorted by name. |
|
1527 | cnt = 0 | |
|
1528 | for fname in sorted(changedfiles): | |
|
1624 | for idx, fname in enumerate(sorted(changedfiles)): | |
|
1529 | 1625 | filerevlog = self.file(fname) |
|
1530 | 1626 | if not len(filerevlog): |
|
1531 | 1627 | raise util.Abort(_("empty or missing revlog for %s") % fname) |
@@ -1548,13 +1644,16 b' class localrepository(repo.repository):' | |||
|
1548 | 1644 | group = filerevlog.group(nodeiter, |
|
1549 | 1645 | lambda fnode: missingfnodes[fnode]) |
|
1550 | 1646 | for chnk in group: |
|
1647 | # even though we print the same progress on | |
|
1648 | # most loop iterations, put the progress call | |
|
1649 | # here so that time estimates (if any) can be updated | |
|
1551 | 1650 | self.ui.progress( |
|
1552 |
_('bundling |
|
|
1553 |
|
|
|
1651 | _('bundling'), idx, item=fname, | |
|
1652 | unit=_('files'), total=efiles) | |
|
1554 | 1653 | yield chnk |
|
1555 | 1654 | # Signal that no more groups are left. |
|
1556 | 1655 | yield changegroup.closechunk() |
|
1557 |
self.ui.progress(_('bundling |
|
|
1656 | self.ui.progress(_('bundling'), None) | |
|
1558 | 1657 | |
|
1559 | 1658 | if msng_cl_lst: |
|
1560 | 1659 | self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source) |
@@ -1602,20 +1701,30 b' class localrepository(repo.repository):' | |||
|
1602 | 1701 | collect = changegroup.collector(cl, mmfs, changedfiles) |
|
1603 | 1702 | |
|
1604 | 1703 | for cnt, chnk in enumerate(cl.group(nodes, identity, collect)): |
|
1605 | self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) | |
|
1704 | # revlog.group yields three entries per node, so | |
|
1705 | # dividing by 3 gives an approximation of how many | |
|
1706 | # nodes have been processed. | |
|
1707 | self.ui.progress(_('bundling'), cnt / 3, unit=_('changesets')) | |
|
1606 | 1708 | yield chnk |
|
1607 | self.ui.progress(_('bundling changes'), None) | |
|
1709 | changecount = cnt / 3 | |
|
1710 | self.ui.progress(_('bundling'), None) | |
|
1608 | 1711 | |
|
1609 | 1712 | mnfst = self.manifest |
|
1610 | 1713 | nodeiter = gennodelst(mnfst) |
|
1714 | efiles = {} | |
|
1611 | 1715 | for cnt, chnk in enumerate(mnfst.group(nodeiter, |
|
1612 | 1716 | lookuplinkrev_func(mnfst))): |
|
1613 | self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) | |
|
1717 | if cnt % 3 == 1: | |
|
1718 | mnode = chnk[:20] | |
|
1719 | efiles.update(mnfst.readdelta(mnode)) | |
|
1720 | # see above comment for why we divide by 3 | |
|
1721 | self.ui.progress(_('bundling'), cnt / 3, | |
|
1722 | unit=_('manifests'), total=changecount) | |
|
1614 | 1723 | yield chnk |
|
1615 | self.ui.progress(_('bundling manifests'), None) | |
|
1724 | efiles = len(efiles) | |
|
1725 | self.ui.progress(_('bundling'), None) | |
|
1616 | 1726 | |
|
1617 | cnt = 0 | |
|
1618 | for fname in sorted(changedfiles): | |
|
1727 | for idx, fname in enumerate(sorted(changedfiles)): | |
|
1619 | 1728 | filerevlog = self.file(fname) |
|
1620 | 1729 | if not len(filerevlog): |
|
1621 | 1730 | raise util.Abort(_("empty or missing revlog for %s") % fname) |
@@ -1627,10 +1736,10 b' class localrepository(repo.repository):' | |||
|
1627 | 1736 | lookup = lookuplinkrev_func(filerevlog) |
|
1628 | 1737 | for chnk in filerevlog.group(nodeiter, lookup): |
|
1629 | 1738 | self.ui.progress( |
|
1630 |
_('bundling |
|
|
1631 |
|
|
|
1739 | _('bundling'), idx, item=fname, | |
|
1740 | total=efiles, unit=_('files')) | |
|
1632 | 1741 | yield chnk |
|
1633 |
self.ui.progress(_('bundling |
|
|
1742 | self.ui.progress(_('bundling'), None) | |
|
1634 | 1743 | |
|
1635 | 1744 | yield changegroup.closechunk() |
|
1636 | 1745 | |
@@ -1643,6 +1752,8 b' class localrepository(repo.repository):' | |||
|
1643 | 1752 | """Add the changegroup returned by source.read() to this repo. |
|
1644 | 1753 | srctype is a string like 'push', 'pull', or 'unbundle'. url is |
|
1645 | 1754 | the URL of the repo where this changegroup is coming from. |
|
1755 | If lock is not None, the function takes ownership of the lock | |
|
1756 | and releases it after the changegroup is added. | |
|
1646 | 1757 | |
|
1647 | 1758 | Return an integer summarizing the change to this repo: |
|
1648 | 1759 | - nothing changed or no source: 0 |
@@ -1795,6 +1906,10 b' class localrepository(repo.repository):' | |||
|
1795 | 1906 | self.hook("incoming", node=hex(cl.node(i)), |
|
1796 | 1907 | source=srctype, url=url) |
|
1797 | 1908 | |
|
1909 | # FIXME - why does this care about tip? | |
|
1910 | if newheads == oldheads: | |
|
1911 | bookmarks.update(self, self.dirstate.parents(), self['tip'].node()) | |
|
1912 | ||
|
1798 | 1913 | # never return 0 here: |
|
1799 | 1914 | if newheads < oldheads: |
|
1800 | 1915 | return newheads - oldheads - 1 |
@@ -1803,59 +1918,63 b' class localrepository(repo.repository):' | |||
|
1803 | 1918 | |
|
1804 | 1919 | |
|
1805 | 1920 | def stream_in(self, remote, requirements): |
|
1806 | fp = remote.stream_out() | |
|
1807 | l = fp.readline() | |
|
1921 | lock = self.lock() | |
|
1808 | 1922 | try: |
|
1809 |
|
|
|
1810 | except ValueError: | |
|
1811 | raise error.ResponseError( | |
|
1812 | _('Unexpected response from remote server:'), l) | |
|
1813 | if resp == 1: | |
|
1814 | raise util.Abort(_('operation forbidden by server')) | |
|
1815 | elif resp == 2: | |
|
1816 | raise util.Abort(_('locking the remote repository failed')) | |
|
1817 | elif resp != 0: | |
|
1818 | raise util.Abort(_('the server sent an unknown error code')) | |
|
1819 | self.ui.status(_('streaming all changes\n')) | |
|
1820 | l = fp.readline() | |
|
1821 | try: | |
|
1822 | total_files, total_bytes = map(int, l.split(' ', 1)) | |
|
1823 | except (ValueError, TypeError): | |
|
1824 | raise error.ResponseError( | |
|
1825 | _('Unexpected response from remote server:'), l) | |
|
1826 | self.ui.status(_('%d files to transfer, %s of data\n') % | |
|
1827 | (total_files, util.bytecount(total_bytes))) | |
|
1828 | start = time.time() | |
|
1829 | for i in xrange(total_files): | |
|
1830 | # XXX doesn't support '\n' or '\r' in filenames | |
|
1923 | fp = remote.stream_out() | |
|
1831 | 1924 | l = fp.readline() |
|
1832 | 1925 | try: |
|
1833 |
|
|
|
1834 | size = int(size) | |
|
1926 | resp = int(l) | |
|
1927 | except ValueError: | |
|
1928 | raise error.ResponseError( | |
|
1929 | _('Unexpected response from remote server:'), l) | |
|
1930 | if resp == 1: | |
|
1931 | raise util.Abort(_('operation forbidden by server')) | |
|
1932 | elif resp == 2: | |
|
1933 | raise util.Abort(_('locking the remote repository failed')) | |
|
1934 | elif resp != 0: | |
|
1935 | raise util.Abort(_('the server sent an unknown error code')) | |
|
1936 | self.ui.status(_('streaming all changes\n')) | |
|
1937 | l = fp.readline() | |
|
1938 | try: | |
|
1939 | total_files, total_bytes = map(int, l.split(' ', 1)) | |
|
1835 | 1940 | except (ValueError, TypeError): |
|
1836 | 1941 | raise error.ResponseError( |
|
1837 | 1942 | _('Unexpected response from remote server:'), l) |
|
1838 | self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) | |
|
1839 | # for backwards compat, name was partially encoded | |
|
1840 | ofp = self.sopener(store.decodedir(name), 'w') | |
|
1841 | for chunk in util.filechunkiter(fp, limit=size): | |
|
1842 | ofp.write(chunk) | |
|
1843 |
|
|
|
1844 | elapsed = time.time() - start | |
|
1845 | if elapsed <= 0: | |
|
1846 | elapsed = 0.001 | |
|
1847 | self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % | |
|
1848 | (util.bytecount(total_bytes), elapsed, | |
|
1849 | util.bytecount(total_bytes / elapsed))) | |
|
1943 | self.ui.status(_('%d files to transfer, %s of data\n') % | |
|
1944 | (total_files, util.bytecount(total_bytes))) | |
|
1945 | start = time.time() | |
|
1946 | for i in xrange(total_files): | |
|
1947 | # XXX doesn't support '\n' or '\r' in filenames | |
|
1948 | l = fp.readline() | |
|
1949 | try: | |
|
1950 | name, size = l.split('\0', 1) | |
|
1951 | size = int(size) | |
|
1952 | except (ValueError, TypeError): | |
|
1953 | raise error.ResponseError( | |
|
1954 | _('Unexpected response from remote server:'), l) | |
|
1955 | self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) | |
|
1956 | # for backwards compat, name was partially encoded | |
|
1957 | ofp = self.sopener(store.decodedir(name), 'w') | |
|
1958 | for chunk in util.filechunkiter(fp, limit=size): | |
|
1959 | ofp.write(chunk) | |
|
1960 | ofp.close() | |
|
1961 | elapsed = time.time() - start | |
|
1962 | if elapsed <= 0: | |
|
1963 | elapsed = 0.001 | |
|
1964 | self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % | |
|
1965 | (util.bytecount(total_bytes), elapsed, | |
|
1966 | util.bytecount(total_bytes / elapsed))) | |
|
1850 | 1967 | |
|
1851 | # new requirements = old non-format requirements + new format-related | |
|
1852 | # requirements from the streamed-in repository | |
|
1853 | requirements.update(set(self.requirements) - self.supportedformats) | |
|
1854 | self._applyrequirements(requirements) | |
|
1855 | self._writerequirements() | |
|
1968 | # new requirements = old non-format requirements + new format-related | |
|
1969 | # requirements from the streamed-in repository | |
|
1970 | requirements.update(set(self.requirements) - self.supportedformats) | |
|
1971 | self._applyrequirements(requirements) | |
|
1972 | self._writerequirements() | |
|
1856 | 1973 | |
|
1857 | self.invalidate() | |
|
1858 | return len(self.heads()) + 1 | |
|
1974 | self.invalidate() | |
|
1975 | return len(self.heads()) + 1 | |
|
1976 | finally: | |
|
1977 | lock.release() | |
|
1859 | 1978 | |
|
1860 | 1979 | def clone(self, remote, heads=[], stream=False): |
|
1861 | 1980 | '''clone remote repository. |
@@ -113,7 +113,7 b' class lock(object):' | |||
|
113 | 113 | # held, or can race and break valid lock. |
|
114 | 114 | try: |
|
115 | 115 | l = lock(self.f + '.break', timeout=0) |
|
116 |
|
|
|
116 | util.unlink(self.f) | |
|
117 | 117 | l.release() |
|
118 | 118 | except error.LockError: |
|
119 | 119 | return locker |
@@ -126,7 +126,7 b' class lock(object):' | |||
|
126 | 126 | if self.releasefn: |
|
127 | 127 | self.releasefn() |
|
128 | 128 | try: |
|
129 |
|
|
|
129 | util.unlink(self.f) | |
|
130 | 130 | except OSError: |
|
131 | 131 | pass |
|
132 | 132 |
@@ -33,7 +33,17 b" email.Header.Header.__dict__['__init__']" | |||
|
33 | 33 | def _smtp(ui): |
|
34 | 34 | '''build an smtp connection and return a function to send mail''' |
|
35 | 35 | local_hostname = ui.config('smtp', 'local_hostname') |
|
36 | s = smtplib.SMTP(local_hostname=local_hostname) | |
|
36 | tls = ui.config('smtp', 'tls', 'none') | |
|
37 | # backward compatible: when tls = true, we use starttls. | |
|
38 | starttls = tls == 'starttls' or util.parsebool(tls) | |
|
39 | smtps = tls == 'smtps' | |
|
40 | if (starttls or smtps) and not hasattr(socket, 'ssl'): | |
|
41 | raise util.Abort(_("can't use TLS: Python SSL support not installed")) | |
|
42 | if smtps: | |
|
43 | ui.note(_('(using smtps)\n')) | |
|
44 | s = smtplib.SMTP_SSL(local_hostname=local_hostname) | |
|
45 | else: | |
|
46 | s = smtplib.SMTP(local_hostname=local_hostname) | |
|
37 | 47 | mailhost = ui.config('smtp', 'host') |
|
38 | 48 | if not mailhost: |
|
39 | 49 | raise util.Abort(_('smtp.host not configured - cannot send mail')) |
@@ -41,11 +51,8 b' def _smtp(ui):' | |||
|
41 | 51 | ui.note(_('sending mail: smtp host %s, port %s\n') % |
|
42 | 52 | (mailhost, mailport)) |
|
43 | 53 | s.connect(host=mailhost, port=mailport) |
|
44 | if ui.configbool('smtp', 'tls'): | |
|
45 | if not hasattr(socket, 'ssl'): | |
|
46 | raise util.Abort(_("can't use TLS: Python SSL support " | |
|
47 | "not installed")) | |
|
48 | ui.note(_('(using tls)\n')) | |
|
54 | if starttls: | |
|
55 | ui.note(_('(using starttls)\n')) | |
|
49 | 56 | s.ehlo() |
|
50 | 57 | s.starttls() |
|
51 | 58 | s.ehlo() |
@@ -171,19 +171,19 b' class manifest(revlog.revlog):' | |||
|
171 | 171 | raise AssertionError( |
|
172 | 172 | _("failed to remove %s from manifest") % f) |
|
173 | 173 | l = "" |
|
174 |
if dstart |
|
|
174 | if dstart is not None and dstart <= start and dend >= start: | |
|
175 | 175 | if dend < end: |
|
176 | 176 | dend = end |
|
177 | 177 | if l: |
|
178 | 178 | dline.append(l) |
|
179 | 179 | else: |
|
180 |
if dstart |
|
|
180 | if dstart is not None: | |
|
181 | 181 | delta.append([dstart, dend, "".join(dline)]) |
|
182 | 182 | dstart = start |
|
183 | 183 | dend = end |
|
184 | 184 | dline = [l] |
|
185 | 185 | |
|
186 |
if dstart |
|
|
186 | if dstart is not None: | |
|
187 | 187 | delta.append([dstart, dend, "".join(dline)]) |
|
188 | 188 | # apply the delta to the addlist, and get a delta for addrevision |
|
189 | 189 | cachedelta = (self.rev(p1), addlistdelta(addlist, delta)) |
@@ -39,11 +39,11 b' class match(object):' | |||
|
39 | 39 | self._anypats = bool(include or exclude) |
|
40 | 40 | |
|
41 | 41 | if include: |
|
42 |
|
|
|
43 | '(?:/|$)') | |
|
42 | pats = _normalize(include, 'glob', root, cwd, auditor) | |
|
43 | self.includepat, im = _buildmatch(pats, '(?:/|$)') | |
|
44 | 44 | if exclude: |
|
45 |
|
|
|
46 | '(?:/|$)') | |
|
45 | pats = _normalize(exclude, 'glob', root, cwd, auditor) | |
|
46 | self.excludepat, em = _buildmatch(pats, '(?:/|$)') | |
|
47 | 47 | if exact: |
|
48 | 48 | self._files = patterns |
|
49 | 49 | pm = self.exact |
@@ -51,7 +51,7 b' class match(object):' | |||
|
51 | 51 | pats = _normalize(patterns, default, root, cwd, auditor) |
|
52 | 52 | self._files = _roots(pats) |
|
53 | 53 | self._anypats = self._anypats or _anypats(pats) |
|
54 | pm = _buildmatch(pats, '$') | |
|
54 | self.patternspat, pm = _buildmatch(pats, '$') | |
|
55 | 55 | |
|
56 | 56 | if patterns or exact: |
|
57 | 57 | if include: |
@@ -161,7 +161,8 b' def _patsplit(pat, default):' | |||
|
161 | 161 | actual pattern.""" |
|
162 | 162 | if ':' in pat: |
|
163 | 163 | kind, val = pat.split(':', 1) |
|
164 |
if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre' |
|
|
164 | if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre', | |
|
165 | 'listfile', 'listfile0'): | |
|
165 | 166 | return kind, val |
|
166 | 167 | return default, pat |
|
167 | 168 | |
@@ -245,7 +246,7 b' def _buildmatch(pats, tail):' | |||
|
245 | 246 | pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats]) |
|
246 | 247 | if len(pat) > 20000: |
|
247 | 248 | raise OverflowError() |
|
248 | return re.compile(pat).match | |
|
249 | return pat, re.compile(pat).match | |
|
249 | 250 | except OverflowError: |
|
250 | 251 | # We're using a Python with a tiny regex engine and we |
|
251 | 252 | # made it explode, so we'll divide the pattern list in two |
@@ -253,8 +254,9 b' def _buildmatch(pats, tail):' | |||
|
253 | 254 | l = len(pats) |
|
254 | 255 | if l < 2: |
|
255 | 256 | raise |
|
256 |
a, |
|
|
257 | return lambda s: a(s) or b(s) | |
|
257 | pata, a = _buildmatch(pats[:l//2], tail), | |
|
258 | patb, b = _buildmatch(pats[l//2:], tail) | |
|
259 | return pat, lambda s: a(s) or b(s) | |
|
258 | 260 | except re.error: |
|
259 | 261 | for k, p in pats: |
|
260 | 262 | try: |
@@ -270,6 +272,15 b' def _normalize(names, default, root, cwd' | |||
|
270 | 272 | name = util.canonpath(root, cwd, name, auditor) |
|
271 | 273 | elif kind in ('relglob', 'path'): |
|
272 | 274 | name = util.normpath(name) |
|
275 | elif kind in ('listfile', 'listfile0'): | |
|
276 | delimiter = kind == 'listfile0' and '\0' or '\n' | |
|
277 | try: | |
|
278 | files = open(name, 'r').read().split(delimiter) | |
|
279 | files = [f for f in files if f] | |
|
280 | except EnvironmentError: | |
|
281 | raise util.Abort(_("unable to read file list (%s)") % name) | |
|
282 | pats += _normalize(files, default, root, cwd, auditor) | |
|
283 | continue | |
|
273 | 284 | |
|
274 | 285 | pats.append((kind, name)) |
|
275 | 286 | return pats |
@@ -32,6 +32,7 b' class mergestate(object):' | |||
|
32 | 32 | else: |
|
33 | 33 | bits = l[:-1].split("\0") |
|
34 | 34 | self._state[bits[0]] = bits[1:] |
|
35 | f.close() | |
|
35 | 36 | except IOError, err: |
|
36 | 37 | if err.errno != errno.ENOENT: |
|
37 | 38 | raise |
@@ -42,6 +43,7 b' class mergestate(object):' | |||
|
42 | 43 | f.write(hex(self._local) + "\n") |
|
43 | 44 | for d, v in self._state.iteritems(): |
|
44 | 45 | f.write("\0".join([d] + v) + "\n") |
|
46 | f.close() | |
|
45 | 47 | self._dirty = False |
|
46 | 48 | def add(self, fcl, fco, fca, fd, flags): |
|
47 | 49 | hash = util.sha1(fcl.path()).hexdigest() |
@@ -67,6 +69,7 b' class mergestate(object):' | |||
|
67 | 69 | state, hash, lfile, afile, anode, ofile, flags = self._state[dfile] |
|
68 | 70 | f = self._repo.opener("merge/" + hash) |
|
69 | 71 | self._repo.wwrite(dfile, f.read(), flags) |
|
72 | f.close() | |
|
70 | 73 | fcd = wctx[dfile] |
|
71 | 74 | fco = octx[ofile] |
|
72 | 75 | fca = self._repo.filectx(afile, fileid=anode) |
@@ -255,6 +258,9 b' def applyupdates(repo, action, wctx, mct' | |||
|
255 | 258 | wctx is the working copy context |
|
256 | 259 | mctx is the context to be merged into the working copy |
|
257 | 260 | actx is the context of the common ancestor |
|
261 | ||
|
262 | Return a tuple of counts (updated, merged, removed, unresolved) that | |
|
263 | describes how many files were affected by the update. | |
|
258 | 264 | """ |
|
259 | 265 | |
|
260 | 266 | updated, merged, removed, unresolved = 0, 0, 0, 0 |
@@ -309,7 +315,7 b' def applyupdates(repo, action, wctx, mct' | |||
|
309 | 315 | if f == '.hgsubstate': # subrepo states need updating |
|
310 | 316 | subrepo.submerge(repo, wctx, mctx, wctx, overwrite) |
|
311 | 317 | try: |
|
312 | util.unlink(repo.wjoin(f)) | |
|
318 | util.unlinkpath(repo.wjoin(f)) | |
|
313 | 319 | except OSError, inst: |
|
314 | 320 | if inst.errno != errno.ENOENT: |
|
315 | 321 | repo.ui.warn(_("update failed to remove %s: %s!\n") % |
@@ -347,7 +353,7 b' def applyupdates(repo, action, wctx, mct' | |||
|
347 | 353 | repo.ui.note(_("moving %s to %s\n") % (f, fd)) |
|
348 | 354 | t = wctx.filectx(f).data() |
|
349 | 355 | repo.wwrite(fd, t, flags) |
|
350 | util.unlink(repo.wjoin(f)) | |
|
356 | util.unlinkpath(repo.wjoin(f)) | |
|
351 | 357 | if f2: |
|
352 | 358 | repo.ui.note(_("getting %s to %s\n") % (f2, fd)) |
|
353 | 359 | t = mctx.filectx(f2).data() |
@@ -462,6 +468,8 b' def update(repo, node, branchmerge, forc' | |||
|
462 | 468 | use 'hg update -C' to discard changes) |
|
463 | 469 | 3 = abort: uncommitted local changes |
|
464 | 470 | 4 = incompatible options (checked in commands.py) |
|
471 | ||
|
472 | Return the same tuple as applyupdates(). | |
|
465 | 473 | """ |
|
466 | 474 | |
|
467 | 475 | onode = node |
@@ -524,7 +532,7 b' def update(repo, node, branchmerge, forc' | |||
|
524 | 532 | action += manifestmerge(repo, wc, p2, pa, overwrite, partial) |
|
525 | 533 | |
|
526 | 534 | ### apply phase |
|
527 | if not branchmerge: # just jump to the new rev | |
|
535 | if not branchmerge or fastforward: # just jump to the new rev | |
|
528 | 536 | fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' |
|
529 | 537 | if not partial: |
|
530 | 538 | repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) |
@@ -533,7 +541,7 b' def update(repo, node, branchmerge, forc' | |||
|
533 | 541 | |
|
534 | 542 | if not partial: |
|
535 | 543 | repo.dirstate.setparents(fp1, fp2) |
|
536 | recordupdates(repo, action, branchmerge) | |
|
544 | recordupdates(repo, action, branchmerge and not fastforward) | |
|
537 | 545 | if not branchmerge and not fastforward: |
|
538 | 546 | repo.dirstate.setbranch(p2.branch()) |
|
539 | 547 | finally: |
@@ -14,27 +14,8 b' It cheats in a major way: nested blocks ' | |||
|
14 | 14 | are just indented blocks that look like they are nested. This relies |
|
15 | 15 | on the user to keep the right indentation for the blocks. |
|
16 | 16 | |
|
17 | It only supports a small subset of reStructuredText: | |
|
18 | ||
|
19 | - sections | |
|
20 | ||
|
21 | - paragraphs | |
|
22 | ||
|
23 | - literal blocks | |
|
24 | ||
|
25 | - definition lists | |
|
26 | ||
|
27 | - specific admonitions | |
|
28 | ||
|
29 | - bullet lists (items must start with '-') | |
|
30 | ||
|
31 | - enumerated lists (no autonumbering) | |
|
32 | ||
|
33 | - field lists (colons cannot be escaped) | |
|
34 | ||
|
35 | - option lists (supports only long options without arguments) | |
|
36 | ||
|
37 | - inline literals (no other inline markup is not recognized) | |
|
17 | Remember to update http://mercurial.selenic.com/wiki/HelpStyleGuide | |
|
18 | when adding support for new constructs. | |
|
38 | 19 | """ |
|
39 | 20 | |
|
40 | 21 | import re, sys |
@@ -118,7 +99,8 b' def findliteralblocks(blocks):' | |||
|
118 | 99 | return blocks |
|
119 | 100 | |
|
120 | 101 | _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') |
|
121 |
_optionre = re.compile(r'^(- |
|
|
102 | _optionre = re.compile(r'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' | |
|
103 | r'((.*) +)(.*)$') | |
|
122 | 104 | _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') |
|
123 | 105 | _definitionre = re.compile(r'[^ ]') |
|
124 | 106 | |
@@ -192,6 +174,42 b' def updatefieldlists(blocks):' | |||
|
192 | 174 | return blocks |
|
193 | 175 | |
|
194 | 176 | |
|
177 | def updateoptionlists(blocks): | |
|
178 | i = 0 | |
|
179 | while i < len(blocks): | |
|
180 | if blocks[i]['type'] != 'option': | |
|
181 | i += 1 | |
|
182 | continue | |
|
183 | ||
|
184 | optstrwidth = 0 | |
|
185 | j = i | |
|
186 | while j < len(blocks) and blocks[j]['type'] == 'option': | |
|
187 | m = _optionre.match(blocks[j]['lines'][0]) | |
|
188 | ||
|
189 | shortoption = m.group(2) | |
|
190 | group3 = m.group(3) | |
|
191 | longoption = group3[2:].strip() | |
|
192 | desc = m.group(6).strip() | |
|
193 | longoptionarg = m.group(5).strip() | |
|
194 | blocks[j]['lines'][0] = desc | |
|
195 | ||
|
196 | noshortop = '' | |
|
197 | if not shortoption: | |
|
198 | noshortop = ' ' | |
|
199 | ||
|
200 | opt = "%s%s" % (shortoption and "-%s " % shortoption or '', | |
|
201 | ("%s--%s %s") % (noshortop, longoption, | |
|
202 | longoptionarg)) | |
|
203 | opt = opt.rstrip() | |
|
204 | blocks[j]['optstr'] = opt | |
|
205 | optstrwidth = max(optstrwidth, encoding.colwidth(opt)) | |
|
206 | j += 1 | |
|
207 | ||
|
208 | for block in blocks[i:j]: | |
|
209 | block['optstrwidth'] = optstrwidth | |
|
210 | i = j + 1 | |
|
211 | return blocks | |
|
212 | ||
|
195 | 213 | def prunecontainers(blocks, keep): |
|
196 | 214 | """Prune unwanted containers. |
|
197 | 215 | |
@@ -297,8 +315,11 b' def prunecomments(blocks):' | |||
|
297 | 315 | i = 0 |
|
298 | 316 | while i < len(blocks): |
|
299 | 317 | b = blocks[i] |
|
300 |
if b['type'] == 'paragraph' and b['lines'][0].startswith('.. ') |
|
|
318 | if b['type'] == 'paragraph' and (b['lines'][0].startswith('.. ') or | |
|
319 | b['lines'] == ['..']): | |
|
301 | 320 | del blocks[i] |
|
321 | if i < len(blocks) and blocks[i]['type'] == 'margin': | |
|
322 | del blocks[i] | |
|
302 | 323 | else: |
|
303 | 324 | i += 1 |
|
304 | 325 | return blocks |
@@ -338,6 +359,17 b' def findadmonitions(blocks):' | |||
|
338 | 359 | 'tip': _('Tip:'), |
|
339 | 360 | 'warning': _('Warning!')} |
|
340 | 361 | |
|
362 | def formatoption(block, width): | |
|
363 | desc = ' '.join(map(str.strip, block['lines'])) | |
|
364 | colwidth = encoding.colwidth(block['optstr']) | |
|
365 | usablewidth = width - 1 | |
|
366 | hanging = block['optstrwidth'] | |
|
367 | initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth))) | |
|
368 | hangindent = ' ' * (encoding.colwidth(initindent) + 1) | |
|
369 | return ' %s' % (util.wrap(desc, usablewidth, | |
|
370 | initindent=initindent, | |
|
371 | hangindent=hangindent)) | |
|
372 | ||
|
341 | 373 | def formatblock(block, width): |
|
342 | 374 | """Format a block according to width.""" |
|
343 | 375 | if width <= 0: |
@@ -394,9 +426,7 b' def formatblock(block, width):' | |||
|
394 | 426 | key = key.ljust(_fieldwidth) |
|
395 | 427 | block['lines'][0] = key + block['lines'][0] |
|
396 | 428 | elif block['type'] == 'option': |
|
397 | m = _optionre.match(block['lines'][0]) | |
|
398 | option, arg, rest = m.groups() | |
|
399 | subindent = indent + (len(option) + len(arg)) * ' ' | |
|
429 | return formatoption(block, width) | |
|
400 | 430 | |
|
401 | 431 | text = ' '.join(map(str.strip, block['lines'])) |
|
402 | 432 | return util.wrap(text, width=width, |
@@ -416,8 +446,9 b' def format(text, width, indent=0, keep=N' | |||
|
416 | 446 | blocks = hgrole(blocks) |
|
417 | 447 | blocks = splitparagraphs(blocks) |
|
418 | 448 | blocks = updatefieldlists(blocks) |
|
449 | blocks = updateoptionlists(blocks) | |
|
450 | blocks = addmargins(blocks) | |
|
419 | 451 | blocks = prunecomments(blocks) |
|
420 | blocks = addmargins(blocks) | |
|
421 | 452 | blocks = findadmonitions(blocks) |
|
422 | 453 | text = '\n'.join(formatblock(b, width) for b in blocks) |
|
423 | 454 | if keep is None: |
@@ -443,8 +474,9 b' if __name__ == "__main__":' | |||
|
443 | 474 | blocks = debug(inlineliterals, blocks) |
|
444 | 475 | blocks = debug(splitparagraphs, blocks) |
|
445 | 476 | blocks = debug(updatefieldlists, blocks) |
|
477 | blocks = debug(updateoptionlists, blocks) | |
|
446 | 478 | blocks = debug(findsections, blocks) |
|
479 | blocks = debug(addmargins, blocks) | |
|
447 | 480 | blocks = debug(prunecomments, blocks) |
|
448 | blocks = debug(addmargins, blocks) | |
|
449 | 481 | blocks = debug(findadmonitions, blocks) |
|
450 | 482 | print '\n'.join(formatblock(b, 30) for b in blocks) |
@@ -436,7 +436,14 b' static PyObject *posixfile(PyObject *sel' | |||
|
436 | 436 | } |
|
437 | 437 | else |
|
438 | 438 | flags = _O_TEXT; |
|
439 | if (plus) { | |
|
439 | if (m0 == 'r' && !plus) { | |
|
440 | flags |= _O_RDONLY; | |
|
441 | access = GENERIC_READ; | |
|
442 | } else { | |
|
443 | /* | |
|
444 | work around http://support.microsoft.com/kb/899149 and | |
|
445 | set _O_RDWR for 'w' and 'a', even if mode has no '+' | |
|
446 | */ | |
|
440 | 447 | flags |= _O_RDWR; |
|
441 | 448 | access = GENERIC_READ | GENERIC_WRITE; |
|
442 | 449 | fpmode[fppos++] = '+'; |
@@ -446,25 +453,13 b' static PyObject *posixfile(PyObject *sel' | |||
|
446 | 453 | switch (m0) { |
|
447 | 454 | case 'r': |
|
448 | 455 | creation = OPEN_EXISTING; |
|
449 | if (!plus) { | |
|
450 | flags |= _O_RDONLY; | |
|
451 | access = GENERIC_READ; | |
|
452 | } | |
|
453 | 456 | break; |
|
454 | 457 | case 'w': |
|
455 | 458 | creation = CREATE_ALWAYS; |
|
456 | if (!plus) { | |
|
457 | access = GENERIC_WRITE; | |
|
458 | flags |= _O_WRONLY; | |
|
459 | } | |
|
460 | 459 | break; |
|
461 | 460 | case 'a': |
|
462 | 461 | creation = OPEN_ALWAYS; |
|
463 | 462 | flags |= _O_APPEND; |
|
464 | if (!plus) { | |
|
465 | flags |= _O_WRONLY; | |
|
466 | access = GENERIC_WRITE; | |
|
467 | } | |
|
468 | 463 | break; |
|
469 | 464 | default: |
|
470 | 465 | PyErr_Format(PyExc_ValueError, |
@@ -22,6 +22,7 b' class parser(object):' | |||
|
22 | 22 | self._tokenizer = tokenizer |
|
23 | 23 | self._elements = elements |
|
24 | 24 | self._methods = methods |
|
25 | self.current = None | |
|
25 | 26 | def _advance(self): |
|
26 | 27 | 'advance the tokenizer' |
|
27 | 28 | t = self.current |
@@ -76,7 +77,7 b' class parser(object):' | |||
|
76 | 77 | def parse(self, message): |
|
77 | 78 | 'generate a parse tree from a message' |
|
78 | 79 | self._iter = self._tokenizer(message) |
|
79 | self.current = self._iter.next() | |
|
80 | self._advance() | |
|
80 | 81 | return self._parse() |
|
81 | 82 | def eval(self, tree): |
|
82 | 83 | 'recursively evaluate a parse tree using node methods' |
@@ -244,41 +244,6 b' quit:' | |||
|
244 | 244 | const char nullid[20]; |
|
245 | 245 | const int nullrev = -1; |
|
246 | 246 | |
|
247 | /* create an index tuple, insert into the nodemap */ | |
|
248 | static PyObject * _build_idx_entry(PyObject *nodemap, int n, uint64_t offset_flags, | |
|
249 | int comp_len, int uncomp_len, int base_rev, | |
|
250 | int link_rev, int parent_1, int parent_2, | |
|
251 | const char *c_node_id) | |
|
252 | { | |
|
253 | int err; | |
|
254 | PyObject *entry, *node_id, *n_obj; | |
|
255 | ||
|
256 | node_id = PyBytes_FromStringAndSize(c_node_id, 20); | |
|
257 | n_obj = PyInt_FromLong(n); | |
|
258 | ||
|
259 | if (!node_id || !n_obj) | |
|
260 | err = -1; | |
|
261 | else | |
|
262 | err = PyDict_SetItem(nodemap, node_id, n_obj); | |
|
263 | ||
|
264 | Py_XDECREF(n_obj); | |
|
265 | if (err) | |
|
266 | goto error_dealloc; | |
|
267 | ||
|
268 | entry = Py_BuildValue("LiiiiiiN", offset_flags, comp_len, | |
|
269 | uncomp_len, base_rev, link_rev, | |
|
270 | parent_1, parent_2, node_id); | |
|
271 | if (!entry) | |
|
272 | goto error_dealloc; | |
|
273 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
|
274 | ||
|
275 | return entry; | |
|
276 | ||
|
277 | error_dealloc: | |
|
278 | Py_XDECREF(node_id); | |
|
279 | return NULL; | |
|
280 | } | |
|
281 | ||
|
282 | 247 | /* RevlogNG format (all in big endian, data may be inlined): |
|
283 | 248 | * 6 bytes: offset |
|
284 | 249 | * 2 bytes: flags |
@@ -290,8 +255,8 b' error_dealloc:' | |||
|
290 | 255 | * 4 bytes: parent 2 revision |
|
291 | 256 | * 32 bytes: nodeid (only 20 bytes used) |
|
292 | 257 | */ |
|
293 |
static int _parse_index_ng |
|
|
294 |
|
|
|
258 | static int _parse_index_ng(const char *data, int size, int inlined, | |
|
259 | PyObject *index) | |
|
295 | 260 | { |
|
296 | 261 | PyObject *entry; |
|
297 | 262 | int n = 0, err; |
@@ -321,13 +286,15 b' static int _parse_index_ng (const char *' | |||
|
321 | 286 | parent_2 = ntohl(*((uint32_t *)(decode + 28))); |
|
322 | 287 | c_node_id = decode + 32; |
|
323 | 288 | |
|
324 | entry = _build_idx_entry(nodemap, n, offset_flags, | |
|
325 |
|
|
|
326 | link_rev, parent_1, parent_2, | |
|
327 | c_node_id); | |
|
289 | entry = Py_BuildValue("Liiiiiis#", offset_flags, comp_len, | |
|
290 | uncomp_len, base_rev, link_rev, | |
|
291 | parent_1, parent_2, c_node_id, 20); | |
|
292 | ||
|
328 | 293 | if (!entry) |
|
329 | 294 | return 0; |
|
330 | 295 | |
|
296 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
|
297 | ||
|
331 | 298 | if (inlined) { |
|
332 | 299 | err = PyList_Append(index, entry); |
|
333 | 300 | Py_DECREF(entry); |
@@ -348,12 +315,14 b' static int _parse_index_ng (const char *' | |||
|
348 | 315 | return 0; |
|
349 | 316 | } |
|
350 | 317 | |
|
351 |
/* create the nullid |
|
|
352 | * magic nullid entry in the index at [-1] */ | |
|
353 | entry = _build_idx_entry(nodemap, | |
|
354 | nullrev, 0, 0, 0, -1, -1, -1, -1, nullid); | |
|
318 | /* create the magic nullid entry in the index at [-1] */ | |
|
319 | entry = Py_BuildValue("Liiiiiis#", (uint64_t)0, 0, 0, -1, -1, -1, -1, nullid, 20); | |
|
320 | ||
|
355 | 321 | if (!entry) |
|
356 | 322 | return 0; |
|
323 | ||
|
324 | PyObject_GC_UnTrack(entry); /* don't waste time with this */ | |
|
325 | ||
|
357 | 326 | if (inlined) { |
|
358 | 327 | err = PyList_Append(index, entry); |
|
359 | 328 | Py_DECREF(entry); |
@@ -366,17 +335,16 b' static int _parse_index_ng (const char *' | |||
|
366 | 335 | } |
|
367 | 336 | |
|
368 | 337 | /* This function parses a index file and returns a Python tuple of the |
|
369 |
* following format: (index, |
|
|
338 | * following format: (index, cache) | |
|
370 | 339 | * |
|
371 | 340 | * index: a list of tuples containing the RevlogNG records |
|
372 | * nodemap: a dict mapping node ids to indices in the index list | |
|
373 | 341 | * cache: if data is inlined, a tuple (index_file_content, 0) else None |
|
374 | 342 | */ |
|
375 | static PyObject *parse_index(PyObject *self, PyObject *args) | |
|
343 | static PyObject *parse_index2(PyObject *self, PyObject *args) | |
|
376 | 344 | { |
|
377 | 345 | const char *data; |
|
378 | 346 | int size, inlined; |
|
379 |
PyObject *rval = NULL, *index = NULL, * |
|
|
347 | PyObject *rval = NULL, *index = NULL, *cache = NULL; | |
|
380 | 348 | PyObject *data_obj = NULL, *inlined_obj; |
|
381 | 349 | |
|
382 | 350 | if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj)) |
@@ -384,16 +352,12 b' static PyObject *parse_index(PyObject *s' | |||
|
384 | 352 | inlined = inlined_obj && PyObject_IsTrue(inlined_obj); |
|
385 | 353 | |
|
386 | 354 | /* If no data is inlined, we know the size of the index list in |
|
387 |
* advance: size divided by size of |
|
|
388 |
* plus one for |
|
|
355 | * advance: size divided by the size of one revlog record (64 bytes) | |
|
356 | * plus one for nullid */ | |
|
389 | 357 | index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1); |
|
390 | 358 | if (!index) |
|
391 | 359 | goto quit; |
|
392 | 360 | |
|
393 | nodemap = PyDict_New(); | |
|
394 | if (!nodemap) | |
|
395 | goto quit; | |
|
396 | ||
|
397 | 361 | /* set up the cache return value */ |
|
398 | 362 | if (inlined) { |
|
399 | 363 | /* Note that the reference to data_obj is only borrowed */ |
@@ -406,18 +370,17 b' static PyObject *parse_index(PyObject *s' | |||
|
406 | 370 | Py_INCREF(Py_None); |
|
407 | 371 | } |
|
408 | 372 | |
|
409 |
/* actually populate the index |
|
|
410 |
if (!_parse_index_ng |
|
|
373 | /* actually populate the index with data */ | |
|
374 | if (!_parse_index_ng(data, size, inlined, index)) | |
|
411 | 375 | goto quit; |
|
412 | 376 | |
|
413 |
rval = Py_BuildValue("NN |
|
|
377 | rval = Py_BuildValue("NN", index, cache); | |
|
414 | 378 | if (!rval) |
|
415 | 379 | goto quit; |
|
416 | 380 | return rval; |
|
417 | 381 | |
|
418 | 382 | quit: |
|
419 | 383 | Py_XDECREF(index); |
|
420 | Py_XDECREF(nodemap); | |
|
421 | 384 | Py_XDECREF(cache); |
|
422 | 385 | Py_XDECREF(rval); |
|
423 | 386 | return NULL; |
@@ -429,7 +392,7 b' static char parsers_doc[] = "Efficient c' | |||
|
429 | 392 | static PyMethodDef methods[] = { |
|
430 | 393 | {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, |
|
431 | 394 | {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, |
|
432 | {"parse_index", parse_index, METH_VARARGS, "parse a revlog index\n"}, | |
|
395 | {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, | |
|
433 | 396 | {NULL, NULL} |
|
434 | 397 | }; |
|
435 | 398 |
@@ -6,7 +6,7 b'' | |||
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | import cStringIO, email.Parser, os, re | |
|
9 | import cStringIO, email.Parser, os, errno, re | |
|
10 | 10 | import tempfile, zlib |
|
11 | 11 | |
|
12 | 12 | from i18n import _ |
@@ -429,10 +429,16 b' class patchfile(object):' | |||
|
429 | 429 | # Ensure supplied data ends in fname, being a regular file or |
|
430 | 430 | # a symlink. cmdutil.updatedir will -too magically- take care |
|
431 | 431 | # of setting it to the proper type afterwards. |
|
432 | st_mode = None | |
|
432 | 433 | islink = os.path.islink(fname) |
|
433 | 434 | if islink: |
|
434 | 435 | fp = cStringIO.StringIO() |
|
435 | 436 | else: |
|
437 | try: | |
|
438 | st_mode = os.lstat(fname).st_mode & 0777 | |
|
439 | except OSError, e: | |
|
440 | if e.errno != errno.ENOENT: | |
|
441 | raise | |
|
436 | 442 | fp = self.opener(fname, 'w') |
|
437 | 443 | try: |
|
438 | 444 | if self.eolmode == 'auto': |
@@ -451,6 +457,8 b' class patchfile(object):' | |||
|
451 | 457 | fp.writelines(lines) |
|
452 | 458 | if islink: |
|
453 | 459 | self.opener.symlink(fp.getvalue(), fname) |
|
460 | if st_mode is not None: | |
|
461 | os.chmod(fname, st_mode) | |
|
454 | 462 | finally: |
|
455 | 463 | fp.close() |
|
456 | 464 | |
@@ -976,7 +984,7 b' def scangitpatch(lr, firstline):' | |||
|
976 | 984 | fp.seek(pos) |
|
977 | 985 | return gitpatches |
|
978 | 986 | |
|
979 |
def iterhunks(ui, fp |
|
|
987 | def iterhunks(ui, fp): | |
|
980 | 988 | """Read a patch and yield the following events: |
|
981 | 989 | - ("file", afile, bfile, firsthunk): select a new target file. |
|
982 | 990 | - ("hunk", hunk): a new hunk is ready to be applied, follows a |
@@ -997,10 +1005,6 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
997 | 1005 | BFILE = 1 |
|
998 | 1006 | context = None |
|
999 | 1007 | lr = linereader(fp) |
|
1000 | # gitworkdone is True if a git operation (copy, rename, ...) was | |
|
1001 | # performed already for the current file. Useful when the file | |
|
1002 | # section may have no hunk. | |
|
1003 | gitworkdone = False | |
|
1004 | 1008 | |
|
1005 | 1009 | while True: |
|
1006 | 1010 | newfile = newgitfile = False |
@@ -1012,7 +1016,7 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
1012 | 1016 | current_hunk.fix_newline() |
|
1013 | 1017 | yield 'hunk', current_hunk |
|
1014 | 1018 | current_hunk = None |
|
1015 |
if ( |
|
|
1019 | if (state == BFILE and ((not context and x[0] == '@') or | |
|
1016 | 1020 | ((context is not False) and x.startswith('***************')))): |
|
1017 | 1021 | if context is None and x.startswith('***************'): |
|
1018 | 1022 | context = True |
@@ -1034,7 +1038,6 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
1034 | 1038 | elif x.startswith('diff --git'): |
|
1035 | 1039 | # check for git diff, scanning the whole patch file if needed |
|
1036 | 1040 | m = gitre.match(x) |
|
1037 | gitworkdone = False | |
|
1038 | 1041 | if m: |
|
1039 | 1042 | afile, bfile = m.group(1, 2) |
|
1040 | 1043 | if not git: |
@@ -1049,7 +1052,6 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
1049 | 1052 | if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD') |
|
1050 | 1053 | or gp.mode): |
|
1051 | 1054 | afile = bfile |
|
1052 | gitworkdone = True | |
|
1053 | 1055 | newgitfile = True |
|
1054 | 1056 | elif x.startswith('---'): |
|
1055 | 1057 | # check for a unified diff |
@@ -1077,9 +1079,6 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
1077 | 1079 | afile = parsefilename(x) |
|
1078 | 1080 | bfile = parsefilename(l2) |
|
1079 | 1081 | |
|
1080 | if newfile: | |
|
1081 | gitworkdone = False | |
|
1082 | ||
|
1083 | 1082 | if newgitfile or newfile: |
|
1084 | 1083 | emitfile = True |
|
1085 | 1084 | state = BFILE |
@@ -1091,7 +1090,7 b' def iterhunks(ui, fp, sourcefile=None):' | |||
|
1091 | 1090 | raise PatchError(_("malformed patch %s %s") % (afile, |
|
1092 | 1091 | current_hunk.desc)) |
|
1093 | 1092 | |
|
1094 |
def applydiff(ui, fp, changed, strip=1, |
|
|
1093 | def applydiff(ui, fp, changed, strip=1, eolmode='strict'): | |
|
1095 | 1094 | """Reads a patch from fp and tries to apply it. |
|
1096 | 1095 | |
|
1097 | 1096 | The dict 'changed' is filled in with all of the filenames changed |
@@ -1105,13 +1104,10 b' def applydiff(ui, fp, changed, strip=1, ' | |||
|
1105 | 1104 | Callers probably want to call 'cmdutil.updatedir' after this to |
|
1106 | 1105 | apply certain categories of changes not done by this function. |
|
1107 | 1106 | """ |
|
1108 | return _applydiff( | |
|
1109 | ui, fp, patchfile, copyfile, | |
|
1110 | changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode) | |
|
1107 | return _applydiff(ui, fp, patchfile, copyfile, changed, strip=strip, | |
|
1108 | eolmode=eolmode) | |
|
1111 | 1109 | |
|
1112 | ||
|
1113 | def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, | |
|
1114 | sourcefile=None, eolmode='strict'): | |
|
1110 | def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, eolmode='strict'): | |
|
1115 | 1111 | rejects = 0 |
|
1116 | 1112 | err = 0 |
|
1117 | 1113 | current_file = None |
@@ -1126,7 +1122,7 b' def _applydiff(ui, fp, patcher, copyfn, ' | |||
|
1126 | 1122 | current_file.write_rej() |
|
1127 | 1123 | return len(current_file.rej) |
|
1128 | 1124 | |
|
1129 |
for state, values in iterhunks(ui, fp |
|
|
1125 | for state, values in iterhunks(ui, fp): | |
|
1130 | 1126 | if state == 'hunk': |
|
1131 | 1127 | if not current_file: |
|
1132 | 1128 | continue |
@@ -1139,14 +1135,10 b' def _applydiff(ui, fp, patcher, copyfn, ' | |||
|
1139 | 1135 | rejects += closefile() |
|
1140 | 1136 | afile, bfile, first_hunk = values |
|
1141 | 1137 | try: |
|
1142 | if sourcefile: | |
|
1143 | current_file = patcher(ui, sourcefile, opener, | |
|
1144 | eolmode=eolmode) | |
|
1145 | else: | |
|
1146 | current_file, missing = selectfile(afile, bfile, | |
|
1147 | first_hunk, strip) | |
|
1148 | current_file = patcher(ui, current_file, opener, | |
|
1149 | missing=missing, eolmode=eolmode) | |
|
1138 | current_file, missing = selectfile(afile, bfile, | |
|
1139 | first_hunk, strip) | |
|
1140 | current_file = patcher(ui, current_file, opener, | |
|
1141 | missing=missing, eolmode=eolmode) | |
|
1150 | 1142 | except PatchError, err: |
|
1151 | 1143 | ui.warn(str(err) + '\n') |
|
1152 | 1144 | current_file = None |
@@ -1537,6 +1529,8 b' def trydiff(repo, revs, ctx1, ctx2, modi' | |||
|
1537 | 1529 | yield text |
|
1538 | 1530 | |
|
1539 | 1531 | def diffstatdata(lines): |
|
1532 | diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$') | |
|
1533 | ||
|
1540 | 1534 | filename, adds, removes = None, 0, 0 |
|
1541 | 1535 | for line in lines: |
|
1542 | 1536 | if line.startswith('diff'): |
@@ -1547,9 +1541,9 b' def diffstatdata(lines):' | |||
|
1547 | 1541 | adds, removes = 0, 0 |
|
1548 | 1542 | if line.startswith('diff --git'): |
|
1549 | 1543 | filename = gitre.search(line).group(1) |
|
1550 | else: | |
|
1544 | elif line.startswith('diff -r'): | |
|
1551 | 1545 | # format: "diff -r ... -r ... filename" |
|
1552 |
filename = line. |
|
|
1546 | filename = diffre.search(line).group(1) | |
|
1553 | 1547 | elif line.startswith('+') and not line.startswith('+++'): |
|
1554 | 1548 | adds += 1 |
|
1555 | 1549 | elif line.startswith('-') and not line.startswith('---'): |
@@ -13,6 +13,8 b' posixfile = open' | |||
|
13 | 13 | nulldev = '/dev/null' |
|
14 | 14 | normpath = os.path.normpath |
|
15 | 15 | samestat = os.path.samestat |
|
16 | os_link = os.link | |
|
17 | unlink = os.unlink | |
|
16 | 18 | rename = os.rename |
|
17 | 19 | expandglobs = False |
|
18 | 20 | |
@@ -23,6 +25,10 b' def openhardlinks():' | |||
|
23 | 25 | '''return true if it is safe to hold open file handles to hardlinks''' |
|
24 | 26 | return True |
|
25 | 27 | |
|
28 | def nlinks(name): | |
|
29 | '''return number of hardlinks for the given file''' | |
|
30 | return os.lstat(name).st_nlink | |
|
31 | ||
|
26 | 32 | def rcfiles(path): |
|
27 | 33 | rcs = [os.path.join(path, 'hgrc')] |
|
28 | 34 | rcdir = os.path.join(path, 'hgrc.d') |
@@ -71,20 +77,26 b' def set_flags(f, l, x):' | |||
|
71 | 77 | if l: |
|
72 | 78 | if not stat.S_ISLNK(s): |
|
73 | 79 | # switch file to link |
|
74 |
|
|
|
80 | fp = open(f) | |
|
81 | data = fp.read() | |
|
82 | fp.close() | |
|
75 | 83 | os.unlink(f) |
|
76 | 84 | try: |
|
77 | 85 | os.symlink(data, f) |
|
78 | 86 | except: |
|
79 | 87 | # failed to make a link, rewrite file |
|
80 |
open(f, "w") |
|
|
88 | fp = open(f, "w") | |
|
89 | fp.write(data) | |
|
90 | fp.close() | |
|
81 | 91 | # no chmod needed at this point |
|
82 | 92 | return |
|
83 | 93 | if stat.S_ISLNK(s): |
|
84 | 94 | # switch link to file |
|
85 | 95 | data = os.readlink(f) |
|
86 | 96 | os.unlink(f) |
|
87 |
open(f, "w") |
|
|
97 | fp = open(f, "w") | |
|
98 | fp.write(data) | |
|
99 | fp.close() | |
|
88 | 100 | s = 0666 & ~umask # avoid restatting for chmod |
|
89 | 101 | |
|
90 | 102 | sx = s & 0100 |
@@ -24,7 +24,7 b' def parse_manifest(mfdict, fdict, lines)' | |||
|
24 | 24 | else: |
|
25 | 25 | mfdict[f] = bin(n) |
|
26 | 26 | |
|
27 | def parse_index(data, inline): | |
|
27 | def parse_index2(data, inline): | |
|
28 | 28 | def gettype(q): |
|
29 | 29 | return int(q & 0xFFFF) |
|
30 | 30 | |
@@ -36,16 +36,14 b' def parse_index(data, inline):' | |||
|
36 | 36 | s = struct.calcsize(indexformatng) |
|
37 | 37 | index = [] |
|
38 | 38 | cache = None |
|
39 | nodemap = {nullid: nullrev} | |
|
40 | 39 | n = off = 0 |
|
41 | # if we're not using lazymap, always read the whole index | |
|
40 | ||
|
42 | 41 | l = len(data) - s |
|
43 | 42 | append = index.append |
|
44 | 43 | if inline: |
|
45 | 44 | cache = (0, data) |
|
46 | 45 | while off <= l: |
|
47 | 46 | e = _unpack(indexformatng, data[off:off + s]) |
|
48 | nodemap[e[7]] = n | |
|
49 | 47 | append(e) |
|
50 | 48 | n += 1 |
|
51 | 49 | if e[1] < 0: |
@@ -54,7 +52,6 b' def parse_index(data, inline):' | |||
|
54 | 52 | else: |
|
55 | 53 | while off <= l: |
|
56 | 54 | e = _unpack(indexformatng, data[off:off + s]) |
|
57 | nodemap[e[7]] = n | |
|
58 | 55 | append(e) |
|
59 | 56 | n += 1 |
|
60 | 57 | off += s |
@@ -67,7 +64,7 b' def parse_index(data, inline):' | |||
|
67 | 64 | # add the magic null revision at -1 |
|
68 | 65 | index.append((0, 0, 0, -1, -1, -1, -1, nullid)) |
|
69 | 66 | |
|
70 |
return index, |
|
|
67 | return index, cache | |
|
71 | 68 | |
|
72 | 69 | def parse_dirstate(dmap, copymap, st): |
|
73 | 70 | parents = [st[:20], st[20: 40]] |
@@ -5,13 +5,16 b'' | |||
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | import bookmarks | |
|
9 | ||
|
8 | 10 | def _nslist(repo): |
|
9 | 11 | n = {} |
|
10 | 12 | for k in _namespaces: |
|
11 | 13 | n[k] = "" |
|
12 | 14 | return n |
|
13 | 15 | |
|
14 |
_namespaces = {"namespaces": (lambda *x: False, _nslist) |
|
|
16 | _namespaces = {"namespaces": (lambda *x: False, _nslist), | |
|
17 | "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks)} | |
|
15 | 18 | |
|
16 | 19 | def register(namespace, pushkey, listkeys): |
|
17 | 20 | _namespaces[namespace] = (pushkey, listkeys) |
@@ -6,7 +6,7 b'' | |||
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | import changegroup | |
|
9 | import changegroup, bookmarks | |
|
10 | 10 | from node import nullrev, short |
|
11 | 11 | from i18n import _ |
|
12 | 12 | import os |
@@ -105,6 +105,13 b' def strip(ui, repo, node, backup="all"):' | |||
|
105 | 105 | saveheads.difference_update(parents) |
|
106 | 106 | saveheads.add(r) |
|
107 | 107 | |
|
108 | bm = repo._bookmarks | |
|
109 | updatebm = [] | |
|
110 | for m in bm: | |
|
111 | rev = repo[bm[m]].rev() | |
|
112 | if rev in tostrip: | |
|
113 | updatebm.append(m) | |
|
114 | ||
|
108 | 115 | saveheads = [cl.node(r) for r in saveheads] |
|
109 | 116 | files = _collectfiles(repo, striprev) |
|
110 | 117 | |
@@ -155,6 +162,11 b' def strip(ui, repo, node, backup="all"):' | |||
|
155 | 162 | f.close() |
|
156 | 163 | if not keeppartialbundle: |
|
157 | 164 | os.unlink(chgrpfile) |
|
165 | ||
|
166 | for m in updatebm: | |
|
167 | bm[m] = repo['.'].node() | |
|
168 | bookmarks.write(repo) | |
|
169 | ||
|
158 | 170 | except: |
|
159 | 171 | if backupfile: |
|
160 | 172 | ui.warn(_("strip failed, full bundle stored in '%s'\n") |
@@ -35,3 +35,6 b' class repository(object):' | |||
|
35 | 35 | |
|
36 | 36 | def cancopy(self): |
|
37 | 37 | return self.local() |
|
38 | ||
|
39 | def close(self): | |
|
40 | pass |
@@ -38,11 +38,9 b' REVIDX_PARENTDELTA = 1' | |||
|
38 | 38 | REVIDX_PUNCHED_FLAG = 2 |
|
39 | 39 | REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA |
|
40 | 40 | |
|
41 | # amount of data read unconditionally, should be >= 4 | |
|
42 | # when not inline: threshold for using lazy index | |
|
43 | _prereadsize = 1048576 | |
|
44 | 41 | # max size of revlog with inline data |
|
45 | 42 | _maxinline = 131072 |
|
43 | _chunksize = 1048576 | |
|
46 | 44 | |
|
47 | 45 | RevlogError = error.RevlogError |
|
48 | 46 | LookupError = error.LookupError |
@@ -121,209 +119,6 b' def decompress(bin):' | |||
|
121 | 119 | return bin[1:] |
|
122 | 120 | raise RevlogError(_("unknown compression type %r") % t) |
|
123 | 121 | |
|
124 | class lazyparser(object): | |
|
125 | """ | |
|
126 | this class avoids the need to parse the entirety of large indices | |
|
127 | """ | |
|
128 | ||
|
129 | # lazyparser is not safe to use on windows if win32 extensions not | |
|
130 | # available. it keeps file handle open, which make it not possible | |
|
131 | # to break hardlinks on local cloned repos. | |
|
132 | ||
|
133 | def __init__(self, dataf): | |
|
134 | try: | |
|
135 | size = util.fstat(dataf).st_size | |
|
136 | except AttributeError: | |
|
137 | size = 0 | |
|
138 | self.dataf = dataf | |
|
139 | self.s = struct.calcsize(indexformatng) | |
|
140 | self.datasize = size | |
|
141 | self.l = size // self.s | |
|
142 | self.index = [None] * self.l | |
|
143 | self.map = {nullid: nullrev} | |
|
144 | self.allmap = 0 | |
|
145 | self.all = 0 | |
|
146 | self.mapfind_count = 0 | |
|
147 | ||
|
148 | def loadmap(self): | |
|
149 | """ | |
|
150 | during a commit, we need to make sure the rev being added is | |
|
151 | not a duplicate. This requires loading the entire index, | |
|
152 | which is fairly slow. loadmap can load up just the node map, | |
|
153 | which takes much less time. | |
|
154 | """ | |
|
155 | if self.allmap: | |
|
156 | return | |
|
157 | end = self.datasize | |
|
158 | self.allmap = 1 | |
|
159 | cur = 0 | |
|
160 | count = 0 | |
|
161 | blocksize = self.s * 256 | |
|
162 | self.dataf.seek(0) | |
|
163 | while cur < end: | |
|
164 | data = self.dataf.read(blocksize) | |
|
165 | off = 0 | |
|
166 | for x in xrange(256): | |
|
167 | n = data[off + ngshaoffset:off + ngshaoffset + 20] | |
|
168 | self.map[n] = count | |
|
169 | count += 1 | |
|
170 | if count >= self.l: | |
|
171 | break | |
|
172 | off += self.s | |
|
173 | cur += blocksize | |
|
174 | ||
|
175 | def loadblock(self, blockstart, blocksize, data=None): | |
|
176 | if self.all: | |
|
177 | return | |
|
178 | if data is None: | |
|
179 | self.dataf.seek(blockstart) | |
|
180 | if blockstart + blocksize > self.datasize: | |
|
181 | # the revlog may have grown since we've started running, | |
|
182 | # but we don't have space in self.index for more entries. | |
|
183 | # limit blocksize so that we don't get too much data. | |
|
184 | blocksize = max(self.datasize - blockstart, 0) | |
|
185 | data = self.dataf.read(blocksize) | |
|
186 | lend = len(data) // self.s | |
|
187 | i = blockstart // self.s | |
|
188 | off = 0 | |
|
189 | # lazyindex supports __delitem__ | |
|
190 | if lend > len(self.index) - i: | |
|
191 | lend = len(self.index) - i | |
|
192 | for x in xrange(lend): | |
|
193 | if self.index[i + x] is None: | |
|
194 | b = data[off : off + self.s] | |
|
195 | self.index[i + x] = b | |
|
196 | n = b[ngshaoffset:ngshaoffset + 20] | |
|
197 | self.map[n] = i + x | |
|
198 | off += self.s | |
|
199 | ||
|
200 | def findnode(self, node): | |
|
201 | """search backwards through the index file for a specific node""" | |
|
202 | if self.allmap: | |
|
203 | return None | |
|
204 | ||
|
205 | # hg log will cause many many searches for the manifest | |
|
206 | # nodes. After we get called a few times, just load the whole | |
|
207 | # thing. | |
|
208 | if self.mapfind_count > 8: | |
|
209 | self.loadmap() | |
|
210 | if node in self.map: | |
|
211 | return node | |
|
212 | return None | |
|
213 | self.mapfind_count += 1 | |
|
214 | last = self.l - 1 | |
|
215 | while self.index[last] != None: | |
|
216 | if last == 0: | |
|
217 | self.all = 1 | |
|
218 | self.allmap = 1 | |
|
219 | return None | |
|
220 | last -= 1 | |
|
221 | end = (last + 1) * self.s | |
|
222 | blocksize = self.s * 256 | |
|
223 | while end >= 0: | |
|
224 | start = max(end - blocksize, 0) | |
|
225 | self.dataf.seek(start) | |
|
226 | data = self.dataf.read(end - start) | |
|
227 | findend = end - start | |
|
228 | while True: | |
|
229 | # we're searching backwards, so we have to make sure | |
|
230 | # we don't find a changeset where this node is a parent | |
|
231 | off = data.find(node, 0, findend) | |
|
232 | findend = off | |
|
233 | if off >= 0: | |
|
234 | i = off / self.s | |
|
235 | off = i * self.s | |
|
236 | n = data[off + ngshaoffset:off + ngshaoffset + 20] | |
|
237 | if n == node: | |
|
238 | self.map[n] = i + start / self.s | |
|
239 | return node | |
|
240 | else: | |
|
241 | break | |
|
242 | end -= blocksize | |
|
243 | return None | |
|
244 | ||
|
245 | def loadindex(self, i=None, end=None): | |
|
246 | if self.all: | |
|
247 | return | |
|
248 | all = False | |
|
249 | if i is None: | |
|
250 | blockstart = 0 | |
|
251 | blocksize = (65536 / self.s) * self.s | |
|
252 | end = self.datasize | |
|
253 | all = True | |
|
254 | else: | |
|
255 | if end: | |
|
256 | blockstart = i * self.s | |
|
257 | end = end * self.s | |
|
258 | blocksize = end - blockstart | |
|
259 | else: | |
|
260 | blockstart = (i & ~1023) * self.s | |
|
261 | blocksize = self.s * 1024 | |
|
262 | end = blockstart + blocksize | |
|
263 | while blockstart < end: | |
|
264 | self.loadblock(blockstart, blocksize) | |
|
265 | blockstart += blocksize | |
|
266 | if all: | |
|
267 | self.all = True | |
|
268 | ||
|
269 | class lazyindex(object): | |
|
270 | """a lazy version of the index array""" | |
|
271 | def __init__(self, parser): | |
|
272 | self.p = parser | |
|
273 | def __len__(self): | |
|
274 | return len(self.p.index) | |
|
275 | def load(self, pos): | |
|
276 | if pos < 0: | |
|
277 | pos += len(self.p.index) | |
|
278 | self.p.loadindex(pos) | |
|
279 | return self.p.index[pos] | |
|
280 | def __getitem__(self, pos): | |
|
281 | return _unpack(indexformatng, self.p.index[pos] or self.load(pos)) | |
|
282 | def __setitem__(self, pos, item): | |
|
283 | self.p.index[pos] = _pack(indexformatng, *item) | |
|
284 | def __delitem__(self, pos): | |
|
285 | del self.p.index[pos] | |
|
286 | def insert(self, pos, e): | |
|
287 | self.p.index.insert(pos, _pack(indexformatng, *e)) | |
|
288 | def append(self, e): | |
|
289 | self.p.index.append(_pack(indexformatng, *e)) | |
|
290 | ||
|
291 | class lazymap(object): | |
|
292 | """a lazy version of the node map""" | |
|
293 | def __init__(self, parser): | |
|
294 | self.p = parser | |
|
295 | def load(self, key): | |
|
296 | n = self.p.findnode(key) | |
|
297 | if n is None: | |
|
298 | raise KeyError(key) | |
|
299 | def __contains__(self, key): | |
|
300 | if key in self.p.map: | |
|
301 | return True | |
|
302 | self.p.loadmap() | |
|
303 | return key in self.p.map | |
|
304 | def __iter__(self): | |
|
305 | yield nullid | |
|
306 | for i, ret in enumerate(self.p.index): | |
|
307 | if not ret: | |
|
308 | self.p.loadindex(i) | |
|
309 | ret = self.p.index[i] | |
|
310 | if isinstance(ret, str): | |
|
311 | ret = _unpack(indexformatng, ret) | |
|
312 | yield ret[7] | |
|
313 | def __getitem__(self, key): | |
|
314 | try: | |
|
315 | return self.p.map[key] | |
|
316 | except KeyError: | |
|
317 | try: | |
|
318 | self.load(key) | |
|
319 | return self.p.map[key] | |
|
320 | except KeyError: | |
|
321 | raise KeyError("node " + hex(key)) | |
|
322 | def __setitem__(self, key, val): | |
|
323 | self.p.map[key] = val | |
|
324 | def __delitem__(self, key): | |
|
325 | del self.p.map[key] | |
|
326 | ||
|
327 | 122 | indexformatv0 = ">4l20s20s20s" |
|
328 | 123 | v0shaoffset = 56 |
|
329 | 124 | |
@@ -331,13 +126,11 b' class revlogoldio(object):' | |||
|
331 | 126 | def __init__(self): |
|
332 | 127 | self.size = struct.calcsize(indexformatv0) |
|
333 | 128 | |
|
334 |
def parseindex(self, |
|
|
129 | def parseindex(self, data, inline): | |
|
335 | 130 | s = self.size |
|
336 | 131 | index = [] |
|
337 | 132 | nodemap = {nullid: nullrev} |
|
338 | 133 | n = off = 0 |
|
339 | if len(data) == _prereadsize: | |
|
340 | data += fp.read() # read the rest | |
|
341 | 134 | l = len(data) |
|
342 | 135 | while off + s <= l: |
|
343 | 136 | cur = data[off:off + s] |
@@ -350,6 +143,9 b' class revlogoldio(object):' | |||
|
350 | 143 | nodemap[e[6]] = n |
|
351 | 144 | n += 1 |
|
352 | 145 | |
|
146 | # add the magic null revision at -1 | |
|
147 | index.append((0, 0, 0, -1, -1, -1, -1, nullid)) | |
|
148 | ||
|
353 | 149 | return index, nodemap, None |
|
354 | 150 | |
|
355 | 151 | def packentry(self, entry, node, version, rev): |
@@ -377,24 +173,10 b' class revlogio(object):' | |||
|
377 | 173 | def __init__(self): |
|
378 | 174 | self.size = struct.calcsize(indexformatng) |
|
379 | 175 | |
|
380 |
def parseindex(self, |
|
|
381 | if len(data) == _prereadsize: | |
|
382 | if util.openhardlinks() and not inline: | |
|
383 | # big index, let's parse it on demand | |
|
384 | parser = lazyparser(fp) | |
|
385 | index = lazyindex(parser) | |
|
386 | nodemap = lazymap(parser) | |
|
387 | e = list(index[0]) | |
|
388 | type = gettype(e[0]) | |
|
389 | e[0] = offset_type(0, type) | |
|
390 | index[0] = e | |
|
391 | return index, nodemap, None | |
|
392 | else: | |
|
393 | data += fp.read() | |
|
394 | ||
|
176 | def parseindex(self, data, inline): | |
|
395 | 177 | # call the C implementation to parse the index data |
|
396 |
index |
|
|
397 |
return index, |
|
|
178 | index, cache = parsers.parse_index2(data, inline) | |
|
179 | return index, None, cache | |
|
398 | 180 | |
|
399 | 181 | def packentry(self, entry, node, version, rev): |
|
400 | 182 | p = _pack(indexformatng, *entry) |
@@ -439,10 +221,12 b' class revlog(object):' | |||
|
439 | 221 | self.opener = opener |
|
440 | 222 | self._cache = None |
|
441 | 223 | self._chunkcache = (0, '') |
|
442 | self.nodemap = {nullid: nullrev} | |
|
443 | 224 | self.index = [] |
|
444 | 225 | self._shallowroot = shallowroot |
|
445 | 226 | self._parentdelta = 0 |
|
227 | self._pcache = {} | |
|
228 | self._nodecache = {nullid: nullrev} | |
|
229 | self._nodepos = None | |
|
446 | 230 | |
|
447 | 231 | v = REVLOG_DEFAULT_VERSION |
|
448 | 232 | if hasattr(opener, 'options') and 'defversion' in opener.options: |
@@ -458,10 +242,8 b' class revlog(object):' | |||
|
458 | 242 | i = '' |
|
459 | 243 | try: |
|
460 | 244 | f = self.opener(self.indexfile) |
|
461 | if "nonlazy" in getattr(self.opener, 'options', {}): | |
|
462 |
|
|
|
463 | else: | |
|
464 | i = f.read(_prereadsize) | |
|
245 | i = f.read() | |
|
246 | f.close() | |
|
465 | 247 | if len(i) > 0: |
|
466 | 248 | v = struct.unpack(versionformat, i[:4])[0] |
|
467 | 249 | except IOError, inst: |
@@ -486,37 +268,15 b' class revlog(object):' | |||
|
486 | 268 | self._io = revlogio() |
|
487 | 269 | if self.version == REVLOGV0: |
|
488 | 270 | self._io = revlogoldio() |
|
489 |
|
|
|
490 | try: | |
|
491 | d = self._io.parseindex(f, i, self._inline) | |
|
492 | except (ValueError, IndexError): | |
|
493 | raise RevlogError(_("index %s is corrupted") % (self.indexfile)) | |
|
494 | self.index, self.nodemap, self._chunkcache = d | |
|
495 |
|
|
|
496 |
|
|
|
497 | ||
|
498 | # add the magic null revision at -1 (if it hasn't been done already) | |
|
499 | if (self.index == [] or isinstance(self.index, lazyindex) or | |
|
500 | self.index[-1][7] != nullid) : | |
|
501 | self.index.append((0, 0, 0, -1, -1, -1, -1, nullid)) | |
|
502 | ||
|
503 | def _loadindex(self, start, end): | |
|
504 | """load a block of indexes all at once from the lazy parser""" | |
|
505 | if isinstance(self.index, lazyindex): | |
|
506 | self.index.p.loadindex(start, end) | |
|
507 | ||
|
508 | def _loadindexmap(self): | |
|
509 | """loads both the map and the index from the lazy parser""" | |
|
510 | if isinstance(self.index, lazyindex): | |
|
511 | p = self.index.p | |
|
512 | p.loadindex() | |
|
513 | self.nodemap = p.map | |
|
514 | ||
|
515 | def _loadmap(self): | |
|
516 | """loads the map from the lazy parser""" | |
|
517 | if isinstance(self.nodemap, lazymap): | |
|
518 | self.nodemap.p.loadmap() | |
|
519 | self.nodemap = self.nodemap.p.map | |
|
271 | try: | |
|
272 | d = self._io.parseindex(i, self._inline) | |
|
273 | except (ValueError, IndexError): | |
|
274 | raise RevlogError(_("index %s is corrupted") % (self.indexfile)) | |
|
275 | self.index, nodemap, self._chunkcache = d | |
|
276 | if nodemap is not None: | |
|
277 | self.nodemap = self._nodecache = nodemap | |
|
278 | if not self._chunkcache: | |
|
279 | self._chunkclear() | |
|
520 | 280 | |
|
521 | 281 | def tip(self): |
|
522 | 282 | return self.node(len(self.index) - 2) |
@@ -525,11 +285,29 b' class revlog(object):' | |||
|
525 | 285 | def __iter__(self): |
|
526 | 286 | for i in xrange(len(self)): |
|
527 | 287 | yield i |
|
288 | ||
|
289 | @util.propertycache | |
|
290 | def nodemap(self): | |
|
291 | n = self.rev(self.node(0)) | |
|
292 | return self._nodecache | |
|
293 | ||
|
528 | 294 | def rev(self, node): |
|
529 | 295 | try: |
|
530 |
return self. |
|
|
296 | return self._nodecache[node] | |
|
531 | 297 | except KeyError: |
|
298 | n = self._nodecache | |
|
299 | i = self.index | |
|
300 | p = self._nodepos | |
|
301 | if p is None: | |
|
302 | p = len(i) - 2 | |
|
303 | for r in xrange(p, -1, -1): | |
|
304 | v = i[r][7] | |
|
305 | n[v] = r | |
|
306 | if v == node: | |
|
307 | self._nodepos = r - 1 | |
|
308 | return r | |
|
532 | 309 | raise LookupError(node, self.indexfile, _('no node')) |
|
310 | ||
|
533 | 311 | def node(self, rev): |
|
534 | 312 | return self.index[rev][7] |
|
535 | 313 | def linkrev(self, rev): |
@@ -937,15 +715,19 b' class revlog(object):' | |||
|
937 | 715 | pass |
|
938 | 716 | |
|
939 | 717 | def _partialmatch(self, id): |
|
718 | if id in self._pcache: | |
|
719 | return self._pcache[id] | |
|
720 | ||
|
940 | 721 | if len(id) < 40: |
|
941 | 722 | try: |
|
942 | 723 | # hex(node)[:...] |
|
943 | 724 | l = len(id) // 2 # grab an even number of digits |
|
944 |
|
|
|
945 |
nl = [ |
|
|
725 | prefix = bin(id[:l * 2]) | |
|
726 | nl = [e[7] for e in self.index if e[7].startswith(prefix)] | |
|
946 | 727 | nl = [n for n in nl if hex(n).startswith(id)] |
|
947 | 728 | if len(nl) > 0: |
|
948 | 729 | if len(nl) == 1: |
|
730 | self._pcache[id] = nl[0] | |
|
949 | 731 | return nl[0] |
|
950 | 732 | raise LookupError(id, self.indexfile, |
|
951 | 733 | _('ambiguous identifier')) |
@@ -978,7 +760,7 b' class revlog(object):' | |||
|
978 | 760 | def _addchunk(self, offset, data): |
|
979 | 761 | o, d = self._chunkcache |
|
980 | 762 | # try to add to existing cache |
|
981 |
if o + len(d) == offset and len(d) + len(data) < _ |
|
|
763 | if o + len(d) == offset and len(d) + len(data) < _chunksize: | |
|
982 | 764 | self._chunkcache = o, d + data |
|
983 | 765 | else: |
|
984 | 766 | self._chunkcache = offset, data |
@@ -1060,7 +842,6 b' class revlog(object):' | |||
|
1060 | 842 | (self.flags(rev) & ~REVIDX_KNOWN_FLAGS)) |
|
1061 | 843 | |
|
1062 | 844 | # build delta chain |
|
1063 | self._loadindex(base, rev + 1) | |
|
1064 | 845 | chain = [] |
|
1065 | 846 | index = self.index # for performance |
|
1066 | 847 | iterrev = rev |
@@ -1088,13 +869,18 b' class revlog(object):' | |||
|
1088 | 869 | |
|
1089 | 870 | bins = [self._chunk(r) for r in chain] |
|
1090 | 871 | text = mdiff.patches(text, bins) |
|
872 | ||
|
873 | text = self._checkhash(text, node, rev) | |
|
874 | ||
|
875 | self._cache = (node, rev, text) | |
|
876 | return text | |
|
877 | ||
|
878 | def _checkhash(self, text, node, rev): | |
|
1091 | 879 | p1, p2 = self.parents(node) |
|
1092 | 880 | if (node != hash(text, p1, p2) and |
|
1093 | 881 | not (self.flags(rev) & REVIDX_PUNCHED_FLAG)): |
|
1094 | 882 | raise RevlogError(_("integrity check failed on %s:%d") |
|
1095 | 883 | % (self.indexfile, rev)) |
|
1096 | ||
|
1097 | self._cache = (node, rev, text) | |
|
1098 | 884 | return text |
|
1099 | 885 | |
|
1100 | 886 | def checkinlinesize(self, tr, fp=None): |
@@ -1382,6 +1168,7 b' class revlog(object):' | |||
|
1382 | 1168 | if not dfh and not self._inline: |
|
1383 | 1169 | # addrevision switched from inline to conventional |
|
1384 | 1170 | # reopen the index |
|
1171 | ifh.close() | |
|
1385 | 1172 | dfh = self.opener(self.datafile, "a") |
|
1386 | 1173 | ifh = self.opener(self.indexfile, "a") |
|
1387 | 1174 | finally: |
@@ -1408,9 +1195,6 b' class revlog(object):' | |||
|
1408 | 1195 | if len(self) == 0: |
|
1409 | 1196 | return |
|
1410 | 1197 | |
|
1411 | if isinstance(self.index, lazyindex): | |
|
1412 | self._loadindexmap() | |
|
1413 | ||
|
1414 | 1198 | for rev in self: |
|
1415 | 1199 | if self.index[rev][4] >= minlink: |
|
1416 | 1200 | break |
@@ -1444,6 +1228,7 b' class revlog(object):' | |||
|
1444 | 1228 | f = self.opener(self.datafile) |
|
1445 | 1229 | f.seek(0, 2) |
|
1446 | 1230 | actual = f.tell() |
|
1231 | f.close() | |
|
1447 | 1232 | dd = actual - expected |
|
1448 | 1233 | except IOError, inst: |
|
1449 | 1234 | if inst.errno != errno.ENOENT: |
@@ -1454,6 +1239,7 b' class revlog(object):' | |||
|
1454 | 1239 | f = self.opener(self.indexfile) |
|
1455 | 1240 | f.seek(0, 2) |
|
1456 | 1241 | actual = f.tell() |
|
1242 | f.close() | |
|
1457 | 1243 | s = self._io.size |
|
1458 | 1244 | i = max(0, actual // s) |
|
1459 | 1245 | di = actual - (i * s) |
@@ -7,6 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | import re |
|
9 | 9 | import parser, util, error, discovery |
|
10 | import bookmarks as bookmarksmod | |
|
10 | 11 | import match as matchmod |
|
11 | 12 | from i18n import _, gettext |
|
12 | 13 | |
@@ -202,9 +203,13 b' def rev(repo, subset, x):' | |||
|
202 | 203 | return [r for r in subset if r == l] |
|
203 | 204 | |
|
204 | 205 | def p1(repo, subset, x): |
|
205 | """``p1(set)`` | |
|
206 | First parent of changesets in set. | |
|
206 | """``p1([set])`` | |
|
207 | First parent of changesets in set, or the working directory. | |
|
207 | 208 | """ |
|
209 | if x is None: | |
|
210 | p = repo[x].parents()[0].rev() | |
|
211 | return [r for r in subset if r == p] | |
|
212 | ||
|
208 | 213 | ps = set() |
|
209 | 214 | cl = repo.changelog |
|
210 | 215 | for r in getset(repo, range(len(repo)), x): |
@@ -212,9 +217,17 b' def p1(repo, subset, x):' | |||
|
212 | 217 | return [r for r in subset if r in ps] |
|
213 | 218 | |
|
214 | 219 | def p2(repo, subset, x): |
|
215 | """``p2(set)`` | |
|
216 | Second parent of changesets in set. | |
|
220 | """``p2([set])`` | |
|
221 | Second parent of changesets in set, or the working directory. | |
|
217 | 222 | """ |
|
223 | if x is None: | |
|
224 | ps = repo[x].parents() | |
|
225 | try: | |
|
226 | p = ps[1].rev() | |
|
227 | return [r for r in subset if r == p] | |
|
228 | except IndexError: | |
|
229 | return [] | |
|
230 | ||
|
218 | 231 | ps = set() |
|
219 | 232 | cl = repo.changelog |
|
220 | 233 | for r in getset(repo, range(len(repo)), x): |
@@ -222,9 +235,13 b' def p2(repo, subset, x):' | |||
|
222 | 235 | return [r for r in subset if r in ps] |
|
223 | 236 | |
|
224 | 237 | def parents(repo, subset, x): |
|
225 | """``parents(set)`` | |
|
226 | The set of all parents for all changesets in set. | |
|
238 | """``parents([set])`` | |
|
239 | The set of all parents for all changesets in set, or the working directory. | |
|
227 | 240 | """ |
|
241 | if x is None: | |
|
242 | ps = tuple(p.rev() for p in repo[x].parents()) | |
|
243 | return [r for r in subset if r in ps] | |
|
244 | ||
|
228 | 245 | ps = set() |
|
229 | 246 | cl = repo.changelog |
|
230 | 247 | for r in getset(repo, range(len(repo)), x): |
@@ -648,12 +665,31 b' def tag(repo, subset, x):' | |||
|
648 | 665 | def tagged(repo, subset, x): |
|
649 | 666 | return tag(repo, subset, x) |
|
650 | 667 | |
|
668 | def bookmark(repo, subset, x): | |
|
669 | """``bookmark([name])`` | |
|
670 | The named bookmark or all bookmarks. | |
|
671 | """ | |
|
672 | # i18n: "bookmark" is a keyword | |
|
673 | args = getargs(x, 0, 1, _('bookmark takes one or no arguments')) | |
|
674 | if args: | |
|
675 | bm = getstring(args[0], | |
|
676 | # i18n: "bookmark" is a keyword | |
|
677 | _('the argument to bookmark must be a string')) | |
|
678 | bmrev = bookmarksmod.listbookmarks(repo).get(bm, None) | |
|
679 | if bmrev: | |
|
680 | bmrev = repo[bmrev].rev() | |
|
681 | return [r for r in subset if r == bmrev] | |
|
682 | bms = set([repo[r].rev() | |
|
683 | for r in bookmarksmod.listbookmarks(repo).values()]) | |
|
684 | return [r for r in subset if r in bms] | |
|
685 | ||
|
651 | 686 | symbols = { |
|
652 | 687 | "adds": adds, |
|
653 | 688 | "all": getall, |
|
654 | 689 | "ancestor": ancestor, |
|
655 | 690 | "ancestors": ancestors, |
|
656 | 691 | "author": author, |
|
692 | "bookmark": bookmark, | |
|
657 | 693 | "branch": branch, |
|
658 | 694 | "children": children, |
|
659 | 695 | "closed": closed, |
@@ -699,7 +735,7 b' methods = {' | |||
|
699 | 735 | } |
|
700 | 736 | |
|
701 | 737 | def optimize(x, small): |
|
702 |
if x |
|
|
738 | if x is None: | |
|
703 | 739 | return 0, x |
|
704 | 740 | |
|
705 | 741 | smallbonus = 1 |
@@ -91,10 +91,11 b' class sshrepository(wireproto.wirereposi' | |||
|
91 | 91 | size = util.fstat(self.pipee).st_size |
|
92 | 92 | if size == 0: |
|
93 | 93 | break |
|
94 |
|
|
|
95 |
if not |
|
|
94 | s = self.pipee.read(size) | |
|
95 | if not s: | |
|
96 | 96 | break |
|
97 | self.ui.status(_("remote: "), l) | |
|
97 | for l in s.splitlines(): | |
|
98 | self.ui.status(_("remote: "), l, '\n') | |
|
98 | 99 | |
|
99 | 100 | def _abort(self, exception): |
|
100 | 101 | self.cleanup() |
@@ -77,7 +77,6 b' def build_opener(ui, authinfo):' | |||
|
77 | 77 | return httprangereader(f, urlopener) |
|
78 | 78 | return o |
|
79 | 79 | |
|
80 | opener.options = {'nonlazy': 1} | |
|
81 | 80 | return opener |
|
82 | 81 | |
|
83 | 82 | class statichttprepository(localrepo.localrepository): |
@@ -99,7 +98,9 b' class statichttprepository(localrepo.loc' | |||
|
99 | 98 | raise |
|
100 | 99 | # check if it is a non-empty old-style repository |
|
101 | 100 | try: |
|
102 |
self.opener("00changelog.i") |
|
|
101 | fp = self.opener("00changelog.i") | |
|
102 | fp.read(1) | |
|
103 | fp.close() | |
|
103 | 104 | except IOError, inst: |
|
104 | 105 | if inst.errno != errno.ENOENT: |
|
105 | 106 | raise |
@@ -114,9 +115,7 b' class statichttprepository(localrepo.loc' | |||
|
114 | 115 | raise error.RepoError(_("requirement '%s' not supported") % r) |
|
115 | 116 | |
|
116 | 117 | # setup store |
|
117 | def pjoin(a, b): | |
|
118 | return a + '/' + b | |
|
119 | self.store = store.store(requirements, self.path, opener, pjoin) | |
|
118 | self.store = store.store(requirements, self.path, opener) | |
|
120 | 119 | self.spath = self.store.path |
|
121 | 120 | self.sopener = self.store.opener |
|
122 | 121 | self.sjoin = self.store.join |
@@ -169,8 +169,7 b' def _calcmode(path):' | |||
|
169 | 169 | |
|
170 | 170 | class basicstore(object): |
|
171 | 171 | '''base class for local repository stores''' |
|
172 |
def __init__(self, path, opener |
|
|
173 | self.pathjoiner = pathjoiner | |
|
172 | def __init__(self, path, opener): | |
|
174 | 173 | self.path = path |
|
175 | 174 | self.createmode = _calcmode(path) |
|
176 | 175 | op = opener(self.path) |
@@ -178,19 +177,21 b' class basicstore(object):' | |||
|
178 | 177 | self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw) |
|
179 | 178 | |
|
180 | 179 | def join(self, f): |
|
181 |
return self. |
|
|
180 | return self.path + '/' + encodedir(f) | |
|
182 | 181 | |
|
183 | 182 | def _walk(self, relpath, recurse): |
|
184 | 183 | '''yields (unencoded, encoded, size)''' |
|
185 |
path = self. |
|
|
186 | striplen = len(self.path) + len(os.sep) | |
|
184 | path = self.path | |
|
185 | if relpath: | |
|
186 | path += '/' + relpath | |
|
187 | striplen = len(self.path) + 1 | |
|
187 | 188 | l = [] |
|
188 | 189 | if os.path.isdir(path): |
|
189 | 190 | visit = [path] |
|
190 | 191 | while visit: |
|
191 | 192 | p = visit.pop() |
|
192 | 193 | for f, kind, st in osutil.listdir(p, stat=True): |
|
193 |
fp = |
|
|
194 | fp = p + '/' + f | |
|
194 | 195 | if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'): |
|
195 | 196 | n = util.pconvert(fp[striplen:]) |
|
196 | 197 | l.append((decodedir(n), n, st.st_size)) |
@@ -213,10 +214,12 b' class basicstore(object):' | |||
|
213 | 214 | def copylist(self): |
|
214 | 215 | return ['requires'] + _data.split() |
|
215 | 216 | |
|
217 | def write(self): | |
|
218 | pass | |
|
219 | ||
|
216 | 220 | class encodedstore(basicstore): |
|
217 |
def __init__(self, path, opener |
|
|
218 |
self.path |
|
|
219 | self.path = self.pathjoiner(path, 'store') | |
|
221 | def __init__(self, path, opener): | |
|
222 | self.path = path + '/store' | |
|
220 | 223 | self.createmode = _calcmode(self.path) |
|
221 | 224 | op = opener(self.path) |
|
222 | 225 | op.createmode = self.createmode |
@@ -231,11 +234,11 b' class encodedstore(basicstore):' | |||
|
231 | 234 | yield a, b, size |
|
232 | 235 | |
|
233 | 236 | def join(self, f): |
|
234 |
return self. |
|
|
237 | return self.path + '/' + encodefilename(f) | |
|
235 | 238 | |
|
236 | 239 | def copylist(self): |
|
237 | 240 | return (['requires', '00changelog.i'] + |
|
238 |
[ |
|
|
241 | ['store/' + f for f in _data.split()]) | |
|
239 | 242 | |
|
240 | 243 | class fncache(object): |
|
241 | 244 | # the filename used to be partially encoded |
@@ -243,10 +246,12 b' class fncache(object):' | |||
|
243 | 246 | def __init__(self, opener): |
|
244 | 247 | self.opener = opener |
|
245 | 248 | self.entries = None |
|
249 | self._dirty = False | |
|
246 | 250 | |
|
247 | 251 | def _load(self): |
|
248 | 252 | '''fill the entries from the fncache file''' |
|
249 | 253 | self.entries = set() |
|
254 | self._dirty = False | |
|
250 | 255 | try: |
|
251 | 256 | fp = self.opener('fncache', mode='rb') |
|
252 | 257 | except IOError: |
@@ -265,12 +270,22 b' class fncache(object):' | |||
|
265 | 270 | fp.write(encodedir(p) + '\n') |
|
266 | 271 | fp.close() |
|
267 | 272 | self.entries = set(files) |
|
273 | self._dirty = False | |
|
274 | ||
|
275 | def write(self): | |
|
276 | if not self._dirty: | |
|
277 | return | |
|
278 | fp = self.opener('fncache', mode='wb', atomictemp=True) | |
|
279 | for p in self.entries: | |
|
280 | fp.write(encodedir(p) + '\n') | |
|
281 | fp.rename() | |
|
282 | self._dirty = False | |
|
268 | 283 | |
|
269 | 284 | def add(self, fn): |
|
270 | 285 | if self.entries is None: |
|
271 | 286 | self._load() |
|
272 | 287 | if fn not in self.entries: |
|
273 | self.opener('fncache', 'ab').write(encodedir(fn) + '\n') | |
|
288 | self._dirty = True | |
|
274 | 289 | self.entries.add(fn) |
|
275 | 290 | |
|
276 | 291 | def __contains__(self, fn): |
@@ -284,10 +299,9 b' class fncache(object):' | |||
|
284 | 299 | return iter(self.entries) |
|
285 | 300 | |
|
286 | 301 | class fncachestore(basicstore): |
|
287 |
def __init__(self, path, opener, |
|
|
302 | def __init__(self, path, opener, encode): | |
|
288 | 303 | self.encode = encode |
|
289 |
self.path |
|
|
290 | self.path = self.pathjoiner(path, 'store') | |
|
304 | self.path = path + '/store' | |
|
291 | 305 | self.createmode = _calcmode(self.path) |
|
292 | 306 | op = opener(self.path) |
|
293 | 307 | op.createmode = self.createmode |
@@ -301,17 +315,16 b' class fncachestore(basicstore):' | |||
|
301 | 315 | self.opener = fncacheopener |
|
302 | 316 | |
|
303 | 317 | def join(self, f): |
|
304 |
return self. |
|
|
318 | return self.path + '/' + self.encode(f) | |
|
305 | 319 | |
|
306 | 320 | def datafiles(self): |
|
307 | 321 | rewrite = False |
|
308 | 322 | existing = [] |
|
309 | pjoin = self.pathjoiner | |
|
310 | 323 | spath = self.path |
|
311 | 324 | for f in self.fncache: |
|
312 | 325 | ef = self.encode(f) |
|
313 | 326 | try: |
|
314 |
st = os.stat( |
|
|
327 | st = os.stat(spath + '/' + ef) | |
|
315 | 328 | yield f, ef, st.st_size |
|
316 | 329 | existing.append(f) |
|
317 | 330 | except OSError: |
@@ -326,14 +339,16 b' class fncachestore(basicstore):' | |||
|
326 | 339 | d = ('data dh fncache' |
|
327 | 340 | ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') |
|
328 | 341 | return (['requires', '00changelog.i'] + |
|
329 |
[ |
|
|
342 | ['store/' + f for f in d.split()]) | |
|
330 | 343 | |
|
331 | def store(requirements, path, opener, pathjoiner=None): | |
|
332 | pathjoiner = pathjoiner or os.path.join | |
|
344 | def write(self): | |
|
345 | self.fncache.write() | |
|
346 | ||
|
347 | def store(requirements, path, opener): | |
|
333 | 348 | if 'store' in requirements: |
|
334 | 349 | if 'fncache' in requirements: |
|
335 | 350 | auxencode = lambda f: _auxencode(f, 'dotencode' in requirements) |
|
336 | 351 | encode = lambda f: _hybridencode(f, auxencode) |
|
337 |
return fncachestore(path, opener, |
|
|
338 |
return encodedstore(path, opener |
|
|
339 |
return basicstore(path, opener |
|
|
352 | return fncachestore(path, opener, encode) | |
|
353 | return encodedstore(path, opener) | |
|
354 | return basicstore(path, opener) |
@@ -6,7 +6,7 b'' | |||
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath |
|
9 | import stat, subprocess | |
|
9 | import stat, subprocess, tarfile | |
|
10 | 10 | from i18n import _ |
|
11 | 11 | import config, util, node, error, cmdutil |
|
12 | 12 | hg = None |
@@ -163,6 +163,17 b' def submerge(repo, wctx, mctx, actx, ove' | |||
|
163 | 163 | # record merged .hgsubstate |
|
164 | 164 | writestate(repo, sm) |
|
165 | 165 | |
|
166 | def _updateprompt(ui, sub, dirty, local, remote): | |
|
167 | if dirty: | |
|
168 | msg = (_(' subrepository sources for %s differ\n' | |
|
169 | 'use (l)ocal source (%s) or (r)emote source (%s)?\n') | |
|
170 | % (subrelpath(sub), local, remote)) | |
|
171 | else: | |
|
172 | msg = (_(' subrepository sources for %s differ (in checked out version)\n' | |
|
173 | 'use (l)ocal source (%s) or (r)emote source (%s)?\n') | |
|
174 | % (subrelpath(sub), local, remote)) | |
|
175 | return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0) | |
|
176 | ||
|
166 | 177 | def reporelpath(repo): |
|
167 | 178 | """return path to this (sub)repo as seen from outermost repo""" |
|
168 | 179 | parent = repo |
@@ -172,6 +183,8 b' def reporelpath(repo):' | |||
|
172 | 183 | |
|
173 | 184 | def subrelpath(sub): |
|
174 | 185 | """return path to this subrepo as seen from outermost repo""" |
|
186 | if hasattr(sub, '_relpath'): | |
|
187 | return sub._relpath | |
|
175 | 188 | if not hasattr(sub, '_repo'): |
|
176 | 189 | return sub._path |
|
177 | 190 | return reporelpath(sub._repo) |
@@ -236,9 +249,10 b' def subrepo(ctx, path):' | |||
|
236 | 249 | |
|
237 | 250 | class abstractsubrepo(object): |
|
238 | 251 | |
|
239 | def dirty(self): | |
|
240 |
"""returns true if the dirstate of the subrepo does not |
|
|
241 | current stored state | |
|
252 | def dirty(self, ignoreupdate=False): | |
|
253 | """returns true if the dirstate of the subrepo is dirty or does not | |
|
254 | match current stored state. If ignoreupdate is true, only check | |
|
255 | whether the subrepo has uncommitted changes in its dirstate. | |
|
242 | 256 | """ |
|
243 | 257 | raise NotImplementedError |
|
244 | 258 | |
@@ -266,7 +280,7 b' class abstractsubrepo(object):' | |||
|
266 | 280 | """ |
|
267 | 281 | raise NotImplementedError |
|
268 | 282 | |
|
269 |
def merge(self, state |
|
|
283 | def merge(self, state): | |
|
270 | 284 | """merge currently-saved state with the new state.""" |
|
271 | 285 | raise NotImplementedError |
|
272 | 286 | |
@@ -304,13 +318,21 b' class abstractsubrepo(object):' | |||
|
304 | 318 | """return file flags""" |
|
305 | 319 | return '' |
|
306 | 320 | |
|
307 | def archive(self, archiver, prefix): | |
|
308 |
f |
|
|
321 | def archive(self, ui, archiver, prefix): | |
|
322 | files = self.files() | |
|
323 | total = len(files) | |
|
324 | relpath = subrelpath(self) | |
|
325 | ui.progress(_('archiving (%s)') % relpath, 0, | |
|
326 | unit=_('files'), total=total) | |
|
327 | for i, name in enumerate(files): | |
|
309 | 328 | flags = self.fileflags(name) |
|
310 | 329 | mode = 'x' in flags and 0755 or 0644 |
|
311 | 330 | symlink = 'l' in flags |
|
312 | 331 | archiver.addfile(os.path.join(prefix, self._path, name), |
|
313 | 332 | mode, symlink, self.filedata(name)) |
|
333 | ui.progress(_('archiving (%s)') % relpath, i + 1, | |
|
334 | unit=_('files'), total=total) | |
|
335 | ui.progress(_('archiving (%s)') % relpath, None) | |
|
314 | 336 | |
|
315 | 337 | |
|
316 | 338 | class hgsubrepo(abstractsubrepo): |
@@ -373,21 +395,22 b' class hgsubrepo(abstractsubrepo):' | |||
|
373 | 395 | self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') |
|
374 | 396 | % (inst, subrelpath(self))) |
|
375 | 397 | |
|
376 | def archive(self, archiver, prefix): | |
|
377 | abstractsubrepo.archive(self, archiver, prefix) | |
|
398 | def archive(self, ui, archiver, prefix): | |
|
399 | abstractsubrepo.archive(self, ui, archiver, prefix) | |
|
378 | 400 | |
|
379 | 401 | rev = self._state[1] |
|
380 | 402 | ctx = self._repo[rev] |
|
381 | 403 | for subpath in ctx.substate: |
|
382 | 404 | s = subrepo(ctx, subpath) |
|
383 | s.archive(archiver, os.path.join(prefix, self._path)) | |
|
405 | s.archive(ui, archiver, os.path.join(prefix, self._path)) | |
|
384 | 406 | |
|
385 | def dirty(self): | |
|
407 | def dirty(self, ignoreupdate=False): | |
|
386 | 408 | r = self._state[1] |
|
387 | if r == '': | |
|
409 | if r == '' and not ignoreupdate: # no state recorded | |
|
388 | 410 | return True |
|
389 | 411 | w = self._repo[None] |
|
390 |
if w.p1() != self._repo[r]: |
|
|
412 | if w.p1() != self._repo[r] and not ignoreupdate: | |
|
413 | # different version checked out | |
|
391 | 414 | return True |
|
392 | 415 | return w.dirty() # working directory changed |
|
393 | 416 | |
@@ -430,14 +453,26 b' class hgsubrepo(abstractsubrepo):' | |||
|
430 | 453 | cur = self._repo['.'] |
|
431 | 454 | dst = self._repo[state[1]] |
|
432 | 455 | anc = dst.ancestor(cur) |
|
433 | if anc == cur: | |
|
434 | self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) | |
|
435 | hg.update(self._repo, state[1]) | |
|
436 | elif anc == dst: | |
|
437 | self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) | |
|
456 | ||
|
457 | def mergefunc(): | |
|
458 | if anc == cur: | |
|
459 | self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) | |
|
460 | hg.update(self._repo, state[1]) | |
|
461 | elif anc == dst: | |
|
462 | self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) | |
|
463 | else: | |
|
464 | self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) | |
|
465 | hg.merge(self._repo, state[1], remind=False) | |
|
466 | ||
|
467 | wctx = self._repo[None] | |
|
468 | if self.dirty(): | |
|
469 | if anc != dst: | |
|
470 | if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst): | |
|
471 | mergefunc() | |
|
472 | else: | |
|
473 | mergefunc() | |
|
438 | 474 | else: |
|
439 | self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) | |
|
440 | hg.merge(self._repo, state[1], remind=False) | |
|
475 | mergefunc() | |
|
441 | 476 | |
|
442 | 477 | def push(self, force): |
|
443 | 478 | # push subrepos depth-first for coherent ordering |
@@ -484,13 +519,10 b' class svnsubrepo(abstractsubrepo):' | |||
|
484 | 519 | def _svncommand(self, commands, filename=''): |
|
485 | 520 | path = os.path.join(self._ctx._repo.origroot, self._path, filename) |
|
486 | 521 | cmd = ['svn'] + commands + [path] |
|
487 | cmd = [util.shellquote(arg) for arg in cmd] | |
|
488 | cmd = util.quotecommand(' '.join(cmd)) | |
|
489 | 522 | env = dict(os.environ) |
|
490 | 523 | # Avoid localized output, preserve current locale for everything else. |
|
491 | 524 | env['LC_MESSAGES'] = 'C' |
|
492 |
p = subprocess.Popen(cmd, |
|
|
493 | close_fds=util.closefds, | |
|
525 | p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds, | |
|
494 | 526 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, |
|
495 | 527 | universal_newlines=True, env=env) |
|
496 | 528 | stdout, stderr = p.communicate() |
@@ -543,9 +575,10 b' class svnsubrepo(abstractsubrepo):' | |||
|
543 | 575 | return True, True |
|
544 | 576 | return bool(changes), False |
|
545 | 577 | |
|
546 | def dirty(self): | |
|
547 |
if |
|
|
548 | return False | |
|
578 | def dirty(self, ignoreupdate=False): | |
|
579 | if not self._wcchanged()[0]: | |
|
580 | if self._state[1] in self._wcrevs() or ignoreupdate: | |
|
581 | return False | |
|
549 | 582 | return True |
|
550 | 583 | |
|
551 | 584 | def commit(self, text, user, date): |
@@ -598,10 +631,12 b' class svnsubrepo(abstractsubrepo):' | |||
|
598 | 631 | self._ui.status(status) |
|
599 | 632 | |
|
600 | 633 | def merge(self, state): |
|
601 |
old = |
|
|
602 |
new = |
|
|
603 |
if new |
|
|
604 | self.get(state) | |
|
634 | old = self._state[1] | |
|
635 | new = state[1] | |
|
636 | if new != self._wcrev(): | |
|
637 | dirty = old == self._wcrev() or self._wcchanged()[0] | |
|
638 | if _updateprompt(self._ui, self, dirty, self._wcrev(), new): | |
|
639 | self.get(state, False) | |
|
605 | 640 | |
|
606 | 641 | def push(self, force): |
|
607 | 642 | # push is a no-op for SVN |
@@ -616,7 +651,347 b' class svnsubrepo(abstractsubrepo):' | |||
|
616 | 651 | return self._svncommand(['cat'], name) |
|
617 | 652 | |
|
618 | 653 | |
|
654 | class gitsubrepo(abstractsubrepo): | |
|
655 | def __init__(self, ctx, path, state): | |
|
656 | # TODO add git version check. | |
|
657 | self._state = state | |
|
658 | self._ctx = ctx | |
|
659 | self._path = path | |
|
660 | self._relpath = os.path.join(reporelpath(ctx._repo), path) | |
|
661 | self._abspath = ctx._repo.wjoin(path) | |
|
662 | self._ui = ctx._repo.ui | |
|
663 | ||
|
664 | def _gitcommand(self, commands, env=None, stream=False): | |
|
665 | return self._gitdir(commands, env=env, stream=stream)[0] | |
|
666 | ||
|
667 | def _gitdir(self, commands, env=None, stream=False): | |
|
668 | return self._gitnodir(commands, env=env, stream=stream, | |
|
669 | cwd=self._abspath) | |
|
670 | ||
|
671 | def _gitnodir(self, commands, env=None, stream=False, cwd=None): | |
|
672 | """Calls the git command | |
|
673 | ||
|
674 | The methods tries to call the git command. versions previor to 1.6.0 | |
|
675 | are not supported and very probably fail. | |
|
676 | """ | |
|
677 | self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) | |
|
678 | # unless ui.quiet is set, print git's stderr, | |
|
679 | # which is mostly progress and useful info | |
|
680 | errpipe = None | |
|
681 | if self._ui.quiet: | |
|
682 | errpipe = open(os.devnull, 'w') | |
|
683 | p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env, | |
|
684 | close_fds=util.closefds, | |
|
685 | stdout=subprocess.PIPE, stderr=errpipe) | |
|
686 | if stream: | |
|
687 | return p.stdout, None | |
|
688 | ||
|
689 | retdata = p.stdout.read().strip() | |
|
690 | # wait for the child to exit to avoid race condition. | |
|
691 | p.wait() | |
|
692 | ||
|
693 | if p.returncode != 0 and p.returncode != 1: | |
|
694 | # there are certain error codes that are ok | |
|
695 | command = commands[0] | |
|
696 | if command in ('cat-file', 'symbolic-ref'): | |
|
697 | return retdata, p.returncode | |
|
698 | # for all others, abort | |
|
699 | raise util.Abort('git %s error %d in %s' % | |
|
700 | (command, p.returncode, self._relpath)) | |
|
701 | ||
|
702 | return retdata, p.returncode | |
|
703 | ||
|
704 | def _gitstate(self): | |
|
705 | return self._gitcommand(['rev-parse', 'HEAD']) | |
|
706 | ||
|
707 | def _gitcurrentbranch(self): | |
|
708 | current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet']) | |
|
709 | if err: | |
|
710 | current = None | |
|
711 | return current | |
|
712 | ||
|
713 | def _githavelocally(self, revision): | |
|
714 | out, code = self._gitdir(['cat-file', '-e', revision]) | |
|
715 | return code == 0 | |
|
716 | ||
|
717 | def _gitisancestor(self, r1, r2): | |
|
718 | base = self._gitcommand(['merge-base', r1, r2]) | |
|
719 | return base == r1 | |
|
720 | ||
|
721 | def _gitbranchmap(self): | |
|
722 | '''returns 2 things: | |
|
723 | a map from git branch to revision | |
|
724 | a map from revision to branches''' | |
|
725 | branch2rev = {} | |
|
726 | rev2branch = {} | |
|
727 | ||
|
728 | out = self._gitcommand(['for-each-ref', '--format', | |
|
729 | '%(objectname) %(refname)']) | |
|
730 | for line in out.split('\n'): | |
|
731 | revision, ref = line.split(' ') | |
|
732 | if ref.startswith('refs/tags/'): | |
|
733 | continue | |
|
734 | if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'): | |
|
735 | continue # ignore remote/HEAD redirects | |
|
736 | branch2rev[ref] = revision | |
|
737 | rev2branch.setdefault(revision, []).append(ref) | |
|
738 | return branch2rev, rev2branch | |
|
739 | ||
|
740 | def _gittracking(self, branches): | |
|
741 | 'return map of remote branch to local tracking branch' | |
|
742 | # assumes no more than one local tracking branch for each remote | |
|
743 | tracking = {} | |
|
744 | for b in branches: | |
|
745 | if b.startswith('refs/remotes/'): | |
|
746 | continue | |
|
747 | remote = self._gitcommand(['config', 'branch.%s.remote' % b]) | |
|
748 | if remote: | |
|
749 | ref = self._gitcommand(['config', 'branch.%s.merge' % b]) | |
|
750 | tracking['refs/remotes/%s/%s' % | |
|
751 | (remote, ref.split('/', 2)[2])] = b | |
|
752 | return tracking | |
|
753 | ||
|
754 | def _fetch(self, source, revision): | |
|
755 | if not os.path.exists(os.path.join(self._abspath, '.git')): | |
|
756 | self._ui.status(_('cloning subrepo %s\n') % self._relpath) | |
|
757 | self._gitnodir(['clone', source, self._abspath]) | |
|
758 | if self._githavelocally(revision): | |
|
759 | return | |
|
760 | self._ui.status(_('pulling subrepo %s\n') % self._relpath) | |
|
761 | # first try from origin | |
|
762 | self._gitcommand(['fetch']) | |
|
763 | if self._githavelocally(revision): | |
|
764 | return | |
|
765 | # then try from known subrepo source | |
|
766 | self._gitcommand(['fetch', source]) | |
|
767 | if not self._githavelocally(revision): | |
|
768 | raise util.Abort(_("revision %s does not exist in subrepo %s\n") % | |
|
769 | (revision, self._relpath)) | |
|
770 | ||
|
771 | def dirty(self, ignoreupdate=False): | |
|
772 | if not ignoreupdate and self._state[1] != self._gitstate(): | |
|
773 | # different version checked out | |
|
774 | return True | |
|
775 | # check for staged changes or modified files; ignore untracked files | |
|
776 | out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) | |
|
777 | return code == 1 | |
|
778 | ||
|
779 | def get(self, state, overwrite=False): | |
|
780 | source, revision, kind = state | |
|
781 | self._fetch(source, revision) | |
|
782 | # if the repo was set to be bare, unbare it | |
|
783 | if self._gitcommand(['config', '--bool', 'core.bare']) == 'true': | |
|
784 | self._gitcommand(['config', 'core.bare', 'false']) | |
|
785 | if self._gitstate() == revision: | |
|
786 | self._gitcommand(['reset', '--hard', 'HEAD']) | |
|
787 | return | |
|
788 | elif self._gitstate() == revision: | |
|
789 | if overwrite: | |
|
790 | # first reset the index to unmark new files for commit, because | |
|
791 | # reset --hard will otherwise throw away files added for commit, | |
|
792 | # not just unmark them. | |
|
793 | self._gitcommand(['reset', 'HEAD']) | |
|
794 | self._gitcommand(['reset', '--hard', 'HEAD']) | |
|
795 | return | |
|
796 | branch2rev, rev2branch = self._gitbranchmap() | |
|
797 | ||
|
798 | def checkout(args): | |
|
799 | cmd = ['checkout'] | |
|
800 | if overwrite: | |
|
801 | # first reset the index to unmark new files for commit, because | |
|
802 | # the -f option will otherwise throw away files added for | |
|
803 | # commit, not just unmark them. | |
|
804 | self._gitcommand(['reset', 'HEAD']) | |
|
805 | cmd.append('-f') | |
|
806 | self._gitcommand(cmd + args) | |
|
807 | ||
|
808 | def rawcheckout(): | |
|
809 | # no branch to checkout, check it out with no branch | |
|
810 | self._ui.warn(_('checking out detached HEAD in subrepo %s\n') % | |
|
811 | self._relpath) | |
|
812 | self._ui.warn(_('check out a git branch if you intend ' | |
|
813 | 'to make changes\n')) | |
|
814 | checkout(['-q', revision]) | |
|
815 | ||
|
816 | if revision not in rev2branch: | |
|
817 | rawcheckout() | |
|
818 | return | |
|
819 | branches = rev2branch[revision] | |
|
820 | firstlocalbranch = None | |
|
821 | for b in branches: | |
|
822 | if b == 'refs/heads/master': | |
|
823 | # master trumps all other branches | |
|
824 | checkout(['refs/heads/master']) | |
|
825 | return | |
|
826 | if not firstlocalbranch and not b.startswith('refs/remotes/'): | |
|
827 | firstlocalbranch = b | |
|
828 | if firstlocalbranch: | |
|
829 | checkout([firstlocalbranch]) | |
|
830 | return | |
|
831 | ||
|
832 | tracking = self._gittracking(branch2rev.keys()) | |
|
833 | # choose a remote branch already tracked if possible | |
|
834 | remote = branches[0] | |
|
835 | if remote not in tracking: | |
|
836 | for b in branches: | |
|
837 | if b in tracking: | |
|
838 | remote = b | |
|
839 | break | |
|
840 | ||
|
841 | if remote not in tracking: | |
|
842 | # create a new local tracking branch | |
|
843 | local = remote.split('/', 2)[2] | |
|
844 | checkout(['-b', local, remote]) | |
|
845 | elif self._gitisancestor(branch2rev[tracking[remote]], remote): | |
|
846 | # When updating to a tracked remote branch, | |
|
847 | # if the local tracking branch is downstream of it, | |
|
848 | # a normal `git pull` would have performed a "fast-forward merge" | |
|
849 | # which is equivalent to updating the local branch to the remote. | |
|
850 | # Since we are only looking at branching at update, we need to | |
|
851 | # detect this situation and perform this action lazily. | |
|
852 | if tracking[remote] != self._gitcurrentbranch(): | |
|
853 | checkout([tracking[remote]]) | |
|
854 | self._gitcommand(['merge', '--ff', remote]) | |
|
855 | else: | |
|
856 | # a real merge would be required, just checkout the revision | |
|
857 | rawcheckout() | |
|
858 | ||
|
859 | def commit(self, text, user, date): | |
|
860 | cmd = ['commit', '-a', '-m', text] | |
|
861 | env = os.environ.copy() | |
|
862 | if user: | |
|
863 | cmd += ['--author', user] | |
|
864 | if date: | |
|
865 | # git's date parser silently ignores when seconds < 1e9 | |
|
866 | # convert to ISO8601 | |
|
867 | env['GIT_AUTHOR_DATE'] = util.datestr(date, | |
|
868 | '%Y-%m-%dT%H:%M:%S %1%2') | |
|
869 | self._gitcommand(cmd, env=env) | |
|
870 | # make sure commit works otherwise HEAD might not exist under certain | |
|
871 | # circumstances | |
|
872 | return self._gitstate() | |
|
873 | ||
|
874 | def merge(self, state): | |
|
875 | source, revision, kind = state | |
|
876 | self._fetch(source, revision) | |
|
877 | base = self._gitcommand(['merge-base', revision, self._state[1]]) | |
|
878 | out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) | |
|
879 | ||
|
880 | def mergefunc(): | |
|
881 | if base == revision: | |
|
882 | self.get(state) # fast forward merge | |
|
883 | elif base != self._state[1]: | |
|
884 | self._gitcommand(['merge', '--no-commit', revision]) | |
|
885 | ||
|
886 | if self.dirty(): | |
|
887 | if self._gitstate() != revision: | |
|
888 | dirty = self._gitstate() == self._state[1] or code != 0 | |
|
889 | if _updateprompt(self._ui, self, dirty, self._state[1], revision): | |
|
890 | mergefunc() | |
|
891 | else: | |
|
892 | mergefunc() | |
|
893 | ||
|
894 | def push(self, force): | |
|
895 | # if a branch in origin contains the revision, nothing to do | |
|
896 | branch2rev, rev2branch = self._gitbranchmap() | |
|
897 | if self._state[1] in rev2branch: | |
|
898 | for b in rev2branch[self._state[1]]: | |
|
899 | if b.startswith('refs/remotes/origin/'): | |
|
900 | return True | |
|
901 | for b, revision in branch2rev.iteritems(): | |
|
902 | if b.startswith('refs/remotes/origin/'): | |
|
903 | if self._gitisancestor(self._state[1], revision): | |
|
904 | return True | |
|
905 | # otherwise, try to push the currently checked out branch | |
|
906 | cmd = ['push'] | |
|
907 | if force: | |
|
908 | cmd.append('--force') | |
|
909 | ||
|
910 | current = self._gitcurrentbranch() | |
|
911 | if current: | |
|
912 | # determine if the current branch is even useful | |
|
913 | if not self._gitisancestor(self._state[1], current): | |
|
914 | self._ui.warn(_('unrelated git branch checked out ' | |
|
915 | 'in subrepo %s\n') % self._relpath) | |
|
916 | return False | |
|
917 | self._ui.status(_('pushing branch %s of subrepo %s\n') % | |
|
918 | (current.split('/', 2)[2], self._relpath)) | |
|
919 | self._gitcommand(cmd + ['origin', current]) | |
|
920 | return True | |
|
921 | else: | |
|
922 | self._ui.warn(_('no branch checked out in subrepo %s\n' | |
|
923 | 'cannot push revision %s') % | |
|
924 | (self._relpath, self._state[1])) | |
|
925 | return False | |
|
926 | ||
|
927 | def remove(self): | |
|
928 | if self.dirty(): | |
|
929 | self._ui.warn(_('not removing repo %s because ' | |
|
930 | 'it has changes.\n') % self._relpath) | |
|
931 | return | |
|
932 | # we can't fully delete the repository as it may contain | |
|
933 | # local-only history | |
|
934 | self._ui.note(_('removing subrepo %s\n') % self._relpath) | |
|
935 | self._gitcommand(['config', 'core.bare', 'true']) | |
|
936 | for f in os.listdir(self._abspath): | |
|
937 | if f == '.git': | |
|
938 | continue | |
|
939 | path = os.path.join(self._abspath, f) | |
|
940 | if os.path.isdir(path) and not os.path.islink(path): | |
|
941 | shutil.rmtree(path) | |
|
942 | else: | |
|
943 | os.remove(path) | |
|
944 | ||
|
945 | def archive(self, ui, archiver, prefix): | |
|
946 | source, revision = self._state | |
|
947 | self._fetch(source, revision) | |
|
948 | ||
|
949 | # Parse git's native archive command. | |
|
950 | # This should be much faster than manually traversing the trees | |
|
951 | # and objects with many subprocess calls. | |
|
952 | tarstream = self._gitcommand(['archive', revision], stream=True) | |
|
953 | tar = tarfile.open(fileobj=tarstream, mode='r|') | |
|
954 | relpath = subrelpath(self) | |
|
955 | ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files')) | |
|
956 | for i, info in enumerate(tar): | |
|
957 | if info.isdir(): | |
|
958 | continue | |
|
959 | if info.issym(): | |
|
960 | data = info.linkname | |
|
961 | else: | |
|
962 | data = tar.extractfile(info).read() | |
|
963 | archiver.addfile(os.path.join(prefix, self._path, info.name), | |
|
964 | info.mode, info.issym(), data) | |
|
965 | ui.progress(_('archiving (%s)') % relpath, i + 1, | |
|
966 | unit=_('files')) | |
|
967 | ui.progress(_('archiving (%s)') % relpath, None) | |
|
968 | ||
|
969 | ||
|
970 | def status(self, rev2, **opts): | |
|
971 | rev1 = self._state[1] | |
|
972 | modified, added, removed = [], [], [] | |
|
973 | if rev2: | |
|
974 | command = ['diff-tree', rev1, rev2] | |
|
975 | else: | |
|
976 | command = ['diff-index', rev1] | |
|
977 | out = self._gitcommand(command) | |
|
978 | for line in out.split('\n'): | |
|
979 | tab = line.find('\t') | |
|
980 | if tab == -1: | |
|
981 | continue | |
|
982 | status, f = line[tab - 1], line[tab + 1:] | |
|
983 | if status == 'M': | |
|
984 | modified.append(f) | |
|
985 | elif status == 'A': | |
|
986 | added.append(f) | |
|
987 | elif status == 'D': | |
|
988 | removed.append(f) | |
|
989 | ||
|
990 | deleted = unknown = ignored = clean = [] | |
|
991 | return modified, added, removed, deleted, unknown, ignored, clean | |
|
992 | ||
|
619 | 993 | types = { |
|
620 | 994 | 'hg': hgsubrepo, |
|
621 | 995 | 'svn': svnsubrepo, |
|
996 | 'git': gitsubrepo, | |
|
622 | 997 | } |
@@ -12,6 +12,7 b'' | |||
|
12 | 12 | |
|
13 | 13 | from node import nullid, bin, hex, short |
|
14 | 14 | from i18n import _ |
|
15 | import os.path | |
|
15 | 16 | import encoding |
|
16 | 17 | import error |
|
17 | 18 | |
@@ -99,9 +100,6 b' def _readtags(ui, repo, lines, fn, recod' | |||
|
99 | 100 | except TypeError: |
|
100 | 101 | warn(_("node '%s' is not well formed") % nodehex) |
|
101 | 102 | continue |
|
102 | if nodebin not in repo.changelog.nodemap: | |
|
103 | # silently ignore as pull -r might cause this | |
|
104 | continue | |
|
105 | 103 | |
|
106 | 104 | # update filetags |
|
107 | 105 | hist = [] |
@@ -154,7 +152,7 b' def _readtagcache(ui, repo):' | |||
|
154 | 152 | set, caller is responsible for reading tag info from each head.''' |
|
155 | 153 | |
|
156 | 154 | try: |
|
157 |
cachefile = repo.opener(' |
|
|
155 | cachefile = repo.opener('cache/tags', 'r') | |
|
158 | 156 | # force reading the file for static-http |
|
159 | 157 | cachelines = iter(cachefile) |
|
160 | 158 | except IOError: |
@@ -188,8 +186,8 b' def _readtagcache(ui, repo):' | |||
|
188 | 186 | fnode = bin(line[2]) |
|
189 | 187 | cachefnode[headnode] = fnode |
|
190 | 188 | except (ValueError, TypeError): |
|
191 |
# corruption of t |
|
|
192 |
ui.warn(_('.hg/ |
|
|
189 | # corruption of the tags cache, just recompute it | |
|
190 | ui.warn(_('.hg/cache/tags is corrupt, rebuilding it\n')) | |
|
193 | 191 | cacheheads = [] |
|
194 | 192 | cacherevs = [] |
|
195 | 193 | cachefnode = {} |
@@ -251,7 +249,7 b' def _readtagcache(ui, repo):' | |||
|
251 | 249 | def _writetagcache(ui, repo, heads, tagfnode, cachetags): |
|
252 | 250 | |
|
253 | 251 | try: |
|
254 |
cachefile = repo.opener(' |
|
|
252 | cachefile = repo.opener('cache/tags', 'w', atomictemp=True) | |
|
255 | 253 | except (OSError, IOError): |
|
256 | 254 | return |
|
257 | 255 |
@@ -145,12 +145,18 b' def getrenamedfn(repo, endrev=None):' | |||
|
145 | 145 | def showauthor(repo, ctx, templ, **args): |
|
146 | 146 | return ctx.user() |
|
147 | 147 | |
|
148 | def showbranch(**args): | |
|
149 | return args['ctx'].branch() | |
|
150 | ||
|
148 | 151 | def showbranches(**args): |
|
149 | 152 | branch = args['ctx'].branch() |
|
150 | 153 | if branch != 'default': |
|
151 | branch = encoding.tolocal(branch) | |
|
152 | 154 | return showlist('branch', [branch], plural='branches', **args) |
|
153 | 155 | |
|
156 | def showbookmarks(**args): | |
|
157 | bookmarks = args['ctx'].bookmarks() | |
|
158 | return showlist('bookmark', bookmarks, **args) | |
|
159 | ||
|
154 | 160 | def showchildren(**args): |
|
155 | 161 | ctx = args['ctx'] |
|
156 | 162 | childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()] |
@@ -163,9 +169,8 b' def showdescription(repo, ctx, templ, **' | |||
|
163 | 169 | return ctx.description().strip() |
|
164 | 170 | |
|
165 | 171 | def showdiffstat(repo, ctx, templ, **args): |
|
166 | diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node()) | |
|
167 | 172 | files, adds, removes = 0, 0, 0 |
|
168 | for i in patch.diffstatdata(util.iterlines(diff)): | |
|
173 | for i in patch.diffstatdata(util.iterlines(ctx.diff())): | |
|
169 | 174 | files += 1 |
|
170 | 175 | adds += i[1] |
|
171 | 176 | removes += i[2] |
@@ -249,7 +254,9 b' def showtags(**args):' | |||
|
249 | 254 | # revcache - a cache dictionary for the current revision |
|
250 | 255 | keywords = { |
|
251 | 256 | 'author': showauthor, |
|
257 | 'branch': showbranch, | |
|
252 | 258 | 'branches': showbranches, |
|
259 | 'bookmarks': showbookmarks, | |
|
253 | 260 | 'children': showchildren, |
|
254 | 261 | 'date': showdate, |
|
255 | 262 | 'desc': showdescription, |
@@ -7,7 +7,192 b'' | |||
|
7 | 7 | |
|
8 | 8 | from i18n import _ |
|
9 | 9 | import sys, os |
|
10 | import util, config, templatefilters | |
|
10 | import util, config, templatefilters, parser, error | |
|
11 | ||
|
12 | # template parsing | |
|
13 | ||
|
14 | elements = { | |
|
15 | "(": (20, ("group", 1, ")"), ("func", 1, ")")), | |
|
16 | ",": (2, None, ("list", 2)), | |
|
17 | "|": (5, None, ("|", 5)), | |
|
18 | "%": (6, None, ("%", 6)), | |
|
19 | ")": (0, None, None), | |
|
20 | "symbol": (0, ("symbol",), None), | |
|
21 | "string": (0, ("string",), None), | |
|
22 | "end": (0, None, None), | |
|
23 | } | |
|
24 | ||
|
25 | def tokenizer(data): | |
|
26 | program, start, end = data | |
|
27 | pos = start | |
|
28 | while pos < end: | |
|
29 | c = program[pos] | |
|
30 | if c.isspace(): # skip inter-token whitespace | |
|
31 | pass | |
|
32 | elif c in "(,)%|": # handle simple operators | |
|
33 | yield (c, None, pos) | |
|
34 | elif (c in '"\'' or c == 'r' and | |
|
35 | program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings | |
|
36 | if c == 'r': | |
|
37 | pos += 1 | |
|
38 | c = program[pos] | |
|
39 | decode = lambda x: x | |
|
40 | else: | |
|
41 | decode = lambda x: x.decode('string-escape') | |
|
42 | pos += 1 | |
|
43 | s = pos | |
|
44 | while pos < end: # find closing quote | |
|
45 | d = program[pos] | |
|
46 | if d == '\\': # skip over escaped characters | |
|
47 | pos += 2 | |
|
48 | continue | |
|
49 | if d == c: | |
|
50 | yield ('string', decode(program[s:pos]), s) | |
|
51 | break | |
|
52 | pos += 1 | |
|
53 | else: | |
|
54 | raise error.ParseError(_("unterminated string"), s) | |
|
55 | elif c.isalnum() or c in '_': | |
|
56 | s = pos | |
|
57 | pos += 1 | |
|
58 | while pos < end: # find end of symbol | |
|
59 | d = program[pos] | |
|
60 | if not (d.isalnum() or d == "_"): | |
|
61 | break | |
|
62 | pos += 1 | |
|
63 | sym = program[s:pos] | |
|
64 | yield ('symbol', sym, s) | |
|
65 | pos -= 1 | |
|
66 | elif c == '}': | |
|
67 | pos += 1 | |
|
68 | break | |
|
69 | else: | |
|
70 | raise error.ParseError(_("syntax error"), pos) | |
|
71 | pos += 1 | |
|
72 | data[2] = pos | |
|
73 | yield ('end', None, pos) | |
|
74 | ||
|
75 | def compiletemplate(tmpl, context): | |
|
76 | parsed = [] | |
|
77 | pos, stop = 0, len(tmpl) | |
|
78 | p = parser.parser(tokenizer, elements) | |
|
79 | ||
|
80 | while pos < stop: | |
|
81 | n = tmpl.find('{', pos) | |
|
82 | if n < 0: | |
|
83 | parsed.append(("string", tmpl[pos:])) | |
|
84 | break | |
|
85 | if n > 0 and tmpl[n - 1] == '\\': | |
|
86 | # escaped | |
|
87 | parsed.append(("string", tmpl[pos:n - 1] + "{")) | |
|
88 | pos = n + 1 | |
|
89 | continue | |
|
90 | if n > pos: | |
|
91 | parsed.append(("string", tmpl[pos:n])) | |
|
92 | ||
|
93 | pd = [tmpl, n + 1, stop] | |
|
94 | parsed.append(p.parse(pd)) | |
|
95 | pos = pd[2] | |
|
96 | ||
|
97 | return [compileexp(e, context) for e in parsed] | |
|
98 | ||
|
99 | def compileexp(exp, context): | |
|
100 | t = exp[0] | |
|
101 | if t in methods: | |
|
102 | return methods[t](exp, context) | |
|
103 | raise error.ParseError(_("unknown method '%s'") % t) | |
|
104 | ||
|
105 | # template evaluation | |
|
106 | ||
|
107 | def getsymbol(exp): | |
|
108 | if exp[0] == 'symbol': | |
|
109 | return exp[1] | |
|
110 | raise error.ParseError(_("expected a symbol")) | |
|
111 | ||
|
112 | def getlist(x): | |
|
113 | if not x: | |
|
114 | return [] | |
|
115 | if x[0] == 'list': | |
|
116 | return getlist(x[1]) + [x[2]] | |
|
117 | return [x] | |
|
118 | ||
|
119 | def getfilter(exp, context): | |
|
120 | f = getsymbol(exp) | |
|
121 | if f not in context._filters: | |
|
122 | raise error.ParseError(_("unknown function '%s'") % f) | |
|
123 | return context._filters[f] | |
|
124 | ||
|
125 | def gettemplate(exp, context): | |
|
126 | if exp[0] == 'string': | |
|
127 | return compiletemplate(exp[1], context) | |
|
128 | if exp[0] == 'symbol': | |
|
129 | return context._load(exp[1]) | |
|
130 | raise error.ParseError(_("expected template specifier")) | |
|
131 | ||
|
132 | def runstring(context, mapping, data): | |
|
133 | return data | |
|
134 | ||
|
135 | def runsymbol(context, mapping, key): | |
|
136 | v = mapping.get(key) | |
|
137 | if v is None: | |
|
138 | v = context._defaults.get(key, '') | |
|
139 | if hasattr(v, '__call__'): | |
|
140 | return v(**mapping) | |
|
141 | return v | |
|
142 | ||
|
143 | def buildfilter(exp, context): | |
|
144 | func, data = compileexp(exp[1], context) | |
|
145 | filt = getfilter(exp[2], context) | |
|
146 | return (runfilter, (func, data, filt)) | |
|
147 | ||
|
148 | def runfilter(context, mapping, data): | |
|
149 | func, data, filt = data | |
|
150 | return filt(func(context, mapping, data)) | |
|
151 | ||
|
152 | def buildmap(exp, context): | |
|
153 | func, data = compileexp(exp[1], context) | |
|
154 | ctmpl = gettemplate(exp[2], context) | |
|
155 | return (runmap, (func, data, ctmpl)) | |
|
156 | ||
|
157 | def runmap(context, mapping, data): | |
|
158 | func, data, ctmpl = data | |
|
159 | d = func(context, mapping, data) | |
|
160 | lm = mapping.copy() | |
|
161 | ||
|
162 | for i in d: | |
|
163 | if isinstance(i, dict): | |
|
164 | lm.update(i) | |
|
165 | for f, d in ctmpl: | |
|
166 | yield f(context, lm, d) | |
|
167 | else: | |
|
168 | # v is not an iterable of dicts, this happen when 'key' | |
|
169 | # has been fully expanded already and format is useless. | |
|
170 | # If so, return the expanded value. | |
|
171 | yield i | |
|
172 | ||
|
173 | def buildfunc(exp, context): | |
|
174 | n = getsymbol(exp[1]) | |
|
175 | args = [compileexp(x, context) for x in getlist(exp[2])] | |
|
176 | if n in context._filters: | |
|
177 | if len(args) != 1: | |
|
178 | raise error.ParseError(_("filter %s expects one argument") % n) | |
|
179 | f = context._filters[n] | |
|
180 | return (runfilter, (args[0][0], args[0][1], f)) | |
|
181 | elif n in context._funcs: | |
|
182 | f = context._funcs[n] | |
|
183 | return (f, args) | |
|
184 | ||
|
185 | methods = { | |
|
186 | "string": lambda e, c: (runstring, e[1]), | |
|
187 | "symbol": lambda e, c: (runsymbol, e[1]), | |
|
188 | "group": lambda e, c: compileexp(e[1], c), | |
|
189 | # ".": buildmember, | |
|
190 | "|": buildfilter, | |
|
191 | "%": buildmap, | |
|
192 | "func": buildfunc, | |
|
193 | } | |
|
194 | ||
|
195 | # template engine | |
|
11 | 196 | |
|
12 | 197 | path = ['templates', '../templates'] |
|
13 | 198 | stringify = templatefilters.stringify |
@@ -66,104 +251,18 b' class engine(object):' | |||
|
66 | 251 | self._defaults = defaults |
|
67 | 252 | self._cache = {} |
|
68 | 253 | |
|
254 | def _load(self, t): | |
|
255 | '''load, parse, and cache a template''' | |
|
256 | if t not in self._cache: | |
|
257 | self._cache[t] = compiletemplate(self._loader(t), self) | |
|
258 | return self._cache[t] | |
|
259 | ||
|
69 | 260 | def process(self, t, mapping): |
|
70 | 261 | '''Perform expansion. t is name of map element to expand. |
|
71 | 262 | mapping contains added elements for use during expansion. Is a |
|
72 | 263 | generator.''' |
|
73 |
return _flatten( |
|
|
74 | ||
|
75 | def _load(self, t): | |
|
76 | '''load, parse, and cache a template''' | |
|
77 | if t not in self._cache: | |
|
78 | self._cache[t] = self._parse(self._loader(t)) | |
|
79 | return self._cache[t] | |
|
80 | ||
|
81 | def _get(self, mapping, key): | |
|
82 | v = mapping.get(key) | |
|
83 | if v is None: | |
|
84 | v = self._defaults.get(key, '') | |
|
85 | if hasattr(v, '__call__'): | |
|
86 | v = v(**mapping) | |
|
87 | return v | |
|
88 | ||
|
89 | def _filter(self, mapping, parts): | |
|
90 | filters, val = parts | |
|
91 | x = self._get(mapping, val) | |
|
92 | for f in filters: | |
|
93 | x = f(x) | |
|
94 | return x | |
|
95 | ||
|
96 | def _format(self, mapping, args): | |
|
97 | key, parsed = args | |
|
98 | v = self._get(mapping, key) | |
|
99 | if not hasattr(v, '__iter__'): | |
|
100 | raise SyntaxError(_("error expanding '%s%%%s'") | |
|
101 | % (key, parsed)) | |
|
102 | lm = mapping.copy() | |
|
103 | for i in v: | |
|
104 | if isinstance(i, dict): | |
|
105 | lm.update(i) | |
|
106 | yield self._process(parsed, lm) | |
|
107 | else: | |
|
108 | # v is not an iterable of dicts, this happen when 'key' | |
|
109 | # has been fully expanded already and format is useless. | |
|
110 | # If so, return the expanded value. | |
|
111 | yield i | |
|
112 | ||
|
113 | def _parse(self, tmpl): | |
|
114 | '''preparse a template''' | |
|
115 | parsed = [] | |
|
116 | pos, stop = 0, len(tmpl) | |
|
117 | while pos < stop: | |
|
118 | n = tmpl.find('{', pos) | |
|
119 | if n < 0: | |
|
120 | parsed.append((None, tmpl[pos:stop])) | |
|
121 | break | |
|
122 | if n > 0 and tmpl[n - 1] == '\\': | |
|
123 | # escaped | |
|
124 | parsed.append((None, tmpl[pos:n - 1] + "{")) | |
|
125 | pos = n + 1 | |
|
126 | continue | |
|
127 | if n > pos: | |
|
128 | parsed.append((None, tmpl[pos:n])) | |
|
129 | ||
|
130 | pos = n | |
|
131 | n = tmpl.find('}', pos) | |
|
132 | if n < 0: | |
|
133 | # no closing | |
|
134 | parsed.append((None, tmpl[pos:stop])) | |
|
135 | break | |
|
136 | ||
|
137 | expr = tmpl[pos + 1:n] | |
|
138 | pos = n + 1 | |
|
139 | ||
|
140 | if '%' in expr: | |
|
141 | # the keyword should be formatted with a template | |
|
142 | key, t = expr.split('%') | |
|
143 | parsed.append((self._format, (key.strip(), | |
|
144 | self._load(t.strip())))) | |
|
145 | elif '|' in expr: | |
|
146 | # process the keyword value with one or more filters | |
|
147 | parts = expr.split('|') | |
|
148 | val = parts[0].strip() | |
|
149 | try: | |
|
150 | filters = [self._filters[f.strip()] for f in parts[1:]] | |
|
151 | except KeyError, i: | |
|
152 | raise SyntaxError(_("unknown filter '%s'") % i[0]) | |
|
153 | parsed.append((self._filter, (filters, val))) | |
|
154 | else: | |
|
155 | # just get the keyword | |
|
156 | parsed.append((self._get, expr.strip())) | |
|
157 | ||
|
158 | return parsed | |
|
159 | ||
|
160 | def _process(self, parsed, mapping): | |
|
161 | '''Render a template. Returns a generator.''' | |
|
162 | for f, e in parsed: | |
|
163 | if f: | |
|
164 | yield f(mapping, e) | |
|
165 | else: | |
|
166 | yield e | |
|
264 | return _flatten(func(self, mapping, data) for func, data in | |
|
265 | self._load(t)) | |
|
167 | 266 | |
|
168 | 267 | engines = {'default': engine} |
|
169 | 268 | |
@@ -183,7 +282,7 b' class templater(object):' | |||
|
183 | 282 | self.filters.update(filters) |
|
184 | 283 | self.defaults = defaults |
|
185 | 284 | self.minchunk, self.maxchunk = minchunk, maxchunk |
|
186 |
self.e |
|
|
285 | self.ecache = {} | |
|
187 | 286 | |
|
188 | 287 | if not mapfile: |
|
189 | 288 | return |
@@ -214,6 +313,8 b' class templater(object):' | |||
|
214 | 313 | if not t in self.cache: |
|
215 | 314 | try: |
|
216 | 315 | self.cache[t] = open(self.map[t][1]).read() |
|
316 | except KeyError, inst: | |
|
317 | raise util.Abort(_('"%s" not in template map') % inst.args[0]) | |
|
217 | 318 | except IOError, inst: |
|
218 | 319 | raise IOError(inst.args[0], _('template file %s: %s') % |
|
219 | 320 | (self.map[t][1], inst.args[1])) |
@@ -221,10 +322,10 b' class templater(object):' | |||
|
221 | 322 | |
|
222 | 323 | def __call__(self, t, **mapping): |
|
223 | 324 | ttype = t in self.map and self.map[t][0] or 'default' |
|
224 | proc = self.engines.get(ttype) | |
|
225 | if proc is None: | |
|
226 | proc = engines[ttype](self.load, self.filters, self.defaults) | |
|
227 |
|
|
|
325 | if ttype not in self.ecache: | |
|
326 | self.ecache[ttype] = engines[ttype](self.load, | |
|
327 | self.filters, self.defaults) | |
|
328 | proc = self.ecache[ttype] | |
|
228 | 329 | |
|
229 | 330 | stream = proc.process(t, mapping) |
|
230 | 331 | if self.minchunk: |
@@ -1,7 +1,7 b'' | |||
|
1 | changeset = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' | |
|
1 | changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' | |
|
2 | 2 | changeset_quiet = '{rev}:{node|short}\n' |
|
3 | changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n' | |
|
4 | changeset_debug = 'changeset: {rev}:{node}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n' | |
|
3 | changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n' | |
|
4 | changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n' | |
|
5 | 5 | start_files = 'files: ' |
|
6 | 6 | file = ' {file}' |
|
7 | 7 | end_files = '\n' |
@@ -21,4 +21,5 b" parent = 'parent: {rev}:{node|forma" | |||
|
21 | 21 | manifest = 'manifest: {rev}:{node}\n' |
|
22 | 22 | branch = 'branch: {branch}\n' |
|
23 | 23 | tag = 'tag: {tag}\n' |
|
24 | bookmark = 'bookmark: {bookmark}\n' | |
|
24 | 25 | extra = 'extra: {key}={value|stringescape}\n' |
@@ -1,9 +1,9 b'' | |||
|
1 | 1 | header = '<?xml version="1.0"?>\n<log>\n' |
|
2 | 2 | footer = '</log>\n' |
|
3 | 3 | |
|
4 | changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n' | |
|
5 | changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n' | |
|
6 | changeset_debug = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}{extras}</logentry>\n' | |
|
4 | changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n' | |
|
5 | changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n' | |
|
6 | changeset_debug = '<logentry revision="{rev}" node="{node}">\n{branches}{bookmarks}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}{extras}</logentry>\n' | |
|
7 | 7 | |
|
8 | 8 | file_add = '<path action="A">{file_add|xmlescape}</path>\n' |
|
9 | 9 | file_mod = '<path action="M">{file_mod|xmlescape}</path>\n' |
@@ -16,4 +16,5 b" end_file_copies = '</copies>\\n'" | |||
|
16 | 16 | parent = '<parent revision="{rev}" node="{node}" />\n' |
|
17 | 17 | branch = '<branch>{branch|xmlescape}</branch>\n' |
|
18 | 18 | tag = '<tag>{tag|xmlescape}</tag>\n' |
|
19 | bookmark = '<bookmark>{bookmark|xmlescape}</bookmark>\n' | |
|
19 | 20 | extra = '<extra key="{key|xmlescape}">{value|xmlescape}</extra>\n' |
@@ -40,7 +40,18 b' files, or words in the commit message</d' | |||
|
40 | 40 | <th>branch</th> |
|
41 | 41 | <th>node</th> |
|
42 | 42 | </tr> |
|
43 | {entries%branchentry} | |
|
43 | {entries % | |
|
44 | ' <tr class="tagEntry parity{parity}"> | |
|
45 | <td> | |
|
46 | <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}"> | |
|
47 | {branch|escape} | |
|
48 | </a> | |
|
49 | </td> | |
|
50 | <td class="node"> | |
|
51 | {node|short} | |
|
52 | </td> | |
|
53 | </tr>' | |
|
54 | } | |
|
44 | 55 | </table> |
|
45 | 56 | </div> |
|
46 | 57 | </div> |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | <tr class="parity{parity}"> |
|
2 |
<td class="age">{ |
|
|
2 | <td class="age">{age(date)}</td> | |
|
3 | 3 | <td class="author">{author|person}</td> |
|
4 |
<td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags |
|
|
4 | <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags % '<span class="tag">{name|escape}</span> '}</td> | |
|
5 | 5 | </tr> |
@@ -13,7 +13,7 b'' | |||
|
13 | 13 | |
|
14 | 14 | from i18n import _ |
|
15 | 15 | import os, errno |
|
16 | import error | |
|
16 | import error, util | |
|
17 | 17 | |
|
18 | 18 | def active(func): |
|
19 | 19 | def _active(self, *args, **kwds): |
@@ -27,18 +27,22 b' def _playback(journal, report, opener, e' | |||
|
27 | 27 | for f, o, ignore in entries: |
|
28 | 28 | if o or not unlink: |
|
29 | 29 | try: |
|
30 |
opener(f, 'a') |
|
|
30 | fp = opener(f, 'a') | |
|
31 | fp.truncate(o) | |
|
32 | fp.close() | |
|
31 | 33 | except IOError: |
|
32 | 34 | report(_("failed to truncate %s\n") % f) |
|
33 | 35 | raise |
|
34 | 36 | else: |
|
35 | 37 | try: |
|
36 |
f |
|
|
37 |
|
|
|
38 | fp = opener(f) | |
|
39 | fn = fp.name | |
|
40 | fp.close() | |
|
41 | util.unlink(fn) | |
|
38 | 42 | except (IOError, OSError), inst: |
|
39 | 43 | if inst.errno != errno.ENOENT: |
|
40 | 44 | raise |
|
41 |
|
|
|
45 | util.unlink(journal) | |
|
42 | 46 | |
|
43 | 47 | class transaction(object): |
|
44 | 48 | def __init__(self, report, opener, journal, after=None, createmode=None): |
@@ -52,7 +56,7 b' class transaction(object):' | |||
|
52 | 56 | self.journal = journal |
|
53 | 57 | self._queue = [] |
|
54 | 58 | |
|
55 |
self.file = |
|
|
59 | self.file = util.posixfile(self.journal, "w") | |
|
56 | 60 | if createmode is not None: |
|
57 | 61 | os.chmod(self.journal, createmode & 0666) |
|
58 | 62 | |
@@ -133,7 +137,7 b' class transaction(object):' | |||
|
133 | 137 | if self.after: |
|
134 | 138 | self.after() |
|
135 | 139 | if os.path.isfile(self.journal): |
|
136 |
|
|
|
140 | util.unlink(self.journal) | |
|
137 | 141 | self.journal = None |
|
138 | 142 | |
|
139 | 143 | @active |
@@ -151,7 +155,7 b' class transaction(object):' | |||
|
151 | 155 | try: |
|
152 | 156 | if not self.entries: |
|
153 | 157 | if self.journal: |
|
154 |
|
|
|
158 | util.unlink(self.journal) | |
|
155 | 159 | return |
|
156 | 160 | |
|
157 | 161 | self.report(_("transaction abort!\n")) |
@@ -169,7 +173,10 b' class transaction(object):' | |||
|
169 | 173 | def rollback(opener, file, report): |
|
170 | 174 | entries = [] |
|
171 | 175 | |
|
172 | for l in open(file).readlines(): | |
|
176 | fp = util.posixfile(file) | |
|
177 | lines = fp.readlines() | |
|
178 | fp.close() | |
|
179 | for l in lines: | |
|
173 | 180 | f, o = l.split('\0') |
|
174 | 181 | entries.append((f, int(o), None)) |
|
175 | 182 |
@@ -153,6 +153,16 b' class ui(object):' | |||
|
153 | 153 | "%s.%s = %s\n") % (section, name, uvalue)) |
|
154 | 154 | return value |
|
155 | 155 | |
|
156 | def configpath(self, section, name, default=None, untrusted=False): | |
|
157 | 'get a path config item, expanded relative to config file' | |
|
158 | v = self.config(section, name, default, untrusted) | |
|
159 | if not os.path.isabs(v) or "://" not in v: | |
|
160 | src = self.configsource(section, name, untrusted) | |
|
161 | if ':' in src: | |
|
162 | base = os.path.dirname(src.rsplit(':')) | |
|
163 | v = os.path.join(base, os.path.expanduser(v)) | |
|
164 | return v | |
|
165 | ||
|
156 | 166 | def configbool(self, section, name, default=False, untrusted=False): |
|
157 | 167 | v = self.config(section, name, None, untrusted) |
|
158 | 168 | if v is None: |
@@ -589,7 +599,7 b' class ui(object):' | |||
|
589 | 599 | termination. |
|
590 | 600 | ''' |
|
591 | 601 | |
|
592 |
if pos |
|
|
602 | if pos is None or not self.debugflag: | |
|
593 | 603 | return |
|
594 | 604 | |
|
595 | 605 | if unit: |
@@ -71,6 +71,38 b' def netlocunsplit(host, port, user=None,' | |||
|
71 | 71 | return userpass + '@' + hostport |
|
72 | 72 | return hostport |
|
73 | 73 | |
|
74 | def readauthforuri(ui, uri): | |
|
75 | # Read configuration | |
|
76 | config = dict() | |
|
77 | for key, val in ui.configitems('auth'): | |
|
78 | if '.' not in key: | |
|
79 | ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) | |
|
80 | continue | |
|
81 | group, setting = key.rsplit('.', 1) | |
|
82 | gdict = config.setdefault(group, dict()) | |
|
83 | if setting in ('username', 'cert', 'key'): | |
|
84 | val = util.expandpath(val) | |
|
85 | gdict[setting] = val | |
|
86 | ||
|
87 | # Find the best match | |
|
88 | scheme, hostpath = uri.split('://', 1) | |
|
89 | bestlen = 0 | |
|
90 | bestauth = None | |
|
91 | for group, auth in config.iteritems(): | |
|
92 | prefix = auth.get('prefix') | |
|
93 | if not prefix: | |
|
94 | continue | |
|
95 | p = prefix.split('://', 1) | |
|
96 | if len(p) > 1: | |
|
97 | schemes, prefix = [p[0]], p[1] | |
|
98 | else: | |
|
99 | schemes = (auth.get('schemes') or 'https').split() | |
|
100 | if (prefix == '*' or hostpath.startswith(prefix)) and \ | |
|
101 | len(prefix) > bestlen and scheme in schemes: | |
|
102 | bestlen = len(prefix) | |
|
103 | bestauth = group, auth | |
|
104 | return bestauth | |
|
105 | ||
|
74 | 106 | _safe = ('abcdefghijklmnopqrstuvwxyz' |
|
75 | 107 | 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' |
|
76 | 108 | '0123456789' '_.-/') |
@@ -123,9 +155,11 b' class passwordmgr(urllib2.HTTPPasswordMg' | |||
|
123 | 155 | return (user, passwd) |
|
124 | 156 | |
|
125 | 157 | if not user: |
|
126 |
|
|
|
127 |
if |
|
|
158 | res = readauthforuri(self.ui, authuri) | |
|
159 | if res: | |
|
160 | group, auth = res | |
|
128 | 161 | user, passwd = auth.get('username'), auth.get('password') |
|
162 | self.ui.debug("using auth.%s.* for authentication\n" % group) | |
|
129 | 163 | if not user or not passwd: |
|
130 | 164 | if not self.ui.interactive(): |
|
131 | 165 | raise util.Abort(_('http authorization required')) |
@@ -148,38 +182,6 b' class passwordmgr(urllib2.HTTPPasswordMg' | |||
|
148 | 182 | msg = _('http auth: user %s, password %s\n') |
|
149 | 183 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) |
|
150 | 184 | |
|
151 | def readauthtoken(self, uri): | |
|
152 | # Read configuration | |
|
153 | config = dict() | |
|
154 | for key, val in self.ui.configitems('auth'): | |
|
155 | if '.' not in key: | |
|
156 | self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) | |
|
157 | continue | |
|
158 | group, setting = key.split('.', 1) | |
|
159 | gdict = config.setdefault(group, dict()) | |
|
160 | if setting in ('username', 'cert', 'key'): | |
|
161 | val = util.expandpath(val) | |
|
162 | gdict[setting] = val | |
|
163 | ||
|
164 | # Find the best match | |
|
165 | scheme, hostpath = uri.split('://', 1) | |
|
166 | bestlen = 0 | |
|
167 | bestauth = None | |
|
168 | for auth in config.itervalues(): | |
|
169 | prefix = auth.get('prefix') | |
|
170 | if not prefix: | |
|
171 | continue | |
|
172 | p = prefix.split('://', 1) | |
|
173 | if len(p) > 1: | |
|
174 | schemes, prefix = [p[0]], p[1] | |
|
175 | else: | |
|
176 | schemes = (auth.get('schemes') or 'https').split() | |
|
177 | if (prefix == '*' or hostpath.startswith(prefix)) and \ | |
|
178 | len(prefix) > bestlen and scheme in schemes: | |
|
179 | bestlen = len(prefix) | |
|
180 | bestauth = auth | |
|
181 | return bestauth | |
|
182 | ||
|
183 | 185 | class proxyhandler(urllib2.ProxyHandler): |
|
184 | 186 | def __init__(self, ui): |
|
185 | 187 | proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') |
@@ -258,29 +260,47 b' class httpsendfile(object):' | |||
|
258 | 260 | defines a __len__ attribute to feed the Content-Length header. |
|
259 | 261 | """ |
|
260 | 262 | |
|
261 | def __init__(self, *args, **kwargs): | |
|
263 | def __init__(self, ui, *args, **kwargs): | |
|
262 | 264 | # We can't just "self._data = open(*args, **kwargs)" here because there |
|
263 | 265 | # is an "open" function defined in this module that shadows the global |
|
264 | 266 | # one |
|
267 | self.ui = ui | |
|
265 | 268 | self._data = __builtin__.open(*args, **kwargs) |
|
266 | self.read = self._data.read | |
|
267 | 269 | self.seek = self._data.seek |
|
268 | 270 | self.close = self._data.close |
|
269 | 271 | self.write = self._data.write |
|
272 | self._len = os.fstat(self._data.fileno()).st_size | |
|
273 | self._pos = 0 | |
|
274 | self._total = len(self) / 1024 * 2 | |
|
275 | ||
|
276 | def read(self, *args, **kwargs): | |
|
277 | try: | |
|
278 | ret = self._data.read(*args, **kwargs) | |
|
279 | except EOFError: | |
|
280 | self.ui.progress(_('sending'), None) | |
|
281 | self._pos += len(ret) | |
|
282 | # We pass double the max for total because we currently have | |
|
283 | # to send the bundle twice in the case of a server that | |
|
284 | # requires authentication. Since we can't know until we try | |
|
285 | # once whether authentication will be required, just lie to | |
|
286 | # the user and maybe the push succeeds suddenly at 50%. | |
|
287 | self.ui.progress(_('sending'), self._pos / 1024, | |
|
288 | unit=_('kb'), total=self._total) | |
|
289 | return ret | |
|
270 | 290 | |
|
271 | 291 | def __len__(self): |
|
272 |
return |
|
|
292 | return self._len | |
|
273 | 293 | |
|
274 |
def _gen_sendfile( |
|
|
294 | def _gen_sendfile(orgsend): | |
|
275 | 295 | def _sendfile(self, data): |
|
276 | 296 | # send a file |
|
277 | 297 | if isinstance(data, httpsendfile): |
|
278 | 298 | # if auth required, some data sent twice, so rewind here |
|
279 | 299 | data.seek(0) |
|
280 | 300 | for chunk in util.filechunkiter(data): |
|
281 |
|
|
|
301 | orgsend(self, chunk) | |
|
282 | 302 | else: |
|
283 |
|
|
|
303 | orgsend(self, data) | |
|
284 | 304 | return _sendfile |
|
285 | 305 | |
|
286 | 306 | has_https = hasattr(urllib2, 'HTTPSHandler') |
@@ -333,7 +353,7 b' if has_https:' | |||
|
333 | 353 | |
|
334 | 354 | class httpconnection(keepalive.HTTPConnection): |
|
335 | 355 | # must be able to send big bundle as stream. |
|
336 | send = _gen_sendfile(keepalive.HTTPConnection) | |
|
356 | send = _gen_sendfile(keepalive.HTTPConnection.send) | |
|
337 | 357 | |
|
338 | 358 | def connect(self): |
|
339 | 359 | if has_https and self.realhostport: # use CONNECT proxy |
@@ -522,32 +542,36 b' def _verifycert(cert, hostname):' | |||
|
522 | 542 | return _('no commonName or subjectAltName found in certificate') |
|
523 | 543 | |
|
524 | 544 | if has_https: |
|
525 |
class |
|
|
526 |
|
|
|
545 | class httpsconnection(httplib.HTTPSConnection): | |
|
546 | response_class = keepalive.HTTPResponse | |
|
547 | # must be able to send big bundle as stream. | |
|
548 | send = _gen_sendfile(keepalive.safesend) | |
|
549 | getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection) | |
|
527 | 550 | |
|
528 | 551 | def connect(self): |
|
529 | if hasattr(self, 'ui'): | |
|
530 | cacerts = self.ui.config('web', 'cacerts') | |
|
531 | if cacerts: | |
|
532 | cacerts = util.expandpath(cacerts) | |
|
533 | else: | |
|
534 | cacerts = None | |
|
552 | self.sock = _create_connection((self.host, self.port)) | |
|
553 | ||
|
554 | host = self.host | |
|
555 | if self.realhostport: # use CONNECT proxy | |
|
556 | something = _generic_proxytunnel(self) | |
|
557 | host = self.realhostport.rsplit(':', 1)[0] | |
|
535 | 558 | |
|
536 |
|
|
|
559 | cacerts = self.ui.config('web', 'cacerts') | |
|
560 | hostfingerprint = self.ui.config('hostfingerprints', host) | |
|
561 | ||
|
537 | 562 | if cacerts and not hostfingerprint: |
|
538 |
sock = _ |
|
|
539 | self.sock = _ssl_wrap_socket(sock, self.key_file, | |
|
540 | self.cert_file, cert_reqs=CERT_REQUIRED, | |
|
541 | ca_certs=cacerts) | |
|
542 | msg = _verifycert(self.sock.getpeercert(), self.host) | |
|
563 | self.sock = _ssl_wrap_socket(self.sock, self.key_file, | |
|
564 | self.cert_file, cert_reqs=CERT_REQUIRED, | |
|
565 | ca_certs=util.expandpath(cacerts)) | |
|
566 | msg = _verifycert(self.sock.getpeercert(), host) | |
|
543 | 567 | if msg: |
|
544 | 568 | raise util.Abort(_('%s certificate error: %s ' |
|
545 | 569 | '(use --insecure to connect ' |
|
546 |
'insecurely)') % ( |
|
|
547 | self.ui.debug('%s certificate successfully verified\n' % | |
|
548 | self.host) | |
|
570 | 'insecurely)') % (host, msg)) | |
|
571 | self.ui.debug('%s certificate successfully verified\n' % host) | |
|
549 | 572 | else: |
|
550 | httplib.HTTPSConnection.connect(self) | |
|
573 | self.sock = _ssl_wrap_socket(self.sock, self.key_file, | |
|
574 | self.cert_file) | |
|
551 | 575 | if hasattr(self.sock, 'getpeercert'): |
|
552 | 576 | peercert = self.sock.getpeercert(True) |
|
553 | 577 | peerfingerprint = util.sha1(peercert).hexdigest() |
@@ -558,38 +582,22 b' if has_https:' | |||
|
558 | 582 | hostfingerprint.replace(':', '').lower(): |
|
559 | 583 | raise util.Abort(_('invalid certificate for %s ' |
|
560 | 584 | 'with fingerprint %s') % |
|
561 |
( |
|
|
585 | (host, nicefingerprint)) | |
|
562 | 586 | self.ui.debug('%s certificate matched fingerprint %s\n' % |
|
563 |
( |
|
|
587 | (host, nicefingerprint)) | |
|
564 | 588 | else: |
|
565 | 589 | self.ui.warn(_('warning: %s certificate ' |
|
566 | 590 | 'with fingerprint %s not verified ' |
|
567 | 591 | '(check hostfingerprints or web.cacerts ' |
|
568 | 592 | 'config setting)\n') % |
|
569 |
( |
|
|
593 | (host, nicefingerprint)) | |
|
570 | 594 | else: # python 2.5 ? |
|
571 | 595 | if hostfingerprint: |
|
572 | raise util.Abort(_('no certificate for %s ' | |
|
573 |
' |
|
|
596 | raise util.Abort(_('no certificate for %s with ' | |
|
597 | 'configured hostfingerprint') % host) | |
|
574 | 598 | self.ui.warn(_('warning: %s certificate not verified ' |
|
575 | 599 | '(check web.cacerts config setting)\n') % |
|
576 |
|
|
|
577 | ||
|
578 | class httpsconnection(BetterHTTPS): | |
|
579 | response_class = keepalive.HTTPResponse | |
|
580 | # must be able to send big bundle as stream. | |
|
581 | send = _gen_sendfile(BetterHTTPS) | |
|
582 | getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection) | |
|
583 | ||
|
584 | def connect(self): | |
|
585 | if self.realhostport: # use CONNECT proxy | |
|
586 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | |
|
587 | self.sock.connect((self.host, self.port)) | |
|
588 | if _generic_proxytunnel(self): | |
|
589 | self.sock = _ssl_wrap_socket(self.sock, self.key_file, | |
|
590 | self.cert_file) | |
|
591 | else: | |
|
592 | BetterHTTPS.connect(self) | |
|
600 | host) | |
|
593 | 601 | |
|
594 | 602 | class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): |
|
595 | 603 | def __init__(self, ui): |
@@ -603,7 +611,13 b' if has_https:' | |||
|
603 | 611 | return keepalive.KeepAliveHandler._start_transaction(self, h, req) |
|
604 | 612 | |
|
605 | 613 | def https_open(self, req): |
|
606 |
|
|
|
614 | res = readauthforuri(self.ui, req.get_full_url()) | |
|
615 | if res: | |
|
616 | group, auth = res | |
|
617 | self.auth = auth | |
|
618 | self.ui.debug("using auth.%s.* for authentication\n" % group) | |
|
619 | else: | |
|
620 | self.auth = None | |
|
607 | 621 | return self.do_open(self._makeconnection, req) |
|
608 | 622 | |
|
609 | 623 | def _makeconnection(self, host, port=None, *args, **kwargs): |
@@ -198,7 +198,10 b' def tempfilter(s, cmd):' | |||
|
198 | 198 | if code: |
|
199 | 199 | raise Abort(_("command '%s' failed: %s") % |
|
200 | 200 | (cmd, explain_exit(code))) |
|
201 |
|
|
|
201 | fp = open(outname, 'rb') | |
|
202 | r = fp.read() | |
|
203 | fp.close() | |
|
204 | return r | |
|
202 | 205 | finally: |
|
203 | 206 | try: |
|
204 | 207 | if inname: |
@@ -431,7 +434,7 b' def checksignature(func):' | |||
|
431 | 434 | |
|
432 | 435 | return check |
|
433 | 436 | |
|
434 | def unlink(f): | |
|
437 | def unlinkpath(f): | |
|
435 | 438 | """unlink and remove the directory if it is empty""" |
|
436 | 439 | os.unlink(f) |
|
437 | 440 | # try removing directories that might now be empty |
@@ -451,7 +454,7 b' def copyfile(src, dest):' | |||
|
451 | 454 | else: |
|
452 | 455 | try: |
|
453 | 456 | shutil.copyfile(src, dest) |
|
454 |
shutil.copy |
|
|
457 | shutil.copymode(src, dest) | |
|
455 | 458 | except shutil.Error, inst: |
|
456 | 459 | raise Abort(str(inst)) |
|
457 | 460 | |
@@ -487,6 +490,7 b' class path_auditor(object):' | |||
|
487 | 490 | '''ensure that a filesystem path contains no banned components. |
|
488 | 491 | the following properties of a path are checked: |
|
489 | 492 | |
|
493 | - ends with a directory separator | |
|
490 | 494 | - under top-level .hg |
|
491 | 495 | - starts at the root of a windows drive |
|
492 | 496 | - contains ".." |
@@ -504,6 +508,9 b' class path_auditor(object):' | |||
|
504 | 508 | def __call__(self, path): |
|
505 | 509 | if path in self.audited: |
|
506 | 510 | return |
|
511 | # AIX ignores "/" at end of path, others raise EISDIR. | |
|
512 | if endswithsep(path): | |
|
513 | raise Abort(_("path ends in directory separator: %s") % path) | |
|
507 | 514 | normpath = os.path.normcase(path) |
|
508 | 515 | parts = splitpath(normpath) |
|
509 | 516 | if (os.path.splitdrive(path)[0] |
@@ -550,16 +557,6 b' class path_auditor(object):' | |||
|
550 | 557 | # want to add "foo/bar/baz" before checking if there's a "foo/.hg" |
|
551 | 558 | self.auditeddir.update(prefixes) |
|
552 | 559 | |
|
553 | def nlinks(pathname): | |
|
554 | """Return number of hardlinks for the given file.""" | |
|
555 | return os.lstat(pathname).st_nlink | |
|
556 | ||
|
557 | if hasattr(os, 'link'): | |
|
558 | os_link = os.link | |
|
559 | else: | |
|
560 | def os_link(src, dst): | |
|
561 | raise OSError(0, _("Hardlinks not supported")) | |
|
562 | ||
|
563 | 560 | def lookup_reg(key, name=None, scope=None): |
|
564 | 561 | return None |
|
565 | 562 | |
@@ -597,7 +594,10 b' def readlock(pathname):' | |||
|
597 | 594 | raise |
|
598 | 595 | except AttributeError: # no symlink in os |
|
599 | 596 | pass |
|
600 |
|
|
|
597 | fp = posixfile(pathname) | |
|
598 | r = fp.read() | |
|
599 | fp.close() | |
|
600 | return r | |
|
601 | 601 | |
|
602 | 602 | def fstat(fp): |
|
603 | 603 | '''stat file object that may not have fileno method.''' |
@@ -738,7 +738,7 b' def checknlink(testfile):' | |||
|
738 | 738 | |
|
739 | 739 | # nlinks() may behave differently for files on Windows shares if |
|
740 | 740 | # the file is open. |
|
741 |
fd = |
|
|
741 | fd = posixfile(f2) | |
|
742 | 742 | return nlinks(f2) > 1 |
|
743 | 743 | finally: |
|
744 | 744 | if fd is not None: |
@@ -837,7 +837,7 b' class atomictempfile(object):' | |||
|
837 | 837 | self._fp.close() |
|
838 | 838 | rename(self.temp, localpath(self.__name)) |
|
839 | 839 | |
|
840 |
def |
|
|
840 | def close(self): | |
|
841 | 841 | if not self._fp: |
|
842 | 842 | return |
|
843 | 843 | if not self._fp.closed: |
@@ -846,6 +846,9 b' class atomictempfile(object):' | |||
|
846 | 846 | except: pass |
|
847 | 847 | self._fp.close() |
|
848 | 848 | |
|
849 | def __del__(self): | |
|
850 | self.close() | |
|
851 | ||
|
849 | 852 | def makedirs(name, mode=None): |
|
850 | 853 | """recursive directory creation with parent mode inheritance""" |
|
851 | 854 | parent = os.path.abspath(os.path.dirname(name)) |
@@ -894,7 +897,6 b' class opener(object):' | |||
|
894 | 897 | mode += "b" # for that other OS |
|
895 | 898 | |
|
896 | 899 | nlink = -1 |
|
897 | st_mode = None | |
|
898 | 900 | dirname, basename = os.path.split(f) |
|
899 | 901 | # If basename is empty, then the path is malformed because it points |
|
900 | 902 | # to a directory. Let the posixfile() call below raise IOError. |
@@ -905,18 +907,19 b' class opener(object):' | |||
|
905 | 907 | return atomictempfile(f, mode, self.createmode) |
|
906 | 908 | try: |
|
907 | 909 | if 'w' in mode: |
|
908 | st_mode = os.lstat(f).st_mode & 0777 | |
|
909 | os.unlink(f) | |
|
910 | unlink(f) | |
|
910 | 911 | nlink = 0 |
|
911 | 912 | else: |
|
912 | 913 | # nlinks() may behave differently for files on Windows |
|
913 | 914 | # shares if the file is open. |
|
914 |
fd = |
|
|
915 | fd = posixfile(f) | |
|
915 | 916 | nlink = nlinks(f) |
|
916 | 917 | if nlink < 1: |
|
917 | 918 | nlink = 2 # force mktempcopy (issue1922) |
|
918 | 919 | fd.close() |
|
919 | except (OSError, IOError): | |
|
920 | except (OSError, IOError), e: | |
|
921 | if e.errno != errno.ENOENT: | |
|
922 | raise | |
|
920 | 923 | nlink = 0 |
|
921 | 924 | if not os.path.isdir(dirname): |
|
922 | 925 | makedirs(dirname, self.createmode) |
@@ -927,10 +930,7 b' class opener(object):' | |||
|
927 | 930 | rename(mktempcopy(f), f) |
|
928 | 931 | fp = posixfile(f, mode) |
|
929 | 932 | if nlink == 0: |
|
930 | if st_mode is None: | |
|
931 | self._fixfilemode(f) | |
|
932 | else: | |
|
933 | os.chmod(f, st_mode) | |
|
933 | self._fixfilemode(f) | |
|
934 | 934 | return fp |
|
935 | 935 | |
|
936 | 936 | def symlink(self, src, dst): |
@@ -1075,7 +1075,7 b' def strdate(string, format, defaults=[])' | |||
|
1075 | 1075 | |
|
1076 | 1076 | # NOTE: unixtime = localunixtime + offset |
|
1077 | 1077 | offset, date = timezone(string), string |
|
1078 |
if offset |
|
|
1078 | if offset is not None: | |
|
1079 | 1079 | date = " ".join(string.split()[:-1]) |
|
1080 | 1080 | |
|
1081 | 1081 | # add missing elements from defaults |
@@ -1120,7 +1120,7 b' def parsedate(date, formats=None, bias={' | |||
|
1120 | 1120 | now = makedate() |
|
1121 | 1121 | defaults = {} |
|
1122 | 1122 | nowmap = {} |
|
1123 |
for part in "d |
|
|
1123 | for part in ("d", "mb", "yY", "HI", "M", "S"): | |
|
1124 | 1124 | # this piece is for rounding the specific end of unknowns |
|
1125 | 1125 | b = bias.get(part) |
|
1126 | 1126 | if b is None: |
@@ -1190,7 +1190,7 b' def matchdate(date):' | |||
|
1190 | 1190 | |
|
1191 | 1191 | def upper(date): |
|
1192 | 1192 | d = dict(mb="12", HI="23", M="59", S="59") |
|
1193 |
for days in "31 |
|
|
1193 | for days in ("31", "30", "29"): | |
|
1194 | 1194 | try: |
|
1195 | 1195 | d["d"] = days |
|
1196 | 1196 | return parsedate(date, extendeddateformats, d)[0] |
@@ -1387,37 +1387,48 b' def uirepr(s):' | |||
|
1387 | 1387 | # Avoid double backslash in Windows path repr() |
|
1388 | 1388 | return repr(s).replace('\\\\', '\\') |
|
1389 | 1389 | |
|
1390 | #### naming convention of below implementation follows 'textwrap' module | |
|
1390 | # delay import of textwrap | |
|
1391 | def MBTextWrapper(**kwargs): | |
|
1392 | class tw(textwrap.TextWrapper): | |
|
1393 | """ | |
|
1394 | Extend TextWrapper for double-width characters. | |
|
1391 | 1395 |
|
|
1392 | class MBTextWrapper(textwrap.TextWrapper): | |
|
1393 | def __init__(self, **kwargs): | |
|
1394 | textwrap.TextWrapper.__init__(self, **kwargs) | |
|
1396 | Some Asian characters use two terminal columns instead of one. | |
|
1397 | A good example of this behavior can be seen with u'\u65e5\u672c', | |
|
1398 | the two Japanese characters for "Japan": | |
|
1399 | len() returns 2, but when printed to a terminal, they eat 4 columns. | |
|
1400 | ||
|
1401 | (Note that this has nothing to do whatsoever with unicode | |
|
1402 | representation, or encoding of the underlying string) | |
|
1403 | """ | |
|
1404 | def __init__(self, **kwargs): | |
|
1405 | textwrap.TextWrapper.__init__(self, **kwargs) | |
|
1395 | 1406 | |
|
1396 | def _cutdown(self, str, space_left): | |
|
1397 | l = 0 | |
|
1398 | ucstr = unicode(str, encoding.encoding) | |
|
1399 | w = unicodedata.east_asian_width | |
|
1400 | for i in xrange(len(ucstr)): | |
|
1401 | l += w(ucstr[i]) in 'WFA' and 2 or 1 | |
|
1402 | if space_left < l: | |
|
1403 | return (ucstr[:i].encode(encoding.encoding), | |
|
1404 | ucstr[i:].encode(encoding.encoding)) | |
|
1405 | return str, '' | |
|
1407 | def _cutdown(self, str, space_left): | |
|
1408 | l = 0 | |
|
1409 | ucstr = unicode(str, encoding.encoding) | |
|
1410 | colwidth = unicodedata.east_asian_width | |
|
1411 | for i in xrange(len(ucstr)): | |
|
1412 | l += colwidth(ucstr[i]) in 'WFA' and 2 or 1 | |
|
1413 | if space_left < l: | |
|
1414 | return (ucstr[:i].encode(encoding.encoding), | |
|
1415 | ucstr[i:].encode(encoding.encoding)) | |
|
1416 | return str, '' | |
|
1406 | 1417 | |
|
1407 | # ---------------------------------------- | |
|
1408 | # overriding of base class | |
|
1409 | ||
|
1410 | def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): | |
|
1411 | space_left = max(width - cur_len, 1) | |
|
1418 | # overriding of base class | |
|
1419 | def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): | |
|
1420 | space_left = max(width - cur_len, 1) | |
|
1412 | 1421 | |
|
1413 | if self.break_long_words: | |
|
1414 | cut, res = self._cutdown(reversed_chunks[-1], space_left) | |
|
1415 | cur_line.append(cut) | |
|
1416 | reversed_chunks[-1] = res | |
|
1417 | elif not cur_line: | |
|
1418 | cur_line.append(reversed_chunks.pop()) | |
|
1422 | if self.break_long_words: | |
|
1423 | cut, res = self._cutdown(reversed_chunks[-1], space_left) | |
|
1424 | cur_line.append(cut) | |
|
1425 | reversed_chunks[-1] = res | |
|
1426 | elif not cur_line: | |
|
1427 | cur_line.append(reversed_chunks.pop()) | |
|
1419 | 1428 | |
|
1420 | #### naming convention of above implementation follows 'textwrap' module | |
|
1429 | global MBTextWrapper | |
|
1430 | MBTextWrapper = tw | |
|
1431 | return tw(**kwargs) | |
|
1421 | 1432 | |
|
1422 | 1433 | def wrap(line, width, initindent='', hangindent=''): |
|
1423 | 1434 | maxindent = max(len(hangindent), len(initindent)) |
@@ -1497,7 +1508,7 b' except NameError:' | |||
|
1497 | 1508 | return False |
|
1498 | 1509 | return True |
|
1499 | 1510 | |
|
1500 | def interpolate(prefix, mapping, s, fn=None): | |
|
1511 | def interpolate(prefix, mapping, s, fn=None, escape_prefix=False): | |
|
1501 | 1512 | """Return the result of interpolating items in the mapping into string s. |
|
1502 | 1513 | |
|
1503 | 1514 | prefix is a single character string, or a two character string with |
@@ -1506,9 +1517,20 b' def interpolate(prefix, mapping, s, fn=N' | |||
|
1506 | 1517 | |
|
1507 | 1518 | fn is an optional function that will be applied to the replacement text |
|
1508 | 1519 | just before replacement. |
|
1520 | ||
|
1521 | escape_prefix is an optional flag that allows using doubled prefix for | |
|
1522 | its escaping. | |
|
1509 | 1523 | """ |
|
1510 | 1524 | fn = fn or (lambda s: s) |
|
1511 |
r = |
|
|
1525 | patterns = '|'.join(mapping.keys()) | |
|
1526 | if escape_prefix: | |
|
1527 | patterns += '|' + prefix | |
|
1528 | if len(prefix) > 1: | |
|
1529 | prefix_char = prefix[1:] | |
|
1530 | else: | |
|
1531 | prefix_char = prefix | |
|
1532 | mapping[prefix_char] = prefix_char | |
|
1533 | r = re.compile(r'%s(%s)' % (prefix, patterns)) | |
|
1512 | 1534 | return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) |
|
1513 | 1535 | |
|
1514 | 1536 | def getport(port): |
@@ -34,7 +34,7 b' def _verify(repo):' | |||
|
34 | 34 | raise util.Abort(_("cannot verify bundle or remote repos")) |
|
35 | 35 | |
|
36 | 36 | def err(linkrev, msg, filename=None): |
|
37 |
if linkrev |
|
|
37 | if linkrev is not None: | |
|
38 | 38 | badrevs.add(linkrev) |
|
39 | 39 | else: |
|
40 | 40 | linkrev = '?' |
@@ -5,73 +5,173 b'' | |||
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | """Utility functions that use win32 API. | |
|
8 | import encoding | |
|
9 | import ctypes, errno, os, struct, subprocess | |
|
10 | ||
|
11 | _kernel32 = ctypes.windll.kernel32 | |
|
12 | ||
|
13 | _BOOL = ctypes.c_long | |
|
14 | _WORD = ctypes.c_ushort | |
|
15 | _DWORD = ctypes.c_ulong | |
|
16 | _LPCSTR = _LPSTR = ctypes.c_char_p | |
|
17 | _HANDLE = ctypes.c_void_p | |
|
18 | _HWND = _HANDLE | |
|
19 | ||
|
20 | _INVALID_HANDLE_VALUE = -1 | |
|
21 | ||
|
22 | # GetLastError | |
|
23 | _ERROR_SUCCESS = 0 | |
|
24 | _ERROR_INVALID_PARAMETER = 87 | |
|
25 | _ERROR_INSUFFICIENT_BUFFER = 122 | |
|
26 | ||
|
27 | # WPARAM is defined as UINT_PTR (unsigned type) | |
|
28 | # LPARAM is defined as LONG_PTR (signed type) | |
|
29 | if ctypes.sizeof(ctypes.c_long) == ctypes.sizeof(ctypes.c_void_p): | |
|
30 | _WPARAM = ctypes.c_ulong | |
|
31 | _LPARAM = ctypes.c_long | |
|
32 | elif ctypes.sizeof(ctypes.c_longlong) == ctypes.sizeof(ctypes.c_void_p): | |
|
33 | _WPARAM = ctypes.c_ulonglong | |
|
34 | _LPARAM = ctypes.c_longlong | |
|
35 | ||
|
36 | class _FILETIME(ctypes.Structure): | |
|
37 | _fields_ = [('dwLowDateTime', _DWORD), | |
|
38 | ('dwHighDateTime', _DWORD)] | |
|
39 | ||
|
40 | class _BY_HANDLE_FILE_INFORMATION(ctypes.Structure): | |
|
41 | _fields_ = [('dwFileAttributes', _DWORD), | |
|
42 | ('ftCreationTime', _FILETIME), | |
|
43 | ('ftLastAccessTime', _FILETIME), | |
|
44 | ('ftLastWriteTime', _FILETIME), | |
|
45 | ('dwVolumeSerialNumber', _DWORD), | |
|
46 | ('nFileSizeHigh', _DWORD), | |
|
47 | ('nFileSizeLow', _DWORD), | |
|
48 | ('nNumberOfLinks', _DWORD), | |
|
49 | ('nFileIndexHigh', _DWORD), | |
|
50 | ('nFileIndexLow', _DWORD)] | |
|
51 | ||
|
52 | # CreateFile | |
|
53 | _FILE_SHARE_READ = 0x00000001 | |
|
54 | _FILE_SHARE_WRITE = 0x00000002 | |
|
55 | _FILE_SHARE_DELETE = 0x00000004 | |
|
56 | ||
|
57 | _OPEN_EXISTING = 3 | |
|
58 | ||
|
59 | # Process Security and Access Rights | |
|
60 | _PROCESS_QUERY_INFORMATION = 0x0400 | |
|
61 | ||
|
62 | # GetExitCodeProcess | |
|
63 | _STILL_ACTIVE = 259 | |
|
64 | ||
|
65 | # registry | |
|
66 | _HKEY_CURRENT_USER = 0x80000001L | |
|
67 | _HKEY_LOCAL_MACHINE = 0x80000002L | |
|
68 | _KEY_READ = 0x20019 | |
|
69 | _REG_SZ = 1 | |
|
70 | _REG_DWORD = 4 | |
|
9 | 71 | |
|
10 | Mark Hammond's win32all package allows better functionality on | |
|
11 | Windows. This module overrides definitions in util.py. If not | |
|
12 | available, import of this module will fail, and generic code will be | |
|
13 | used. | |
|
14 | """ | |
|
72 | class _STARTUPINFO(ctypes.Structure): | |
|
73 | _fields_ = [('cb', _DWORD), | |
|
74 | ('lpReserved', _LPSTR), | |
|
75 | ('lpDesktop', _LPSTR), | |
|
76 | ('lpTitle', _LPSTR), | |
|
77 | ('dwX', _DWORD), | |
|
78 | ('dwY', _DWORD), | |
|
79 | ('dwXSize', _DWORD), | |
|
80 | ('dwYSize', _DWORD), | |
|
81 | ('dwXCountChars', _DWORD), | |
|
82 | ('dwYCountChars', _DWORD), | |
|
83 | ('dwFillAttribute', _DWORD), | |
|
84 | ('dwFlags', _DWORD), | |
|
85 | ('wShowWindow', _WORD), | |
|
86 | ('cbReserved2', _WORD), | |
|
87 | ('lpReserved2', ctypes.c_char_p), | |
|
88 | ('hStdInput', _HANDLE), | |
|
89 | ('hStdOutput', _HANDLE), | |
|
90 | ('hStdError', _HANDLE)] | |
|
91 | ||
|
92 | class _PROCESS_INFORMATION(ctypes.Structure): | |
|
93 | _fields_ = [('hProcess', _HANDLE), | |
|
94 | ('hThread', _HANDLE), | |
|
95 | ('dwProcessId', _DWORD), | |
|
96 | ('dwThreadId', _DWORD)] | |
|
97 | ||
|
98 | _DETACHED_PROCESS = 0x00000008 | |
|
99 | _STARTF_USESHOWWINDOW = 0x00000001 | |
|
100 | _SW_HIDE = 0 | |
|
15 | 101 | |
|
16 | import win32api | |
|
102 | class _COORD(ctypes.Structure): | |
|
103 | _fields_ = [('X', ctypes.c_short), | |
|
104 | ('Y', ctypes.c_short)] | |
|
105 | ||
|
106 | class _SMALL_RECT(ctypes.Structure): | |
|
107 | _fields_ = [('Left', ctypes.c_short), | |
|
108 | ('Top', ctypes.c_short), | |
|
109 | ('Right', ctypes.c_short), | |
|
110 | ('Bottom', ctypes.c_short)] | |
|
111 | ||
|
112 | class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): | |
|
113 | _fields_ = [('dwSize', _COORD), | |
|
114 | ('dwCursorPosition', _COORD), | |
|
115 | ('wAttributes', _WORD), | |
|
116 | ('srWindow', _SMALL_RECT), | |
|
117 | ('dwMaximumWindowSize', _COORD)] | |
|
17 | 118 | |
|
18 | import errno, os, sys, pywintypes, win32con, win32file, win32process | |
|
19 | import winerror, win32gui, win32console | |
|
20 | import osutil, encoding | |
|
21 | from win32com.shell import shell, shellcon | |
|
119 | _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12 | |
|
120 | ||
|
121 | def _raiseoserror(name): | |
|
122 | err = ctypes.WinError() | |
|
123 | raise OSError(err.errno, '%s: %s' % (name, err.strerror)) | |
|
124 | ||
|
125 | def _getfileinfo(name): | |
|
126 | fh = _kernel32.CreateFileA(name, 0, | |
|
127 | _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE, | |
|
128 | None, _OPEN_EXISTING, 0, None) | |
|
129 | if fh == _INVALID_HANDLE_VALUE: | |
|
130 | _raiseoserror(name) | |
|
131 | try: | |
|
132 | fi = _BY_HANDLE_FILE_INFORMATION() | |
|
133 | if not _kernel32.GetFileInformationByHandle(fh, ctypes.byref(fi)): | |
|
134 | _raiseoserror(name) | |
|
135 | return fi | |
|
136 | finally: | |
|
137 | _kernel32.CloseHandle(fh) | |
|
22 | 138 | |
|
23 | 139 | def os_link(src, dst): |
|
24 | try: | |
|
25 | win32file.CreateHardLink(dst, src) | |
|
26 | except pywintypes.error: | |
|
27 | raise OSError(errno.EINVAL, 'target implements hardlinks improperly') | |
|
28 | except NotImplementedError: # Another fake error win Win98 | |
|
29 | raise OSError(errno.EINVAL, 'Hardlinking not supported') | |
|
140 | if not _kernel32.CreateHardLinkA(dst, src, None): | |
|
141 | _raiseoserror(src) | |
|
30 | 142 | |
|
31 |
def |
|
|
32 |
|
|
|
33 | try: | |
|
34 | fh = win32file.CreateFile(pathname, | |
|
35 | win32file.GENERIC_READ, win32file.FILE_SHARE_READ, | |
|
36 | None, win32file.OPEN_EXISTING, 0, None) | |
|
37 | except pywintypes.error: | |
|
38 | raise OSError(errno.ENOENT, 'The system cannot find the file specified') | |
|
39 | try: | |
|
40 | return win32file.GetFileInformationByHandle(fh) | |
|
41 | finally: | |
|
42 | fh.Close() | |
|
43 | ||
|
44 | def nlinks(pathname): | |
|
45 | """Return number of hardlinks for the given file.""" | |
|
46 | return _getfileinfo(pathname)[7] | |
|
143 | def nlinks(name): | |
|
144 | '''return number of hardlinks for the given file''' | |
|
145 | return _getfileinfo(name).nNumberOfLinks | |
|
47 | 146 | |
|
48 | 147 | def samefile(fpath1, fpath2): |
|
49 |
|
|
|
50 |
guaranteed to work for files, not directories. |
|
|
148 | '''Returns whether fpath1 and fpath2 refer to the same file. This is only | |
|
149 | guaranteed to work for files, not directories.''' | |
|
51 | 150 | res1 = _getfileinfo(fpath1) |
|
52 | 151 | res2 = _getfileinfo(fpath2) |
|
53 | # Index 4 is the volume serial number, and 8 and 9 contain the file ID | |
|
54 | return res1[4] == res2[4] and res1[8] == res2[8] and res1[9] == res2[9] | |
|
152 | return (res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber | |
|
153 | and res1.nFileIndexHigh == res2.nFileIndexHigh | |
|
154 | and res1.nFileIndexLow == res2.nFileIndexLow) | |
|
55 | 155 | |
|
56 | 156 | def samedevice(fpath1, fpath2): |
|
57 |
|
|
|
58 |
guaranteed to work for files, not directories. |
|
|
157 | '''Returns whether fpath1 and fpath2 are on the same device. This is only | |
|
158 | guaranteed to work for files, not directories.''' | |
|
59 | 159 | res1 = _getfileinfo(fpath1) |
|
60 | 160 | res2 = _getfileinfo(fpath2) |
|
61 | return res1[4] == res2[4] | |
|
161 | return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber | |
|
62 | 162 | |
|
63 | 163 | def testpid(pid): |
|
64 | 164 | '''return True if pid is still running or unable to |
|
65 | 165 | determine, False otherwise''' |
|
66 | try: | |
|
67 | handle = win32api.OpenProcess( | |
|
68 | win32con.PROCESS_QUERY_INFORMATION, False, pid) | |
|
69 | if handle: | |
|
70 |
|
|
|
71 |
return status == |
|
|
72 | except pywintypes.error, details: | |
|
73 | return details[0] != winerror.ERROR_INVALID_PARAMETER | |
|
74 | return True | |
|
166 | h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid) | |
|
167 | if h: | |
|
168 | try: | |
|
169 | status = _DWORD() | |
|
170 | if _kernel32.GetExitCodeProcess(h, ctypes.byref(status)): | |
|
171 | return status.value == _STILL_ACTIVE | |
|
172 | finally: | |
|
173 | _kernel32.CloseHandle(h) | |
|
174 | return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER | |
|
75 | 175 | |
|
76 | 176 | def lookup_reg(key, valname=None, scope=None): |
|
77 | 177 | ''' Look up a key/value name in the Windows registry. |
@@ -82,101 +182,137 b' def lookup_reg(key, valname=None, scope=' | |||
|
82 | 182 | a sequence of scopes to look up in order. Default (CURRENT_USER, |
|
83 | 183 | LOCAL_MACHINE). |
|
84 | 184 | ''' |
|
85 | try: | |
|
86 | from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \ | |
|
87 | QueryValueEx, OpenKey | |
|
88 | except ImportError: | |
|
89 | return None | |
|
90 | ||
|
185 | adv = ctypes.windll.advapi32 | |
|
186 | byref = ctypes.byref | |
|
91 | 187 | if scope is None: |
|
92 | scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE) | |
|
188 | scope = (_HKEY_CURRENT_USER, _HKEY_LOCAL_MACHINE) | |
|
93 | 189 | elif not isinstance(scope, (list, tuple)): |
|
94 | 190 | scope = (scope,) |
|
95 | 191 | for s in scope: |
|
192 | kh = _HANDLE() | |
|
193 | res = adv.RegOpenKeyExA(s, key, 0, _KEY_READ, ctypes.byref(kh)) | |
|
194 | if res != _ERROR_SUCCESS: | |
|
195 | continue | |
|
96 | 196 | try: |
|
97 | val = QueryValueEx(OpenKey(s, key), valname)[0] | |
|
98 | # never let a Unicode string escape into the wild | |
|
99 | return encoding.tolocal(val.encode('UTF-8')) | |
|
100 | except EnvironmentError: | |
|
101 | pass | |
|
197 | size = _DWORD(600) | |
|
198 | type = _DWORD() | |
|
199 | buf = ctypes.create_string_buffer(size.value + 1) | |
|
200 | res = adv.RegQueryValueExA(kh.value, valname, None, | |
|
201 | byref(type), buf, byref(size)) | |
|
202 | if res != _ERROR_SUCCESS: | |
|
203 | continue | |
|
204 | if type.value == _REG_SZ: | |
|
205 | # never let a Unicode string escape into the wild | |
|
206 | return encoding.tolocal(buf.value.encode('UTF-8')) | |
|
207 | elif type.value == _REG_DWORD: | |
|
208 | fmt = '<L' | |
|
209 | s = ctypes.string_at(byref(buf), struct.calcsize(fmt)) | |
|
210 | return struct.unpack(fmt, s)[0] | |
|
211 | finally: | |
|
212 | adv.RegCloseKey(kh.value) | |
|
102 | 213 | |
|
103 | def system_rcpath_win32(): | |
|
104 | '''return default os-specific hgrc search path''' | |
|
105 | filename = win32api.GetModuleFileName(0) | |
|
106 | # Use mercurial.ini found in directory with hg.exe | |
|
107 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') | |
|
108 | if os.path.isfile(progrc): | |
|
109 | return [progrc] | |
|
110 | # Use hgrc.d found in directory with hg.exe | |
|
111 | progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') | |
|
112 | if os.path.isdir(progrcd): | |
|
113 | rcpath = [] | |
|
114 | for f, kind in osutil.listdir(progrcd): | |
|
115 | if f.endswith('.rc'): | |
|
116 | rcpath.append(os.path.join(progrcd, f)) | |
|
117 | return rcpath | |
|
118 | # else look for a system rcpath in the registry | |
|
119 | try: | |
|
120 | value = win32api.RegQueryValue( | |
|
121 | win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') | |
|
122 | rcpath = [] | |
|
123 | for p in value.split(os.pathsep): | |
|
124 | if p.lower().endswith('mercurial.ini'): | |
|
125 | rcpath.append(p) | |
|
126 | elif os.path.isdir(p): | |
|
127 | for f, kind in osutil.listdir(p): | |
|
128 | if f.endswith('.rc'): | |
|
129 | rcpath.append(os.path.join(p, f)) | |
|
130 | return rcpath | |
|
131 | except pywintypes.error: | |
|
132 | return [] | |
|
133 | ||
|
134 | def user_rcpath_win32(): | |
|
135 | '''return os-specific hgrc search path to the user dir''' | |
|
136 | userdir = os.path.expanduser('~') | |
|
137 | if sys.getwindowsversion()[3] != 2 and userdir == '~': | |
|
138 | # We are on win < nt: fetch the APPDATA directory location and use | |
|
139 | # the parent directory as the user home dir. | |
|
140 | appdir = shell.SHGetPathFromIDList( | |
|
141 | shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA)) | |
|
142 | userdir = os.path.dirname(appdir) | |
|
143 | return [os.path.join(userdir, 'mercurial.ini'), | |
|
144 | os.path.join(userdir, '.hgrc')] | |
|
214 | def executable_path(): | |
|
215 | '''return full path of hg.exe''' | |
|
216 | size = 600 | |
|
217 | buf = ctypes.create_string_buffer(size + 1) | |
|
218 | len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size) | |
|
219 | if len == 0: | |
|
220 | raise ctypes.WinError() | |
|
221 | elif len == size: | |
|
222 | raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER) | |
|
223 | return buf.value | |
|
145 | 224 | |
|
146 | 225 | def getuser(): |
|
147 | 226 | '''return name of current user''' |
|
148 | return win32api.GetUserName() | |
|
227 | adv = ctypes.windll.advapi32 | |
|
228 | size = _DWORD(300) | |
|
229 | buf = ctypes.create_string_buffer(size.value + 1) | |
|
230 | if not adv.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)): | |
|
231 | raise ctypes.WinError() | |
|
232 | return buf.value | |
|
149 | 233 | |
|
150 | def set_signal_handler_win32(): | |
|
151 | """Register a termination handler for console events including | |
|
234 | _SIGNAL_HANDLER = ctypes.WINFUNCTYPE(_BOOL, _DWORD) | |
|
235 | _signal_handler = [] | |
|
236 | ||
|
237 | def set_signal_handler(): | |
|
238 | '''Register a termination handler for console events including | |
|
152 | 239 | CTRL+C. python signal handlers do not work well with socket |
|
153 | 240 | operations. |
|
154 | """ | |
|
241 | ''' | |
|
155 | 242 | def handler(event): |
|
156 |
|
|
|
157 | win32api.SetConsoleCtrlHandler(handler) | |
|
243 | _kernel32.ExitProcess(1) | |
|
244 | ||
|
245 | if _signal_handler: | |
|
246 | return # already registered | |
|
247 | h = _SIGNAL_HANDLER(handler) | |
|
248 | _signal_handler.append(h) # needed to prevent garbage collection | |
|
249 | if not _kernel32.SetConsoleCtrlHandler(h, True): | |
|
250 | raise ctypes.WinError() | |
|
251 | ||
|
252 | _WNDENUMPROC = ctypes.WINFUNCTYPE(_BOOL, _HWND, _LPARAM) | |
|
158 | 253 | |
|
159 | 254 | def hidewindow(): |
|
160 | def callback(*args, **kwargs): | |
|
161 | hwnd, pid = args | |
|
162 | wpid = win32process.GetWindowThreadProcessId(hwnd)[1] | |
|
163 | if pid == wpid: | |
|
164 | win32gui.ShowWindow(hwnd, win32con.SW_HIDE) | |
|
255 | user32 = ctypes.windll.user32 | |
|
165 | 256 | |
|
166 | pid = win32process.GetCurrentProcessId() | |
|
167 | win32gui.EnumWindows(callback, pid) | |
|
257 | def callback(hwnd, pid): | |
|
258 | wpid = _DWORD() | |
|
259 | user32.GetWindowThreadProcessId(hwnd, ctypes.byref(wpid)) | |
|
260 | if pid == wpid.value: | |
|
261 | user32.ShowWindow(hwnd, _SW_HIDE) | |
|
262 | return False # stop enumerating windows | |
|
263 | return True | |
|
264 | ||
|
265 | pid = _kernel32.GetCurrentProcessId() | |
|
266 | user32.EnumWindows(_WNDENUMPROC(callback), pid) | |
|
168 | 267 | |
|
169 | 268 | def termwidth(): |
|
170 | try: | |
|
171 | # Query stderr to avoid problems with redirections | |
|
172 | screenbuf = win32console.GetStdHandle(win32console.STD_ERROR_HANDLE) | |
|
173 | if screenbuf is None: | |
|
174 | return 79 | |
|
175 | try: | |
|
176 | window = screenbuf.GetConsoleScreenBufferInfo()['Window'] | |
|
177 | width = window.Right - window.Left | |
|
178 | return width | |
|
179 | finally: | |
|
180 | screenbuf.Detach() | |
|
181 | except pywintypes.error: | |
|
182 | return 79 | |
|
269 | # cmd.exe does not handle CR like a unix console, the CR is | |
|
270 | # counted in the line length. On 80 columns consoles, if 80 | |
|
271 | # characters are written, the following CR won't apply on the | |
|
272 | # current line but on the new one. Keep room for it. | |
|
273 | width = 79 | |
|
274 | # Query stderr to avoid problems with redirections | |
|
275 | screenbuf = _kernel32.GetStdHandle( | |
|
276 | _STD_ERROR_HANDLE) # don't close the handle returned | |
|
277 | if screenbuf is None or screenbuf == _INVALID_HANDLE_VALUE: | |
|
278 | return width | |
|
279 | csbi = _CONSOLE_SCREEN_BUFFER_INFO() | |
|
280 | if not _kernel32.GetConsoleScreenBufferInfo( | |
|
281 | screenbuf, ctypes.byref(csbi)): | |
|
282 | return width | |
|
283 | width = csbi.srWindow.Right - csbi.srWindow.Left | |
|
284 | return width | |
|
285 | ||
|
286 | def spawndetached(args): | |
|
287 | # No standard library function really spawns a fully detached | |
|
288 | # process under win32 because they allocate pipes or other objects | |
|
289 | # to handle standard streams communications. Passing these objects | |
|
290 | # to the child process requires handle inheritance to be enabled | |
|
291 | # which makes really detached processes impossible. | |
|
292 | si = _STARTUPINFO() | |
|
293 | si.cb = ctypes.sizeof(_STARTUPINFO) | |
|
294 | si.dwFlags = _STARTF_USESHOWWINDOW | |
|
295 | si.wShowWindow = _SW_HIDE | |
|
296 | ||
|
297 | pi = _PROCESS_INFORMATION() | |
|
298 | ||
|
299 | env = '' | |
|
300 | for k in os.environ: | |
|
301 | env += "%s=%s\0" % (k, os.environ[k]) | |
|
302 | if not env: | |
|
303 | env = '\0' | |
|
304 | env += '\0' | |
|
305 | ||
|
306 | args = subprocess.list2cmdline(args) | |
|
307 | # Not running the command in shell mode makes python26 hang when | |
|
308 | # writing to hgweb output socket. | |
|
309 | comspec = os.environ.get("COMSPEC", "cmd.exe") | |
|
310 | args = comspec + " /c " + args | |
|
311 | ||
|
312 | res = _kernel32.CreateProcessA( | |
|
313 | None, args, None, None, False, _DETACHED_PROCESS, | |
|
314 | env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi)) | |
|
315 | if not res: | |
|
316 | raise ctypes.WinError() | |
|
317 | ||
|
318 | return pi.dwProcessId |
@@ -71,22 +71,45 b' def _is_win_9x():' | |||
|
71 | 71 | return 'command' in os.environ.get('comspec', '') |
|
72 | 72 | |
|
73 | 73 | def openhardlinks(): |
|
74 |
return not _is_win_9x() |
|
|
74 | return not _is_win_9x() | |
|
75 | ||
|
76 | _HKEY_LOCAL_MACHINE = 0x80000002L | |
|
75 | 77 | |
|
76 | 78 | def system_rcpath(): |
|
77 | try: | |
|
78 | return system_rcpath_win32() | |
|
79 | except: | |
|
80 | return [r'c:\mercurial\mercurial.ini'] | |
|
79 | '''return default os-specific hgrc search path''' | |
|
80 | rcpath = [] | |
|
81 | filename = executable_path() | |
|
82 | # Use mercurial.ini found in directory with hg.exe | |
|
83 | progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') | |
|
84 | if os.path.isfile(progrc): | |
|
85 | rcpath.append(progrc) | |
|
86 | return rcpath | |
|
87 | # Use hgrc.d found in directory with hg.exe | |
|
88 | progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') | |
|
89 | if os.path.isdir(progrcd): | |
|
90 | for f, kind in osutil.listdir(progrcd): | |
|
91 | if f.endswith('.rc'): | |
|
92 | rcpath.append(os.path.join(progrcd, f)) | |
|
93 | return rcpath | |
|
94 | # else look for a system rcpath in the registry | |
|
95 | value = lookup_reg('SOFTWARE\\Mercurial', None, _HKEY_LOCAL_MACHINE) | |
|
96 | if not isinstance(value, str) or not value: | |
|
97 | return rcpath | |
|
98 | value = value.replace('/', os.sep) | |
|
99 | for p in value.split(os.pathsep): | |
|
100 | if p.lower().endswith('mercurial.ini'): | |
|
101 | rcpath.append(p) | |
|
102 | elif os.path.isdir(p): | |
|
103 | for f, kind in osutil.listdir(p): | |
|
104 | if f.endswith('.rc'): | |
|
105 | rcpath.append(os.path.join(p, f)) | |
|
106 | return rcpath | |
|
81 | 107 | |
|
82 | 108 | def user_rcpath(): |
|
83 | 109 | '''return os-specific hgrc search path to the user dir''' |
|
84 | try: | |
|
85 | path = user_rcpath_win32() | |
|
86 | except: | |
|
87 | home = os.path.expanduser('~') | |
|
88 | path = [os.path.join(home, 'mercurial.ini'), | |
|
89 | os.path.join(home, '.hgrc')] | |
|
110 | home = os.path.expanduser('~') | |
|
111 | path = [os.path.join(home, 'mercurial.ini'), | |
|
112 | os.path.join(home, '.hgrc')] | |
|
90 | 113 | userprofile = os.environ.get('USERPROFILE') |
|
91 | 114 | if userprofile: |
|
92 | 115 | path.append(os.path.join(userprofile, 'mercurial.ini')) |
@@ -106,10 +129,6 b' def sshargs(sshcmd, host, user, port):' | |||
|
106 | 129 | args = user and ("%s@%s" % (user, host)) or host |
|
107 | 130 | return port and ("%s %s %s" % (args, pflag, port)) or args |
|
108 | 131 | |
|
109 | def testpid(pid): | |
|
110 | '''return False if pid dead, True if running or not known''' | |
|
111 | return True | |
|
112 | ||
|
113 | 132 | def set_flags(f, l, x): |
|
114 | 133 | pass |
|
115 | 134 | |
@@ -208,12 +227,6 b' def find_exe(command):' | |||
|
208 | 227 | return executable |
|
209 | 228 | return findexisting(os.path.expanduser(os.path.expandvars(command))) |
|
210 | 229 | |
|
211 | def set_signal_handler(): | |
|
212 | try: | |
|
213 | set_signal_handler_win32() | |
|
214 | except NameError: | |
|
215 | pass | |
|
216 | ||
|
217 | 230 | def statfiles(files): |
|
218 | 231 | '''Stat each file in files and yield stat or None if file does not exist. |
|
219 | 232 | Cluster and cache stat per directory to minimize number of OS stat calls.''' |
@@ -241,11 +254,6 b' def statfiles(files):' | |||
|
241 | 254 | cache = dircache.setdefault(dir, dmap) |
|
242 | 255 | yield cache.get(base, None) |
|
243 | 256 | |
|
244 | def getuser(): | |
|
245 | '''return name of current user''' | |
|
246 | raise error.Abort(_('user name not available - set USERNAME ' | |
|
247 | 'environment variable')) | |
|
248 | ||
|
249 | 257 | def username(uid=None): |
|
250 | 258 | """Return the name of the user with the given uid. |
|
251 | 259 | |
@@ -276,7 +284,7 b' def _removedirs(name):' | |||
|
276 | 284 | break |
|
277 | 285 | head, tail = os.path.split(head) |
|
278 | 286 | |
|
279 | def unlink(f): | |
|
287 | def unlinkpath(f): | |
|
280 | 288 | """unlink and remove the directory if it is empty""" |
|
281 | 289 | os.unlink(f) |
|
282 | 290 | # try removing directories that might now be empty |
@@ -285,73 +293,56 b' def unlink(f):' | |||
|
285 | 293 | except OSError: |
|
286 | 294 | pass |
|
287 | 295 | |
|
296 | def unlink(f): | |
|
297 | '''try to implement POSIX' unlink semantics on Windows''' | |
|
298 | ||
|
299 | # POSIX allows to unlink and rename open files. Windows has serious | |
|
300 | # problems with doing that: | |
|
301 | # - Calling os.unlink (or os.rename) on a file f fails if f or any | |
|
302 | # hardlinked copy of f has been opened with Python's open(). There is no | |
|
303 | # way such a file can be deleted or renamed on Windows (other than | |
|
304 | # scheduling the delete or rename for the next reboot). | |
|
305 | # - Calling os.unlink on a file that has been opened with Mercurial's | |
|
306 | # posixfile (or comparable methods) will delay the actual deletion of | |
|
307 | # the file for as long as the file is held open. The filename is blocked | |
|
308 | # during that time and cannot be used for recreating a new file under | |
|
309 | # that same name ("zombie file"). Directories containing such zombie files | |
|
310 | # cannot be removed or moved. | |
|
311 | # A file that has been opened with posixfile can be renamed, so we rename | |
|
312 | # f to a random temporary name before calling os.unlink on it. This allows | |
|
313 | # callers to recreate f immediately while having other readers do their | |
|
314 | # implicit zombie filename blocking on a temporary name. | |
|
315 | ||
|
316 | for tries in xrange(10): | |
|
317 | temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) | |
|
318 | try: | |
|
319 | os.rename(f, temp) # raises OSError EEXIST if temp exists | |
|
320 | break | |
|
321 | except OSError, e: | |
|
322 | if e.errno != errno.EEXIST: | |
|
323 | raise | |
|
324 | else: | |
|
325 | raise IOError, (errno.EEXIST, "No usable temporary filename found") | |
|
326 | ||
|
327 | try: | |
|
328 | os.unlink(temp) | |
|
329 | except: | |
|
330 | # Some very rude AV-scanners on Windows may cause this unlink to fail. | |
|
331 | # Not aborting here just leaks the temp file, whereas aborting at this | |
|
332 | # point may leave serious inconsistencies. Ideally, we would notify | |
|
333 | # the user in this case here. | |
|
334 | pass | |
|
335 | ||
|
288 | 336 | def rename(src, dst): |
|
289 | 337 | '''atomically rename file src to dst, replacing dst if it exists''' |
|
290 | 338 | try: |
|
291 | 339 | os.rename(src, dst) |
|
292 | except OSError: # FIXME: check err (EEXIST ?) | |
|
293 | ||
|
294 | # On windows, rename to existing file is not allowed, so we | |
|
295 | # must delete destination first. But if a file is open, unlink | |
|
296 | # schedules it for delete but does not delete it. Rename | |
|
297 | # happens immediately even for open files, so we rename | |
|
298 | # destination to a temporary name, then delete that. Then | |
|
299 | # rename is safe to do. | |
|
300 | # The temporary name is chosen at random to avoid the situation | |
|
301 | # where a file is left lying around from a previous aborted run. | |
|
302 | ||
|
303 | for tries in xrange(10): | |
|
304 | temp = '%s-%08x' % (dst, random.randint(0, 0xffffffff)) | |
|
305 | try: | |
|
306 | os.rename(dst, temp) # raises OSError EEXIST if temp exists | |
|
307 | break | |
|
308 | except OSError, e: | |
|
309 | if e.errno != errno.EEXIST: | |
|
310 | raise | |
|
311 | else: | |
|
312 | raise IOError, (errno.EEXIST, "No usable temporary filename found") | |
|
313 | ||
|
314 | try: | |
|
315 | os.unlink(temp) | |
|
316 | except: | |
|
317 | # Some rude AV-scanners on Windows may cause the unlink to | |
|
318 | # fail. Not aborting here just leaks the temp file, whereas | |
|
319 | # aborting at this point may leave serious inconsistencies. | |
|
320 | # Ideally, we would notify the user here. | |
|
321 | pass | |
|
340 | except OSError, e: | |
|
341 | if e.errno != errno.EEXIST: | |
|
342 | raise | |
|
343 | unlink(dst) | |
|
322 | 344 | os.rename(src, dst) |
|
323 | 345 | |
|
324 | def spawndetached(args): | |
|
325 | # No standard library function really spawns a fully detached | |
|
326 | # process under win32 because they allocate pipes or other objects | |
|
327 | # to handle standard streams communications. Passing these objects | |
|
328 | # to the child process requires handle inheritance to be enabled | |
|
329 | # which makes really detached processes impossible. | |
|
330 | class STARTUPINFO: | |
|
331 | dwFlags = subprocess.STARTF_USESHOWWINDOW | |
|
332 | hStdInput = None | |
|
333 | hStdOutput = None | |
|
334 | hStdError = None | |
|
335 | wShowWindow = subprocess.SW_HIDE | |
|
336 | ||
|
337 | args = subprocess.list2cmdline(args) | |
|
338 | # Not running the command in shell mode makes python26 hang when | |
|
339 | # writing to hgweb output socket. | |
|
340 | comspec = os.environ.get("COMSPEC", "cmd.exe") | |
|
341 | args = comspec + " /c " + args | |
|
342 | hp, ht, pid, tid = subprocess.CreateProcess( | |
|
343 | None, args, | |
|
344 | # no special security | |
|
345 | None, None, | |
|
346 | # Do not inherit handles | |
|
347 | 0, | |
|
348 | # DETACHED_PROCESS | |
|
349 | 0x00000008, | |
|
350 | os.environ, | |
|
351 | os.getcwd(), | |
|
352 | STARTUPINFO()) | |
|
353 | return pid | |
|
354 | ||
|
355 | 346 | def gethgcmd(): |
|
356 | 347 | return [sys.executable] + sys.argv[:1] |
|
357 | 348 | |
@@ -366,10 +357,6 b' def groupmembers(name):' | |||
|
366 | 357 | # Don't support groups on Windows for now |
|
367 | 358 | raise KeyError() |
|
368 | 359 | |
|
369 | try: | |
|
370 | # override functions with win32 versions if possible | |
|
371 | from win32 import * | |
|
372 | except ImportError: | |
|
373 | pass | |
|
360 | from win32 import * | |
|
374 | 361 | |
|
375 | 362 | expandglobs = True |
@@ -25,7 +25,7 b" def encodelist(l, sep=' '):" | |||
|
25 | 25 | class wirerepository(repo.repository): |
|
26 | 26 | def lookup(self, key): |
|
27 | 27 | self.requirecap('lookup', _('look up remote revision')) |
|
28 | d = self._call("lookup", key=key) | |
|
28 | d = self._call("lookup", key=encoding.fromlocal(key)) | |
|
29 | 29 | success, data = d[:-1].split(" ", 1) |
|
30 | 30 | if int(success): |
|
31 | 31 | return bin(data) |
@@ -44,14 +44,7 b' class wirerepository(repo.repository):' | |||
|
44 | 44 | branchmap = {} |
|
45 | 45 | for branchpart in d.splitlines(): |
|
46 | 46 | branchname, branchheads = branchpart.split(' ', 1) |
|
47 | branchname = urllib.unquote(branchname) | |
|
48 | # Earlier servers (1.3.x) send branch names in (their) local | |
|
49 | # charset. The best we can do is assume it's identical to our | |
|
50 | # own local charset, in case it's not utf-8. | |
|
51 | try: | |
|
52 | branchname.decode('utf-8') | |
|
53 | except UnicodeDecodeError: | |
|
54 | branchname = encoding.fromlocal(branchname) | |
|
47 | branchname = encoding.tolocal(urllib.unquote(branchname)) | |
|
55 | 48 | branchheads = decodelist(branchheads) |
|
56 | 49 | branchmap[branchname] = branchheads |
|
57 | 50 | return branchmap |
@@ -83,17 +76,20 b' class wirerepository(repo.repository):' | |||
|
83 | 76 | if not self.capable('pushkey'): |
|
84 | 77 | return False |
|
85 | 78 | d = self._call("pushkey", |
|
86 |
namespace=namespace, |
|
|
79 | namespace=encoding.fromlocal(namespace), | |
|
80 | key=encoding.fromlocal(key), | |
|
81 | old=encoding.fromlocal(old), | |
|
82 | new=encoding.fromlocal(new)) | |
|
87 | 83 | return bool(int(d)) |
|
88 | 84 | |
|
89 | 85 | def listkeys(self, namespace): |
|
90 | 86 | if not self.capable('pushkey'): |
|
91 | 87 | return {} |
|
92 | d = self._call("listkeys", namespace=namespace) | |
|
88 | d = self._call("listkeys", namespace=encoding.fromlocal(namespace)) | |
|
93 | 89 | r = {} |
|
94 | 90 | for l in d.splitlines(): |
|
95 | 91 | k, v = l.split('\t') |
|
96 |
r[k |
|
|
92 | r[encoding.tolocal(k)] = encoding.tolocal(v) | |
|
97 | 93 | return r |
|
98 | 94 | |
|
99 | 95 | def stream_out(self): |
@@ -162,7 +158,7 b' def branchmap(repo, proto):' | |||
|
162 | 158 | branchmap = repo.branchmap() |
|
163 | 159 | heads = [] |
|
164 | 160 | for branch, nodes in branchmap.iteritems(): |
|
165 | branchname = urllib.quote(branch) | |
|
161 | branchname = urllib.quote(encoding.fromlocal(branch)) | |
|
166 | 162 | branchnodes = encodelist(nodes) |
|
167 | 163 | heads.append('%s %s' % (branchname, branchnodes)) |
|
168 | 164 | return '\n'.join(heads) |
@@ -213,14 +209,14 b' def hello(repo, proto):' | |||
|
213 | 209 | return "capabilities: %s\n" % (capabilities(repo, proto)) |
|
214 | 210 | |
|
215 | 211 | def listkeys(repo, proto, namespace): |
|
216 | d = pushkeymod.list(repo, namespace).items() | |
|
217 |
t = '\n'.join(['%s\t%s' % (k |
|
|
218 |
|
|
|
212 | d = pushkeymod.list(repo, encoding.tolocal(namespace)).items() | |
|
213 | t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v)) | |
|
214 | for k, v in d]) | |
|
219 | 215 | return t |
|
220 | 216 | |
|
221 | 217 | def lookup(repo, proto, key): |
|
222 | 218 | try: |
|
223 | r = hex(repo.lookup(key)) | |
|
219 | r = hex(repo.lookup(encoding.tolocal(key))) | |
|
224 | 220 | success = 1 |
|
225 | 221 | except Exception, inst: |
|
226 | 222 | r = str(inst) |
@@ -228,7 +224,21 b' def lookup(repo, proto, key):' | |||
|
228 | 224 | return "%s %s\n" % (success, r) |
|
229 | 225 | |
|
230 | 226 | def pushkey(repo, proto, namespace, key, old, new): |
|
231 | r = pushkeymod.push(repo, namespace, key, old, new) | |
|
227 | # compatibility with pre-1.8 clients which were accidentally | |
|
228 | # sending raw binary nodes rather than utf-8-encoded hex | |
|
229 | if len(new) == 20 and new.encode('string-escape') != new: | |
|
230 | # looks like it could be a binary node | |
|
231 | try: | |
|
232 | u = new.decode('utf-8') | |
|
233 | new = encoding.tolocal(new) # but cleanly decodes as UTF-8 | |
|
234 | except UnicodeDecodeError: | |
|
235 | pass # binary, leave unmodified | |
|
236 | else: | |
|
237 | new = encoding.tolocal(new) # normal path | |
|
238 | ||
|
239 | r = pushkeymod.push(repo, | |
|
240 | encoding.tolocal(namespace), encoding.tolocal(key), | |
|
241 | encoding.tolocal(old), new) | |
|
232 | 242 | return '%s\n' % int(r) |
|
233 | 243 | |
|
234 | 244 | def _allowstream(ui): |
@@ -294,14 +294,18 b' class hginstallscripts(install_scripts):' | |||
|
294 | 294 | libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):] |
|
295 | 295 | |
|
296 | 296 | for outfile in self.outfiles: |
|
297 |
|
|
|
297 | fp = open(outfile, 'rb') | |
|
298 | data = fp.read() | |
|
299 | fp.close() | |
|
298 | 300 | |
|
299 | 301 | # skip binary files |
|
300 | 302 | if '\0' in data: |
|
301 | 303 | continue |
|
302 | 304 | |
|
303 | 305 | data = data.replace('@LIBDIR@', libdir.encode('string_escape')) |
|
304 |
open(outfile, 'wb') |
|
|
306 | fp = open(outfile, 'wb') | |
|
307 | fp.write(data) | |
|
308 | fp.close() | |
|
305 | 309 | |
|
306 | 310 | cmdclass = {'build_mo': hgbuildmo, |
|
307 | 311 | 'build_ext': hgbuildext, |
@@ -101,15 +101,6 b' def has_inotify():' | |||
|
101 | 101 | def has_fifo(): |
|
102 | 102 | return hasattr(os, "mkfifo") |
|
103 | 103 | |
|
104 | def has_hotshot(): | |
|
105 | try: | |
|
106 | # hotshot.stats tests hotshot and many problematic dependencies | |
|
107 | # like profile. | |
|
108 | import hotshot.stats | |
|
109 | return True | |
|
110 | except ImportError: | |
|
111 | return False | |
|
112 | ||
|
113 | 104 | def has_lsprof(): |
|
114 | 105 | try: |
|
115 | 106 | import _lsprof |
@@ -182,6 +173,8 b' def has_outer_repo():' | |||
|
182 | 173 | def has_ssl(): |
|
183 | 174 | try: |
|
184 | 175 | import ssl |
|
176 | import OpenSSL | |
|
177 | OpenSSL.SSL.Context | |
|
185 | 178 | return True |
|
186 | 179 | except ImportError: |
|
187 | 180 | return False |
@@ -198,7 +191,6 b' checks = {' | |||
|
198 | 191 | "fifo": (has_fifo, "named pipes"), |
|
199 | 192 | "git": (has_git, "git command line client"), |
|
200 | 193 | "gpg": (has_gpg, "gpg client"), |
|
201 | "hotshot": (has_hotshot, "python hotshot module"), | |
|
202 | 194 | "icasefs": (has_icasefs, "case insensitive file system"), |
|
203 | 195 | "inotify": (has_inotify, "inotify extension support"), |
|
204 | 196 | "lsprof": (has_lsprof, "python lsprof module"), |
@@ -1,12 +1,9 b'' | |||
|
1 | 1 | # simple script to be used in hooks |
|
2 | # copy it to the current directory when the test starts: | |
|
3 | # | |
|
4 | # cp "$TESTDIR"/printenv.py . | |
|
5 | 2 | # |
|
6 | 3 | # put something like this in the repo .hg/hgrc: |
|
7 | 4 | # |
|
8 | 5 | # [hooks] |
|
9 |
# changegroup = python |
|
|
6 | # changegroup = python "$TESTDIR"/printenv.py <hookname> [exit] [output] | |
|
10 | 7 | # |
|
11 | 8 | # - <hookname> is a mandatory argument (e.g. "changegroup") |
|
12 | 9 | # - [exit] is the exit code of the hook (default: 0) |
@@ -39,13 +36,6 b' env = [k for k, v in os.environ.iteritem' | |||
|
39 | 36 | if k.startswith("HG_") and v] |
|
40 | 37 | env.sort() |
|
41 | 38 | |
|
42 | # edit the variable part of the variable | |
|
43 | url = os.environ.get("HG_URL", "") | |
|
44 | if url.startswith("file:"): | |
|
45 | os.environ["HG_URL"] = "file:" | |
|
46 | elif url.startswith("remote:http"): | |
|
47 | os.environ["HG_URL"] = "remote:http" | |
|
48 | ||
|
49 | 39 | out.write("%s hook: " % name) |
|
50 | 40 | for v in env: |
|
51 | 41 | out.write("%s=%s " % (v, os.environ[v])) |
@@ -231,6 +231,8 b' def parseargs():' | |||
|
231 | 231 | if line and not line.startswith('#'): |
|
232 | 232 | blacklist[line] = filename |
|
233 | 233 | |
|
234 | f.close() | |
|
235 | ||
|
234 | 236 | options.blacklist = blacklist |
|
235 | 237 | |
|
236 | 238 | return (options, args) |
@@ -491,6 +493,8 b' def tsttest(test, options, replacements)' | |||
|
491 | 493 | # non-command/result - queue up for merged output |
|
492 | 494 | after.setdefault(pos, []).append(l) |
|
493 | 495 | |
|
496 | t.close() | |
|
497 | ||
|
494 | 498 | script.append('echo %s %s $?\n' % (salt, n + 1)) |
|
495 | 499 | |
|
496 | 500 | fd, name = tempfile.mkstemp(suffix='hg-tst') |
@@ -504,7 +508,8 b' def tsttest(test, options, replacements)' | |||
|
504 | 508 | vlog("# Running", cmd) |
|
505 | 509 | exitcode, output = run(cmd, options, replacements) |
|
506 | 510 | # do not merge output if skipped, return hghave message instead |
|
507 | if exitcode == SKIPPED_STATUS: | |
|
511 | # similarly, with --debug, output is None | |
|
512 | if exitcode == SKIPPED_STATUS or output is None: | |
|
508 | 513 | return exitcode, output |
|
509 | 514 | finally: |
|
510 | 515 | os.remove(name) |
@@ -579,6 +584,7 b' def tsttest(test, options, replacements)' | |||
|
579 | 584 | |
|
580 | 585 | return exitcode, postout |
|
581 | 586 | |
|
587 | wifexited = getattr(os, "WIFEXITED", lambda x: False) | |
|
582 | 588 | def run(cmd, options, replacements): |
|
583 | 589 | """Run command in a sub-process, capturing the output (stdout and stderr). |
|
584 | 590 | Return a tuple (exitcode, output). output is None in debug mode.""" |
@@ -593,7 +599,7 b' def run(cmd, options, replacements):' | |||
|
593 | 599 | tochild.close() |
|
594 | 600 | output = fromchild.read() |
|
595 | 601 | ret = fromchild.close() |
|
596 |
if ret |
|
|
602 | if ret is None: | |
|
597 | 603 | ret = 0 |
|
598 | 604 | else: |
|
599 | 605 | proc = Popen4(cmd) |
@@ -610,7 +616,7 b' def run(cmd, options, replacements):' | |||
|
610 | 616 | proc.tochild.close() |
|
611 | 617 | output = proc.fromchild.read() |
|
612 | 618 | ret = proc.wait() |
|
613 |
if |
|
|
619 | if wifexited(ret): | |
|
614 | 620 | ret = os.WEXITSTATUS(ret) |
|
615 | 621 | except Timeout: |
|
616 | 622 | vlog('# Process %d timed out - killing it' % proc.pid) |
@@ -713,7 +719,7 b' def runone(options, test, skips, fails):' | |||
|
713 | 719 | # If we're not in --debug mode and reference output file exists, |
|
714 | 720 | # check test output against it. |
|
715 | 721 | if options.debug: |
|
716 |
refout = None # to match |
|
|
722 | refout = None # to match "out is None" | |
|
717 | 723 | elif os.path.exists(ref): |
|
718 | 724 | f = open(ref, "r") |
|
719 | 725 | refout = splitnewlines(f.read()) |
@@ -925,7 +931,9 b' def runtests(options, tests):' | |||
|
925 | 931 | continue |
|
926 | 932 | |
|
927 | 933 | if options.keywords: |
|
928 |
|
|
|
934 | fp = open(test) | |
|
935 | t = fp.read().lower() + test.lower() | |
|
936 | fp.close() | |
|
929 | 937 | for k in options.keywords.lower().split(): |
|
930 | 938 | if k in t: |
|
931 | 939 | break |
@@ -1108,4 +1116,5 b' def main():' | |||
|
1108 | 1116 | time.sleep(1) |
|
1109 | 1117 | cleanup(options) |
|
1110 | 1118 | |
|
1111 | main() | |
|
1119 | if __name__ == '__main__': | |
|
1120 | main() |
This diff has been collapsed as it changes many lines, (1159 lines changed) Show them Hide them | |||
@@ -90,38 +90,38 b' Extension disabled for lack of a hook' | |||
|
90 | 90 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
91 | 91 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
92 | 92 | adding changesets |
|
93 |
bundling |
|
|
94 |
bundling |
|
|
95 |
bundling |
|
|
96 |
bundling |
|
|
97 |
bundling |
|
|
98 |
bundling |
|
|
99 |
bundling |
|
|
100 |
bundling |
|
|
101 |
bundling |
|
|
102 |
bundling |
|
|
103 |
bundling |
|
|
104 |
bundling manifests |
|
|
105 |
bundling manifests |
|
|
106 |
bundling manifests |
|
|
107 |
bundling manifests |
|
|
108 |
bundling manifests |
|
|
109 |
bundling manifests |
|
|
110 |
bundling manifests |
|
|
111 |
bundling manifests |
|
|
112 |
bundling manifests |
|
|
113 |
bundling |
|
|
114 |
bundling |
|
|
115 |
bundling |
|
|
116 |
bundling |
|
|
117 |
bundling |
|
|
118 |
bundling |
|
|
119 |
bundling |
|
|
120 |
bundling |
|
|
121 |
bundling |
|
|
122 |
bundling |
|
|
123 |
bundling |
|
|
124 |
bundling |
|
|
93 | bundling: 0 changesets | |
|
94 | bundling: 0 changesets | |
|
95 | bundling: 0 changesets | |
|
96 | bundling: 1 changesets | |
|
97 | bundling: 1 changesets | |
|
98 | bundling: 1 changesets | |
|
99 | bundling: 2 changesets | |
|
100 | bundling: 2 changesets | |
|
101 | bundling: 2 changesets | |
|
102 | bundling: 3 changesets | |
|
103 | bundling: 0/3 manifests (0.00%) | |
|
104 | bundling: 0/3 manifests (0.00%) | |
|
105 | bundling: 0/3 manifests (0.00%) | |
|
106 | bundling: 1/3 manifests (33.33%) | |
|
107 | bundling: 1/3 manifests (33.33%) | |
|
108 | bundling: 1/3 manifests (33.33%) | |
|
109 | bundling: 2/3 manifests (66.67%) | |
|
110 | bundling: 2/3 manifests (66.67%) | |
|
111 | bundling: 2/3 manifests (66.67%) | |
|
112 | bundling: 3/3 manifests (100.00%) | |
|
113 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
114 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
115 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
116 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
117 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
118 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
119 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
120 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
121 | bundling: quux/file.py 2/3 files (66.67%) | |
|
122 | bundling: quux/file.py 2/3 files (66.67%) | |
|
123 | bundling: quux/file.py 2/3 files (66.67%) | |
|
124 | bundling: quux/file.py 2/3 files (66.67%) | |
|
125 | 125 | changesets: 1 chunks |
|
126 | 126 | add changeset ef1ea85a6374 |
|
127 | 127 | changesets: 2 chunks |
@@ -141,6 +141,7 b' Extension disabled for lack of a hook' | |||
|
141 | 141 | files: 3/3 chunks (100.00%) |
|
142 | 142 | added 3 changesets with 3 changes to 3 files |
|
143 | 143 | updating the branch cache |
|
144 | checking for updated bookmarks | |
|
144 | 145 | rolling back to revision 0 (undo push) |
|
145 | 146 | 0:6675d58eff77 |
|
146 | 147 | |
@@ -166,38 +167,38 b' Extension disabled for lack of acl.sourc' | |||
|
166 | 167 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
167 | 168 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
168 | 169 | adding changesets |
|
169 |
bundling |
|
|
170 |
bundling |
|
|
171 |
bundling |
|
|
172 |
bundling |
|
|
173 |
bundling |
|
|
174 |
bundling |
|
|
175 |
bundling |
|
|
176 |
bundling |
|
|
177 |
bundling |
|
|
178 |
bundling |
|
|
179 |
bundling |
|
|
180 |
bundling manifests |
|
|
181 |
bundling manifests |
|
|
182 |
bundling manifests |
|
|
183 |
bundling manifests |
|
|
184 |
bundling manifests |
|
|
185 |
bundling manifests |
|
|
186 |
bundling manifests |
|
|
187 |
bundling manifests |
|
|
188 |
bundling manifests |
|
|
189 |
bundling |
|
|
190 |
bundling |
|
|
191 |
bundling |
|
|
192 |
bundling |
|
|
193 |
bundling |
|
|
194 |
bundling |
|
|
195 |
bundling |
|
|
196 |
bundling |
|
|
197 |
bundling |
|
|
198 |
bundling |
|
|
199 |
bundling |
|
|
200 |
bundling |
|
|
170 | bundling: 0 changesets | |
|
171 | bundling: 0 changesets | |
|
172 | bundling: 0 changesets | |
|
173 | bundling: 1 changesets | |
|
174 | bundling: 1 changesets | |
|
175 | bundling: 1 changesets | |
|
176 | bundling: 2 changesets | |
|
177 | bundling: 2 changesets | |
|
178 | bundling: 2 changesets | |
|
179 | bundling: 3 changesets | |
|
180 | bundling: 0/3 manifests (0.00%) | |
|
181 | bundling: 0/3 manifests (0.00%) | |
|
182 | bundling: 0/3 manifests (0.00%) | |
|
183 | bundling: 1/3 manifests (33.33%) | |
|
184 | bundling: 1/3 manifests (33.33%) | |
|
185 | bundling: 1/3 manifests (33.33%) | |
|
186 | bundling: 2/3 manifests (66.67%) | |
|
187 | bundling: 2/3 manifests (66.67%) | |
|
188 | bundling: 2/3 manifests (66.67%) | |
|
189 | bundling: 3/3 manifests (100.00%) | |
|
190 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
191 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
192 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
193 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
194 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
195 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
196 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
197 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
198 | bundling: quux/file.py 2/3 files (66.67%) | |
|
199 | bundling: quux/file.py 2/3 files (66.67%) | |
|
200 | bundling: quux/file.py 2/3 files (66.67%) | |
|
201 | bundling: quux/file.py 2/3 files (66.67%) | |
|
201 | 202 | changesets: 1 chunks |
|
202 | 203 | add changeset ef1ea85a6374 |
|
203 | 204 | changesets: 2 chunks |
@@ -219,6 +220,7 b' Extension disabled for lack of acl.sourc' | |||
|
219 | 220 | calling hook pretxnchangegroup.acl: hgext.acl.hook |
|
220 | 221 | acl: changes have source "push" - skipping |
|
221 | 222 | updating the branch cache |
|
223 | checking for updated bookmarks | |
|
222 | 224 | rolling back to revision 0 (undo push) |
|
223 | 225 | 0:6675d58eff77 |
|
224 | 226 | |
@@ -245,38 +247,38 b' No [acl.allow]/[acl.deny]' | |||
|
245 | 247 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
246 | 248 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
247 | 249 | adding changesets |
|
248 |
bundling |
|
|
249 |
bundling |
|
|
250 |
bundling |
|
|
251 |
bundling |
|
|
252 |
bundling |
|
|
253 |
bundling |
|
|
254 |
bundling |
|
|
255 |
bundling |
|
|
256 |
bundling |
|
|
257 |
bundling |
|
|
258 |
bundling |
|
|
259 |
bundling manifests |
|
|
260 |
bundling manifests |
|
|
261 |
bundling manifests |
|
|
262 |
bundling manifests |
|
|
263 |
bundling manifests |
|
|
264 |
bundling manifests |
|
|
265 |
bundling manifests |
|
|
266 |
bundling manifests |
|
|
267 |
bundling manifests |
|
|
268 |
bundling |
|
|
269 |
bundling |
|
|
270 |
bundling |
|
|
271 |
bundling |
|
|
272 |
bundling |
|
|
273 |
bundling |
|
|
274 |
bundling |
|
|
275 |
bundling |
|
|
276 |
bundling |
|
|
277 |
bundling |
|
|
278 |
bundling |
|
|
279 |
bundling |
|
|
250 | bundling: 0 changesets | |
|
251 | bundling: 0 changesets | |
|
252 | bundling: 0 changesets | |
|
253 | bundling: 1 changesets | |
|
254 | bundling: 1 changesets | |
|
255 | bundling: 1 changesets | |
|
256 | bundling: 2 changesets | |
|
257 | bundling: 2 changesets | |
|
258 | bundling: 2 changesets | |
|
259 | bundling: 3 changesets | |
|
260 | bundling: 0/3 manifests (0.00%) | |
|
261 | bundling: 0/3 manifests (0.00%) | |
|
262 | bundling: 0/3 manifests (0.00%) | |
|
263 | bundling: 1/3 manifests (33.33%) | |
|
264 | bundling: 1/3 manifests (33.33%) | |
|
265 | bundling: 1/3 manifests (33.33%) | |
|
266 | bundling: 2/3 manifests (66.67%) | |
|
267 | bundling: 2/3 manifests (66.67%) | |
|
268 | bundling: 2/3 manifests (66.67%) | |
|
269 | bundling: 3/3 manifests (100.00%) | |
|
270 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
271 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
272 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
273 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
274 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
275 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
276 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
277 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
278 | bundling: quux/file.py 2/3 files (66.67%) | |
|
279 | bundling: quux/file.py 2/3 files (66.67%) | |
|
280 | bundling: quux/file.py 2/3 files (66.67%) | |
|
281 | bundling: quux/file.py 2/3 files (66.67%) | |
|
280 | 282 | changesets: 1 chunks |
|
281 | 283 | add changeset ef1ea85a6374 |
|
282 | 284 | changesets: 2 chunks |
@@ -307,6 +309,7 b' No [acl.allow]/[acl.deny]' | |||
|
307 | 309 | acl: branch access granted: "911600dab2ae" on branch "default" |
|
308 | 310 | acl: allowing changeset 911600dab2ae |
|
309 | 311 | updating the branch cache |
|
312 | checking for updated bookmarks | |
|
310 | 313 | rolling back to revision 0 (undo push) |
|
311 | 314 | 0:6675d58eff77 |
|
312 | 315 | |
@@ -333,38 +336,38 b' Empty [acl.allow]' | |||
|
333 | 336 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
334 | 337 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
335 | 338 | adding changesets |
|
336 |
bundling |
|
|
337 |
bundling |
|
|
338 |
bundling |
|
|
339 |
bundling |
|
|
340 |
bundling |
|
|
341 |
bundling |
|
|
342 |
bundling |
|
|
343 |
bundling |
|
|
344 |
bundling |
|
|
345 |
bundling |
|
|
346 |
bundling |
|
|
347 |
bundling manifests |
|
|
348 |
bundling manifests |
|
|
349 |
bundling manifests |
|
|
350 |
bundling manifests |
|
|
351 |
bundling manifests |
|
|
352 |
bundling manifests |
|
|
353 |
bundling manifests |
|
|
354 |
bundling manifests |
|
|
355 |
bundling manifests |
|
|
356 |
bundling |
|
|
357 |
bundling |
|
|
358 |
bundling |
|
|
359 |
bundling |
|
|
360 |
bundling |
|
|
361 |
bundling |
|
|
362 |
bundling |
|
|
363 |
bundling |
|
|
364 |
bundling |
|
|
365 |
bundling |
|
|
366 |
bundling |
|
|
367 |
bundling |
|
|
339 | bundling: 0 changesets | |
|
340 | bundling: 0 changesets | |
|
341 | bundling: 0 changesets | |
|
342 | bundling: 1 changesets | |
|
343 | bundling: 1 changesets | |
|
344 | bundling: 1 changesets | |
|
345 | bundling: 2 changesets | |
|
346 | bundling: 2 changesets | |
|
347 | bundling: 2 changesets | |
|
348 | bundling: 3 changesets | |
|
349 | bundling: 0/3 manifests (0.00%) | |
|
350 | bundling: 0/3 manifests (0.00%) | |
|
351 | bundling: 0/3 manifests (0.00%) | |
|
352 | bundling: 1/3 manifests (33.33%) | |
|
353 | bundling: 1/3 manifests (33.33%) | |
|
354 | bundling: 1/3 manifests (33.33%) | |
|
355 | bundling: 2/3 manifests (66.67%) | |
|
356 | bundling: 2/3 manifests (66.67%) | |
|
357 | bundling: 2/3 manifests (66.67%) | |
|
358 | bundling: 3/3 manifests (100.00%) | |
|
359 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
360 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
361 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
362 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
363 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
364 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
365 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
366 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
367 | bundling: quux/file.py 2/3 files (66.67%) | |
|
368 | bundling: quux/file.py 2/3 files (66.67%) | |
|
369 | bundling: quux/file.py 2/3 files (66.67%) | |
|
370 | bundling: quux/file.py 2/3 files (66.67%) | |
|
368 | 371 | changesets: 1 chunks |
|
369 | 372 | add changeset ef1ea85a6374 |
|
370 | 373 | changesets: 2 chunks |
@@ -420,38 +423,38 b' fred is allowed inside foo/' | |||
|
420 | 423 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
421 | 424 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
422 | 425 | adding changesets |
|
423 |
bundling |
|
|
424 |
bundling |
|
|
425 |
bundling |
|
|
426 |
bundling |
|
|
427 |
bundling |
|
|
428 |
bundling |
|
|
429 |
bundling |
|
|
430 |
bundling |
|
|
431 |
bundling |
|
|
432 |
bundling |
|
|
433 |
bundling |
|
|
434 |
bundling manifests |
|
|
435 |
bundling manifests |
|
|
436 |
bundling manifests |
|
|
437 |
bundling manifests |
|
|
438 |
bundling manifests |
|
|
439 |
bundling manifests |
|
|
440 |
bundling manifests |
|
|
441 |
bundling manifests |
|
|
442 |
bundling manifests |
|
|
443 |
bundling |
|
|
444 |
bundling |
|
|
445 |
bundling |
|
|
446 |
bundling |
|
|
447 |
bundling |
|
|
448 |
bundling |
|
|
449 |
bundling |
|
|
450 |
bundling |
|
|
451 |
bundling |
|
|
452 |
bundling |
|
|
453 |
bundling |
|
|
454 |
bundling |
|
|
426 | bundling: 0 changesets | |
|
427 | bundling: 0 changesets | |
|
428 | bundling: 0 changesets | |
|
429 | bundling: 1 changesets | |
|
430 | bundling: 1 changesets | |
|
431 | bundling: 1 changesets | |
|
432 | bundling: 2 changesets | |
|
433 | bundling: 2 changesets | |
|
434 | bundling: 2 changesets | |
|
435 | bundling: 3 changesets | |
|
436 | bundling: 0/3 manifests (0.00%) | |
|
437 | bundling: 0/3 manifests (0.00%) | |
|
438 | bundling: 0/3 manifests (0.00%) | |
|
439 | bundling: 1/3 manifests (33.33%) | |
|
440 | bundling: 1/3 manifests (33.33%) | |
|
441 | bundling: 1/3 manifests (33.33%) | |
|
442 | bundling: 2/3 manifests (66.67%) | |
|
443 | bundling: 2/3 manifests (66.67%) | |
|
444 | bundling: 2/3 manifests (66.67%) | |
|
445 | bundling: 3/3 manifests (100.00%) | |
|
446 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
447 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
448 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
449 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
450 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
451 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
452 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
453 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
454 | bundling: quux/file.py 2/3 files (66.67%) | |
|
455 | bundling: quux/file.py 2/3 files (66.67%) | |
|
456 | bundling: quux/file.py 2/3 files (66.67%) | |
|
457 | bundling: quux/file.py 2/3 files (66.67%) | |
|
455 | 458 | changesets: 1 chunks |
|
456 | 459 | add changeset ef1ea85a6374 |
|
457 | 460 | changesets: 2 chunks |
@@ -512,38 +515,38 b' Empty [acl.deny]' | |||
|
512 | 515 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
513 | 516 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
514 | 517 | adding changesets |
|
515 |
bundling |
|
|
516 |
bundling |
|
|
517 |
bundling |
|
|
518 |
bundling |
|
|
519 |
bundling |
|
|
520 |
bundling |
|
|
521 |
bundling |
|
|
522 |
bundling |
|
|
523 |
bundling |
|
|
524 |
bundling |
|
|
525 |
bundling |
|
|
526 |
bundling manifests |
|
|
527 |
bundling manifests |
|
|
528 |
bundling manifests |
|
|
529 |
bundling manifests |
|
|
530 |
bundling manifests |
|
|
531 |
bundling manifests |
|
|
532 |
bundling manifests |
|
|
533 |
bundling manifests |
|
|
534 |
bundling manifests |
|
|
535 |
bundling |
|
|
536 |
bundling |
|
|
537 |
bundling |
|
|
538 |
bundling |
|
|
539 |
bundling |
|
|
540 |
bundling |
|
|
541 |
bundling |
|
|
542 |
bundling |
|
|
543 |
bundling |
|
|
544 |
bundling |
|
|
545 |
bundling |
|
|
546 |
bundling |
|
|
518 | bundling: 0 changesets | |
|
519 | bundling: 0 changesets | |
|
520 | bundling: 0 changesets | |
|
521 | bundling: 1 changesets | |
|
522 | bundling: 1 changesets | |
|
523 | bundling: 1 changesets | |
|
524 | bundling: 2 changesets | |
|
525 | bundling: 2 changesets | |
|
526 | bundling: 2 changesets | |
|
527 | bundling: 3 changesets | |
|
528 | bundling: 0/3 manifests (0.00%) | |
|
529 | bundling: 0/3 manifests (0.00%) | |
|
530 | bundling: 0/3 manifests (0.00%) | |
|
531 | bundling: 1/3 manifests (33.33%) | |
|
532 | bundling: 1/3 manifests (33.33%) | |
|
533 | bundling: 1/3 manifests (33.33%) | |
|
534 | bundling: 2/3 manifests (66.67%) | |
|
535 | bundling: 2/3 manifests (66.67%) | |
|
536 | bundling: 2/3 manifests (66.67%) | |
|
537 | bundling: 3/3 manifests (100.00%) | |
|
538 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
539 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
540 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
541 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
542 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
543 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
544 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
545 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
546 | bundling: quux/file.py 2/3 files (66.67%) | |
|
547 | bundling: quux/file.py 2/3 files (66.67%) | |
|
548 | bundling: quux/file.py 2/3 files (66.67%) | |
|
549 | bundling: quux/file.py 2/3 files (66.67%) | |
|
547 | 550 | changesets: 1 chunks |
|
548 | 551 | add changeset ef1ea85a6374 |
|
549 | 552 | changesets: 2 chunks |
@@ -601,38 +604,38 b' fred is allowed inside foo/, but not foo' | |||
|
601 | 604 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
602 | 605 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
603 | 606 | adding changesets |
|
604 |
bundling |
|
|
605 |
bundling |
|
|
606 |
bundling |
|
|
607 |
bundling |
|
|
608 |
bundling |
|
|
609 |
bundling |
|
|
610 |
bundling |
|
|
611 |
bundling |
|
|
612 |
bundling |
|
|
613 |
bundling |
|
|
614 |
bundling |
|
|
615 |
bundling manifests |
|
|
616 |
bundling manifests |
|
|
617 |
bundling manifests |
|
|
618 |
bundling manifests |
|
|
619 |
bundling manifests |
|
|
620 |
bundling manifests |
|
|
621 |
bundling manifests |
|
|
622 |
bundling manifests |
|
|
623 |
bundling manifests |
|
|
624 |
bundling |
|
|
625 |
bundling |
|
|
626 |
bundling |
|
|
627 |
bundling |
|
|
628 |
bundling |
|
|
629 |
bundling |
|
|
630 |
bundling |
|
|
631 |
bundling |
|
|
632 |
bundling |
|
|
633 |
bundling |
|
|
634 |
bundling |
|
|
635 |
bundling |
|
|
607 | bundling: 0 changesets | |
|
608 | bundling: 0 changesets | |
|
609 | bundling: 0 changesets | |
|
610 | bundling: 1 changesets | |
|
611 | bundling: 1 changesets | |
|
612 | bundling: 1 changesets | |
|
613 | bundling: 2 changesets | |
|
614 | bundling: 2 changesets | |
|
615 | bundling: 2 changesets | |
|
616 | bundling: 3 changesets | |
|
617 | bundling: 0/3 manifests (0.00%) | |
|
618 | bundling: 0/3 manifests (0.00%) | |
|
619 | bundling: 0/3 manifests (0.00%) | |
|
620 | bundling: 1/3 manifests (33.33%) | |
|
621 | bundling: 1/3 manifests (33.33%) | |
|
622 | bundling: 1/3 manifests (33.33%) | |
|
623 | bundling: 2/3 manifests (66.67%) | |
|
624 | bundling: 2/3 manifests (66.67%) | |
|
625 | bundling: 2/3 manifests (66.67%) | |
|
626 | bundling: 3/3 manifests (100.00%) | |
|
627 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
628 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
629 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
630 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
631 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
632 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
633 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
634 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
635 | bundling: quux/file.py 2/3 files (66.67%) | |
|
636 | bundling: quux/file.py 2/3 files (66.67%) | |
|
637 | bundling: quux/file.py 2/3 files (66.67%) | |
|
638 | bundling: quux/file.py 2/3 files (66.67%) | |
|
636 | 639 | changesets: 1 chunks |
|
637 | 640 | add changeset ef1ea85a6374 |
|
638 | 641 | changesets: 2 chunks |
@@ -695,38 +698,38 b' fred is allowed inside foo/, but not foo' | |||
|
695 | 698 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
696 | 699 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
697 | 700 | adding changesets |
|
698 |
bundling |
|
|
699 |
bundling |
|
|
700 |
bundling |
|
|
701 |
bundling |
|
|
702 |
bundling |
|
|
703 |
bundling |
|
|
704 |
bundling |
|
|
705 |
bundling |
|
|
706 |
bundling |
|
|
707 |
bundling |
|
|
708 |
bundling |
|
|
709 |
bundling manifests |
|
|
710 |
bundling manifests |
|
|
711 |
bundling manifests |
|
|
712 |
bundling manifests |
|
|
713 |
bundling manifests |
|
|
714 |
bundling manifests |
|
|
715 |
bundling manifests |
|
|
716 |
bundling manifests |
|
|
717 |
bundling manifests |
|
|
718 |
bundling |
|
|
719 |
bundling |
|
|
720 |
bundling |
|
|
721 |
bundling |
|
|
722 |
bundling |
|
|
723 |
bundling |
|
|
724 |
bundling |
|
|
725 |
bundling |
|
|
726 |
bundling |
|
|
727 |
bundling |
|
|
728 |
bundling |
|
|
729 |
bundling |
|
|
701 | bundling: 0 changesets | |
|
702 | bundling: 0 changesets | |
|
703 | bundling: 0 changesets | |
|
704 | bundling: 1 changesets | |
|
705 | bundling: 1 changesets | |
|
706 | bundling: 1 changesets | |
|
707 | bundling: 2 changesets | |
|
708 | bundling: 2 changesets | |
|
709 | bundling: 2 changesets | |
|
710 | bundling: 3 changesets | |
|
711 | bundling: 0/3 manifests (0.00%) | |
|
712 | bundling: 0/3 manifests (0.00%) | |
|
713 | bundling: 0/3 manifests (0.00%) | |
|
714 | bundling: 1/3 manifests (33.33%) | |
|
715 | bundling: 1/3 manifests (33.33%) | |
|
716 | bundling: 1/3 manifests (33.33%) | |
|
717 | bundling: 2/3 manifests (66.67%) | |
|
718 | bundling: 2/3 manifests (66.67%) | |
|
719 | bundling: 2/3 manifests (66.67%) | |
|
720 | bundling: 3/3 manifests (100.00%) | |
|
721 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
722 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
723 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
724 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
725 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
726 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
727 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
728 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
729 | bundling: quux/file.py 2/3 files (66.67%) | |
|
730 | bundling: quux/file.py 2/3 files (66.67%) | |
|
731 | bundling: quux/file.py 2/3 files (66.67%) | |
|
732 | bundling: quux/file.py 2/3 files (66.67%) | |
|
730 | 733 | changesets: 1 chunks |
|
731 | 734 | add changeset ef1ea85a6374 |
|
732 | 735 | changesets: 2 chunks |
@@ -786,38 +789,38 b' fred is allowed inside foo/, but not foo' | |||
|
786 | 789 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
787 | 790 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
788 | 791 | adding changesets |
|
789 |
bundling |
|
|
790 |
bundling |
|
|
791 |
bundling |
|
|
792 |
bundling |
|
|
793 |
bundling |
|
|
794 |
bundling |
|
|
795 |
bundling |
|
|
796 |
bundling |
|
|
797 |
bundling |
|
|
798 |
bundling |
|
|
799 |
bundling |
|
|
800 |
bundling manifests |
|
|
801 |
bundling manifests |
|
|
802 |
bundling manifests |
|
|
803 |
bundling manifests |
|
|
804 |
bundling manifests |
|
|
805 |
bundling manifests |
|
|
806 |
bundling manifests |
|
|
807 |
bundling manifests |
|
|
808 |
bundling manifests |
|
|
809 |
bundling |
|
|
810 |
bundling |
|
|
811 |
bundling |
|
|
812 |
bundling |
|
|
813 |
bundling |
|
|
814 |
bundling |
|
|
815 |
bundling |
|
|
816 |
bundling |
|
|
817 |
bundling |
|
|
818 |
bundling |
|
|
819 |
bundling |
|
|
820 |
bundling |
|
|
792 | bundling: 0 changesets | |
|
793 | bundling: 0 changesets | |
|
794 | bundling: 0 changesets | |
|
795 | bundling: 1 changesets | |
|
796 | bundling: 1 changesets | |
|
797 | bundling: 1 changesets | |
|
798 | bundling: 2 changesets | |
|
799 | bundling: 2 changesets | |
|
800 | bundling: 2 changesets | |
|
801 | bundling: 3 changesets | |
|
802 | bundling: 0/3 manifests (0.00%) | |
|
803 | bundling: 0/3 manifests (0.00%) | |
|
804 | bundling: 0/3 manifests (0.00%) | |
|
805 | bundling: 1/3 manifests (33.33%) | |
|
806 | bundling: 1/3 manifests (33.33%) | |
|
807 | bundling: 1/3 manifests (33.33%) | |
|
808 | bundling: 2/3 manifests (66.67%) | |
|
809 | bundling: 2/3 manifests (66.67%) | |
|
810 | bundling: 2/3 manifests (66.67%) | |
|
811 | bundling: 3/3 manifests (100.00%) | |
|
812 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
813 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
814 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
815 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
816 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
817 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
818 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
819 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
820 | bundling: quux/file.py 2/3 files (66.67%) | |
|
821 | bundling: quux/file.py 2/3 files (66.67%) | |
|
822 | bundling: quux/file.py 2/3 files (66.67%) | |
|
823 | bundling: quux/file.py 2/3 files (66.67%) | |
|
821 | 824 | changesets: 1 chunks |
|
822 | 825 | add changeset ef1ea85a6374 |
|
823 | 826 | changesets: 2 chunks |
@@ -879,38 +882,38 b' barney is allowed everywhere' | |||
|
879 | 882 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
880 | 883 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
881 | 884 | adding changesets |
|
882 |
bundling |
|
|
883 |
bundling |
|
|
884 |
bundling |
|
|
885 |
bundling |
|
|
886 |
bundling |
|
|
887 |
bundling |
|
|
888 |
bundling |
|
|
889 |
bundling |
|
|
890 |
bundling |
|
|
891 |
bundling |
|
|
892 |
bundling |
|
|
893 |
bundling manifests |
|
|
894 |
bundling manifests |
|
|
895 |
bundling manifests |
|
|
896 |
bundling manifests |
|
|
897 |
bundling manifests |
|
|
898 |
bundling manifests |
|
|
899 |
bundling manifests |
|
|
900 |
bundling manifests |
|
|
901 |
bundling manifests |
|
|
902 |
bundling |
|
|
903 |
bundling |
|
|
904 |
bundling |
|
|
905 |
bundling |
|
|
906 |
bundling |
|
|
907 |
bundling |
|
|
908 |
bundling |
|
|
909 |
bundling |
|
|
910 |
bundling |
|
|
911 |
bundling |
|
|
912 |
bundling |
|
|
913 |
bundling |
|
|
885 | bundling: 0 changesets | |
|
886 | bundling: 0 changesets | |
|
887 | bundling: 0 changesets | |
|
888 | bundling: 1 changesets | |
|
889 | bundling: 1 changesets | |
|
890 | bundling: 1 changesets | |
|
891 | bundling: 2 changesets | |
|
892 | bundling: 2 changesets | |
|
893 | bundling: 2 changesets | |
|
894 | bundling: 3 changesets | |
|
895 | bundling: 0/3 manifests (0.00%) | |
|
896 | bundling: 0/3 manifests (0.00%) | |
|
897 | bundling: 0/3 manifests (0.00%) | |
|
898 | bundling: 1/3 manifests (33.33%) | |
|
899 | bundling: 1/3 manifests (33.33%) | |
|
900 | bundling: 1/3 manifests (33.33%) | |
|
901 | bundling: 2/3 manifests (66.67%) | |
|
902 | bundling: 2/3 manifests (66.67%) | |
|
903 | bundling: 2/3 manifests (66.67%) | |
|
904 | bundling: 3/3 manifests (100.00%) | |
|
905 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
906 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
907 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
908 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
909 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
910 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
911 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
912 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
913 | bundling: quux/file.py 2/3 files (66.67%) | |
|
914 | bundling: quux/file.py 2/3 files (66.67%) | |
|
915 | bundling: quux/file.py 2/3 files (66.67%) | |
|
916 | bundling: quux/file.py 2/3 files (66.67%) | |
|
914 | 917 | changesets: 1 chunks |
|
915 | 918 | add changeset ef1ea85a6374 |
|
916 | 919 | changesets: 2 chunks |
@@ -941,6 +944,7 b' barney is allowed everywhere' | |||
|
941 | 944 | acl: branch access granted: "911600dab2ae" on branch "default" |
|
942 | 945 | acl: allowing changeset 911600dab2ae |
|
943 | 946 | updating the branch cache |
|
947 | checking for updated bookmarks | |
|
944 | 948 | rolling back to revision 0 (undo push) |
|
945 | 949 | 0:6675d58eff77 |
|
946 | 950 | |
@@ -974,38 +978,38 b' wilma can change files with a .txt exten' | |||
|
974 | 978 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
975 | 979 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
976 | 980 | adding changesets |
|
977 |
bundling |
|
|
978 |
bundling |
|
|
979 |
bundling |
|
|
980 |
bundling |
|
|
981 |
bundling |
|
|
982 |
bundling |
|
|
983 |
bundling |
|
|
984 |
bundling |
|
|
985 |
bundling |
|
|
986 |
bundling |
|
|
987 |
bundling |
|
|
988 |
bundling manifests |
|
|
989 |
bundling manifests |
|
|
990 |
bundling manifests |
|
|
991 |
bundling manifests |
|
|
992 |
bundling manifests |
|
|
993 |
bundling manifests |
|
|
994 |
bundling manifests |
|
|
995 |
bundling manifests |
|
|
996 |
bundling manifests |
|
|
997 |
bundling |
|
|
998 |
bundling |
|
|
999 |
bundling |
|
|
1000 |
bundling |
|
|
1001 |
bundling |
|
|
1002 |
bundling |
|
|
1003 |
bundling |
|
|
1004 |
bundling |
|
|
1005 |
bundling |
|
|
1006 |
bundling |
|
|
1007 |
bundling |
|
|
1008 |
bundling |
|
|
981 | bundling: 0 changesets | |
|
982 | bundling: 0 changesets | |
|
983 | bundling: 0 changesets | |
|
984 | bundling: 1 changesets | |
|
985 | bundling: 1 changesets | |
|
986 | bundling: 1 changesets | |
|
987 | bundling: 2 changesets | |
|
988 | bundling: 2 changesets | |
|
989 | bundling: 2 changesets | |
|
990 | bundling: 3 changesets | |
|
991 | bundling: 0/3 manifests (0.00%) | |
|
992 | bundling: 0/3 manifests (0.00%) | |
|
993 | bundling: 0/3 manifests (0.00%) | |
|
994 | bundling: 1/3 manifests (33.33%) | |
|
995 | bundling: 1/3 manifests (33.33%) | |
|
996 | bundling: 1/3 manifests (33.33%) | |
|
997 | bundling: 2/3 manifests (66.67%) | |
|
998 | bundling: 2/3 manifests (66.67%) | |
|
999 | bundling: 2/3 manifests (66.67%) | |
|
1000 | bundling: 3/3 manifests (100.00%) | |
|
1001 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1002 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1003 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1004 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1005 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1006 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1007 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1008 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1009 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1010 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1011 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1012 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1009 | 1013 | changesets: 1 chunks |
|
1010 | 1014 | add changeset ef1ea85a6374 |
|
1011 | 1015 | changesets: 2 chunks |
@@ -1074,38 +1078,38 b' file specified by acl.config does not ex' | |||
|
1074 | 1078 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1075 | 1079 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1076 | 1080 | adding changesets |
|
1077 |
bundling |
|
|
1078 |
bundling |
|
|
1079 |
bundling |
|
|
1080 |
bundling |
|
|
1081 |
bundling |
|
|
1082 |
bundling |
|
|
1083 |
bundling |
|
|
1084 |
bundling |
|
|
1085 |
bundling |
|
|
1086 |
bundling |
|
|
1087 |
bundling |
|
|
1088 |
bundling manifests |
|
|
1089 |
bundling manifests |
|
|
1090 |
bundling manifests |
|
|
1091 |
bundling manifests |
|
|
1092 |
bundling manifests |
|
|
1093 |
bundling manifests |
|
|
1094 |
bundling manifests |
|
|
1095 |
bundling manifests |
|
|
1096 |
bundling manifests |
|
|
1097 |
bundling |
|
|
1098 |
bundling |
|
|
1099 |
bundling |
|
|
1100 |
bundling |
|
|
1101 |
bundling |
|
|
1102 |
bundling |
|
|
1103 |
bundling |
|
|
1104 |
bundling |
|
|
1105 |
bundling |
|
|
1106 |
bundling |
|
|
1107 |
bundling |
|
|
1108 |
bundling |
|
|
1081 | bundling: 0 changesets | |
|
1082 | bundling: 0 changesets | |
|
1083 | bundling: 0 changesets | |
|
1084 | bundling: 1 changesets | |
|
1085 | bundling: 1 changesets | |
|
1086 | bundling: 1 changesets | |
|
1087 | bundling: 2 changesets | |
|
1088 | bundling: 2 changesets | |
|
1089 | bundling: 2 changesets | |
|
1090 | bundling: 3 changesets | |
|
1091 | bundling: 0/3 manifests (0.00%) | |
|
1092 | bundling: 0/3 manifests (0.00%) | |
|
1093 | bundling: 0/3 manifests (0.00%) | |
|
1094 | bundling: 1/3 manifests (33.33%) | |
|
1095 | bundling: 1/3 manifests (33.33%) | |
|
1096 | bundling: 1/3 manifests (33.33%) | |
|
1097 | bundling: 2/3 manifests (66.67%) | |
|
1098 | bundling: 2/3 manifests (66.67%) | |
|
1099 | bundling: 2/3 manifests (66.67%) | |
|
1100 | bundling: 3/3 manifests (100.00%) | |
|
1101 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1102 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1103 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1104 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1105 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1106 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1107 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1108 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1109 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1110 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1111 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1112 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1109 | 1113 | changesets: 1 chunks |
|
1110 | 1114 | add changeset ef1ea85a6374 |
|
1111 | 1115 | changesets: 2 chunks |
@@ -1168,38 +1172,38 b' betty is allowed inside foo/ by a acl.co' | |||
|
1168 | 1172 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1169 | 1173 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1170 | 1174 | adding changesets |
|
1171 |
bundling |
|
|
1172 |
bundling |
|
|
1173 |
bundling |
|
|
1174 |
bundling |
|
|
1175 |
bundling |
|
|
1176 |
bundling |
|
|
1177 |
bundling |
|
|
1178 |
bundling |
|
|
1179 |
bundling |
|
|
1180 |
bundling |
|
|
1181 |
bundling |
|
|
1182 |
bundling manifests |
|
|
1183 |
bundling manifests |
|
|
1184 |
bundling manifests |
|
|
1185 |
bundling manifests |
|
|
1186 |
bundling manifests |
|
|
1187 |
bundling manifests |
|
|
1188 |
bundling manifests |
|
|
1189 |
bundling manifests |
|
|
1190 |
bundling manifests |
|
|
1191 |
bundling |
|
|
1192 |
bundling |
|
|
1193 |
bundling |
|
|
1194 |
bundling |
|
|
1195 |
bundling |
|
|
1196 |
bundling |
|
|
1197 |
bundling |
|
|
1198 |
bundling |
|
|
1199 |
bundling |
|
|
1200 |
bundling |
|
|
1201 |
bundling |
|
|
1202 |
bundling |
|
|
1175 | bundling: 0 changesets | |
|
1176 | bundling: 0 changesets | |
|
1177 | bundling: 0 changesets | |
|
1178 | bundling: 1 changesets | |
|
1179 | bundling: 1 changesets | |
|
1180 | bundling: 1 changesets | |
|
1181 | bundling: 2 changesets | |
|
1182 | bundling: 2 changesets | |
|
1183 | bundling: 2 changesets | |
|
1184 | bundling: 3 changesets | |
|
1185 | bundling: 0/3 manifests (0.00%) | |
|
1186 | bundling: 0/3 manifests (0.00%) | |
|
1187 | bundling: 0/3 manifests (0.00%) | |
|
1188 | bundling: 1/3 manifests (33.33%) | |
|
1189 | bundling: 1/3 manifests (33.33%) | |
|
1190 | bundling: 1/3 manifests (33.33%) | |
|
1191 | bundling: 2/3 manifests (66.67%) | |
|
1192 | bundling: 2/3 manifests (66.67%) | |
|
1193 | bundling: 2/3 manifests (66.67%) | |
|
1194 | bundling: 3/3 manifests (100.00%) | |
|
1195 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1196 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1197 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1198 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1199 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1200 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1201 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1202 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1203 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1204 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1205 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1206 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1203 | 1207 | changesets: 1 chunks |
|
1204 | 1208 | add changeset ef1ea85a6374 |
|
1205 | 1209 | changesets: 2 chunks |
@@ -1274,38 +1278,38 b' acl.config can set only [acl.allow]/[acl' | |||
|
1274 | 1278 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1275 | 1279 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1276 | 1280 | adding changesets |
|
1277 |
bundling |
|
|
1278 |
bundling |
|
|
1279 |
bundling |
|
|
1280 |
bundling |
|
|
1281 |
bundling |
|
|
1282 |
bundling |
|
|
1283 |
bundling |
|
|
1284 |
bundling |
|
|
1285 |
bundling |
|
|
1286 |
bundling |
|
|
1287 |
bundling |
|
|
1288 |
bundling manifests |
|
|
1289 |
bundling manifests |
|
|
1290 |
bundling manifests |
|
|
1291 |
bundling manifests |
|
|
1292 |
bundling manifests |
|
|
1293 |
bundling manifests |
|
|
1294 |
bundling manifests |
|
|
1295 |
bundling manifests |
|
|
1296 |
bundling manifests |
|
|
1297 |
bundling |
|
|
1298 |
bundling |
|
|
1299 |
bundling |
|
|
1300 |
bundling |
|
|
1301 |
bundling |
|
|
1302 |
bundling |
|
|
1303 |
bundling |
|
|
1304 |
bundling |
|
|
1305 |
bundling |
|
|
1306 |
bundling |
|
|
1307 |
bundling |
|
|
1308 |
bundling |
|
|
1281 | bundling: 0 changesets | |
|
1282 | bundling: 0 changesets | |
|
1283 | bundling: 0 changesets | |
|
1284 | bundling: 1 changesets | |
|
1285 | bundling: 1 changesets | |
|
1286 | bundling: 1 changesets | |
|
1287 | bundling: 2 changesets | |
|
1288 | bundling: 2 changesets | |
|
1289 | bundling: 2 changesets | |
|
1290 | bundling: 3 changesets | |
|
1291 | bundling: 0/3 manifests (0.00%) | |
|
1292 | bundling: 0/3 manifests (0.00%) | |
|
1293 | bundling: 0/3 manifests (0.00%) | |
|
1294 | bundling: 1/3 manifests (33.33%) | |
|
1295 | bundling: 1/3 manifests (33.33%) | |
|
1296 | bundling: 1/3 manifests (33.33%) | |
|
1297 | bundling: 2/3 manifests (66.67%) | |
|
1298 | bundling: 2/3 manifests (66.67%) | |
|
1299 | bundling: 2/3 manifests (66.67%) | |
|
1300 | bundling: 3/3 manifests (100.00%) | |
|
1301 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1302 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1303 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1304 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1305 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1306 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1307 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1308 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1309 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1310 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1311 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1312 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1309 | 1313 | changesets: 1 chunks |
|
1310 | 1314 | add changeset ef1ea85a6374 |
|
1311 | 1315 | changesets: 2 chunks |
@@ -1336,6 +1340,7 b' acl.config can set only [acl.allow]/[acl' | |||
|
1336 | 1340 | acl: branch access granted: "911600dab2ae" on branch "default" |
|
1337 | 1341 | acl: allowing changeset 911600dab2ae |
|
1338 | 1342 | updating the branch cache |
|
1343 | checking for updated bookmarks | |
|
1339 | 1344 | rolling back to revision 0 (undo push) |
|
1340 | 1345 | 0:6675d58eff77 |
|
1341 | 1346 | |
@@ -1370,38 +1375,38 b' fred is always allowed' | |||
|
1370 | 1375 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1371 | 1376 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1372 | 1377 | adding changesets |
|
1373 |
bundling |
|
|
1374 |
bundling |
|
|
1375 |
bundling |
|
|
1376 |
bundling |
|
|
1377 |
bundling |
|
|
1378 |
bundling |
|
|
1379 |
bundling |
|
|
1380 |
bundling |
|
|
1381 |
bundling |
|
|
1382 |
bundling |
|
|
1383 |
bundling |
|
|
1384 |
bundling manifests |
|
|
1385 |
bundling manifests |
|
|
1386 |
bundling manifests |
|
|
1387 |
bundling manifests |
|
|
1388 |
bundling manifests |
|
|
1389 |
bundling manifests |
|
|
1390 |
bundling manifests |
|
|
1391 |
bundling manifests |
|
|
1392 |
bundling manifests |
|
|
1393 |
bundling |
|
|
1394 |
bundling |
|
|
1395 |
bundling |
|
|
1396 |
bundling |
|
|
1397 |
bundling |
|
|
1398 |
bundling |
|
|
1399 |
bundling |
|
|
1400 |
bundling |
|
|
1401 |
bundling |
|
|
1402 |
bundling |
|
|
1403 |
bundling |
|
|
1404 |
bundling |
|
|
1378 | bundling: 0 changesets | |
|
1379 | bundling: 0 changesets | |
|
1380 | bundling: 0 changesets | |
|
1381 | bundling: 1 changesets | |
|
1382 | bundling: 1 changesets | |
|
1383 | bundling: 1 changesets | |
|
1384 | bundling: 2 changesets | |
|
1385 | bundling: 2 changesets | |
|
1386 | bundling: 2 changesets | |
|
1387 | bundling: 3 changesets | |
|
1388 | bundling: 0/3 manifests (0.00%) | |
|
1389 | bundling: 0/3 manifests (0.00%) | |
|
1390 | bundling: 0/3 manifests (0.00%) | |
|
1391 | bundling: 1/3 manifests (33.33%) | |
|
1392 | bundling: 1/3 manifests (33.33%) | |
|
1393 | bundling: 1/3 manifests (33.33%) | |
|
1394 | bundling: 2/3 manifests (66.67%) | |
|
1395 | bundling: 2/3 manifests (66.67%) | |
|
1396 | bundling: 2/3 manifests (66.67%) | |
|
1397 | bundling: 3/3 manifests (100.00%) | |
|
1398 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1399 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1400 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1401 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1402 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1403 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1404 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1405 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1406 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1407 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1408 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1409 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1405 | 1410 | changesets: 1 chunks |
|
1406 | 1411 | add changeset ef1ea85a6374 |
|
1407 | 1412 | changesets: 2 chunks |
@@ -1432,6 +1437,7 b' fred is always allowed' | |||
|
1432 | 1437 | acl: branch access granted: "911600dab2ae" on branch "default" |
|
1433 | 1438 | acl: allowing changeset 911600dab2ae |
|
1434 | 1439 | updating the branch cache |
|
1440 | checking for updated bookmarks | |
|
1435 | 1441 | rolling back to revision 0 (undo push) |
|
1436 | 1442 | 0:6675d58eff77 |
|
1437 | 1443 | |
@@ -1462,38 +1468,38 b' no one is allowed inside foo/Bar/' | |||
|
1462 | 1468 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1463 | 1469 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1464 | 1470 | adding changesets |
|
1465 |
bundling |
|
|
1466 |
bundling |
|
|
1467 |
bundling |
|
|
1468 |
bundling |
|
|
1469 |
bundling |
|
|
1470 |
bundling |
|
|
1471 |
bundling |
|
|
1472 |
bundling |
|
|
1473 |
bundling |
|
|
1474 |
bundling |
|
|
1475 |
bundling |
|
|
1476 |
bundling manifests |
|
|
1477 |
bundling manifests |
|
|
1478 |
bundling manifests |
|
|
1479 |
bundling manifests |
|
|
1480 |
bundling manifests |
|
|
1481 |
bundling manifests |
|
|
1482 |
bundling manifests |
|
|
1483 |
bundling manifests |
|
|
1484 |
bundling manifests |
|
|
1485 |
bundling |
|
|
1486 |
bundling |
|
|
1487 |
bundling |
|
|
1488 |
bundling |
|
|
1489 |
bundling |
|
|
1490 |
bundling |
|
|
1491 |
bundling |
|
|
1492 |
bundling |
|
|
1493 |
bundling |
|
|
1494 |
bundling |
|
|
1495 |
bundling |
|
|
1496 |
bundling |
|
|
1471 | bundling: 0 changesets | |
|
1472 | bundling: 0 changesets | |
|
1473 | bundling: 0 changesets | |
|
1474 | bundling: 1 changesets | |
|
1475 | bundling: 1 changesets | |
|
1476 | bundling: 1 changesets | |
|
1477 | bundling: 2 changesets | |
|
1478 | bundling: 2 changesets | |
|
1479 | bundling: 2 changesets | |
|
1480 | bundling: 3 changesets | |
|
1481 | bundling: 0/3 manifests (0.00%) | |
|
1482 | bundling: 0/3 manifests (0.00%) | |
|
1483 | bundling: 0/3 manifests (0.00%) | |
|
1484 | bundling: 1/3 manifests (33.33%) | |
|
1485 | bundling: 1/3 manifests (33.33%) | |
|
1486 | bundling: 1/3 manifests (33.33%) | |
|
1487 | bundling: 2/3 manifests (66.67%) | |
|
1488 | bundling: 2/3 manifests (66.67%) | |
|
1489 | bundling: 2/3 manifests (66.67%) | |
|
1490 | bundling: 3/3 manifests (100.00%) | |
|
1491 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1492 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1493 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1494 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1495 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1496 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1497 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1498 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1499 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1500 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1501 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1502 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1497 | 1503 | changesets: 1 chunks |
|
1498 | 1504 | add changeset ef1ea85a6374 |
|
1499 | 1505 | changesets: 2 chunks |
@@ -1558,38 +1564,38 b' OS-level groups' | |||
|
1558 | 1564 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1559 | 1565 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1560 | 1566 | adding changesets |
|
1561 |
bundling |
|
|
1562 |
bundling |
|
|
1563 |
bundling |
|
|
1564 |
bundling |
|
|
1565 |
bundling |
|
|
1566 |
bundling |
|
|
1567 |
bundling |
|
|
1568 |
bundling |
|
|
1569 |
bundling |
|
|
1570 |
bundling |
|
|
1571 |
bundling |
|
|
1572 |
bundling manifests |
|
|
1573 |
bundling manifests |
|
|
1574 |
bundling manifests |
|
|
1575 |
bundling manifests |
|
|
1576 |
bundling manifests |
|
|
1577 |
bundling manifests |
|
|
1578 |
bundling manifests |
|
|
1579 |
bundling manifests |
|
|
1580 |
bundling manifests |
|
|
1581 |
bundling |
|
|
1582 |
bundling |
|
|
1583 |
bundling |
|
|
1584 |
bundling |
|
|
1585 |
bundling |
|
|
1586 |
bundling |
|
|
1587 |
bundling |
|
|
1588 |
bundling |
|
|
1589 |
bundling |
|
|
1590 |
bundling |
|
|
1591 |
bundling |
|
|
1592 |
bundling |
|
|
1567 | bundling: 0 changesets | |
|
1568 | bundling: 0 changesets | |
|
1569 | bundling: 0 changesets | |
|
1570 | bundling: 1 changesets | |
|
1571 | bundling: 1 changesets | |
|
1572 | bundling: 1 changesets | |
|
1573 | bundling: 2 changesets | |
|
1574 | bundling: 2 changesets | |
|
1575 | bundling: 2 changesets | |
|
1576 | bundling: 3 changesets | |
|
1577 | bundling: 0/3 manifests (0.00%) | |
|
1578 | bundling: 0/3 manifests (0.00%) | |
|
1579 | bundling: 0/3 manifests (0.00%) | |
|
1580 | bundling: 1/3 manifests (33.33%) | |
|
1581 | bundling: 1/3 manifests (33.33%) | |
|
1582 | bundling: 1/3 manifests (33.33%) | |
|
1583 | bundling: 2/3 manifests (66.67%) | |
|
1584 | bundling: 2/3 manifests (66.67%) | |
|
1585 | bundling: 2/3 manifests (66.67%) | |
|
1586 | bundling: 3/3 manifests (100.00%) | |
|
1587 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1588 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1589 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1590 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1591 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1592 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1593 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1594 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1595 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1596 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1597 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1598 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1593 | 1599 | changesets: 1 chunks |
|
1594 | 1600 | add changeset ef1ea85a6374 |
|
1595 | 1601 | changesets: 2 chunks |
@@ -1621,6 +1627,7 b' OS-level groups' | |||
|
1621 | 1627 | acl: branch access granted: "911600dab2ae" on branch "default" |
|
1622 | 1628 | acl: allowing changeset 911600dab2ae |
|
1623 | 1629 | updating the branch cache |
|
1630 | checking for updated bookmarks | |
|
1624 | 1631 | rolling back to revision 0 (undo push) |
|
1625 | 1632 | 0:6675d58eff77 |
|
1626 | 1633 | |
@@ -1651,38 +1658,38 b' OS-level groups' | |||
|
1651 | 1658 | f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd |
|
1652 | 1659 | 911600dab2ae7a9baff75958b84fe606851ce955 |
|
1653 | 1660 | adding changesets |
|
1654 |
bundling |
|
|
1655 |
bundling |
|
|
1656 |
bundling |
|
|
1657 |
bundling |
|
|
1658 |
bundling |
|
|
1659 |
bundling |
|
|
1660 |
bundling |
|
|
1661 |
bundling |
|
|
1662 |
bundling |
|
|
1663 |
bundling |
|
|
1664 |
bundling |
|
|
1665 |
bundling manifests |
|
|
1666 |
bundling manifests |
|
|
1667 |
bundling manifests |
|
|
1668 |
bundling manifests |
|
|
1669 |
bundling manifests |
|
|
1670 |
bundling manifests |
|
|
1671 |
bundling manifests |
|
|
1672 |
bundling manifests |
|
|
1673 |
bundling manifests |
|
|
1674 |
bundling |
|
|
1675 |
bundling |
|
|
1676 |
bundling |
|
|
1677 |
bundling |
|
|
1678 |
bundling |
|
|
1679 |
bundling |
|
|
1680 |
bundling |
|
|
1681 |
bundling |
|
|
1682 |
bundling |
|
|
1683 |
bundling |
|
|
1684 |
bundling |
|
|
1685 |
bundling |
|
|
1661 | bundling: 0 changesets | |
|
1662 | bundling: 0 changesets | |
|
1663 | bundling: 0 changesets | |
|
1664 | bundling: 1 changesets | |
|
1665 | bundling: 1 changesets | |
|
1666 | bundling: 1 changesets | |
|
1667 | bundling: 2 changesets | |
|
1668 | bundling: 2 changesets | |
|
1669 | bundling: 2 changesets | |
|
1670 | bundling: 3 changesets | |
|
1671 | bundling: 0/3 manifests (0.00%) | |
|
1672 | bundling: 0/3 manifests (0.00%) | |
|
1673 | bundling: 0/3 manifests (0.00%) | |
|
1674 | bundling: 1/3 manifests (33.33%) | |
|
1675 | bundling: 1/3 manifests (33.33%) | |
|
1676 | bundling: 1/3 manifests (33.33%) | |
|
1677 | bundling: 2/3 manifests (66.67%) | |
|
1678 | bundling: 2/3 manifests (66.67%) | |
|
1679 | bundling: 2/3 manifests (66.67%) | |
|
1680 | bundling: 3/3 manifests (100.00%) | |
|
1681 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1682 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1683 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1684 | bundling: foo/Bar/file.txt 0/3 files (0.00%) | |
|
1685 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1686 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1687 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1688 | bundling: foo/file.txt 1/3 files (33.33%) | |
|
1689 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1690 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1691 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1692 | bundling: quux/file.py 2/3 files (66.67%) | |
|
1686 | 1693 | changesets: 1 chunks |
|
1687 | 1694 | add changeset ef1ea85a6374 |
|
1688 | 1695 | changesets: 2 chunks |
@@ -30,7 +30,6 b'' | |||
|
30 | 30 | |
|
31 | 31 | check http return codes |
|
32 | 32 | |
|
33 | ||
|
34 | 33 | $ test_archtype gz tar.gz tar.bz2 zip |
|
35 | 34 | % gz allowed should give 200 |
|
36 | 35 | 200 Script output follows |
@@ -150,9 +149,8 b" The '-t' should override autodetection" | |||
|
150 | 149 | > print h1 == h2 or "md5 differ: " + repr((h1, h2)) |
|
151 | 150 | > EOF |
|
152 | 151 | |
|
153 | archive name is stored in the archive, so create similar | |
|
154 | ||
|
155 | archives and rename them afterwards. | |
|
152 | archive name is stored in the archive, so create similar archives and | |
|
153 | rename them afterwards. | |
|
156 | 154 | |
|
157 | 155 | $ hg archive -t tgz tip.tar.gz |
|
158 | 156 | $ mv tip.tar.gz tip1.tar.gz |
@@ -208,6 +206,38 b' test .hg_archival.txt' | |||
|
208 | 206 | abort: unknown archive type 'bogus' |
|
209 | 207 | [255] |
|
210 | 208 | |
|
209 | enable progress extension: | |
|
210 | ||
|
211 | $ cp $HGRCPATH $HGRCPATH.no-progress | |
|
212 | $ cat >> $HGRCPATH <<EOF | |
|
213 | > [extensions] | |
|
214 | > progress = | |
|
215 | > [progress] | |
|
216 | > assume-tty = 1 | |
|
217 | > format = topic bar number | |
|
218 | > delay = 0 | |
|
219 | > refresh = 0 | |
|
220 | > width = 60 | |
|
221 | > EOF | |
|
222 | ||
|
223 | $ hg archive ../with-progress 2>&1 | $TESTDIR/filtercr.py | |
|
224 | ||
|
225 | archiving [ ] 0/4 | |
|
226 | archiving [ ] 0/4 | |
|
227 | archiving [=========> ] 1/4 | |
|
228 | archiving [=========> ] 1/4 | |
|
229 | archiving [====================> ] 2/4 | |
|
230 | archiving [====================> ] 2/4 | |
|
231 | archiving [===============================> ] 3/4 | |
|
232 | archiving [===============================> ] 3/4 | |
|
233 | archiving [==========================================>] 4/4 | |
|
234 | archiving [==========================================>] 4/4 | |
|
235 | \r (esc) | |
|
236 | ||
|
237 | cleanup after progress extension test: | |
|
238 | ||
|
239 | $ cp $HGRCPATH.no-progress $HGRCPATH | |
|
240 | ||
|
211 | 241 | server errors |
|
212 | 242 | |
|
213 | 243 | $ cat errors.log |
@@ -219,6 +249,7 b' empty repo' | |||
|
219 | 249 | $ hg archive ../test-empty |
|
220 | 250 | abort: no working directory: please specify a revision |
|
221 | 251 | [255] |
|
252 | ||
|
222 | 253 | old file -- date clamped to 1980 |
|
223 | 254 | |
|
224 | 255 | $ touch -t 197501010000 old |
@@ -2,7 +2,6 b'' | |||
|
2 | 2 | $ echo "bookmarks=" >> $HGRCPATH |
|
3 | 3 | |
|
4 | 4 | $ echo "[bookmarks]" >> $HGRCPATH |
|
5 | $ echo "track.current = True" >> $HGRCPATH | |
|
6 | 5 | |
|
7 | 6 | $ hg init |
|
8 | 7 |
@@ -1,9 +1,6 b'' | |||
|
1 | 1 | $ echo "[extensions]" >> $HGRCPATH |
|
2 | 2 | $ echo "bookmarks=" >> $HGRCPATH |
|
3 | 3 | |
|
4 | $ echo "[bookmarks]" >> $HGRCPATH | |
|
5 | $ echo "track.current = True" >> $HGRCPATH | |
|
6 | ||
|
7 | 4 | initialize |
|
8 | 5 | |
|
9 | 6 | $ hg init a |
@@ -48,8 +45,8 b' import bookmark by name' | |||
|
48 | 45 | no changes found |
|
49 | 46 | importing bookmark X |
|
50 | 47 | $ hg bookmark |
|
48 | X 0:4e3505fd9583 | |
|
51 | 49 | Y 0:4e3505fd9583 |
|
52 | X 0:4e3505fd9583 | |
|
53 | 50 | |
|
54 | 51 | export bookmark by name |
|
55 | 52 | |
@@ -62,23 +59,30 b' export bookmark by name' | |||
|
62 | 59 | no changes found |
|
63 | 60 | exporting bookmark W |
|
64 | 61 | $ hg -R ../a bookmarks |
|
65 |
|
|
|
62 | W -1:000000000000 | |
|
66 | 63 | X 0:4e3505fd9583 |
|
64 | Y 0:4e3505fd9583 | |
|
67 | 65 | * Z 0:4e3505fd9583 |
|
68 | W -1:000000000000 | |
|
69 | 66 | |
|
70 | 67 | delete a remote bookmark |
|
71 | 68 | |
|
72 | 69 | $ hg book -d W |
|
73 | 70 | $ hg push -B W ../a |
|
71 | pushing to ../a | |
|
72 | searching for changes | |
|
73 | no changes found | |
|
74 | 74 | deleting remote bookmark W |
|
75 | 75 | |
|
76 | 76 | push/pull name that doesn't exist |
|
77 | 77 | |
|
78 | 78 | $ hg push -B badname ../a |
|
79 | pushing to ../a | |
|
80 | searching for changes | |
|
81 | no changes found | |
|
79 | 82 | bookmark badname does not exist on the local or remote repository! |
|
80 | 83 | [2] |
|
81 | 84 | $ hg pull -B anotherbadname ../a |
|
85 | pulling from ../a | |
|
82 | 86 | abort: remote bookmark anotherbadname not found! |
|
83 | 87 | [255] |
|
84 | 88 | |
@@ -90,8 +94,8 b' divergent bookmarks' | |||
|
90 | 94 | adding f1 |
|
91 | 95 | $ hg book -f X |
|
92 | 96 | $ hg book |
|
97 | * X 1:0d2164f0ce0d | |
|
93 | 98 | Y 0:4e3505fd9583 |
|
94 | * X 1:0d2164f0ce0d | |
|
95 | 99 | Z 1:0d2164f0ce0d |
|
96 | 100 | |
|
97 | 101 | $ cd ../b |
@@ -102,8 +106,8 b' divergent bookmarks' | |||
|
102 | 106 | adding f2 |
|
103 | 107 | $ hg book -f X |
|
104 | 108 | $ hg book |
|
109 | * X 1:9b140be10808 | |
|
105 | 110 | Y 0:4e3505fd9583 |
|
106 | * X 1:9b140be10808 | |
|
107 | 111 | foo -1:000000000000 |
|
108 | 112 | foobar -1:000000000000 |
|
109 | 113 | |
@@ -117,8 +121,8 b' divergent bookmarks' | |||
|
117 | 121 | not updating divergent bookmark X |
|
118 | 122 | (run 'hg heads' to see heads, 'hg merge' to merge) |
|
119 | 123 | $ hg book |
|
124 | * X 1:9b140be10808 | |
|
120 | 125 | Y 0:4e3505fd9583 |
|
121 | * X 1:9b140be10808 | |
|
122 | 126 | foo -1:000000000000 |
|
123 | 127 | foobar -1:000000000000 |
|
124 | 128 | $ hg push -f ../a |
@@ -129,8 +133,8 b' divergent bookmarks' | |||
|
129 | 133 | adding file changes |
|
130 | 134 | added 1 changesets with 1 changes to 1 files (+1 heads) |
|
131 | 135 | $ hg -R ../a book |
|
136 | * X 1:0d2164f0ce0d | |
|
132 | 137 | Y 0:4e3505fd9583 |
|
133 | * X 1:0d2164f0ce0d | |
|
134 | 138 | Z 1:0d2164f0ce0d |
|
135 | 139 | |
|
136 | 140 | hgweb |
@@ -31,8 +31,8 b' initialize repository' | |||
|
31 | 31 | bookmark list |
|
32 | 32 | |
|
33 | 33 | $ hg bookmark |
|
34 | one 1:925d80f479bb | |
|
34 | 35 | * two 3:2ae46b1d99a7 |
|
35 | one 1:925d80f479bb | |
|
36 | 36 | |
|
37 | 37 | rebase |
|
38 | 38 | |
@@ -41,9 +41,8 b' rebase' | |||
|
41 | 41 | |
|
42 | 42 | $ hg log |
|
43 | 43 | changeset: 3:9163974d1cb5 |
|
44 | tag: one | |
|
44 | bookmark: two | |
|
45 | 45 | tag: tip |
|
46 | tag: two | |
|
47 | 46 | parent: 1:925d80f479bb |
|
48 | 47 | parent: 2:db815d6d32e6 |
|
49 | 48 | user: test |
@@ -57,6 +56,7 b' rebase' | |||
|
57 | 56 | summary: 2 |
|
58 | 57 | |
|
59 | 58 | changeset: 1:925d80f479bb |
|
59 | bookmark: one | |
|
60 | 60 | user: test |
|
61 | 61 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
62 | 62 | summary: 1 |
@@ -61,7 +61,7 b' strip to revision 1' | |||
|
61 | 61 | list bookmarks |
|
62 | 62 | |
|
63 | 63 | $ hg book |
|
64 |
|
|
|
64 | test 1:8cf31af87a2b | |
|
65 | 65 | * test2 1:8cf31af87a2b |
|
66 | 66 | |
|
67 | 67 | immediate rollback and reentrancy issue |
@@ -93,6 +93,8 b' can you be added again?' | |||
|
93 | 93 | rollback dry run with rollback information |
|
94 | 94 | |
|
95 | 95 | $ hg rollback -n |
|
96 | no rollback information available | |
|
97 | [1] | |
|
96 | 98 | $ hg bookmarks |
|
97 | 99 | * markb 0:07f494440405 |
|
98 | 100 |
@@ -36,7 +36,7 b' look up bookmark' | |||
|
36 | 36 | |
|
37 | 37 | $ hg log -r X |
|
38 | 38 | changeset: 0:f7b1eb17ad24 |
|
39 | tag: X | |
|
39 | bookmark: X | |
|
40 | 40 | tag: tip |
|
41 | 41 | user: test |
|
42 | 42 | date: Thu Jan 01 00:00:00 1970 +0000 |
@@ -54,8 +54,8 b' bookmark rev -1 again' | |||
|
54 | 54 | list bookmarks |
|
55 | 55 | |
|
56 | 56 | $ hg bookmarks |
|
57 |
|
|
|
58 |
|
|
|
57 | X 0:f7b1eb17ad24 | |
|
58 | X2 0:f7b1eb17ad24 | |
|
59 | 59 | Y -1:000000000000 |
|
60 | 60 | |
|
61 | 61 | $ echo b > b |
@@ -65,23 +65,21 b' list bookmarks' | |||
|
65 | 65 | bookmarks revset |
|
66 | 66 | |
|
67 | 67 | $ hg log -r 'bookmark()' |
|
68 |
changeset: |
|
|
69 | tag: X | |
|
70 | tag: X2 | |
|
71 | tag: tip | |
|
68 | changeset: 0:f7b1eb17ad24 | |
|
69 | bookmark: X | |
|
70 | bookmark: X2 | |
|
72 | 71 | user: test |
|
73 | 72 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
74 |
summary: |
|
|
73 | summary: 0 | |
|
75 | 74 | |
|
76 | 75 | $ hg log -r 'bookmark(Y)' |
|
77 | 76 | $ hg log -r 'bookmark(X2)' |
|
78 |
changeset: |
|
|
79 | tag: X | |
|
80 | tag: X2 | |
|
81 | tag: tip | |
|
77 | changeset: 0:f7b1eb17ad24 | |
|
78 | bookmark: X | |
|
79 | bookmark: X2 | |
|
82 | 80 | user: test |
|
83 | 81 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
84 |
summary: |
|
|
82 | summary: 0 | |
|
85 | 83 | |
|
86 | 84 | $ hg help revsets | grep 'bookmark(' |
|
87 | 85 | "bookmark([name])" |
@@ -89,25 +87,28 b' bookmarks revset' | |||
|
89 | 87 | bookmarks X and X2 moved to rev 1, Y at rev -1 |
|
90 | 88 | |
|
91 | 89 | $ hg bookmarks |
|
92 |
|
|
|
93 |
|
|
|
90 | X 0:f7b1eb17ad24 | |
|
91 | X2 0:f7b1eb17ad24 | |
|
94 | 92 | Y -1:000000000000 |
|
95 | 93 | |
|
96 | 94 | bookmark rev 0 again |
|
97 | 95 | |
|
98 | 96 | $ hg bookmark -r 0 Z |
|
99 | 97 | |
|
98 | $ hg update X | |
|
99 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved | |
|
100 | 100 | $ echo c > c |
|
101 | 101 | $ hg add c |
|
102 | 102 | $ hg commit -m 2 |
|
103 | created new head | |
|
103 | 104 | |
|
104 |
bookmarks X |
|
|
105 | bookmarks X moved to rev 2, Y at rev -1, Z at rev 0 | |
|
105 | 106 | |
|
106 | 107 | $ hg bookmarks |
|
107 |
* X |
|
|
108 |
|
|
|
108 | * X 2:db815d6d32e6 | |
|
109 | X2 0:f7b1eb17ad24 | |
|
110 | Y -1:000000000000 | |
|
109 | 111 | Z 0:f7b1eb17ad24 |
|
110 | Y -1:000000000000 | |
|
111 | 112 | |
|
112 | 113 | rename nonexistent bookmark |
|
113 | 114 | |
@@ -128,8 +129,8 b' force rename to existent bookmark' | |||
|
128 | 129 | list bookmarks |
|
129 | 130 | |
|
130 | 131 | $ hg bookmark |
|
131 |
|
|
|
132 |
* Y 2: |
|
|
132 | X2 0:f7b1eb17ad24 | |
|
133 | * Y 2:db815d6d32e6 | |
|
133 | 134 | Z 0:f7b1eb17ad24 |
|
134 | 135 | |
|
135 | 136 | rename without new name |
@@ -157,19 +158,19 b' bookmark name with spaces should be stri' | |||
|
157 | 158 | list bookmarks |
|
158 | 159 | |
|
159 | 160 | $ hg bookmarks |
|
160 |
|
|
|
161 |
|
|
|
161 | X2 0:f7b1eb17ad24 | |
|
162 | Y 2:db815d6d32e6 | |
|
162 | 163 | Z 0:f7b1eb17ad24 |
|
163 |
* x y 2: |
|
|
164 | * x y 2:db815d6d32e6 | |
|
164 | 165 | |
|
165 | 166 | look up stripped bookmark name |
|
166 | 167 | |
|
167 | 168 | $ hg log -r '"x y"' |
|
168 |
changeset: 2: |
|
|
169 | tag: X2 | |
|
170 | tag: Y | |
|
169 | changeset: 2:db815d6d32e6 | |
|
170 | bookmark: Y | |
|
171 | bookmark: x y | |
|
171 | 172 | tag: tip |
|
172 | tag: x y | |
|
173 | parent: 0:f7b1eb17ad24 | |
|
173 | 174 | user: test |
|
174 | 175 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
175 | 176 | summary: 2 |
@@ -195,10 +196,10 b' force bookmark with existing name' | |||
|
195 | 196 | list bookmarks |
|
196 | 197 | |
|
197 | 198 | $ hg bookmark |
|
198 |
|
|
|
199 |
|
|
|
200 |
* Z 2: |
|
|
201 |
|
|
|
199 | X2 0:f7b1eb17ad24 | |
|
200 | Y 2:db815d6d32e6 | |
|
201 | * Z 2:db815d6d32e6 | |
|
202 | x y 2:db815d6d32e6 | |
|
202 | 203 | |
|
203 | 204 | revision but no bookmark name |
|
204 | 205 | |
@@ -211,3 +212,10 b' bookmark name with whitespace only' | |||
|
211 | 212 | $ hg bookmark ' ' |
|
212 | 213 | abort: bookmark names cannot consist entirely of whitespace |
|
213 | 214 | [255] |
|
215 | ||
|
216 | invalid bookmark | |
|
217 | ||
|
218 | $ hg bookmark 'foo:bar' | |
|
219 | abort: bookmark 'foo:bar' contains illegal character | |
|
220 | [255] | |
|
221 |
@@ -1,5 +1,3 b'' | |||
|
1 | $ cp "$TESTDIR"/printenv.py . | |
|
2 | ||
|
3 | 1 | Setting up test |
|
4 | 2 | |
|
5 | 3 | $ hg init test |
@@ -188,11 +186,18 b' Log -R full.hg in fresh empty' | |||
|
188 | 186 | date: Thu Jan 01 00:00:00 1970 +0000 |
|
189 | 187 | summary: 0.0 |
|
190 | 188 | |
|
189 | Make sure bundlerepo doesn't leak tempfiles (issue2491) | |
|
190 | ||
|
191 | $ ls .hg | |
|
192 | 00changelog.i | |
|
193 | cache | |
|
194 | requires | |
|
195 | store | |
|
191 | 196 | |
|
192 | 197 | Pull ../full.hg into empty (with hook) |
|
193 | 198 | |
|
194 | 199 | $ echo '[hooks]' >> .hg/hgrc |
|
195 |
$ |
|
|
200 | $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc | |
|
196 | 201 | |
|
197 | 202 |
|
|
198 | 203 | |
@@ -543,26 +548,26 b' bundle single branch' | |||
|
543 | 548 | list of changesets: |
|
544 | 549 | d2ae7f538514cd87c17547b0de4cea71fe1af9fb |
|
545 | 550 | 5ece8e77363e2b5269e27c66828b72da29e4341a |
|
546 |
bundling changes |
|
|
547 |
bundling changes |
|
|
548 |
bundling changes |
|
|
549 |
bundling changes |
|
|
550 |
bundling changes |
|
|
551 |
bundling changes |
|
|
552 |
bundling changes |
|
|
553 |
bundling manifests |
|
|
554 |
bundling manifests |
|
|
555 |
bundling manifests |
|
|
556 |
bundling manifests |
|
|
557 |
bundling manifests |
|
|
558 |
bundling manifests |
|
|
559 |
bundling manifests |
|
|
560 |
bundling files |
|
|
561 | bundling files: b 1 chunks | |
|
562 | bundling files: b 2 chunks | |
|
563 | bundling files: b 3 chunks | |
|
564 | bundling files: b1 4 chunks | |
|
565 | bundling files: b1 5 chunks | |
|
566 | bundling files: b1 6 chunks | |
|
567 | bundling files: b1 7 chunks | |
|
551 | bundling: 0 changesets | |
|
552 | bundling: 0 changesets | |
|
553 | bundling: 0 changesets | |
|
554 | bundling: 1 changesets | |
|
555 | bundling: 1 changesets | |
|
556 | bundling: 1 changesets | |
|
557 | bundling: 2 changesets | |
|
558 | bundling: 0/2 manifests (0.00%) | |
|
559 | bundling: 0/2 manifests (0.00%) | |
|
560 | bundling: 0/2 manifests (0.00%) | |
|
561 | bundling: 1/2 manifests (50.00%) | |
|
562 | bundling: 1/2 manifests (50.00%) | |
|
563 | bundling: 1/2 manifests (50.00%) | |
|
564 | bundling: 2/2 manifests (100.00%) | |
|
565 | bundling: b 0/2 files (0.00%) | |
|
566 | bundling: b 0/2 files (0.00%) | |
|
567 | bundling: b 0/2 files (0.00%) | |
|
568 | bundling: b 0/2 files (0.00%) | |
|
569 | bundling: b1 1/2 files (50.00%) | |
|
570 | bundling: b1 1/2 files (50.00%) | |
|
571 | bundling: b1 1/2 files (50.00%) | |
|
572 | bundling: b1 1/2 files (50.00%) | |
|
568 | 573 |
@@ -34,7 +34,7 b'' | |||
|
34 | 34 | gratuitous whitespace in () or [] |
|
35 | 35 | ./wrong.py:2: |
|
36 | 36 | > del(arg2) |
|
37 |
|
|
|
37 | Python keyword is not a function | |
|
38 | 38 | ./wrong.py:3: |
|
39 | 39 | > return ( 5+6, 9) |
|
40 | 40 | missing whitespace in expression |
@@ -52,3 +52,44 b'' | |||
|
52 | 52 | > y = format(x) |
|
53 | 53 | any/all/format not available in Python 2.4 |
|
54 | 54 | [1] |
|
55 | ||
|
56 | $ cat > is-op.py <<EOF | |
|
57 | > # is-operator comparing number or string literal | |
|
58 | > x = None | |
|
59 | > y = x is 'foo' | |
|
60 | > y = x is "foo" | |
|
61 | > y = x is 5346 | |
|
62 | > y = x is -6 | |
|
63 | > y = x is not 'foo' | |
|
64 | > y = x is not "foo" | |
|
65 | > y = x is not 5346 | |
|
66 | > y = x is not -6 | |
|
67 | > EOF | |
|
68 | ||
|
69 | $ "$check_code" ./is-op.py | |
|
70 | ./is-op.py:3: | |
|
71 | > y = x is 'foo' | |
|
72 | object comparison with literal | |
|
73 | ./is-op.py:4: | |
|
74 | > y = x is "foo" | |
|
75 | object comparison with literal | |
|
76 | ./is-op.py:5: | |
|
77 | > y = x is 5346 | |
|
78 | object comparison with literal | |
|
79 | ./is-op.py:6: | |
|
80 | > y = x is -6 | |
|
81 | object comparison with literal | |
|
82 | ./is-op.py:7: | |
|
83 | > y = x is not 'foo' | |
|
84 | object comparison with literal | |
|
85 | ./is-op.py:8: | |
|
86 | > y = x is not "foo" | |
|
87 | object comparison with literal | |
|
88 | ./is-op.py:9: | |
|
89 | > y = x is not 5346 | |
|
90 | object comparison with literal | |
|
91 | ./is-op.py:10: | |
|
92 | > y = x is not -6 | |
|
93 | object comparison with literal | |
|
94 | [1] | |
|
95 |
@@ -19,37 +19,10 b' initialize repository' | |||
|
19 | 19 | > wsgicgi.launch(application) |
|
20 | 20 | > HGWEB |
|
21 | 21 | $ chmod 755 hgweb.cgi |
|
22 | $ DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT | |
|
23 | $ GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE | |
|
24 | $ HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT | |
|
25 | $ HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET | |
|
26 | $ HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING | |
|
27 | $ HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE | |
|
28 | $ HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL | |
|
29 | $ HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION | |
|
30 | $ HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST | |
|
31 | $ HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE | |
|
32 | $ HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT | |
|
33 | $ PATH_INFO="/"; export PATH_INFO | |
|
34 | $ PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED | |
|
35 | $ REMOTE_ADDR="127.0.0.2"; export REMOTE_ADDR | |
|
36 | $ REMOTE_PORT="44703"; export REMOTE_PORT | |
|
37 | $ REQUEST_METHOD="GET"; export REQUEST_METHOD | |
|
38 | $ REQUEST_URI="/test/"; export REQUEST_URI | |
|
39 | $ SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME | |
|
40 | $ SCRIPT_NAME="/test"; export SCRIPT_NAME | |
|
41 | $ SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI | |
|
42 | $ SCRIPT_URL="/test/"; export SCRIPT_URL | |
|
43 | $ SERVER_ADDR="127.0.0.1"; export SERVER_ADDR | |
|
44 | $ SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN | |
|
45 | $ SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME | |
|
46 | $ SERVER_PORT="80"; export SERVER_PORT | |
|
47 | $ SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL | |
|
48 | $ SERVER_SIGNATURE="<address>Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80</address>"; export SERVER_SIGNATURE | |
|
49 | $ SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE | |
|
50 | 22 | |
|
51 | 23 |
|
|
52 | 24 | |
|
25 | $ . "$TESTDIR/cgienv" | |
|
53 | 26 |
$ |
|
54 | 27 |
$ python |
|
55 | 28 |
$ python |
@@ -39,7 +39,6 b' Source of wrong type' | |||
|
39 | 39 | > rm a |
|
40 | 40 | > else |
|
41 | 41 | > echo "abort: repository a not found!" |
|
42 | > echo 255 | |
|
43 | 42 | > fi |
|
44 | 43 | abort: repository a not found! |
|
45 | 44 |
@@ -449,7 +449,7 b' Error if style missing key:' | |||
|
449 | 449 | |
|
450 | 450 | $ echo 'q = q' > t |
|
451 | 451 | $ hg log --style ./t |
|
452 | abort: ./t: no key named 'changeset' | |
|
452 | abort: "changeset" not in template map | |
|
453 | 453 | [255] |
|
454 | 454 | |
|
455 | 455 | Error if include fails: |
@@ -570,7 +570,7 b" Issue2130: xml output for 'hg heads' is " | |||
|
570 | 570 | |
|
571 | 571 | Keys work: |
|
572 | 572 | |
|
573 | $ for key in author branches date desc file_adds file_dels file_mods \ | |
|
573 | $ for key in author branch branches date desc file_adds file_dels file_mods \ | |
|
574 | 574 | > file_copies file_copies_switch files \ |
|
575 | 575 | > manifest node parents rev tags diffstat extras; do |
|
576 | 576 | > for mode in '' --verbose --debug; do |
@@ -604,6 +604,33 b' Keys work:' | |||
|
604 | 604 | author--debug: other@place |
|
605 | 605 | author--debug: A. N. Other <other@place> |
|
606 | 606 | author--debug: User Name <user@hostname> |
|
607 | branch: default | |
|
608 | branch: default | |
|
609 | branch: default | |
|
610 | branch: default | |
|
611 | branch: foo | |
|
612 | branch: default | |
|
613 | branch: default | |
|
614 | branch: default | |
|
615 | branch: default | |
|
616 | branch--verbose: default | |
|
617 | branch--verbose: default | |
|
618 | branch--verbose: default | |
|
619 | branch--verbose: default | |
|
620 | branch--verbose: foo | |
|
621 | branch--verbose: default | |
|
622 | branch--verbose: default | |
|
623 | branch--verbose: default | |
|
624 | branch--verbose: default | |
|
625 | branch--debug: default | |
|
626 | branch--debug: default | |
|
627 | branch--debug: default | |
|
628 | branch--debug: default | |
|
629 | branch--debug: foo | |
|
630 | branch--debug: default | |
|
631 | branch--debug: default | |
|
632 | branch--debug: default | |
|
633 | branch--debug: default | |
|
607 | 634 | branches: |
|
608 | 635 | branches: |
|
609 | 636 | branches: |
@@ -58,8 +58,8 b' Should show a removed and b added:' | |||
|
58 | 58 | |
|
59 | 59 | Revert should fail: |
|
60 | 60 | |
|
61 |
$ hg revert |
|
|
62 |
abort: uncommitted merge - |
|
|
61 | $ hg revert | |
|
62 | abort: uncommitted merge - use "hg update", see "hg help revert" | |
|
63 | 63 | [255] |
|
64 | 64 | |
|
65 | 65 | Revert should be ok now: |
@@ -167,83 +167,73 b" Test convert progress bar'" | |||
|
167 | 167 | > [progress] |
|
168 | 168 | > assume-tty = 1 |
|
169 | 169 | > delay = 0 |
|
170 | > format = topic bar number | |
|
170 | 171 | > refresh = 0 |
|
171 | > EOF | |
|
172 | $ cat > filtercr.py <<EOF | |
|
173 | > import sys, re | |
|
174 | > for line in sys.stdin: | |
|
175 | > line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line) | |
|
176 | > sys.stdout.write(line) | |
|
172 | > width = 60 | |
|
177 | 173 | > EOF |
|
178 | 174 | |
|
179 |
$ hg convert svn-repo hg-progress 2>&1 | |
|
|
175 | $ hg convert svn-repo hg-progress 2>&1 | $TESTDIR/filtercr.py | |
|
180 | 176 | |
|
181 |
scanning [ <=> |
|
|
182 |
scanning [ <=> |
|
|
183 |
scanning [ <=> |
|
|
184 |
scanning [ <=> |
|
|
185 |
scanning [ <=> |
|
|
186 |
scanning [ <=> |
|
|
187 |
scanning [ <=> |
|
|
188 |
|
|
|
189 |
converting [ |
|
|
190 |
getting files [===== |
|
|
191 |
getting files [============ |
|
|
192 |
getting files [================== |
|
|
193 |
getting files [========================= |
|
|
194 |
getting files [=============================== |
|
|
195 |
getting files [====================================== |
|
|
196 |
|
|
|
197 |
converting [==== |
|
|
198 |
scanning paths [ |
|
|
199 | ||
|
200 | getting files [==========================================================>] 1/1 | |
|
201 | ||
|
202 |
|
|
|
203 | scanning paths [ ] 0/2 | |
|
204 |
|
|
|
205 | ||
|
206 |
getting files [=============> |
|
|
207 |
getting files [============================> |
|
|
208 | getting files [===========================================> ] 3/4 | |
|
209 | getting files [==========================================================>] 4/4 | |
|
210 | ||
|
211 | converting [=========================> ] 3/7 | |
|
212 |
|
|
|
213 | ||
|
214 | getting files [==========================================================>] 1/1 | |
|
215 | ||
|
216 | converting [==================================> ] 4/7 | |
|
217 | scanning paths [ ] 0/1 | |
|
218 | ||
|
219 | getting files [==========================================================>] 1/1 | |
|
220 | ||
|
221 | converting [===========================================> ] 5/7 | |
|
222 | scanning paths [ ] 0/3 | |
|
223 |
|
|
|
224 |
|
|
|
225 | ||
|
226 | getting files [======> ] 1/8 | |
|
227 | getting files [=============> ] 2/8 | |
|
228 |
getting files [=====================> |
|
|
229 | getting files [============================> ] 4/8 | |
|
230 |
|
|
|
231 | getting files [===========================================> ] 6/8 | |
|
232 | getting files [==================================================> ] 7/8 | |
|
233 | getting files [==========================================================>] 8/8 | |
|
234 | ||
|
235 | converting [====================================================> ] 6/7 | |
|
236 | scanning paths [ ] 0/1 | |
|
237 | ||
|
238 | getting files [======> ] 1/8 | |
|
239 | getting files [=============> ] 2/8 | |
|
240 | getting files [=====================> ] 3/8 | |
|
241 | getting files [============================> ] 4/8 | |
|
242 | getting files [===================================> ] 5/8 | |
|
243 | getting files [===========================================> ] 6/8 | |
|
244 | getting files [==================================================> ] 7/8 | |
|
245 | getting files [==========================================================>] 8/8 | |
|
246 | ||
|
177 | scanning [ <=> ] 1 | |
|
178 | scanning [ <=> ] 2 | |
|
179 | scanning [ <=> ] 3 | |
|
180 | scanning [ <=> ] 4 | |
|
181 | scanning [ <=> ] 5 | |
|
182 | scanning [ <=> ] 6 | |
|
183 | scanning [ <=> ] 7 | |
|
184 | ||
|
185 | converting [ ] 0/7 | |
|
186 | getting files [=====> ] 1/6 | |
|
187 | getting files [============> ] 2/6 | |
|
188 | getting files [==================> ] 3/6 | |
|
189 | getting files [=========================> ] 4/6 | |
|
190 | getting files [===============================> ] 5/6 | |
|
191 | getting files [======================================>] 6/6 | |
|
192 | ||
|
193 | converting [=====> ] 1/7 | |
|
194 | scanning paths [ ] 0/1 | |
|
195 | getting files [======================================>] 1/1 | |
|
196 | ||
|
197 | converting [===========> ] 2/7 | |
|
198 | scanning paths [ ] 0/2 | |
|
199 | scanning paths [==================> ] 1/2 | |
|
200 | getting files [========> ] 1/4 | |
|
201 | getting files [==================> ] 2/4 | |
|
202 | getting files [============================> ] 3/4 | |
|
203 | getting files [======================================>] 4/4 | |
|
204 | ||
|
205 | converting [=================> ] 3/7 | |
|
206 | scanning paths [ ] 0/1 | |
|
207 | getting files [======================================>] 1/1 | |
|
208 | ||
|
209 | converting [=======================> ] 4/7 | |
|
210 | scanning paths [ ] 0/1 | |
|
211 | getting files [======================================>] 1/1 | |
|
212 | ||
|
213 | converting [=============================> ] 5/7 | |
|
214 | scanning paths [ ] 0/3 | |
|
215 | scanning paths [===========> ] 1/3 | |
|
216 | scanning paths [========================> ] 2/3 | |
|
217 | getting files [===> ] 1/8 | |
|
218 | getting files [========> ] 2/8 | |
|
219 | getting files [=============> ] 3/8 | |
|
220 | getting files [==================> ] 4/8 | |
|
221 | getting files [=======================> ] 5/8 | |
|
222 | getting files [============================> ] 6/8 | |
|
223 | getting files [=================================> ] 7/8 | |
|
224 | getting files [======================================>] 8/8 | |
|
225 | ||
|
226 | converting [===================================> ] 6/7 | |
|
227 | scanning paths [ ] 0/1 | |
|
228 | getting files [===> ] 1/8 | |
|
229 | getting files [========> ] 2/8 | |
|
230 | getting files [=============> ] 3/8 | |
|
231 | getting files [==================> ] 4/8 | |
|
232 | getting files [=======================> ] 5/8 | |
|
233 | getting files [============================> ] 6/8 | |
|
234 | getting files [=================================> ] 7/8 | |
|
235 | getting files [======================================>] 8/8 | |
|
236 | ||
|
247 | 237 | initializing destination hg-progress repository |
|
248 | 238 | scanning source... |
|
249 | 239 | sorting... |
@@ -255,3 +245,4 b" Test convert progress bar'" | |||
|
255 | 245 | 2 adddb |
|
256 | 246 | 1 branch |
|
257 | 247 | 0 clobberdir |
|
248 |
@@ -40,16 +40,16 b'' | |||
|
40 | 40 | have the following effects: |
|
41 | 41 | |
|
42 | 42 | --branchsort convert from parent to child revision when possible, which |
|
43 |
means branches are usually converted one after the other. |
|
|
44 | generates more compact repositories. | |
|
43 | means branches are usually converted one after the other. | |
|
44 | It generates more compact repositories. | |
|
45 | 45 | --datesort sort revisions by date. Converted repositories have good- |
|
46 | 46 | looking changelogs but are often an order of magnitude |
|
47 | 47 | larger than the same ones generated by --branchsort. |
|
48 | 48 | --sourcesort try to preserve source revisions order, only supported by |
|
49 | 49 | Mercurial sources. |
|
50 | 50 | |
|
51 |
If |
|
|
52 |
(<dest>/.hg/shamap by default). The |
|
|
51 | If "REVMAP" isn't given, it will be put in a default location | |
|
52 | ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that | |
|
53 | 53 | maps each source commit ID to the destination ID for that revision, like |
|
54 | 54 | so: |
|
55 | 55 | |
@@ -123,16 +123,19 b'' | |||
|
123 | 123 | Mercurial Source |
|
124 | 124 | '''''''''''''''' |
|
125 | 125 | |
|
126 | --config convert.hg.ignoreerrors=False (boolean) | |
|
127 | ignore integrity errors when reading. Use it to fix Mercurial | |
|
128 | repositories with missing revlogs, by converting from and to | |
|
129 | Mercurial. | |
|
126 | The Mercurial source recognizes the following configuration options, which | |
|
127 | you can set on the command line with "--config": | |
|
130 | 128 | |
|
131 | --config convert.hg.saverev=False (boolean) | |
|
132 | store original revision ID in changeset (forces target IDs to change) | |
|
133 | ||
|
134 | --config convert.hg.startrev=0 (hg revision identifier) | |
|
135 | convert start revision and its descendants | |
|
129 | convert.hg.ignoreerrors | |
|
130 | ignore integrity errors when reading. Use it to fix Mercurial | |
|
131 | repositories with missing revlogs, by converting from and to | |
|
132 | Mercurial. Default is False. | |
|
133 | convert.hg.saverev | |
|
134 | store original. revision ID in changeset (forces target IDs to | |
|
135 | change). It takes and boolean argument and defaults to False. | |
|
136 | convert.hg.startrev | |
|
137 | convert start revision and its descendants. It takes a hg | |
|
138 | revision identifier and defaults to 0. | |
|
136 | 139 | |
|
137 | 140 | CVS Source |
|
138 | 141 | '''''''''' |
@@ -140,46 +143,45 b'' | |||
|
140 | 143 | CVS source will use a sandbox (i.e. a checked-out copy) from CVS to |
|
141 | 144 | indicate the starting point of what will be converted. Direct access to |
|
142 | 145 | the repository files is not needed, unless of course the repository is |
|
143 | :local:. The conversion uses the top level directory in the sandbox to | |
|
146 | ":local:". The conversion uses the top level directory in the sandbox to | |
|
144 | 147 | find the CVS repository, and then uses CVS rlog commands to find files to |
|
145 | 148 | convert. This means that unless a filemap is given, all files under the |
|
146 | 149 | starting directory will be converted, and that any directory |
|
147 | 150 | reorganization in the CVS sandbox is ignored. |
|
148 | 151 | |
|
149 | The options shown are the defaults. | |
|
150 | ||
|
151 | --config convert.cvsps.cache=True (boolean) | |
|
152 | Set to False to disable remote log caching, for testing and debugging | |
|
153 | purposes. | |
|
154 | ||
|
155 | --config convert.cvsps.fuzz=60 (integer) | |
|
156 | Specify the maximum time (in seconds) that is allowed between commits | |
|
157 | with identical user and log message in a single changeset. When very | |
|
158 | large files were checked in as part of a changeset then the default | |
|
159 | may not be long enough. | |
|
152 | The following options can be used with "--config": | |
|
160 | 153 | |
|
161 | --config convert.cvsps.mergeto='{{mergetobranch ([-\w]+)}}' | |
|
162 | Specify a regular expression to which commit log messages are matched. | |
|
163 | If a match occurs, then the conversion process will insert a dummy | |
|
164 | revision merging the branch on which this log message occurs to the | |
|
165 | branch indicated in the regex. | |
|
166 | ||
|
167 | --config convert.cvsps.mergefrom='{{mergefrombranch ([-\w]+)}}' | |
|
168 | Specify a regular expression to which commit log messages are matched. | |
|
169 | If a match occurs, then the conversion process will add the most | |
|
170 | recent revision on the branch indicated in the regex as the second | |
|
171 | parent of the changeset. | |
|
172 | ||
|
173 | --config hook.cvslog | |
|
174 | Specify a Python function to be called at the end of gathering the CVS | |
|
175 | log. The function is passed a list with the log entries, and can | |
|
176 | modify the entries in-place, or add or delete them. | |
|
177 | ||
|
178 | --config hook.cvschangesets | |
|
179 | Specify a Python function to be called after the changesets are | |
|
180 | calculated from the the CVS log. The function is passed a list with | |
|
181 | the changeset entries, and can modify the changesets in-place, or add | |
|
182 | or delete them. | |
|
154 | convert.cvsps.cache | |
|
155 | Set to False to disable remote log caching, for testing and | |
|
156 | debugging purposes. Default is True. | |
|
157 | convert.cvsps.fuzz | |
|
158 | Specify the maximum time (in seconds) that is allowed between | |
|
159 | commits with identical user and log message in a single | |
|
160 | changeset. When very large files were checked in as part of a | |
|
161 | changeset then the default may not be long enough. The default | |
|
162 | is 60. | |
|
163 | convert.cvsps.mergeto | |
|
164 | Specify a regular expression to which commit log messages are | |
|
165 | matched. If a match occurs, then the conversion process will | |
|
166 | insert a dummy revision merging the branch on which this log | |
|
167 | message occurs to the branch indicated in the regex. Default | |
|
168 | is "{{mergetobranch ([-\w]+)}}" | |
|
169 | convert.cvsps.mergefrom | |
|
170 | Specify a regular expression to which commit log messages are | |
|
171 | matched. If a match occurs, then the conversion process will | |
|
172 | add the most recent revision on the branch indicated in the | |
|
173 | regex as the second parent of the changeset. Default is | |
|
174 | "{{mergefrombranch ([-\w]+)}}" | |
|
175 | hook.cvslog | |
|
176 | Specify a Python function to be called at the end of gathering | |
|
177 | the CVS log. The function is passed a list with the log | |
|
178 | entries, and can modify the entries in-place, or add or delete | |
|
179 | them. | |
|
180 | hook.cvschangesets | |
|
181 | Specify a Python function to be called after the changesets | |
|
182 | are calculated from the the CVS log. The function is passed a | |
|
183 | list with the changeset entries, and can modify the changesets | |
|
184 | in-place, or add or delete them. | |
|
183 | 185 | |
|
184 | 186 | An additional "debugcvsps" Mercurial command allows the builtin changeset |
|
185 | 187 | merging code to be run without doing a conversion. Its parameters and |
@@ -199,21 +201,22 b'' | |||
|
199 | 201 | them to paths relative to the source URL, or leave them blank to disable |
|
200 | 202 | auto detection. |
|
201 | 203 | |
|
202 | --config convert.svn.branches=branches (directory name) | |
|
203 | specify the directory containing branches | |
|
204 | The following options can be set with "--config": | |
|
204 | 205 | |
|
205 | --config convert.svn.tags=tags (directory name) | |
|
206 |
specify the directory containing |
|
|
207 | ||
|
208 | --config convert.svn.trunk=trunk (directory name) | |
|
209 | specify the name of the trunk branch | |
|
206 | convert.svn.branches | |
|
207 | specify the directory containing branches. The defaults is | |
|
208 | "branches". | |
|
209 | convert.svn.tags | |
|
210 | specify the directory containing tags. The default is "tags". | |
|
211 | convert.svn.trunk | |
|
212 | specify the name of the trunk branch The defauls is "trunk". | |
|
210 | 213 | |
|
211 | 214 | Source history can be retrieved starting at a specific revision, instead |
|
212 | 215 | of being integrally converted. Only single branch conversions are |
|
213 | 216 | supported. |
|
214 | 217 | |
|
215 | --config convert.svn.startrev=0 (svn revision number) | |
|
216 | specify start Subversion revision. | |
|
218 | convert.svn.startrev | |
|
219 | specify start Subversion revision number. The default is 0. | |
|
217 | 220 | |
|
218 | 221 | Perforce Source |
|
219 | 222 | ''''''''''''''' |
@@ -222,25 +225,27 b'' | |||
|
222 | 225 | specification as source. It will convert all files in the source to a flat |
|
223 | 226 | Mercurial repository, ignoring labels, branches and integrations. Note |
|
224 | 227 | that when a depot path is given you then usually should specify a target |
|
225 | directory, because otherwise the target may be named ...-hg. | |
|
228 | directory, because otherwise the target may be named "...-hg". | |
|
226 | 229 | |
|
227 | 230 | It is possible to limit the amount of source history to be converted by |
|
228 |
specifying an initial Perforce revision |
|
|
231 | specifying an initial Perforce revision: | |
|
229 | 232 | |
|
230 | --config convert.p4.startrev=0 (perforce changelist number) | |
|
231 |
specify initial Perforce revision |
|
|
233 | convert.p4.startrev | |
|
234 | specify initial Perforce revision, a Perforce changelist | |
|
235 | number). | |
|
232 | 236 | |
|
233 | 237 | Mercurial Destination |
|
234 | 238 | ''''''''''''''''''''' |
|
235 | 239 | |
|
236 | --config convert.hg.clonebranches=False (boolean) | |
|
237 | dispatch source branches in separate clones. | |
|
240 | The following options are supported: | |
|
238 | 241 | |
|
239 | --config convert.hg.tagsbranch=default (branch name) | |
|
240 | tag revisions branch name | |
|
241 | ||
|
242 | --config convert.hg.usebranchnames=True (boolean) | |
|
243 | preserve branch names | |
|
242 | convert.hg.clonebranches | |
|
243 | dispatch source branches in separate clones. The default is | |
|
244 | False. | |
|
245 | convert.hg.tagsbranch | |
|
246 | branch name for tag revisions, defaults to "default". | |
|
247 | convert.hg.usebranchnames | |
|
248 | preserve branch names. The default is True | |
|
244 | 249 | |
|
245 | 250 | options: |
|
246 | 251 | |
@@ -376,7 +381,7 b' testing: convert must not produce duplic' | |||
|
376 | 381 | |
|
377 | 382 | contents of fncache file: |
|
378 | 383 | |
|
379 | $ cat b/.hg/store/fncache | |
|
384 | $ cat b/.hg/store/fncache | sort | |
|
380 | 385 | data/a.i |
|
381 | 386 | data/b.i |
|
382 | 387 |
@@ -6,6 +6,7 b' Show all commands except debug commands' | |||
|
6 | 6 | archive |
|
7 | 7 | backout |
|
8 | 8 | bisect |
|
9 | bookmarks | |
|
9 | 10 | branch |
|
10 | 11 | branches |
|
11 | 12 | bundle |
@@ -74,6 +75,7 b' Show debug commands if there are no othe' | |||
|
74 | 75 | debugdata |
|
75 | 76 | debugdate |
|
76 | 77 | debugfsinfo |
|
78 | debugignore | |
|
77 | 79 | debugindex |
|
78 | 80 | debugindexdot |
|
79 | 81 | debuginstall |
@@ -187,8 +189,8 b' Show all commands + options' | |||
|
187 | 189 | init: ssh, remotecmd, insecure |
|
188 | 190 | log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude |
|
189 | 191 | merge: force, tool, rev, preview |
|
190 | pull: update, force, rev, branch, ssh, remotecmd, insecure | |
|
191 | push: force, rev, branch, new-branch, ssh, remotecmd, insecure | |
|
192 | pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure | |
|
193 | push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure | |
|
192 | 194 | remove: after, force, include, exclude |
|
193 | 195 | serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate |
|
194 | 196 | status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos |
@@ -198,6 +200,7 b' Show all commands + options' | |||
|
198 | 200 | archive: no-decode, prefix, rev, type, subrepos, include, exclude |
|
199 | 201 | backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user |
|
200 | 202 | bisect: reset, good, bad, skip, command, noupdate |
|
203 | bookmarks: force, rev, delete, rename | |
|
201 | 204 | branch: force, clean |
|
202 | 205 | branches: active, closed |
|
203 | 206 | bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure |
@@ -212,6 +215,7 b' Show all commands + options' | |||
|
212 | 215 | debugdata: |
|
213 | 216 | debugdate: extended |
|
214 | 217 | debugfsinfo: |
|
218 | debugignore: | |
|
215 | 219 | debugindex: format |
|
216 | 220 | debugindexdot: |
|
217 | 221 | debuginstall: |
@@ -228,10 +232,10 b' Show all commands + options' | |||
|
228 | 232 | help: |
|
229 | 233 | identify: rev, num, id, branch, tags |
|
230 | 234 | import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity |
|
231 | incoming: force, newest-first, bundle, rev, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos | |
|
235 | incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos | |
|
232 | 236 | locate: rev, print0, fullpath, include, exclude |
|
233 | 237 | manifest: rev |
|
234 | outgoing: force, rev, newest-first, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos | |
|
238 | outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos | |
|
235 | 239 | parents: rev, style, template |
|
236 | 240 | paths: |
|
237 | 241 | recover: |
@@ -8,6 +8,7 b' def f(obj):' | |||
|
8 | 8 | l = repr(obj) |
|
9 | 9 | l = rsub("0x[0-9a-fA-F]+", "0x?", l) |
|
10 | 10 | l = rsub("from '.*'", "from '?'", l) |
|
11 | l = rsub("'<[a-z]*>'", "'<whatever>'", l) | |
|
11 | 12 | return l |
|
12 | 13 | |
|
13 | 14 | import os |
@@ -11,5 +11,5 b" fred = <unloaded module 're'>" | |||
|
11 | 11 | fred.sub = <function sub at 0x?> |
|
12 | 12 | fred = <proxied module 're'> |
|
13 | 13 | re = <unloaded module 'sys'> |
|
14 |
re.stderr = <open file '< |
|
|
14 | re.stderr = <open file '<whatever>', mode 'w' at 0x?> | |
|
15 | 15 | re = <proxied module 'sys'> |
@@ -46,3 +46,20 b' Binary git diffstat:' | |||
|
46 | 46 | b | Bin |
|
47 | 47 | 1 files changed, 0 insertions(+), 0 deletions(-) |
|
48 | 48 | |
|
49 | $ hg ci -m createb | |
|
50 | ||
|
51 | $ printf '\0' > "file with spaces" | |
|
52 | $ hg add "file with spaces" | |
|
53 | ||
|
54 | Filename with spaces diffstat: | |
|
55 | ||
|
56 | $ hg diff --stat | |
|
57 | file with spaces | 0 | |
|
58 | 1 files changed, 0 insertions(+), 0 deletions(-) | |
|
59 | ||
|
60 | Filename with spaces git diffstat: | |
|
61 | ||
|
62 | $ hg diff --stat --git | |
|
63 | file with spaces | Bin | |
|
64 | 1 files changed, 0 insertions(+), 0 deletions(-) | |
|
65 |
@@ -19,5 +19,11 b' doctest.testmod(mercurial.url)' | |||
|
19 | 19 | import mercurial.util |
|
20 | 20 | doctest.testmod(mercurial.util) |
|
21 | 21 | |
|
22 | import mercurial.encoding | |
|
23 | doctest.testmod(mercurial.encoding) | |
|
24 | ||
|
25 | import mercurial.hgweb.hgwebdir_mod | |
|
26 | doctest.testmod(mercurial.hgweb.hgwebdir_mod) | |
|
27 | ||
|
22 | 28 | import hgext.convert.cvsps |
|
23 | 29 | doctest.testmod(hgext.convert.cvsps) |
@@ -240,6 +240,4 b' hg log (dolphin)' | |||
|
240 | 240 | abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc) |
|
241 | 241 | [255] |
|
242 | 242 | $ cp latin-1-tag .hg/branch |
|
243 |
$ HGENCODING=latin-1 hg ci -m ' |
|
|
244 | abort: branch name not in UTF-8! | |
|
245 | [255] | |
|
243 | $ HGENCODING=latin-1 hg ci -m 'auto-promote legacy name' |
@@ -315,6 +315,11 b' Broken disabled extension and command:' | |||
|
315 | 315 | |
|
316 | 316 | use "hg help extensions" for information on enabling extensions |
|
317 | 317 | |
|
318 | $ cat > hgext/forest.py <<EOF | |
|
319 | > cmdtable = None | |
|
320 | > EOF | |
|
318 | 321 | $ hg --config extensions.path=./path.py help foo > /dev/null |
|
322 | warning: error finding commands in $TESTTMP/hgext/forest.py | |
|
319 | 323 | hg: unknown command 'foo' |
|
324 | warning: error finding commands in $TESTTMP/hgext/forest.py | |
|
320 | 325 | [255] |
@@ -6,7 +6,7 b' Init repo1:' | |||
|
6 | 6 | $ hg add |
|
7 | 7 | adding a |
|
8 | 8 | $ hg ci -m first |
|
9 | $ cat .hg/store/fncache | |
|
9 | $ cat .hg/store/fncache | sort | |
|
10 | 10 | data/a.i |
|
11 | 11 | |
|
12 | 12 | Testing a.i/b: |
@@ -16,7 +16,7 b' Testing a.i/b:' | |||
|
16 | 16 | $ hg add |
|
17 | 17 | adding a.i/b |
|
18 | 18 | $ hg ci -m second |
|
19 | $ cat .hg/store/fncache | |
|
19 | $ cat .hg/store/fncache | sort | |
|
20 | 20 | data/a.i |
|
21 | 21 | data/a.i.hg/b.i |
|
22 | 22 | |
@@ -27,10 +27,10 b' Testing a.i.hg/c:' | |||
|
27 | 27 | $ hg add |
|
28 | 28 | adding a.i.hg/c |
|
29 | 29 | $ hg ci -m third |
|
30 | $ cat .hg/store/fncache | |
|
30 | $ cat .hg/store/fncache | sort | |
|
31 | 31 | data/a.i |
|
32 | data/a.i.hg.hg/c.i | |
|
32 | 33 | data/a.i.hg/b.i |
|
33 | data/a.i.hg.hg/c.i | |
|
34 | 34 | |
|
35 | 35 | Testing verify: |
|
36 | 36 |
@@ -284,6 +284,7 b' Testing -h/--help:' | |||
|
284 | 284 | archive create an unversioned archive of a repository revision |
|
285 | 285 | backout reverse effect of earlier changeset |
|
286 | 286 | bisect subdivision search of changesets |
|
287 | bookmarks track a line of development with movable markers | |
|
287 | 288 | branch set or show the current branch name |
|
288 | 289 | branches list repository named branches |
|
289 | 290 | bundle create a changegroup file |
@@ -360,6 +361,7 b' Testing -h/--help:' | |||
|
360 | 361 | archive create an unversioned archive of a repository revision |
|
361 | 362 | backout reverse effect of earlier changeset |
|
362 | 363 | bisect subdivision search of changesets |
|
364 | bookmarks track a line of development with movable markers | |
|
363 | 365 | branch set or show the current branch name |
|
364 | 366 | branches list repository named branches |
|
365 | 367 | bundle create a changegroup file |
@@ -10,6 +10,19 b'' | |||
|
10 | 10 | > find $1 -type f | python $TESTTMP/nlinks.py |
|
11 | 11 | > } |
|
12 | 12 | |
|
13 | Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux): | |
|
14 | ||
|
15 | $ cat > linkcp.py <<EOF | |
|
16 | > from mercurial import util | |
|
17 | > import sys | |
|
18 | > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) | |
|
19 | > EOF | |
|
20 | ||
|
21 | $ linkcp() | |
|
22 | > { | |
|
23 | > python $TESTTMP/linkcp.py $1 $2 | |
|
24 | > } | |
|
25 | ||
|
13 | 26 | Prepare repo r1: |
|
14 | 27 | |
|
15 | 28 | $ mkdir r1 |
@@ -152,3 +165,167 b' Committing a change to f1 in r1 must bre' | |||
|
152 | 165 | 1 r2/.hg/store/data/f1.i |
|
153 | 166 | 1 r2/.hg/store/fncache |
|
154 | 167 | |
|
168 | ||
|
169 | $ cd r3 | |
|
170 | $ hg tip --template '{rev}:{node|short}\n' | |
|
171 | 11:a6451b6bc41f | |
|
172 | $ echo bla > f1 | |
|
173 | $ hg ci -m1 | |
|
174 | $ cd .. | |
|
175 | ||
|
176 | Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'): | |
|
177 | ||
|
178 | $ linkcp r3 r4 | |
|
179 | ||
|
180 | r4 has hardlinks in the working dir (not just inside .hg): | |
|
181 | ||
|
182 | $ nlinksdir r4 | |
|
183 | 2 r4/.hg/00changelog.i | |
|
184 | 2 r4/.hg/branch | |
|
185 | 2 r4/.hg/cache/branchheads | |
|
186 | 2 r4/.hg/cache/tags | |
|
187 | 2 r4/.hg/dirstate | |
|
188 | 2 r4/.hg/hgrc | |
|
189 | 2 r4/.hg/last-message.txt | |
|
190 | 2 r4/.hg/requires | |
|
191 | 2 r4/.hg/store/00changelog.i | |
|
192 | 2 r4/.hg/store/00manifest.i | |
|
193 | 2 r4/.hg/store/data/d1/f2.d | |
|
194 | 2 r4/.hg/store/data/d1/f2.i | |
|
195 | 2 r4/.hg/store/data/f1.i | |
|
196 | 2 r4/.hg/store/fncache | |
|
197 | 2 r4/.hg/store/undo | |
|
198 | 2 r4/.hg/undo.branch | |
|
199 | 2 r4/.hg/undo.desc | |
|
200 | 2 r4/.hg/undo.dirstate | |
|
201 | 2 r4/d1/data1 | |
|
202 | 2 r4/d1/f2 | |
|
203 | 2 r4/f1 | |
|
204 | ||
|
205 | Update back to revision 11 in r4 should break hardlink of file f1: | |
|
206 | ||
|
207 | $ hg -R r4 up 11 | |
|
208 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved | |
|
209 | ||
|
210 | $ nlinksdir r4 | |
|
211 | 2 r4/.hg/00changelog.i | |
|
212 | 1 r4/.hg/branch | |
|
213 | 2 r4/.hg/cache/branchheads | |
|
214 | 2 r4/.hg/cache/tags | |
|
215 | 1 r4/.hg/dirstate | |
|
216 | 2 r4/.hg/hgrc | |
|
217 | 2 r4/.hg/last-message.txt | |
|
218 | 2 r4/.hg/requires | |
|
219 | 2 r4/.hg/store/00changelog.i | |
|
220 | 2 r4/.hg/store/00manifest.i | |
|
221 | 2 r4/.hg/store/data/d1/f2.d | |
|
222 | 2 r4/.hg/store/data/d1/f2.i | |
|
223 | 2 r4/.hg/store/data/f1.i | |
|
224 | 2 r4/.hg/store/fncache | |
|
225 | 2 r4/.hg/store/undo | |
|
226 | 2 r4/.hg/undo.branch | |
|
227 | 2 r4/.hg/undo.desc | |
|
228 | 2 r4/.hg/undo.dirstate | |
|
229 | 2 r4/d1/data1 | |
|
230 | 2 r4/d1/f2 | |
|
231 | 1 r4/f1 | |
|
232 | ||
|
233 | ||
|
234 | Test hardlinking outside hg: | |
|
235 | ||
|
236 | $ mkdir x | |
|
237 | $ echo foo > x/a | |
|
238 | ||
|
239 | $ linkcp x y | |
|
240 | $ echo bar >> y/a | |
|
241 | ||
|
242 | No diff if hardlink: | |
|
243 | ||
|
244 | $ diff x/a y/a | |
|
245 | ||
|
246 | Test mq hardlinking: | |
|
247 | ||
|
248 | $ echo "[extensions]" >> $HGRCPATH | |
|
249 | $ echo "mq=" >> $HGRCPATH | |
|
250 | ||
|
251 | $ hg init a | |
|
252 | $ cd a | |
|
253 | ||
|
254 | $ hg qimport -n foo - << EOF | |
|
255 | > # HG changeset patch | |
|
256 | > # Date 1 0 | |
|
257 | > diff -r 2588a8b53d66 a | |
|
258 | > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
|
259 | > +++ b/a Wed Jul 23 15:54:29 2008 +0200 | |
|
260 | > @@ -0,0 +1,1 @@ | |
|
261 | > +a | |
|
262 | > EOF | |
|
263 | adding foo to series file | |
|
264 | ||
|
265 | $ hg qpush | |
|
266 | applying foo | |
|
267 | now at: foo | |
|
268 | ||
|
269 | $ cd .. | |
|
270 | $ linkcp a b | |
|
271 | $ cd b | |
|
272 | ||
|
273 | $ hg qimport -n bar - << EOF | |
|
274 | > # HG changeset patch | |
|
275 | > # Date 2 0 | |
|
276 | > diff -r 2588a8b53d66 a | |
|
277 | > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 | |
|
278 | > +++ b/b Wed Jul 23 15:54:29 2008 +0200 | |
|
279 | > @@ -0,0 +1,1 @@ | |
|
280 | > +b | |
|
281 | > EOF | |
|
282 | adding bar to series file | |
|
283 | ||
|
284 | $ hg qpush | |
|
285 | applying bar | |
|
286 | now at: bar | |
|
287 | ||
|
288 | $ cat .hg/patches/status | |
|
289 | 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo | |
|
290 | 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar | |
|
291 | ||
|
292 | $ cat .hg/patches/series | |
|
293 | foo | |
|
294 | bar | |
|
295 | ||
|
296 | $ cat ../a/.hg/patches/status | |
|
297 | 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo | |
|
298 | ||
|
299 | $ cat ../a/.hg/patches/series | |
|
300 | foo | |
|
301 | ||
|
302 | Test tags hardlinking: | |
|
303 | ||
|
304 | $ hg qdel -r qbase:qtip | |
|
305 | patch foo finalized without changeset message | |
|
306 | patch bar finalized without changeset message | |
|
307 | ||
|
308 | $ hg tag -l lfoo | |
|
309 | $ hg tag foo | |
|
310 | ||
|
311 | $ cd .. | |
|
312 | $ linkcp b c | |
|
313 | $ cd c | |
|
314 | ||
|
315 | $ hg tag -l -r 0 lbar | |
|
316 | $ hg tag -r 0 bar | |
|
317 | ||
|
318 | $ cat .hgtags | |
|
319 | 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo | |
|
320 | 430ed4828a74fa4047bc816a25500f7472ab4bfe bar | |
|
321 | ||
|
322 | $ cat .hg/localtags | |
|
323 | 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo | |
|
324 | 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar | |
|
325 | ||
|
326 | $ cat ../b/.hgtags | |
|
327 | 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo | |
|
328 | ||
|
329 | $ cat ../b/.hg/localtags | |
|
330 | 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo | |
|
331 |
@@ -55,6 +55,7 b' Short help:' | |||
|
55 | 55 | archive create an unversioned archive of a repository revision |
|
56 | 56 | backout reverse effect of earlier changeset |
|
57 | 57 | bisect subdivision search of changesets |
|
58 | bookmarks track a line of development with movable markers | |
|
58 | 59 | branch set or show the current branch name |
|
59 | 60 | branches list repository named branches |
|
60 | 61 | bundle create a changegroup file |
@@ -127,6 +128,7 b' Short help:' | |||
|
127 | 128 | archive create an unversioned archive of a repository revision |
|
128 | 129 | backout reverse effect of earlier changeset |
|
129 | 130 | bisect subdivision search of changesets |
|
131 | bookmarks track a line of development with movable markers | |
|
130 | 132 | branch set or show the current branch name |
|
131 | 133 | branches list repository named branches |
|
132 | 134 | bundle create a changegroup file |
@@ -649,6 +651,7 b' Test that default list of commands omits' | |||
|
649 | 651 | archive create an unversioned archive of a repository revision |
|
650 | 652 | backout reverse effect of earlier changeset |
|
651 | 653 | bisect subdivision search of changesets |
|
654 | bookmarks track a line of development with movable markers | |
|
652 | 655 | branch set or show the current branch name |
|
653 | 656 | branches list repository named branches |
|
654 | 657 | bundle create a changegroup file |
@@ -120,3 +120,5 b' Check it does not ignore the current dir' | |||
|
120 | 120 | $ hg status . |
|
121 | 121 | A b.o |
|
122 | 122 | |
|
123 | $ hg debugignore | |
|
124 | (?:(?:|.*/)[^/]*(?:/|$)) |
@@ -99,6 +99,7 b' rss-log without basedir' | |||
|
99 | 99 | > rcoll=$root/** |
|
100 | 100 | > star=* |
|
101 | 101 | > starstar=** |
|
102 | > astar=webdir/a/* | |
|
102 | 103 | > EOF |
|
103 | 104 | $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ |
|
104 | 105 | > -A access-paths.log -E error-paths-2.log |
@@ -130,6 +131,8 b' should succeed, slashy names' | |||
|
130 | 131 | /starstar/webdir/b/ |
|
131 | 132 | /starstar/webdir/b/d/ |
|
132 | 133 | /starstar/webdir/c/ |
|
134 | /astar/ | |
|
135 | /astar/.hg/patches/ | |
|
133 | 136 | |
|
134 | 137 | $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=paper' |
|
135 | 138 | 200 Script output follows |
@@ -322,6 +325,22 b' should succeed, slashy names' | |||
|
322 | 325 | <td class="indexlinks"></td> |
|
323 | 326 | </tr> |
|
324 | 327 | |
|
328 | <tr class="parity0"> | |
|
329 | <td><a href="/astar/?style=paper">astar</a></td> | |
|
330 | <td>unknown</td> | |
|
331 | <td>Foo Bar <foo.bar@example.com></td> | |
|
332 | <td class="age">* ago</td> (glob) | |
|
333 | <td class="indexlinks"></td> | |
|
334 | </tr> | |
|
335 | ||
|
336 | <tr class="parity1"> | |
|
337 | <td><a href="/astar/.hg/patches/?style=paper">astar/.hg/patches</a></td> | |
|
338 | <td>unknown</td> | |
|
339 | <td>Foo Bar <foo.bar@example.com></td> | |
|
340 | <td class="age">* ago</td> (glob) | |
|
341 | <td class="indexlinks"></td> | |
|
342 | </tr> | |
|
343 | ||
|
325 | 344 | </table> |
|
326 | 345 | </div> |
|
327 | 346 | </div> |
@@ -470,7 +489,7 b" Test [paths] '*' extension" | |||
|
470 | 489 | |
|
471 | 490 | a |
|
472 | 491 | |
|
473 | est [paths] '**' extension | |
|
492 | Test [paths] '**' extension | |
|
474 | 493 | |
|
475 | 494 | $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/?style=raw' |
|
476 | 495 | 200 Script output follows |
@@ -486,6 +505,12 b" est [paths] '**' extension" | |||
|
486 | 505 | 200 Script output follows |
|
487 | 506 | |
|
488 | 507 | d |
|
508 | ||
|
509 | Test [paths] '*' in a repo root | |
|
510 | ||
|
511 | $ hg id http://localhost:$HGPORT1/astar | |
|
512 | 8580ff50825a | |
|
513 | ||
|
489 | 514 | $ "$TESTDIR/killdaemons.py" |
|
490 | 515 | $ cat > paths.conf <<EOF |
|
491 | 516 | > [paths] |
@@ -1,18 +1,16 b'' | |||
|
1 | $ cp "$TESTDIR"/printenv.py . | |
|
2 | ||
|
3 | 1 | commit hooks can see env vars |
|
4 | 2 | |
|
5 | 3 | $ hg init a |
|
6 | 4 | $ cd a |
|
7 | 5 | $ echo "[hooks]" > .hg/hgrc |
|
8 |
$ echo 'commit = unset HG_LOCAL HG_TAG; python |
|
|
9 |
$ echo 'commit.b = unset HG_LOCAL HG_TAG; python |
|
|
10 |
$ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python |
|
|
11 |
$ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python |
|
|
6 | $ echo 'commit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit' >> .hg/hgrc | |
|
7 | $ echo 'commit.b = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py commit.b' >> .hg/hgrc | |
|
8 | $ echo 'precommit = unset HG_LOCAL HG_NODE HG_TAG; python "$TESTDIR"/printenv.py precommit' >> .hg/hgrc | |
|
9 | $ echo 'pretxncommit = unset HG_LOCAL HG_TAG; python "$TESTDIR"/printenv.py pretxncommit' >> .hg/hgrc | |
|
12 | 10 | $ echo 'pretxncommit.tip = hg -q tip' >> .hg/hgrc |
|
13 |
$ echo 'pre-identify = python |
|
|
14 |
$ echo 'pre-cat = python |
|
|
15 |
$ echo 'post-cat = python |
|
|
11 | $ echo 'pre-identify = python "$TESTDIR"/printenv.py pre-identify 1' >> .hg/hgrc | |
|
12 | $ echo 'pre-cat = python "$TESTDIR"/printenv.py pre-cat' >> .hg/hgrc | |
|
13 | $ echo 'post-cat = python "$TESTDIR"/printenv.py post-cat' >> .hg/hgrc | |
|
16 | 14 | $ echo a > a |
|
17 | 15 | $ hg add a |
|
18 | 16 | $ hg commit -m a |
@@ -30,9 +28,9 b' commit hooks can see env vars' | |||
|
30 | 28 | changegroup hooks can see env vars |
|
31 | 29 | |
|
32 | 30 | $ echo '[hooks]' > .hg/hgrc |
|
33 |
$ echo 'prechangegroup = python |
|
|
34 |
$ echo 'changegroup = python |
|
|
35 |
$ echo 'incoming = python |
|
|
31 | $ echo 'prechangegroup = python "$TESTDIR"/printenv.py prechangegroup' >> .hg/hgrc | |
|
32 | $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc | |
|
33 | $ echo 'incoming = python "$TESTDIR"/printenv.py incoming' >> .hg/hgrc | |
|
36 | 34 | |
|
37 | 35 | pretxncommit and commit hooks can see both parents of merge |
|
38 | 36 | |
@@ -68,21 +66,21 b' pretxncommit and commit hooks can see bo' | |||
|
68 | 66 | test generic hooks |
|
69 | 67 | |
|
70 | 68 | $ hg id |
|
71 |
pre-identify hook: HG_ARGS=id HG_OPTS={' |
|
|
69 | pre-identify hook: HG_ARGS=id HG_OPTS={'branch': None, 'id': None, 'num': None, 'rev': '', 'tags': None} HG_PATS=[] | |
|
72 | 70 | warning: pre-identify hook exited with status 1 |
|
73 | 71 | [1] |
|
74 | 72 | $ hg cat b |
|
75 |
pre-cat hook: HG_ARGS=cat b HG_OPTS={ |
|
|
76 | post-cat hook: HG_ARGS=cat b HG_OPTS={'rev': '', 'decode': None, 'exclude': [], 'output': '', 'include': []} HG_PATS=['b'] HG_RESULT=0 | |
|
73 | pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] | |
|
77 | 74 | b |
|
75 | post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0 | |
|
78 | 76 | |
|
79 | 77 | $ cd ../b |
|
80 | 78 | $ hg pull ../a |
|
81 | prechangegroup hook: HG_SOURCE=pull HG_URL=file: | |
|
82 | changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file: | |
|
83 | incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file: | |
|
84 | incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file: | |
|
85 | incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file: | |
|
79 | prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
80 | changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
81 | incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
82 | incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
83 | incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
86 | 84 | pulling from ../a |
|
87 | 85 | searching for changes |
|
88 | 86 | adding changesets |
@@ -94,8 +92,8 b' test generic hooks' | |||
|
94 | 92 | tag hooks can see env vars |
|
95 | 93 | |
|
96 | 94 | $ cd ../a |
|
97 |
$ echo 'pretag = python |
|
|
98 |
$ echo 'tag = unset HG_PARENT1 HG_PARENT2; python |
|
|
95 | $ echo 'pretag = python "$TESTDIR"/printenv.py pretag' >> .hg/hgrc | |
|
96 | $ echo 'tag = unset HG_PARENT1 HG_PARENT2; python "$TESTDIR"/printenv.py tag' >> .hg/hgrc | |
|
99 | 97 | $ hg tag -d '3 0' a |
|
100 | 98 | pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a |
|
101 | 99 | precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 |
@@ -110,7 +108,7 b' tag hooks can see env vars' | |||
|
110 | 108 | |
|
111 | 109 | pretag hook can forbid tagging |
|
112 | 110 | |
|
113 |
$ echo 'pretag.forbid = python |
|
|
111 | $ echo 'pretag.forbid = python "$TESTDIR"/printenv.py pretag.forbid 1' >> .hg/hgrc | |
|
114 | 112 | $ hg tag -d '4 0' fa |
|
115 | 113 | pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa |
|
116 | 114 | pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa |
@@ -126,7 +124,7 b' pretxncommit hook can see changeset, can' | |||
|
126 | 124 | more there after |
|
127 | 125 | |
|
128 | 126 | $ echo 'pretxncommit.forbid0 = hg tip -q' >> .hg/hgrc |
|
129 |
$ echo 'pretxncommit.forbid1 = python |
|
|
127 | $ echo 'pretxncommit.forbid1 = python "$TESTDIR"/printenv.py pretxncommit.forbid 1' >> .hg/hgrc | |
|
130 | 128 | $ echo z > z |
|
131 | 129 | $ hg add z |
|
132 | 130 | $ hg -q tip |
@@ -146,7 +144,7 b' more there after' | |||
|
146 | 144 | |
|
147 | 145 | precommit hook can prevent commit |
|
148 | 146 | |
|
149 |
$ echo 'precommit.forbid = python |
|
|
147 | $ echo 'precommit.forbid = python "$TESTDIR"/printenv.py precommit.forbid 1' >> .hg/hgrc | |
|
150 | 148 | $ hg commit -m 'fail' -d '4 0' |
|
151 | 149 | precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 |
|
152 | 150 | precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 |
@@ -157,14 +155,14 b' precommit hook can prevent commit' | |||
|
157 | 155 | |
|
158 | 156 | preupdate hook can prevent update |
|
159 | 157 | |
|
160 |
$ echo 'preupdate = python |
|
|
158 | $ echo 'preupdate = python "$TESTDIR"/printenv.py preupdate' >> .hg/hgrc | |
|
161 | 159 | $ hg update 1 |
|
162 | 160 | preupdate hook: HG_PARENT1=ab228980c14d |
|
163 | 161 | 0 files updated, 0 files merged, 2 files removed, 0 files unresolved |
|
164 | 162 | |
|
165 | 163 | update hook |
|
166 | 164 | |
|
167 |
$ echo 'update = python |
|
|
165 | $ echo 'update = python "$TESTDIR"/printenv.py update' >> .hg/hgrc | |
|
168 | 166 | $ hg update |
|
169 | 167 | preupdate hook: HG_PARENT1=539e4b31b6dc |
|
170 | 168 | update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc |
@@ -176,9 +174,9 b' prechangegroup hook can prevent incoming' | |||
|
176 | 174 | $ hg -q tip |
|
177 | 175 | 3:07f3376c1e65 |
|
178 | 176 | $ echo '[hooks]' > .hg/hgrc |
|
179 |
$ echo 'prechangegroup.forbid = python |
|
|
177 | $ echo 'prechangegroup.forbid = python "$TESTDIR"/printenv.py prechangegroup.forbid 1' >> .hg/hgrc | |
|
180 | 178 | $ hg pull ../a |
|
181 | prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file: | |
|
179 | prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
182 | 180 | pulling from ../a |
|
183 | 181 | searching for changes |
|
184 | 182 | abort: prechangegroup.forbid hook exited with status 1 |
@@ -189,10 +187,10 b' incoming changes no longer there after' | |||
|
189 | 187 | |
|
190 | 188 | $ echo '[hooks]' > .hg/hgrc |
|
191 | 189 | $ echo 'pretxnchangegroup.forbid0 = hg tip -q' >> .hg/hgrc |
|
192 |
$ echo 'pretxnchangegroup.forbid1 = python |
|
|
190 | $ echo 'pretxnchangegroup.forbid1 = python "$TESTDIR"/printenv.py pretxnchangegroup.forbid 1' >> .hg/hgrc | |
|
193 | 191 | $ hg pull ../a |
|
194 | 192 | 4:539e4b31b6dc |
|
195 | pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file: | |
|
193 | pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a | |
|
196 | 194 | pulling from ../a |
|
197 | 195 | searching for changes |
|
198 | 196 | adding changesets |
@@ -210,8 +208,8 b' outgoing hooks can see env vars' | |||
|
210 | 208 | |
|
211 | 209 | $ rm .hg/hgrc |
|
212 | 210 | $ echo '[hooks]' > ../a/.hg/hgrc |
|
213 |
$ echo 'preoutgoing = python |
|
|
214 |
$ echo 'outgoing = python |
|
|
211 | $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> ../a/.hg/hgrc | |
|
212 | $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> ../a/.hg/hgrc | |
|
215 | 213 | $ hg pull ../a |
|
216 | 214 | preoutgoing hook: HG_SOURCE=pull |
|
217 | 215 | outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull |
@@ -227,7 +225,7 b' outgoing hooks can see env vars' | |||
|
227 | 225 | |
|
228 | 226 | preoutgoing hook can prevent outgoing changes |
|
229 | 227 | |
|
230 |
$ echo 'preoutgoing.forbid = python |
|
|
228 | $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> ../a/.hg/hgrc | |
|
231 | 229 | $ hg pull ../a |
|
232 | 230 | preoutgoing hook: HG_SOURCE=pull |
|
233 | 231 | preoutgoing.forbid hook: HG_SOURCE=pull |
@@ -240,8 +238,8 b' outgoing hooks work for local clones' | |||
|
240 | 238 | |
|
241 | 239 | $ cd .. |
|
242 | 240 | $ echo '[hooks]' > a/.hg/hgrc |
|
243 |
$ echo 'preoutgoing = python |
|
|
244 |
$ echo 'outgoing = python |
|
|
241 | $ echo 'preoutgoing = python "$TESTDIR"/printenv.py preoutgoing' >> a/.hg/hgrc | |
|
242 | $ echo 'outgoing = python "$TESTDIR"/printenv.py outgoing' >> a/.hg/hgrc | |
|
245 | 243 | $ hg clone a c |
|
246 | 244 | preoutgoing hook: HG_SOURCE=clone |
|
247 | 245 | outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone |
@@ -251,7 +249,7 b' outgoing hooks work for local clones' | |||
|
251 | 249 | |
|
252 | 250 | preoutgoing hook can prevent outgoing changes for local clones |
|
253 | 251 | |
|
254 |
$ echo 'preoutgoing.forbid = python |
|
|
252 | $ echo 'preoutgoing.forbid = python "$TESTDIR"/printenv.py preoutgoing.forbid 1' >> a/.hg/hgrc | |
|
255 | 253 | $ hg clone a zzz |
|
256 | 254 | preoutgoing hook: HG_SOURCE=clone |
|
257 | 255 | preoutgoing.forbid hook: HG_SOURCE=clone |
@@ -104,13 +104,21 b' do not use the proxy if it is in the no ' | |||
|
104 | 104 | * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) |
|
105 | 105 | * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) |
|
106 | 106 | * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) |
|
107 | * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) | |
|
108 | * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) | |
|
107 | 109 | * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) |
|
108 | 110 | * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) |
|
109 | 111 | * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) |
|
112 | * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) | |
|
113 | * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) | |
|
110 | 114 | * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) |
|
111 | 115 | * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) |
|
112 | 116 | * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) |
|
117 | * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) | |
|
118 | * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) | |
|
113 | 119 | * - - [*] "GET http://localhost:$HGPORT/?pairs=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000&cmd=between HTTP/1.1" - - (glob) |
|
114 | 120 | * - - [*] "GET http://localhost:$HGPORT/?cmd=heads HTTP/1.1" - - (glob) |
|
115 | 121 | * - - [*] "GET http://localhost:$HGPORT/?cmd=changegroup&roots=0000000000000000000000000000000000000000 HTTP/1.1" - - (glob) |
|
122 | * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) | |
|
123 | * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys&namespace=bookmarks HTTP/1.1" - - (glob) | |
|
116 | 124 |
@@ -1,5 +1,4 b'' | |||
|
1 | 1 | |
|
2 | $ cp "$TESTDIR"/printenv.py . | |
|
3 | 2 | $ hg init test |
|
4 | 3 | $ cd test |
|
5 | 4 | $ echo foo>foo |
@@ -75,7 +74,7 b' pull' | |||
|
75 | 74 | |
|
76 | 75 | $ cd copy-pull |
|
77 | 76 | $ echo '[hooks]' >> .hg/hgrc |
|
78 |
$ echo 'changegroup = python |
|
|
77 | $ echo 'changegroup = python "$TESTDIR"/printenv.py changegroup' >> .hg/hgrc | |
|
79 | 78 | $ hg pull |
|
80 | 79 | changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=http://localhost:$HGPORT1/ |
|
81 | 80 | pulling from http://localhost:$HGPORT1/ |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file renamed from tests/test-no-symlinks to tests/test-no-symlinks.t | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file was removed | |
This diff has been collapsed as it changes many lines, (582 lines changed) Show them Hide them |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now