@@ -11,9 +11,34 b'' | |||
|
11 | 11 | # |
|
12 | 12 | # <alias email> <actual email> |
|
13 | 13 | |
|
14 | import sys | |
|
15 | 14 | from mercurial.i18n import gettext as _ |
|
16 | 15 | from mercurial import hg, mdiff, cmdutil, ui, util, templater, node |
|
16 | import os, sys | |
|
17 | ||
|
18 | def get_tty_width(): | |
|
19 | if 'COLUMNS' in os.environ: | |
|
20 | try: | |
|
21 | return int(os.environ['COLUMNS']) | |
|
22 | except ValueError: | |
|
23 | pass | |
|
24 | try: | |
|
25 | import termios, fcntl, struct | |
|
26 | buf = 'abcd' | |
|
27 | for dev in (sys.stdout, sys.stdin): | |
|
28 | try: | |
|
29 | if buf != 'abcd': | |
|
30 | break | |
|
31 | fd = dev.fileno() | |
|
32 | if not os.isatty(fd): | |
|
33 | continue | |
|
34 | buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf) | |
|
35 | except ValueError: | |
|
36 | pass | |
|
37 | if buf != 'abcd': | |
|
38 | return struct.unpack('hh', buf)[1] | |
|
39 | except ImportError: | |
|
40 | pass | |
|
41 | return 80 | |
|
17 | 42 | |
|
18 | 43 | def __gather(ui, repo, node1, node2): |
|
19 | 44 | def dirtywork(f, mmap1, mmap2): |
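
Note on the get_tty_width() helper added above: TIOCGWINSZ fills a winsize structure whose first two shorts are rows and columns, which is why the patch unpacks 'hh' and keeps index [1]. Below is a minimal standalone sketch of the same idea, not part of the patch; the name tty_columns and the zero-packed probe buffer are illustrative assumptions.

import fcntl, struct, sys, termios

def tty_columns(default=80):
    # Ask the terminal driver for its size; TIOCGWINSZ writes packed shorts
    # (rows, cols, ...) into the buffer, and we keep only the column count.
    try:
        buf = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ,
                          struct.pack('hh', 0, 0))
        return struct.unpack('hh', buf)[1] or default
    except (IOError, OSError, ValueError):
        return default

print(tty_columns())
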
@@ -159,8 +184,9 b' def churn(ui, repo, **opts):' | |||
|
159 | 184 | |
|
160 | 185 | maximum = ordered[0][1] |
|
161 | 186 | |
|
162 | ui.note("Assuming 80 character terminal\n") | |
|
163 | width = 80 - 1 | |
|
187 | width = get_tty_width() | |
|
188 | ui.note(_("assuming %i character terminal\n") % width) | |
|
189 | width -= 1 | |
|
164 | 190 | |
|
165 | 191 | for i in ordered: |
|
166 | 192 | person = i[0] |
@@ -72,6 +72,5 b' def uisetup(ui):' | |||
|
72 | 72 | args = target.split(' ') |
|
73 | 73 | tcmd = args.pop(0) |
|
74 | 74 | if args: |
|
75 | pui = ui.parentui or ui | |
|
76 | pui.setconfig('defaults', cmd, ' '.join(args)) | |
|
75 | ui.setconfig('defaults', cmd, ' '.join(args)) | |
|
77 | 76 | cmdtable[cmd] = lazycommand(ui, cmd, tcmd) |
@@ -260,7 +260,7 b' class convert(object):' | |||
|
260 | 260 | self.mapfilefd.close() |
|
261 | 261 | |
|
262 | 262 | def _convert(ui, src, dest=None, mapfile=None, **opts): |
|
263 | '''Convert a foreign SCM repository to a Mercurial one. | 
|
263 | """Convert a foreign SCM repository to a Mercurial one. | 
|
264 | 264 | |
|
265 | 265 | Accepted source formats: |
|
266 | 266 | - GIT |
@@ -293,7 +293,7 b' def _convert(ui, src, dest=None, mapfile' | |||
|
293 | 293 | that use unix logins to identify authors (eg: CVS). One line per author |
|
294 | 294 | mapping and the line format is: |
|
295 | 295 | srcauthor=whatever string you want |
|
296 | ''' | |
|
296 | """ | |
|
297 | 297 | |
|
298 | 298 | util._encoding = 'UTF-8' |
|
299 | 299 |
@@ -1,10 +1,21 b'' | |||
|
1 | 1 | # Subversion 1.4/1.5 Python API backend |
|
2 | 2 | # |
|
3 | 3 | # Copyright(C) 2007 Daniel Holth et al |
|
4 | # | |
|
5 | # Configuration options: | |
|
6 | # | |
|
7 | # convert.svn.trunk | |
|
8 | # Relative path to the trunk (default: "trunk") | |
|
9 | # convert.svn.branches | |
|
10 | # Relative path to tree of branches (default: "branches") | |
|
11 | # | |
|
12 | # Set these in a hgrc, or on the command line as follows: | |
|
13 | # | |
|
14 | # hg convert --config convert.svn.trunk=wackoname [...] | |
|
4 | 15 | |
|
5 | import pprint | |
|
6 | 16 | import locale |
|
7 | ||
|
17 | import os | |
|
18 | import cPickle as pickle | |
|
8 | 19 | from mercurial import util |
|
9 | 20 | |
|
10 | 21 | # Subversion stuff. Works best with very recent Python SVN bindings |
@@ -27,6 +38,12 b' except ImportError:' | |||
|
27 | 38 | |
|
28 | 39 | class CompatibilityException(Exception): pass |
|
29 | 40 | |
|
41 | class changedpath(object): | |
|
42 | def __init__(self, p): | |
|
43 | self.copyfrom_path = p.copyfrom_path | |
|
44 | self.copyfrom_rev = p.copyfrom_rev | |
|
45 | self.action = p.action | |
|
46 | ||
|
30 | 47 | # SVN conversion code stolen from bzr-svn and tailor |
|
31 | 48 | class convert_svn(converter_source): |
|
32 | 49 | def __init__(self, ui, url, rev=None): |
@@ -51,8 +68,10 b' class convert_svn(converter_source):' | |||
|
51 | 68 | try: |
|
52 | 69 | # Support file://path@rev syntax. Useful e.g. to convert |
|
53 | 70 | # deleted branches. |
|
54 | 
|
55 | 
|
71 | at = url.rfind('@') | |
|
72 | if at >= 0: | |
|
73 | latest = int(url[at+1:]) | |
|
74 | url = url[:at] | |
|
56 | 75 | except ValueError, e: |
|
57 | 76 | pass |
|
58 | 77 | self.url = url |
@@ -60,7 +79,7 b' class convert_svn(converter_source):' | |||
|
60 | 79 | try: |
|
61 | 80 | self.transport = transport.SvnRaTransport(url=self.url) | 
|
62 | 81 | self.ra = self.transport.ra |
|
63 | self.ctx = svn.client.create_context() | 
|
82 | self.ctx = self.transport.client | |
|
64 | 83 | self.base = svn.ra.get_repos_root(self.ra) |
|
65 | 84 | self.module = self.url[len(self.base):] |
|
66 | 85 | self.modulemap = {} # revision, module |
@@ -88,26 +107,47 b' class convert_svn(converter_source):' | |||
|
88 | 107 | lastrevs[module] = revnum |
|
89 | 108 | self.lastrevs = lastrevs |
|
90 | 109 | |
|
110 | def exists(self, path, optrev): | |
|
111 | try: | |
|
112 | return svn.client.ls(self.url.rstrip('/') + '/' + path, | |
|
113 | optrev, False, self.ctx) | |
|
114 | except SubversionException, err: | |
|
115 | return [] | |
|
116 | ||
|
91 | 117 | def getheads(self): |
|
92 | 118 | # detect standard /branches, /tags, /trunk layout |
|
93 | 119 | optrev = svn.core.svn_opt_revision_t() |
|
94 | 120 | optrev.kind = svn.core.svn_opt_revision_number |
|
95 | 121 | optrev.value.number = self.last_changed |
|
96 | 122 | rpath = self.url.strip('/') |
|
97 | paths = svn.client.ls(rpath, optrev, False, self.ctx) | |
|
98 | if 'branches' in paths and 'trunk' in paths: | |
|
99 | self.module += '/trunk' | |
|
123 | cfgtrunk = self.ui.config('convert', 'svn.trunk') | |
|
124 | cfgbranches = self.ui.config('convert', 'svn.branches') | |
|
125 | trunk = (cfgtrunk or 'trunk').strip('/') | |
|
126 | branches = (cfgbranches or 'branches').strip('/') | |
|
127 | if self.exists(trunk, optrev) and self.exists(branches, optrev): | |
|
128 | self.ui.note('found trunk at %r and branches at %r\n' % | |
|
129 | (trunk, branches)) | |
|
130 | oldmodule = self.module | |
|
131 | self.module += '/' + trunk | |
|
100 | 132 | lt = self.latest(self.module, self.last_changed) |
|
101 | 133 | self.head = self.revid(lt) |
|
102 | 134 | self.heads = [self.head] |
|
103 |
branches = svn.client.ls(rpath + '/ |
|
|
104 | for branch in branches.keys(): | |
|
105 | module = '/branches/' + branch | |
|
135 | branchnames = svn.client.ls(rpath + '/' + branches, optrev, False, | |
|
136 | self.ctx) | |
|
137 | for branch in branchnames.keys(): | |
|
138 | if oldmodule: | |
|
139 | module = '/' + oldmodule + '/' + branches + '/' + branch | |
|
140 | else: | |
|
141 | module = '/' + branches + '/' + branch | |
|
106 | 142 | brevnum = self.latest(module, self.last_changed) |
|
107 | 143 | brev = self.revid(brevnum, module) |
|
108 | 144 | self.ui.note('found branch %s at %d\n' % (branch, brevnum)) |
|
109 | 145 | self.heads.append(brev) |
|
146 | elif cfgtrunk or cfgbranches: | |
|
147 | raise util.Abort(_('trunk/branch layout expected, ' | |
|
148 | 'but not found')) | |
|
110 | 149 | else: |
|
150 | self.ui.note('working with one branch\n') | |
|
111 | 151 | self.heads = [self.head] |
|
112 | 152 | return self.heads |
|
113 | 153 | |
@@ -140,10 +180,68 b' class convert_svn(converter_source):' | |||
|
140 | 180 | del self.commits[rev] |
|
141 | 181 | return commit |
|
142 | 182 | |
|
183 | def get_log(self, paths, start, end, limit=0, discover_changed_paths=True, | |
|
184 | strict_node_history=False): | |
|
185 | '''wrapper for svn.ra.get_log. | |
|
186 | on a large repository, svn.ra.get_log pins huge amounts of | |
|
187 | memory that cannot be recovered. work around it by forking | |
|
188 | and writing results over a pipe.''' | |
|
189 | ||
|
190 | def child(fp): | |
|
191 | protocol = -1 | |
|
192 | def receiver(orig_paths, revnum, author, date, message, pool): | |
|
193 | if orig_paths is not None: | |
|
194 | for k, v in orig_paths.iteritems(): | |
|
195 | orig_paths[k] = changedpath(v) | |
|
196 | pickle.dump((orig_paths, revnum, author, date, message), | |
|
197 | fp, protocol) | |
|
198 | ||
|
199 | try: | |
|
200 | # Use an ra of our own so that our parent can consume | |
|
201 | # our results without confusing the server. | |
|
202 | t = transport.SvnRaTransport(url=self.url) | |
|
203 | svn.ra.get_log(t.ra, paths, start, end, limit, | |
|
204 | discover_changed_paths, | |
|
205 | strict_node_history, | |
|
206 | receiver) | |
|
207 | except SubversionException, (_, num): | |
|
208 | self.ui.print_exc() | |
|
209 | pickle.dump(num, fp, protocol) | |
|
210 | else: | |
|
211 | pickle.dump(None, fp, protocol) | |
|
212 | fp.close() | |
|
213 | ||
|
214 | def parent(fp): | |
|
215 | while True: | |
|
216 | entry = pickle.load(fp) | |
|
217 | try: | |
|
218 | orig_paths, revnum, author, date, message = entry | |
|
219 | except: | |
|
220 | if entry is None: | |
|
221 | break | |
|
222 | raise SubversionException("child raised exception", entry) | |
|
223 | yield entry | |
|
224 | ||
|
225 | rfd, wfd = os.pipe() | |
|
226 | pid = os.fork() | |
|
227 | if pid: | |
|
228 | os.close(wfd) | |
|
229 | for p in parent(os.fdopen(rfd, 'rb')): | |
|
230 | yield p | |
|
231 | ret = os.waitpid(pid, 0)[1] | |
|
232 | if ret: | |
|
233 | raise util.Abort(_('get_log %s') % util.explain_exit(ret)) | |
|
234 | else: | |
|
235 | os.close(rfd) | |
|
236 | child(os.fdopen(wfd, 'wb')) | |
|
237 | os._exit(0) | |
|
238 | ||
|
143 | 239 | def gettags(self): |
|
144 | 240 | tags = {} |
|
145 | def parselogentry(*arg, **args): | |
|
146 | orig_paths, revnum, author, date, message, pool = arg | |
|
241 | start = self.revnum(self.head) | |
|
242 | try: | |
|
243 | for entry in self.get_log(['/tags'], 0, start): | |
|
244 | orig_paths, revnum, author, date, message = entry | |
|
147 | 245 | for path in orig_paths: |
|
148 | 246 | if not path.startswith('/tags/'): |
|
149 | 247 | continue |
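
Note on the get_log() wrapper added above: as its docstring says, svn.ra.get_log pins memory that is only released when the process exits, so the call is pushed into a forked child that streams pickled tuples back over a pipe. The following is a hypothetical, POSIX-only sketch of that fork-and-pipe pattern; stream_from_child and the None sentinel are assumptions of the sketch, not names from the patch.

import os, pickle

def stream_from_child(produce):
    # Run produce() in a forked child and stream its items back over a pipe,
    # so whatever memory the child pins is reclaimed when it exits.
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid == 0:                      # child: write items, then exit hard
        os.close(rfd)
        fp = os.fdopen(wfd, 'wb')
        for item in produce():
            pickle.dump(item, fp)
        pickle.dump(None, fp)         # sentinel marking end of stream
        fp.close()
        os._exit(0)
    os.close(wfd)                     # parent: read until the sentinel
    fp = os.fdopen(rfd, 'rb')
    while True:
        item = pickle.load(fp)
        if item is None:
            break
        yield item
    fp.close()
    os.waitpid(pid, 0)

for entry in stream_from_child(lambda: [(1, 'a'), (2, 'b')]):
    print(entry)
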
@@ -152,15 +250,9 b' class convert_svn(converter_source):' | |||
|
152 | 250 | rev = ent.copyfrom_rev |
|
153 | 251 | tag = path.split('/', 2)[2] |
|
154 | 252 | tags[tag] = self.revid(rev, module=source) |
|
155 | ||
|
156 | start = self.revnum(self.head) | |
|
157 | try: | |
|
158 | svn.ra.get_log(self.ra, ['/tags'], 0, start, 0, True, False, | |
|
159 | parselogentry) | |
|
253 | except SubversionException, (_, num): | |
|
254 | self.ui.note('no tags found at revision %d\n' % start) | |
|
160 | 255 | return tags | 
|
161 | except SubversionException: | |
|
162 | self.ui.note('no tags found at revision %d\n' % start) | |
|
163 | return {} | |
|
164 | 256 | |
|
165 | 257 | # -- helper functions -- |
|
166 | 258 | |
@@ -193,8 +285,8 b' class convert_svn(converter_source):' | |||
|
193 | 285 | except SubversionException: |
|
194 | 286 | dirent = None |
|
195 | 287 | if not dirent: |
|
196 | raise util.Abort('%s not found up to revision %d' \ | |
|
197 | % (path, stop)) | |
|
288 | print self.base, path | |
|
289 | raise util.Abort('%s not found up to revision %d' % (path, stop)) | |
|
198 | 290 | |
|
199 | 291 | return dirent.created_rev |
|
200 | 292 | |
@@ -242,25 +334,10 b' class convert_svn(converter_source):' | |||
|
242 | 334 | self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module)) |
|
243 | 335 | return None |
|
244 | 336 | |
|
245 | received = [] | |
|
246 | # svn.ra.get_log requires no other calls to the ra until it completes, | |
|
247 | # so we just collect the log entries and parse them afterwards | |
|
248 | def receivelog(*arg, **args): | |
|
249 | received.append(arg) | |
|
250 | ||
|
251 | 337 | self.child_cset = None |
|
252 | def parselogentry(*arg, **args): | 
|
253 | orig_paths, revnum, author, date, message, pool = arg | |
|
254 | ||
|
255 | if self.is_blacklisted(revnum): | |
|
256 | self.ui.note('skipping blacklisted revision %d\n' % revnum) | |
|
257 | return | |
|
258 | ||
|
259 | self.ui.debug("parsing revision %d\n" % revnum) | |
|
260 | ||
|
261 | if orig_paths is None: | |
|
262 | self.ui.debug('revision %d has no entries\n' % revnum) | |
|
263 | return | |
|
338 | def parselogentry(orig_paths, revnum, author, date, message): | |
|
339 | self.ui.debug("parsing revision %d (%d changes)\n" % | |
|
340 | (revnum, len(orig_paths))) | |
|
264 | 341 | |
|
265 | 342 | if revnum in self.modulemap: |
|
266 | 343 | new_module = self.modulemap[revnum] |
@@ -286,12 +363,11 b' class convert_svn(converter_source):' | |||
|
286 | 363 | except IndexError: |
|
287 | 364 | branch = None |
|
288 | 365 | |
|
289 | paths = orig_paths.keys() | 
|
290 | paths.sort() | |
|
291 | for path in paths: | |
|
366 | orig_paths = orig_paths.items() | |
|
367 | orig_paths.sort() | |
|
368 | for path, ent in orig_paths: | |
|
292 | 369 | # self.ui.write("path %s\n" % path) |
|
293 | 370 | if path == self.module: # Follow branching back in history |
|
294 | ent = orig_paths[path] | |
|
295 | 371 | if ent: |
|
296 | 372 | if ent.copyfrom_path: |
|
297 | 373 | # ent.copyfrom_rev may not be the actual last revision |
@@ -310,7 +386,6 b' class convert_svn(converter_source):' | |||
|
310 | 386 | self.ui.debug("boring@%s: %s\n" % (revnum, path)) |
|
311 | 387 | continue |
|
312 | 388 | entry = entrypath.decode(self.encoding) |
|
313 | ent = orig_paths[path] | |
|
314 | 389 | |
|
315 | 390 | kind = svn.ra.check_path(self.ra, entrypath, revnum) |
|
316 | 391 | if kind == svn.core.svn_node_file: |
@@ -492,17 +567,21 b' class convert_svn(converter_source):' | |||
|
492 | 567 | self.child_cset.parents = [rev] |
|
493 | 568 | self.child_cset = cset |
|
494 | 569 | |
|
495 | self.ui.note('fetching revision log for "%s" from %d to %d\n' % | 
|
570 | self.ui.note('fetching revision log for "%s" from %d to %d\n' % | |
|
496 | 571 | (self.module, from_revnum, to_revnum)) |
|
497 | 572 | |
|
498 | 573 | try: |
|
499 | 574 | discover_changed_paths = True |
|
500 | 575 | strict_node_history = False |
|
501 | svn.ra.get_log(self.ra, [self.module], from_revnum, to_revnum, 0, | 
|
502 | discover_changed_paths, strict_node_history, | |
|
503 | receivelog) | |
|
504 | for entry in received: | |
|
505 | parselogentry(*entry) | 
|
576 | for entry in self.get_log([self.module], from_revnum, to_revnum): | |
|
577 | orig_paths, revnum, author, date, message = entry | |
|
578 | if self.is_blacklisted(revnum): | |
|
579 | self.ui.note('skipping blacklisted revision %d\n' % revnum) | |
|
580 | continue | |
|
581 | if orig_paths is None: | |
|
582 | self.ui.debug('revision %d has no entries\n' % revnum) | |
|
583 | continue | |
|
584 | parselogentry(orig_paths, revnum, author, date, message) | |
|
506 | 585 | except SubversionException, (_, num): |
|
507 | 586 | if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION: |
|
508 | 587 | raise NoSuchRevision(branch=self, | 
@@ -567,7 +646,6 b' class convert_svn(converter_source):' | |||
|
567 | 646 | dirents = getdir[0] |
|
568 | 647 | if type(dirents) == int: |
|
569 | 648 | # got here once due to infinite recursion bug |
|
570 | # pprint.pprint(getdir) | |
|
571 | 649 | return |
|
572 | 650 | c = dirents.keys() |
|
573 | 651 | c.sort() |
@@ -24,6 +24,7 b' from tempfile import mktemp' | |||
|
24 | 24 | |
|
25 | 25 | from svn.core import SubversionException, Pool |
|
26 | 26 | import svn.ra |
|
27 | import svn.client | |
|
27 | 28 | import svn.core |
|
28 | 29 | |
|
29 | 30 | # Some older versions of the Python bindings need to be | 
@@ -48,21 +49,6 b' def _create_auth_baton(pool):' | |||
|
48 | 49 | ] |
|
49 | 50 | return svn.core.svn_auth_open(providers, pool) |
|
50 | 51 | |
|
51 | ||
|
52 | # # The SVN libraries don't like trailing slashes... | |
|
53 | # return url.rstrip('/') | |
|
54 | ||
|
55 | ||
|
56 | class SvnRaCallbacks(svn.ra.callbacks2_t): | |
|
57 | """Remote access callbacks implementation for bzr-svn.""" | |
|
58 | def __init__(self, pool): | |
|
59 | svn.ra.callbacks2_t.__init__(self) | |
|
60 | self.auth_baton = _create_auth_baton(pool) | |
|
61 | self.pool = pool | |
|
62 | ||
|
63 | def open_tmp_file(self, pool): | |
|
64 | return mktemp(prefix='tailor-svn') | |
|
65 | ||
|
66 | 52 | class NotBranchError(SubversionException): |
|
67 | 53 | pass |
|
68 | 54 | |
@@ -73,25 +59,30 b' class SvnRaTransport(object):' | |||
|
73 | 59 | def __init__(self, url="", ra=None): |
|
74 | 60 | self.pool = Pool() |
|
75 | 61 | self.svn_url = url |
|
62 | self.username = '' | |
|
63 | self.password = '' | |
|
76 | 64 | |
|
77 | 65 | # Only Subversion 1.4 has reparent() |
|
78 | 66 | if ra is None or not hasattr(svn.ra, 'reparent'): |
|
79 |
self.c |
|
|
67 | self.client = svn.client.create_context(self.pool) | |
|
68 | ab = _create_auth_baton(self.pool) | |
|
69 | if False: | |
|
70 | svn.core.svn_auth_set_parameter( | |
|
71 | ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username) | |
|
72 | svn.core.svn_auth_set_parameter( | |
|
73 | ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password) | |
|
74 | self.client.auth_baton = ab | |
|
75 | self.client.config = svn_config | |
|
80 | 76 | try: |
|
81 |
|
|
|
82 | try: # Older SVN bindings | |
|
83 | self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, None, svn_config, None) | |
|
84 | except TypeError, e: | |
|
85 | self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, svn_config, None) | |
|
77 | self.ra = svn.client.open_ra_session( | |
|
78 | self.svn_url.encode('utf8'), | |
|
79 | self.client, self.pool) | |
|
86 | 80 | except SubversionException, (_, num): |
|
87 |
if num |
|
|
88 | raise NotBranchError(url) | |
|
89 |
|
|
|
90 | raise NotBranchError(url) | |
|
91 | if num == svn.core.SVN_ERR_BAD_URL: | |
|
81 | if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL, | |
|
82 | svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED, | |
|
83 | svn.core.SVN_ERR_BAD_URL): | |
|
92 | 84 | raise NotBranchError(url) |
|
93 | 85 | raise |
|
94 | ||
|
95 | 86 | else: |
|
96 | 87 | self.ra = ra |
|
97 | 88 | svn.ra.reparent(self.ra, self.svn_url.encode('utf8')) |
@@ -200,8 +200,9 b' def archive(repo, dest, node, kind, deco' | |||
|
200 | 200 | |
|
201 | 201 | prefix is name of path to put before every archive member.''' |
|
202 | 202 | |
|
203 | def write(name, mode, islink, data): | |
|
203 | def write(name, mode, islink, getdata): | |
|
204 | 204 | if matchfn and not matchfn(name): return |
|
205 | data = getdata() | |
|
205 | 206 | if decode: |
|
206 | 207 | data = repo.wwritedata(name, data) |
|
207 | 208 | archiver.addfile(name, mode, islink, data) |
@@ -212,8 +213,8 b' def archive(repo, dest, node, kind, deco' | |||
|
212 | 213 | items = m.items() |
|
213 | 214 | items.sort() |
|
214 | 215 | write('.hg_archival.txt', 0644, False, |
|
215 | 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node))) | |
|
216 | lambda: 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node))) | |
|
216 | 217 | for filename, filenode in items: |
|
217 | 218 | write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename), |
|
218 | repo.file(filename).read(filenode)) | |
|
219 | lambda: repo.file(filename).read(filenode)) | |
|
219 | 220 | archiver.done() |
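
Note on the archival change above: write() now receives a callable instead of the file data itself, so files rejected by matchfn are never read from the repository at all. A small hypothetical illustration of that deferred-read pattern follows; write and matchfn here are local to the sketch, not the real archival API.

def write(name, mode, islink, getdata, matchfn=None):
    # The data callback only runs once the name has passed the filter.
    if matchfn and not matchfn(name):
        return None
    return (name, mode, islink, getdata())

keep = lambda n: n.endswith('.txt')
print(write('kept.txt', 0o644, False, lambda: 'contents', matchfn=keep))
# The expensive (here: failing) callback is never invoked for skipped files:
print(write('skipped.bin', 0o644, False, lambda: 1 // 0, matchfn=keep))
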
@@ -42,7 +42,7 b' class appender:' | |||
|
42 | 42 | def flush(self): |
|
43 | 43 | pass |
|
44 | 44 | def close(self): |
|
45 |
|
|
|
45 | self.fp.close() | |
|
46 | 46 | |
|
47 | 47 | def seek(self, offset, whence=0): |
|
48 | 48 | '''virtual file offset spans real file and data''' |
@@ -58,7 +58,6 b' class appender:' | |||
|
58 | 58 | def read(self, count=-1): |
|
59 | 59 | '''only trick here is reads that span real file and data''' |
|
60 | 60 | ret = "" |
|
61 | old_offset = self.offset | |
|
62 | 61 | if self.offset < self.size: |
|
63 | 62 | s = self.fp.read(count) |
|
64 | 63 | ret = s |
@@ -316,7 +316,7 b' def dispatch(ui, args, argv0=None):' | |||
|
316 | 316 | util._fallbackencoding = fallback |
|
317 | 317 | |
|
318 | 318 | fullargs = args |
|
319 | cmd, func, args, options, cmdoptions = parse(ui, args) | |
|
319 | cmd, func, args, options, cmdoptions = parse(lui, args) | |
|
320 | 320 | |
|
321 | 321 | if options["config"]: |
|
322 | 322 | raise util.Abort(_("Option --config may not be abbreviated!")) |
@@ -8,7 +8,7 b'' | |||
|
8 | 8 | import demandimport; demandimport.enable() |
|
9 | 9 | from node import * |
|
10 | 10 | from i18n import _ |
|
11 | import bisect, os, re, sys, urllib, | 
|
11 | import bisect, os, re, sys, urllib, stat | |
|
12 | 12 | import ui, hg, util, revlog, bundlerepo, extensions |
|
13 | 13 | import difflib, patch, time, help, mdiff, tempfile |
|
14 | 14 | import errno, version, socket |
@@ -1362,7 +1362,7 b' def help_(ui, name=None, with_version=Fa' | |||
|
1362 | 1362 | |
|
1363 | 1363 | addglobalopts(False) |
|
1364 | 1364 | |
|
1365 | def helplist(select=None): | |
|
1365 | def helplist(header, select=None): | |
|
1366 | 1366 | h = {} |
|
1367 | 1367 | cmds = {} |
|
1368 | 1368 | for c, e in table.items(): |
@@ -1380,6 +1380,11 b' def help_(ui, name=None, with_version=Fa' | |||
|
1380 | 1380 | h[f] = doc.splitlines(0)[0].rstrip() |
|
1381 | 1381 | cmds[f] = c.lstrip("^") |
|
1382 | 1382 | |
|
1383 | if not h: | |
|
1384 | ui.status(_('no commands defined\n')) | |
|
1385 | return | |
|
1386 | ||
|
1387 | ui.status(header) | |
|
1383 | 1388 | fns = h.keys() |
|
1384 | 1389 | fns.sort() |
|
1385 | 1390 | m = max(map(len, fns)) |
@@ -1429,14 +1434,10 b' def help_(ui, name=None, with_version=Fa' | |||
|
1429 | 1434 | try: |
|
1430 | 1435 | ct = mod.cmdtable |
|
1431 | 1436 | except AttributeError: |
|
1432 |
ct = |
|
|
1433 | if not ct: | |
|
1434 | ui.status(_('no commands defined\n')) | |
|
1435 | return | |
|
1436 | ||
|
1437 | ui.status(_('list of commands:\n\n')) | |
|
1437 | ct = {} | |
|
1438 | ||
|
1438 | 1439 | modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct]) |
|
1439 | helplist(modcmds.has_key) | |
|
1440 | helplist(_('list of commands:\n\n'), modcmds.has_key) | |
|
1440 | 1441 | |
|
1441 | 1442 | if name and name != 'shortlist': |
|
1442 | 1443 | i = None |
@@ -1460,11 +1461,11 b' def help_(ui, name=None, with_version=Fa' | |||
|
1460 | 1461 | |
|
1461 | 1462 | # list of commands |
|
1462 | 1463 | if name == "shortlist": |
|
1463 | ui.status(_('basic commands:\n\n')) | 
|
1464 | header = _('basic commands:\n\n') | |
|
1464 | 1465 | else: |
|
1465 | ui.status(_('list of commands:\n\n')) | 
|
1466 | ||
|
1467 | helplist() | |
|
1466 | header = _('list of commands:\n\n') | |
|
1467 | ||
|
1468 | helplist(header) | |
|
1468 | 1469 | |
|
1469 | 1470 | # list all option lists |
|
1470 | 1471 | opt_output = [] |
@@ -2040,14 +2041,12 b' def paths(ui, repo, search=None):' | |||
|
2040 | 2041 | for name, path in ui.configitems("paths"): |
|
2041 | 2042 | ui.write("%s = %s\n" % (name, path)) |
|
2042 | 2043 | |
|
2043 | def postincoming(ui, repo, modheads, optupdate, wasempty): | 
|
2044 | def postincoming(ui, repo, modheads, optupdate): | |
|
2044 | 2045 | if modheads == 0: |
|
2045 | 2046 | return |
|
2046 | 2047 | if optupdate: |
|
2047 | if wasempty: | 
|
2048 | return hg.update(repo, repo.changelog.tip()) | 
|
2049 | elif modheads == 1: | |
|
2050 | return hg.update(repo, repo.changelog.tip()) # update | |
|
2048 | if modheads == 1: | |
|
2049 | return hg.update(repo, None) | |
|
2051 | 2050 | else: |
|
2052 | 2051 | ui.status(_("not updating, since new heads added\n")) |
|
2053 | 2052 | if modheads > 1: |
@@ -2108,9 +2107,8 b' def pull(ui, repo, source="default", **o' | |||
|
2108 | 2107 | error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.") |
|
2109 | 2108 | raise util.Abort(error) |
|
2110 | 2109 | |
|
2111 | wasempty = repo.changelog.count() == 0 | |
|
2112 | 2110 | modheads = repo.pull(other, heads=revs, force=opts['force']) |
|
2113 | return postincoming(ui, repo, modheads, opts['update'], wasempty) | 
|
2111 | return postincoming(ui, repo, modheads, opts['update']) | |
|
2114 | 2112 | |
|
2115 | 2113 | def push(ui, repo, dest=None, **opts): |
|
2116 | 2114 | """push changes to the specified destination |
@@ -2211,7 +2209,6 b' def remove(ui, repo, *pats, **opts):' | |||
|
2211 | 2209 | Modified files and added files are not removed by default. To |
|
2212 | 2210 | remove them, use the -f/--force option. |
|
2213 | 2211 | """ |
|
2214 | names = [] | |
|
2215 | 2212 | if not opts['after'] and not pats: |
|
2216 | 2213 | raise util.Abort(_('no files specified')) |
|
2217 | 2214 | files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts) |
@@ -2681,8 +2678,6 b' def unbundle(ui, repo, fname1, *fnames, ' | |||
|
2681 | 2678 | bundle command. |
|
2682 | 2679 | """ |
|
2683 | 2680 | fnames = (fname1,) + fnames |
|
2684 | result = None | |
|
2685 | wasempty = repo.changelog.count() == 0 | |
|
2686 | 2681 | for fname in fnames: |
|
2687 | 2682 | if os.path.exists(fname): |
|
2688 | 2683 | f = open(fname, "rb") |
@@ -2691,7 +2686,7 b' def unbundle(ui, repo, fname1, *fnames, ' | |||
|
2691 | 2686 | gen = changegroup.readbundle(f, fname) |
|
2692 | 2687 | modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname) |
|
2693 | 2688 | |
|
2694 | return postincoming(ui, repo, modheads, opts['update'], wasempty) | 
|
2689 | return postincoming(ui, repo, modheads, opts['update']) | |
|
2695 | 2690 | |
|
2696 | 2691 | def update(ui, repo, node=None, rev=None, clean=False, date=None): |
|
2697 | 2692 | """update working directory |
@@ -21,6 +21,7 b' class dirstate(object):' | |||
|
21 | 21 | self._opener = opener |
|
22 | 22 | self._root = root |
|
23 | 23 | self._dirty = False |
|
24 | self._dirtypl = False | |
|
24 | 25 | self._ui = ui |
|
25 | 26 | |
|
26 | 27 | def __getattr__(self, name): |
@@ -113,7 +114,7 b' class dirstate(object):' | |||
|
113 | 114 | return self._branch |
|
114 | 115 | |
|
115 | 116 | def setparents(self, p1, p2=nullid): |
|
116 | self._dirty = True | |
|
117 | self._dirty = self._dirtypl = True | |
|
117 | 118 | self._pl = p1, p2 |
|
118 | 119 | |
|
119 | 120 | def setbranch(self, branch): |
@@ -123,6 +124,7 b' class dirstate(object):' | |||
|
123 | 124 | def _read(self): |
|
124 | 125 | self._map = {} |
|
125 | 126 | self._copymap = {} |
|
127 | if not self._dirtypl: | |
|
126 | 128 | self._pl = [nullid, nullid] |
|
127 | 129 | try: |
|
128 | 130 | st = self._opener("dirstate").read() |
@@ -132,6 +134,7 b' class dirstate(object):' | |||
|
132 | 134 | if not st: |
|
133 | 135 | return |
|
134 | 136 | |
|
137 | if not self._dirtypl: | |
|
135 | 138 | self._pl = [st[:20], st[20: 40]] |
|
136 | 139 | |
|
137 | 140 | # deref fields so they will be local in loop |
@@ -157,8 +160,8 b' class dirstate(object):' | |||
|
157 | 160 | |
|
158 | 161 | def invalidate(self): |
|
159 | 162 | for a in "_map _copymap _branch _pl _dirs _ignore".split(): |
|
160 | if hasattr(self, a): | 
|
|
161 | self.__delattr__(a) | 
|
|
163 | if a in self.__dict__: | |
|
164 | delattr(self, a) | |
|
162 | 165 | self._dirty = False |
|
163 | 166 | |
|
164 | 167 | def copy(self, source, dest): |
@@ -271,7 +274,7 b' class dirstate(object):' | |||
|
271 | 274 | st = self._opener("dirstate", "w", atomictemp=True) |
|
272 | 275 | st.write(cs.getvalue()) |
|
273 | 276 | st.rename() |
|
274 | self._dirty = False | |
|
277 | self._dirty = self._dirtypl = False | |
|
275 | 278 | |
|
276 | 279 | def _filter(self, files): |
|
277 | 280 | ret = {} |
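
Note on the dirstate hunks above: the new _dirtypl flag records that setparents() changed the in-memory parents, so a later _read() of the on-disk dirstate must not overwrite them, and write() clears the flag again. A toy model of that guard, assuming a hypothetical class rather than the real dirstate API:

nullid = b'\x00' * 20

class toydirstate(object):
    def __init__(self):
        self._pl = [nullid, nullid]
        self._dirtypl = False

    def setparents(self, p1, p2=nullid):
        self._dirtypl = True
        self._pl = [p1, p2]

    def read(self, ondisk_parents):
        # Mirrors the guarded assignment added in _read() above.
        if not self._dirtypl:
            self._pl = list(ondisk_parents)

d = toydirstate()
d.setparents(b'p' * 20)
d.read([nullid, nullid])      # ignored: parents were already set in memory
assert d._pl[0] == b'p' * 20
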
@@ -29,14 +29,13 b' class lock(object):' | |||
|
29 | 29 | # old-style lock: symlink to pid |
|
30 | 30 | # new-style lock: symlink to hostname:pid |
|
31 | 31 | |
|
32 | _host = None | |
|
33 | ||
|
32 | 34 | def __init__(self, file, timeout=-1, releasefn=None, desc=None): |
|
33 | 35 | self.f = file |
|
34 | 36 | self.held = 0 |
|
35 | 37 | self.timeout = timeout |
|
36 | 38 | self.releasefn = releasefn |
|
37 | self.id = None | |
|
38 | self.host = None | |
|
39 | self.pid = None | |
|
40 | 39 | self.desc = desc |
|
41 | 40 | self.lock() |
|
42 | 41 | |
@@ -59,13 +58,12 b' class lock(object):' | |||
|
59 | 58 | inst.locker) |
|
60 | 59 | |
|
61 | 60 | def trylock(self): |
|
62 | if self.id is None: | 
|
|
63 | self.host = socket.gethostname() | 
|
|
|
64 | self.pid = os.getpid() | |
|
65 | self.id = '%s:%s' % (self.host, self.pid) | |
|
61 | if lock._host is None: | |
|
62 | lock._host = socket.gethostname() | |
|
63 | lockname = '%s:%s' % (lock._host, os.getpid()) | |
|
66 | 64 | while not self.held: |
|
67 | 65 | try: |
|
68 | util.makelock(self.id, self.f) | 
|
|
66 | util.makelock(lockname, self.f) | |
|
69 | 67 | self.held = 1 |
|
70 | 68 | except (OSError, IOError), why: |
|
71 | 69 | if why.errno == errno.EEXIST: |
@@ -93,7 +91,7 b' class lock(object):' | |||
|
93 | 91 | host, pid = locker.split(":", 1) |
|
94 | 92 | except ValueError: |
|
95 | 93 | return locker |
|
96 | if host != self.host: | 
|
|
94 | if host != lock._host: | |
|
97 | 95 | return locker |
|
98 | 96 | try: |
|
99 | 97 | pid = int(pid) |
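
Note on the lock changes above: gethostname() is now cached once on the class (lock._host) instead of being stored per instance, and the lock name is built on the fly in trylock(). A condensed sketch of that caching pattern, simplified and not the full lock class:

import os, socket

class lock(object):
    _host = None                     # shared cache, filled on first use

    def _lockname(self):
        if lock._host is None:
            lock._host = socket.gethostname()
        return '%s:%s' % (lock._host, os.getpid())

print(lock()._lockname())
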
@@ -281,7 +281,7 b' def externalpatch(patcher, args, patchna' | |||
|
281 | 281 | def internalpatch(patchname, ui, strip, cwd, files): |
|
282 | 282 | """use builtin patch to apply <patchname> to the working directory. |
|
283 | 283 | returns whether patch was applied with fuzz factor.""" |
|
284 | fp = file(patchname) | |
|
284 | fp = file(patchname, 'rb') | |
|
285 | 285 | if cwd: |
|
286 | 286 | curdir = os.getcwd() |
|
287 | 287 | os.chdir(cwd) |
@@ -303,7 +303,7 b' class patchfile:' | |||
|
303 | 303 | self.fname = fname |
|
304 | 304 | self.ui = ui |
|
305 | 305 | try: |
|
306 | fp = file(fname, 'r') | |
|
306 | fp = file(fname, 'rb') | |
|
307 | 307 | self.lines = fp.readlines() |
|
308 | 308 | self.exists = True |
|
309 | 309 | except IOError: |
@@ -383,7 +383,7 b' class patchfile:' | |||
|
383 | 383 | try: os.unlink(fname) |
|
384 | 384 | except: |
|
385 | 385 | pass |
|
386 | fp = file(fname, 'w') | |
|
386 | fp = file(fname, 'wb') | |
|
387 | 387 | base = os.path.basename(self.fname) |
|
388 | 388 | fp.write("--- %s\n+++ %s\n" % (base, base)) |
|
389 | 389 | for x in self.rej: |
@@ -402,7 +402,7 b' class patchfile:' | |||
|
402 | 402 | if st.st_nlink > 1: |
|
403 | 403 | os.unlink(dest) |
|
404 | 404 | except: pass |
|
405 | fp = file(dest, 'w') | |
|
405 | fp = file(dest, 'wb') | |
|
406 | 406 | if st: |
|
407 | 407 | os.chmod(dest, st.st_mode) |
|
408 | 408 | fp.writelines(self.lines) |
@@ -777,13 +777,13 b' def selectfile(afile_orig, bfile_orig, h' | |||
|
777 | 777 | if count == 0: |
|
778 | 778 | return path.rstrip() |
|
779 | 779 | while count > 0: |
|
780 | i = path.find(os.sep, i) | 
|
|
780 | i = path.find('/', i) | |
|
781 | 781 | if i == -1: |
|
782 | 782 | raise PatchError(_("unable to strip away %d dirs from %s") % |
|
783 | 783 | (count, path)) |
|
784 | 784 | i += 1 |
|
785 | 785 | # consume '//' in the path |
|
786 | while i < pathlen - 1 and path[i] == os.sep: | 
|
|
786 | while i < pathlen - 1 and path[i] == '/': | |
|
787 | 787 | i += 1 |
|
788 | 788 | count -= 1 |
|
789 | 789 | return path[i:].rstrip() |
@@ -616,7 +616,7 b' def rename(src, dst):' | |||
|
616 | 616 | """forcibly rename a file""" |
|
617 | 617 | try: |
|
618 | 618 | os.rename(src, dst) |
|
619 | except OSError, err: | |
|
619 | except OSError, err: # FIXME: check err (EEXIST ?) | |
|
620 | 620 | # on windows, rename to existing file is not allowed, so we |
|
621 | 621 | # must delete destination first. but if file is open, unlink |
|
622 | 622 | # schedules it for delete but does not delete it. rename |
@@ -1303,7 +1303,11 b' class opener(object):' | |||
|
1303 | 1303 | os.makedirs(dirname) |
|
1304 | 1304 | |
|
1305 | 1305 | if self._can_symlink: |
|
1306 | try: | |
|
1306 | 1307 | os.symlink(src, linkname) |
|
1308 | except OSError, err: | |
|
1309 | raise OSError(err.errno, _('could not symlink to %r: %s') % | |
|
1310 | (src, err.strerror), linkname) | |
|
1307 | 1311 | else: |
|
1308 | 1312 | f = self(self, dst, "w") |
|
1309 | 1313 | f.write(src) |
@@ -209,9 +209,9 b' class posixfile_nt(object):' | |||
|
209 | 209 | |
|
210 | 210 | def __init__(self, name, mode='rb'): |
|
211 | 211 | access = 0 |
|
212 | if 'r' in mode or '+' in mode: | 
|
|
212 | if 'r' in mode: | |
|
213 | 213 | access |= win32file.GENERIC_READ |
|
214 | if 'w' in mode or 'a' in mode: | |
|
214 | if 'w' in mode or 'a' in mode or '+' in mode: | |
|
215 | 215 | access |= win32file.GENERIC_WRITE |
|
216 | 216 | if 'r' in mode: |
|
217 | 217 | creation = win32file.OPEN_EXISTING |
@@ -64,3 +64,18 b' emptypath=`pwd`/empty.py' | |||
|
64 | 64 | echo '[extensions]' > $HGRCPATH |
|
65 | 65 | echo "empty = $emptypath" >> $HGRCPATH |
|
66 | 66 | hg help empty |
|
67 | ||
|
68 | cat > debugextension.py <<EOF | |
|
69 | '''only debugcommands | |
|
70 | ''' | |
|
71 | def debugfoobar(ui, repo, *args, **opts): | |
|
72 | "yet another debug command" | |
|
73 | pass | |
|
74 | ||
|
75 | cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")} | |
|
76 | EOF | |
|
77 | debugpath=`pwd`/debugextension.py | |
|
78 | echo '[extensions]' > $HGRCPATH | |
|
79 | echo "debugextension = $debugpath" >> $HGRCPATH | |
|
80 | hg help debugextension | |
|
81 | hg --debug help debugextension |
@@ -22,3 +22,30 b' Foo' | |||
|
22 | 22 | empty extension - empty cmdtable |
|
23 | 23 | |
|
24 | 24 | no commands defined |
|
25 | debugextension extension - only debugcommands | |
|
26 | ||
|
27 | no commands defined | |
|
28 | debugextension extension - only debugcommands | |
|
29 | ||
|
30 | list of commands: | |
|
31 | ||
|
32 | debugfoobar: | |
|
33 | yet another debug command | |
|
34 | ||
|
35 | global options: | |
|
36 | -R --repository repository root directory or symbolic path name | |
|
37 | --cwd change working directory | |
|
38 | -y --noninteractive do not prompt, assume 'yes' for any required answers | |
|
39 | -q --quiet suppress output | |
|
40 | -v --verbose enable additional output | |
|
41 | --config set/override config option | |
|
42 | --debug enable debugging output | |
|
43 | --debugger start debugger | |
|
44 | --encoding set the charset encoding (default: ascii) | |
|
45 | --encodingmode set the charset encoding mode (default: strict) | |
|
46 | --lsprof print improved command execution profile | |
|
47 | --traceback print traceback on exception | |
|
48 | --time time how long the command takes | |
|
49 | --profile print command execution profile | |
|
50 | --version output version information and exit | |
|
51 | -h --help display help and exit |
@@ -29,14 +29,18 b" newline'" | |||
|
29 | 29 | hg tag -l 'xx:xx' |
|
30 | 30 | |
|
31 | 31 | echo % issue 601 |
|
32 | mv .hg/localtags .hg/ltags | |
|
33 | head -1 .hg/ltags | tr -d '\n' > .hg/localtags | |
|
32 | python << EOF | |
|
33 | f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close() | |
|
34 | f = file('.hg/localtags', 'w'); f.write(last); f.close() | |
|
35 | EOF | |
|
34 | 36 | cat .hg/localtags |
|
35 | 37 | hg tag -l localnewline |
|
36 | 38 | cat .hg/localtags |
|
37 | 39 | |
|
38 | mv .hgtags hgtags | |
|
39 | head -1 hgtags | tr -d '\n' > .hgtags | |
|
40 | python << EOF | |
|
41 | f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close() | |
|
42 | f = file('.hgtags', 'w'); f.write(last); f.close() | |
|
43 | EOF | |
|
40 | 44 | hg ci -d '1000000 0' -m'broken manual edit of .hgtags' |
|
41 | 45 | cat .hgtags |
|
42 | 46 | hg tag -d '1000000 0' newline |