@@ -11,9 +11,34 @@
 #
 # <alias email> <actual email>
 
-import sys
 from mercurial.i18n import gettext as _
 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
+import os, sys
+
+def get_tty_width():
+    if 'COLUMNS' in os.environ:
+        try:
+            return int(os.environ['COLUMNS'])
+        except ValueError:
+            pass
+    try:
+        import termios, fcntl, struct
+        buf = 'abcd'
+        for dev in (sys.stdout, sys.stdin):
+            try:
+                if buf != 'abcd':
+                    break
+                fd = dev.fileno()
+                if not os.isatty(fd):
+                    continue
+                buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
+            except ValueError:
+                pass
+        if buf != 'abcd':
+            return struct.unpack('hh', buf)[1]
+    except ImportError:
+        pass
+    return 80
 
 def __gather(ui, repo, node1, node2):
     def dirtywork(f, mmap1, mmap2):
@@ -159,8 +184,9 @@ def churn(ui, repo, **opts):
 
     maximum = ordered[0][1]
 
-    ui.note("Assuming 80 character terminal\n")
-    width = 80 - 1
+    width = get_tty_width()
+    ui.note(_("assuming %i character terminal\n") % width)
+    width -= 1
 
     for i in ordered:
         person = i[0]
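
Note on the churn hunks above: the new get_tty_width() asks the terminal for its size via the TIOCGWINSZ ioctl, falling back to $COLUMNS and finally to 80. A standalone sketch of the same technique, for reference only (Unix-specific; tty_width is an illustrative name, not part of the patch):

    # Standalone sketch of the TIOCGWINSZ technique used by get_tty_width().
    # Unix-only; returns the terminal column count or a default.
    import fcntl, os, struct, sys, termios

    def tty_width(default=80):
        if 'COLUMNS' in os.environ:
            try:
                return int(os.environ['COLUMNS'])
            except ValueError:
                pass
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                # TIOCGWINSZ fills a struct winsize; the first two shorts
                # are rows and columns.
                buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, b'\x00' * 4)
                return struct.unpack('hh', buf)[1]
            except (IOError, OSError, ValueError):
                continue
        return default

    if __name__ == '__main__':
        print(tty_width())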
@@ -72,6 +72,5 @@ def uisetup(ui):
         args = target.split(' ')
         tcmd = args.pop(0)
         if args:
-            pui = ui.parentui or ui
-            pui.setconfig('defaults', cmd, ' '.join(args))
+            ui.setconfig('defaults', cmd, ' '.join(args))
         cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
@@ -260,7 +260,7 @@ class convert(object):
         self.mapfilefd.close()
 
 def _convert(ui, src, dest=None, mapfile=None, **opts):
-    '''Convert a foreign SCM repository to a Mercurial one.
+    """Convert a foreign SCM repository to a Mercurial one.
 
     Accepted source formats:
     - GIT
@@ -293,7 +293,7 @@ def _convert(ui, src, dest=None, mapfile
     that use unix logins to identify authors (eg: CVS). One line per author
     mapping and the line format is:
     srcauthor=whatever string you want
-    '''
+    """
 
     util._encoding = 'UTF-8'
 
@@ -1,10 +1,21 @@
 # Subversion 1.4/1.5 Python API backend
 #
 # Copyright(C) 2007 Daniel Holth et al
+#
+# Configuration options:
+#
+# convert.svn.trunk
+#   Relative path to the trunk (default: "trunk")
+# convert.svn.branches
+#   Relative path to tree of branches (default: "branches")
+#
+# Set these in a hgrc, or on the command line as follows:
+#
+#   hg convert --config convert.svn.trunk=wackoname [...]
 
-import pprint
 import locale
-
+import os
+import cPickle as pickle
 from mercurial import util
 
 # Subversion stuff. Works best with very recent Python SVN bindings
@@ -27,6 +38,12 @@ except ImportError:
 
 class CompatibilityException(Exception): pass
 
+class changedpath(object):
+    def __init__(self, p):
+        self.copyfrom_path = p.copyfrom_path
+        self.copyfrom_rev = p.copyfrom_rev
+        self.action = p.action
+
 # SVN conversion code stolen from bzr-svn and tailor
 class convert_svn(converter_source):
     def __init__(self, ui, url, rev=None):
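
The options documented in the new header can also be set once in an hgrc file instead of being passed with --config on every run; for example (the values shown here are placeholders):

    [convert]
    svn.trunk = mytrunk
    svn.branches = mybranches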
@@ -51,8 +68,10 @@ class convert_svn(converter_source):
         try:
             # Support file://path@rev syntax. Useful e.g. to convert
             # deleted branches.
-            url, latest = url.rsplit("@", 1)
-            latest = int(latest)
+            at = url.rfind('@')
+            if at >= 0:
+                latest = int(url[at+1:])
+                url = url[:at]
         except ValueError, e:
             pass
         self.url = url
@@ -60,7 +79,7 @@ class convert_svn(converter_source):
         try:
-            self.transport = transport.SvnRaTransport(url)
+            self.transport = transport.SvnRaTransport(url=url)
             self.ra = self.transport.ra
-            self.ctx = svn.client.create_context()
+            self.ctx = self.transport.client
             self.base = svn.ra.get_repos_root(self.ra)
             self.module = self.url[len(self.base):]
             self.modulemap = {} # revision, module
@@ -88,26 +107,47 @@ class convert_svn(converter_source):
             lastrevs[module] = revnum
         self.lastrevs = lastrevs
 
+    def exists(self, path, optrev):
+        try:
+            return svn.client.ls(self.url.rstrip('/') + '/' + path,
+                                 optrev, False, self.ctx)
+        except SubversionException, err:
+            return []
+
     def getheads(self):
         # detect standard /branches, /tags, /trunk layout
         optrev = svn.core.svn_opt_revision_t()
         optrev.kind = svn.core.svn_opt_revision_number
         optrev.value.number = self.last_changed
         rpath = self.url.strip('/')
-        paths = svn.client.ls(rpath, optrev, False, self.ctx)
-        if 'branches' in paths and 'trunk' in paths:
-            self.module += '/trunk'
+        cfgtrunk = self.ui.config('convert', 'svn.trunk')
+        cfgbranches = self.ui.config('convert', 'svn.branches')
+        trunk = (cfgtrunk or 'trunk').strip('/')
+        branches = (cfgbranches or 'branches').strip('/')
+        if self.exists(trunk, optrev) and self.exists(branches, optrev):
+            self.ui.note('found trunk at %r and branches at %r\n' %
+                         (trunk, branches))
+            oldmodule = self.module
+            self.module += '/' + trunk
             lt = self.latest(self.module, self.last_changed)
             self.head = self.revid(lt)
             self.heads = [self.head]
-            branches = svn.client.ls(rpath + '/branches', optrev, False, self.ctx)
-            for branch in branches.keys():
-                module = '/branches/' + branch
+            branchnames = svn.client.ls(rpath + '/' + branches, optrev, False,
+                                        self.ctx)
+            for branch in branchnames.keys():
+                if oldmodule:
+                    module = '/' + oldmodule + '/' + branches + '/' + branch
+                else:
+                    module = '/' + branches + '/' + branch
                 brevnum = self.latest(module, self.last_changed)
                 brev = self.revid(brevnum, module)
                 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
                 self.heads.append(brev)
+        elif cfgtrunk or cfgbranches:
+            raise util.Abort(_('trunk/branch layout expected, '
+                               'but not found'))
         else:
+            self.ui.note('working with one branch\n')
             self.heads = [self.head]
         return self.heads
 
@@ -140,10 +180,68 @@ class convert_svn(converter_source):
             del self.commits[rev]
         return commit
 
+    def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
+                strict_node_history=False):
+        '''wrapper for svn.ra.get_log.
+        on a large repository, svn.ra.get_log pins huge amounts of
+        memory that cannot be recovered. work around it by forking
+        and writing results over a pipe.'''
+
+        def child(fp):
+            protocol = -1
+            def receiver(orig_paths, revnum, author, date, message, pool):
+                if orig_paths is not None:
+                    for k, v in orig_paths.iteritems():
+                        orig_paths[k] = changedpath(v)
+                pickle.dump((orig_paths, revnum, author, date, message),
+                            fp, protocol)
+
+            try:
+                # Use an ra of our own so that our parent can consume
+                # our results without confusing the server.
+                t = transport.SvnRaTransport(url=self.url)
+                svn.ra.get_log(t.ra, paths, start, end, limit,
+                               discover_changed_paths,
+                               strict_node_history,
+                               receiver)
+            except SubversionException, (_, num):
+                self.ui.print_exc()
+                pickle.dump(num, fp, protocol)
+            else:
+                pickle.dump(None, fp, protocol)
+            fp.close()
+
+        def parent(fp):
+            while True:
+                entry = pickle.load(fp)
+                try:
+                    orig_paths, revnum, author, date, message = entry
+                except:
+                    if entry is None:
+                        break
+                    raise SubversionException("child raised exception", entry)
+                yield entry
+
+        rfd, wfd = os.pipe()
+        pid = os.fork()
+        if pid:
+            os.close(wfd)
+            for p in parent(os.fdopen(rfd, 'rb')):
+                yield p
+            ret = os.waitpid(pid, 0)[1]
+            if ret:
+                raise util.Abort(_('get_log %s') % util.explain_exit(ret))
+        else:
+            os.close(rfd)
+            child(os.fdopen(wfd, 'wb'))
+            os._exit(0)
+
     def gettags(self):
         tags = {}
-        def parselogentry(*arg, **args):
-            orig_paths, revnum, author, date, message, pool = arg
+        start = self.revnum(self.head)
+        try:
+            for entry in self.get_log(['/tags'], 0, start):
+                orig_paths, revnum, author, date, message = entry
                 for path in orig_paths:
                     if not path.startswith('/tags/'):
                         continue
@@ -152,15 +250,9 @@ class convert_svn(converter_source):
                     rev = ent.copyfrom_rev
                     tag = path.split('/', 2)[2]
                     tags[tag] = self.revid(rev, module=source)
-
-        start = self.revnum(self.head)
-        try:
-            svn.ra.get_log(self.ra, ['/tags'], 0, start, 0, True, False,
-                           parselogentry)
-            return tags
-        except SubversionException:
-            self.ui.note('no tags found at revision %d\n' % start)
-            return {}
+        except SubversionException, (_, num):
+            self.ui.note('no tags found at revision %d\n' % start)
+        return tags
 
     # -- helper functions --
 
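
The get_log() wrapper above isolates the leaky svn.ra.get_log call in a forked child and streams pickled results back over a pipe, so the memory it pins is reclaimed when the child exits. A stripped-down sketch of that fork-and-stream pattern, independent of the Subversion bindings (produce and stream_results are illustrative names):

    # Sketch of the fork-and-stream pattern used by get_log above (Unix only).
    # produce() stands in for a memory-hungry, callback-driven API.
    import os
    import pickle

    def produce(fp):
        # Child side: run the expensive work and pickle each result to the pipe.
        for item in range(5):
            pickle.dump(item, fp, -1)
        pickle.dump(None, fp, -1)      # sentinel: no more results
        fp.close()

    def stream_results():
        rfd, wfd = os.pipe()
        pid = os.fork()
        if pid:                        # parent: read results as they arrive
            os.close(wfd)
            fp = os.fdopen(rfd, 'rb')
            while True:
                item = pickle.load(fp)
                if item is None:
                    break
                yield item
            os.waitpid(pid, 0)         # reap the child; its memory goes with it
        else:                          # child: produce results, then exit hard
            os.close(rfd)
            produce(os.fdopen(wfd, 'wb'))
            os._exit(0)

    if __name__ == '__main__':
        print(list(stream_results()))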
@@ -193,8 +285,8 @@ class convert_svn(converter_source):
         except SubversionException:
             dirent = None
         if not dirent:
-            raise util.Abort('%s not found up to revision %d' \
-                             % (path, stop))
+            print self.base, path
+            raise util.Abort('%s not found up to revision %d' % (path, stop))
 
         return dirent.created_rev
 
@@ -242,25 +334,10 @@ class convert_svn(converter_source):
             self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
             return None
 
-        received = []
-        # svn.ra.get_log requires no other calls to the ra until it completes,
-        # so we just collect the log entries and parse them afterwards
-        def receivelog(*arg, **args):
-            received.append(arg)
-
         self.child_cset = None
-        def parselogentry(*arg, **args):
-            orig_paths, revnum, author, date, message, pool = arg
-
-            if self.is_blacklisted(revnum):
-                self.ui.note('skipping blacklisted revision %d\n' % revnum)
-                return
-
-            self.ui.debug("parsing revision %d\n" % revnum)
-
-            if orig_paths is None:
-                self.ui.debug('revision %d has no entries\n' % revnum)
-                return
+        def parselogentry(orig_paths, revnum, author, date, message):
+            self.ui.debug("parsing revision %d (%d changes)\n" %
+                          (revnum, len(orig_paths)))
 
             if revnum in self.modulemap:
                 new_module = self.modulemap[revnum]
@@ -286,12 +363,11 @@ class convert_svn(converter_source):
             except IndexError:
                 branch = None
 
-            paths = orig_paths.keys()
-            paths.sort()
-            for path in paths:
+            orig_paths = orig_paths.items()
+            orig_paths.sort()
+            for path, ent in orig_paths:
                 # self.ui.write("path %s\n" % path)
                 if path == self.module: # Follow branching back in history
-                    ent = orig_paths[path]
                     if ent:
                         if ent.copyfrom_path:
                             # ent.copyfrom_rev may not be the actual last revision
@@ -310,7 +386,6 @@ class convert_svn(converter_source):
                     self.ui.debug("boring@%s: %s\n" % (revnum, path))
                     continue
                 entry = entrypath.decode(self.encoding)
-                ent = orig_paths[path]
 
                 kind = svn.ra.check_path(self.ra, entrypath, revnum)
                 if kind == svn.core.svn_node_file:
@@ -492,17 +567,21 @@ class convert_svn(converter_source):
                 self.child_cset.parents = [rev]
             self.child_cset = cset
 
         self.ui.note('fetching revision log for "%s" from %d to %d\n' %
                      (self.module, from_revnum, to_revnum))
 
         try:
             discover_changed_paths = True
             strict_node_history = False
-            svn.ra.get_log(self.ra, [self.module], from_revnum, to_revnum, 0,
-                           discover_changed_paths, strict_node_history,
-                           receivelog)
-            for entry in received:
-                parselogentry(*entry)
+            for entry in self.get_log([self.module], from_revnum, to_revnum):
+                orig_paths, revnum, author, date, message = entry
+                if self.is_blacklisted(revnum):
+                    self.ui.note('skipping blacklisted revision %d\n' % revnum)
+                    continue
+                if orig_paths is None:
+                    self.ui.debug('revision %d has no entries\n' % revnum)
+                    continue
+                parselogentry(orig_paths, revnum, author, date, message)
         except SubversionException, (_, num):
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                 raise NoSuchRevision(branch=self,
@@ -567,7 +646,6 @@ class convert_svn(converter_source):
         dirents = getdir[0]
         if type(dirents) == int:
             # got here once due to infinite recursion bug
-            # pprint.pprint(getdir)
             return
         c = dirents.keys()
         c.sort()
@@ -24,6 +24,7 @@ from tempfile import mktemp
 
 from svn.core import SubversionException, Pool
 import svn.ra
+import svn.client
 import svn.core
 
 # Some older versions of the Python bindings need to be
@@ -48,21 +49,6 @@ def _create_auth_baton(pool):
         ]
     return svn.core.svn_auth_open(providers, pool)
 
-
-# # The SVN libraries don't like trailing slashes...
-#     return url.rstrip('/')
-
-
-class SvnRaCallbacks(svn.ra.callbacks2_t):
-    """Remote access callbacks implementation for bzr-svn."""
-    def __init__(self, pool):
-        svn.ra.callbacks2_t.__init__(self)
-        self.auth_baton = _create_auth_baton(pool)
-        self.pool = pool
-
-    def open_tmp_file(self, pool):
-        return mktemp(prefix='tailor-svn')
-
 class NotBranchError(SubversionException):
     pass
 
@@ -73,25 +59,30 @@ class SvnRaTransport(object):
     def __init__(self, url="", ra=None):
         self.pool = Pool()
         self.svn_url = url
+        self.username = ''
+        self.password = ''
 
         # Only Subversion 1.4 has reparent()
         if ra is None or not hasattr(svn.ra, 'reparent'):
-            self.callbacks = SvnRaCallbacks(self.pool)
+            self.client = svn.client.create_context(self.pool)
+            ab = _create_auth_baton(self.pool)
+            if False:
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
+            self.client.auth_baton = ab
+            self.client.config = svn_config
             try:
-                ver = svn.ra.version()
-                try: # Older SVN bindings
-                    self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, None, svn_config, None)
-                except TypeError, e:
-                    self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, svn_config, None)
+                self.ra = svn.client.open_ra_session(
+                    self.svn_url.encode('utf8'),
+                    self.client, self.pool)
             except SubversionException, (_, num):
-                if num == svn.core.SVN_ERR_RA_ILLEGAL_URL:
-                    raise NotBranchError(url)
-                if num == svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED:
-                    raise NotBranchError(url)
-                if num == svn.core.SVN_ERR_BAD_URL:
+                if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
+                           svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+                           svn.core.SVN_ERR_BAD_URL):
                     raise NotBranchError(url)
                 raise
-
         else:
             self.ra = ra
             svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
@@ -200,8 +200,9 @@ def archive(repo, dest, node, kind, deco
 
     prefix is name of path to put before every archive member.'''
 
-    def write(name, mode, islink, data):
+    def write(name, mode, islink, getdata):
         if matchfn and not matchfn(name): return
+        data = getdata()
         if decode:
             data = repo.wwritedata(name, data)
         archiver.addfile(name, mode, islink, data)
@@ -212,8 +213,8 @@ def archive(repo, dest, node, kind, deco
     items = m.items()
     items.sort()
     write('.hg_archival.txt', 0644, False,
-          'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
+          lambda: 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in items:
        write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
-              repo.file(filename).read(filenode))
+              lambda: repo.file(filename).read(filenode))
     archiver.done()
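
The archival hunks above pass a callable instead of the file data itself, so content for members rejected by matchfn is never read at all. A generic sketch of that deferral idea (all names are illustrative, not part of the patch):

    # Sketch of the deferred-data idea behind the getdata() change above.
    def write(name, getdata, wanted=lambda n: True):
        if not wanted(name):
            return                     # the expensive read never happens
        data = getdata()               # only materialize data for kept members
        print('%s: %d bytes' % (name, len(data)))

    def expensive_read(name):
        return ('contents of %s' % name).encode()

    for fname in ['keep.txt', 'skip.bin']:
        # f=fname pins the current name, avoiding the late-binding pitfall.
        write(fname, lambda f=fname: expensive_read(f),
              wanted=lambda n: n.endswith('.txt'))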
@@ -42,7 +42,7 @@ class appender:
     def flush(self):
         pass
     def close(self):
-        self.fp.close
+        self.fp.close()
 
     def seek(self, offset, whence=0):
         '''virtual file offset spans real file and data'''
@@ -58,7 +58,6 @@ class appender:
     def read(self, count=-1):
         '''only trick here is reads that span real file and data'''
         ret = ""
-        old_offset = self.offset
         if self.offset < self.size:
             s = self.fp.read(count)
             ret = s
@@ -316,7 +316,7 @@ def dispatch(ui, args, argv0=None):
         util._fallbackencoding = fallback
 
     fullargs = args
-    cmd, func, args, options, cmdoptions = parse(ui, args)
+    cmd, func, args, options, cmdoptions = parse(lui, args)
 
     if options["config"]:
         raise util.Abort(_("Option --config may not be abbreviated!"))
@@ -8,7 +8,7 @@
 import demandimport; demandimport.enable()
 from node import *
 from i18n import _
-import bisect, os, re, sys, urllib,
+import bisect, os, re, sys, urllib, stat
 import ui, hg, util, revlog, bundlerepo, extensions
 import difflib, patch, time, help, mdiff, tempfile
 import errno, version, socket
@@ -1362,7 +1362,7 @@ def help_(ui, name=None, with_version=Fa
 
     addglobalopts(False)
 
-    def helplist(select=None):
+    def helplist(header, select=None):
         h = {}
         cmds = {}
         for c, e in table.items():
@@ -1380,6 +1380,11 @@ def help_(ui, name=None, with_version=Fa
             h[f] = doc.splitlines(0)[0].rstrip()
             cmds[f] = c.lstrip("^")
 
+        if not h:
+            ui.status(_('no commands defined\n'))
+            return
+
+        ui.status(header)
         fns = h.keys()
         fns.sort()
         m = max(map(len, fns))
@@ -1429,14 +1434,10 @@ def help_(ui, name=None, with_version=Fa
         try:
             ct = mod.cmdtable
         except AttributeError:
-            ct = None
-        if not ct:
-            ui.status(_('no commands defined\n'))
-            return
-
-        ui.status(_('list of commands:\n\n'))
+            ct = {}
+
         modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
-        helplist(modcmds.has_key)
+        helplist(_('list of commands:\n\n'), modcmds.has_key)
 
     if name and name != 'shortlist':
         i = None
@@ -1460,11 +1461,11 @@ def help_(ui, name=None, with_version=Fa
 
     # list of commands
     if name == "shortlist":
-        ui.status(_('basic commands:\n\n'))
+        header = _('basic commands:\n\n')
     else:
-        ui.status(_('list of commands:\n\n'))
+        header = _('list of commands:\n\n')
 
-    helplist()
+    helplist(header)
 
     # list all option lists
     opt_output = []
@@ -2040,14 +2041,12 @@ def paths(ui, repo, search=None):
     for name, path in ui.configitems("paths"):
         ui.write("%s = %s\n" % (name, path))
 
-def postincoming(ui, repo, modheads, optupdate, wasempty):
+def postincoming(ui, repo, modheads, optupdate):
     if modheads == 0:
         return
     if optupdate:
-        if wasempty:
-            return hg.update(repo, repo.changelog.tip())
-        elif modheads == 1:
-            return hg.update(repo, repo.changelog.tip()) # update
+        if modheads == 1:
+            return hg.update(repo, None)
         else:
             ui.status(_("not updating, since new heads added\n"))
     if modheads > 1:
@@ -2108,9 +2107,8 @@ def pull(ui, repo, source="default", **o
             error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
             raise util.Abort(error)
 
-    wasempty = repo.changelog.count() == 0
     modheads = repo.pull(other, heads=revs, force=opts['force'])
-    return postincoming(ui, repo, modheads, opts['update'], wasempty)
+    return postincoming(ui, repo, modheads, opts['update'])
 
 def push(ui, repo, dest=None, **opts):
     """push changes to the specified destination
@@ -2211,7 +2209,6 @@ def remove(ui, repo, *pats, **opts):
     Modified files and added files are not removed by default. To
     remove them, use the -f/--force option.
     """
-    names = []
     if not opts['after'] and not pats:
         raise util.Abort(_('no files specified'))
     files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
@@ -2681,8 +2678,6 @@ def unbundle(ui, repo, fname1, *fnames,
     bundle command.
     """
     fnames = (fname1,) + fnames
-    result = None
-    wasempty = repo.changelog.count() == 0
     for fname in fnames:
         if os.path.exists(fname):
             f = open(fname, "rb")
@@ -2691,7 +2686,7 @@ def unbundle(ui, repo, fname1, *fnames,
         gen = changegroup.readbundle(f, fname)
         modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
 
-    return postincoming(ui, repo, modheads, opts['update'], wasempty)
+    return postincoming(ui, repo, modheads, opts['update'])
 
 def update(ui, repo, node=None, rev=None, clean=False, date=None):
     """update working directory
@@ -21,6 +21,7 @@ class dirstate(object):
         self._opener = opener
         self._root = root
         self._dirty = False
+        self._dirtypl = False
         self._ui = ui
 
     def __getattr__(self, name):
@@ -113,7 +114,7 @@ class dirstate(object):
         return self._branch
 
     def setparents(self, p1, p2=nullid):
-        self._dirty = True
+        self._dirty = self._dirtypl = True
         self._pl = p1, p2
 
     def setbranch(self, branch):
@@ -123,6 +124,7 @@ class dirstate(object):
     def _read(self):
         self._map = {}
         self._copymap = {}
+        if not self._dirtypl:
             self._pl = [nullid, nullid]
         try:
             st = self._opener("dirstate").read()
@@ -132,6 +134,7 @@ class dirstate(object):
         if not st:
             return
 
+        if not self._dirtypl:
            self._pl = [st[:20], st[20: 40]]
 
         # deref fields so they will be local in loop
@@ -157,8 +160,8 @@ class dirstate(object):
 
     def invalidate(self):
         for a in "_map _copymap _branch _pl _dirs _ignore".split():
-            if hasattr(self, a):
-                self.__delattr__(a)
+            if a in self.__dict__:
+                delattr(self, a)
         self._dirty = False
 
     def copy(self, source, dest):
@@ -271,7 +274,7 @@ class dirstate(object):
         st = self._opener("dirstate", "w", atomictemp=True)
         st.write(cs.getvalue())
         st.rename()
-        self._dirty = False
+        self._dirty = self._dirtypl = False
 
     def _filter(self, files):
         ret = {}
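
Context for the dirstate invalidate() change above: these attributes are built lazily through __getattr__, so probing them with hasattr() would recreate them on the spot, while checking __dict__ only touches what has actually been materialized. A small sketch of that lazy-attribute pattern (class and attribute names are illustrative):

    # Sketch of the lazy-attribute pattern that invalidate() relies on.
    class Lazy(object):
        def __getattr__(self, name):
            # Only called when 'name' is not already in __dict__.
            if name == '_map':
                print('building _map')
                self._map = {'a': 1}
                return self._map
            raise AttributeError(name)

        def invalidate(self):
            # hasattr(self, '_map') would trigger __getattr__ and rebuild it;
            # checking __dict__ only drops what was actually materialized.
            for a in ('_map',):
                if a in self.__dict__:
                    delattr(self, a)

    d = Lazy()
    print(d._map)      # builds and caches
    d.invalidate()     # drops the cache
    print(d._map)      # builds again on next access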
@@ -29,14 +29,13 @@ class lock(object):
     # old-style lock: symlink to pid
     # new-style lock: symlink to hostname:pid
 
+    _host = None
+
     def __init__(self, file, timeout=-1, releasefn=None, desc=None):
         self.f = file
         self.held = 0
         self.timeout = timeout
         self.releasefn = releasefn
-        self.id = None
-        self.host = None
-        self.pid = None
         self.desc = desc
         self.lock()
 
@@ -59,13 +58,12 @@ class lock(object):
                                inst.locker)
 
     def trylock(self):
-        if self.id is None:
-            self.host = socket.gethostname()
-            self.pid = os.getpid()
-            self.id = '%s:%s' % (self.host, self.pid)
+        if lock._host is None:
+            lock._host = socket.gethostname()
+        lockname = '%s:%s' % (lock._host, os.getpid())
         while not self.held:
             try:
-                util.makelock(self.id, self.f)
+                util.makelock(lockname, self.f)
                 self.held = 1
             except (OSError, IOError), why:
                 if why.errno == errno.EEXIST:
@@ -93,7 +91,7 @@ class lock(object):
             host, pid = locker.split(":", 1)
         except ValueError:
             return locker
-        if host != self.host:
+        if host != lock._host:
            return locker
        try:
            pid = int(pid)
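
The lock hunks above cache the hostname in a class attribute so socket.gethostname() runs once per process rather than once per lock instance. A minimal sketch of that caching pattern (the class shown is illustrative, not Mercurial's lock):

    # Sketch of the class-level caching used by lock._host above.
    import os
    import socket

    class Lock(object):
        _host = None                   # shared across all instances

        def __init__(self, path):
            self.path = path

        def lockname(self):
            if Lock._host is None:     # resolved once per process
                Lock._host = socket.gethostname()
            return '%s:%s' % (Lock._host, os.getpid())

    print(Lock('/tmp/a.lock').lockname())
    print(Lock('/tmp/b.lock').lockname())   # reuses the cached hostname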
@@ -281,7 +281,7 @@ def externalpatch(patcher, args, patchna
 def internalpatch(patchname, ui, strip, cwd, files):
     """use builtin patch to apply <patchname> to the working directory.
     returns whether patch was applied with fuzz factor."""
-    fp = file(patchname)
+    fp = file(patchname, 'rb')
     if cwd:
         curdir = os.getcwd()
         os.chdir(cwd)
@@ -303,7 +303,7 @@ class patchfile:
         self.fname = fname
         self.ui = ui
         try:
-            fp = file(fname, 'r')
+            fp = file(fname, 'rb')
             self.lines = fp.readlines()
             self.exists = True
         except IOError:
@@ -383,7 +383,7 @@ class patchfile:
             try: os.unlink(fname)
             except:
                 pass
-            fp = file(fname, 'w')
+            fp = file(fname, 'wb')
            base = os.path.basename(self.fname)
            fp.write("--- %s\n+++ %s\n" % (base, base))
            for x in self.rej:
@@ -402,7 +402,7 @@ class patchfile:
            if st.st_nlink > 1:
                os.unlink(dest)
        except: pass
-        fp = file(dest, 'w')
+        fp = file(dest, 'wb')
        if st:
            os.chmod(dest, st.st_mode)
        fp.writelines(self.lines)
@@ -777,13 +777,13 @@ def selectfile(afile_orig, bfile_orig, h
        if count == 0:
            return path.rstrip()
        while count > 0:
-            i = path.find(os.sep, i)
+            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
-            while i < pathlen - 1 and path[i] == os.sep:
+            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[i:].rstrip()
@@ -616,7 +616,7 @@ def rename(src, dst):
     """forcibly rename a file"""
     try:
         os.rename(src, dst)
-    except OSError, err:
+    except OSError, err: # FIXME: check err (EEXIST ?)
         # on windows, rename to existing file is not allowed, so we
         # must delete destination first. but if file is open, unlink
         # schedules it for delete but does not delete it. rename
@@ -1303,7 +1303,11 @@ class opener(object):
                os.makedirs(dirname)
 
            if self._can_symlink:
+                try:
                    os.symlink(src, linkname)
+                except OSError, err:
+                    raise OSError(err.errno, _('could not symlink to %r: %s') %
+                                  (src, err.strerror), linkname)
            else:
                f = self(self, dst, "w")
                f.write(src)
@@ -209,9 +209,9 @@ class posixfile_nt(object):
 
     def __init__(self, name, mode='rb'):
         access = 0
-        if 'r' in mode or '+' in mode:
+        if 'r' in mode:
             access |= win32file.GENERIC_READ
-        if 'w' in mode or 'a' in mode:
+        if 'w' in mode or 'a' in mode or '+' in mode:
             access |= win32file.GENERIC_WRITE
         if 'r' in mode:
             creation = win32file.OPEN_EXISTING
@@ -64,3 +64,18 @@ emptypath=`pwd`/empty.py
 echo '[extensions]' > $HGRCPATH
 echo "empty = $emptypath" >> $HGRCPATH
 hg help empty
+
+cat > debugextension.py <<EOF
+'''only debugcommands
+'''
+def debugfoobar(ui, repo, *args, **opts):
+    "yet another debug command"
+    pass
+
+cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")}
+EOF
+debugpath=`pwd`/debugextension.py
+echo '[extensions]' > $HGRCPATH
+echo "debugextension = $debugpath" >> $HGRCPATH
+hg help debugextension
+hg --debug help debugextension
@@ -22,3 +22,30 @@ Foo
 empty extension - empty cmdtable
 
 no commands defined
+debugextension extension - only debugcommands
+
+no commands defined
+debugextension extension - only debugcommands
+
+list of commands:
+
+ debugfoobar:
+      yet another debug command
+
+global options:
+ -R --repository      repository root directory or symbolic path name
+    --cwd             change working directory
+ -y --noninteractive  do not prompt, assume 'yes' for any required answers
+ -q --quiet           suppress output
+ -v --verbose         enable additional output
+    --config          set/override config option
+    --debug           enable debugging output
+    --debugger        start debugger
+    --encoding        set the charset encoding (default: ascii)
+    --encodingmode    set the charset encoding mode (default: strict)
+    --lsprof          print improved command execution profile
+    --traceback       print traceback on exception
+    --time            time how long the command takes
+    --profile         print command execution profile
+    --version         output version information and exit
+ -h --help            display help and exit
@@ -29,14 +29,18 @@ newline'
 hg tag -l 'xx:xx'
 
 echo % issue 601
-mv .hg/localtags .hg/ltags
-head -1 .hg/ltags | tr -d '\n' > .hg/localtags
+python << EOF
+f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
+f = file('.hg/localtags', 'w'); f.write(last); f.close()
+EOF
 cat .hg/localtags
 hg tag -l localnewline
 cat .hg/localtags
 
-mv .hgtags hgtags
-head -1 hgtags | tr -d '\n' > .hgtags
+python << EOF
+f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
+f = file('.hgtags', 'w'); f.write(last); f.close()
+EOF
 hg ci -d '1000000 0' -m'broken manual edit of .hgtags'
 cat .hgtags
 hg tag -d '1000000 0' newline