pager: recreate stdout to make it line-buffered

We want to see partial command results as soon as possible. But the
buffering mode of stdout (= the pager's stdin) was set to fully-buffered
because it isn't associated with a tty. So this patch recreates a new
stdout object to force its buffering mode.

Because two file objects are associated with the same stdout fd and their
destructors will call close(), one of them must be closed carefully.
Python expects the stdout fd never to be closed, even after
sys.stdout.close() [1], but newstdout has no such hack. So this patch
calls newstdout.close() immediately before duplicating the original
stdout fd back onto sys.stdout.

    operation              sys.stdout  newstdout  fd
    ---------------------  ----------  ---------  ------
    newstdout.close()      open        closed     closed
    os.dup2(stdoutfd, ..)  open        closed     open
    del sys.stdout         closed      closed     open

[1]: https://hg.python.org/cpython/file/v2.7.10/Python/sysmodule.c#l1391
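
To make the ordering in the table concrete, here is a minimal sketch of the
close-before-dup2 dance in Python 2 era idiom. It is not the actual Mercurial
pager patch; run_with_pager, the write callback and the 'less' default are
illustrative only.

import os
import subprocess
import sys

def run_with_pager(write, pagercmd='less'):
    # Illustrative helper: run write(out) with out connected to a pager
    # through a line-buffered stdout file object.
    pager = subprocess.Popen(pagercmd, shell=True, stdin=subprocess.PIPE)

    # Keep a duplicate of the real stdout fd so it can be restored later.
    stdoutfd = os.dup(sys.stdout.fileno())

    # Recreate stdout as a line-buffered file object (bufsize=1) on fd 1,
    # then point fd 1 at the pager's stdin. Output now reaches the pager
    # one line at a time even though the pipe is not a tty.
    newstdout = os.fdopen(sys.stdout.fileno(), 'wb', 1)
    os.dup2(pager.stdin.fileno(), sys.stdout.fileno())

    try:
        write(newstdout)
    finally:
        # newstdout has no "never close the real stdout fd" hack, so close
        # it first, while fd 1 still points at the pager pipe ...
        newstdout.close()
        # ... then restore the saved original stdout onto fd 1 ...
        os.dup2(stdoutfd, sys.stdout.fileno())
        os.close(stdoutfd)
        # ... and shut the pager down; sys.stdout's own close() will never
        # touch the restored fd.
        pager.stdin.close()
        pager.wait()

A caller would do something like run_with_pager(lambda out: out.write('hello\n')).
Calling newstdout.close() after the dup2 instead would close the freshly
restored stdout fd, which is exactly the ordering the table above rules out.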

File last commit: r19008:9d33d6e0 (default)
Changeset: r26454:62c5e937 (default)
wirestore.py
40 lines | 1.3 KiB | text/x-python
# Copyright 2010-2011 Fog Creek Software
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''largefile store working over Mercurial's wire protocol'''

import lfutil
import remotestore

class wirestore(remotestore.remotestore):
    def __init__(self, ui, repo, remote):
        cap = remote.capable('largefiles')
        if not cap:
            raise lfutil.storeprotonotcapable([])
        storetypes = cap.split(',')
        if 'serve' not in storetypes:
            raise lfutil.storeprotonotcapable(storetypes)
        self.remote = remote
        super(wirestore, self).__init__(ui, repo, remote.url())

    def _put(self, hash, fd):
        return self.remote.putlfile(hash, fd)

    def _get(self, hash):
        return self.remote.getlfile(hash)

    def _stat(self, hashes):
        '''For each hash, return 0 if it is available, other values if not.
        It is usually 2 if the largefile is missing, but might be 1 if the
        server has a corrupted copy.'''
        # issue all statlfile calls as a single wire-protocol batch, then
        # read the results back out of the futures
        batch = self.remote.batch()
        futures = {}
        for hash in hashes:
            futures[hash] = batch.statlfile(hash)
        batch.submit()
        retval = {}
        for hash in hashes:
            retval[hash] = futures[hash].value
        return retval
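
For context, a hypothetical usage sketch of the class above; the repository
and peer setup through mercurial.hg is an assumption for illustration, not
something wirestore.py itself does.

# Hypothetical usage sketch -- only the wirestore class above is taken from
# this file; the ui/repo/peer construction via mercurial.hg is assumed.
from mercurial import hg, ui as uimod

def missing_on_server(repopath, remoteurl, hashes):
    ui = uimod.ui()
    repo = hg.repository(ui, repopath)
    remote = hg.peer(repo, {}, remoteurl)       # wire protocol peer
    store = wirestore(ui, repo, remote)         # raises if no 'serve' support
    stats = store._stat(hashes)                 # {hash: 0, 1 or 2}
    return [h for h, code in stats.iteritems() if code != 0]

The batch()/submit() pattern in _stat exists so that all statlfile requests
are sent to the server together, rather than paying one round trip per hash.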