From 6ceb4492a1a8da5dc62bce841ae51ca09206c315 2015-01-30 21:57:37
From: Min RK <benjaminrk@gmail.com>
Date: 2015-01-30 21:57:37
Subject: [PATCH] python3 syntax fixes on various scripts

revealed by running tools/build_release

---

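Note (not part of the commit message): the changes below follow the usual
Python 2/3 compatibility patterns already used elsewhere in the tree. A
minimal sketch of the main one, assuming a script that must run unchanged on
both interpreters (the value of `dest` is hypothetical, for illustration only):

    # must come before any other statements (a module docstring is allowed);
    # on Python 2.6+ this makes print a function, matching Python 3
    from __future__ import print_function

    dest = '/tmp/docs'  # hypothetical path, stands in for the real build dir
    print()
    print('Now verify the build in: %r' % dest)
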
diff --git a/docs/gh-pages.py b/docs/gh-pages.py
index b32cf5d..2db0d31 100755
--- a/docs/gh-pages.py
+++ b/docs/gh-pages.py
@@ -14,6 +14,8 @@ something like 'current' as a stable URL for the most current version of the """
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
+from __future__ import print_function
+
 import os
 import shutil
 import sys
@@ -121,13 +123,13 @@ if __name__ == '__main__':
 
         sh('git add -A %s' % tag)
         sh('git commit -m"Updated doc release: %s"' % tag)
-        print
-        print 'Most recent 3 commits:'
+        print()
+        print('Most recent 3 commits:')
         sys.stdout.flush()
         sh('git --no-pager log --oneline HEAD~3..')
     finally:
         cd(startdir)
 
-    print
-    print 'Now verify the build in: %r' % dest
-    print "If everything looks good, 'git push'"
+    print()
+    print('Now verify the build in: %r' % dest)
+    print("If everything looks good, 'git push'")
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 8de7e35..8a7b1e1 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -207,10 +207,10 @@ latex_font_size = '11pt'
 
 latex_documents = [
     ('index', 'ipython.tex', 'IPython Documentation',
-     ur"""The IPython Development Team""", 'manual', True),
+     u"""The IPython Development Team""", 'manual', True),
     ('parallel/winhpc_index', 'winhpc_whitepaper.tex',
      'Using IPython on Windows HPC Server 2008',
-     ur"Brian E. Granger", 'manual', True)
+     u"Brian E. Granger", 'manual', True)
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
diff --git a/examples/Parallel Computing/interengine/bintree_script.py b/examples/Parallel Computing/interengine/bintree_script.py
index f7caf67..45dfce4 100755
--- a/examples/Parallel Computing/interengine/bintree_script.py
+++ b/examples/Parallel Computing/interengine/bintree_script.py
@@ -16,6 +16,7 @@ impose the aggregation function to be commutative and distributive. It might
 not be the case if you implement the naive gather / reduce / broadcast strategy 
 where you can reorder the partial data before performing the reduce.
 """
+from __future__ import print_function
 
 from IPython.parallel import Client, Reference
 
@@ -31,12 +32,12 @@ root = rc[root_id]
 view = rc[:]
 
 # run bintree.py script defining bintree functions, etc.
-execfile('bintree.py')
+exec(compile(open('bintree.py').read(), 'bintree.py', 'exec'))
 
 # generate binary tree of parents
 btree = bintree(ids)
 
-print "setting up binary tree interconnect:"
+print("setting up binary tree interconnect:")
 print_bintree(btree)
 
 view.run('bintree.py')
@@ -73,15 +74,15 @@ view['add'] = add
 view['mul'] = mul
 
 # scatter some data
-data = range(1000)
+data = list(range(1000))
 view.scatter('data', data)
 
 # perform cumulative sum via allreduce
 view.execute("data_sum = com.allreduce(add, data, flat=False)")
-print "allreduce sum of data on all engines:", view['data_sum']
+print("allreduce sum of data on all engines:", view['data_sum'])
 
 # perform cumulative sum *without* final broadcast
 # when not broadcasting with allreduce, the final result resides on the root node:
 view.execute("ids_sum = com.reduce(add, id, flat=True)")
-print "reduce sum of engine ids (not broadcast):", root['ids_sum']
-print "partial result on each engine:", view['ids_sum']
+print("reduce sum of engine ids (not broadcast):", root['ids_sum'])
+print("partial result on each engine:", view['ids_sum'])
diff --git a/examples/Parallel Computing/wave2D/parallelwave-mpi.py b/examples/Parallel Computing/wave2D/parallelwave-mpi.py
index 0f7d47a..18f805a 100755
--- a/examples/Parallel Computing/wave2D/parallelwave-mpi.py
+++ b/examples/Parallel Computing/wave2D/parallelwave-mpi.py
@@ -111,7 +111,7 @@ if __name__ == '__main__':
     assert partition[0]*partition[1] == num_procs, "can't map partition %s to %i engines"%(partition, num_procs)
 
     view = rc[:]
-    print "Running %s system on %s processes until %f"%(grid, partition, tstop)
+    print("Running %s system on %s processes until %f" % (grid, partition, tstop))
 
     # functions defining initial/boundary/source conditions
     def I(x,y):
@@ -170,7 +170,7 @@ if __name__ == '__main__':
         else:
             norm = -1
         t1 = time.time()
-        print 'scalar inner-version, Wtime=%g, norm=%g'%(t1-t0, norm)
+        print('scalar inner-version, Wtime=%g, norm=%g' % (t1-t0, norm))
 
     impl['inner'] = 'vectorized'
     # setup new solvers
@@ -188,7 +188,7 @@ if __name__ == '__main__':
     else:
         norm = -1
     t1 = time.time()
-    print 'vector inner-version, Wtime=%g, norm=%g'%(t1-t0, norm)
+    print('vector inner-version, Wtime=%g, norm=%g' % (t1-t0, norm))
 
     # if ns.save is True, then u_hist stores the history of u as a list
     # If the partion scheme is Nx1, then u can be reconstructed via 'gather':
diff --git a/tools/build_release b/tools/build_release
index c19870e..1c5246b 100755
--- a/tools/build_release
+++ b/tools/build_release
@@ -12,7 +12,7 @@ ipdir = get_ipdir()
 cd(ipdir)
 
 # Load release info
-execfile(pjoin('IPython', 'core', 'release.py'))
+execfile(pjoin('IPython', 'core', 'release.py'), globals())
 
 # Check that everything compiles
 compile_tree()
diff --git a/tools/check_sources.py b/tools/check_sources.py
index 5b64c6e..dc0761e 100755
--- a/tools/check_sources.py
+++ b/tools/check_sources.py
@@ -8,6 +8,7 @@ Usage:
 It prints summaries and if chosen, line-by-line info of where \\t or \\r
 characters can be found in our source tree.
 """
+from __future__ import print_function
 
 # Config
 # If true, all lines that have tabs are printed, with line number
@@ -33,22 +34,22 @@ for f in path('..').walkfiles('*.py'):
         rets.append(f)
         
     if errs:
-        print "%3s" % errs, f
+        print("%3s" % errs, f)
 
     if 't' in errs and full_report_tabs:
         for ln,line in enumerate(f.lines()):
             if '\t' in line:
-                print 'TAB:',ln,':',line,
+                print('TAB:',ln,':',line, end=' ')
 
     if 'r' in errs and full_report_rets:
         for ln,line in enumerate(open(f.abspath(),'rb')):
             if '\r' in line:
-                print 'RET:',ln,':',line,
+                print('RET:',ln,':',line, end=' ')
 
 # Summary at the end, to call cleanup tools if necessary
 if tabs:
-    print 'Hard tabs found. These can be cleaned with untabify:'
-    for f in tabs: print f,
+    print('Hard tabs found. These can be cleaned with untabify:')
+    for f in tabs: print(f, end=' ')
 if rets:
-    print 'Carriage returns (\\r) found in:'
-    for f in rets: print f,
+    print('Carriage returns (\\r) found in:')
+    for f in rets: print(f, end=' ')