author	Kaleb S. KEITHLEY <kkeithle@redhat.com>	2018-04-13 09:13:16 -0400
committer	Kaleb KEITHLEY <kkeithle@redhat.com>	2018-05-02 11:28:46 +0000
commit	4e7b423d3c3137c3f83b71b36279e1a544154833 (patch)
tree	e59dc57aa319a5e145161a0e32fba9fc74773e91
parent	9da91172538a2a95fba609c93e199db159fd1938 (diff)
core/various: python3 compat, prepare for python2 -> python3
see https://review.gluster.org/#/c/19788/

use print fn from __future__

Change-Id: If5075d8d9ca9641058fbc71df8a52aa35804cda4
updates: #411
Signed-off-by: Kaleb S. KEITHLEY <kkeithle@redhat.com>
-rwxr-xr-x	api/examples/getvolfile.py	7
-rw-r--r--	events/tools/eventsdash.py	13
-rwxr-xr-x	extras/create_new_xlator/generate_xlator.py	58
-rwxr-xr-x	extras/failed-tests.py	25
-rwxr-xr-x	extras/git-branch-diff.py	47
-rw-r--r--	extras/gnfs-loganalyse.py	5
-rwxr-xr-x	extras/prot_filter.py	21
-rwxr-xr-x	extras/quota/xattr_analysis.py	9
-rwxr-xr-x	extras/rebalance.py	37
-rw-r--r--	extras/volfilter.py	15
-rw-r--r--	geo-replication/syncdaemon/__codecheck.py	3
-rw-r--r--	geo-replication/syncdaemon/argsupgrade.py	7
-rw-r--r--	geo-replication/syncdaemon/gsyncdstatus.py	5
-rw-r--r--	geo-replication/syncdaemon/subcmds.py	7
-rw-r--r--	libglusterfs/src/gen-defaults.py	17
-rwxr-xr-x	tests/bugs/distribute/overlap.py	3
-rwxr-xr-x	tests/bugs/nfs/socket-as-fifo.py	3
-rwxr-xr-x	tests/features/ipctest.py	5
-rwxr-xr-x	tests/utils/getfattr.py	19
-rwxr-xr-x	tests/utils/gfid-access.py	7
-rwxr-xr-x	tests/utils/pidof.py	11
-rwxr-xr-x	xlators/experimental/fdl/src/gen_dumper.py	11
-rwxr-xr-x	xlators/experimental/fdl/src/gen_fdl.py	27
-rwxr-xr-x	xlators/experimental/fdl/src/gen_recon.py	11
-rwxr-xr-x	xlators/experimental/jbr-client/src/gen-fops.py	13
-rwxr-xr-x	xlators/experimental/jbr-server/src/gen-fops.py	39
-rw-r--r--	xlators/features/changelog/lib/examples/python/changes.py	11
-rw-r--r--	xlators/features/cloudsync/src/cloudsync-fops-c.py	17
-rw-r--r--	xlators/features/cloudsync/src/cloudsync-fops-h.py	9
-rw-r--r--	xlators/features/glupy/examples/debug-trace.py	230
-rw-r--r--	xlators/features/glupy/examples/helloworld.py	6
-rw-r--r--	xlators/features/glupy/examples/negative.py	20
32 files changed, 377 insertions, 341 deletions
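
Note: every hunk below applies the same two-step conversion: add `from __future__ import print_function` near the top of the module, then rewrite print statements as function calls, with `print >> stream, ...` becoming the `file=` keyword argument. A minimal, self-contained sketch of the pattern (illustrative only, not taken from any file in this patch; the file name is made up):

    from __future__ import print_function

    import sys

    log = open("example.log", "w")   # hypothetical stream

    # Python 2 only:
    #   print "hello %s" % name
    #   print >> log, "message"
    #   print >> sys.stderr, "error"

    # Valid under Python 2 (with the __future__ import) and Python 3:
    name = "world"
    print("hello %s" % name)
    print("message", file=log)
    print("error", file=sys.stderr)

    log.close()
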
diff --git a/api/examples/getvolfile.py b/api/examples/getvolfile.py
index 961a89c9808..f1d5761d6b1 100755
--- a/api/examples/getvolfile.py
+++ b/api/examples/getvolfile.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import ctypes
import ctypes.util
@@ -35,10 +36,10 @@ if __name__ == "__main__":
try:
res = apply(get_volfile, sys.argv[1:3])
except:
- print "fetching volfile failed (volume not started?)"
+ print("fetching volfile failed (volume not started?)")
try:
for line in res.split('\n'):
- print line
+ print(line)
except:
- print "bad return value %s" % res
+ print("bad return value %s" % res)
diff --git a/events/tools/eventsdash.py b/events/tools/eventsdash.py
index dc03a61983f..4346c834ee4 100644
--- a/events/tools/eventsdash.py
+++ b/events/tools/eventsdash.py
@@ -10,6 +10,7 @@
# cases as published by the Free Software Foundation.
#
+from __future__ import print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import logging
from datetime import datetime
@@ -41,11 +42,11 @@ def listen():
for k, v in data.get("message", {}).items():
message.append("{0}={1}".format(k, v))
- print ("{0:20s} {1:20s} {2:36} {3}".format(
+ print(("{0:20s} {1:20s} {2:36} {3}".format(
human_time(data.get("ts")),
data.get("event"),
data.get("nodeid"),
- " ".join(message)))
+ " ".join(message))))
return "OK"
@@ -58,12 +59,12 @@ def main():
action="store_true")
args = parser.parse_args()
- print ("{0:20s} {1:20s} {2:36} {3}".format(
+ print(("{0:20s} {1:20s} {2:36} {3}".format(
"TIMESTAMP", "EVENT", "NODE ID", "MESSAGE"
- ))
- print ("{0:20s} {1:20s} {2:36} {3}".format(
+ )))
+ print(("{0:20s} {1:20s} {2:36} {3}".format(
"-"*20, "-"*20, "-"*36, "-"*20
- ))
+ )))
if args.debug:
app.debug = True
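
Note: because the original print statements already parenthesized their argument, the mechanical rewrite produces calls with redundant inner parentheses, e.g. `print(("...".format(...)))`. The two forms below behave identically under Python 2 (with the __future__ import) and Python 3; the extra parentheses are harmless and could be dropped in a later cleanup:

    from __future__ import print_function

    # As produced by this patch (redundant inner parentheses):
    print(("{0:20s} {1:20s}".format("TIMESTAMP", "EVENT")))

    # Equivalent, slightly cleaner form:
    print("{0:20s} {1:20s}".format("TIMESTAMP", "EVENT"))
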
diff --git a/extras/create_new_xlator/generate_xlator.py b/extras/create_new_xlator/generate_xlator.py
index c51c3a41a22..281afc2e0df 100755
--- a/extras/create_new_xlator/generate_xlator.py
+++ b/extras/create_new_xlator/generate_xlator.py
@@ -1,4 +1,6 @@
#!/usr/bin/python2
+
+from __future__ import print_function
import os
import re
import sys
@@ -74,42 +76,42 @@ def generate(tmpl, name, table):
def gen_xlator():
xl = open(src_dir_path+"/"+xl_name+".c", 'w+')
- print >> xl, COPYRIGHT
- print >> xl, fragments["INCLUDE_IN_SRC_FILE"].replace("@XL_NAME@",
- xl_name)
+ print(COPYRIGHT, file=xl)
+ print(fragments["INCLUDE_IN_SRC_FILE"].replace("@XL_NAME@",
+ xl_name), file=xl)
#Generate cbks and fops
for fop in ops:
- print >> xl, generate(fragments["CBK_TEMPLATE"], fop, ops)
- print >> xl, generate(fragments["FOP_TEMPLATE"], fop, ops)
+ print(generate(fragments["CBK_TEMPLATE"], fop, ops), file=xl)
+ print(generate(fragments["FOP_TEMPLATE"], fop, ops), file=xl)
for cbk in xlator_cbks:
- print >> xl, generate(fragments["FUNC_TEMPLATE"], cbk,
- xlator_cbks)
+ print(generate(fragments["FUNC_TEMPLATE"], cbk,
+ xlator_cbks), file=xl)
for dops in xlator_dumpops:
- print >> xl, generate(fragments["FUNC_TEMPLATE"], dops,
- xlator_dumpops)
+ print(generate(fragments["FUNC_TEMPLATE"], dops,
+ xlator_dumpops), file=xl)
- print >> xl, fragments["XLATOR_METHODS"]
+ print(fragments["XLATOR_METHODS"], file=xl)
#Generate fop table
- print >> xl, "struct xlator_fops fops = {"
+ print("struct xlator_fops fops = {", file=xl)
for fop in ops:
- print >> xl, " .{0:20} = {1}_{2},".format(fop, fop_prefix, fop)
- print >> xl, "};"
+ print(" .{0:20} = {1}_{2},".format(fop, fop_prefix, fop), file=xl)
+ print("};", file=xl)
#Generate xlator_cbks table
- print >> xl, "struct xlator_cbks cbks = {"
+ print("struct xlator_cbks cbks = {", file=xl)
for cbk in xlator_cbks:
- print >> xl, " .{0:20} = {1}_{2},".format(cbk, fop_prefix, cbk)
- print >> xl, "};"
+ print(" .{0:20} = {1}_{2},".format(cbk, fop_prefix, cbk), file=xl)
+ print("};", file=xl)
#Generate xlator_dumpops table
- print >> xl, "struct xlator_dumpops dumpops = {"
+ print("struct xlator_dumpops dumpops = {", file=xl)
for dops in xlator_dumpops:
- print >> xl, " .{0:20} = {1}_{2},".format(dops, fop_prefix, dops)
- print >> xl, "};"
+ print(" .{0:20} = {1}_{2},".format(dops, fop_prefix, dops), file=xl)
+ print("};", file=xl)
xl.close()
@@ -122,38 +124,38 @@ def create_dir_struct():
def gen_header_files():
upname = xl_name_no_hyphen.upper()
h = open(src_dir_path+"/"+xl_name+".h", 'w+')
- print >> h, COPYRIGHT
+ print(COPYRIGHT, file=h)
txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname)
txt2 = fragments["INCLUDE_IN_HEADER_FILE"].replace("@XL_NAME@", xl_name)
txt = txt.replace("@INCLUDE_SECT@",txt2)
- print >> h, txt
+ print(txt, file=h)
h.close()
h = open(src_dir_path+"/"+xl_name+"-mem-types.h", 'w+')
- print >> h, COPYRIGHT
+ print(COPYRIGHT, file=h)
txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname+"_MEM_TYPES")
txt = txt.replace("@INCLUDE_SECT@", '#include "mem-types.h"')
- print >> h, txt
+ print(txt, file=h)
h.close()
h = open(src_dir_path+"/"+xl_name+"-messages.h", 'w+')
- print >> h, COPYRIGHT
+ print(COPYRIGHT, file=h)
txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname+"_MESSAGES")
txt = txt.replace("@INCLUDE_SECT@", '')
- print >> h, txt
+ print(txt, file=h)
h.close()
def gen_makefiles():
m = open(dir_path+"/Makefile.am", 'w+')
- print >> m, "SUBDIRS = src\n\nCLEANFILES ="
+ print("SUBDIRS = src\n\nCLEANFILES =", file=m)
m.close()
m = open(src_dir_path+"/Makefile.am", 'w+')
txt = MAKEFILE_FMT.replace("@XL_NAME@", xl_name)
txt = txt.replace("@XL_NAME_NO_HYPHEN@", xl_name_no_hyphen)
txt = txt.replace("@XL_TYPE@",xlator_type)
- print >> m, txt
+ print(txt, file=m)
m.close()
def get_copyright ():
@@ -183,7 +185,7 @@ def load_fragments ():
if __name__ == '__main__':
if len(sys.argv) < 3:
- print "USAGE: ./gen_xlator <XLATOR_DIR> <XLATOR_NAME> <FOP_PREFIX>"
+ print("USAGE: ./gen_xlator <XLATOR_DIR> <XLATOR_NAME> <FOP_PREFIX>")
sys.exit(0)
xl_name = sys.argv[2]
diff --git a/extras/failed-tests.py b/extras/failed-tests.py
index 8391d229b45..1ef1a954f4f 100755
--- a/extras/failed-tests.py
+++ b/extras/failed-tests.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import blessings
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
@@ -25,7 +26,7 @@ def process_failure(url, node):
if t.find("Result: FAIL") != -1:
for t2 in accum:
if VERBOSE:
- print t2.encode('utf-8')
+ print(t2.encode('utf-8'))
if t2.find("Wstat") != -1:
test_case = re.search('\./tests/.*\.t', t2)
if test_case:
@@ -69,26 +70,26 @@ def print_summary(failed_builds, total_builds, html=False):
template = 0
if html:
template = 1
- print render(
+ print(render(
count[template],
{'failed': failed_builds, 'total': total_builds}
- )
+ ))
for k, v in summary.iteritems():
if k == 'core':
- print ''.join([TERM.red, "Found cores:", TERM.normal])
+ print(''.join([TERM.red, "Found cores:", TERM.normal]))
for comp, link in zip(v[::2], v[1::2]):
- print render(component[template], {'comp': comp})
- print render(
+ print(render(component[template], {'comp': comp}))
+ print(render(
regression_link[template],
{'link': link[0], 'node': link[1]}
- )
+ ))
else:
- print render(failure_count[template], {'test': k, 'count': len(v)})
+ print(render(failure_count[template], {'test': k, 'count': len(v)}))
for link in v:
- print render(
+ print(render(
regression_link[template],
{'link': link[0], 'node': link[1]}
- )
+ ))
def get_summary(cut_off_date, reg_link):
@@ -114,11 +115,11 @@ def get_summary(cut_off_date, reg_link):
success_count += 1
continue
if VERBOSE:
- print ''.join([
+ print(''.join([
TERM.red,
'FAILURE on {0}'.format(build['url']),
TERM.normal
- ])
+ ]))
url = ''.join([build['url'], 'consoleText'])
failure_count += 1
process_failure(url, build['builtOn'])
diff --git a/extras/git-branch-diff.py b/extras/git-branch-diff.py
index c9e9dd0da06..99cc707b837 100755
--- a/extras/git-branch-diff.py
+++ b/extras/git-branch-diff.py
@@ -75,6 +75,7 @@
Prasanna Kumar Kalever <prasanna.kalever@redhat.com>
"""
+from __future__ import print_function
import os
import sys
import argparse
@@ -118,11 +119,11 @@ class GitBranchDiff:
status_tbr, op = commands.getstatusoutput('git log ' +
self.t_pattern)
if status_sbr != 0:
- print "Error: --source=" + self.s_pattern + " doesn't exit\n"
+ print("Error: --source=" + self.s_pattern + " doesn't exit\n")
self.parser.print_help()
exit(status_sbr)
elif status_tbr != 0:
- print "Error: --target=" + self.t_pattern + " doesn't exit\n"
+ print("Error: --target=" + self.t_pattern + " doesn't exit\n")
self.parser.print_help()
exit(status_tbr)
@@ -137,8 +138,8 @@ class GitBranchDiff:
cmd4 = 'git log ' + self.s_pattern + ' --author=' + ide
c_list = subprocess.check_output(cmd4, shell = True)
if len(c_list) is 0:
- print "Error: --author=%s doesn't exit" %self.g_author
- print "see '%s --help'" %__file__
+ print("Error: --author=%s doesn't exit" %self.g_author)
+ print("see '%s --help'" %__file__)
exit(1)
if len(ide_list) > 1:
self.g_author = "\|".join(ide_list)
@@ -150,16 +151,16 @@ class GitBranchDiff:
return True
except requests.Timeout as err:
" request timed out"
- print "Warning: failed to get list of open review commits on " \
+ print("Warning: failed to get list of open review commits on " \
"gerrit.\n" \
"hint: Request timed out! gerrit server could possibly " \
- "slow ...\n"
+ "slow ...\n")
return False
except requests.RequestException as err:
" handle other errors"
- print "Warning: failed to get list of open review commits on " \
+ print("Warning: failed to get list of open review commits on " \
"gerrit\n" \
- "hint: check with internet connection ...\n"
+ "hint: check with internet connection ...\n")
return False
def parse_cmd_args (self):
@@ -212,18 +213,18 @@ class GitBranchDiff:
def print_output (self):
" display the result list"
- print "\n------------------------------------------------------------\n"
- print self.tick + " Successfully Backported changes:"
- print ' {' + 'from: ' + self.s_pattern + \
- ' to: '+ self.t_pattern + '}\n'
+ print("\n------------------------------------------------------------\n")
+ print(self.tick + " Successfully Backported changes:")
+ print(' {' + 'from: ' + self.s_pattern + \
+ ' to: '+ self.t_pattern + '}\n')
for key, value in self.s_dict.iteritems():
if value in self.t_dict.itervalues():
- print "[%s%s%s] %s" %(self.yello_set,
+ print("[%s%s%s] %s" %(self.yello_set,
key,
self.color_unset,
- value)
- print "\n------------------------------------------------------------\n"
- print self.cross + " Missing patches in " + self.t_pattern + ':\n'
+ value))
+ print("\n------------------------------------------------------------\n")
+ print(self.cross + " Missing patches in " + self.t_pattern + ':\n')
if self.connected_to_gerrit():
cmd3 = "git review -r origin -l"
review_list = subprocess.check_output(cmd3, shell = True).split('\n')
@@ -233,18 +234,18 @@ class GitBranchDiff:
for key, value in self.s_dict.iteritems():
if value not in self.t_dict.itervalues():
if any(value in s for s in review_list):
- print "[%s%s%s] %s %s(under review)%s" %(self.yello_set,
+ print("[%s%s%s] %s %s(under review)%s" %(self.yello_set,
key,
self.color_unset,
value,
self.green_set,
- self.color_unset)
+ self.color_unset))
else:
- print "[%s%s%s] %s" %(self.yello_set,
+ print("[%s%s%s] %s" %(self.yello_set,
key,
self.color_unset,
- value)
- print "\n------------------------------------------------------------\n"
+ value))
+ print("\n------------------------------------------------------------\n")
def main (self):
self.check_pattern_exist()
@@ -262,8 +263,8 @@ class GitBranchDiff:
t_list = subprocess.check_output(cmd2, shell = True)
if len(t_list) is 0:
- print "No commits in the target: %s" %self.t_pattern
- print "see '%s --help'" %__file__
+ print("No commits in the target: %s" %self.t_pattern)
+ print("see '%s --help'" %__file__)
exit()
else:
t_list = t_list.split('\n')
diff --git a/extras/gnfs-loganalyse.py b/extras/gnfs-loganalyse.py
index 71e79b6be4e..6341d007188 100644
--- a/extras/gnfs-loganalyse.py
+++ b/extras/gnfs-loganalyse.py
@@ -10,6 +10,7 @@
"""
+from __future__ import print_function
import os
import string
import sys
@@ -72,7 +73,7 @@ class NFSRequest:
self.replygfid = tokens [gfididx + 1].strip(",")
def dump (self):
- print "ReqLine: " + str(self.calllinecount) + " TimeStamp: " + self.timestamp + ", XID: " + self.xid + " " + self.op + " ARGS: " + self.opdata + " RepLine: " + str(self.replylinecount) + " " + self.replydata
+ print("ReqLine: " + str(self.calllinecount) + " TimeStamp: " + self.timestamp + ", XID: " + self.xid + " " + self.op + " ARGS: " + self.opdata + " RepLine: " + str(self.replylinecount) + " " + self.replydata)
class NFSLogAnalyzer:
@@ -149,7 +150,7 @@ class NFSLogAnalyzer:
return
rcount = len (self.xid_request_map.keys ())
orphancount = len (self.orphan_replies.keys ())
- print "Requests: " + str(rcount) + ", Orphans: " + str(orphancount)
+ print("Requests: " + str(rcount) + ", Orphans: " + str(orphancount))
def dump (self):
self.getStats ()
diff --git a/extras/prot_filter.py b/extras/prot_filter.py
index 290792697a2..0c48fd5b8e1 100755
--- a/extras/prot_filter.py
+++ b/extras/prot_filter.py
@@ -21,13 +21,14 @@
deliberate choice so that it will catch deletions from those sources as well.
"""
-volume_list = [ "jdtest" ]
-
+from __future__ import print_function
import copy
import string
import sys
import types
+volume_list = [ "jdtest" ]
+
class Translator:
def __init__ (self, name):
self.name = name
@@ -86,16 +87,16 @@ def generate (graph, last, stream=sys.stdout):
for sv in last.subvols:
if not sv.dumped:
generate(graph,sv,stream)
- print >> stream, ""
+ print("", file=stream)
sv.dumped = True
- print >> stream, "volume %s" % last.name
- print >> stream, " type %s" % last.xl_type
+ print("volume %s" % last.name, file=stream)
+ print(" type %s" % last.xl_type, file=stream)
for k, v in last.opts.iteritems():
- print >> stream, " option %s %s" % (k, v)
+ print(" option %s %s" % (k, v), file=stream)
if last.subvols:
- print >> stream, " subvolumes %s" % string.join(
- [ sv.name for sv in last.subvols ])
- print >> stream, "end-volume"
+ print(" subvolumes %s" % string.join(
+ [ sv.name for sv in last.subvols ]), file=stream)
+ print("end-volume", file=stream)
def push_filter (graph, old_xl, filt_type, opts={}):
new_type = "-" + filt_type.split("/")[1]
@@ -128,7 +129,7 @@ if __name__ == "__main__":
if graph.has_key(v):
break
else:
- print "No configured volumes found - aborting."
+ print("No configured volumes found - aborting.")
sys.exit(0)
for v in graph.values():
if v.xl_type == "cluster/distribute":
diff --git a/extras/quota/xattr_analysis.py b/extras/quota/xattr_analysis.py
index 512fcd39b88..9a178e058c2 100755
--- a/extras/quota/xattr_analysis.py
+++ b/extras/quota/xattr_analysis.py
@@ -7,6 +7,7 @@
# The script takes only one input LOG _FILE generated from the command,
# find <brick_path> | xargs getfattr -d -m. -e hex > log_gluster_xattr
+from __future__ import print_function
import re
import subprocess
import sys
@@ -28,14 +29,14 @@ def get_quota_xattr_brick():
for xattr in pairs:
k = xattr.split("=")[0]
if re.search("# file:",k):
- print xdict
+ print(xdict)
filename=k
- print "=====" + filename + "======="
+ print("=====" + filename + "=======")
xdict = {}
elif k is "":
pass
else:
- print xattr
+ print(xattr)
v = xattr.split("=")[1]
if re.search("contri",k):
if len(v) == 34:
@@ -64,7 +65,7 @@ def get_quota_xattr_brick():
mismatch_size.append((xdict['contri_size'], xdict['size'], filename))
for values in mismatch_size:
- print values
+ print(values)
if __name__ == '__main__':
diff --git a/extras/rebalance.py b/extras/rebalance.py
index b2ec6a52290..69ce282b39e 100755
--- a/extras/rebalance.py
+++ b/extras/rebalance.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import atexit
import copy
import optparse
@@ -86,7 +87,7 @@ def get_range (brick):
try:
value = f.readline().rstrip().split('=')[1][2:]
except:
- print "could not get layout for %s (might be OK)" % brick
+ print("could not get layout for %s (might be OK)" % brick)
return None
v_start = int("0x"+value[16:24],16)
v_end = int("0x"+value[24:32],16)
@@ -126,7 +127,7 @@ def normalize (in_bricks):
curr_hash = b.r_end + 1
break
else:
- print "gap found at 0x%08x" % curr_hash
+ print("gap found at 0x%08x" % curr_hash)
sys.exit(1)
return out_bricks + in_bricks, used
@@ -183,7 +184,7 @@ if __name__ == "__main__":
def cleanup_workdir ():
os.chdir(orig_dir)
if options.verbose:
- print "Cleaning up %s" % work_dir
+ print("Cleaning up %s" % work_dir)
for b in bricks:
subprocess.call(["umount",b.path])
shutil.rmtree(work_dir)
@@ -193,7 +194,7 @@ if __name__ == "__main__":
# Mount each brick individually, so we can issue brick-specific calls.
if options.verbose:
- print "Mounting subvolumes..."
+ print("Mounting subvolumes...")
index = 0
volfile_pipe = get_bricks(hostname,volname)
all_xlators, last_xlator = volfilter.load(volfile_pipe)
@@ -201,7 +202,7 @@ if __name__ == "__main__":
if dht_vol.type == "cluster/distribute":
break
else:
- print "no DHT volume found"
+ print("no DHT volume found")
sys.exit(1)
for sv in dht_vol.subvols:
#print "found subvol %s" % sv.name
@@ -210,12 +211,12 @@ if __name__ == "__main__":
mount_brick(lpath,all_xlators,sv)
bricks.append(Brick(lpath,sv.name))
if index == 0:
- print "no bricks"
+ print("no bricks")
sys.exit(1)
# Collect all of the sizes.
if options.verbose:
- print "Collecting information..."
+ print("Collecting information...")
total = 0
for b in bricks:
info = os.statvfs(b.path)
@@ -237,7 +238,7 @@ if __name__ == "__main__":
else:
size = info[2] / blocksper100mb
if size <= 0:
- print "brick %s has invalid size %d" % (b.path, size)
+ print("brick %s has invalid size %d" % (b.path, size))
sys.exit(1)
b.set_size(size)
total += size
@@ -248,12 +249,12 @@ if __name__ == "__main__":
if hash_range is not None:
rs, re = hash_range
if rs > re:
- print "%s has backwards hash range" % b.path
+ print("%s has backwards hash range" % b.path)
sys.exit(1)
b.set_range(hash_range[0],hash_range[1])
if options.verbose:
- print "Calculating new layouts..."
+ print("Calculating new layouts...")
calc_sizes(bricks,total)
bricks, used = normalize(bricks)
@@ -283,25 +284,25 @@ if __name__ == "__main__":
curr_hash += b.good_size
b.r_end = curr_hash - 1
- print "Here are the xattr values for your size-weighted layout:"
+ print("Here are the xattr values for your size-weighted layout:")
for b in bricks:
- print " %s: 0x0000000200000000%08x%08x" % (
- b.sv_name, b.r_start, b.r_end)
+ print(" %s: 0x0000000200000000%08x%08x" % (
+ b.sv_name, b.r_start, b.r_end))
if fix_dir:
if options.verbose:
- print "Fixing layout for %s" % fix_dir
+ print("Fixing layout for %s" % fix_dir)
for b in bricks:
value = "0x0000000200000000%08x%08x" % (
b.r_start, b.r_end)
path = "%s/%s" % (b.path, fix_dir)
cmd = "setfattr -n trusted.glusterfs.dht -v %s %s" % (
value, path)
- print cmd
+ print(cmd)
if options.leave_mounted:
- print "The following subvolumes are still mounted:"
+ print("The following subvolumes are still mounted:")
for b in bricks:
- print "%s on %s" % (b.sv_name, b.path)
- print "Don't forget to clean up when you're done."
+ print("%s on %s" % (b.sv_name, b.path))
+ print("Don't forget to clean up when you're done.")
diff --git a/extras/volfilter.py b/extras/volfilter.py
index 0ca456a7882..d242e60dcba 100644
--- a/extras/volfilter.py
+++ b/extras/volfilter.py
@@ -13,6 +13,7 @@
# You should have received a copy of the GNU General Public License * along
# with HekaFS. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
import copy
import string
import sys
@@ -127,16 +128,16 @@ def generate (graph, last, stream=sys.stdout):
for sv in last.subvols:
if not sv.dumped:
generate(graph,sv,stream)
- print >> stream, ""
+ print("", file=stream)
sv.dumped = True
- print >> stream, "volume %s" % last.name
- print >> stream, " type %s" % last.type
+ print("volume %s" % last.name, file=stream)
+ print(" type %s" % last.type, file=stream)
for k, v in last.opts.iteritems():
- print >> stream, " option %s %s" % (k, v)
+ print(" option %s %s" % (k, v), file=stream)
if last.subvols:
- print >> stream, " subvolumes %s" % string.join(
- [ sv.name for sv in last.subvols ])
- print >> stream, "end-volume"
+ print(" subvolumes %s" % string.join(
+ [ sv.name for sv in last.subvols ]), file=stream)
+ print("end-volume", file=stream)
def push_filter (graph, old_xl, filt_type, opts={}):
suffix = "-" + old_xl.type.split("/")[1]
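
Note: the generate() helpers above still use Python-2-only constructs that this patch leaves untouched, notably `dict.iteritems()` and `string.join()`. A minimal sketch of the Python-3-compatible equivalents (illustrative, not part of the patch; the sample data is made up):

    from __future__ import print_function

    opts = {"read-subvolume": "brick-0"}      # hypothetical option table
    subvols = ["brick-0", "brick-1"]          # hypothetical subvolume names

    # Python 2 only:
    #   for k, v in opts.iteritems(): ...
    #   print "  subvolumes %s" % string.join(subvols)

    # Works on Python 2 and 3:
    for k, v in opts.items():
        print("  option %s %s" % (k, v))
    print("  subvolumes %s" % " ".join(subvols))
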
diff --git a/geo-replication/syncdaemon/__codecheck.py b/geo-replication/syncdaemon/__codecheck.py
index 45dbd26bb64..9437147f7d9 100644
--- a/geo-replication/syncdaemon/__codecheck.py
+++ b/geo-replication/syncdaemon/__codecheck.py
@@ -8,6 +8,7 @@
# cases as published by the Free Software Foundation.
#
+from __future__ import print_function
import os
import os.path
import sys
@@ -45,7 +46,7 @@ class IPNetwork(list):
gsyncd = sys.modules['gsyncd']
for a in [['--help'], ['--version'],
['--canonicalize-escape-url', '/foo']]:
- print('>>> invoking program with args: %s' % ' '.join(a))
+ print(('>>> invoking program with args: %s' % ' '.join(a)))
pid = os.fork()
if not pid:
sys_argv_set(a)
diff --git a/geo-replication/syncdaemon/argsupgrade.py b/geo-replication/syncdaemon/argsupgrade.py
index 18edb6ba5b7..a97c748c40b 100644
--- a/geo-replication/syncdaemon/argsupgrade.py
+++ b/geo-replication/syncdaemon/argsupgrade.py
@@ -1,5 +1,6 @@
# Converts old style args into new style args
+from __future__ import print_function
import sys
from argparse import ArgumentParser
import socket
@@ -136,8 +137,8 @@ def upgrade():
user, hname = remote_addr.split("@")
- print("ssh://%s@%s:gluster://127.0.0.1:%s" % (
- user, gethostbyname(hname), vol))
+ print(("ssh://%s@%s:gluster://127.0.0.1:%s" % (
+ user, gethostbyname(hname), vol)))
sys.exit(0)
elif "--normalize-url" in sys.argv:
@@ -146,7 +147,7 @@ def upgrade():
p = ArgumentParser()
p.add_argument("--normalize-url")
pargs = p.parse_known_args(sys.argv[1:])[0]
- print("ssh://%s" % slave_url(pargs.normalize_url))
+ print(("ssh://%s" % slave_url(pargs.normalize_url)))
sys.exit(0)
elif "--config-get-all" in sys.argv:
# -c gsyncd.conf --iprefix=/var :gv1 f241::gv2 --config-get-all
diff --git a/geo-replication/syncdaemon/gsyncdstatus.py b/geo-replication/syncdaemon/gsyncdstatus.py
index b1fed727cbe..02584b311d6 100644
--- a/geo-replication/syncdaemon/gsyncdstatus.py
+++ b/geo-replication/syncdaemon/gsyncdstatus.py
@@ -9,6 +9,7 @@
# cases as published by the Free Software Foundation.
#
+from __future__ import print_function
import fcntl
import os
import tempfile
@@ -406,8 +407,8 @@ class GeorepStatus(object):
# Convert all values as string
for k, v in status_out.items():
out[k] = str(v)
- print json.dumps(out)
+ print(json.dumps(out))
return
for key, value in status_out.items():
- print ("%s: %s" % (key, value))
+ print(("%s: %s" % (key, value)))
diff --git a/geo-replication/syncdaemon/subcmds.py b/geo-replication/syncdaemon/subcmds.py
index 1b306ad5b62..b9e02855392 100644
--- a/geo-replication/syncdaemon/subcmds.py
+++ b/geo-replication/syncdaemon/subcmds.py
@@ -1,6 +1,7 @@
-import logging
+from __future__ import print_function
from syncdutils import lf
+import logging
import gsyncdconfig as gconf
@@ -224,7 +225,7 @@ def print_config(name, value, only_value=False, use_underscore=False):
if use_underscore:
name = name.replace("-", "_")
- print("%s:%s" % (name, val))
+ print(("%s:%s" % (name, val)))
def config_name_format(val):
@@ -260,7 +261,7 @@ def subcmd_config_get(args):
"modified": v["modified"]
})
- print(json.dumps(out))
+ print((json.dumps(out)))
return
for k in sorted(all_config):
diff --git a/libglusterfs/src/gen-defaults.py b/libglusterfs/src/gen-defaults.py
index b94fbe1444e..e577eb71863 100644
--- a/libglusterfs/src/gen-defaults.py
+++ b/libglusterfs/src/gen-defaults.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import sys
from generator import ops, fop_subs, cbk_subs, generate
@@ -61,20 +62,20 @@ default_@NAME@ (
def gen_defaults ():
for name in ops.iterkeys():
- print generate(FAILURE_CBK_TEMPLATE,name,cbk_subs)
+ print(generate(FAILURE_CBK_TEMPLATE,name,cbk_subs))
for name in ops.iterkeys():
- print generate(CBK_RESUME_TEMPLATE,name,cbk_subs)
+ print(generate(CBK_RESUME_TEMPLATE,name,cbk_subs))
for name in ops.iterkeys():
- print generate(CBK_TEMPLATE,name,cbk_subs)
+ print(generate(CBK_TEMPLATE,name,cbk_subs))
for name in ops.iterkeys():
- print generate(RESUME_TEMPLATE,name,fop_subs)
+ print(generate(RESUME_TEMPLATE,name,fop_subs))
for name in ops.iterkeys():
- print generate(FOP_TEMPLATE,name,fop_subs)
+ print(generate(FOP_TEMPLATE,name,fop_subs))
for l in open(sys.argv[1],'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_defaults()
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/tests/bugs/distribute/overlap.py b/tests/bugs/distribute/overlap.py
index 253bb4052ec..105aa6792cf 100755
--- a/tests/bugs/distribute/overlap.py
+++ b/tests/bugs/distribute/overlap.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import sys
def calculate_one (ov, nv):
@@ -56,4 +57,4 @@ print '= %08x' % calculate_all(test2_vals)
if __name__ == '__main__':
# Return decimal so bash can reason about it.
- print '%d' % calculate_all(sys.argv[1:])
+ print('%d' % calculate_all(sys.argv[1:]))
diff --git a/tests/bugs/nfs/socket-as-fifo.py b/tests/bugs/nfs/socket-as-fifo.py
index da9886ea364..796b8ef671b 100755
--- a/tests/bugs/nfs/socket-as-fifo.py
+++ b/tests/bugs/nfs/socket-as-fifo.py
@@ -5,6 +5,7 @@
# Author: Niels de Vos <ndevos@redhat.com>
#
+from __future__ import print_function
import os
import stat
import sys
@@ -13,7 +14,7 @@ import socket
ret = 1
if len(sys.argv) != 2:
- print 'Usage: %s <socket>' % (sys.argv[0])
+ print('Usage: %s <socket>' % (sys.argv[0]))
sys.exit(ret)
path = sys.argv[1]
diff --git a/tests/features/ipctest.py b/tests/features/ipctest.py
index e89642f589b..a8de3936b27 100755
--- a/tests/features/ipctest.py
+++ b/tests/features/ipctest.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import ctypes
api = ctypes.CDLL("libgfapi.so",mode=ctypes.RTLD_GLOBAL)
@@ -23,6 +24,6 @@ if __name__ == "__main__":
try:
res = apply(do_ipc,sys.argv[1:3])
- print res
+ print(res)
except:
- print "IPC failed (volume not started?)"
+ print("IPC failed (volume not started?)")
diff --git a/tests/utils/getfattr.py b/tests/utils/getfattr.py
index 6636644f67c..7d1f7368e3e 100755
--- a/tests/utils/getfattr.py
+++ b/tests/utils/getfattr.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import sys
from optparse import OptionParser
@@ -32,22 +33,22 @@ def getfattr(path, option):
def print_getfattr (path, option, encoded_attr=None):
if encoded_attr:
if option.encoding == "hex":
- print ("%s=0x%s" % (option.name, encoded_attr))
+ print(("%s=0x%s" % (option.name, encoded_attr)))
elif option.encoding == "base64":
- print ("%s=0s%s" % (option.name, encoded_attr))
+ print(("%s=0s%s" % (option.name, encoded_attr)))
else:
- print ("%s=\"%s\"" % (option.name, encoded_attr))
+ print(("%s=\"%s\"" % (option.name, encoded_attr)))
else:
- print option.name
+ print(option.name)
return
def print_header (path, absnames):
if absnames:
- print ("# file: %s" % path)
+ print(("# file: %s" % path))
else:
print ("getfattr: Removing leading '/' from absolute path names")
- print ("# file: %s" % path[1:])
+ print(("# file: %s" % path[1:]))
if __name__ == '__main__':
usage = "usage: %prog [-n name|-d] [-e en] [-m pattern] path...."
@@ -99,8 +100,8 @@ if __name__ == '__main__':
if (not (option.encoding.strip() == "hex" or
option.encoding.strip() == "base64" or
option.encoding.strip() == "text")):
- print ("unrecognized encoding parameter... %s, please use"
- " `text`, `base64` or `hex`" % option.encoding)
+ print(("unrecognized encoding parameter... %s, please use"
+ " `text`, `base64` or `hex`" % option.encoding))
sys.exit(1)
args[0] = os.path.abspath(args[0])
@@ -110,7 +111,7 @@ if __name__ == '__main__':
try:
getfattr(args[0], option)
except KeyError as err:
- print ("Invalid key %s" % err)
+ print(("Invalid key %s" % err))
sys.exit(1)
except IOError as err:
print (err)
diff --git a/tests/utils/gfid-access.py b/tests/utils/gfid-access.py
index 4ca1665fec4..2a58bfebc4e 100755
--- a/tests/utils/gfid-access.py
+++ b/tests/utils/gfid-access.py
@@ -9,6 +9,7 @@
# cases as published by the Free Software Foundation.
#
+from __future__ import print_function
import os
import sys
import stat
@@ -53,8 +54,8 @@ def entry_pack_symlink(gf, bn, lnk, mo, uid, gid):
if __name__ == '__main__':
if len(sys.argv) < 9:
- print("USAGE: %s <mount> <pargfid|ROOT> <filename> <GFID> <file type>"
- " <uid> <gid> <file permission(octal str)>" % (sys.argv[0]))
+ print(("USAGE: %s <mount> <pargfid|ROOT> <filename> <GFID> <file type>"
+ " <uid> <gid> <file permission(octal str)>" % (sys.argv[0])))
sys.exit(-1) # nothing to do
mtpt = sys.argv[1]
pargfid = sys.argv[2]
@@ -92,5 +93,5 @@ if __name__ == '__main__':
if not ex.errno in [EEXIST]:
raise
sys.exit(-1)
- print "File creation OK"
+ print("File creation OK")
sys.exit(0)
diff --git a/tests/utils/pidof.py b/tests/utils/pidof.py
index 01f2f0ad5bd..9f4a6a48a39 100755
--- a/tests/utils/pidof.py
+++ b/tests/utils/pidof.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import sys
try:
@@ -21,14 +22,14 @@ def pidof(processname):
continue
if "gluster" in processname:
if processname == "glusterd" and pmap_find(p, "glusterd"):
- print (p.pid)
+ print((p.pid))
if processname == "glusterfs" and pmap_find(p, "client"):
- print (p.pid)
+ print((p.pid))
if processname == "glusterfsd" and pmap_find(p, "posix-acl"):
- print (p.pid)
+ print((p.pid))
continue
if processname.strip() == p.name():
- print (p.pid)
+ print((p.pid))
def main(argv):
if len(argv) < 2:
@@ -37,7 +38,7 @@ def main(argv):
try:
pidof(argv[1])
except Exception as err:
- print err
+ print(err)
sys.stderr.write("Please be root - %s\n" % err);
sys.exit(1)
diff --git a/xlators/experimental/fdl/src/gen_dumper.py b/xlators/experimental/fdl/src/gen_dumper.py
index 2950be44e01..363ba6ce63f 100755
--- a/xlators/experimental/fdl/src/gen_dumper.py
+++ b/xlators/experimental/fdl/src/gen_dumper.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import re
import sys
@@ -109,8 +110,8 @@ def load_fragments (path="recon-tmpl.c"):
if __name__ == "__main__":
fragments = load_fragments(sys.argv[1])
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
- print fragments["PROLOG"]
- print gen_functions()
- print fragments["EPILOG"].replace("@SWITCH_BODY@",gen_cases())
- print "/* END GENERATED CODE */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
+ print(fragments["PROLOG"])
+ print(gen_functions())
+ print(fragments["EPILOG"].replace("@SWITCH_BODY@",gen_cases()))
+ print("/* END GENERATED CODE */")
diff --git a/xlators/experimental/fdl/src/gen_fdl.py b/xlators/experimental/fdl/src/gen_fdl.py
index 136aa112861..d2e7dd5dfb2 100755
--- a/xlators/experimental/fdl/src/gen_fdl.py
+++ b/xlators/experimental/fdl/src/gen_fdl.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import sys
@@ -325,29 +326,29 @@ def gen_fdl ():
fop_subs[name]["@LEN_CODE@"] = len_code[:-1]
fop_subs[name]["@SER_CODE@"] = ser_code[:-1]
if 'len' in gen_funcs:
- print generate(LEN_TEMPLATE,name,fop_subs)
+ print(generate(LEN_TEMPLATE,name,fop_subs))
if 'serialize' in gen_funcs:
- print generate(SER_TEMPLATE,name,fop_subs)
+ print(generate(SER_TEMPLATE,name,fop_subs))
if name == 'writev':
- print "#define DESTAGE_ASYNC"
+ print("#define DESTAGE_ASYNC")
if 'callback' in gen_funcs:
- print generate(CBK_TEMPLATE,name,cbk_subs)
+ print(generate(CBK_TEMPLATE,name,cbk_subs))
if 'continue' in gen_funcs:
- print generate(CONTINUE_TEMPLATE,name,fop_subs)
+ print(generate(CONTINUE_TEMPLATE,name,fop_subs))
if 'fop' in gen_funcs:
- print generate(FOP_TEMPLATE,name,fop_subs)
+ print(generate(FOP_TEMPLATE,name,fop_subs))
if name == 'writev':
- print "#undef DESTAGE_ASYNC"
+ print("#undef DESTAGE_ASYNC")
entrypoints.append(name)
- print "struct xlator_fops fops = {"
+ print("struct xlator_fops fops = {")
for ep in entrypoints:
- print "\t.%s = fdl_%s," % (ep, ep)
- print "};"
+ print("\t.%s = fdl_%s," % (ep, ep))
+ print("};")
for l in open(sys.argv[1],'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_fdl()
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/xlators/experimental/fdl/src/gen_recon.py b/xlators/experimental/fdl/src/gen_recon.py
index af1765517f3..75323ea3dd9 100755
--- a/xlators/experimental/fdl/src/gen_recon.py
+++ b/xlators/experimental/fdl/src/gen_recon.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import re
import string
@@ -210,8 +211,8 @@ def load_fragments (path="recon-tmpl.c"):
if __name__ == "__main__":
fragments = load_fragments(sys.argv[1])
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
- print fragments["PROLOG"]
- print gen_functions()
- print fragments["EPILOG"].replace("@SWITCH_BODY@",gen_cases())
- print "/* END GENERATED CODE */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
+ print(fragments["PROLOG"])
+ print(gen_functions())
+ print(fragments["EPILOG"].replace("@SWITCH_BODY@",gen_cases()))
+ print("/* END GENERATED CODE */")
diff --git a/xlators/experimental/jbr-client/src/gen-fops.py b/xlators/experimental/jbr-client/src/gen-fops.py
index 64bd8b06cae..9893e0c5968 100755
--- a/xlators/experimental/jbr-client/src/gen-fops.py
+++ b/xlators/experimental/jbr-client/src/gen-fops.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import re
import string
@@ -43,15 +44,15 @@ def gen_client (templates):
if name == 'getspec':
# It's not real if it doesn't have a stub function.
continue
- print generate(templates['cbk'],name,cbk_subs)
- print generate(templates['cont-func'],name,fop_subs)
- print generate(templates['fop'],name,fop_subs)
+ print(generate(templates['cbk'],name,cbk_subs))
+ print(generate(templates['cont-func'],name,fop_subs))
+ print(generate(templates['fop'],name,fop_subs))
tmpl = load_templates(sys.argv[1])
for l in open(sys.argv[2],'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_client(tmpl)
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/xlators/experimental/jbr-server/src/gen-fops.py b/xlators/experimental/jbr-server/src/gen-fops.py
index 2005e3932d4..e728f473372 100755
--- a/xlators/experimental/jbr-server/src/gen-fops.py
+++ b/xlators/experimental/jbr-server/src/gen-fops.py
@@ -5,6 +5,7 @@
# between leader and followers (including fan-out), and basic error checking
# to be centralized one place, with per-operation code kept to a minimum.
+from __future__ import print_function
import os
import re
import string
@@ -132,49 +133,49 @@ def gen_server (templates):
if ("fsync" in flags) or ("queue" in flags):
flags.append("need_fd")
for fname in flags:
- print "#define JBR_CG_%s" % fname.upper()
+ print("#define JBR_CG_%s" % fname.upper())
if 'complete' in gen_funcs:
- print generate(templates[kind+"-complete"],
- name,cbk_subs)
+ print(generate(templates[kind+"-complete"],
+ name,cbk_subs))
if 'continue' in gen_funcs:
- print generate(templates[kind+"-continue"],
- name,fop_subs)
+ print(generate(templates[kind+"-continue"],
+ name,fop_subs))
if 'fan-in' in gen_funcs:
- print generate(templates[kind+"-fan-in"],
- name,cbk_subs)
+ print(generate(templates[kind+"-fan-in"],
+ name,cbk_subs))
if 'dispatch' in gen_funcs:
- print generate(templates[kind+"-dispatch"],
- name,fop_subs)
+ print(generate(templates[kind+"-dispatch"],
+ name,fop_subs))
if 'call_dispatch' in gen_funcs:
- print generate(templates[kind+"-call_dispatch"],
- name,fop_subs)
+ print(generate(templates[kind+"-call_dispatch"],
+ name,fop_subs))
if 'perform_local_op' in gen_funcs:
- print generate(templates[kind+"-perform_local_op"],
- name, fop_subs)
+ print(generate(templates[kind+"-perform_local_op"],
+ name, fop_subs))
if 'fop' in gen_funcs:
- print generate(templates[kind+"-fop"],name,fop_subs)
+ print(generate(templates[kind+"-fop"],name,fop_subs))
for fname in flags:
- print "#undef JBR_CG_%s" % fname.upper()
+ print("#undef JBR_CG_%s" % fname.upper())
fops_done.append(name)
# Just for fun, emit the fops table too.
print("struct xlator_fops fops = {")
for x in fops_done:
- print(" .%s = jbr_%s,"%(x,x))
+ print((" .%s = jbr_%s,"%(x,x)))
print("};")
tmpl = load_templates(sys.argv[1])
for l in open(sys.argv[2],'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_server(tmpl)
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/xlators/features/changelog/lib/examples/python/changes.py b/xlators/features/changelog/lib/examples/python/changes.py
index c0341b52ca3..077c1108cce 100644
--- a/xlators/features/changelog/lib/examples/python/changes.py
+++ b/xlators/features/changelog/lib/examples/python/changes.py
@@ -1,5 +1,6 @@
#!/usr/bin/python2
+from __future__ import print_function
import os
import sys
import time
@@ -16,18 +17,18 @@ def get_changes(brick, scratch_dir, log_file, log_level, interval):
cl.cl_scan()
change_list = cl.cl_getchanges()
if change_list:
- print change_list
+ print(change_list)
for change in change_list:
- print('done with %s' % (change))
+ print(('done with %s' % (change)))
cl.cl_done(change)
time.sleep(interval)
except OSError:
ex = sys.exc_info()[1]
- print ex
+ print(ex)
if __name__ == '__main__':
if len(sys.argv) != 6:
- print("usage: %s <brick> <scratch-dir> <log-file> <fetch-interval>"
- % (sys.argv[0]))
+ print(("usage: %s <brick> <scratch-dir> <log-file> <fetch-interval>"
+ % (sys.argv[0])))
sys.exit(1)
get_changes(sys.argv[1], sys.argv[2], sys.argv[3], 9, int(sys.argv[4]))
diff --git a/xlators/features/cloudsync/src/cloudsync-fops-c.py b/xlators/features/cloudsync/src/cloudsync-fops-c.py
index e3030724468..fda56752871 100644
--- a/xlators/features/cloudsync/src/cloudsync-fops-c.py
+++ b/xlators/features/cloudsync/src/cloudsync-fops-c.py
@@ -1,5 +1,6 @@
#!/usr/bin/python
+from __future__ import print_function
import os
import sys
@@ -289,17 +290,17 @@ special_fops = ['readdirp', 'statfs', 'setxattr', 'unlink', 'getxattr',
def gen_defaults():
for name in ops:
if name in fd_data_modify_op_fop_template:
- print generate(FD_DATA_MODIFYING_OP_FOP_CBK_TEMPLATE, name, cbk_subs)
- print generate(FD_DATA_MODIFYING_RESUME_OP_FOP_TEMPLATE, name, fop_subs)
- print generate(FD_DATA_MODIFYING_OP_FOP_TEMPLATE, name, fop_subs)
+ print(generate(FD_DATA_MODIFYING_OP_FOP_CBK_TEMPLATE, name, cbk_subs))
+ print(generate(FD_DATA_MODIFYING_RESUME_OP_FOP_TEMPLATE, name, fop_subs))
+ print(generate(FD_DATA_MODIFYING_OP_FOP_TEMPLATE, name, fop_subs))
elif name in loc_stat_op_fop_template:
- print generate(LOC_STAT_OP_FOP_CBK_TEMPLATE, name, cbk_subs)
- print generate(LOC_STAT_OP_FOP_TEMPLATE, name, fop_subs)
+ print(generate(LOC_STAT_OP_FOP_CBK_TEMPLATE, name, cbk_subs))
+ print(generate(LOC_STAT_OP_FOP_TEMPLATE, name, fop_subs))
for l in open(sys.argv[1], 'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_defaults()
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/xlators/features/cloudsync/src/cloudsync-fops-h.py b/xlators/features/cloudsync/src/cloudsync-fops-h.py
index 552c6b58e3a..37dfa5b10be 100644
--- a/xlators/features/cloudsync/src/cloudsync-fops-h.py
+++ b/xlators/features/cloudsync/src/cloudsync-fops-h.py
@@ -1,5 +1,6 @@
#!/usr/bin/python
+from __future__ import print_function
import os
import sys
@@ -18,13 +19,13 @@ def gen_defaults():
for name, value in ops.iteritems():
if name == 'getspec':
continue
- print generate(OP_FOP_TEMPLATE, name, fop_subs)
+ print(generate(OP_FOP_TEMPLATE, name, fop_subs))
for l in open(sys.argv[1], 'r').readlines():
if l.find('#pragma generate') != -1:
- print "/* BEGIN GENERATED CODE - DO NOT MODIFY */"
+ print("/* BEGIN GENERATED CODE - DO NOT MODIFY */")
gen_defaults()
- print "/* END GENERATED CODE */"
+ print("/* END GENERATED CODE */")
else:
- print l[:-1]
+ print(l[:-1])
diff --git a/xlators/features/glupy/examples/debug-trace.py b/xlators/features/glupy/examples/debug-trace.py
index 6eef1b58b8f..ca726ed21cb 100644
--- a/xlators/features/glupy/examples/debug-trace.py
+++ b/xlators/features/glupy/examples/debug-trace.py
@@ -1,3 +1,5 @@
+
+from __future__ import print_function
import sys
import stat
from uuid import UUID
@@ -106,8 +108,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.gfid)
- print("GLUPY TRACE LOOKUP FOP- {0:d}: gfid={1:s}; " +
- "path={2:s}").format(unique, gfid, loc.contents.path)
+ print(("GLUPY TRACE LOOKUP FOP- {0:d}: gfid={1:s}; " +
+ "path={2:s}").format(unique, gfid, loc.contents.path))
self.gfids[key] = gfid
dl.wind_lookup(frame, POINTER(xlator_t)(), loc, xdata)
return 0
@@ -120,18 +122,18 @@ class xlator(Translator):
gfid = uuid2str(buf.contents.ia_gfid)
statstr = trace_stat2str(buf)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE LOOKUP CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE LOOKUP CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; *buf={3:s}; " +
"*postparent={4:s}").format(unique, gfid,
op_ret, statstr,
- postparentstr)
+ postparentstr))
else:
gfid = self.gfids[key]
- print("GLUPY TRACE LOOKUP CBK - {0:d}: gfid={1:s};" +
+ print(("GLUPY TRACE LOOKUP CBK - {0:d}: gfid={1:s};" +
" op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_lookup(frame, cookie, this, op_ret, op_errno,
inode, buf, xdata, postparent)
@@ -141,10 +143,10 @@ class xlator(Translator):
xdata):
unique = dl.get_rootunique(frame)
gfid = uuid2str(loc.contents.gfid)
- print("GLUPY TRACE CREATE FOP- {0:d}: gfid={1:s}; path={2:s}; " +
+ print(("GLUPY TRACE CREATE FOP- {0:d}: gfid={1:s}; path={2:s}; " +
"fd={3:s}; flags=0{4:o}; mode=0{5:o}; " +
"umask=0{6:o}").format(unique, gfid, loc.contents.path,
- fd, flags, mode, umask)
+ fd, flags, mode, umask))
dl.wind_create(frame, POINTER(xlator_t)(), loc, flags,mode,
umask, fd, xdata)
return 0
@@ -157,16 +159,16 @@ class xlator(Translator):
statstr = trace_stat2str(buf)
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE CREATE CBK- {0:d}: gfid={1:s};" +
+ print(("GLUPY TRACE CREATE CBK- {0:d}: gfid={1:s};" +
" op_ret={2:d}; fd={3:s}; *stbuf={4:s}; " +
"*preparent={5:s};" +
" *postparent={6:s}").format(unique, gfid, op_ret,
fd, statstr,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print ("GLUPY TRACE CREATE CBK- {0:d}: op_ret={1:d}; " +
- "op_errno={2:d}").format(unique, op_ret, op_errno)
+ print(("GLUPY TRACE CREATE CBK- {0:d}: op_ret={1:d}; " +
+ "op_errno={2:d}").format(unique, op_ret, op_errno))
dl.unwind_create(frame, cookie, this, op_ret, op_errno, fd,
inode, buf, preparent, postparent, xdata)
return 0
@@ -175,10 +177,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE OPEN FOP- {0:d}: gfid={1:s}; path={2:s}; "+
+ print(("GLUPY TRACE OPEN FOP- {0:d}: gfid={1:s}; path={2:s}; "+
"flags={3:d}; fd={4:s}").format(unique, gfid,
loc.contents.path, flags,
- fd)
+ fd))
self.gfids[key] = gfid
dl.wind_open(frame, POINTER(xlator_t)(), loc, flags, fd, xdata)
return 0
@@ -187,9 +189,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE OPEN CBK- {0:d}: gfid={1:s}; op_ret={2:d}; "
+ print(("GLUPY TRACE OPEN CBK- {0:d}: gfid={1:s}; op_ret={2:d}; "
"op_errno={3:d}; *fd={4:s}").format(unique, gfid,
- op_ret, op_errno, fd)
+ op_ret, op_errno, fd))
del self.gfids[key]
dl.unwind_open(frame, cookie, this, op_ret, op_errno, fd,
xdata)
@@ -199,10 +201,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE READV FOP- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE READV FOP- {0:d}: gfid={1:s}; "+
"fd={2:s}; size ={3:d}; offset={4:d}; " +
"flags=0{5:x}").format(unique, gfid, fd, size, offset,
- flags)
+ flags))
self.gfids[key] = gfid
dl.wind_readv (frame, POINTER(xlator_t)(), fd, size, offset,
flags, xdata)
@@ -215,17 +217,17 @@ class xlator(Translator):
gfid = self.gfids[key]
if op_ret >= 0:
statstr = trace_stat2str(buf)
- print("GLUPY TRACE READV CBK- {0:d}: gfid={1:s}, "+
+ print(("GLUPY TRACE READV CBK- {0:d}: gfid={1:s}, "+
"op_ret={2:d}; *buf={3:s};").format(unique, gfid,
op_ret,
- statstr)
+ statstr))
else:
- print("GLUPY TRACE READV CBK- {0:d}: gfid={1:s}, "+
+ print(("GLUPY TRACE READV CBK- {0:d}: gfid={1:s}, "+
"op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_readv (frame, cookie, this, op_ret, op_errno,
vector, count, buf, iobref, xdata)
@@ -236,10 +238,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE WRITEV FOP- {0:d}: gfid={1:s}; " +
+ print(("GLUPY TRACE WRITEV FOP- {0:d}: gfid={1:s}; " +
"fd={2:s}; count={3:d}; offset={4:d}; " +
"flags=0{5:x}").format(unique, gfid, fd, count, offset,
- flags)
+ flags))
self.gfids[key] = gfid
dl.wind_writev(frame, POINTER(xlator_t)(), fd, vector, count,
offset, flags, iobref, xdata)
@@ -252,17 +254,17 @@ class xlator(Translator):
if op_ret >= 0:
preopstr = trace_stat2str(prebuf)
postopstr = trace_stat2str(postbuf)
- print("GLUPY TRACE WRITEV CBK- {0:d}: op_ret={1:d}; " +
+ print(("GLUPY TRACE WRITEV CBK- {0:d}: op_ret={1:d}; " +
"*prebuf={2:s}; " +
"*postbuf={3:s}").format(unique, op_ret, preopstr,
- postopstr)
+ postopstr))
else:
gfid = self.gfids[key]
- print("GLUPY TRACE WRITEV CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE WRITEV CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_writev (frame, cookie, this, op_ret, op_errno,
prebuf, postbuf, xdata)
@@ -272,8 +274,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE OPENDIR FOP- {0:d}: gfid={1:s}; path={2:s}; "+
- "fd={3:s}").format(unique, gfid, loc.contents.path, fd)
+ print(("GLUPY TRACE OPENDIR FOP- {0:d}: gfid={1:s}; path={2:s}; "+
+ "fd={3:s}").format(unique, gfid, loc.contents.path, fd))
self.gfids[key] = gfid
dl.wind_opendir(frame, POINTER(xlator_t)(), loc, fd, xdata)
return 0
@@ -283,9 +285,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE OPENDIR CBK- {0:d}: gfid={1:s}; op_ret={2:d};"+
+ print(("GLUPY TRACE OPENDIR CBK- {0:d}: gfid={1:s}; op_ret={2:d};"+
" op_errno={3:d}; fd={4:s}").format(unique, gfid, op_ret,
- op_errno, fd)
+ op_errno, fd))
del self.gfids[key]
dl.unwind_opendir(frame, cookie, this, op_ret, op_errno,
fd, xdata)
@@ -295,9 +297,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE READDIR FOP- {0:d}: gfid={1:s}; fd={2:s}; " +
+ print(("GLUPY TRACE READDIR FOP- {0:d}: gfid={1:s}; fd={2:s}; " +
"size={3:d}; offset={4:d}").format(unique, gfid, fd, size,
- offset)
+ offset))
self.gfids[key] = gfid
dl.wind_readdir(frame, POINTER(xlator_t)(), fd, size, offset,
xdata)
@@ -308,8 +310,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE READDIR CBK- {0:d}: gfid={1:s}; op_ret={2:d};"+
- " op_errno={3:d}").format(unique, gfid, op_ret, op_errno)
+ print(("GLUPY TRACE READDIR CBK- {0:d}: gfid={1:s}; op_ret={2:d};"+
+ " op_errno={3:d}").format(unique, gfid, op_ret, op_errno))
del self.gfids[key]
dl.unwind_readdir(frame, cookie, this, op_ret, op_errno, buf,
xdata)
@@ -319,9 +321,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE READDIRP FOP- {0:d}: gfid={1:s}; fd={2:s}; "+
+ print(("GLUPY TRACE READDIRP FOP- {0:d}: gfid={1:s}; fd={2:s}; "+
" size={3:d}; offset={4:d}").format(unique, gfid, fd, size,
- offset)
+ offset))
self.gfids[key] = gfid
dl.wind_readdirp(frame, POINTER(xlator_t)(), fd, size, offset,
dictionary)
@@ -332,9 +334,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE READDIRP CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE READDIRP CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique, gfid,
- op_ret, op_errno)
+ op_ret, op_errno))
del self.gfids[key]
dl.unwind_readdirp(frame, cookie, this, op_ret, op_errno, buf,
xdata)
@@ -343,10 +345,10 @@ class xlator(Translator):
def mkdir_fop(self, frame, this, loc, mode, umask, xdata):
unique = dl.get_rootunique(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE MKDIR FOP- {0:d}: gfid={1:s}; path={2:s}; " +
+ print(("GLUPY TRACE MKDIR FOP- {0:d}: gfid={1:s}; path={2:s}; " +
"mode={3:d}; umask=0{4:o}").format(unique, gfid,
loc.contents.path, mode,
- umask)
+ umask))
dl.wind_mkdir(frame, POINTER(xlator_t)(), loc, mode, umask,
xdata)
return 0
@@ -359,15 +361,15 @@ class xlator(Translator):
statstr = trace_stat2str(buf)
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE MKDIR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE MKDIR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; *stbuf={3:s}; *prebuf={4:s}; "+
"*postbuf={5:s} ").format(unique, gfid, op_ret,
statstr,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print("GLUPY TRACE MKDIR CBK- {0:d}: op_ret={1:d}; "+
- "op_errno={2:d}").format(unique, op_ret, op_errno)
+ print(("GLUPY TRACE MKDIR CBK- {0:d}: op_ret={1:d}; "+
+ "op_errno={2:d}").format(unique, op_ret, op_errno))
dl.unwind_mkdir(frame, cookie, this, op_ret, op_errno, inode,
buf, preparent, postparent, xdata)
return 0
@@ -376,9 +378,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE RMDIR FOP- {0:d}: gfid={1:s}; path={2:s}; "+
+ print(("GLUPY TRACE RMDIR FOP- {0:d}: gfid={1:s}; path={2:s}; "+
"flags={3:d}").format(unique, gfid, loc.contents.path,
- flags)
+ flags))
self.gfids[key] = gfid
dl.wind_rmdir(frame, POINTER(xlator_t)(), loc, flags, xdata)
return 0
@@ -391,17 +393,17 @@ class xlator(Translator):
if op_ret == 0:
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE RMDIR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE RMDIR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; *prebuf={3:s}; "+
"*postbuf={4:s}").format(unique, gfid, op_ret,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print("GLUPY TRACE RMDIR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE RMDIR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_rmdir(frame, cookie, this, op_ret, op_errno,
preparent, postparent, xdata)
@@ -411,8 +413,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE STAT FOP- {0:d}: gfid={1:s}; " +
- " path={2:s}").format(unique, gfid, loc.contents.path)
+ print(("GLUPY TRACE STAT FOP- {0:d}: gfid={1:s}; " +
+ " path={2:s}").format(unique, gfid, loc.contents.path))
self.gfids[key] = gfid
dl.wind_stat(frame, POINTER(xlator_t)(), loc, xdata)
return 0
@@ -424,17 +426,17 @@ class xlator(Translator):
gfid = self.gfids[key]
if op_ret == 0:
statstr = trace_stat2str(buf)
- print("GLUPY TRACE STAT CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE STAT CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; *buf={3:s};").format(unique,
gfid,
op_ret,
- statstr)
+ statstr))
else:
- print("GLUPY TRACE STAT CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE STAT CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_stat(frame, cookie, this, op_ret, op_errno,
buf, xdata)
@@ -444,8 +446,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE FSTAT FOP- {0:d}: gfid={1:s}; " +
- "fd={2:s}").format(unique, gfid, fd)
+ print(("GLUPY TRACE FSTAT FOP- {0:d}: gfid={1:s}; " +
+ "fd={2:s}").format(unique, gfid, fd))
self.gfids[key] = gfid
dl.wind_fstat(frame, POINTER(xlator_t)(), fd, xdata)
return 0
@@ -457,17 +459,17 @@ class xlator(Translator):
gfid = self.gfids[key]
if op_ret == 0:
statstr = trace_stat2str(buf)
- print("GLUPY TRACE FSTAT CBK- {0:d}: gfid={1:s} "+
+ print(("GLUPY TRACE FSTAT CBK- {0:d}: gfid={1:s} "+
" op_ret={2:d}; *buf={3:s}").format(unique,
gfid,
op_ret,
- statstr)
+ statstr))
else:
- print("GLUPY TRACE FSTAT CBK- {0:d}: gfid={1:s} "+
+ print(("GLUPY TRACE FSTAT CBK- {0:d}: gfid={1:s} "+
"op_ret={2:d}; op_errno={3:d}").format(unique.
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_fstat(frame, cookie, this, op_ret, op_errno,
buf, xdata)
@@ -479,8 +481,8 @@ class xlator(Translator):
gfid = uuid2str(loc.contents.inode.contents.gfid)
else:
gfid = "0"
- print("GLUPY TRACE STATFS FOP- {0:d}: gfid={1:s}; "+
- "path={2:s}").format(unique, gfid, loc.contents.path)
+ print(("GLUPY TRACE STATFS FOP- {0:d}: gfid={1:s}; "+
+ "path={2:s}").format(unique, gfid, loc.contents.path))
dl.wind_statfs(frame, POINTER(xlator_t)(), loc, xdata)
return 0
@@ -489,13 +491,13 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
if op_ret == 0:
#TBD: print buf (pointer to an iovec type object)
- print("GLUPY TRACE STATFS CBK {0:d}: "+
- "op_ret={1:d}").format(unique, op_ret)
+ print(("GLUPY TRACE STATFS CBK {0:d}: "+
+ "op_ret={1:d}").format(unique, op_ret))
else:
- print("GLUPY TRACE STATFS CBK- {0:d}"+
+ print(("GLUPY TRACE STATFS CBK- {0:d}"+
"op_ret={1:d}; op_errno={2:d}").format(unique,
op_ret,
- op_errno)
+ op_errno))
dl.unwind_statfs(frame, cookie, this, op_ret, op_errno,
buf, xdata)
return 0
@@ -504,9 +506,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE GETXATTR FOP- {0:d}: gfid={1:s}; path={2:s};"+
+ print(("GLUPY TRACE GETXATTR FOP- {0:d}: gfid={1:s}; path={2:s};"+
" name={3:s}").format(unique, gfid, loc.contents.path,
- name)
+ name))
self.gfids[key]=gfid
dl.wind_getxattr(frame, POINTER(xlator_t)(), loc, name, xdata)
return 0
@@ -516,10 +518,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE GETXATTR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE GETXATTR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}; "+
" dictionary={4:s}").format(unique, gfid, op_ret, op_errno,
- dictionary)
+ dictionary))
del self.gfids[key]
dl.unwind_getxattr(frame, cookie, this, op_ret, op_errno,
dictionary, xdata)
@@ -529,8 +531,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE FGETXATTR FOP- {0:d}: gfid={1:s}; fd={2:s}; "+
- "name={3:s}").format(unique, gfid, fd, name)
+ print(("GLUPY TRACE FGETXATTR FOP- {0:d}: gfid={1:s}; fd={2:s}; "+
+ "name={3:s}").format(unique, gfid, fd, name))
self.gfids[key] = gfid
dl.wind_fgetxattr(frame, POINTER(xlator_t)(), fd, name, xdata)
return 0
@@ -540,10 +542,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE FGETXATTR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE FGETXATTR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d};"+
" dictionary={4:s}").format(unique, gfid, op_ret,
- op_errno, dictionary)
+ op_errno, dictionary))
del self.gfids[key]
dl.unwind_fgetxattr(frame, cookie, this, op_ret, op_errno,
dictionary, xdata)
@@ -553,9 +555,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE SETXATTR FOP- {0:d}: gfid={1:s}; path={2:s};"+
+ print(("GLUPY TRACE SETXATTR FOP- {0:d}: gfid={1:s}; path={2:s};"+
" flags={3:d}").format(unique, gfid, loc.contents.path,
- flags)
+ flags))
self.gfids[key] = gfid
dl.wind_setxattr(frame, POINTER(xlator_t)(), loc, dictionary,
flags, xdata)
@@ -565,9 +567,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE SETXATTR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE SETXATTR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique, gfid,
- op_ret, op_errno)
+ op_ret, op_errno))
del self.gfids[key]
dl.unwind_setxattr(frame, cookie, this, op_ret, op_errno,
xdata)
@@ -577,8 +579,8 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(fd.contents.inode.contents.gfid)
- print("GLUPY TRACE FSETXATTR FOP- {0:d}: gfid={1:s}; fd={2:p}; "+
- "flags={3:d}").format(unique, gfid, fd, flags)
+ print(("GLUPY TRACE FSETXATTR FOP- {0:d}: gfid={1:s}; fd={2:p}; "+
+ "flags={3:d}").format(unique, gfid, fd, flags))
self.gfids[key] = gfid
dl.wind_fsetxattr(frame, POINTER(xlator_t)(), fd, dictionary,
flags, xdata)
@@ -588,9 +590,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE FSETXATTR CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE FSETXATTR CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique, gfid,
- op_ret, op_errno)
+ op_ret, op_errno))
del self.gfids[key]
dl.unwind_fsetxattr(frame, cookie, this, op_ret, op_errno,
xdata)
@@ -600,10 +602,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE REMOVEXATTR FOP- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE REMOVEXATTR FOP- {0:d}: gfid={1:s}; "+
"path={2:s}; name={3:s}").format(unique, gfid,
loc.contents.path,
- name)
+ name))
self.gfids[key] = gfid
dl.wind_removexattr(frame, POINTER(xlator_t)(), loc, name,
xdata)
@@ -614,9 +616,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = self.gfids[key]
- print("GLUPY TRACE REMOVEXATTR CBK- {0:d}: gfid={1:s} "+
+ print(("GLUPY TRACE REMOVEXATTR CBK- {0:d}: gfid={1:s} "+
" op_ret={2:d}; op_errno={3:d}").format(unique, gfid,
- op_ret, op_errno)
+ op_ret, op_errno))
del self.gfids[key]
dl.unwind_removexattr(frame, cookie, this, op_ret, op_errno,
xdata)
@@ -630,12 +632,12 @@ class xlator(Translator):
else:
newgfid = "0"
oldgfid = uuid2str(oldloc.contents.inode.contents.gfid)
- print("GLUPY TRACE LINK FOP-{0:d}: oldgfid={1:s}; oldpath={2:s};"+
+ print(("GLUPY TRACE LINK FOP-{0:d}: oldgfid={1:s}; oldpath={2:s};"+
"newgfid={3:s};"+
"newpath={4:s}").format(unique, oldgfid,
oldloc.contents.path,
newgfid,
- newloc.contents.path)
+ newloc.contents.path))
self.gfids[key] = oldgfid
dl.wind_link(frame, POINTER(xlator_t)(), oldloc, newloc,
xdata)
@@ -650,16 +652,16 @@ class xlator(Translator):
statstr = trace_stat2str(buf)
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE LINK CBK- {0:d}: op_ret={1:d} "+
+ print(("GLUPY TRACE LINK CBK- {0:d}: op_ret={1:d} "+
"*stbuf={2:s}; *prebuf={3:s}; "+
"*postbuf={4:s} ").format(unique, op_ret, statstr,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print("GLUPY TRACE LINK CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE LINK CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; "+
"op_errno={3:d}").format(unique, gfid,
- op_ret, op_errno)
+ op_ret, op_errno))
del self.gfids[key]
dl.unwind_link(frame, cookie, this, op_ret, op_errno, inode,
buf, preparent, postparent, xdata)
@@ -669,9 +671,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE UNLINK FOP- {0:d}; gfid={1:s}; path={2:s}; "+
+ print(("GLUPY TRACE UNLINK FOP- {0:d}; gfid={1:s}; path={2:s}; "+
"flag={3:d}").format(unique, gfid, loc.contents.path,
- xflag)
+ xflag))
self.gfids[key] = gfid
dl.wind_unlink(frame, POINTER(xlator_t)(), loc, xflag,
xdata)
@@ -685,16 +687,16 @@ class xlator(Translator):
if op_ret == 0:
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE UNLINK CBK- {0:d}: gfid ={1:s}; "+
+ print(("GLUPY TRACE UNLINK CBK- {0:d}: gfid ={1:s}; "+
"op_ret={2:d}; *prebuf={3:s}; "+
"*postbuf={4:s} ").format(unique, gfid, op_ret,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print("GLUPY TRACE UNLINK CBK: {0:d}: gfid ={1:s}; "+
+ print(("GLUPY TRACE UNLINK CBK: {0:d}: gfid ={1:s}; "+
"op_ret={2:d}; "+
"op_errno={3:d}").format(unique, gfid, op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_unlink(frame, cookie, this, op_ret, op_errno,
preparent, postparent, xdata)
@@ -704,9 +706,9 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE READLINK FOP- {0:d}: gfid={1:s}; path={2:s};"+
+ print(("GLUPY TRACE READLINK FOP- {0:d}: gfid={1:s}; path={2:s};"+
" size={3:d}").format(unique, gfid, loc.contents.path,
- size)
+ size))
self.gfids[key] = gfid
dl.wind_readlink(frame, POINTER(xlator_t)(), loc, size,
xdata)
@@ -719,17 +721,17 @@ class xlator(Translator):
gfid = self.gfids[key]
if op_ret == 0:
statstr = trace_stat2str(stbuf)
- print("GLUPY TRACE READLINK CBK- {0:d}: gfid={1:s} "+
+ print(("GLUPY TRACE READLINK CBK- {0:d}: gfid={1:s} "+
" op_ret={2:d}; op_errno={3:d}; *prebuf={4:s}; "+
"*postbuf={5:s} ").format(unique, gfid,
op_ret, op_errno,
- buf, statstr)
+ buf, statstr))
else:
- print("GLUPY TRACE READLINK CBK- {0:d}: gfid={1:s} "+
+ print(("GLUPY TRACE READLINK CBK- {0:d}: gfid={1:s} "+
" op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_readlink(frame, cookie, this, op_ret, op_errno, buf,
stbuf, xdata)
@@ -739,10 +741,10 @@ class xlator(Translator):
unique = dl.get_rootunique(frame)
key = dl.get_id(frame)
gfid = uuid2str(loc.contents.inode.contents.gfid)
- print("GLUPY TRACE SYMLINK FOP- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE SYMLINK FOP- {0:d}: gfid={1:s}; "+
"linkpath={2:s}; path={3:s};"+
"umask=0{4:o}").format(unique, gfid, linkpath,
- loc.contents.path, umask)
+ loc.contents.path, umask))
self.gfids[key] = gfid
dl.wind_symlink(frame, POINTER(xlator_t)(), linkpath, loc,
umask, xdata)
@@ -757,18 +759,18 @@ class xlator(Translator):
statstr = trace_stat2str(buf)
preparentstr = trace_stat2str(preparent)
postparentstr = trace_stat2str(postparent)
- print("GLUPY TRACE SYMLINK CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE SYMLINK CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; *stbuf={3:s}; *preparent={4:s}; "+
"*postparent={5:s}").format(unique, gfid,
op_ret, statstr,
preparentstr,
- postparentstr)
+ postparentstr))
else:
- print("GLUPY TRACE SYMLINK CBK- {0:d}: gfid={1:s}; "+
+ print(("GLUPY TRACE SYMLINK CBK- {0:d}: gfid={1:s}; "+
"op_ret={2:d}; op_errno={3:d}").format(unique,
gfid,
op_ret,
- op_errno)
+ op_errno))
del self.gfids[key]
dl.unwind_symlink(frame, cookie, this, op_ret, op_errno,
inode, buf, preparent, postparent, xdata)
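
The doubled parentheses in the converted calls above are what a 2to3-style print fixer produces when the Python 2 print statement's argument was already a parenthesized, multi-line ("..." + "...").format(...) expression: the whole expression simply gets wrapped in a function call. A minimal sketch (not part of this patch; the message text and values are invented) of the before/after equivalence:

    from __future__ import print_function

    unique, gfid, op_ret = 42, "00000000-0000-0000-0000-000000000000", 0

    # Python 2 statement form, as it looked before this patch:
    #     print ("GLUPY TRACE SAMPLE CBK- {0:d}: gfid={1:s}; " +
    #            "op_ret={2:d}").format(unique, gfid, op_ret)
    # With print_function imported, print is a function, so the fixer wraps
    # the same parenthesized expression in one more pair of parentheses:
    print(("GLUPY TRACE SAMPLE CBK- {0:d}: gfid={1:s}; " +
           "op_ret={2:d}").format(unique, gfid, op_ret))

Both forms write the same line; only the outer call changes, which keeps the conversion mechanical.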
diff --git a/xlators/features/glupy/examples/helloworld.py b/xlators/features/glupy/examples/helloworld.py
index b565a4e5bc3..282f9207949 100644
--- a/xlators/features/glupy/examples/helloworld.py
+++ b/xlators/features/glupy/examples/helloworld.py
@@ -1,3 +1,5 @@
+
+from __future__ import print_function
import sys
from gluster.glupy import *
@@ -7,13 +9,13 @@ class xlator (Translator):
Translator.__init__(self, c_this)
def lookup_fop(self, frame, this, loc, xdata):
- print "Python xlator: Hello!"
+ print("Python xlator: Hello!")
dl.wind_lookup(frame, POINTER(xlator_t)(), loc, xdata)
return 0
def lookup_cbk(self, frame, cookie, this, op_ret, op_errno, inode, buf,
xdata, postparent):
- print "Python xlator: Hello again!"
+ print("Python xlator: Hello again!")
dl.unwind_lookup(frame, cookie, this, op_ret, op_errno, inode, buf,
xdata, postparent)
return 0
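
The "from __future__ import print_function" line added at the top of each example makes print a function on Python 2.6/2.7 as well, so the same source runs unchanged under both interpreters. Note that negative.py below still calls dict.has_key(), which no longer exists in Python 3; a hypothetical follow-up sketch (names and values invented here, not part of this patch) of the membership test that would replace it:

    from __future__ import print_function

    # Toy stand-ins for the translator's negative-lookup cache.
    cache = {"parent-gfid": {"stale-name"}}
    pargfid, name = "parent-gfid", "stale-name"

    # Python 2 only:   if cache.has_key(pargfid):
    # Python 2 and 3:
    if pargfid in cache:
        cache[pargfid].discard(name)

    print(cache)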
diff --git a/xlators/features/glupy/examples/negative.py b/xlators/features/glupy/examples/negative.py
index e7a4fc07ced..e44ff4deed2 100644
--- a/xlators/features/glupy/examples/negative.py
+++ b/xlators/features/glupy/examples/negative.py
@@ -1,3 +1,5 @@
+
+from __future__ import print_function
import sys
from uuid import UUID
from gluster.glupy import *
@@ -31,12 +33,12 @@ class xlator (Translator):
def lookup_fop (self, frame, this, loc, xdata):
pargfid = uuid2str(loc.contents.pargfid)
- print "lookup FOP: %s:%s" % (pargfid, loc.contents.name)
+ print("lookup FOP: %s:%s" % (pargfid, loc.contents.name))
# Check the cache.
if cache.has_key(pargfid):
if loc.contents.name in cache[pargfid]:
- print "short-circuiting for %s:%s" % (pargfid,
- loc.contents.name)
+ print("short-circuiting for %s:%s" % (pargfid,
+ loc.contents.name))
dl.unwind_lookup(frame,0,this,-1,2,None,None,None,None)
return 0
key = dl.get_id(frame)
@@ -47,16 +49,16 @@ class xlator (Translator):
def lookup_cbk (self, frame, cookie, this, op_ret, op_errno, inode, buf,
xdata, postparent):
- print "lookup CBK: %d (%d)" % (op_ret, op_errno)
+ print("lookup CBK: %d (%d)" % (op_ret, op_errno))
key = dl.get_id(frame)
pargfid, name = self.requests[key]
# Update the cache.
if op_ret == 0:
- print "found %s, removing from cache" % name
+ print("found %s, removing from cache" % name)
if cache.has_key(pargfid):
cache[pargfid].discard(name)
elif op_errno == 2: # ENOENT
- print "failed to find %s, adding to cache" % name
+ print("failed to find %s, adding to cache" % name)
if cache.has_key(pargfid):
cache[pargfid].add(name)
else:
@@ -68,7 +70,7 @@ class xlator (Translator):
def create_fop (self, frame, this, loc, flags, mode, umask, fd, xdata):
pargfid = uuid2str(loc.contents.pargfid)
- print "create FOP: %s:%s" % (pargfid, loc.contents.name)
+ print("create FOP: %s:%s" % (pargfid, loc.contents.name))
key = dl.get_id(frame)
self.requests[key] = (pargfid, loc.contents.name[:])
# TBD: get real child xl from init, pass it here
@@ -77,12 +79,12 @@ class xlator (Translator):
def create_cbk (self, frame, cookie, this, op_ret, op_errno, fd, inode,
buf, preparent, postparent, xdata):
- print "create CBK: %d (%d)" % (op_ret, op_errno)
+ print("create CBK: %d (%d)" % (op_ret, op_errno))
key = dl.get_id(frame)
pargfid, name = self.requests[key]
# Update the cache.
if op_ret == 0:
- print "created %s, removing from cache" % name
+ print("created %s, removing from cache" % name)
if cache.has_key(pargfid):
cache[pargfid].discard(name)
del self.requests[key]