summaryrefslogtreecommitdiffstats
path: root/tests/functional/common
diff options
context:
space:
mode:
authorApeksha D Khakharia <akhakhar@redhat.com>2018-02-05 11:14:21 +0530
committerApeksha D Khakharia <akhakhar@redhat.com>2018-02-21 17:20:04 +0530
commit1aba0134455b846ffb3a9a4dd9b4ba622c6515ca (patch)
treed7c95e1a678a34eb07122b11574c3a90324d8eb3 /tests/functional/common
parent3ecdfb197aeb68665ff3264d8effe32af6066240 (diff)
CNS: heketi_url change
Change-Id: I927166d4649a5318cea221219d471534bd258545 Signed-off-by: Apeksha D Khakharia <akhakhar@redhat.com>
Diffstat (limited to 'tests/functional/common')
-rw-r--r--tests/functional/common/heketi/test_volume_multi_req.py12
-rw-r--r--tests/functional/common/provisioning/test_dynamic_provisioning_block_p0_cases.py155
-rw-r--r--tests/functional/common/provisioning/test_dynamic_provisioning_p0_cases.py113
3 files changed, 196 insertions, 84 deletions
diff --git a/tests/functional/common/heketi/test_volume_multi_req.py b/tests/functional/common/heketi/test_volume_multi_req.py
index 7b723e7c..03658fae 100644
--- a/tests/functional/common/heketi/test_volume_multi_req.py
+++ b/tests/functional/common/heketi/test_volume_multi_req.py
@@ -227,7 +227,9 @@ class TestVolumeMultiReq(HeketiClientSetupBaseClass):
# deploy a temporary storage class
sc = build_storage_class(
name=tname,
- resturl=self.heketi_server_url)
+ resturl=self.heketi_server_url,
+ restuser=self.heketi_cli_user,
+ restuserkey=self.heketi_cli_key)
with temp_config(ocp_node, sc) as tmpfn:
oc_create(ocp_node, tmpfn)
self.addCleanup(delete_storageclass, ocp_node, tname)
@@ -286,7 +288,9 @@ class TestVolumeMultiReq(HeketiClientSetupBaseClass):
# deploy a temporary storage class
sc = build_storage_class(
name=tname,
- resturl=self.heketi_server_url)
+ resturl=self.heketi_server_url,
+ restuser=self.heketi_cli_user,
+ restuserkey=self.heketi_cli_key)
with temp_config(ocp_node, sc) as tmpfn:
oc_create(ocp_node, tmpfn)
self.addCleanup(delete_storageclass, ocp_node, tname)
@@ -339,7 +343,9 @@ class TestVolumeMultiReq(HeketiClientSetupBaseClass):
# deploy a temporary storage class
sc = build_storage_class(
name=tname,
- resturl=self.heketi_server_url)
+ resturl=self.heketi_server_url,
+ restuser=self.heketi_cli_user,
+ restuserkey=self.heketi_cli_key)
with temp_config(ocp_node, sc) as tmpfn:
oc_create(ocp_node, tmpfn)
self.addCleanup(delete_storageclass, ocp_node, tname)
diff --git a/tests/functional/common/provisioning/test_dynamic_provisioning_block_p0_cases.py b/tests/functional/common/provisioning/test_dynamic_provisioning_block_p0_cases.py
index 3c01427e..933033ae 100644
--- a/tests/functional/common/provisioning/test_dynamic_provisioning_block_p0_cases.py
+++ b/tests/functional/common/provisioning/test_dynamic_provisioning_block_p0_cases.py
@@ -5,6 +5,11 @@ from cnslibs.common.dynamic_provisioning import (
get_pvc_status,
verify_pod_status_running)
from cnslibs.cns.cns_baseclass import CnsGlusterBlockBaseClass
+from cnslibs.common.exceptions import (
+ ConfigError,
+ ExecutionError)
+from cnslibs.common.heketi_ops import (
+ export_heketi_cli_server)
from cnslibs.common.openshift_ops import (
get_ocp_gluster_pod_names,
oc_create,
@@ -12,7 +17,7 @@ from cnslibs.common.openshift_ops import (
oc_rsh)
from cnslibs.common.waiter import Waiter
from glusto.core import Glusto as g
-import time
+import unittest
class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
@@ -23,19 +28,6 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
def test_dynamic_provisioning_glusterblock(self):
g.log.info("test_dynamic_provisioning_glusterblock")
storage_class = self.cns_storage_class['storage_class2']
- cmd = "export HEKETI_CLI_SERVER=%s" % storage_class['resturl']
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cmd = ("export HEKETI_CLI_SERVER=%s && heketi-cli cluster list "
- "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1" % (
- storage_class['resturl']))
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cluster_id = out.strip().split("\n")[0]
- sc_name = storage_class['name']
- pvc_name1 = "mongodb1-block"
cmd = ("oc get svc | grep heketi | grep -v endpoints "
"| awk '{print $2}'")
ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
@@ -43,6 +35,21 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
cmd, self.ocp_master_node[0]))
heketi_cluster_ip = out.strip().split("\n")[0]
resturl_block = "http://%s:8080" % heketi_cluster_ip
+ if not export_heketi_cli_server(
+ self.heketi_client_node,
+ heketi_cli_server=resturl_block,
+ heketi_cli_user=self.heketi_cli_user,
+ heketi_cli_key=self.heketi_cli_key):
+ raise ExecutionError("Failed to export heketi cli server on %s"
+ % self.heketi_client_node)
+ cmd = ("heketi-cli cluster list "
+ "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1")
+ ret, out, err = g.run(self.ocp_client[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_client[0]))
+ cluster_id = out.strip().split("\n")[0]
+ sc_name = storage_class['name']
+ pvc_name1 = "mongodb1-block"
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
@@ -64,7 +71,7 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -112,7 +119,6 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
oc_delete(self.ocp_master_node[0], 'pod', pod_name)
ret = verify_pod_status_running(self.ocp_master_node[0],
pvc_name1)
- ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
self.assertTrue(ret, "verify mongodb pod status as running failed")
cmd = ("oc get pods | grep %s | grep -v deploy "
"| awk {'print $1'}") % pvc_name1
@@ -129,22 +135,10 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
+ @unittest.skip("skiping heketi-pod failure testcase")
def test_dynamic_provisioning_glusterblock_heketipod_failure(self):
g.log.info("test_dynamic_provisioning_glusterblock_Heketipod_Failure")
storage_class = self.cns_storage_class['storage_class2']
- cmd = "export HEKETI_CLI_SERVER=%s" % storage_class['resturl']
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cmd = ("export HEKETI_CLI_SERVER=%s && heketi-cli cluster list "
- "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1") % (
- storage_class['resturl'])
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cluster_id = out.strip().split("\n")[0]
- sc_name = storage_class['name']
- pvc_name2 = "mongodb2-block"
cmd = ("oc get svc | grep heketi | grep -v endpoints "
"| awk '{print $2}'")
ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
@@ -152,6 +146,21 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
cmd, self.ocp_master_node[0]))
heketi_cluster_ip = out.strip().split("\n")[0]
resturl_block = "http://%s:8080" % heketi_cluster_ip
+ if not export_heketi_cli_server(
+ self.heketi_client_node,
+ heketi_cli_server=resturl_block,
+ heketi_cli_user=self.heketi_cli_user,
+ heketi_cli_key=self.heketi_cli_key):
+ raise ExecutionError("Failed to export heketi cli server on %s"
+ % self.heketi_client_node)
+ cmd = ("heketi-cli cluster list "
+ "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1")
+ ret, out, err = g.run(self.ocp_client[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_client[0]))
+ cluster_id = out.strip().split("\n")[0]
+ sc_name = storage_class['name']
+ pvc_name2 = "mongodb2-block"
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
@@ -173,7 +182,7 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -227,30 +236,29 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
cmd, self.ocp_master_node[0]))
ret = verify_pod_status_running(self.ocp_master_node[0], "heketi")
self.assertTrue(ret, "verify heketi pod status as running failed")
- oc_delete(self.ocp_master_node[0], 'sc', sc_name)
cmd = ("oc get svc | grep heketi | grep -v endpoints "
"| awk '{print $2}'")
ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
- heketi_cluster_ip = out.strip().split("\n")[0]
- resturl_block = "http://%s:8080" % heketi_cluster_ip
- ret = create_storage_class_file(
- self.ocp_master_node[0],
- sc_name,
- resturl_block,
- storage_class['provisioner'],
- restuser=storage_class['restuser'],
- restsecretnamespace=storage_class['restsecretnamespace'],
- restsecretname=storage_class['restsecretname'],
- hacount=storage_class['hacount'],
- clusterids=cluster_id)
- self.assertTrue(ret, "creation of storage-class file failed")
- provisioner_name = storage_class['provisioner'].split("/")
- file_path = "/%s-%s-storage-class.yaml" % (
- sc_name, provisioner_name[1])
- oc_create(self.ocp_master_node[0], file_path)
- for w in Waiter(300, 30):
+ heketi_cluster_new_ip = out.strip().split("\n")[0]
+ if heketi_cluster_new_ip != heketi_cluster_ip:
+ oc_delete(self.ocp_master_node[0], 'sc', sc_name)
+ resturl_block = "http://%s:8080" % heketi_cluster_new_ip
+ ret = create_storage_class_file(
+ self.ocp_master_node[0],
+ sc_name,
+ resturl_block,
+ storage_class['provisioner'],
+ restuser=storage_class['restuser'],
+ secretnamespace=storage_class['secretnamespace'],
+ secretname=storage_class['secretname'])
+ self.assertTrue(ret, "creation of storage-class file failed")
+ provisioner_name = storage_class['provisioner'].split("/")
+ file_path = "/%s-%s-storage-class.yaml" % (
+ sc_name, provisioner_name[1])
+ oc_create(self.ocp_master_node[0], file_path)
+ for w in Waiter(600, 30):
ret, status = get_pvc_status(self.ocp_master_node[0],
pvc_name3)
self.assertTrue(ret, "failed to get pvc status of %s" % (
@@ -273,6 +281,12 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
pvc_name3)
self.assertTrue(ret, "verify %s pod status as "
"running failed" % pvc_name3)
+ cmd = ("oc get pods | grep %s | grep -v deploy "
+ "|awk {'print $1'}") % pvc_name3
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ pod_name = out.strip().split("\n")[0]
cmd = ("dd if=/dev/urandom of=/var/lib/mongodb/data/file "
"bs=1K count=100")
ret, out, err = oc_rsh(self.ocp_master_node[0], pod_name, cmd)
@@ -282,19 +296,6 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
def test_dynamic_provisioning_glusterblock_glusterpod_failure(self):
g.log.info("test_dynamic_provisioning_glusterblock_Glusterpod_Failure")
storage_class = self.cns_storage_class['storage_class2']
- cmd = "export HEKETI_CLI_SERVER=%s" % storage_class['resturl']
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cmd = ("export HEKETI_CLI_SERVER=%s && heketi-cli cluster list "
- "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1") % (
- storage_class['resturl'])
- ret, out, err = g.run(self.ocp_client[0], cmd, "root")
- self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
- cmd, self.ocp_client[0]))
- cluster_id = out.strip().split("\n")[0]
- sc_name = storage_class['name']
- pvc_name4 = "mongodb-4-block"
cmd = ("oc get svc | grep heketi | grep -v endpoints "
"| awk '{print $2}'")
ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
@@ -302,6 +303,21 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
cmd, self.ocp_master_node[0]))
heketi_cluster_ip = out.strip().split("\n")[0]
resturl_block = "http://%s:8080" % heketi_cluster_ip
+ if not export_heketi_cli_server(
+ self.heketi_client_node,
+ heketi_cli_server=resturl_block,
+ heketi_cli_user=self.heketi_cli_user,
+ heketi_cli_key=self.heketi_cli_key):
+ raise ExecutionError("Failed to export heketi cli server on %s"
+ % self.heketi_client_node)
+ cmd = ("heketi-cli cluster list "
+ "| grep Id | cut -d ':' -f 2 | cut -d '[' -f 1")
+ ret, out, err = g.run(self.ocp_client[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_client[0]))
+ cluster_id = out.strip().split("\n")[0]
+ sc_name = storage_class['name']
+ pvc_name4 = "mongodb-4-block"
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
@@ -323,7 +339,7 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -366,10 +382,21 @@ class TestDynamicProvisioningBlockP0(CnsGlusterBlockBaseClass):
cmd = ("oc get pods -o wide | grep glusterfs | grep %s | "
"grep -v Terminating | awk '{print $1}'") % (
gluster_pod_node_name)
- ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ for w in Waiter(600, 30):
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ new_gluster_pod_name = out.strip().split("\n")[0].strip()
+ if ret == 0 and not new_gluster_pod_name:
+ continue
+ else:
+ break
+ if w.expired:
+ error_msg = "exceeded timeout, new gluster pod not created"
+ g.log.error(error_msg)
+ raise ExecutionError(error_msg)
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
new_gluster_pod_name = out.strip().split("\n")[0].strip()
+ g.log.info("new gluster pod name is %s" % new_gluster_pod_name)
ret = verify_pod_status_running(self.ocp_master_node[0],
new_gluster_pod_name)
self.assertTrue(ret, "verify %s pod status as running "
diff --git a/tests/functional/common/provisioning/test_dynamic_provisioning_p0_cases.py b/tests/functional/common/provisioning/test_dynamic_provisioning_p0_cases.py
index 9ae0e987..0167573f 100644
--- a/tests/functional/common/provisioning/test_dynamic_provisioning_p0_cases.py
+++ b/tests/functional/common/provisioning/test_dynamic_provisioning_p0_cases.py
@@ -4,14 +4,20 @@ from cnslibs.common.dynamic_provisioning import (
create_storage_class_file,
get_pvc_status,
verify_pod_status_running)
+from cnslibs.cns.cns_baseclass import (
+ CnsBaseClass,
+ CnsSetupBaseClass)
+from cnslibs.common.exceptions import (
+ ConfigError,
+ ExecutionError)
from cnslibs.common.openshift_ops import (
get_ocp_gluster_pod_names,
- oc_rsh)
-from cnslibs.cns.cns_baseclass import CnsBaseClass
-from cnslibs.common.openshift_ops import (
oc_create,
- oc_delete)
+ oc_delete,
+ oc_rsh)
+from cnslibs.common.waiter import Waiter
from glusto.core import Glusto as g
+import unittest
class TestDynamicProvisioningP0(CnsBaseClass):
@@ -19,16 +25,22 @@ class TestDynamicProvisioningP0(CnsBaseClass):
Class that contain P0 dynamic provisioning test cases for
glusterfile volume
'''
-
def test_dynamic_provisioning_glusterfile(self):
g.log.info("test_dynamic_provisioning_glusterfile")
storage_class = self.cns_storage_class['storage_class1']
sc_name = storage_class['name']
pvc_name1 = "mongodb1"
+ cmd = ("oc get svc | grep heketi | grep -v endpoints "
+ "| awk '{print $2}'")
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ heketi_cluster_ip = out.strip().split("\n")[0]
+ resturl = "http://%s:8080" % heketi_cluster_ip
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
- storage_class['resturl'],
+ resturl,
storage_class['provisioner'],
restuser=storage_class['restuser'],
secretnamespace=storage_class['secretnamespace'],
@@ -44,7 +56,7 @@ class TestDynamicProvisioningP0(CnsBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -77,7 +89,6 @@ class TestDynamicProvisioningP0(CnsBaseClass):
oc_delete(self.ocp_master_node[0], 'pod', pod_name)
ret = verify_pod_status_running(self.ocp_master_node[0],
pvc_name1)
- ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
self.assertTrue(ret, "verify mongodb pod status as running failed")
cmd = ("oc get pods | grep %s | grep -v deploy "
"| awk {'print $1'}") % pvc_name1
@@ -94,15 +105,23 @@ class TestDynamicProvisioningP0(CnsBaseClass):
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
+ @unittest.skip("skiping heketi-pod failure testcase")
def test_dynamic_provisioning_glusterfile_heketipod_failure(self):
g.log.info("test_dynamic_provisioning_glusterfile_Heketipod_Failure")
storage_class = self.cns_storage_class['storage_class1']
sc_name = storage_class['name']
pvc_name2 = "mongodb2"
+ cmd = ("oc get svc | grep heketi | grep -v endpoints "
+ "| awk '{print $2}'")
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ heketi_cluster_ip = out.strip().split("\n")[0]
+ resturl = "http://%s:8080" % heketi_cluster_ip
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
- storage_class['resturl'],
+ resturl,
storage_class['provisioner'],
restuser=storage_class['restuser'],
secretnamespace=storage_class['secretnamespace'],
@@ -118,7 +137,7 @@ class TestDynamicProvisioningP0(CnsBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -172,15 +191,57 @@ class TestDynamicProvisioningP0(CnsBaseClass):
cmd, self.ocp_master_node[0]))
ret = verify_pod_status_running(self.ocp_master_node[0], "heketi")
self.assertTrue(ret, "verify heketi pod status as running failed")
- ret, status = get_pvc_status(self.ocp_master_node[0],
+ cmd = ("oc get svc | grep heketi | grep -v endpoints "
+ "| awk '{print $2}'")
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ heketi_cluster_new_ip = out.strip().split("\n")[0]
+ if heketi_cluster_new_ip != heketi_cluster_ip:
+ oc_delete(self.ocp_master_node[0], 'sc', sc_name)
+ resturl = "http://%s:8080" % heketi_cluster_ip
+ ret = create_storage_class_file(
+ self.ocp_master_node[0],
+ sc_name,
+ resturl,
+ storage_class['provisioner'],
+ restuser=storage_class['restuser'],
+ secretnamespace=storage_class['secretnamespace'],
+ secretname=storage_class['secretname'])
+ self.assertTrue(ret, "creation of storage-class file failed")
+ provisioner_name = storage_class['provisioner'].split("/")
+ file_path = "/%s-%s-storage-class.yaml" % (
+ sc_name, provisioner_name[1])
+ oc_create(self.ocp_master_node[0], file_path)
+ for w in Waiter(600, 30):
+ ret, status = get_pvc_status(self.ocp_master_node[0],
pvc_name3)
- self.assertTrue(ret, "failed to get pvc status of %s" % pvc_name3)
+ self.assertTrue(ret, "failed to get pvc status of %s" % (
+ pvc_name3))
+ if status != "Bound":
+ g.log.info("pvc status of %s is not in Bound state,"
+ " sleeping for 30 sec" % pvc_name3)
+ continue
+ else:
+ break
+ if w.expired:
+ error_msg = ("exceeded timeout 300 sec, pvc %s not in"
+ " Bound state" % pvc_name3)
+ g.log.error(error_msg)
+ raise ExecutionError(error_msg)
self.assertEqual(status, "Bound", "pvc status of %s "
- "is not in Bound state" % pvc_name3)
+ "is not in Bound state, its state is %s" % (
+ pvc_name3, status))
ret = verify_pod_status_running(self.ocp_master_node[0],
pvc_name3)
self.assertTrue(ret, "verify %s pod status "
"as running failed" % pvc_name3)
+ cmd = ("oc get pods | grep %s | grep -v deploy "
+ "|awk {'print $1'}") % pvc_name3
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ pod_name = out.strip().split("\n")[0]
cmd = ("dd if=/dev/urandom of=/var/lib/mongodb/data/file "
"bs=1K count=100")
ret, out, err = oc_rsh(self.ocp_master_node[0], pod_name, cmd)
@@ -192,10 +253,17 @@ class TestDynamicProvisioningP0(CnsBaseClass):
storage_class = self.cns_storage_class['storage_class1']
sc_name = storage_class['name']
pvc_name4 = "mongodb4"
+ cmd = ("oc get svc | grep heketi | grep -v endpoints "
+ "| awk '{print $2}'")
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
+ cmd, self.ocp_master_node[0]))
+ heketi_cluster_ip = out.strip().split("\n")[0]
+ resturl = "http://%s:8080" % heketi_cluster_ip
ret = create_storage_class_file(
self.ocp_master_node[0],
sc_name,
- storage_class['resturl'],
+ resturl,
storage_class['provisioner'],
restuser=storage_class['restuser'],
secretnamespace=storage_class['secretnamespace'],
@@ -211,7 +279,7 @@ class TestDynamicProvisioningP0(CnsBaseClass):
ret = create_secret_file(self.ocp_master_node[0],
secret['secret_name'],
secret['namespace'],
- secret['data_key'],
+ self.secret_data_key,
secret['type'])
self.assertTrue(ret, "creation of heketi-secret file failed")
oc_create(self.ocp_master_node[0],
@@ -245,7 +313,7 @@ class TestDynamicProvisioningP0(CnsBaseClass):
g.log.info("gluster_pod_list - %s" % gluster_pod_list)
gluster_pod_name = gluster_pod_list[0]
cmd = ("oc get pods -o wide | grep %s | grep -v deploy "
- "|awk '{print $7}'") % gluster_pod_name
+ "| awk '{print $7}'") % gluster_pod_name
ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
@@ -254,10 +322,21 @@ class TestDynamicProvisioningP0(CnsBaseClass):
cmd = ("oc get pods -o wide | grep glusterfs | grep %s | "
"grep -v Terminating | awk '{print $1}'") % (
gluster_pod_node_name)
- ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ for w in Waiter(600, 30):
+ ret, out, err = g.run(self.ocp_master_node[0], cmd, "root")
+ new_gluster_pod_name = out.strip().split("\n")[0].strip()
+ if ret == 0 and not new_gluster_pod_name:
+ continue
+ else:
+ break
+ if w.expired:
+ error_msg = "exceeded timeout, new gluster pod not created"
+ g.log.error(error_msg)
+ raise ExecutionError(error_msg)
self.assertEqual(ret, 0, "failed to execute command %s on %s" % (
cmd, self.ocp_master_node[0]))
new_gluster_pod_name = out.strip().split("\n")[0].strip()
+ g.log.info("new gluster pod name is %s" % new_gluster_pod_name)
ret = verify_pod_status_running(self.ocp_master_node[0],
new_gluster_pod_name)
self.assertTrue(ret, "verify %s pod status as running "