Diffstat (limited to 'openshift-storage-libs/openshiftstoragelibs')
-rw-r--r--  openshift-storage-libs/openshiftstoragelibs/cloundproviders/vmware.py  178
-rw-r--r--  openshift-storage-libs/openshiftstoragelibs/node_ops.py                  69
2 files changed, 247 insertions, 0 deletions
diff --git a/openshift-storage-libs/openshiftstoragelibs/cloundproviders/vmware.py b/openshift-storage-libs/openshiftstoragelibs/cloundproviders/vmware.py
index 1d4d4c38..959fc7da 100644
--- a/openshift-storage-libs/openshiftstoragelibs/cloundproviders/vmware.py
+++ b/openshift-storage-libs/openshiftstoragelibs/cloundproviders/vmware.py
@@ -3,6 +3,7 @@ Note: Do not use this module directly in the Test Cases. This module can be
used with the help of 'node_ops'
"""
import re
+import string
from glusto.core import Glusto as g
from pyVim import connect
@@ -239,3 +240,180 @@ class VmWare(object):
tasks = [vm[0].PowerOff()]
self._wait_for_tasks(tasks, self.vsphere_client)
+
+    def get_obj(self, name, vimtype):
+        """Retrieve the managed object for the specified name and type.
+
+        Args:
+            name (str): Name of the VM.
+            vimtype (list): List of managed object types to search,
+                e.g. [vim.VirtualMachine].
+        Returns:
+            obj: Managed object matching the given vimtype and name.
+                Example:
+                    'vim.VirtualMachine:vm-1268'
+        Raises:
+            CloudProviderError: In case of any failures.
+        """
+ obj = None
+ content = self.vsphere_client.content.viewManager.CreateContainerView(
+ self.vsphere_client.content.rootFolder, vimtype, True)
+ for c in content.view:
+ if c.name == name:
+ obj = c
+ break
+        if not obj:
+            msg = "Virtual machine with name '{}' not found.".format(name)
+            g.log.error(msg)
+            raise exceptions.CloudProviderError(msg)
+ return obj
+
+    def get_disk_labels(self, vm_name):
+        """Retrieve labels of the disks attached to the vm.
+
+        Args:
+            vm_name (str): Name of the VM.
+        Returns:
+            disk_labels (list): list of disk labels attached to the vm.
+                Example:
+                    ['Hard disk 1', 'Hard disk 2', 'Hard disk 3']
+        Raises:
+            CloudProviderError: In case of any failures.
+        """
+
+ # Find vm
+ vm = self.get_obj(vm_name, vimtype=[vim.VirtualMachine])
+
+        # Collect the labels of virtual disk devices only.
+        disk_labels = []
+        for dev in vm.config.hardware.device:
+            if isinstance(dev, vim.vm.device.VirtualDisk):
+                disk_labels.append(dev.deviceInfo.label)
+        return disk_labels
+
+    def detach_disk(self, vm_name, disk_path):
+        """Detach a disk from the given vm by its device path.
+
+        Args:
+            vm_name (str): Name of the VM.
+            disk_path (str): Device path of the disk which needs to be
+                unplugged.
+                Example:
+                    '/dev/sdd'
+                    '/dev/sde'
+        Returns:
+            vdisk_path (str): Path of the vmdk file detached from the vm.
+        Raises:
+            CloudProviderError: In case of any failures.
+        """
+
+        # Translate the device path (e.g. '/dev/sdd') to a VMware disk
+        # label (e.g. 'Hard disk 4').
+        letter = disk_path[-1]
+        ucase = string.ascii_uppercase
+        pos = ucase.find(letter.upper()) + 1
+        if pos:
+            disk_label = 'Hard disk {}'.format(pos)
+        else:
+            raise exceptions.CloudProviderError(
+                "Could not map disk path '{}' to a disk label on vm "
+                "'{}'".format(disk_path, vm_name))
+
+ # Find vm
+ vm = self.get_obj(vm_name, vimtype=[vim.VirtualMachine])
+
+ # Find if the given hard disk is attached to the system.
+ virtual_hdd_device = None
+ for dev in vm.config.hardware.device:
+ if dev.deviceInfo.label == disk_label:
+ virtual_hdd_device = dev
+ vdisk_path = virtual_hdd_device.backing.fileName
+ break
+
+ if not virtual_hdd_device:
+ raise exceptions.CloudProviderError(
+ 'Virtual disk label {} could not be found'.format(disk_label))
+ disk_labels = self.get_disk_labels(vm_name)
+ if disk_label in disk_labels:
+
+ # Remove disk from the vm
+ virtual_hdd_spec = vim.vm.device.VirtualDeviceSpec()
+ virtual_hdd_spec.operation = (
+ vim.vm.device.VirtualDeviceSpec.Operation.remove)
+ virtual_hdd_spec.device = virtual_hdd_device
+
+ # Wait for the task to be completed.
+ spec = vim.vm.ConfigSpec()
+ spec.deviceChange = [virtual_hdd_spec]
+ task = vm.ReconfigVM_Task(spec=spec)
+ self._wait_for_tasks([task], self.vsphere_client)
+ else:
+ msg = ("Could not find provided disk {} in list of disks {}"
+ " in vm {}".format(disk_label, disk_labels, vm_name))
+ g.log.error(msg)
+ raise exceptions.CloudProviderError(msg)
+ return vdisk_path
+
+    def attach_existing_vmdk(self, vm_name, disk_path, vmdk_name):
+        """Attach an already existing vmdk disk to the vm.
+
+        Args:
+            vm_name (str): Name of the VM.
+            disk_path (str): Device path at which the disk needs to be
+                attached.
+                Example:
+                    '/dev/sdd'
+                    '/dev/sde'
+            vmdk_name (str): Path of the vmdk file to attach to the vm.
+        Returns:
+            None
+        Raises:
+            CloudProviderError: In case of any failures.
+        """
+
+ # Find vm
+ vm = self.get_obj(vm_name, vimtype=[vim.VirtualMachine])
+
+        # Translate the device path (e.g. '/dev/sdd') to a VMware disk
+        # label (e.g. 'Hard disk 4').
+        letter = disk_path[-1]
+        ucase = string.ascii_uppercase
+        pos = ucase.find(letter.upper()) + 1
+        if pos:
+            disk_label = 'Hard disk {}'.format(pos)
+        else:
+            raise exceptions.CloudProviderError(
+                "Could not map disk path '{}' to a disk label on vm "
+                "'{}'".format(disk_path, vm_name))
+
+        # Make sure the given hard disk is not already attached to the vm
+ for dev in vm.config.hardware.device:
+ if dev.deviceInfo.label == disk_label:
+ raise exceptions.CloudProviderError(
+ 'Virtual disk label {} already exists'.format(disk_label))
+
+        # Find the next free unit number for attaching the vmdk
+        unit_number = 0
+        for dev in vm.config.hardware.device:
+            if hasattr(dev.backing, 'fileName'):
+                unit_number = int(dev.unitNumber) + 1
+
+                # Unit number 7 is reserved for the SCSI controller and a
+                # controller supports at most 16 units.
+                if unit_number == 7:
+                    unit_number += 1
+                if unit_number >= 16:
+                    raise exceptions.CloudProviderError(
+                        "SCSI controller is full. Cannot attach vmdk file")
+            if isinstance(dev, vim.vm.device.VirtualSCSIController):
+                controller = dev
+
+        # Build the device spec to attach the vmdk file and set its backing
+ spec = vim.vm.ConfigSpec()
+ disk_spec = vim.vm.device.VirtualDeviceSpec()
+ disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
+ disk_spec.device = vim.vm.device.VirtualDisk()
+ disk_spec.device.backing = (
+ vim.vm.device.VirtualDisk.FlatVer2BackingInfo())
+ disk_spec.device.backing.diskMode = 'persistent'
+ disk_spec.device.backing.fileName = vmdk_name
+ disk_spec.device.backing.thinProvisioned = True
+ disk_spec.device.unitNumber = unit_number
+ disk_spec.device.controllerKey = controller.key
+
+        # Collect the device changes and reconfigure the vm
+ dev_changes = []
+ dev_changes.append(disk_spec)
+ spec.deviceChange = dev_changes
+ task = vm.ReconfigVM_Task(spec=spec)
+ self._wait_for_tasks([task], self.vsphere_client)
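
Both detach_disk and attach_existing_vmdk rely on the same path-to-label translation shown above: the trailing letter of the Linux device path is turned into a 1-based alphabet position, which selects the VMware 'Hard disk N' label. A minimal standalone sketch of that mapping, using a hypothetical helper name purely for illustration:

    import string

    def _device_path_to_disk_label(disk_path):
        """Map a Linux device path such as '/dev/sdd' to a VMware disk label.

        '/dev/sda' -> 'Hard disk 1', '/dev/sdd' -> 'Hard disk 4', and so on.
        Raises ValueError when the trailing character is not a drive letter.
        """
        letter = disk_path[-1]
        pos = string.ascii_uppercase.find(letter.upper()) + 1
        if not pos:
            raise ValueError(
                "Could not map disk path '{}' to a disk label".format(disk_path))
        return 'Hard disk {}'.format(pos)

    # These are the labels VmWare.detach_disk() looks up on the VM hardware.
    assert _device_path_to_disk_label('/dev/sdd') == 'Hard disk 4'
    assert _device_path_to_disk_label('/dev/sde') == 'Hard disk 5'
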
diff --git a/openshift-storage-libs/openshiftstoragelibs/node_ops.py b/openshift-storage-libs/openshiftstoragelibs/node_ops.py
index 943ad194..5ae8cf2a 100644
--- a/openshift-storage-libs/openshiftstoragelibs/node_ops.py
+++ b/openshift-storage-libs/openshiftstoragelibs/node_ops.py
@@ -215,3 +215,72 @@ def node_delete_iptables_rules(node, chain, rules, raise_on_error=True):
command.cmd_run(
delete_iptables_rule_cmd % (chain, rule), node,
raise_on_error=raise_on_error)
+
+
+def attach_disk_to_vm(name, disk_size, disk_type='thin'):
+    """Add a disk of the specified size to the virtual machine.
+
+    Args:
+        name (str): name of the VM to which the disk needs to be added.
+        disk_size (int) : disk size in KB.
+        disk_type (str) : type of the disk, either "thick" or "thin".
+            Default value is "thin".
+    Returns:
+        None
+    """
+ cloudProvider = _get_cloud_provider()
+
+ vm_name = find_vm_name_by_ip_or_hostname(name)
+ cloudProvider.attach_disk(vm_name, disk_size, disk_type)
+
+
+def attach_existing_vmdk_from_vmstore(name, disk_path, vmdk_name):
+    """Attach the specified existing vmdk disk to the virtual machine.
+
+    Args:
+        name (str): name of the VM to which the disk needs to be attached.
+        disk_path (str) : device path at which the disk needs to be attached.
+            Example:
+                '/dev/sdd'
+        vmdk_name (str) : name of the vmdk file which needs to be attached.
+
+    Returns:
+        None
+    """
+ cloudProvider = _get_cloud_provider()
+
+ vm_name = find_vm_name_by_ip_or_hostname(name)
+ cloudProvider.attach_existing_vmdk(vm_name, disk_path, vmdk_name)
+
+
+def detach_disk_from_vm(name, disk_name):
+ """Remove the disk specified from virtual machine.
+
+ Args:
+ name (str): name of the VM from where the disk needs to be removed.
+ disk_name (str) : name of the disk which needs to be removed.
+ Example:
+ '/dev/sdd'
+ '/dev/sde'
+ Returns:
+ vdisk (str): vmdk filepath of removed disk
+ """
+ cloudProvider = _get_cloud_provider()
+
+ vm_name = find_vm_name_by_ip_or_hostname(name)
+ vdisk = cloudProvider.detach_disk(vm_name, disk_name)
+ return vdisk
+
+
+def get_disk_labels(name):
+    """Retrieve the labels of the disks attached to the virtual machine.
+
+    Args:
+        name (str) : name of the VM whose disk labels need to be listed.
+    Returns:
+        disk_labels (list): list of disk labels attached to the vm.
+            Example:
+                ['Hard disk 1', 'Hard disk 2', 'Hard disk 3']
+    """
+    cloudProvider = _get_cloud_provider()
+    vm_name = find_vm_name_by_ip_or_hostname(name)
+    disk_labels = cloudProvider.get_disk_labels(vm_name)
+    return disk_labels
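
Taken together, the new node_ops helpers let a test case unplug a disk from a node and later plug the same vmdk back in. A hedged usage sketch, assuming the cloud provider is configured for VMware and 'ocp-node-1.example.com' is a hypothetical node address with a '/dev/sdd' disk attached:

    from openshiftstoragelibs import node_ops

    # Hypothetical node address; replace with a node from your setup.
    node = "ocp-node-1.example.com"

    # List the VMware disk labels currently attached to the node's VM.
    labels = node_ops.get_disk_labels(node)
    print(labels)  # e.g. ['Hard disk 1', 'Hard disk 2', 'Hard disk 3']

    # Detach the disk backing '/dev/sdd' and remember its vmdk path ...
    vmdk_path = node_ops.detach_disk_from_vm(node, "/dev/sdd")

    # ... then re-attach that same vmdk to the node at the same device path.
    node_ops.attach_existing_vmdk_from_vmstore(node, "/dev/sdd", vmdk_path)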