diff --git a/python/drydock_provisioner/orchestrator/validations/storage_mountpoints.py b/python/drydock_provisioner/orchestrator/validations/storage_mountpoints.py new file mode 100644 index 00000000..ce0535c3 --- /dev/null +++ b/python/drydock_provisioner/orchestrator/validations/storage_mountpoints.py @@ -0,0 +1,73 @@ +# Copyright 2018, Intracom-Telecom +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from drydock_provisioner.orchestrator.validations.validators import Validators + +class StorageMountpoints(Validators): + def __init__(self): + super().__init__('Storage Mountpoint', "DD2004") + + def run_validation(self, site_design, orchestrator=None): + """ + Ensures that any partitioned physical device or logical volumes + in a volume group do not use duplicate mount points. 
+        """
+        baremetal_nodes = site_design.baremetal_nodes or []
+
+        for baremetal_node in baremetal_nodes:
+            mountpoint_list = []
+            storage_device_list = baremetal_node.storage_devices or []
+            for storage_device in storage_device_list:
+                # Parsing the partitions and volume group of
+                # physical storage devices
+
+                partition_list = storage_device.partitions or []
+                device_volume_group = storage_device.volume_group
+
+                for partition in partition_list:
+                    # Load the mount point of each partition to a list;
+                    # mountpoint is None when no filesystem is defined
+
+                    mountpoint = partition.mountpoint
+                    if mountpoint and mountpoint in mountpoint_list:
+                        msg = ('Mountpoint "{}" already exists'
+                               .format(mountpoint))
+                        self.report_error(
+                            msg, [baremetal_node.doc_ref],
+                            'Please use unique mountpoints.')
+                        return
+                    else:
+                        mountpoint_list.append(mountpoint)
+
+                if device_volume_group:
+                    volume_groups = baremetal_node.volume_groups or []
+                    for volume_group in volume_groups:
+                        if volume_group.name == device_volume_group:
+                            logical_volume_list = volume_group.logical_volumes or []
+                            for logical_volume in logical_volume_list:
+                                # Load the mount point of each logical
+                                # volume of the assigned volume group;
+                                # mountpoint may be None here as well
+
+                                mountpoint = logical_volume.mountpoint
+                                if mountpoint and mountpoint in mountpoint_list:
+                                    msg = ('Mountpoint "{}" already exists'
+                                           .format(mountpoint))
+                                    self.report_error(
+                                        msg, [baremetal_node.doc_ref],
+                                        'Please use unique mountpoints.')
+                                    return
+                                else:
+                                    mountpoint_list.append(mountpoint)
+        return
diff --git a/python/drydock_provisioner/orchestrator/validations/validator.py b/python/drydock_provisioner/orchestrator/validations/validator.py
index 3f73ceab..e36eef02 100644
--- a/python/drydock_provisioner/orchestrator/validations/validator.py
+++ b/python/drydock_provisioner/orchestrator/validations/validator.py
@@ -33,6 +33,7 @@ from drydock_provisioner.orchestrator.validations.oob_valid_ipmi import IpmiVali
 from drydock_provisioner.orchestrator.validations.oob_valid_libvirt import LibvirtValidity
 from
drydock_provisioner.orchestrator.validations.bootaction_validity import BootactionDefined from drydock_provisioner.orchestrator.validations.bootaction_validity import BootactionPackageListValid +from drydock_provisioner.orchestrator.validations.storage_mountpoints import StorageMountpoints class Validator(): @@ -92,6 +93,7 @@ rule_set = [ RationalNetworkBond(), StoragePartitioning(), StorageSizing(), + StorageMountpoints(), UniqueNetworkCheck(), HostnameValidity(), IpmiValidity(), diff --git a/python/tests/unit/test_validation_rule_storage_mountpoint.py b/python/tests/unit/test_validation_rule_storage_mountpoint.py new file mode 100644 index 00000000..2847b53e --- /dev/null +++ b/python/tests/unit/test_validation_rule_storage_mountpoint.py @@ -0,0 +1,86 @@ +# Copyright 2018, Intracom-Telecom +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test Validation Rule Storage Mountpoints""" + +import re +import logging + +from drydock_provisioner.orchestrator.orchestrator import Orchestrator +from drydock_provisioner.orchestrator.validations.\ + storage_mountpoints import StorageMountpoints + +LOG = logging.getLogger(__name__) + +class TestStorageMountpoints(object): + def test_storage_mountpoints(self, deckhand_ingester, drydock_state, + input_files): + + input_file = input_files.join("validation.yaml") + design_ref = "file://%s" % str(input_file) + + orch = Orchestrator( + state_manager=drydock_state, ingester=deckhand_ingester) + status, site_design = Orchestrator.get_effective_site(orch, design_ref) + + validator = StorageMountpoints() + message_list = validator.execute(site_design, orchestrator=orch) + msg = message_list[0].to_dict() + + assert len(message_list) == 1 + assert msg.get('error') is False + + def test_invalid_partition_mountpoints(self, deckhand_ingester, + drydock_state, input_files, + mock_get_build_data): + + input_file = input_files.join("invalid_validation.yaml") + design_ref = "file://%s" % str(input_file) + + orch = Orchestrator( + state_manager=drydock_state, ingester=deckhand_ingester) + + status, site_design = Orchestrator.get_effective_site(orch, design_ref) + + validator = StorageMountpoints() + message_list = validator.execute(site_design, orchestrator=orch) + + regex = re.compile('Mountpoint .+ already exists') + + for msg in message_list: + msg = msg.to_dict() + LOG.debug(msg) + assert regex.search(msg.get('message')) is not None + assert msg.get('error') is True + + def test_invalid_vg_mountpoints(self, deckhand_ingester, drydock_state, + input_files, mock_get_build_data): + + input_file = input_files.join("invalid_mountpoint.yaml") + design_ref = "file://%s" % str(input_file) + + orch = Orchestrator( + state_manager=drydock_state, ingester=deckhand_ingester) + + status, site_design = Orchestrator.get_effective_site(orch, design_ref) + + validator = StorageMountpoints() 
+ message_list = validator.execute(site_design, orchestrator=orch) + + regex = re.compile('Mountpoint .+ already exists') + + for msg in message_list: + msg = msg.to_dict() + LOG.debug(msg) + assert regex.search(msg.get('message')) is not None + assert msg.get('error') is True diff --git a/python/tests/yaml_samples/invalid_mountpoint.yaml b/python/tests/yaml_samples/invalid_mountpoint.yaml new file mode 100644 index 00000000..6d1ad17f --- /dev/null +++ b/python/tests/yaml_samples/invalid_mountpoint.yaml @@ -0,0 +1,455 @@ +# Copyright 2018, Intracom-Telecom +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#################### +# +# bootstrap_seed.yaml - Site server design definition for physical layer +# +#################### +# version the schema in this file so consumers can rationally parse it +--- +schema: 'drydock/Region/v1' +metadata: + schema: 'metadata/Document/v1' + name: 'sitename' + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + tag_definitions: + - tag: 'test' + definition_type: 'lshw_xpath' + definition: "//node[@id=\"display\"]/'clock units=\"Hz\"' > 1000000000" + authorized_keys: + - | + ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDENeyO5hLPbLLQRZ0oafTYWs1ieo5Q+XgyZQs51Ju + jDGc8lKlWsg1/6yei2JewKMgcwG2Buu1eqU92Xn1SvMZLyt9GZURuBkyjcfVc/8GiU5QP1Of8B7CV0c + kfUpHWYJ17olTzT61Hgz10ioicBF6cjgQrLNcyn05xoaJHD2Vpf8Unxzi0YzA2e77yRqBo9jJVRaX2q + wUJuZrzb62x3zw8Knz6GGSZBn8xRKLaw1SKFpd1hwvL62GfqX5ZBAT1AYTZP1j8GcAoK8AFVn193SEU + vjSdUFa+RNWuJhkjBRfylJczIjTIFb5ls0jpbA3bMA9DE7lFKVQl6vVwFmiIVBI1 samplekey +--- +schema: 'drydock/NetworkLink/v1' +metadata: + schema: 'metadata/Document/v1' + name: oob + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + bonding: + mode: disabled + mtu: 1500 + linkspeed: 100full + trunking: + mode: disabled + default_network: oob + allowed_networks: + - oob +--- +schema: 'drydock/NetworkLink/v1' +metadata: + schema: 'metadata/Document/v1' + name: pxe + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + bonding: + mode: disabled + mtu: 1500 + linkspeed: auto + trunking: + mode: disabled + default_network: pxe + allowed_networks: + - pxe +--- +schema: 'drydock/NetworkLink/v1' +metadata: + schema: 'metadata/Document/v1' + name: gp + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + bonding: + mode: 802.3ad + hash: layer3+4 + peer_rate: slow + mtu: 9000 + linkspeed: auto + trunking: + mode: 802.1q + default_network: mgmt + allowed_networks: + - public + - private + - mgmt +--- +schema: 'drydock/Rack/v1' +metadata: + schema: 'metadata/Document/v1' + name: rack1 + 
storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + tor_switches: + switch01name: + mgmt_ip: 1.1.1.1 + sdn_api_uri: polo+https://api.sdn.example.com/switchmgmt?switch=switch01name + switch02name: + mgmt_ip: 1.1.1.2 + sdn_api_uri: polo+https://api.sdn.example.com/switchmgmt?switch=switch02name + location: + clli: HSTNTXMOCG0 + grid: EG12 + local_networks: + - pxe-rack1 +--- +schema: 'drydock/Network/v1' +metadata: + schema: 'metadata/Document/v1' + name: oob + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + cidr: 172.16.100.0/24 + ranges: + - type: static + start: 172.16.100.15 + end: 172.16.100.254 + dns: + domain: ilo.sitename.att.com + servers: 172.16.100.10 +--- +schema: 'drydock/Network/v1' +metadata: + schema: 'metadata/Document/v1' + name: pxe + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + dhcp_relay: + self_ip: 172.16.0.4 + upstream_target: 172.16.5.5 + mtu: 1500 + cidr: 172.16.0.0/24 + ranges: + - type: dhcp + start: 172.16.0.5 + end: 172.16.0.254 + dns: + domain: admin.sitename.att.com + servers: 172.16.0.10 +--- +schema: 'drydock/Network/v1' +metadata: + schema: 'metadata/Document/v1' + name: mgmt + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + vlan: '100' + mtu: 1500 + cidr: 172.16.1.0/24 + ranges: + - type: static + start: 172.16.1.15 + end: 172.16.1.254 + routes: + - subnet: 0.0.0.0/0 + gateway: 172.16.1.1 + metric: 10 + dns: + domain: mgmt.sitename.example.com + servers: 172.16.1.9,172.16.1.10 +--- +schema: 'drydock/Network/v1' +metadata: + schema: 'metadata/Document/v1' + name: private + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + vlan: '101' + mtu: 9000 + cidr: 172.16.2.0/24 + ranges: + - type: static + start: 172.16.2.15 + end: 172.16.2.254 + dns: + domain: priv.sitename.example.com + servers: 172.16.2.9,172.16.2.10 +--- +schema: 'drydock/Network/v1' +metadata: + schema: 'metadata/Document/v1' + name: public + storagePolicy: 
'cleartext' + labels: + application: 'drydock' +data: + vlan: '102' + mtu: 1500 + cidr: 172.16.3.0/24 + ranges: + - type: static + start: 172.16.3.15 + end: 172.16.3.254 + routes: + - subnet: 0.0.0.0/0 + gateway: 172.16.3.1 + metric: 10 + dns: + domain: sitename.example.com + servers: 8.8.8.8 +--- +schema: 'drydock/HostProfile/v1' +metadata: + schema: 'metadata/Document/v1' + name: defaults + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + oob: + type: ipmi + network: oob + account: admin + credential: admin + storage: + physical_devices: + sda: + labels: + role: rootdisk + partitions: + - name: root + size: 20g + bootable: true + filesystem: + mountpoint: '/' + fstype: 'ext4' + mount_options: 'defaults' + - name: boot + size: 1g + bootable: false + filesystem: + mountpoint: '/boot' + fstype: 'ext4' + mount_options: 'defaults' + sdb: + volume_group: 'log_vg' + volume_groups: + log_vg: + logical_volumes: + - name: 'log_lv' + size: '500m' + filesystem: + mountpoint: '/var/log' + fstype: 'xfs' + mount_options: 'defaults' + - name: 'log_lv_two' + size: '500m' + filesystem: + mountpoint: '/var/log' + fstype: 'xfs' + mount_options: 'defaults' + platform: + image: 'xenial' + kernel: 'ga-16.04' + kernel_params: + quiet: true + console: ttyS2 + metadata: + owner_data: + foo: bar +--- +schema: 'drydock/HostProfile/v1' +metadata: + schema: 'metadata/Document/v1' + name: 'k8-node' + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + host_profile: defaults + hardware_profile: HPGen9v3 + primary_network: mgmt + interfaces: + pxe: + device_link: pxe + labels: + noconfig: true + slaves: + - prim_nic01 + networks: + - pxe + bond0: + device_link: gp + slaves: + - prim_nic01 + - prim_nic02 + networks: + - mgmt + - private + metadata: + tags: + - 'test' +--- +schema: 'drydock/BaremetalNode/v1' +metadata: + schema: 'metadata/Document/v1' + name: controller01 + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + host_profile: 
k8-node + interfaces: + bond0: + networks: + - '!private' + addressing: + - network: pxe + address: dhcp + - network: mgmt + address: 172.16.1.20 + - network: public + address: 172.16.3.20 + - network: oob + address: 172.16.100.20 + metadata: + rack: rack1 +--- +schema: 'drydock/BaremetalNode/v1' +metadata: + schema: 'metadata/Document/v1' + name: compute01 + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + host_profile: k8-node + addressing: + - network: pxe + address: dhcp + - network: mgmt + address: 172.16.1.21 + - network: private + address: 172.16.2.21 + - network: oob + address: 172.16.100.21 + metadata: + rack: rack2 +--- +schema: 'drydock/HardwareProfile/v1' +metadata: + schema: 'metadata/Document/v1' + name: HPGen9v3 + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + vendor: HP + generation: '8' + hw_version: '3' + bios_version: '2.2.3' + boot_mode: bios + bootstrap_protocol: pxe + pxe_interface: 0 + device_aliases: + prim_nic01: + address: '0000:00:03.0' + dev_type: '82540EM Gigabit Ethernet Controller' + bus_type: 'pci' + prim_nic02: + address: '0000:00:04.0' + dev_type: '82540EM Gigabit Ethernet Controller' + bus_type: 'pci' + primary_boot: + address: '2:0.0.0' + dev_type: 'VBOX HARDDISK' + bus_type: 'scsi' +--- +schema: 'drydock/BootAction/v1' +metadata: + schema: 'metadata/Document/v1' + name: helloworld + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + assets: + - path: /var/tmp/hello.sh + type: file + permissions: '555' + data: |- + IyEvYmluL2Jhc2gKCmVjaG8gJ0hlbGxvIFdvcmxkISAtZnJvbSB7eyBub2RlLmhvc3RuYW1lIH19 + Jwo= + data_pipeline: + - base64_decode + - utf8_decode + - template + - path: /lib/systemd/system/hello.service + type: unit + permissions: '600' + data: |- + W1VuaXRdCkRlc2NyaXB0aW9uPUhlbGxvIFdvcmxkCgpbU2VydmljZV0KVHlwZT1vbmVzaG90CkV4 + ZWNTdGFydD0vdmFyL3RtcC9oZWxsby5zaAoKW0luc3RhbGxdCldhbnRlZEJ5PW11bHRpLXVzZXIu + dGFyZ2V0Cg== + data_pipeline: + - base64_decode + - 
utf8_decode +--- +schema: 'drydock/BootAction/v1' +metadata: + schema: 'metadata/Document/v1' + name: hw_filtered + storagePolicy: 'cleartext' + labels: + application: 'drydock' +data: + node_filter: + filter_set_type: 'union' + filter_set: + - filter_type: 'union' + node_names: + - 'compute01' + assets: + - path: /var/tmp/hello.sh + type: file + permissions: '555' + data: |- + IyEvYmluL2Jhc2gKCmVjaG8gJ0hlbGxvIFdvcmxkISAtZnJvbSB7eyBub2RlLmhvc3RuYW1lIH19 + Jwo= + data_pipeline: + - base64_decode + - utf8_decode + - template + - path: /lib/systemd/system/hello.service + type: unit + permissions: '600' + data: |- + W1VuaXRdCkRlc2NyaXB0aW9uPUhlbGxvIFdvcmxkCgpbU2VydmljZV0KVHlwZT1vbmVzaG90CkV4 + ZWNTdGFydD0vdmFyL3RtcC9oZWxsby5zaAoKW0luc3RhbGxdCldhbnRlZEJ5PW11bHRpLXVzZXIu + dGFyZ2V0Cg== + data_pipeline: + - base64_decode + - utf8_decode +... diff --git a/python/tests/yaml_samples/invalid_validation.yaml b/python/tests/yaml_samples/invalid_validation.yaml index 37d27b27..b24ec3e0 100644 --- a/python/tests/yaml_samples/invalid_validation.yaml +++ b/python/tests/yaml_samples/invalid_validation.yaml @@ -252,11 +252,12 @@ data: # FAILS HERE: root not set ############################################# # FAILS HERE: partitions size > 99% +# FAILS HERE: duplicate mount point - name: test size: 100% bootable: true filesystem: - mountpoint: '/' + mountpoint: '/boot' fstype: 'ext4' mount_options: 'defaults' ###############################################