This patch set adds a rudimentary gate.

Change-Id: I3a2466bd7be5352b46273b385d215913eb8079ba
Signed-off-by: Tin Lam <tin@irrational.io>
Tin Lam 2019-04-04 19:25:11 -05:00
parent dcf0735b08
commit 89dfec7b4c
12 changed files with 779 additions and 609 deletions


@ -9,7 +9,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 - project:
-    templates:
-      - noop-jobs
+    check:
+      jobs:
+        - openstack-tox-pep8
+    gate:
+      jobs:
+        - openstack-tox-pep8


@ -281,8 +281,8 @@ class BaseDataSourcePlugin(object):
# For each host list fill host profile and network IPs # For each host list fill host profile and network IPs
for host in hosts: for host in hosts:
host_name = host['name'] host_name = host["name"]
rack_name = host['rack_name'] rack_name = host["rack_name"]
if rack_name not in baremetal: if rack_name not in baremetal:
baremetal[rack_name] = {} baremetal[rack_name] = {}
@ -290,32 +290,39 @@ class BaseDataSourcePlugin(object):
# Prepare temp dict for each host and append it to baremetal # Prepare temp dict for each host and append it to baremetal
# at a rack level # at a rack level
temp_host = {} temp_host = {}
if host['host_profile'] is None: if host["host_profile"] is None:
temp_host['host_profile'] = "#CHANGE_ME" temp_host["host_profile"] = "#CHANGE_ME"
else: else:
temp_host['host_profile'] = host['host_profile'] temp_host["host_profile"] = host["host_profile"]
# Get Host IPs from plugin # Get Host IPs from plugin
temp_host_ips = self.get_ips(self.region, host_name) temp_host_ips = self.get_ips(self.region, host_name)
# Fill network IP for this host # Fill network IP for this host
temp_host['ip'] = {} temp_host["ip"] = {}
temp_host['ip']['oob'] = temp_host_ips[host_name].get( temp_host["ip"]["oob"] = temp_host_ips[host_name].get(
'oob', "#CHANGE_ME") "oob", "#CHANGE_ME"
temp_host['ip']['calico'] = temp_host_ips[host_name].get( )
'calico', "#CHANGE_ME") temp_host["ip"]["calico"] = temp_host_ips[host_name].get(
temp_host['ip']['oam'] = temp_host_ips[host_name].get( "calico", "#CHANGE_ME"
'oam', "#CHANGE_ME") )
temp_host['ip']['storage'] = temp_host_ips[host_name].get( temp_host["ip"]["oam"] = temp_host_ips[host_name].get(
'storage', "#CHANGE_ME") "oam", "#CHANGE_ME"
temp_host['ip']['overlay'] = temp_host_ips[host_name].get( )
'overlay', "#CHANGE_ME") temp_host["ip"]["storage"] = temp_host_ips[host_name].get(
temp_host['ip']['pxe'] = temp_host_ips[host_name].get( "storage", "#CHANGE_ME"
'pxe', "#CHANGE_ME") )
temp_host["ip"]["overlay"] = temp_host_ips[host_name].get(
"overlay", "#CHANGE_ME"
)
temp_host["ip"]["pxe"] = temp_host_ips[host_name].get(
"pxe", "#CHANGE_ME"
)
baremetal[rack_name][host_name] = temp_host baremetal[rack_name][host_name] = temp_host
LOG.debug("Baremetal information:\n{}".format( LOG.debug(
pprint.pformat(baremetal))) "Baremetal information:\n{}".format(pprint.pformat(baremetal))
)
return baremetal return baremetal
@ -348,19 +355,20 @@ class BaseDataSourcePlugin(object):
site_info = location_data site_info = location_data
dns_data = self.get_dns_servers(self.region) dns_data = self.get_dns_servers(self.region)
site_info['dns'] = dns_data site_info["dns"] = dns_data
ntp_data = self.get_ntp_servers(self.region) ntp_data = self.get_ntp_servers(self.region)
site_info['ntp'] = ntp_data site_info["ntp"] = ntp_data
ldap_data = self.get_ldap_information(self.region) ldap_data = self.get_ldap_information(self.region)
site_info['ldap'] = ldap_data site_info["ldap"] = ldap_data
domain_data = self.get_domain_name(self.region) domain_data = self.get_domain_name(self.region)
site_info['domain'] = domain_data site_info["domain"] = domain_data
LOG.debug("Extracted site information:\n{}".format( LOG.debug(
pprint.pformat(site_info))) "Extracted site information:\n{}".format(pprint.pformat(site_info))
)
return site_info return site_info
@ -393,21 +401,28 @@ class BaseDataSourcePlugin(object):
# networks_to_scan, so look for these networks from the data # networks_to_scan, so look for these networks from the data
# returned by plugin # returned by plugin
networks_to_scan = [ networks_to_scan = [
'calico', 'overlay', 'pxe', 'storage', 'oam', 'oob', 'ingress' "calico",
"overlay",
"pxe",
"storage",
"oam",
"oob",
"ingress",
] ]
network_data['vlan_network_data'] = {} network_data["vlan_network_data"] = {}
for net in networks: for net in networks:
tmp_net = {} tmp_net = {}
if net['name'] in networks_to_scan: if net["name"] in networks_to_scan:
tmp_net['subnet'] = net.get('subnet', '#CHANGE_ME') tmp_net["subnet"] = net.get("subnet", "#CHANGE_ME")
if ((net['name'] != 'ingress') and (net['name'] != 'oob')): if (net["name"] != "ingress") and (net["name"] != "oob"):
tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME') tmp_net["vlan"] = net.get("vlan", "#CHANGE_ME")
network_data['vlan_network_data'][net['name']] = tmp_net network_data["vlan_network_data"][net["name"]] = tmp_net
LOG.debug("Extracted network data:\n{}".format( LOG.debug(
pprint.pformat(network_data))) "Extracted network data:\n{}".format(pprint.pformat(network_data))
)
return network_data return network_data
def extract_data(self): def extract_data(self):
@ -418,9 +433,9 @@ class BaseDataSourcePlugin(object):
""" """
LOG.info("Extract data from plugin") LOG.info("Extract data from plugin")
site_data = {} site_data = {}
site_data['baremetal'] = self.extract_baremetal_information() site_data["baremetal"] = self.extract_baremetal_information()
site_data['site_info'] = self.extract_site_information() site_data["site_info"] = self.extract_site_information()
site_data['network'] = self.extract_network_information() site_data["network"] = self.extract_network_information()
self.site_data = site_data self.site_data = site_data
return site_data return site_data
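
The extract_baremetal_information() loop earlier in this hunk builds one nested dict per host and falls back to the "#CHANGE_ME" placeholder for any network the plugin did not return. A minimal standalone sketch of the resulting shape (rack, host, and IP values here are invented for illustration):

    # Hypothetical plugin output for a single host; names and addresses are examples.
    temp_host_ips = {"cab23r01c001": {"oob": "10.0.0.5", "pxe": "10.0.1.5"}}

    temp_host = {"host_profile": "#CHANGE_ME", "ip": {}}
    for net in ("oob", "calico", "oam", "storage", "overlay", "pxe"):
        # Networks missing from the plugin data keep the placeholder value.
        temp_host["ip"][net] = temp_host_ips["cab23r01c001"].get(net, "#CHANGE_ME")

    baremetal = {"rack23": {"cab23r01c001": temp_host}}
    print(baremetal["rack23"]["cab23r01c001"]["ip"]["oob"])     # 10.0.0.5
    print(baremetal["rack23"]["cab23r01c001"]["ip"]["calico"])  # #CHANGE_ME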


@ -31,8 +31,11 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs
 
     def display_error(self):
-        print('No spec matched. Following are the available specs:\n'.format(
-            self.specs))
+        print(
+            "No spec matched. Following are the available specs:\n".format(
+                self.specs
+            )
+        )
         sys.exit(1)


@ -22,8 +22,11 @@ import urllib3
from spyglass.data_extractor.base import BaseDataSourcePlugin from spyglass.data_extractor.base import BaseDataSourcePlugin
from spyglass.data_extractor.custom_exceptions import ( from spyglass.data_extractor.custom_exceptions import (
ApiClientError, ConnectionError, MissingAttributeError, ApiClientError,
TokenGenerationError) ConnectionError,
MissingAttributeError,
TokenGenerationError,
)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@ -41,8 +44,8 @@ class FormationPlugin(BaseDataSourcePlugin):
LOG.info("Check spyglass --help for details") LOG.info("Check spyglass --help for details")
exit() exit()
self.source_type = 'rest' self.source_type = "rest"
self.source_name = 'formation' self.source_name = "formation"
# Configuration parameters # Configuration parameters
self.formation_api_url = None self.formation_api_url = None
@ -67,10 +70,10 @@ class FormationPlugin(BaseDataSourcePlugin):
""" Sets the config params passed by CLI""" """ Sets the config params passed by CLI"""
LOG.info("Plugin params passed:\n{}".format(pprint.pformat(conf))) LOG.info("Plugin params passed:\n{}".format(pprint.pformat(conf)))
self._validate_config_options(conf) self._validate_config_options(conf)
self.formation_api_url = conf['url'] self.formation_api_url = conf["url"]
self.user = conf['user'] self.user = conf["user"]
self.password = conf['password'] self.password = conf["password"]
self.token = conf.get('token', None) self.token = conf.get("token", None)
self._get_formation_client() self._get_formation_client()
self._update_site_and_zone(self.region) self._update_site_and_zone(self.region)
@ -78,21 +81,24 @@ class FormationPlugin(BaseDataSourcePlugin):
def get_plugin_conf(self, kwargs): def get_plugin_conf(self, kwargs):
""" Validates the plugin param and return if success""" """ Validates the plugin param and return if success"""
try: try:
assert (kwargs['formation_url'] assert (
) is not None, "formation_url is Not Specified" kwargs["formation_url"]
url = kwargs['formation_url'] ) is not None, "formation_url is Not Specified"
assert (kwargs['formation_user'] url = kwargs["formation_url"]
) is not None, "formation_user is Not Specified" assert (
user = kwargs['formation_user'] kwargs["formation_user"]
assert (kwargs['formation_password'] ) is not None, "formation_user is Not Specified"
) is not None, "formation_password is Not Specified" user = kwargs["formation_user"]
password = kwargs['formation_password'] assert (
kwargs["formation_password"]
) is not None, "formation_password is Not Specified"
password = kwargs["formation_password"]
except AssertionError: except AssertionError:
LOG.error("Insufficient plugin parameter! Spyglass exited!") LOG.error("Insufficient plugin parameter! Spyglass exited!")
raise raise
exit() exit()
plugin_conf = {'url': url, 'user': user, 'password': password} plugin_conf = {"url": url, "user": user, "password": password}
return plugin_conf return plugin_conf
def _validate_config_options(self, conf): def _validate_config_options(self, conf):
@ -129,21 +135,24 @@ class FormationPlugin(BaseDataSourcePlugin):
if self.token: if self.token:
return self.token return self.token
url = self.formation_api_url + '/zones' url = self.formation_api_url + "/zones"
try: try:
token_response = requests.get( token_response = requests.get(
url, url,
auth=(self.user, self.password), auth=(self.user, self.password),
verify=self.client_config.verify_ssl) verify=self.client_config.verify_ssl,
)
except requests.exceptions.ConnectionError: except requests.exceptions.ConnectionError:
raise ConnectionError('Incorrect URL: {}'.format(url)) raise ConnectionError("Incorrect URL: {}".format(url))
if token_response.status_code == 200: if token_response.status_code == 200:
self.token = token_response.json().get('X-Subject-Token', None) self.token = token_response.json().get("X-Subject-Token", None)
else: else:
raise TokenGenerationError( raise TokenGenerationError(
'Unable to generate token because {}'.format( "Unable to generate token because {}".format(
token_response.reason)) token_response.reason
)
)
return self.token return self.token
@ -155,9 +164,10 @@ class FormationPlugin(BaseDataSourcePlugin):
Generate the token and add it formation config object. Generate the token and add it formation config object.
""" """
token = self._generate_token() token = self._generate_token()
self.client_config.api_key = {'X-Auth-Token': self.user + '|' + token} self.client_config.api_key = {"X-Auth-Token": self.user + "|" + token}
self.formation_api_client = formation_client.ApiClient( self.formation_api_client = formation_client.ApiClient(
self.client_config) self.client_config
)
def _update_site_and_zone(self, region): def _update_site_and_zone(self, region):
"""Get Zone name and Site name from region""" """Get Zone name and Site name from region"""
@ -169,8 +179,8 @@ class FormationPlugin(BaseDataSourcePlugin):
# site = zone[:-1] # site = zone[:-1]
self.region_zone_map[region] = {} self.region_zone_map[region] = {}
self.region_zone_map[region]['zone'] = zone self.region_zone_map[region]["zone"] = zone
self.region_zone_map[region]['site'] = site self.region_zone_map[region]["site"] = site
def _get_zone_by_region_name(self, region_name): def _get_zone_by_region_name(self, region_name):
zone_api = formation_client.ZonesApi(self.formation_api_client) zone_api = formation_client.ZonesApi(self.formation_api_client)
@ -248,7 +258,7 @@ class FormationPlugin(BaseDataSourcePlugin):
return self.device_name_id_mapping.get(device_name, None) return self.device_name_id_mapping.get(device_name, None)
def _get_racks(self, zone, rack_type='compute'): def _get_racks(self, zone, rack_type="compute"):
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
rack_api = formation_client.RacksApi(self.formation_api_client) rack_api = formation_client.RacksApi(self.formation_api_client)
racks = rack_api.zones_zone_id_racks_get(zone_id) racks = rack_api.zones_zone_id_racks_get(zone_id)
@ -296,35 +306,40 @@ class FormationPlugin(BaseDataSourcePlugin):
# Implement Abstract functions # Implement Abstract functions
def get_racks(self, region): def get_racks(self, region):
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
return self._get_racks(zone, rack_type='compute') return self._get_racks(zone, rack_type="compute")
def get_hosts(self, region, rack=None): def get_hosts(self, region, rack=None):
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
device_api = formation_client.DevicesApi(self.formation_api_client) device_api = formation_client.DevicesApi(self.formation_api_client)
control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id) control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
compute_hosts = device_api.zones_zone_id_devices_get( compute_hosts = device_api.zones_zone_id_devices_get(
zone_id, type='KVM') zone_id, type="KVM"
)
hosts_list = [] hosts_list = []
for host in control_hosts: for host in control_hosts:
self.device_name_id_mapping[host.aic_standard_name] = host.id self.device_name_id_mapping[host.aic_standard_name] = host.id
hosts_list.append({ hosts_list.append(
'name': host.aic_standard_name, {
'type': 'controller', "name": host.aic_standard_name,
'rack_name': host.rack_name, "type": "controller",
'host_profile': host.host_profile_name "rack_name": host.rack_name,
}) "host_profile": host.host_profile_name,
}
)
for host in compute_hosts: for host in compute_hosts:
self.device_name_id_mapping[host.aic_standard_name] = host.id self.device_name_id_mapping[host.aic_standard_name] = host.id
hosts_list.append({ hosts_list.append(
'name': host.aic_standard_name, {
'type': 'compute', "name": host.aic_standard_name,
'rack_name': host.rack_name, "type": "compute",
'host_profile': host.host_profile_name "rack_name": host.rack_name,
}) "host_profile": host.host_profile_name,
}
)
""" """
for host in itertools.chain(control_hosts, compute_hosts): for host in itertools.chain(control_hosts, compute_hosts):
self.device_name_id_mapping[host.aic_standard_name] = host.id self.device_name_id_mapping[host.aic_standard_name] = host.id
@ -339,40 +354,43 @@ class FormationPlugin(BaseDataSourcePlugin):
return hosts_list return hosts_list
def get_networks(self, region): def get_networks(self, region):
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
region_id = self._get_region_id_by_name(region) region_id = self._get_region_id_by_name(region)
vlan_api = formation_client.VlansApi(self.formation_api_client) vlan_api = formation_client.VlansApi(self.formation_api_client)
vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get( vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
zone_id, region_id) zone_id, region_id
)
# Case when vlans list is empty from # Case when vlans list is empty from
# zones_zone_id_regions_region_id_vlans_get # zones_zone_id_regions_region_id_vlans_get
if len(vlans) is 0: if len(vlans) == 0:
# get device-id from the first host and get the network details # get device-id from the first host and get the network details
hosts = self.get_hosts(self.region) hosts = self.get_hosts(self.region)
host = hosts[0]['name'] host = hosts[0]["name"]
device_id = self._get_device_id_by_name(host) device_id = self._get_device_id_by_name(host)
vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get( vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
zone_id, device_id) zone_id, device_id
)
LOG.debug("Extracted region network information\n{}".format(vlans)) LOG.debug("Extracted region network information\n{}".format(vlans))
vlans_list = [] vlans_list = []
for vlan_ in vlans: for vlan_ in vlans:
if len(vlan_.vlan.ipv4) is not 0: if len(vlan_.vlan.ipv4) != 0:
tmp_vlan = {} tmp_vlan = {}
tmp_vlan['name'] = self._get_network_name_from_vlan_name( tmp_vlan["name"] = self._get_network_name_from_vlan_name(
vlan_.vlan.name) vlan_.vlan.name
tmp_vlan['vlan'] = vlan_.vlan.vlan_id )
tmp_vlan['subnet'] = vlan_.vlan.subnet_range tmp_vlan["vlan"] = vlan_.vlan.vlan_id
tmp_vlan['gateway'] = vlan_.ipv4_gateway tmp_vlan["subnet"] = vlan_.vlan.subnet_range
tmp_vlan['subnet_level'] = vlan_.vlan.subnet_level tmp_vlan["gateway"] = vlan_.ipv4_gateway
tmp_vlan["subnet_level"] = vlan_.vlan.subnet_level
vlans_list.append(tmp_vlan) vlans_list.append(tmp_vlan)
return vlans_list return vlans_list
def get_ips(self, region, host=None): def get_ips(self, region, host=None):
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
if host: if host:
@ -381,7 +399,7 @@ class FormationPlugin(BaseDataSourcePlugin):
hosts = [] hosts = []
hosts_dict = self.get_hosts(zone) hosts_dict = self.get_hosts(zone)
for host in hosts_dict: for host in hosts_dict:
hosts.append(host['name']) hosts.append(host["name"])
vlan_api = formation_client.VlansApi(self.formation_api_client) vlan_api = formation_client.VlansApi(self.formation_api_client)
ip_ = {} ip_ = {}
@ -389,18 +407,23 @@ class FormationPlugin(BaseDataSourcePlugin):
for host in hosts: for host in hosts:
device_id = self._get_device_id_by_name(host) device_id = self._get_device_id_by_name(host)
vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get( vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
zone_id, device_id) zone_id, device_id
)
LOG.debug("Received VLAN Network Information\n{}".format(vlans)) LOG.debug("Received VLAN Network Information\n{}".format(vlans))
ip_[host] = {} ip_[host] = {}
for vlan_ in vlans: for vlan_ in vlans:
# TODO(pg710r) We need to handle the case when incoming ipv4 # TODO(pg710r) We need to handle the case when incoming ipv4
# list is empty # list is empty
if len(vlan_.vlan.ipv4) is not 0: if len(vlan_.vlan.ipv4) != 0:
name = self._get_network_name_from_vlan_name( name = self._get_network_name_from_vlan_name(
vlan_.vlan.name) vlan_.vlan.name
)
ipv4 = vlan_.vlan.ipv4[0].ip ipv4 = vlan_.vlan.ipv4[0].ip
LOG.debug("vlan:{},name:{},ip:{},vlan_name:{}".format( LOG.debug(
vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name)) "vlan:{},name:{},ip:{},vlan_name:{}".format(
vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name
)
)
# TODD(pg710r) This code needs to extended to support ipv4 # TODD(pg710r) This code needs to extended to support ipv4
# and ipv6 # and ipv6
# ip_[host][name] = {'ipv4': ipv4} # ip_[host][name] = {'ipv4': ipv4}
@ -419,12 +442,12 @@ class FormationPlugin(BaseDataSourcePlugin):
vlan_name contains "ILO" the network name is "oob" vlan_name contains "ILO" the network name is "oob"
""" """
network_names = { network_names = {
'ksn': 'calico', "ksn": "calico",
'storage': 'storage', "storage": "storage",
'server': 'oam', "server": "oam",
'ovs': 'overlay', "ovs": "overlay",
'ILO': 'oob', "ILO": "oob",
'pxe': 'pxe' "pxe": "pxe",
} }
for name in network_names: for name in network_names:
@ -438,7 +461,7 @@ class FormationPlugin(BaseDataSourcePlugin):
def get_dns_servers(self, region): def get_dns_servers(self, region):
try: try:
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
zone_api = formation_client.ZonesApi(self.formation_api_client) zone_api = formation_client.ZonesApi(self.formation_api_client)
zone_ = zone_api.zones_zone_id_get(zone_id) zone_ = zone_api.zones_zone_id_get(zone_id)
@ -463,7 +486,7 @@ class FormationPlugin(BaseDataSourcePlugin):
def get_location_information(self, region): def get_location_information(self, region):
""" get location information for a zone and return """ """ get location information for a zone and return """
site = self.region_zone_map[region]['site'] site = self.region_zone_map[region]["site"]
site_id = self._get_site_id_by_name(site) site_id = self._get_site_id_by_name(site)
site_api = formation_client.SitesApi(self.formation_api_client) site_api = formation_client.SitesApi(self.formation_api_client)
site_info = site_api.sites_site_id_get(site_id) site_info = site_api.sites_site_id_get(site_id)
@ -471,18 +494,19 @@ class FormationPlugin(BaseDataSourcePlugin):
try: try:
return { return {
# 'corridor': site_info.corridor, # 'corridor': site_info.corridor,
'name': site_info.city, "name": site_info.city,
'state': site_info.state, "state": site_info.state,
'country': site_info.country, "country": site_info.country,
'physical_location_id': site_info.clli, "physical_location_id": site_info.clli,
} }
except AttributeError as e: except AttributeError as e:
raise MissingAttributeError('Missing {} information in {}'.format( raise MissingAttributeError(
e, site_info.city)) "Missing {} information in {}".format(e, site_info.city)
)
def get_domain_name(self, region): def get_domain_name(self, region):
try: try:
zone = self.region_zone_map[region]['zone'] zone = self.region_zone_map[region]["zone"]
zone_id = self._get_zone_id_by_name(zone) zone_id = self._get_zone_id_by_name(zone)
zone_api = formation_client.ZonesApi(self.formation_api_client) zone_api = formation_client.ZonesApi(self.formation_api_client)
zone_ = zone_api.zones_zone_id_get(zone_id) zone_ = zone_api.zones_zone_id_get(zone_id)
@ -490,7 +514,7 @@ class FormationPlugin(BaseDataSourcePlugin):
raise ApiClientError(e.msg) raise ApiClientError(e.msg)
if not zone_.dns: if not zone_.dns:
LOG.warn('Got None while running get domain name') LOG.warn("Got None while running get domain name")
return None return None
return zone_.dns return zone_.dns
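
Besides the quoting and wrapping changes, this file also replaces identity checks such as "len(vlans) is 0" and "len(vlan_.vlan.ipv4) is not 0" with value comparisons (== 0, != 0). A short, self-contained illustration of why that substitution matters:

    # "is" tests object identity, not equality; it only appears to work for
    # small integers because CPython caches them, and Python 3.8+ emits a
    # SyntaxWarning for "is" with a literal.
    n = 1000
    print((n + 1) is 1001)  # usually False: two distinct int objects
    print((n + 1) == 1001)  # True: value comparison, which is what is meant
    print(len([]) == 0)     # the reliable emptiness test used after this change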


@ -23,7 +23,7 @@ class NotEnoughIp(BaseError):
         self.total_nodes = total_nodes
 
     def display_error(self):
-        print('{} can not handle {} nodes'.format(self.cidr, self.total_nodes))
+        print("{} can not handle {} nodes".format(self.cidr, self.total_nodes))
 
 
 class NoSpecMatched(BaseError):
@ -31,5 +31,8 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs
 
     def display_error(self):
-        print('No spec matched. Following are the available specs:\n'.format(
-            self.specs))
+        print(
+            "No spec matched. Following are the available specs:\n".format(
+                self.specs
+            )
+        )
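
Incidentally, the message passed to .format() in display_error() contains no {} placeholder, so self.specs is silently dropped from the output. A minimal sketch of the presumably intended call (the spec data below is invented):

    specs = {"xl_spec": {"ipmi_sheet_name": "IPMI"}}
    # With a placeholder, the available specs are actually rendered:
    print("No spec matched. Following are the available specs:\n{}".format(specs))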


@ -20,17 +20,18 @@ import yaml
from openpyxl import load_workbook from openpyxl import load_workbook
from openpyxl import Workbook from openpyxl import Workbook
from spyglass.data_extractor.custom_exceptions import NoSpecMatched from spyglass.data_extractor.custom_exceptions import NoSpecMatched
# from spyglass.data_extractor.custom_exceptions # from spyglass.data_extractor.custom_exceptions
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
class ExcelParser(): class ExcelParser:
""" Parse data from excel into a dict """ """ Parse data from excel into a dict """
def __init__(self, file_name, excel_specs): def __init__(self, file_name, excel_specs):
self.file_name = file_name self.file_name = file_name
with open(excel_specs, 'r') as f: with open(excel_specs, "r") as f:
spec_raw_data = f.read() spec_raw_data = f.read()
self.excel_specs = yaml.safe_load(spec_raw_data) self.excel_specs = yaml.safe_load(spec_raw_data)
# A combined design spec, returns a workbok object after combining # A combined design spec, returns a workbok object after combining
@ -38,12 +39,12 @@ class ExcelParser():
combined_design_spec = self.combine_excel_design_specs(file_name) combined_design_spec = self.combine_excel_design_specs(file_name)
self.wb_combined = combined_design_spec self.wb_combined = combined_design_spec
self.filenames = file_name self.filenames = file_name
self.spec = 'xl_spec' self.spec = "xl_spec"
@staticmethod @staticmethod
def sanitize(string): def sanitize(string):
""" Remove extra spaces and convert string to lower case """ """ Remove extra spaces and convert string to lower case """
return string.replace(' ', '').lower() return string.replace(" ", "").lower()
def compare(self, string1, string2): def compare(self, string1, string2):
""" Compare the strings """ """ Compare the strings """
@ -52,19 +53,19 @@ class ExcelParser():
def validate_sheet(self, spec, sheet): def validate_sheet(self, spec, sheet):
""" Check if the sheet is correct or not """ """ Check if the sheet is correct or not """
ws = self.wb_combined[sheet] ws = self.wb_combined[sheet]
header_row = self.excel_specs['specs'][spec]['header_row'] header_row = self.excel_specs["specs"][spec]["header_row"]
ipmi_header = self.excel_specs['specs'][spec]['ipmi_address_header'] ipmi_header = self.excel_specs["specs"][spec]["ipmi_address_header"]
ipmi_column = self.excel_specs['specs'][spec]['ipmi_address_col'] ipmi_column = self.excel_specs["specs"][spec]["ipmi_address_col"]
header_value = ws.cell(row=header_row, column=ipmi_column).value header_value = ws.cell(row=header_row, column=ipmi_column).value
return bool(self.compare(ipmi_header, header_value)) return bool(self.compare(ipmi_header, header_value))
def find_correct_spec(self): def find_correct_spec(self):
""" Find the correct spec """ """ Find the correct spec """
for spec in self.excel_specs['specs']: for spec in self.excel_specs["specs"]:
sheet_name = self.excel_specs['specs'][spec]['ipmi_sheet_name'] sheet_name = self.excel_specs["specs"][spec]["ipmi_sheet_name"]
for sheet in self.wb_combined.sheetnames: for sheet in self.wb_combined.sheetnames:
if self.compare(sheet_name, sheet): if self.compare(sheet_name, sheet):
self.excel_specs['specs'][spec]['ipmi_sheet_name'] = sheet self.excel_specs["specs"][spec]["ipmi_sheet_name"] = sheet
if self.validate_sheet(spec, sheet): if self.validate_sheet(spec, sheet):
return spec return spec
raise NoSpecMatched(self.excel_specs) raise NoSpecMatched(self.excel_specs)
@ -73,31 +74,37 @@ class ExcelParser():
""" Read IPMI data from the sheet """ """ Read IPMI data from the sheet """
ipmi_data = {} ipmi_data = {}
hosts = [] hosts = []
provided_sheetname = self.excel_specs['specs'][self. provided_sheetname = self.excel_specs["specs"][self.spec][
spec]['ipmi_sheet_name'] "ipmi_sheet_name"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname) provided_sheetname
)
if workbook_object is not None: if workbook_object is not None:
ws = workbook_object[extracted_sheetname] ws = workbook_object[extracted_sheetname]
else: else:
ws = self.wb_combined[provided_sheetname] ws = self.wb_combined[provided_sheetname]
row = self.excel_specs['specs'][self.spec]['start_row'] row = self.excel_specs["specs"][self.spec]["start_row"]
end_row = self.excel_specs['specs'][self.spec]['end_row'] end_row = self.excel_specs["specs"][self.spec]["end_row"]
hostname_col = self.excel_specs['specs'][self.spec]['hostname_col'] hostname_col = self.excel_specs["specs"][self.spec]["hostname_col"]
ipmi_address_col = self.excel_specs['specs'][self. ipmi_address_col = self.excel_specs["specs"][self.spec][
spec]['ipmi_address_col'] "ipmi_address_col"
host_profile_col = self.excel_specs['specs'][self. ]
spec]['host_profile_col'] host_profile_col = self.excel_specs["specs"][self.spec][
ipmi_gateway_col = self.excel_specs['specs'][self. "host_profile_col"
spec]['ipmi_gateway_col'] ]
ipmi_gateway_col = self.excel_specs["specs"][self.spec][
"ipmi_gateway_col"
]
previous_server_gateway = None previous_server_gateway = None
while row <= end_row: while row <= end_row:
hostname = self.sanitize( hostname = self.sanitize(
ws.cell(row=row, column=hostname_col).value) ws.cell(row=row, column=hostname_col).value
)
hosts.append(hostname) hosts.append(hostname)
ipmi_address = ws.cell(row=row, column=ipmi_address_col).value ipmi_address = ws.cell(row=row, column=ipmi_address_col).value
if '/' in ipmi_address: if "/" in ipmi_address:
ipmi_address = ipmi_address.split('/')[0] ipmi_address = ipmi_address.split("/")[0]
ipmi_gateway = ws.cell(row=row, column=ipmi_gateway_col).value ipmi_gateway = ws.cell(row=row, column=ipmi_gateway_col).value
if ipmi_gateway: if ipmi_gateway:
previous_server_gateway = ipmi_gateway previous_server_gateway = ipmi_gateway
@ -106,32 +113,39 @@ class ExcelParser():
host_profile = ws.cell(row=row, column=host_profile_col).value host_profile = ws.cell(row=row, column=host_profile_col).value
try: try:
if host_profile is None: if host_profile is None:
raise RuntimeError("No value read from {} ".format( raise RuntimeError(
self.file_name) + "sheet:{} row:{}, col:{}".format( "No value read from {} ".format(self.file_name)
self.spec, row, host_profile_col)) + "sheet:{} row:{}, col:{}".format(
self.spec, row, host_profile_col
)
)
except RuntimeError as rerror: except RuntimeError as rerror:
LOG.critical(rerror) LOG.critical(rerror)
sys.exit("Tugboat exited!!") sys.exit("Tugboat exited!!")
ipmi_data[hostname] = { ipmi_data[hostname] = {
'ipmi_address': ipmi_address, "ipmi_address": ipmi_address,
'ipmi_gateway': ipmi_gateway, "ipmi_gateway": ipmi_gateway,
'host_profile': host_profile, "host_profile": host_profile,
'type': type, "type": type,
} }
row += 1 row += 1
LOG.debug("ipmi data extracted from excel:\n{}".format( LOG.debug(
pprint.pformat(ipmi_data))) "ipmi data extracted from excel:\n{}".format(
LOG.debug("host data extracted from excel:\n{}".format( pprint.pformat(ipmi_data)
pprint.pformat(hosts))) )
)
LOG.debug(
"host data extracted from excel:\n{}".format(pprint.pformat(hosts))
)
return [ipmi_data, hosts] return [ipmi_data, hosts]
def get_private_vlan_data(self, ws): def get_private_vlan_data(self, ws):
""" Get private vlan data from private IP sheet """ """ Get private vlan data from private IP sheet """
vlan_data = {} vlan_data = {}
row = self.excel_specs['specs'][self.spec]['vlan_start_row'] row = self.excel_specs["specs"][self.spec]["vlan_start_row"]
end_row = self.excel_specs['specs'][self.spec]['vlan_end_row'] end_row = self.excel_specs["specs"][self.spec]["vlan_end_row"]
type_col = self.excel_specs['specs'][self.spec]['net_type_col'] type_col = self.excel_specs["specs"][self.spec]["net_type_col"]
vlan_col = self.excel_specs['specs'][self.spec]['vlan_col'] vlan_col = self.excel_specs["specs"][self.spec]["vlan_col"]
while row <= end_row: while row <= end_row:
cell_value = ws.cell(row=row, column=type_col).value cell_value = ws.cell(row=row, column=type_col).value
if cell_value: if cell_value:
@ -140,27 +154,30 @@ class ExcelParser():
vlan = vlan.lower() vlan = vlan.lower()
vlan_data[vlan] = cell_value vlan_data[vlan] = cell_value
row += 1 row += 1
LOG.debug("vlan data extracted from excel:\n%s", LOG.debug(
pprint.pformat(vlan_data)) "vlan data extracted from excel:\n%s", pprint.pformat(vlan_data)
)
return vlan_data return vlan_data
def get_private_network_data(self): def get_private_network_data(self):
""" Read network data from the private ip sheet """ """ Read network data from the private ip sheet """
provided_sheetname = self.excel_specs['specs'][ provided_sheetname = self.excel_specs["specs"][self.spec][
self.spec]['private_ip_sheet'] "private_ip_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname) provided_sheetname
)
if workbook_object is not None: if workbook_object is not None:
ws = workbook_object[extracted_sheetname] ws = workbook_object[extracted_sheetname]
else: else:
ws = self.wb_combined[provided_sheetname] ws = self.wb_combined[provided_sheetname]
vlan_data = self.get_private_vlan_data(ws) vlan_data = self.get_private_vlan_data(ws)
network_data = {} network_data = {}
row = self.excel_specs['specs'][self.spec]['net_start_row'] row = self.excel_specs["specs"][self.spec]["net_start_row"]
end_row = self.excel_specs['specs'][self.spec]['net_end_row'] end_row = self.excel_specs["specs"][self.spec]["net_end_row"]
col = self.excel_specs['specs'][self.spec]['net_col'] col = self.excel_specs["specs"][self.spec]["net_col"]
vlan_col = self.excel_specs['specs'][self.spec]['net_vlan_col'] vlan_col = self.excel_specs["specs"][self.spec]["net_vlan_col"]
old_vlan = '' old_vlan = ""
while row <= end_row: while row <= end_row:
vlan = ws.cell(row=row, column=vlan_col).value vlan = ws.cell(row=row, column=vlan_col).value
if vlan: if vlan:
@ -168,11 +185,8 @@ class ExcelParser():
network = ws.cell(row=row, column=col).value network = ws.cell(row=row, column=col).value
if vlan and network: if vlan and network:
net_type = vlan_data[vlan] net_type = vlan_data[vlan]
if 'vlan' not in network_data: if "vlan" not in network_data:
network_data[net_type] = { network_data[net_type] = {"vlan": vlan, "subnet": []}
'vlan': vlan,
'subnet': [],
}
elif not vlan and network: elif not vlan and network:
# If vlan is not present then assign old vlan to vlan as vlan # If vlan is not present then assign old vlan to vlan as vlan
# value is spread over several rows # value is spread over several rows
@ -180,11 +194,11 @@ class ExcelParser():
else: else:
row += 1 row += 1
continue continue
network_data[vlan_data[vlan]]['subnet'].append(network) network_data[vlan_data[vlan]]["subnet"].append(network)
old_vlan = vlan old_vlan = vlan
row += 1 row += 1
for network in network_data: for network in network_data:
network_data[network]['is_common'] = True network_data[network]["is_common"] = True
""" """
if len(network_data[network]['subnet']) > 1: if len(network_data[network]['subnet']) > 1:
network_data[network]['is_common'] = False network_data[network]['is_common'] = False
@ -199,153 +213,167 @@ class ExcelParser():
def get_public_network_data(self): def get_public_network_data(self):
""" Read public network data from public ip data """ """ Read public network data from public ip data """
network_data = {} network_data = {}
provided_sheetname = self.excel_specs['specs'][self. provided_sheetname = self.excel_specs["specs"][self.spec][
spec]['public_ip_sheet'] "public_ip_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname) provided_sheetname
)
if workbook_object is not None: if workbook_object is not None:
ws = workbook_object[extracted_sheetname] ws = workbook_object[extracted_sheetname]
else: else:
ws = self.wb_combined[provided_sheetname] ws = self.wb_combined[provided_sheetname]
oam_row = self.excel_specs['specs'][self.spec]['oam_ip_row'] oam_row = self.excel_specs["specs"][self.spec]["oam_ip_row"]
oam_col = self.excel_specs['specs'][self.spec]['oam_ip_col'] oam_col = self.excel_specs["specs"][self.spec]["oam_ip_col"]
oam_vlan_col = self.excel_specs['specs'][self.spec]['oam_vlan_col'] oam_vlan_col = self.excel_specs["specs"][self.spec]["oam_vlan_col"]
ingress_row = self.excel_specs['specs'][self.spec]['ingress_ip_row'] ingress_row = self.excel_specs["specs"][self.spec]["ingress_ip_row"]
oob_row = self.excel_specs['specs'][self.spec]['oob_net_row'] oob_row = self.excel_specs["specs"][self.spec]["oob_net_row"]
col = self.excel_specs['specs'][self.spec]['oob_net_start_col'] col = self.excel_specs["specs"][self.spec]["oob_net_start_col"]
end_col = self.excel_specs['specs'][self.spec]['oob_net_end_col'] end_col = self.excel_specs["specs"][self.spec]["oob_net_end_col"]
network_data = { network_data = {
'oam': { "oam": {
'subnet': [ws.cell(row=oam_row, column=oam_col).value], "subnet": [ws.cell(row=oam_row, column=oam_col).value],
'vlan': ws.cell(row=oam_row, column=oam_vlan_col).value, "vlan": ws.cell(row=oam_row, column=oam_vlan_col).value,
}, },
'ingress': ws.cell(row=ingress_row, column=oam_col).value, "ingress": ws.cell(row=ingress_row, column=oam_col).value,
}
network_data['oob'] = {
'subnet': [],
} }
network_data["oob"] = {"subnet": []}
while col <= end_col: while col <= end_col:
cell_value = ws.cell(row=oob_row, column=col).value cell_value = ws.cell(row=oob_row, column=col).value
if cell_value: if cell_value:
network_data['oob']['subnet'].append(self.sanitize(cell_value)) network_data["oob"]["subnet"].append(self.sanitize(cell_value))
col += 1 col += 1
LOG.debug( LOG.debug(
"public network data extracted from\ "public network data extracted from\
excel:\n%s", pprint.pformat(network_data)) excel:\n%s",
pprint.pformat(network_data),
)
return network_data return network_data
def get_site_info(self): def get_site_info(self):
""" Read location, dns, ntp and ldap data""" """ Read location, dns, ntp and ldap data"""
site_info = {} site_info = {}
provided_sheetname = self.excel_specs['specs'][ provided_sheetname = self.excel_specs["specs"][self.spec][
self.spec]['dns_ntp_ldap_sheet'] "dns_ntp_ldap_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname) provided_sheetname
)
if workbook_object is not None: if workbook_object is not None:
ws = workbook_object[extracted_sheetname] ws = workbook_object[extracted_sheetname]
else: else:
ws = self.wb_combined[provided_sheetname] ws = self.wb_combined[provided_sheetname]
dns_row = self.excel_specs['specs'][self.spec]['dns_row'] dns_row = self.excel_specs["specs"][self.spec]["dns_row"]
dns_col = self.excel_specs['specs'][self.spec]['dns_col'] dns_col = self.excel_specs["specs"][self.spec]["dns_col"]
ntp_row = self.excel_specs['specs'][self.spec]['ntp_row'] ntp_row = self.excel_specs["specs"][self.spec]["ntp_row"]
ntp_col = self.excel_specs['specs'][self.spec]['ntp_col'] ntp_col = self.excel_specs["specs"][self.spec]["ntp_col"]
domain_row = self.excel_specs['specs'][self.spec]['domain_row'] domain_row = self.excel_specs["specs"][self.spec]["domain_row"]
domain_col = self.excel_specs['specs'][self.spec]['domain_col'] domain_col = self.excel_specs["specs"][self.spec]["domain_col"]
login_domain_row = self.excel_specs['specs'][self. login_domain_row = self.excel_specs["specs"][self.spec][
spec]['login_domain_row'] "login_domain_row"
ldap_col = self.excel_specs['specs'][self.spec]['ldap_col'] ]
global_group = self.excel_specs['specs'][self.spec]['global_group'] ldap_col = self.excel_specs["specs"][self.spec]["ldap_col"]
ldap_search_url_row = self.excel_specs['specs'][ global_group = self.excel_specs["specs"][self.spec]["global_group"]
self.spec]['ldap_search_url_row'] ldap_search_url_row = self.excel_specs["specs"][self.spec][
"ldap_search_url_row"
]
dns_servers = ws.cell(row=dns_row, column=dns_col).value dns_servers = ws.cell(row=dns_row, column=dns_col).value
ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value
try: try:
if dns_servers is None: if dns_servers is None:
raise RuntimeError( raise RuntimeError(
"No value for dns_server from:{} Sheet:'{}' Row:{} Col:{}". (
format(self.file_name, provided_sheetname, dns_row, "No value for dns_server from:{} Sheet:'{}' ",
dns_col)) "Row:{} Col:{}",
raise RuntimeError( ).format(
"No value for ntp_server frome:{} Sheet:'{}' Row:{} Col:{}" self.file_name, provided_sheetname, dns_row, dns_col
.format(self.file_name, provided_sheetname, ntp_row, )
ntp_col)) )
except RuntimeError as rerror: except RuntimeError as rerror:
LOG.critical(rerror) LOG.critical(rerror)
sys.exit("Tugboat exited!!") sys.exit("Tugboat exited!!")
dns_servers = dns_servers.replace('\n', ' ') dns_servers = dns_servers.replace("\n", " ")
ntp_servers = ntp_servers.replace('\n', ' ') ntp_servers = ntp_servers.replace("\n", " ")
if ',' in dns_servers: if "," in dns_servers:
dns_servers = dns_servers.split(',') dns_servers = dns_servers.split(",")
else: else:
dns_servers = dns_servers.split() dns_servers = dns_servers.split()
if ',' in ntp_servers: if "," in ntp_servers:
ntp_servers = ntp_servers.split(',') ntp_servers = ntp_servers.split(",")
else: else:
ntp_servers = ntp_servers.split() ntp_servers = ntp_servers.split()
site_info = { site_info = {
'location': self.get_location_data(), "location": self.get_location_data(),
'dns': dns_servers, "dns": dns_servers,
'ntp': ntp_servers, "ntp": ntp_servers,
'domain': ws.cell(row=domain_row, column=domain_col).value, "domain": ws.cell(row=domain_row, column=domain_col).value,
'ldap': { "ldap": {
'subdomain': ws.cell(row=login_domain_row, "subdomain": ws.cell(
column=ldap_col).value, row=login_domain_row, column=ldap_col
'common_name': ws.cell(row=global_group, ).value,
column=ldap_col).value, "common_name": ws.cell(
'url': ws.cell(row=ldap_search_url_row, column=ldap_col).value, row=global_group, column=ldap_col
} ).value,
"url": ws.cell(row=ldap_search_url_row, column=ldap_col).value,
},
} }
LOG.debug( LOG.debug(
"Site Info extracted from\ "Site Info extracted from\
excel:\n%s", pprint.pformat(site_info)) excel:\n%s",
pprint.pformat(site_info),
)
return site_info return site_info
def get_location_data(self): def get_location_data(self):
""" Read location data from the site and zone sheet """ """ Read location data from the site and zone sheet """
provided_sheetname = self.excel_specs['specs'][self. provided_sheetname = self.excel_specs["specs"][self.spec][
spec]['location_sheet'] "location_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname) provided_sheetname
)
if workbook_object is not None: if workbook_object is not None:
ws = workbook_object[extracted_sheetname] ws = workbook_object[extracted_sheetname]
else: else:
ws = self.wb_combined[provided_sheetname] ws = self.wb_combined[provided_sheetname]
corridor_row = self.excel_specs['specs'][self.spec]['corridor_row'] corridor_row = self.excel_specs["specs"][self.spec]["corridor_row"]
column = self.excel_specs['specs'][self.spec]['column'] column = self.excel_specs["specs"][self.spec]["column"]
site_name_row = self.excel_specs['specs'][self.spec]['site_name_row'] site_name_row = self.excel_specs["specs"][self.spec]["site_name_row"]
state_name_row = self.excel_specs['specs'][self.spec]['state_name_row'] state_name_row = self.excel_specs["specs"][self.spec]["state_name_row"]
country_name_row = self.excel_specs['specs'][self. country_name_row = self.excel_specs["specs"][self.spec][
spec]['country_name_row'] "country_name_row"
clli_name_row = self.excel_specs['specs'][self.spec]['clli_name_row'] ]
clli_name_row = self.excel_specs["specs"][self.spec]["clli_name_row"]
return { return {
'corridor': ws.cell(row=corridor_row, column=column).value, "corridor": ws.cell(row=corridor_row, column=column).value,
'name': ws.cell(row=site_name_row, column=column).value, "name": ws.cell(row=site_name_row, column=column).value,
'state': ws.cell(row=state_name_row, column=column).value, "state": ws.cell(row=state_name_row, column=column).value,
'country': ws.cell(row=country_name_row, column=column).value, "country": ws.cell(row=country_name_row, column=column).value,
'physical_location': ws.cell(row=clli_name_row, "physical_location": ws.cell(
column=column).value, row=clli_name_row, column=column
).value,
} }
def validate_sheet_names_with_spec(self): def validate_sheet_names_with_spec(self):
""" Checks is sheet name in spec file matches with excel file""" """ Checks is sheet name in spec file matches with excel file"""
spec = list(self.excel_specs['specs'].keys())[0] spec = list(self.excel_specs["specs"].keys())[0]
spec_item = self.excel_specs['specs'][spec] spec_item = self.excel_specs["specs"][spec]
sheet_name_list = [] sheet_name_list = []
ipmi_header_sheet_name = spec_item['ipmi_sheet_name'] ipmi_header_sheet_name = spec_item["ipmi_sheet_name"]
sheet_name_list.append(ipmi_header_sheet_name) sheet_name_list.append(ipmi_header_sheet_name)
private_ip_sheet_name = spec_item['private_ip_sheet'] private_ip_sheet_name = spec_item["private_ip_sheet"]
sheet_name_list.append(private_ip_sheet_name) sheet_name_list.append(private_ip_sheet_name)
public_ip_sheet_name = spec_item['public_ip_sheet'] public_ip_sheet_name = spec_item["public_ip_sheet"]
sheet_name_list.append(public_ip_sheet_name) sheet_name_list.append(public_ip_sheet_name)
dns_ntp_ldap_sheet_name = spec_item['dns_ntp_ldap_sheet'] dns_ntp_ldap_sheet_name = spec_item["dns_ntp_ldap_sheet"]
sheet_name_list.append(dns_ntp_ldap_sheet_name) sheet_name_list.append(dns_ntp_ldap_sheet_name)
location_sheet_name = spec_item['location_sheet'] location_sheet_name = spec_item["location_sheet"]
sheet_name_list.append(location_sheet_name) sheet_name_list.append(location_sheet_name)
try: try:
for sheetname in sheet_name_list: for sheetname in sheet_name_list:
workbook_object, extracted_sheetname = \ workbook_object, extracted_sheetname = (
self.get_xl_obj_and_sheetname(sheetname) self.get_xl_obj_and_sheetname(sheetname))
if workbook_object is not None: if workbook_object is not None:
wb = workbook_object wb = workbook_object
sheetname = extracted_sheetname sheetname = extracted_sheetname
@ -354,7 +382,8 @@ class ExcelParser():
if sheetname not in wb.sheetnames: if sheetname not in wb.sheetnames:
raise RuntimeError( raise RuntimeError(
"SheetName '{}' not found ".format(sheetname)) "SheetName '{}' not found ".format(sheetname)
)
except RuntimeError as rerror: except RuntimeError as rerror:
LOG.critical(rerror) LOG.critical(rerror)
sys.exit("Tugboat exited!!") sys.exit("Tugboat exited!!")
@ -369,16 +398,18 @@ class ExcelParser():
public_network_data = self.get_public_network_data() public_network_data = self.get_public_network_data()
site_info_data = self.get_site_info() site_info_data = self.get_site_info()
data = { data = {
'ipmi_data': ipmi_data, "ipmi_data": ipmi_data,
'network_data': { "network_data": {
'private': network_data, "private": network_data,
'public': public_network_data, "public": public_network_data,
}, },
'site_info': site_info_data, "site_info": site_info_data,
} }
LOG.debug( LOG.debug(
"Location data extracted from\ "Location data extracted from\
excel:\n%s", pprint.pformat(data)) excel:\n%s",
pprint.pformat(data),
)
return data return data
def combine_excel_design_specs(self, filenames): def combine_excel_design_specs(self, filenames):
@ -391,8 +422,9 @@ class ExcelParser():
loaded_workbook_ws = loaded_workbook[names] loaded_workbook_ws = loaded_workbook[names]
for row in loaded_workbook_ws: for row in loaded_workbook_ws:
for cell in row: for cell in row:
design_spec_worksheet[cell. design_spec_worksheet[
coordinate].value = cell.value cell.coordinate
].value = cell.value
return design_spec return design_spec
def get_xl_obj_and_sheetname(self, sheetname): def get_xl_obj_and_sheetname(self, sheetname):
@ -400,10 +432,10 @@ class ExcelParser():
The logic confirms if the sheetname is specified for example as: The logic confirms if the sheetname is specified for example as:
"MTN57a_AEC_Network_Design_v1.6.xlsx:Public IPs" "MTN57a_AEC_Network_Design_v1.6.xlsx:Public IPs"
""" """
if (re.search('.xlsx', sheetname) or re.search('.xls', sheetname)): if re.search(".xlsx", sheetname) or re.search(".xls", sheetname):
""" Extract file name """ """ Extract file name """
source_xl_file = sheetname.split(':')[0] source_xl_file = sheetname.split(":")[0]
wb = load_workbook(source_xl_file, data_only=True) wb = load_workbook(source_xl_file, data_only=True)
return [wb, sheetname.split(':')[1]] return [wb, sheetname.split(":")[1]]
else: else:
return [None, sheetname] return [None, sheetname]
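
get_xl_obj_and_sheetname() accepts either a plain sheet name or a "<workbook>.xlsx:<sheet>" reference, as the docstring above notes. A self-contained sketch of just that splitting convention (the file name is hypothetical and no workbook is opened here):

    import re

    def split_sheet_reference(sheetname):
        # "design.xlsx:Public IPs" -> ("design.xlsx", "Public IPs")
        # "Public IPs"             -> (None, "Public IPs")
        if re.search(r"\.xlsx?", sheetname) and ":" in sheetname:
            workbook_file, sheet = sheetname.split(":", 1)
            return workbook_file, sheet
        return None, sheetname

    print(split_sheet_reference("design.xlsx:Public IPs"))
    print(split_sheet_reference("Public IPs"))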


@ -25,8 +25,8 @@ LOG = logging.getLogger(__name__)
class TugboatPlugin(BaseDataSourcePlugin): class TugboatPlugin(BaseDataSourcePlugin):
def __init__(self, region): def __init__(self, region):
LOG.info("Tugboat Initializing") LOG.info("Tugboat Initializing")
self.source_type = 'excel' self.source_type = "excel"
self.source_name = 'tugboat' self.source_name = "tugboat"
# Configuration parameters # Configuration parameters
self.excel_path = None self.excel_path = None
@ -52,8 +52,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
Each plugin will have their own config opts. Each plugin will have their own config opts.
""" """
self.excel_path = conf['excel_path'] self.excel_path = conf["excel_path"]
self.excel_spec = conf['excel_spec'] self.excel_spec = conf["excel_spec"]
# Extract raw data from excel sheets # Extract raw data from excel sheets
self._get_excel_obj() self._get_excel_obj()
@ -69,18 +69,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
written as an additional safeguard. written as an additional safeguard.
""" """
try: try:
assert (len( assert len(kwargs["excel"]), "Engineering Spec file not specified"
kwargs['excel'])), "Engineering Spec file not specified" excel_file_info = kwargs["excel"]
excel_file_info = kwargs['excel'] assert (
assert (kwargs['excel_spec'] kwargs["excel_spec"]
) is not None, "Excel Spec file not specified" ) is not None, "Excel Spec file not specified"
excel_spec_info = kwargs['excel_spec'] excel_spec_info = kwargs["excel_spec"]
except AssertionError as e: except AssertionError as e:
LOG.error("{}:Spyglass exited!".format(e)) LOG.error("{}:Spyglass exited!".format(e))
exit() exit()
plugin_conf = { plugin_conf = {
'excel_path': excel_file_info, "excel_path": excel_file_info,
'excel_spec': excel_spec_info "excel_spec": excel_spec_info,
} }
return plugin_conf return plugin_conf
@ -103,19 +103,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
] ]
""" """
LOG.info("Get Host Information") LOG.info("Get Host Information")
ipmi_data = self.parsed_xl_data['ipmi_data'][0] ipmi_data = self.parsed_xl_data["ipmi_data"][0]
rackwise_hosts = self._get_rackwise_hosts() rackwise_hosts = self._get_rackwise_hosts()
host_list = [] host_list = []
for rack in rackwise_hosts.keys(): for rack in rackwise_hosts.keys():
for host in rackwise_hosts[rack]: for host in rackwise_hosts[rack]:
host_list.append({ host_list.append(
'rack_name': {
rack, "rack_name": rack,
'name': "name": host,
host, "host_profile": ipmi_data[host]["host_profile"],
'host_profile': }
ipmi_data[host]['host_profile'] )
})
return host_list return host_list
def get_networks(self, region): def get_networks(self, region):
@ -123,39 +122,44 @@ class TugboatPlugin(BaseDataSourcePlugin):
vlan_list = [] vlan_list = []
# Network data extracted from xl is formatted to have a predictable # Network data extracted from xl is formatted to have a predictable
# data type. For e.g VlAN 45 extracted from xl is formatted as 45 # data type. For e.g VlAN 45 extracted from xl is formatted as 45
vlan_pattern = r'\d+' vlan_pattern = r"\d+"
private_net = self.parsed_xl_data['network_data']['private'] private_net = self.parsed_xl_data["network_data"]["private"]
public_net = self.parsed_xl_data['network_data']['public'] public_net = self.parsed_xl_data["network_data"]["public"]
# Extract network information from private and public network data # Extract network information from private and public network data
for net_type, net_val in itertools.chain(private_net.items(), for net_type, net_val in itertools.chain(
public_net.items()): private_net.items(), public_net.items()
):
tmp_vlan = {} tmp_vlan = {}
# Ingress is special network that has no vlan, only a subnet string # Ingress is special network that has no vlan, only a subnet string
# So treatment for ingress is different # So treatment for ingress is different
if net_type is not 'ingress': if net_type != "ingress":
# standardize the network name as net_type may ne different. # standardize the network name as net_type may ne different.
# For e.g insteas of pxe it may be PXE or instead of calico # For e.g insteas of pxe it may be PXE or instead of calico
# it may be ksn. Valid network names are pxe, calico, oob, oam, # it may be ksn. Valid network names are pxe, calico, oob, oam,
# overlay, storage, ingress # overlay, storage, ingress
tmp_vlan['name'] = self._get_network_name_from_vlan_name( tmp_vlan["name"] = self._get_network_name_from_vlan_name(
net_type) net_type
)
# extract vlan tag. It was extracted from xl file as 'VlAN 45' # extract vlan tag. It was extracted from xl file as 'VlAN 45'
# The code below extracts the numeric data fron net_val['vlan'] # The code below extracts the numeric data fron net_val['vlan']
if net_val.get('vlan', "") is not "": if net_val.get("vlan", "") != "":
value = re.findall(vlan_pattern, net_val['vlan']) value = re.findall(vlan_pattern, net_val["vlan"])
tmp_vlan['vlan'] = value[0] tmp_vlan["vlan"] = value[0]
else: else:
tmp_vlan['vlan'] = "#CHANGE_ME" tmp_vlan["vlan"] = "#CHANGE_ME"
tmp_vlan['subnet'] = net_val.get('subnet', "#CHANGE_ME") tmp_vlan["subnet"] = net_val.get("subnet", "#CHANGE_ME")
tmp_vlan['gateway'] = net_val.get('gateway', "#CHANGE_ME") tmp_vlan["gateway"] = net_val.get("gateway", "#CHANGE_ME")
else: else:
tmp_vlan['name'] = 'ingress' tmp_vlan["name"] = "ingress"
tmp_vlan['subnet'] = net_val tmp_vlan["subnet"] = net_val
vlan_list.append(tmp_vlan) vlan_list.append(tmp_vlan)
LOG.debug("vlan list extracted from tugboat:\n{}".format( LOG.debug(
pprint.pformat(vlan_list))) "vlan list extracted from tugboat:\n{}".format(
pprint.pformat(vlan_list)
)
)
return vlan_list return vlan_list
def get_ips(self, region, host=None): def get_ips(self, region, host=None):
@ -172,33 +176,34 @@ class TugboatPlugin(BaseDataSourcePlugin):
""" """
ip_ = {} ip_ = {}
ipmi_data = self.parsed_xl_data['ipmi_data'][0] ipmi_data = self.parsed_xl_data["ipmi_data"][0]
ip_[host] = { ip_[host] = {
'oob': ipmi_data[host].get('ipmi_address', '#CHANGE_ME'), "oob": ipmi_data[host].get("ipmi_address", "#CHANGE_ME"),
'oam': ipmi_data[host].get('oam', '#CHANGE_ME'), "oam": ipmi_data[host].get("oam", "#CHANGE_ME"),
'calico': ipmi_data[host].get('calico', '#CHANGE_ME'), "calico": ipmi_data[host].get("calico", "#CHANGE_ME"),
'overlay': ipmi_data[host].get('overlay', '#CHANGE_ME'), "overlay": ipmi_data[host].get("overlay", "#CHANGE_ME"),
'pxe': ipmi_data[host].get('pxe', '#CHANGE_ME'), "pxe": ipmi_data[host].get("pxe", "#CHANGE_ME"),
'storage': ipmi_data[host].get('storage', '#CHANGE_ME') "storage": ipmi_data[host].get("storage", "#CHANGE_ME"),
} }
return ip_ return ip_
def get_ldap_information(self, region): def get_ldap_information(self, region):
""" Extract ldap information from excel""" """ Extract ldap information from excel"""
ldap_raw_data = self.parsed_xl_data['site_info']['ldap'] ldap_raw_data = self.parsed_xl_data["site_info"]["ldap"]
ldap_info = {} ldap_info = {}
# raw url is 'url: ldap://example.com' so we are converting to # raw url is 'url: ldap://example.com' so we are converting to
# 'ldap://example.com' # 'ldap://example.com'
url = ldap_raw_data.get('url', '#CHANGE_ME') url = ldap_raw_data.get("url", "#CHANGE_ME")
try: try:
ldap_info['url'] = url.split(' ')[1] ldap_info["url"] = url.split(" ")[1]
ldap_info['domain'] = url.split('.')[1] ldap_info["domain"] = url.split(".")[1]
except IndexError as e: except IndexError as e:
LOG.error("url.split:{}".format(e)) LOG.error("url.split:{}".format(e))
ldap_info['common_name'] = ldap_raw_data.get('common_name', ldap_info["common_name"] = ldap_raw_data.get(
'#CHANGE_ME') "common_name", "#CHANGE_ME"
ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '#CHANGE_ME') )
ldap_info["subdomain"] = ldap_raw_data.get("subdomain", "#CHANGE_ME")
return ldap_info return ldap_info
@ -206,41 +211,44 @@ class TugboatPlugin(BaseDataSourcePlugin):
""" Returns a comma separated list of ntp ip addresses""" """ Returns a comma separated list of ntp ip addresses"""
ntp_server_list = self._get_formatted_server_list( ntp_server_list = self._get_formatted_server_list(
self.parsed_xl_data['site_info']['ntp']) self.parsed_xl_data["site_info"]["ntp"]
)
return ntp_server_list return ntp_server_list
def get_dns_servers(self, region): def get_dns_servers(self, region):
""" Returns a comma separated list of dns ip addresses""" """ Returns a comma separated list of dns ip addresses"""
dns_server_list = self._get_formatted_server_list( dns_server_list = self._get_formatted_server_list(
self.parsed_xl_data['site_info']['dns']) self.parsed_xl_data["site_info"]["dns"]
)
return dns_server_list return dns_server_list
def get_domain_name(self, region): def get_domain_name(self, region):
""" Returns domain name extracted from excel file""" """ Returns domain name extracted from excel file"""
return self.parsed_xl_data['site_info']['domain'] return self.parsed_xl_data["site_info"]["domain"]
def get_location_information(self, region): def get_location_information(self, region):
""" """
Prepare location data from information extracted Prepare location data from information extracted
by ExcelParser(i.e raw data) by ExcelParser(i.e raw data)
""" """
location_data = self.parsed_xl_data['site_info']['location'] location_data = self.parsed_xl_data["site_info"]["location"]
corridor_pattern = r'\d+' corridor_pattern = r"\d+"
corridor_number = re.findall(corridor_pattern, corridor_number = re.findall(
location_data['corridor'])[0] corridor_pattern, location_data["corridor"]
name = location_data.get('name', '#CHANGE_ME') )[0]
state = location_data.get('state', '#CHANGE_ME') name = location_data.get("name", "#CHANGE_ME")
country = location_data.get('country', '#CHANGE_ME') state = location_data.get("state", "#CHANGE_ME")
physical_location_id = location_data.get('physical_location', '') country = location_data.get("country", "#CHANGE_ME")
physical_location_id = location_data.get("physical_location", "")
return { return {
'name': name, "name": name,
'physical_location_id': physical_location_id, "physical_location_id": physical_location_id,
'state': state, "state": state,
'country': country, "country": country,
'corridor': 'c{}'.format(corridor_number), "corridor": "c{}".format(corridor_number),
} }
def get_racks(self, region): def get_racks(self, region):
@ -277,29 +285,35 @@ class TugboatPlugin(BaseDataSourcePlugin):
vlan_name contains "pxe" the network name is "pxe" vlan_name contains "pxe" the network name is "pxe"
""" """
network_names = [ network_names = [
'ksn|calico', 'storage', 'oam|server', 'ovs|overlay', 'oob', 'pxe' "ksn|calico",
"storage",
"oam|server",
"ovs|overlay",
"oob",
"pxe",
] ]
for name in network_names: for name in network_names:
# Make a pattern that would ignore case. # Make a pattern that would ignore case.
# if name is 'ksn' pattern name is '(?i)(ksn)' # if name is 'ksn' pattern name is '(?i)(ksn)'
name_pattern = "(?i)({})".format(name) name_pattern = "(?i)({})".format(name)
if re.search(name_pattern, vlan_name): if re.search(name_pattern, vlan_name):
if name is 'ksn|calico': if name == "ksn|calico":
return 'calico' return "calico"
if name is 'storage': if name == "storage":
return 'storage' return "storage"
if name is 'oam|server': if name == "oam|server":
return 'oam' return "oam"
if name is 'ovs|overlay': if name == "ovs|overlay":
return 'overlay' return "overlay"
if name is 'oob': if name == "oob":
return 'oob' return "oob"
if name is 'pxe': if name == "pxe":
return 'pxe' return "pxe"
# if nothing matches # if nothing matches
LOG.error( LOG.error(
"Unable to recognize VLAN name extracted from Plugin data source") "Unable to recognize VLAN name extracted from Plugin data source"
return ("") )
return ""
def _get_formatted_server_list(self, server_list): def _get_formatted_server_list(self, server_list):
""" Format dns and ntp server list as comma separated string """ """ Format dns and ntp server list as comma separated string """
@ -309,9 +323,9 @@ class TugboatPlugin(BaseDataSourcePlugin):
# The function returns a list of comma separated dns ip addresses # The function returns a list of comma separated dns ip addresses
servers = [] servers = []
for data in server_list: for data in server_list:
if '(' not in data: if "(" not in data:
servers.append(data) servers.append(data)
formatted_server_list = ','.join(servers) formatted_server_list = ",".join(servers)
return formatted_server_list return formatted_server_list
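A short, hypothetical example of the formatting above; entries containing a parenthesis (typically spreadsheet annotations) are skipped before joining:

server_list = ["10.0.0.5", "10.0.0.6", "(secondary - TBD)"]   # hypothetical cells
servers = [data for data in server_list if "(" not in data]
print(",".join(servers))   # 10.0.0.5,10.0.0.6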
def _get_rack(self, host): def _get_rack(self, host):
@ -319,7 +333,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
Get rack id from the rack string extracted Get rack id from the rack string extracted
from xl from xl
""" """
rack_pattern = r'\w.*(r\d+)\w.*' rack_pattern = r"\w.*(r\d+)\w.*"
rack = re.findall(rack_pattern, host)[0] rack = re.findall(rack_pattern, host)[0]
if not self.region: if not self.region:
self.region = host.split(rack)[0] self.region = host.split(rack)[0]
@ -328,7 +342,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
def _get_rackwise_hosts(self): def _get_rackwise_hosts(self):
""" Mapping hosts with rack ids """ """ Mapping hosts with rack ids """
rackwise_hosts = {} rackwise_hosts = {}
hostnames = self.parsed_xl_data['ipmi_data'][1] hostnames = self.parsed_xl_data["ipmi_data"][1]
racks = self._get_rack_data() racks = self._get_rack_data()
for rack in racks: for rack in racks:
if rack not in rackwise_hosts: if rack not in rackwise_hosts:
@ -343,8 +357,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
""" Format rack name """ """ Format rack name """
LOG.info("Getting rack data") LOG.info("Getting rack data")
racks = {} racks = {}
hostnames = self.parsed_xl_data['ipmi_data'][1] hostnames = self.parsed_xl_data["ipmi_data"][1]
for host in hostnames: for host in hostnames:
rack = self._get_rack(host) rack = self._get_rack(host)
racks[rack] = rack.replace('r', 'rack') racks[rack] = rack.replace("r", "rack")
return racks return racks


@ -28,7 +28,7 @@ import yaml
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
class ProcessDataSource(): class ProcessDataSource:
def __init__(self, sitetype): def __init__(self, sitetype):
# Initialize intermediary and save site type # Initialize intermediary and save site type
self._initialize_intermediary() self._initialize_intermediary()
@ -36,18 +36,18 @@ class ProcessDataSource():
@staticmethod @staticmethod
def _read_file(file_name): def _read_file(file_name):
with open(file_name, 'r') as f: with open(file_name, "r") as f:
raw_data = f.read() raw_data = f.read()
return raw_data return raw_data
def _initialize_intermediary(self): def _initialize_intermediary(self):
self.host_type = {} self.host_type = {}
self.data = { self.data = {
'network': {}, "network": {},
'baremetal': {}, "baremetal": {},
'region_name': '', "region_name": "",
'storage': {}, "storage": {},
'site_info': {}, "site_info": {},
} }
self.sitetype = None self.sitetype = None
self.genesis_node = None self.genesis_node = None
@ -62,37 +62,43 @@ class ProcessDataSource():
we assign only the first subnet """ we assign only the first subnet """
LOG.info("Extracting network subnets") LOG.info("Extracting network subnets")
network_subnets = {} network_subnets = {}
for net_type in self.data['network']['vlan_network_data']: for net_type in self.data["network"]["vlan_network_data"]:
# One of the types is ingress and we don't want that here # One of the types is ingress and we don't want that here
if (net_type != 'ingress'): if net_type != "ingress":
network_subnets[net_type] = netaddr.IPNetwork( network_subnets[net_type] = netaddr.IPNetwork(
self.data['network']['vlan_network_data'][net_type] self.data["network"]["vlan_network_data"][net_type][
['subnet'][0]) "subnet"
][0]
)
LOG.debug("Network subnets:\n{}".format( LOG.debug(
pprint.pformat(network_subnets))) "Network subnets:\n{}".format(pprint.pformat(network_subnets))
)
return network_subnets return network_subnets
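A hypothetical illustration of the extraction above; only the first subnet of each non-ingress network is kept, as a netaddr.IPNetwork object:

import netaddr

vlan_network_data = {                           # hypothetical intermediary slice
    "pxe": {"subnet": ["172.30.1.0/25"]},
    "oam": {"subnet": ["10.0.220.0/24"]},
    "ingress": {"subnet": ["10.0.230.0/28"]},   # skipped by the loop
}
network_subnets = {
    net: netaddr.IPNetwork(data["subnet"][0])
    for net, data in vlan_network_data.items()
    if net != "ingress"
}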
def _get_genesis_node_details(self): def _get_genesis_node_details(self):
# Get genesis host node details from the hosts based on host type # Get genesis host node details from the hosts based on host type
for racks in self.data['baremetal'].keys(): for racks in self.data["baremetal"].keys():
rack_hosts = self.data['baremetal'][racks] rack_hosts = self.data["baremetal"][racks]
for host in rack_hosts: for host in rack_hosts:
if rack_hosts[host]['type'] == 'genesis': if rack_hosts[host]["type"] == "genesis":
self.genesis_node = rack_hosts[host] self.genesis_node = rack_hosts[host]
self.genesis_node['name'] = host self.genesis_node["name"] = host
LOG.debug("Genesis Node Details:\n{}".format( LOG.debug(
pprint.pformat(self.genesis_node))) "Genesis Node Details:\n{}".format(
pprint.pformat(self.genesis_node)
)
)
def _get_genesis_node_ip(self): def _get_genesis_node_ip(self):
""" Returns the genesis node ip """ """ Returns the genesis node ip """
ip = '0.0.0.0' ip = "0.0.0.0"
LOG.info("Getting Genesis Node IP") LOG.info("Getting Genesis Node IP")
if not self.genesis_node: if not self.genesis_node:
self._get_genesis_node_details() self._get_genesis_node_details()
ips = self.genesis_node.get('ip', '') ips = self.genesis_node.get("ip", "")
if ips: if ips:
ip = ips.get('oam', '0.0.0.0') ip = ips.get("oam", "0.0.0.0")
return ip return ip
def _validate_intermediary_data(self, data): def _validate_intermediary_data(self, data):
@ -103,21 +109,21 @@ class ProcessDataSource():
The method validates this with regex pattern defined for each The method validates this with regex pattern defined for each
data type. data type.
""" """
LOG.info('Validating Intermediary data') LOG.info("Validating Intermediary data")
temp_data = {} temp_data = {}
# Performing a deep copy # Performing a deep copy
temp_data = copy.deepcopy(data) temp_data = copy.deepcopy(data)
# Converting baremetal dict to list. # Converting baremetal dict to list.
baremetal_list = [] baremetal_list = []
for rack in temp_data['baremetal'].keys(): for rack in temp_data["baremetal"].keys():
temp = [{k: v} for k, v in temp_data['baremetal'][rack].items()] temp = [{k: v} for k, v in temp_data["baremetal"][rack].items()]
baremetal_list = baremetal_list + temp baremetal_list = baremetal_list + temp
temp_data['baremetal'] = baremetal_list temp_data["baremetal"] = baremetal_list
schema_dir = pkg_resources.resource_filename('spyglass', 'schemas/') schema_dir = pkg_resources.resource_filename("spyglass", "schemas/")
schema_file = schema_dir + "data_schema.json" schema_file = schema_dir + "data_schema.json"
json_data = json.loads(json.dumps(temp_data)) json_data = json.loads(json.dumps(temp_data))
with open(schema_file, 'r') as f: with open(schema_file, "r") as f:
json_schema = json.load(f) json_schema = json.load(f)
try: try:
# Suppressing writing of data2.json. Can use it for debugging # Suppressing writing of data2.json. Can use it for debugging
@ -152,14 +158,14 @@ class ProcessDataSource():
based on rule name and applies them to appropriate data objects. based on rule name and applies them to appropriate data objects.
""" """
LOG.info("Apply design rules") LOG.info("Apply design rules")
rules_dir = pkg_resources.resource_filename('spyglass', 'config/') rules_dir = pkg_resources.resource_filename("spyglass", "config/")
rules_file = rules_dir + 'rules.yaml' rules_file = rules_dir + "rules.yaml"
rules_data_raw = self._read_file(rules_file) rules_data_raw = self._read_file(rules_file)
rules_yaml = yaml.safe_load(rules_data_raw) rules_yaml = yaml.safe_load(rules_data_raw)
rules_data = {} rules_data = {}
rules_data.update(rules_yaml) rules_data.update(rules_yaml)
for rule in rules_data.keys(): for rule in rules_data.keys():
rule_name = rules_data[rule]['name'] rule_name = rules_data[rule]["name"]
function_str = "_apply_rule_" + rule_name function_str = "_apply_rule_" + rule_name
rule_data_name = rules_data[rule][rule_name] rule_data_name = rules_data[rule][rule_name]
function = getattr(self, function_str) function = getattr(self, function_str)
@ -182,23 +188,25 @@ class ProcessDataSource():
compute or controller based on host_profile. For defining 'genesis' compute or controller based on host_profile. For defining 'genesis'
the first controller host is defined as genesis.""" the first controller host is defined as genesis."""
is_genesis = False is_genesis = False
hardware_profile = rule_data[self.data['site_info']['sitetype']] hardware_profile = rule_data[self.data["site_info"]["sitetype"]]
# Getting individual racks. The racks are sorted to ensure that the # Getting individual racks. The racks are sorted to ensure that the
# first controller of the first rack is assigned as 'genesis' node. # first controller of the first rack is assigned as 'genesis' node.
for rack in sorted(self.data['baremetal'].keys()): for rack in sorted(self.data["baremetal"].keys()):
# Getting individual hosts in each rack. Sorting of the hosts is # Getting individual hosts in each rack. Sorting of the hosts is
# done to determine the genesis node. # done to determine the genesis node.
for host in sorted(self.data['baremetal'][rack].keys()): for host in sorted(self.data["baremetal"][rack].keys()):
host_info = self.data['baremetal'][rack][host] host_info = self.data["baremetal"][rack][host]
if (host_info['host_profile'] == hardware_profile[ if (
'profile_name']['ctrl']): host_info["host_profile"]
== hardware_profile["profile_name"]["ctrl"]
):
if not is_genesis: if not is_genesis:
host_info['type'] = 'genesis' host_info["type"] = "genesis"
is_genesis = True is_genesis = True
else: else:
host_info['type'] = 'controller' host_info["type"] = "controller"
else: else:
host_info['type'] = 'compute' host_info["type"] = "compute"
def _apply_rule_ip_alloc_offset(self, rule_data): def _apply_rule_ip_alloc_offset(self, rule_data):
""" Apply offset rules to update baremetal host ip's and vlan network """ Apply offset rules to update baremetal host ip's and vlan network
@ -219,21 +227,24 @@ class ProcessDataSource():
If a particular ip exists it is overridden.""" If a particular ip exists it is overridden."""
# Get default ip offset # Get default ip offset
default_ip_offset = rule_data['default'] default_ip_offset = rule_data["default"]
host_idx = 0 host_idx = 0
LOG.info("Update baremetal host ip's") LOG.info("Update baremetal host ip's")
for racks in self.data['baremetal'].keys(): for racks in self.data["baremetal"].keys():
rack_hosts = self.data['baremetal'][racks] rack_hosts = self.data["baremetal"][racks]
for host in rack_hosts: for host in rack_hosts:
host_networks = rack_hosts[host]['ip'] host_networks = rack_hosts[host]["ip"]
for net in host_networks: for net in host_networks:
ips = list(self.network_subnets[net]) ips = list(self.network_subnets[net])
host_networks[net] = str(ips[host_idx + default_ip_offset]) host_networks[net] = str(ips[host_idx + default_ip_offset])
host_idx = host_idx + 1 host_idx = host_idx + 1
LOG.debug("Updated baremetal host:\n{}".format( LOG.debug(
pprint.pformat(self.data['baremetal']))) "Updated baremetal host:\n{}".format(
pprint.pformat(self.data["baremetal"])
)
)
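A worked, hypothetical example of the offset rule above; with a default offset of 10 (the real value comes from rules.yaml), hosts receive consecutive addresses starting at that offset within each network's subnet, in iteration order:

import netaddr

default_ip_offset = 10                         # hypothetical rules.yaml value
ips = list(netaddr.IPNetwork("10.0.220.0/24"))
print(str(ips[0 + default_ip_offset]))         # first host  -> 10.0.220.10
print(str(ips[1 + default_ip_offset]))         # second host -> 10.0.220.11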
def _update_vlan_net_data(self, rule_data): def _update_vlan_net_data(self, rule_data):
""" Offset allocation rules to determine ip address range(s) """ Offset allocation rules to determine ip address range(s)
@ -245,31 +256,37 @@ class ProcessDataSource():
LOG.info("Apply network design rules") LOG.info("Apply network design rules")
# Collect Rules # Collect Rules
default_ip_offset = rule_data['default'] default_ip_offset = rule_data["default"]
oob_ip_offset = rule_data['oob'] oob_ip_offset = rule_data["oob"]
gateway_ip_offset = rule_data['gateway'] gateway_ip_offset = rule_data["gateway"]
ingress_vip_offset = rule_data['ingress_vip'] ingress_vip_offset = rule_data["ingress_vip"]
# static_ip_end_offset for non pxe network # static_ip_end_offset for non pxe network
static_ip_end_offset = rule_data['static_ip_end'] static_ip_end_offset = rule_data["static_ip_end"]
# dhcp_ip_end_offset for pxe network # dhcp_ip_end_offset for pxe network
dhcp_ip_end_offset = rule_data['dhcp_ip_end'] dhcp_ip_end_offset = rule_data["dhcp_ip_end"]
# Set ingress vip and CIDR for bgp # Set ingress vip and CIDR for bgp
LOG.info("Apply network design rules:bgp") LOG.info("Apply network design rules:bgp")
subnet = netaddr.IPNetwork( subnet = netaddr.IPNetwork(
self.data['network']['vlan_network_data']['ingress']['subnet'][0]) self.data["network"]["vlan_network_data"]["ingress"]["subnet"][0]
)
ips = list(subnet) ips = list(subnet)
self.data['network']['bgp']['ingress_vip'] = str( self.data["network"]["bgp"]["ingress_vip"] = str(
ips[ingress_vip_offset]) ips[ingress_vip_offset]
self.data['network']['bgp']['public_service_cidr'] = self.data[ )
'network']['vlan_network_data']['ingress']['subnet'][0] self.data["network"]["bgp"]["public_service_cidr"] = self.data[
LOG.debug("Updated network bgp data:\n{}".format( "network"
pprint.pformat(self.data['network']['bgp']))) ]["vlan_network_data"]["ingress"]["subnet"][0]
LOG.debug(
"Updated network bgp data:\n{}".format(
pprint.pformat(self.data["network"]["bgp"])
)
)
LOG.info("Apply network design rules:vlan") LOG.info("Apply network design rules:vlan")
# Apply rules to vlan networks # Apply rules to vlan networks
for net_type in self.network_subnets: for net_type in self.network_subnets:
if net_type == 'oob': if net_type == "oob":
ip_offset = oob_ip_offset ip_offset = oob_ip_offset
else: else:
ip_offset = default_ip_offset ip_offset = default_ip_offset
@ -277,49 +294,60 @@ class ProcessDataSource():
subnet = self.network_subnets[net_type] subnet = self.network_subnets[net_type]
ips = list(subnet) ips = list(subnet)
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'gateway'] = str(ips[gateway_ip_offset]) "gateway"
] = str(ips[gateway_ip_offset])
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'reserved_start'] = str(ips[1]) "reserved_start"
self.data['network']['vlan_network_data'][net_type][ ] = str(ips[1])
'reserved_end'] = str(ips[ip_offset]) self.data["network"]["vlan_network_data"][net_type][
"reserved_end"
] = str(ips[ip_offset])
static_start = str(ips[ip_offset + 1]) static_start = str(ips[ip_offset + 1])
static_end = str(ips[static_ip_end_offset]) static_end = str(ips[static_ip_end_offset])
if net_type == 'pxe': if net_type == "pxe":
mid = len(ips) // 2 mid = len(ips) // 2
static_end = str(ips[mid - 1]) static_end = str(ips[mid - 1])
dhcp_start = str(ips[mid]) dhcp_start = str(ips[mid])
dhcp_end = str(ips[dhcp_ip_end_offset]) dhcp_end = str(ips[dhcp_ip_end_offset])
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'dhcp_start'] = dhcp_start "dhcp_start"
self.data['network']['vlan_network_data'][net_type][ ] = dhcp_start
'dhcp_end'] = dhcp_end self.data["network"]["vlan_network_data"][net_type][
"dhcp_end"
] = dhcp_end
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'static_start'] = static_start "static_start"
self.data['network']['vlan_network_data'][net_type][ ] = static_start
'static_end'] = static_end self.data["network"]["vlan_network_data"][net_type][
"static_end"
] = static_end
# There is no vlan for oob network # There is no vlan for oob network
if (net_type != 'oob'): if net_type != "oob":
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'vlan'] = self.data['network']['vlan_network_data'][ "vlan"
net_type]['vlan'] ] = self.data["network"]["vlan_network_data"][net_type]["vlan"]
# OAM has default routes. Only for cruiser. TBD # OAM has default routes. Only for cruiser. TBD
if (net_type == 'oam'): if net_type == "oam":
routes = ["0.0.0.0/0"] routes = ["0.0.0.0/0"]
else: else:
routes = [] routes = []
self.data['network']['vlan_network_data'][net_type][ self.data["network"]["vlan_network_data"][net_type][
'routes'] = routes "routes"
] = routes
LOG.debug("Updated vlan network data:\n{}".format( LOG.debug(
pprint.pformat(self.data['network']['vlan_network_data']))) "Updated vlan network data:\n{}".format(
pprint.pformat(self.data["network"]["vlan_network_data"])
)
)
def load_extracted_data_from_data_source(self, extracted_data): def load_extracted_data_from_data_source(self, extracted_data):
""" """
@ -334,8 +362,11 @@ class ProcessDataSource():
LOG.info("Loading plugin data source") LOG.info("Loading plugin data source")
self.data = extracted_data self.data = extracted_data
LOG.debug("Extracted data from plugin:\n{}".format( LOG.debug(
pprint.pformat(extracted_data))) "Extracted data from plugin:\n{}".format(
pprint.pformat(extracted_data)
)
)
# Uncomment the following segment for debugging purposes. # Uncomment the following segment for debugging purposes.
# extracted_file = "extracted_file.yaml" # extracted_file = "extracted_file.yaml"
# yaml_file = yaml.dump(extracted_data, default_flow_style=False) # yaml_file = yaml.dump(extracted_data, default_flow_style=False)
@ -344,13 +375,14 @@ class ProcessDataSource():
# f.close() # f.close()
# Append region_data supplied from CLI to self.data # Append region_data supplied from CLI to self.data
self.data['region_name'] = self.region_name self.data["region_name"] = self.region_name
def dump_intermediary_file(self, intermediary_dir): def dump_intermediary_file(self, intermediary_dir):
""" Writing intermediary yaml """ """ Writing intermediary yaml """
LOG.info("Writing intermediary yaml") LOG.info("Writing intermediary yaml")
intermediary_file = "{}_intermediary.yaml".format( intermediary_file = "{}_intermediary.yaml".format(
self.data['region_name']) self.data["region_name"]
)
# Check if output dir = intermediary_dir exists # Check if output dir = intermediary_dir exists
if intermediary_dir is not None: if intermediary_dir is not None:
outfile = "{}/{}".format(intermediary_dir, intermediary_file) outfile = "{}/{}".format(intermediary_dir, intermediary_file)
@ -358,7 +390,7 @@ class ProcessDataSource():
outfile = intermediary_file outfile = intermediary_file
LOG.info("Intermediary file:{}".format(outfile)) LOG.info("Intermediary file:{}".format(outfile))
yaml_file = yaml.dump(self.data, default_flow_style=False) yaml_file = yaml.dump(self.data, default_flow_style=False)
with open(outfile, 'w') as f: with open(outfile, "w") as f:
f.write(yaml_file) f.write(yaml_file)
f.close() f.close()
@ -379,10 +411,11 @@ class ProcessDataSource():
def edit_intermediary_yaml(self): def edit_intermediary_yaml(self):
""" Edit generated data using on browser """ """ Edit generated data using on browser """
LOG.info( LOG.info(
"edit_intermediary_yaml: Invoking web server for yaml editing") "edit_intermediary_yaml: Invoking web server for yaml editing"
with tempfile.NamedTemporaryFile(mode='r+') as file_obj: )
with tempfile.NamedTemporaryFile(mode="r+") as file_obj:
yaml.safe_dump(self.data, file_obj, default_flow_style=False) yaml.safe_dump(self.data, file_obj, default_flow_style=False)
host = self._get_genesis_node_ip() host = self._get_genesis_node_ip()
os.system('yaml-editor -f {0} -h {1}'.format(file_obj.name, host)) os.system("yaml-editor -f {0} -h {1}".format(file_obj.name, host))
file_obj.seek(0) file_obj.seek(0)
self.data = yaml.safe_load(file_obj) self.data = yaml.safe_load(file_obj)


@ -22,23 +22,20 @@ class BaseProcessor:
@staticmethod @staticmethod
def get_role_wise_nodes(yaml_data): def get_role_wise_nodes(yaml_data):
hosts = { hosts = {"genesis": {}, "masters": [], "workers": []}
'genesis': {},
'masters': [],
'workers': [],
}
for rack in yaml_data['baremetal']: for rack in yaml_data["baremetal"]:
for host in yaml_data['baremetal'][rack]: for host in yaml_data["baremetal"][rack]:
if yaml_data['baremetal'][rack][host]['type'] == 'genesis': if yaml_data["baremetal"][rack][host]["type"] == "genesis":
hosts['genesis'] = { hosts["genesis"] = {
'name': host, "name": host,
'pxe': yaml_data['baremetal'][rack][host]['ip']['pxe'], "pxe": yaml_data["baremetal"][rack][host]["ip"]["pxe"],
'oam': yaml_data['baremetal'][rack][host]['ip']['oam'], "oam": yaml_data["baremetal"][rack][host]["ip"]["oam"],
} }
elif yaml_data['baremetal'][rack][host][ elif (
'type'] == 'controller': yaml_data["baremetal"][rack][host]["type"] == "controller"
hosts['masters'].append(host) ):
hosts["masters"].append(host)
else: else:
hosts['workers'].append(host) hosts["workers"].append(host)
return hosts return hosts
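A hypothetical sketch of the dict this helper returns; the genesis host keeps its pxe/oam addresses while the remaining hosts are split into masters (controllers) and workers:

hosts_example = {
    "genesis": {
        "name": "site1r01c001",       # hypothetical hostnames/addresses
        "pxe": "172.30.1.11",
        "oam": "10.0.220.11",
    },
    "masters": ["site1r01c002", "site1r02c001"],
    "workers": ["site1r01o001", "site1r02o002"],
}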


@ -36,12 +36,12 @@ class SiteProcessor(BaseProcessor):
""" """
# Check if manifest_dir exists # Check if manifest_dir exists
if self.manifest_dir is not None: if self.manifest_dir is not None:
site_manifest_dir = self.manifest_dir + '/pegleg_manifests/site/' site_manifest_dir = self.manifest_dir + "/pegleg_manifests/site/"
else: else:
site_manifest_dir = 'pegleg_manifests/site/' site_manifest_dir = "pegleg_manifests/site/"
LOG.info("Site manifest output dir:{}".format(site_manifest_dir)) LOG.info("Site manifest output dir:{}".format(site_manifest_dir))
template_software_dir = template_dir + '/' template_software_dir = template_dir + "/"
template_dir_abspath = os.path.dirname(template_software_dir) template_dir_abspath = os.path.dirname(template_software_dir)
LOG.debug("Template Path:%s", template_dir_abspath) LOG.debug("Template Path:%s", template_dir_abspath)
@ -50,16 +50,19 @@ class SiteProcessor(BaseProcessor):
j2_env = Environment( j2_env = Environment(
autoescape=False, autoescape=False,
loader=FileSystemLoader(dirpath), loader=FileSystemLoader(dirpath),
trim_blocks=True) trim_blocks=True,
)
j2_env.filters[ j2_env.filters[
'get_role_wise_nodes'] = self.get_role_wise_nodes "get_role_wise_nodes"
] = self.get_role_wise_nodes
templatefile = os.path.join(dirpath, filename) templatefile = os.path.join(dirpath, filename)
outdirs = dirpath.split('templates')[1] outdirs = dirpath.split("templates")[1]
outfile_path = '{}{}{}'.format( outfile_path = "{}{}{}".format(
site_manifest_dir, self.yaml_data['region_name'], outdirs) site_manifest_dir, self.yaml_data["region_name"], outdirs
outfile_yaml = templatefile.split('.j2')[0].split('/')[-1] )
outfile = outfile_path + '/' + outfile_yaml outfile_yaml = templatefile.split(".j2")[0].split("/")[-1]
outfile = outfile_path + "/" + outfile_yaml
outfile_dir = os.path.dirname(outfile) outfile_dir = os.path.dirname(outfile)
if not os.path.exists(outfile_dir): if not os.path.exists(outfile_dir):
os.makedirs(outfile_dir) os.makedirs(outfile_dir)
@ -71,7 +74,10 @@ class SiteProcessor(BaseProcessor):
out.close() out.close()
except IOError as ioe: except IOError as ioe:
LOG.error( LOG.error(
"IOError during rendering:{}".format(outfile_yaml)) "IOError during rendering:{}".format(outfile_yaml)
)
raise SystemExit( raise SystemExit(
"Error when generating {:s}:\n{:s}".format( "Error when generating {:s}:\n{:s}".format(
outfile, ioe.strerror)) outfile, ioe.strerror
)
)


@ -22,98 +22,115 @@ import yaml
from spyglass.parser.engine import ProcessDataSource from spyglass.parser.engine import ProcessDataSource
from spyglass.site_processors.site_processor import SiteProcessor from spyglass.site_processors.site_processor import SiteProcessor
LOG = logging.getLogger('spyglass') LOG = logging.getLogger("spyglass")
@click.command() @click.command()
@click.option( @click.option(
'--site', "--site", "-s", help="Specify the site for which manifests to be generated"
'-s', )
help='Specify the site for which manifests to be generated')
@click.option( @click.option(
'--type', '-t', help='Specify the plugin type formation or tugboat') "--type", "-t", help="Specify the plugin type formation or tugboat"
@click.option('--formation_url', '-f', help='Specify the formation url') )
@click.option('--formation_user', '-u', help='Specify the formation user id') @click.option("--formation_url", "-f", help="Specify the formation url")
@click.option("--formation_user", "-u", help="Specify the formation user id")
@click.option( @click.option(
'--formation_password', '-p', help='Specify the formation user password') "--formation_password", "-p", help="Specify the formation user password"
)
@click.option( @click.option(
'--intermediary', "--intermediary",
'-i', "-i",
type=click.Path(exists=True), type=click.Path(exists=True),
help= help=(
'Intermediary file path generate manifests, use -m also with this option') "Intermediary file path generate manifests, "
"use -m also with this option"
),
)
@click.option( @click.option(
'--additional_config', "--additional_config",
'-d', "-d",
type=click.Path(exists=True), type=click.Path(exists=True),
help='Site specific configuration details') help="Site specific configuration details",
)
@click.option( @click.option(
'--generate_intermediary', "--generate_intermediary",
'-g', "-g",
is_flag=True, is_flag=True,
help='Dump intermediary file from passed excel and excel spec') help="Dump intermediary file from passed excel and excel spec",
)
@click.option( @click.option(
'--intermediary_dir', "--intermediary_dir",
'-idir', "-idir",
type=click.Path(exists=True), type=click.Path(exists=True),
help='The path where intermediary file needs to be generated') help="The path where intermediary file needs to be generated",
)
@click.option( @click.option(
'--edit_intermediary/--no_edit_intermediary', "--edit_intermediary/--no_edit_intermediary",
'-e/-nedit', "-e/-nedit",
default=True, default=True,
help='Flag to let user edit intermediary') help="Flag to let user edit intermediary",
)
@click.option( @click.option(
'--generate_manifests', "--generate_manifests",
'-m', "-m",
is_flag=True, is_flag=True,
help='Generate manifests from the generated intermediary file') help="Generate manifests from the generated intermediary file",
)
@click.option( @click.option(
'--manifest_dir', "--manifest_dir",
'-mdir', "-mdir",
type=click.Path(exists=True), type=click.Path(exists=True),
help='The path where manifest files needs to be generated') help="The path where manifest files needs to be generated",
)
@click.option( @click.option(
'--template_dir', "--template_dir",
'-tdir', "-tdir",
type=click.Path(exists=True), type=click.Path(exists=True),
help='The path where J2 templates are available') help="The path where J2 templates are available",
)
@click.option( @click.option(
'--excel', "--excel",
'-x', "-x",
multiple=True, multiple=True,
type=click.Path(exists=True), type=click.Path(exists=True),
help= help=(
'Path to engineering excel file, to be passed with generate_intermediary') "Path to engineering excel file, to be passed with "
"generate_intermediary"
),
)
@click.option( @click.option(
'--excel_spec', "--excel_spec",
'-e', "-e",
type=click.Path(exists=True), type=click.Path(exists=True),
help='Path to excel spec, to be passed with generate_intermediary') help="Path to excel spec, to be passed with generate_intermediary",
)
@click.option( @click.option(
'--loglevel', "--loglevel",
'-l', "-l",
default=20, default=20,
multiple=False, multiple=False,
show_default=True, show_default=True,
help='Loglevel NOTSET:0 ,DEBUG:10, \ help="Loglevel NOTSET:0 ,DEBUG:10, \
INFO:20, WARNING:30, ERROR:40, CRITICAL:50') INFO:20, WARNING:30, ERROR:40, CRITICAL:50",
)
def main(*args, **kwargs): def main(*args, **kwargs):
# Extract user provided inputs # Extract user provided inputs
generate_intermediary = kwargs['generate_intermediary'] generate_intermediary = kwargs["generate_intermediary"]
intermediary_dir = kwargs['intermediary_dir'] intermediary_dir = kwargs["intermediary_dir"]
edit_intermediary = kwargs['edit_intermediary'] edit_intermediary = kwargs["edit_intermediary"]
generate_manifests = kwargs['generate_manifests'] generate_manifests = kwargs["generate_manifests"]
manifest_dir = kwargs['manifest_dir'] manifest_dir = kwargs["manifest_dir"]
intermediary = kwargs['intermediary'] intermediary = kwargs["intermediary"]
site = kwargs['site'] site = kwargs["site"]
template_dir = kwargs['template_dir'] template_dir = kwargs["template_dir"]
loglevel = kwargs['loglevel'] loglevel = kwargs["loglevel"]
# Set Logging format # Set Logging format
LOG.setLevel(loglevel) LOG.setLevel(loglevel)
stream_handle = logging.StreamHandler() stream_handle = logging.StreamHandler()
formatter = logging.Formatter( formatter = logging.Formatter(
'(%(name)s): %(asctime)s %(levelname)s %(message)s') "(%(name)s): %(asctime)s %(levelname)s %(message)s"
)
stream_handle.setFormatter(formatter) stream_handle.setFormatter(formatter)
LOG.addHandler(stream_handle) LOG.addHandler(stream_handle)
@ -139,19 +156,21 @@ def main(*args, **kwargs):
intermediary_yaml = {} intermediary_yaml = {}
if intermediary is None: if intermediary is None:
LOG.info("Generating Intermediary yaml") LOG.info("Generating Intermediary yaml")
plugin_type = kwargs.get('type', None) plugin_type = kwargs.get("type", None)
plugin_class = None plugin_class = None
# Discover the plugin and load the plugin class # Discover the plugin and load the plugin class
LOG.info("Load the plugin class") LOG.info("Load the plugin class")
for entry_point in pkg_resources.iter_entry_points( for entry_point in pkg_resources.iter_entry_points(
'data_extractor_plugins'): "data_extractor_plugins"
):
if entry_point.name == plugin_type: if entry_point.name == plugin_type:
plugin_class = entry_point.load() plugin_class = entry_point.load()
if plugin_class is None: if plugin_class is None:
LOG.error( LOG.error(
"Unsupported Plugin type. Plugin type:{}".format(plugin_type)) "Unsupported Plugin type. Plugin type:{}".format(plugin_type)
)
exit() exit()
# Extract data from plugin data source # Extract data from plugin data source
@ -162,16 +181,22 @@ def main(*args, **kwargs):
data_extractor.extract_data() data_extractor.extract_data()
# Apply any additional_config provided by user # Apply any additional_config provided by user
additional_config = kwargs.get('additional_config', None) additional_config = kwargs.get("additional_config", None)
if additional_config is not None: if additional_config is not None:
with open(additional_config, 'r') as config: with open(additional_config, "r") as config:
raw_data = config.read() raw_data = config.read()
additional_config_data = yaml.safe_load(raw_data) additional_config_data = yaml.safe_load(raw_data)
LOG.debug("Additional config data:\n{}".format( LOG.debug(
pprint.pformat(additional_config_data))) "Additional config data:\n{}".format(
pprint.pformat(additional_config_data)
)
)
LOG.info("Apply additional configuration from:{}".format( LOG.info(
additional_config)) "Apply additional configuration from:{}".format(
additional_config
)
)
data_extractor.apply_additional_data(additional_config_data) data_extractor.apply_additional_data(additional_config_data)
LOG.debug(pprint.pformat(data_extractor.site_data)) LOG.debug(pprint.pformat(data_extractor.site_data))
@ -179,14 +204,16 @@ def main(*args, **kwargs):
LOG.info("Apply design rules to the extracted data") LOG.info("Apply design rules to the extracted data")
process_input_ob = ProcessDataSource(site) process_input_ob = ProcessDataSource(site)
process_input_ob.load_extracted_data_from_data_source( process_input_ob.load_extracted_data_from_data_source(
data_extractor.site_data) data_extractor.site_data
)
LOG.info("Generate intermediary yaml") LOG.info("Generate intermediary yaml")
intermediary_yaml = process_input_ob.generate_intermediary_yaml( intermediary_yaml = process_input_ob.generate_intermediary_yaml(
edit_intermediary) edit_intermediary
)
else: else:
LOG.info("Loading intermediary from user provided input") LOG.info("Loading intermediary from user provided input")
with open(intermediary, 'r') as intermediary_file: with open(intermediary, "r") as intermediary_file:
raw_data = intermediary_file.read() raw_data = intermediary_file.read()
intermediary_yaml = yaml.safe_load(raw_data) intermediary_yaml = yaml.safe_load(raw_data)
@ -201,5 +228,5 @@ def main(*args, **kwargs):
LOG.info("Spyglass Execution Completed") LOG.info("Spyglass Execution Completed")
if __name__ == '__main__': if __name__ == "__main__":
main() main()


@ -26,49 +26,55 @@ from flask_bootstrap import Bootstrap
app_path = os.path.dirname(os.path.abspath(__file__)) app_path = os.path.dirname(os.path.abspath(__file__))
app = Flask('Yaml Editor!', app = Flask(
template_folder=os.path.join(app_path, 'templates'), "Yaml Editor!",
static_folder=os.path.join(app_path, 'static')) template_folder=os.path.join(app_path, "templates"),
static_folder=os.path.join(app_path, "static"),
)
Bootstrap(app) Bootstrap(app)
logging.getLogger('werkzeug').setLevel(logging.ERROR) logging.getLogger("werkzeug").setLevel(logging.ERROR)
LOG = app.logger LOG = app.logger
@app.route('/favicon.ico') @app.route("/favicon.ico")
def favicon(): def favicon():
return send_from_directory(app.static_folder, 'favicon.ico') return send_from_directory(app.static_folder, "favicon.ico")
@app.route('/', methods=['GET', 'POST']) @app.route("/", methods=["GET", "POST"])
def index(): def index():
"""Renders index page to edit provided yaml file.""" """Renders index page to edit provided yaml file."""
LOG.info('Rendering yaml file for editing') LOG.info("Rendering yaml file for editing")
with open(app.config['YAML_FILE']) as file_obj: with open(app.config["YAML_FILE"]) as file_obj:
data = yaml.safe_load(file_obj) data = yaml.safe_load(file_obj)
return render_template('yaml.html', return render_template(
data=json.dumps(data), "yaml.html",
change_str=app.config['STRING_TO_CHANGE']) data=json.dumps(data),
change_str=app.config["STRING_TO_CHANGE"],
)
@app.route('/save', methods=['POST']) @app.route("/save", methods=["POST"])
def save(): def save():
"""Save current progress on file.""" """Save current progress on file."""
LOG.info('Saving edited inputs from user to yaml file') LOG.info("Saving edited inputs from user to yaml file")
out = request.json.get('yaml_data') out = request.json.get("yaml_data")
with open(app.config['YAML_FILE'], 'w') as file_obj: with open(app.config["YAML_FILE"], "w") as file_obj:
yaml.safe_dump(out, file_obj, default_flow_style=False) yaml.safe_dump(out, file_obj, default_flow_style=False)
return "Data saved successfully!" return "Data saved successfully!"
@app.route('/saveExit', methods=['POST']) @app.route("/saveExit", methods=["POST"])
def save_exit(): def save_exit():
"""Save current progress on file and shuts down the server.""" """Save current progress on file and shuts down the server."""
LOG.info('Saving edited inputs from user to yaml file and shutting' LOG.info(
' down server') "Saving edited inputs from user to yaml file and shutting"
out = request.json.get('yaml_data') " down server"
with open(app.config['YAML_FILE'], 'w') as file_obj: )
out = request.json.get("yaml_data")
with open(app.config["YAML_FILE"], "w") as file_obj:
yaml.safe_dump(out, file_obj, default_flow_style=False) yaml.safe_dump(out, file_obj, default_flow_style=False)
func = request.environ.get('werkzeug.server.shutdown') func = request.environ.get("werkzeug.server.shutdown")
if func: if func:
func() func()
return "Saved successfully, Shutting down app! You may close the tab!" return "Saved successfully, Shutting down app! You may close the tab!"
@ -77,68 +83,72 @@ def save_exit():
@app.errorhandler(404) @app.errorhandler(404)
def page_not_found(e): def page_not_found(e):
"""Serves 404 error.""" """Serves 404 error."""
LOG.info('User tried to access unavailable page.') LOG.info("User tried to access unavailable page.")
return '<h1>404: Page not Found!</h1>' return "<h1>404: Page not Found!</h1>"
def run(*args, **kwargs): def run(*args, **kwargs):
"""Starts the server.""" """Starts the server."""
LOG.info('Initiating web server for yaml editing') LOG.info("Initiating web server for yaml editing")
port = kwargs.get('port', None) port = kwargs.get("port", None)
if not port: if not port:
port = 8161 port = 8161
app.run(host='0.0.0.0', port=port, debug=False) app.run(host="0.0.0.0", port=port, debug=False)
@click.command() @click.command()
@click.option( @click.option(
'--file', "--file",
'-f', "-f",
required=True, required=True,
type=click.File(), type=click.File(),
multiple=False, multiple=False,
help="Path with file name to the intermediary yaml file." help="Path with file name to the intermediary yaml file.",
) )
@click.option( @click.option(
'--host', "--host",
'-h', "-h",
default='0.0.0.0', default="0.0.0.0",
type=click.STRING, type=click.STRING,
multiple=False, multiple=False,
help="Optional host parameter to run Flask on." help="Optional host parameter to run Flask on.",
) )
@click.option( @click.option(
'--port', "--port",
'-p', "-p",
default=8161, default=8161,
type=click.INT, type=click.INT,
multiple=False, multiple=False,
help="Optional port parameter to run Flask on." help="Optional port parameter to run Flask on.",
) )
@click.option( @click.option(
'--string', "--string",
'-s', "-s",
default='#CHANGE_ME', default="#CHANGE_ME",
type=click.STRING, type=click.STRING,
multiple=False, multiple=False,
help="Text which is required to be changed on yaml file." help="Text which is required to be changed on yaml file.",
) )
def main(*args, **kwargs): def main(*args, **kwargs):
LOG.setLevel(logging.INFO) LOG.setLevel(logging.INFO)
LOG.info('Initiating yaml-editor') LOG.info("Initiating yaml-editor")
try: try:
yaml.safe_load(kwargs['file']) yaml.safe_load(kwargs["file"])
except yaml.YAMLError as e: except yaml.YAMLError as e:
LOG.error('EXITING - Please provide a valid yaml file.') LOG.error("EXITING - Please provide a valid yaml file.")
if hasattr(e, 'problem_mark'): if hasattr(e, "problem_mark"):
mark = e.problem_mark mark = e.problem_mark
LOG.error("Error position: ({0}:{1})".format( LOG.error(
mark.line + 1, mark.column + 1)) "Error position: ({0}:{1})".format(
mark.line + 1, mark.column + 1
)
)
sys.exit(2) sys.exit(2)
except Exception: except Exception:
LOG.error('EXITING - Please provide a valid yaml file.') LOG.error("EXITING - Please provide a valid yaml file.")
sys.exit(2) sys.exit(2)
LOG.info(""" LOG.info(
"""
############################################################################## ##############################################################################
@ -146,12 +156,15 @@ Please go to http://{0}:{1}/ to edit your yaml file.
############################################################################## ##############################################################################
""".format(kwargs['host'], kwargs['port'])) """.format(
app.config['YAML_FILE'] = kwargs['file'].name kwargs["host"], kwargs["port"]
app.config['STRING_TO_CHANGE'] = kwargs['string'] )
)
app.config["YAML_FILE"] = kwargs["file"].name
app.config["STRING_TO_CHANGE"] = kwargs["string"]
run(*args, **kwargs) run(*args, **kwargs)
if __name__ == '__main__': if __name__ == "__main__":
"""Invoked when used as a script.""" """Invoked when used as a script."""
main() main()