Merge "PEP-8 code styling and linting"

This commit is contained in:
Zuul 2019-04-24 13:04:51 +00:00 committed by Gerrit Code Review
commit 7da3e51798
19 changed files with 402 additions and 471 deletions

16
doc/source/conf.py Normal file → Executable file
View File

@ -18,9 +18,10 @@
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
import sphinx_rtd_theme
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
@ -50,18 +51,18 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = u'tugboat'
copyright = u'2018 AT&T Intellectual Property.'
author = u'Tugboat Authors'
project = 'tugboat'
copyright = '2018 AT&T Intellectual Property.'
author = 'Tugboat Authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.0'
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = u'0.1.0'
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -81,7 +82,6 @@ pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
@ -101,13 +101,11 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ucpintdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {

View File

@ -0,0 +1,67 @@
..
Copyright 2018 AT&T Intellectual Property.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
==========================
Developer Quickstart Guide
==========================
To run your first spyglass job, follow these steps from inside the
airship-spyglass directory.
1. Install external dependencies if not already installed.
.. code-block:: console
sudo apt install -y python3-pip
sudo apt install -y tox
2. Set up an environment with tox.
.. code-block:: console
tox -e py36 --notest
3. Enter the tox environment.
.. code-block:: console
source .tox/py36/bin/activate
4. Install spyglass in the tox environment.
.. code-block:: console
pip install -e .
5. Run spyglass on the example files to generate an intermediate document.
.. code-block:: console
mkdir intermediate
spyglass -g -s airship-seaworthy -t tugboat \
-idir intermediate \
--excel_spec spyglass/examples/excel_spec.yaml \
--excel spyglass/examples/SiteDesignSpec_v0.1.xlsx \
--additional_config spyglass/examples/site_config.yaml \
--template_dir spyglass/examples/templates/
6. Run spyglass on the intermediate document to generate manifests.
.. code-block:: console
mkdir manifest_dir
spyglass -m -i intermediate/airship-seaworthy_intermediary.yaml \
-mdir manifest_dir/ -tdir spyglass/examples/templates/

View File

@ -32,4 +32,5 @@ fed to Shipyard for site deployment / updates.
:maxdepth: 2
getting_started
developer_quickstart
tugboat

View File

@ -4,7 +4,7 @@ FROM ${FROM}
VOLUME /var/spyglass
WORKDIR /var/spyglass
ARG ctx_base=.
ARG ctx_base=./
COPY ${ctx_base}/requirements.txt /opt/spyglass/requirements.txt
RUN pip3 install --no-cache-dir -r /opt/spyglass/requirements.txt

View File

@ -3,5 +3,4 @@ jsonschema
netaddr
openpyxl==2.5.4
pyyaml==3.12
requests
six
requests

10
setup.py Normal file → Executable file
View File

@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from setuptools import find_packages
from setuptools import setup
setup(
name='spyglass',
@ -35,9 +35,11 @@ setup(
'console_scripts': [
'spyglass=spyglass.spyglass:main',
],
'data_extractor_plugins':
['formation=spyglass.data_extractor.plugins.formation:FormationPlugin',
'tugboat=spyglass.data_extractor.plugins.tugboat.tugboat:TugboatPlugin',
'data_extractor_plugins': [
'formation='
'spyglass.data_extractor.plugins.formation:FormationPlugin',
'tugboat='
'spyglass.data_extractor.plugins.tugboat.tugboat:TugboatPlugin',
]
},
include_package_data=True,

69
spyglass/data_extractor/base.py Normal file → Executable file
View File

@ -15,15 +15,13 @@
import abc
import logging
import pprint
import six
from spyglass.utils import utils
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class BaseDataSourcePlugin(object):
class BaseDataSourcePlugin(metaclass=abc.ABCMeta):
"""Provide basic hooks for data source plugins"""
def __init__(self, region):
@ -52,10 +50,10 @@ class BaseDataSourcePlugin(object):
If validation fails, Spyglass exits.
:param char pointer: Spyglass CLI parameters.
:param char kwargs: Spyglass CLI parameters.
:returns plugin conf if successfully validated.
Each plugin implements their own validaton mechanism.
Each plugin implements their own validation mechanism.
"""
return {}
@ -74,7 +72,7 @@ class BaseDataSourcePlugin(object):
return []
@abc.abstractmethod
def get_hosts(self, region, rack):
def get_hosts(self, region, rack=None):
"""Return list of hosts in the region
:param string region: Region name
@ -291,29 +289,23 @@ class BaseDataSourcePlugin(object):
# Fill network IP for this host
temp_host["ip"] = {}
temp_host["ip"]["oob"] = temp_host_ips[host_name].get(
"oob", "#CHANGE_ME"
)
temp_host["ip"]["calico"] = temp_host_ips[host_name].get(
"calico", "#CHANGE_ME"
)
temp_host["ip"]["oam"] = temp_host_ips[host_name].get(
"oam", "#CHANGE_ME"
)
temp_host["ip"]["storage"] = temp_host_ips[host_name].get(
"storage", "#CHANGE_ME"
)
temp_host["ip"]["overlay"] = temp_host_ips[host_name].get(
"overlay", "#CHANGE_ME"
)
temp_host["ip"]["pxe"] = temp_host_ips[host_name].get(
"pxe", "#CHANGE_ME"
)
temp_host["ip"]["oob"] = \
temp_host_ips[host_name].get("oob", "#CHANGE_ME")
temp_host["ip"]["calico"] = \
temp_host_ips[host_name].get("calico", "#CHANGE_ME")
temp_host["ip"]["oam"] = \
temp_host_ips[host_name].get("oam", "#CHANGE_ME")
temp_host["ip"]["storage"] = \
temp_host_ips[host_name].get("storage", "#CHANGE_ME")
temp_host["ip"]["overlay"] = \
temp_host_ips[host_name].get("overlay", "#CHANGE_ME")
temp_host["ip"]["pxe"] = \
temp_host_ips[host_name].get("pxe", "#CHANGE_ME")
baremetal[rack_name][host_name] = temp_host
LOG.debug(
"Baremetal information:\n{}".format(pprint.pformat(baremetal))
)
LOG.debug("Baremetal information:\n{}".format(
pprint.pformat(baremetal)))
return baremetal
@ -357,9 +349,8 @@ class BaseDataSourcePlugin(object):
domain_data = self.get_domain_name(self.region)
site_info["domain"] = domain_data
LOG.debug(
"Extracted site information:\n{}".format(pprint.pformat(site_info))
)
LOG.debug("Extracted site information:\n{}".format(
pprint.pformat(site_info)))
return site_info
@ -405,14 +396,13 @@ class BaseDataSourcePlugin(object):
tmp_net = {}
if net["name"] in networks_to_scan:
tmp_net["subnet"] = net.get("subnet", "#CHANGE_ME")
if (net["name"] != "ingress") and (net["name"] != "oob"):
if net["name"] != "ingress" and net["name"] != "oob":
tmp_net["vlan"] = net.get("vlan", "#CHANGE_ME")
network_data["vlan_network_data"][net["name"]] = tmp_net
LOG.debug(
"Extracted network data:\n{}".format(pprint.pformat(network_data))
)
LOG.debug("Extracted network data:\n{}".format(
pprint.pformat(network_data)))
return network_data
def extract_data(self):
@ -423,17 +413,18 @@ class BaseDataSourcePlugin(object):
"""
LOG.info("Extract data from plugin")
site_data = {}
site_data["baremetal"] = self.extract_baremetal_information()
site_data["site_info"] = self.extract_site_information()
site_data["network"] = self.extract_network_information()
site_data = {
"baremetal": self.extract_baremetal_information(),
"site_info": self.extract_site_information(),
"network": self.extract_network_information()
}
self.site_data = site_data
return site_data
def apply_additional_data(self, extra_data):
"""Apply any additional inputs from user
In case plugin doesnot provide some data, user can specify
In case plugin does not provide some data, user can specify
the same as part of additional data in form of dict. The user
provided dict will be merged recursively to site_data.
If there is repetition of data then additional data supplied

View File

@ -32,11 +32,9 @@ class NoSpecMatched(BaseError):
self.specs = excel_specs
def display_error(self):
print(
"No spec matched. Following are the available specs:\n".format(
self.specs
)
)
# FIXME (Ian Pittwood): use log instead of print
print("No spec matched. Following are the available specs:\n".format(
self.specs))
sys.exit(1)
@ -56,5 +54,5 @@ class TokenGenerationError(BaseError):
pass
class ConnectionError(BaseError):
class FormationConnectionError(BaseError):
pass

107
spyglass/data_extractor/plugins/formation.py Normal file → Executable file
View File

@ -12,15 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import formation_client
import logging
import pprint
import re
import formation_client
import requests
import urllib3
from spyglass.data_extractor.base import BaseDataSourcePlugin
import spyglass.data_extractor.custom_exceptions as exceptions
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@ -136,17 +136,16 @@ class FormationPlugin(BaseDataSourcePlugin):
auth=(self.user, self.password),
verify=self.client_config.verify_ssl,
)
except requests.exceptions.exceptions.ConnectionError:
raise exceptions.ConnectionError("Incorrect URL: {}".format(url))
except requests.exceptions.ConnectionError:
raise exceptions.FormationConnectionError(
"Incorrect URL: {}".format(url))
if token_response.status_code == 200:
self.token = token_response.json().get("X-Subject-Token", None)
else:
raise exceptions.TokenGenerationError(
"Unable to generate token because {}".format(
token_response.reason
)
)
token_response.reason))
return self.token
@ -160,9 +159,8 @@ class FormationPlugin(BaseDataSourcePlugin):
token = self._generate_token()
self.client_config.api_key = {"X-Auth-Token": self.user + "|" + token}
self.formation_api_client = formation_client.ApiClient(
self.client_config
)
self.formation_api_client = \
formation_client.ApiClient(self.client_config)
def _update_site_and_zone(self, region):
"""Get Zone name and Site name from region"""
@ -309,40 +307,35 @@ class FormationPlugin(BaseDataSourcePlugin):
zone_id = self._get_zone_id_by_name(zone)
device_api = formation_client.DevicesApi(self.formation_api_client)
control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
compute_hosts = device_api.zones_zone_id_devices_get(
zone_id, type="KVM"
)
compute_hosts = device_api.zones_zone_id_devices_get(zone_id,
type="KVM")
hosts_list = []
for host in control_hosts:
self.device_name_id_mapping[host.aic_standard_name] = host.id
hosts_list.append(
{
"name": host.aic_standard_name,
"type": "controller",
"rack_name": host.rack_name,
"host_profile": host.host_profile_name,
}
)
hosts_list.append({
"name": host.aic_standard_name,
"type": "controller",
"rack_name": host.rack_name,
"host_profile": host.host_profile_name,
})
for host in compute_hosts:
self.device_name_id_mapping[host.aic_standard_name] = host.id
hosts_list.append(
{
"name": host.aic_standard_name,
"type": "compute",
"rack_name": host.rack_name,
"host_profile": host.host_profile_name,
}
)
hosts_list.append({
"name": host.aic_standard_name,
"type": "compute",
"rack_name": host.rack_name,
"host_profile": host.host_profile_name,
})
"""
for host in itertools.chain(control_hosts, compute_hosts):
self.device_name_id_mapping[host.aic_standard_name] = host.id
hosts_list.append({
'name': host.aic_standard_name,
'type': host.categories[0],
'rack_name': host.rack_name,
'host_profile': host.host_profile_name
'name': host.aic_standard_name,
'type': host.categories[0],
'rack_name': host.rack_name,
'host_profile': host.host_profile_name
})
"""
@ -354,8 +347,7 @@ class FormationPlugin(BaseDataSourcePlugin):
region_id = self._get_region_id_by_name(region)
vlan_api = formation_client.VlansApi(self.formation_api_client)
vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
zone_id, region_id
)
zone_id, region_id)
# Case when vlans list is empty from
# zones_zone_id_regions_region_id_vlans_get
@ -364,22 +356,22 @@ class FormationPlugin(BaseDataSourcePlugin):
hosts = self.get_hosts(self.region)
host = hosts[0]["name"]
device_id = self._get_device_id_by_name(host)
vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
zone_id, device_id
)
vlans = \
vlan_api.zones_zone_id_devices_device_id_vlans_get(zone_id,
device_id)
LOG.debug("Extracted region network information\n{}".format(vlans))
vlans_list = []
for vlan_ in vlans:
if len(vlan_.vlan.ipv4) != 0:
tmp_vlan = {}
tmp_vlan["name"] = self._get_network_name_from_vlan_name(
vlan_.vlan.name
)
tmp_vlan["vlan"] = vlan_.vlan.vlan_id
tmp_vlan["subnet"] = vlan_.vlan.subnet_range
tmp_vlan["gateway"] = vlan_.ipv4_gateway
tmp_vlan["subnet_level"] = vlan_.vlan.subnet_level
tmp_vlan = {
"name":
self._get_network_name_from_vlan_name(vlan_.vlan.name),
"vlan": vlan_.vlan.vlan_id,
"subnet": vlan_.vlan.subnet_range,
"gateway": vlan_.ipv4_gateway,
"subnet_level": vlan_.vlan.subnet_level
}
vlans_list.append(tmp_vlan)
return vlans_list
@ -401,9 +393,9 @@ class FormationPlugin(BaseDataSourcePlugin):
for host in hosts:
device_id = self._get_device_id_by_name(host)
vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
zone_id, device_id
)
vlans = \
vlan_api.zones_zone_id_devices_device_id_vlans_get(zone_id,
device_id)
LOG.debug("Received VLAN Network Information\n{}".format(vlans))
ip_[host] = {}
for vlan_ in vlans:
@ -411,14 +403,10 @@ class FormationPlugin(BaseDataSourcePlugin):
# list is empty
if len(vlan_.vlan.ipv4) != 0:
name = self._get_network_name_from_vlan_name(
vlan_.vlan.name
)
vlan_.vlan.name)
ipv4 = vlan_.vlan.ipv4[0].ip
LOG.debug(
"vlan:{},name:{},ip:{},vlan_name:{}".format(
vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name
)
)
LOG.debug("vlan:{},name:{},ip:{},vlan_name:{}".format(
vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name))
# TODO(pg710r) This code needs to be extended to support ipv4
# and ipv6
# ip_[host][name] = {'ipv4': ipv4}
@ -465,7 +453,7 @@ class FormationPlugin(BaseDataSourcePlugin):
raise exceptions.ApiClientError(e.msg)
if not zone_.ipv4_dns:
LOG.warn("No dns server")
LOG.warning("No dns server")
return []
dns_list = []
@ -498,8 +486,7 @@ class FormationPlugin(BaseDataSourcePlugin):
}
except AttributeError as e:
raise exceptions.MissingAttributeError(
"Missing {} information in {}".format(e, site_info.city)
)
"Missing {} information in {}".format(e, site_info.city))
def get_domain_name(self, region):
try:
@ -511,7 +498,7 @@ class FormationPlugin(BaseDataSourcePlugin):
raise exceptions.ApiClientError(e.msg)
if not zone_.dns:
LOG.warn("Got None while running get domain name")
LOG.warning("Got None while running get domain name")
return None
return zone_.dns

View File

@ -31,8 +31,5 @@ class NoSpecMatched(BaseError):
self.specs = excel_specs
def display_error(self):
print(
"No spec matched. Following are the available specs:\n".format(
self.specs
)
)
print("No spec matched. Following are the available specs:\n".format(
self.specs))

240
spyglass/data_extractor/plugins/tugboat/excel_parser.py Normal file → Executable file
View File

@ -13,17 +13,16 @@
# limitations under the License.
import logging
from openpyxl import load_workbook
from openpyxl import Workbook
import pprint
import re
import sys
from openpyxl import load_workbook
from openpyxl import Workbook
import yaml
from spyglass.data_extractor.custom_exceptions import NoSpecMatched
# from spyglass.data_extractor.custom_exceptions
LOG = logging.getLogger(__name__)
@ -35,7 +34,7 @@ class ExcelParser(object):
with open(excel_specs, "r") as f:
spec_raw_data = f.read()
self.excel_specs = yaml.safe_load(spec_raw_data)
# A combined design spec, returns a workbok object after combining
# A combined design spec, returns a workbook object after combining
# all the inputs excel specs
combined_design_spec = self.combine_excel_design_specs(file_name)
self.wb_combined = combined_design_spec
@ -80,33 +79,24 @@ class ExcelParser(object):
ipmi_data = {}
hosts = []
provided_sheetname = self.excel_specs["specs"][self.spec][
"ipmi_sheet_name"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname
)
spec_ = self.excel_specs["specs"][self.spec]
provided_sheetname = spec_["ipmi_sheet_name"]
workbook_object, extracted_sheetname = \
self.get_xl_obj_and_sheetname(provided_sheetname)
if workbook_object is not None:
ws = workbook_object[extracted_sheetname]
else:
ws = self.wb_combined[provided_sheetname]
row = self.excel_specs["specs"][self.spec]["start_row"]
end_row = self.excel_specs["specs"][self.spec]["end_row"]
hostname_col = self.excel_specs["specs"][self.spec]["hostname_col"]
ipmi_address_col = self.excel_specs["specs"][self.spec][
"ipmi_address_col"
]
host_profile_col = self.excel_specs["specs"][self.spec][
"host_profile_col"
]
ipmi_gateway_col = self.excel_specs["specs"][self.spec][
"ipmi_gateway_col"
]
row = spec_["start_row"]
end_row = spec_["end_row"]
hostname_col = spec_["hostname_col"]
ipmi_address_col = spec_["ipmi_address_col"]
host_profile_col = spec_["host_profile_col"]
ipmi_gateway_col = spec_["ipmi_gateway_col"]
previous_server_gateway = None
while row <= end_row:
hostname = self.sanitize(
ws.cell(row=row, column=hostname_col).value
)
hostname = \
self.sanitize(ws.cell(row=row, column=hostname_col).value)
hosts.append(hostname)
ipmi_address = ws.cell(row=row, column=ipmi_address_col).value
if "/" in ipmi_address:
@ -119,12 +109,10 @@ class ExcelParser(object):
host_profile = ws.cell(row=row, column=host_profile_col).value
try:
if host_profile is None:
raise RuntimeError(
"No value read from {} ".format(self.file_name)
+ "sheet:{} row:{}, col:{}".format(
self.spec, row, host_profile_col
)
)
raise RuntimeError("No value read from "
"{} sheet:{} row:{}, col:{}".format(
self.file_name, self.spec, row,
host_profile_col))
except RuntimeError as rerror:
LOG.critical(rerror)
sys.exit("Tugboat exited!!")
@ -132,17 +120,13 @@ class ExcelParser(object):
"ipmi_address": ipmi_address,
"ipmi_gateway": ipmi_gateway,
"host_profile": host_profile,
"type": type,
"type": type, # FIXME (Ian Pittwood): shadows type built-in
}
row += 1
LOG.debug(
"ipmi data extracted from excel:\n{}".format(
pprint.pformat(ipmi_data)
)
)
LOG.debug(
"host data extracted from excel:\n{}".format(pprint.pformat(hosts))
)
LOG.debug("ipmi data extracted from excel:\n{}".format(
pprint.pformat(ipmi_data)))
LOG.debug("host data extracted from excel:\n{}".format(
pprint.pformat(hosts)))
return [ipmi_data, hosts]
def get_private_vlan_data(self, ws):
@ -161,30 +145,27 @@ class ExcelParser(object):
vlan = vlan.lower()
vlan_data[vlan] = cell_value
row += 1
LOG.debug(
"vlan data extracted from excel:\n%s", pprint.pformat(vlan_data)
)
LOG.debug("vlan data extracted from excel:\n%s" %
pprint.pformat(vlan_data))
return vlan_data
def get_private_network_data(self):
"""Read network data from the private ip sheet"""
provided_sheetname = self.excel_specs["specs"][self.spec][
"private_ip_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname
)
spec_ = self.excel_specs["specs"][self.spec]
provided_sheetname = spec_["private_ip_sheet"]
workbook_object, extracted_sheetname = \
self.get_xl_obj_and_sheetname(provided_sheetname)
if workbook_object is not None:
ws = workbook_object[extracted_sheetname]
else:
ws = self.wb_combined[provided_sheetname]
vlan_data = self.get_private_vlan_data(ws)
network_data = {}
row = self.excel_specs["specs"][self.spec]["net_start_row"]
end_row = self.excel_specs["specs"][self.spec]["net_end_row"]
col = self.excel_specs["specs"][self.spec]["net_col"]
vlan_col = self.excel_specs["specs"][self.spec]["net_vlan_col"]
row = spec_["net_start_row"]
end_row = spec_["net_end_row"]
col = spec_["net_col"]
vlan_col = spec_["net_vlan_col"]
old_vlan = ""
while row <= end_row:
vlan = ws.cell(row=row, column=vlan_col).value
@ -212,93 +193,82 @@ class ExcelParser(object):
network_data[network]['is_common'] = False
else:
network_data[network]['is_common'] = True
LOG.debug(
"private network data extracted from\
excel:\n%s", pprint.pformat(network_data))
LOG.debug("private network data extracted from excel:\n%s"
% pprint.pformat(network_data))
"""
return network_data
def get_public_network_data(self):
"""Read public network data from public ip data"""
network_data = {}
provided_sheetname = self.excel_specs["specs"][self.spec][
"public_ip_sheet"
]
spec_ = self.excel_specs["specs"][self.spec]
provided_sheetname = spec_["public_ip_sheet"]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname
)
provided_sheetname)
if workbook_object is not None:
ws = workbook_object[extracted_sheetname]
else:
ws = self.wb_combined[provided_sheetname]
oam_row = self.excel_specs["specs"][self.spec]["oam_ip_row"]
oam_col = self.excel_specs["specs"][self.spec]["oam_ip_col"]
oam_vlan_col = self.excel_specs["specs"][self.spec]["oam_vlan_col"]
ingress_row = self.excel_specs["specs"][self.spec]["ingress_ip_row"]
oob_row = self.excel_specs["specs"][self.spec]["oob_net_row"]
col = self.excel_specs["specs"][self.spec]["oob_net_start_col"]
end_col = self.excel_specs["specs"][self.spec]["oob_net_end_col"]
oam_row = spec_["oam_ip_row"]
oam_col = spec_["oam_ip_col"]
oam_vlan_col = spec_["oam_vlan_col"]
ingress_row = spec_["ingress_ip_row"]
oob_row = spec_["oob_net_row"]
col = spec_["oob_net_start_col"]
end_col = spec_["oob_net_end_col"]
network_data = {
"oam": {
"subnet": [ws.cell(row=oam_row, column=oam_col).value],
"vlan": ws.cell(row=oam_row, column=oam_vlan_col).value,
},
"ingress": ws.cell(row=ingress_row, column=oam_col).value,
"oob": {
"subnet": [],
}
}
network_data["oob"] = {"subnet": []}
while col <= end_col:
cell_value = ws.cell(row=oob_row, column=col).value
if cell_value:
network_data["oob"]["subnet"].append(self.sanitize(cell_value))
col += 1
LOG.debug(
"public network data extracted from\
excel:\n%s",
pprint.pformat(network_data),
)
LOG.debug("public network data extracted from excel:\n%s" %
pprint.pformat(network_data))
return network_data
def get_site_info(self):
"""Read location, dns, ntp and ldap data"""
site_info = {}
provided_sheetname = self.excel_specs["specs"][self.spec][
"dns_ntp_ldap_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname
)
spec_ = self.excel_specs["specs"][self.spec]
provided_sheetname = spec_["dns_ntp_ldap_sheet"]
workbook_object, extracted_sheetname = \
self.get_xl_obj_and_sheetname(provided_sheetname)
if workbook_object is not None:
ws = workbook_object[extracted_sheetname]
else:
ws = self.wb_combined[provided_sheetname]
dns_row = self.excel_specs["specs"][self.spec]["dns_row"]
dns_col = self.excel_specs["specs"][self.spec]["dns_col"]
ntp_row = self.excel_specs["specs"][self.spec]["ntp_row"]
ntp_col = self.excel_specs["specs"][self.spec]["ntp_col"]
domain_row = self.excel_specs["specs"][self.spec]["domain_row"]
domain_col = self.excel_specs["specs"][self.spec]["domain_col"]
login_domain_row = self.excel_specs["specs"][self.spec][
"login_domain_row"
]
ldap_col = self.excel_specs["specs"][self.spec]["ldap_col"]
global_group = self.excel_specs["specs"][self.spec]["global_group"]
ldap_search_url_row = self.excel_specs["specs"][self.spec][
"ldap_search_url_row"
]
dns_row = spec_["dns_row"]
dns_col = spec_["dns_col"]
ntp_row = spec_["ntp_row"]
ntp_col = spec_["ntp_col"]
domain_row = spec_["domain_row"]
domain_col = spec_["domain_col"]
login_domain_row = spec_["login_domain_row"]
ldap_col = spec_["ldap_col"]
global_group = spec_["global_group"]
ldap_search_url_row = spec_["ldap_search_url_row"]
dns_servers = ws.cell(row=dns_row, column=dns_col).value
ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value
try:
if dns_servers is None:
raise RuntimeError(
(
"No value for dns_server from:{} Sheet:'{}' ",
"Row:{} Col:{}",
).format(
self.file_name, provided_sheetname, dns_row, dns_col
)
)
raise RuntimeError("No value for dns_server from: "
"{} Sheet:'{}' Row:{} Col:{}".format(
self.file_name, provided_sheetname,
dns_row, dns_col))
if ntp_servers is None:
raise RuntimeError("No value for ntp_server from: "
"{} Sheet:'{}' Row:{} Col:{}".format(
self.file_name, provided_sheetname,
ntp_row, ntp_col))
except RuntimeError as rerror:
LOG.critical(rerror)
sys.exit("Tugboat exited!!")
@ -319,12 +289,10 @@ class ExcelParser(object):
"ntp": ntp_servers,
"domain": ws.cell(row=domain_row, column=domain_col).value,
"ldap": {
"subdomain": ws.cell(
row=login_domain_row, column=ldap_col
).value,
"common_name": ws.cell(
row=global_group, column=ldap_col
).value,
"subdomain": ws.cell(row=login_domain_row,
column=ldap_col).value,
"common_name": ws.cell(row=global_group,
column=ldap_col).value,
"url": ws.cell(row=ldap_search_url_row, column=ldap_col).value,
},
}
@ -338,32 +306,27 @@ class ExcelParser(object):
def get_location_data(self):
"""Read location data from the site and zone sheet"""
provided_sheetname = self.excel_specs["specs"][self.spec][
"location_sheet"
]
workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
provided_sheetname
)
spec_ = self.excel_specs["specs"][self.spec]
provided_sheetname = spec_["location_sheet"]
workbook_object, extracted_sheetname = \
self.get_xl_obj_and_sheetname(provided_sheetname)
if workbook_object is not None:
ws = workbook_object[extracted_sheetname]
else:
ws = self.wb_combined[provided_sheetname]
corridor_row = self.excel_specs["specs"][self.spec]["corridor_row"]
column = self.excel_specs["specs"][self.spec]["column"]
site_name_row = self.excel_specs["specs"][self.spec]["site_name_row"]
state_name_row = self.excel_specs["specs"][self.spec]["state_name_row"]
country_name_row = self.excel_specs["specs"][self.spec][
"country_name_row"
]
clli_name_row = self.excel_specs["specs"][self.spec]["clli_name_row"]
corridor_row = spec_["corridor_row"]
column = spec_["column"]
site_name_row = spec_["site_name_row"]
state_name_row = spec_["state_name_row"]
country_name_row = spec_["country_name_row"]
clli_name_row = spec_["clli_name_row"]
return {
"corridor": ws.cell(row=corridor_row, column=column).value,
"name": ws.cell(row=site_name_row, column=column).value,
"state": ws.cell(row=state_name_row, column=column).value,
"country": ws.cell(row=country_name_row, column=column).value,
"physical_location": ws.cell(
row=clli_name_row, column=column
).value,
"physical_location": ws.cell(row=clli_name_row,
column=column).value,
}
def validate_sheet_names_with_spec(self):
@ -384,8 +347,8 @@ class ExcelParser(object):
sheet_name_list.append(location_sheet_name)
try:
for sheetname in sheet_name_list:
workbook_object, extracted_sheetname = (
self.get_xl_obj_and_sheetname(sheetname))
workbook_object, extracted_sheetname = \
self.get_xl_obj_and_sheetname(sheetname)
if workbook_object is not None:
wb = workbook_object
sheetname = extracted_sheetname
@ -394,8 +357,7 @@ class ExcelParser(object):
if sheetname not in wb.sheetnames:
raise RuntimeError(
"SheetName '{}' not found ".format(sheetname)
)
"SheetName '{}' not found ".format(sheetname))
except RuntimeError as rerror:
LOG.critical(rerror)
sys.exit("Tugboat exited!!")
@ -418,11 +380,8 @@ class ExcelParser(object):
},
"site_info": site_info_data,
}
LOG.debug(
"Location data extracted from\
excel:\n%s",
pprint.pformat(data),
)
LOG.debug("Location data extracted from excel:\n%s" %
pprint.pformat(data))
return data
def combine_excel_design_specs(self, filenames):
@ -436,9 +395,8 @@ class ExcelParser(object):
loaded_workbook_ws = loaded_workbook[names]
for row in loaded_workbook_ws:
for cell in row:
design_spec_worksheet[
cell.coordinate
].value = cell.value
design_spec_worksheet[cell.coordinate].value = \
cell.value
return design_spec
def get_xl_obj_and_sheetname(self, sheetname):
@ -448,7 +406,7 @@ class ExcelParser(object):
"""
if re.search(".xlsx", sheetname) or re.search(".xls", sheetname):
""" Extract file name """
# Extract file name
source_xl_file = sheetname.split(":")[0]
wb = load_workbook(source_xl_file, data_only=True)
return [wb, sheetname.split(":")[1]]

66
spyglass/data_extractor/plugins/tugboat/tugboat.py Normal file → Executable file
View File

@ -16,6 +16,7 @@ import itertools
import logging
import pprint
import re
from spyglass.data_extractor.base import BaseDataSourcePlugin
from spyglass.data_extractor.plugins.tugboat.excel_parser import ExcelParser
@ -106,13 +107,14 @@ class TugboatPlugin(BaseDataSourcePlugin):
host_list = []
for rack in rackwise_hosts.keys():
for host in rackwise_hosts[rack]:
host_list.append(
{
"rack_name": rack,
"name": host,
"host_profile": ipmi_data[host]["host_profile"],
}
)
host_list.append({
"rack_name":
rack,
"name":
host,
"host_profile":
ipmi_data[host]["host_profile"],
})
return host_list
def get_networks(self, region):
@ -125,20 +127,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
private_net = self.parsed_xl_data["network_data"]["private"]
public_net = self.parsed_xl_data["network_data"]["public"]
# Extract network information from private and public network data
for net_type, net_val in itertools.chain(
private_net.items(), public_net.items()
):
for net_type, net_val in itertools.chain(private_net.items(),
public_net.items()):
tmp_vlan = {}
# Ingress is special network that has no vlan, only a subnet string
# So treatment for ingress is different
if net_type != "ingress":
# standardize the network name as net_type may be different.
# For e.g insteas of pxe it may be PXE or instead of calico
# For e.g instead of pxe it may be PXE or instead of calico
# it may be ksn. Valid network names are pxe, calico, oob, oam,
# overlay, storage, ingress
tmp_vlan["name"] = self._get_network_name_from_vlan_name(
net_type
)
tmp_vlan["name"] = \
self._get_network_name_from_vlan_name(net_type)
# extract vlan tag. It was extracted from xl file as 'VlAN 45'
# The code below extracts the numeric data from net_val['vlan']
@ -154,11 +154,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
tmp_vlan["name"] = "ingress"
tmp_vlan["subnet"] = net_val
vlan_list.append(tmp_vlan)
LOG.debug(
"vlan list extracted from tugboat:\n{}".format(
pprint.pformat(vlan_list)
)
)
LOG.debug("vlan list extracted from tugboat:\n{}".format(
pprint.pformat(vlan_list)))
return vlan_list
def get_ips(self, region, host=None):
@ -200,9 +197,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
ldap_info["domain"] = url.split(".")[1]
except IndexError as e:
LOG.error("url.split:{}".format(e))
ldap_info["common_name"] = ldap_raw_data.get(
"common_name", "#CHANGE_ME"
)
ldap_info["common_name"] = \
ldap_raw_data.get("common_name", "#CHANGE_ME")
ldap_info["subdomain"] = ldap_raw_data.get("subdomain", "#CHANGE_ME")
return ldap_info
@ -210,16 +206,16 @@ class TugboatPlugin(BaseDataSourcePlugin):
def get_ntp_servers(self, region):
"""Returns a comma separated list of ntp ip addresses"""
ntp_server_list = self._get_formatted_server_list(
self.parsed_xl_data["site_info"]["ntp"]
)
ntp_server_list = \
self._get_formatted_server_list(self.parsed_xl_data["site_info"]
["ntp"])
return ntp_server_list
def get_dns_servers(self, region):
"""Returns a comma separated list of dns ip addresses"""
dns_server_list = self._get_formatted_server_list(
self.parsed_xl_data["site_info"]["dns"]
)
dns_server_list = \
self._get_formatted_server_list(self.parsed_xl_data["site_info"]
["dns"])
return dns_server_list
def get_domain_name(self, region):
@ -228,17 +224,13 @@ class TugboatPlugin(BaseDataSourcePlugin):
return self.parsed_xl_data["site_info"]["domain"]
def get_location_information(self, region):
"""Prepare location data from information extracted
by ExcelParser(i.e raw data)
"""
"""Prepare location data from information extracted by ExcelParser"""
location_data = self.parsed_xl_data["site_info"]["location"]
corridor_pattern = r"\d+"
corridor_number = re.findall(
corridor_pattern, location_data["corridor"]
)[0]
corridor_number = \
re.findall(corridor_pattern, location_data["corridor"])[0]
name = location_data.get("name", "#CHANGE_ME")
state = location_data.get("state", "#CHANGE_ME")
country = location_data.get("country", "#CHANGE_ME")
@ -274,7 +266,6 @@ class TugboatPlugin(BaseDataSourcePlugin):
def _get_network_name_from_vlan_name(self, vlan_name):
"""Network names are ksn, oam, oob, overlay, storage, pxe
This is a utility function to determine the vlan acceptable
vlan from the name extracted from excel file
@ -314,8 +305,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
return "pxe"
# if nothing matches
LOG.error(
"Unable to recognize VLAN name extracted from Plugin data source"
)
"Unable to recognize VLAN name extracted from Plugin data source")
return ""
def _get_formatted_server_list(self, server_list):

140
spyglass/parser/engine.py Normal file → Executable file
View File

@ -15,22 +15,22 @@
import copy
import json
import logging
import pkg_resources
import pprint
import sys
import jsonschema
import netaddr
from netaddr import IPNetwork
from pkg_resources import resource_filename
import yaml
LOG = logging.getLogger(__name__)
class ProcessDataSource(object):
def __init__(self, sitetype):
def __init__(self, site_type):
# Initialize intermediary and save site type
self._initialize_intermediary()
self.region_name = sitetype
self.region_name = site_type
@staticmethod
def _read_file(file_name):
@ -64,15 +64,15 @@ class ProcessDataSource(object):
for net_type in self.data["network"]["vlan_network_data"]:
# One of the type is ingress and we don't want that here
if net_type != "ingress":
network_subnets[net_type] = netaddr.IPNetwork(
self.data["network"]["vlan_network_data"][net_type][
"subnet"
][0]
)
network_subnets[net_type] = \
IPNetwork(self.data["network"]
["vlan_network_data"]
[net_type]
["subnet"]
[0])
LOG.debug(
"Network subnets:\n{}".format(pprint.pformat(network_subnets))
)
LOG.debug("Network subnets:\n{}".format(
pprint.pformat(network_subnets)))
return network_subnets
def _get_genesis_node_details(self):
@ -83,11 +83,8 @@ class ProcessDataSource(object):
if rack_hosts[host]["type"] == "genesis":
self.genesis_node = rack_hosts[host]
self.genesis_node["name"] = host
LOG.debug(
"Genesis Node Details:\n{}".format(
pprint.pformat(self.genesis_node)
)
)
LOG.debug("Genesis Node Details:\n{}".format(
pprint.pformat(self.genesis_node)))
def _get_genesis_node_ip(self):
"""Returns the genesis node ip"""
@ -104,14 +101,13 @@ class ProcessDataSource(object):
def _validate_intermediary_data(self, data):
"""Validates the intermediary data before generating manifests.
It checks wether the data types and data format are as expected.
It checks whether the data types and data format are as expected.
The method validates this with regex pattern defined for each
data type.
"""
LOG.info("Validating Intermediary data")
temp_data = {}
# Peforming a deep copy
# Performing a deep copy
temp_data = copy.deepcopy(data)
# Converting baremetal dict to list.
baremetal_list = []
@ -120,7 +116,7 @@ class ProcessDataSource(object):
baremetal_list = baremetal_list + temp
temp_data["baremetal"] = baremetal_list
schema_dir = pkg_resources.resource_filename("spyglass", "schemas/")
schema_dir = resource_filename("spyglass", "schemas/")
schema_file = schema_dir + "data_schema.json"
json_data = json.loads(json.dumps(temp_data))
with open(schema_file, "r") as f:
@ -153,12 +149,12 @@ class ProcessDataSource(object):
These rules are used to determine ip address allocation ranges,
host profile interfaces and also to create hardware profile
information. The method calls corresponding rule hander function
information. The method calls corresponding rule handler function
based on rule name and applies them to appropriate data objects.
"""
LOG.info("Apply design rules")
rules_dir = pkg_resources.resource_filename("spyglass", "config/")
rules_dir = resource_filename("spyglass", "config/")
rules_file = rules_dir + "rules.yaml"
rules_data_raw = self._read_file(rules_file)
rules_yaml = yaml.safe_load(rules_data_raw)
@ -197,10 +193,8 @@ class ProcessDataSource(object):
# done to determine the genesis node.
for host in sorted(self.data["baremetal"][rack].keys()):
host_info = self.data["baremetal"][rack][host]
if (
host_info["host_profile"]
== hardware_profile["profile_name"]["ctrl"]
):
if host_info["host_profile"] \
== hardware_profile["profile_name"]["ctrl"]:
if not is_genesis:
host_info["type"] = "genesis"
is_genesis = True
@ -229,7 +223,7 @@ class ProcessDataSource(object):
If a particular ip exists it is overridden.
"""
# Ger defult ip offset
# Ger default ip offset
default_ip_offset = rule_data["default"]
host_idx = 0
@ -243,11 +237,8 @@ class ProcessDataSource(object):
host_networks[net] = str(ips[host_idx + default_ip_offset])
host_idx = host_idx + 1
LOG.debug(
"Updated baremetal host:\n{}".format(
pprint.pformat(self.data["baremetal"])
)
)
LOG.debug("Updated baremetal host:\n{}".format(
pprint.pformat(self.data["baremetal"])))
def _update_vlan_net_data(self, rule_data):
"""Offset allocation rules to determine ip address range(s)
@ -270,21 +261,17 @@ class ProcessDataSource(object):
# Set ingress vip and CIDR for bgp
LOG.info("Apply network design rules:bgp")
subnet = netaddr.IPNetwork(
self.data["network"]["vlan_network_data"]["ingress"]["subnet"][0]
)
vlan_network_data_ = self.data["network"]["vlan_network_data"]
subnet = IPNetwork(vlan_network_data_["ingress"]["subnet"][0])
ips = list(subnet)
self.data["network"]["bgp"]["ingress_vip"] = str(
ips[ingress_vip_offset]
)
self.data["network"]["bgp"]["public_service_cidr"] = self.data[
"network"
]["vlan_network_data"]["ingress"]["subnet"][0]
LOG.debug(
"Updated network bgp data:\n{}".format(
pprint.pformat(self.data["network"]["bgp"])
)
)
self.data["network"]["bgp"]["ingress_vip"] = \
str(ips[ingress_vip_offset])
self.data["network"]["bgp"]["public_service_cidr"] = \
(vlan_network_data_["ingress"]
["subnet"]
[0])
LOG.debug("Updated network bgp data:\n{}".format(
pprint.pformat(self.data["network"]["bgp"])))
LOG.info("Apply network design rules:vlan")
# Apply rules to vlan networks
@ -297,16 +284,11 @@ class ProcessDataSource(object):
subnet = self.network_subnets[net_type]
ips = list(subnet)
self.data["network"]["vlan_network_data"][net_type][
"gateway"
] = str(ips[gateway_ip_offset])
vlan_network_data_[net_type]["gateway"] = \
str(ips[gateway_ip_offset])
self.data["network"]["vlan_network_data"][net_type][
"reserved_start"
] = str(ips[1])
self.data["network"]["vlan_network_data"][net_type][
"reserved_end"
] = str(ips[ip_offset])
vlan_network_data_[net_type]["reserved_start"] = str(ips[1])
vlan_network_data_[net_type]["reserved_end"] = str(ips[ip_offset])
static_start = str(ips[ip_offset + 1])
static_end = str(ips[static_ip_end_offset])
@ -317,40 +299,26 @@ class ProcessDataSource(object):
dhcp_start = str(ips[mid])
dhcp_end = str(ips[dhcp_ip_end_offset])
self.data["network"]["vlan_network_data"][net_type][
"dhcp_start"
] = dhcp_start
self.data["network"]["vlan_network_data"][net_type][
"dhcp_end"
] = dhcp_end
vlan_network_data_[net_type]["dhcp_start"] = dhcp_start
vlan_network_data_[net_type]["dhcp_end"] = dhcp_end
self.data["network"]["vlan_network_data"][net_type][
"static_start"
] = static_start
self.data["network"]["vlan_network_data"][net_type][
"static_end"
] = static_end
vlan_network_data_[net_type]["static_start"] = static_start
vlan_network_data_[net_type]["static_end"] = static_end
# There is no vlan for oob network
if net_type != "oob":
self.data["network"]["vlan_network_data"][net_type][
"vlan"
] = self.data["network"]["vlan_network_data"][net_type]["vlan"]
vlan_network_data_[net_type]["vlan"] = \
vlan_network_data_[net_type]["vlan"]
# OAM have default routes. Only for cruiser. TBD
if net_type == "oam":
routes = ["0.0.0.0/0"]
else:
routes = []
self.data["network"]["vlan_network_data"][net_type][
"routes"
] = routes
vlan_network_data_[net_type]["routes"] = routes
LOG.debug(
"Updated vlan network data:\n{}".format(
pprint.pformat(self.data["network"]["vlan_network_data"])
)
)
LOG.debug("Updated vlan network data:\n{}".format(
pprint.pformat(vlan_network_data_)))
def load_extracted_data_from_data_source(self, extracted_data):
"""Function called from spyglass.py to pass extracted data
@ -366,12 +334,9 @@ class ProcessDataSource(object):
LOG.info("Loading plugin data source")
self.data = extracted_data
LOG.debug(
"Extracted data from plugin:\n{}".format(
pprint.pformat(extracted_data)
)
)
# Uncommeent following segment for debugging purpose.
LOG.debug("Extracted data from plugin:\n{}".format(
pprint.pformat(extracted_data)))
# Uncomment following segment for debugging purpose.
# extracted_file = "extracted_file.yaml"
# yaml_file = yaml.dump(extracted_data, default_flow_style=False)
# with open(extracted_file, 'w') as f:
@ -385,9 +350,8 @@ class ProcessDataSource(object):
"""Writing intermediary yaml"""
LOG.info("Writing intermediary yaml")
intermediary_file = "{}_intermediary.yaml".format(
self.data["region_name"]
)
intermediary_file = "{}_intermediary.yaml" \
.format(self.data["region_name"])
# Check of if output dir = intermediary_dir exists
if intermediary_dir is not None:
outfile = "{}/{}".format(intermediary_dir, intermediary_file)

View File

@ -14,7 +14,7 @@
class BaseProcessor(object):
def __init__(self, file_name):
def __init__(self):
pass
def render_template(self, template):
@ -32,9 +32,8 @@ class BaseProcessor(object):
"pxe": yaml_data["baremetal"][rack][host]["ip"]["pxe"],
"oam": yaml_data["baremetal"][rack][host]["ip"]["oam"],
}
elif (
yaml_data["baremetal"][rack][host]["type"] == "controller"
):
elif yaml_data["baremetal"][rack][host]["type"] \
== "controller":
hosts["masters"].append(host)
else:
hosts["workers"].append(host)

View File

@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from jinja2 import Environment
from jinja2 import FileSystemLoader
import logging
import os
from jinja2 import Environment
from jinja2 import FileSystemLoader
from spyglass.site_processors.base import BaseProcessor
LOG = logging.getLogger(__name__)
@ -23,6 +25,7 @@ LOG = logging.getLogger(__name__)
class SiteProcessor(BaseProcessor):
def __init__(self, intermediary_yaml, manifest_dir):
super().__init__()
self.yaml_data = intermediary_yaml
self.manifest_dir = manifest_dir
@ -47,20 +50,17 @@ class SiteProcessor(BaseProcessor):
for dirpath, dirs, files in os.walk(template_dir_abspath):
for filename in files:
j2_env = Environment(
autoescape=True,
loader=FileSystemLoader(dirpath),
trim_blocks=True,
)
j2_env.filters[
"get_role_wise_nodes"
] = self.get_role_wise_nodes
j2_env = Environment(autoescape=True,
loader=FileSystemLoader(dirpath),
trim_blocks=True)
j2_env.filters["get_role_wise_nodes"] = \
self.get_role_wise_nodes
templatefile = os.path.join(dirpath, filename)
outdirs = dirpath.split("templates")[1]
outfile_path = "{}{}{}".format(
site_manifest_dir, self.yaml_data["region_name"], outdirs
)
outfile_path = "{}{}{}".format(site_manifest_dir,
self.yaml_data["region_name"],
outdirs)
outfile_yaml = templatefile.split(".j2")[0].split("/")[-1]
outfile = outfile_path + "/" + outfile_yaml
outfile_dir = os.path.dirname(outfile)
@ -74,10 +74,7 @@ class SiteProcessor(BaseProcessor):
out.close()
except IOError as ioe:
LOG.error(
"IOError during rendering:{}".format(outfile_yaml)
)
"IOError during rendering:{}".format(outfile_yaml))
raise SystemExit(
"Error when generating {:s}:\n{:s}".format(
outfile, ioe.strerror
)
)
outfile, ioe.strerror))

View File

@ -13,10 +13,10 @@
# limitations under the License.
import logging
import pkg_resources
import pprint
import click
from pkg_resources import iter_entry_points
import yaml
from spyglass.parser.engine import ProcessDataSource
@ -26,25 +26,23 @@ LOG = logging.getLogger("spyglass")
@click.command()
@click.option(
"--site", "-s", help="Specify the site for which manifests to be generated"
)
@click.option(
"--type", "-t", help="Specify the plugin type formation or tugboat"
)
@click.option("--site",
"-s",
help="Specify the site for which manifests to be generated")
@click.option("--type",
"-t",
help="Specify the plugin type formation or tugboat")
@click.option("--formation_url", "-f", help="Specify the formation url")
@click.option("--formation_user", "-u", help="Specify the formation user id")
@click.option(
"--formation_password", "-p", help="Specify the formation user password"
)
@click.option("--formation_password",
"-p",
help="Specify the formation user password")
@click.option(
"--intermediary",
"-i",
type=click.Path(exists=True),
help=(
"Intermediary file path generate manifests, "
"use -m also with this option"
),
help="Intermediary file path generate manifests, "
"use -m also with this option",
)
@click.option(
"--additional_config",
@ -87,10 +85,8 @@ LOG = logging.getLogger("spyglass")
"-x",
multiple=True,
type=click.Path(exists=True),
help=(
"Path to engineering excel file, to be passed with "
"generate_intermediary"
),
help="Path to engineering excel file, to be passed with "
"generate_intermediary",
)
@click.option(
"--excel_spec",
@ -104,8 +100,8 @@ LOG = logging.getLogger("spyglass")
default=20,
multiple=False,
show_default=True,
help="Loglevel NOTSET:0 ,DEBUG:10, \
INFO:20, WARNING:30, ERROR:40, CRITICAL:50",
help="Loglevel NOTSET:0 ,DEBUG:10, INFO:20, WARNING:30, ERROR:40, "
"CRITICAL:50",
)
def main(*args, **kwargs):
# Extract user provided inputs
@ -121,9 +117,8 @@ def main(*args, **kwargs):
# Set Logging format
LOG.setLevel(loglevel)
stream_handle = logging.StreamHandler()
formatter = logging.Formatter(
"(%(name)s): %(asctime)s %(levelname)s %(message)s"
)
formatter = \
logging.Formatter("(%(name)s): %(asctime)s %(levelname)s %(message)s")
stream_handle.setFormatter(formatter)
LOG.addHandler(stream_handle)
@ -139,14 +134,12 @@ def main(*args, **kwargs):
if generate_manifests:
if template_dir is None:
LOG.error("Template directory not specified!! Spyglass exited")
LOG.error(
"It is mandatory to provide it when generate_manifests is true"
)
LOG.error("It is mandatory to provide it when "
"generate_manifests is true")
exit()
# Generate Intermediary yaml and manifests extracting data
# from data source specified by plugin type
intermediary_yaml = {}
if intermediary is None:
LOG.info("Generating Intermediary yaml")
plugin_type = kwargs.get("type", None)
@ -154,16 +147,13 @@ def main(*args, **kwargs):
# Discover the plugin and load the plugin class
LOG.info("Load the plugin class")
for entry_point in pkg_resources.iter_entry_points(
"data_extractor_plugins"
):
for entry_point in iter_entry_points("data_extractor_plugins"):
if entry_point.name == plugin_type:
plugin_class = entry_point.load()
if plugin_class is None:
LOG.error(
"Unsupported Plugin type. Plugin type:{}".format(plugin_type)
)
"Unsupported Plugin type. Plugin type:{}".format(plugin_type))
exit()
# Extract data from plugin data source
@ -179,17 +169,11 @@ def main(*args, **kwargs):
with open(additional_config, "r") as config:
raw_data = config.read()
additional_config_data = yaml.safe_load(raw_data)
LOG.debug(
"Additional config data:\n{}".format(
pprint.pformat(additional_config_data)
)
)
LOG.debug("Additional config data:\n{}".format(
pprint.pformat(additional_config_data)))
LOG.info(
"Apply additional configuration from:{}".format(
additional_config
)
)
LOG.info("Apply additional configuration from:{}".format(
additional_config))
data_extractor.apply_additional_data(additional_config_data)
LOG.debug(pprint.pformat(data_extractor.site_data))
@ -197,8 +181,7 @@ def main(*args, **kwargs):
LOG.info("Apply design rules to the extracted data")
process_input_ob = ProcessDataSource(site)
process_input_ob.load_extracted_data_from_data_source(
data_extractor.site_data
)
data_extractor.site_data)
LOG.info("Generate intermediary yaml")
intermediary_yaml = process_input_ob.generate_intermediary_yaml()

19
spyglass/utils/utils.py Normal file → Executable file
View File

@ -14,10 +14,9 @@
# Merge two dictionaries
def dict_merge(dictA, dictB, path=None):
def dict_merge(dict_a, dict_b, path=None):
"""Recursively Merge dictionary dictB into dictA
DictA represents the data extracted by a plugin and DictB
represents the additional site config dictionary that is passed
to CLI. The merge process compares the dictionary keys and if they
@ -28,14 +27,14 @@ def dict_merge(dictA, dictB, path=None):
if path is None:
path = []
for key in dictB:
if key in dictA:
if isinstance(dictA[key], dict) and isinstance(dictB[key], dict):
dict_merge(dictA[key], dictB[key], path + [str(key)])
elif dictA[key] == dictB[key]:
for key in dict_b:
if key in dict_a:
if isinstance(dict_a[key], dict) and isinstance(dict_b[key], dict):
dict_merge(dict_a[key], dict_b[key], path + [str(key)])
elif dict_a[key] == dict_b[key]:
pass # values are same, so no processing here
else:
dictA[key] = dictB[key]
dict_a[key] = dict_b[key]
else:
dictA[key] = dictB[key]
return dictA
dict_a[key] = dict_b[key]
return dict_a

View File

@ -1,5 +1,5 @@
# Formatting
yapf==0.20.0
yapf==0.27.0
# Linting
hacking>=1.1.0,<1.2.0 # Apache-2.0

View File

@ -31,7 +31,8 @@ basepython = python3
deps =
-r{toxinidir}/test-requirements.txt
commands =
bash -c "{toxinidir}/tools/gate/whitespace-linter.sh"
bash -c {toxinidir}/tools/gate/whitespace-linter.sh
yapf -dr {toxinidir}/spyglass {toxinidir}/setup.py
flake8 {toxinidir}/spyglass
whitelist_externals =
bash