author    Scott Hussey <sh8121@att.com>  2018-09-26 08:57:51 -0500
committer Scott Hussey <sh8121@att.com>  2018-09-26 08:57:51 -0500
commit    6ca7aa4bffe60986cd2d708fa045c126f12dec13 (patch)
tree      421a203a3ea2790287c68bb289d0997c525691aa
parent    2c9ec2503a557e854ba2d503e27625871e81bab9 (diff)

Catchup YAPF formatting

Notes (review):
    Code-Review+2: Felipe Monteiro <felipe.monteiro@att.com>
    Code-Review+2: Aaron Sheffield <ajs@sheffieldfamily.net>
    Workflow+1: Aaron Sheffield <ajs@sheffieldfamily.net>
    Verified+2: Zuul
    Submitted-by: Zuul
    Submitted-at: Tue, 02 Oct 2018 20:52:13 +0000
    Reviewed-on: https://review.openstack.org/605431
    Project: openstack/airship-drydock
    Branch: refs/heads/master
-rw-r--r--  docs/source/_static/drydock.conf.sample | 6
-rw-r--r--  etc/drydock/drydock.conf.sample | 9
-rw-r--r--  etc/drydock/policy.yaml.sample | 4
-rw-r--r--  python/drydock_provisioner/cli/part/commands.py | 4
-rw-r--r--  python/drydock_provisioner/cli/task/commands.py | 4
-rw-r--r--  python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py | 3
-rw-r--r--  python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py | 10
-rw-r--r--  python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py | 22
-rw-r--r--  python/drydock_provisioner/drivers/node/maasdriver/actions/node.py | 55
-rw-r--r--  python/drydock_provisioner/drivers/node/maasdriver/models/machine.py | 4
-rw-r--r--  python/drydock_provisioner/objects/bootaction.py | 3
-rw-r--r--  python/drydock_provisioner/objects/fields.py | 12
-rw-r--r--  python/drydock_provisioner/objects/node.py | 1
-rw-r--r--  python/drydock_provisioner/objects/task.py | 6
-rw-r--r--  python/drydock_provisioner/orchestrator/actions/orchestrator.py | 10
-rw-r--r--  python/drydock_provisioner/orchestrator/orchestrator.py | 5
-rw-r--r--  python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py | 10
-rw-r--r--  python/drydock_provisioner/policy.py | 27
-rw-r--r--  python/drydock_provisioner/statemgmt/state.py | 9
-rw-r--r--  python/tests/integration/postgres/test_api_bootaction.py | 9
-rw-r--r--  python/tests/integration/postgres/test_postgres_builddata.py | 5
-rw-r--r--  python/tests/unit/test_k8sdriver_promenade_client.py | 126
22 files changed, 172 insertions(+), 172 deletions(-)
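
This commit is formatting-only: it re-runs YAPF over the tree so the code
passes the project's style checks again, with no functional changes. As a
rough sketch (not taken from this repo), a catch-up pass like this can be
reproduced with YAPF's Python API; the 'pep8' style below is an assumption,
since Drydock keeps its real style settings in its own tox/lint configuration:

    # Sketch only: reformat every Python file under python/ with YAPF.
    # Assumes yapf is installed (pip install yapf); style_config is illustrative.
    import pathlib

    from yapf.yapflib.yapf_api import FormatCode

    for path in pathlib.Path('python').rglob('*.py'):
        source = path.read_text()
        # FormatCode returns the reformatted text plus a flag saying
        # whether anything actually changed.
        formatted, changed = FormatCode(
            source, filename=str(path), style_config='pep8')
        if changed:
            path.write_text(formatted)
            print('reformatted', path)

The equivalent command line is `yapf --in-place --recursive python/`, which
produces exactly the kind of re-wrapped call chains and decorator blocks seen
in the hunks below.
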
diff --git a/docs/source/_static/drydock.conf.sample b/docs/source/_static/drydock.conf.sample
index d0fc93d..b0c17d5 100644
--- a/docs/source/_static/drydock.conf.sample
+++ b/docs/source/_static/drydock.conf.sample
@@ -404,12 +404,12 @@
 # Timeout in minutes for deploying a node (integer value)
 #deploy_node = 45
 
-# Timeout in minutes for relabeling a node (integer value)
-#relabel_node = 5
-
 # Timeout in minutes between deployment completion and the all boot actions
 # reporting status (integer value)
 #bootaction_final_status = 15
 
 # Timeout in minutes for releasing a node (integer value)
 #destroy_node = 30
+
+# Timeout in minutes for relabeling a node (integer value)
+#relabel_node = 5
diff --git a/etc/drydock/drydock.conf.sample b/etc/drydock/drydock.conf.sample
index 64f6fb8..b0c17d5 100644
--- a/etc/drydock/drydock.conf.sample
+++ b/etc/drydock/drydock.conf.sample
@@ -276,6 +276,9 @@
 # Logger name for Node driver logging (string value)
 #nodedriver_logger_name = ${global_logger_name}.nodedriver
 
+# Logger name for Kubernetes driver logging (string value)
+#kubernetesdriver_logger_name = ${global_logger_name}.kubernetesdriver
+
 # Logger name for API server logging (string value)
 #control_logger_name = ${global_logger_name}.control
 
@@ -350,6 +353,9 @@
 # Module path string of the Node driver to enable (string value)
 #node_driver = drydock_provisioner.drivers.node.maasdriver.driver.MaasNodeDriver
 
+# Module path string of the Kubernetes driver to enable (string value)
+#kubernetes_driver = drydock_provisioner.drivers.kubernetes.promenade_driver.driver.PromenadeDriver
+
 # Module path string of the Network driver enable (string value)
 #network_driver = <None>
 
@@ -404,3 +410,6 @@
 
 # Timeout in minutes for releasing a node (integer value)
 #destroy_node = 30
+
+# Timeout in minutes for relabeling a node (integer value)
+#relabel_node = 5
diff --git a/etc/drydock/policy.yaml.sample b/etc/drydock/policy.yaml.sample
index 65706bf..22b2365 100644
--- a/etc/drydock/policy.yaml.sample
+++ b/etc/drydock/policy.yaml.sample
@@ -38,6 +38,10 @@
 # POST /api/v1.0/tasks
 #"physical_provisioner:destroy_nodes": "role:admin"
 
+# Create relabel_nodes task
+# POST /api/v1.0/tasks
+#"physical_provisioner:relabel_nodes": "role:admin"
+
 # Read build data for a node
 # GET /api/v1.0/nodes/{nodename}/builddata
 #"physical_provisioner:read_build_data": "role:admin"
diff --git a/python/drydock_provisioner/cli/part/commands.py b/python/drydock_provisioner/cli/part/commands.py
index 2a90a3a..b4abe9d 100644
--- a/python/drydock_provisioner/cli/part/commands.py
+++ b/python/drydock_provisioner/cli/part/commands.py
@@ -64,8 +64,8 @@ def part_list(ctx):
     """List parts of a design."""
     click.echo(
         json.dumps(
-            PartList(ctx.obj['CLIENT'], design_id=ctx.obj['DESIGN_ID'])
-            .invoke()))
+            PartList(ctx.obj['CLIENT'],
+                     design_id=ctx.obj['DESIGN_ID']).invoke()))
 
 
 @part.command(name='show')
diff --git a/python/drydock_provisioner/cli/task/commands.py b/python/drydock_provisioner/cli/task/commands.py
index 1e592c9..d7f207a 100644
--- a/python/drydock_provisioner/cli/task/commands.py
+++ b/python/drydock_provisioner/cli/task/commands.py
@@ -79,8 +79,8 @@ def task_create(ctx,
                 if node_names else [],
                 rack_names=[x.strip() for x in rack_names.split(',')]
                 if rack_names else [],
-                node_tags=[x.strip() for x in node_tags.split(',')]
-                if node_tags else [],
+                node_tags=[x.strip()
+                           for x in node_tags.split(',')] if node_tags else [],
                 block=block,
                 poll_interval=poll_interval).invoke()))
 
diff --git a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py
index 359a679..7930727 100644
--- a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py
+++ b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/actions/k8s_node.py
@@ -58,7 +58,8 @@ class RelabelNode(PromenadeAction):
         for n in nodes:
             # Relabel node through Promenade
             try:
-                self.logger.info("Relabeling node %s with node label data." % n.name)
+                self.logger.info(
+                    "Relabeling node %s with node label data." % n.name)
 
                 labels_dict = n.get_node_labels()
                 msg = "Set labels %s for node %s" % (str(labels_dict), n.name)
diff --git a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py
index 876d5fa..583d97c 100644
--- a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py
+++ b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/driver.py
@@ -35,8 +35,7 @@ class PromenadeDriver(KubernetesDriver):
     driver_desc = 'Promenade Kubernetes Driver'
 
     action_class_map = {
-        hd_fields.OrchestratorAction.RelabelNode:
-        RelabelNode,
+        hd_fields.OrchestratorAction.RelabelNode: RelabelNode,
     }
 
     def __init__(self, **kwargs):
@@ -103,8 +102,7 @@ class PromenadeDriver(KubernetesDriver):
                     action.start)
 
             timeout = action_timeouts.get(
-                task.action,
-                config.config_mgr.conf.timeouts.relabel_node)
+                task.action, config.config_mgr.conf.timeouts.relabel_node)
             finished, running = concurrent.futures.wait(
                 subtask_futures.values(), timeout=(timeout * 60))
 
@@ -118,8 +116,8 @@ class PromenadeDriver(KubernetesDriver):
                     task.failure()
                 else:
                     if f.exception():
-                        msg = ("Subtask %s raised unexpected exception: %s"
-                               % (str(uuid.UUID(bytes=t)), str(f.exception())))
+                        msg = ("Subtask %s raised unexpected exception: %s" %
+                               (str(uuid.UUID(bytes=t)), str(f.exception())))
                         self.logger.error(msg, exc_info=f.exception())
                         task.add_status_msg(
                             msg=msg,
diff --git a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py
index 386c992..c4b6ef9 100644
--- a/python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py
+++ b/python/drydock_provisioner/drivers/kubernetes/promenade_driver/promenade_client.py
@@ -22,6 +22,7 @@ from keystoneauth1 import exceptions as exc
 import drydock_provisioner.error as errors
 from drydock_provisioner.util import KeystoneUtils
 
+
 # TODO: Remove this local implementation of Promenade Session and client once
 # Promenade api client is available as part of Promenade project.
 class PromenadeSession(object):
@@ -35,10 +36,7 @@ class PromenadeSession(object):
     read timeout to use
     """
 
-    def __init__(self,
-                 scheme='http',
-                 marker=None,
-                 timeout=None):
+    def __init__(self, scheme='http', marker=None, timeout=None):
         self.logger = logging.getLogger(__name__)
         self.__session = requests.Session()
 
@@ -63,8 +61,8 @@ class PromenadeSession(object):
 
     def set_auth(self):
 
-        auth_header = self._auth_gen()
-        self.__session.headers.update(auth_header)
+        auth_header = self._auth_gen()
+        self.__session.headers.update(auth_header)
 
     def get(self, route, query=None, timeout=None):
         """
@@ -220,11 +218,10 @@ class PromenadeSession(object):
         try:
             ks_session = KeystoneUtils.get_session()
         except exc.AuthorizationFailure as aferr:
-            self.logger.error(
-                'Could not authorize against Keystone: %s',
-                str(aferr))
-            raise errors.DriverError('Could not authorize against Keystone: %s',
-                                     str(aferr))
+            self.logger.error('Could not authorize against Keystone: %s',
+                              str(aferr))
+            raise errors.DriverError(
+                'Could not authorize against Keystone: %s', str(aferr))
 
         return ks_session
 
@@ -235,8 +232,7 @@ class PromenadeSession(object):
 
         try:
             prom_endpoint = ks_session.get_endpoint(
-                interface='internal',
-                service_type='kubernetesprovisioner')
+                interface='internal', service_type='kubernetesprovisioner')
         except exc.EndpointNotFound:
             self.logger.error("Could not find an internal interface"
                               " defined in Keystone for Promenade")
diff --git a/python/drydock_provisioner/drivers/node/maasdriver/actions/node.py b/python/drydock_provisioner/drivers/node/maasdriver/actions/node.py
index 6a376b0..732aec9 100644
--- a/python/drydock_provisioner/drivers/node/maasdriver/actions/node.py
+++ b/python/drydock_provisioner/drivers/node/maasdriver/actions/node.py
@@ -278,7 +278,8 @@ class DestroyNode(BaseMaasAction):
                                                       site_design)
         for n in nodes:
             try:
-                machine = machine_list.identify_baremetal_node(n, update_name=False)
+                machine = machine_list.identify_baremetal_node(
+                    n, update_name=False)
 
                 if machine is None:
                     msg = "Could not locate machine for node {}".format(n.name)
@@ -297,7 +298,8 @@ class DestroyNode(BaseMaasAction):
                 try:
                     machine.release(erase_disk=True, quick_erase=True)
                 except errors.DriverError:
-                    msg = "Error Releasing node {}, skipping".format(n.name)
+                    msg = "Error Releasing node {}, skipping".format(
+                        n.name)
                     self.logger.warning(msg)
                     self.task.add_status_msg(
                         msg=msg, error=True, ctx=n.name, ctx_type='node')
@@ -306,25 +308,26 @@ class DestroyNode(BaseMaasAction):
 
                 # node release with erase disk will take sometime monitor it
                 attempts = 0
-                max_attempts = (config.config_mgr.conf.timeouts.destroy_node
-                                * 60) // config.config_mgr.conf.maasdriver.poll_interval
+                max_attempts = (
+                    config.config_mgr.conf.timeouts.destroy_node *
+                    60) // config.config_mgr.conf.maasdriver.poll_interval
 
-                while (attempts < max_attempts
-                       and (not machine.status_name.startswith('Ready')
-                            and not machine.status_name.startswith(
-                                'Failed'))):
+                while (attempts < max_attempts and
+                       (not machine.status_name.startswith('Ready')
+                        and not machine.status_name.startswith('Failed'))):
                     attempts = attempts + 1
                     time.sleep(
                         config.config_mgr.conf.maasdriver.poll_interval)
                     try:
                         machine.refresh()
                         self.logger.debug(
-                            "Polling node {} status attempt {:d} of {:d}: {}".format(
-                                n.name, attempts, max_attempts,
-                                machine.status_name))
+                            "Polling node {} status attempt {:d} of {:d}: {}"
+                            .format(n.name, attempts, max_attempts,
+                                    machine.status_name))
                     except Exception:
                         self.logger.warning(
-                            "Error updating node {} status during release node, will re-attempt.".format(n.name))
+                            "Error updating node {} status during release node, will re-attempt."
+                            .format(n.name))
                 if machine.status_name.startswith('Ready'):
                     msg = "Node {} released and disk erased.".format(
                         n.name)
@@ -354,8 +357,8 @@ class DestroyNode(BaseMaasAction):
                 try:
                     if n.oob_type == 'libvirt':
                         self.logger.info(
-                            'Resetting MaaS virsh power parameters for node {}.'.format(
-                                n.name))
+                            'Resetting MaaS virsh power parameters for node {}.'
+                            .format(n.name))
                         # setting power type attibutes to empty string
                         # will remove them from maas BMC table
                         machine.reset_power_parameters()
@@ -363,8 +366,8 @@ class DestroyNode(BaseMaasAction):
                     pass
 
                 machine.delete()
-                msg = "Deleted Node: {} in status: {}.".format(n.name,
-                                                               machine.status_name)
+                msg = "Deleted Node: {} in status: {}.".format(
+                    n.name, machine.status_name)
                 self.logger.info(msg)
                 self.task.add_status_msg(
                     msg=msg, error=False, ctx=n.name, ctx_type='node')
@@ -1147,16 +1150,17 @@ class ConfigureHardware(BaseMaasAction):
 
                     # Poll machine status
                     attempts = 0
-                    max_attempts = (config.config_mgr.conf.timeouts.configure_hardware
-                                    * 60) // config.config_mgr.conf.maasdriver.poll_interval
+                    max_attempts = (
+                        config.config_mgr.conf.timeouts.configure_hardware
+                        * 60
+                    ) // config.config_mgr.conf.maasdriver.poll_interval
 
                     while (attempts < max_attempts and
                            (machine.status_name != 'Ready' and
                             not machine.status_name.startswith('Failed'))):
                         attempts = attempts + 1
-                        time.sleep(
-                            config.config_mgr.conf.maasdriver.poll_interval
-                        )
+                        time.sleep(config.config_mgr.conf.maasdriver.
+                                   poll_interval)
                         try:
                             machine.refresh()
                             self.logger.debug(
@@ -1226,7 +1230,9 @@ class ConfigureHardware(BaseMaasAction):
             except Exception as ex:
                 msg = "Error commissioning node %s: %s" % (n.name, str(ex))
                 self.logger.warning(msg)
-                self.logger.debug("Unhandled exception attempting to commission node.", exc_info=ex)
+                self.logger.debug(
+                    "Unhandled exception attempting to commission node.",
+                    exc_info=ex)
                 self.task.add_status_msg(
                     msg=msg, error=True, ctx=n.name, ctx_type='node')
                 self.task.failure(focus=n.get_id())
@@ -2312,8 +2318,9 @@ class DeployNode(BaseMaasAction):
                     continue
 
             attempts = 0
-            max_attempts = (config.config_mgr.conf.timeouts.deploy_node
-                            * 60) // config.config_mgr.conf.maasdriver.poll_interval
+            max_attempts = (
+                config.config_mgr.conf.timeouts.deploy_node *
+                60) // config.config_mgr.conf.maasdriver.poll_interval
 
             while (attempts < max_attempts
                    and (not machine.status_name.startswith('Deployed')
diff --git a/python/drydock_provisioner/drivers/node/maasdriver/models/machine.py b/python/drydock_provisioner/drivers/node/maasdriver/models/machine.py
index ff6b5a5..4add289 100644
--- a/python/drydock_provisioner/drivers/node/maasdriver/models/machine.py
+++ b/python/drydock_provisioner/drivers/node/maasdriver/models/machine.py
@@ -423,8 +423,8 @@ class Machine(model_base.ResourceBase):
             return True
 
         raise errors.DriverError(
-            "Failed updating power parameters MAAS url {} - return code {}\n{}".format(
-                url, resp.status_code.resp.text))
+            "Failed updating power parameters MAAS url {} - return code {}\n{}"
+            .format(url, resp.status_code.resp.text))
 
     def to_dict(self):
         """Serialize this resource instance into a dict.
diff --git a/python/drydock_provisioner/objects/bootaction.py b/python/drydock_provisioner/objects/bootaction.py
index b55a962..b04326b 100644
--- a/python/drydock_provisioner/objects/bootaction.py
+++ b/python/drydock_provisioner/objects/bootaction.py
@@ -87,7 +87,8 @@ class BootAction(base.DrydockPersistentObject, base.DrydockObject):
         for a in self.asset_list:
             if type_filter is None or (type_filter is not None
                                        and a.type == type_filter):
-                a.render(nodename, site_design, action_id, action_key, design_ref)
+                a.render(nodename, site_design, action_id, action_key,
+                         design_ref)
                 assets.append(a)
 
         return assets
diff --git a/python/drydock_provisioner/objects/fields.py b/python/drydock_provisioner/objects/fields.py
index 71c6d48..87cf0f4 100644
--- a/python/drydock_provisioner/objects/fields.py
+++ b/python/drydock_provisioner/objects/fields.py
@@ -70,12 +70,12 @@ class OrchestratorAction(BaseDrydockEnum):
 
     ALL = (Noop, ValidateDesign, VerifySite, PrepareSite, VerifyNodes,
            PrepareNodes, DeployNodes, BootactionReport, DestroyNodes,
-           RelabelNodes, ConfigNodePxe, SetNodeBoot, PowerOffNode,
-           PowerOnNode, PowerCycleNode, InterrogateOob, RelabelNode,
-           CreateNetworkTemplate, CreateStorageTemplate, CreateBootMedia,
-           PrepareHardwareConfig, ConfigureHardware, InterrogateNode,
-           ApplyNodeNetworking, ApplyNodeStorage, ApplyNodePlatform,
-           DeployNode, DestroyNode, ConfigureNodeProvisioner)
+           RelabelNodes, ConfigNodePxe, SetNodeBoot, PowerOffNode, PowerOnNode,
+           PowerCycleNode, InterrogateOob, RelabelNode, CreateNetworkTemplate,
+           CreateStorageTemplate, CreateBootMedia, PrepareHardwareConfig,
+           ConfigureHardware, InterrogateNode, ApplyNodeNetworking,
+           ApplyNodeStorage, ApplyNodePlatform, DeployNode, DestroyNode,
+           ConfigureNodeProvisioner)
 
 
 class OrchestratorActionField(fields.BaseEnumField):
diff --git a/python/drydock_provisioner/objects/node.py b/python/drydock_provisioner/objects/node.py
index a1ad556..6e38f97 100644
--- a/python/drydock_provisioner/objects/node.py
+++ b/python/drydock_provisioner/objects/node.py
@@ -338,6 +338,7 @@ class BaremetalNode(drydock_provisioner.objects.hostprofile.HostProfile):
 
         return labels_dict
 
+
 @base.DrydockObjectRegistry.register
 class BaremetalNodeList(base.DrydockObjectListBase, base.DrydockObject):
 
diff --git a/python/drydock_provisioner/objects/task.py b/python/drydock_provisioner/objects/task.py
index 8de7870..5fc910c 100644
--- a/python/drydock_provisioner/objects/task.py
+++ b/python/drydock_provisioner/objects/task.py
@@ -274,7 +274,8 @@ class Task(object):
                             "Bubbling subtask success for entity %s." % se)
                         self.result.add_success(se)
                 else:
-                    self.logger.debug("Skipping subtask success due to action filter.")
+                    self.logger.debug(
+                        "Skipping subtask success due to action filter.")
             # All failures are bubbled up.
             if self.retry == 0 or (self.retry == st.retry):
                 for fe in st.result.failures:
@@ -283,8 +284,7 @@ class Task(object):
                         self.result.add_failure(fe)
             else:
                 self.logger.debug(
-                    "Skipping failures as they mismatch task retry sequence."
-                )
+                    "Skipping failures as they mismatch task retry sequence.")
 
     def align_result(self, action_filter=None, reset_status=True):
         """Align the result of this task with the combined results of all the subtasks.
diff --git a/python/drydock_provisioner/orchestrator/actions/orchestrator.py b/python/drydock_provisioner/orchestrator/actions/orchestrator.py
index 90c6647..26fbdd8 100644
--- a/python/drydock_provisioner/orchestrator/actions/orchestrator.py
+++ b/python/drydock_provisioner/orchestrator/actions/orchestrator.py
@@ -245,9 +245,8 @@ class DestroyNodes(BaseAction):
             node_filter=self.task.node_filter)
         self.task.register_subtask(node_release_task)
 
-        self.logger.info(
-            "Starting node driver task %s to Release nodes." %
-            (node_release_task.get_id()))
+        self.logger.info("Starting node driver task %s to Release nodes." %
+                         (node_release_task.get_id()))
         node_driver.execute_task(node_release_task.get_id())
 
         node_release_task = self.state_manager.get_task(
@@ -1079,9 +1078,8 @@ class RelabelNodes(BaseAction):
             node_filter=nf)
         self.task.register_subtask(relabel_node_task)
 
-        self.logger.info(
-            "Starting kubernetes driver task %s to relabel nodes." %
-            (relabel_node_task.get_id()))
+        self.logger.info("Starting kubernetes driver task %s to relabel nodes."
+                         % (relabel_node_task.get_id()))
         kubernetes_driver.execute_task(relabel_node_task.get_id())
 
         relabel_node_task = self.state_manager.get_task(
diff --git a/python/drydock_provisioner/orchestrator/orchestrator.py b/python/drydock_provisioner/orchestrator/orchestrator.py
index 6ea9f66..44f23e5 100644
--- a/python/drydock_provisioner/orchestrator/orchestrator.py
+++ b/python/drydock_provisioner/orchestrator/orchestrator.py
@@ -109,8 +109,9 @@ class Orchestrator(object):
             kubernetes_driver_class = getattr(
                 importlib.import_module(m), c, None)
             if kubernetes_driver_class is not None:
-                self.enabled_drivers['kubernetes'] = kubernetes_driver_class(
-                    state_manager=state_manager, orchestrator=self)
+                self.enabled_drivers[
+                    'kubernetes'] = kubernetes_driver_class(
+                        state_manager=state_manager, orchestrator=self)
 
     def watch_for_tasks(self):
         """Start polling the database watching for Queued tasks to execute."""
diff --git a/python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py b/python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py
index eb30e95..bc2f30d 100644
--- a/python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py
+++ b/python/drydock_provisioner/orchestrator/validations/network_trunking_rational.py
@@ -40,17 +40,15 @@ class NetworkTrunkingRational(Validators):
             )
 
         # trunking mode is disabled, default_network must be defined
-        if (network_link.trunk_mode ==
-                hd_fields.NetworkLinkTrunkingMode.Disabled
-                and network_link.native_network is None):
+        if (network_link.trunk_mode == hd_fields.NetworkLinkTrunkingMode.
+                Disabled and network_link.native_network is None):
 
             msg = 'Trunking mode is disabled, a trunking default_network must be defined'
             self.report_error(
                 msg, [network_link.doc_ref],
                 "Non-trunked links must have a native network defined.")
-        elif (network_link.trunk_mode ==
-                hd_fields.NetworkLinkTrunkingMode.Disabled
-                and network_link.native_network is not None):
+        elif (network_link.trunk_mode == hd_fields.NetworkLinkTrunkingMode.
+              Disabled and network_link.native_network is not None):
             network = site_design.get_network(network_link.native_network)
             if network and network.vlan_id:
                 msg = "Network link native network has a defined VLAN tag."
diff --git a/python/drydock_provisioner/policy.py b/python/drydock_provisioner/policy.py
index a4c182f..8fa1fcc 100644
--- a/python/drydock_provisioner/policy.py
+++ b/python/drydock_provisioner/policy.py
@@ -38,15 +38,15 @@ class DrydockPolicy(object):
 
         # Orchestrator Policy
         task_rules = [
-            policy.DocumentedRuleDefault('physical_provisioner:read_task',
-                                         'role:admin', 'Get task status',
-                                         [{
-                                             'path': '/api/v1.0/tasks',
-                                             'method': 'GET'
-                                         }, {
-                                             'path': '/api/v1.0/tasks/{task_id}',
-                                             'method': 'GET'
-                                         }]),
+            policy.DocumentedRuleDefault(
+                'physical_provisioner:read_task', 'role:admin', 'Get task status',
+                [{
+                    'path': '/api/v1.0/tasks',
+                    'method': 'GET'
+                }, {
+                    'path': '/api/v1.0/tasks/{task_id}',
+                    'method': 'GET'
+                }]),
             policy.DocumentedRuleDefault('physical_provisioner:create_task',
                                          'role:admin', 'Create a task',
                                          [{
@@ -103,10 +103,11 @@ class DrydockPolicy(object):
             }]),
             policy.DocumentedRuleDefault(
                 'physical_provisioner:read_build_data', 'role:admin',
-                'Read build data for a node',
-                [{
-                    'path': '/api/v1.0/nodes/{nodename}/builddata',
-                    'method': 'GET',
+                'Read build data for a node', [{
+                    'path':
+                    '/api/v1.0/nodes/{nodename}/builddata',
+                    'method':
+                    'GET',
                 }]),
         ]
 
diff --git a/python/drydock_provisioner/statemgmt/state.py b/python/drydock_provisioner/statemgmt/state.py
index 2166b16..0055681 100644
--- a/python/drydock_provisioner/statemgmt/state.py
+++ b/python/drydock_provisioner/statemgmt/state.py
@@ -169,9 +169,8 @@ class DrydockState(object):
         with self.db_engine.connect() as conn:
             if allowed_actions is None:
                 query = self.tasks_tbl.select().where(
-                    self.tasks_tbl.c.status ==
-                    hd_fields.TaskStatus.Queued).order_by(
-                        self.tasks_tbl.c.created.asc())
+                    self.tasks_tbl.c.status == hd_fields.TaskStatus.
+                    Queued).order_by(self.tasks_tbl.c.created.asc())
                 rs = conn.execute(query)
             else:
                 query = sql.text("SELECT * FROM tasks WHERE "
@@ -340,8 +339,8 @@ class DrydockState(object):
         try:
             with self.db_engine.connect() as conn:
                 query = self.active_instance_tbl.update().where(
-                    self.active_instance_tbl.c.identity ==
-                    leader_id.bytes).values(last_ping=datetime.utcnow())
+                    self.active_instance_tbl.c.identity == leader_id.
+                    bytes).values(last_ping=datetime.utcnow())
                 rs = conn.execute(query)
                 rc = rs.rowcount
 
diff --git a/python/tests/integration/postgres/test_api_bootaction.py b/python/tests/integration/postgres/test_api_bootaction.py
index fa3bcc7..421897c 100644
--- a/python/tests/integration/postgres/test_api_bootaction.py
+++ b/python/tests/integration/postgres/test_api_bootaction.py
@@ -26,7 +26,8 @@ from drydock_provisioner.control.api import start_api
 class TestClass(object):
     def test_bootaction_context(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
         auth_hdr = {'X-Bootaction-Key': "%s" % seed_bootaction['identity_key']}
 
         result = falcontest.simulate_get(url, headers=auth_hdr)
@@ -47,7 +48,8 @@ class TestClass(object):
 
     def test_bootaction_context_noauth(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
 
         result = falcontest.simulate_get(url)
 
@@ -55,7 +57,8 @@ class TestClass(object):
 
     def test_bootaction_context_badauth(self, falcontest, seed_bootaction):
         """Test that the API will return a boot action context"""
-        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction['nodename']
+        url = "/api/v1.0/bootactions/nodes/%s/units" % seed_bootaction[
+            'nodename']
         auth_hdr = {'X-Bootaction-Key': 'deadbeef'}
 
         result = falcontest.simulate_get(url, headers=auth_hdr)
diff --git a/python/tests/integration/postgres/test_postgres_builddata.py b/python/tests/integration/postgres/test_postgres_builddata.py
index 377ee62..6cf24ee 100644
--- a/python/tests/integration/postgres/test_postgres_builddata.py
+++ b/python/tests/integration/postgres/test_postgres_builddata.py
@@ -90,9 +90,8 @@ class TestBuildData(object):
         }
 
         build_data_old = copy.deepcopy(build_data_latest)
-        build_data_old[
-            'collected_date'] = build_data_latest['collected_date'] - timedelta(
-                days=1)
+        build_data_old['collected_date'] = build_data_latest[
+            'collected_date'] - timedelta(days=1)
         build_data_old['task_id'] = uuid.uuid4()
 
         build_data1 = objects.BuildData(**build_data_latest)
diff --git a/python/tests/unit/test_k8sdriver_promenade_client.py b/python/tests/unit/test_k8sdriver_promenade_client.py
index 088bd6c..72e63f4 100644
--- a/python/tests/unit/test_k8sdriver_promenade_client.py
+++ b/python/tests/unit/test_k8sdriver_promenade_client.py
@@ -25,16 +25,14 @@ PROM_URL = urlparse('http://promhost:80/api/v1.0')
 PROM_HOST = 'promhost'
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_put(patch1, patch2):
     """
@@ -47,51 +45,43 @@ def test_put(patch1, patch2):
         status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.put('v1.0/node-label/n1',
-                              body='{"key1":"label1"}',
-                              timeout=(60, 60))
+    result = prom_session.put(
+        'v1.0/node-label/n1', body='{"key1":"label1"}', timeout=(60, 60))
 
     assert PROM_HOST == prom_session.host
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_get(patch1, patch2):
     """
     Test get functionality
     """
     responses.add(
-        responses.GET,
-        'http://promhost:80/api/v1.0/node-label/n1',
-        status=200)
+        responses.GET, 'http://promhost:80/api/v1.0/node-label/n1', status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.get('v1.0/node-label/n1',
-                              timeout=(60, 60))
+    result = prom_session.get('v1.0/node-label/n1', timeout=(60, 60))
 
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_post(patch1, patch2):
     """
@@ -104,24 +94,21 @@ def test_post(patch1, patch2):
         status=200)
 
     prom_session = PromenadeSession()
-    result = prom_session.post('v1.0/node-label/n1',
-                               body='{"key1":"label1"}',
-                               timeout=(60, 60))
+    result = prom_session.post(
+        'v1.0/node-label/n1', body='{"key1":"label1"}', timeout=(60, 60))
 
     assert PROM_HOST == prom_session.host
     assert result.status_code == 200
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node(patch1, patch2):
     """
@@ -141,16 +128,14 @@ def test_relabel_node(patch1, patch2):
     assert result == {"key1": "label1"}
 
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node_403_status(patch1, patch2):
     """
@@ -167,16 +152,15 @@ def test_relabel_node_403_status(patch1, patch2):
     with pytest.raises(errors.ClientForbiddenError):
         prom_client.relabel_node('n1', {"key1": "label1"})
 
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession._get_prom_url',
-    return_value=PROM_URL)
-@mock.patch(
-    'drydock_provisioner.drivers.kubernetes'
-    '.promenade_driver.promenade_client'
-    '.PromenadeSession.set_auth',
-    return_value=None)
+
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession._get_prom_url',
+            return_value=PROM_URL)
+@mock.patch('drydock_provisioner.drivers.kubernetes'
+            '.promenade_driver.promenade_client'
+            '.PromenadeSession.set_auth',
+            return_value=None)
 @responses.activate
 def test_relabel_node_401_status(patch1, patch2):
     """