Shipyard upgrade for Focal

- upgraded Airflow to 1.10.15:
  https://airflow.apache.org/docs/apache-airflow/1.10.15/changelog.html
- disabled Xenial, Bionic and openSUSE image build gates
- added Focal image build gate
- added Focal Zuul build node
- adjusted Makefile for Focal
- added bindep.txt to utilize the bindep Zuul base role for Zuul build
  node pre-setup
- added Focal Dockerfile
- implemented the frozen requirements.txt approach used in other Airship
  projects
- removed the Airflow-specific requirements.txt in favor of using
  requirements-frozen.txt from the shipyard_airflow project when building
  the Airflow Docker image
- fixed Docker image publishing to Quay
- replaced deprecated LOG.warn calls with LOG.warning
- replaced the deprecated body attribute on responses with the
  response.text attribute
- replaced the deprecated falcon.API call with falcon.App
- replaced the deprecated routing.create_http_method_map method with
  routing.map_http_methods
- re-formatted code indentation based on yapf recommendations
- replaced the deprecated protocol argument in testing.create_environ()
  with the http_version argument
- replaced the deprecated app argument in testing.create_environ() with
  the root_path argument
- fixed Airflow CLI commands to match the 1.10.15 syntax
- updated Zuul gates to work on Focal nodes and added Focal-specific
  node setup items via the appropriate Ansible tasks and roles
- uplifted Helm to 3.9.4
- uplifted the stable HTK commit id
- updated tox.ini to work with tox v4
- uplifted dependency references to other Airship projects
- synchronized common Python dependencies with other Airship projects
  (Promenade, Deckhand, Armada, Drydock)
- fixed the Airskiff deployment gate
- fixed the genconfig* environments in the shipyard_airflow tox.ini
  responsible for maintaining policy.yaml.sample and shipyard.conf.sample

Change-Id: I0c85187dc9bacf0849382563dd5ff7e9b2814c59
Sergiy Markin 2023-03-10 00:20:04 +00:00
parent e3e71f7d16
commit 154a099b28
65 changed files with 686 additions and 478 deletions
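For reference, the Falcon 3.x migration pattern applied throughout the API controllers looks roughly like the minimal sketch below. It is illustrative only: the resource, route, and logger names are placeholders rather than the actual Shipyard handlers, and it assumes falcon 3.x plus the standard library logging module.

# Minimal sketch of the falcon.API -> falcon.App, resp.body -> resp.text,
# create_http_method_map -> map_http_methods and LOG.warn -> LOG.warning
# changes; names are illustrative, not the real Shipyard resources.
import json
import logging

import falcon
from falcon import routing

LOG = logging.getLogger(__name__)


class VersionsResource:
    def on_get(self, req, resp):
        # falcon 3 renamed Response.body to Response.text
        resp.text = json.dumps(
            {'v1.0': {'path': '/api/v1.0', 'status': 'stable'}})
        resp.status = falcon.HTTP_200

    def on_options(self, req, resp):
        # routing.create_http_method_map() is gone; map_http_methods()
        # returns the {HTTP method: responder} mapping for the resource
        for method, responder in routing.map_http_methods(self).items():
            if responder.__name__ != 'method_not_allowed':
                resp.append_header('Allow', method)
        resp.status = falcon.HTTP_200


# falcon.API is deprecated in favor of falcon.App
api = falcon.App()
api.add_route('/versions', VersionsResource())

# logging.Logger.warn is deprecated; use warning()
LOG.warning("Example of the LOG.warn -> LOG.warning replacement")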


@ -13,7 +13,7 @@
- project:
templates:
- docs-on-readthedocs
- openstack-python36-jobs
- openstack-python38-jobs
vars:
rtd_webhook_id: '38576'
rtd_project_name: 'airship-shipyard'
@ -25,22 +25,16 @@
- airship-shipyard-chart-build-latest-htk
- airship-shipyard-whitespace-lint-gate
- airship-shipyard-airskiff-deployment
- airship-shipyard-image-gate-ubuntu_xenial
- airship-shipyard-image-gate-ubuntu_bionic
- airship-shipyard-image-gate-opensuse
- airship-shipyard-image-gate-ubuntu_focal
gate:
jobs:
- openstack-tox-pep8
- airship-shipyard-chart-build-gate
- airship-shipyard-whitespace-lint-gate
- airship-shipyard-image-gate-ubuntu_xenial
- airship-shipyard-image-gate-ubuntu_bionic
- airship-shipyard-image-gate-opensuse
- airship-shipyard-image-gate-ubuntu_focal
post:
jobs:
- airship-shipyard-docker-build-post-ubuntu_xenial
- airship-shipyard-docker-build-post-ubuntu_bionic
- airship-shipyard-docker-build-post-opensuse
- airship-shipyard-docker-build-post-ubuntu_focal
- shipyard-upload-git-mirror
- nodeset:
@ -49,12 +43,18 @@
- name: primary
label: ubuntu-bionic
- nodeset:
name: airship-shipyard-single-node-focal
nodes:
- name: primary
label: ubuntu-focal
- job:
name: airship-shipyard-chart-build-gate
description: Build charts using pinned Helm toolkit.
timeout: 900
run: tools/gate/playbooks/build-charts.yaml
nodeset: airship-shipyard-single-node
nodeset: airship-shipyard-single-node-focal
- job:
name: airship-shipyard-chart-build-latest-htk
@ -62,7 +62,7 @@
timeout: 900
voting: false
run: tools/gate/playbooks/build-charts.yaml
nodeset: airship-shipyard-single-node
nodeset: airship-shipyard-single-node-focal
vars:
HTK_COMMIT: master
@ -71,7 +71,7 @@
description: |
Lints all files by checking them for whitespace.
run: tools/gate/playbooks/zuul-linter.yaml
nodeset: airship-shipyard-single-node
nodeset: airship-shipyard-single-node-focal
- job:
name: airship-shipyard-image-base
@ -79,44 +79,31 @@
Base job for running airship-shipyard image related jobs.
timeout: 3600
run: tools/gate/playbooks/run-image.yaml
nodeset: airship-shipyard-single-node
nodeset: airship-shipyard-single-node-focal
irrelevant-files:
- ^.*\.rst$
- ^doc/.*$
- ^releasenotes/.*$
- job:
name: airship-shipyard-image-gate-ubuntu_xenial
voting: false
name: airship-shipyard-image-gate-ubuntu_focal
description: |
Run shipyard-image build for ubuntu_xenial.
Run shipyard-image build for ubuntu_focal.
parent: airship-shipyard-image-base
vars:
publish: false
distro: ubuntu_xenial
tags:
dynamic:
patch_set: true
- job:
name: airship-shipyard-image-gate-ubuntu_bionic
description: |
Run shipyard-image build for ubuntu_bionic.
parent: airship-shipyard-image-base
vars:
publish: false
distro: ubuntu_bionic
distro: ubuntu_focal
tags:
dynamic:
patch_set: true
- job:
name: airship-shipyard-airskiff-deployment
nodeset: airship-shipyard-single-node
nodeset: airship-shipyard-single-node-focal
description: |
Deploy Memcached using Airskiff and submitted Armada changes.
timeout: 9600
voting: false
voting: true
pre-run:
- tools/gate/playbooks/airskiff-reduce-site.yaml
- tools/gate/playbooks/git-config.yaml
@ -144,27 +131,9 @@
dynamic:
patch_set: true
- job:
name: airship-shipyard-docker-build-post-ubuntu_xenial
voting: false
timeout: 1800
run: tools/gate/playbooks/run-image.yaml
nodeset: airship-shipyard-single-node
irrelevant-files: *irrelevant-files
secrets:
- airship_shipyard_quay_creds
vars:
publish: true
distro: ubuntu_xenial
tags:
dynamic:
branch: true
commit: true
static:
- latest
- job:
name: airship-shipyard-docker-build-post-ubuntu_bionic
name: airship-shipyard-docker-build-post-ubuntu_focal
timeout: 1800
run: tools/gate/playbooks/run-image.yaml
nodeset: airship-shipyard-single-node
@ -173,7 +142,7 @@
- airship_shipyard_quay_creds
vars:
publish: true
distro: ubuntu_bionic
distro: ubuntu_focal
tags:
dynamic:
branch: true


@ -34,7 +34,7 @@ USE_PROXY ?= false
AIRFLOW_SRC ?=
AIRFLOW_HOME ?=
DISTRO_BASE_IMAGE ?=
DISTRO ?= ubuntu_bionic
DISTRO ?= ubuntu_focal
IMAGE:=${DOCKER_REGISTRY}/${IMAGE_PREFIX}/$(IMAGE_NAME):${IMAGE_TAG}-${DISTRO}
IMAGE_DIR:=images/$(IMAGE_NAME)
@ -61,7 +61,7 @@ charts: clean helm-toolkit
# Perform Linting
.PHONY: lint
lint: pep8 helm_lint build_docs
lint: pep8 helm-lint build_docs
# Dry run templating of chart
.PHONY: dry-run
@ -129,20 +129,26 @@ clean:
rm -rf doc/build
cd $(BUILD_CTX)/shipyard_client; rm -rf build
cd $(BUILD_CTX)/shipyard_airflow; rm -rf build
rm -rf doc/build
rm -f charts/*.tgz
rm -f charts/*/requirements.lock
rm -rf charts/*/charts
rm -rf .tox
.PHONY: pep8
pep8:
cd $(BUILD_CTX)/shipyard_client; tox -e pep8
cd $(BUILD_CTX)/shipyard_airflow; tox -e pep8
.PHONY: helm_lint
helm_lint: clean helm-toolkit
.PHONY: helm-lint
helm-lint: clean helm-toolkit
$(HELM) dep up charts/shipyard
$(HELM) lint charts/shipyard
# Initialize local helm config
.PHONY: helm-toolkit
helm-toolkit: helm-install
tools/helm_tk.sh $(HELM)
./tools/helm_tk.sh $(HELM)
# Install helm binary
.PHONY: helm-install

bindep.txt Normal file

@ -0,0 +1,10 @@
# This is a cross-platform list tracking distribution packages needed by tests;
# see http://docs.openstack.org/infra/bindep/ for additional information.
libffi-dev [test platform:dpkg]
libkrb5-dev [platform:dpkg]
libpq-dev [platform:dpkg]
libsasl2-dev [platform:dpkg]
libssl-dev [platform:dpkg]
libre2-dev [platform:dpkg]
libzmq3-dev [platform:dpkg]


@ -1,5 +1,6 @@
#
# Requirements for creating documentation only.
#
sphinx>=1.6.2
sphinx_rtd_theme==0.2.4
Sphinx
sphinx-rtd-theme==0.5.0
Jinja2<=3.0.3


@ -106,4 +106,3 @@
# targeted release
# POST /api/v1.0/actions
#"workflow_orchestrator:action_test_site": "rule:admin_required"


@ -13,7 +13,7 @@
# limitations under the License.
# Docker image to run Airflow on Kubernetes
ARG FROM=ubuntu:16.04
ARG FROM=ubuntu:focal
FROM ${FROM}
LABEL org.opencontainers.image.authors='airship-discuss@lists.airshipit.org, irc://#airshipit@freenode' \
@ -43,12 +43,12 @@ ARG AIRFLOW_HOME=/usr/local/airflow
# Moved celery to images/airflow/requirements.txt as apache-airflow uses a
# version of celery incompatibile with the version of kombu needed by other
# Airship components
ARG AIRFLOW_SRC="apache-airflow[crypto,postgres,hive,hdfs,jdbc]==1.10.5"
ARG AIRFLOW_SRC="apache-airflow[crypto,postgres,hive,hdfs]==1.10.15"
ARG DEBIAN_FRONTEND=noninteractive
ARG ctx_base=src/bin
# Kubectl version
ARG KUBECTL_VERSION=1.24.6
ARG KUBECTL_VERSION=1.26.3
# Needed from apache-airflow 1.10.2, since core.airflow_home config is deprecated
ENV AIRFLOW_HOME=${AIRFLOW_HOME}
@ -61,22 +61,23 @@ RUN set -ex && \
curl \
git \
g++ \
libkrb5-dev \
libffi-dev \
libssl-dev \
libpq-dev \
libtool \
libsasl2-dev \
locales \
netcat \
netbase \
python3 \
python3-setuptools \
python3-pip \
python3-dev \
python3-dateutil \
python3-openssl \
make \
--no-install-recommends \
&& python3 -m pip install -U 'pip<21.0' \
&& python3 -m pip install -U 'setuptools<58' \
&& python3 -m pip install -U pip \
&& apt-get clean \
&& rm -rf \
/var/lib/apt/lists/* \
@ -111,9 +112,9 @@ RUN useradd -ms /bin/bash -d ${AIRFLOW_HOME} airflow \
# Note - removing snakebite (python 2 vs. 3). See:
# https://github.com/puckel/docker-airflow/issues/77
# Install Airflow directly to allow overriding source
COPY images/airflow/requirements.txt /tmp/
RUN sed -i "s/pyOpenSSL==.*//" /tmp/requirements.txt \
&& pip3 install -r /tmp/requirements.txt --no-cache-dir \
# COPY images/airflow/requirements.txt /tmp/
COPY ${ctx_base}/shipyard_airflow/requirements-frozen.txt /tmp/requirements.txt
RUN pip3 install -r /tmp/requirements.txt --no-cache-dir \
&& pip3 install $AIRFLOW_SRC --no-cache-dir \
&& (pip3 uninstall -y snakebite || true) \
&& (pip3 uninstall -y psycopg2 || true) \
@ -137,12 +138,12 @@ ENV PBR_VERSION 0.1a1
# Since Shipyard and Airflow are built together as images, this should prevent
# stale or out-of-date code between these parts.
# Shipyard requirements, source and installation
COPY ${ctx_base}/shipyard_airflow/requirements.txt /tmp/api_requirements.txt
COPY ${ctx_base}/shipyard_airflow/requirements-frozen.txt /tmp/api_requirements.txt
RUN pip3 install -r /tmp/api_requirements.txt --no-cache-dir
COPY ${ctx_base}/shipyard_airflow /tmp/shipyard/
RUN cd /tmp/shipyard \
&& python3 setup.py install
&& pip3 install $(pwd)
# Note: The value for the dags and plugins directories that are sourced
# from the values.yaml of the Shipyard Helm chart need to align with these


@ -1,44 +0,0 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
celery==4.3
pytz==2018.5
python-dateutil==2.8.1
pyOpenSSL==22.0.0
ndg-httpsclient==0.5.1
oslo.utils==3.42.1
oslo.config==7.0.0
oslo.serialization==2.29.2
pyasn1<0.5.0,>=0.4.6
psycopg2-binary==2.8.4
docker==3.7.2
# Airflow is now installed in the Dockerfile directory to allow for
# overriding where it is sourced from
python-openstackclient==3.16.1
python-keystoneclient==3.22.0
kubernetes>=6.0.0
# Need to lock marshmellow-sqlalchemy and tabulate for compatibility issues
marshmallow-sqlalchemy==0.18.0
WTForms~=2.2.1
tabulate==0.8.03
pbr==5.4.5
setuptools>=40.4.3,<58
six>=1.15.0
urllib3==1.25.9
vine<5.0.0a1,>=1.1.3
# Dependencies for other UCP components
git+https://opendev.org/airship/deckhand.git@1f0c011a1708c1235a2be65edada7d386cd55d2a#egg=deckhand
git+https://opendev.org/airship/drydock.git@96db568d4b28f5f163efbf606854243614a6c821#egg=drydock_provisioner&subdirectory=python
git+https://opendev.org/airship/armada.git@416eff4e52f72169c3ebf49a8ffca36086692b7a#egg=armada


@ -41,9 +41,9 @@ elif [[ $cmd == *scheduler* ]]; then
elif [[ $cmd == 'quicktest' ]]; then
${python3_path} ${airflow_path} initdb
${python3_path} ${airflow_path} webserver -p 8080 &
airflow run example_bash_operator runme_0 2018-01-01
airflow tasks run example_bash_operator runme_0 2018-01-01
airflow backfill example_bash_operator -s 2018-01-01 -e 2018-01-02
airflow dag_state example_bash_operator 2018-01-01
airflow dags state example_bash_operator 2018-01-01
else
echo "Invalid Command!"
exit 1


@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
ARG FROM=ubuntu:16.04
ARG FROM=ubuntu:focal
FROM ${FROM}
LABEL org.opencontainers.image.authors='airship-discuss@lists.airshipit.org, irc://#airshipit@freenode' \
@ -41,11 +41,13 @@ RUN set -ex && \
automake \
ca-certificates \
curl \
build-essential \
git \
libtool \
make \
netbase \
python3-dev \
python3-setuptools \
--no-install-recommends \
&& apt-get autoremove -yqq --purge \
&& apt-get clean \
@ -84,33 +86,36 @@ RUN chown -R shipyard: /home/shipyard \
&& chmod +x /home/shipyard/entrypoint.sh
# Requirements and Shipyard source
COPY ${ctx_base}/shipyard_airflow/requirements.txt /home/shipyard/api_requirements.txt
COPY ${ctx_base}/shipyard_client/requirements.txt /home/shipyard/client_requirements.txt
COPY ${ctx_base}/shipyard_airflow/requirements-frozen.txt /home/shipyard/api_requirements.txt
COPY ${ctx_base}/shipyard_client/requirements-frozen.txt /home/shipyard/client_requirements.txt
COPY ${ctx_base}/shipyard_client /home/shipyard/shipyard_client/
COPY ${ctx_base}/shipyard_airflow /home/shipyard/shipyard/
# Build
RUN set -ex \
&& buildDeps=' \
gcc \
build-essential \
git \
libssl-dev \
libffi-dev \
libkrb5-dev \
libpq-dev \
libsasl2-dev \
libssl-dev \
libre2-dev \
libzmq3-dev \
make \
python3-pip \
' \
&& apt-get -qq update \
&& apt-get -y install -y \
$buildDeps \
python3-openssl \
libffi-dev \
python3-dev \
--no-install-recommends \
&& python3 -m pip install -U 'pip<21.0' \
&& python3 -m pip install -U setuptools \
&& sed -i "s/pyOpenSSL==.*//" /home/shipyard/client_requirements.txt\
&& apt-get -y install -y $buildDeps --no-install-recommends \
&& python3 -m pip install -U pip \
&& pip3 install -r /home/shipyard/client_requirements.txt --no-cache-dir \
&& cd /home/shipyard/shipyard_client \
&& pip3 install $(pwd) --use-pep517 \
&& pip3 install -r /home/shipyard/api_requirements.txt --no-cache-dir \
&& cd /home/shipyard/shipyard \
&& python3 setup.py install \
&& pip3 install $(pwd) --use-pep517 \
&& apt-get purge -y --auto-remove $buildDeps \
&& apt-get autoremove -yqq --purge \
&& apt-get clean \


@ -180,9 +180,13 @@
# and will be removed in the S release.
#auth_uri = <None>
# API version of the admin Identity API endpoint. (string value)
# API version of the Identity API endpoint. (string value)
#auth_version = <None>
# Interface to use for the Identity API endpoint. Valid values are "public",
# "internal" (default) or "admin". (string value)
#interface = internal
# Do not handle authorization requests within the middleware, but delegate the
# authorization decision to downstream WSGI components. (boolean value)
#delay_auth_decision = false
@ -217,14 +221,6 @@
# The region in which the identity server can be found. (string value)
#region_name = <None>
# DEPRECATED: Directory used to cache files related to PKI tokens. This option
# has been deprecated in the Ocata release and will be removed in the P
# release. (string value)
# This option is deprecated for removal since Ocata.
# Its value may be silently ignored in the future.
# Reason: PKI token format is no longer supported.
#signing_dir = <None>
# Optionally specify a list of memcached server(s) to use for caching. If left
# undefined, tokens will instead be cached in-process. (list value)
# Deprecated group/name - [keystone_authtoken]/memcache_servers
@ -235,16 +231,6 @@
# to -1 to disable caching completely. (integer value)
#token_cache_time = 300
# DEPRECATED: Determines the frequency at which the list of revoked tokens is
# retrieved from the Identity service (in seconds). A high number of revocation
# events combined with a low cache duration may significantly reduce
# performance. Only valid for PKI tokens. This option has been deprecated in
# the Ocata release and will be removed in the P release. (integer value)
# This option is deprecated for removal since Ocata.
# Its value may be silently ignored in the future.
# Reason: PKI token format is no longer supported.
#revocation_cache_time = 10
# (Optional) If defined, indicate whether token data should be authenticated or
# authenticated and encrypted. If MAC, token data is authenticated (with HMAC)
# in the cache. If ENCRYPT, token data is encrypted and authenticated in the
@ -280,9 +266,9 @@
# client connection from the pool. (integer value)
#memcache_pool_conn_get_timeout = 10
# (Optional) Use the advanced (eventlet safe) memcached client pool. The
# advanced pool will only work under python 2.x. (boolean value)
#memcache_use_advanced_pool = false
# (Optional) Use the advanced (eventlet safe) memcached client pool. (boolean
# value)
#memcache_use_advanced_pool = true
# (Optional) Indicate whether to set the X-Service-Catalog header. If False,
# middleware will not ask for service catalog on token validation and will not
@ -298,27 +284,6 @@
# (string value)
#enforce_token_bind = permissive
# DEPRECATED: If true, the revocation list will be checked for cached tokens.
# This requires that PKI tokens are configured on the identity server. (boolean
# value)
# This option is deprecated for removal since Ocata.
# Its value may be silently ignored in the future.
# Reason: PKI token format is no longer supported.
#check_revocations_for_cached = false
# DEPRECATED: Hash algorithms to use for hashing PKI tokens. This may be a
# single algorithm or multiple. The algorithms are those supported by Python
# standard hashlib.new(). The hashes will be tried in the order given, so put
# the preferred one first for performance. The result of the first hash will be
# stored in the cache. This will typically be set to multiple values only while
# migrating from a less secure algorithm to a more secure one. Once all the old
# tokens are expired this option should be set to a single value for better
# performance. (list value)
# This option is deprecated for removal since Ocata.
# Its value may be silently ignored in the future.
# Reason: PKI token format is no longer supported.
#hash_algorithms = md5
# A choice of roles that must be present in a service token. Service tokens are
# allowed to request that an expired token can be used and so this check should
# tightly control that only actual services should be sending this token. Roles
@ -333,6 +298,10 @@
# possible. (boolean value)
#service_token_roles_required = false
# The name or type of the service as it appears in the service catalog. This is
# used to validate tokens that have restricted access rules. (string value)
#service_type = <None>
# Authentication type to load (string value)
# Deprecated group/name - [keystone_authtoken]/auth_plugin
#auth_type = <None>


@ -0,0 +1,29 @@
arrow<=0.17.0
# apache-airflow[crypto,celery,hive,hdfs,jdbc]==1.10.15
apache-airflow[crypto,celery,hive,hdfs]==1.10.15
coverage==5.3
falcon
jsonschema<=3.2.0
keystoneauth1<=5.1.1
kubernetes==26.1.0
networkx
oslo.config<=8.7.1
oslo.policy<=3.10.1
oslo.utils<=4.12.3
pyarrow==6.0.1
pylibyaml==0.1.0
PyYAML<=5.4.1
regex==2020.11.13
requests==2.23.0
responses==0.12.1
setuptools<=45.2.0
SQLAlchemy<=1.3.20
ulid==1.1
uWSGI==2.0.21
Werkzeug
WTForms<=2.3.3
git+https://opendev.org/airship/deckhand.git@13c5199f18664d66ccdd3a1e54bb40ad2c293d1a#egg=deckhand
git+https://opendev.org/airship/drydock.git@d00eaf0303f50478d6503c2c2096e931134189ac#egg=drydock_provisioner&subdirectory=python
git+https://opendev.org/airship/armada.git@a6dcf17d178a89bad626b0e5ef6fafc428dca7e4#egg=armada
git+https://opendev.org/airship/promenade.git@5602e85fb936eeafdc3e4699fd3afbe1576078bc#egg=promenade


@ -0,0 +1,255 @@
alabaster==0.7.13
alembic==1.4.3
amqp==2.6.1
apache-airflow==1.10.15
apispec==1.3.3
argcomplete==1.12.3
Armada @ git+https://opendev.org/airship/armada.git@a6dcf17d178a89bad626b0e5ef6fafc428dca7e4#egg=armada
arrow==0.17.0
attrs==20.3.0
autopage==0.5.1
Babel==2.12.1
bandit==1.6.0
bcrypt==4.0.1
Beaker==1.12.0
billiard==3.6.4.0
cached-property==1.5.2
cachetools==5.3.0
cattrs==1.10.0
celery==4.4.7
certifi==2022.12.7
cffi==1.15.1
chardet==3.0.4
charset-normalizer==3.1.0
click==7.1.2
cliff==3.10.1
cmd2==2.4.3
colorama==0.4.6
colorlog==4.0.2
configparser==3.5.3
coverage==5.3
croniter==0.3.37
cryptography==3.4.8
debtcollector==2.5.0
Deckhand @ git+https://opendev.org/airship/deckhand.git@13c5199f18664d66ccdd3a1e54bb40ad2c293d1a#egg=deckhand
decorator==5.1.1
deepdiff==5.8.1
defusedxml==0.7.1
dill==0.3.6
distlib==0.3.6
dnspython==2.3.0
docutils==0.17
dogpile.cache==1.2.0
drydock-provisioner @ git+https://opendev.org/airship/drydock.git@d00eaf0303f50478d6503c2c2096e931134189ac#egg=drydock_provisioner&subdirectory=python
dulwich==0.21.3
email-validator==1.3.1
eventlet==0.33.3
extras==1.0.0
falcon==3.1.1
fasteners==0.18
filelock==3.12.0
fixtures==3.0.0
flake8==3.8.4
Flask==1.1.4
Flask-Admin==1.5.4
Flask-AppBuilder==2.3.4
Flask-Babel==1.0.0
Flask-Caching==1.3.3
Flask-JWT-Extended==3.25.1
Flask-Login==0.4.1
Flask-OpenID==1.3.0
Flask-SQLAlchemy==2.5.1
flask-swagger==0.2.14
Flask-WTF==0.14.3
flower==0.9.7
funcsigs==1.0.2
future==0.18.3
futurist==2.4.1
gitdb==4.0.10
GitPython==3.1.31
google-auth==2.17.3
graphviz==0.20.1
greenlet==2.0.2
gunicorn==20.1.0
hacking==4.1.0
hmsclient==0.1.1
html5lib==0.9999999
httpexceptor==1.4.0
humanize==4.6.0
idna==2.10
imagesize==1.4.1
importlib-metadata==2.1.3
importlib-resources==1.5.0
iso8601==1.1.0
itsdangerous==1.1.0
Jinja2==2.11.3
json-merge-patch==0.2
jsonpath-ng==1.5.3
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
jsonpickle==1.4.1
jsonschema==3.2.0
keystoneauth1==5.1.1
keystonemiddleware==10.2.0
kombu==4.6.11
kubernetes==26.1.0
lazy-object-proxy==1.4.3
lockfile==0.12.2
Mako==1.2.4
Markdown==2.6.11
MarkupSafe==2.0.1
marshmallow==2.21.0
marshmallow-enum==1.5.1
marshmallow-sqlalchemy==0.23.1
mccabe==0.6.1
mock==5.0.2
more-itertools==9.1.0
msgpack==1.0.5
natsort==8.3.1
netaddr==0.8.0
netifaces==0.11.0
networkx==3.1
nose==1.3.7
numpy==1.24.3
oauthlib==3.2.2
ordered-set==4.1.0
os-service-types==1.7.0
oslo.cache==2.10.1
oslo.concurrency==5.1.1
oslo.config==8.7.1
oslo.context==4.1.0
oslo.db==10.0.0
oslo.i18n==6.0.0
oslo.log==4.6.0
oslo.messaging==12.13.0
oslo.metrics==0.6.0
oslo.middleware==4.4.0
oslo.policy==3.10.1
oslo.serialization==4.2.0
oslo.service==3.1.1
oslo.utils==4.12.3
packaging==21.3
pandas==1.5.3
Paste==3.5.0
PasteDeploy==3.0.1
PasteScript==3.3.0
pbr==5.5.1
pendulum==1.4.4
pip==23.0.1
platformdirs==3.4.0
pluggy==0.6.0
ply==3.11
prettytable==3.7.0
prison==0.2.1
promenade @ git+https://opendev.org/airship/promenade.git@5602e85fb936eeafdc3e4699fd3afbe1576078bc#egg=promenade
prometheus-client==0.8.0
protobuf==4.22.3
psutil==5.9.5
psycopg2-binary==2.9.6
pure-sasl==0.6.2
py==1.11.0
pyarrow==6.0.1
pyasn1==0.5.0
pyasn1-modules==0.3.0
pycadf==3.1.1
pycodestyle==2.6.0
pycparser==2.21
pyflakes==2.2.0
Pygments==2.14.0
PyHive==0.6.5
PyJWT==1.7.1
pylibyaml==0.1.0
pymongo==4.3.3
pyparsing==2.4.7
pyperclip==1.8.2
pyproject_api==1.5.0
pyrsistent==0.19.3
pytest==3.5.0
pytest-cov==2.5.1
python-barbicanclient==5.2.0
python-daemon==2.3.2
python-dateutil==2.8.1
python-editor==1.0.4
python-keystoneclient==5.1.0
python-memcached==1.59
python-mimeparse==1.6.0
python-nvd3==0.15.0
python-slugify==4.0.1
python-subunit==1.4.0
python3-openid==3.2.0
pytz==2023.3
pytzdata==2020.1
PyYAML==5.4.1
regex==2020.11.13
reno==4.0.0
repoze.lru==0.7
requests==2.23.0
requests-oauthlib==1.3.1
resolver==0.2.1
responses==0.12.1
retry==0.9.2
rfc3986==2.0.0
Routes==2.5.1
rsa==4.9
sasl==0.3.1
selector==0.10.1
setproctitle==1.3.2
setuptools==45.2.0
simplejson==3.19.1
six==1.16.0
smmap==5.0.0
snakebite-py3==3.0.5
snowballstemmer==2.2.0
Sphinx==3.3.1
sphinx-rtd-theme==0.5.0
sphinxcontrib-applehelp==1.0.4
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.3.20
SQLAlchemy-JSONField==0.9.0
sqlalchemy-migrate==0.13.0
SQLAlchemy-Utils==0.41.0
sqlparse==0.4.4
statsd==4.0.1
stevedore==5.0.0
tabulate==0.8.10
Tempita==0.5.2
tenacity==4.12.0
testfixtures==7.1.0
testrepository==0.0.20
testresources==2.0.1
testscenarios==0.5.0
testtools==2.5.0
text-unidecode==1.3
thrift==0.16.0
thrift-sasl==0.4.3
tiddlyweb==2.4.3
toml==0.10.2
tomli==2.0.1
tomlkit==0.11.7
tornado==5.1.1
tox==3.12.1
typing-extensions==3.7.2
tzlocal==1.5.1
ulid==1.1
unicodecsv==0.14.1
urllib3==1.25.11
uWSGI==2.0.21
vine==1.3.0
virtualenv==20.22.0
wcwidth==0.2.6
WebOb==1.8.7
websocket-client==1.5.1
Werkzeug==0.16.1
wheel==0.40.0
wrapt==1.15.0
wsgi-intercept==1.11.0
WTForms==2.3.3
xmltodict==0.13.0
yappi==1.4.0
yq==3.2.1
zipp==3.15.0
zope.deprecation==4.4.0


@ -1,61 +1,3 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# API requirements
alembic==1.0.1
arrow==0.12.1
celery==4.3
kombu<4.7,>=4.6.10
vine<5.0.0a1,>=1.1.3
configparser==3.5.0
cryptography>=2.7
falcon==1.4.1
jsonschema>=3.0.1,<4
grpcio>=1.16.0
keystoneauth1>=3.18.0
keystonemiddleware==5.3.0
networkx==2.2 # common/deployment_group
oslo.config==7.0.0
oslo.policy==1.40.1
PasteDeploy==1.5.2
psycopg2-binary==2.8.4
pylibyaml~=0.1
oslo.cache==1.38.1
oslo.log==3.45.2
oslo.utils==3.42.1
oslo.serialization==2.29.2
pbr==5.4.5
python-dateutil==2.8.1
python-memcached==1.59
python-keystoneclient==3.22.0
requests!=2.20.0,>=2.14.2
setuptools==40.4.3
SQLAlchemy==1.3.15
ulid==1.1
urllib3==1.25.9
uwsgi~=2.0.19.1
# To support profiling in non-prod
Werkzeug==0.16.1
WTForms~=2.2.1
# Need to lock marshmellow-sqlalchemy and tabulate for compatibility issues
marshmallow-sqlalchemy==0.18.0
tabulate==0.8.03
# Dependencies for other UCP components
git+https://opendev.org/airship/deckhand.git@5cd799cc5d04527ac782270008ff647b3779ff05#egg=deckhand
git+https://opendev.org/airship/drydock.git@b431f7a2cfacd199fa5587487f372dc5589f9126#egg=drydock_provisioner&subdirectory=python
git+https://opendev.org/airship/armada.git@379c88d6195eed1a0ef61d0465d5746bcfda1a42#egg=armada
git+https://opendev.org/airship/promenade.git@c10165c144e0a18137596a3c89f1339d6ed30d0c#egg=promenade
# Warning: This file should be empty.
# Specify direct dependencies in requirements-direct.txt instead.
-r requirements-direct.txt


@ -12,8 +12,8 @@ classifier =
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.10
[files]
packages =


@ -117,11 +117,11 @@ class DocumentValidator(metaclass=abc.ABCMeta):
Triggers the specific checks after any common checks
"""
if self.missing_severity not in ["Error", "Warning", "Info"]:
LOG.warn("Document Validator for {}, {} does not have a valid "
"value set for missing_severity (got {}). "
"Assuming Error".format(
self.schema, self.doc_name, self.missing_severity
))
LOG.warning("Document Validator for {}, {} does not have a valid "
"value set for missing_severity (got {}). "
"Assuming Error".format(
self.schema, self.doc_name, self.missing_severity
))
self.missing_severity = "Error"
try:


@ -147,7 +147,7 @@ class NotesHelper:
note_timestamp=note_timestamp
)
except Exception as ex:
LOG.warn(
LOG.warning(
"Creating note for {} encountered a problem, exception info "
"follows, but processing is not halted for notes.",
assoc_id
@ -163,7 +163,7 @@ class NotesHelper:
q = Query(assoc_id_pattern, verbosity, exact_match)
return self.nm.retrieve(q)
except Exception as ex:
LOG.warn(
LOG.warning(
"Note retrieval for {} encountered a problem, exception "
"info follows, but processing is not halted for notes.",
assoc_id_pattern


@ -109,7 +109,7 @@ class ActionsResource(BaseResource):
Return actions that have been invoked through shipyard.
:returns: a json array of action entities
"""
resp.body = self.to_json(self.get_all_actions(
resp.text = self.to_json(self.get_all_actions(
verbosity=req.context.verbosity)
)
resp.status = falcon.HTTP_200
@ -133,7 +133,7 @@ class ActionsResource(BaseResource):
LOG.info("Id %s generated for action %s", action['id'], action['name'])
# respond with the action and location for checking status
resp.status = falcon.HTTP_201
resp.body = self.to_json(action)
resp.text = self.to_json(action)
resp.location = '/api/v1.0/actions/{}'.format(action['id'])
def create_action(self, action, context, allow_intermediate_commits=False):


@ -36,7 +36,7 @@ class ActionsIdResource(BaseResource):
Return actions that have been invoked through shipyard.
:returns: a json array of action entities
"""
resp.body = self.to_json(self.get_action(
resp.text = self.to_json(self.get_action(
action_id=kwargs['action_id'],
verbosity=req.context.verbosity
))


@ -32,7 +32,7 @@ class ActionsStepsResource(BaseResource):
Return step details for an action step
:returns: a json object describing a step
"""
resp.body = self.to_json(
resp.text = self.to_json(
self.get_action_step(
action_id=kwargs['action_id'],
step_id=kwargs['step_id'],


@ -51,7 +51,7 @@ class ActionsStepsLogsResource(BaseResource):
step_id = ActionsHelper.parse_step_id(**kwargs)
# Retrieve logs for the action step
resp.body = self.get_action_step_logs(action_id,
resp.text = self.get_action_step_logs(action_id,
step_id,
try_number)


@ -31,7 +31,7 @@ class ActionsValidationsResource(BaseResource):
Return validation details for an action validation
:returns: a json object describing a validation
"""
resp.body = self.to_json(
resp.text = self.to_json(
self.get_action_validation(kwargs['action_id'],
kwargs['validation_id']))
resp.status = falcon.HTTP_200


@ -38,7 +38,7 @@ class WorkflowResource(BaseResource):
"""
since_date = req.params.get('since')
helper = WorkflowHelper(req.context.external_marker)
resp.body = self.to_json(
resp.text = self.to_json(
self.get_all_workflows(helper=helper, since_date=since_date)
)
resp.status = falcon.HTTP_200
@ -67,7 +67,7 @@ class WorkflowIdResource(BaseResource):
:returns: a json object of a workflow entity
"""
helper = WorkflowHelper(req.context.external_marker)
resp.body = self.to_json(
resp.text = self.to_json(
self.get_workflow_detail(helper=helper, workflow_id=workflow_id)
)
resp.status = falcon.HTTP_200


@ -57,7 +57,7 @@ def start_api():
LoggingMiddleware(),
CommonParametersMiddleware()
]
control_api = falcon.API(
control_api = falcon.App(
request_type=ShipyardRequest, middleware=middlewares)
control_api.add_route('/versions', VersionsResource())
@ -113,7 +113,7 @@ class VersionsResource(BaseResource):
"""
def on_get(self, req, resp):
resp.body = self.to_json({
resp.text = self.to_json({
'v1.0': {
'path': '/api/v1.0',
'status': 'stable'


@ -33,7 +33,7 @@ class BaseResource(object):
"""
def on_options(self, req, resp, **kwargs):
"""Handle options requests"""
method_map = routing.create_http_method_map(self)
method_map = routing.map_http_methods(self)
for method in method_map:
if method_map.get(method).__name__ != 'method_not_allowed':
resp.append_header('Allow', method)


@ -48,7 +48,7 @@ class ConfigDocsStatusResource(BaseResource):
"""Returns a list of the configdocs and their statuses"""
versions = req.params.get('versions') or None
helper = ConfigdocsHelper(req.context)
resp.body = self.to_json(helper.get_configdocs_status(versions))
resp.text = self.to_json(helper.get_configdocs_status(versions))
resp.status = falcon.HTTP_200
@ -90,7 +90,7 @@ class ConfigDocsResource(BaseResource):
if validations and validations['status'] == 'Success':
validations['code'] = resp.status
resp.location = '/api/v1.0/configdocs/{}'.format(collection_id)
resp.body = self.to_json(validations)
resp.text = self.to_json(validations)
def validate_content_length(self, content_length):
"""Validates that the content length header is valid
@ -131,7 +131,7 @@ class ConfigDocsResource(BaseResource):
policy.check_auth(req.context, policy.GET_CONFIGDOCS_CLRTXT)
# Not reformatting to JSON or YAML since just passing through
resp.body = self.get_collection(
resp.text = self.get_collection(
helper=helper, collection_id=collection_id, version=version,
cleartext_secrets=cleartext_secrets)
resp.append_header('Content-Type', 'application/x-yaml')
@ -277,7 +277,7 @@ class CommitConfigDocsResource(BaseResource):
dryrun = req.get_param_as_bool(name='dryrun') or False
helper = ConfigdocsHelper(req.context)
validations = self.commit_configdocs(helper, force, dryrun)
resp.body = self.to_json(validations)
resp.text = self.to_json(validations)
resp.status = validations.get('code', falcon.HTTP_200)
def commit_configdocs(self, helper, force, dryrun):


@ -52,7 +52,7 @@ class RenderedConfigDocsResource(BaseResource):
policy.check_auth(req.context,
policy.GET_RENDEREDCONFIGDOCS_CLRTXT)
resp.body = self.get_rendered_configdocs(
resp.text = self.get_rendered_configdocs(
helper=helper,
version=version,
cleartext_secrets=cleartext_secrets


@ -49,7 +49,10 @@ class CommonParametersMiddleware(object):
try:
verbosity = req.get_param_as_int(
'verbosity', required=False, min=0, max=MAX_VERBOSITY
'verbosity',
required=False,
min_value=0,
max_value=MAX_VERBOSITY
)
if verbosity is not None:
# if not set, retains the context default value.


@ -60,7 +60,7 @@ class LoggingMiddleware(object):
# have response scrubbing or way to categorize responses in the
# future, this may be an appropriate place to utilize it.
if resp_code >= 400:
LOG.debug('Errored Response body: %s', resp.body)
LOG.debug('Errored Response body: %s', resp.text)
def _log_headers(self, headers):
""" Log request headers, while scrubbing sensitive values


@ -47,7 +47,7 @@ class NoteDetailsResource(BaseResource):
note_id = kwargs['note_id']
self.validate_note_id(note_id)
note = self.get_note_with_access_check(req.context, note_id)
resp.body = self.get_note_details(note)
resp.text = self.get_note_details(note)
resp.status = falcon.HTTP_200
def validate_note_id(self, note_id):


@ -42,5 +42,5 @@ class StatusResource(BaseResource):
else:
fltrs = None
helper = StatusHelper(req.context)
resp.body = self.to_json(helper.get_site_statuses(fltrs))
resp.text = self.to_json(helper.get_site_statuses(fltrs))
resp.status = falcon.HTTP_200


@ -57,8 +57,9 @@ class ValidateDeploymentAction:
results = self.doc_val_mgr.validate()
if self.doc_val_mgr.errored:
if self.cont_on_fail:
LOG.warn("Validation failures occured, but 'continue-on-fail' "
"is set to true. Processing continues")
LOG.warning("Validation failures occured, "
"but 'continue-on-fail' "
"is set to true. Processing continues")
else:
raise ApiError(
title='Document validation failed',


@ -104,7 +104,7 @@ def format_error_resp(req,
'retry': retry
}
resp.body = json.dumps(error_response, default=str)
resp.text = json.dumps(error_response, default=str)
resp.content_type = 'application/json'
resp.status = status_code


@ -404,10 +404,11 @@ class DrydockBaseOperator(UcpBaseOperator):
task_status = task_dict.get('status', "Not Specified")
task_result = task_dict.get('result')
if task_result is None:
LOG.warn("Task result is missing for task %s, with status %s."
" Neither successes nor further details can be"
" extracted from this result",
task_id, task_status)
LOG.warning("Task result is missing for task %s, "
"with status %s."
" Neither successes nor further details can be"
" extracted from this result",
task_id, task_status)
else:
if extend_success:
try:
@ -429,7 +430,7 @@ class DrydockBaseOperator(UcpBaseOperator):
", ".join(success_nodes), task_id)
except KeyError:
# missing key on the path to getting nodes - don't add
LOG.warn(
LOG.warning(
"Missing successes field on result of task %s, "
"but a success field was expected. No successes"
" can be extracted from this result", task_id
@ -448,8 +449,8 @@ class DrydockBaseOperator(UcpBaseOperator):
except Exception:
# since we are reporting task results, if we can't get the
# results, do not block the processing.
LOG.warn("Failed to retrieve a result for task %s. Exception "
"follows:", task_id, exc_info=True)
LOG.warning("Failed to retrieve a result for task %s. Exception "
"follows:", task_id, exc_info=True)
# deduplicate and return
return set(success_nodes)
@ -507,8 +508,8 @@ class DrydockBaseOperator(UcpBaseOperator):
note_timestamp=msg.get('ts'),
verbosity=3)
except Exception as ex:
LOG.warn("Error while creating a task result note, "
"processing continues. Source info %s", msg)
LOG.warning("Error while creating a task result note, "
"processing continues. Source info %s", msg)
LOG.exception(ex)
links = task_result.get('links', [])
@ -528,8 +529,8 @@ class DrydockBaseOperator(UcpBaseOperator):
is_auth_link=True,
verbosity=5)
except Exception as ex:
LOG.warn("Error while creating a link-based note, "
"processing continues. Source info: %s", link)
LOG.warning("Error while creating a link-based note, "
"processing continues. Source info: %s", link)
LOG.exception(ex)


@ -78,10 +78,11 @@ class DrydockDestroyNodeOperator(DrydockBaseOperator):
"destroyed. The report at the end of processing "
"this step contains the results", task_name)
except DrydockTaskTimeoutException:
LOG.warn("Task %s has timed out after %s seconds. Some nodes may "
"have been destroyed. The report at the end of "
"processing this step contains the results", task_name,
self.dest_timeout)
LOG.warning("Task %s has timed out after %s seconds. "
"Some nodes may "
"have been destroyed. The report at the end of "
"processing this step contains the results", task_name,
self.dest_timeout)
def report_summary(self):
"""Reports the successfully destroyed nodes"""


@ -252,7 +252,7 @@ class DrydockNodesOperator(DrydockBaseOperator):
except DrydockTaskFailedException:
# Task failure may be successful enough based on success criteria.
# This should not halt the overall flow of this workflow step.
LOG.warn(
LOG.warning(
"Task %s with Drydock task-id: %s has failed. Logs contain "
"details of the failure. Some nodes may be succesful, "
"processing continues", task_name, self.drydock_task_id
@ -263,7 +263,7 @@ class DrydockNodesOperator(DrydockBaseOperator):
except DrydockTaskTimeoutException:
# Task timeout may be successful enough based on success criteria.
# This should not halt the overall flow of this workflow step.
LOG.warn(
LOG.warning(
"Task %s with Drydock task-id: %s has timed out after %s "
"seconds. Logs contain details of the failure. Some nodes may "
"be succesful, processing continues", task_name,


@ -81,10 +81,11 @@ class DrydockRelabelNodesOperator(DrydockBaseOperator):
"relabeled. The report at the end of processing "
"this step contains the results", task_name)
except DrydockTaskTimeoutException:
LOG.warn("Task %s has timed out after %s seconds. Some nodes may "
"have been relabeled. The report at the end of "
"processing this step contains the results", task_name,
self.task_timeout)
LOG.warning("Task %s has timed out after %s seconds. "
"Some nodes may "
"have been relabeled. The report at the end of "
"processing this step contains the results", task_name,
self.task_timeout)
def report_summary(self):
"""Reports the successfully relabeled nodes"""


@ -66,7 +66,8 @@ class DrydockValidateDesignOperator(DrydockBaseOperator):
try:
design_validate_response = requests.post(validation_endpoint,
headers=headers,
data=json.dumps(payload))
data=json.dumps(payload),
timeout=5)
except requests.exceptions.RequestException as e:
raise AirflowException(e)


@ -66,7 +66,8 @@ class PromenadeValidateSiteDesignOperator(PromenadeBaseOperator):
try:
design_validate_response = requests.post(validation_endpoint,
headers=headers,
data=json.dumps(payload))
data=json.dumps(payload),
timeout=5)
except requests.exceptions.RequestException as e:
raise AirflowException(e)


@ -1,21 +1,8 @@
# Testing
pyflakes>=2.1.1
amqp~=2.6.0
bandit==1.6.0
flake8==3.8.4
pytest==3.5.0
pytest-cov==2.5.1
responses==0.10.2
testfixtures==5.1.1
testfixtures
tox<=4.0.0
typing-extensions==3.7.2
# Pin apache-airflow 1.10.5 to align with version in airflow images requirements.txt
apache-airflow[crypto,celery,hive,hdfs,jdbc]==1.10.5
# install postgres seperately as apache-airflow 1.10.5 forces postgres to use
# psycopg2 instead of psycopg2-binary which requires additional apt packages
# to be installed, i.e. postgres-devel. Otherwise the following error is seen:
# Error: pg_config executable not found. Only seems to be an issue in xenial
postgres>=3.0.0
# Linting
flake8<3.8.0,>=3.6.0
# Security scanning
bandit>=1.5.0 # Apache-2.0
pyflakes>=2.1.1


@ -39,12 +39,12 @@ def create_req(ctx, body):
env = testing.create_environ(
path='/',
query_string='',
protocol='HTTP/1.1',
http_version='1.1',
scheme='http',
host='falconframework.org',
port=None,
headers={'Content-Type': 'application/json'},
app='',
root_path='',
body=body,
method='POST',
wsgierrors=None,
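For reference, a minimal sketch of the renamed falcon.testing.create_environ() arguments used by these unit tests is shown below, assuming falcon 3.x; the header and body values are illustrative only.

# Sketch of building a WSGI environ with the renamed arguments:
# protocol -> http_version and app -> root_path.
import json

from falcon import testing

env = testing.create_environ(
    path='/',
    query_string='',
    http_version='1.1',   # was protocol='HTTP/1.1'
    scheme='http',
    host='falconframework.org',
    port=None,
    headers={'Content-Type': 'application/json'},
    root_path='',         # was app=''
    body=json.dumps({'name': 'example'}),
    method='POST',
    wsgierrors=None,
)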


@ -80,12 +80,12 @@ def create_req(ctx, body):
env = testing.create_environ(
path='/',
query_string='',
protocol='HTTP/1.1',
http_version='1.1',
scheme='http',
host='falconframework.org',
port=None,
headers={'Content-Type': 'application/json'},
app='',
root_path='',
body=body,
method='POST',
wsgierrors=None,
@ -284,7 +284,7 @@ def test_on_get(mock_get_all_actions, mock_authorize):
mock_authorize.assert_called_once_with(
'workflow_orchestrator:list_actions', context)
assert mock_get_all_actions.call_count == 1
assert resp.body is not None
assert resp.text is not None
assert resp.status == '200 OK'
@ -318,7 +318,7 @@ def test_on_post(mock_info, mock_create_action, mock_authorize, *args):
mock_info.assert_called_with("Id %s generated for action %s", 'test_id',
'test_name')
assert resp.status == '201 Created'
assert resp.body is not None
assert resp.text is not None
assert '/api/v1.0/actions/' in resp.location


@ -166,7 +166,7 @@ def test_on_get(mock_authorize, mock_get_action):
mock_authorize.assert_called_once_with('workflow_orchestrator:get_action',
context)
mock_get_action.assert_called_once_with(action_id=None, verbosity=1)
assert resp.body == '"action_returned"'
assert resp.text == '"action_returned"'
assert resp.status == '200 OK'
@mock.patch('shipyard_airflow.control.helpers.action_helper.notes_helper',


@ -121,7 +121,7 @@ Starting attempt 1 of 1
--------------------------------------------------------------------------------
Executing Task(PythonOperator): action_xcom
Running: ['bash', '-c', 'airflow run deploy_site action_xcom \
Running: ['bash', '-c', 'airflow tasks run deploy_site action_xcom \
2018-04-11T07:30:37 --job_id 2 --raw -sd DAGS_FOLDER/deploy_site.py']
Running on host airflow-worker-0.airflow-worker-discovery.ucp.svc.cluster.local
Subtask: [2018-04-11 07:30:43,944] {{python_operator.py:90}} \


@ -24,6 +24,6 @@ class TestVersionsResource():
req = create_req(context, None)
resp = create_resp()
version_resource.on_get(req, resp)
assert sorted(resp.body) == sorted(
assert sorted(resp.text) == sorted(
'{"v1.0": {"status": "stable", "path": "/api/v1.0"}}')
assert resp.status == '200 OK'


@ -46,7 +46,7 @@ class TestConfigDocsStatusResource():
assert result.status_code == 200
assert result.text == json.dumps(common.str_responder(), default=str)
assert result.headers[
'content-type'] == 'application/json; charset=UTF-8'
'content-type'] == 'application/json'
class TestConfigDocsResource():


@ -33,4 +33,4 @@ class TestStatusResource():
assert result.status_code == 200
assert result.text == json.dumps(common.str_responder(), default=str)
assert result.headers[
'content-type'] == 'application/json; charset=UTF-8'
'content-type'] == 'application/json'


@ -1,15 +1,29 @@
[tox]
skipsdist=True
envlist = py36, pep8, bandit
envlist = py38, pep8, bandit
[testenv]
setenv=
PYTHONWARNING=all
LANG=C.UTF-8
LC_ALL=C.UTF-8
deps = -r{toxinidir}/requirements.txt
deps = -r{toxinidir}/requirements-frozen.txt
[testenv:py36]
[testenv:freeze]
recreate = True
allowlist_externals=
rm
sh
deps=
-r{toxinidir}/requirements-direct.txt
-r{toxinidir}/test-requirements.txt
commands=
rm -f {toxinidir}/requirements-frozen.txt
sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen.txt"
[testenv:py38]
skipsdist=True
setenv=
SLUGIFY_USES_TEXT_UNIDECODE=yes
@ -22,10 +36,10 @@ commands =
--cov-report=term-missing:skip-covered \
--cov-config=.coveragerc \
--cov=shipyard_airflow \
--cov-report=html
--cov-report=html \
-vv
[testenv:pep8]
basepython=python3
deps=
flake8>=3.3.0
bandit>=1.5.0
@ -34,7 +48,6 @@ commands =
bandit -r shipyard_airflow
[testenv:bandit]
basepython=python3
skipsdist= True
commands =
bandit -r shipyard_airflow -n 5
@ -42,20 +55,32 @@ commands =
[testenv:genconfig]
setenv=
SLUGIFY_USES_TEXT_UNIDECODE=yes
commands = oslo-config-generator --config-file=generator/config-generator.conf
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands =
pip install . --use-pep517
oslo-config-generator --config-file=generator/config-generator.conf
[testenv:genpolicy]
setenv=
SLUGIFY_USES_TEXT_UNIDECODE=yes
commands = oslopolicy-sample-generator --config-file=generator/policy-generator.conf
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands =
pip install . --use-pep517
oslopolicy-sample-generator --config-file=generator/policy-generator.conf
[testenv:gen_all]
# Combined to make these run together instead of setting up separately
basepython=python3
setenv=
SLUGIFY_USES_TEXT_UNIDECODE=yes
deps =
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements-frozen.txt
commands =
pip install -r{toxinidir}/test-requirements.txt
pip install . --use-pep517
oslo-config-generator --config-file=generator/config-generator.conf
oslopolicy-sample-generator --config-file=generator/policy-generator.conf


@ -0,0 +1,24 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# CLI/Client requirements
arrow<=0.17.0
click==6.7
click-default-group==1.2
keystoneauth1<=5.1.1
requests==2.23.0
PTable==0.9.2
pylibyaml==0.1.0
PyYAML<=5.4.1


@ -0,0 +1,40 @@
amqp==2.6.1
arrow==0.17.0
attrs==23.1.0
bandit==1.6.0
certifi==2022.12.7
chardet==3.0.4
click==6.7
click-default-group==1.2
coverage==7.2.3
flake8==3.8.4
gitdb==4.0.10
GitPython==3.1.31
idna==2.10
iso8601==1.1.0
keystoneauth1==5.1.1
mccabe==0.6.1
more-itertools==9.1.0
os-service-types==1.7.0
pbr==5.11.1
pip==23.0.1
pluggy==0.6.0
PTable==0.9.2
py==1.11.0
pycodestyle==2.6.0
pyflakes==2.2.0
pylibyaml==0.1.0
pytest==3.5.0
pytest-cov==2.5.1
python-dateutil==2.8.2
PyYAML==5.4.1
requests==2.23.0
responses==0.10.2
setuptools==67.6.0
six==1.16.0
smmap==5.0.0
stevedore==5.0.0
testfixtures==5.1.1
urllib3==1.25.11
vine==1.3.0
wheel==0.40.0


@ -1,24 +1,3 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# CLI/Client requirements
arrow==0.12.1
click==6.7
click-default-group==1.2
keystoneauth1>=3.18.0
requests!=2.20.0,>=2.14.2
PTable==0.9.2
pylibyaml==0.1.0
pyyaml~=5.1
# Warning: This file should be empty.
# Specify direct dependencies in requirements-direct.txt instead.
-r requirements-direct.txt


@ -12,8 +12,8 @@ classifier =
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.10
[files]
packages =


@ -1,14 +1,14 @@
# Testing
pyflakes>=2.1.1
amqp~=2.6.0
pyflakes==2.2.0
amqp==2.6.1
pytest==3.5.0
pytest-cov==2.5.1
responses==0.10.2
testfixtures==5.1.1
# Linting
flake8<3.8.0,>=3.6.0
flake8==3.8.4
# Security scanning
bandit>=1.1.0 # Apache-2.0
bandit==1.6.0 # Apache-2.0


@ -1,16 +1,29 @@
[tox]
skipsdist=True
envlist = py36, pep8, bandit
envlist = py38, pep8, bandit
[testenv]
setenv=
PYTHONWARNING=all
LC_ALL=C.UTF-8
LC_ALL=C.UTF-8
deps = -r{toxinidir}/requirements.txt
deps = -r{toxinidir}/requirements-frozen.txt
-r{toxinidir}/test-requirements.txt
[testenv:py36]
[testenv:freeze]
recreate = True
allowlist_externals=
rm
sh
deps=
-r{toxinidir}/requirements-direct.txt
-r{toxinidir}/test-requirements.txt
commands=
rm -f {toxinidir}/requirements-frozen.txt
sh -c "pip freeze --all | grep -vE 'shipyard_airflow|pyinotify|pkg-resources' > requirements-frozen.txt"
[testenv:py38]
skipsdist=True
commands =
pytest \
@ -19,10 +32,10 @@ commands =
--cov-report=term-missing:skip-covered \
--cov-config=.coveragerc \
--cov=shipyard_client \
--cov-report=html
--cov-report=html \
-vv
[testenv:pep8]
basepython=python3
deps=
flake8>=3.3.0
bandit>=1.5.0
@ -31,7 +44,6 @@ commands =
bandit -r shipyard_airflow
[testenv:bandit]
basepython=python3
skipsdist=True
commands =
bandit -r shipyard_client -n 5


@ -47,7 +47,7 @@ fi
docker stop airflow_test
docker rm airflow_test
if [[ ${TEST_RESP:(-7)} == "success" ]]; then
if [ ${TEST_RESP:(-7)} == "success" ]; then
exit 0
else
exit 1


@ -1,4 +1,4 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
# Copyright 2019, AT&T Intellectual Property
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -13,24 +13,15 @@
# limitations under the License.
- hosts: primary
roles:
- clear-firewall
- bindep
- disable-systemd-resolved
- ensure-python
- ensure-pip
- ensure-docker
tasks:
# Stop systemd-resolved service before starting docker.
- name: stop systemd-resolved service
systemd:
state: stopped
enabled: no
masked: yes
daemon_reload: yes
name: systemd-resolved
become: yes
- name: ensure pip3 installed
apt:
name: "{{ item }}"
with_items:
- python3-pip
become: yes
- name: Clone Required Repositories
shell: |
export CLONE_SHIPYARD={{ CLONE_SHIPYARD }}
@ -39,7 +30,7 @@
args:
chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
- name: Setup AppArmor
- name: Setup Apparmor
shell: |
./tools/deployment/airskiff/developer/009-setup-apparmor.sh
args:
@ -58,16 +49,22 @@
chdir: "{{ zuul.project.src_dir }}"
become: yes
- name: Setup clients
shell: |
./tools/deployment/airskiff/developer/020-setup-client.sh
args:
chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
- name: Deploy Airship components using Armada
shell: |
mkdir ~/.kube
mkdir -p ~/.kube
cp -rp /home/zuul/.kube/config ~/.kube/config
./tools/deployment/airskiff/developer/030-armada-bootstrap.sh
args:
chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"
become: yes
- name: Deploy OpenStack using Airship
- name: Deploy Memcached using Airship
shell: |
./tools/deployment/airskiff/developer/100-deploy-osh.sh
args:


@ -14,40 +14,10 @@
- hosts: primary
tasks:
- name: ensure pip packages are installed
apt:
name: "{{ item }}"
with_items:
- python3-pip
- python3-git
- python3-requests
- python3-yaml
become: yes
- name: deploy iptables packages
include_role:
name: deploy-package
tasks_from: dist
vars:
packages:
deb:
- iptables
rpm:
- iptables
- command: sudo iptables -S
- command: sudo iptables -F
- command: sudo iptables -P INPUT ACCEPT
- command: sudo iptables -S
become: yes
- name: Overwrite Armada manifest
shell: |
git checkout v1.9
mv tools/gate/manifests/full-site.yaml \
type/skiff/manifests/full-site.yaml
# Overwrite versions.yaml with the latest Airship charts and xenial images
./tools/updater.py --in-file global/software/config/versions.yaml \
--tag-filter ubuntu_xenial --skip mariadb
args:
chdir: "{{ zuul.projects['opendev.org/airship/treasuremap'].src_dir }}"


@ -18,6 +18,16 @@
vars_files:
- vars.yaml
roles:
- bindep
- clear-firewall
- ensure-python
- ensure-pip
- ensure-docker
- build-images
tags:
- bindep
- clear-firewall
- ensure-python
- ensure-pip
- ensure-docker
- build-images


@ -1,8 +0,0 @@
# NOTE(SamYaple): CentOS cannot be build with userns-remap enabled. httpd uses
# cap_set_file capability and there is no way to pass that in at build as of
# docker 17.06.
# TODO(SamYaple): Periodically check to see if this is possible in newer
# versions of Docker
[Service]
ExecStart=
ExecStart=/usr/bin/dockerd


@ -38,52 +38,15 @@
debug:
var: image_tags
- name: Install Docker (Debian)
when: ansible_os_family == 'Debian'
- name: Install Docker python module for ansible docker login
block:
- file:
path: "{{ item }}"
state: directory
with_items:
- /etc/docker/
- /etc/systemd/system/docker.service.d/
- /var/lib/docker/
- mount:
path: /var/lib/docker/
src: tmpfs
fstype: tmpfs
opts: size=25g
state: mounted
- copy: "{{ item }}"
with_items:
- content: "{{ docker_daemon | to_json }}"
dest: /etc/docker/daemon.json
- src: files/docker-systemd.conf
dest: /etc/systemd/system/docker.service.d/
- apt_key:
url: https://download.docker.com/linux/ubuntu/gpg
- apt_repository:
repo: deb http://{{ zuul_site_mirror_fqdn }}/deb-docker/{{ ansible_distribution_release }} {{ ansible_distribution_release }} stable
- apt:
name: "{{ item }}"
allow_unauthenticated: True
with_items:
- docker-ce
- python3-pip
- python3-setuptools
- pip:
name: docker
version: 2.7.0
version: 4.4.4
executable: pip3
# NOTE(SamYaple): Allow all connections from containers to host so the
# containers can access the http server for git and wheels
- iptables:
action: insert
chain: INPUT
in_interface: docker0
jump: ACCEPT
become: True
- name: Make images
when: not publish
block:


@ -0,0 +1,37 @@
# Copyright 2020 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
- name: Disable systemd-resolved service
systemd:
state: stopped
enabled: no
masked: yes
daemon_reload: yes
name: systemd-resolved
become: yes
- name: Remove local stub dns from resolv.conf, if it exists
lineinfile:
path: /etc/resolv.conf
state: absent
regexp: '^nameserver.*127.0.0.1'
become: yes
- name: Add upstream nameservers in resolv.conf
blockinfile:
path: /etc/resolv.conf
block: |
nameserver 8.8.8.8
nameserver 8.8.4.4
become: yes


@ -0,0 +1,15 @@
# Copyright 2020 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
- include: disable-systemd-resolved.yaml


@ -17,7 +17,7 @@
set -x
HELM=$1
HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v3.6.3-linux-amd64.tar.gz"}
HELM_ARTIFACT_URL=${HELM_ARTIFACT_URL:-"https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz"}
function install_helm_binary {


@ -17,7 +17,9 @@
set -eux
HTK_REPO=${HTK_REPO:-"https://opendev.org/openstack/openstack-helm-infra.git"}
HTK_STABLE_COMMIT=${HTK_COMMIT:-"5c4056ad341afcc577e63902b6ddbfb222d757e1"}
HTK_STABLE_COMMIT=${HTK_COMMIT:-"a7cd689280cdbc0acd04a7a1b745941260e8700b"}
TMP_DIR=$(mktemp -d)

tox.ini

@ -5,15 +5,20 @@ envlist = all_jobs, docs
[testenv]
setenv=
PYTHONWARNING=all
passenv=HTTP_PROXY HTTPS_PROXY http_proxy https_proxy NO_PROXY no_proxy
passenv=
HTTP_PROXY
HTTPS_PROXY
http_proxy
https_proxy
NO_PROXY
no_proxy
[testenv:venv]
commands = {posargs}
# Please use `make docs` instead
[testenv:docs]
basepython=python3
whitelist_externals=
allowlist_externals=
rm
cp
tox
@ -28,22 +33,21 @@ deps= -r{toxinidir}/doc/requirements.txt
# `all_jobs` is here to support invocation as simply `tox`, as is done in
# current pipeline.
[testenv:all_jobs]
whitelist_externals=
allowlist_externals=
tox
commands=
tox -c {toxinidir}/src/bin/shipyard_airflow/tox.ini
tox -c {toxinidir}/src/bin/shipyard_client/tox.ini
[testenv:py36]
whitelist_externals=
[testenv:py38]
allowlist_externals=
tox
commands=
tox -c {toxinidir}/src/bin/shipyard_airflow/tox.ini -e py36
tox -c {toxinidir}/src/bin/shipyard_client/tox.ini -e py36
tox -c {toxinidir}/src/bin/shipyard_airflow/tox.ini -e py38
tox -c {toxinidir}/src/bin/shipyard_client/tox.ini -e py38
[testenv:pep8]
basepython=python3
whitelist_externals=
allowlist_externals=
tox
commands=
tox -c {toxinidir}/src/bin/shipyard_airflow/tox.ini -e pep8