Added DeploymentData document generation

This PS adds a DeploymentData document to sites collected by Pegleg.
The document describes the repos Pegleg collected from, including each
repo's commit SHA, tag, and whether its working tree was dirty.

If the source directory is not a git repo, these values will be
"None".

Change-Id: I7919b02d70c9797f689cdad85066d3953b978901
Lev Morgan 2019-03-20 12:43:13 -05:00 committed by Alexander Hughes
parent 05dc91eda4
commit 45767e1e48
3 changed files with 87 additions and 1 deletions
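
For context, here is a minimal sketch of the kind of document this change
generates. The repo name, commit SHA, and tag below are placeholders, not
values from this change; the structure mirrors _get_deployment_data_doc()
in the diff.

    import yaml

    # Illustrative DeploymentData document; all values are placeholders.
    example_doc = {
        "schema": "pegleg/DeploymentData/v1",
        "metadata": {
            "schema": "metadata/Document/v1",
            "name": "deployment-version",
        },
        "layeringDefinition": {
            "abstract": "false",
            "layer": "global"
        },
        "storagePolicy": "cleartext",
        "data": {
            "documents": {
                # Keyed by repo directory name (placeholder).
                "site-repo": {
                    "commit": "0123456789abcdef0123456789abcdef01234567",
                    "tag": "v1.0",
                    "dirty": False,
                },
            },
        },
    }

    print(yaml.safe_dump(example_doc, explicit_start=True, explicit_end=True,
                         default_flow_style=False))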


@@ -16,10 +16,12 @@ import logging
import os
import click
import git
import yaml
from prettytable import PrettyTable
from pegleg import config
from pegleg.engine import util
from pegleg.engine.util import files
@@ -48,6 +50,13 @@ def _collect_to_stdout(site_name):
            for line in _read_and_format_yaml(filename):
                # This converts \r\n line endings to \n.
                click.echo("\n".join(line.splitlines()))
        res = yaml.safe_dump(_get_deployment_data_doc(),
                             explicit_start=True,
                             explicit_end=True,
                             default_flow_style=False)
        # Click isn't splitting these lines correctly, so do it manually
        for line in res.split('\n'):
            click.echo(line)
    except Exception as ex:
        raise click.ClickException("Error printing output: %s" % str(ex))
@@ -60,6 +69,8 @@ def _collect_to_file(site_name, save_location):
    files.check_file_save_location(save_location)
    save_files = dict()
    curr_site_repo = files.path_leaf(config.get_site_repo())
    try:
        for repo_base, filename in util.definition.site_files_by_repo(
                site_name):
@@ -69,6 +80,9 @@ def _collect_to_file(site_name, save_location):
                save_files[repo_name] = open(save_file, "w")
            LOG.debug("Collecting file %s to file %s", filename, save_file)
            save_files[repo_name].writelines(_read_and_format_yaml(filename))
        save_files[curr_site_repo].writelines(yaml.safe_dump(
            _get_deployment_data_doc(), default_flow_style=False,
            explicit_start=True, explicit_end=True))
    except Exception as ex:
        raise click.ClickException("Error saving output: %s" % str(ex))
    finally:
@@ -140,3 +154,55 @@ def show(site_name, output_stream):
            ["", data['site_name'], data['site_type'], file])
    # Write tables to specified output_stream
    output_stream.write(site_table.get_string() + "\n")


def _get_deployment_data_doc():
    stanzas = {files.path_leaf(repo): _get_repo_deployment_data_stanza(repo)
               for repo in config.all_repos()}
    return {
        "schema": "pegleg/DeploymentData/v1",
        "metadata": {
            "schema": "metadata/Document/v1",
            "name": "deployment-version",
        },
        "layeringDefinition": {
            "abstract": "false",
            "layer": "global"
        },
        "storagePolicy": "cleartext",
        "data": {
            "documents": stanzas
        }
    }


def _get_repo_deployment_data_stanza(repo_path):
    try:
        repo = git.Repo(repo_path)
        commit = repo.commit()
        # If we're at a particular tag, reference it
        tag = [tag.name for tag in
               repo.tags if tag.commit == commit]
        if tag:
            tag = ", ".join(tag)
        else:
            # Otherwise just use the branch name
            try:
                tag = repo.active_branch.name
            except TypeError as e:
                if "HEAD is a detached symbolic reference" in str(e):
                    tag = "Detached HEAD"
                else:
                    raise e
        return {
            "commit": commit.hexsha,
            "tag": tag,
            "dirty": repo.is_dirty()
        }
    except git.InvalidGitRepositoryError:
        return {
            "commit": "None",
            "tag": "None",
            "dirty": "None"
        }
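
For readers unfamiliar with GitPython, below is a small standalone sketch of
the calls used in _get_repo_deployment_data_stanza. The clone path is an
assumption for illustration only.

    import tempfile

    import git

    # Point this at any existing clone (placeholder path).
    repo = git.Repo("/tmp/example-repo")
    commit = repo.commit()  # HEAD commit object
    tags_at_head = [t.name for t in repo.tags if t.commit == commit]
    print(commit.hexsha, tags_at_head, repo.is_dirty())

    # An existing directory that is not a repo raises
    # InvalidGitRepositoryError, which the function above maps to "None".
    try:
        git.Repo(tempfile.mkdtemp())
    except git.InvalidGitRepositoryError:
        print("not a git repository")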


@@ -394,3 +394,20 @@ def file_in_subdir(filename, _dir):
    file_path, file_name = os.path.split(
        os.path.realpath(filename))
    return _dir in file_path.split(os.path.sep)


def path_leaf(path):
    """
    Return the last non-empty element of a path. Unlike os.path.split,
    a trailing separator is ignored, so a path ending in a separator
    still yields its final directory name.

    :param path: a path as a string
    :return: the last non-empty element of the path, or None if the path
        contains no non-empty elements
    :rtype: str
    """
    split_path = [i for i in path.split(os.sep)
                  if i]
    if split_path:
        return split_path[-1]
    else:
        return None
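
A brief usage sketch for the new helper; it assumes POSIX-style paths, since
path_leaf splits on os.sep, and uses the module import style already seen in
site.py:

    from pegleg.engine.util import files

    # Unlike os.path.split, a trailing separator does not yield an empty name.
    assert files.path_leaf("/tmp/repos/site-repo") == "site-repo"
    assert files.path_leaf("/tmp/repos/site-repo/") == "site-repo"
    assert files.path_leaf("/") is None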


@@ -58,6 +58,7 @@ def _expected_document_names(site_name):
        _site_definition(site_name)["metadata"]["name"],
        '%s-chart' % site_name,
        '%s-passphrase' % site_name,
        'deployment-version'
    ]
    return EXPECTED_DOCUMENT_NAMES
@@ -77,6 +78,7 @@ def _test_site_collect_to_file(tmpdir, site_name, collection_path):
        assert sorted(_expected_document_names(site_name)) == sorted(
            [x['metadata']['name'] for x in deployment_documents])
        assert "pegleg/DeploymentData/v1" in lines
    finally:
        if os.path.exists(collection_str_path):
            shutil.rmtree(collection_str_path, ignore_errors=True)
@@ -96,8 +98,9 @@ def _test_site_collect_to_stdout(site_name):
    all_lines = [x[1][0].strip() for x in mock_echo.mock_calls]
    assert all_lines, "Nothing written to stdout"
    assert any("pegleg/DeploymentData/v1" in line for line in all_lines)
    for expected in expected_names:
        assert 'name: %s' % expected in all_lines
        assert 'name: {}'.format(expected) in all_lines


def test_site_collect_to_stdout(create_tmp_deployment_files):