Bugfix: Get absolute path when making directories

Previously, Pegleg attempted to create directories using the path exactly
as specified. That path was not always absolute; in particular, when a path
had no directory component, os.path.dirname() evaluated to an empty string
and os.makedirs() failed with errors such as:

  File "/opt/pegleg/pegleg/engine/util/files.py", line 265, in dump_all
    os.makedirs(os.path.dirname(path), exist_ok=True)
  File "/usr/local/lib/python3.6/os.py", line 220, in makedirs
    mkdir(name, mode)
FileNotFoundError: [Errno 2] No such file or directory: ''

This bugfix resolves each path to an absolute path (via os.path.abspath)
before attempting to create any directories.
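
A minimal sketch of the failure and the fix (the bare filename 'site.yaml'
is a hypothetical example, not taken from this change):

  import os

  path = 'site.yaml'              # relative path, no directory component
  os.path.dirname(path)           # -> '' (empty string)
  # os.makedirs('', exist_ok=True) raises:
  #   FileNotFoundError: [Errno 2] No such file or directory: ''

  # Resolving to an absolute path first gives makedirs a real directory:
  target = os.path.dirname(os.path.abspath(path))  # -> current working dir
  os.makedirs(target, exist_ok=True)               # succeeds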

Change-Id: I84a0e7bc63d6f56a56b9c5c41de1ede99dfbacc7
Author: HUGHES, ALEXANDER (ah8742)
Date:   2019-07-10 13:23:07 -05:00
Parent: e79672987e
Commit: c7d745fdbe

3 changed files with 43 additions and 34 deletions


@@ -216,7 +216,7 @@ class PKIGenerator(object):
         if not os.path.exists(dir_name):
             LOG.debug('Creating secrets path: %s', dir_name)
-            os.makedirs(dir_name)
+            os.makedirs(os.path.abspath(dir_name))
         # Encrypt the document
         document['data']['managedDocument']['metadata']['storagePolicy']\


@@ -118,7 +118,7 @@ FULL_STRUCTURE = {
 def _create_tree(root_path, *, tree=FULL_STRUCTURE):
     for name, data in tree.get('directories', {}).items():
         path = os.path.join(root_path, name)
-        os.makedirs(path, exist_ok=True)
+        os.makedirs(os.path.abspath(path), exist_ok=True)
         _create_tree(path, tree=data)
     for filename, yaml_data in tree.get('files', {}).items():
@@ -242,7 +242,7 @@ def dump(data, path, flag='w', **kwargs):
     if flag == 'w' and os.path.exists(path):
         raise click.ClickException('%s already exists, aborting' % path)
-    os.makedirs(os.path.dirname(path), exist_ok=True)
+    os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
     with open(path, flag) as f:
         yaml.dump(data, f, **kwargs)
@@ -252,7 +252,7 @@ def safe_dump(data, path, flag='w', **kwargs):
     if flag == 'w' and os.path.exists(path):
         raise click.ClickException('%s already exists, aborting' % path)
-    os.makedirs(os.path.dirname(path), exist_ok=True)
+    os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
     with open(path, flag) as f:
         yaml.safe_dump(data, f, **kwargs)
@@ -262,7 +262,7 @@ def dump_all(data, path, flag='w', **kwargs):
     if flag == 'w' and os.path.exists(path):
         raise click.ClickException('%s already exists, aborting' % path)
-    os.makedirs(os.path.dirname(path), exist_ok=True)
+    os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
     with open(path, flag) as f:
         yaml.dump_all(data, f, **kwargs)
@@ -309,8 +309,8 @@ def read(path):
             '', lambda loader, suffix, node: None)
         try:
             return [
-                d for d in yaml.safe_load_all(stream) if d and
-                (is_deckhand_document(d) or is_pegleg_managed_document(d))
+                d for d in yaml.safe_load_all(stream) if d and (
+                    is_deckhand_document(d) or is_pegleg_managed_document(d))
             ]
         except yaml.YAMLError as e:
             raise click.ClickException('Failed to parse %s:\n%s' % (path, e))
@@ -329,7 +329,7 @@ def write(data, file_path):
     :type data: str, dict, or a list of dicts
     """
     try:
-        os.makedirs(os.path.dirname(file_path), exist_ok=True)
+        os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
         with open(file_path, 'w') as stream:
             if isinstance(data, str):
                 stream.write(data)
@@ -404,7 +404,7 @@ def check_file_save_location(save_location):
         LOG.debug(
             "Save location %s does not exist. Creating "
             "automatically.", save_location)
-        os.makedirs(save_location)
+        os.makedirs(os.path.abspath(save_location))
     # In case save_location already exists and isn't a directory.
     if not os.path.isdir(save_location):
         raise click.ClickException(


@@ -90,8 +90,9 @@ def git_handler(repo_url,
     # we do not need to clone, we may need to process the reference
     # by checking that out and returning the directory they passed in
     else:
-        LOG.debug('Treating repo_url=%s as an already-cloned repository. '
-                  'Attempting to checkout ref=%s', repo_url, ref)
+        LOG.debug(
+            'Treating repo_url=%s as an already-cloned repository. '
+            'Attempting to checkout ref=%s', repo_url, ref)
         # Normalize the repo path.
         repo_url, _ = normalize_repo_path(repo_url)
@@ -102,9 +103,10 @@
             # real local repository. Wrapper logic around this module will
             # only perform this functionality against a temporary replica of
             # local repositories, making the below operations safe.
-            LOG.info('Replica of local repo=%s is dirty. Committing all '
-                     'tracked/untracked changes to ref=%s',
-                     repo_name(repo_url), ref)
+            LOG.info(
+                'Replica of local repo=%s is dirty. Committing all '
+                'tracked/untracked changes to ref=%s', repo_name(repo_url),
+                ref)
             repo.git.add(all=True)
             repo.index.commit('Temporary Pegleg commit')
@@ -115,8 +117,9 @@
             _try_git_checkout(repo, repo_url, ref, fetch=False)
         except exceptions.GitException:
             # Otherwise, attempt to fetch and checkout the missing ref.
-            LOG.info('ref=%s not found locally for repo_url=%s, fetching from '
-                     'remote', ref, repo_url)
+            LOG.info(
+                'ref=%s not found locally for repo_url=%s, fetching from '
+                'remote', ref, repo_url)
             # Allow any errors to bubble up.
             _try_git_checkout(repo, repo_url, ref, fetch=True)
@@ -136,8 +139,9 @@ def _get_current_ref(repo_url):
     try:
         repo = Repo(repo_url, search_parent_directories=True)
         current_ref = repo.head.ref.name
-        LOG.debug('ref for repo_url=%s not specified, defaulting to currently '
-                  'checked out ref=%s', repo_url, current_ref)
+        LOG.debug(
+            'ref for repo_url=%s not specified, defaulting to currently '
+            'checked out ref=%s', repo_url, current_ref)
         return current_ref
     except Exception as e:
         return None
@@ -182,7 +186,7 @@ def _try_git_clone(repo_url,
     temp_dir = os.path.join(clone_path, repo_name)
     try:
-        os.makedirs(temp_dir)
+        os.makedirs(os.path.abspath(temp_dir))
     except FileExistsError:
         msg = "The repository already exists in the given path. Either " \
               "provide a new clone path or pass in the path of the local " \
@@ -198,21 +202,20 @@
             LOG.debug('Cloning [%s] with proxy [%s]', repo_url, proxy_server)
             # TODO(felipemonteiro): proxy_server can be finicky. Need a config
             # option to retry up to N times.
-            repo = Repo.clone_from(
-                repo_url,
-                temp_dir,
-                config='http.proxy=%s' % proxy_server,
-                env=env_vars)
+            repo = Repo.clone_from(repo_url,
+                                   temp_dir,
+                                   config='http.proxy=%s' % proxy_server,
+                                   env=env_vars)
         else:
             LOG.debug('Cloning [%s]', repo_url)
             repo = Repo.clone_from(repo_url, temp_dir, env=env_vars)
     except git_exc.GitCommandError as e:
         LOG.exception('Failed to clone repo_url=%s using ref=%s.', repo_url,
                       ref)
-        if (ssh_cmd and ssh_cmd in e.stderr or
-                'permission denied' in e.stderr.lower()):
-            raise exceptions.GitAuthException(
-                repo_url=repo_url, ssh_key_path=auth_key)
+        if (ssh_cmd and ssh_cmd in e.stderr
+                or 'permission denied' in e.stderr.lower()):
+            raise exceptions.GitAuthException(repo_url=repo_url,
+                                              ssh_key_path=auth_key)
         elif 'could not resolve proxy' in e.stderr.lower():
             raise exceptions.GitProxyException(location=proxy_server)
         else:
@@ -247,8 +250,8 @@ def _get_remote_env_vars(auth_key=None):
            # required CLI input.
            ssh_cmd = (
                'ssh -i {} -o ConnectionAttempts=20 -o ConnectTimeout=10 -o '
-               'StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
-               .format(os.path.expanduser(auth_key)))
+               'StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'.
+               format(os.path.expanduser(auth_key)))
            env_vars.update({'GIT_SSH_COMMAND': ssh_cmd})
        else:
            msg = "The auth_key path '%s' was not found" % auth_key
@@ -293,10 +296,16 @@ def _try_git_checkout(repo, repo_url, ref=None, fetch=True):
            # for each so that future checkouts can be performed using either
            # format. This way, no future processing is required to figure
            # out whether a refpath/hexsha exists within the repo.
-           _create_local_ref(
-               g, branches, ref=ref, newref=hexsha, reftype='hexsha')
-           _create_local_ref(
-               g, branches, ref=ref, newref=ref_path, reftype='refpath')
+           _create_local_ref(g,
+                             branches,
+                             ref=ref,
+                             newref=hexsha,
+                             reftype='hexsha')
+           _create_local_ref(g,
+                             branches,
+                             ref=ref,
+                             newref=ref_path,
+                             reftype='refpath')
            _create_or_checkout_local_ref(g, branches, ref=ref)
        else:
            LOG.debug('Checking out ref=%s from local repo_url=%s', ref,