Bugfix: Get absolute path when making directories
Previously Pegleg would attempt to create directories using the path specified directly. This path wasn't always an absolute path, resulting in errors such as:

    File "/opt/pegleg/pegleg/engine/util/files.py", line 265, in dump_all
        os.makedirs(os.path.dirname(path), exist_ok=True)
    File "/usr/local/lib/python3.6/os.py", line 220, in makedirs
        mkdir(name, mode)
    FileNotFoundError: [Errno 2] No such file or directory: ''

This bugfix determines an absolute path before attempting to create any directories.

Change-Id: I84a0e7bc63d6f56a56b9c5c41de1ede99dfbacc7
This commit is contained in:
parent
e79672987e
commit
c7d745fdbe
|
@ -216,7 +216,7 @@ class PKIGenerator(object):
|
|||
|
||||
if not os.path.exists(dir_name):
|
||||
LOG.debug('Creating secrets path: %s', dir_name)
|
||||
os.makedirs(dir_name)
|
||||
os.makedirs(os.path.abspath(dir_name))
|
||||
|
||||
# Encrypt the document
|
||||
document['data']['managedDocument']['metadata']['storagePolicy']\
|
||||
|
|
|
@ -118,7 +118,7 @@ FULL_STRUCTURE = {
|
|||
def _create_tree(root_path, *, tree=FULL_STRUCTURE):
|
||||
for name, data in tree.get('directories', {}).items():
|
||||
path = os.path.join(root_path, name)
|
||||
os.makedirs(path, exist_ok=True)
|
||||
os.makedirs(os.path.abspath(path), exist_ok=True)
|
||||
_create_tree(path, tree=data)
|
||||
|
||||
for filename, yaml_data in tree.get('files', {}).items():
|
||||
|
@ -242,7 +242,7 @@ def dump(data, path, flag='w', **kwargs):
|
|||
if flag == 'w' and os.path.exists(path):
|
||||
raise click.ClickException('%s already exists, aborting' % path)
|
||||
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
yaml.dump(data, f, **kwargs)
|
||||
|
@ -252,7 +252,7 @@ def safe_dump(data, path, flag='w', **kwargs):
|
|||
if flag == 'w' and os.path.exists(path):
|
||||
raise click.ClickException('%s already exists, aborting' % path)
|
||||
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
yaml.safe_dump(data, f, **kwargs)
|
||||
|
@ -262,7 +262,7 @@ def dump_all(data, path, flag='w', **kwargs):
|
|||
if flag == 'w' and os.path.exists(path):
|
||||
raise click.ClickException('%s already exists, aborting' % path)
|
||||
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
|
||||
with open(path, flag) as f:
|
||||
|
||||
yaml.dump_all(data, f, **kwargs)
|
||||
|
@ -309,8 +309,8 @@ def read(path):
|
|||
'', lambda loader, suffix, node: None)
|
||||
try:
|
||||
return [
|
||||
d for d in yaml.safe_load_all(stream) if d and
|
||||
(is_deckhand_document(d) or is_pegleg_managed_document(d))
|
||||
d for d in yaml.safe_load_all(stream) if d and (
|
||||
is_deckhand_document(d) or is_pegleg_managed_document(d))
|
||||
]
|
||||
except yaml.YAMLError as e:
|
||||
raise click.ClickException('Failed to parse %s:\n%s' % (path, e))
|
||||
|
@ -329,7 +329,7 @@ def write(data, file_path):
|
|||
:type data: str, dict, or a list of dicts
|
||||
"""
|
||||
try:
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
|
||||
with open(file_path, 'w') as stream:
|
||||
if isinstance(data, str):
|
||||
stream.write(data)
|
||||
|
@ -404,7 +404,7 @@ def check_file_save_location(save_location):
|
|||
LOG.debug(
|
||||
"Save location %s does not exist. Creating "
|
||||
"automatically.", save_location)
|
||||
os.makedirs(save_location)
|
||||
os.makedirs(os.path.abspath(save_location))
|
||||
# In case save_location already exists and isn't a directory.
|
||||
if not os.path.isdir(save_location):
|
||||
raise click.ClickException(
|
||||
|
|
|
@ -90,8 +90,9 @@ def git_handler(repo_url,
|
|||
# we do not need to clone, we may need to process the reference
|
||||
# by checking that out and returning the directory they passed in
|
||||
else:
|
||||
LOG.debug('Treating repo_url=%s as an already-cloned repository. '
|
||||
'Attempting to checkout ref=%s', repo_url, ref)
|
||||
LOG.debug(
|
||||
'Treating repo_url=%s as an already-cloned repository. '
|
||||
'Attempting to checkout ref=%s', repo_url, ref)
|
||||
|
||||
# Normalize the repo path.
|
||||
repo_url, _ = normalize_repo_path(repo_url)
|
||||
|
@ -102,9 +103,10 @@ def git_handler(repo_url,
|
|||
# real local repository. Wrapper logic around this module will
|
||||
# only perform this functionality against a temporary replica of
|
||||
# local repositories, making the below operations safe.
|
||||
LOG.info('Replica of local repo=%s is dirty. Committing all '
|
||||
'tracked/untracked changes to ref=%s',
|
||||
repo_name(repo_url), ref)
|
||||
LOG.info(
|
||||
'Replica of local repo=%s is dirty. Committing all '
|
||||
'tracked/untracked changes to ref=%s', repo_name(repo_url),
|
||||
ref)
|
||||
repo.git.add(all=True)
|
||||
repo.index.commit('Temporary Pegleg commit')
|
||||
|
||||
|
@ -115,8 +117,9 @@ def git_handler(repo_url,
|
|||
_try_git_checkout(repo, repo_url, ref, fetch=False)
|
||||
except exceptions.GitException:
|
||||
# Otherwise, attempt to fetch and checkout the missing ref.
|
||||
LOG.info('ref=%s not found locally for repo_url=%s, fetching from '
|
||||
'remote', ref, repo_url)
|
||||
LOG.info(
|
||||
'ref=%s not found locally for repo_url=%s, fetching from '
|
||||
'remote', ref, repo_url)
|
||||
# Allow any errors to bubble up.
|
||||
_try_git_checkout(repo, repo_url, ref, fetch=True)
|
||||
|
||||
|
@ -136,8 +139,9 @@ def _get_current_ref(repo_url):
|
|||
try:
|
||||
repo = Repo(repo_url, search_parent_directories=True)
|
||||
current_ref = repo.head.ref.name
|
||||
LOG.debug('ref for repo_url=%s not specified, defaulting to currently '
|
||||
'checked out ref=%s', repo_url, current_ref)
|
||||
LOG.debug(
|
||||
'ref for repo_url=%s not specified, defaulting to currently '
|
||||
'checked out ref=%s', repo_url, current_ref)
|
||||
return current_ref
|
||||
except Exception as e:
|
||||
return None
|
||||
|
@ -182,7 +186,7 @@ def _try_git_clone(repo_url,
|
|||
temp_dir = os.path.join(clone_path, repo_name)
|
||||
|
||||
try:
|
||||
os.makedirs(temp_dir)
|
||||
os.makedirs(os.path.abspath(temp_dir))
|
||||
except FileExistsError:
|
||||
msg = "The repository already exists in the given path. Either " \
|
||||
"provide a new clone path or pass in the path of the local " \
|
||||
|
@ -198,21 +202,20 @@ def _try_git_clone(repo_url,
|
|||
LOG.debug('Cloning [%s] with proxy [%s]', repo_url, proxy_server)
|
||||
# TODO(felipemonteiro): proxy_server can be finicky. Need a config
|
||||
# option to retry up to N times.
|
||||
repo = Repo.clone_from(
|
||||
repo_url,
|
||||
temp_dir,
|
||||
config='http.proxy=%s' % proxy_server,
|
||||
env=env_vars)
|
||||
repo = Repo.clone_from(repo_url,
|
||||
temp_dir,
|
||||
config='http.proxy=%s' % proxy_server,
|
||||
env=env_vars)
|
||||
else:
|
||||
LOG.debug('Cloning [%s]', repo_url)
|
||||
repo = Repo.clone_from(repo_url, temp_dir, env=env_vars)
|
||||
except git_exc.GitCommandError as e:
|
||||
LOG.exception('Failed to clone repo_url=%s using ref=%s.', repo_url,
|
||||
ref)
|
||||
if (ssh_cmd and ssh_cmd in e.stderr or
|
||||
'permission denied' in e.stderr.lower()):
|
||||
raise exceptions.GitAuthException(
|
||||
repo_url=repo_url, ssh_key_path=auth_key)
|
||||
if (ssh_cmd and ssh_cmd in e.stderr
|
||||
or 'permission denied' in e.stderr.lower()):
|
||||
raise exceptions.GitAuthException(repo_url=repo_url,
|
||||
ssh_key_path=auth_key)
|
||||
elif 'could not resolve proxy' in e.stderr.lower():
|
||||
raise exceptions.GitProxyException(location=proxy_server)
|
||||
else:
|
||||
|
@ -247,8 +250,8 @@ def _get_remote_env_vars(auth_key=None):
|
|||
# required CLI input.
|
||||
ssh_cmd = (
|
||||
'ssh -i {} -o ConnectionAttempts=20 -o ConnectTimeout=10 -o '
|
||||
'StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
|
||||
.format(os.path.expanduser(auth_key)))
|
||||
'StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'.
|
||||
format(os.path.expanduser(auth_key)))
|
||||
env_vars.update({'GIT_SSH_COMMAND': ssh_cmd})
|
||||
else:
|
||||
msg = "The auth_key path '%s' was not found" % auth_key
|
||||
|
@ -293,10 +296,16 @@ def _try_git_checkout(repo, repo_url, ref=None, fetch=True):
|
|||
# for each so that future checkouts can be performed using either
|
||||
# format. This way, no future processing is required to figure
|
||||
# out whether a refpath/hexsha exists within the repo.
|
||||
_create_local_ref(
|
||||
g, branches, ref=ref, newref=hexsha, reftype='hexsha')
|
||||
_create_local_ref(
|
||||
g, branches, ref=ref, newref=ref_path, reftype='refpath')
|
||||
_create_local_ref(g,
|
||||
branches,
|
||||
ref=ref,
|
||||
newref=hexsha,
|
||||
reftype='hexsha')
|
||||
_create_local_ref(g,
|
||||
branches,
|
||||
ref=ref,
|
||||
newref=ref_path,
|
||||
reftype='refpath')
|
||||
_create_or_checkout_local_ref(g, branches, ref=ref)
|
||||
else:
|
||||
LOG.debug('Checking out ref=%s from local repo_url=%s', ref,
|
||||
|
|
Loading…
Reference in New Issue