commit
4ef1f0f71e
11 changed files with 337 additions and 0 deletions
@ -0,0 +1 @@ |
|||
__pycache__ |
@ -0,0 +1,7 @@ |
|||
import logging |
|||
|
|||
# Package metadata.
__author__ = "Adam Pippin"
__email__ = "hello@adampippin.ca"
__version__ = "0.0.1"

# Attach a NullHandler so importing applications don't get "No handlers
# could be found" warnings unless they configure the "configs" logger
# themselves (standard library-package logging convention).
logging.getLogger("configs").addHandler(logging.NullHandler())
@ -0,0 +1,14 @@ |
|||
import logging |
|||
from .cli import cli |
|||
|
|||
# Running as `python -m configs`: give the package logger a verbose
# console handler, then hand control to the click CLI.
log = logging.getLogger("configs")
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(
    '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
log.addHandler(console)
log.setLevel(logging.DEBUG)

cli()
|||
|
@ -0,0 +1,66 @@ |
|||
import click |
|||
import logging |
|||
from pprint import pprint |
|||
|
|||
from .config import Config |
|||
from .transform import Transforms |
|||
from .vault import Vaults |
|||
from .vault import Stack as VaultStack |
|||
|
|||
@click.group()
def cli():
    """
    Tool for working with YAML-formatted config generation. Or something.
    """
    # Group entry point only; subcommands register via @cli.command().
|||
|
|||
@cli.command()
@click.argument('input', type=click.File('rb'))
@click.argument('format')
@click.argument('output', type=click.File('wb'))
# BUG FIX: with multiple=True click iterates the default value, so a bare
# string 'sops' would be split into ('s', 'o', 'p', 's'); it must be a tuple.
@click.option('-v', '--vault', 'vault', default=('sops',), required=False, multiple=True)
def transform(input, format, output, vault):
    """Transform INPUT into FORMAT format and output to OUTPUT
    """
    logger = logging.getLogger("configs")

    logger.info('Reading input config')
    cfg = Config()
    cfg.read(input)

    # Build one vault backend per requested name, preserving CLI order.
    logger.info('Initializing vaults')
    vaults = []
    for vault_name in vault:
        logger.debug(vault_name)
        vault_config = cfg.get_vault_config(vault_name)
        vaults.append(Vaults[vault_name](vault_config))

    # Stack tries each vault in order until one resolves a value.
    vault_stack = VaultStack(vaults)

    logger.info('Initializing transform')
    transform_config = cfg.get_transform_config(format)
    # Renamed from `transform` to avoid shadowing this command function.
    transformer = Transforms[format](transform_config, vault_stack)

    logger.info('Transforming')
    result = transformer.transform(cfg)
    # BUG FIX: the OUTPUT argument was opened but never used; write the
    # result there (binary mode) as the docstring promises.
    output.write(result.encode('utf-8'))
|||
|
|||
@cli.command()
@click.argument('input', type=click.File('rb'))
def provision(input):
    """Read INPUT and store in the vault service
    """
    logger = logging.getLogger("configs")

    logger.info('Reading config')
    cfg = Config()
    cfg.read(input)

    logger.info('Fetching vault config')
    vault_config = cfg.get_vault_config("aws")

    # BUG FIX: `VaultAws` was never imported (NameError at runtime);
    # instantiate the backend through the Vaults registry instead.
    vault = Vaults["aws"](vault_config)

    logger.info('Storing')
    vault.provision(cfg.get_merged())
@ -0,0 +1,59 @@ |
|||
import yaml |
|||
|
|||
try: |
|||
from yaml import CLoader as YamlLoader, CDumper as YamlDumper |
|||
except ImportError: |
|||
from yaml import Loader as YamlLoader, Dumper as YamlDumper |
|||
|
|||
|
|||
class Config:
    """In-memory YAML configuration document with vault/transform lookups."""

    def __init__(self):
        # Parsed YAML document; populated by read().
        self.data = None

    def read(self, stream):
        """Parse YAML from *stream* and validate the document version.

        Raises Exception when meta.version is missing or not version 0.
        """
        self.data = yaml.load(stream, Loader=YamlLoader)
        meta = self.get_meta()
        if "version" not in meta:
            raise Exception('Config missing version')
        if meta["version"] != 0:
            raise Exception('Unsupported config version')

    def export(self):
        """Serialize the current document back to YAML text."""
        return yaml.dump(self.data, default_flow_style=False)

    def get_secrets(self):
        """Return the 'secrets_encrypted' subtree of the document."""
        return self.data['secrets_encrypted']

    def get_meta(self):
        """Return the 'meta' subtree of the document."""
        return self.data['meta']

    def get_transform_config(self, format):
        """Return transform settings for *format*; raise if undefined."""
        if format not in self.data['transform']:
            raise Exception('Config does not have transform definition for format: ' + format)

        return self.data['transform'][format]

    def get_vault_config(self, vault):
        """Return vault settings for *vault*; raise if undefined."""
        if vault not in self.data['vault']:
            raise Exception('Config does not have config for vault: ' + vault)

        return self.data['vault'][vault]

    def get_merged(self):
        """Merge plain config with encrypted secrets into one dict.

        NOTE: the merge writes into self.data['config'] in place.
        """
        return Config._merge_dicts(self.data['config'], self.data['secrets_encrypted'])

    @staticmethod
    def _merge_dicts(a, b, path=None):
        """Recursively merge *b* into *a*; raise on conflicting leaf values.

        Adapted from
        https://stackoverflow.com/questions/7204805/dictionaries-of-dictionaries-merge
        """
        if path is None:
            path = []
        for key in b:
            if key in a:
                if isinstance(a[key], dict) and isinstance(b[key], dict):
                    Config._merge_dicts(a[key], b[key], path + [str(key)])
                elif a[key] == b[key]:
                    pass  # same leaf value
                else:
                    raise Exception('Conflict at %s' % '.'.join(path + [str(key)]))
            else:
                a[key] = b[key]
        return a
@ -0,0 +1,5 @@ |
|||
from .env import Env |
|||
|
|||
# Registry mapping a transform format name (the CLI FORMAT argument) to
# the class that implements it.
Transforms = {
    "env": Env
}
@ -0,0 +1,18 @@ |
|||
from pprint import pprint |
|||
import os |
|||
|
|||
class Env:
    """Transform that renders resolved config values as env-file lines."""

    def __init__(self, transform_config, vault):
        # transform_config["fields"] maps env var name -> dotted config path.
        self.config = transform_config
        # vault exposes resolve(config, path) -> value or None.
        self.vault = vault

    def transform(self, config):
        """Return env-file text, one KEY="value" line per configured field.

        Fields that resolve to None are emitted commented out as
        '# KEY=null'.
        """
        out = []
        for key, path in self.config["fields"].items():
            value = self.vault.resolve(config, path)
            if value is None:
                out.append('# ' + key + '=null')
            else:
                # BUG FIX: coerce non-string YAML scalars (int/bool/float)
                # so a numeric secret doesn't raise TypeError on concat.
                out.append(key + '="' + str(value) + '"')
        return os.linesep.join(out)
@ -0,0 +1,8 @@ |
|||
from .aws import Aws |
|||
from .sops import Sops |
|||
from .stack import Stack |
|||
|
|||
# Registry mapping a vault backend name (the CLI --vault option) to the
# class that implements it.
Vaults = {
    "aws": Aws,
    "sops": Sops
}
@ -0,0 +1,78 @@ |
|||
import boto3 |
|||
from botocore.exceptions import ClientError |
|||
import json |
|||
import logging |
|||
from pprint import pprint |
|||
|
|||
class Aws:
    """Vault backend backed by AWS Secrets Manager.

    Secrets are addressed by a dotted path: all components but the last
    name the secret ('/'-joined), the last component is the JSON key
    inside the secret's SecretString.
    """

    def __init__(self, config):
        # config may carry 'base_path', prefixed onto every secret name.
        self.config = config
        self.logger = logging.getLogger("configs")
        # One client is created up front and reused for every resolve().
        session = boto3.session.Session()
        self.secretsmanager = session.client(
            service_name='secretsmanager'
        )

    def provision(self, config):
        """Group the config's secrets by secret path and dump them.

        NOTE(review): this only pretty-prints the grouped secrets; nothing
        is written to AWS yet.
        """
        secrets = Aws._build_secrets(config.get_secrets())
        pprint(secrets)

    @staticmethod
    def _build_secrets(data, path=None):
        """Flatten a nested dict into {'a/b': {leaf: value}} groups.

        BUG FIX: the mutable default argument path=[] was shared across
        calls; use None and create a fresh list per top-level call.
        """
        if path is None:
            path = []
        secrets = {}
        path_str = "/".join(path)
        for key, value in data.items():
            if isinstance(value, dict):
                # Descend one level, reusing the shared path list.
                path.append(key)
                secrets.update(Aws._build_secrets(value, path))
                path.pop()
            else:
                secrets.setdefault(path_str, {})[key] = value
        return secrets

    def resolve(self, config, path):
        """Resolve dotted *path* to a value inside a JSON SecretString.

        Returns None when the key is absent inside the secret; raises when
        the secret itself does not exist or is not a SecretString.
        """
        secret_path, json_path = Aws._local_path_to_secretmanager_path(path.split("."))

        if "base_path" in self.config:
            secret_path = self.config['base_path'] + secret_path

        self.logger.debug("Resolving " + secret_path + "." + json_path)

        try:
            get_secret_value_response = self.secretsmanager.get_secret_value(
                SecretId=secret_path
            )
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                # Chain the cause so the boto3 error isn't lost.
                raise Exception("Secret not found") from e
            raise  # re-raise with original traceback

        if 'SecretString' not in get_secret_value_response:
            raise Exception('Only support resolving JSON-formatted SecretString')

        secret = json.loads(get_secret_value_response['SecretString'])

        return Aws._resolve_from_json(secret, json_path.split('.'))

    @staticmethod
    def _local_path_to_secretmanager_path(path):
        """Split dotted components: all but the last join with '/', the
        last stays as the in-secret JSON key."""
        return "/".join(path[:-1]), ".".join(path[-1:])

    @staticmethod
    def _resolve_from_json(obj, path):
        """Walk *obj* along *path* (list of keys); None when a key is
        missing. NOTE: consumes *path* (pops in place)."""
        nextkey = path.pop(0)

        if nextkey not in obj:
            return None

        if not path:
            return obj[nextkey]
        return Aws._resolve_from_json(obj[nextkey], path)
@ -0,0 +1,65 @@ |
|||
import logging |
|||
import subprocess |
|||
import yaml |
|||
|
|||
from pprint import pprint |
|||
|
|||
try: |
|||
from yaml import CLoader as YamlLoader, CDumper as YamlDumper |
|||
except ImportError: |
|||
from yaml import Loader as YamlLoader, Dumper as YamlDumper |
|||
|
|||
class Sops:
    """Vault backend that decrypts the whole config document with sops."""

    def __init__(self, config):
        self.config = config
        self.logger = logging.getLogger("configs")

    def resolve(self, config, path):
        """Decrypt *config* via the sops CLI and resolve dotted *path*.

        Returns None when the path does not exist in the decrypted data.
        Raises Exception when sops exits non-zero.
        """
        self.logger.debug("Resolving " + path)

        # Convert data back to yaml
        config_str = config.export()

        # Decrypt by piping the document through sops on stdin.
        result = subprocess.run([
            'sops',
            '--ignore-mac',
            '--input-type', 'yaml',
            '--output-type', 'yaml',
            '--decrypt', '/dev/stdin'
        ], stdout=subprocess.PIPE, input=config_str.encode('utf-8'))
        # BUG FIX: a failed sops run previously produced empty stdout,
        # which yaml.load turned into None and crashed below with a
        # confusing error; fail loudly instead.
        if result.returncode != 0:
            raise Exception('sops failed to decrypt config (exit %d)' % result.returncode)

        # Read decrypted file back
        decrypted_config = yaml.load(result.stdout.decode('utf-8'), Loader=YamlLoader)
        data = Sops._merge_dicts(decrypted_config['config'], decrypted_config['secrets_encrypted'])

        return Sops._resolve_in_object(data, path.split('.'))

    @staticmethod
    def _resolve_in_object(obj, path):
        """Walk *obj* along *path* (list of keys); None when a key is
        missing. NOTE: consumes *path* (pops in place)."""
        nextkey = path.pop(0)

        if nextkey not in obj:
            return None

        if not path:
            return obj[nextkey]
        return Sops._resolve_in_object(obj[nextkey], path)

    @staticmethod
    def _merge_dicts(a, b, path=None):
        """Recursively merge *b* into *a*; raise on conflicting leaf values.

        Duplicated from Config._merge_dicts — consider sharing one helper.
        https://stackoverflow.com/questions/7204805/dictionaries-of-dictionaries-merge
        """
        if path is None:
            path = []
        for key in b:
            if key in a:
                if isinstance(a[key], dict) and isinstance(b[key], dict):
                    Sops._merge_dicts(a[key], b[key], path + [str(key)])
                elif a[key] == b[key]:
                    pass  # same leaf value
                else:
                    raise Exception('Conflict at %s' % '.'.join(path + [str(key)]))
            else:
                a[key] = b[key]
        return a
|||
|
|||
|
@ -0,0 +1,16 @@ |
|||
import logging |
|||
|
|||
class Stack:
    """Ordered chain of vault backends; the first successful lookup wins."""

    def __init__(self, vaults):
        self.logger = logging.getLogger("configs")
        # Backends are consulted in the order given here.
        self.vaults = vaults

    def resolve(self, config, path):
        """Ask each vault in turn; return the first non-None result,
        or None when no backend can resolve *path*."""
        attempts = (backend.resolve(config, path) for backend in self.vaults)
        return next((hit for hit in attempts if hit is not None), None)
Loading…
Reference in new issue