Created
August 25, 2017 19:41
-
-
Save endzyme/d195b3ca91b325b2f9cfb5cfdd301fb2 to your computer and use it in GitHub Desktop.
gross firehose importer for terraform
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[ | |
{ | |
"DeliveryStreamDescription": { | |
"HasMoreDestinations": false, | |
"VersionId": "1", | |
"CreateTimestamp": 12345678.047, | |
"DeliveryStreamARN": "arn:aws:firehose:us-east-1:1234567:deliverystream/some_name", | |
"DeliveryStreamStatus": "ACTIVE", | |
"DeliveryStreamName": "some_name", | |
"Destinations": [ | |
{ | |
"DestinationId": "destinationId-00000000001", | |
"ExtendedS3DestinationDescription": { | |
"RoleARN": "arn:aws:iam::1234567:role/some_role", | |
"Prefix": "asdf/asdf/", | |
"BufferingHints": { | |
"IntervalInSeconds": 60, | |
"SizeInMBs": 64 | |
}, | |
"EncryptionConfiguration": { | |
"KMSEncryptionConfig": { | |
"AWSKMSKeyARN": "arn:aws:kms:us-east-1:1234567:key/abcdef-1234-4567-asdf-asdfasdfasdf" | |
} | |
}, | |
"CompressionFormat": "GZIP", | |
"S3BackupMode": "Enabled", | |
"CloudWatchLoggingOptions": { | |
"Enabled": true, | |
"LogStreamName": "S3Delivery", | |
"LogGroupName": "/aws/kinesisfirehose/someplace" | |
}, | |
"BucketARN": "arn:aws:s3:::my-bucket", | |
"ProcessingConfiguration": { | |
"Enabled": true, | |
"Processors": [ | |
{ | |
"Type": "Lambda", | |
"Parameters": [ | |
{ | |
"ParameterName": "NumberOfRetries", | |
"ParameterValue": "3" | |
}, | |
{ | |
"ParameterName": "LambdaArn", | |
"ParameterValue": "arn:aws:lambda:us-east-1:1234567:function:somelambda:$LATEST" | |
} | |
] | |
} | |
] | |
}, | |
"S3BackupDescription": { | |
"RoleARN": "arn:aws:iam::1234567:role/somerole", | |
"Prefix": "someplace/", | |
"BufferingHints": { | |
"IntervalInSeconds": 300, | |
"SizeInMBs": 5 | |
}, | |
"EncryptionConfiguration": { | |
"NoEncryptionConfig": "NoEncryption" | |
}, | |
"CompressionFormat": "UNCOMPRESSED", | |
"BucketARN": "arn:aws:s3:::mybucket" | |
} | |
}, | |
"S3DestinationDescription": { | |
"RoleARN": "arn:aws:iam::1234567:role/some_role", | |
"Prefix": "asdf/", | |
"BufferingHints": { | |
"IntervalInSeconds": 300, | |
"SizeInMBs": 64 | |
}, | |
"EncryptionConfiguration": { | |
"KMSEncryptionConfig": { | |
"AWSKMSKeyARN": "arn:aws:kms:us-east-1:1234567:key/asdfasdf-asdf-asdf-asdfasdf" | |
} | |
}, | |
"CompressionFormat": "GZIP", | |
"CloudWatchLoggingOptions": { | |
"Enabled": true, | |
"LogStreamName": "S3Delivery", | |
"LogGroupName": "/aws/kinesisfirehose/someplace" | |
}, | |
"BucketARN": "arn:aws:s3:::my-bucket" | |
} | |
} | |
] | |
} | |
} | |
] |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python
# Rough importer: converts `aws firehose describe-delivery-stream` JSON
# dumps into Terraform-style resource blocks. Output needs manual cleanup
# before it is valid Terraform (see gist description).
from __future__ import print_function
from sys import argv
from pprint import pprint
import json, re
# Sole command-line argument: path to the JSON dump to convert.
json_file = argv[1]
with open(json_file) as data_file:
    resource_json = json.load(data_file)
# DEBUG
#pprint(resource_json)
# Crawl and start building the output
indent = 0  # current output indentation in spaces; mutated by eval_dict()
def convert(name):
    """Translate a CamelCase identifier to snake_case.

    E.g. 'DeliveryStreamName' -> 'delivery_stream_name',
    'RoleARN' -> 'role_arn'. Classic two-pass regex approach: first split
    before capitalized words, then split remaining lower/upper boundaries.
    """
    partial = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    snake = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partial)
    return snake.lower()
def eval_dict(item):
    """Recursively print a JSON dict as Terraform-style key/value lines.

    Scalars become `key = value` lines, nested dicts become `key { ... }`
    blocks, lists of strings become `key = ["a","b"]`, and lists of dicts
    are flattened in place (one block per element). Indentation is tracked
    through the module-level `indent` counter consumed by indent_print().

    Raises ValueError for lists that mix strings and dicts.
    """
    global indent
    # Python 2's json.load yields unicode strings; Python 3 has only str.
    # The original test `type(value) is (unicode or str)` evaluated to
    # `is unicode`, silently skipping byte strings and crashing on Python 3.
    try:
        string_types = (str, unicode)  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = (str,)
    for key, value in item.items():
        name = convert(key)
        # bool must be tested before int: bool is a subclass of int.
        if isinstance(value, bool):
            indent_print('%s = %s' % (name, str(value).lower()))
        elif isinstance(value, string_types):
            indent_print('%s = "%s"' % (name, value))
        elif isinstance(value, int):
            indent_print('%s = %i' % (name, value))
        elif isinstance(value, float):
            indent_print('%s = %f' % (name, value))
        elif isinstance(value, dict):
            indent_print('%s {' % name)
            indent += 2
            eval_dict(value)
            indent -= 2
            indent_print('}')
        elif isinstance(value, list):
            if not value:
                # Avoid printing `key = [""]` for an empty list.
                indent_print('%s = []' % name)
            elif all(isinstance(x, string_types) for x in value):
                # Original omitted the ' = ' here, producing `key["a"]`;
                # fixed to match every other branch.
                indent_print('%s = ["%s"]' % (name, '","'.join(value)))
            elif all(isinstance(x, dict) for x in value):
                for element in value:
                    eval_dict(element)
            else:
                # StandardError was removed in Python 3; ValueError is a
                # StandardError subclass on Python 2, so callers still catch it.
                raise ValueError('unsupported mixed-type list for key %r' % key)
def indent_print(string, end='\n'):
    """Print *string* prefixed with the current module-level indent.

    The number of leading spaces comes from the global `indent` counter
    maintained by eval_dict(); *end* is forwarded to print().
    """
    prefix = ' ' * indent
    print(prefix + string, end=end)
# Emit one Terraform resource block per delivery stream in the dump.
for resource in resource_json:
    # Each top-level entry wraps its payload under a single key
    # ("DeliveryStreamDescription"); the key name itself is not needed,
    # so iterate values() instead of items() (original left `base` unused).
    for description in resource.values():
        title = convert(description['DeliveryStreamName'])
        print('resource "aws_kinesis_firehose_delivery_stream" "%s" {' % title)
        indent = 2  # resource body is indented two spaces
        eval_dict(description)
        print('}')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
python 2.7 - used to import stuff from existing AWS Firehose streams into Terraform. NOTE: this does not give you immediately runnable Terraform code -- you will need to massage the output to conform to the Terraform provider's syntax and contracts. It is not a 1:1 converter; it just gets you a more easily formatted starting point, and then you can regexp-nightmare your way out to freedom.