Upgrade to Pro — share decks privately, control downloads, hide ads and more …

Terraformで構築するマルチクラウドプラットフォームインフラストラクチャ / ND...

Hayato Imai
September 30, 2017

Terraformで構築するマルチクラウドプラットフォームインフラストラクチャ / NDS53 Terraform

Hayato Imai

September 30, 2017
Tweet

More Decks by Hayato Imai

Other Decks in Programming

Transcript

  1. ଟ͘ͷϓϥοτϑΥʔϜ΍ αʔϏεΛαϙʔτ w *BB4 "84 ($1 "[VSF 0QFO4UBDL w 1BB4

    )FSPLV w 4BB4 'BTUMZ .BJM(VO /FX3FMJD w ଞʹ΋ͨ͘͞Μ w ֤ϓϥοτϑΥʔϜ΍αʔϏεݻ༗ͷଟ͘ͷϦιʔεʹରԠ
  2. ྫ&$ &*1Λߏங͢Δ 5FSSBGPSN$POpHVSBUJPO provider "aws" { region = "ap-northeast-1" }

    resource "aws_instance" "example" { ami = "ami-3bd3c45c" instance_type = "t2.micro" } resource "aws_eip" "example" { instance = "${aws_instance.example.id}" } IUUQTHJUIVCDPNIBZBKPOETUFSSBGPSN
  3. ࡞ۀ؀ڥͷॳظԽ $ terraform init Initializing provider plugins... - Checking for

    available provider plugins on https://releases.hashicorp.com... - Downloading plugin for provider "aws" (1.0.0)... Terraform has been successfully initialized!
  4. ࣮ߦܭըͷ֬ೝʢ̍ʣ $ terraform plan + aws_eip.example id: <computed> instance: "${aws_instance.example.id}"

    vpc: <computed> + aws_instance.example id: <computed> ami: "ami-3bd3c45c" instance_type: "t2.nano" subnet_id: <computed> Plan: 2 to add, 0 to change, 0 to destroy.
  5. Πϯϑϥͷߏஙͱ֬ೝʢ̍ʣ $ terraform apply aws_instance.example: Creating... aws_instance.example: Creation complete after

    16s (ID: i-0311a0b3adc495d2b) aws_eip.example: Creating... aws_eip.example: Creation complete after 1s (ID: eipalloc- a1c9a39b) Apply complete! Resources: 2 added, 0 changed, 0 destroyed
  6. Πϯϑϥͷߏஙͱ֬ೝʢ̎ʣ $ terraform show aws_eip.example: id = eipalloc-a1c9a39b instance =

    i-0311a0b3adc495d2b public_ip = 13.115.163.200 aws_instance.example: id = i-0311a0b3adc495d2b ami = ami-3bd3c45c instance_type = t2.nano w ঢ়ଶ͸UFSSBGPSNUGTUBUFϑΝΠϧͰ؅ཧ͞ΕΔ  ϩʔΧϧʢσϑΥϧτʣ΍4ɺ($4ɺFUDEͳͲͷόοΫΤϯυͰ ؅ཧՄೳ  όοΫΤϯυʹΑͬͯ͸ϩοΫ΋Մೳ
  7. Πϯϑϥͷߋ৽ $ terraform plan ~ aws_eip.example instance: "i-0311a0b3adc495d2b" => "$

    {aws_instance.example.id}" -/+ aws_instance.example (new resource required) id: "i-0311a0b3adc495d2b" => <computed> (forces new resource) ami: "ami-3bd3c45c" => "ami- dfd0c7b8" (forces new resource) instance_type: "t2.nano" => "t2.nano" subnet_id: "subnet-ec29e9a5" => <computed> Plan: 1 to add, 1 to change, 1 to destroy. ami = "ami-3bd3c45c" => "ami-dfd0c7b8"
  8. Πϯϑϥͷഁغ $ terraform plan --destroy - aws_eip.example - aws_instance.example Plan:

    0 to add, 0 to change, 2 to destroy. $ terraform destroy aws_eip.example: Destroying... (ID: eipalloc-a1c9a39b) aws_instance.example: Destroying... (ID: i-0311a0b3adc495d2b) Destroy complete! Resources: 2 destroyed.
  9. terraform { required_version = "~> 0.10.0" } provider "aws" {

    access_key = "${var.aws["access_key"]}" secret_key = "${var.aws["secret_key"]}" region = "${var.aws["region"]}" } provider "google" { credentials = "${file(var.google["credentials"])}" project = "${var.google["project"]}" region = "${var.google["region"]}" }
  10. variable "aws" { default = { access_key = "" secret_key

    = "" region = "ap-northeast-1" } } variable "google" { default = { credentials = "credentials.json" project = "" region = "" } } variable "service_name" { default = "nds53" }
  11. resource "aws_kinesis_stream" "main" { name = "${var.service_name}-${terraform.workspace}" shard_count = 1

    } output "Kinesis stream" { value = "${aws_kinesis_stream.main.name}" }
  12. resource "google_bigquery_dataset" "main" { # only [0-9a-zA-Z_] dataset_id = "${var.service_name}_${terraform.workspace}"

    } resource "google_bigquery_table" "main" { dataset_id = "${google_bigquery_dataset.main.dataset_id}" table_id = "${var.bq["table_id"]}" schema = "${file("bq/schema.json")}" } variable "bq" { default = { table_id = "myapp" } } output "BigQuery" { value = "${var.google["project"]}:$ {google_bigquery_dataset.main.dataset_id}:$ {google_bigquery_table.main.table_id}" }
  13. [ { "name": "time", "type": "timestamp", "mode": "required" }, {

    "name": "tag", "type": "string", "mode": "required" }, { "name": "value", "type": "float", "mode": "required" } ] CRTDIFNBKTPO
  14. ,.4

  15. resource "aws_kms_key" "main" {} resource "aws_kms_alias" "main" { name =

    "alias/${var.service_name}-${terraform.workspace}" target_key_id = "${aws_kms_key.main.key_id}" }
  16. data "archive_file" "kinesis2bq" { type = "zip" source_dir = "lambda/kinesis2bq"

    output_path = "lambda/kinesis2bq.zip" } resource "aws_lambda_function" "kinesis2bq" { filename = "${data.archive_file.kinesis2bq.output_path}" function_name = "${var.service_name}-${terraform.workspace}-kinesis2bq" role = "${aws_iam_role.kinesis_lambda.arn}" handler = "kinesis2bq.handler" source_code_hash = "${data.archive_file.kinesis2bq.output_base64sha256}" runtime = "python2.7" timeout = 60 # NOTE: ΩʔΛؚΊͯ4KB·Ͱ environment { variables = { BQ_CREDENTIALS = "${data.aws_kms_ciphertext.bq_credentials.ciphertext_blob}" BQ_PROJECT = "${var.google["project"]}" BQ_DATASET = "${google_bigquery_dataset.main.dataset_id}" BQ_TABLE = "${google_bigquery_table.main.table_id}" } } } resource "aws_lambda_event_source_mapping" "kinesis2bq" { batch_size = 100 # default 100 event_source_arn = "${aws_kinesis_stream.main.arn}" function_name = "${aws_lambda_function.kinesis2bq.arn}" starting_position = "TRIM_HORIZON" }
  17. data "aws_kms_ciphertext" "bq_credentials" { key_id = "${aws_kms_key.main.id}" plaintext = "${file(var.bq["credentials"])}"

    } variable "bq" { default = { table_id = "myapp" credentials = "credentials.json" # <-௥Ճ } }
  18. resource "aws_iam_role" "kinesis_lambda" { name = "${var.service_name}-${terraform.workspace}-kinesis_lambda" assume_role_policy = "${data.aws_iam_policy_document.kinesis_lambda_assume_role_policy.json}"

    } data "aws_iam_policy_document" "kinesis_lambda_assume_role_policy" { statement { actions = [ "sts:AssumeRole", ] principals { type = "Service" identifiers = ["lambda.amazonaws.com"] } } } resource "aws_iam_role_policy" "kinesis_lambda" { name = "${var.service_name}-${terraform.workspace}-kinesis_lambda" role = "${aws_iam_role.kinesis_lambda.id}" policy = "${data.aws_iam_policy_document.kinesis_lambda_role_policy.json}" } data "aws_iam_policy_document" "kinesis_lambda_role_policy" { statement { actions = ["kinesis:*"] resources = ["${aws_kinesis_stream.main.arn}"] } statement { actions = ["kms:Decrypt"] resources = ["${aws_kms_key.main.arn}"] } statement { actions = [ "logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents", ] resources = [ "arn:aws:logs:*:*:*", ] } }
  19. 1 import base64 2 import datetime 3 import json 4

    import os 5 import sys 6 7 import boto3 8 9 sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'vendor')) 10 from gcloud import bigquery 11 from oauth2client.service_account import ServiceAccountCredentials 12 13 BQ_CREDENTIALS = os.environ['BQ_CREDENTIALS'] 14 BQ_PROJECT = os.environ['BQ_PROJECT'] 15 BQ_DATASET = os.environ['BQ_DATASET'] 16 BQ_TABLE = os.environ['BQ_TABLE'] 17 18 def handler(event, context): 19 rows = [] 20 21 for r in event['Records']: 22 payload = r['kinesis']['data'] 23 try: 24 data = json.loads(base64.b64decode(payload)) 25 row = [] 26 for key in ['time', 'tag', 'value']: 27 if key == 'time': 28 row.append(datetime.datetime.fromtimestamp(data[key])) 29 else: 30 row.append(data[key]) 31 rows.append(tuple(row)) 32 except Exception as e: 33 print('Invalid data "{0}": {1}'.format(payload, e)) 34 pass 35 36 if len(rows) == 0: 37 return 38 39 kms = boto3.client('kms') 40 blob = base64.b64decode(BQ_CREDENTIALS) 41 dec = kms.decrypt(CiphertextBlob = blob) 42 keyfile_dict = json.loads(dec['Plaintext']) 43 credentials = ServiceAccountCredentials.from_json_keyfile_dict(keyfile_dict) 44 45 bq = bigquery.Client(credentials = credentials, project = BQ_PROJECT) 46 dataset = bq.dataset(BQ_DATASET) 47 table = dataset.table(BQ_TABLE) 48 table.reload() 49 res = table.insert_data(rows) 50 51 print(res)
  20. ߏங $ pip install gcloud -t lambda/kinesis2bq/vendor/ $ test -e

    lambda/kinesis2bq/vendor/google/__init__.py || \ > touch lambda/kinesis2bq/vendor/google/__init__.py $ terraform init $ terraform plan --var-file=config.tfvars $ terraform apply --var-file=config.tfvars $ terraform show ࣄલ४උʢ3&"%.&NEࢀরʣ ߏஙͱ֬ೝ
  21. ಈ࡞֬ೝ $ TEST_JSON=$(mktemp) $ test/gen_kinesis_records.sh $KINESIS_STREAM >$TEST_JSON $ cat $TEST_JSON

    | jq . -C | less -R $ aws kinesis put-records --cli-input-json file://$TEST_JSON ,JOFTJTʹσʔλΛ౤ೖ #JH2VFSZͰσʔλΛ֬ೝ $ bq query "SELECT COUNT(time) FROM $BIGQUERY" ˞,*/&4*4@453&".ͱ#*(26&3:͸UFSSBGPSNBQQMZͷ݁ՌͰஔ͖׵͑