The main job is to query the database, generate test.csv locally, and then upload it to the s3://test-bucket-dev bucket under the bthlt prefix.
test.py

import logging
import os

import boto3
import pymysql
from botocore.exceptions import ClientError

# The connection is opened at module level so warm Lambda invocations reuse it.
db = pymysql.connect(host='****.****', user='****', password='****', database='****')
cursor = db.cursor()


def cursor_query_all(sql):
    try:
        cursor.execute(sql)
    except Exception as e:
        print("Catch exception: " + str(e))
    return cursor.fetchall()


def get_db_data():
    sql = """select * from test"""
    return cursor_query_all(sql)


def upload_file(file_name, bucket, object_name=None):
    # Default the S3 object key to the local file name.
    if object_name is None:
        object_name = os.path.basename(file_name)
    s3_client = boto3.client('s3')
    try:
        s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True


def lambda_handler(event, context):
    # /tmp is the only writable path in the Lambda execution environment.
    with open('/tmp/test.csv', 'w') as v_file:
        results = get_db_data()
        v_file.write('head1,head2,head3' + '\n')
        for result in results:
            # Rows come back as tuples that may hold non-string values,
            # so cast each column before joining.
            v_file.write(','.join(str(col) for col in result) + '\n')
    # Upload after the with block so the file is flushed and closed first.
    upload_file('/tmp/test.csv', 'test-bucket-dev', 'bthlt/test.csv')
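The credentials above are masked in the source; in practice a common pattern is to read them from Lambda environment variables instead of hard-coding them. A minimal sketch, assuming variable names DB_HOST, DB_USER, DB_PASSWORD, and DB_NAME (these names are illustrative, not part of the original code):

import os

import pymysql

# Illustrative variable names; set matching values on the Lambda function.
db = pymysql.connect(
    host=os.environ['DB_HOST'],
    user=os.environ['DB_USER'],
    password=os.environ['DB_PASSWORD'],
    database=os.environ['DB_NAME'],
)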
requirements.txt
boto3==1.20.23
PyMySQL==1.0.2
botocore==1.23.23
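boto3 and botocore already ship with the Lambda Python runtime, but pinning them here keeps the bundled versions deterministic. As an optional sanity check that the pins resolve before packaging (the /tmp/deps path is just an example):

pip download -r requirements.txt -d /tmp/deps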
package.sh
#!/bin/bash
# Bundle the handler and its dependencies into test.zip for Lambda deployment.
mkdir deploy
cp test.py deploy
cp requirements.txt deploy
cd deploy
pip install -r requirements.txt -t ./
zip -r ../test.zip *
# Leave the staging directory before deleting it.
cd ..
rm -rf deploy
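The Terraform configuration below reads the archive from lambda/test.zip, so after packaging, move the artifact into place (the lambda/ directory name comes from the filename argument in lambda.tf):

chmod +x package.sh
./package.sh
mkdir -p lambda
mv test.zip lambda/test.zip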
lambda.tf
resource "aws_iam_role" "test" { assume_role_policy = jsonencode( { Statement = [ { Action = "sts:AssumeRole" Effect = "Allow" Principal = { Service = "lambda.amazonaws.com" } }, ] Version = "2021-10-17" } ) force_detach_policies = false max_session_duration = 3600 name = "test" path = "/service-role/" } resource "aws_lambda_function" "test" { function_name = "test-upload-s3" handler = "test.lambda_handler" role = aws_iam_role.test.arn runtime = "python3.8" memory_size = "128" filename = "lambda/test.zip" source_code_hash = filebase64sha256("lambda/test.zip") }
event.tf
resource "aws_cloudwatch_event_rule" "every_day_upload_file_hours" { name = "test-file-every-day-${terraform.workspace}" schedule_expression = "cron(0 1 * * ? *)" } resource "aws_cloudwatch_event_target" "event_target_upload_files_s3" { count = terraform.workspace == "prod" ? 1 : 0 target_id = "every_day_upload_file_hours" rule = aws_cloudwatch_event_rule.every_day_upload_file_hours.name arn = "arn:aws-cn:lambda:region:account_id:function:test-upload-s3" } resource "aws_lambda_permission" "lambda_permission_upload_files_s3" { count = terraform.workspace == "prod" ? 1 : 0 action = "lambda:InvokeFunction" function_name = "test-upload-s3" principal = "events.amazonaws.com" source_arn = aws_cloudwatch_event_rule.every_day_upload_file_hours.arn }