# lib-af-unify/lib_afc_unify/microsite.py

"""
Functions for microsite setup
"""
import logging
import tarfile
import tempfile
import os
import mimetypes
import boto3
from jinja2 import Environment, FileSystemLoader
def upload_website(bucket_name, website_dir, aws_access_key, aws_secret_key):
    """
    Upload every file under *website_dir* to an S3 bucket.

    Walks the directory tree, derives each object key from the file's path
    relative to *website_dir*, guesses a Content-Type from the extension,
    and uploads. Failures are logged per file and do not abort the run.

    :param bucket_name: The name of the bucket to work with
    :param website_dir: Folder of website files
    :param aws_access_key: Authentication access key
    :param aws_secret_key: Authentication secret key
    :return: None
    """
    # Create client connection
    s3 = boto3.client('s3',
                      aws_access_key_id=aws_access_key,
                      aws_secret_access_key=aws_secret_key)
    # Loop all the files
    for root, _, files in os.walk(website_dir):
        for file in files:
            try:
                local_path = os.path.join(root, file)
                # S3 object keys always use forward slashes, but
                # os.path.relpath uses the platform separator (backslash
                # on Windows) — normalize so keys are portable.
                s3_path = os.path.relpath(local_path, website_dir).replace(os.sep, '/')
                # Determine the Content-Type based on file extension
                content_type, _ = mimetypes.guess_type(local_path)
                content_type = content_type or 'application/octet-stream'
                # Upload file to S3
                s3.upload_file(local_path,
                               bucket_name,
                               s3_path,
                               ExtraArgs={'ContentType': content_type})
            except Exception as ex:  # best-effort: log and continue with next file
                logging.error("[FILE UPLOAD] [%s] [%s] [%s]", bucket_name, file, str(ex))
def get_template(bucket_name, region_name, template_name, template_dir,
                 aws_access_key, aws_secret_key):
    """
    Download a website template archive from object storage and unpack it.

    Fetches ``<template_name>.tar.gz`` from *bucket_name* (Linode object
    storage in *region_name*), extracts its contents into *template_dir*,
    then removes the downloaded archive.

    :param bucket_name: The name of the bucket to work with
    :param region_name: The region of the bucket
    :param template_name: Base name of the template archive (no extension)
    :param template_dir: Directory to download and extract the template into
    :param aws_access_key: Authentication access key
    :param aws_secret_key: Authentication secret key
    :return: None
    :raises ValueError: if the archive contains a member that would be
        written outside *template_dir*
    """
    # Create client connection
    s3 = boto3.client('s3',
                      endpoint_url=f'https://{region_name}.linodeobjects.com',
                      aws_access_key_id=aws_access_key,
                      aws_secret_access_key=aws_secret_key)
    # Download file from S3
    full_path_template = os.path.join(template_dir, f"{template_name}.tar.gz")
    s3.download_file(bucket_name,
                     f"{template_name}.tar.gz",
                     full_path_template)
    # Extract template to directory. Validate every member first: a crafted
    # archive with "../" or absolute paths could otherwise write outside
    # template_dir (tar path-traversal).
    with tarfile.open(full_path_template, 'r:gz') as tar:
        dest = os.path.realpath(template_dir)
        for member in tar.getmembers():
            member_path = os.path.realpath(os.path.join(dest, member.name))
            if os.path.commonpath([dest, member_path]) != dest:
                raise ValueError(f"Unsafe path in template archive: {member.name}")
        tar.extractall(template_dir)
    # Delete the tar file
    os.remove(full_path_template)
def apply_template_website(website_dir, template_dict):
    """
    Render every file under *website_dir* in place as a Jinja2 template.

    For each directory, the per-page context is looked up in *template_dict*
    under the directory's path relative to *website_dir* ("/" for the root,
    "/sub" for a subdirectory) and merged over the shared
    ``template_dict['common']['context']`` values (page values win).
    Rendering errors are logged per file and do not abort the run.

    :param website_dir: Folder of website files
    :param template_dict: Mapping of path key -> {'context': {...}}; the
        'common' entry is merged into every page's context
    :return: None
    """
    # Setup skip dirs (static assets are not templates)
    skip_dirs = [os.path.join(website_dir, "img")]
    # Prepare common context
    common_context = template_dict.get('common', {}).get('context', {})
    # Loop all the files
    for root, _, files in os.walk(website_dir):
        # Skip certain dirs
        if root in skip_dirs:
            continue
        # Setup jinja2 loader rooted at the current directory
        env = Environment(loader=FileSystemLoader(root))
        # Derive the config key from the path relative to website_dir.
        # str.replace() would also strip the prefix if it reappeared deeper
        # in the path, so use os.path.relpath; normalize to "/" separators
        # so keys match template_dict on Windows too. Loop-invariant, so
        # computed once per directory rather than per file.
        rel = os.path.relpath(root, website_dir)
        config_key = "/" if rel == "." else "/" + rel.replace(os.sep, "/")
        # Loop all files in directory
        for file in files:
            try:
                # Build file path
                local_path = os.path.join(root, file)
                # Get jinja2 template and render with merged context
                template = env.get_template(file)
                stem_context = template_dict.get(config_key, {}).get('context', {})
                context = {**common_context, **stem_context}
                rendered_content = template.render(context)
                # Write the rendered file
                with open(local_path, 'w', encoding='utf-8') as outfile:
                    outfile.write(rendered_content)
            except Exception as ex:  # best-effort: log and continue with next file
                logging.error("[FILE JINJA] [%s] [%s]", file, str(ex))
def get_images(website_dir, image_list, campaign_key, bucket_name,
               region_name, aws_access_key, aws_secret_key):
    """
    Download campaign images into the website's ``img`` directory.

    Each entry in *image_list* is an object key whose second "/"-separated
    segment must equal *campaign_key*; malformed or mismatched keys are
    logged and skipped. Download failures are logged per image and do not
    abort the run (consistent with upload_website).

    :param website_dir: Folder of website files
    :param image_list: List of object keys for the images to pull
    :param campaign_key: Campaign identifier the keys must belong to
    :param bucket_name: The name of the bucket to work with
    :param region_name: The region of the bucket
    :param aws_access_key: Authentication access key
    :param aws_secret_key: Authentication secret key
    :return: None
    """
    # Create client connection
    s3 = boto3.client('s3',
                      endpoint_url=f'https://{region_name}.linodeobjects.com',
                      aws_access_key_id=aws_access_key,
                      aws_secret_access_key=aws_secret_key)
    # Loop and download all the images
    for image in image_list:
        parts = image.split('/')
        # Check that the key is well-formed and belongs to this campaign.
        # Indexing parts[1] unguarded would raise IndexError on keys
        # containing no '/'.
        if len(parts) < 2 or parts[1] != campaign_key:
            found = parts[1] if len(parts) > 1 else image
            logging.error("[BAD IMAGE MATCH] [%s] [%s]", found, campaign_key)
            continue
        file_name = os.path.basename(image)
        download_file = os.path.join(website_dir, "img", file_name)
        try:
            s3.download_file(bucket_name, image, download_file)
        except Exception as ex:  # best-effort: log and continue with next image
            logging.error("[FILE DOWNLOAD] [%s] [%s] [%s]", bucket_name, image, str(ex))