# BDC-team-1/0_Input_cleaning.py
# Business Data Challenge - Team 1
import pandas as pd
import numpy as np
import os
import s3fs
import re
import warnings
# Create filesystem object
S3_ENDPOINT_URL = "https://" + os.environ["AWS_S3_ENDPOINT"]
fs = s3fs.S3FileSystem(client_kwargs={'endpoint_url': S3_ENDPOINT_URL})
# Import cleaning and merge functions
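# exec() runs the helper script in this namespace, making preprocessing_customerplus,
# preprocessing_target_area, preprocessing_campaigns_area and uniform_product_df available below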
exec(open('0_Cleaning_and_merge_functions.py').read())
# Output folder
BUCKET_OUT = "projet-bdc2324-team1"
# Ignore warnings
warnings.filterwarnings('ignore')
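# Helper: write a dataframe as CSV to the team bucket; output_name is the key relative to BUCKET_OUT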
def export_dataset(df, output_name):
    print('Exporting temporary dataset:', output_name)
    FILE_PATH_OUT_S3 = BUCKET_OUT + "/" + output_name
    with fs.open(FILE_PATH_OUT_S3, 'w') as file_out:
        df.to_csv(file_out, index = False)
## 1 - Cleaning of the datasets
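# Each preprocessing function is called with directory_path = "1", i.e. the raw-data folder for Company 1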
# Cleaning customerplus
df1_customerplus_clean = preprocessing_customerplus(directory_path = "1")
## Exportation
export_dataset(df = df1_customerplus_clean, output_name = "0_Input/Company_1/customerplus_cleaned.csv")
# Cleaning target area
df1_target_information = preprocessing_target_area(directory_path = "1")
## Exportation
export_dataset(df = df1_target_information, output_name = "0_Input/Company_1/target_information.csv")
# Cleaning campaign area
df1_campaigns_information = preprocessing_campaigns_area(directory_path = "1")
## Exportation
export_dataset(df = df1_campaigns_information, output_name = "0_Input/Company_1/Campaigns dataset clean.csv")
## Exportation
export_dataset(df = df1_campaigns_information, output_name = "0_Temp/Company 1 - Campaigns dataset clean.csv")
# Cleaning product area
df1_products_purchased_reduced = uniform_product_df(directory_path = "1")
## Exportation
export_dataset(df = df1_products_purchased_reduced, output_name = "0_Input/Company_1/products_purchased_reduced.csv")
## Exportation
export_dataset(df = df1_products_purchased_reduced, output_name = "0_Temp/Company 1 - Purchases.csv")