NBs for cleaning and storing email data #11

Open
wants to merge 3 commits into master
172 changes: 172 additions & 0 deletions notebooks/add_new_data.ipynb
@@ -0,0 +1,172 @@
{
Member commented via ReviewNB:

What bucket are you using for this data? I can't find a cdolfi subdir on the DH-PLAYPEN instance.

Also the naming convention you are using is a bit confusing. I think s3_bucket_name should be something like s3_prefix since it's the directory name you want to use inside of the bucket, right?

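A minimal sketch of the renaming this comment seems to suggest, assuming the shared bucket name comes from the BUCKET env var and JUPYTERHUB_USER is only used as a key prefix inside that bucket (the names s3_bucket and s3_prefix below are illustrative, not from the notebook):

import os
import boto3

# illustrative names: the real bucket vs. the per-user directory inside it
s3_bucket = os.environ['BUCKET']            # assumption: the shared bucket on the playpen instance
s3_prefix = os.environ['JUPYTERHUB_USER']   # directory name used as a key prefix, e.g. "cdolfi"

s3 = boto3.client('s3', 'us-east-1',
                  endpoint_url=os.environ['S3_ENDPOINT_URL'],
                  aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
                  aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])

# list_objects_v2 can scope results to the prefix directly
for obj in s3.list_objects_v2(Bucket=s3_bucket, Prefix=s3_prefix).get('Contents', []):
    print(obj['Key'])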

"cells": [
{
"cell_type": "code",
"execution_count": 60,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import numpy as np \n",
"import os \n",
"import boto3 \n",
"import gzip\n",
"from dotenv import load_dotenv, find_dotenv"
]
},
{
"cell_type": "code",
"execution_count": 61,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"https://s3.upshift.redhat.com\n",
"cdolfi\n"
]
}
],
"source": [
"dotenv_path = find_dotenv()\n",
"load_dotenv(dotenv_path)\n",
"s3_secret_key = os.environ['AWS_SECRET_ACCESS_KEY']\n",
"s3_bucket_name = os.environ['JUPYTERHUB_USER']\n",
"s3_endpoint_url = os.environ['S3_ENDPOINT_URL']\n",
"s3_access_key = os.environ['AWS_ACCESS_KEY_ID']\n",
"s3bucket = os.environ['BUCKET']\n",
"\n",
"print(s3_endpoint_url)\n",
"print(s3_bucket_name)\n",
"s3 = boto3.client('s3','us-east-1', endpoint_url= s3_endpoint_url,\n",
" aws_access_key_id = s3_access_key,\n",
" aws_secret_access_key = s3_secret_key)"
]
},
{
"cell_type": "code",
"execution_count": 80,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"#CHANGE WITH WHAT YOU ARE UPDATING \n",
"folder = \"user\"\n",
"files = []\n",
"\n",
"for key in s3.list_objects(Bucket=s3_bucket_name)['Contents']:\n",
" if key['Key'][:4] == folder:\n",
" files.append(key['Key'])\n",
" \n",
"obj = s3.get_object(Bucket=s3_bucket_name, Key = files[0]) \n",
"combined_emails = pd.read_csv(obj['Body'])\n",
" \n",
"for f in files[1:]:\n",
" temp = s3.get_object(Bucket=s3_bucket_name, Key = f) \n",
" n_df = pd.read_csv(temp['Body'])\n",
" combined_emails = pd.concat([combined_emails, n_df])\n",
" \n",
" \n",
"combined_emails.sort_values(by= 'datetime', inplace = True)\n",
"combined_emails.drop('Unnamed: 0',axis=1, inplace=True )\n"
]
},
{
"cell_type": "code",
"execution_count": 82,
"metadata": {},
"outputs": [],
"source": [
"#update complete csv \n",
"combined_emails.to_csv('../data/interim/temp_complete.csv')\n",
"s3_location = folder + \"/\" + folder + \"_complete.csv\"\n",
"s3.upload_file(Filename='../data/interim/temp_complete.csv',Bucket=s3_bucket_name, Key=s3_location)"
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [],
"source": [
"#remove merged files\n",
"for k in files:\n",
" if k != s3_location:\n",
" s3.delete_object(Bucket=s3_bucket_name, Key=k)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"To check what items are in the bucket"
]
},
{
"cell_type": "code",
"execution_count": 79,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"devl/devl_complete.csv\n",
"user/2004_3-2005_1.csv\n",
"user/2005_2-2006_1.csv\n",
"user/2006_02-2006_05.csv\n",
"user/2006_06-2006_07.csv\n",
"user/2006_07-2006_08.csv\n",
"user/2006_09-2007_01.csv\n",
"user/2007_01-2007_06.csv\n",
"user/2007_07-2008_01.csv\n",
"user/2008_01-2009_01.csv\n",
"user/2009_01-2009_06.csv\n",
"user/2009_07-2010_01.csv\n",
"user/2010_01-2010_01.csv\n",
"user/2010_01-2011_01.csv\n",
"user/2011_01-2012_01.csv\n",
"user/2012_01-2013_01.csv\n",
"user/2013_01-2014_01.csv\n",
"user/2014_01-2015_01.csv\n",
"user/2016_01-2017_01.csv\n",
"user/2017_01-2018_01.csv\n",
"user/2018_01-2019_01.csv\n",
"user/2019_01-2020_01.csv\n",
"user/2020_01-2021_01.csv\n",
"user/user_complete.csv\n"
]
}
],
"source": [
"for key in s3.list_objects(Bucket=s3_bucket_name)['Contents']:\n",
" print(key['Key'])"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "mailing",
"language": "python",
"name": "mailing"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}