Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Initial functionality to ppe #1

Merged
merged 17 commits into from
Dec 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 49 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# .github/workflows/test.yml
# CI: run the pytest suite (excluding tests that need real hardware or the
# real ElectricityMaps API) on every PR targeting main or ppe.
name: Run tests

on:
  pull_request:
    branches:
      - main
      - ppe

jobs:
  test:
    runs-on: ubuntu-latest
    # Database credentials come from repository secrets; docker-compose and the
    # app both read them from the environment.
    env:
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
      DB_NAME: ${{ secrets.DB_NAME }}
      DB_PORT: ${{ secrets.DB_PORT }}
      DB_VIEW_USER: ${{ secrets.DB_VIEW_USER }}
      DB_VIEW_USER_PASSWORD: ${{ secrets.DB_VIEW_USER_PASSWORD }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          # Quoted so YAML treats the version as a string, not a float.
          python-version: "3.12"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

      - name: Set up Docker Compose
        run: |
          sudo apt-get update
          sudo apt-get install -y docker-compose
          docker-compose up -d
          # output the postgres db host name
          docker ps
          # give postgres time to finish initializing before the sanity check
          sleep 10
          docker exec kasa_carbon_db_1 psql -U ${{ secrets.DB_USER }} -c "\l"

      - name: Run tests
        env:
          PYTHONPATH: ${{ github.workspace }}
          EM_CACHE_EXPIRY_MINS: 30
        run: pytest -m "not real_device and not real_em_api"
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -158,3 +158,9 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Power BI report files (binary artifacts, not suitable for version control)
*.pbix

# Project-specific ignores: local ElectricityMaps API response cache
em_cache.json
7 changes: 7 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}
4 changes: 4 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Use an official Python runtime as a parent image
FROM python:3.12-slim-bookworm

# Install the PostgreSQL packages the app's database layer relies on.
# --no-install-recommends avoids pulling in unneeded packages, and removing
# /var/lib/apt/lists/* drops the apt index cache from the final image layer.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        software-properties-common \
        postgresql \
        postgresql-contrib \
    && rm -rf /var/lib/apt/lists/*
22 changes: 22 additions & 0 deletions config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
"""Application configuration: display/API placeholders and database settings."""
import os

# Configuration for Kasa

# Configuration for Grafana (placeholders — replace with real values to enable)
GRAFANA_URL = "your_grafana_url"
GRAFANA_API_KEY = "your_grafana_api_key"

# Configuration for General API
API_KEY = "your_api_key"
API_TYPE = "electricitymaps"  # or "watttime"

# Update interval in seconds between energy-use readings.
UPDATE_INTERVAL_SEC = 15  # second interval to update

# Database connection information, read from the environment (.env in dev,
# GitHub Actions secrets in CI). Keys match asyncpg.connect() keyword args.
# NOTE(review): values are None when the env vars are unset — connection
# attempts will fail at runtime rather than here.
db_config = {
    "user": os.getenv("DB_USER"),
    "password": os.getenv("DB_PASSWORD"),
    "database": os.getenv("DB_NAME"),
    "host": os.getenv("DB_HOST"),
    "port": os.getenv("DB_PORT"),
}
17 changes: 17 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
version: '3.1'

services:
  # PostgreSQL backing store for energy-usage readings.
  db:
    image: postgres
    restart: always
    # Values are substituted from the shell environment / .env file.
    environment:
      POSTGRES_DB: ${DB_NAME}
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USER}
      VIEW_USER: ${DB_VIEW_USER}
      VIEW_USER_PASSWORD: ${DB_VIEW_USER_PASSWORD}
    volumes:
      # SQL scripts in ./sql run automatically on first container init.
      - ./sql:/docker-entrypoint-initdb.d
    ports:
      - ${DB_PORT}:5432

23 changes: 23 additions & 0 deletions interfaces/carbon_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from abc import ABC, abstractmethod

class CarbonAPI(ABC):
    """Abstract interface for carbon-intensity data providers.

    Concrete implementations (e.g. ElectricityMaps, WattTime) must supply
    both async lookup methods below.
    """

    @abstractmethod
    async def get_co2_by_latlon(self, lat: float, lon: float) -> float:
        """Return the current CO2 emission rate per kWh at a lat/lon point.

        :param lat: The latitude of the location.
        :param lon: The longitude of the location.
        :return: The current CO2 emission rate per kWh.
        """

    @abstractmethod
    async def get_co2_by_gridid(self, grid_id: str) -> float:
        """Return the current CO2 emission rate per kWh for a grid zone.

        :param grid_id: The ID of the grid.
        :return: The current CO2 emission rate per kWh.
        """
31 changes: 31 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import asyncio
import config
from modules import kasa_monitor, grafana_display, electricitymaps_api, watttime_api, database
from interfaces import carbon_api
import os
from dotenv import load_dotenv

async def main():
    """Entry point: continuously monitor Kasa device energy use into the DB.

    Starts the monitor as a background task, then loops printing a status
    line and the current database contents every UPDATE_INTERVAL_SEC seconds.
    """
    load_dotenv()
    # Create instances of each module
    kasa = kasa_monitor.KasaMonitor()
    #grafana = grafana_display.GrafanaDisplay()
    db = database.Database(config.db_config)

    # Monitor energy use continuously in a subthread to avoid blocking the main thread
    energy_use_task = asyncio.create_task(
        kasa.monitor_energy_use_continuously(db, delay=config.UPDATE_INTERVAL_SEC)
    )

    while True:
        if energy_use_task.done():
            print("The energy monitoring task has completed.")
            # Bug fix: previously the loop kept spinning after completion and
            # any exception raised inside the task was silently swallowed.
            # result() re-raises the task's exception (if any); then stop.
            energy_use_task.result()
            break
        print("The energy monitoring task is still running.")

        # view the current database values
        print(await db.read_usage())

        # Wait for next update
        await asyncio.sleep(config.UPDATE_INTERVAL_SEC)

if __name__ == "__main__":
    asyncio.run(main())
Empty file added modules/__init__.py
Empty file.
41 changes: 41 additions & 0 deletions modules/database.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import asyncio
import asyncpg

class Database:
    """Thin asyncpg wrapper around the energy_usage table.

    A fresh connection is opened for each operation and closed in a
    finally block, so no connection is held between calls.
    """

    def __init__(self, db_config):
        # db_config: keyword arguments for asyncpg.connect()
        # (user, password, database, host, port).
        self.db_config = db_config
        self.conn = None

    async def write_usage(self, energy_usage):
        """Insert one row; energy_usage maps column name -> value."""
        self.conn = await asyncpg.connect(**self.db_config)
        try:
            sql_query = self._generate_insert_sql_query(energy_usage)
            # Values are passed as bound parameters ($1, $2, ...) — only the
            # column names are interpolated, and those are validated below.
            await self.conn.execute(sql_query, *energy_usage.values())
        finally:
            await self.close()

    async def read_usage(self, columns="*"):
        """Fetch all rows.

        :param columns: "*", a single column name, or an iterable of names.
        :return: list of asyncpg Record objects.
        """
        self.conn = await asyncpg.connect(**self.db_config)
        try:
            sql_query = self._generate_select_sql_query(columns)
            result = await self.conn.fetch(sql_query)
        finally:
            await self.close()
        return result

    async def close(self):
        """Close the current connection, if any."""
        if self.conn is not None and not self.conn.is_closed():
            await self.conn.close()
        self.conn = None

    @staticmethod
    def _validate_identifiers(names):
        # Column names are interpolated directly into the SQL text, so only
        # plain Python-identifier-shaped names are allowed (guards against
        # SQL injection via untrusted column names).
        for name in names:
            if not isinstance(name, str) or not name.isidentifier():
                raise ValueError(f"Invalid column name: {name!r}")

    def _generate_insert_sql_query(self, energy_usage):
        """Build a parameterized INSERT for the energy_usage table."""
        self._validate_identifiers(energy_usage.keys())
        columns = ', '.join(energy_usage.keys())
        values = ', '.join('$' + str(i) for i in range(1, len(energy_usage) + 1))
        return f"INSERT INTO energy_usage ({columns}) VALUES ({values})"

    def _generate_select_sql_query(self, columns="*"):
        """Build a SELECT for the energy_usage table."""
        if columns == "*":
            return "SELECT * FROM energy_usage"
        # Bug fix: a bare string such as "watts" was previously joined
        # character by character ("w, a, t, t, s"); treat it as one column.
        if isinstance(columns, str):
            columns = [columns]
        self._validate_identifiers(columns)
        return f"SELECT {', '.join(columns)} FROM energy_usage"
97 changes: 97 additions & 0 deletions modules/electricitymaps_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@

import aiohttp
from interfaces.carbon_api import CarbonAPI
import os
from datetime import datetime, timezone, timedelta
import json

class ElectricityMapAPI(CarbonAPI):
    """ElectricityMaps free-tier client with a small on-disk JSON cache.

    Responses are cached per query (zone or lat/lon) for CACHE_EXPIRY to
    ease the load on the API. The API key is read from EM_API_KEY.
    """

    BASE_URL = "https://api-access.electricitymaps.com/free-tier/carbon-intensity/latest"
    CACHE_FILE = "em_cache.json"
    # Default cache lifetime (minutes) when EM_CACHE_EXPIRY_MINS is unset.
    DEFAULT_CACHE_EXPIRY_MINS = 30

    def __init__(self, co2_time_threshold_mins=120, clear_cache=False):
        self.api_key = os.getenv("EM_API_KEY")
        # Maximum age (minutes) of the API's "updatedAt" timestamp before a
        # reading is considered stale and None is returned.
        self.co2_time_threshold_mins = co2_time_threshold_mins
        # Bug fix: timedelta's first positional argument is *days*, so the
        # old timedelta(int(...)) turned a 30-minute setting into 30 days;
        # pass minutes= explicitly. Also default instead of crashing with
        # TypeError when EM_CACHE_EXPIRY_MINS is unset.
        self.CACHE_EXPIRY = timedelta(
            minutes=int(os.getenv("EM_CACHE_EXPIRY_MINS", self.DEFAULT_CACHE_EXPIRY_MINS))
        )
        if clear_cache:
            self._clear_cache()
        else:
            self._load_cache()

    def _load_cache(self):
        """Load the cache from disk; timestamps are restored as datetimes."""
        if os.path.exists(self.CACHE_FILE):
            with open(self.CACHE_FILE, 'r') as f:
                cache = json.load(f)
            # Convert the stored ISO timestamp strings back to datetime objects.
            self.cache = {key: [value[0], datetime.fromisoformat(value[1])] for key, value in cache.items()}
        else:
            self.cache = {}

    def _save_cache(self):
        """Persist the cache to disk; datetimes are stored as ISO strings."""
        cache = {key: [value[0], value[1].isoformat()] for key, value in self.cache.items()}
        with open(self.CACHE_FILE, 'w') as f:
            json.dump(cache, f)

    def _clear_cache(self):
        """Empty the in-memory cache and overwrite the cache file."""
        self.cache = {}
        self._save_cache()

    async def get_co2_by_gridid(self, grid_id: str) -> float:
        '''
        Get the current CO2 emission rate per kWh for a specific location, identified by grid ID.
        Uses the local cache (lifetime CACHE_EXPIRY) to ease the load on the API.

        Args:
            grid_id (str): The grid ID of the specific location.

        Returns:
            float: The CO2 emission rate in gCO2/kWh if data is fresher than
            co2_time_threshold_mins, otherwise None.
        '''
        params = {"zone": grid_id}
        return await self._get_co2_data(params)

    async def get_co2_by_latlon(self, lat: float, lon: float) -> float:
        '''
        Get the current CO2 emission rate per kWh for a specific location, identified by latitude and longitude.
        Uses the local cache (lifetime CACHE_EXPIRY) to ease the load on the API.

        Args:
            lat (float): The latitude of the location.
            lon (float): The longitude of the location.

        Returns:
            float: The CO2 emission rate in gCO2/kWh if data is fresher than
            co2_time_threshold_mins, otherwise None.
        '''
        params = {"lat": lat, "lon": lon}
        return await self._get_co2_data(params)

    async def _get_co2_data(self, params) -> float:
        """Return CO2 intensity for *params*, consulting the cache first."""
        # The query params (zone, or lat/lon) joined by '_' form the cache key.
        cache_key = '_'.join(str(v) for v in params.values())
        if cache_key in self.cache and datetime.now(timezone.utc) - self.cache[cache_key][1] < self.CACHE_EXPIRY:
            return self.cache[cache_key][0]

        headers = {"auth-token": self.api_key}
        co2_data = None
        async with aiohttp.ClientSession() as session:
            async with session.get(self.BASE_URL, headers=headers, params=params) as response:
                data = await response.json()
                co2_data = self._process_co2_data(data)

        # Store the data and the current time in the cache, then persist it.
        self.cache[cache_key] = (co2_data, datetime.now(timezone.utc))
        self._save_cache()

        return co2_data

    def _process_co2_data(self, data):
        """Extract carbonIntensity from an API response; None if stale."""
        data_updated_at = datetime.strptime(data["updatedAt"], '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=timezone.utc)
        # Reject readings older than co2_time_threshold_mins.
        if abs((data_updated_at - datetime.now(timezone.utc)).total_seconds()) / 60 > self.co2_time_threshold_mins:
            return None
        return float(data["carbonIntensity"])



Empty file added modules/grafana_display.py
Empty file.
Loading
Loading