diff --git a/Bootstrapper/IAS_G4.zip b/Bootstrapper/IAS_G4.zip
new file mode 100644
index 0000000..c86f024
Binary files /dev/null and b/Bootstrapper/IAS_G4.zip differ
diff --git a/Bootstrapper/IAS_G4/ActionManager.sh b/Bootstrapper/IAS_G4/ActionManager.sh
new file mode 100644
index 0000000..88fa617
--- /dev/null
+++ b/Bootstrapper/IAS_G4/ActionManager.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- ActionManager -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying ActionManager.........."
+printf "\n\n"
+
+path="./ActionManager"
+cd ${path}
+echo $1 | sudo -S docker build . -t actionmanager:latest;
+echo $1 | sudo -S docker run actionmanager
+
diff --git a/Bootstrapper/IAS_G4/ApplicationManager.sh b/Bootstrapper/IAS_G4/ApplicationManager.sh
new file mode 100644
index 0000000..7b4c09f
--- /dev/null
+++ b/Bootstrapper/IAS_G4/ApplicationManager.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- ApplicationManager -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying ApplicationManager.........."
+printf "\n\n"
+
+path="./ApplicationManager"
+cd ${path}
+echo $1 | sudo -S docker build . -t applicationmanager:latest;
+echo $1 | sudo -S docker run applicationmanager
+
diff --git a/Bootstrapper/IAS_G4/Authentication.sh b/Bootstrapper/IAS_G4/Authentication.sh
new file mode 100644
index 0000000..5529f0c
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\n\"; \
+echo ---------------------- Authentication -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying Authentication.........."
+printf "\n\n"
+
+path="./Authentication"
+cd ${path}
+echo $1 | sudo -S docker build . -t aut:latest;
+echo $1 | sudo -S docker run aut
+
diff --git a/Bootstrapper/IAS_G4/Authentication/Dockerfile b/Bootstrapper/IAS_G4/Authentication/Dockerfile
new file mode 100644
index 0000000..4349d49
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/Dockerfile
@@ -0,0 +1,8 @@
+FROM alpine:latest
+
+RUN apk update && apk add python3 py3-pip
+# COPY requirements.txt ./ActionManager/requirements.txt
+COPY . /Authentication
+RUN pip3 install --no-cache-dir -r /Authentication/requirements.txt
+
+ENTRYPOINT python3 -u /Authentication/app.py
diff --git a/Bootstrapper/IAS_G4/Authentication/__pycache__/authentication.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/__pycache__/authentication.cpython-310.pyc
new file mode 100644
index 0000000..edee3db
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/__pycache__/authentication.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/__pycache__/developer.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/__pycache__/developer.cpython-310.pyc
new file mode 100644
index 0000000..18837c1
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/__pycache__/developer.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/__pycache__/developerHome.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/__pycache__/developerHome.cpython-310.pyc
new file mode 100644
index 0000000..aad3ce0
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/__pycache__/developerHome.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/__pycache__/platformAdmin.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/__pycache__/platformAdmin.cpython-310.pyc
new file mode 100644
index 0000000..76ddf5b
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/__pycache__/platformAdmin.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/__pycache__/settings.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/__pycache__/settings.cpython-310.pyc
new file mode 100644
index 0000000..95966b5
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/__pycache__/settings.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/app.py b/Bootstrapper/IAS_G4/Authentication/app.py
new file mode 100644
index 0000000..cdad130
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/app.py
@@ -0,0 +1,48 @@
+from flask import render_template, request, redirect, url_for, flash, session
+from views.global_ldap_authentication import *
+from forms.LoginForm import *
+from flask_cors import CORS, cross_origin
+from flask_session import Session
+from main import app
+import developer
+import authentication
+import platformAdmin
+
+
+app.config["SESSION_PERMANENT"] = False
+app.config["SESSION_TYPE"] = "filesystem"
+Session(app)
+
+# app.secret_key = "testing"
+
+# client = pymongo.MongoClient(host="localhost", port=27017)
+
+app.register_blueprint(developer.developer)
+app.register_blueprint(authentication.authentication)
+app.register_blueprint(platformAdmin.platformAdmin)
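+# Route map assembled from the blueprints above:
+#   authentication -> /login, /logout; developer -> /developerHome; platformAdmin -> /platformAdminHome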
+
+@app.route('/')
+def home():
+ if not session.get("name"):
+ return redirect("/login")
+ if session.get("name") == "developer":
+ return redirect("/developerHome")
+ if session.get("name") == "admin":
+ return redirect("/adminHome")
+
+ return "HOME"
+
+
+
+# @app.route('/upload', methods=['GET', 'POST'])
+# def upload_file():
+# if request.method == 'POST':
+
+
+
+
+
+if __name__ == '__main__':
+ app.run(host="0.0.0.0",debug=True,port=5005)
+
+
diff --git a/Bootstrapper/IAS_G4/Authentication/authentication.py b/Bootstrapper/IAS_G4/Authentication/authentication.py
new file mode 100644
index 0000000..f265a28
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/authentication.py
@@ -0,0 +1,39 @@
+from flask import render_template, request, redirect, url_for, flash, session,Blueprint
+from views.global_ldap_authentication import *
+from forms.LoginForm import *
+from flask_cors import CORS, cross_origin
+from flask_session import Session
+
+authentication = Blueprint("authentication", __name__)
+
+
+@authentication.route('/login', methods=['GET','POST'])
+def index():
+
+ # initiate the form..
+ form = LoginValidation()
+
+    if request.method == 'POST':
+ login_id = request.form['user_name_pid']
+ login_password = request.form['user_pid_Password']
+
+ # create a directory to hold the Logs
+ login_msg = global_ldap_authentication(login_id, login_password)
+
+ # validate the connection
+ if login_msg == "Success":
+ success_message = f"*** Authentication Success "
+ session["name"] = request.form.get("user_name_pid")
+ return redirect("/")
+
+ else:
+ error_message = f"*** Authentication Failed - {login_msg}"
+ return render_template("error.html", error_message=str(error_message))
+
+ return render_template('login.html', form=form)
+
+@authentication.route("/logout")
+def logout():
+ session["name"] = None
+ return redirect("/")
+
diff --git a/Bootstrapper/IAS_G4/Authentication/developer.py b/Bootstrapper/IAS_G4/Authentication/developer.py
new file mode 100644
index 0000000..017e4cc
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/developer.py
@@ -0,0 +1,10 @@
+from flask import render_template, request, redirect, url_for, flash, session, Blueprint
+
+developer = Blueprint("developer", __name__)
+
+
+@developer.route('/developerHome', methods=['GET','POST'])
+def developerHome():
+ # if not session.get("name"):
+ # return redirect("/login")
+ return render_template('developerHome.html')
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/flask_session/2029240f6d1128be89ddc32729463129 b/Bootstrapper/IAS_G4/Authentication/flask_session/2029240f6d1128be89ddc32729463129
new file mode 100644
index 0000000..60b84f8
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/flask_session/2029240f6d1128be89ddc32729463129 differ
diff --git a/Bootstrapper/IAS_G4/Authentication/forms/LoginForm.py b/Bootstrapper/IAS_G4/Authentication/forms/LoginForm.py
new file mode 100644
index 0000000..2f85c28
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/forms/LoginForm.py
@@ -0,0 +1,11 @@
+from main import app
+from flask_wtf import FlaskForm
+from wtforms import StringField, PasswordField, validators
+
+
+class LoginValidation(FlaskForm):
+ user_name_pid = StringField('', [validators.DataRequired()],
+ render_kw={'autofocus': True, 'placeholder': 'Enter User'})
+
+ user_pid_Password = PasswordField('', [validators.DataRequired()],
+ render_kw={'autofocus': True, 'placeholder': 'Enter your login Password'})
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/forms/__pycache__/LoginForm.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/forms/__pycache__/LoginForm.cpython-310.pyc
new file mode 100644
index 0000000..018d2c3
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/forms/__pycache__/LoginForm.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/main/__init__.py b/Bootstrapper/IAS_G4/Authentication/main/__init__.py
new file mode 100644
index 0000000..6477707
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/main/__init__.py
@@ -0,0 +1,44 @@
+import os
+
+from flask import Flask
+from flask_bootstrap import Bootstrap
+from flask_wtf.csrf import CSRFProtect
+
+# import redis
+# from flask_session import Session
+
+# -- ---------------------------------------------------------------------------------
+# -- Script Name : Ldap Authentication with FLask
+# -- Author : *******
+# -- Date : *******
+# -- ---------------------------------------------------------------------------------
+# -- Description : Authenticate users with Flask
+# -- ---------------------------------------------------------------------------------
+# -- Version History
+# -- ===============
+# --
+# -- Who version Date Description
+# -- === ======= ====== ======================
+# -- XXXXXXXX 1.0 Jan 21 Initial Version.
+# -- ---------------------------------------------------------------------------------
+
+
+current_path = os.environ['PATH']
+print(current_path)
+
+# -- ---------------------------------------------------------------------------------
+# -- Function : Initiate the App
+# -- ---------------------------------------------------------------------------------
+app = Flask(__name__,
+ template_folder="../templates",static_folder="../static"
+ )
+
+bootstrap = Bootstrap(app)
+app.config.from_object('settings')
+app.secret_key = os.urandom(24)
+app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
+
+
+print('Inside __init__py')
+
+from main import app
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/main/__pycache__/__init__.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/main/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..fb1cbaf
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/main/__pycache__/__init__.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/manage.py b/Bootstrapper/IAS_G4/Authentication/manage.py
new file mode 100644
index 0000000..2cddc34
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/manage.py
@@ -0,0 +1,15 @@
+from main import app
+
+# Flask-Script 2.0.6 fails to import under the pinned Flask==2.2.3 (flask._compat was
+# removed in Flask 2.0), so the dev server is started directly instead of via Manager/Server.
+if __name__ == '__main__':
+    app.run(host='localhost', port=5005, debug=True, use_reloader=True)
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/platformAdmin.py b/Bootstrapper/IAS_G4/Authentication/platformAdmin.py
new file mode 100644
index 0000000..d4dc262
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/platformAdmin.py
@@ -0,0 +1,10 @@
+from flask import render_template, request, redirect, url_for, flash, session, Blueprint
+
+platformAdmin = Blueprint("platformAdmin", __name__)
+
+
+@platformAdmin.route('/platformAdminHome', methods=['GET','POST'])
+def platformAdminHome():
+ # if not session.get("name"):
+ # return redirect("/login")
+ return render_template('platformAdminHome.html')
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/requirements.txt b/Bootstrapper/IAS_G4/Authentication/requirements.txt
new file mode 100644
index 0000000..d2ff916
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/requirements.txt
@@ -0,0 +1,8 @@
+Flask==2.2.3
+Flask-Script==2.0.6
+Flask-Cors==3.0.10
+Flask-Session==0.4.0
+ldap3==2.9.1
+Flask-Bootstrap==3.3.7.1
+Flask-WTF==1.1.1
+WTForms==3.0.1
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/settings.py b/Bootstrapper/IAS_G4/Authentication/settings.py
new file mode 100644
index 0000000..e69de29
diff --git a/Bootstrapper/IAS_G4/Authentication/static/developerHome.css b/Bootstrapper/IAS_G4/Authentication/static/developerHome.css
new file mode 100644
index 0000000..c8ed831
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/static/developerHome.css
@@ -0,0 +1,132 @@
+body {
+ background-color: darkslategray;
+ padding: 0;
+ display: flex;
+ justify-content: center;
+ font-family: 'Roboto', sans-serif;
+}
+.container {
+ margin:0;
+ width: 100%;
+}
+.card2 {
+ margin: 0;
+ margin-top: 1%;
+ /* min-width: 100em; */
+ display: flex;
+ flex-direction: row;
+}
+
+.card21 {
+ display: flex;
+ flex-direction: row;
+ justify-content: left;
+ width: 50%;
+ padding: 5px;
+}
+
+.card22 {
+ margin-right: auto;
+ display: flex;
+ width: 50%;
+ justify-content: right;
+ padding: 5px;
+}
+.card3 {
+ max-width: 28%;
+ display: flex;
+ padding-top: 5%;
+}
+.card4 {
+ max-width: 71%;
+ margin-left: 1%;
+ padding-top: 2%;
+}
+.card5 {
+ max-width: 100%;
+ padding-top: 2%;
+}
+.row {
+ display: flex;
+ flex-direction: row;
+ padding: 12px;
+ min-height: 45vh;
+}
+.row1 {
+ display: flex;
+ flex-direction: row;
+ padding: 12px;
+ min-height: 40vh;
+}
+
+.uploadHeading{
+ font-weight: bold;
+ font-size: 30px;
+ display: flex;
+ justify-content: center;
+}
+div.box2 {
+    height: 200px;
+    display: flex;
+    flex-direction: column;
+    overflow: auto;
+}
+.box2::-webkit-scrollbar {
+    display: none;
+}
+.permSensorHeading {
+ font-weight: bold;
+ font-size: 30px;
+ display: flex;
+ justify-content: center;
+ margin-bottom: 10px;
+}
+
+.uploadbtndiv {
+ display: flex;
+ justify-content: center;
+ padding: 30px;
+}
+
+.buttonUpload {
+ background-color: darkslategray;
+ color: white;
+ width: 250px;
+ display: flex;
+ justify-content: center;
+ font-weight: bold;
+ border-radius: 5px;
+ height: 70px;
+ padding-top: 3%;
+ font-size: 20px;
+ transition: 0.5s;
+}
+.buttonUpload:hover {
+ background-color: white;
+ color: darkslategray;
+ height: 75px;
+ width: 260px;
+ transition: 0.5s;
+}
+
+.btn
+{
+ background-color: darkslategray;
+ color: white;
+ width: 100%;
+}
+.heading {
+ display: flex;
+ justify-content: center;
+}
+.card1 {
+ margin-top: 10%;
+}
+
+.welcomeheading{
+ font-weight: bold;
+ display: flex;
+ justify-content: center;
+ margin: 0;
+
+}
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/static/platformAdmin.css b/Bootstrapper/IAS_G4/Authentication/static/platformAdmin.css
new file mode 100644
index 0000000..1cf361a
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/static/platformAdmin.css
@@ -0,0 +1,158 @@
+body {
+ background-color: darkslategray;
+ padding: 0;
+ display: flex;
+ justify-content: center;
+ font-family: 'Roboto', sans-serif;
+}
+.container {
+ margin:0;
+ width: 100%;
+}
+.card2 {
+
+ /* min-width: 100em; */
+ margin: 0;
+ margin-top: 1%;
+ display: flex;
+ flex-direction: row;
+ /* justify-content: center; */
+}
+.card21 {
+ display: flex;
+ flex-direction: row;
+ justify-content: left;
+ width: 50%;
+ padding: 5px;
+}
+
+.card22 {
+ margin-right: auto;
+ display: flex;
+ width: 50%;
+ justify-content: right;
+ padding: 5px;
+}
+.startstopbtn {
+ margin-right: auto;
+ display: flex;
+ width: 80%;
+ justify-content: right;
+ padding: 5px;
+}
+.sensorName {
+ width: 20%;
+}
+
+.card4 {
+ max-width: 100%;
+ padding-top: 2%;
+}
+.card5 {
+ max-width: 100%;
+ padding-top: 2%;
+}
+.row {
+ display: flex;
+ flex-direction: row;
+ padding: 12px;
+ min-height: 45vh;
+}
+.row1 {
+ display: flex;
+ flex-direction: row;
+ padding: 12px;
+ min-height: 40vh;
+}
+
+.uploadHeading{
+ font-weight: bold;
+ font-size: 30px;
+ display: flex;
+ justify-content: center;
+}
+div.box2 {
+    height: 200px;
+    display: flex;
+    flex-direction: column;
+    overflow: auto;
+}
+.box2::-webkit-scrollbar {
+    display: none;
+}
+.permSensorHeading {
+ font-weight: bold;
+ font-size: 30px;
+ display: flex;
+ justify-content: center;
+ margin-bottom: 10px;
+}
+
+.uploadbtndiv {
+ display: flex;
+ justify-content: center;
+ padding: 30px;
+}
+
+.cardHeader {
+ display: flex;
+}
+
+.buttonUpload {
+ background-color: darkslategray;
+ color: white;
+ width: 250px;
+ display: flex;
+ justify-content: center;
+ font-weight: bold;
+ border-radius: 5px;
+ height: 70px;
+ padding-top: 3%;
+ font-size: 20px;
+ transition: 0.5s;
+}
+.buttonUpload:hover {
+ background-color: white;
+ color: darkslategray;
+ height: 75px;
+ width: 260px;
+ transition: 0.5s;
+}
+
+.btn1
+{
+ background-color: darkslategray;
+ color: white;
+ width: 100%;
+}
+
+.startbtn
+{
+ background-color: darkgreen;
+ color: white;
+ width: 100%;
+}
+.stopbtn
+{
+ background-color: red;
+ color: white;
+ width: 100%;
+}
+.heading {
+ display: flex;
+ justify-content: center;
+}
+.card1 {
+ margin-top: 10%;
+}
+
+.welcomeheading{
+ font-weight: bold;
+ display: flex;
+ justify-content: center;
+ margin: 0;
+}
+
+.btn2 {
+ margin-right: 5px;
+}
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/static/style.css b/Bootstrapper/IAS_G4/Authentication/static/style.css
new file mode 100644
index 0000000..da31e18
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/static/style.css
@@ -0,0 +1,20 @@
+body {
+ background-color: darkslategray;
+
+}
+.card {
+ margin-top: 20%;
+}
+.btn
+{
+ background-color: darkslategray;
+ color: white;
+ width: 100%;
+}
+.heading {
+ display: flex;
+ justify-content: center;
+}
+.card1 {
+ margin-top: 10%;
+}
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/templates/developerHome.html b/Bootstrapper/IAS_G4/Authentication/templates/developerHome.html
new file mode 100644
index 0000000..2a22e5e
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/templates/developerHome.html
@@ -0,0 +1,124 @@
+<!-- developerHome.html: template markup was lost in extraction; recoverable text: -->
+<!-- title "HackaToons IOT Platform"; headings "Welcome Platform Admin", -->
+<!-- "Upload Zip File", "Sensors with Permission" -->
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/templates/error.html b/Bootstrapper/IAS_G4/Authentication/templates/error.html
new file mode 100644
index 0000000..9cb05e7
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/templates/error.html
@@ -0,0 +1,39 @@
+<!-- error.html: template markup was lost in extraction; recoverable text: -->
+<!-- title "Flask LDAP3 Authentication Example"; heading "Authentication Failed"; -->
+<!-- body "Well! You couldn't pass through the authentication," and -->
+<!-- "Authentication failed due to {{ error_message }}" -->
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/templates/login.html b/Bootstrapper/IAS_G4/Authentication/templates/login.html
new file mode 100644
index 0000000..83efbce
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/templates/login.html
@@ -0,0 +1,38 @@
+<!-- login.html: template markup was lost in extraction; recoverable text: -->
+<!-- title "HackaToons IOT Platform" (renders the LoginValidation form passed in from authentication.py) -->
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/templates/platformAdminHome.html b/Bootstrapper/IAS_G4/Authentication/templates/platformAdminHome.html
new file mode 100644
index 0000000..29e6efc
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/templates/platformAdminHome.html
@@ -0,0 +1,137 @@
+<!-- platformAdminHome.html: template markup was lost in extraction; recoverable text: -->
+<!-- title "HackaToons IOT Platform"; heading "Welcome Platform Admin" -->
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/templates/success.html b/Bootstrapper/IAS_G4/Authentication/templates/success.html
new file mode 100644
index 0000000..22e1463
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/templates/success.html
@@ -0,0 +1,36 @@
+<!-- success.html: template markup was lost in extraction; recoverable text: -->
+<!-- title "Flask LDAP3 Authentication Example"; heading "Authentication Success"; -->
+<!-- body "Well! You have passed through the authentication." and -->
+<!-- "Authentication Success - {{ success_message }}" -->
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Authentication/views/__pycache__/global_ldap_authentication.cpython-310.pyc b/Bootstrapper/IAS_G4/Authentication/views/__pycache__/global_ldap_authentication.cpython-310.pyc
new file mode 100644
index 0000000..2cb4cac
Binary files /dev/null and b/Bootstrapper/IAS_G4/Authentication/views/__pycache__/global_ldap_authentication.cpython-310.pyc differ
diff --git a/Bootstrapper/IAS_G4/Authentication/views/global_ldap_authentication.py b/Bootstrapper/IAS_G4/Authentication/views/global_ldap_authentication.py
new file mode 100644
index 0000000..f3f5ef4
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Authentication/views/global_ldap_authentication.py
@@ -0,0 +1,43 @@
+from ldap3 import Server, Connection, ALL
+import ssl
+
+
+def global_ldap_authentication(user_name, user_pwd):
+ """
+ Function: global_ldap_authentication
+    Purpose: Bind to the LDAP server with the supplied credentials.
+    :params: ** Mandatory Positional Parameters
+    1. user_name - LDAP user name
+    2. user_pwd - LDAP user password
+    :return: "Success" on a successful bind, otherwise a failure message
+ """
+
+ # fetch the username and password
+ ldap_user_name = user_name.strip()
+ ldap_user_pwd = user_pwd.strip()
+ # tls_configuration = Tls(validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLSv1_2)
+ # ldap server hostname and port
+    ldap_server = "ldap://localhost:389"
+
+ # dn
+ root_dn = "dc=example,dc=org"
+
+ # user
+ user = f'cn={ldap_user_name},{root_dn}'
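+    # e.g. user "alice" binds with the DN "cn=alice,dc=example,dc=org"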
+
+ print("LDAP USER == ",user)
+    server = Server(ldap_server, get_info=ALL)
+ print(f" *** Server \n{server}" )
+ connection = Connection(server,
+ user=user,
+ password=ldap_user_pwd,
+ )
+ print(f" *** Response from the ldap bind is \n{connection}" )
+ if not connection.bind():
+ print(f" *** Cannot bind to ldap server: {connection.last_error} ")
+ l_success_msg = f' ** Failed Authentication: {connection.last_error}'
+ else:
+ print(f" *** Successful bind to ldap server")
+ l_success_msg = 'Success'
+
+ return l_success_msg
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Deployer.sh b/Bootstrapper/IAS_G4/Deployer.sh
new file mode 100644
index 0000000..30fd3bd
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Deployer.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- Deployer -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying Deployer.........."
+printf "\n\n"
+
+path="./Deployer"
+cd ${path}
+echo $1 | sudo -S docker build . -t deployer:latest;
+echo $1 | sudo -S docker run deployer
+
diff --git a/Bootstrapper/IAS_G4/FaultTolerance.sh b/Bootstrapper/IAS_G4/FaultTolerance.sh
new file mode 100644
index 0000000..d687b03
--- /dev/null
+++ b/Bootstrapper/IAS_G4/FaultTolerance.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- FaultTolerance -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying FaultTolerance.........."
+printf "\n\n"
+
+path="./FaultTolerance"
+cd ${path}
+echo $1 | sudo -S docker build . -t faulttolerance:latest;
+echo $1 | sudo -S docker run faulttolerance
+
diff --git a/Bootstrapper/IAS_G4/Monitoring.sh b/Bootstrapper/IAS_G4/Monitoring.sh
new file mode 100644
index 0000000..2ee4bcf
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Monitoring.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- Monitoring -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying Monitoring.........."
+printf "\n\n"
+
+path="./Monitoring"
+cd ${path}
+echo $1 | sudo -S docker build . -t monitoring:latest;
+echo $1 | sudo -S docker run monitoring
+
diff --git a/Bootstrapper/IAS_G4/NodeManager.sh b/Bootstrapper/IAS_G4/NodeManager.sh
new file mode 100644
index 0000000..a7fcc9d
--- /dev/null
+++ b/Bootstrapper/IAS_G4/NodeManager.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- NodeManager -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying NodeManager.........."
+printf "\n\n"
+
+path="./NodeManager"
+cd ${path}
+echo $1 | sudo -S docker build . -t nodemanager:latest;
+echo $1 | sudo -S docker run nodemanager
+
diff --git a/Bootstrapper/IAS_G4/Scheduler.sh b/Bootstrapper/IAS_G4/Scheduler.sh
new file mode 100644
index 0000000..93f374e
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Scheduler.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- Scheduler -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying Scheduler.........."
+printf "\n\n"
+
+path="./Scheduler"
+cd ${path}
+echo $1 | sudo -S docker build . -t scheduler:latest;
+echo $1 | sudo -S docker run scheduler
+
diff --git a/Bootstrapper/IAS_G4/Scheduler/Dockerfile b/Bootstrapper/IAS_G4/Scheduler/Dockerfile
new file mode 100644
index 0000000..f2cbc35
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Scheduler/Dockerfile
@@ -0,0 +1,8 @@
+FROM alpine:latest
+
+RUN apk update && apk add python3 py3-pip curl unzip
+COPY requirements.txt ./home/requirements.txt
+ADD Scheduler.py ./home
+RUN cd home && pip3 install --no-cache-dir -r /home/requirements.txt
+
+ENTRYPOINT python3 -u /home/Scheduler.py
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Scheduler/Scheduler.py b/Bootstrapper/IAS_G4/Scheduler/Scheduler.py
new file mode 100644
index 0000000..6665c2b
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Scheduler/Scheduler.py
@@ -0,0 +1,354 @@
+import schedule
+import time
+import threading
+from flask import Flask,request,jsonify
+import random
+import json
+import requests
+import argparse
+from datetime import datetime
+import pickle
+app = Flask(__name__)
+
+service_life_cycle_ip = "10.2.132.235"
+service_life_cycle_port = 8888
+Myport = 5053
+
+schedules = []
+
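+# Each queued entry below is {'service_id': str, 'request': {...}}; the request fields drive
+# StartSchedulling: Repeat ("True"/"False"), day (weekday name or None), Schedule_time and
+# Stop_time ("HH:MM:SS"), priority (1 = start immediately), period (seconds between repeats).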
+schedules_ = [{'service_id' : '1', 'request' : {"username": "A",
+ "application_id": "100",
+ "servicename": "XYZ",
+ "Repeat": "False",
+ "day": "sunday",
+ "Schedule_time":"14:10:00",
+ "Stop_time":"13:00:00",
+ "priority":0,
+ "period":"5"}},
+ {'service_id' : '2', 'request' : {"username": "B",
+ "application_id": "100",
+ "servicename": "XYZ",
+ "Repeat": "False",
+ "day": None,
+ "Schedule_time":"12:00:00",
+ "Stop_time":"12:00:20",
+ "priority":1,
+ "period":"2"}},
+ {'service_id' : '3', 'request' : {"username": "C",
+ "application_id": "100",
+ "servicename": "XYZ",
+ "Repeat": "False",
+ "day": None,
+ "Schedule_time":"14:14:00",
+ "Stop_time":"12:00:20",
+ "priority":0,
+ "period":"2"}},
+ {'service_id' : '5', 'request' : {"username": "D",
+ "application_id": "100",
+ "servicename": "XYZ",
+ "Repeat": "True",
+ "day": "sunday",
+ "Schedule_time":"14:23:00",
+ "Stop_time":"19:03:00",
+ "priority":0,
+ "period":"2",
+ "end":"saturday"}}]
+started = {'key' : 'Jagdish', 'name' : 'Jagdish Pathak',
+'age' : 50, 'pay' : 50000}
+
+# database
+db1 = {}
+db1['schedules'] = schedules
+db1['started'] = started
+
+# For storing
+b = pickle.dumps(db1)
+
+DUMPING_DELAY_IN_3_SECS = 1
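+# e.g. time_add("14:50", 25) -> "15:15": adds minutes to an "HH:MM[:SS]" string, wrapping past midnight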
+def time_add(time,minutes_to_add) :
+ hr = int(str(time).split(":")[0])
+ mn = int(str(time).split(":")[1])
+ mn = (mn+minutes_to_add)
+ hr = (hr + int(mn/60))%24
+ mn=mn%60
+ hr = str(hr)
+ mn = str(mn)
+ if(len(hr)==1):
+ hr="0"+hr
+ if(len(mn)==1):
+ mn="0"+mn
+ return hr+":"+mn
+
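+# Scheduling modes dispatched from StartSchedulling below:
+#   run_service1 - start now, schedule the stop at Stop_time
+#   run_service2 - start once on the given weekday at Schedule_time
+#   run_service3 - start every day at Schedule_time
+#   run_service4 - restart every `period` seconds
+#   run_service5 - start weekly on the given day at Schedule_time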
+class Scheduler:
+ def __init__(self) -> None:
+ self.job_dict = {}
+ self.main_service_id_dict={}
+ self.single_instances ={} #
+ self.started = {} #done
+ self.loop_schedules=[] #done
+ self.main_id_sch_id={}
+ pass
+
+ def pending_jobs(self):
+ while True:
+ schedule.run_pending()
+ time.sleep(10)
+ def send_request_to_service_life_cyle(self,username,application_id,servicename,service_instance_id,type_):
+ # print(username,application_id,servicename,service_instance_id,self.main_service_id_dict[service_instance_id])
+ response = {"username":username,"applicationname":application_id,"servicename":servicename,"serviceId":self.main_service_id_dict[service_instance_id]}
+ # print(response)
+ if type_=="start":
+ # print("start",response)
+ print('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test')
+ # res = requests.post('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test', json=json.dumps(response))
+ else:
+ print("stop",response)
+ # res = requests.post('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test', json=json.dumps(response))
+
+ def getInfo(self):
+ # dbfile = open("/home/sch_data.pickle","rb")
+ db = pickle.loads(b)
+ # print(db)
+ schedules_ = db["schedules"]
+ started = db["started"]
+ return schedules_ , started
+ pass
+ def run(self):
+ t1 = threading.Thread(target=self.pending_jobs)
+ t1.start()
+ def exit_service(self,service_instance_id):
+ service_instance_id,username,application_id,servicename = service_instance_id[0],service_instance_id[1],service_instance_id[2],service_instance_id[3]
+ print("+MSG TO SLCM TO STOP \t\t",service_instance_id)
+ #send request to service life cycle manager to cancel service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"stop")
+ # print(self.started)
+ del self.started[service_instance_id]
+ print(self.job_dict[service_instance_id])
+ schedule.cancel_job(self.job_dict[service_instance_id])
+ # del self.job_dict[service_instance_id]
+ def exit_service_parent(self,job_id):
+ print("jj - ",job_id)
+ schedule.cancel_job(job_id)
+ return schedule.CancelJob
+ def run_service(self,service_detail):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":end
+ }
+ self.started[service_instance_id]=data
+ job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ self.job_dict[service_instance_id]=job_id
+
+ def run_service_period(self,service_detail):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+
+ now = datetime.now()
+ current_time = now.strftime("%H:%M")
+ Stop_time = time_add(current_time,int(end))
+
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":Stop_time
+ }
+ self.started[service_instance_id]=data
+
+ job_id = schedule.every().day.at(Stop_time).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ self.job_dict[service_instance_id]=job_id
+
+ def run_service1(self,service_detail):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":end
+ }
+ self.started[service_instance_id]=data
+ if(service_instance_id in self.single_instances.keys()):
+ del self.single_instances[service_instance_id]
+ job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ try:
+ if(self.job_dict[service_instance_id]):
+ # print("here")
+ schedule.cancel_job(self.job_dict[service_instance_id])
+ except:
+ pass
+ self.job_dict[service_instance_id]=job_id
+ pass
+
+ def run_service2(self,Schedule_time,day,service_detail):
+ service_instance_id = service_detail[4]
+ self.single_instances[service_instance_id]=request_
+ job_id = None
+ if(day=="monday"):
+ job_id = schedule.every().monday.at(Schedule_time).do( self.run_service1,(service_detail))
+ elif(day=="tuesday"):
+ job_id = schedule.every().tuesday.at(Schedule_time).do( self.run_service1,(service_detail))
+ elif(day=="wednesday"):
+ job_id = schedule.every().wednesday.at(Schedule_time).do( self.run_service1,(service_detail))
+ elif(day=="thursday"):
+ job_id = schedule.every().thursday.at(Schedule_time).do( self.run_service1,(service_detail))
+ elif(day=="friday"):
+ job_id = schedule.every().friday.at(Schedule_time).do( self.run_service1,(service_detail))
+ elif(day=="saturday"):
+ job_id = schedule.every().saturday.at(Schedule_time).do( self.run_service1,(service_detail))
+ else:
+ job_id = schedule.every().sunday.at(Schedule_time).do( self.run_service1,(service_detail))
+ self.job_dict[service_instance_id]=job_id
+ pass
+
+ def run_service3(self,Schedule_time,day,service_detail):
+ service_instance_id = service_detail[4]
+ self.single_instances[service_instance_id]=request_
+ job_id = schedule.every().day.at(Schedule_time).do( self.run_service1,(service_detail))
+ self.job_dict[service_instance_id]=job_id
+ pass
+
+ def run_service4(self,period,end_time,service_detail):
+ service_instance_id = service_detail[4]
+ self.loop_schedules.append({"service_id":service_instance_id,"request": request_})
+ interval = int(period)
+ end = end_time
+ # print(interval)
+ job_id = schedule.every(interval).seconds.do( self.run_service_period,(service_detail))
+ self.job_dict[service_instance_id]=job_id
+ pass
+
+ def run_service5(self,Schedule_time,day,service_detail):
+ service_instance_id = service_detail[4]
+ self.loop_schedules.append({"service_id":service_instance_id,"request": request_})
+ if(day=="monday"):
+ job_id = schedule.every().monday.at(Schedule_time).do( self.run_service,(service_detail))
+ elif(day=="tuesday"):
+ job_id = schedule.every().tuesday.at(Schedule_time).do( self.run_service,(service_detail))
+ elif(day=="wednesday"):
+ job_id = schedule.every().wednesday.at(Schedule_time).do( self.run_service,(service_detail))
+ elif(day=="thursday"):
+ job_id = schedule.every().thursday.at(Schedule_time).do( self.run_service,(service_detail))
+ elif(day=="friday"):
+ job_id = schedule.every().friday.at(Schedule_time).do( self.run_service,(service_detail))
+ elif(day=="saturday"):
+ job_id = schedule.every().saturday.at(Schedule_time).do( self.run_service,(service_detail))
+ else:
+            job_id = schedule.every().sunday.at(Schedule_time).do( self.run_service,(service_detail))
+ print("jj1 - ",job_id)
+ job_id1 = schedule.every().day.at('12:10').do(self.exit_service_parent,(job_id))
+ pass
+ def StartSchedulling(self,request_,s_id=None):
+ username = request_["username"]
+ application_id = request_["application_id"]
+ servicename = request_["servicename"]
+ repeat = request_["Repeat"]
+ day = request_["day"]
+ Schedule_time = request_["Schedule_time"]
+ end = request_["Stop_time"]
+ period = request_["period"]
+ priority = request_["priority"]
+ main_service_id = username+"_"+application_id+"_"+servicename
+
+ service_instance_id = s_id
+
+ if service_instance_id is None:
+ service_instance_id=username+"_"+application_id+"_"+servicename+"_"+str(random.randrange(10000))
+
+ self.main_service_id_dict[service_instance_id]=main_service_id
+ self.main_id_sch_id[main_service_id] = service_instance_id
+
+ result = "OK"
+ if(str(repeat)=="False"):
+ # print("single instance ",bool(repeat))
+ if(priority==1 and day is None):
+ print("1")
+ self.run_service1((username,application_id,servicename,end,s_id))
+ elif day is not None and priority!=1:
+ print("2")
+ self.run_service2(Schedule_time,day,(username,application_id,servicename,end,s_id))
+ else:
+ print("3")
+ self.run_service3(Schedule_time,day,(username,application_id,servicename,end,s_id))
+ elif day is None and period is not None:
+ print("4")
+ self.run_service4(period,end,(username,application_id,servicename,end,s_id))
+ elif day is not None:
+ print("5")
+ self.run_service5(Schedule_time,day,(username,application_id,servicename,end,s_id))
+ else:
+ result = "ERROR : wrong scheduling format"
+ return result,s_id
+ pass
+
+ def stop_all_started_at_their_Stop_time(self):
+ # for key in self.started.keys():
+ # service_instance_id,username,application_id,service_name,end = self.started[key]["service_id"],self.started[key]["username"],self.started[key]["application_id"],self.started[key]["service_name"],self.started[key]["end"]
+ # job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,service_name)))
+ # self.job_dict[service_instance_id]=job_id
+ # # del self.started[service_instance_id]
+ # self.main_service_id_dict[service_instance_id] = username+"_"+application_id+"_"+service_name
+ # self.main_id_sch_id[username+"_"+application_id+"_"+service_name]=service_instance_id
+ pass
+
+@app.route('/schedule_service', methods=['GET', 'POST'])
+def schedule_service():
+ content = request.get_json()
+
+ res = "OK"
+ # print(content)
+ # print(type(content))
+ if(content["action"]=="Stop"):
+ id = 1
+ print("+MSG TO SLCM TO STOP ",id)
+
+ else:
+ if(content["action"]=="Start"):
+ print("start")
+
+ return {"result":res}
+
+def get_schedules():
+ url = 'http://10.2.128.235:5000/get'
+ while(True):
+ response = requests.get(url)
+ s1 = json.loads(response.content)
+ if(s1):
+ schedules_.append(s1)
+ else:
+ time.sleep(5)
+
+# t1 = threading.Thread(target = get_schedules)
+# t1.start()
+
+sch = Scheduler()
+sch.run()
+schedules1_ , started = sch.getInfo()
+sch.loop_schedules = schedules1_
+sch.started = started
+while(True):
+    if(schedules_):
+        schedule2 = schedules_.pop(0)
+        service_id = schedule2["service_id"]
+        request_ = schedule2["request"]
+        # print(schedule2)
+        sch.StartSchedulling(request_,service_id)
+    else:
+        time.sleep(1)
+
+
+sch.stop_all_started_at_their_Stop_time()
+# t2 = threading.Thread(target=dumping_thread)
+# t2.start()
+# app.run(debug=False,host="0.0.0.0",port=int(Myport))
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/Scheduler/requirements.txt b/Bootstrapper/IAS_G4/Scheduler/requirements.txt
new file mode 100644
index 0000000..ea77e70
--- /dev/null
+++ b/Bootstrapper/IAS_G4/Scheduler/requirements.txt
@@ -0,0 +1,5 @@
+kafka-python
+sockets
+schedule
+flask
+requests
\ No newline at end of file
diff --git a/Bootstrapper/IAS_G4/SensorRegistry.sh b/Bootstrapper/IAS_G4/SensorRegistry.sh
new file mode 100644
index 0000000..4784f46
--- /dev/null
+++ b/Bootstrapper/IAS_G4/SensorRegistry.sh
@@ -0,0 +1,12 @@
+printf "\n\n"
+printf \"********************************************************\"; \
+echo ---------------------- SensorRegistry -------------------------; \
+printf \"********************************************************\"; \
+echo "Deploying SensorRegistry.........."
+printf "\n\n"
+
+path="./SensorRegistry"
+cd ${path}
+echo $1 | sudo -S docker build . -t sensorregistry:latest;
+echo $1 | sudo -S docker run sensorregistry
+
diff --git a/Bootstrapper/IAS_G4/ias_key.pem b/Bootstrapper/IAS_G4/ias_key.pem
new file mode 100644
index 0000000..3f84666
--- /dev/null
+++ b/Bootstrapper/IAS_G4/ias_key.pem
@@ -0,0 +1,39 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIG5QIBAAKCAYEA7a/Ja+Vh7GhLkqfQ975t3hZMatuxKtzGnvbiaCsbjKZTebFg
+5e4AI4cktulJAFF1mq9E+gRVhB7Djii7Gojd8GbVqOc/sE+9j9++3T2qnfaTnTCK
+JdSqpEkEq+y2WhViima7PUl3GAljOeKgCylS57rJ41EvIx0h/pDfXpGbcGJip1+m
+KpeFENi4xY8bilyxB3HPzNGMvSvLFmurBzFEBt3asMwjm8Ylg3K3vi/zABHZZWR7
+XtqtdHRkGqVtMT5EP+DtoWLvvPS7sPXtlonDApKqQTYqjSaoHCVCDwbSDp8zuksb
+Drsl2CMA1u3TSbtP1YHXmT3Y0GkbNXwI4etyrJZ7FjdqmsFvJqDh3caFbw3AbCu2
+K16BwRLiaWxuNRzsWHoGcN7DF4lnvjS//rgJoO7QH5GqjUFtC/X0+sDemsV2Cn86
+G03GfbwkF+oPAR9qU1se7SM9087xHBEZwNEZVA2lTnbv79hrgr5UxZZlEB7wLmBw
+w6vSwz9nsovYRPXlAgMBAAECggGBAOzZUKOdzjifrT8Uzx0wu5alLb0F8sO9gIcD
+iZU375n2XBRV9zLoOWct0h4a1U3l3oUnxlpFW1nutfvBBsFoRZC7SImo9kQhEcp+
+sVkRjCUggvQOXHtaMjjHMkqH9SZLTQiRWl+2B2ECachS1hyJLz4PWgKU8Z3R+AAO
+KSe5fOU+hQQ4QgMLPeQNsgHbmxwDRb0fYmy9G7ueGh/bZnAiXaFiDfxDT/B4QQV9
+mS+B5GnbX4Cz4mQhENuM5/vNgMOjQL+cEpjcnWemj2BRGD/nW2PQ4zPySj55sdCp
+04dp7jxIhcKdwl0t3keX/bSejd4izeUNcgSnA5kGcoWvQNN3BTn5Rrp0u5mg3Au+
+JdyknUxvZke5JHQk6SXidcyDT5EBwSM2zSYz5yIAV812ybofGVH3ile13AQTteTX
+eEz/YHQsfzXdwm4zqPG4Ww7ysjpnhKtyNk91aQ91+XEg7p9kB2E5TM6m9plhMcoK
+S/s1UpEyzSauOpUnyeZqA4/M4JIDOQKBwQD7POwlcjS041Gq7v3alfKvR22dp29L
+m7jw34T8eiZnNOGG/VMXhpViNEKJlxj7cFLwCB+2eejqYk4dzxo7ldKnqy4tREAg
+jPwf1Unw0xo9Lu7JAk5cmsmCtWMzmpX6xsyDSC6xcdrDEEj3nWfoTalKvGXjhtZs
+4M6bMClbcoRO5uWhyhsSuACSnkeVFsoy9BCmuLCTfLTXRNnhdO9Sw3W1tSWRFsBU
+9GloGG9F0rjw8QOOCzFK2la04PAgP03sucsCgcEA8jEcDN+7fjeZTaDLZk9DMVJw
+FvLpEfA54gpNew0ZY8rO2CZdkq4EHmHQYvnobQnwbGmcV+wpyf+TDWXsV4r9RbUB
+bWa0OZnyisH1qmEh0ubMFw4bGpB05Q+nlyAogiVR1yeqPYkoYBwZZfPuFEOeEpjR
+7fR4T/Oijeqdj3RvDvjmr9wTvHlNMUWXbXRQUWmEdBrCs8PGvSYafnC7qDU8bGDt
+R9bQ7345fSxUQcSrPd4glYgp9BngCf6ffFhgm9kPAoHBAIySWNrW3NC9RAutLxJn
+xeL2x0lnzqjyV3dGyUTvH5+tgeaixSQrpFfPP892YH8HKRCuqvfRnFXQbT7Wiyn9
+uWEhMXQYdP0BhWxWTcnWhB7prKeBh9DetpNQ5FGSVAp9wx5Ne/rC5pEwAnOxedsL
+4QY1BZlZ74359v/z4aAOIiN3o9Jzn9RIaeGfeBPeLYJ29v4h5oKPSywLlRFF+UZI
+oQrrcTqODU07d+LLkS7zKZF+eOu4LXthURRhq1mPsg0bIwKBwQCcCef4o205NLkg
+uqhRkl/IN0TghECFvjCM4fZpgVcKZHA/8vf2uQcOTBbmrfyBxBWSw7hnLVD/RBqK
+V1CeVn43mxcAVVtogTczDyQoUcxpBSoY8I2l38xwiFWBmMaAk2rXJEcavnO0+GWk
+ujR5+dauRKd8mDWrzVy98prxAiNLkjGOvEw68iw5wox9iKL20PT1mQg4qboxw4ru
+S9Ob3DDB4vD9h/m1U63WZ+khNoTdp2925/OfwcwGpHY8tvQHdSECgcA9i1EpimoT
+r044t0EigsxX85WCeBYBlmt+dL19j7fW2BEnVvRpAP+dZrdNFEUQeBhptzdZbkXu
+nJYRlEUJjYmFynfr07Fftqdk6IVUBjSNqJi864ethTzDoYuK6wfg1YcitbADBEQ2
+BcPu4AgCFCtRbnEx2wX7NcRaI/kR1FKOp9x0xwF6HJwMxMsPNhflQO75vGzIBRpx
+8L+ZGScdH1pJWoRkEZyeZLD2VsJe8PpCOkzIuO9du7X5DLrj5rBScyE=
+-----END RSA PRIVATE KEY-----
diff --git a/Bootstrapper/IAS_G4/install.sh b/Bootstrapper/IAS_G4/install.sh
new file mode 100644
index 0000000..4ce6762
--- /dev/null
+++ b/Bootstrapper/IAS_G4/install.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+pswd="1234"
+
+echo ""
+echo ""
+echo ""
+echo "**************************************************************************"
+echo "*************************** INSTALLING DOCKER ****************************"
+echo "**************************************************************************"
+echo ""
+echo ""
+echo ""
+# Update the package index
+echo ${pswd} | sudo -S apt-get update
+
+# Install packages to allow apt to use a repository over HTTPS
+echo ${pswd} | sudo -S apt-get install -y apt-transport-https ca-certificates curl gnupg lsb-release
+
+# Add Docker's official GPG key
+# cache sudo credentials first so the curl output is piped into gpg rather than being
+# consumed as the sudo password
+echo ${pswd} | sudo -S -v
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+# Add Docker repository to APT sources
+echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | echo ${pswd} | sudo -S tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+# Update the package index again
+echo ${pswd} | sudo -S apt-get update
+
+# Install the latest version of Docker
+echo ${pswd} | sudo -S apt-get install -y docker-ce docker-ce-cli containerd.io
+
+# Add the current user to the docker group to run docker without sudo
+echo ${pswd} | sudo -S usermod -aG docker $USER
+
+# Display the Docker version
+docker --version
+echo ""
+echo ""
+echo ""
+echo "**************************************************************************"
+echo "********************** DOCKER INSTALLED SUCCESSFULLY *********************"
+echo "**************************************************************************"
+echo ""
+echo ""
+echo ""
+echo "**************************************************************************"
+echo "************************ INSTALLING GNOME-TEMINAL ************************"
+echo "**************************************************************************"
+echo ""
+echo ""
+echo ""
+# Update the package index
+echo ${pswd} | sudo -S apt update
+
+# Install the GNOME Terminal package
+echo ${pswd} | sudo -S apt install -y gnome-terminal
+
+# Verify the installation
+gnome-terminal --version
+
+echo ""
+echo ""
+echo ""
+echo "**************************************************************************"
+echo "********************** GNOME-TEMINAL INSTALLED SUCCESSFULLY **************"
+echo "**************************************************************************"
+echo ""
+echo ""
+echo ""
diff --git a/Bootstrapper/bootstrapper.py b/Bootstrapper/bootstrapper.py
new file mode 100644
index 0000000..61158c0
--- /dev/null
+++ b/Bootstrapper/bootstrapper.py
@@ -0,0 +1,19 @@
+import os
+import time
+
+pswd="1234"
+
+#files=["install.sh", "Scheduler.sh", "Monitoring.sh", "ApplicationManager.sh", "ActionManager.sh", "Deployer.sh", "FaultTolerance.sh", #"NodeManager.sh", "SensorRegistry"]
+
+files=["Authentication.sh","Scheduler.sh"]
+
+print("Installing prerequisites.....\n")
+
+# os.system(f"echo {pswd} | sudo -S scp -i ./IAS_G4/ias_key.pem ./IAS_G4.zip anm8@20.193.144.28:/home/anm8")
+
+os.system(f"gnome-terminal --command=\"bash -c 'cd IAS_G4; echo {pswd} | sudo -S bash install.sh {pswd}; echo ; exec bash'\"")
+
+time.sleep(20)
+
+for i in files:
+ os.system(f"gnome-terminal --command=\"bash -c 'cd IAS_G4; echo {pswd} | sudo -S bash {i} {pswd}; echo ; exec bash'\"")
diff --git a/Scheduler/Dockerfile b/Scheduler/Dockerfile
new file mode 100644
index 0000000..69b30be
--- /dev/null
+++ b/Scheduler/Dockerfile
@@ -0,0 +1,9 @@
+FROM alpine:latest
+
+RUN apk update && apk add python3 py3-pip curl unzip
+COPY requirements.txt ./home/requirements.txt
+ADD scheduler.py ./home
+ADD heartBeat.py ./home
+RUN cd home && pip3 install --no-cache-dir -r /home/requirements.txt
+
+ENTRYPOINT python3 -u /home/scheduler.py
\ No newline at end of file
diff --git a/Scheduler/heartBeat.py b/Scheduler/heartBeat.py
new file mode 100644
index 0000000..3f4e1bd
--- /dev/null
+++ b/Scheduler/heartBeat.py
@@ -0,0 +1,36 @@
+from kafka import KafkaProducer
+import json
+import datetime
+from time import sleep
+import threading
+from dotenv import load_dotenv
+import os
+load_dotenv()
+# from kafkautilities import kafka_consume, kafka_produce
+
+kafkaIPPort = os.getenv('kafka_host')
+producer = KafkaProducer(bootstrap_servers=[kafkaIPPort or '10.2.135.69:9092'],
+ value_serializer=lambda v: json.dumps(v).encode('utf-8')
+ )
+
+# kafka_ip = "192.168.43.219"
+# kafka_port = "9092"
+
+
+def heart_beat(module_name):
+ while True:
+ curr_time = str(datetime.datetime.utcnow())
+ message = {
+ 'moduleName': module_name,
+ 'currentTime': curr_time
+ }
+ print("message : ", message)
+ # kafka_produce(kafka_ip, kafka_port, "module_heart_rate", message)
+ producer.send('module_heart_rate', message)
+ sleep(5)
+
+
+def monitor_thread(module_name):
+ t = threading.Thread(target=heart_beat, args=(module_name,))
+ t.daemon = True
+ t.start()
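+
+# Usage sketch from a service module (module name illustrative):
+#   from heartBeat import monitor_thread
+#   monitor_thread("scheduler")  # publishes {'moduleName', 'currentTime'} to 'module_heart_rate' every 5s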
diff --git a/Scheduler/requirements.txt b/Scheduler/requirements.txt
new file mode 100644
index 0000000..0ffbaa3
--- /dev/null
+++ b/Scheduler/requirements.txt
@@ -0,0 +1,6 @@
+kafka-python
+sockets
+schedule
+flask
+requests
+python-dotenv
\ No newline at end of file
diff --git a/Scheduler/scheduler.py b/Scheduler/scheduler.py
new file mode 100644
index 0000000..47c81dd
--- /dev/null
+++ b/Scheduler/scheduler.py
@@ -0,0 +1,445 @@
+from kafka import KafkaProducer
+from kafka import KafkaConsumer
+import schedule
+import time
+import threading
+from flask import Flask,request,jsonify
+import random
+import json
+import requests
+import argparse
+import datetime
+import pickle
+import os
+from dotenv import load_dotenv
+from heartBeat import heart_beat
+app = Flask(__name__)
+
+service_life_cycle_ip = "10.2.132.235"
+service_life_cycle_port = 8888
+Myport = 5053
+
+load_dotenv()
+kafka_host = os.getenv('kafka_host')
+# Create a Kafka producer
+producer = KafkaProducer(bootstrap_servers=[kafka_host or '10.2.135.69:9092'], value_serializer=lambda x: json.dumps(x).encode('utf-8'))
+# response = {"username":"username","applicationname":"application_id","servicename":"servicename","serviceId":"sid", "type":"start"}
+# producer.send("sch_dep", response)
+
+# producer = KafkaProducer(
+# bootstrap_servers=[kafka_host],
+# value_serializer=lambda m: json.dumps(m).encode('ascii'))
+ # for _,row in df.iterrows():
+# while True:
+ # call_sensor_instance
+# producer.send("sch_dep", key=None,value=response)
+# time.sleep(30)
+ # sleep(rate)
+
+# consumer = KafkaConsumer('sch_dep', bootstrap_servers=['192.168.137.51:19092'],
+# value_deserializer=lambda x: json.loads(x.decode('utf-8')))
+# while(1):
+# for m in consumer:
+# print(m)
+# time.sleep(10)
+
+# Produce a message to a topic and wait for acknowledgement
+topic = 'sch_dep'
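+# Messages published to 'sch_dep' (built in send_request_to_service_life_cyle below) look like:
+#   {"username": ..., "applicationname": ..., "servicename": ..., "serviceId": ..., "type": "start" | "stop"}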
+
+schedules = []
+schedules_ = []
+
+# schedules_ = [
+# {'service_id' : '1', 'request' : {"username": "A",
+# "application_id": "100",
+# "servicename": "XYZ",
+# "Repeat": "True",
+# "day": None,
+# "Schedule_time":"18:20:00",
+# "Stop_time":"18:22:00",
+# "priority":1,
+# "period":"30",
+# "duration":"20"}}
+
+ # ,
+ # {'service_id' : '2', 'request' : {"username": "B",
+ # "application_id": "101",
+ # "servicename": "aaa",
+ # "Repeat": "False",
+ # "day": None,
+ # "Schedule_time":"15:58:00",
+ # "Stop_time":"15:59:00",
+ # "priority":1,
+ # "period":"5"}}
+ # ,
+ # {'service_id' : '3', 'request' : {"username": "C",
+ # "application_id": "102",
+ # "servicename": "bbb",
+ # "Repeat": "False",
+ # "day": "sunday",
+ # "Schedule_time":"15:52:00",
+ # "Stop_time":"15:53:00",
+ # "priority":0,
+ # "period":"5"}}
+ # ,
+ # {'service_id' : '4', 'request' : {"username": "D",
+ # "application_id": "103",
+ # "servicename": "ccc",
+ # "Repeat": "False",
+ # "day": None,
+ # "Schedule_time":"15:54:00",
+ # "Stop_time":"15:55:00",
+ # "priority":0,
+ # "period":"5"}}
+ # ]
+started = {'key' : 'Jagdish', 'name' : 'Jagdish Pathak',
+'age' : 50, 'pay' : 50000}
+
+# database
+db1 = {}
+db1['schedules'] = schedules
+db1['started'] = started
+
+# For storing
+b = pickle.dumps(db1)
+
+DUMPING_DELAY_IN_3_SECS = 1
+# def time_add(time,sec_to_add) :
+# hr = int(str(time).split(":")[0])
+# mn = int(str(time).split(":")[1])
+# sc = int(str(time).split(":")[2])
+# sc = (sc+sec_to_add)
+# hr = (hr + mn+ int(sc/3600))%24
+# sc=sc%3600
+# hr = str(hr)
+# mn = str(mn)
+# if(len(hr)==1):
+# hr="0"+hr
+# if(len(mn)==1):
+# mn="0"+mn
+# return hr+":"+mn
+
+class Scheduler:
+ def __init__(self) -> None:
+ self.job_dict = {}
+ self.main_service_id_dict={}
+ self.single_instances ={} #
+ self.started = {} #done
+ self.loop_schedules=[] #done
+ self.main_id_sch_id={}
+ pass
+
+ def pending_jobs(self):
+ while True:
+ schedule.run_pending()
+ time.sleep(10)
+ def send_request_to_service_life_cyle(self,username,application_id,servicename,service_instance_id,type_):
+ # print(username,application_id,servicename,service_instance_id,self.main_service_id_dict[service_instance_id])
+ response = {"username":username,"applicationname":application_id,"servicename":servicename,"serviceId":self.main_service_id_dict[service_instance_id], "type":""}
+ # print(response)
+ if type_=="start":
+ print("start",response)
+ print('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test')
+ response['type'] = "start"
+ future = producer.send(topic, response)
+ result = future.get(timeout=60)
+ # res = requests.post('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test', json=json.dumps(response))
+ else:
+ print("stop",response)
+ response['type'] = "stop"
+ future = producer.send(topic, response)
+ result = future.get(timeout=60)
+ # res = requests.post('http://'+service_life_cycle_ip+':'+str(service_life_cycle_port)+'/test', json=json.dumps(response))
+
+ def getInfo(self):
+ # dbfile = open("/home/sch_data.pickle","rb")
+ db = pickle.loads(b)
+ # print(db)
+ schedules_ = db["schedules"]
+ started = db["started"]
+ return schedules_ , started
+ pass
+ def run(self):
+ t1 = threading.Thread(target=self.pending_jobs)
+ t1.start()
+
+ def exit_service(self,service_instance_id):
+ service_instance_id,username,application_id,servicename = service_instance_id[0],service_instance_id[1],service_instance_id[2],service_instance_id[3]
+ print("+MSG TO SLCM TO STOP \t\t",service_instance_id)
+ #send request to service life cycle manager to cancel service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"stop")
+ print(self.started)
+ del self.started[service_instance_id]
+ schedule.cancel_job(self.job_dict[service_instance_id])
+ # del self.job_dict[service_instance_id]
+
+ def exit_service_parent(self,job_id):
+ print("Removed - ",job_id)
+ schedule.cancel_job(job_id)
+ return schedule.CancelJob
+
+ def run_service(self,service_detail):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":end
+ }
+ self.started[service_instance_id]=data
+ job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ self.job_dict[service_instance_id]=job_id
+
+ def run_service_period(self,service_detail, duration):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+
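+        # each periodic run lives for `duration` seconds: stop time = now + duration, and the
+        # exit job below fires at that wall-clock time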
+ now = datetime.datetime.now()
+ current_time = now.strftime("%H:%M:%S")
+
+ print("curr: ", current_time)
+ print("duration: ", duration)
+
+ time_change = datetime.timedelta(seconds=int(duration))
+ Stop_time_1 = now + time_change
+ print("stop time:", Stop_time_1)
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":Stop_time_1
+ }
+ self.started[service_instance_id]=data
+
+ n11 = str(Stop_time_1)
+ n1 = n11[11:19]
+ job_id = schedule.every().day.at(n1).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ self.job_dict[service_instance_id]=job_id
+
+ def run_service1(self,service_detail):
+ username,application_id,servicename,end,service_instance_id = service_detail[0],service_detail[1],service_detail[2],service_detail[3],service_detail[4]
+ print("+MSG TO SLCM TO START \t\t",service_instance_id)
+ #send request to service life cycle manager to start service
+ self.send_request_to_service_life_cyle(username,application_id,servicename,service_instance_id,"start")
+ data = {
+ "service_id": service_instance_id,
+ "username":username,
+ "application_id":application_id,
+ "servicename":servicename,
+ "end":end
+ }
+ self.started[service_instance_id]=data
+ if(service_instance_id in self.single_instances.keys()):
+ del self.single_instances[service_instance_id]
+
+ print("Running everyday\n")
+ print("end: ", end)
+ print(type(end))
+ job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,servicename)))
+ try:
+ if(self.job_dict[service_instance_id]):
+ # print("here")
+ schedule.cancel_job(self.job_dict[service_instance_id])
+ except:
+ pass
+ self.job_dict[service_instance_id]=job_id
+ pass
+
+    def run_service2(self,Schedule_time,day,service_detail):
+        service_instance_id = service_detail[4]
+        # NOTE: relies on the module-level `request_` set by the main loop below
+        self.single_instances[service_instance_id] = request_
+        # schedule.every().<day> dispatch; unrecognised day names default to Sunday
+        valid_days = ("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
+        if day not in valid_days:
+            day = "sunday"
+        job_id = getattr(schedule.every(), day).at(Schedule_time).do(self.run_service1, service_detail)
+        self.job_dict[service_instance_id] = job_id
+
+    def run_service3(self,Schedule_time,day,service_detail):
+        service_instance_id = service_detail[4]
+        # NOTE: relies on the module-level `request_` set by the main loop below
+        self.single_instances[service_instance_id] = request_
+        job_id = schedule.every().day.at(Schedule_time).do(self.run_service1, service_detail)
+        self.job_dict[service_instance_id] = job_id
+
+    def run_service4(self,period,duration,end_time,service_detail):
+        service_instance_id = service_detail[4]
+        # NOTE: relies on the module-level `request_` set by the main loop below
+        self.loop_schedules.append({"service_id": service_instance_id, "request": request_})
+        interval = int(period)
+        # Re-run the service every `interval` seconds; each run lasts `duration` seconds
+        job_id = schedule.every(interval).seconds.do(self.run_service_period, service_detail, duration)
+        self.job_dict[service_instance_id] = job_id
+        # At end_time, a one-shot parent job cancels the periodic job
+        schedule.every().day.at(end_time).do(self.exit_service_parent, job_id)
+
+    def run_service5(self,Schedule_time,day,service_detail):
+        service_instance_id = service_detail[4]
+        # NOTE: relies on the module-level `request_` set by the main loop below
+        self.loop_schedules.append({"service_id": service_instance_id, "request": request_})
+        valid_days = ("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
+        if day not in valid_days:
+            day = "sunday"
+        job_id = getattr(schedule.every(), day).at(Schedule_time).do(self.run_service, service_detail)
+        self.job_dict[service_instance_id] = job_id
+
+ def StartSchedulling(self,request_,s_id=None):
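+        # Parse the schedule request and dispatch to the matching run_service*
+        # helper; returns (result, service_instance_id).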
+ username = request_["username"]
+ application_id = request_["application_id"]
+ servicename = request_["servicename"]
+ repeat = request_["Repeat"]
+ day = request_["day"]
+ Schedule_time = request_["Schedule_time"]
+ end = request_["Stop_time"]
+ period = request_["period"]
+ priority = request_["priority"]
+ duration = request_["duration"]
+ main_service_id = username+"_"+application_id+"_"+servicename
+
+        service_instance_id = s_id
+        if service_instance_id is None:
+            service_instance_id = username+"_"+application_id+"_"+servicename+"_"+str(random.randrange(10000))
+
+        self.main_service_id_dict[service_instance_id] = main_service_id
+        self.main_id_sch_id[main_service_id] = service_instance_id
+
+        # Pass the (possibly freshly generated) instance id onward, not the raw
+        # s_id, which may be None
+        service_detail = (username, application_id, servicename, end, service_instance_id)
+        result = "OK"
+        if(str(repeat)=="False"):
+            print("single instance ", repeat)
+            if(priority==1 and day is None): # every day
+                print("1")
+                self.run_service1(service_detail)
+            elif priority==1 and day is not None: # specific day
+                print("2")
+                self.run_service2(Schedule_time, day, service_detail)
+            elif priority!=1 and day is not None: # specific day
+                print("3")
+                self.run_service2(Schedule_time, day, service_detail)
+            else: # every day
+                print("4")
+                self.run_service1(service_detail)
+
+        elif day is None and period is not None: # every day, periodic
+            print("5")
+            self.run_service4(period, duration, end, service_detail)
+        # elif day is not None:
+        #     print("6")
+        #     self.run_service5(Schedule_time, day, service_detail)
+        else:
+            result = "ERROR : wrong scheduling format"
+        return result, service_instance_id
+
+ def stop_all_started_at_their_Stop_time(self):
+ # for key in self.started.keys():
+ # service_instance_id,username,application_id,service_name,end = self.started[key]["service_id"],self.started[key]["username"],self.started[key]["application_id"],self.started[key]["service_name"],self.started[key]["end"]
+ # job_id = schedule.every().day.at(end).do(self.exit_service,((service_instance_id,username,application_id,service_name)))
+ # self.job_dict[service_instance_id]=job_id
+ # # del self.started[service_instance_id]
+ # self.main_service_id_dict[service_instance_id] = username+"_"+application_id+"_"+service_name
+ # self.main_id_sch_id[username+"_"+application_id+"_"+service_name]=service_instance_id
+ pass
+
+# @app.route('/schedule_service', methods=['GET', 'POST'])
+# def schedule_service():
+# content = request.get_json()
+
+# res = "OK"
+# # print(content)
+# # print(type(content))
+# if(content["action"]=="Stop"):
+# id = 1
+# print("+MSG TO SLCM TO STOP ",id)
+
+# else:
+# if(content["action"]=="Start"):
+# print("start")
+
+# return {"result":res}
+
+# def get_schedules():
+# url = 'http://10.2.128.235:5000/get'
+# while(True):
+# response = requests.get(url)
+# s1 = json.loads(response.content)
+# if(s1):
+# schedules_.append(s1)
+# else:
+# time.sleep(5)
+
+# t1 = threading.Thread(target = get_schedules)
+# t1.start()
+
+
+# sch = []
+def get_schedules():
+    # Consume schedule messages from the 'appmgr_sch' topic and queue them for the main loop
+    for message in consumer:
+        schedules_.extend(message.value)
+        print("message", message.value)
+        print(message.topic, message.partition, message.offset, message.value)
+
+# Create a Kafka consumer
+consumer = KafkaConsumer('appmgr_sch', bootstrap_servers=['10.2.135.69:9092'],
+ value_deserializer=lambda x: json.loads(x.decode('utf-8')), auto_offset_reset='latest')
+
+
+t1 = threading.Thread(target = get_schedules)
+t1.start()
+
+#Heartbeat
+t2 = threading.Thread(target = heart_beat, args=("Scheduler",))
+t2.daemon = True
+t2.start()
+
+sch = Scheduler()
+sch.run()
+# schedules1_ , started = sch.getInfo()
+# Share the queue populated by the Kafka consumer thread
+sch.loop_schedules = schedules_
+# sch.started = started  # `started` is only defined if getInfo() above is restored
+while True:
+    if schedules_:
+        print(schedules_)
+        schedule2 = schedules_.pop(0)
+        print("schedule2", schedule2)
+        service_id = schedule2["service_id"]
+        request_ = schedule2["request"]
+        sch.StartSchedulling(request_, service_id)
+    else:
+        time.sleep(1)  # avoid busy-waiting while no schedules are queued
+
+
+# NOTE: unreachable while the loop above runs forever
+sch.stop_all_started_at_their_Stop_time()
+# t2 = threading.Thread(target=dumping_thread)
+# t2.start()
+# app.run(debug=False,host="0.0.0.0",port=int(Myport))
\ No newline at end of file
diff --git a/Sensors/README.md b/Sensors/README.md
new file mode 100644
index 0000000..7d81f04
--- /dev/null
+++ b/Sensors/README.md
@@ -0,0 +1 @@
+### Final version of the Sensors module by Team Hackatoons for the course Internals of Application Servers, Spring '23, IIIT Hyderabad
diff --git a/Sensors/app.py b/Sensors/app.py
new file mode 100644
index 0000000..d4e3ea4
--- /dev/null
+++ b/Sensors/app.py
@@ -0,0 +1,87 @@
+from apscheduler.schedulers.background import BackgroundScheduler
+from azure.storage.blob import BlobServiceClient, BlobClient, generate_blob_sas, BlobSasPermissions
+import config
+from datetime import datetime, timedelta
+from flask import render_template, request
+from models import Node
+import os
+from time import sleep
+
+
+app = config.connex_app
+app.add_api(config.basedir / 'swagger.yml')
+flask_app = app.app
+
+
+@app.route('/')
+def home():
+ config.db.create_all()
+ nodes = Node.query.all()
+ return render_template('home.html', nodes=nodes)
+
+
+@app.route('/stream')
+def stream():
+ '''
+ Generator Function that yields new lines in a file
+ '''
+ def generate():
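+        # Tail the log file: seek to the end, then yield each new line as it is written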
+ with open('sensormanager.log') as logfile:
+ logfile.seek(0, os.SEEK_END)
+ while True:
+ line = logfile.readline()
+ # sleep if file hasn't been updated
+ if not line:
+ sleep(0.1)
+ continue
+ yield line
+ return flask_app.response_class(generate(), mimetype='text/plain')
+
+
+'''
+Storage of log files in Azure Blob Storage
+'''
+def store_logs():
+ try:
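+        # NOTE: credentials are hard-coded for this prototype; they should really
+        # be loaded from configuration or environment variables.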
+ account_name = 'amanias'
+ account_key = 'VtqTdX+sG1/dkhqhHT80jXUGULqJKlBsn++AmB2NnPvefgjyGggJ3dIo8wNBOudu6EdQwicUSEqd+AStbhmWZg=='
+ container_name = 'sensormanager'
+ # create a client to interact with blob storage
+ connect_str = 'DefaultEndpointsProtocol=https;AccountName=' + account_name + ';AccountKey=' + account_key + ';EndpointSuffix=core.windows.net'
+ blob_service_client = BlobServiceClient.from_connection_string(connect_str)
+ # use the client to connect to the container
+ container_client = blob_service_client.get_container_client(container_name)
+ # get a list of all blob files in the container
+ blob_list = []
+ for blob_i in container_client.list_blobs():
+ blob_list.append(blob_i.name)
+ print("BLOB list : ", blob_list)
+ for blob_i in blob_list:
+ # generate a shared access signature for each blob file
+ sas_i = generate_blob_sas(account_name=account_name,
+ container_name=container_name,
+ blob_name=blob_i,
+ account_key=account_key,
+ permission=BlobSasPermissions(read=True),
+ expiry=datetime.utcnow() + timedelta(hours=1))
+ #sas_url = 'https://' + account_name + '.blob.core.windows.net/' + container_name + '/' + blob_i + '?' + sas_i
+ try:
+ file_path = 'sensormanager.log'
+ print("Inside Azure Log File Write: ", file_path)
+ with open(file=file_path, mode="rb") as data:
+ blob_client = container_client.upload_blob(name="sensormanager.log", data=data, overwrite=True)
+ print("Blob client : ", blob_client)
+ open(file_path, "w").close()
+ except Exception as e:
+ return f'Error reading file: {str(e)}'
+    except Exception as e:
+        print(e)
+        return
+    print('Data appended successfully')
+
+
+if __name__ == '__main__':
+ # Configure and start the scheduler
+ scheduler = BackgroundScheduler()
+ scheduler.add_job(store_logs, 'interval', minutes=1)
+ scheduler.start()
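+    # NOTE: with debug=True the Flask reloader may import this module twice and
+    # start a second scheduler; pass use_reloader=False if that becomes an issue.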
+ app.run(host='0.0.0.0', port=8040, debug=True)
diff --git a/Sensors/buildNodesDB.py b/Sensors/buildNodesDB.py
new file mode 100644
index 0000000..4105c82
--- /dev/null
+++ b/Sensors/buildNodesDB.py
@@ -0,0 +1,275 @@
+from config import app, db
+from datetime import datetime
+from dotenv import load_dotenv
+load_dotenv()
+from heartBeat import heart_beat
+import json
+from kafka import KafkaConsumer, KafkaProducer
+from kafka.errors import KafkaError
+import logging
+from models import Node, Parameters, parameters_schema
+import os
+import pickle
+from random import randint
+import requests
+import sys
+import threading
+from time import sleep
+
+
+#KAFKA_IP_PORT = '127.0.0.1:53471'
+KAFKA_IP_PORT = os.getenv('KAFKA_URI')
+
+
+'''
+Message on KAFKA Push success
+'''
+def onSuccess(metadata):
+ print(f"Message produced to topic '{metadata.topic}' at offset {metadata.offset}")
+
+
+'''
+Message on KAFKA Push Error
+'''
+def onError(e):
+ print(f"Error sending message: {e}")
+
+
+'''
+Initial filling of the latest instance of data for all the nodes
+'''
+def initializeAllNodes():
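+    # Query OM2M for every node of each sensor type, de-duplicate by Node ID, and
+    # store one row per node with a randomly assigned private IP and port.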
+ # The list of sensor-types is pre-decided
+ sensor_types = ['PM10', 'Temperature', 'AQI', 'AQL', 'pH', 'Pressure', 'Occupancy', \
+ 'Current', 'Frequency', 'Light_Status', 'Turbidity', 'Flowrate', 'Rain', \
+ 'Energy', 'Power', 'Voltage', 'CO2', 'VOC', 'RSSI', 'Latency', 'Alarm', 'Packet_Size', \
+ 'Data_Rate', 'Mac_Address', 'Node_Status']
+ node_names, node_latitudes, node_longitudes, node_types, node_ips, node_ports = [], [], [], [], [], []
+ unique_node_names = set()
+ for sensor_type in sensor_types:
+ om2m_url1 = 'https://iudx-rs-onem2m.iiit.ac.in/resource/nodes/' + sensor_type
+ node_list = requests.get(om2m_url1).json()['results']
+ for node in node_list:
+ om2m_url2 = 'https://iudx-rs-onem2m.iiit.ac.in/resource/descriptor/' + node
+ node_dict = requests.get(om2m_url2).json()
+ if 'Node ID' not in node_dict:
+ continue
+ node_name = node_dict['Node ID']
+ if node_name in unique_node_names:
+ continue
+ unique_node_names.add(node_name)
+ node_names.append(node_name)
+ node_latitudes.append(node_dict['Node Location']['Latitude'])
+ node_longitudes.append(node_dict['Node Location']['Longitude'])
+ node_types.append(sensor_type)
+ node_ip = '192.168.36.' + str(randint(10, 50))
+ node_ips.append(node_ip)
+ node_ports.append(randint(8000, 9000))
+ with app.app_context():
+ db.drop_all()
+ db.create_all()
+ for i in range(len(node_names)):
+ node_name = node_names[i]
+ node_type = node_types[i]
+ node_latitude = node_latitudes[i]
+ node_longitude = node_longitudes[i]
+ node_ip = node_ips[i]
+ node_port = node_ports[i]
+ new_node = Node(nodename=node_name, nodetype=node_type, nodelatitude=node_latitude,
+ nodelongitude=node_longitude, nodeip=node_ip, nodeport=node_port)
+ logging.info('Added new node to the SQLITE3 DB!')
+ db.session.add(new_node)
+ db.session.commit()
+
+
+'''
+Addition of Data to local SQLITE3 DB
+'''
+def addDataToDB():
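+    # Poll the local node API, fetch live readings from OM2M, apply any overrides
+    # received from the Action Manager over Kafka, then publish each reading to a
+    # per-node Kafka topic and persist it in the SQLITE3 DB.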
+ producer = KafkaProducer(bootstrap_servers=KAFKA_IP_PORT,
+ value_serializer=lambda m: json.dumps(m).encode('ascii'))
+ consumer = KafkaConsumer(bootstrap_servers=[KAFKA_IP_PORT], group_id="demo-group",
+ auto_offset_reset="earliest", enable_auto_commit=False,
+ consumer_timeout_ms=1000,
+ value_deserializer=lambda m: json.loads(m.decode('ascii')))
+ nodes = []
+ external_request = 'action_device' # Fixed by the Action Manager Module
+ action_manager_module_info = []
+ with app.app_context():
+ while True:
+ node_info = json.loads(requests.get('http://127.0.0.1:8040/api/nodes').text)
+            # Rebuild the node list each pass so it does not grow without bound
+            nodes = [(node['id'], node['nodename'], node['nodetype']) for node in node_info]
+ logging.info('Got list of all nodes currently present!')
+ for node_id, node_name, node_type in nodes:
+ kafka_topic = str(node_id)
+ dt_iso = datetime.now().isoformat()
+ dot_index = dt_iso.index('.')
+ dt_iso = dt_iso[:dot_index] + 'Z'
+ om2m_url = 'https://iudx-rs-onem2m.iiit.ac.in/channels/' + node_name + '/feeds?start=' + dt_iso
+ try:
+ node_data_dict = requests.get(om2m_url).json()
+ node_parameter_value = randint(20, 40)
+ if 'channel' in node_data_dict:
+ node_parameter_fields = node_data_dict['channel']
+ node_parameter_field = None
+ for node_parameter in node_parameter_fields:
+ if node_data_dict['channel'][node_parameter] == node_type:
+ node_parameter_field = node_parameter
+ break
+ temp_value = node_data_dict['feeds'][0][node_parameter_field]
+ if not isinstance(temp_value, str):
+ node_parameter_value = temp_value
+ node = db.session.get(Node, node_id)
+ consumer.subscribe(external_request)
+ # from the other team -> user_id, device_id, new_value
+ for msg in consumer:
+ logging.info('Message received from the Action Manager Module!')
+ user_id, nid, new_value = msg.value['user_id'], msg.value['device_id'], msg.value['new_value']
+ action_manager_module_info.append([int(nid), new_value])
+                    for i in range(len(action_manager_module_info)):
+                        if action_manager_module_info[i][0] == node_id:
+                            node_parameter_value = action_manager_module_info[i][1]
+ content_dict = {}
+ content_dict[node_type] = node_parameter_value
+ parameter = {'content' : str(content_dict), 'node_id' : node_id}
+ future = producer.send(kafka_topic, parameter)
+                    logging.info('Uploaded sensor node data to corresponding Kafka topic!')
+ future.add_callback(onSuccess)
+ future.add_errback(onError)
+ new_parameter = parameters_schema.load(parameter, session=db.session)
+ node.parameters.append(new_parameter)
+ db.session.commit()
+ logging.info('Uploaded sensor node data to SQLITE3 DB!')
+ sleep(2)
+                except Exception as e:
+                    logging.info('Erroneous data generated: %s', e)
+                    continue
+ producer.flush()
+ producer.close()
+
+
+'''
+Dummy Data for Initialization of Sensor Nodes
+'''
+def initializeAllDummyNodes():
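+    # Generate 10-15 dummy nodes per sensor type, each placed at a random
+    # pre-collected location with a random private IP and port.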
+ # The list of sensor-types is pre-decided
+ sensor_types = ['Temperature', 'AQI', 'pH', 'Pressure', 'Occupancy', 'Current', \
+ 'Rain', 'RoomEnergy', 'Power', 'Voltage', 'SolarEnergy']
+ type_name_mappings = {'Temperature' : 'WE-GS', 'AQI' : 'SR-AQ', 'pH' : 'WM-WD', \
+ 'Pressure' : 'WM-WF', 'Occupancy' : 'SR-OC-GW', 'Current' : 'SR-EC', \
+ 'Rain' : 'WE-VN', 'RoomEnergy' : 'SR-AC', 'Power' : 'SR-EP', \
+ 'Voltage' : 'SR-EV', 'SolarEnergy' : 'SR-OC'}
+    # The list of unique node locations is loaded from a static file that was
+    # pre-collected from the OM2M API, since the live API cannot be relied on.
+ all_node_locations = []
+ with open('sensorNodeLocations', 'rb') as fp:
+ all_node_locations = pickle.load(fp)
+ node_names, node_types, node_locations, node_ips, node_ports = [], [], [], [], []
+ for sensor_type in sensor_types:
+ num_sensor_nodes = randint(10, 15)
+ for node_index in range(num_sensor_nodes):
+            pos = str(node_index).zfill(2)  # zero-padded two-digit index
+ node_location = all_node_locations[randint(0, len(all_node_locations) - 1)]
+ node_name = type_name_mappings[sensor_type] + '-' + node_location + '-' + pos
+ node_names.append(node_name)
+ node_types.append(sensor_type)
+ node_locations.append(node_location)
+ node_ips.append('192.168.10.' + str(randint(10, 80)))
+ node_ports.append(randint(8100, 8900))
+ with app.app_context():
+ db.drop_all()
+ db.create_all()
+ for i in range(len(node_names)):
+ new_node = Node(nodename=node_names[i], nodetype=node_types[i], nodelocation=node_locations[i],
+ nodeip=node_ips[i], nodeport=node_ports[i])
+ logging.info('Added new sensor node to the SQLITE3 DB!')
+ db.session.add(new_node)
+ db.session.commit()
+
+
+'''
+Filling up of Dummy Data to the local SQLITE3 DB
+'''
+def addDummyDataToDB():
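+    # Same flow as addDataToDB(), but readings are generated locally instead of
+    # being fetched from OM2M.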
+ producer = KafkaProducer(bootstrap_servers=KAFKA_IP_PORT,
+ value_serializer=lambda m: json.dumps(m).encode('ascii'))
+ consumer = KafkaConsumer(bootstrap_servers=[KAFKA_IP_PORT], group_id="demo-group",
+ auto_offset_reset="earliest", enable_auto_commit=False,
+ consumer_timeout_ms=1000,
+ value_deserializer=lambda m: json.loads(m.decode('ascii')))
+ nodes = []
+ external_request = 'action_device' # Fixed by the Action Manager Module
+ action_manager_module_info = []
+ with app.app_context():
+ while True:
+ node_info = json.loads(requests.get('http://127.0.0.1:8040/api/nodes').text)
+            # Rebuild the node list each pass so it does not grow without bound
+            nodes = [(node['id'], node['nodename'], node['nodetype']) for node in node_info]
+ logging.info('Got list of all nodes currently present!')
+ for node_id, node_name, node_type in nodes:
+ kafka_topic = str(node_id)
+ dt_iso = datetime.now().isoformat()
+ try:
+ node_parameter_value = -1
+ if node_type == 'AQI':
+ node_parameter_value = randint(20, 26) # > 25
+ elif node_type == 'SolarEnergy':
+ node_parameter_value = randint(22, 38) # > 35
+ elif node_type == 'RoomEnergy':
+ node_parameter_value = randint(21, 28) # > 26
+ else:
+ node_parameter_value = randint(20, 40) # > 36
+ node = db.session.get(Node, node_id)
+ consumer.subscribe(external_request)
+ # from the other team -> user_id, device_id, new_value
+ for msg in consumer:
+ logging.info('Message received from the Action Manager Module!')
+ user_id, nid, new_value = msg.value['user_id'], msg.value['device_id'], msg.value['new_value']
+ action_manager_module_info.append([int(nid), new_value])
+                    for i in range(len(action_manager_module_info)):
+                        if action_manager_module_info[i][0] == node_id:
+                            node_parameter_value = action_manager_module_info[i][1]
+ content_dict = {}
+ content_dict[node_type] = node_parameter_value
+ parameter = {'content' : str(content_dict), 'node_id' : node_id}
+ future = producer.send(kafka_topic, parameter)
+                    logging.info('Uploaded sensor node data to corresponding Kafka topic!')
+ future.add_callback(onSuccess)
+ future.add_errback(onError)
+ new_parameter = parameters_schema.load(parameter, session=db.session)
+ node.parameters.append(new_parameter)
+ db.session.commit()
+ logging.info('Uploaded sensor node data to SQLITE3 DB!')
+ sleep(1)
+                except Exception as e:
+                    logging.info('Some unforeseen error: %s', e)
+                    continue
+ producer.flush()
+ producer.close()
+
+
+'''
+The controller function of the script that calls the desired functions
+'''
+def main():
+ module_name = 'sensor_manager'
+ t = threading.Thread(target=heart_beat, args=(module_name,))
+ t.daemon = True
+ t.start()
+ log = 'sensormanager.log'
+ logging.basicConfig(filename=log, filemode='w', level=logging.DEBUG, \
+ format='%(asctime)s %(message)s', datefmt='%d/%m/%Y %H:%M:%S')
+ #initializeAllNodes()
+ initializeAllDummyNodes()
+ logging.info('Initialized all the sensor nodes!')
+ #addDataToDB()
+ addDummyDataToDB()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/Sensors/config.py b/Sensors/config.py
new file mode 100644
index 0000000..32ef817
--- /dev/null
+++ b/Sensors/config.py
@@ -0,0 +1,15 @@
+import connexion
+from flask_marshmallow import Marshmallow
+from flask_sqlalchemy import SQLAlchemy
+import pathlib
+
+
+basedir = pathlib.Path(__file__).parent.resolve()
+connex_app = connexion.App(__name__, specification_dir=basedir)
+
+app = connex_app.app
+app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///{basedir / 'nodes.db'}"
+app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+
+db = SQLAlchemy(app)
+ma = Marshmallow(app)
diff --git a/Sensors/getSensorNodesLocations.py b/Sensors/getSensorNodesLocations.py
new file mode 100644
index 0000000..604d9bf
--- /dev/null
+++ b/Sensors/getSensorNodesLocations.py
@@ -0,0 +1,27 @@
+import pickle
+import requests
+
+
+'''
+The controller function of the script that gets all available node locations from the OM2M URL.
+'''
+def main():
+ nodes_url = 'https://iudx-rs-onem2m.iiit.ac.in/resource/nodes'
+ sensor_node_locations = set()
+ nodes_info = requests.get(nodes_url).json()['results']
+ for node_type in nodes_info:
+ node_list = nodes_info[node_type]
+ for node in node_list:
+            # The location code is the 4-character token in the hyphenated node name
+            for token in node.split('-'):
+                if len(token) == 4:
+                    sensor_node_locations.add(token)
+                    break
+ sensor_node_locations = list(sensor_node_locations)
+ with open('sensorNodeLocations', 'wb') as fp:
+ pickle.dump(sensor_node_locations, fp)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/Sensors/heartBeat.py b/Sensors/heartBeat.py
new file mode 100644
index 0000000..1b2f37b
--- /dev/null
+++ b/Sensors/heartBeat.py
@@ -0,0 +1,28 @@
+from kafka import KafkaProducer
+import json
+import datetime
+from time import sleep
+import threading
+
+
+kafkaIPPort = '10.2.138.158:19092'
+producer = KafkaProducer(bootstrap_servers=[kafkaIPPort],
+ value_serializer=lambda v: json.dumps(v).encode('utf-8'))
+
+
+def heart_beat(module_name):
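+    # Publish a heartbeat message for this module to the 'module_heart_rate'
+    # topic every five seconds.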
+ while True:
+ curr_time = str(datetime.datetime.utcnow())
+ message = {
+ 'moduleName': module_name,
+ 'currentTime': curr_time
+ }
+ print("message : ", message)
+ producer.send('module_heart_rate', message)
+ sleep(5)
+
+
+def monitor_thread(module_name):
+ t = threading.Thread(target=heart_beat, args=(module_name,))
+ t.daemon = True
+ t.start()
diff --git a/Sensors/models.py b/Sensors/models.py
new file mode 100644
index 0000000..4d71c89
--- /dev/null
+++ b/Sensors/models.py
@@ -0,0 +1,54 @@
+from config import db, ma
+from datetime import datetime
+from marshmallow_sqlalchemy import fields
+
+
+'''
+Parameters (id, node_id, content, timestamp)
+'''
+class Parameters(db.Model):
+    __tablename__ = 'parameters'
+ id = db.Column(db.Integer, primary_key=True)
+ node_id = db.Column(db.Integer, db.ForeignKey('node.id'))
+ content = db.Column(db.String, nullable=False)
+ timestamp = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+
+class ParametersSchema(ma.SQLAlchemyAutoSchema):
+ class Meta:
+ model = Parameters
+ load_instance = True
+ sqla_session = db.session
+ include_fk = True
+
+
+'''
+Node (id, name, type, location, ip, port)
+'''
+class Node(db.Model):
+ __tablename__ = 'node'
+ id = db.Column(db.Integer, primary_key=True)
+ nodename = db.Column(db.String(100), unique=True)
+ nodetype = db.Column(db.String(100))
+ nodelocation = db.Column(db.String(20))
+ nodeip = db.Column(db.String(32))
+ nodeport = db.Column(db.Integer)
+ timestamp = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+ parameters = db.relationship(Parameters, backref='node',
+ cascade='all, delete, delete-orphan',
+ single_parent=True, order_by='desc(Parameters.timestamp)')
+
+
+class NodeSchema(ma.SQLAlchemyAutoSchema):
+ class Meta:
+ model = Node
+ load_instance = True
+ sqla_session = db.session
+ include_relationships = True
+ parameters = fields.Nested(ParametersSchema, many=True)
+
+
+node_schema = NodeSchema()
+nodes_schema = NodeSchema(many=True)
+parameters_schema = ParametersSchema()
+all_parameters_schema = ParametersSchema(many=True)
diff --git a/Sensors/nodes.py b/Sensors/nodes.py
new file mode 100644
index 0000000..0f1efca
--- /dev/null
+++ b/Sensors/nodes.py
@@ -0,0 +1,85 @@
+from config import db
+from flask import abort, make_response
+import json
+from models import Node, nodes_schema, node_schema
+import pickle
+import requests
+
+
+def read_all():
+ nodes = Node.query.all()
+ return nodes_schema.dump(nodes)
+
+
+def create(node):
+ nodename = node.get("nodename")
+ existing_node = Node.query.filter(Node.nodename == nodename).one_or_none()
+ if existing_node is None:
+ new_node = node_schema.load(node, session=db.session)
+ db.session.add(new_node)
+ db.session.commit()
+ return node_schema.dump(new_node), 201
+ else:
+ abort(406, f"Node with nodename {nodename} already exists")
+
+
+def read_one(nodename):
+ node = Node.query.filter(Node.nodename == nodename).one_or_none()
+ if node is not None:
+ return node_schema.dump(node)
+ else:
+ abort(404, f"Node with nodename {nodename} not found")
+
+
+def update(nodename, node):
+ existing_node = Node.query.filter(Node.nodename == nodename).one_or_none()
+ if existing_node:
+ update_node = node_schema.load(node, session=db.session)
+ existing_node.nodelocation = update_node.nodelocation
+ existing_node.nodeip = update_node.nodeip
+ existing_node.nodeport = update_node.nodeport
+ db.session.merge(existing_node)
+ db.session.commit()
+ return node_schema.dump(existing_node), 201
+ else:
+ abort(404, f"Node with nodename {nodename} not found")
+
+
+def delete(nodename):
+ existing_node = Node.query.filter(Node.nodename == nodename).one_or_none()
+ if existing_node:
+ db.session.delete(existing_node)
+ db.session.commit()
+ return make_response(f"{nodename} successfully deleted", 200)
+ else:
+ abort(404, f"Node with nodename {nodename} not found")
+
+
+def check_for_available_nodes():
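+    # Group all known nodes by sensor type so callers can see what is available.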
+ nodes_info = json.loads(requests.get('http://127.0.0.1:8040/api/nodes').text)
+ free_nodes_dict = {}
+ for node in nodes_info:
+ node_type = node['nodetype']
+ free_node_content = {}
+ free_node_content['id'] = node['id']
+ free_node_content['nodename'] = node['nodename']
+ free_node_content['nodelocation'] = node['nodelocation']
+ if node_type not in free_nodes_dict:
+ free_nodes_dict[node_type] = list()
+ free_nodes_dict[node_type].append(free_node_content)
+ return free_nodes_dict
+
+
+def get_sensor_nodes_locations():
+ '''
+ sensor_node_locations = []
+ with open('sensorNodeLocations', 'rb') as fp:
+ sensor_node_locations = pickle.load(fp)
+ return sensor_node_locations
+ '''
+ nodes_info = json.loads(requests.get('http://127.0.0.1:8040/api/nodes').text)
+ locations = set()
+ for node in nodes_info:
+ locations.add(node['nodelocation'])
+ node_locations = list(locations)
+ return node_locations
diff --git a/Sensors/parameters.py b/Sensors/parameters.py
new file mode 100644
index 0000000..b23ed11
--- /dev/null
+++ b/Sensors/parameters.py
@@ -0,0 +1,50 @@
+from config import db
+from flask import abort, make_response
+from models import Parameters, Node, parameters_schema, all_parameters_schema
+
+
+def read_all():
+ parameters = Parameters.query.all()
+ return all_parameters_schema.dump(parameters)
+
+
+def read_one(parameter_id):
+ parameter = Parameters.query.get(parameter_id)
+ if parameter is not None:
+ return parameters_schema.dump(parameter)
+ else:
+ abort(404, f"Parameter with ID {parameter_id} not found")
+
+
+def update(parameter_id, parameter):
+ existing_parameter = Parameters.query.get(parameter_id)
+ if existing_parameter:
+ update_parameter = parameters_schema.load(parameter, session=db.session)
+ existing_parameter.content = update_parameter.content
+ db.session.merge(existing_parameter)
+ db.session.commit()
+ return parameters_schema.dump(existing_parameter), 201
+ else:
+ abort(404, f"Parameter with ID {parameter_id} not found")
+
+
+def delete(parameter_id):
+ existing_parameter = Parameters.query.get(parameter_id)
+ if existing_parameter:
+ db.session.delete(existing_parameter)
+ db.session.commit()
+ return make_response(f"{parameter_id} successfully deleted", 204)
+ else:
+ abort(404, f"Parameter with ID {parameter_id} not found")
+
+
+def create(parameter):
+ node_id = parameter.get('node_id')
+ node = Node.query.get(node_id)
+ if node:
+ new_parameter = parameters_schema.load(parameter, session=db.session)
+ node.parameters.append(new_parameter)
+ db.session.commit()
+ return parameters_schema.dump(new_parameter), 201
+ else:
+ abort(404, f"Node not found for ID: {node_id}")
diff --git a/Sensors/sensorNodeLocations b/Sensors/sensorNodeLocations
new file mode 100644
index 0000000..9ba0921
Binary files /dev/null and b/Sensors/sensorNodeLocations differ
diff --git a/Sensors/swagger.yml b/Sensors/swagger.yml
new file mode 100644
index 0000000..a1f862f
--- /dev/null
+++ b/Sensors/swagger.yml
@@ -0,0 +1,193 @@
+openapi: 3.0.0
+info:
+ title: "HACKATOONS Rest API"
+ description: "An API about sensors, nodes and their data"
+ version: "1.0.0"
+
+servers:
+ - url: "/api"
+
+components:
+ schemas:
+ Node:
+ type: "object"
+ required:
+ - nodename
+ properties:
+ nodename:
+ type: "string"
+ nodetype:
+ type: "string"
+ nodelocation:
+ type: "string"
+ nodeip:
+ type: "string"
+ nodeport:
+          type: "integer"
+ parameters:
+ nodename:
+ name: "nodename"
+ description: "Name of the Node to get"
+ in: path
+ required: True
+ schema:
+ type: "string"
+ parameter_id:
+ name: "parameter_id"
+ description: "ID of the node parameters"
+ in: path
+ required: True
+ schema:
+ type: "string"
+
+paths:
+ /nodes:
+ get:
+ operationId: "nodes.read_all"
+ tags:
+ - Nodes
+ summary: "Read the list of sensor nodes"
+ responses:
+ "200":
+ description: "Successfully read sensor nodes list"
+ post:
+ operationId: "nodes.create"
+ tags:
+ - Nodes
+ summary: "Create a sensor node"
+ requestBody:
+ description: "Sensor Node to create"
+ required: True
+ content:
+ application/json:
+ schema:
+ x-body-name: "node"
+ $ref: "#/components/schemas/Node"
+ responses:
+ "201":
+ description: "Successfully created sensor node"
+ /nodes/{nodename}:
+ get:
+ operationId: "nodes.read_one"
+ tags:
+ - Nodes
+ summary: "Read one sensor node"
+ parameters:
+ - $ref: "#/components/parameters/nodename"
+ responses:
+ "200":
+ description: "Successfully read sensor node"
+ put:
+ tags:
+ - Nodes
+ operationId: "nodes.update"
+ summary: "Update a sensor node"
+ parameters:
+ - $ref: "#/components/parameters/nodename"
+ responses:
+ "200":
+          description: "Successfully updated sensor node"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ x-body-name: "node"
+ $ref: "#/components/schemas/Node"
+ delete:
+ tags:
+ - Nodes
+ operationId: "nodes.delete"
+ summary: "Delete a sensor node"
+ parameters:
+ - $ref: "#/components/parameters/nodename"
+ responses:
+ "204":
+ description: "Successfully deleted sensor node"
+ /availablenodes:
+ get:
+ operationId: "nodes.check_for_available_nodes"
+ tags:
+ - Nodes
+ summary: "Read the number of free sensor nodes of each sensor type"
+ responses:
+ "200":
+ description: "Successfully read the list of available sensor nodes"
+ /nodelocations:
+ get:
+ operationId: "nodes.get_sensor_nodes_locations"
+ tags:
+ - Nodes
+ summary: "Get the list of available locations where sensors are present"
+ responses:
+ "200":
+ description: "Successfully read the list of sensor node locations"
+ /parameters:
+ get:
+ operationId: "parameters.read_all"
+ tags:
+ - Parameters
+ summary: "Read the list of parameters"
+ responses:
+ "200":
+ description: "Successfully read parameters list"
+ post:
+ operationId: "parameters.create"
+ tags:
+ - Parameters
+ summary: "Create a parameter list associated with a sensor node"
+ requestBody:
+ description: "Parameter to create"
+ required: True
+ content:
+ application/json:
+ schema:
+ x-body-name: "parameter"
+ type: "object"
+ properties:
+ node_id:
+ type: "integer"
+ content:
+ type: "string"
+ responses:
+ "201":
+ description: "Successfully created a parameter"
+ /parameters/{parameter_id}:
+ get:
+ operationId: "parameters.read_one"
+ tags:
+ - Parameters
+ summary: "Read one parameter list"
+ parameters:
+ - $ref: "#/components/parameters/parameter_id"
+ responses:
+ "200":
+ description: "Successfully read one parameter list"
+ put:
+ tags:
+ - Parameters
+ operationId: "parameters.update"
+ summary: "Update a parameter"
+ parameters:
+ - $ref: "#/components/parameters/parameter_id"
+ responses:
+ "200":
+ "description": "Successfully updated parameter list"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ x-body-name: "parameter"
+ type: "object"
+ properties:
+ content:
+ type: "string"
+ delete:
+ tags:
+ - Parameters
+ operationId: "parameters.delete"
+ summary: "Delete a parameter"
+ parameters:
+ - $ref: "#/components/parameters/parameter_id"
+ responses:
+ "204":
+ description: "Successfully deleted parameter"
\ No newline at end of file
diff --git a/Sensors/templates/home.html b/Sensors/templates/home.html
new file mode 100644
index 0000000..23093cf
--- /dev/null
+++ b/Sensors/templates/home.html
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>Flask REST API for HACKATOONS</title>
+  </head>
+  <body>
+    <h1>Welcome</h1>
+  </body>
+</html>