From 9c5f284318834042f6f95bbc757281d2f8edb143 Mon Sep 17 00:00:00 2001
From: sasi
Date: Mon, 12 Aug 2024 19:30:17 +0530
Subject: [PATCH] supabase content removed

---
 app.py   |  56 -------------------------------
 db.py    | 100 ------------------------------------------------------
 query.py |   1 -
 3 files changed, 157 deletions(-)

diff --git a/app.py b/app.py
index e606322..027e1ea 100644
--- a/app.py
+++ b/app.py
@@ -1,5 +1,4 @@
 from flask import Flask, jsonify,request,url_for
-from db import SupabaseInterface
 from collections import defaultdict
 from flasgger import Swagger
 import re,os,traceback
@@ -54,61 +53,6 @@ def greeting():
 
 
 
-
-@app.route('/get-data', methods=['GET'])
-@cross_origin(supports_credentials=True)
-@require_secret_key
-def get_data():
-    """
-    Fetch data from Supabase.
-    ---
-    responses:
-      200:
-        description: Data fetched successfully
-        schema:
-          type: array
-          items:
-            type: object
-      500:
-        description: Error fetching data
-        schema:
-          type: object
-          properties:
-            error:
-              type: string
-    """
-    try:
-        response = SupabaseInterface().get_instance().client.table('dmp_pr_updates').select('*').execute()
-        data = response.data
-        return jsonify(data)
-    except Exception as e:
-        return jsonify({'error': str(e)}), 200
-
-
-
-@app.route('/v1/issues', methods=['GET'])
-@require_secret_key
-def v1get_issues():
-    try:
-        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute()
-        data = response.data
-
-        #group data based on issues
-        grouped_data = defaultdict(list)
-        for record in data:
-            issue_url = record['issue_url']
-            grouped_data[issue_url].append({
-                'id': record['id'],
-                'name': record['body_text']
-            })
-
-        result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()]
-        grouped_data = group_by_owner(result)
-        return jsonify(grouped_data)
-
-    except Exception as e:
-        error_traceback = traceback.format_exc()
-        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
 
 
 @app.route('/issues', methods=['GET'])
diff --git a/db.py b/db.py
index 565f26c..e69de29 100644
--- a/db.py
+++ b/db.py
@@ -1,100 +0,0 @@
-import os, sys
-from typing import Any
-from supabase import create_client, Client
-from supabase.lib.client_options import ClientOptions
-from abc import ABC, abstractmethod
-import psycopg2,json
-from psycopg2.extras import RealDictCursor
-from dotenv import load_dotenv
-
-
-load_dotenv()
-
-client_options = ClientOptions(postgrest_client_timeout=None)
-
-
-
-class SupabaseInterface():
-
-    _instance = None
-
-    def __init__(self):
-        if not SupabaseInterface._instance:
-
-            # Load environment variables
-
-            SUPABASE_URL = os.getenv('SUPABASE_URL')
-            SUPABASE_KEY = os.getenv('SUPABASE_KEY')
-            self.client: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
-            SupabaseInterface._instance = self
-        else:
-            SupabaseInterface._instance = self._instance
-
-
-
-    @staticmethod
-    def get_instance():
-        # Static method to retrieve the singleton instance
-        if not SupabaseInterface._instance:
-            # If no instance exists, create a new one
-            SupabaseInterface._instance = SupabaseInterface()
-        return SupabaseInterface._instance
-
-
-    def get_postgres_connection():
-
-        # Database configuration
-        DB_HOST =os.getenv('POSTGRES_DB_HOST')
-        DB_NAME =os.getenv('POSTGRES_DB_NAME')
-        DB_USER =os.getenv('POSTGRES_DB_USER')
-        DB_PASS =os.getenv('POSTGRES_DB_PASS')
-        conn = psycopg2.connect(
-            host=DB_HOST,
-            database=DB_NAME,
-            user=DB_USER,
-            password=DB_PASS
-        )
-        return conn
-
-
-
-    def postgres_query(query,params=None):
-        conn = SupabaseInterface.get_postgres_connection()
-
-        cursor = conn.cursor(cursor_factory=RealDictCursor)
-
-        # cursor = conn.cursor()
-        if not params:
-            cursor.execute(query)
-        else:
-            cursor.execute(query,params)
-
-        rows = cursor.fetchall()
-        results_as_dicts = [dict(row) for row in rows]
-
-        cursor.close()
-        conn.close()
-        return results_as_dicts
-
-    def readAll(self, table):
-        data = self.client.table(f"{table}").select("*").execute()
-        return data.data
-
-    def add_data(self, data,table_name):
-        data = self.client.table(table_name).insert(data).execute()
-        return data.data
-
-    def add_data_filter(self, data, table_name):
-        # Construct the filter based on the provided column names and values
-        filter_data = {column: data[column] for column in ['dmp_id','issue_number','owner']}
-
-        # Check if the data already exists in the table based on the filter
-        existing_data = self.client.table(table_name).select("*").eq('dmp_id',data['dmp_id']).execute()
-
-        # If the data already exists, return without creating a new record
-        if existing_data.data:
-            return "Data already exists"
-
-        # If the data doesn't exist, insert it into the table
-        new_data = self.client.table(table_name).insert(data).execute()
-        return new_data.data
\ No newline at end of file
diff --git a/query.py b/query.py
index dde7fed..bcce7b1 100644
--- a/query.py
+++ b/query.py
@@ -1,4 +1,3 @@
-from db import SupabaseInterface
 from models import *
 from sqlalchemy import func
 import os