Skip to content

Commit

Permalink
Merge pull request #4 from fga-eps-mds/fix/generate-metrics
Browse files Browse the repository at this point in the history
fix: coleta de metricas
  • Loading branch information
clara-ribeiro authored Jan 19, 2025
2 parents 82ca953 + 41387a6 commit 7e95c83
Show file tree
Hide file tree
Showing 2 changed files with 222 additions and 75 deletions.
59 changes: 35 additions & 24 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -1,46 +1,57 @@
name: backend-financeiro-release
name: Export de métricas

on:
on:
push:
branches: [master]
branches:
- master
- devel
tags:
- "v*"
pull_request:
branches:
- master
- devel
types: [closed]
types: [ closed ]

jobs:
generate-release:
runs-on: ubuntu-latest
release:
if: github.event.pull_request.merged == true && contains(github.event.pull_request.labels.*.name, 'NOT RELEASE') == false
runs-on: "ubuntu-latest"
environment: actions

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: "Get Previous tag"
id: previoustag
uses: "WyriHaximus/github-action-get-previous-tag@v1"
- uses: actions/checkout@v3
with:
fallback: 1.0.0
fetch-depth: 0

- name: Install dotenv
run: pip install python-dotenv packaging pandas

- name: Cria arquivo .env
run: |
touch ./metrics/.env
echo GITHUB_TOKEN=${{ secrets.PERSONAL_TOKEN }} >> ./metrics/.env
echo RELEASE_MAJOR=${{ contains(github.event.pull_request.labels.*.name, 'MAJOR RELEASE') }} >> ./metrics/.env
echo RELEASE_MINOR=${{ contains(github.event.pull_request.labels.*.name, 'MINOR RELEASE') }} >> ./metrics/.env
echo RELEASE_FIX=${{ contains(github.event.pull_request.labels.*.name, 'FIX RELEASE') }} >> ./metrics/.env
echo DEVELOP=${{ contains(github.event.pull_request.labels.*.name, 'DEVELOP') }} >> ./metrics/.env
echo REPO=${{ github.event.repository.name }} >> ./metrics/.env
echo REPO_DOC=${{ secrets.GIT_DOC_REPO }} >> ./metrics/.env
- name: Use Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 16.x
- name: Criar diretório
run: mkdir -p analytics-raw-data

- name: Cria métricas do SonarCloud
- name: Coletar métricas no SonarCloud
run: python metrics/sonar-metrics.py ${{ github.event.repository.name }} ${{ github.ref_name }}

- name: Commita arquivos de métricas do SonarCloud
- name: Envia métricas para repo de Doc
run: |
git config --global user.email "${{ secrets.GIT_EMAIL }}"
git config --global user.name "${{ secrets.GIT_USER }}"
git clone --single-branch --branch main "https://x-access-token:${{ secrets.PERSONAL_TOKEN }}@github.com/fga-eps-mds/2024.2-SENTINELA-DOC" doc
git config --global user.email "${{secrets.GIT_EMAIL}}"
git config --global user.name "${{secrets.GIT_USER}}"
git clone --single-branch --branch main "https://x-access-token:${{secrets.PERSONAL_TOKEN}}@github.com/fga-eps-mds/2024.2-SENTINELA-DOC" doc
mkdir -p doc/analytics-raw-data
cp -R fga-eps-mds*.json doc/analytics-raw-data
cp -R analytics-raw-data/*.json doc/analytics-raw-data
cd doc
git add .
git commit -m "Métricas SonarCloud - ${{ github.event.repository.name }} ${{ github.ref_name }}"
git push
echo "Arquivos de métricas gerado com sucesso."
238 changes: 187 additions & 51 deletions metrics/sonar-metrics.py
Original file line number Diff line number Diff line change
@@ -1,58 +1,194 @@
import json
import requests
import sys
import urllib.request
from datetime import datetime
import requests
# import datetime
import pandas as pd
import os
from packaging import version
from dotenv import load_dotenv

def generate_metrics():
    """Fetch SonarCloud metrics for this repository and dump them to a JSON file.

    Expects two CLI arguments (supplied by the release workflow):
      sys.argv[1] -> repository name
      sys.argv[2] -> git ref name (branch or tag)

    Writes a file named ``fga-eps-mds-<repo>-<timestamp>-<ref>.json`` in the
    current working directory and prints the file name.
    """
    # Base SonarCloud endpoint that returns measures for a component tree.
    base_url = "https://sonarcloud.io/api/measures/component_tree?component="
    # Organization identifier on SonarCloud.
    prefix = "fga-eps-mds"
    # Metric keys requested from SonarCloud.
    metrics = [
        "files",
        "functions",
        "complexity",
        "comment_lines_density",
        "duplicated_lines_density",
        "coverage",
        "ncloc",
        "tests",
        "test_errors",
        "test_failures",
        "test_execution_time",
        "security_rating",
    ]

    # Repository name, passed as an argument from release.yml.
    repository_name = sys.argv[1]
    # Name of the ref the script was called for, also from release.yml.
    ref_name = sys.argv[2]

    # The SonarCloud component key is "<prefix>_<repository_name>".
    # The two cases previously duplicated the entire URL; only the
    # branch/tag selector differs, so build the common part once.
    ref_param = "branch" if "refs/heads/" in sys.argv[2] else "tag"
    url = (
        f'{base_url}{prefix}_{repository_name}'
        f'&metricKeys={",".join(metrics)}&{ref_param}={ref_name}'
    )

    with urllib.request.urlopen(url) as res:
        data = json.load(res)

    date = datetime.now()
    # Timestamp layout expected by the course's downstream metrics tooling.
    timestamp = f"{date.month}-{date.day}-{date.year}-{date.hour}-{date.minute}-{date.second}"

    # Keep the first 16 chars intact and replace '-' with '_' in the rest
    # (matches the file-naming convention consumed downstream).
    underlined_repo_name = repository_name[:16] + \
        repository_name[16:].replace('-', "_")

    filename = f"{prefix}-{underlined_repo_name}-{timestamp}-{ref_name}.json"
    print(filename)
    with open(filename, "w") as file:
        json.dump(data, file)
######################################
# CONSTANT / VARIABLE DECLARATIONS   #
######################################
# Captured once at import time; embedded in every output file name so all
# files from one run share the same timestamp.
TODAY = datetime.now()

load_dotenv()
# Repository-wide variables (the .env file is written by release.yml).
OWNER = "fga-eps-mds"
REPO = os.getenv('REPO')
REPO_ISSUES = os.getenv('REPO_DOC')

# Environment variables from the workflow: the RELEASE_* / DEVELOP values
# hold the string 'true' when the matching label was on the merged PR.
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
RELEASE_MAJOR = os.getenv('RELEASE_MAJOR')
RELEASE_MINOR = os.getenv('RELEASE_MINOR')
RELEASE_FIX = os.getenv('RELEASE_FIX')
DEVELOP = os.getenv('DEVELOP')

# Metric keys requested from the SonarCloud measures API.
METRICS_SONAR = [
    "files",
    "functions",
    "complexity",
    "comment_lines_density",
    "duplicated_lines_density",
    "coverage",
    "ncloc",
    "tests",
    "test_errors",
    "test_failures",
    "test_execution_time",
    "security_rating",
]

BASE_URL_SONAR = "https://sonarcloud.io/api/measures/component_tree?component=fga-eps-mds_"

# Use whichever API endpoint is needed
# api_url_workflows = f"https://api.github.com/repos/{owner}/{repo}/actions/workflows"
# api_url_jobs = f"https://api.github.com/repos/{owner}/{repo}/actions/runs/3624383254/jobs"
# api_url_deployments = f"https://api.github.com/repos/{owner}/{repo}/deployments"
api_url_runs = f"https://api.github.com/repos/{OWNER}/{REPO}/actions/runs"
api_url_issues = f"https://api.github.com/repos/{OWNER}/{REPO_ISSUES}/issues"
print(api_url_issues)

#####################
# RELEASE FUNCTIONS #
#####################
# Pega a última release
# Fetch the latest release
def get_latest_release():
    """Return the tag name of the repository's most recent GitHub release.

    Returns:
        str: the latest release's ``tag_name``, or ``'0.0.0'`` when the
        repository has no releases (or the release has no tag name).
    """
    url = f'https://api.github.com/repos/{OWNER}/{REPO}/releases'
    headers = {
        'Authorization': f'token {GITHUB_TOKEN}'
    }
    # Explicit timeout: requests has no default and would block the CI job
    # indefinitely if the GitHub API stalls.
    response = requests.get(url, headers=headers, timeout=30)
    releases = response.json()

    if releases:
        return releases[0].get('tag_name', '0.0.0')
    return '0.0.0'

# Cria um novo nome de tag
# Build the next tag name
def new_tag_name():
    """Compute the next semver tag from the latest release and the PR labels.

    The RELEASE_* environment variables hold the string 'true' when the
    matching label was present on the merged pull request.

    Returns:
        str: the next version in ``MAJOR.MINOR.MICRO`` form. A micro (fix)
        bump is the default when no release label is set.
    """
    old_tag = get_latest_release()
    try:
        old_version = version.parse(old_tag)
    except version.InvalidVersion:
        # First release, or a tag that is not valid semver: restart at 0.0.0.
        old_version = version.parse('0.0.0')

    if RELEASE_MAJOR == 'true':
        return f'{old_version.major + 1}.0.0'
    if RELEASE_MINOR == 'true':
        return f'{old_version.major}.{old_version.minor + 1}.0'
    # The 'FIX RELEASE' branch and the previous else branch were identical,
    # so both collapse into this single micro-bump fallback.
    return f'{old_version.major}.{old_version.minor}.{old_version.micro + 1}'

# Cria a nova release
# Create the new release
def create_release():
    """Create a new GitHub release tagged with the next computed version.

    Returns:
        tuple: ``(upload_url, tag)`` — ``upload_url`` is ``None`` when the
        API response does not contain one (e.g. the request was rejected).
    """
    tag = new_tag_name()
    url = f'https://api.github.com/repos/{OWNER}/{REPO}/releases'
    headers = {
        'Authorization': f'token {GITHUB_TOKEN}',
        'Accept': 'application/vnd.github.v3+json'
    }
    payload = {
        'tag_name': tag,
        'name': tag
    }
    # Explicit timeout so the workflow fails fast instead of hanging.
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    res_data = response.json()
    return res_data.get('upload_url'), tag

###################
# SONAR FUNCTIONS #
###################

def save_sonar_metrics(tag):
    """Fetch the configured SonarCloud metrics and save them as raw JSON.

    Args:
        tag: release tag embedded in the output file name.

    Writes ``./analytics-raw-data/fga-eps-mds-<REPO>-<timestamp>-<tag>.json``.
    """
    # ps=500 raises the page size so a single request covers the whole
    # component tree. Timeout added so a stalled API cannot hang the job.
    response = requests.get(
        f'{BASE_URL_SONAR}{REPO}&metricKeys={",".join(METRICS_SONAR)}&ps=500',
        timeout=30,
    )

    # response.json() parses the body directly; json.loads(response.text)
    # was an unnecessary round-trip through a string.
    data = response.json()

    print("Extração do Sonar concluída.")

    file_path = f'./analytics-raw-data/fga-eps-mds-{REPO}-{TODAY.strftime("%m-%d-%Y-%H-%M-%S")}-{tag}.json'

    # The with-block closes the file; the previous explicit fp.close()
    # inside it was redundant.
    with open(file_path, 'w') as fp:
        json.dump(data, fp)

####################
# GITHUB FUNCTIONS #
####################

def all_request_pages(data):
    """Append workflow runs from pages 2..N onto an already-fetched page 1.

    Args:
        data: JSON dict from the first runs request; must contain
            'total_count' and 'workflow_runs'.

    Returns:
        The same dict, with 'workflow_runs' extended with every later page.
    """
    total_runs = data["total_count"]
    # Ceiling division: number of 100-item pages needed to cover all runs.
    pages = (total_runs // 100) + (1 if total_runs % 100 > 0 else 0)
    # Page 1 was fetched by the caller, so start at page 2 (clearer than
    # looping from 0 and skipping the first two indices).
    for page in range(2, pages + 1):
        # BUG FIX: per_page must be repeated here. The previous version
        # requested later pages without it, so GitHub returned the default
        # 30 runs per page while the page count assumed 100, silently
        # dropping most runs beyond the first page.
        response = requests.get(
            api_url_runs,
            params={'per_page': 100, 'page': page},
            timeout=30,
        )
        data['workflow_runs'].extend(response.json()['workflow_runs'])
    return data

def filter_request_per_date(data, date):
    """Keep only the workflow runs created on the given day.

    Args:
        data: dict with a 'workflow_runs' list; each run carries an
            ISO-8601 'created_at' timestamp (e.g. '2023-03-23T10:15:00Z').
        date: day to match, formatted 'YYYY-MM-DD'.

    Returns:
        dict: ``{"workflow_runs": [runs created on that day]}``.
    """
    # 'created_at' starts with the 'YYYY-MM-DD' day, so a direct prefix
    # comparison replaces the previous no-op strptime -> strftime round-trip.
    matching = [
        run for run in data["workflow_runs"]
        if run["created_at"][:10] == date
    ]
    return {"workflow_runs": matching}

def save_github_metrics_runs():
    """Download every workflow run of the repository and save them as JSON.

    Writes
    ``./analytics-raw-data/GitHub_API-Runs-fga-eps-mds-<REPO>-<timestamp>.json``.
    """
    # First page, 100 runs at a time; the remaining pages are appended
    # by all_request_pages below. Timeout keeps the job from hanging.
    response = requests.get(api_url_runs, params={'per_page': 100}, timeout=30)

    data = response.json()

    data = all_request_pages(data)

    print("Quantidade de workflow_runs: " + str(len(data["workflow_runs"])))

    file_path = f'./analytics-raw-data/GitHub_API-Runs-fga-eps-mds-{REPO}-{TODAY.strftime("%m-%d-%Y-%H-%M-%S")}.json'

    # Save the data to a JSON file; the with-block closes it, so the
    # previous explicit fp.close() was redundant.
    with open(file_path, 'w') as fp:
        json.dump(data, fp)

def save_github_metrics_issues():
    """Download every issue (open and closed) from the docs repository.

    Pages through the GitHub issues API 100 items at a time and writes
    everything to
    ``./analytics-raw-data/GitHub_API-Issues-fga-eps-mds-<REPO_ISSUES>.json``.
    """
    issues = []
    page = 1

    while True:
        # Timeout so a stalled API call cannot hang the workflow.
        response = requests.get(
            api_url_issues,
            params={'state': 'all', 'per_page': 100, 'page': page},
            timeout=30,
        )

        page_issues = response.json()
        # An empty page means we have walked past the last issue.
        if not page_issues:
            break

        issues.extend(page_issues)
        print(f"Página {page}: {len(page_issues)} issues carregadas.")

        page += 1

    print("Quantidade total de issues: " + str(len(issues)))

    file_path = f'./analytics-raw-data/GitHub_API-Issues-fga-eps-mds-{REPO_ISSUES}.json'

    # Save every issue to a JSON file.
    with open(file_path, 'w') as fp:
        json.dump(issues, fp, indent=4)

if __name__ == "__main__":
    # Script entry point, invoked by the release workflow.
    # NOTE(review): generate_metrics() also pulls Sonar data (keyed by ref)
    # while save_sonar_metrics(tag) pulls it keyed by the new release tag —
    # confirm both outputs are intentionally produced.
    generate_metrics()
    _, tag = create_release()

    save_sonar_metrics(tag)
    save_github_metrics_runs()
    save_github_metrics_issues()

0 comments on commit 7e95c83

Please sign in to comment.