Skip to content

Commit

Permalink
feat: kin demo homepage
Browse files Browse the repository at this point in the history
refactor: data generation to include only projects and organisations
fix: improve deployment files and release
chore: data generation reduced to only needed data
feat: home page without counts and news
test: fix home page tests
chore: fix meta description for project and organisation pages
  • Loading branch information
dmijatovic committed Jul 5, 2024
1 parent 5dbf3f8 commit 058ab1b
Show file tree
Hide file tree
Showing 25 changed files with 1,631 additions and 1,314 deletions.
21 changes: 2 additions & 19 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -133,9 +133,9 @@ jobs:
- name: zip deployment files
run: |
zip --junk-paths deployment.zip \
./deployment/nginx.conf \
./deployment/docker-compose.yml \
./nginx/nginx.conf \
.env.example \
./deployment/env.example \
./deployment/README.md
- name: Upload deployment.zip
Expand Down Expand Up @@ -184,20 +184,3 @@ jobs:
prerelease: ${{inputs.pre_release}}
files: deployment.zip

# citation:
# # it needs to be checked on string value
# if: needs.release_tag.outputs.skipped == 'false'
# needs: [ release_tag, deployment_files, release_draft ]
# name: citations
# uses: ./.github/workflows/_cff.yml
# with:
# artifact: citation
# branch: main
# commit_message: "chore(release): update citation file"
# secrets:
# # need to pass PAT using secrets prop to reusable workflow (module)
# # the secrets are not passed automatically to child modules
# # see https://docs.github.com/en/enterprise-cloud@latest/actions/using-workflows/reusing-workflows#passing-inputs-and-secrets-to-a-reusable-workflow
# token: ${{ secrets.PAT_RELEASE }}


13 changes: 7 additions & 6 deletions data-generation/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@

FROM node:21.4.0-bullseye-slim
WORKDIR /usr/app
COPY ./package.json /usr/app
RUN npm install
COPY ./img /usr/app/img
COPY ./images.js /usr/app
COPY ./real-data.js /usr/app
COPY ./main.js /usr/app
# copy dependency manifests first so the npm install layer is cached
COPY package.json package-lock.json ./
# install
RUN npm install --frozen-lockfile --silent
# copy all files
COPY . .

CMD npx wait-on --timeout 10000 $POSTGREST_URL && node main.js
82 changes: 82 additions & 0 deletions data-generation/accounts.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import {faker} from '@faker-js/faker';

import {postToBackend} from './utils.js'

// Creates fake accounts and one login_for_account row per account.
// The first `orcids.length` accounts receive an ORCID-based login.
// Returns the IDs of the generated accounts.
export async function generateAccounts(orcids) {
  const accounts = await postAccountsToBackend(100);
  const ids = accounts.map(a => a.id);
  // await (result unused) so a failed insert surfaces to the caller
  await postToBackend('/login_for_account', generateLoginForAccount(ids, orcids));
  return ids;
}

// Creates `amount` fake account rows and posts them to the backend.
// Each boolean flag is true with 80% probability.
export async function postAccountsToBackend(amount = 100) {
  const randomFlag = () => faker.helpers.maybe(() => true, {probability: 0.8}) ?? false;

  const accounts = Array.from({length: amount}, () => ({
    public_orcid_profile: randomFlag(),
    agree_terms: randomFlag(),
    notice_privacy_statement: randomFlag(),
  }));

  return postToBackend('/account', accounts);
}

// Generate one login_for_account row per given account.
// The first `orcids.length` accounts get an ORCID login (provider 'orcid',
// sub = the ORCID iD); the remaining accounts get a random sub and provider.
export function generateLoginForAccount(accountIds, orcids) {
  const homeOrganisations = [null];
  for (let i = 0; i < 10; i++) {
    homeOrganisations.push('Organisation for ' + faker.word.noun());
  }
  // NOTE(review): 'ipd1' and 'ip4' look like typos for 'idp1'/'idp4', but the
  // values are arbitrary fake data, so they are kept as-is.
  const providers = ['ipd1', 'idp2', 'idp3', 'ip4'];

  let orcidsAdded = 0;
  return accountIds.map(accountId => {
    const firstName = faker.person.firstName();
    // was misleadingly named `givenName` although it holds a last name
    const lastName = faker.person.lastName();

    const hasOrcid = orcidsAdded < orcids.length;
    const sub = hasOrcid ? orcids[orcidsAdded++] : faker.string.alphanumeric(30);
    const provider = hasOrcid ? 'orcid' : faker.helpers.arrayElement(providers);

    return {
      account: accountId,
      name: firstName + ' ' + lastName,
      email: faker.internet.email({firstName: firstName, lastName: lastName}),
      sub: sub,
      provider: provider,
      home_organisation: faker.helpers.arrayElement(homeOrganisations),
      last_login_date:
        faker.helpers.maybe(() => faker.date.past({years: 3}), {
          probability: 0.8,
        }) ?? null,
    };
  });
}

14 changes: 14 additions & 0 deletions data-generation/auth.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import jwt from 'jsonwebtoken';

// Builds a short-lived (2 minute) admin JWT for authenticating against the
// backend. Falls back to the development secret when PGRST_JWT_SECRET is
// unset or empty.
function createJWT() {
  const secret = process.env.PGRST_JWT_SECRET || 'reallyreallyreallyreallyverysafe';
  const payload = {role: 'rsd_admin'};
  return jwt.sign(payload, secret, {expiresIn: '2m'});
}

// Created once at module load and it expires after 2 minutes, so this module
// is only suitable for short-running scripts such as data generation.
export const token = createJWT();

// Default headers for backend requests: authenticated JSON writes that
// return the inserted rows and silently skip duplicates.
export const headers = {
  'Content-Type': 'application/json',
  Authorization: `bearer ${token}`,
  Prefer: 'return=representation,resolution=ignore-duplicates',
};
103 changes: 103 additions & 0 deletions data-generation/community.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import {faker} from '@faker-js/faker';

import {
generateRelationsForDifferingEntities,
generateUniqueCaseInsensitiveString,
generateKeywordsForEntity,
postToBackend,
getKeywordIds
} from './utils.js'
import {organisationLogos,getLocalImageIds} from './images.js';

// Generates `amount` fake communities together with their keywords,
// software links and category trees. Returns the new community IDs.
export async function generateCommunities({idsSoftware, amount = 500}) {
  const localOrganisationLogoIds = await getLocalImageIds(organisationLogos);
  const idsKeywords = await getKeywordIds();
  // add communities
  const communities = await postToBackend('/community', createCommunities(localOrganisationLogoIds, amount));
  const idsCommunities = communities.map(c => c.id);
  // add related data in parallel; await (result unused) so failures surface
  await Promise.all([
    postToBackend('/keyword_for_community', generateKeywordsForEntity(idsCommunities, idsKeywords, 'community')),
    postToBackend('/software_for_community', generateSoftwareForCommunity(idsSoftware, idsCommunities)),
    generateCategories(idsCommunities),
  ]);

  return idsCommunities;
}

// Builds `amount` community rows with unique, slugified names and optional
// descriptions/logos. Does not post anything to the backend.
export function createCommunities(localOrganisationLogoIds, amount = 500) {
  const communities = [];

  for (let i = 0; i < amount; i++) {
    // 80% of names stay short (at most 5 words), the rest may be much longer
    const maxWords = faker.helpers.maybe(() => 5, {probability: 0.8}) ?? 31;
    const name = generateUniqueCaseInsensitiveString(() =>
      ('Community ' + faker.word.words(faker.number.int({max: maxWords, min: 1}))).substring(0, 200),
    );

    const slug = faker.helpers
      .slugify(name)
      .toLowerCase()
      .replaceAll(/-{2,}/g, '-') // collapse runs of dashes
      .replaceAll(/-+$/g, ''); // strip trailing dashes

    communities.push({
      slug: slug,
      name: name,
      short_description: faker.helpers.maybe(() => faker.lorem.paragraphs(1, '\n\n'), {probability: 0.8}) ?? null,
      description: faker.helpers.maybe(() => faker.lorem.paragraphs(1, '\n\n'), {probability: 0.8}) ?? null,
      logo_id:
        faker.helpers.maybe(() => localOrganisationLogoIds[i % localOrganisationLogoIds.length], {probability: 0.8}) ??
        null,
    });
  }

  return communities;
}

// Creates a category tree for every community plus one global tree
// (community id null). All trees are generated concurrently.
export async function generateCategories(idsCommunities, maxDepth = 3) {
  const treeOwners = [...idsCommunities, null];
  return Promise.all(treeOwners.map(id => generateAndSaveCategoriesForCommunity(id, maxDepth)));
}

// Builds a random category tree (up to maxDepth levels) for one community,
// or the global tree when idCommunity is null, posting each node to the
// backend. Children must be posted after their parent (the parent id is
// needed in the payload), hence the sequential awaits.
//
// FIX: previously wrapped in `new Promise(async res => ...)` — a rejection
// inside an async executor never rejects the returned promise, so backend
// failures were silently lost. A plain async function propagates them.
export async function generateAndSaveCategoriesForCommunity(idCommunity, maxDepth) {
  let parentIds = [null];
  for (let level = 1; level <= maxDepth; level++) {
    const newParentIds = [];
    for (const parent of parentIds) {
      let toGenerateCount = faker.number.int(4);
      if (idCommunity === null && level === 1) {
        // ensure the global tree always has at least one root category
        toGenerateCount += 1;
      }
      for (let i = 0; i < toGenerateCount; i++) {
        const name = `Parent ${parent}, level ${level}, item ${i + 1}`;
        const shortName = `Level ${level}, item ${i + 1}`;
        const body = {
          community: idCommunity,
          parent: parent,
          short_name: shortName,
          name: name,
        };
        const categories = await postToBackend('/category', body);
        newParentIds.push(categories[0].id);
      }
    }
    parentIds = newParentIds;
  }
}

// Links random software to communities; each link gets a request status,
// weighted so that most links are 'approved'.
export function generateSoftwareForCommunity(idsSoftware, idsCommunities) {
  const relations = generateRelationsForDifferingEntities(idsCommunities, idsSoftware, 'community', 'software');

  const statuses = [
    {weight: 1, value: 'pending'},
    {weight: 8, value: 'approved'},
    {weight: 1, value: 'rejected'},
  ];

  for (const relation of relations) {
    relation.status = faker.helpers.weightedArrayElement(statuses);
  }

  return relations;
}
67 changes: 67 additions & 0 deletions data-generation/images.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
//
// SPDX-License-Identifier: Apache-2.0

import fs from 'fs/promises';
import {getFromBackend, mimeTypeFromFileName,postToBackend} from "./utils.js"

export const images = [
'img/213x640-pexels-1262302.png',
'img/426x640-pexels-357573.jpg',
Expand Down Expand Up @@ -52,3 +55,67 @@ export const softwareLogos = [
'img/software/Tux.svg',
'img/software/Xenon_logo.svg',
];


// returns the IDs of the images after they have been posted to the database
// Posts the given local image files (base64-encoded) to the backend and
// returns the resulting image IDs. When every image already exists, the
// backend ignores the duplicates and returns nothing, in which case the
// stored image IDs are fetched back instead.
export async function getLocalImageIds(fileNames) {
  const payloads = await Promise.all(
    fileNames.map(fileName =>
      fs.readFile(fileName, {encoding: 'base64'}).then(base64 => ({
        data: base64,
        mime_type: mimeTypeFromFileName(fileName),
      })),
    ),
  );

  // create images
  let images = await postToBackend('/image?select=id', payloads);
  if (images.length === 0) {
    // same images posted before - nothing returned, so read them back
    images = await getFromBackend('/image?select=id');
  }
  return images.map(image => image.id);
}


// downloads images from generated URLs, posts them to the database and returns their IDs
// Downloads `amount` images from URLs produced by urlGenerator, posts them
// to the backend and returns the new image IDs. Downloads that fail or take
// longer than 3 seconds are skipped with a warning.
export async function downloadAndGetImages(urlGenerator, amount) {
  const imageAsBase64Promises = [];
  const timeOuts = [];
  for (let index = 0; index < amount; index++) {
    const url = urlGenerator();
    imageAsBase64Promises.push(
      Promise.race([
        fetch(url)
          .then(resp => {
            clearTimeout(timeOuts[index]);
            return resp.arrayBuffer();
          })
          .then(ab => Buffer.from(ab).toString('base64'))
          // a single failed download should not abort the whole batch:
          // treat it like a timeout and skip the image
          .catch(() => {
            console.warn('Download failed for ' + url + ', skipping');
            return null;
          }),
        new Promise(res => (timeOuts[index] = setTimeout(res, 3000))).then(() => {
          console.warn('Timeout for ' + url + ', skipping');
          return null;
        }),
      ]),
    );
  }
  const imagesAsBase64 = await Promise.all(imageAsBase64Promises);

  const imagesWithoutNulls = imagesAsBase64
    .filter(img => img !== null)
    // assumes the generated URLs serve JPEGs — TODO confirm with callers
    .map(base64 => ({data: base64, mime_type: 'image/jpeg'}));

  // FIX: postToBackend already returns parsed JSON (see getLocalImageIds and
  // accounts.js), so calling .json() on its result would throw at runtime.
  const idsAsObjects = await postToBackend('/image?select=id', imagesWithoutNulls);
  return idsAsObjects.map(idAsObject => idAsObject.id);
}
Loading

0 comments on commit 058ab1b

Please sign in to comment.