fixed HOST and PORT config
Zaki-1052 committed Feb 9, 2024
1 parent 73aced5 commit 41ce9ce
Showing 2 changed files with 52 additions and 14 deletions.
48 changes: 39 additions & 9 deletions public/script.js
@@ -1,5 +1,25 @@
// script.js

// configures host and port

// Initialize a variable to hold the base URL
let baseURL = 'http://localhost:3000'; // default value

// Function to fetch configuration from the server
async function fetchConfig() {
  try {
    const response = await fetch('/config');
    const config = await response.json();
    baseURL = `http://${config.host}:${config.port}`;
    console.log(`Base URL set to: ${baseURL}`);
  } catch (error) {
    console.error("Error fetching configuration:", error);
  }
}

fetchConfig();


// detects safari browser

function isSafariBrowser() {
@@ -56,7 +76,16 @@ let currentModelID = 'gpt-4'; // Global declaration
let selectedImage = null;

// Convert markdown to HTML using marked.js and sanitize it with DOMPurify
marked.setOptions({ breaks: true }); // Enable new lines to be interpreted as <br>
marked.setOptions({
  // Enable new lines to be interpreted as <br>
  breaks: true,

  // Syntax highlighting for code blocks
  highlight: function(code, lang) {
    const language = hljs.getLanguage(lang) ? lang : 'plaintext';
    return hljs.highlight(code, { language }).value;
  }
});


// Function to select a model and update the displayed text
@@ -108,7 +137,7 @@ async function handleImageGenerationRequest(message) {
const prompt = message.substring("Generate:".length).trim();

try {
const response = await fetch('http://localhost:3000/generate-image', {
const response = await fetch(`${baseURL}/generate-image`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt: prompt })
@@ -154,13 +183,13 @@ function displayGeneratedImage(imageUrl) {
function sendShutdownMessage() {
// Sending "Bye!" to both /message and Gemini endpoints
const messagePayload = JSON.stringify({ message: "Bye!" });
const messageRequest = fetch('http://localhost:3000/message', {
const messageRequest = fetch(`${baseURL}/message`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: messagePayload
});

const geminiRequest = fetch('http://localhost:3000/gemini', {
const geminiRequest = fetch(`${baseURL}/gemini`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: messagePayload
@@ -178,6 +207,7 @@ function sendShutdownMessage() {
const selectedModelDisplayName = document.getElementById('selected-model').textContent.trim();

document.addEventListener('DOMContentLoaded', () => {

// Define model descriptions
const modelDescriptions = {
"gpt-4": "GPT-4: Most Intelligent — Default",
@@ -235,10 +265,10 @@ document.querySelector('.custom-select').addEventListener('click', toggleDropdow
function determineEndpoint(modelID) {
if (modelID.startsWith('gemini')) {
isGemini = true;
return 'http://localhost:3000/gemini'; // URL for the Gemini endpoint
return `${baseURL}/gemini`; // URL for the Gemini endpoint
} else {
isGemini = false;
return 'http://localhost:3000/message'; // URL for the OpenAI endpoint
return `${baseURL}/message`; // URL for the OpenAI endpoint
}
}

@@ -574,7 +604,7 @@ async function uploadImageAndGetUrl(imageFile) {
formData.append('image', imageFile);

try {
const response = await fetch('http://localhost:3000/upload-image', {
const response = await fetch(`${baseURL}/upload-image`, {
method: 'POST',
body: formData
});
@@ -609,7 +639,7 @@ async function uploadImageAndGetUrl(imageFile) {
model: currentModelID,
imageParts: imageFilename ? [{ filename: imageFilename, mimeType: 'image/jpeg' }] : []
};
endpoint = 'http://localhost:3000/gemini'; // Gemini endpoint
endpoint = `${baseURL}/gemini`; // Gemini endpoint
} else {
// Prepare the payload for OpenAI API
payload = {
@@ -618,7 +648,7 @@
instructions: instructions,
image: imageUrl // Existing image handling for OpenAI
};
endpoint = 'http://localhost:3000/message'; // OpenAI endpoint
endpoint = `${baseURL}/message`; // OpenAI endpoint
}

try {
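Taken together, the script.js changes replace every hard-coded http://localhost:3000 URL with a base URL resolved at startup from the server's /config endpoint. A minimal standalone sketch of the pattern follows; the callEndpoint helper is illustrative only and not part of the repository.

// Sketch of the baseURL pattern introduced in this commit.
// `callEndpoint` is a hypothetical helper, not code from the repository.
let baseURL = 'http://localhost:3000'; // fallback until /config responds

async function fetchConfig() {
  const response = await fetch('/config');          // same-origin request
  const config = await response.json();             // { host, port } from the server
  baseURL = `http://${config.host}:${config.port}`; // later requests use this value
}

async function callEndpoint(path, payload) {
  // Every endpoint URL is derived from baseURL instead of a hard-coded host.
  const response = await fetch(`${baseURL}${path}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  });
  return response.json();
}

// Usage (illustrative): await fetchConfig(); then callEndpoint('/message', { message: 'Hi' });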
18 changes: 13 additions & 5 deletions server.js
@@ -671,7 +671,6 @@ if (modelID.startsWith('gpt')) {
}



conversationHistory.push(user_input);


@@ -876,14 +875,23 @@ app.get('/export-chat-html', (req, res) => {
app.get('/portal', (req, res) => {
res.sendFile('portal.html', { root: 'public' });
});

// Expose a configuration endpoint
app.get('/config', (req, res) => {
  res.json({
    host: process.env.HOST,
    port: process.env.PORT
  });
});



// Start the server
// Assuming `app` is an instance of your server (like an Express app)
const PORT = process.env.PORT;
// Ensure that the server can be accessed via any host
app.set('trust proxy', true);

// Listen only on the loopback interface (localhost)
const HOST = process.env.HOST;
const PORT = process.env.PORT || 3000;
const HOST = process.env.HOST || '0.0.0.0';

const server = app.listen(PORT, HOST, () => {
console.log(`Server running at http://${HOST}:${PORT}`);
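With the new defaults, the server binds to 0.0.0.0:3000 when HOST and PORT are not set in the environment. A minimal sketch of how the values might be supplied and resolved; the dotenv call is an assumption for illustration, since this diff does not show how the project loads its environment.

// Illustrative only: resolving HOST and PORT the same way server.js now does.
// The dotenv import is an assumption; it is not shown in this diff.
require('dotenv').config(); // e.g. a .env file containing HOST=localhost and PORT=3000

const PORT = process.env.PORT || 3000;      // falls back to 3000
const HOST = process.env.HOST || '0.0.0.0'; // falls back to all interfaces

console.log(`Server would listen at http://${HOST}:${PORT}`);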
