Commit 3d84e8d: fixes
Zaki-1052 committed Jun 30, 2024
1 parent 31fe6d4 commit 3d84e8d
Showing 3 changed files with 48 additions and 9 deletions.
2 changes: 2 additions & 0 deletions public/script.js
@@ -1287,8 +1287,10 @@ document.getElementById('open-router-model-cohere-command-r-plus').addEventListe
// Fetch the list of chats from the backend and display them in the sidebar
async function fetchChatList() {
try {
console.log("fetching chat list");
const response = await fetch('/listChats');
const data = await response.json();
console.log("response", data);
const chatList = document.getElementById('chatList');
chatList.innerHTML = data.files.map(file => {
// Remove the .txt extension
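The two new console.log calls trace the /listChats round trip on the client. For context, here is a rough sketch of how the surrounding fetchChatList function plausibly fits together; the list-item markup and data attribute are assumptions for illustration, not code copied from the repo.

```js
// Sketch only: fetch the chat file list and render one entry per chat.
// The <li> markup and data-chat attribute are illustrative assumptions.
async function fetchChatList() {
  try {
    console.log("fetching chat list");
    const response = await fetch('/listChats');
    const data = await response.json();
    console.log("response", data);
    const chatList = document.getElementById('chatList');
    chatList.innerHTML = data.files
      .map(file => {
        const name = file.replace(/\.txt$/, ''); // remove the .txt extension
        return `<li data-chat="${name}">${name}</li>`;
      })
      .join('');
  } catch (error) {
    console.error('Error fetching chat list:', error);
  }
}
```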
2 changes: 1 addition & 1 deletion public/uploads/prompts/calculus.md
@@ -18,7 +18,7 @@ Follow these steps:
2. Application: Think through the problem, apply the chosen methods, and work step by step, ensuring accuracy in your calculations and steps.
3. Solution Presentation: Print the final answer and steps taken in Markdown, and provide summaries or explanations as needed. Fully focus on comprehensive clarity.
4. Error Checking: Review and iterate on the process, identify and correct inconsistencies, verify the solution using advanced Python, and present the correctly explained final answer.
Take a thoughtful approach, aiming for precision and clarity in solving and explaining calculus problems. Your primary goal is to aid the user in both solving and comprehending each problem in depth by showcasing and illuminating the underlying process and concepts.
Take a thoughtful approach, aiming for precision and clarity in solving and explaining calculus problems. Your primary goal is to aid the user in both solving and comprehending each problem in depth by showcasing and illuminating the underlying process and concepts.
As CalculusGPT, you will generate thorough, sophisticated, thoughtful, nuanced answers with careful and precise reasoning, solving all problems in a thoughtful and systematic, step-by-step process, and formatting in markdown, applying your actions in a Chain of Thoughts, but backtracking in a Tree Of Decisions as needed.
Before beginning, take a deep breath and think carefully. You will need to use high effort in order to be accurate and help me get the correct answers. Showing all of your work and steps is essential!

53 changes: 45 additions & 8 deletions server.js
@@ -560,7 +560,7 @@ app.post('/setChat', async (req, res) => {
// Endpoint to list chat files
app.get('/listChats', (req, res) => {
const folderPath = path.join(__dirname, 'public/uploads/chats');
fs.readdirSync(folderPath, (err, files) => {
fs.readdir(folderPath, (err, files) => {
if (err) {
console.error('Error reading chat files:', err);
res.status(500).json({ message: 'Failed to list chat files', error: err.message });
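This is the functional fix in the commit: fs.readdirSync does not take a completion callback (it returns the file list directly), so the (err, files) handler above was never invoked; fs.readdir restores the expected callback contract. For comparison, a promise-based version of the same endpoint might look like the sketch below, keeping the same folder path and response shape; this is not what the commit ships.

```js
// Sketch of an equivalent endpoint using fs.promises instead of the callback API.
// Assumes the same Express `app` and `path` setup already present in server.js.
const fsp = require('fs').promises;

app.get('/listChats', async (req, res) => {
  const folderPath = path.join(__dirname, 'public/uploads/chats');
  try {
    const files = await fsp.readdir(folderPath); // rejects if the folder is missing
    res.json({ files });                         // same { files: [...] } shape the client reads
  } catch (err) {
    console.error('Error reading chat files:', err);
    res.status(500).json({ message: 'Failed to list chat files', error: err.message });
  }
});
```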
@@ -772,11 +772,15 @@ async function titleChat(history, tokens, cost) {
// Define the full file path
const filePath = path.join(folderPath, `${title}.txt`);
let chatText;
chatText = `${history}\n---\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $${cost.toFixed(6)}\n\n-----\n\nCONTEXT: Above, you may be shown a conversation between the User -- a Human -- and an AI Assistant (yourself). If not, a summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
/*
if (summariesOnly) {
console.log("summaries only ")
chatText = `${history}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $${cost.toFixed(6)}\n\n-----\n\nCONTEXT: Below is a summary of the conversation between the User -- a Human -- and an AI Assistant (yourself). INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
} else {
chatText = `${history}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $${cost.toFixed(6)}\n\n-----\n\nCONTEXT: Above is the conversation between the User -- a Human -- and an AI Assistant (yourself). A summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
chatText = `${history}\n---\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $${cost.toFixed(6)}\n\n-----\n\nCONTEXT: Above, you may be shown a conversation between the User -- a Human -- and an AI Assistant (yourself). If not, summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
}
*/
fs.writeFileSync(filePath, chatText);

// test...
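With the summariesOnly branch commented out, titleChat and nameChat each inline a nearly identical continuation prompt, differing only in the history separator and the cost label. A small shared helper could keep the two in sync; buildChatText below is a sketch only and does not exist in the repo, but it reuses the exact wording from the diff.

```js
// Hypothetical helper (not in the repo): share the long continuation prompt
// between titleChat and nameChat instead of duplicating it inline.
function buildChatText(history, totalTokens, costLabel, summary, separator = '\n---\n') {
  return `${history}${separator}Total Tokens: ${totalTokens}\nTotal Cost: ${costLabel}\n\n-----\n\n` +
    `CONTEXT: Above, you may be shown a conversation between the User -- a Human -- and an AI Assistant (yourself). ` +
    `If not, a summary of said conversation is below for you to reference. ` +
    `INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off ` +
    `and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; ` +
    `simply answer the User's next query in the context of the above Chat, inferring the Context and asking for ` +
    `additional information if necessary.\n---\nConversation Summary: ${summary}`;
}

// titleChat: chatText = buildChatText(history, tokens.totalTokens, `$${cost.toFixed(6)}`, summary);
// nameChat:  chatText = buildChatText(chatHistory, tokens.totalTokens, '$0.00!', summary, '\n\n');
```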
@@ -1105,6 +1109,7 @@ async function initializeClaudeInstructions() {
systemMessage = await continueConversation(chosenChat);
}
}
return systemMessage;
}

// Call this function when the server starts
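The added return matters because the /message handler later in this diff awaits this function and assigns the result; without it, the awaited value would have been undefined for Claude models. The relevant call, as it already appears in the handler:

```js
// Already present in the /message handler below; shown here for context.
if (modelID.startsWith('claude')) {
  systemMessage = await initializeClaudeInstructions(); // now returns the instructions
}
```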
@@ -1461,11 +1466,14 @@ async function nameChat(chatHistory, tokens) {
// Define the full file path
const filePath = path.join(folderPath, `${title}.txt`);
let chatText;
chatText = `${chatHistory}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $0.00!\n\n-----\n\nCONTEXT: Above, you may be shown a conversation between the User -- a Human -- and an AI Assistant (yourself). If not, a summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
/*
if (summariesOnly) {
chatText = `${chatHistory}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $0.00!\n\n-----\n\nCONTEXT: Below is a summary of the conversation between the User -- a Human -- and an AI Assistant (yourself). INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
} else {
chatText = `${chatHistory}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $0.00!\n\n-----\n\nCONTEXT: Below is a summary of the conversation between the User -- a Human -- and an AI Assistant (yourself). A summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
chatText = `${chatHistory}\n\nTotal Tokens: ${tokens.totalTokens}\nTotal Cost: $0.00!\n\n-----\n\nCONTEXT: Above, you may be shown a conversation between the User -- a Human -- and an AI Assistant (yourself). If not, a summary of said conversation is below for you to reference. INSTRUCTION: The User will send a message/prompt with the expectation that you will pick up where you left off and seamlessly continue the conversation. Do not give any indication that the conversation had paused or resumed; simply answer the User's next query in the context of the above Chat, inferring the Context and asking for additional information if necessary.\n---\nConversation Summary: ${summary}`;
}
*/
fs.writeFileSync(filePath, chatText);

// test...
@@ -1492,6 +1500,27 @@ async function imageURLToBase64(url) {
}
}

// Function to convert an image URL to base64
async function imageURLToBase64(url) {
try {
const response = await axios.get(url, {
responseType: 'arraybuffer' // Ensure the image data is received in the correct format
});

// Extract the MIME type from the response headers
const contentType = response.headers['content-type'];

// Convert the image data to base64
const base64Image = Buffer.from(response.data).toString('base64');

// Return the base64-encoded image with the appropriate MIME type
return `data:${contentType};base64,${base64Image}`;
} catch (error) {
console.error('Error fetching image:', error);
return null; // Return null if there is an error
}
}

let imageName;
let uploadedImagePath;
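The added helper downloads an image with axios and returns it as a data URI, or null on failure. Note that server.js appears to already define imageURLToBase64 immediately above this hunk; with two function declarations of the same name, the later one takes effect, so behavior is unchanged. A minimal usage sketch follows; the URL is a placeholder, not one used by the app.

```js
// Illustrative only: the URL below is a placeholder.
async function demo() {
  const dataUri = await imageURLToBase64('https://example.com/photo.png');
  if (dataUri) {
    // e.g. "data:image/png;base64,iVBORw0KGgo..." ready to embed in a vision request
    console.log(dataUri.slice(0, 40));
  }
}
demo();
```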

@@ -1722,6 +1751,7 @@ let headers;
let apiUrl = '';
let data;
let claudeHistory = [];
let epochs = 0;

app.post('/message', async (req, res) => {
console.log("req.file:", req.file); // Check if the file is received
@@ -1779,12 +1809,17 @@ if (user_message === "Bye!") {
// Assuming modelID is declared globally and available here
// Determine the structure of user_input.content based on modelID
if (modelID.startsWith('gpt') || modelID.startsWith('claude')) {

if (modelID.startsWith('gpt')) {
systemMessage = await initializeConversationHistory();
} else if (modelID.startsWith('claude')) {
systemMessage = await initializeClaudeInstructions();
if (epochs === 0) {
if (modelID.startsWith('gpt')) {
systemMessage = await initializeConversationHistory();
epochs = epochs + 1;
} else if (modelID.startsWith('claude')) {
systemMessage = await initializeClaudeInstructions();
epochs = epochs + 1;
}
}
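The new epochs counter makes the system-prompt initialization run only once per server process instead of on every /message request. An equivalent way to express that one-shot behavior is to cache the promise, sketched below; getSystemMessage is hypothetical and not part of the commit.

```js
// Sketch only: one-time initialization via a cached promise instead of a counter.
let systemMessagePromise = null;

function getSystemMessage(modelID) {
  if (!systemMessagePromise) {
    systemMessagePromise = modelID.startsWith('claude')
      ? initializeClaudeInstructions()
      : initializeConversationHistory();
  }
  return systemMessagePromise; // every later call reuses the same resolved value
}

// In the handler: systemMessage = await getSystemMessage(modelID);
```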


// Add text content if present
if (user_message) {
if (modelID.startsWith('gpt')) {
@@ -1819,8 +1854,10 @@ if (modelID.startsWith('gpt') || modelID.startsWith('claude')) {
let base64Image;
// If req.file is defined, it means the image is uploaded as a file
if (req.file) {
console.log("first if", req.file.path)
base64Image = imageToBase64(req.file.path);
} else {
console.log("second if", req.body.image)
// If req.file is not present, fetch the image from the URL
base64Image = await imageURLToBase64(req.body.image);
}
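Since imageURLToBase64 returns null when the fetch fails, a guard right after this if/else would keep a bad upload or URL from reaching the model API. This is a sketch only, not part of the commit.

```js
// Sketch: bail out early if neither the upload nor the URL produced image data.
if (!base64Image) {
  return res.status(400).json({ message: 'Could not read the provided image' });
}
```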
