// SnappyLLM/plugins/llm-prompts/js/llm-prompts.js
// 148 lines · 5.6 KiB · JavaScript
// LLM Prompts Plugin for SnappyMail
// Provides UI and API interaction for AI-driven email writing prompts
(function () {
'use strict';
// Initialize plugin when DOM is ready
document.addEventListener('DOMContentLoaded', function () {
if (typeof rl !== 'undefined') {
rl.addSettingsViewModel(
'LLMPromptsSettings',
'LLMPromptsSettings',
'LLM Prompts',
'Settings for AI-driven email writing prompts powered by Grok 3',
true
);
// Add UI for Smart Prompts in compose window
rl.addHook('view-model', function (vm) {
if (vm.viewModelName === 'ComposeViewModel') {
initComposeUI(vm);
}
});
}
});
/**
 * Add the "Smart Prompts" button to the compose window toolbar.
 * @param {Object} composeVM - SnappyMail compose view-model, forwarded to
 *   the sidebar so generated drafts target this compose instance.
 */
function initComposeUI(composeVM) {
	const toolbar = document.querySelector('.compose-toolbar');
	if (!toolbar) {
		return; // Toolbar not rendered; nothing to attach to.
	}
	// The view-model hook can fire more than once; avoid duplicate buttons.
	if (toolbar.querySelector('.smart-prompts')) {
		return;
	}
	const smartPromptsButton = document.createElement('button');
	// Explicit type: inside a form, buttons default to type="submit".
	smartPromptsButton.type = 'button';
	smartPromptsButton.className = 'button smart-prompts';
	// textContent, not innerHTML — the label is plain text.
	smartPromptsButton.textContent = 'Smart Prompts';
	smartPromptsButton.onclick = function () {
		toggleSmartPromptsSidebar(composeVM);
	};
	toolbar.appendChild(smartPromptsButton);
}
/**
 * Show/hide the Smart Prompts sidebar, creating it lazily on first use.
 * @param {Object} composeVM - Compose view-model, forwarded to setupPrompts.
 */
function toggleSmartPromptsSidebar(composeVM) {
	let sidebar = document.querySelector('.smart-prompts-sidebar');
	if (sidebar) {
		// Already built: flip visibility.
		sidebar.style.display = sidebar.style.display === 'none' ? 'block' : 'none';
		return;
	}
	const composeBox = document.querySelector('.compose-box');
	if (!composeBox) {
		// Guard: original code called appendChild on a possibly-null
		// querySelector result, which throws when no compose box exists.
		return;
	}
	sidebar = document.createElement('div');
	sidebar.className = 'smart-prompts-sidebar';
	sidebar.innerHTML = getPromptsHTML(); // Static, plugin-authored markup.
	composeBox.appendChild(sidebar);
	// Wire prompt-selection and generate-button handlers.
	setupPrompts(sidebar, composeVM);
}
/**
 * Build the static HTML fragment for the Smart Prompts sidebar: four
 * prompt categories (each <li> carries its full prompt text in a
 * `data-prompt` attribute), plus a free-form context textarea and a
 * "Generate Draft" button.
 *
 * NOTE(review): the inline onclick="generateDraft(this)" below resolves
 * `generateDraft` in the GLOBAL scope, but that function is declared
 * inside this file's IIFE — clicking the button likely throws a
 * ReferenceError. Confirm, and prefer wiring the listener in
 * setupPrompts() instead of an inline attribute.
 *
 * @returns {string} HTML fragment consumed via innerHTML by the caller.
 */
function getPromptsHTML() {
// HTML for prompt categories and suggestions
return `
<h3>Smart Prompts (Grok 3)</h3>
<div class="prompt-category">
<h4>Professional</h4>
<ul>
<li data-prompt="Write a concise follow-up email to a client">Follow-up Email</li>
<li data-prompt="Draft a professional proposal email">Proposal Email</li>
</ul>
</div>
<div class="prompt-category">
<h4>Creative</h4>
<ul>
<li data-prompt="Compose a friendly invitation for a team event">Team Event Invite</li>
</ul>
</div>
<div class="prompt-category">
<h4>Assistance</h4>
<ul>
<li data-prompt="Rephrase this email to sound more polite">Make Polite</li>
</ul>
</div>
<div class="prompt-category">
<h4>Quick Replies</h4>
<ul>
<li data-prompt="Generate a thank-you response for a received email">Thank You Reply</li>
</ul>
</div>
<div class="context-input">
<textarea placeholder="Add context or modify prompt..." rows="3"></textarea>
<button onclick="generateDraft(this)">Generate Draft</button>
</div>
`;
}
/**
 * Wire up interactivity inside the Smart Prompts sidebar.
 *
 * Clicking a prompt <li> copies its `data-prompt` text into the context
 * textarea and highlights the selection. The "Generate Draft" button is
 * bound here with a real listener: the markup's inline
 * onclick="generateDraft(this)" cannot work because `generateDraft` is
 * scoped inside this file's IIFE, not a global, so clicking it would
 * throw a ReferenceError. The stale inline attribute is removed.
 *
 * @param {Element} sidebar - Sidebar element built from getPromptsHTML().
 * @param {Object} composeVM - Compose view-model (unused here today; kept
 *   for interface stability with callers).
 */
function setupPrompts(sidebar, composeVM) {
	const prompts = sidebar.querySelectorAll('.prompt-category li');
	prompts.forEach((prompt) => {
		prompt.onclick = function () {
			sidebar.querySelector('.context-input textarea').value =
				this.getAttribute('data-prompt');
			// Single-selection highlight: clear all, then mark this one.
			prompts.forEach((p) => p.classList.remove('selected'));
			this.classList.add('selected');
		};
	});
	// Bind the generate button properly and drop the broken inline handler.
	const generateButton = sidebar.querySelector('.context-input button');
	if (generateButton) {
		generateButton.removeAttribute('onclick');
		generateButton.addEventListener('click', () => generateDraft(generateButton));
	}
}
/**
 * Collect the prompt plus the current message context, request a draft
 * from the backend, and insert the result into the compose body.
 * @param {Element} button - The "Generate Draft" button inside the sidebar.
 */
function generateDraft(button) {
	const sidebar = button.closest('.smart-prompts-sidebar');
	if (!sidebar) {
		return; // Button detached from the sidebar; nothing to do.
	}
	const prompt = sidebar.querySelector('.context-input textarea').value;
	// Guard DOM lookups: compose markup may differ across skins/versions.
	const subjectInput = document.querySelector('.compose-subject input');
	const editor = document.querySelector('.compose-body .editor');
	const subject = subjectInput ? subjectInput.value : '';
	const body = editor ? editor.innerText : '';
	fetchLLMDraft(prompt, subject, body).then((draft) => {
		if (draft && editor) {
			// The draft is untrusted model output: escape HTML special
			// characters BEFORE it reaches innerHTML (the original injected
			// raw text, an XSS vector), then restore line breaks as <br>.
			const escaped = draft
				.replace(/&/g, '&amp;')
				.replace(/</g, '&lt;')
				.replace(/>/g, '&gt;')
				.replace(/"/g, '&quot;');
			editor.innerHTML = escaped.replace(/\n/g, '<br>');
		}
	}).catch((error) => {
		console.error('Error generating draft:', error);
		alert('Failed to generate draft. Please check settings.');
	});
}
/**
 * POST the prompt and current message context to the plugin backend,
 * which performs the actual LLM (Grok 3) call.
 * @param {string} prompt - User-selected or edited prompt text.
 * @param {string} subject - Current compose subject line.
 * @param {string} body - Current compose body as plain text.
 * @returns {Promise<*>} The `result` field of the backend's JSON response.
 * @throws {Error} When the HTTP response status is not ok.
 */
async function fetchLLMDraft(prompt, subject, body) {
	const payload = {
		prompt,
		subject,
		body,
		// NOTE(review): assumes `rl` is defined here; handlers are only
		// created after the `typeof rl !== 'undefined'` startup check —
		// confirm no other call path exists.
		enableContextAnalysis: rl.settingsGet('LLMPromptsSettings.enable_context_analysis')
	};
	const response = await fetch('/?/Api/LLMPrompts/GenerateDraft', {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(payload)
	});
	if (!response.ok) {
		// Include the status code so failures are diagnosable from the console.
		throw new Error(`LLM draft request failed: HTTP ${response.status}`);
	}
	const data = await response.json();
	return data.result;
}
})();