Update gpt/index.html
This commit is contained in:
+259
-318
@@ -379,345 +379,286 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
/* ============================================
|
||||
AI CONFIGURATION
|
||||
============================================ */
|
||||
let aiConfig = {
|
||||
enabled: false,
|
||||
endpoint: '',
|
||||
model: '',
|
||||
apiKey: '',
|
||||
type: 'ollama' // ollama, openai, lmstudio, gpt4all
|
||||
};
|
||||
<script>
|
||||
/* ============================================
|
||||
AI CONFIGURATION
|
||||
============================================ */
|
||||
let aiConfig = {
|
||||
enabled: false,
|
||||
endpoint: '',
|
||||
model: '',
|
||||
apiKey: '',
|
||||
type: 'openai'
|
||||
};
|
||||
|
||||
// Load saved config
|
||||
const savedConfig = localStorage.getItem('neuralTerminalConfig');
|
||||
if (savedConfig) {
|
||||
aiConfig = JSON.parse(savedConfig);
|
||||
}
|
||||
// Load saved config
|
||||
const savedConfig = localStorage.getItem('neuralTerminalConfig');
|
||||
if (savedConfig) {
|
||||
aiConfig = JSON.parse(savedConfig);
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
TERMINAL STATE
|
||||
============================================ */
|
||||
const output = document.getElementById('output');
|
||||
const inputField = document.getElementById('inputField');
|
||||
const statusDot = document.getElementById('statusDot');
|
||||
const statusText = document.getElementById('statusText');
|
||||
const timeDisplay = document.getElementById('timeDisplay');
|
||||
const configPanel = document.getElementById('configPanel');
|
||||
/* ============================================
|
||||
TERMINAL STATE
|
||||
============================================ */
|
||||
const output = document.getElementById('output');
|
||||
const inputField = document.getElementById('inputField');
|
||||
const statusDot = document.getElementById('statusDot');
|
||||
const statusText = document.getElementById('statusText');
|
||||
const timeDisplay = document.getElementById('timeDisplay');
|
||||
const configPanel = document.getElementById('configPanel');
|
||||
|
||||
let commandHistory = [];
|
||||
let historyIndex = -1;
|
||||
let isProcessing = false;
|
||||
let commandHistory = [];
|
||||
let historyIndex = -1;
|
||||
let isProcessing = false;
|
||||
|
||||
/* ============================================
|
||||
UI FUNCTIONS
|
||||
============================================ */
|
||||
function updateStatus() {
|
||||
if (aiConfig.enabled) {
|
||||
statusDot.className = 'status-dot online';
|
||||
statusText.textContent = 'ONLINE';
|
||||
} else {
|
||||
statusDot.className = 'status-dot offline';
|
||||
statusText.textContent = 'OFFLINE';
|
||||
}
|
||||
}
|
||||
/* ============================================
|
||||
UI
|
||||
============================================ */
|
||||
/* Reflect the AI connection state in the header status indicator. */
function updateStatus() {
  const online = aiConfig.enabled;
  statusDot.className = online ? 'status-dot online' : 'status-dot offline';
  statusText.textContent = online ? 'ONLINE' : 'OFFLINE';
}
|
||||
|
||||
function updateTime() {
|
||||
const now = new Date();
|
||||
timeDisplay.textContent = now.toLocaleTimeString('en-US', { hour12: false });
|
||||
}
|
||||
/* Refresh the header clock with the current 24-hour local time. */
function updateTime() {
  timeDisplay.textContent = new Date().toLocaleTimeString('en-US', { hour12: false });
}
|
||||
|
||||
function addLine(text, type = '') {
|
||||
const line = document.createElement('div');
|
||||
line.className = 'output-line' + (type ? ' ' + type : '');
|
||||
line.textContent = text;
|
||||
output.appendChild(line);
|
||||
output.scrollTop = output.scrollHeight;
|
||||
}
|
||||
/* Append one line to the terminal output and keep it scrolled to the
   bottom. `type` adds an extra CSS class ('user', 'ai', 'error',
   'system', ...). Uses textContent, so `text` is never parsed as HTML. */
function addLine(text, type = '') {
  const line = document.createElement('div');
  let cls = 'output-line';
  if (type) {
    cls += ' ' + type;
  }
  line.className = cls;
  line.textContent = text;
  output.appendChild(line);
  output.scrollTop = output.scrollHeight;
}
|
||||
|
||||
function clearOutput() {
|
||||
output.innerHTML = '';
|
||||
addLine('Screen cleared', 'system');
|
||||
}
|
||||
/* Wipe the terminal output area, then confirm with a system message. */
function clearOutput() {
  output.innerHTML = '';
  addLine('Screen cleared', 'system');
}
|
||||
|
||||
/* ============================================
|
||||
CONFIG PANEL
|
||||
============================================ */
|
||||
function openConfig() {
|
||||
document.getElementById('endpointInput').value = aiConfig.endpoint;
|
||||
document.getElementById('modelInput').value = aiConfig.model;
|
||||
document.getElementById('apiKeyInput').value = aiConfig.apiKey;
|
||||
configPanel.classList.add('visible');
|
||||
}
|
||||
/* ============================================
|
||||
CONFIG PANEL
|
||||
============================================ */
|
||||
/* Populate the config panel inputs from the current aiConfig, then show it. */
function openConfig() {
  const fields = {
    endpointInput: aiConfig.endpoint,
    modelInput: aiConfig.model,
    apiKeyInput: aiConfig.apiKey,
  };
  for (const [id, value] of Object.entries(fields)) {
    document.getElementById(id).value = value;
  }
  configPanel.classList.add('visible');
}
|
||||
|
||||
function closeConfig() {
|
||||
configPanel.classList.remove('visible');
|
||||
inputField.focus();
|
||||
}
|
||||
/* Hide the config panel and return focus to the terminal input. */
function closeConfig() {
  configPanel.classList.remove('visible');
  inputField.focus();
}
|
||||
|
||||
/* Read the config panel fields into aiConfig, persist it to
   localStorage, and update the UI. The terminal counts as "enabled"
   once both an endpoint and a model name are present. */
function saveConfig() {
  const read = (id) => document.getElementById(id).value.trim();
  aiConfig.endpoint = read('endpointInput');
  aiConfig.model = read('modelInput');
  aiConfig.apiKey = read('apiKeyInput');
  // Trimmed strings, so plain truthiness matches the !== '' checks.
  aiConfig.enabled = Boolean(aiConfig.endpoint && aiConfig.model);

  localStorage.setItem('neuralTerminalConfig', JSON.stringify(aiConfig));
  updateStatus();
  closeConfig();
  addLine('Configuration saved', 'system');
}
|
||||
|
||||
async function testConnection() {
|
||||
const endpoint = document.getElementById('endpointInput').value.trim();
|
||||
const model = document.getElementById('modelInput').value.trim();
|
||||
/* ============================================
|
||||
TEST CONNECTION (FIXED)
|
||||
============================================ */
|
||||
async function testConnection() {
|
||||
const endpoint = document.getElementById('endpointInput').value.trim();
|
||||
|
||||
if (!endpoint || !model) {
|
||||
alert('Please enter endpoint and model name');
|
||||
return;
|
||||
}
|
||||
if (!endpoint) {
|
||||
alert('Please enter endpoint');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Try a simple request based on detected type
|
||||
const testEndpoint = endpoint.replace('/chat', '/tags').replace('/completions', '/models');
|
||||
const response = await fetch(testEndpoint, {
|
||||
method: 'GET',
|
||||
headers: aiConfig.apiKey ? { 'Authorization': 'Bearer ' + aiConfig.apiKey } : {}
|
||||
});
|
||||
try {
|
||||
// ONLY OpenAI-style test
|
||||
const testUrl = endpoint.replace('/chat/completions', '/models');
|
||||
|
||||
if (response.ok) {
|
||||
alert('Connection successful!');
|
||||
} else {
|
||||
alert('Connection failed: ' + response.status);
|
||||
}
|
||||
} catch (e) {
|
||||
alert('Connection error: ' + e.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Preset handling
|
||||
document.getElementById('presetSelect').addEventListener('change', function() {
|
||||
const presets = {
|
||||
ollama: { endpoint: 'http://localhost:11434/api/chat', model: 'llama2', type: 'ollama' },
|
||||
lmstudio: { endpoint: 'http://localhost:1234/v1/chat/completions', model: 'local-model', type: 'openai' },
|
||||
gpt4all: { endpoint: 'http://localhost:4891/v1/chat/completions', model: 'gpt4all-model', type: 'openai' },
|
||||
openai: { endpoint: 'https://api.openai.com/v1/chat/completions', model: 'gpt-4o-mini', type: 'openai' },
|
||||
claude: { endpoint: 'https://api.anthropic.com/v1/messages', model: 'claude-sonnet-4-20250514', type: 'claude' }
|
||||
};
|
||||
|
||||
const preset = presets[this.value];
|
||||
if (preset) {
|
||||
document.getElementById('endpointInput').value = preset.endpoint;
|
||||
document.getElementById('modelInput').value = preset.model;
|
||||
aiConfig.type = preset.type;
|
||||
}
|
||||
const response = await fetch(testUrl, {
|
||||
method: 'GET',
|
||||
headers: aiConfig.apiKey
|
||||
? { 'Authorization': 'Bearer ' + aiConfig.apiKey }
|
||||
: {}
|
||||
});
|
||||
|
||||
/* ============================================
|
||||
HELPER FUNCTIONS
|
||||
============================================ */
|
||||
function getFormattedDate() {
|
||||
const now = new Date();
|
||||
const dd = String(now.getDate()).padStart(2, '0');
|
||||
const mm = String(now.getMonth() + 1).padStart(2, '0');
|
||||
const yyyy = now.getFullYear();
|
||||
const HH = String(now.getHours()).padStart(2, '0');
|
||||
const MM = String(now.getMinutes()).padStart(2, '0');
|
||||
const SS = String(now.getSeconds()).padStart(2, '0');
|
||||
return `${dd}/${mm}/${yyyy}:${HH}/${MM}/${SS}`;
|
||||
if (response.ok) {
|
||||
alert('Connection successful!');
|
||||
} else {
|
||||
alert('Connection failed: ' + response.status);
|
||||
}
|
||||
} catch (e) {
|
||||
alert('Connection error: ' + e.message);
|
||||
}
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
PRESETS (CLEAN)
|
||||
============================================ */
|
||||
/* ============================================
   PRESETS
   ============================================ */
/* Fill endpoint/model from a named preset when the dropdown changes.
   Also records the wire-format type used by queryAI. */
document.getElementById('presetSelect').addEventListener('change', function () {
  const presets = {
    local: {
      endpoint: 'http://localhost:4891/v1/chat/completions',
      model: 'local-model',
      type: 'openai',
    },
    openai: {
      endpoint: 'https://api.openai.com/v1/chat/completions',
      model: 'gpt-4o-mini',
      type: 'openai',
    },
    claude: {
      endpoint: 'https://api.anthropic.com/v1/messages',
      model: 'claude-sonnet-4-20250514',
      type: 'claude',
    },
  };

  const preset = presets[this.value];
  if (!preset) return;

  document.getElementById('endpointInput').value = preset.endpoint;
  document.getElementById('modelInput').value = preset.model;
  aiConfig.type = preset.type;
});
|
||||
|
||||
/* ============================================
|
||||
HELPERS
|
||||
============================================ */
|
||||
/* Current local date/time formatted as "DD/MM/YYYY:HH/MM/SS" (zero-padded). */
function getFormattedDate() {
  const now = new Date();
  const pad = (v) => String(v).padStart(2, '0');
  const datePart = [pad(now.getDate()), pad(now.getMonth() + 1), now.getFullYear()].join('/');
  const timePart = [pad(now.getHours()), pad(now.getMinutes()), pad(now.getSeconds())].join('/');
  return `${datePart}:${timePart}`;
}
|
||||
|
||||
/* One-line summary of the connection settings, used by the "status"
   keyword expansion. */
function getStatusInfo() {
  const state = aiConfig.enabled ? 'ONLINE' : 'OFFLINE';
  const endpoint = aiConfig.endpoint || 'none';
  const model = aiConfig.model || 'none';
  return `[Connection: ${state}, Endpoint: ${endpoint}, Model: ${model}]`;
}
|
||||
|
||||
/* Expand the "date" and "status" keywords (whole word, any case) in a
   message before it is sent to the AI backend. */
function preprocessMessage(message) {
  const withDate = message.replace(/\bdate\b/gi, `date (${getFormattedDate()})`);
  return withDate.replace(/\bstatus\b/gi, `status ${getStatusInfo()}`);
}
|
||||
|
||||
/* ============================================
|
||||
AI CALL (OPENAI ONLY)
|
||||
============================================ */
|
||||
async function queryAI(message) {
|
||||
if (!aiConfig.enabled) {
|
||||
addLine('AI is offline. Use config to connect.', 'error');
|
||||
return;
|
||||
}
|
||||
|
||||
isProcessing = true;
|
||||
|
||||
const processedMessage = preprocessMessage(message);
|
||||
|
||||
try {
|
||||
const headers = { 'Content-Type': 'application/json' };
|
||||
|
||||
if (aiConfig.apiKey) {
|
||||
headers['Authorization'] = 'Bearer ' + aiConfig.apiKey;
|
||||
}
|
||||
|
||||
function getStatusInfo() {
|
||||
return `[Connection: ${aiConfig.enabled ? 'ONLINE' : 'OFFLINE'}, Endpoint: ${aiConfig.endpoint || 'none'}, Model: ${aiConfig.model || 'none'}]`;
|
||||
}
|
||||
|
||||
function preprocessMessage(message) {
|
||||
// Replace "date" keyword with actual date (case insensitive, whole word)
|
||||
let processed = message.replace(/\bdate\b/gi, `date (${getFormattedDate()})`);
|
||||
// Replace "status" keyword with actual status info (case insensitive, whole word)
|
||||
processed = processed.replace(/\bstatus\b/gi, `status ${getStatusInfo()}`);
|
||||
return processed;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
AI COMMUNICATION
|
||||
============================================ */
|
||||
/* ============================================
   AI COMMUNICATION
   ============================================ */
/* Send `message` to the configured backend and print the reply.
   Wire format is selected by aiConfig.type:
     - 'ollama'  : POST {model, messages, stream:false}; reply in data.message.content
     - 'claude'  : Anthropic Messages API; reply in data.content[0].text
     - otherwise : OpenAI-compatible chat completions; reply in data.choices[0].message.content
   Sets isProcessing for the duration so the Enter handler ignores input. */
async function queryAI(message) {
  if (!aiConfig.enabled) {
    addLine('', '');
    addLine('The AI you are trying to reach is not online at the moment.', 'error');
    addLine('', '');
    addLine('To connect an AI backend:', 'system');
    addLine(' 1. Type "config" to open settings', 'system');
    addLine(' 2. Choose a preset or enter custom endpoint', 'system');
    addLine(' 3. Supported: Ollama, LM Studio, GPT4All, OpenAI, Claude', 'system');
    return;
  }

  isProcessing = true;

  // Preprocess message to replace date/status keywords
  const processedMessage = preprocessMessage(message);

  try {
    let response;

    if (aiConfig.type === 'ollama') {
      response = await fetch(aiConfig.endpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: aiConfig.model,
          messages: [{ role: 'user', content: processedMessage }],
          stream: false
        })
      });

      const data = await response.json();
      if (data.message && data.message.content) {
        addLine(data.message.content, 'ai');
      } else if (data.error) {
        addLine('Error: ' + data.error, 'error');
      } else {
        // BUGFIX: an unrecognized payload previously produced no output at all
        addLine('Invalid response from server', 'error');
      }

    } else if (aiConfig.type === 'claude') {
      // Claude/Anthropic API
      response = await fetch(aiConfig.endpoint, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'x-api-key': aiConfig.apiKey,
          'anthropic-version': '2023-06-01',
          'anthropic-dangerous-direct-browser-access': 'true'
        },
        body: JSON.stringify({
          model: aiConfig.model,
          max_tokens: 1024,
          messages: [{ role: 'user', content: processedMessage }]
        })
      });

      const data = await response.json();
      if (data.content && data.content[0]) {
        addLine(data.content[0].text, 'ai');
      } else if (data.error) {
        addLine('Error: ' + (data.error.message || data.error), 'error');
      } else {
        addLine('Invalid response from server', 'error');
      }

    } else {
      // OpenAI-compatible API (LM Studio, GPT4All, OpenAI, etc.)
      const headers = { 'Content-Type': 'application/json' };
      if (aiConfig.apiKey) {
        headers['Authorization'] = 'Bearer ' + aiConfig.apiKey;
      }

      response = await fetch(aiConfig.endpoint, {
        method: 'POST',
        headers: headers,
        body: JSON.stringify({
          model: aiConfig.model,
          messages: [{ role: 'user', content: processedMessage }],
          stream: false
        })
      });

      const data = await response.json();
      if (data.choices && data.choices[0]) {
        addLine(data.choices[0].message.content, 'ai');
      } else if (data.error) {
        addLine('Error: ' + (data.error.message || data.error), 'error');
      } else {
        addLine('Invalid response from server', 'error');
      }
    }

  } catch (e) {
    addLine('Connection error: ' + e.message, 'error');
    addLine('Make sure your AI backend is running', 'system');
  } finally {
    // BUGFIX: always clear the busy flag, even if response handling throws,
    // so the terminal never gets stuck ignoring input.
    isProcessing = false;
  }
}
|
||||
|
||||
/* ============================================
|
||||
COMMAND PROCESSING
|
||||
============================================ */
|
||||
/* ============================================
   COMMAND PROCESSING
   ============================================ */
/* Echo the input, record it in history, then either run a built-in
   command (help/config/clear) or forward the text to the AI. */
function processCommand(input) {
  const trimmed = input.trim();
  if (!trimmed) return;

  addLine(trimmed, 'user');
  commandHistory.unshift(trimmed);
  historyIndex = -1;

  switch (trimmed.toLowerCase()) {
    case 'help': {
      // [text, type] pairs printed verbatim.
      const lines = [
        ['', ''],
        ['Available commands:', 'system'],
        [' help - Show this help', 'system'],
        [' config - Configure AI backend', 'system'],
        [' clear - Clear screen', 'system'],
        ['', ''],
        ['Keywords (replaced in messages to AI):', 'system'],
        [' date - Replaced with current date/time', 'system'],
        [' status - Replaced with connection info', 'system'],
        ['', ''],
        ['Any other input is sent to the AI', 'system'],
      ];
      for (const [text, type] of lines) {
        addLine(text, type);
      }
      break;
    }

    case 'config':
      openConfig();
      break;

    case 'clear':
      clearOutput();
      break;

    default:
      queryAI(trimmed);
  }
}
|
||||
|
||||
/* ============================================
|
||||
INPUT HANDLING
|
||||
============================================ */
|
||||
inputField.addEventListener('keydown', function(e) {
|
||||
if (e.key === 'Enter' && !isProcessing) {
|
||||
processCommand(this.value);
|
||||
this.value = '';
|
||||
} else if (e.key === 'ArrowUp') {
|
||||
e.preventDefault();
|
||||
if (historyIndex < commandHistory.length - 1) {
|
||||
historyIndex++;
|
||||
this.value = commandHistory[historyIndex];
|
||||
}
|
||||
} else if (e.key === 'ArrowDown') {
|
||||
e.preventDefault();
|
||||
if (historyIndex > 0) {
|
||||
historyIndex--;
|
||||
this.value = commandHistory[historyIndex];
|
||||
} else {
|
||||
historyIndex = -1;
|
||||
this.value = '';
|
||||
}
|
||||
}
|
||||
const response = await fetch(aiConfig.endpoint, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
model: aiConfig.model,
|
||||
messages: [{ role: 'user', content: processedMessage }],
|
||||
stream: false
|
||||
})
|
||||
});
|
||||
|
||||
/* ============================================
|
||||
INITIALIZATION
|
||||
============================================ */
|
||||
updateStatus();
|
||||
updateTime();
|
||||
setInterval(updateTime, 1000);
|
||||
const data = await response.json();
|
||||
|
||||
if (data.choices && data.choices[0]) {
|
||||
addLine(data.choices[0].message.content, 'ai');
|
||||
} else if (data.error) {
|
||||
addLine('Error: ' + (data.error.message || data.error), 'error');
|
||||
} else {
|
||||
addLine('Invalid response from server', 'error');
|
||||
}
|
||||
} catch (e) {
|
||||
addLine('Connection error: ' + e.message, 'error');
|
||||
}
|
||||
|
||||
isProcessing = false;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
COMMANDS
|
||||
============================================ */
|
||||
/* ============================================
   COMMANDS
   ============================================ */
/* Echo the input, record it in history, then dispatch: built-in
   commands run locally, anything else goes to the AI. */
function processCommand(input) {
  const trimmed = input.trim();
  if (!trimmed) return;

  addLine(trimmed, 'user');
  commandHistory.unshift(trimmed);
  historyIndex = -1;

  const command = trimmed.toLowerCase();
  if (command === 'help') {
    addLine('help | config | clear | date | status', 'system');
  } else if (command === 'config') {
    openConfig();
  } else if (command === 'clear') {
    clearOutput();
  } else if (command === 'date') {
    addLine(getFormattedDate(), 'system');
  } else if (command === 'status') {
    addLine(getStatusInfo(), 'system');
  } else {
    queryAI(trimmed);
  }
}
|
||||
|
||||
/* ============================================
|
||||
INPUT
|
||||
============================================ */
|
||||
/* ============================================
   INPUT
   ============================================ */
/* Run the current line on Enter; ignored while a request is in flight. */
inputField.addEventListener('keydown', (e) => {
  if (e.key !== 'Enter' || isProcessing) return;
  processCommand(inputField.value);
  inputField.value = '';
});
|
||||
|
||||
/* ============================================
|
||||
INIT
|
||||
============================================ */
|
||||
updateStatus();
|
||||
updateTime();
|
||||
setInterval(updateTime, 1000);
|
||||
inputField.focus();
|
||||
|
||||
document.addEventListener('click', (e) => {
|
||||
if (!configPanel.contains(e.target)) {
|
||||
inputField.focus();
|
||||
|
||||
// Click anywhere to focus input
|
||||
// Click anywhere outside the config panel to refocus the terminal input
document.addEventListener('click', (e) => {
  if (configPanel.contains(e.target)) return;
  inputField.focus();
});
|
||||
</script>
|
||||
}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
Reference in New Issue
Block a user