// generated-chat-gpt/main.go
package main

import (
    "bytes"
    "encoding/json"
    "html/template"
    "io"
    "log"
    "net/http"
    "os"
    "time"
)

// Message represents one message in the conversation.
type Message struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

// ChatCompletionRequest is the payload sent to the OpenAI API.
type ChatCompletionRequest struct {
    Model    string    `json:"model"`
    Messages []Message `json:"messages"`
}

// ChatCompletionResponseChoice is a single reply (choice) from the API.
type ChatCompletionResponseChoice struct {
    Message Message `json:"message"`
}

// ChatCompletionResponse represents the API response.
type ChatCompletionResponse struct {
    Choices []ChatCompletionResponseChoice `json:"choices"`
    Error   *struct {
        Message string `json:"message"`
        Type    string `json:"type"`
    } `json:"error,omitempty"`
}

var (
    openaiAPIKey string
    indexTmpl    *template.Template
)

func init() {
    openaiAPIKey = os.Getenv("OPENAI_API_KEY")
    if openaiAPIKey == "" {
        log.Fatal("Environment variable OPENAI_API_KEY is not set")
    }
    // Parse the HTML template with responsive layout updates.
    indexTmpl = template.Must(template.New("index").Parse(indexHTML))
}

func main() {
    http.HandleFunc("/", indexHandler)
    http.HandleFunc("/chat", chatHandler)
    http.HandleFunc("/title", titleHandler)
    addr := ":8080"
    log.Println("Server starting on", addr)
    log.Fatal(http.ListenAndServe(addr, nil))
}
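
// indexHandler renders the embedded single-page chat UI.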
func indexHandler(w http.ResponseWriter, r *http.Request) {
    if err := indexTmpl.Execute(w, nil); err != nil {
        http.Error(w, "Unable to load page", http.StatusInternalServerError)
    }
}
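
// chatHandler forwards the client's conversation to the OpenAI chat completions
// endpoint and returns the assistant's reply as {"reply": "..."}.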
func chatHandler(w http.ResponseWriter, r *http.Request) {
    if r.Method != http.MethodPost {
        http.Error(w, "Only POST allowed", http.StatusMethodNotAllowed)
        return
    }
    var clientRequest struct {
        Messages []Message `json:"messages"`
    }
    if err := json.NewDecoder(r.Body).Decode(&clientRequest); err != nil {
        http.Error(w, "Invalid JSON: "+err.Error(), http.StatusBadRequest)
        return
    }
    apiRequestData := ChatCompletionRequest{
        Model:    "o3-mini",
        Messages: clientRequest.Messages,
    }
    requestBytes, err := json.Marshal(apiRequestData)
    if err != nil {
        http.Error(w, "Error marshalling request: "+err.Error(), http.StatusInternalServerError)
        return
    }
    client := http.Client{Timeout: 60 * time.Second}
    req, err := http.NewRequest("POST", "https://api.openai.com/v1/chat/completions", bytes.NewBuffer(requestBytes))
    if err != nil {
        http.Error(w, "Failed to create request: "+err.Error(), http.StatusInternalServerError)
        return
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer "+openaiAPIKey)
    apiResponse, err := client.Do(req)
    if err != nil {
        http.Error(w, "Error calling OpenAI API: "+err.Error(), http.StatusInternalServerError)
        return
    }
    defer apiResponse.Body.Close()
    bodyBytes, err := io.ReadAll(apiResponse.Body)
    if err != nil {
        http.Error(w, "Error reading API response: "+err.Error(), http.StatusInternalServerError)
        return
    }
    var completionResponse ChatCompletionResponse
    if err := json.Unmarshal(bodyBytes, &completionResponse); err != nil {
        http.Error(w, "Error parsing API response: "+err.Error(), http.StatusInternalServerError)
        return
    }
    if completionResponse.Error != nil {
        http.Error(w, "OpenAI API error: "+completionResponse.Error.Message, http.StatusInternalServerError)
        return
    }
    if len(completionResponse.Choices) == 0 {
        http.Error(w, "No choices returned from OpenAI API", http.StatusInternalServerError)
        return
    }
    responsePayload := map[string]string{
        "reply": completionResponse.Choices[0].Message.Content,
    }
    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(responsePayload)
}
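
// titleHandler asks the model for a short, descriptive title summarizing the
// conversation and returns it as {"title": "..."}.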
func titleHandler(w http.ResponseWriter, r *http.Request) {
    if r.Method != http.MethodPost {
        http.Error(w, "Only POST allowed", http.StatusMethodNotAllowed)
        return
    }
    var clientRequest struct {
        Messages []Message `json:"messages"`
    }
    if err := json.NewDecoder(r.Body).Decode(&clientRequest); err != nil {
        http.Error(w, "Invalid JSON: "+err.Error(), http.StatusBadRequest)
        return
    }
    titleRequestMessages := []Message{
        {
            Role:    "system",
            Content: "You are an AI that generates conversation titles. Based on the following conversation, please provide a short, descriptive title of no more than five words with no extra commentary.",
        },
    }
    titleRequestMessages = append(titleRequestMessages, clientRequest.Messages...)
    apiRequestData := ChatCompletionRequest{
        Model:    "o3-mini",
        Messages: titleRequestMessages,
    }
    requestBytes, err := json.Marshal(apiRequestData)
    if err != nil {
        http.Error(w, "Error marshalling title request: "+err.Error(), http.StatusInternalServerError)
        return
    }
    client := http.Client{Timeout: 30 * time.Second}
    req, err := http.NewRequest("POST", "https://api.openai.com/v1/chat/completions", bytes.NewBuffer(requestBytes))
    if err != nil {
        http.Error(w, "Failed to create title request: "+err.Error(), http.StatusInternalServerError)
        return
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer "+openaiAPIKey)
    apiResponse, err := client.Do(req)
    if err != nil {
        http.Error(w, "Error calling OpenAI API for title: "+err.Error(), http.StatusInternalServerError)
        return
    }
    defer apiResponse.Body.Close()
    bodyBytes, err := io.ReadAll(apiResponse.Body)
    if err != nil {
        http.Error(w, "Error reading title API response: "+err.Error(), http.StatusInternalServerError)
        return
    }
    var completionResponse ChatCompletionResponse
    if err := json.Unmarshal(bodyBytes, &completionResponse); err != nil {
        http.Error(w, "Error parsing title API response: "+err.Error(), http.StatusInternalServerError)
        return
    }
    if completionResponse.Error != nil {
        http.Error(w, "OpenAI API title error: "+completionResponse.Error.Message, http.StatusInternalServerError)
        return
    }
    if len(completionResponse.Choices) == 0 {
        http.Error(w, "No title returned from OpenAI API", http.StatusInternalServerError)
        return
    }
    responsePayload := map[string]string{
        "title": completionResponse.Choices[0].Message.Content,
    }
    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(responsePayload)
}
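
// indexHTML is the single-page UI (layout, styles, and client-side chat logic)
// served by indexHandler.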
const indexHTML = `
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>ChatGPT Clone - Conversations</title>
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <!-- Bootstrap 5 CSS and offcanvas component support -->
  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
  <!-- Highlight.js CSS for code highlighting -->
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/styles/default.min.css">
  <style>
    /* Use full viewport height */
    html, body {
      height: 100%;
      margin: 0;
    }
    /* Main container lays out sidebar and chat side by side; on small screens the sidebar is hidden and replaced by the offcanvas */
    .container-main {
      height: 100vh;
      display: flex;
      flex-direction: row;
    }
    /* Sidebar styles (hidden on small screens) */
    .sidebar {
      width: 25%;
      max-width: 300px;
      background-color: #fff;
      padding: 0.5rem;
      box-shadow: 0 0 10px rgba(0,0,0,0.1);
      overflow-y: auto;
    }
    /* On small devices, hide the sidebar and use offcanvas instead */
    @media (max-width: 767.98px) {
      .sidebar {
        display: none;
      }
    }
    /* Chat container fills remaining space and uses column layout */
    .chat-container {
      flex: 1;
      display: flex;
      flex-direction: column;
      background-color: #fff;
      padding: 1rem;
      box-shadow: 0 0 10px rgba(0,0,0,0.1);
    }
    .chat-header {
      flex-shrink: 0;
    }
    .chat-log {
      flex: 1;
      min-height: 0;
      overflow-y: auto;
      background-color: #eee;
      border: 1px solid #ddd;
      border-radius: 5px;
      padding: 0.5rem;
      margin: 0.5rem 0;
    }
    .chat-input {
      flex-shrink: 0;
    }
    .message {
      padding: 0.75rem;
      border-radius: 5px;
      margin-bottom: 0.5rem;
      white-space: pre-wrap;
    }
    .user { background-color: #d1e7dd; text-align: right; }
    .assistant { background-color: #f8d7da; text-align: left; }
    pre {
      max-width: 100%;
      box-sizing: border-box;
      white-space: pre-wrap;
      overflow-x: auto;
      background-color: #2d2d2d;
      color: #f8f8f2;
      padding: 0.5rem;
      border-radius: 5px;
    }
    .conversation-item {
      padding: 0.5rem;
      cursor: pointer;
      border-bottom: 1px solid #ddd;
      position: relative;
    }
    .conversation-item:hover,
    .conversation-item.active { background-color: #f0f0f0; }
    .delete-btn {
      position: absolute;
      right: 5px;
      top: 5px;
      background: transparent;
      border: none;
      color: #dc3545;
      font-size: 1rem;
    }
    textarea#messageInput {
      resize: none;
      height: 80px;
      overflow-y: auto;
    }
  </style>
</head>
<body>
  <!-- Offcanvas for mobile conversation list -->
  <div class="offcanvas offcanvas-start" tabindex="-1" id="offcanvasConversations" aria-labelledby="offcanvasConversationsLabel">
    <div class="offcanvas-header">
      <h5 class="offcanvas-title" id="offcanvasConversationsLabel">Conversations</h5>
      <button type="button" class="btn-close" data-bs-dismiss="offcanvas" aria-label="Close"></button>
    </div>
    <div class="offcanvas-body" id="offcanvasConversationList" style="overflow-y:auto;"></div>
  </div>
  <div class="container-main">
    <!-- Sidebar for md+ screens -->
    <div class="sidebar d-none d-md-block" id="conversationListContainer">
      <h5>Conversations</h5>
      <button id="newConvoBtn" class="btn btn-sm btn-primary mb-2">New Conversation</button>
      <div id="conversationList"></div>
    </div>
    <!-- Chat container -->
    <div class="chat-container">
      <!-- On small screens, a button to open the conversation offcanvas -->
      <div class="d-block d-md-none mb-2">
        <button class="btn btn-sm btn-secondary" type="button" data-bs-toggle="offcanvas" data-bs-target="#offcanvasConversations">
          Open Conversations
        </button>
        <button id="newConvoBtnMobile" class="btn btn-sm btn-primary ms-2">New Conversation</button>
      </div>
      <div class="chat-header">
        <h4 id="chatTitle">Conversation</h4>
      </div>
      <div id="chatLog" class="chat-log"></div>
      <div class="chat-input">
        <form id="chatForm">
          <div class="input-group">
            <textarea id="messageInput" class="form-control" placeholder="Type your message here" required></textarea>
            <button class="btn btn-primary" type="submit">Send</button>
          </div>
        </form>
      </div>
    </div>
  </div>
  <!-- Include Bootstrap 5 JS bundle (includes Popper and offcanvas support) -->
  <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
  <!-- Marked & Highlight.js libraries -->
  <script src="https://cdnjs.cloudflare.com/ajax/libs/marked/4.3.0/marked.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/highlight.min.js"></script>
  <script>
    marked.setOptions({
      highlight: function(code, lang) {
        if (lang && hljs.getLanguage(lang)) {
          return hljs.highlight(code, { language: lang }).value;
        }
        return hljs.highlightAuto(code).value;
      }
    });

    // --- Conversation Management ---
    let conversations = [];
    let currentConversation = null;

    function generateId() {
      return 'c-' + Date.now() + '-' + Math.floor(Math.random() * 1000);
    }
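
    // Load persisted conversations from localStorage and refresh the sidebar.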
    function loadConversations() {
      const loaded = localStorage.getItem('conversations');
      conversations = loaded ? JSON.parse(loaded) : [];
      renderConversationList();
    }

    function saveConversations() {
      localStorage.setItem('conversations', JSON.stringify(conversations));
    }
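
    // Start a new conversation seeded with the system prompt and make it current.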
    function createNewConversation() {
      const newConvo = {
        id: generateId(),
        title: 'New Conversation',
        messages: [
          { role: "system", content: "You are ChatGPT, a helpful assistant. When providing code, please always wrap the code in three backticks so that it renders correctly." }
        ]
      };
      conversations.push(newConvo);
      currentConversation = newConvo;
      saveConversations();
      renderConversationList();
      renderChatLog();
      document.getElementById('chatTitle').textContent = "Conversation (" + newConvo.id + ")";
    }
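
    // Delete a conversation after confirmation; if it was current, fall back to another (or create one).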
    function deleteConversation(id) {
      if (!confirm("Are you sure you want to delete this conversation?")) return;
      conversations = conversations.filter(convo => convo.id !== id);
      if (currentConversation && currentConversation.id === id) {
        currentConversation = conversations.length > 0 ? conversations[0] : null;
        if (!currentConversation) {
          createNewConversation();
        } else {
          document.getElementById('chatTitle').textContent = "Conversation (" + currentConversation.id + ")";
        }
      }
      saveConversations();
      renderConversationList();
      renderChatLog();
    }

    function renderConversationList() {
      // Render for sidebar (md+)
      const listElem = document.getElementById('conversationList');
      listElem.innerHTML = "";
      // Also render for offcanvas (mobile)
      const offcanvasElem = document.getElementById('offcanvasConversationList');
      offcanvasElem.innerHTML = "";
      conversations.forEach(convo => {
        const createConvoItem = (container) => {
          const div = document.createElement("div");
          div.className = "conversation-item" + (currentConversation && currentConversation.id === convo.id ? " active" : "");
          div.textContent = convo.title || convo.id;
          const deleteBtn = document.createElement("button");
          deleteBtn.className = "delete-btn";
          deleteBtn.innerHTML = "&times;";
          deleteBtn.onclick = function(e) { e.stopPropagation(); deleteConversation(convo.id); };
          div.appendChild(deleteBtn);
          div.onclick = () => {
            currentConversation = convo;
            document.getElementById('chatTitle').textContent = "Conversation (" + convo.id + ")";
            renderConversationList();
            renderChatLog();
            // Hide offcanvas after selection on mobile
            var offcanvas = bootstrap.Offcanvas.getInstance(document.getElementById('offcanvasConversations'));
            if (offcanvas) offcanvas.hide();
          };
          container.appendChild(div);
        };
        createConvoItem(listElem);
        createConvoItem(offcanvasElem);
      });
    }
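
    // Re-render the full message history of the current conversation.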
    function renderChatLog() {
      const chatLog = document.getElementById('chatLog');
      chatLog.innerHTML = "";
      if (!currentConversation) return;
      currentConversation.messages.forEach(msg => {
        appendMessage(msg.role, msg.content, false);
      });
      chatLog.scrollTop = chatLog.scrollHeight;
    }
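
    // Render one message as Markdown; when update is true, also persist it and
    // auto-title the conversation after its first assistant reply.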
    function appendMessage(role, content, update = true) {
      const chatLog = document.getElementById('chatLog');
      const messageElem = document.createElement("div");
      messageElem.className = "message " + role;
      messageElem.innerHTML = marked.parse(content);
      chatLog.appendChild(messageElem);
      chatLog.scrollTop = chatLog.scrollHeight;
      if (update && currentConversation) {
        currentConversation.messages.push({ role: role, content: content });
        if (role === "assistant" && currentConversation.title === "New Conversation") {
          autoTitleConversation();
        }
        saveConversations();
      }
      return messageElem;
    }
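
    // Ask the server's /title endpoint for a short title describing the current conversation.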
    async function autoTitleConversation() {
      try {
        const response = await fetch("/title", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ messages: currentConversation.messages })
        });
        if (!response.ok) throw new Error("Title API error: " + response.status);
        const data = await response.json();
        const newTitle = data.title.trim();
        if (newTitle) {
          currentConversation.title = newTitle;
          document.getElementById('chatTitle').textContent = "Conversation (" + currentConversation.id + "): " + newTitle;
          renderConversationList();
          saveConversations();
        }
      } catch (error) {
        console.error("Error auto-titling conversation:", error);
      }
    }
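
    // Send on Enter (Shift+Enter inserts a newline); submitting posts the whole
    // conversation to /chat and appends the assistant's reply.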
    const chatForm = document.getElementById('chatForm');
    const messageInput = document.getElementById('messageInput');

    messageInput.addEventListener('keydown', function(e) {
      if (e.key === "Enter" && !e.shiftKey) {
        e.preventDefault();
        chatForm.requestSubmit();
      }
    });

    chatForm.addEventListener('submit', async function(e) {
      e.preventDefault();
      const userMessage = messageInput.value.trim();
      if (!userMessage || !currentConversation) return;
      appendMessage("user", userMessage);
      messageInput.value = "";
      const typingIndicator = appendMessage("assistant", "Typing...", false);
      try {
        const response = await fetch("/chat", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ messages: currentConversation.messages })
        });
        if (!response.ok) throw new Error("Server error: " + response.status);
        const data = await response.json();
        typingIndicator.remove();
        appendMessage("assistant", data.reply);
      } catch (error) {
        typingIndicator.remove();
        appendMessage("assistant", "Error: " + error.message);
      }
    });

    document.getElementById('newConvoBtn').addEventListener("click", function() {
      createNewConversation();
    });
    document.getElementById('newConvoBtnMobile').addEventListener("click", function() {
      createNewConversation();
    });
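
    // On load, restore saved conversations or start a fresh one.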
    loadConversations();
    if (conversations.length === 0) {
      createNewConversation();
    } else {
      currentConversation = conversations[0];
      document.getElementById('chatTitle').textContent = "Conversation (" + currentConversation.id + ")";
      renderChatLog();
    }
  </script>
</body>
</html>
`