Commit b09d5e9 · Corex Codes
Files changed:
- .gitignore +40 -0
- Dockerfile +37 -0
- Kubernetes/deployment.yml +30 -0
- Kubernetes/namespace.yml +4 -0
- Kubernetes/service.yml +13 -0
- Procfile +1 -0
- README.md +129 -0
- data/my_document.txt +33 -0
- data/sample.pdf +0 -0
- endpoints.py +35 -0
- main.py +24 -0
- rag.py +107 -0
- requirements.txt +14 -0
- static/script.js +371 -0
- static/styles.css +622 -0
- templates/index.html +90 -0
- vector_rag.py +101 -0
.gitignore ADDED
@@ -0,0 +1,40 @@
# Ignore virtual environment
venv/
ragenv/
ENV/
env/
.venv/

# Python compiled files
__pycache__/
*.py[cod]
*.so

# Environment variables
.env

# VS Code settings
.vscode/
*.code-workspace

# OS-specific
.DS_Store
Thumbs.db

# Logs and databases (optional)
*.log
*.sqlite3

# Jupyter/IPython
.ipynb_checkpoints/

# Cache
*.cache
*.pkl
*.db

# Node modules (if ever added)
node_modules/

Kubernetes/secret.yml
Dockerfile ADDED
@@ -0,0 +1,37 @@
# Use an official Python runtime as a parent image
FROM python:3.10-slim

# Set the working directory in the container
WORKDIR /app

# Create a non-root user and set cache directory permissions
RUN useradd --create-home --shell /bin/bash app && \
    mkdir -p /home/app/.cache && \
    chown -R app:app /home/app/.cache && \
    chown -R app:app /app

# Set environment variables for Hugging Face cache
ENV HF_HOME=/home/app/.cache/huggingface
ENV TRANSFORMERS_CACHE=/home/app/.cache/huggingface/transformers
ENV HF_DATASETS_CACHE=/home/app/.cache/huggingface/datasets

# Copy the requirements file into the container
COPY requirements.txt .

# Install dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application code into the container
COPY . .

# Change ownership of all files to app user
RUN chown -R app:app /app

# Switch to non-root user
USER app

# Expose the port your app runs on
EXPOSE 8000

# Command to run the application; Hugging Face Spaces sets PORT env
CMD sh -c "uvicorn main:app --host 0.0.0.0 --port ${PORT:-8000}"
Kubernetes/deployment.yml ADDED
@@ -0,0 +1,30 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: rag-app
  namespace: rag
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rag-app
  template:
    metadata:
      labels:
        app: rag-app
    spec:
      containers:
        - name: rag-container
          image: yadavkapil23/rag-app:latest
          ports:
            - containerPort: 8000
          # --- NEW CODE: INJECT HUGGINGFACE TOKEN FROM A SECRET ---
          env:
            - name: HUGGINGFACE_API_TOKEN
              valueFrom:
                secretKeyRef:
                  # You must create a secret named 'huggingface-secret' beforehand
                  name: huggingface-secret
                  # Assuming the key inside the secret is also named HUGGINGFACE_API_TOKEN
                  key: HUGGINGFACE_API_TOKEN
          # --------------------------------------------------------
Kubernetes/namespace.yml ADDED
@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
  name: rag
Kubernetes/service.yml ADDED
@@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
  name: rag-service
  namespace: rag
spec:
  type: NodePort
  selector:
    app: rag-app
  ports:
    - port: 8000
      targetPort: 8000
      nodePort: 30036  # optional fixed port, else Kubernetes assigns a random one
Procfile ADDED
@@ -0,0 +1 @@
web: uvicorn main:app --host=0.0.0.0 --port=8000
README.md ADDED
@@ -0,0 +1,129 @@
---
title: RAG Project
emoji: 🧠
colorFrom: blue
colorTo: purple
sdk: docker
app_port: 8000
python_version: 3.10
---

# 🚀 RAG System with LangChain and FastAPI 🌐

Welcome to this repository! This project demonstrates how to build a powerful RAG system using **LangChain** and **FastAPI** for generating contextually relevant and accurate responses by integrating external data into the generative process.

## 📋 Project Overview

The RAG system combines retrieval and generation to provide smarter AI-driven responses. Using **LangChain** for document handling and embeddings, and **FastAPI** for deploying a fast, scalable API, this project includes the following building blocks (a minimal code sketch follows the list):

- 🗂️ **Document Loading**: Load data from various sources (text, PDFs, etc.).
- ✂️ **Text Splitting**: Break large documents into manageable chunks.
- 🧠 **Embeddings**: Generate vector embeddings for efficient search and retrieval.
- 🔍 **Vector Stores**: Store embeddings in a vector store for fast similarity searches.
- 🔧 **Retrieval**: Retrieve the most relevant document chunks based on user queries.
- 💬 **Generative Response**: Use retrieved data with language models (LLMs) to generate accurate, context-aware answers.
- 🌐 **FastAPI**: Deploy the RAG system as a scalable API for easy interaction.

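To make these building blocks concrete, here is a bare-bones, illustrative load / split / embed / store / retrieve flow with LangChain and FAISS. It is a sketch only, not this project's exact code: the loader, chunk sizes, and the `sentence-transformers/all-MiniLM-L6-v2` embedding choice are assumptions (the real pipeline lives in `vector_rag.py`), and the exact import paths depend on the installed LangChain version.

```python
# Illustrative only: a minimal indexing + retrieval flow (parameters are assumptions).
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# 1. Document Loading: read the PDF shipped with the repo.
docs = PyPDFLoader("data/sample.pdf").load()

# 2. Text Splitting: break it into overlapping chunks (sizes are assumptions).
chunks = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_documents(docs)

# 3. Embeddings + 4. Vector Store: embed the chunks and index them with FAISS.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
store = FAISS.from_documents(chunks, embeddings)

# 5. Retrieval: fetch the chunks most similar to a user query.
relevant = store.similarity_search("What is a black hole?", k=3)
context = "\n\n".join(d.page_content for d in relevant)

# 6. Generative Response: the retrieved context is then placed into the LLM prompt
#    (see rag.py / vector_rag.py for how this project builds that prompt).
```
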
## ⚙️ Setup and Installation

### Prerequisites

Make sure you have the following installed:
- 🐍 Python 3.10+
- 🐳 Docker (optional, for deployment)
- 🛠️ PostgreSQL or FAISS (for vector storage)

### Installation Steps

1. **Clone the repository**:
   ```bash
   git clone https://github.com/yadavkapil23/RAG_Project.git
   ```

2. **Set up a virtual environment**:
   ```bash
   python -m venv venv
   source venv/bin/activate  # For Linux/Mac
   venv\Scripts\activate     # For Windows
   ```

3. **Install dependencies**:
   ```bash
   pip install -r requirements.txt
   ```

4. **Run the FastAPI server**:
   ```bash
   uvicorn main:app --reload
   ```

Now, your FastAPI app will be running at `http://127.0.0.1:8000` 🎉!

### Set up Ollama 🦙

This project uses Ollama to run local large language models.

1. **Install Ollama:** Follow the instructions on the [Ollama website](https://ollama.ai/) to download and install Ollama.

2. **Pull a model:** Pull a model to use with the application. This project uses `llama3`.
   ```bash
   ollama pull llama3
   ```

## 🛠️ Features

- **Retrieval-Augmented Generation**: Combines the best of both worlds—retrieving relevant data and generating insightful responses.
- **Scalable API**: FastAPI makes it easy to deploy and scale the RAG system.
- **Document Handling**: Supports multiple document types for loading and processing.
- **Vector Embeddings**: Efficient search with FAISS or other vector stores.

## 🛡️ Security

- 🔐 **OAuth2 and API Key** authentication support for secure API access.
- 🔒 **TLS/SSL** for encrypting data in transit.
- 🛡️ **Data encryption** for sensitive document storage.

## 🚀 Deployment

### Hugging Face Spaces (Docker) Deployment
This project is configured for a Hugging Face Space using the Docker runtime.

1. Push this repository to GitHub (or connect local).
2. Create a new Space on Hugging Face → Choose "Docker" SDK.
3. Point it to this repo. Spaces will build using the `Dockerfile` and run `uvicorn` binding to the provided `PORT`.
4. Ensure the file `data/sample.pdf` exists (or replace it) to allow FAISS index creation on startup.

Notes:
- Models `Qwen/Qwen2-0.5B-Instruct` and `all-MiniLM-L6-v2` will be downloaded on first run; initial cold start may take several minutes.
- Dependencies are CPU-friendly; no GPU is required.
- If you see OOM, consider reducing `max_new_tokens` in `vector_rag.py` or swapping to an even smaller instruct model.

### Docker Deployment (Local)
If you want to deploy your RAG system using Docker, simply build the Docker image and run the container:

```bash
docker build -t rag-system .
docker run -p 8000:8000 rag-system
```

### Cloud Deployment
Deploy your RAG system to the cloud using platforms like **AWS**, **Azure**, or **Google Cloud** with minimal setup.

## 🧠 Future Enhancements

- 🔄 **Real-time Data Integration**: Add real-time data sources for dynamic responses.
- 🤖 **Advanced Retrieval Techniques**: Implement deep learning-based retrievers for better query understanding.
- 📊 **Monitoring Tools**: Add monitoring with tools like Prometheus or Grafana for performance insights.

## 🤝 Contributing

Want to contribute? Feel free to fork this repository, submit a pull request, or open an issue. We welcome all contributions! 🛠️

## 📄 License

This project is licensed under the MIT License.

---

🎉 **Thank you for checking out the RAG System with LangChain and FastAPI!** If you have any questions or suggestions, feel free to reach out or open an issue. Let's build something amazing!
data/my_document.txt ADDED
@@ -0,0 +1,33 @@
Knowledge Base


Quantum computing uses qubits that can represent both 0 and 1 simultaneously, offering immense parallelism for computation.
A transformer model uses self-attention to weigh the importance of each word in a sentence for tasks like translation or summarization.
Python 3.12 introduced new error messages, better performance, and support for isolated subinterpreters.

The French Revolution (1789–1799) radically transformed French society, ending monarchy and spreading ideas of liberty and equality.
Mahatma Gandhi led the Indian independence movement through nonviolent civil disobedience, notably during the Salt March.

A Random Forest is an ensemble of decision trees used for classification or regression. It reduces overfitting and improves accuracy.
LangChain is a framework for developing LLM-powered apps with components like chains, tools, memory, and agents.

Meditation helps in reducing stress, enhancing concentration, and improving emotional regulation. Regular practice can reduce anxiety.
Intermittent fasting involves alternating periods of eating and fasting. It can help in weight loss and metabolic health.

GDP (Gross Domestic Product) measures a country's economic output. A growing GDP usually indicates a healthy economy.
Inflation refers to the general rise in prices over time, reducing purchasing power. Central banks use interest rates to control inflation.

Photosynthesis is the process where green plants use sunlight, CO₂, and water to produce oxygen and glucose.
Black holes are regions in space where gravity is so strong that nothing—not even light—can escape.

A binary search tree is a node-based data structure where left children are smaller and right children are larger than the parent node.
Recursion is a function calling itself until a base condition is met. It’s used in tree traversal, backtracking, and divide-and-conquer.

Japan is an island country in East Asia known for its technology, cherry blossoms, and cultural traditions like tea ceremony and sumo.
The Eiffel Tower was constructed in 1889 in Paris and is one of the most visited monuments in the world.

Q: What is a black hole?
A: A black hole is a region in space where gravity is so strong that nothing, not even light, can escape its pull.

Q: How do neural networks work?
A: Neural networks consist of layers of nodes that process inputs through weighted connections and activation functions to detect patterns.
data/sample.pdf ADDED
Binary file (71.9 kB).
endpoints.py ADDED
@@ -0,0 +1,35 @@
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List, Literal

router = APIRouter()

from rag import get_smart_rag_response

# Pydantic models for request/response validation
class Message(BaseModel):
    role: Literal["user", "assistant"]
    content: str

class QueryRequest(BaseModel):
    query: str
    conversation_history: List[Message] = []

class QueryResponse(BaseModel):
    query: str
    response: str
    source: str

@router.post("/query/")
async def query_rag_system(request: QueryRequest):
    try:
        # Convert Pydantic models to dicts for processing
        history = [msg.dict() for msg in request.conversation_history]
        response, source = await get_smart_rag_response(request.query, history)
        return QueryResponse(
            query=request.query,
            response=response,
            source=source
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
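With the server running, this endpoint can be exercised from any HTTP client. The snippet below is only an illustration of the request and response shapes defined above; it uses the `requests` package, which is not part of `requirements.txt`.

```python
# Illustrative client for POST /query/ (assumes the API is running on http://127.0.0.1:8000).
import requests

payload = {
    "query": "What is a black hole?",
    "conversation_history": [
        {"role": "user", "content": "Hi there!"},
        {"role": "assistant", "content": "Hello! How can I help you today?"},
    ],
}

resp = requests.post("http://127.0.0.1:8000/query/", json=payload, timeout=120)
resp.raise_for_status()
data = resp.json()  # QueryResponse fields: query, response, source
print(f"[{data['source']}] {data['response']}")
```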
main.py ADDED
@@ -0,0 +1,24 @@
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.requests import Request
from endpoints import router
import uvicorn

app = FastAPI()

# Serve static files (CSS, JS)
app.mount("/static", StaticFiles(directory="static"), name="static")

# Serve HTML templates
templates = Jinja2Templates(directory="templates")

@app.get("/")
def home(request: Request):
    return templates.TemplateResponse("index.html", {"request": request})

# Include your API endpoints
app.include_router(router)

if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=8000)
rag.py ADDED
@@ -0,0 +1,107 @@
from vector_rag import query_vector_store, llm  # <--- FIX: Import llm here!
import wikipedia
from typing import List, Dict
# REMOVED: All duplicate model/pipeline/tokenizer imports and initialization code

# The 'llm' instance is now imported from vector_rag.py and is ready to use.
wikipedia.set_lang("en")

def format_conversation_context(history: List[Dict], max_messages: int = 10) -> str:
    """
    Formats conversation history into a context string for the LLM.
    Keeps only the most recent messages to prevent token overflow.

    Args:
        history: List of message dicts with 'role' and 'content' keys
        max_messages: Maximum number of messages to include (default: 10)

    Returns:
        Formatted conversation history string
    """
    if not history:
        return ""

    # Keep only the last N messages
    recent_history = history[-max_messages:]

    formatted_lines = []
    for msg in recent_history:
        role = "User" if msg["role"] == "user" else "Assistant"
        formatted_lines.append(f"{role}: {msg['content']}")

    return "\n".join(formatted_lines)

async def get_smart_rag_response(query: str, conversation_history: List[Dict] = None) -> tuple[str, str]:
    """
    Get a smart RAG response with conversation context.

    Args:
        query: The user's current question
        conversation_history: List of previous messages (optional)

    Returns:
        Tuple of (response, source)
    """
    print(" Received Query:", query)

    if conversation_history is None:
        conversation_history = []

    # Format conversation history for context
    context_str = format_conversation_context(conversation_history)

    # First: Try Wikipedia
    try:
        summary = wikipedia.summary(query, sentences=5)
        print("Wikipedia summary found.")

        # Build prompt with conversation context
        prompt = f"""You are a helpful assistant engaged in a conversation.
"""
        if context_str:
            prompt += f"""
Previous conversation:
{context_str}

"""
        prompt += f"""Use the following Wikipedia information to answer the current question as clearly as possible.

Wikipedia Context:
{summary}

Current question: {query}
Answer:"""
        result = llm.invoke(prompt)
        answer = result.replace(prompt, "").strip()
        return answer, "Wikipedia"
    except wikipedia.exceptions.PageError:
        print("Wikipedia page not found.")
    except wikipedia.exceptions.DisambiguationError as e:
        return f"The query is ambiguous. Did you mean: {', '.join(e.options[:5])}", "Wikipedia"

    # Second: Fallback to LLM with conversation context
    try:
        print("Fallback: LLM with conversation context")

        fallback_prompt = "You are a knowledgeable assistant engaged in a conversation.\n\n"
        if context_str:
            fallback_prompt += f"Previous conversation:\n{context_str}\n\n"
        fallback_prompt += f"Current question: {query}\nAnswer:"

        llm_answer = llm.invoke(fallback_prompt)
        answer = llm_answer.replace(fallback_prompt, "").strip()
        if answer and "not sure" not in answer.lower():
            return answer.strip(), "LLM"
    except Exception as e:
        print("Error during LLM fallback:", e)

    # Finally: Fallback to Local Documents
    try:
        print("Fallback: Local vector search")
        vector_answer = query_vector_store(query, conversation_history)
        if vector_answer:
            return vector_answer, "Local Document"
    except Exception as e:
        print("Error during local vector search:", e)

    return "Sorry, I couldn't find any information to answer your question.", "System"
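Since `get_smart_rag_response` is an async function, the fallback chain (Wikipedia first, then the bare LLM, then the local vector store) can also be tried directly from a small script, without going through FastAPI. A minimal sketch, assuming it is run from the project root so `rag.py` and `vector_rag.py` import cleanly:

```python
# Quick manual test of the fallback chain in rag.py.
import asyncio

from rag import get_smart_rag_response  # importing this also loads the models in vector_rag.py


async def main() -> None:
    history = [{"role": "user", "content": "Let's talk about space."}]
    answer, source = await get_smart_rag_response("What is a black hole?", history)
    print(f"[{source}] {answer}")


if __name__ == "__main__":
    asyncio.run(main())
```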
requirements.txt ADDED
@@ -0,0 +1,14 @@
fastapi
uvicorn
langchain
langchain-community
python-dotenv
langchain-huggingface
faiss-cpu
jinja2
wikipedia
pypdf
sentence-transformers
torch
transformers
accelerate
static/script.js ADDED
@@ -0,0 +1,371 @@
document.addEventListener('DOMContentLoaded', () => {
    const queryInput = document.getElementById('queryInput');
    const askButton = document.getElementById('askButton');
    const chatMessages = document.getElementById('chatMessages');

    // 💬 Conversation state management
    let conversationHistory = [];

    function addUserMessage(content) {
        conversationHistory.push({
            role: 'user',
            content: content,
            timestamp: Date.now()
        });
    }

    function addAssistantMessage(content, source) {
        conversationHistory.push({
            role: 'assistant',
            content: content,
            timestamp: Date.now(),
            source: source
        });
    }

    function clearConversation() {
        conversationHistory = [];
    }

    function getHistory() {
        return conversationHistory;
    }

    // 💬 Message rendering functions
    function renderUserMessage(content, timestamp = null) {
        const ts = timestamp || Date.now();
        return `
            <div class="message user" data-timestamp="${ts}">
                <div class="message-avatar">
                    <i class="fas fa-user"></i>
                </div>
                <div class="message-content">
                    <div class="message-bubble">
                        <div class="message-text">${escapeHtml(content)}</div>
                    </div>
                </div>
            </div>
        `;
    }

    function renderAssistantMessage(content, source) {
        const sourceBadge = source ? `<span class="source-badge">${source}</span>` : '';
        return `
            <div class="message assistant">
                <div class="message-avatar">
                    <i class="fas fa-robot"></i>
                </div>
                <div class="message-content">
                    ${sourceBadge}
                    <div class="message-bubble">
                        <div class="message-text">${formatAnswer(content)}</div>
                    </div>
                </div>
            </div>
        `;
    }

    function renderLoadingMessage() {
        return `
            <div class="message assistant">
                <div class="message-avatar">
                    <i class="fas fa-robot"></i>
                </div>
                <div class="message-content">
                    <div class="message-bubble">
                        <div class="loading-message">
                            <span>Thinking...</span>
                            <div class="typing-indicator">
                                <span></span>
                                <span></span>
                                <span></span>
                            </div>
                        </div>
                    </div>
                </div>
            </div>
        `;
    }

    function renderWelcomeMessage() {
        return `
            <div class="welcome-message">
                <h2>Welcome to Corex!</h2>
                <p>Ask me anything and I'll help you with accurate, document-backed answers.</p>
            </div>
        `;
    }

    function escapeHtml(text) {
        const div = document.createElement('div');
        div.textContent = text;
        return div.innerHTML;
    }

    function displayAllMessages() {
        if (conversationHistory.length === 0) {
            chatMessages.innerHTML = renderWelcomeMessage();
            return;
        }

        let html = '';
        conversationHistory.forEach(msg => {
            if (msg.role === 'user') {
                html += renderUserMessage(msg.content);
            } else {
                html += renderAssistantMessage(msg.content, msg.source);
            }
        });
        chatMessages.innerHTML = html;
        scrollToBottom();
    }

    function scrollToBottom() {
        chatMessages.scrollTop = chatMessages.scrollHeight;
    }

    function formatAnswer(text) {
        if (typeof text !== "string") {
            text = String(text ?? "No response received.");
        }
        return text
            .split('\n')
            .filter(line => line.trim())
            .map(line => `<p>${line}</p>`)
            .join('');
    }

    // 🔍 Query handler
    async function handleQuery() {
        const query = queryInput.value.trim();
        if (!query) return;

        // Add user message to conversation
        addUserMessage(query);
        displayAllMessages();

        // Clear input
        queryInput.value = '';

        // Show loading message
        const loadingMessage = renderLoadingMessage();
        chatMessages.innerHTML += loadingMessage;
        scrollToBottom();

        try {
            // Send conversation history to backend
            const response = await fetch('/query/', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    query: query,
                    conversation_history: getHistory()
                })
            });

            if (!response.ok) throw new Error(`Server returned ${response.status}`);
            const data = await response.json();

            // Remove loading message and add assistant response
            chatMessages.innerHTML = chatMessages.innerHTML.replace(loadingMessage, '');
            addAssistantMessage(data.response, data.source);
            displayAllMessages();

        } catch (err) {
            // Remove loading message and add error message
            chatMessages.innerHTML = chatMessages.innerHTML.replace(loadingMessage, '');
            addAssistantMessage(`Failed to get response: ${err.message}`, 'Error');
            displayAllMessages();
        }
    }

    // 🔗 Event listeners
    askButton.addEventListener('click', handleQuery);
    queryInput.addEventListener('keypress', e => {
        if (e.key === 'Enter') handleQuery();
    });

    // Auto-resize input
    queryInput.addEventListener('input', () => {
        queryInput.style.height = 'auto';
        queryInput.style.height = queryInput.scrollHeight + 'px';
    });

    // Dropdown menu functionality
    const optionsBtn = document.getElementById('optionsBtn');
    const optionsMenu = document.getElementById('optionsMenu');
    const downloadTxtBtn = document.getElementById('downloadTxt');
    const downloadPdfBtn = document.getElementById('downloadPdf');
    const clearChatBtn = document.getElementById('clearChat');

    // Toggle dropdown menu
    optionsBtn.addEventListener('click', (e) => {
        e.stopPropagation();
        optionsMenu.classList.toggle('show');
    });

    // Close dropdown when clicking outside
    document.addEventListener('click', (e) => {
        if (!optionsBtn.contains(e.target) && !optionsMenu.contains(e.target)) {
            optionsMenu.classList.remove('show');
        }
    });

    // Download as TXT
    downloadTxtBtn.addEventListener('click', () => {
        downloadChatAsTxt();
        optionsMenu.classList.remove('show');
    });

    // Download as PDF
    downloadPdfBtn.addEventListener('click', () => {
        downloadChatAsPdf();
        optionsMenu.classList.remove('show');
    });

    // Clear chat
    clearChatBtn.addEventListener('click', () => {
        clearConversation();
        displayAllMessages();
        optionsMenu.classList.remove('show');
    });

    // Download functions
    function downloadChatAsTxt() {
        if (conversationHistory.length === 0) {
            alert('No conversation to download');
            return;
        }

        let content = 'Corex Chat History\n';
        content += '='.repeat(50) + '\n\n';

        conversationHistory.forEach((msg, index) => {
            const timestamp = new Date(msg.timestamp).toLocaleString();
            const role = msg.role === 'user' ? 'You' : 'Corex';
            const source = msg.source ? ` (${msg.source})` : '';

            content += `[${timestamp}] ${role}${source}:\n`;
            content += msg.content + '\n\n';
        });

        const blob = new Blob([content], { type: 'text/plain' });
        const url = URL.createObjectURL(blob);
        const a = document.createElement('a');
        a.href = url;
        a.download = `corex-chat-${new Date().toISOString().split('T')[0]}.txt`;
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(url);
    }

    function downloadChatAsPdf() {
        if (conversationHistory.length === 0) {
            alert('No conversation to download');
            return;
        }

        try {
            const { jsPDF } = window.jspdf;
            const doc = new jsPDF();

            // Set up the document
            let yPosition = 20;
            const pageHeight = doc.internal.pageSize.height;
            const pageWidth = doc.internal.pageSize.width;
            const margin = 20;
            const maxWidth = pageWidth - (margin * 2);

            // Helper function to add text with word wrapping
            function addTextWithWrap(text, x, y, maxWidth, fontSize = 10) {
                doc.setFontSize(fontSize);
                const lines = doc.splitTextToSize(text, maxWidth);
                doc.text(lines, x, y);
                return y + (lines.length * (fontSize * 0.4));
            }

            // Helper function to check if we need a new page
            function checkNewPage(requiredSpace) {
                if (yPosition + requiredSpace > pageHeight - 20) {
                    doc.addPage();
                    yPosition = 20;
                    return true;
                }
                return false;
            }

            // Title
            doc.setFontSize(16);
            doc.setFont(undefined, 'bold');
            doc.text('Corex Chat History', pageWidth / 2, yPosition, { align: 'center' });
            yPosition += 10;

            // Date
            doc.setFontSize(10);
            doc.setFont(undefined, 'normal');
            doc.text(`Generated on: ${new Date().toLocaleString()}`, pageWidth / 2, yPosition, { align: 'center' });
            yPosition += 15;

            // Add a line
            doc.line(margin, yPosition, pageWidth - margin, yPosition);
            yPosition += 10;

            // Process each message
            conversationHistory.forEach((msg, index) => {
                const timestamp = new Date(msg.timestamp).toLocaleString();
                const role = msg.role === 'user' ? 'You' : 'Corex';
                const source = msg.source ? ` (${msg.source})` : '';

                // Check if we need a new page for this message
                const messageText = `[${timestamp}] ${role}${source}:\n${msg.content}`;
                const estimatedHeight = (messageText.split('\n').length * 4) + 10;

                if (checkNewPage(estimatedHeight)) {
                    // Add a continuation marker
                    doc.setFontSize(8);
                    doc.text('...continued from previous page...', margin, yPosition);
                    yPosition += 5;
                }

                // Message header
                doc.setFontSize(10);
                doc.setFont(undefined, 'bold');
                yPosition = addTextWithWrap(`[${timestamp}] ${role}${source}:`, margin, yPosition, maxWidth, 10);

                // Message content
                doc.setFont(undefined, 'normal');
                yPosition = addTextWithWrap(msg.content, margin + 5, yPosition, maxWidth - 5, 9);

                // Add some space between messages
                yPosition += 8;

                // Add a subtle line between messages (except for the last one)
                if (index < conversationHistory.length - 1) {
                    doc.setDrawColor(200, 200, 200);
                    doc.line(margin, yPosition, pageWidth - margin, yPosition);
                    yPosition += 5;
                }
            });

            // Save the PDF
            const fileName = `corex-chat-${new Date().toISOString().split('T')[0]}.pdf`;
            doc.save(fileName);

        } catch (error) {
            console.error('Error generating PDF:', error);
            alert('Error generating PDF. Please try downloading as TXT instead.');
        }
    }

    // Scroll to bottom when new messages arrive
    const observer = new MutationObserver(() => {
        scrollToBottom();
    });
    observer.observe(chatMessages, { childList: true, subtree: true });

    // Initialize
    displayAllMessages();
});
static/styles.css ADDED
@@ -0,0 +1,622 @@
/* Corex Colorful Theme */
:root {
    --bg-primary: #0f0f23;
    --bg-secondary: #1a1a2e;
    --bg-tertiary: #16213e;
    --text-primary: #ffffff;
    --text-secondary: #e0e6ed;
    --text-muted: #a0aec0;
    --border-color: #2d3748;
    --accent-color: #667eea;
    --accent-hover: #5a67d8;
    --accent-secondary: #f093fb;
    --accent-tertiary: #4facfe;
    --user-message-bg: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    --ai-message-bg: linear-gradient(135deg, #4e78ae 0%, #000000 100%);
    --input-bg: #2d3748;
    --input-border: #4a5568;
    --shadow: 0 10px 25px -5px rgba(0, 0, 0, 0.3);
    --gradient-primary: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    --gradient-secondary: linear-gradient(135deg, #f093fb 0%, #f5576c 100%);
    --gradient-tertiary: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%);
}

* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}

body {
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
    background-color: var(--bg-primary);
    color: var(--text-primary);
    height: 100vh;
    overflow: hidden;
}

/* Chat Container */
.chat-container {
    display: flex;
    flex-direction: column;
    height: 100vh;
    background-color: var(--bg-primary);
}

/* Header */
.chat-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 1rem 1.5rem;
    border-bottom: 1px solid var(--border-color);
    background-color: var(--bg-secondary);
}

.header-left {
    display: flex;
    align-items: center;
    gap: 0.5rem;
}

.header-left h1 {
    font-size: 1.25rem;
    font-weight: 600;
    color: var(--text-primary);
}

.header-dropdown {
    color: var(--text-muted);
    cursor: pointer;
    padding: 0.25rem;
}

.header-right {
    display: flex;
    gap: 0.5rem;
}

.header-btn {
    background: none;
    border: none;
    color: var(--text-muted);
    cursor: pointer;
    padding: 0.5rem;
    border-radius: 0.375rem;
    transition: background-color 0.2s;
}

.header-btn:hover {
    background-color: var(--bg-tertiary);
    color: var(--text-primary);
}

/* Message Reactions */
.reaction-btn {
    background: none;
    border: none;
    color: var(--text-muted);
    cursor: pointer;
    padding: 0.25rem 0.5rem;
    border-radius: 1rem;
    transition: all 0.2s;
    font-size: 0.875rem;
    display: flex;
    align-items: center;
    gap: 0.25rem;
}

.reaction-btn:hover {
    background-color: var(--bg-tertiary);
    color: var(--text-primary);
    transform: scale(1.05);
}

.reaction-btn.active {
    background: var(--gradient-primary);
    color: white;
    box-shadow: 0 2px 8px rgba(102, 126, 234, 0.3);
}

.reaction-count {
    font-size: 0.75rem;
    font-weight: 500;
}

/* Search Bar */
.search-bar {
    position: absolute;
    top: 100%;
    left: 0;
    right: 0;
    background: var(--bg-secondary);
    border: 1px solid var(--border-color);
    border-radius: 0.5rem;
    padding: 1rem;
    box-shadow: var(--shadow);
    z-index: 1000;
    opacity: 0;
    visibility: hidden;
    transform: translateY(-0.5rem);
    transition: all 0.3s ease;
}

.search-bar.show {
    opacity: 1;
    visibility: visible;
    transform: translateY(0);
}

.search-input {
    width: 100%;
    background: var(--input-bg);
    border: 1px solid var(--input-border);
    border-radius: 0.5rem;
    padding: 0.75rem 1rem;
    color: var(--text-primary);
    font-size: 0.875rem;
    margin-bottom: 0.75rem;
}

.search-input:focus {
    outline: none;
    border-color: var(--accent-color);
    box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
}

.search-results {
    max-height: 200px;
    overflow-y: auto;
}

.search-result {
    padding: 0.5rem;
    border-radius: 0.375rem;
    cursor: pointer;
    transition: background-color 0.2s;
    border: 1px solid transparent;
}

.search-result:hover {
    background-color: var(--bg-tertiary);
    border-color: var(--accent-color);
}

.search-result-content {
    font-size: 0.875rem;
    color: var(--text-secondary);
    margin-bottom: 0.25rem;
}

.search-result-meta {
    font-size: 0.75rem;
    color: var(--text-muted);
}

/* Streaming Animation */
.streaming-text {
    position: relative;
}

.streaming-cursor {
    display: inline-block;
    width: 2px;
    height: 1em;
    background: var(--accent-color);
    animation: blink 1s infinite;
    margin-left: 2px;
}

@keyframes blink {
    0%, 50% { opacity: 1; }
    51%, 100% { opacity: 0; }
}

/* Dropdown Menu */
.dropdown-container {
    position: relative;
    display: inline-block;
}

.dropdown-menu {
    position: absolute;
    top: 100%;
    right: 0;
    background-color: var(--bg-secondary);
    border: 1px solid var(--border-color);
    border-radius: 0.5rem;
    box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
    min-width: 12rem;
    z-index: 1000;
    opacity: 0;
    visibility: hidden;
    transform: translateY(-0.5rem);
    transition: all 0.2s ease;
    margin-top: 0.5rem;
}

.dropdown-menu.show {
    opacity: 1;
    visibility: visible;
    transform: translateY(0);
}

.dropdown-item {
    display: flex;
    align-items: center;
    gap: 0.75rem;
    width: 100%;
    padding: 0.75rem 1rem;
    background: none;
    border: none;
    color: var(--text-primary);
    text-align: left;
    cursor: pointer;
    transition: background-color 0.2s;
    font-size: 0.875rem;
}

.dropdown-item:hover {
    background-color: var(--bg-tertiary);
}

.dropdown-item i {
    width: 1rem;
    text-align: center;
    color: var(--text-muted);
}

.dropdown-divider {
    height: 1px;
    background-color: var(--border-color);
    margin: 0.25rem 0;
}

/* Chat Messages */
.chat-messages {
    flex: 1;
    overflow-y: auto;
    padding: 1rem;
    display: flex;
    flex-direction: column;
    gap: 1rem;
}

/* Welcome Message */
.welcome-message {
    text-align: center;
    padding: 2rem;
    color: var(--text-secondary);
}

.welcome-message h2 {
    font-size: 1.5rem;
    margin-bottom: 0.5rem;
    color: var(--text-primary);
}

.welcome-message p {
    font-size: 1rem;
    color: var(--text-muted);
}

/* Message Bubbles */
.message {
    display: flex;
    gap: 0.75rem;
    margin-bottom: 1rem;
    max-width: 100%;
}

.message.user {
    justify-content: flex-end;
}

.message.assistant {
    justify-content: flex-start;
}

.message-avatar {
    width: 2rem;
    height: 2rem;
    border-radius: 50%;
    display: flex;
    align-items: center;
    justify-content: center;
    flex-shrink: 0;
    margin-top: 0.25rem;
}

.message.user .message-avatar {
    background: var(--gradient-primary);
    color: white;
    box-shadow: 0 4px 15px rgba(102, 126, 234, 0.4);
}

.message.assistant .message-avatar {
    background: var(--gradient-secondary);
    color: white;
    box-shadow: 0 4px 15px rgba(240, 147, 251, 0.4);
}

.message-content {
    max-width: 70%;
    min-width: 0;
}

.message.user .message-content {
    display: flex;
    flex-direction: column;
    align-items: flex-end;
}

.message.assistant .message-content {
    display: flex;
    flex-direction: column;
    align-items: flex-start;
}

.message-bubble {
    padding: 0.75rem 1rem;
    border-radius: 1rem;
    word-wrap: break-word;
    line-height: 1.5;
}

.message.user .message-bubble {
    background: var(--user-message-bg);
    color: var(--text-primary);
    border-bottom-right-radius: 0.25rem;
    box-shadow: 0 4px 15px rgba(102, 126, 234, 0.3);
    border: 1px solid rgba(255, 255, 255, 0.1);
}

.message.assistant .message-bubble {
    background: var(--ai-message-bg);
    color: var(--text-primary);
    border-bottom-left-radius: 0.25rem;
    box-shadow: 0 4px 15px rgba(240, 147, 251, 0.3);
    border: 1px solid rgba(255, 255, 255, 0.1);
}

.message-text {
    margin-bottom: 0.5rem;
}

.message-text p {
    margin-bottom: 0.5rem;
}

.message-text p:last-child {
    margin-bottom: 0;
}

.message-text strong {
    font-weight: 600;
    color: var(--text-primary);
}

/* Source Badge */
.source-badge {
    display: inline-block;
    background-color: var(--accent-color);
    color: white;
    padding: 0.25rem 0.5rem;
    border-radius: 0.375rem;
    font-size: 0.75rem;
    font-weight: 500;
    margin-bottom: 0.5rem;
}

/* Message Actions */
.message-actions {
    display: flex;
    gap: 0.5rem;
    margin-top: 0.5rem;
    opacity: 0;
    transition: opacity 0.2s;
}

.message:hover .message-actions {
    opacity: 1;
}

.action-btn {
    background: none;
    border: none;
    color: var(--text-muted);
    cursor: pointer;
    padding: 0.25rem;
    border-radius: 0.25rem;
    transition: color 0.2s;
}

.action-btn:hover {
    color: var(--text-primary);
}

/* Loading Message */
.loading-message {
    display: flex;
    align-items: center;
    gap: 0.5rem;
    color: #75ff6b;
    font-style: italic;
    font-weight: 500;
    text-shadow: 0 0 10px rgba(255, 107, 107, 0.4);
}

.typing-indicator {
    display: flex;
    gap: 0.25rem;
}

.typing-indicator span {
    width: 0.5rem;
    height: 0.5rem;
    background: #ff6b6b;
    border-radius: 50%;
    animation: typing 1.4s infinite ease-in-out;
    box-shadow: 0 0 8px rgba(255, 107, 107, 0.4);
}

.typing-indicator span:nth-child(2) {
    animation-delay: 0.2s;
}

.typing-indicator span:nth-child(3) {
    animation-delay: 0.4s;
}

@keyframes typing {
    0%, 60%, 100% {
        transform: translateY(0);
        opacity: 0.5;
    }
    30% {
        transform: translateY(-0.5rem);
        opacity: 1;
    }
}

/* Input Area */
.chat-input-container {
    padding: 1rem;
    background-color: var(--bg-secondary);
    border-top: 1px solid var(--border-color);
}

.chat-input-wrapper {
    display: flex;
    align-items: center;
    gap: 0.5rem;
    background-color: var(--input-bg);
    border: 1px solid var(--input-border);
    border-radius: 1rem;
    padding: 0.75rem 1rem;
    max-width: 48rem;
    margin: 0 auto;
    transition: border-color 0.2s;
}

.chat-input-wrapper:focus-within {
    border-color: var(--accent-color);
}

.chat-input {
    flex: 1;
    background: none;
    border: none;
    outline: none;
    color: var(--text-primary);
    font-size: 1rem;
    line-height: 1.5;
}

.chat-input::placeholder {
    color: var(--text-muted);
}

.input-btn {
    background: none;
    border: none;
    color: var(--text-muted);
    cursor: pointer;
    padding: 0.5rem;
    border-radius: 0.375rem;
    transition: color 0.2s;
}

.input-btn:hover {
    color: var(--text-primary);
}

.send-btn {
    background-color: var(--accent-color);
    border: none;
    color: white;
    cursor: pointer;
    padding: 0.5rem;
    border-radius: 0.375rem;
    transition: background-color 0.2s;
}

.send-btn:hover {
    background-color: var(--accent-hover);
}

.send-btn:disabled {
    background-color: var(--bg-tertiary);
    color: var(--text-muted);
    cursor: not-allowed;
}

/* Scroll Indicator */
.scroll-indicator {
    text-align: center;
    margin-top: 0.5rem;
    color: var(--text-muted);
    cursor: pointer;
    transition: color 0.2s;
}

.scroll-indicator:hover {
    color: var(--text-primary);
}

/* Responsive Design */
@media (max-width: 768px) {
    .chat-header {
        padding: 0.75rem 1rem;
    }

    .chat-messages {
        padding: 0.75rem;
    }

    .chat-input-container {
        padding: 0.75rem;
    }

    .message-content {
        max-width: 85%;
    }

    .header-left h1 {
        font-size: 1.125rem;
    }
}

/* Hide scrollbar but keep functionality */
.chat-messages::-webkit-scrollbar {
    width: 0.25rem;
}

.chat-messages::-webkit-scrollbar-track {
    background: transparent;
}

.chat-messages::-webkit-scrollbar-thumb {
    background: var(--border-color);
    border-radius: 0.125rem;
}

.chat-messages::-webkit-scrollbar-thumb:hover {
    background: var(--text-muted);
|
| 606 |
+
}
|
| 607 |
+
|
| 608 |
+
/* Animation for new messages */
|
| 609 |
+
@keyframes slideIn {
|
| 610 |
+
from {
|
| 611 |
+
opacity: 0;
|
| 612 |
+
transform: translateY(1rem);
|
| 613 |
+
}
|
| 614 |
+
to {
|
| 615 |
+
opacity: 1;
|
| 616 |
+
transform: translateY(0);
|
| 617 |
+
}
|
| 618 |
+
}
|
| 619 |
+
|
| 620 |
+
.message {
|
| 621 |
+
animation: slideIn 0.3s ease-out;
|
| 622 |
+
}
|
templates/index.html
ADDED
@@ -0,0 +1,90 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <meta name="theme-color" content="#3b82f6" />
  <title>Corex | AI Assistant</title>

  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css" />
  <link rel="stylesheet" href="/static/styles.css" />
  <link rel="preconnect" href="https://fonts.googleapis.com" />
  <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
  <link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap" rel="stylesheet" />
  <script src="https://cdnjs.cloudflare.com/ajax/libs/jspdf/2.5.1/jspdf.umd.min.js"></script>
</head>
<body>
  <div class="chat-container">
    <!-- Header -->
    <header class="chat-header">
      <div class="header-left">
        <h1>Corex</h1>
        <div class="header-dropdown">
          <i class="fas fa-chevron-down"></i>
        </div>
      </div>
      <div class="header-right">
        <button class="header-btn" title="Share">
          <i class="fas fa-share"></i>
        </button>
        <div class="dropdown-container">
          <button class="header-btn" id="optionsBtn" title="More options">
            <i class="fas fa-ellipsis-h"></i>
          </button>
          <div class="dropdown-menu" id="optionsMenu">
            <button class="dropdown-item" id="downloadTxt">
              <i class="fas fa-file-text"></i>
              Download as TXT
            </button>
            <button class="dropdown-item" id="downloadPdf">
              <i class="fas fa-file-pdf"></i>
              Download as PDF
            </button>
            <div class="dropdown-divider"></div>
            <button class="dropdown-item" id="clearChat">
              <i class="fas fa-trash"></i>
              Clear Chat
            </button>
          </div>
        </div>
      </div>
    </header>

    <!-- Chat Messages -->
    <main class="chat-messages" id="chatMessages">
      <div class="welcome-message">
        <h2>Welcome to Corex!</h2>
        <p>Ask me anything and I'll help you with accurate, document-backed answers.</p>
      </div>
    </main>

    <!-- Input Area -->
    <div class="chat-input-container">
      <div class="chat-input-wrapper">
        <button class="input-btn" title="Attach file">
          <i class="fas fa-plus"></i>
        </button>
        <input
          type="text"
          id="queryInput"
          placeholder="Ask anything"
          autocomplete="off"
          class="chat-input"
        />
        <button class="input-btn" title="Voice input">
          <i class="fas fa-microphone"></i>
        </button>
        <button id="askButton" class="send-btn" title="Send message">
          <i class="fas fa-paper-plane"></i>
        </button>
      </div>
      <div class="scroll-indicator">
        <i class="fas fa-chevron-down"></i>
      </div>
    </div>
  </div>

  <script src="/static/script.js"></script>
</body>
</html>
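
Note: the page above loads its assets from /static and is rendered server-side, so the FastAPI entry point has to mount the static directory and render templates/index.html. The actual wiring lives in main.py (not reproduced here); the sketch below is only an illustration of that setup, and the route and the use of Jinja2Templates/StaticFiles are assumptions.

# Hypothetical sketch of how main.py could serve index.html and /static.
# The real setup is in main.py; names below are illustrative assumptions.
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

app = FastAPI()

# Serve styles.css and script.js under the /static paths referenced in index.html
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")

@app.get("/")
async def index(request: Request):
    # Render the chat UI defined in templates/index.html
    return templates.TemplateResponse("index.html", {"request": request})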
vector_rag.py
ADDED
@@ -0,0 +1,101 @@
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain_text_splitters import RecursiveCharacterTextSplitter
# Use the generic HuggingFaceEmbeddings for the smaller model
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_huggingface import HuggingFacePipeline
# Remove BitsAndBytesConfig import
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import os
from dotenv import load_dotenv

load_dotenv()

# Set cache directories with fallback for permission issues
os.environ.setdefault('HF_HOME', '/tmp/huggingface_cache')
os.environ.setdefault('TRANSFORMERS_CACHE', '/tmp/huggingface_cache/transformers')
os.environ.setdefault('HF_DATASETS_CACHE', '/tmp/huggingface_cache/datasets')

# --- MODEL INITIALIZATION (Minimal Footprint) ---
print("Loading Qwen2-0.5B-Instruct...")
model_name = "Qwen/Qwen2-0.5B-Instruct"

# Removed: quantization_config = BitsAndBytesConfig(load_in_8bit=True)

tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
# Removed: quantization_config parameter from from_pretrained
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    device_map="cpu",
    trust_remote_code=True
)

llm_pipeline = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.5,
    top_p=0.9,
)
llm = HuggingFacePipeline(pipeline=llm_pipeline)

# Use the lighter all-MiniLM-L6-v2 embeddings model
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

# --- DOCUMENT LOADING & CHUNKING ---
loader = PyPDFLoader("data/sample.pdf")  # Correct path for Docker: data/sample.pdf
documents = loader.load()
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
chunks = text_splitter.split_documents(documents)

if not chunks:
    raise ValueError("No document chunks found.")

# Initialize FAISS and retriever
vectorstore = FAISS.from_documents(chunks, embeddings)
retriever = vectorstore.as_retriever()

# Expose the necessary components for rag.py to import
def query_vector_store(query: str, conversation_history: list = None) -> str:
    """
    Query the vector store with conversation context.

    Args:
        query: The user's current question
        conversation_history: List of previous messages (optional)

    Returns:
        Answer string or None if no documents found
    """
    if conversation_history is None:
        conversation_history = []

    docs = retriever.get_relevant_documents(query)
    if docs:
        context = "\n\n".join([doc.page_content for doc in docs])

        # Build prompt with conversation context
        prompt = "You are a helpful assistant engaged in a conversation.\n\n"

        if conversation_history:
            # Format conversation history
            history_lines = []
            for msg in conversation_history[-10:]:  # Last 10 messages
                role = "User" if msg["role"] == "user" else "Assistant"
                history_lines.append(f"{role}: {msg['content']}")
            history_text = '\n'.join(history_lines)
            prompt += f"Previous conversation:\n{history_text}\n\n"

        prompt += f"""Use the following context from documents to answer the current question:

{context}

Current question: {query}
Answer:"""

        raw_output = llm.invoke(prompt)
        answer = raw_output.replace(prompt, "").strip()
        return answer
    return None