The rest of the files

This commit is contained in:
PK13274 2025-04-01 09:45:55 -05:00
parent 7cb7324868
commit 03a38cd29e
5 changed files with 1306 additions and 0 deletions

75
app.py Normal file
View File

@@ -0,0 +1,75 @@
from flask import Flask, request, jsonify
import asyncio

# NOTE: the original file did `import axios` — axios is a JavaScript HTTP
# client, not a Python package, so the import failed at startup. HTTP calls
# are made with the standard library instead.
app = Flask(__name__)
# Load Markdown files
def loadMarkdownFiles(directory):
    """Return a list of {'name': ..., 'content': ...} dicts for every
    Markdown (.md, case-insensitive) file directly inside `directory`.

    Contents are read as UTF-8 text. Non-Markdown entries are skipped.
    The original body was untranslated JavaScript and could not run.
    """
    import os

    markdown_files = []
    for file_name in os.listdir(directory):
        # Match the extension case-insensitively ('.MD' counts too).
        if os.path.splitext(file_name)[1].lower() == '.md':
            file_path = os.path.join(directory, file_name)
            with open(file_path, encoding='utf-8') as handle:
                markdown_files.append({'name': file_name, 'content': handle.read()})
    return markdown_files
# Initialize RAG model and tokenizer
async def initRagModel():
    """Load the pretrained RAG tokenizer and generator.

    Returns a dict with 'tokenizer' and 'model' keys.

    Requires the `transformers` package — the classes the original
    (untranslated JavaScript) body referenced. Downloads model weights on
    first use.
    """
    # Local import so the module still loads when transformers is absent
    # and this feature is unused.
    from transformers import AutoTokenizer, RagTokenForGeneration

    tokenizer = AutoTokenizer.from_pretrained('facebook/rag-token-nq')
    model = RagTokenForGeneration.from_pretrained('facebook/rag-token-nq')
    return {'tokenizer': tokenizer, 'model': model}
# Retrieve relevant information from Markdown files using Ollama API
async def retrieveInformation(query):
    """POST `query` to the local Ollama bridge and return its 'response'
    field.

    Logs and re-raises any network/decoding error so callers can map it to
    an HTTP status. The original body was untranslated JavaScript using
    axios; this uses the standard library instead.
    """
    import json
    import urllib.request

    payload = json.dumps({'query': query}).encode('utf-8')
    http_request = urllib.request.Request(
        'http://localhost:8080/chat',
        data=payload,
        headers={'Content-Type': 'application/json'},
    )
    try:
        # Blocking I/O inside an async def — acceptable for this
        # single-user CLI/dev server; switch to asyncio.to_thread if the
        # app ever serves concurrent requests.
        with urllib.request.urlopen(http_request) as response:
            return json.loads(response.read().decode('utf-8'))['response']
    except Exception as error:
        print(f'Error: {error}')
        raise
# Chatbot logic
async def chatbot():
    """Interactive stdin loop: forwards each line to retrieveInformation.

    Loads the Markdown notes and the RAG model up front. NOTE(review): both
    are currently unused by the retrieval path, which delegates entirely to
    the Ollama service — confirm whether they should feed into retrieval.
    Type 'exit' (or send EOF) to stop. The original body was untranslated
    JavaScript (`process.stdin`) and could not run.
    """
    directory = './notes'  # Directory containing Markdown files
    markdown_files = loadMarkdownFiles(directory)
    rag_model = await initRagModel()
    print('Chatbot is ready! Ask your questions.')
    while True:
        try:
            query = input().strip()
        except EOFError:
            break
        if query.lower() == 'exit':
            break
        try:
            response = await retrieveInformation(query)
            print(f'Chatbot: {response}')
        except Exception as error:
            print(f'Error: {error}')
# Flask route to handle chat requests
@app.route('/chat', methods=['POST'])
async def chat():
    """Handle POST /chat: expects JSON {'query': str}.

    Returns {'response': str} on success, {'error': ...} with 400 for a
    missing/empty query or a non-JSON body, and 500 for upstream failures.
    """
    # silent=True yields None instead of aborting with an unhandled
    # 400/415 when the body is not JSON, so the explicit error path below
    # is actually reachable.
    data = request.get_json(silent=True) or {}
    query = data.get('query')
    if not query:
        return jsonify({'error': 'No query provided'}), 400
    try:
        response = await retrieveInformation(query)
        return jsonify({'response': response})
    except Exception as error:
        return jsonify({'error': str(error)}), 500
if __name__ == '__main__':
    # NOTE(review): asyncio.run(chatbot()) blocks until the interactive
    # loop finishes, so the Flask dev server below only starts after the
    # user exits the chatbot — confirm this ordering is intentional.
    asyncio.run(chatbot())
    app.run(debug=True)

70
index.js Normal file
View File

@@ -0,0 +1,70 @@
const { AutoTokenizer, RagTokenForGeneration } = require('@huggingface/transformers');
const fs = require('fs');
const path = require('path');
const axios = require('axios');
const configparser = require('configparser');
// Collect every Markdown (.md, case-insensitive) file directly inside
// `directory` as a { name, content } record, with content read as UTF-8.
function loadMarkdownFiles(directory) {
  const entries = fs.readdirSync(directory);
  return entries
    .filter((entry) => path.extname(entry).toLowerCase() === '.md')
    .map((entry) => ({
      name: entry,
      content: fs.readFileSync(path.join(directory, entry), 'utf-8'),
    }));
}
// Load the pretrained RAG tokenizer and generator model.
// Returns { tokenizer, model }; downloads weights on first use.
async function initRagModel() {
  const modelId = 'facebook/rag-token-nq';
  const tokenizer = await AutoTokenizer.from_pretrained(modelId);
  const model = await RagTokenForGeneration.from_pretrained(modelId);
  return { tokenizer, model };
}
// Retrieve relevant information from Markdown files using Ollama API.
// NOTE(review): despite the name/comment, this delegates entirely to the
// Ollama HTTP service — the loaded Markdown files are not consulted here.
async function retrieveInformation(query) {
  try {
    // NOTE(review): npm `configparser` exports the ConfigParser class
    // directly (module.exports = ConfigParser); verify that
    // `configparser.ConfigParser` is defined rather than `new configparser()`.
    // The config is also re-read from disk on every request — confirm that
    // live-editing ollama.ini is the intent rather than an oversight.
    const config = new configparser.ConfigParser();
    config.read('ollama.ini');
    const host = config.get('Ollama', 'host');
    const port = config.get('Ollama', 'port');
    // POSTs { query } and expects a JSON body with a `response` field.
    const response = await axios.post(`http://${host}:${port}/chat`, { query });
    return response.data.response;
  } catch (error) {
    // Log, then rethrow so the caller decides how to surface the failure.
    console.error('Error:', error.message);
    throw error;
  }
}
// Interactive REPL: loads the notes and RAG model, then answers each
// stdin line via retrieveInformation until the user types 'exit'.
// NOTE(review): the notes and model are loaded but unused by retrieval,
// which delegates entirely to the Ollama service.
async function chatbot() {
  const notesDir = './notes'; // Directory containing Markdown files
  const notes = loadMarkdownFiles(notesDir);
  const rag = await initRagModel();
  console.log('Chatbot is ready! Ask your questions.');
  process.stdin.on('data', async (chunk) => {
    const question = chunk.toString().trim();
    if (question.toLowerCase() === 'exit') {
      process.exit(0);
    }
    try {
      const answer = await retrieveInformation(question);
      console.log(`Chatbot: ${answer}`);
    } catch (err) {
      console.error('Error:', err.message);
    }
  });
}
// Entry point. The original left the promise floating, so a startup
// failure (e.g. missing ./notes or model download error) became an
// unhandled rejection; surface it and exit non-zero instead.
chatbot().catch((err) => {
  console.error('Error:', err.message);
  process.exit(1);
});

3
ollama.ini.example Normal file
View File

@@ -0,0 +1,3 @@
[Ollama]
host = localhost
port = 8080

1141
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

17
package.json Normal file
View File

@@ -0,0 +1,17 @@
{
"name": "chatbot",
"version": "1.0.0",
"description": "An AI chatbot using RAG for retrieval from Markdown files.",
"main": "index.js",
"scripts": {
"start": "node index.js"
},
"devDependencies": {
"@huggingface/transformers": "^3.4.1",
"axios": "^1.8.4",
"configparser": "^0.3.10",
"flask": "^0.2.10",
"fs": "^0.0.1-security",
"path": "^0.12.7"
}
}