"""Chatbot over local Markdown notes, answered via an Ollama-style HTTP API.

Exposes a Flask POST /chat endpoint and an interactive stdin REPL.

NOTE(review): the original file mixed Python and JavaScript syntax (Flask +
`const`/`fs`/`axios`/`process.stdin`) and could not run in either language.
This is a coherent Python reconstruction preserving the original names,
routes, messages, and control flow.
"""
from flask import Flask, request, jsonify
import asyncio
import json
import os
import urllib.request

app = Flask(__name__)


def loadMarkdownFiles(directory):
    """Load every ``.md`` file in ``directory``.

    Returns a list of ``{'name': filename, 'content': text}`` dicts
    (empty list if the directory does not exist). Files are read as UTF-8,
    matching the original ``'utf-8'`` read.
    """
    markdownFiles = []
    if not os.path.isdir(directory):
        return markdownFiles
    for file in sorted(os.listdir(directory)):
        if os.path.splitext(file)[1].lower() == '.md':
            filePath = os.path.join(directory, file)
            # Context manager guarantees the handle is closed.
            with open(filePath, encoding='utf-8') as fh:
                markdownFiles.append({'name': file, 'content': fh.read()})
    return markdownFiles


async def initRagModel():
    """Load the RAG tokenizer/model pair.

    Requires the third-party ``transformers`` package (the original
    referenced these names without importing them); imported lazily so the
    rest of the app works without it. Returns ``{'tokenizer': ..., 'model': ...}``.
    """
    # Heavy import, deferred until first use.
    from transformers import AutoTokenizer, RagTokenForGeneration
    tokenizer = AutoTokenizer.from_pretrained('facebook/rag-token-nq')
    model = RagTokenForGeneration.from_pretrained('facebook/rag-token-nq')
    return {'tokenizer': tokenizer, 'model': model}


async def retrieveInformation(query):
    """POST ``{'query': query}`` to the local Ollama-style endpoint.

    Returns the ``'response'`` field of the JSON reply. Network or decode
    errors are logged and re-raised for the caller to handle, mirroring the
    original log-then-rethrow behavior.

    NOTE(review): this targets http://localhost:8080/chat — if this Flask
    app itself is served on port 8080, /chat would call itself; presumably
    an Ollama bridge listens there. Confirm the port.
    """
    payload = json.dumps({'query': query}).encode('utf-8')
    req = urllib.request.Request(
        'http://localhost:8080/chat',
        data=payload,
        headers={'Content-Type': 'application/json'},
        method='POST',
    )

    def _post():
        with urllib.request.urlopen(req, timeout=30) as resp:
            return json.loads(resp.read().decode('utf-8'))

    try:
        # urllib is blocking; run it in the default executor so the event
        # loop is not stalled.
        loop = asyncio.get_running_loop()
        data = await loop.run_in_executor(None, _post)
        return data['response']
    except Exception as error:
        print(f'Error: {error}')
        raise


async def chatbot():
    """Interactive REPL: read queries from stdin until the user types 'exit'."""
    directory = './notes'  # directory containing Markdown files
    # NOTE(review): loaded but never used by the original either — the
    # retrieval is delegated entirely to the HTTP endpoint. TODO: wire the
    # notes and RAG model into the answer path or drop them.
    markdownFiles = loadMarkdownFiles(directory)
    ragModel = await initRagModel()
    print('Chatbot is ready! Ask your questions.')
    while True:
        try:
            query = input().strip()
        except EOFError:
            break  # stdin closed — exit the REPL cleanly
        if query.lower() == 'exit':
            break
        try:
            response = await retrieveInformation(query)
            print(f'Chatbot: {response}')
        except Exception as error:
            print(f'Error: {error}')


@app.route('/chat', methods=['POST'])
async def chat():
    """Handle POST /chat: ``{'query': ...}`` -> ``{'response': ...}``.

    Returns 400 when no query is provided, 500 on retrieval failure.
    Async views require ``flask[async]`` to be installed.
    """
    data = request.get_json(silent=True) or {}
    query = data.get('query')
    if not query:
        return jsonify({'error': 'No query provided'}), 400
    try:
        response = await retrieveInformation(query)
        return jsonify({'response': response})
    except Exception as error:
        return jsonify({'error': str(error)}), 500


if __name__ == '__main__':
    # NOTE(review): as in the original, the REPL runs to completion (until
    # 'exit'/EOF) BEFORE the HTTP server starts — likely only one of these
    # entry points is intended per process.
    asyncio.run(chatbot())
    app.run(debug=True)