const axios = require('axios');
const fs = require('fs');
const cron = require('node-cron');
const http = require('http');
const path = require('path');

const port = process.env.PORT || 7860;

// URLs to ping periodically (keeps free-tier hosts from idling out).
// SECURITY NOTE(review): the Goorm IDE URL below embeds an access token and
// guest credentials in plain text — consider moving it to an environment
// variable instead of committing it to source.
const urls = [
  //huggingface
  'https://connorlixyz-autoliveformcst.hf.space', // huggingface - this project
  //Goorm
  'https://my-nodejs-ivzud.run-us-west2.goorm.site', // Goorm-linshi
  'https://ide-run.goorm.io/workspace/d8MS7D1BaRxuHgRKnSW?token=ada5536dbe5768f245c935ab547af078&guestname=0123', // MC-servers-py
  // Add more URLs here
];

/**
 * Visit a single URL and log the outcome to the console.
 * Errors are caught and logged here, so callers never see a rejection.
 *
 * @param {string} url - The URL to request.
 * @returns {Promise<void>} Resolves after the attempt is logged.
 */
async function scrapeAndLog(url) {
  try {
    // NOTE(review): a 1000 ms timeout is very aggressive for waking a
    // sleeping free-tier host — cold starts often exceed this, so many
    // "Error" log lines may be false negatives. Confirm intent before raising.
    await axios.get(url, { timeout: 1000 });
    const timestamp = new Date().toISOString();
    const logMessage = `${timestamp}: Web visited Successfully ${url}\n`;
    console.log(logMessage);
  } catch (error) {
    const timestamp = new Date().toISOString();
    const errorMessage = `${timestamp}: Web visited Error ${url}: ${error.message}\n`;
    console.error(errorMessage);
  }
}

// Schedule the periodic visits. Six-field cron expression:
// second 0 of every 2nd minute (i.e. runs every two minutes).
cron.schedule('0 */2 * * * *', () => {
  console.log('Running webpage access...');
  urls.forEach((url) => {
    // Fire-and-forget: scrapeAndLog handles its own errors internally,
    // so the un-awaited promise can never produce an unhandled rejection.
    void scrapeAndLog(url);
  });
});

// Minimal HTTP server so the hosting platform sees a live web process.
// Serves index.html at the root path; everything else is a 404.
const server = http.createServer((req, res) => {
  if (req.url === '/') {
    const filePath = path.join(__dirname, 'index.html');
    fs.readFile(filePath, (err, data) => {
      if (err) {
        res.writeHead(500);
        res.end('Error loading index.html');
      } else {
        res.writeHead(200, { 'Content-Type': 'text/html' });
        res.end(data);
      }
    });
  } else {
    res.writeHead(404);
    res.end('Not Found');
  }
});

server.listen(port, () => {
  console.log(`Server is running on port ${port}`);
});