// server.js
  1. const schedule = require('node-schedule');
  2. const { spawn } = require('child_process');
  3. const { ipc } = require('./config.server.js');
  4. const path = require('node:path')
  5. require('./thirdparty/naturalSort.js');
  6. require('./src/settings.js');
  7. require('./src/db.js');
  8. require('./src/dao.js');
  9. require('./src/crawler.js');
  10. const app = require('./config.server.js').app
  11. const ipb = require('./config.server.js').ipb
  12. let crawlState = 'off';
  13. let crawlFn = async () => {
  14. ipb('global:refresh')
  15. }
  16. ipc.on("global:refresh:done", () => {
  17. let getProcess = spawn("bash", ["get.sh"], {cwd: path.join(__dirname, 'tmp')})
  18. getProcess.stdout.pipe(process.stdout)
  19. getProcess.stderr.pipe(process.stderr)
  20. getProcess.on('close', (code) => {
  21. console.log(`child process exited with code ${code}`);
  22. if(code == 0){
  23. }
  24. });
  25. })
  26. let crawlFnSafe = async () => {
  27. try {
  28. crawlState = 'on'
  29. await crawlFn()
  30. } catch (e) {
  31. console.error("error on job", e)
  32. } finally {
  33. crawlState = 'off'
  34. }
  35. }
  36. schedule.scheduleJob('0 0 3 * *', async () => {
  37. if(crawlState === 'off'){
  38. await crawlFnSafe()
  39. }
  40. });