// server.js

  1. const schedule = require('node-schedule');
  2. const { spawn } = require('child_process');
  3. const { ipc } = require('./config.server.js');
  4. const path = require('node:path')
  5. require('./thirdparty/naturalSort.js');
  6. require('./src/settings.js');
  7. require('./src/db.js');
  8. require('./src/dao.js');
  9. require('./src/crawler.js');
  10. const app = require('./config.server.js').app
  11. const ipb = require('./config.server.js').ipb
  12. let crawlState = 'off';
  13. let crawlFn = async () => {
  14. ipb('global:refresh')
  15. }
  16. ipc.on("global:refresh:done", () => {
  17. let getProcess = spawn("bash", ["get.sh"], {cwd: path.join(__dirname, 'tmp')})
  18. getProcess.stdout.pipe(process.stdout)
  19. getProcess.stderr.pipe(process.stderr)
  20. getProcess.on('close', (code) => {
  21. console.log(`child process exited with code ${code}`);
  22. if(code == 0){
  23. }
  24. });
  25. crawlState = 'off'
  26. })
  27. let crawlFnSafe = async () => {
  28. try {
  29. crawlState = 'on'
  30. await crawlFn()
  31. } catch (e) {
  32. console.error("error on job", e)
  33. }
  34. }
  35. schedule.scheduleJob('0 3 * * *', () => {
  36. if(crawlState === 'off'){
  37. return new Promise(async () => await crawlFnSafe())
  38. } else {
  39. console.log("job already running")
  40. }
  41. });
  42. schedule.scheduleJob('*/5 * * * *', () => {
  43. console.log("service is alive")
  44. })