server.js

const schedule = require('node-schedule');
const { spawn } = require('child_process');
const { ipc } = require('./config.server.js');
const path = require('node:path');

// Modules loaded only for their side effects.
require('./thirdparty/naturalSort.js');
require('./src/settings.js');
require('./src/db.js');
require('./src/dao.js');
require('./src/crawler.js');

const { app, ipb } = require('./config.server.js');
let crawlState = 'off';

// Trigger a crawl by broadcasting a forced refresh over IPC.
const crawlFn = async () => {
  ipb('global:refresh:force');
};
// Once the refresh finishes, run get.sh from the tmp directory, stream its
// output, and mark the crawl as no longer running.
ipc.on('global:refresh:done', () => {
  try {
    const getProcess = spawn('bash', ['get.sh'], { cwd: path.join(__dirname, 'tmp') });
    getProcess.stdout.pipe(process.stdout);
    getProcess.stderr.pipe(process.stderr);
    getProcess.on('close', (code) => {
      console.log(`child process exited with code ${code}`);
    });
  } catch (e) {
    console.error('error in refresh handler', e);
  } finally {
    crawlState = 'off';
  }
});
// Wrap the crawl trigger so an error is logged instead of crashing the scheduler.
const crawlFnSafe = async () => {
  try {
    crawlState = 'on';
    await crawlFn();
  } catch (e) {
    console.error('error on job', e);
  }
};
// Nightly crawl at 23:50; skip if the previous run is still in progress.
schedule.scheduleJob('50 23 * * *', () => {
  if (crawlState === 'off') {
    crawlFnSafe();
  } else {
    console.log('job already running');
  }
});
// Heartbeat log every 30 minutes.
schedule.scheduleJob('*/30 * * * *', () => {
  console.log(`service is alive ${new Date()}`);
});