summaryrefslogtreecommitdiffstats
path: root/crawl-ref/source/webserver/process_handler.py
diff options
context:
space:
mode:
authorFlorian Diebold <flodiebold@gmail.com>2012-08-28 00:32:00 +0200
committerFlorian Diebold <flodiebold@gmail.com>2012-08-28 00:32:15 +0200
commit5bb725201a068fa211b165a8b6c1e1694d335d7e (patch)
tree9a9ebcac849802564136cb21feadbcacb4d935c6 /crawl-ref/source/webserver/process_handler.py
parent2026c22a35a737cc6bfecafe3da595e979a83de1 (diff)
downloadcrawl-ref-5bb725201a068fa211b165a8b6c1e1694d335d7e.tar.gz
crawl-ref-5bb725201a068fa211b165a8b6c1e1694d335d7e.zip
Webtiles: Be a bit more resilient against errors while starting Crawl.
Diffstat (limited to 'crawl-ref/source/webserver/process_handler.py')
-rw-r--r--crawl-ref/source/webserver/process_handler.py38
1 file changed, 23 insertions, 15 deletions
diff --git a/crawl-ref/source/webserver/process_handler.py b/crawl-ref/source/webserver/process_handler.py
index 274aa714ac..2fe206a45e 100644
--- a/crawl-ref/source/webserver/process_handler.py
+++ b/crawl-ref/source/webserver/process_handler.py
@@ -346,7 +346,7 @@ class CrawlProcessHandler(CrawlProcessHandlerBase):
self._process_hup_timeout = to
else:
self._kill_stale_process()
- except:
+ except Exception:
self.logger.error("Error while handling lockfile %s.", lockfile,
exc_info=True)
errmsg = ("Error while trying to terminate a stale process.<br>" +
@@ -451,25 +451,32 @@ class CrawlProcessHandler(CrawlProcessHandlerBase):
self.logger.info("Starting %s.", game["id"])
- self.process = TerminalRecorder(call, self.ttyrec_filename,
- self._ttyrec_id_header(),
- self.logger, self.io_loop,
- config.recording_term_size)
- self.process.end_callback = self._on_process_end
- self.process.output_callback = self._on_process_output
- self.process.activity_callback = self.note_activity
+ try:
+ self.process = TerminalRecorder(call, self.ttyrec_filename,
+ self._ttyrec_id_header(),
+ self.logger, self.io_loop,
+ config.recording_term_size)
+ self.process.end_callback = self._on_process_end
+ self.process.output_callback = self._on_process_output
+ self.process.activity_callback = self.note_activity
- self.gen_inprogress_lock()
+ self.gen_inprogress_lock()
- self.connect(self.socketpath, True)
+ self.connect(self.socketpath, True)
- self.logger.info("Crawl FDs: fd%s, fd%s.",
- self.process.child_fd,
- self.process.errpipe_read)
+ self.logger.info("Crawl FDs: fd%s, fd%s.",
+ self.process.child_fd,
+ self.process.errpipe_read)
- self.last_activity_time = time.time()
+ self.last_activity_time = time.time()
- self.check_where()
+ self.check_where()
+ except Exception:
+ self.logger.warning("Error while starting the Crawl process!", exc_info=True)
+ if self.process:
+ self.stop()
+ else:
+ self._on_process_end()
def connect(self, socketpath, primary = False):
self.socketpath = socketpath
@@ -488,6 +495,7 @@ class CrawlProcessHandler(CrawlProcessHandlerBase):
f.flush()
def remove_inprogress_lock(self):
+ if self.inprogress_lock_file is None: return
fcntl.lockf(self.inprogress_lock_file.fileno(), fcntl.LOCK_UN)
self.inprogress_lock_file.close()
try: