Adjust queue server process timeout, a=chris
diff --git a/src/configs/Config.php b/src/configs/Config.php
index 4a279634b..ed1ae6426 100755
--- a/src/configs/Config.php
+++ b/src/configs/Config.php
@@ -592,14 +592,18 @@ nsdefine('PAGE_TIMEOUT', 30);
/** time in seconds before we give up on a single page request*/
nsdefine('SINGLE_PAGE_TIMEOUT', ONE_MINUTE);
/** max time in seconds in a process before write a log message if
- crawlTimeoutLog is called repeatedly from a loop
+ * crawlTimeoutLog is called repeatedly from a loop
*/
nsdefine('LOG_TIMEOUT', 30);
+/** Number of lines of QueueServer log file to check to make sure both
+ * Indexer and Scheduler are running. 6000 lines spans roughly 20-30 minutes
+ */
+nsdefine('LOG_LINES_TO_RESTART', 6000);
/**
* Maximum time a crawl daemon process can go before calling
* @see CrawlDaemon::processHandler
*/
-nsdefine('PROCESS_TIMEOUT', 4 * ONE_MINUTE);
+nsdefine('PROCESS_TIMEOUT', 15 * ONE_MINUTE);
/**
* Number of error page 400 or greater seen from a host before crawl-delay
* host and dump remainder from current schedule
diff --git a/src/executables/QueueServer.php b/src/executables/QueueServer.php
index 9c4f03670..4c048f37d 100755
--- a/src/executables/QueueServer.php
+++ b/src/executables/QueueServer.php
@@ -466,7 +466,7 @@ class QueueServer implements CrawlConstants, Join
if($time - $last_check < C\LOG_TIMEOUT ||
$time - $first_check < C\PROCESS_TIMEOUT ) { return; }
L\crawlLog("Checking if both processes still running ...");
- $lines_to_check = 2000; //about 8-9 minutes of log data
+ $lines_to_check = C\LOG_LINES_TO_RESTART; //about 20-30 minutes of log data
$lines = L\tail(C\LOG_DIR."/QueueServer.log", $lines_to_check);
L\crawlLog("...Got QueueServer.log lines");
if(count($lines) < $lines_to_check) {