Add a crawl timeout message to addUrlsQueue in WebQueueBundle, a=chris

Chris Pollett [2015-12-18]
Filename
src/library/FetchUrl.php
src/library/WebQueueBundle.php
diff --git a/src/library/FetchUrl.php b/src/library/FetchUrl.php
index c08fdc1af..8d12a4c2c 100755
--- a/src/library/FetchUrl.php
+++ b/src/library/FetchUrl.php
@@ -320,7 +320,7 @@ class FetchUrl implements CrawlConstants
                    at crawl time. Not have it on other systems causes crashes
                    at query time
                  */
-                if (!$minimal) {
+                if ($minimal) {
                    curl_multi_remove_handle($agent_handler, $sites[$i][0]);
                 }
                 curl_close($sites[$i][0]);
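
For context, the hunk above only changes the condition (`$minimal`) under which curl_multi_remove_handle() is called before curl_close(). Below is a minimal, self-contained sketch of the standard curl multi cleanup order; the URLs and variable names are placeholders for illustration and this is not Yioop code:

```php
<?php
// Illustrative only: the usual cleanup for a curl multi handle is to remove
// each easy handle from the multi handle, close the easy handle, and finally
// close the multi handle itself.
$agent_handler = curl_multi_init();
$handles = [];
foreach (["https://example.com/a", "https://example.com/b"] as $url) {
    $ch = curl_init($url);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
    curl_multi_add_handle($agent_handler, $ch);
    $handles[] = $ch;
}
do {
    $status = curl_multi_exec($agent_handler, $active);
    if ($active) {
        curl_multi_select($agent_handler);
    }
} while ($active && $status == CURLM_OK);
foreach ($handles as $ch) {
    curl_multi_remove_handle($agent_handler, $ch);
    curl_close($ch);
}
curl_multi_close($agent_handler);
```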
diff --git a/src/library/WebQueueBundle.php b/src/library/WebQueueBundle.php
index 5e9e44fa3..776cc2afc 100755
--- a/src/library/WebQueueBundle.php
+++ b/src/library/WebQueueBundle.php
@@ -296,17 +296,19 @@ class WebQueueBundle implements Notifier
     {
         $add_urls = [];
         $count = count($url_pairs);
-        if ( $count < 1) return;
+        if ( $count < 1) { return; }
         for ($i = 0; $i < $count; $i++) {
             $add_urls[$i][0] = & $url_pairs[$i][0];
         }
         $objects = $this->to_crawl_archive->addObjects("offset", $add_urls);
         for ($i = 0; $i < $count; $i++) {
+            L\crawlTimeoutLog("..Scheduler: Adding url %s of %s ",
+                $i, $count);
             $url = & $url_pairs[$i][0];
             $weight = $url_pairs[$i][1];
             if (isset($objects[$i]['offset'])) {
                 $offset = $objects[$i]['offset'];
-                $data = packInt($offset).packInt(0).packInt(self::NO_FLAGS);
+                $data = packInt($offset) . packInt(0) . packInt(self::NO_FLAGS);
                 if ($this->insertHashTable(crawlHash($url, true), $data)) {
                     /*
                        we will change 0 to priority queue index in the
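
The new L\crawlTimeoutLog() call added above emits scheduler progress while addUrlsQueue loops over a potentially large batch of URLs. A minimal sketch of that kind of rate-limited progress logging, using a hypothetical standalone helper (the function name, error_log backend, and 5-second interval are assumptions, not Yioop's actual crawlTimeoutLog() implementation):

```php
<?php
// Hypothetical sketch of a rate-limited progress logger: inside a long loop,
// emit a log line at most once every $interval seconds so the log is not
// flooded with one message per URL.
function rateLimitedLog(string $format, ...$args)
{
    static $last_log_time = 0;
    $interval = 5; // assumed minimum seconds between log lines
    $now = time();
    if ($now - $last_log_time >= $interval) {
        $last_log_time = $now;
        error_log(vsprintf($format, $args));
    }
}

// Usage, mirroring the call added in the diff:
$count = 100000;
for ($i = 0; $i < $count; $i++) {
    rateLimitedLog("..Scheduler: Adding url %s of %s ", $i, $count);
    // ... add the url to the queue ...
}
```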