git.mxchange.org Git - hub.git/commitdiff
Continued:
authorRoland Häder <roland@mxchange.org>
Thu, 26 Aug 2021 02:13:03 +0000 (04:13 +0200)
committerRoland Häder <roland@mxchange.org>
Thu, 26 Aug 2021 02:13:03 +0000 (04:13 +0200)
- Use readCsvFileLine()'s 2nd parameter to check whether each CSV line read is
  properly formatted (i.e. all 3 expected elements are present); see the sketch
  below the file list
- Updated the 'core' framework submodule

Signed-off-by: Roland Häder <roland@mxchange.org>
application/hub/classes/source/urls/class_CrawlerUploadedListUrlSource.php
core

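The framework-side change behind this commit lives in the 'core' submodule, which appears below only as a pointer bump, so the actual implementation of readCsvFileLine()'s new second parameter is not visible in this diff. The following is a hedged, hypothetical sketch of how such an expected-count check could look (function name, use of fgets()/str_getcsv() and the thrown exception are assumptions, not taken from the core framework); the newly imported OutOfBoundsException in the hub-side diff hints at a check along these lines.

<?php
// Hypothetical sketch only: approximates the core framework's readCsvFileLine().
// Assumed behaviour: read one CSV line, split it on the column separator and,
// when an expected element count is given, reject malformed lines early.
namespace Sketch;

use \OutOfBoundsException;

function readCsvFileLineSketch($fileHandle, string $columnSeparator, int $expectedMatches = 0): array {
	// Read one raw line; an empty array signals EOF to the caller
	$rawLine = fgets($fileHandle);
	if ($rawLine === false) {
		return [];
	}

	// Split the line; str_getcsv() also handles quoted fields
	$csvData = str_getcsv(trim($rawLine), $columnSeparator);

	// Enforce the expected element count when the caller requests it
	if ($expectedMatches > 0 && count($csvData) != $expectedMatches) {
		throw new OutOfBoundsException(sprintf('CSV line has %d elements, expected %d', count($csvData), $expectedMatches));
	}

	return $csvData;
}
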
index f012e66fc8f1197d709d2c422d6de9df5343d65e..d9cfeb9bede3841d9bd4de70b01d1a4f45fcb459 100644 (file)
@@ -14,6 +14,7 @@ use Org\Mxchange\CoreFramework\Generic\NullPointerException;
 use Org\Mxchange\CoreFramework\Registry\Registerable;
 
 // Import SPL stuff
+use \OutOfBoundsException;
 use \SplFileInfo;
 
 /**
@@ -196,7 +197,7 @@ class CrawlerUploadedListUrlSource extends BaseUrlSource implements UrlSource, R
        private function saveCsvDataInCrawlerQueue (array $csvData) {
                // The array must have a fixed amount of elements, later enhancements may accept more
                /* NOISY-DEBUG: */ self::createDebugInstance(__CLASS__, __LINE__)->debugOutput('CRAWLER-UPLOADED-LIST-URL-SOURCE: csvData()=' . count($csvData) . ' - CALLED!');
-               assert(count($csvData) == self::CRAWL_ENTRY_SIZE);
+               assert(count($csvData) == self::CRAWL_ENTRY_SIZE, sprintf('csvData has unexpected size %d/%d', count($csvData), self::CRAWL_ENTRY_SIZE));
 
                /*
                 * First convert the indexed array into an associative array. Don't
@@ -302,7 +303,7 @@ class CrawlerUploadedListUrlSource extends BaseUrlSource implements UrlSource, R
                $csvFileInstance = $this->getStackSourceInstance()->popNamed(self::STACK_NAME_CSV_FILE);
 
                // Read full "CSV line"
-               $csvData = $csvFileInstance->readCsvFileLine($this->columnSeparator);
+               $csvData = $csvFileInstance->readCsvFileLine($this->columnSeparator, self::CRAWL_ENTRY_SIZE);
 
                // Is the array empty?
                /* NOISY-DEBUG: */ self::createDebugInstance(__CLASS__, __LINE__)->debugOutput(sprintf('CRAWLER-UPLOADED-LIST-URL-SOURCE: csvData[%s]=%s', gettype($csvData), print_r($csvData, TRUE)));
@@ -317,7 +318,7 @@ class CrawlerUploadedListUrlSource extends BaseUrlSource implements UrlSource, R
                }
 
                // ...  with a fixed amount of elements, later enhancements may accept more
-               assert(count($csvData) == self::CRAWL_ENTRY_SIZE);
+               assert(count($csvData) == self::CRAWL_ENTRY_SIZE, sprintf('csvData has unexpected size %d/%d', count($csvData), self::CRAWL_ENTRY_SIZE));
 
                /*
                 * Push the file back on stack as it may contain more entries. This way
@@ -344,7 +345,7 @@ class CrawlerUploadedListUrlSource extends BaseUrlSource implements UrlSource, R
 
                // It must have a fixed amount of elements (see method parseCsvFile() for details)
                /* PRINTR-DEBUG: */ self::createDebugInstance(__CLASS__, __LINE__)->debugOutput('CRAWLER-UPLOADED-LIST-URL-SOURCE: csvData[' . gettype($csvData) . ']=' . print_r($csvData, TRUE));
-               assert(count($csvData) == self::CRAWL_ENTRY_SIZE);
+               assert(count($csvData) == self::CRAWL_ENTRY_SIZE, sprintf('csvData has unexpected size %d/%d', count($csvData), self::CRAWL_ENTRY_SIZE));
 
                // Save it in the crawler queue (which will enrich it with way more information)
                $this->saveCsvDataInCrawlerQueue($csvData);
diff --git a/core b/core
index 7798dfbfa76e420d9c696c2cb5be60cfd66b23d2..7c7b31032ea193ccc98c64038bff678023bc4619 160000 (submodule)
--- a/core
+++ b/core
@@ -1 +1 @@
-Subproject commit 7798dfbfa76e420d9c696c2cb5be60cfd66b23d2
+Subproject commit 7c7b31032ea193ccc98c64038bff678023bc4619
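
For context on the assert() changes above: PHP's assert() accepts an optional description as its second argument. When assertions are enabled (zend.assertions=1), a failed assertion either emits a warning containing that string or, with assert.exception enabled, throws an AssertionError carrying it as its message. A minimal standalone illustration, not part of the commit (the sample data and $expected value are made up and stand in for self::CRAWL_ENTRY_SIZE):

<?php
// Minimal illustration of assert() with a description string.
// Requires zend.assertions=1 (e.g. in a development php.ini).
ini_set('assert.exception', '1');   // throw AssertionError instead of warning

$csvData  = ['http://example.org/', '0'];  // only 2 elements for this demo
$expected = 3;                             // stands in for self::CRAWL_ENTRY_SIZE

try {
	assert(count($csvData) == $expected, sprintf('csvData has unexpected size %d/%d', count($csvData), $expected));
} catch (\AssertionError $e) {
	// Prints: csvData has unexpected size 2/3
	echo $e->getMessage(), PHP_EOL;
}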