Look at this piece of code at the top of the script …
[code]
function crawl_page($url, $depth = 5)
{
And this one at the bottom of the script ….
[code]
crawl_page(“http://google.com”, 2);
What is the depth of the pages to crawl: 5 or 2? And why are there two different values here?
Is one a default and the other an override? If so, is the value in the top code the default?
Full Code from StackOverflow …
I can’t ask over there, as I do not have the 50 reputation points needed to comment on the question, so I am asking here.
[code]
<?php
//https://stackoverflow.com/questions/2313107/how-do-i-make-a-simple-crawler-in-php
//WORKING!
// NOTE(review): the pasted version of these two lines used typographic quotes
// (‘ ’), which are a parse error in PHP; replaced with straight quotes.
// Show all errors while developing this crawler.
ini_set('display_errors', true);
error_reporting(E_ALL);
/**
 * Recursively crawl a page, following its links, and echo each page's HTML.
 *
 * Answering the question in the surrounding text: the `$depth = 5` here is
 * only a DEFAULT used when the caller passes no second argument; a call such
 * as crawl_page($url, 2) overrides it with 2.
 *
 * @param string $url   Absolute URL to fetch.
 * @param int    $depth Remaining recursion depth; 0 means "stop". Default 5.
 *
 * @return void Pages are written to output via echo.
 */
function crawl_page($url, $depth = 5)
{
    // Function-static cache shared by the whole recursion: each URL is
    // fetched at most once, even when reached via different link paths.
    static $seen = array();
    if (isset($seen[$url]) || $depth === 0) {
        return;
    }
    $seen[$url] = true;

    $dom = new DOMDocument('1.0');
    // @ suppresses the warnings libxml raises on real-world (invalid) HTML
    // and on unreachable URLs; a failed load simply leaves an empty document.
    @$dom->loadHTMLFile($url);

    $anchors = $dom->getElementsByTagName('a');
    foreach ($anchors as $element) {
        $href = $element->getAttribute('href');
        // A href that does not start with "http" is treated as site-relative
        // and resolved against the current URL.
        if (0 !== strpos($href, 'http')) {
            $path = '/' . ltrim($href, '/');
            if (extension_loaded('http')) {
                // pecl_http can merge the new path into the base URL directly.
                $href = http_build_url($url, array('path' => $path));
            } else {
                // Manual rebuild: scheme://[user:pass@]host[:port] + dir + path.
                $parts = parse_url($url);
                $href = $parts['scheme'] . '://';
                if (isset($parts['user']) && isset($parts['pass'])) {
                    $href .= $parts['user'] . ':' . $parts['pass'] . '@';
                }
                $href .= $parts['host'];
                if (isset($parts['port'])) {
                    $href .= ':' . $parts['port'];
                }
                // BUGFIX: parse_url() omits 'path' entirely for URLs like
                // "http://host", which made dirname() raise an undefined-index
                // notice; default to '/' in that case.
                $href .= dirname($parts['path'] ?? '/', 1) . $path;
            }
        }
        // BUGFIX: the pasted code had an en-dash here ($depth – 1), which is
        // a PHP parse error; it must be the ASCII minus operator.
        crawl_page($href, $depth - 1);
    }

    echo "URL:", $url, PHP_EOL, "CONTENT:", PHP_EOL, $dom->saveHTML(), PHP_EOL, PHP_EOL;
}
// Start the crawl. The 2 here OVERRIDES the function's default depth of 5:
// the start page is fetched, plus one further level of links.
// (Straight quotes restored — the pasted version used typographic “ ”,
// which is a PHP parse error. The closing ?> tag is intentionally omitted,
// as recommended for PHP-only files, to avoid accidental trailing output.)
crawl_page("http://localhost/ebrute/crawler/6/1.php", 2);