'www.example.org',
'test.example.org'
),
- //list of URL prefixes that should be ignored
- 'blacklist' => array(
- 'http://bad.example.org/'
- ),
//list of regexes for URLs that should not be crawled
'crawlBlacklist' => array(
),
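//(illustrative only, not part of the original config: assuming the
// patterns are PCRE, an entry like '#^http://bad\.example\.org/#'
// would exclude the URLs that the removed 'blacklist' prefix covered)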
//verbose output
'debug' => true,
+ //full path to log file
+ 'logfile' => null,
//time in seconds after which URLs may be re-indexed
'refreshtime' => 86400,
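//(note: 86400 seconds = 24 hours)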
//if directly linked URLs shall be indexed, even if they are