#include <cstdio>
// The method names used below (Initialize, AddUnspidered, CrawlNext,
// lastUrl, get_NumUnspidered, ...) match Chilkat's CkSpider class, so
// that component is assumed here.
#include <CkSpider.h>

int main()
{
    CkSpider spider;
    const char *url = "http://www.amazon.com";
    // Domain to restrict the crawl to, taken from the seed URL above.
    const char *domain = "www.amazon.com";

    printf("Started\n");     // point 1
    spider.Initialize(domain);
    printf("Initialized\n"); // point 2
    // Add the 1st URL:
    spider.AddUnspidered(url);
    // Start crawling the site by calling CrawlNext repeatedly.
    for (int i = 0; i < 10; i++) {
        bool success = spider.CrawlNext();
        if (success) {
            // Show the URL of the page just spidered.
            printf("%s\n", spider.lastUrl());
            // The HTML is available in the LastHtml property.
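            // A minimal sketch, assuming Chilkat's CkSpider API: lastHtml()
            // returns the LastHtml property as a NUL-terminated string, so
            // printing its first 80 characters confirms the page downloaded.
            printf("%.80s\n", spider.lastHtml());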
        }
        else {
            // Did we get an error, or are there no more URLs to crawl?
            if (spider.get_NumUnspidered() == 0) {
                printf("No more URLs to spider\n");
            }
            else {
                printf("%s\n", spider.lastErrorText());
            }
            break; // break the for() loop
        }

        // Sleep 1 second before spidering the next URL.
        spider.SleepMs(1000);
    }
printf("End");