Introduction - Constructor and destructor of the Crawler class: a QWidget-based page fetcher built on Qt 4's QHttp, with optional proxy settings read from QSettings.
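A minimal usage sketch (assumption, not part of the original listing: the header name crawler.h, the variable names, and the example URL are hypothetical; only the Crawler(QUrl&, QTreeWidget*) constructor signature is taken from the code below):

#include <QApplication>
#include <QTreeWidget>
#include <QUrl>
#include "crawler.h"   // hypothetical header declaring the Crawler class shown below

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    QTreeWidget tree;                           // hypothetical tree that will receive crawl results
    QUrl url("http://example.com/index.html");  // hypothetical start page
    Crawler crawler(url, &tree);                // the constructor below configures QHttp and the GET header

    tree.show();
    return app.exec();
}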
Crawler::~Crawler()
{
    http->abort();      // stop any request still in flight before tearing down
    delete http;
    delete tr_result;
    delete root;
    delete cookie_tr;
}
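// Constructor: creates the QHttp object, wires its done(bool) signal to the
// getPage() slot, applies an optional proxy from QSettings, and prepares the
// initial GET request header for the given URL.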
Crawler::Crawler(QUrl &url, QTreeWidget *tr) : QWidget()
{
    http = new QHttp(this);
    // getPage() is invoked once the whole HTTP exchange has finished
    connect(http, SIGNAL(done(bool)), this, SLOT(getPage()));

    // settings: optional HTTP proxy taken from the application's QSettings store
    QSettings settings;
    if (settings.value("network/useproxy").toBool()) {
        http->setProxy(settings.value("network/cra_host").toString(),
                       settings.value("network/cra_port").toInt(),
                       settings.value("network/cra_name").toString(),
                       settings.value("network/cra_pass").toString());
    }

    // the request: build the GET header for the target URL
    QHttpRequestHeader header;
    header.setValue("User-Agent", settings.value("crawler/user-agent", "Firefox 3").toString());
    header.setValue("Host", url.host());
    header.setRequest("GET", url.path());
    http->setHost(url.host(), url.port(80));
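    // Not shown in this listing: presumably the constructor goes on to issue the
    // request, e.g. http->request(header), and getPage() reads the reply with
    // http->readAll() when done(bool) is emitted (an assumption; the rest of the
    // constructor is outside this excerpt).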