Added callbacks, range requests and head requests to Downloader
parent c1c35c75f9
commit 3fd55b1647
2 changed files with 187 additions and 28 deletions
@@ -43,9 +43,9 @@ namespace HTTP{
   }

   /// Simply turns link into a HTTP::URL and calls get(const HTTP::URL&)
-  bool Downloader::get(const std::string &link){
+  bool Downloader::get(const std::string &link, Util::DataCallback &cb){
     HTTP::URL uri(link);
-    return get(uri);
+    return get(uri, 6, cb);
   }

   /// Sets an extra (or overridden) header to be sent with outgoing requests.
@@ -60,24 +60,18 @@ namespace HTTP{
   Parser &Downloader::getHTTP(){return H;}

   /// Returns a reference to the internal Socket::Connection class instance.
-  Socket::Connection &Downloader::getSocket(){
-    return S;
-  }
+  Socket::Connection &Downloader::getSocket(){return S;}

-  Downloader::~Downloader(){
-    S.close();
-  }
+  Downloader::~Downloader(){S.close();}

   /// Sends a request for the given URL, does no waiting.
-  void Downloader::doRequest(const HTTP::URL &link, const std::string &method,
-                             const std::string &body){
+  void Downloader::doRequest(const HTTP::URL &link, const std::string &method, const std::string &body){
     if (!canRequest(link)){return;}
     bool needSSL = (link.protocol == "https");
     H.Clean();
     // Reconnect if needed
     if (!proxied || needSSL){
-      if (!getSocket() || link.host != connectedHost || link.getPort() != connectedPort ||
-          needSSL != ssl){
+      if (!getSocket() || link.host != connectedHost || link.getPort() != connectedPort || needSSL != ssl){
         getSocket().close();
         connectedHost = link.host;
         connectedPort = link.getPort();
@@ -92,8 +86,7 @@ namespace HTTP{
 #endif
       }
     }else{
-      if (!getSocket() || proxyUrl.host != connectedHost || proxyUrl.getPort() != connectedPort ||
-          needSSL != ssl){
+      if (!getSocket() || proxyUrl.host != connectedHost || proxyUrl.getPort() != connectedPort || needSSL != ssl){
         getSocket().close();
         connectedHost = proxyUrl.host;
         connectedPort = proxyUrl.getPort();
@@ -120,6 +113,7 @@ namespace HTTP{
         H.SetHeader("Host", link.host);
       }
     }
+
     if (method.size()){H.method = method;}
     H.SetHeader("User-Agent", "MistServer " PACKAGE_VERSION);
     H.SetHeader("X-Version", PACKAGE_VERSION);
@@ -136,19 +130,19 @@ namespace HTTP{
         H.SetHeader(it->first, it->second);
       }
     }
+
     H.SendRequest(getSocket(), body);
     H.Clean();
   }

-  /// Downloads the given URL into 'H', returns true on success.
-  /// Makes at most 5 attempts, and will wait no longer than 5 seconds without receiving data.
-  bool Downloader::get(const HTTP::URL &link, uint8_t maxRecursiveDepth){
+  /// Do a HEAD request to download the HTTP headers only, returns true on success
+  bool Downloader::head(const HTTP::URL &link, uint8_t maxRecursiveDepth){
     if (!canRequest(link)){return false;}
     size_t loop = retryCount + 1; // max 5 attempts
     while (--loop){// loop while we are unsuccessful
-      MEDIUM_MSG("Retrieving %s (%zu/%" PRIu32 ")", link.getUrl().c_str(), retryCount - loop + 1,
-                 retryCount);
-      doRequest(link);
+      MEDIUM_MSG("Retrieving %s (%zu/%" PRIu32 ")", link.getUrl().c_str(), retryCount - loop + 1, retryCount);
+      doRequest(link, "HEAD");
+      H.headerOnly = true;
       uint64_t reqTime = Util::bootSecs();
       while (getSocket() && Util::bootSecs() < reqTime + dataTimeout){
         // No data? Wait for a second or so.
@@ -156,6 +150,7 @@ namespace HTTP{
           if (progressCallback != 0){
             if (!progressCallback()){
               WARN_MSG("Download aborted by callback");
+              H.headerOnly = false;
               return false;
             }
           }
@@ -164,6 +159,7 @@ namespace HTTP{
         }
         // Data! Check if we can parse it...
         if (H.Read(getSocket())){
+          H.headerOnly = false;
           if (shouldContinue()){
             if (maxRecursiveDepth == 0){
               FAIL_MSG("Maximum recursion depth reached");
@@ -171,11 +167,16 @@ namespace HTTP{
             }
             if (!canContinue(link)){return false;}
             if (getStatusCode() >= 300 && getStatusCode() < 400){
-              return get(link.link(getHeader("Location")), --maxRecursiveDepth);
+              return head(link.link(getHeader("Location")), --maxRecursiveDepth);
             }else{
-              return get(link, --maxRecursiveDepth);
+              return head(link, --maxRecursiveDepth);
             }
           }
+
+          if(H.protocol == "HTTP/1.0"){
+            getSocket().close();
+          }
+
           return true; // Success!
         }
         // reset the data timeout
@@ -183,12 +184,15 @@ namespace HTTP{
          if (progressCallback != 0){
            if (!progressCallback()){
              WARN_MSG("Download aborted by callback");
+              H.headerOnly = false;
              return false;
            }
          }
          reqTime = Util::bootSecs();
        }
      }
+    H.headerOnly = false;
+
     if (getSocket()){
       FAIL_MSG("Timeout while retrieving %s (%zu/%" PRIu32 ")", link.getUrl().c_str(),
                retryCount - loop + 1, retryCount);
@@ -206,13 +210,150 @@ namespace HTTP{
     return false;
   }

-  bool Downloader::post(const HTTP::URL &link, const std::string &payload, bool sync,
-                        uint8_t maxRecursiveDepth){
+  bool Downloader::getRangeNonBlocking(const HTTP::URL &link, size_t byteStart, size_t byteEnd, Util::DataCallback &cb){
+    char tmp[32];
+    if (byteEnd <= 0){// get range from byteStart til eof
+      sprintf(tmp, "bytes=%llu-", byteStart);
+    }else{
+      sprintf(tmp, "bytes=%llu-%llu", byteStart, byteEnd - 1);
+    }
+    setHeader("Range", tmp);
+    return getNonBlocking(link, 6);
+  }
+
+  bool Downloader::getRange(const HTTP::URL &link, size_t byteStart, size_t byteEnd, Util::DataCallback &cb){
+    char tmp[32];
+    if (byteEnd <= 0){// get range from byteStart til eof
+      sprintf(tmp, "bytes=%llu-", byteStart);
+    }else{
+      sprintf(tmp, "bytes=%llu-%llu", byteStart, byteEnd - 1);
+    }
+    setHeader("Range", tmp);
+    return get(link, 6, cb);
+  }
+
+  /// Downloads the given URL into 'H', returns true on success.
+  /// Makes at most 5 attempts, and will wait no longer than 5 seconds without receiving data.
+  bool Downloader::get(const HTTP::URL &link, uint8_t maxRecursiveDepth, Util::DataCallback &cb){
+    if (!getNonBlocking(link, maxRecursiveDepth)){return false;}
+
+    while (!continueNonBlocking(cb)){Util::sleep(100);}
+
+    if (isComplete){return true;}
+
+    FAIL_MSG("Could not retrieve %s", link.getUrl().c_str());
+    return false;
+  }
+
+  // prepare a request to be handled in a nonblocking fashion by continueNonBlocking()
+  bool Downloader::getNonBlocking(const HTTP::URL &link, uint8_t maxRecursiveDepth){
+    if (!canRequest(link)){return false;}
+    nbLink = link;
+    nbMaxRecursiveDepth = maxRecursiveDepth;
+    nbLoop = retryCount + 1; // max 5 attempts
+    isComplete = false;
+    doRequest(nbLink);
+    nbReqTime = Util::bootSecs();
+    return true;
+  }
+
+  // continue handling a request, originally set up by the getNonBlocking() function
+  // returns true if the request is complete
+  bool Downloader::continueNonBlocking(Util::DataCallback &cb){
+    while (true){
+      if (!getSocket() && !isComplete){
+        if (nbLoop < 2){
+          FAIL_MSG("Exceeded retry limit while retrieving %s (%zu/%" PRIu32 ")",
+                   nbLink.getUrl().c_str(), retryCount - nbLoop + 1, retryCount);
+          Util::sleep(1000);
+          return true;
+        }
+        nbLoop--;
+        if (nbLoop == retryCount){
+          MEDIUM_MSG("Retrieving %s (%zu/%" PRIu32 ")", nbLink.getUrl().c_str(),
+                     retryCount - nbLoop + 1, retryCount);
+        }else{
+          if (retryCount - nbLoop + 1 > 2){
+            INFO_MSG("Lost connection while retrieving %s (%zu/%" PRIu32 ")",
+                     nbLink.getUrl().c_str(), retryCount - nbLoop + 1, retryCount);
+          }else{
+            MEDIUM_MSG("Lost connection while retrieving %s (%zu/%" PRIu32 ")",
+                       nbLink.getUrl().c_str(), retryCount - nbLoop + 1, retryCount);
+          }
+        }
+
+        if (H.hasHeader("Accept-Ranges") && getHeader("Accept-Ranges").size() > 0){
+          INFO_MSG("new request? range! len: %llu, currlength: %llu", H.length, H.currentLength);
+          getRangeNonBlocking(nbLink, H.currentLength, 0, cb);
+          return true;
+        }else{
+          doRequest(nbLink);
+        }
+
+        if (!getSocket()){
+          WARN_MSG("Aborting download: could not open connection");
+          return true;
+        }
+        nbReqTime = Util::bootSecs();
+      }
+
+      if (Util::bootSecs() >= nbReqTime + dataTimeout){
+        FAIL_MSG("Timeout while retrieving %s (%zu/%" PRIu32 ")", nbLink.getUrl().c_str(),
+                 retryCount - nbLoop + 1, retryCount);
+        getSocket().close();
+        return false; // because we may have retries left
+      }
+
+      // No data? Wait for a second or so.
+      if (!getSocket().spool()){
+        if (progressCallback != 0){
+          if (!progressCallback()){
+            WARN_MSG("Download aborted by callback");
+            return true;
+          }
+        }
+        return false;
+      }
+      // Data! Check if we can parse it...
+      if (H.Read(getSocket(), cb)){
+        if (shouldContinue()){
+          if (nbMaxRecursiveDepth == 0){
+            FAIL_MSG("Maximum recursion depth reached");
+            return true;
+          }
+          if (!canContinue(nbLink)){return false;}
+          --nbMaxRecursiveDepth;
+          if (getStatusCode() >= 300 && getStatusCode() < 400){
+            doRequest(nbLink.link(getHeader("Location")));
+          }else{
+            doRequest(nbLink);
+          }
+          return false;
+        }

+        isComplete = true; // Success
+        return true;
+      }
+      // reset the data timeout
+      if (nbReqTime != Util::bootSecs()){
+        if (progressCallback != 0){
+          if (!progressCallback()){
+            WARN_MSG("Download aborted by callback");
+            return true;
+          }
+        }
+        nbReqTime = Util::bootSecs();
+      }
+    }
+
+    return false; // we should never get here
+  }
+
+  bool Downloader::post(const HTTP::URL &link, const std::string &payload, bool sync, uint8_t maxRecursiveDepth){
     if (!canRequest(link)){return false;}
     size_t loop = retryCount; // max 5 attempts
     while (--loop){// loop while we are unsuccessful
-      MEDIUM_MSG("Posting to %s (%zu/%" PRIu32 ")", link.getUrl().c_str(), retryCount - loop + 1,
-                 retryCount);
+      MEDIUM_MSG("Posting to %s (%zu/%" PRIu32 ")", link.getUrl().c_str(), retryCount - loop + 1, retryCount);
       doRequest(link, "POST", payload);
       // Not synced? Ignore the response and immediately return true.
       if (!sync){return true;}
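Note: the functions above now stream response body data through the new Util::DataCallback hook rather than only buffering it in the parser. Below is a minimal usage sketch of the blocking calls; only HTTP::Downloader, head(), getRange() and Util::DataCallback come from this commit, while the PrintSink class, the dataCallback override name, the include paths and the example URL are illustrative assumptions.

#include <cstdio>
#include "downloader.h" // assumed include path for the class changed in this commit

// Assumption: Util::DataCallback exposes a virtual hook that receives body chunks as they arrive.
class PrintSink : public Util::DataCallback{
public:
  void dataCallback(const char *ptr, size_t size){fwrite(ptr, 1, size, stdout);}
};

int main(){
  HTTP::Downloader d;
  PrintSink sink;
  HTTP::URL url("http://example.com/file.bin"); // illustrative URL

  // HEAD request: only the response headers are parsed (via H.headerOnly above).
  if (d.head(url)){printf("Content-Length: %s\n", d.getHeader("Content-Length").c_str());}

  // Ranged GET: byteEnd is exclusive here, so this asks for "bytes=0-1023"
  // and streams the body into sink as it is received.
  return d.getRange(url, 0, 1024, sink) ? 0 : 1;
}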
@@ -1,6 +1,7 @@
 #include "http_parser.h"
 #include "url.h"
 #include "socket.h"
+#include "util.h"

 namespace HTTP{
   class Downloader{
@@ -11,10 +12,17 @@ namespace HTTP{
     const std::string &const_data() const;
     void doRequest(const HTTP::URL &link, const std::string &method = "",
                    const std::string &body = "");
-    bool get(const std::string &link);
-    bool get(const HTTP::URL &link, uint8_t maxRecursiveDepth = 6);
+    bool get(const std::string &link, Util::DataCallback &cb = Util::defaultDataCallback);
+    bool get(const HTTP::URL &link, uint8_t maxRecursiveDepth = 6, Util::DataCallback &cb = Util::defaultDataCallback);
+    bool head(const HTTP::URL &link, uint8_t maxRecursiveDepth = 6);
+    bool getRange(const HTTP::URL &link, size_t byteStart, size_t byteEnd, Util::DataCallback &cb = Util::defaultDataCallback);
+    bool getRangeNonBlocking(const HTTP::URL &link, size_t byteStart, size_t byteEnd, Util::DataCallback &cb = Util::defaultDataCallback);
     bool post(const HTTP::URL &link, const std::string &payload, bool sync = true,
               uint8_t maxRecursiveDepth = 6);
+
+    bool getNonBlocking(const HTTP::URL &link, uint8_t maxRecursiveDepth = 6);
+    bool continueNonBlocking(Util::DataCallback &cb);
+
     std::string getHeader(const std::string &headerName);
     std::string &getStatusText();
     uint32_t getStatusCode();
@@ -27,12 +35,14 @@ namespace HTTP{
     void setHeader(const std::string &name, const std::string &val);
     void clearHeaders();
     bool canRequest(const HTTP::URL &link);
+    bool completed(){return isComplete;}
     Parser &getHTTP();
     Socket::Connection &getSocket();
     uint32_t retryCount, dataTimeout;
     bool isProxied() const;

   private:
+    bool isComplete;
     std::map<std::string, std::string> extraHeaders; ///< Holds extra headers to sent with request
     std::string connectedHost;                       ///< Currently connected host name
     uint32_t connectedPort;                          ///< Currently connected port number
@@ -43,6 +53,14 @@ namespace HTTP{
     std::string proxyAuthStr; ///< Most recently seen Proxy-Authenticate request
     bool proxied;             ///< True if proxy server is configured.
     HTTP::URL proxyUrl;       ///< Set to the URL of the configured proxy.
+
+    size_t nbLoop;
+    HTTP::URL nbLink;
+    uint8_t nbMaxRecursiveDepth;
+    uint64_t nbReqTime;
   };
+
+
+

 }// namespace HTTP
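The getNonBlocking()/continueNonBlocking() pair declared above is meant to be driven from a caller-owned loop; the blocking get() in the .cpp diff shows the intended pattern. A short sketch under the same assumptions as the earlier example follows (the include paths, fetchPolled name and 10 ms poll interval are illustrative; Util::sleep is already used by this commit and completed() is added by it).

#include "downloader.h" // assumed include path
#include "timing.h"     // assumed location of Util::sleep

// Drive the new non-blocking API until the transfer finishes or gives up.
bool fetchPolled(const HTTP::URL &url, Util::DataCallback &sink){
  HTTP::Downloader d;
  if (!d.getNonBlocking(url, 6)){return false;}
  // continueNonBlocking() returns false while the caller should keep polling
  // and true once the request has finished or been abandoned; completed() says which.
  while (!d.continueNonBlocking(sink)){Util::sleep(10);}
  return d.completed();
}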