class HTTPDirectory;
+ class HTTPRepoGetRequest : public HTTP::Request
+ {
+ public:
+ HTTPRepoGetRequest(HTTPDirectory* d, const std::string& u) :
+ HTTP::Request(u),
+ _directory(d)
+ {
+ }
+
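+ // cancel() drops the directory back-pointer (so onFail() won't call into it)
+ // and then aborts the underlying transfer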
+ virtual void cancel()
+ {
+ _directory = 0;
+ abort("Repository cancelled request");
+ }
+ protected:
+ HTTPDirectory* _directory;
+ };
+
+ typedef SGSharedPtr<HTTPRepoGetRequest> RepoRequestPtr;
+
class HTTPRepoPrivate
{
public:
typedef std::vector<HashCacheEntry> HashCache;
HashCache hashes;
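+ // children that fail to update are recorded here so the rest of the sync can proceed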
+ struct Failure
+ {
+ SGPath path;
+ AbstractRepository::ResultCode error;
+ };
+
+ typedef std::vector<Failure> FailureList;
+ FailureList failures;
+
HTTPRepoPrivate(HTTPRepository* parent) :
p(parent),
isUpdating(false),
status(AbstractRepository::REPO_NO_ERROR)
{ ; }
+ ~HTTPRepoPrivate();
+
HTTPRepository* p; // link back to outer
HTTP::Client* http;
std::string baseUrl;
HTTPDirectory* rootDir;
HTTP::Request_ptr updateFile(HTTPDirectory* dir, const std::string& name);
- HTTP::Request_ptr updateDir(HTTPDirectory* dir);
+ HTTP::Request_ptr updateDir(HTTPDirectory* dir, const std::string& hash);
std::string hashForPath(const SGPath& p);
void updatedFileContents(const SGPath& p, const std::string& newHash);
void failedToUpdateChild(const SGPath& relativePath,
AbstractRepository::ResultCode fileStatus);
- typedef std::vector<HTTP::Request_ptr> RequestVector;
+ typedef std::vector<RepoRequestPtr> RequestVector;
RequestVector requests;
- void finishedRequest(const HTTP::Request_ptr& req);
+ void finishedRequest(const RepoRequestPtr& req);
HTTPDirectory* getOrCreateDirectory(const std::string& path);
bool deleteDirectory(const std::string& path);
_repository(repo),
_relativePath(path)
{
+ assert(repo);
+
SGPath p(absolutePath());
if (p.exists()) {
try {
// root dir failed
_repository->failedToGetRootIndex(status);
} else {
- SG_LOG(SG_TERRASYNC, SG_WARN, "failed to update dir:" << _relativePath);
_repository->failedToUpdateChild(_relativePath, status);
}
}
void updateChildrenBasedOnHash()
{
- SG_LOG(SG_TERRASYNC, SG_DEBUG, "updated children for:" << relativePath());
+ //SG_LOG(SG_TERRASYNC, SG_DEBUG, "updated children for:" << relativePath());
string_list indexNames = indexChildren(),
toBeUpdated, orphans;
} else if (c->hash != hash) {
// file exists, but hash mismatch, schedule update
if (!hash.empty()) {
- SG_LOG(SG_TERRASYNC, SG_INFO, "file exists but hash is wrong for:" << c->name);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "file exists but hash is wrong for:" << c->name);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "on disk:" << hash << " vs in info:" << c->hash);
}
toBeUpdated.push_back(c->name);
SGPath p(relativePath());
p.append(*it);
HTTPDirectory* childDir = _repository->getOrCreateDirectory(p.str());
- _repository->updateDir(childDir);
+ _repository->updateDir(childDir, cit->hash);
}
}
}
void didUpdateFile(const std::string& file, const std::string& hash)
{
- SGPath fpath(_relativePath);
- fpath.append(file);
- _repository->updatedFileContents(fpath, hash);
- SG_LOG(SG_TERRASYNC, SG_INFO, "did update:" << fpath);
+ // check hash matches what we expected
+ ChildInfoList::iterator it = findIndexChild(file);
+ if (it == children.end()) {
+ SG_LOG(SG_TERRASYNC, SG_WARN, "updated file but not found in dir:" << _relativePath << " " << file);
+ } else {
+ SGPath fpath(_relativePath);
+ fpath.append(file);
+
+ if (it->hash != hash) {
+ _repository->failedToUpdateChild(fpath, AbstractRepository::REPO_ERROR_CHECKSUM);
+ } else {
+ _repository->updatedFileContents(fpath, hash);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "did update:" << fpath);
+ } // of hash matches
+ } // of found in child list
}
void didFailToUpdateFile(const std::string& file,
{
SGPath fpath(_relativePath);
fpath.append(file);
- SG_LOG(SG_TERRASYNC, SG_WARN, "failed to update:" << fpath);
_repository->failedToUpdateChild(fpath, status);
}
private:
_d->status = REPO_NO_ERROR;
_d->isUpdating = true;
- _d->updateDir(_d->rootDir);
+ _d->failures.clear();
+ _d->updateDir(_d->rootDir, std::string());
}
bool HTTPRepository::isDoingSync() const
AbstractRepository::ResultCode
HTTPRepository::failure() const
{
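+ // individual child failures are collected in 'failures' rather than overwriting
+ // 'status'; surface them as a partial update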
+ if ((_d->status == REPO_NO_ERROR) && !_d->failures.empty()) {
+ return REPO_PARTIAL_UPDATE;
+ }
+
return _d->status;
}
- class FileGetRequest : public HTTP::Request
+ class FileGetRequest : public HTTPRepoGetRequest
{
public:
FileGetRequest(HTTPDirectory* d, const std::string& file) :
- HTTP::Request(makeUrl(d, file)),
- directory(d),
+ HTTPRepoGetRequest(d, makeUrl(d, file)),
fileName(file)
{
- SG_LOG(SG_TERRASYNC, SG_INFO, "will GET file " << url());
+ pathInRepo = _directory->absolutePath();
+ pathInRepo.append(fileName);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "will GET file " << url());
}
protected:
virtual void gotBodyData(const char* s, int n)
{
if (!file.get()) {
- SGPath p(pathInRepo());
- file.reset(new SGFile(p.str()));
+ file.reset(new SGFile(pathInRepo.str()));
if (!file->open(SG_IO_OUT)) {
- SG_LOG(SG_TERRASYNC, SG_WARN, "unable to create file " << p);
+ SG_LOG(SG_TERRASYNC, SG_WARN, "unable to create file " << pathInRepo);
abort("Unable to create output file");
}
sha1_init(&hashContext);
}
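+ // hash the body as it streams in; onDone() hands the digest to didUpdateFile(),
+ // which checks it against the directory index entry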
+ sha1_write(&hashContext, s, n);
file->write(s, n);
}
{
file->close();
if (responseCode() == 200) {
- std::string hash = strutils::encodeHex((char*) sha1_result(&hashContext));
- directory->didUpdateFile(fileName, hash);
-
- SG_LOG(SG_TERRASYNC, SG_DEBUG, "got file " << fileName << " in " << directory->absolutePath());
+ std::string hash = strutils::encodeHex(sha1_result(&hashContext), HASH_LENGTH);
+ _directory->didUpdateFile(fileName, hash);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "got file " << fileName << " in " << _directory->absolutePath());
} else if (responseCode() == 404) {
- directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_FILE_NOT_FOUND);
+ _directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_FILE_NOT_FOUND);
} else {
- directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_HTTP);
+ _directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_HTTP);
}
- directory->repository()->finishedRequest(this);
+ _directory->repository()->finishedRequest(this);
}
virtual void onFail()
{
file.reset();
- directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_SOCKET);
- directory->repository()->finishedRequest(this);
+ // discard any partially-written download
+ if (pathInRepo.exists()) {
+ pathInRepo.remove();
+ }
+ if (_directory) {
+ _directory->didFailToUpdateFile(fileName, AbstractRepository::REPO_ERROR_SOCKET);
+ _directory->repository()->finishedRequest(this);
+ }
}
private:
static std::string makeUrl(HTTPDirectory* d, const std::string& file)
return d->url() + "/" + file;
}
- SGPath pathInRepo() const
- {
- SGPath p(directory->absolutePath());
- p.append(fileName);
- return p;
- }
-
- HTTPDirectory* directory;
std::string fileName; // if empty, we're getting the directory itself
+ SGPath pathInRepo;
simgear::sha1nfo hashContext;
std::auto_ptr<SGFile> file;
};
- class DirGetRequest : public HTTP::Request
+ class DirGetRequest : public HTTPRepoGetRequest
{
public:
- DirGetRequest(HTTPDirectory* d) :
- HTTP::Request(makeUrl(d)),
- directory(d),
- _isRootDir(false)
+ DirGetRequest(HTTPDirectory* d, const std::string& targetHash) :
+ HTTPRepoGetRequest(d, makeUrl(d)),
+ _isRootDir(false),
+ _targetHash(targetHash)
{
sha1_init(&hashContext);
- SG_LOG(SG_TERRASYNC, SG_INFO, "will GET dir " << url());
-
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "will GET dir " << url());
}
void setIsRootDir()
virtual void onDone()
{
if (responseCode() == 200) {
- std::string hash = strutils::encodeHex((char*) sha1_result(&hashContext));
- std::string curHash = directory->repository()->hashForPath(path());
- if (hash != curHash) {
+ std::string hash = strutils::encodeHex(sha1_result(&hashContext), HASH_LENGTH);
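+ // the parent index told us what this .dirindex should hash to;
+ // a mismatch means the data we fetched is stale or corrupt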
+ if (!_targetHash.empty() && (hash != _targetHash)) {
+ _directory->failedToUpdate(AbstractRepository::REPO_ERROR_CHECKSUM);
+ _directory->repository()->finishedRequest(this);
+ return;
+ }
- simgear::Dir d(directory->absolutePath());
+ std::string curHash = _directory->repository()->hashForPath(path());
+ if (hash != curHash) {
+ simgear::Dir d(_directory->absolutePath());
if (!d.exists()) {
if (!d.create(0700)) {
throw sg_io_exception("Unable to create directory", d.path());
of.write(body.data(), body.size());
of.close();
- directory->dirIndexUpdated(hash);
-
- SG_LOG(SG_TERRASYNC, SG_DEBUG, "updated dir index " << directory->absolutePath());
+ _directory->dirIndexUpdated(hash);
+ //SG_LOG(SG_TERRASYNC, SG_INFO, "updated dir index " << _directory->absolutePath());
}
try {
// either way we've confirmed the index is valid so update
// children now
- directory->updateChildrenBasedOnHash();
+ _directory->updateChildrenBasedOnHash();
} catch (sg_exception& e) {
- directory->failedToUpdate(AbstractRepository::REPO_ERROR_IO);
+ _directory->failedToUpdate(AbstractRepository::REPO_ERROR_IO);
}
} else if (responseCode() == 404) {
- directory->failedToUpdate(AbstractRepository::REPO_ERROR_FILE_NOT_FOUND);
+ _directory->failedToUpdate(AbstractRepository::REPO_ERROR_FILE_NOT_FOUND);
} else {
- directory->failedToUpdate(AbstractRepository::REPO_ERROR_HTTP);
+ _directory->failedToUpdate(AbstractRepository::REPO_ERROR_HTTP);
}
- directory->repository()->finishedRequest(this);
+ _directory->repository()->finishedRequest(this);
}
virtual void onFail()
{
- directory->failedToUpdate(AbstractRepository::REPO_ERROR_SOCKET);
- directory->repository()->finishedRequest(this);
+ if (_directory) {
+ _directory->failedToUpdate(AbstractRepository::REPO_ERROR_SOCKET);
+ _directory->repository()->finishedRequest(this);
+ }
}
private:
static std::string makeUrl(HTTPDirectory* d)
SGPath pathInRepo() const
{
- SGPath p(directory->absolutePath());
+ SGPath p(_directory->absolutePath());
p.append(".dirindex");
return p;
}
- HTTPDirectory* directory;
simgear::sha1nfo hashContext;
std::string body;
bool _isRootDir; ///< is this the repository root?
+ std::string _targetHash;
};
+ HTTPRepoPrivate::~HTTPRepoPrivate()
+ {
+ DirectoryVector::iterator it;
+ for (it=directories.begin(); it != directories.end(); ++it) {
+ delete *it;
+ }
+
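+ // abort any requests still in flight; cancel() clears their directory
+ // pointers so callbacks cannot touch the directories deleted above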
+ RequestVector::iterator r;
+ for (r=requests.begin(); r != requests.end(); ++r) {
+ (*r)->cancel();
+ }
+ }
HTTP::Request_ptr HTTPRepoPrivate::updateFile(HTTPDirectory* dir, const std::string& name)
{
- HTTP::Request_ptr r(new FileGetRequest(dir, name));
+ RepoRequestPtr r(new FileGetRequest(dir, name));
requests.push_back(r);
http->makeRequest(r);
return r;
}
- HTTP::Request_ptr HTTPRepoPrivate::updateDir(HTTPDirectory* dir)
+ HTTP::Request_ptr HTTPRepoPrivate::updateDir(HTTPDirectory* dir, const std::string& hash)
{
- HTTP::Request_ptr r(new DirGetRequest(dir));
+ RepoRequestPtr r(new DirGetRequest(dir, hash));
requests.push_back(r);
http->makeRequest(r);
return r;
return false;
}
- void HTTPRepoPrivate::finishedRequest(const HTTP::Request_ptr& req)
+ void HTTPRepoPrivate::finishedRequest(const RepoRequestPtr& req)
{
RequestVector::iterator it = std::find(requests.begin(), requests.end(), req);
if (it == requests.end()) {
void HTTPRepoPrivate::failedToUpdateChild(const SGPath& relativePath,
AbstractRepository::ResultCode fileStatus)
{
- // this means we only record the last error, should this be improved?
- status = fileStatus;
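+ // record every failure, not just the most recent one; failure() reports
+ // them as REPO_PARTIAL_UPDATE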
+ Failure f;
+ f.path = relativePath;
+ f.error = fileStatus;
+ failures.push_back(f);
+
+ SG_LOG(SG_TERRASYNC, SG_WARN, "failed to update entry:" << relativePath << " code:" << fileStatus);
}
return os.str();
}
+std::string hashForData(const std::string& d)
+{
+ simgear::sha1nfo info;
+ sha1_init(&info);
+ sha1_write(&info, d.data(), d.size());
+ return strutils::encodeHex(sha1_result(&info), HASH_LENGTH);
+}
+
class TestRepoEntry
{
public:
bool isDir;
int revision; // for files
int requestCount;
+ bool getWillFail;
+ bool returnCorruptData;
void clearRequestCounts();
+ void setGetWillFail(bool b)
+ {
+ getWillFail = b;
+ }
+
+ void setReturnCorruptData(bool d)
+ {
+ returnCorruptData = d;
+ }
+
std::string pathInRepo() const
{
return parent ? (parent->pathInRepo() + "/" + name) : name;
{
revision = 2;
requestCount = 0;
+ getWillFail = false;
+ returnCorruptData = false;
}
TestRepoEntry::~TestRepoEntry()
return;
}
+ if (entry->getWillFail) {
+ sendErrorResponse(404, false, "entry marked to fail explicitly:" + repoPath);
+ return;
+ }
+
entry->requestCount++;
- std::string content(entry->data());
+
+ std::string content;
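+ // for the corrupt-data case, serve content that differs from what the index
+ // advertises so the client's checksum check must reject the file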
+ if (entry->returnCorruptData) {
+ content = dataForFile("!$£$!" + entry->parent->name,
+ "corrupt_" + entry->name,
+ entry->revision);
+ } else {
+ content = entry->data();
+ }
+
std::stringstream d;
d << "HTTP/1.1 " << 200 << " " << reasonForCode(200) << "\r\n";
d << "Content-Length:" << content.size() << "\r\n";
std::cout << "Passed test: lose and replace local files" << std::endl;
}
+void testAbandonMissingFiles(HTTP::Client* cl)
+{
+ std::auto_ptr<HTTPRepository> repo;
+ SGPath p(simgear::Dir::current().path());
+ p.append("http_repo_missing_files");
+ simgear::Dir pd(p);
+ if (pd.exists()) {
+ pd.removeChildren();
+ }
+
+ global_repo->defineFile("dirA/subdirE/fileAEA");
+ global_repo->findEntry("dirA/subdirE/fileAEA")->setGetWillFail(true);
+
+ repo.reset(new HTTPRepository(p, cl));
+ repo->setBaseUrl("http://localhost:2000/repo");
+ repo->update();
+ waitForUpdateComplete(cl, repo.get());
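+ // a file the server refuses to provide should leave the repository in a
+ // partial-update state rather than failing the whole sync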
+ if (repo->failure() != AbstractRepository::REPO_PARTIAL_UPDATE) {
+ throw sg_exception("Bad result from missing files test");
+ }
+
+ global_repo->findEntry("dirA/subdirE/fileAEA")->setGetWillFail(false);
+}
+
+void testAbandonCorruptFiles(HTTP::Client* cl)
+{
+ std::auto_ptr<HTTPRepository> repo;
+ SGPath p(simgear::Dir::current().path());
+ p.append("http_repo_corrupt_files");
+ simgear::Dir pd(p);
+ if (pd.exists()) {
+ pd.removeChildren();
+ }
+
+ global_repo->defineFile("dirB/subdirG/fileBGA");
+ global_repo->findEntry("dirB/subdirG/fileBGA")->setReturnCorruptData(true);
+
+ repo.reset(new HTTPRepository(p, cl));
+ repo->setBaseUrl("http://localhost:2000/repo");
+ repo->update();
+ waitForUpdateComplete(cl, repo.get());
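+ // a corrupted download (checksum mismatch) should likewise be reported as a
+ // partial update rather than aborting the sync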
+ if (repo->failure() != AbstractRepository::REPO_PARTIAL_UPDATE) {
+ throw sg_exception("Bad result from corrupt files test");
+ }
+
+ std::cout << "Passed test: detect corrupted download" << std::endl;
+}
+
int main(int argc, char* argv[])
{
sglog().setLogLevels( SG_ALL, SG_INFO );
testMergeExistingFileWithoutDownload(&cl);
+ testAbandonMissingFiles(&cl);
+
+ testAbandonCorruptFiles(&cl);
+
return 0;
}