From: Mikael Nordfeldth Date: Mon, 10 Jul 2017 12:41:03 +0000 (+0200) Subject: Avoid excessive remote requests on oEmbed lookups X-Git-Url: https://git.mxchange.org/?a=commitdiff_plain;h=3bc2454e91760469305726cdfb6fd9d9f62eb788;p=quix0rs-gnu-social.git Avoid excessive remote requests on oEmbed lookups I noticed that each time a notice was accessed, it'd do a remote lookup with HEAD (and continue despite 404 etc.) and then another attempt to download the resource. If this wasn't successful, new attempts would be made on each loading of the resource, which is extremely resource-intensive. Whenever we can say "it's been n seconds since the last attempt", we could probably enable this again - or just manually reload remote thumbnails (as part of the StoreRemoteMedia plugin etc.) --- diff --git a/plugins/Oembed/OembedPlugin.php b/plugins/Oembed/OembedPlugin.php index 64e3e8940c..57ff3f467c 100644 --- a/plugins/Oembed/OembedPlugin.php +++ b/plugins/Oembed/OembedPlugin.php @@ -389,6 +389,13 @@ class OembedPlugin extends Plugin // First see if it's too large for us common_debug(__METHOD__ . ': '.sprintf('Performing HEAD request for remote file id==%u to avoid unnecessarily downloading too large files. URL: %s', $thumbnail->getFileId(), $remoteUrl)); $head = $http->head($remoteUrl); + if (!$head->isOk()) { + common_log(LOG_WARN, 'HEAD request returned HTTP failure, so we will abort now and delete the thumbnail object.'); + $thumbnail->delete(); + return false; + } else { + common_debug('HEAD request returned HTTP success, so we will continue.'); + } $remoteUrl = $head->getEffectiveUrl(); // to avoid going through redirects again $headers = $head->getHeader();