X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=classes%2FFile.php;h=bc1b4a6d09f4c5b5d4ed521f6d48edcacef105c9;hb=8ac8e2e7344f85b8c2f0f86ee97396eb46315bcf;hp=ce2f9401618286bcfd9485f8f3682add5c8df0f2;hpb=67d09532ddb324897d8222b5b8744b0118e72d70;p=quix0rs-gnu-social.git

diff --git a/classes/File.php b/classes/File.php
index ce2f940161..bc1b4a6d09 100644
--- a/classes/File.php
+++ b/classes/File.php
@@ -26,7 +26,8 @@ class File extends Managed_DataObject
 {
     public $__table = 'file'; // table name
     public $id; // int(4) primary_key not_null
-    public $url; // varchar(255) unique_key
+    public $urlhash; // varchar(64) unique_key
+    public $url; // text
     public $mimetype; // varchar(50)
     public $size; // int(4)
     public $title; // varchar(255)
@@ -37,12 +38,15 @@ class File extends Managed_DataObject
     public $height; // int(4)
     public $modified; // timestamp() not_null default_CURRENT_TIMESTAMP
 
+    const URLHASH_ALG = 'sha256';
+
     public static function schemaDef()
     {
         return array(
             'fields' => array(
                 'id' => array('type' => 'serial', 'not null' => true),
-                'url' => array('type' => 'varchar', 'length' => 255, 'description' => 'destination URL after following redirections'),
+                'urlhash' => array('type' => 'varchar', 'length' => 64, 'description' => 'sha256 of destination URL (url field)'),
+                'url' => array('type' => 'text', 'description' => 'destination URL after following redirections'),
                 'mimetype' => array('type' => 'varchar', 'length' => 50, 'description' => 'mime type of resource'),
                 'size' => array('type' => 'int', 'description' => 'size of resource when available'),
                 'title' => array('type' => 'varchar', 'length' => 255, 'description' => 'title of resource when available'),
@@ -56,7 +60,7 @@ class File extends Managed_DataObject
             ),
             'primary key' => array('id'),
             'unique keys' => array(
-                'file_url_key' => array('url'),
+                'file_urlhash_key' => array('urlhash'),
             ),
         );
     }
@@ -77,10 +81,11 @@ class File extends Managed_DataObject
 
         // I don't know why we have to keep doing this but I'm adding this last check to avoid
         // uniqueness bugs.
-        $file = File::getKV('url', $given_url);
+        $file = File::getKV('urlhash', self::hashurl($given_url));
 
         if (!$file instanceof File) {
             $file = new File;
+            $file->urlhash = self::hashurl($given_url);
             $file->url = $given_url;
             if (!empty($redir_data['protected'])) $file->protected = $redir_data['protected'];
             if (!empty($redir_data['title'])) $file->title = $redir_data['title'];
@@ -122,51 +127,56 @@ class File extends Managed_DataObject
             throw new ServerException('No canonical URL from given URL to process');
         }
 
-        $file = File::getKV('url', $given_url);
-        if (!$file instanceof File) {
+        $file = null;
+
+        try {
+            $file = File::getByUrl($given_url);
+        } catch (NoResultException $e) {
             // First check if we have a lookup trace for this URL already
-            $file_redir = File_redirection::getKV('url', $given_url);
-            if ($file_redir instanceof File_redirection) {
+            try {
+                $file_redir = File_redirection::getByUrl($given_url);
                 $file = File::getKV('id', $file_redir->file_id);
                 if (!$file instanceof File) {
                     // File did not exist, let's clean up the File_redirection entry
                     $file_redir->delete();
                 }
+            } catch (NoResultException $e) {
+                // We just wanted to doublecheck whether a File_thumbnail we might've had
+                // actually referenced an existing File object.
             }
+        }
 
-            // If we still don't have a File object, let's create one now!
-            if (!$file instanceof File) {
-                // @fixme for new URLs this also looks up non-redirect data
-                // such as target content type, size, etc, which we need
-                // for File::saveNew(); so we call it even if not following
-                // new redirects.
-                $redir_data = File_redirection::where($given_url);
-                if (is_array($redir_data)) {
-                    $redir_url = $redir_data['url'];
-                } elseif (is_string($redir_data)) {
-                    $redir_url = $redir_data;
-                    $redir_data = array();
-                } else {
-                    // TRANS: Server exception thrown when a URL cannot be processed.
-                    throw new ServerException(sprintf(_("Cannot process URL '%s'"), $given_url));
-                }
+        // If we still don't have a File object, let's create one now!
+        if (!$file instanceof File) {
+            // @fixme for new URLs this also looks up non-redirect data
+            // such as target content type, size, etc, which we need
+            // for File::saveNew(); so we call it even if not following
+            // new redirects.
+            $redir_data = File_redirection::where($given_url);
+            if (is_array($redir_data)) {
+                $redir_url = $redir_data['url'];
+            } elseif (is_string($redir_data)) {
+                $redir_url = $redir_data;
+                $redir_data = array();
+            } else {
+                // TRANS: Server exception thrown when a URL cannot be processed.
+                throw new ServerException(sprintf(_("Cannot process URL '%s'"), $given_url));
+            }
 
-                // TODO: max field length
-                if ($redir_url === $given_url || strlen($redir_url) > 255 || !$followRedirects) {
-                    // Save the File object based on our lookup trace
-                    $file = File::saveNew($redir_data, $given_url);
-                } else {
-                    // This seems kind of messed up... for now skipping this part
-                    // if we're already under a redirect, so we don't go into
-                    // horrible infinite loops if we've been given an unstable
-                    // redirect (where the final destination of the first request
-                    // doesn't match what we get when we ask for it again).
-                    //
-                    // Seen in the wild with clojure.org, which redirects through
-                    // wikispaces for auth and appends session data in the URL params.
-                    $file = self::processNew($redir_url, $notice_id, /*followRedirects*/false);
-                    File_redirection::saveNew($redir_data, $file->id, $given_url);
-                }
+            if ($redir_url === $given_url || !$followRedirects) {
+                // Save the File object based on our lookup trace
+                $file = File::saveNew($redir_data, $given_url);
+            } else {
+                // This seems kind of messed up... for now skipping this part
+                // if we're already under a redirect, so we don't go into
+                // horrible infinite loops if we've been given an unstable
+                // redirect (where the final destination of the first request
+                // doesn't match what we get when we ask for it again).
+                //
+                // Seen in the wild with clojure.org, which redirects through
+                // wikispaces for auth and appends session data in the URL params.
+                $file = self::processNew($redir_url, $notice_id, /*followRedirects*/false);
+                File_redirection::saveNew($redir_data, $file->id, $given_url);
             }
 
         if (!$file instanceof File) {
@@ -462,7 +472,7 @@ class File extends Managed_DataObject
         if (!empty($this->filename)) {
             // A locally stored file, so let's generate a URL for our instance.
             $url = self::url($this->filename);
-            if ($url != $this->url) {
+            if (self::hashurl($url) !== $this->urlhash) {
                 // For indexing purposes, in case we do a lookup on the 'url' field.
                 // also we're fixing possible changes from http to https, or paths
                 $this->updateUrl($url);
@@ -474,16 +484,27 @@ class File extends Managed_DataObject
         return $this->url;
     }
 
+    static public function getByUrl($url)
+    {
+        $file = new File();
+        $file->urlhash = self::hashurl($url);
+        if (!$file->find(true)) {
+            throw new NoResultException($file);
+        }
+        return $file;
+    }
+
     public function updateUrl($url)
     {
-        $file = File::getKV('url', $url);
+        $file = File::getKV('urlhash', self::hashurl($url));
         if ($file instanceof File) {
             throw new ServerException('URL already exists in DB');
         }
-        $sql = 'UPDATE %1$s SET url=%2$s WHERE url=%3$s;';
+        $sql = 'UPDATE %1$s SET urlhash=%2$s, url=%3$s WHERE urlhash=%4$s;';
         $result = $this->query(sprintf($sql, $this->__table,
+                                             $this->_quote((string)self::hashurl($url)),
                                              $this->_quote((string)$url),
-                                             $this->_quote((string)$this->url)));
+                                             $this->_quote((string)$this->urlhash)));
         if ($result === false) {
             common_log_db_error($this, 'UPDATE', __FILE__);
             throw new ServerException("Could not UPDATE {$this->__table}.url");
@@ -502,9 +523,9 @@ class File extends Managed_DataObject
 
     function blowCache($last=false)
    {
-        self::blow('file:notice-ids:%s', $this->url);
+        self::blow('file:notice-ids:%s', $this->urlhash);
         if ($last) {
-            self::blow('file:notice-ids:%s;last', $this->url);
+            self::blow('file:notice-ids:%s;last', $this->urlhash);
         }
         self::blow('file:notice-count:%d', $this->id);
     }
@@ -582,4 +603,48 @@ class File extends Managed_DataObject
 
         return $title ?: null;
     }
+
+    static public function hashurl($url)
+    {
+        if (empty($url)) {
+            throw new Exception('No URL provided to hash algorithm.');
+        }
+        return hash(self::URLHASH_ALG, $url);
+    }
+
+    static public function beforeSchemaUpdate()
+    {
+        $table = strtolower(get_called_class());
+        $schema = Schema::get();
+        $schemadef = $schema->getTableDef($table);
+
+        // 2015-02-19 We have to upgrade our table definitions to have the urlhash field populated
+        if (isset($schemadef['fields']['urlhash']) && in_array('file_urlhash_key', $schemadef['unique keys'])) {
+            // We already have the urlhash field, so no need to migrate it.
+            return;
+        }
+        echo "\nFound old $table table, upgrading it to contain 'urlhash' field...\n";
+        // We have to create a urlhash that is _not_ the primary key,
+        // transfer data and THEN run checkSchema
+        $schemadef['fields']['urlhash'] = array (
+                                            'type' => 'varchar',
+                                            'length' => 64,
+                                            'description' => 'sha256 of destination URL after following redirections',
+                                          );
+        $schema->ensureTable($table, $schemadef);
+        echo "DONE.\n";
+
+        $classname = ucfirst($table);
+        $tablefix = new $classname;
+        // urlhash is hash('sha256', $url) in the File table
+        echo "Updating urlhash fields in $table table...\n";
+        // Maybe very MySQL specific :(
+        $tablefix->query(sprintf('UPDATE %1$s SET %2$s=%3$s;',
+                            $schema->quoteIdentifier($table),
+                            'urlhash',
+                            // The line below is "result of sha256 on column `url`"
+                            'SHA2(url, 256)'));
+        echo "DONE.\n";
+        echo "Resuming core schema upgrade...";
+    }
 }
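
The core of this patch is that URL lookups now key on a fixed-length digest instead of the URL itself: File::hashurl() returns hash('sha256', $url), always a 64-character lowercase hex string, so it fits the new varchar(64) file_urlhash_key unique key while the url column itself can grow to text. MySQL's SHA2(url, 256), used by beforeSchemaUpdate() to backfill existing rows, produces the same lowercase hex digest, which keeps the migrated values compatible with later PHP-side lookups. The sketch below illustrates the idea on its own, outside the GNU social classes; it is not part of the patch, and the example URL is made up.

<?php
// Standalone sketch (not part of the patch): it mirrors the idea behind
// File::hashurl() and the new 'urlhash' unique key without depending on the
// GNU social framework classes.

const URLHASH_ALG = 'sha256';

function hashurl($url)
{
    if (empty($url)) {
        throw new Exception('No URL provided to hash algorithm.');
    }
    // hash('sha256', ...) always returns 64 lowercase hex characters, so the
    // digest fits a varchar(64) unique key even when the URL itself is long.
    return hash(URLHASH_ALG, $url);
}

// Hypothetical URL, longer than the old varchar(255) limit on file.url.
$url = 'https://social.example/attachment/12345?session=' . str_repeat('x', 300);

var_dump(strlen($url) > 255);              // bool(true) -- too long for the old unique key
var_dump(strlen(hashurl($url)));           // int(64)    -- constant-width lookup key
var_dump(hashurl($url) === hashurl($url)); // bool(true) -- same URL always maps to the same key

Since urlhash is the unique key, two attempts to store the same destination URL now collide on file_urlhash_key rather than on the old file_url_key.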