X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=classes%2FFile.php;h=fadbda1f9bb008ccd90c39373a508ff47173a38b;hb=6d9ea620a3717921f8ad96b76a3e45afba4977db;hp=1cb7eab6b856c203e6023d79e72cb98415fe538f;hpb=1d85fd35a2330a430e91a8b67b832bc2a23130d4;p=quix0rs-gnu-social.git

diff --git a/classes/File.php b/classes/File.php
index 1cb7eab6b8..fadbda1f9b 100644
--- a/classes/File.php
+++ b/classes/File.php
@@ -82,30 +82,45 @@ class File extends Managed_DataObject
      * @param string $given_url
      * @return File
      */
-    public static function saveNew(array $redir_data, $given_url) {
-
-        // I don't know why we have to keep doing this but I'm adding this last check to avoid
-        // uniqueness bugs.
+    public static function saveNew(array $redir_data, $given_url)
+    {
+        $file = null;
+        try {
+            // I don't know why we have to keep doing this but we run a last check to avoid
+            // uniqueness bugs.
+            $file = File::getByUrl($given_url);
+            return $file;
+        } catch (NoResultException $e) {
+            // We don't have the file's URL since before, so let's continue.
+        }
 
-        $file = File::getKV('urlhash', self::hashurl($given_url));
-
-        if (!$file instanceof File) {
-            $file = new File;
-            $file->urlhash = self::hashurl($given_url);
-            $file->url = $given_url;
-            if (!empty($redir_data['protected'])) $file->protected = $redir_data['protected'];
-            if (!empty($redir_data['title'])) $file->title = $redir_data['title'];
-            if (!empty($redir_data['type'])) $file->mimetype = $redir_data['type'];
-            if (!empty($redir_data['size'])) $file->size = intval($redir_data['size']);
-            if (isset($redir_data['time']) && $redir_data['time'] > 0) $file->date = intval($redir_data['time']);
-            $file_id = $file->insert();
-        }
-
-        Event::handle('EndFileSaveNew', array($file, $redir_data, $given_url));
-        assert ($file instanceof File);
+        $file = new File;
+        $file->url = $given_url;
+        if (!empty($redir_data['protected'])) $file->protected = $redir_data['protected'];
+        if (!empty($redir_data['title'])) $file->title = $redir_data['title'];
+        if (!empty($redir_data['type'])) $file->mimetype = $redir_data['type'];
+        if (!empty($redir_data['size'])) $file->size = intval($redir_data['size']);
+        if (isset($redir_data['time']) && $redir_data['time'] > 0) $file->date = intval($redir_data['time']);
+        $file->saveFile();
         return $file;
     }
 
+    public function saveFile() {
+        $this->urlhash = self::hashurl($this->url);
+
+        if (!Event::handle('StartFileSaveNew', array(&$this))) {
+            throw new ServerException('File not saved due to an aborted StartFileSaveNew event.');
+        }
+
+        $this->id = $this->insert();
+
+        if ($this->id === false) {
+            throw new ServerException('File/URL metadata could not be saved to the database.');
+        }
+
+        Event::handle('EndFileSaveNew', array($this));
+    }
+
     /**
      * Go look at a URL and possibly save data about it if it's new:
      * - follow redirect chains and store them in file_redirection
@@ -114,16 +129,15 @@ class File extends Managed_DataObject
      * - optionally save a file_to_post record
      * - return the File object with the full reference
      *
-     * @fixme refactor this mess, it's gotten pretty scary.
      * @param string $given_url the URL we're looking at
-     * @param int $notice_id (optional)
+     * @param Notice $notice (optional)
      * @param bool $followRedirects defaults to true
      *
      * @return mixed File on success, -1 on some errors
      *
      * @throws ServerException on failure
      */
-    public static function processNew($given_url, $notice_id=null, $followRedirects=true) {
+    public static function processNew($given_url, Notice $notice=null, $followRedirects=true) {
         if (empty($given_url)) {
             throw new ServerException('No given URL to process');
         }
@@ -133,69 +147,30 @@ class File extends Managed_DataObject
             throw new ServerException('No canonical URL from given URL to process');
         }
 
-        $file = null;
-
-        try {
-            $file = File::getByUrl($given_url);
-        } catch (NoResultException $e) {
-            // First check if we have a lookup trace for this URL already
-            try {
-                $file_redir = File_redirection::getByUrl($given_url);
-                $file = File::getKV('id', $file_redir->file_id);
-                if (!$file instanceof File) {
-                    // File did not exist, let's clean up the File_redirection entry
-                    $file_redir->delete();
-                }
-            } catch (NoResultException $e) {
-                // We just wanted to doublecheck whether a File_thumbnail we might've had
-                // actually referenced an existing File object.
-            }
-        }
+        $redir = File_redirection::where($given_url);
+        $file = $redir->getFile();
 
         // If we still don't have a File object, let's create one now!
-        if (!$file instanceof File) {
-            // @fixme for new URLs this also looks up non-redirect data
-            // such as target content type, size, etc, which we need
-            // for File::saveNew(); so we call it even if not following
-            // new redirects.
-            $redir_data = File_redirection::where($given_url);
-            if (is_array($redir_data)) {
-                $redir_url = $redir_data['url'];
-            } elseif (is_string($redir_data)) {
-                $redir_url = $redir_data;
-                $redir_data = array();
-            } else {
-                // TRANS: Server exception thrown when a URL cannot be processed.
-                throw new ServerException(sprintf(_("Cannot process URL '%s'"), $given_url));
-            }
-
-            if ($redir_url === $given_url || !$followRedirects) {
+        if (empty($file->id)) {
+            if ($redir->url === $given_url || !$followRedirects) {
                 // Save the File object based on our lookup trace
-                $file = File::saveNew($redir_data, $given_url);
+                $file->saveFile();
             } else {
-                // This seems kind of messed up... for now skipping this part
-                // if we're already under a redirect, so we don't go into
-                // horrible infinite loops if we've been given an unstable
-                // redirect (where the final destination of the first request
-                // doesn't match what we get when we ask for it again).
-                //
-                // Seen in the wild with clojure.org, which redirects through
-                // wikispaces for auth and appends session data in the URL params.
-                $file = self::processNew($redir_url, $notice_id, /*followRedirects*/false);
-                File_redirection::saveNew($redir_data, $file->id, $given_url);
+                $file->saveFile();
+                $redir->file_id = $file->id;
+                $redir->insert();
             }
+        }
 
-            if (!$file instanceof File) {
-                // This should only happen if File::saveNew somehow did not return a File object,
-                // though we have an assert for that in case the event there might've gone wrong.
- throw new ServerException('URL processing failed without new File object'); - } + if (!$file instanceof File || empty($file->id)) { + // This should not happen + throw new ServerException('URL processing failed without new File object'); } - if (!empty($notice_id)) { - File_to_post::processNew($file->id, $notice_id); + if ($notice instanceof Notice) { + File_to_post::processNew($file, $notice); } + return $file; } @@ -471,7 +446,7 @@ class File extends Managed_DataObject /** * @param string $hashstr String of (preferrably lower case) hexadecimal characters, same as result of 'hash_file(...)' */ - static public function getByHash($hashstr, $alg=File::FILEHASH_ALG) + static public function getByHash($hashstr) { $file = new File(); $file->filehash = strtolower($hashstr); @@ -488,13 +463,13 @@ class File extends Managed_DataObject throw new ServerException('URL already exists in DB'); } $sql = 'UPDATE %1$s SET urlhash=%2$s, url=%3$s WHERE urlhash=%4$s;'; - $result = $this->query(sprintf($sql, $this->__table, + $result = $this->query(sprintf($sql, $this->tableName(), $this->_quote((string)self::hashurl($url)), $this->_quote((string)$url), $this->_quote((string)$this->urlhash))); if ($result === false) { common_log_db_error($this, 'UPDATE', __FILE__); - throw new ServerException("Could not UPDATE {$this->__table}.url"); + throw new ServerException("Could not UPDATE {$this->tableName()}.url"); } return $result; @@ -510,9 +485,9 @@ class File extends Managed_DataObject function blowCache($last=false) { - self::blow('file:notice-ids:%s', $this->urlhash); + self::blow('file:notice-ids:%s', $this->id); if ($last) { - self::blow('file:notice-ids:%s;last', $this->urlhash); + self::blow('file:notice-ids:%s;last', $this->id); } self::blow('file:notice-count:%d', $this->id); }
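
For context, a minimal sketch of how calling code might use the refactored API after this change. The example URL, the $notice variable, and the logging calls (GNU social's common_debug()/common_log() helpers) are illustrative assumptions, not part of the patch; the point is that processNew() now takes a Notice object instead of a notice id, and saveFile() throws a ServerException instead of returning false when the insert fails.

<?php
// Hypothetical caller: record a link mentioned in a notice (illustrative only).
try {
    // Follows redirects, saves File/File_redirection rows as needed,
    // and links the File to the Notice via File_to_post::processNew().
    $file = File::processNew('https://example.com/interesting/page', $notice);
    common_debug('Attached File id ' . $file->id . ' to notice ' . $notice->id);
} catch (ServerException $e) {
    // Thrown when the URL cannot be canonicalized or the row cannot be saved.
    common_log(LOG_ERR, 'URL processing failed: ' . $e->getMessage());
}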