* @param string $given_url
* @return File
+ * @throws ServerException on an aborted StartFileSaveNew event or a failed insert
*/
- public static function saveNew(array $redir_data, $given_url) {
+ public static function saveNew(array $redir_data, $given_url)
+ {
+ $file = null;
+ try {
+ // For reasons unknown we still need this last existence check to avoid
+ // uniqueness bugs (duplicate rows for the same URL).
+ $file = File::getByUrl($given_url);
+ return $file;
+ } catch (NoResultException $e) {
+ // The URL was not found, so continue and create a new entry.
+ }
- // I don't know why we have to keep doing this but I'm adding this last check to avoid
- // uniqueness bugs.
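+ // Give plugins a chance to inspect or abort the save via the StartFileSaveNew event.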
+ if (!Event::handle('StartFileSaveNew', array(&$redir_data, $given_url))) {
+ throw new ServerException('File not saved due to an aborted StartFileSaveNew event.');
+ }
- $file = File::getKV('urlhash', self::hashurl($given_url));
-
- if (!$file instanceof File) {
- $file = new File;
- $file->urlhash = self::hashurl($given_url);
- $file->url = $given_url;
- if (!empty($redir_data['protected'])) $file->protected = $redir_data['protected'];
- if (!empty($redir_data['title'])) $file->title = $redir_data['title'];
- if (!empty($redir_data['type'])) $file->mimetype = $redir_data['type'];
- if (!empty($redir_data['size'])) $file->size = intval($redir_data['size']);
- if (isset($redir_data['time']) && $redir_data['time'] > 0) $file->date = intval($redir_data['time']);
- $file_id = $file->insert();
+ $file = new File;
+ $file->urlhash = self::hashurl($given_url);
+ $file->url = $given_url;
+ if (!empty($redir_data['protected'])) $file->protected = $redir_data['protected'];
+ if (!empty($redir_data['title'])) $file->title = $redir_data['title'];
+ if (!empty($redir_data['type'])) $file->mimetype = $redir_data['type'];
+ if (!empty($redir_data['size'])) $file->size = intval($redir_data['size']);
+ if (isset($redir_data['time']) && $redir_data['time'] > 0) $file->date = intval($redir_data['time']);
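+ // insert() returns false on failure, hence the strict check below.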
+ $file_id = $file->insert();
+
+ if ($file_id === false) {
+ throw new ServerException('File/URL metadata could not be saved to the database.');
}
Event::handle('EndFileSaveNew', array($file, $redir_data, $given_url));
- assert ($file instanceof File);
return $file;
}
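+
+ // Usage sketch, with hypothetical values ($redir_data normally comes from
+ // File_redirection::where()):
+ //   $redir_data = array('type' => 'text/html', 'size' => 1234);
+ //   $file = File::saveNew($redir_data, 'https://example.com/page');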
*
* @fixme refactor this mess, it's gotten pretty scary.
* @param string $given_url the URL we're looking at
- * @param int $notice_id (optional)
+ * @param Notice $notice (optional)
* @param bool $followRedirects defaults to true
*
* @return mixed File on success, -1 on some errors
*
* @throws ServerException on failure
*/
- public static function processNew($given_url, $notice_id=null, $followRedirects=true) {
+ public static function processNew($given_url, Notice $notice=null, $followRedirects=true) {
if (empty($given_url)) {
throw new ServerException('No given URL to process');
}
//
// Seen in the wild with clojure.org, which redirects through
// wikispaces for auth and appends session data in the URL params.
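+ // Process the redirect target itself, without following any further redirects.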
- $file = self::processNew($redir_url, $notice_id, /*followRedirects*/false);
+ $file = self::processNew($redir_url, $notice, /*followRedirects*/false);
File_redirection::saveNew($redir_data, $file->id, $given_url);
}
}
}
- if (!empty($notice_id)) {
- File_to_post::processNew($file->id, $notice_id);
+ if ($notice instanceof Notice) {
+ File_to_post::processNew($file, $notice);
}
return $file;
}
return true;
}
+ public function getFilename()
+ {
+ if (!self::validFilename($this->filename)) {
+ // TRANS: Client exception thrown if a file upload does not have a valid name.
+ throw new ClientException(_("Invalid filename."));
+ }
+ return $this->filename;
+ }
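+ // Usage sketch (assuming a locally stored file): the validated name can
+ // feed path construction, e.g. File::path($file->getFilename()).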
+
// where should the file go?
static function filename(Profile $profile, $origname, $mimetype)
}
- if (StatusNet::useHTTPS()) {
+ if (GNUsocial::useHTTPS()) {
$sslserver = common_config('attachments', 'sslserver');
}
}
- if ($width === null) {
- $width = common_config('thumbnail', 'width');
- $height = common_config('thumbnail', 'height');
- $crop = common_config('thumbnail', 'crop');
- }
-
- if ($height === null) {
- $height = $width;
- $crop = true;
- }
-
- // Get proper aspect ratio width and height before lookup
- // We have to do it through an ImageFile object because of orientation etc.
- // Only other solution would've been to rotate + rewrite uploaded files
- // which we don't want to do because we like original, untouched data!
- list($width, $height, $x, $y, $w, $h) =
- $image->scaleToFit($width, $height, $crop);
-
- $params = array('file_id'=> $this->id,
- 'width' => $width,
- 'height' => $height);
- $thumb = File_thumbnail::pkeyGet($params);
- if ($thumb instanceof File_thumbnail) {
- return $thumb;
- }
-
- $filename = $this->filehash ?: $image->filename; // Remote files don't have $this->filename
- $outname = "thumb-{$this->id}-{$width}x{$height}-{$filename}." . File::guessMimeExtension($image->mimetype);
- $outpath = self::path($outname);
-
- // The boundary box for our resizing
- $box = array('width'=>$width, 'height'=>$height,
- 'x'=>$x, 'y'=>$y,
- 'w'=>$w, 'h'=>$h);
-
- // Doublecheck that parameters are sane and integers.
- if ($box['width'] < 1 || $box['width'] > common_config('thumbnail', 'maxsize')
- || $box['height'] < 1 || $box['height'] > common_config('thumbnail', 'maxsize')
- || $box['w'] < 1 || $box['x'] >= $image->width
- || $box['h'] < 1 || $box['y'] >= $image->height) {
- // Fail on bad width parameter. If this occurs, it's due to algorithm in ImageFile->scaleToFit
- common_debug("Boundary box parameters for resize of {$image->filepath} : ".var_export($box,true));
- throw new ServerException('Bad thumbnail size parameters.');
- }
-
- common_debug(sprintf('Generating a thumbnail of File id==%u of size %ux%u', $this->id, $width, $height));
- // Perform resize and store into file
- $image->resizeTo($outpath, $box);
-
- // Avoid deleting the original
- if ($image->getPath() != self::path($image->filename)) {
- $image->unlink();
- }
- return File_thumbnail::saveThumbnail($this->id,
- self::url($outname),
- $width, $height,
- $outname);
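+ // Scaling, boundary sanity checks, resizing and storage are now
+ // delegated to ImageFile::getFileThumbnail().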
+ return $image->getFileThumbnail($width, $height, $crop);
}
public function getPath()
/**
* @param string $hashstr String of (preferably lower case) hexadecimal characters, same as result of 'hash_file(...)'
*/
- static public function getByHash($hashstr, $alg=File::FILEHASH_ALG)
+ static public function getByHash($hashstr)
{
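+ // The algorithm parameter is gone; lookups now always use the
+ // File::FILEHASH_ALG default, with hashes stored lower case.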
$file = new File();
$file->filehash = strtolower($hashstr);
throw new ServerException('URL already exists in DB');
}
$sql = 'UPDATE %1$s SET urlhash=%2$s, url=%3$s WHERE urlhash=%4$s;';
- $result = $this->query(sprintf($sql, $this->__table,
+ $result = $this->query(sprintf($sql, $this->tableName(),
$this->_quote((string)self::hashurl($url)),
$this->_quote((string)$url),
$this->_quote((string)$this->urlhash)));
if ($result === false) {
common_log_db_error($this, 'UPDATE', __FILE__);
- throw new ServerException("Could not UPDATE {$this->__table}.url");
+ throw new ServerException("Could not UPDATE {$this->tableName()}.url");
}
return $result;
function blowCache($last=false)
{
- self::blow('file:notice-ids:%s', $this->urlhash);
+ self::blow('file:notice-ids:%s', $this->id);
if ($last) {
- self::blow('file:notice-ids:%s;last', $this->urlhash);
+ self::blow('file:notice-ids:%s;last', $this->id);
}
self::blow('file:notice-count:%d', $this->id);
}
$thumbs->delete();
}
}
+
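+ // Also remove any file-to-post mappings, so deleted attachments no
+ // longer dangle from existing notices.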
+ $f2p = new File_to_post();
+ $f2p->file_id = $this->id;
+ if ($f2p->find()) {
+ while ($f2p->fetch()) {
+ $f2p->delete();
+ }
+ }
}
// And finally remove the entry from the database
return;
}
echo "\nFound old $table table, upgrading it to contain 'urlhash' field...";
+
+ $file = new File();
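+ // Find groups of rows whose URLs would collide once truncated to 191
+ // characters; HAVING c > 1 keeps only the actual duplicate groups.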
+ $file->query(sprintf('SELECT id, LEFT(url, 191) AS shortenedurl, COUNT(*) AS c FROM %1$s WHERE LENGTH(url)>191 GROUP BY shortenedurl HAVING c > 1', $schema->quoteIdentifier($table)));
+ print "\nFound {$file->N} URLs with too long entries in file table\n";
+ while ($file->fetch()) {
+ // We've got a URL that is too long for our future file table
+ // so we'll cut it. We could save the original URL, but there is
+ // no guarantee it is complete anyway since the previous max was 255 chars.
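+ // (191 chars is the longest indexable utf8mb4 VARCHAR under InnoDB's
+ // 767-byte key-prefix limit: 191 * 4 bytes = 764.)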
+ $dupfile = new File();
+ // First we find file entries that would be duplicates of this when shortened
+ // ... and we'll just throw the dupes out the window for now! It's already broken anyway.
+ $dupfile->query(sprintf('SELECT * FROM %1$s WHERE LEFT(url, 191) = "%2$s"', $schema->quoteIdentifier($table), $file->shortenedurl));
+ // Leave one of the URLs in the database by using ->find(true) (fetches first entry)
+ if ($dupfile->find(true)) {
+ print "\nShortening url entry for $table id: {$file->id} [";
+ $orig = clone($dupfile);
+ $dupfile->url = $file->shortenedurl; // make sure it's only 191 chars from now on
+ $dupfile->update($orig);
+ print "\nDeleting duplicate entries of too long URL on $table id: {$file->id} [";
+ // find(true) above fetched the row we keep, so this loop deletes only the remaining duplicates.
+ while($dupfile->fetch()) {
+ print ".";
+ $dupfile->delete();
+ }
+ print "]\n";
+ } else {
+ print "\nWarning! URL suddenly disappeared from database: {$file->url}\n";
+ }
+ }
+ echo "...and now all the non-duplicates which are longer than 191 characters...\n";
+ $file->query(sprintf('UPDATE %1$s SET url=LEFT(url, 191) WHERE LENGTH(url)>191', $schema->quoteIdentifier($table)));
+
+ echo "\n...now running hacky pre-schemaupdate change for $table:";
// We have to create a urlhash that is _not_ the primary key,
// transfer data and THEN run checkSchema
$schemadef['fields']['urlhash'] = array (
'type' => 'varchar',
'length' => 64,
- 'not null' => true,
+ 'not null' => false, // this is because when adding column, all entries will _be_ NULL!
'description' => 'sha256 of destination URL (url field)',
);
$schemadef['fields']['url'] = array (