git.mxchange.org Git - quix0rs-gnu-social.git/commitdiff
upgrade fix for file URLs longer than 191 chars
author Mikael Nordfeldth <mmn@hethane.se>
Wed, 27 May 2015 19:31:29 +0000 (21:31 +0200)
committer Mikael Nordfeldth <mmn@hethane.se>
Wed, 27 May 2015 19:31:29 +0000 (21:31 +0200)
classes/File.php

index 1e296242b79f350c610fa327819ed2cfa4ed481a..0018a03e32f9bc632452dbf93b6e3c7df05fee15 100644
@@ -610,6 +610,37 @@ class File extends Managed_DataObject
             return;
         }
         echo "\nFound old $table table, upgrading it to contain 'urlhash' field...";
+
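+        // Find URLs that will collide once truncated to 191 chars: group rows
+        // by their first 191 chars and keep only groups with more than one row.
+        // (191 is presumably the limit because 191 utf8mb4 chars fit within
+        // InnoDB's 767-byte index key limit.)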
+        $file = new File();
+        $file->query(sprintf('SELECT id, LEFT(url, 191) AS shortenedurl, COUNT(*) AS c FROM %1$s WHERE LENGTH(url)>191 GROUP BY shortenedurl HAVING c > 1', $schema->quoteIdentifier($table)));
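+        // After query(), DB_DataObject sets $file->N to the number of rows matched.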
+        print "\nFound {$file->N} URL prefixes with colliding too-long entries in the $table table\n";
+        while ($file->fetch()) {
+            // We've got a URL that is too long for our future file table
+            // so we'll cut it. We could save the original URL, but there is
+            // no guarantee it is complete anyway since the previous max was 255 chars.
+            $dupfile = new File();
+            // First we find file entries that would be duplicates of this when shortened
+            // ... and we'll just throw the dupes out the window for now! It's already so broken.
+            $dupfile->query(sprintf('SELECT * FROM %1$s WHERE LEFT(url, 191) = "%2$s"', $schema->quoteIdentifier($table), $dupfile->escape($file->shortenedurl)));
+            // Leave one of the URLs in the database by using ->find(true) (fetches first entry)
+            if ($dupfile->find(true)) {
+                print "\nShortening url entry for $table id: {$file->id}";
+                $orig = clone($dupfile);
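+                // update($orig) will diff against this clone and only write the
+                // columns that changed (just url here).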
+                $dupfile->url = $file->shortenedurl;    // make sure it's only 191 chars from now on
+                $dupfile->update($orig);
+                print "\nDeleting duplicate entries of too-long URL on $table id: {$file->id} [";
+                // find(true) above already fetched the entry we keep, so this
+                // loop only touches (and deletes) the remaining duplicates.
+                while ($dupfile->fetch()) {
+                    print ".";
+                    $dupfile->delete();
+                }
+                print "]\n";
+            } else {
+                print "\nWarning! URL suddenly disappeared from database: {$file->url}\n";
+            }
+        }
+
+        echo "\n...now running hacky pre-schemaupdate change for $table:";
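+        // Note: adding urlhash as the primary key in one step would presumably
+        // fail, since every existing row would start with the same empty hash.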
         // We have to create a urlhash that is _not_ the primary key,
         // transfer data and THEN run checkSchema
         $schemadef['fields']['urlhash'] = array (