4 * StatusNet - a distributed open-source microblogging tool
5 * Copyright (C) 2008, 2009, StatusNet, Inc.
7 * This program is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU Affero General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU Affero General Public License for more details.
17 * You should have received a copy of the GNU Affero General Public License
18 * along with this program. If not, see <http://www.gnu.org/licenses/>.
// Script bootstrap: locate the install root (parent of this scripts/ dir)
// and the public web directory before pulling in the commandline harness.
21 define('INSTALLDIR', dirname(__DIR__));
22 define('PUBLICDIR', INSTALLDIR . DIRECTORY_SEPARATOR . 'public');
// Long-form CLI options accepted by commandline.inc option parsing.
25 $longoptions = array('yes');
// Help text shown for --help; heredoc terminator is outside this view.
27 $helptext = <<<END_OF_HELP
28 remove_duplicate_file_urls.php [options]
29 Remove duplicate URL entries in the file and file_redirection tables because they for some reason were not unique.
31 -y --yes do not wait for confirmation
// commandline.inc parses $longoptions/$helptext and boots the framework
// (defines have_option(), sets up the DB layer used below).
35 require_once INSTALLDIR.'/scripts/commandline.inc';
// Destructive operation: require interactive confirmation unless -y/--yes
// was passed. Anything other than 'y'/'Y' aborts (exit path not shown here).
37 if (!have_option('y', 'yes')) {
38 print "About to remove duplicate URL entries in file and file_redirection tables. Are you sure? [y/N] ";
39 $response = fgets(STDIN);
40 if (strtolower(trim($response)) != 'y') {
// Pass 1: find every URL that appears more than once in the `file` table.
// NOTE(review): $file appears to be a File DB_DataObject constructed on a
// line outside this view; query() populates $file->N with the row count.
47 $file->query('SELECT id, url, COUNT(*) AS c FROM file GROUP BY url HAVING c > 1');
48 print "\nFound {$file->N} URLs with duplicate entries in file table";
49 while ($file->fetch()) {
50 // We've got a URL that is duplicated in the file table
51 $dupfile = new File();
52 $dupfile->url = $file->url;
// find(true) fetches the first matching row immediately, so the loop
// below starts at the *second* row — the first duplicate is kept.
53 if ($dupfile->find(true)) {
54 print "\nDeleting duplicate entries in file table for URL: {$file->url} [";
55 // Leave one of the URLs in the database by using ->find(true)
56 // and only deleting starting with this fetch.
// Each subsequent fetch is a duplicate row; the actual ->delete()
// call is in the elided lines that follow.
57 while($dupfile->fetch()) {
// Defensive branch: find(true) returned no row even though the GROUP BY
// reported duplicates — likely deleted concurrently.
63 print "\nWarning! URL suddenly disappeared from database: {$file->url}\n";
// Pass 2: same deduplication, now for the `file_redirection` table.
// $file is reused as a File_redirection object for the aggregate query.
67 $file = new File_redirection();
68 $file->query('SELECT file_id, url, COUNT(*) AS c FROM file_redirection GROUP BY url HAVING c > 1');
69 print "\nFound {$file->N} URLs with duplicate entries in file_redirection table";
70 while ($file->fetch()) {
71 // We've got a URL that is duplicated in the file_redirection table
72 $dupfile = new File_redirection();
73 $dupfile->url = $file->url;
// find(true) keeps the first matching row; only subsequent fetches
// (true duplicates) are deleted in the elided lines below.
74 if ($dupfile->find(true)) {
// FIXME: copy-paste error — message says "file table" but this pass
// operates on file_redirection; should read "file_redirection table".
75 print "\nDeleting duplicate entries in file table for URL: {$file->url} [";
76 // Leave one of the URLs in the database by using ->find(true)
77 // and only deleting starting with this fetch.
78 while($dupfile->fetch()) {
// Defensive branch: the duplicate row vanished between the aggregate
// query and this lookup.
84 print "\nWarning! URL suddenly disappeared from database: {$file->url}\n";