Manual:Chris G's botclasses/FixFileDescriptionPagesBot.php
This bot uses Chris G's botclasses to fix file description pages whose real contents were buried in the page history because the description pages were imported after the image files themselves. It generates edits that look like this. To generate the text file of File-namespace page titles that the bot reads, you might want to use AllPagesBot.php.
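The input file is expected to contain one fully prefixed File-namespace (namespace 6) page title per line. If AllPagesBot.php is not at hand, a minimal sketch of producing such a list with the same botclasses might look like the following; the helper's filename and the apcontinue continuation handling are assumptions for illustration, not part of the original bot.

<?php
/* GeneratePageTitlesNs6.php -- hypothetical helper, not the actual AllPagesBot.php
 * Writes one File-namespace (namespace 6) page title per line to PageTitlesNs6.txt.
 */
include( 'botclasses.php' );
$wiki = new wikipedia;
$wiki->url = "https://en.wikipedia.org/w/api.php";
$titles = array();
$apcontinue = '';
do {
    // list=allpages enumerates titles; apnamespace=6 restricts it to the File namespace
    $url = '?action=query&list=allpages&apnamespace=6&aplimit=max&format=php';
    if ( $apcontinue ) {
        $url .= '&apcontinue=' . urlencode( $apcontinue );
    }
    $output = $wiki->query( $url, true );
    foreach ( $output['query']['allpages'] as $page ) {
        $titles[] = $page['title'];
    }
    // Keep going while the API reports a continuation point
    $apcontinue = isset( $output['continue']['apcontinue'] ) ? $output['continue']['apcontinue'] : '';
} while ( $apcontinue );
file_put_contents( 'PageTitlesNs6.txt', implode( "\n", $titles ) . "\n" );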
<?php
/* FixFileDescriptionPagesBot.php
* By Leucosticte, https://www.mediawiki.org/wiki/User:Leucosticte
* GNU Public License 2.0
*
* This bot fixes file description pages whose contents were buried in the history when a
* description page import was done after the image import.
*/
/* Setup my classes. */
include( 'botclasses.php' );
$wiki = new wikipedia;
$wiki->url = "https://en.wikipedia.org/w/api.php";
/* All the login stuff. */
$user = 'REMOVED';
$pass = 'REMOVED';
$wiki->login( $user, $pass );
$startWith = ''; // Set this to a page title from the list (e.g. "File:Example.jpg") to resume from; leave blank to start at the beginning
$pageTitlesNs6File = 'PageTitlesNs6.txt';
if ( !file_exists( $pageTitlesNs6File ) ) {
    die( "File $pageTitlesNs6File not found\n" );
}
$lines = file( $pageTitlesNs6File, FILE_IGNORE_NEW_LINES );
// The page title is spliced between these two URL fragments; rvdir=newer returns the
// revisions oldest-first, and rvlimit=max asks for the full history rather than a single revision
$url1 = '?action=query&prop=revisions&titles=';
$url3 = '&rvprop=timestamp|user|comment|content&rvlimit=max&rvdir=newer&format=php';
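// A full request for one title therefore looks something like this
// (File:Example.jpg is just a placeholder):
// https://en.wikipedia.org/w/api.php?action=query&prop=revisions&titles=File%3AExample.jpg&rvprop=timestamp|user|comment|content&rvlimit=max&rvdir=newer&format=php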
// TODO: Change this so that it makes one query to gather the data, rather than a query for each filename
if ( $startWith ) {
    $keepSkipping = true;
} else {
    $keepSkipping = false;
}
foreach ( $lines as $line ) {
    if ( !$keepSkipping || $line === $startWith ) {
        $keepSkipping = false;
        // Fetch the revision history (oldest first) of this file description page
        $url2 = urlencode( $line );
        $url = $url1 . $url2 . $url3;
        $output = $wiki->query( $url, true );
        $pageNumbers = array_keys( $output['query']['pages'] );
        $pageNumber = $pageNumbers[0];
        if ( !isset( $output['query']['pages'][$pageNumber]['revisions'] ) ) {
            continue; // Page is missing or has no revisions; skip it
        }
        $revisions = $output['query']['pages'][$pageNumber]['revisions'];
        // Because rvdir=newer returns oldest-first, the newest revision is the last element
        $lastRevision = $revisions[count( $revisions ) - 1];
        // If the newest revision is the stub left by the image import, restore the buried
        // description (the revision just before it) as the current page text
        if ( $lastRevision['*'] == 'Importing image file' && count( $revisions ) > 1 ) {
            $wiki->edit( $line, $revisions[count( $revisions ) - 2]['*'], 'Fixing botched file descriptions' );
        }
    }
}
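To run the bot, place the script in the same directory as botclasses.php and PageTitlesNs6.txt, fill in $user and $pass, point $wiki->url at your own wiki's api.php rather than the English Wikipedia endpoint shown above, and invoke it from the command line with php FixFileDescriptionPagesBot.php. Setting $startWith lets you resume partway through the list if a run is interrupted.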