User:Leucosticte/EmailDeletedPages

From MediaWiki.org
Jump to navigation Jump to search

This is/was the beginning of an extension to fix bugzilla:38642. I cannibalized SpecialExport.php and Export.php and modified the code to the point that it successfully pulled data from the archive table. I'm sure I could get it to provide all the functionality called for in 38642, but the outcome would probably not be all that elegant, due to my current level of programming skill.

Testing[edit]

During testing, I called the function using a modified SpecialHelloWorld.php:

$edp = new EmailDeletedPages;
$edp->pagesByArPageId ( 2 );

That way, I could just go to Special:HelloWorld and it would run the function.

Files[edit]

EmailDeletedPages.php[edit]

<?php
/*
* @author Nathan Larson <nathanlarson3141@gmail.com>
* @copyright Copyright © 2012, Nathan Larson, Brion Vibber, etc.
* @license http://www.gnu.org/copyleft/gpl.html GNU General Public License 2.0 or later
*/

/**
 * Prevent a user from accessing this file directly and provide a helpful
 * message explaining how to install this extension.
 *
 * Fixes: the guard was duplicated (a redundant identical nested if), and the
 * install message referred to the "Example" extension instead of this one.
 */
if ( !defined( 'MEDIAWIKI' ) ) {
	echo <<<EOT
To install the EmailDeletedPages extension, put the following line in your
LocalSettings.php file:
require_once( "\$IP/extensions/EmailDeletedPages/EmailDeletedPages.php" );
EOT;
	exit( 1 );
}

// Extension credits that will show up on Special:Version
$wgExtensionCredits['other'][] = array(
	'path' => __FILE__,
	'name' => 'Email Deleted Pages',
	'author' => 'Nathan Larson',
	'url' => 'https://www.mediawiki.org/wiki/Extension:EmailDeletedPages',
	'description' => 'Emails deleted articles to users who have watchlisted them',
	// Version numbers are conventionally strings; the float 1.0 would render as "1"
	'version' => '1.0',
);

// Find the full directory path of this extension
$current_dir = dirname( __FILE__ ) . DIRECTORY_SEPARATOR;

// Add the i18n message file. The key must be this extension's name — the
// previous key 'Example' was left over from the boilerplate and would clash
// with (or be clobbered by) any extension actually named Example.
$wgExtensionMessagesFiles['EmailDeletedPages'] = $current_dir . 'EmailDeletedPages.i18n.php';

// Register the GetPreferences hook as a static callback
$wgHooks['GetPreferences'][] = 'EmailDeletedPages::wfEmailDeletedPagesPrefHook';

class EmailDeletedPages {
	/** @var XmlDumpWriter Generates the XML export markup */
	public $writer;
	/** @var DumpOutput Sink that receives the generated XML */
	public $sink;

	function __construct() {
		$this->writer = new XmlDumpWriter();
		$this->sink = new DumpOutput();
	}

	/**
	 * GetPreferences hook handler: add a preferences checkbox for "Send me
	 * copies of pages on my watchlist that are deleted".
	 *
	 * Declared static because it is registered via the static callback string
	 * 'EmailDeletedPages::wfEmailDeletedPagesPrefHook'; calling a non-static
	 * method statically is a fatal error on modern PHP.
	 *
	 * @param User $user
	 * @param array &$preferences Preference descriptors, modified in place
	 * @return bool Always true (required return value of a hook function)
	 */
	static function wfEmailDeletedPagesPrefHook( $user, &$preferences ) {
		// A checkbox in the "Email options" section of Special:Preferences
		$preferences['emaildeletedpages'] = array(
			'type' => 'toggle',
			'label-message' => 'tog-emaildeletedpages', // a system message
			'section' => 'personal/email',
		);
		return true;
	}

	/**
	 * Dumps the page record and all revision records for the page in the
	 * archive table with the given ar_page_id, writing an XML export stream
	 * to $this->sink.
	 *
	 * @param int $arPageId ar_page_id of the deleted page to export
	 */
	function pagesByArPageId( $arPageId ) {
		$lb = wfGetLBFactory()->newMainLB();

		// This might take a while... :D
		wfSuppressWarnings();
		set_time_limit( 0 );
		wfRestoreWarnings();

		$this->openStream();

		# Use the master because these revisions just got moved into the archive table,
		# and if it's laggy they won't be there yet. An alternative would be to create
		# two new hooks in the core, the first to ask if any extensions need the full
		# revision history, and the second to provide it to them.
		$dbr = wfGetDB( DB_MASTER );
		$prev = $dbr->bufferResults( false );
		$tables = array( 'archive', 'text' );
		$opts = array();
		$join['text'] = array( 'INNER JOIN', 'ar_text_id=old_id' );

		# First find out whether the page was a redirect.
		# Array conditions let the Database layer quote $arPageId (no SQL
		# injection via string interpolation) and render the null check as
		# "ar_parent_id IS NULL" — the old string 'ar_parent_id=NULL' compared
		# with "=", which never matches anything in SQL.
		# NOTE(review): ar_parent_id IS NULL selects the revision with no
		# parent (the earliest one); confirm this is the intended row for the
		# redirect check.
		$cond = array( 'ar_page_id' => $arPageId, 'ar_parent_id' => null );
		$result = $dbr->select( $tables, array( '*' ), $cond, __METHOD__, $opts, $join );
		$mostRecentRevisionRow = $result->fetchRow();
		if ( $mostRecentRevisionRow ) {
			# Use the revision's wikitext (old_text from the joined text table),
			# not old_id, which is just the numeric text-row ID.
			# NOTE(review): old_text may be compressed/external depending on
			# ar_flags — confirm plain text is stored on this wiki.
			$mostRecentRevisionText = $mostRecentRevisionRow['old_text'];
			$pageIsRedirect = Title::newFromRedirectRecurse( $mostRecentRevisionText ) ? 1 : 0;
		} else {
			// No matching archived revision; assume not a redirect
			$pageIsRedirect = 0;
		}

		# Now get all the other data
		$cond = array( 'ar_page_id' => $arPageId );
		# Aliases because we're going to be exporting from the archive table and
		# the user will be importing into another wiki's page table
		$vars = array(
			'*',
			'ar_namespace AS page_namespace',
			'ar_title AS page_title',
			'ar_comment AS rev_comment',
			'ar_user AS rev_user',
			'ar_user_text AS rev_user_text',
			'ar_timestamp AS rev_timestamp',
			'ar_minor_edit AS rev_minor_edit',
			'ar_flags AS old_flags',
			'ar_rev_id AS rev_id',
			'ar_text_id AS rev_text_id',
			'ar_deleted AS rev_deleted',
			'ar_len AS rev_len',
			'ar_page_id AS page_id',
			'ar_parent_id AS rev_parent_id',
			'ar_sha1 AS rev_sha1',
			'NULL as page_restrictions',
			"$pageIsRedirect AS page_is_redirect",
		);
		# Do the query!
		$result = $dbr->select( $tables, $vars, $cond, __METHOD__, $opts, $join );
		$wrapper = $dbr->resultObject( $result );
		# Output dump results
		$this->outputPageStream( $wrapper );
		$dbr->bufferResults( $prev );
		$this->closeStream();
		if ( $lb ) {
			$lb->closeAll();
		}
	}

	/**
	 * Runs through a query result set dumping page and revision records.
	 * The result set should be sorted/grouped by page to avoid duplicate
	 * page records in the output.
	 *
	 * The result set will be freed once complete. Should be safe for
	 * streaming (non-buffered) queries, as long as it was made on a
	 * separate database connection not managed by LoadBalancer; some
	 * blob storage types will make queries to pull source data.
	 *
	 * @param ResultWrapper $resultset
	 */
	protected function outputPageStream( $resultset ) {
		$last = null;
		foreach ( $resultset as $row ) {
			if ( is_null( $last ) ||
				$last->page_namespace != $row->page_namespace ||
				$last->page_title     != $row->page_title ) {
				// Starting a new page: close the previous one first
				if ( isset( $last ) ) {
					$output = '';
					// Fixed fatal typo: was "$his->writer"
					$output .= $this->writer->closePage();
					$this->sink->writeClosePage( $output );
				}
				$output = $this->writer->openPage( $row );
				$this->sink->writeOpenPage( $row, $output );
				$last = $row;
			}
			$output = $this->writer->writeRevision( $row );
			$this->sink->writeRevision( $row, $output );
		}
		// Close the final page, if any rows were processed
		if ( isset( $last ) ) {
			$output = '';
			$output .= $this->writer->closePage();
			$this->sink->writeClosePage( $output );
		}
	}

	/** Open the XML dump stream on the sink. */
	public function openStream() {
		$output = $this->writer->openStream();
		$this->sink->writeOpenStream( $output );
	}

	/** Close the XML dump stream on the sink. */
	public function closeStream() {
		$output = $this->writer->closeStream();
		$this->sink->writeCloseStream( $output );
	}
}

EmailDeletedPages.i18n.php[edit]

<?php
/**
 * Internationalisation file for the EmailDeletedPages extension.
 */

$messages = array();

/** English */
$messages['en'] = array(
	'tog-emaildeletedpages' => 'Send me copies of pages on my watchlist that are deleted',
);

/** Message documentation for translators (qqq) */
$messages['qqq'] = array();