article_list[] = urlencode ( trim ( $title ) ) ;
}
/**
 * Checks whether a title is one of the known articles.
 * The title is normalized (trimmed, then URL-encoded) the same way
 * entries are stored in $this->article_list before the lookup.
 */
function is_an_article ( $title ) {
	$normalized = urlencode ( trim ( $title ) ) ;
	return in_array ( $normalized , $this->article_list ) ;
}
/**
 * Gets the numeric namespace ID for a "Prefix:Title" link.
 * XXX TODO: why are some negative?
 *  "6"  = images
 *  "-8" = category link
 *  "-9" = interlanguage link
 *  "11" = templates
 * Returns 0 for the main namespace (no prefix, empty prefix, or unknown prefix).
 */
function get_namespace_id ( $text ) {
	$parts = explode ( ":" , strtoupper ( $text ) , 2 ) ;
	# No colon at all -> main namespace
	if ( count ( $parts ) != 2 ) return 0 ;
	$prefix = trim ( $parts[0] ) ;
	if ( $prefix == "" ) return 0 ;
	# Hackish, for category link (English/German)
	if ( $prefix == "CATEGORY" || $prefix == "KATEGORIE" ) return -8 ;
	# Hackish: short prefixes are assumed to be interlanguage links
	if ( strlen ( $prefix ) < 4 ) return -9 ;
	if ( $prefix == "SIMPLE" ) return -9 ;
	# Horrible manual hack, for now
	switch ( $prefix ) {
		case "IMAGE" :
		case "BILD" :
			return 6 ;
		case "TEMPLATE" :
		case "VORLAGE" :
			return 11 ;
	}
	return 0 ;
}
/**
 * Downloads an image from the wiki into the local image destination
 * directory ($xmlg['image_destination']) unless it is already there or
 * file downloads are blocked.
 * @param string $name Image name (without namespace prefix)
 * @param string $url  Optional explicit source URL; resolved via
 *                     get_image_url() when empty
 * @return string The URL-encoded local file name (even when nothing was
 *                downloaded)
 */
function copyimagefromwiki ( $name , $url = "" ) {
	global $xmlg ;
	$dir = $xmlg['image_destination'] ;
	if ( $url == "" )
		$url = $this->get_image_url ( $name ) ; # BUG FIX: was bareword `name` (undefined constant), not the variable
	$fname = urlencode ( $name ) ;
	$target = $dir . "/" . $fname ;
	if ( !file_exists ( $target ) && !$this->block_file_download ) {
		@mkdir ( $dir ) ;
		# dub sez... use cURL
		$ch = curl_init();
		curl_setopt($ch, CURLOPT_URL, $url);
		$fh = @fopen($target, 'w');
		if ( $fh !== false ) { # only stream to disk if the target could be opened
			curl_setopt($ch, CURLOPT_FILE, $fh);
			curl_exec($ch);
			fclose($fh);
		}
		curl_close($ch);
	}
	return $fname ;
}
/**
 * URL-encodes a title after converting spaces to underscores
 * (MediaWiki-style page name encoding).
 */
function myurlencode ( $t ) {
	return urlencode ( str_replace ( " " , "_" , $t ) ) ;
}
/**
 * Builds the upload URL for an image on Wikimedia-style sites.
 * Uses the MD5-based hashed directory layout ("m/mm/Name") of MediaWiki
 * uploads. For *.wikipedia.org / *.wikibooks.org sites it probes the
 * language-local upload directory and falls back to Wikimedia Commons
 * if the file is not found there; for other sites it assumes a plain
 * /images/ path.
 * @param string $name Image name (without namespace prefix)
 * @return string Full URL of the image
 */
function get_image_url ( $name ) {
	global $xmlg ;
	$site = $xmlg['site_base_url'] ;
	$parts = explode ( ".wikipedia.org/" , $site ) ;
	$parts2 = explode ( ".wikibooks.org/" , $site ) ;
	# NOTE(review): the MD5 hash is taken over the raw (non-UTF8-encoded)
	# name; a previous utf8_encode() result was computed but never used,
	# so it has been removed.
	$image2 = ucfirst ( str_replace ( " " , "_" , $name ) ) ;
	$m = md5( $image2 ) ;
	$m1 = substr ( $m , 0 , 1 ) ;
	$m2 = substr ( $m , 0 , 2 ) ;
	$i = "{$m1}/{$m2}/" . $this->myurlencode ( ucfirst ( $name ) ) ;
	if ( count ($parts ) > 1 ) {
		$lang = array_shift ( $parts ) ;
		$url = "http://upload.wikimedia.org/wikipedia/{$lang}/{$i}" ;
		$url2 = "http://upload.wikimedia.org/wikipedia/commons/{$i}" ;
		$h = @fopen ( $url , "r" ) ;
		if ( $h === false ) $url = $url2 ; # not in local wiki, use Commons
		else fclose ( $h ) ;
	} else if ( count ($parts2 ) > 1 ) {
		$lang = array_shift ( $parts2 ) ;
		$url = "http://upload.wikimedia.org/wikibooks/{$lang}/{$i}" ;
		$url2 = "http://upload.wikimedia.org/wikipedia/commons/{$i}" ;
		$h = @fopen ( $url , "r" ) ;
		if ( $h === false ) $url = $url2 ; # not in local wiki, use Commons
		else fclose ( $h ) ;
	} else {
		$url = "http://{$site}/images/{$i}" ;
	}
	# print "{$url}\n" ; # BUG FIX: this debug line was split across two lines, leaving a stray unterminated string literal
	return $url ;
}
/**
 * Whether images should be rendered in the output.
 * Always returns true for this provider.
 */
function do_show_images () {
	return true ;
}
}
# Access through HTTP protocol
class ContentProviderHTTP extends ContentProvider {
var $article_cache = array () ;
var $first_title = "" ;
var $load_error ;
/**
 * Extracts the contents between the first <tag ...> element and its
 * closing </tag> in $text.
 * @param string $tag  Tag name without angle brackets (e.g. "text")
 * @param string $text Source markup, by reference to avoid copying
 * @return string Text between the tags, or "" when the opening tag,
 *                its closing ">", or the end tag is missing
 */
function between_tag ( $tag , &$text ) {
	$a = explode ( "<{$tag}" , $text , 2 ) ;
	if ( count ( $a ) == 1 ) return "" ; # no opening tag
	$a = explode ( ">" , " " . array_pop ( $a ) , 2 ) ;
	if ( count ( $a ) == 1 ) return "" ; # opening tag never closed
	# BUG FIX: split on the full closing tag "</tag>"; splitting on "tag>"
	# alone matched inside "</tag>" and returned the content with a stray
	# trailing "</" attached.
	$a = explode ( "</{$tag}>" , array_pop ( $a ) , 2 ) ;
	if ( count ( $a ) == 1 ) return "" ; # no closing tag
	return array_shift ( $a ) ;
}
function do_get_contents ( $title ) {
global $xmlg ;
$use_se = false ;
if ( isset ( $xmlg["use_special_export"] ) && $xmlg["use_special_export"] == 1 ) $use_se = true ;
if ( $xmlg["useapi"] ) {
$url = "http://" . $xmlg["site_base_url"] . "/api.php?format=php&action=query&prop=revisions&rvexpandtemplates=1&rvprop=timestamp|user|comment|content&titles=" . urlencode ( $title ) ;
$data = @file_get_contents ( $url ) ;
$data = unserialize ( $data ) ;
$data = $data['query'] ; if ( !isset ( $data ) ) return "" ;
$data = $data['pages'] ; if ( !isset ( $data ) ) return "" ;
$data = array_shift ( $data ) ;
$data = $data['revisions'] ; if ( !isset ( $data ) ) return "" ;
$data = $data['0'] ; if ( !isset ( $data ) ) return "" ;
$data = $data['*'] ; if ( !isset ( $data ) ) return "" ;
return $data ;
# $data = $data['page'] ; if ( !isset ( $data ) ) return "" ;
# $data = $data['revision'] ; if ( !isset ( $data ) ) return "" ;
# $data = $data['ref'] ; if ( !isset ( $data ) ) return "" ;
#print urldecode ( $url ) . "\n" ;
print "
" ; print_r ( $data ) ; print "" ; exit ; $s = "Still here..." ; return $s ; } else if ( $use_se ) { $url = "http://" . $xmlg["site_base_url"] . "/index.php?listauthors=1&title=Special:Export/" . urlencode ( $title ) ; } else { if ( $xmlg["use_toolserver_url"] ) { # $url = "http://" . $xmlg["site_base_url"] . "/index.php?action=raw&title=" . urlencode ( $title ) ; $u = urlencode ( $title ) ; $site = array_shift ( explode ( "/" , $xmlg["site_base_url"] ) ) ; $url = "http://tools.wikimedia.de/~daniel/WikiSense/WikiProxy.php?wiki={$site}&title={$u}&rev=0&go=Fetch" ; } else { $url = "http://" . $xmlg["site_base_url"] . "/index.php?action=raw&title=" . urlencode ( $title ) ; } } $s = @file_get_contents ( $url ) ; if ( $use_se ) { $text = html_entity_decode ( $this->between_tag ( "text" , $s ) ) ; $this->authors = array () ; $authors = $this->between_tag ( "contributors" , $s ) ; $authors = explode ( "