How it all started
This is getting pretty annoying by now, so I figured it was time to upgrade DokuWiki. The version currently in use is dokuwiki-2009-02-14b; apparently a dokuwiki-2009-12-02 release came out in between. Here's hoping it goes smoothly.
indexer.php (mecab support, 2009-02-14)
The parts I modified back in the 2009-02-14 version (the 02-14b release only updated a handful of files). The gist: define a PRE_TOKENIZER command and pipe the text through mecab's wakati (word-segmenting) output before DokuWiki's own whitespace-based tokenizer runs.
--- indexer.php.org	2009-02-14 21:13:24.000000000 +0900
+++ indexer.php.mod	2009-04-23 16:17:13.000000000 +0900
@@ -45,6 +45,8 @@
                    ']?');
 define('IDX_ASIAN', '(?:'.IDX_ASIAN1.'|'.IDX_ASIAN2.'|'.IDX_ASIAN3.')');
 
+define('PRE_TOKENIZER', '/usr/bin/mecab -O wakati');
+
 /**
  * Measure the length of a string.
  * Differs from strlen in handling of asian characters.
@@ -52,11 +54,16 @@
  * @author Tom N Harris <tnharris@whoopdedo.org>
  */
 function wordlen($w){
-    $l = strlen($w);
+    //$l = strlen($w);
+    $l = utf8_strlen($w);
+
+    /*
     // If left alone, all chinese "words" will get put into w3.idx
     // So the "length" of a "word" is faked
     if(preg_match('/'.IDX_ASIAN2.'/u',$w))
         $l += ord($w) - 0xE1; // Lead bytes from 0xE2-0xEF
+    */
+
     return $l;
 }
 
@@ -220,6 +227,28 @@
     list($page,$body) = $data;
 
+    if(function_exists('proc_open') && defined('PRE_TOKENIZER')) {
+        $dspec = array(
+            0 => array("pipe", "r"),
+            1 => array("pipe", "w"),
+            2 => array("file", "/dev/null", "w")
+        );
+        $process = proc_open(PRE_TOKENIZER, $dspec, $pipes);
+        if(is_resource($process)) {
+            stream_set_blocking($pipes[0], FALSE);
+            stream_set_blocking($pipes[1], FALSE);
+            fwrite($pipes[0], $body . "\n");
+            fclose($pipes[0]);
+
+            $body = '';
+            while(!feof($pipes[1])) {
+                $body .= fgets($pipes[1], 32768);
+            }
+            fclose($pipes[1]);
+            proc_close($process);
+        }
+    }
+
     $body = strtr($body, "\r\n\t", '   ');
     $tokens = explode(' ', $body);
     $tokens = array_count_values($tokens); // count the frequency of each token
 
@@ -489,7 +518,8 @@
             $wild |= 2;
             $wlen -= 1;
         }
-        if ($wlen < IDX_MINWORDLENGTH && $wild == 0 && !is_numeric($xword)) continue;
+        //if ($wlen < IDX_MINWORDLENGTH && $wild == 0 && !is_numeric($xword)) continue;
+        if (preg_match('/[^0-9A-Za-z]/u', $string) && $wlen < IDX_MINWORDLENGTH && $wild == 0 && !is_numeric($xword)) continue;
         if(!isset($tokens[$xword])){
             $tokenlength[$wlen][] = $xword;
         }
@@ -628,12 +658,36 @@
  */
 function idx_tokenizer($string,&$stopwords,$wc=false){
     $words = array();
+
+    if(function_exists('proc_open') && defined('PRE_TOKENIZER')) {
+        $dspec = array(
+            0 => array("pipe", "r"),
+            1 => array("pipe", "w"),
+            2 => array("file", "/dev/null", "w")
+        );
+        $process = proc_open(PRE_TOKENIZER, $dspec, $pipes);
+        if(is_resource($process)) {
+            stream_set_blocking($pipes[0], FALSE);
+            stream_set_blocking($pipes[1], FALSE);
+            fwrite($pipes[0], $string . "\n");
+            fclose($pipes[0]);
+            $string = '';
+            while(!feof($pipes[1])) {
+                $string .= fgets($pipes[1], 32768);
+            }
+            fclose($pipes[1]);
+            proc_close($process);
+        }
+    }
+
     $wc = ($wc) ? '' : $wc = '\*';
     if(preg_match('/[^0-9A-Za-z]/u', $string)){
+        /*
         // handle asian chars as single words (may fail on older PHP version)
         $asia = @preg_replace('/('.IDX_ASIAN.')/u',' \1 ',$string);
         if(!is_null($asia)) $string = $asia; //recover from regexp failure
+        */
         $arr = explode(' ', utf8_stripspecials($string,' ','\._\-:'.$wc));
         foreach ($arr as $w) {
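Pulled out of the patch, the pre-tokenization step boils down to this: write the text to mecab over a pipe and read back the space-separated result. Below is a minimal standalone sketch of that round trip, assuming mecab sits at /usr/bin/mecab; the sample string and the echo are only for illustration.

<?php
// Minimal sketch of the pre-tokenization round trip, assuming mecab is
// at /usr/bin/mecab. "-O wakati" makes mecab print the input segmented
// into words separated by single spaces, which is exactly the form
// DokuWiki's explode(' ', ...) tokenizer expects.
$text = 'すもももももももものうち';

$dspec = array(
    0 => array('pipe', 'r'),              // stdin: we write the raw text
    1 => array('pipe', 'w'),              // stdout: mecab writes segmented text
    2 => array('file', '/dev/null', 'w')  // stderr: discarded
);
$process = proc_open('/usr/bin/mecab -O wakati', $dspec, $pipes);
if (is_resource($process)) {
    fwrite($pipes[0], $text . "\n");
    fclose($pipes[0]);                    // EOF tells mecab the input is complete
    $segmented = stream_get_contents($pipes[1]);
    fclose($pipes[1]);
    proc_close($process);
    echo $segmented;                      // "すもも も もも も もも の うち"
}

For page-sized input, closing stdin and then reading all of stdout like this is enough; the patch's non-blocking loop with fgets() does essentially the same job inside the indexer.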
Installing mecab with apt on Debian puts it at /usr/bin/mecab, but anyone on a different setup will need to change this path.
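If mecab lives somewhere else (built from source into /usr/local/bin, say), only the PRE_TOKENIZER define needs to change. As a rough sketch, the path could also be looked up at runtime instead of hard-coded; using shell_exec() with which here is my own assumption about the environment, not part of the patch above.

<?php
// Hypothetical: locate the mecab binary at runtime rather than hard-coding it.
// The (string) cast guards against shell_exec() returning NULL when `which`
// finds nothing; trim() strips the trailing newline that `which` prints.
$mecab = trim((string)shell_exec('which mecab'));
if ($mecab !== '') {
    define('PRE_TOKENIZER', $mecab . ' -O wakati');
}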
To be continued.