--- trunk/all2xml.pl 2004/04/18 00:57:39 320
+++ trunk/all2xml.pl 2006/06/06 12:34:25 747
@@ -1,7 +1,7 @@
#!/usr/bin/perl -w
use strict;
-use OpenIsis;
+use Biblio::Isis;
use Getopt::Std;
use Data::Dumper;
use XML::Simple;
@@ -11,6 +11,7 @@
#use GDBM_File;
use Fcntl; # for O_RDWR
use TDB_File;
+use Carp;
$|=1;
@@ -22,7 +23,8 @@
my $config;
#use index_DBI; # default DBI module for index
-use index_DBI_cache; # faster DBI module using memory cache
+#use index_DBI_cache; # faster DBI module using memory cache
+use index_DBI_filter; # filter support for indexes
my $index;
my %opts;
@@ -61,7 +63,7 @@
'isis' => 'isis',
'excel' => 'column',
'marc' => 'marc',
- 'feed' => 'feed'
+ 'feed' => 'feed',
);
my $cache; # for cacheing
@@ -74,6 +76,9 @@
my $last_field_name; # cache to prevent repeated fields
+my $broken_cdata = XMLin(']]>') eq '>';
warn "XML::Simple on this system seems broken with <![CDATA[ ... ]]>.\n" if ($broken_cdata);
+
sub data2xml {
use xmlify;
@@ -111,6 +116,10 @@
$cache->{tags_by_order} = \@sorted_tags;
}
+ if (! @sorted_tags) {
+ print STDERR "WARNING: no tags for this type found in import_xml file!\n";
+ }
+
# lookup key
my $lookup_key;
@@ -120,6 +129,7 @@
delete $cache->{swish_exact_data};
delete $cache->{index_data};
delete $cache->{index_delimiter};
+ delete $cache->{distinct};
my @page_fields; # names of fields
@@ -139,12 +149,14 @@
} else {
print STDERR "WARNING: field '$field' doesn't have 'name' attribute!";
}
+
if ($field_name) {
+ $field_name = x($field_name);
if (! $last_field_name) {
- $last_field_name = x($field_name);
+ $last_field_name = $field_name;
return $last_field_name;
} elsif ($field_name ne $last_field_name) {
- $last_field_name = x($field_name);
+ $last_field_name = $field_name;
return $last_field_name;
}
}
@@ -168,6 +180,8 @@
($s,$se,$d,$i) = (0,1,0,0);
} elsif (lc($type) =~ /^lookup/) {
($s,$se,$d,$i,$il) = (0,1,0,0,1);
+ } elsif ($type) {
+ print STDERR "WARNING: unknown type: $type\n";
}
return ($s,$se,$d,$i,$il);
}
@@ -196,6 +210,7 @@
delete $x->{value};
delete $x->{delimiter};
$x->{content} = $v;
+ $d =~ s#>$## if ($d && $broken_cdata);
$x->{delimiter} = $d;
}
return $x;
@@ -215,7 +230,7 @@
my ($swish,$display);
- my $tag = $type2tag{$type} || die "can't find which tag to use for type $type";
+ my $tag = $cfg->val($database, 'import_xml_tag') || $type2tag{$type} || die "can't find which tag to use for type $type";
# is this field page-by-page?
my $iterate_by_page = $config->{indexer}->{$field}->{iterate_by_page};
@@ -224,6 +239,11 @@
# default line_delimiter if using
	my $page_line_delimiter = $config->{indexer}->{$field}->{page_line_delimiter} || '<br/>
';
$cache->{index_delimiter}->{$field} = $config->{indexer}->{$field}->{index_delimiter};
+ my $distinct = $config->{indexer}->{$field}->{distinct};
+ if ($distinct && !$iterate_by_page) {
+ warn "WARNING: distinct is currently not supported without iterate_by_page!\n";
+ $distinct = 0;
+ }
my $format_name = $config->{indexer}->{$field}->{format_name};
my $format_delimiter = $config->{indexer}->{$field}->{format_delimiter};
@@ -253,8 +273,6 @@
# init vars so that we go into while...
($swish,$display) = (1,1);
- # placeholder for all repeatable entries for index
-
sub mkformat($$) {
my $x = shift || die "mkformat needs tag reference";
my $data = shift || return;
@@ -404,9 +422,10 @@
$ldel = " " if ($append);
#print STDERR "line delimiter: ",Dumper($ldel) if ($ldel);
if (! $cache->{$what}->{$field}->[$page]) {
- $cache->{$what}->{$field}->[$page] = $data;
- } else {
- $cache->{$what}->{$field}->[$page] .= $ldel.$data;
+ push @{$cache->{$what}->{$field}->[$page]}, {
+ data => $data,
+ delimiter => $ldel,
+ };
}
}
@@ -473,7 +492,7 @@
if ($val) {
$display_data .= $delimiter.$val if ($d);
$swish_data .= " ".$val if ($s);
- $index->insert($field, $val, $path) if ($i);
+ $index->insert($field, $val, $val, $path) if ($i);
}
if ($iterate_by_page) {
@@ -481,15 +500,15 @@
# on first page!!!
my $page = 0;
if ($display_data) {
- $cache->{display_data}->{$field}->[$page] = $display_data;
+ push @{$cache->{display_data}->{$field}->[$page]}, { data => $display_data };
$display_data = "";
}
if ($swish_data) {
- $cache->{swish_data}->{$field}->[$page] = $swish_data;
+ push @{$cache->{swish_data}->{$field}->[$page]}, { data => $swish_data };
$swish_data = "";
}
if ($swish_exact_data) {
- $cache->{swish_exact_data}->{$field}->[$page] = $swish_exact_data;
+ push @{$cache->{swish_exact_data}->{$field}->[$page]}, { data => $swish_exact_data };
$swish_exact_data = "";
}
}
@@ -500,14 +519,28 @@
my $nr_pages = $page_max{$field} || next;
#print STDERR "field '$field' iterate over ",($nr_pages || 0)," pages...\n";
#print STDERR Dumper($cache->{display_data});
+ my $seen; # used for distinct
for (my $page=0; $page <= $nr_pages; $page++) {
my $display_data;
- if ($cache->{format}->{$field}) {
- my $tmp = mkformat($cache->{format}->{$field},$cache->{display_data}->{$field}->[$page]);
- $display_data=$tmp if ($tmp);
- } else {
- $display_data = $cache->{display_data}->{$field}->[$page];
+ my $delimiter = '';
+ foreach my $element (@{ $cache->{display_data}->{$field}->[$page] }) {
+ my $data = $element->{data};
+ die "BUG! no data in element?" unless ($data);
+
+ if ($distinct) {
+ next if ($cache->{distinct}->{$field}->{ $data });
+ $cache->{distinct}->{$field}->{ $data } = 1;
+ }
+
+ if ($cache->{format}->{$field}) {
+ my $tmp = mkformat($cache->{format}->{$field},$data);
+ $display_data .= $delimiter . $tmp if ($tmp);
+ } else {
+ $display_data .= $delimiter . $data;
+ }
+ $delimiter = $element->{delimiter} if ($element->{delimiter});
}
+
if ($display_data) { # default
if ($field eq "headline") {
$xml .= xmlify("headline", $display_data);
@@ -519,7 +552,7 @@
}
}
- my $swish_data = $cache->{swish_data}->{$field}->[$page];
+ my $swish_data = join(" ",map { $_->{data} } @{ $cache->{swish_data}->{$field}->[$page] });
if ($swish_data) {
# remove extra spaces
$swish_data =~ s/ +/ /g;
@@ -528,7 +561,7 @@
$xml .= xmlify($field."_swish", my_unac_string($codepage,$swish_data));
}
- my $swish_exact_data = $cache->{swish_exact_data}->{$field}->[$page];
+ my $swish_exact_data = join(" ", map { $_->{data} } @{ $cache->{swish_exact_data}->{$field}->[$page] });
if ($swish_exact_data) {
$swish_exact_data =~ s/ +/ /g;
$swish_exact_data =~ s/ +$//g;
@@ -630,6 +663,10 @@
foreach my $database ($cfg->Sections) {
+ # save database name in global variable path for later
+ # (need for index filter creation)
+ $path = $database;
+
my $type = lc($cfg -> val($database, 'type')) || die "$database doesn't have 'type' defined";
my $add_xml = $cfg -> val($database, 'xml'); # optional
@@ -637,6 +674,10 @@
my $lookup_file = $cfg -> val($database, 'lookup_newfile'); # optional
if ($lookup_file) {
#tie %lhash, 'GDBM_File', $lookup_file, &GDBM_NEWDB, 0644;
+ if (! -e $lookup_file) {
+ open(LOOKUP, "> $lookup_file") || die "can't create '$lookup_file': $!";
+ close(LOOKUP);
+ }
tie %lhash, 'TDB_File', $lookup_file, TDB_CLEAR_IF_FIRST, O_RDWR, 0644;
print STDERR "creating lookup file '$lookup_file'\n";
# delete memory cache for lookup file
@@ -651,43 +692,90 @@
print STDERR "opening lookup file '$lookup_file'\n";
}
-print STDERR "reading ./import_xml/$type.xml\n";
+ my $import_xml_type = $cfg->val($database, 'import_xml_file') || $type;
+ my $import_xml_file = "./import_xml/$import_xml_type.xml";
+
+ if (! -r $import_xml_file) {
+ print STDERR "ERROR: file $import_xml_file not readable, skipping!\n";
+ next;
+ }
+
+ print STDERR "reading $import_xml_file\n";
# extract just type basic
my $type_base = $type;
$type_base =~ s/_.+$//g;
- $config=XMLin("./import_xml/$type.xml", ForceArray => [ $type2tag{$type_base}, 'config', 'format' ], ForceContent => 1 );
+ my $tag = $cfg->val($database, 'import_xml_tag') || $type2tag{$type_base} || die "can't find which tag to use for type $type";
+ $config=XMLin($import_xml_file, ForceArray => [ $tag, 'config', 'format' ], ForceContent => 1 );
+
+ # check for broken XML::Simple
+ if ( $broken_cdata ) {
+ map {
+ $config->{format}->{$_}->{content} =~ s#>$##;
+ } keys %{ $config->{format} };
+ }
+
+ # helper for progress bar
+ sub fmt_time {
+ my $t = shift || 0;
+ my $out = "";
+
+ my ($ss,$mm,$hh) = gmtime($t);
+ $out .= "${hh}h" if ($hh);
+ $out .= sprintf("%02d:%02d", $mm,$ss);
+ $out .= " " if ($hh == 0);
+ return $out;
+ }
# output current progress indicator
my $last_p = 0;
+ my $start_t = time();
sub progress {
return if (! $show_progress);
my $current = shift;
my $total = shift || 1;
my $p = int($current * 100 / $total);
- if ($p != $last_p) {
- printf STDERR ("%5d / %5d [%-51s] %-2d %% \r",$current,$total,"=" x ($p/2).">", $p );
+ if ($p < $last_p || $current == 1) {
+ $start_t = time();
+ $last_p = 0;
+ } elsif ($p != $last_p) {
+ my $rate = ($current / (time() - $start_t || 1));
+ my $eta = ($total-$current) / ($rate || 1);
+ printf STDERR ("%5d [%-38s] %-5d %0.1f/s %s\r",$current,"=" x ($p/3)."$p%>", $total, $rate, fmt_time($eta));
$last_p = $p;
}
}
my $fake_dir = 1;
+ my $fake_pos = 0;
+ my $last_fake_t = time();
sub fakeprogress {
return if (! $show_progress);
my $current = shift @_;
- my @ind = ('-','\\','|','/','-','\\','|','/', '-');
+ my @ind = ('-','\\','|','/','-','\\','|','/');
+
+ if ($current < $fake_pos) {
+ $start_t = time();
+ $last_fake_t = 0;
+ $fake_dir = 1;
+ $fake_pos = 0;
+ }
+
+ if (time()-$last_fake_t >= 1) {
+ $last_fake_t = time();
+ $fake_pos += $fake_dir;
+ $fake_dir = -$fake_dir if ($fake_pos > 38);
+ }
- $last_p += $fake_dir;
- $fake_dir = -$fake_dir if ($last_p > 1000 || $last_p < 0);
- if ($last_p % 10 == 0) {
- printf STDERR ("%5d / %5s [%-51s]\r",$current,"?"," " x ($last_p/20).$ind[($last_p/20) % $#ind]);
+ if ($current % 10 == 0) {
+ my $rate = ($current / (time() - $start_t || 1));
+ printf STDERR ("%5d [%-38s] %0.1f/s\r",$current, " " x $fake_pos .$ind[($current / 10) % 8], $rate);
}
}
# now read database
-print STDERR "using: $type...\n";
# erase cache for tags by order in this database
delete $cache->{tags_by_order};
@@ -697,66 +785,24 @@
my $isis_db = $cfg -> val($database, 'isis_db') || die "$database doesn't have 'isis_db' defined!";
$import2cp = Text::Iconv->new($config->{isis_codepage},$codepage);
- my $db = OpenIsis::open( $isis_db );
+ my $db = new Biblio::Isis( isisdb => $isis_db );
- # check if .txt database for OpenIsis is zero length,
- # if so, erase it and re-open database
- sub check_txt_db {
- my $isis_db = shift || die "need isis database name";
- my $reopen = 0;
-
- if (-e $isis_db.".TXT") {
- print STDERR "WARNING: removing $isis_db.TXT OpenIsis database...\n";
- unlink $isis_db.".TXT" || warn "FATAL: unlink error on '$isis_db.TXT': $!";
- $reopen++;
- }
- if (-e $isis_db.".PTR") {
- print STDERR "WARNING: removing $isis_db.PTR OpenIsis database...\n";
- unlink $isis_db.".PTR" || warn "FATAL: unlink error on '$isis_db.PTR': $!";
- $reopen++;
- }
- return OpenIsis::open( $isis_db ) if ($reopen);
- }
-
- # EOF error
- if ($db == -1) {
- $db = check_txt_db($isis_db);
- if ($db == -1) {
- print STDERR "FATAL: OpenIsis can't open zero size file $isis_db\n";
- next;
- }
- }
-
- # OpenIsis::ERR_BADF
- if ($db == -4) {
- print STDERR "FATAL: OpenIsis can't find file $isis_db\n";
- next;
- # OpenIsis::ERR_IO
- } elsif ($db == -5) {
- print STDERR "FATAL: OpenIsis can't access file $isis_db\n";
- next;
- } elsif ($db < 0) {
- print STDERR "FATAL: OpenIsis unknown error $db with file $isis_db\n";
+ if (! $db) {
+ print STDERR "FATAL: can't read ISIS database: $isis_db, skipping...\n";
next;
}
- my $max_rowid = OpenIsis::maxRowid( $db );
-
- # if 0 records, try to rease isis .txt database
- if ($max_rowid == 0) {
- # force removal of database
- $db = check_txt_db($isis_db);
- $max_rowid = OpenIsis::maxRowid( $db );
- }
+ my $max_rowid = $db->count if ($db);
print STDERR "Reading database: $isis_db [$max_rowid rows]\n";
- my $path = $database;
-
for (my $row_id = 1; $row_id <= $max_rowid; $row_id++ ) {
- my $row = OpenIsis::read( $db, $row_id );
- if ($row && $row->{mfn}) {
-
+ my $row = $db->to_hash( $row_id );
+ if ($row) {
+
+ $row->{mfn} = $row_id;
+ $row->{record} = $db->{record};
+
progress($row->{mfn}, $max_rowid);
my $swishpath = $path."#".int($row->{mfn});
@@ -770,10 +816,6 @@
}
}
}
- # for this to work with current version of OpenIsis (0.9.0)
- # you might need my patch from
- # http://www.rot13.org/~dpavlin/projects/openisis-0.9.0-perl_close.diff
- OpenIsis::close($db);
print STDERR "\n";
} elsif ($type_base eq "excel") {
@@ -785,7 +827,8 @@
my $excel_file = $cfg -> val($database, 'excel_file') || die "$database doesn't have 'excel_file' defined!";
my $sheet = x($config->{sheet}) || die "no sheet in $type.xml";
- my $start_row = x($config->{start_row}) - 1 || die "no start_row in $type.xml";
+ my $start_row = x($config->{start_row}) || die "no start_row in $type.xml";
+ $start_row--;
my $oBook = Spreadsheet::ParseExcel::Workbook->Parse($excel_file) || die "can't open Excel file '$excel_file'";
@@ -807,7 +850,11 @@
for(my $iC = $oWorksheet->{MinCol} ; defined $oWorksheet->{MaxCol} && $iC <= $oWorksheet->{MaxCol} ; $iC++) {
my $cell = $oWorksheet->{Cells}[$iR][$iC];
if ($cell) {
- $row->{int2col($iC)} = $cell->Value;
+ # this conversion is a cludge.
+ # Files from Excell could have
+ # characters which don't fit into
+ # destination encoding.
+ $row->{int2col($iC)} = $utf2cp->convert($cell->Value) || $cell->Value;
}
}
@@ -831,44 +878,57 @@
print "Document-Type: XML\n\n$xml\n";
}
}
+
+ print STDERR "\n";
+
} elsif ($type_base eq "marc") {
- require MARC;
+ require MARC::File::USMARC;
$import2cp = Text::Iconv->new($config->{marc_codepage},$codepage);
my $marc_file = $cfg -> val($database, 'marc_file') || die "$database doesn't have 'marc_file' defined!";
# optional argument is format
- my $format = x($config->{marc_format}) || 'usmarc';
-
+ warn "marc_format is no longer used!" if ($config->{marc_format});
print STDERR "Reading MARC file '$marc_file'\n";
- my $marc = new MARC;
- my $nr = $marc->openmarc({
- file=>$marc_file, format=>$format
- }) || die "Can't open MARC file '$marc_file' with format '$format'";
+ my $marc = MARC::File::USMARC->in( $marc_file );
+
+ if (! $marc) {
+ print STDERR "FATAL: can't read MARC file: $marc_file, skipping...\n";
+ next;
+ }
- # read MARC file in memory
- $marc->nextmarc(-1);
+ # count records in MARC file
+ sub marc_count {
+ my $filename = shift || die;
+ my $file = MARC::File::USMARC->in($filename) || return;
+ my $count = 0;
+ while ($file->skip()) {
+ $count++;
+ }
+ return $count;
+ }
- my $max_rec = $marc->marc_count();
+ my $count = marc_count($marc_file) || warn "no records in '$marc_file'?";
- for(my $i=1; $i<=$max_rec; $i++) {
+ my $i = 1;
- progress($i,$max_rec);
+ while( my $rec = $marc->next() ) {
- # store value for marc_sf.pm
- $main::cache->{marc_record} = $i;
+ progress($i,$count);
my $swishpath = $database."#".$i;
- if (my $xml = data2xml($type_base,$marc,$add_xml,$cfg,$database)) {
+ if (my $xml = data2xml($type_base,$rec,$add_xml,$cfg,$database)) {
$xml = $cp2utf->convert($xml);
use bytes; # as opposed to chars
print "Path-Name: $swishpath\n";
print "Content-Length: ".(length($xml)+1)."\n";
print "Document-Type: XML\n\n$xml\n";
}
+
+ $i++;
}
print STDERR "\n";
@@ -913,6 +973,72 @@
}
# close lookup
untie %lhash if (%lhash);
+
+ } elsif ($type_base eq "dbf") {
+
+ my $dbf_file = $cfg -> val($database, 'dbf_file') || die "$database doesn't have 'dbf_file' defined!";
+ my $dbf_codepage = $cfg -> val($database, 'dbf_codepage') || die "$database doesn't have 'dbf_codepage' defined!";
+ my $dbf_mapping = $cfg -> val($database, 'dbf_mapping') || die "$database doesn't have 'dbf_mapping' defined!";
+
+ $import2cp = Text::Iconv->new($dbf_codepage,$codepage);
+ require XBase;
+ my $db = new XBase $dbf_file;
+
+ if (! $db) {
+ print STDERR "ERROR: can't read DBF database: $dbf_file, skipping...\n";
+ next;
+ }
+
+ my $max_rowid = $db->last_record;
+
+ print STDERR "Reading database: $dbf_file [$max_rowid rows]\n";
+
+ my %dbf2iso;
+ foreach my $m (split(/[\n\r]+/,$dbf_mapping)) {
+ my ($col,$fld) = split(/\s+/,$m,2);
+ $dbf2iso{$col} = $fld;
+ }
+
+#print STDERR "## dbf2iso: ",Dumper(\%dbf2iso),"\n## /dbf2iso\n";
+
+ # bad, bad...
+ require "to_hash.pm";
+
+ foreach my $row_id (0 .. $max_rowid) {
+ my $dbf_row = $db->get_record_as_hash($row_id);
+ if ($dbf_row) {
+
+#print STDERR "## dbf_row: ",Dumper($dbf_row),"\n## /dbf_row\n";
+ # apply mapping from config file
+ # all unspecified records will get _ in
+ # front of them - _DELETE will be __DELETE
+ my $rec;
+ map {
+ my $new_fld = $dbf2iso{$_} || '_'.$_;
+ my $data = $dbf_row->{$_};
+ push @{ $rec->{$new_fld} }, $data if ($data && $data !~ /^(?:\s+|\$a\.|)$/);
+ } keys %{$dbf_row};
+#print STDERR "## rec: ",Dumper($rec),"\n## /rec\n";
+ my $row = to_hash($row_id+1, $rec);
+
+ $row->{mfn} = $row_id+1;
+ $row->{record} = $rec;
+
+#print STDERR "## row: ",Dumper($row),"\n## /row\n";
+ progress($row->{mfn}, $max_rowid);
+
+ my $swishpath = $path."#".int($row->{mfn});
+
+ if (my $xml = data2xml($type_base,$row,$add_xml,$cfg,$database)) {
+ $xml = $cp2utf->convert($xml);
+ use bytes; # as opposed to chars
+ print "Path-Name: $swishpath\n";
+ print "Content-Length: ".(length($xml)+1)."\n";
+ print "Document-Type: XML\n\n$xml\n";
+ }
+ }
+ }
+ print STDERR "\n";
}
}
@@ -933,8 +1059,8 @@
=head1 DESCRIPTION
-This command will read ISIS data file using OpenIsis perl module, MARC
-records using MARC module and optionally Micro$oft Excel files to
+This command will read ISIS data file using Biblio::Isis perl module, MARC
+records using MARC::File module and optionally Micro$oft Excel files to
create one XML file for usage with I indexer. Dispite it's name,
this script B from isis files (isis allready
has something like that). Output of this script is tailor-made for SWISH-E.