--- trunk/all2xml.pl	2005/01/23 02:02:10	641
+++ trunk/all2xml.pl	2006/04/13 19:35:19	726
@@ -11,6 +11,7 @@
 #use GDBM_File;
 use Fcntl;	# for O_RDWR
 use TDB_File;
+use Carp;
 
 $|=1;
 
@@ -23,7 +24,7 @@
 
 #use index_DBI;		# default DBI module for index
 #use index_DBI_cache;	# faster DBI module using memory cache
-use index_DBI_tag;	# tag support for indexes
+use index_DBI_filter;	# filter support for indexes
 
 my $index;
 my %opts;
@@ -62,7 +63,7 @@
 	'isis' => 'isis',
 	'excel' => 'column',
 	'marc' => 'marc',
-	'feed' => 'feed'
+	'feed' => 'feed',
 );
 
 my $cache;	# for cacheing
@@ -112,6 +113,10 @@
 		$cache->{tags_by_order} = \@sorted_tags;
 	}
 
+	if (! @sorted_tags) {
+		print STDERR "WARNING: no tags for this type found in import_xml file!\n";
+	}
+
 	# lookup key
 	my $lookup_key;
 
@@ -220,7 +225,7 @@
 
 	my ($swish,$display);
 
-	my $tag = $type2tag{$type} || die "can't find which tag to use for type $type";
+	my $tag = $cfg->val($database, 'import_xml_tag') || $type2tag{$type} || die "can't find which tag to use for type $type";
 
 	# is this field page-by-page?
 	my $iterate_by_page = $config->{indexer}->{$field}->{iterate_by_page};
@@ -633,6 +638,10 @@
 
 foreach my $database ($cfg->Sections) {
 
+	# save database name in global variable path for later
+	# (need for index filter creation)
+	$path = $database;
+
 	my $type = lc($cfg -> val($database, 'type')) || die "$database doesn't have 'type' defined";
 	my $add_xml = $cfg -> val($database, 'xml');	# optional
 
@@ -658,13 +667,22 @@
 		print STDERR "opening lookup file '$lookup_file'\n";
 	}
 
-print STDERR "reading ./import_xml/$type.xml\n";
+	my $import_xml_type = $cfg->val($database, 'import_xml_file') || $type;
+	my $import_xml_file = "./import_xml/$import_xml_type.xml";
+
+	if (! -r $import_xml_file) {
+		print STDERR "ERROR: file $import_xml_file not readable skipping!\n";
+		next;
+	}
+
+	print STDERR "reading $import_xml_file\n";
 
 	# extract just type basic
 	my $type_base = $type;
 	$type_base =~ s/_.+$//g;
 
-	$config=XMLin("./import_xml/$type.xml", ForceArray => [ $type2tag{$type_base}, 'config', 'format' ], ForceContent => 1 );
+	my $tag = $cfg->val($database, 'import_xml_tag') || $type2tag{$type_base} || die "can't find which tag to use for type $type";
+	$config=XMLin($import_xml_file, ForceArray => [ $tag, 'config', 'format' ], ForceContent => 1 );
 
 	# helper for progress bar
 	sub fmt_time {
@@ -726,7 +744,6 @@
 	}
 
 	# now read database
-print STDERR "using: $type...\n";
 
 	# erase cache for tags by order in this database
 	delete $cache->{tags_by_order};
@@ -738,11 +755,14 @@
 		$import2cp = Text::Iconv->new($config->{isis_codepage},$codepage);
 		my $db = new Biblio::Isis( isisdb => $isis_db );
 
-		my $max_rowid = $db->count || die "can't find maxmfn";
+		if (! $db) {
+			print STDERR "FATAL: can't read ISIS database: $isis_db, skipping...\n";
+			next;
+		}
 
-		print STDERR "Reading database: $isis_db [$max_rowid rows]\n";
+		my $max_rowid = $db->count if ($db);
 
-		$path = $database;
+		print STDERR "Reading database: $isis_db [$max_rowid rows]\n";
 
 		for (my $row_id = 1; $row_id <= $max_rowid; $row_id++ ) {
 			my $row = $db->to_hash( $row_id );
@@ -775,7 +795,8 @@
 
 		my $excel_file = $cfg -> val($database, 'excel_file') || die "$database doesn't have 'excel_file' defined!";
 		my $sheet = x($config->{sheet}) || die "no sheet in $type.xml";
-		my $start_row = x($config->{start_row}) - 1 || die "no start_row in $type.xml";
+		my $start_row = x($config->{start_row}) || die "no start_row in $type.xml";
+		$start_row--;
 
 		my $oBook = Spreadsheet::ParseExcel::Workbook->Parse($excel_file) || die "can't open Excel file '$excel_file'";
 
@@ -825,6 +846,9 @@
 				print "Document-Type: XML\n\n$xml\n";
 			}
 		}
+
+		print STDERR "\n";
+
 	} elsif ($type_base eq "marc") {
 
 		require MARC::File::USMARC;
@@ -836,13 +860,17 @@
 		warn "marc_format is no longer used!" if ($config->{marc_format});
 
 		print STDERR "Reading MARC file '$marc_file'\n";
-		my $marc = MARC::File::USMARC->in( $marc_file )
-			|| die "Can't open MARC file '$marc_file': ".$MARC::File::ERROR;
+		my $marc = MARC::File::USMARC->in( $marc_file );
+
+		if (! $marc) {
+			print STDERR "FATAL: can't read MARC file: $marc_file, skipping...\n";
+			next;
+		}
 
 		# count records in MARC file
 		sub marc_count {
			my $filename = shift || die;
-			my $file = MARC::File::USMARC->in($filename) || die $MARC::File::ERROR;
+			my $file = MARC::File::USMARC->in($filename) || return;
 			my $count = 0;
 			while ($file->skip()) {
 				$count++;
@@ -852,11 +880,11 @@
 
 		my $count = marc_count($marc_file) || warn "no records in '$marc_file'?";
 
-		my $i = 0;
+		my $i = 1;
 
 		while( my $rec = $marc->next() ) {
 
-			progress($i++,$count);
+			progress($i,$count);
 
 			my $swishpath = $database."#".$i;
 
@@ -867,6 +895,8 @@
 				print "Content-Length: ".(length($xml)+1)."\n";
 				print "Document-Type: XML\n\n$xml\n";
 			}
+
+			$i++;
 		}
 
 		print STDERR "\n";
@@ -911,6 +941,72 @@
 		}
 		# close lookup
 		untie %lhash if (%lhash);
+
+	} elsif ($type_base eq "dbf") {
+
+		my $dbf_file = $cfg -> val($database, 'dbf_file') || die "$database doesn't have 'dbf_file' defined!";
+		my $dbf_codepage = $cfg -> val($database, 'dbf_codepage') || die "$database doesn't have 'dbf_codepage' defined!";
+		my $dbf_mapping = $cfg -> val($database, 'dbf_mapping') || die "$database doesn't have 'dbf_mapping' defined!";
+
+		$import2cp = Text::Iconv->new($dbf_codepage,$codepage);
+		require XBase;
+		my $db = new XBase $dbf_file;
+
+		if (! $db) {
+			print STDERR "ERROR: can't read DBF database: $dbf_file, skipping...\n";
+			next;
+		}
+
+		my $max_rowid = $db->last_record;
+
+		print STDERR "Reading database: $dbf_file [$max_rowid rows]\n";
+
+		my %dbf2iso;
+		foreach my $m (split(/[\n\r]+/,$dbf_mapping)) {
+			my ($col,$fld) = split(/\s+/,$m,2);
+			$dbf2iso{$col} = $fld;
+		}
+
+#print STDERR "## dbf2iso: ",Dumper(\%dbf2iso),"\n## /dbf2iso\n";
+
+		# bad, bad...
+		require "to_hash.pm";
+
+		foreach my $row_id (0 .. $max_rowid) {
+			my $dbf_row = $db->get_record_as_hash($row_id);
+			if ($dbf_row) {
+
+#print STDERR "## dbf_row: ",Dumper($dbf_row),"\n## /dbf_row\n";
+				# apply mapping from config file
+				# all unspecified records will get _ in
+				# front of them - _DELETE will be __DELETE
+				my $rec;
+				map {
+					my $new_fld = $dbf2iso{$_} || '_'.$_;
+					my $data = $dbf_row->{$_};
+					push @{ $rec->{$new_fld} }, $data if ($data && $data !~ /^(?:\s+|\$a\.|)$/);
+				} keys %{$dbf_row};
+#print STDERR "## rec: ",Dumper($rec),"\n## /rec\n";
+				my $row = to_hash($row_id+1, $rec);
+
+				$row->{mfn} = $row_id+1;
+				$row->{record} = $rec;
+
+#print STDERR "## row: ",Dumper($row),"\n## /row\n";
+				progress($row->{mfn}, $max_rowid);
+
+				my $swishpath = $path."#".int($row->{mfn});
+
+				if (my $xml = data2xml($type_base,$row,$add_xml,$cfg,$database)) {
+					$xml = $cp2utf->convert($xml);
+					use bytes;	# as opposed to chars
+					print "Path-Name: $swishpath\n";
+					print "Content-Length: ".(length($xml)+1)."\n";
+					print "Document-Type: XML\n\n$xml\n";
+				}
+			}
+		}
+		print STDERR "\n";
 	}
 }
 
@@ -931,8 +1027,8 @@
 
 =head1 DESCRIPTION
 
-This command will read ISIS data file using IsisDB perl module, MARC
-records using MARC module and optionally Micro$oft Excel files to
+This command will read ISIS data file using Biblio::Isis perl module, MARC
+records using MARC::File module and optionally Micro$oft Excel files to
 create one XML file for usage with I indexer. Despite its name, this script
 B from isis files (isis already has something like that). Output of this
 script is tailor-made for SWISH-E.
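
As a footnote to the new "dbf" branch above: the dbf_mapping handling can be exercised on its own with the minimal standalone sketch below. The column names (AUTHOR, TITLE, DELETED) and field numbers (700, 200) are hypothetical placeholders, not taken from any real configuration; only the split and '_'-prefix behaviour mirrors the code added in this revision.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Data::Dumper;

    # hypothetical dbf_mapping value as it would appear in the config file:
    # one "COLUMN field" pair per line
    my $dbf_mapping = "AUTHOR\t700\nTITLE\t200\n";

    # build column -> field lookup, same split logic as the dbf branch
    my %dbf2iso;
    foreach my $m (split(/[\n\r]+/, $dbf_mapping)) {
        my ($col, $fld) = split(/\s+/, $m, 2);
        $dbf2iso{$col} = $fld;
    }

    # a made-up row such as XBase's get_record_as_hash might return
    my $dbf_row = {
        AUTHOR  => 'Smith, John',
        TITLE   => 'An example record',
        DELETED => '',
    };

    # unmapped columns keep their name with a '_' prefix;
    # empty, whitespace-only or '$a.' placeholder values are skipped
    my $rec;
    foreach my $col (keys %{$dbf_row}) {
        my $new_fld = $dbf2iso{$col} || '_' . $col;
        my $data    = $dbf_row->{$col};
        push @{ $rec->{$new_fld} }, $data
            if ($data && $data !~ /^(?:\s+|\$a\.|)$/);
    }

    print Dumper($rec);
    # expected content: { 700 => ['Smith, John'], 200 => ['An example record'] }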