I am trying to code a trigger in PL/SQL that should show different messages when an error happens. The problem is when it should show more than one message.
This is my code:
create or replace trigger Restrictionns
BEFORE INSERT ON numbers FOR EACH ROW
DECLARE
v_message varchar2(500);
v_error boolean;
v_type number;
BEGIN
v_message := '';
v_error := false;
v_type := -20000;
IF LENGTH(:new.number) > 6 THEN
v_message := v_message || '{The number length can''t be bigger than 6}';
v_error := true;
v_type := -20001;
END IF;
IF :new.number > 9 or :new.number< 0 THEN
v_message := v_message || '{The number must be between 0 and 9}';
v_error := true;
v_type := -20002;
END IF;
IF :new.number < 0 THEN
v_message := v_message || '{The number can''t be negative}';
v_error := true;
v_type := -20003;
END IF;
IF v_error = true THEN
RAISE_APPLICATION_ERROR(v_type, v_message);
END IF;
END;
/
For example, if I INSERT a negative number it should show two messages, errors -20002 and -20003... but it only shows the last one. What is the way to show all error messages when they happen?
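In case it is useful to later readers: RAISE_APPLICATION_ERROR can only raise one error number per call, so a single raise cannot report both -20002 and -20003. A common workaround, shown below as a minimal sketch that reuses the column and message text from the question (so treat those names as assumptions), is to keep a single error code and accumulate every failed check into the message text:
create or replace trigger Restrictionns
BEFORE INSERT ON numbers FOR EACH ROW
DECLARE
  v_message varchar2(500) := '';
BEGIN
  -- collect every violated rule instead of overwriting a single error code
  IF LENGTH(:new.number) > 6 THEN
    v_message := v_message || '{The number length can''t be bigger than 6}';
  END IF;
  IF :new.number > 9 OR :new.number < 0 THEN
    v_message := v_message || '{The number must be between 0 and 9}';
  END IF;
  IF :new.number < 0 THEN
    v_message := v_message || '{The number can''t be negative}';
  END IF;
  IF v_message IS NOT NULL THEN
    -- one call carries all of the accumulated messages
    RAISE_APPLICATION_ERROR(-20000, v_message);
  END IF;
END;
/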
I am trying to get data from an SQLite database table, but I cannot get more than 50 rows. Is there a limitation of 50 rows?
My code looks like this:
unit Unit1;
interface
uses
FireDAC.Stan.Def, FireDAC.DApt, FireDAC.Phys.SQLite, FireDAC.VCLUI.Wait, FireDAC.Comp.Client, FireDAC.Stan.Async;
type
TRaportas = record
Pradzia: TDateTime;
Pabaiga: TDateTime;
Trukme: Integer;
idPriezastis: Integer;
Priezastis: string;
idVieta: Integer;
Vieta: string;
Komentaras: string;
end;
procedure TForm1.btnRaportasClick(Sender: TObject);
var
sqlConn: TFDConnection;
query: TFDQuery;
prastovuRec: array of TRaportas;
i: Integer;
begin
dbVieta := edt2.Text;
sqlConn := TFDConnection.Create(nil);
//sqlConn.Connected := False;
sqlConn.DriverName := 'SQLITE';
sqlConn.Params.Values['DataBase'] := dbVieta;
query := TFDQuery.Create(nil);
query.Connection := sqlConn;
query.SQL.Text := 'SELECT * FROM Prastovos WHERE ID >= :_ID';
query.ParamByName('_ID').Value := StrToIntDef(edt3.Text, 656);
sqlConn.Open();
query.Open();
SetLength(prastovuRec, query.RowsAffected);
edt4.Text := IntToStr(query.RowsAffected);
for i := 0 to query.RowsAffected - 1 do
begin
with mRaportas do
begin
Pradzia := query.FieldByName('Pradzia').AsDateTime;
Pabaiga := query.FieldByName('Pabaiga').AsDateTime;
Trukme := query.FieldByName('Trukme').AsInteger;
idPriezastis := query.FieldByName('IDpriezastis').AsInteger;
Priezastis := query.FieldByName('Priezastis').AsString;
idVieta := query.FieldByName('IDvieta').AsInteger;
Vieta := query.FieldByName('Vieta').AsString;
Komentaras := query.FieldByName('Komentaras').AsString;
end;
prastovuRec[i] := mRaportas;
query.Next;
end;
query.Close;
query.DisposeOf;
sqlConn.Close;
sqlConn.Free;
end;
There are a lot of mistakes and misunderstandings in your code. To keep this simple, I'm just going to fix your code to make it work; study the differences yourself.
unit Unit1;
interface
uses
FireDAC.Stan.Def, FireDAC.DApt, FireDAC.Phys.SQLite, FireDAC.VCLUI.Wait, FireDAC.Comp.Client,
FireDAC.Stan.Async, FireDAC.Stan.Option;
type
TRaportas = record
Pradzia: TDateTime;
Pabaiga: TDateTime;
Trukme: Integer;
idPriezastis: Integer;
Priezastis: string;
idVieta: Integer;
Vieta: string;
Komentaras: string;
end;
var
prastovuRec: array of TRaportas;
procedure TForm1.Button7Click(Sender: TObject);
var
sqlConn: TFDConnection;
query: TFDQuery;
mRaportas: TRaportas;
i: Integer;
begin
sqlConn := TFDConnection.Create(nil);
query := TFDQuery.Create(nil);
try
sqlConn.DriverName := 'SQLITE';
sqlConn.Params.Values['DataBase'] := edt2.Text;
query.Connection := sqlConn;
query.FetchOptions.Mode := fmAll; // essential if you want to use RecordCount
query.SQL.Text := 'SELECT * FROM Prastovos WHERE ID >= :_ID';
query.ParamByName('_ID').Value := StrToIntDef(edt3.Text, 656);
query.Open();
edt4.Text := IntToStr(query.RecordCount);
SetLength(prastovuRec, query.RecordCount);
i := 0;
while not query.Eof do
begin
mRaportas := Default(TRaportas); // not necessary if you assign all record fields
mRaportas.Pradzia := query.FieldByName('Pradzia').AsDateTime;
mRaportas.Pabaiga := query.FieldByName('Pabaiga').AsDateTime;
mRaportas.Trukme := query.FieldByName('Trukme').AsInteger;
mRaportas.idPriezastis := query.FieldByName('IDpriezastis').AsInteger;
mRaportas.Priezastis := query.FieldByName('Priezastis').AsString;
mRaportas.idVieta := query.FieldByName('IDvieta').AsInteger;
mRaportas.Vieta := query.FieldByName('Vieta').AsString;
mRaportas.Komentaras := query.FieldByName('Komentaras').AsString;
prastovuRec[i] := mRaportas;
Inc(i);
query.Next;
end;
query.Close;
finally
query.Free;
sqlConn.Free;
end;
end;
No, there is no special limit.
But you have a WHERE clause in your query, and that is likely what limits the result set. Check the value you use.
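If you want to check that value, you can run the same filter as a COUNT(*) and compare the result with what the query fetched. Below is a rough sketch that reuses the table, connection and edit-box names from the question (treat those as assumptions); CheckMatchingRows is just a made-up helper name:
procedure TForm1.CheckMatchingRows(AConnection: TFDConnection);
var
  countQuery: TFDQuery;
begin
  countQuery := TFDQuery.Create(nil);
  try
    countQuery.Connection := AConnection;
    // same filter as the report query, but only counting the rows
    countQuery.SQL.Text := 'SELECT COUNT(*) AS CNT FROM Prastovos WHERE ID >= :_ID';
    countQuery.ParamByName('_ID').Value := StrToIntDef(edt3.Text, 656);
    countQuery.Open;
    ShowMessage(Format('Rows matching ID >= %s: %d',
      [edt3.Text, countQuery.FieldByName('CNT').AsInteger]));
    countQuery.Close;
  finally
    countQuery.Free;
  end;
end;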
I am attempting to pick apart a DBF file using the code in THIS AskTom post, but I have no clue where I would even go to figure out how the files I want to pick apart are formatted. How was the original AskTom answer even produced? How do I figure out the DBF header? How do I know where within the DBF file the data is stored, or even how to pull out that data?
My goal is to work with the code provided and come up with a solution as others have done, but I'm stuck at the very first part.
While researching this I found that there are many systems that use DBF files. Access and MS Excel have the ability to import DBF files, but none of those features worked when I attempted to import the DBF files from my Oracle Express installation. In Access there are options to choose from several different DBF file formats, including dBASE III, dBASE IV, dBASE 5, and dBASE 7. I have no idea if Oracle's files are even in any of these formats and wish I knew how to find out. FoxPro appears to be the main user of DBF files among the formats I listed.
The code in that AskTom link is very long and isn't necessarily relevant to this question, but just so you know where I'm coming from I've included it anyway. I can run the code but it just doesn't work. I'd love to fix it, but I would need knowledge of Oracle's DBF files, or perhaps there is something else I am missing?
How to Read a DBase File in Java or Oracle PL/SQL
What is the best opensource dbf driver for java?
create or replace package dbase_fox as
-- procedure to a load a table with records
-- from a DBASE file.
--
-- Uses a BFILE to read binary data and dbms_sql
-- to dynamically insert into any table you
-- have insert on.
--
-- p_filename is the name of a file in that directory
-- will be the name of the DBASE file
-- p_colnames is an optional list of comma separated
-- column names. If not supplied, this pkg
-- assumes the column names in the DBASE file
-- are the same as the column names in the
-- table
-- p_rownum boolean that activates an autonum
-- functionality such that a sequential
-- numbered virtual column is created
/* EXAMPLE USAGE
| BEGIN
|
| dbase_fox.showtable(p_filename => 'CONTROL'
| , p_colnames => null
| , p_rownum => false);
|
| END;
*/
procedure loadtable(p_filename in varchar2
, p_colnames in varchar2 default null
, p_rownum in boolean default false);
-- procedure to print (and not insert) what we find in
-- the DBASE files (not the data, just the info
-- from the dbase headers....)
--
-- p_filename is the name of a file in that directory
-- will be the name of the DBASE file
-- p_colnames is an optional list of comma separated
-- column names. If not supplied, this pkg
-- assumes the column names in the DBASE file
-- are the same as the column names in the
-- table
-- p_rownum boolean that activates an autonum
-- functionality such that a sequential
-- numbered virtual column is created
procedure showtable(p_filename in varchar2
, p_colnames in varchar2 default null
, p_rownum in boolean default false);
end;
/
/* Package BODY */
create or replace package body dbase_fox as
PREFIX constant varchar2(32) default 'stage_';
CR constant varchar(2) default chr(13)||chr(10);
MEMODTYPE constant varchar2(32) default 'varchar2(4000)';
ROWNUMNAME constant varchar2(32) default '"ROWNUM"';
FRAMESIZE constant integer default 1000;
addrownum boolean := false;
colnames varchar2(255) := '';
filename varchar2(32) := '';
dbfbfile bfile := null;
fptbfile bfile := null;
DBF_HEADER_SIZE constant number default 32;
type dbf_header_type is record (
version varchar2(25) -- dBASE version number
,year int -- 1 byte int year, add to 1900
,month int -- 1 byte month
,day int -- 1 byte day
,no_records int -- number of records in file, 4 byte int
,hdr_len int -- length of header, 2 byte int
,rec_len int -- number of bytes in record, 2 byte int
,no_fields int -- number of fields
);
dbf_header dbf_header_type := null;
subtype dbf_header_data is raw(32);
DBF_FIELD_DESCRIPTOR_SIZE constant number default 32;
type dbf_field_descriptor_type is record (
name varchar2(11)
,type char(1)
,length int -- 1 byte length
,decimals int -- 1 byte scale
);
type dbf_field_descriptor_array is table of dbf_field_descriptor_type index by binary_integer;
subtype dbf_field_descriptor_data is raw(32);
dbf_field_descriptor dbf_field_descriptor_array;
type rowarray_type is table of dbms_sql.varchar2_table index by binary_integer;
rowarray rowarray_type;
subtype raw_type is raw(4000);
type rawarray_type is table of raw_type index by binary_integer;
rawarray rawarray_type;
loadcursor binary_integer;
mblocksize number := 0;
procedure get_header is
l_data dbf_header_data;
begin
l_data := dbms_lob.substr(dbfbfile, DBF_HEADER_SIZE, 1);
dbf_header.version := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 1, 1));
dbf_header.year := 1900 + utl_raw.cast_to_binary_integer(utl_raw.substr( l_data, 2, 1));
dbf_header.month := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 3, 1));
dbf_header.day := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 4, 1));
dbf_header.no_records := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 5, 4),2);
dbf_header.hdr_len := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 9, 2),2);
dbf_header.rec_len := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 11, 2),2);
dbf_header.no_fields := trunc((dbf_header.hdr_len - DBF_HEADER_SIZE) / DBF_FIELD_DESCRIPTOR_SIZE);
end;
procedure get_header_fields is
l_data dbf_field_descriptor_data;
begin
for i in 1 .. dbf_header.no_fields loop
l_data := dbms_lob.substr(dbfbfile, DBF_FIELD_DESCRIPTOR_SIZE, 1+DBF_HEADER_SIZE+(i-1)*DBF_FIELD_DESCRIPTOR_SIZE); -- starting past the header
dbf_field_descriptor(i).name := rtrim(utl_raw.cast_to_varchar2(utl_raw.substr(l_data,1,11)),chr(0));
dbf_field_descriptor(i).type := utl_raw.cast_to_varchar2(utl_raw.substr(l_data, 12, 1));
dbf_field_descriptor(i).length := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data, 17, 1));
dbf_field_descriptor(i).decimals := utl_raw.cast_to_binary_integer(utl_raw.substr(l_data,18,1));
end loop;
end;
procedure show_field_header_columns is
begin
dbms_output.put_line(CR||'Num'
||chr(9)||'Name '
||chr(9)||'Type'
||chr(9)||'Length'
||chr(9)||'Decimals');
dbms_output.put_line('==='
||chr(9)||'==== '
||chr(9)||'===='
||chr(9)||'======'
||chr(9)||'========');
end;
procedure show_header(p_file_length in integer) is
begin
dbms_output.put_line(chr(9)||dbf_header.version
||chr(9)||dbf_header.year
||chr(9)||dbf_header.month
||chr(9)||dbf_header.day
||chr(9)||dbf_header.no_records
||chr(9)||dbf_header.hdr_len
||chr(9)||dbf_header.rec_len
||chr(9)||dbf_header.no_fields
||chr(9)||p_file_length);
end;
procedure show_fields is
begin
for i in dbf_field_descriptor.first .. dbf_field_descriptor.last loop
dbms_output.put_line(i
||chr(9)||rpad(dbf_field_descriptor(i).name,11,' ')
||chr(9)||dbf_field_descriptor(i).type
||chr(9)||dbf_field_descriptor(i).length
||chr(9)||dbf_field_descriptor(i).decimals);
end loop;
end;
function build_insert return varchar2 is
l_statement long;
begin
l_statement := 'insert into ' || PREFIX || filename || ' (';
if colnames is not null then
l_statement := l_statement || colnames;
else
for i in dbf_field_descriptor.first .. dbf_field_descriptor.last loop
if i <> 1 then
l_statement := l_statement || ',';
end if;
l_statement := l_statement || '"'|| dbf_field_descriptor(i).name || '"';
end loop;
if addrownum then
l_statement := l_statement || ',' || ROWNUMNAME;
end if;
end if;
l_statement := l_statement || ') values (';
for i in dbf_field_descriptor.first .. dbf_field_descriptor.last loop
if i <> 1 then
l_statement := l_statement || ',';
end if;
if dbf_field_descriptor(i).type = 'D' then
l_statement := l_statement || 'to_date(:bv' || i || ',''yyyymmdd'' )';
else
l_statement := l_statement || ':bv' || i;
end if;
end loop;
if addrownum then
l_statement := l_statement || ',:bv' || (dbf_field_descriptor.last + 1);
end if;
l_statement := l_statement || ')';
return l_statement;
end;
function build_create return varchar2 is
l_statement long;
begin
l_statement := 'create table ' || PREFIX || filename || ' (';
for i in dbf_field_descriptor.first .. dbf_field_descriptor.last loop
l_statement := l_statement || CR;
if i <> dbf_field_descriptor.first then
l_statement := l_statement ||',';
else
l_statement := l_statement ||' ';
end if;
l_statement := l_statement || '"'|| dbf_field_descriptor(i).name || '"'||chr(9);
if dbf_field_descriptor(i).type = 'D' then
l_statement := l_statement || 'date';
elsif dbf_field_descriptor(i).type = 'F' then
l_statement := l_statement || 'float';
elsif dbf_field_descriptor(i).type = 'N' then
if dbf_field_descriptor(i).decimals > 0 then
l_statement := l_statement || 'number('||dbf_field_descriptor(i).length||','|| dbf_field_descriptor(i).decimals || ')';
else
l_statement := l_statement || 'number('||dbf_field_descriptor(i).length||')';
end if;
elsif dbf_field_descriptor(i).type = 'M' then
l_statement := l_statement || MEMODTYPE;
else
l_statement := l_statement || 'varchar2(' || dbf_field_descriptor(i).length || ')';
end if;
end loop;
if addrownum then
l_statement := l_statement || CR || ',' || ROWNUMNAME || chr(9) || 'number';
end if;
l_statement := l_statement ||CR||');'||CR||'/';
return l_statement;
end;
procedure show_header_columns is
begin
dbms_output.put_line(CR||'DBASE File'
||chr(9)||'Version'
||chr(9)||'Year'
||chr(9)||'Month'
||chr(9)||'Day'
||chr(9)||'#Recs'
||chr(9)||'HdrLen'
||chr(9)||'RecLen'
||chr(9)||'#Fields'
||chr(9)||'Size');
dbms_output.put_line('=========='
||chr(9)||'======='
||chr(9)||'===='
||chr(9)||'====='
||chr(9)||'==='
||chr(9)||'====='
||chr(9)||'======'
||chr(9)||'======'
||chr(9)||'======='
||chr(9)||'====');
end;
procedure loadtablerecord(i in number) is
l_n number;
l_fblock number;
l_data raw_type;
begin
l_data := dbms_lob.substr(dbfbfile,dbf_header.rec_len,2+DBF_HEADER_SIZE+dbf_header.no_fields*DBF_FIELD_DESCRIPTOR_SIZE+(i-1)*dbf_header.rec_len); -- starting past the header and field descriptors
rawarray(0) := utl_raw.substr(l_data, 1, 1);
l_n := 2;
for j in 1 .. dbf_header.no_fields loop
rawarray(j) := utl_raw.substr(l_data,l_n,dbf_field_descriptor(j).length);
if dbf_field_descriptor(j).type = 'F' and rawarray(j) = '.' then
rawarray(j) := null;
elsif dbf_field_descriptor(j).type = 'M' then
if dbms_lob.isopen(fptbfile) != 0 then
l_fblock := nvl(utl_raw.cast_to_binary_integer(dbms_lob.substr(fptbfile, 4, to_number(trim(utl_raw.cast_to_varchar2(rawarray(j))))*mblocksize+5)),0);
rawarray(j) := dbms_lob.substr(fptbfile, l_fblock, to_number(trim(utl_raw.cast_to_varchar2(rawarray(j))))*mblocksize+9);
else
dbms_output.put_line(filename || '.fpt not found');
end if;
end if;
l_n := l_n + dbf_field_descriptor(j).length;
end loop;
end;
procedure loadtablearray(p_cntarr in int) is
l_bulkcnt number;
begin
for j in 1 .. dbf_header.no_fields loop
dbms_sql.bind_array(loadcursor, ':bv'||j, rowarray(j),1,p_cntarr);
end loop;
if addrownum then
dbms_sql.bind_array(loadcursor, ':bv'||(dbf_header.no_fields+1), rowarray(dbf_header.no_fields+1),1,p_cntarr);
end if;
begin
l_bulkcnt := dbms_sql.execute(loadcursor);
--dbms_output.put_line('Bulk insert count ' || l_bulkcnt);
exception
when others then
dbms_output.put_line('Bulk insert failed ' || sqlerrm);
dbms_output.put_line(build_insert);
end;
end;
procedure loadtablebulk is
l_cntrow int default 0;
l_cntarr int default 0;
begin
loadcursor := dbms_sql.open_cursor;
dbms_sql.parse(loadcursor, build_insert, dbms_sql.native);
for i in 1 .. dbf_header.no_records loop
loadtablerecord(i);
if utl_raw.cast_to_varchar2(rawarray(0)) <> '*' then
l_cntarr := l_cntarr + 1;
for j in 1 .. dbf_header.no_fields loop
rowarray(j)(l_cntarr) := trim(utl_raw.cast_to_varchar2(rawarray(j)));
end loop;
if addrownum then
l_cntrow := l_cntrow + 1;
rowarray((dbf_header.no_fields+1))(l_cntarr) := l_cntrow;
end if;
if l_cntarr >= FRAMESIZE then
loadtablearray(l_cntarr);
l_cntarr := 0;
end if;
end if;
end loop;
if l_cntarr > 0 then
loadtablearray(l_cntarr);
end if;
dbms_sql.close_cursor(loadcursor);
exception
when others then
if dbms_sql.is_open(loadcursor) then
dbms_sql.close_cursor(loadcursor);
end if;
dbms_output.put_line('loadtable failed for ' || filename);
dbms_output.put_line('insert ' || build_insert);
end;
procedure open_dbf is
begin
dbfbfile := bfilename('FILE_GET_DIR', filename || '.dbf');
dbms_lob.fileopen(dbfbfile);
end;
procedure open_fpt is
begin
fptbfile := bfilename('FILE_GET_DIR', filename || '.fpt');
if dbms_lob.fileexists(fptbfile) != 0 then
dbms_lob.fileopen(fptbfile);
end if;
end;
procedure close_dbf is
begin
if dbms_lob.isopen(dbfbfile) > 0 then
dbms_lob.fileclose(dbfbfile);
end if;
end;
procedure close_fpt is
begin
if dbms_lob.isopen(fptbfile) > 0 then
dbms_lob.fileclose(fptbfile);
end if;
end;
procedure initialize is
l_empty_dbf_field_descriptor dbf_field_descriptor_array;
l_empty_rowarray rowarray_type;
l_empty_rawarray rawarray_type;
begin
dbfbfile := null;
fptbfile := null;
dbf_field_descriptor := l_empty_dbf_field_descriptor;
dbf_header := null;
rowarray := l_empty_rowarray;
rawarray := l_empty_rawarray;
loadcursor := 0;
mblocksize := 0;
end;
procedure showtable(p_filename in varchar2, p_colnames in varchar2 default null, p_rownum in boolean default false) is
errorAtLine NUMBER := 0;
begin
filename := p_filename;
addrownum := p_rownum;
colnames := p_colnames;
initialize;
errorAtLine := 1;
open_dbf;
errorAtLine := 2;
get_header;
errorAtLine := 3;
get_header_fields;
errorAtLine := 4;
show_header_columns;
errorAtLine := 5;
dbms_output.put(filename || '.dbf');
errorAtLine := 6;
show_header(dbms_lob.getlength(dbfbfile));
errorAtLine := 7;
show_field_header_columns;
errorAtLine := 8;
show_fields;
errorAtLine := 9;
dbms_output.put_line(CR||'Insert statement:');
dbms_output.put_line(build_insert);
dbms_output.put_line(CR||'Create statement:');
dbms_output.put_line(build_create);
close_dbf;
exception
when others then
close_dbf;
dbms_output.put_line('Error At: ' || errorAtLine);
raise;
end;
procedure loadtable(p_filename in varchar2, p_colnames in varchar2 default null, p_rownum in boolean default false) is
begin
filename := p_filename;
addrownum := p_rownum;
colnames := p_colnames;
initialize;
open_dbf;
open_fpt;
if dbms_lob.isopen(fptbfile) > 0 then
mblocksize := utl_raw.cast_to_binary_integer(dbms_lob.substr(fptbfile, 2, 7));
else
mblocksize := 0;
end if;
get_header;
get_header_fields;
loadtablebulk;
close_dbf;
close_fpt;
exception
when others then
close_dbf;
close_fpt;
raise;
end;
end;
/
I had a similar problem and this is how I did it.
TL;DR: You'll need to use Apache Tika to parse DBase files. It converts the content into an XHTML table and returns it as a java.lang.String, which you can parse via a DOM or a SAX parser to get the data in the format you need. Here are some examples: https://tika.apache.org/1.20/examples.html
To start, add the following Maven dependency to your POM:
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
<version>1.21</version>
</dependency>
Then initialize the parser:
Parser parser = new DBFParser(); //Alternatively, you can use AutoDetectParser
ContentHandler handler = new BodyContentHandler(new ToXMLContentHandler()); // This tells the parser to produce XHTML as the output.
parser.parse(dbaseInputStream, handler, new Metadata(), new ParseContext()); // Here, dbaseInputStream is a FileInputStream object for the DBase file.
String dbaseAsXhtml = handler.toString(); //This will have the content in XHTML format
Now, to convert the data into a more convenient format (in this case CSV), I did the following:
First, convert the whole String into a DOM object:
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document xhtmlDoc = builder.parse(new InputSource(new StringReader(dbaseAsXhtml.trim().replaceAll("\t", "")))); // I'm trimming out the tabs and whitespace here so that I don't have to deal with them later
Now, to get the headers:
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList tableHeader = (NodeList)xPath.evaluate("//table/thead/th", xhtmlDoc, XPathConstants.NODESET);
String [] headers = new String[tableHeader.getLength()];
for(int i = 0; i < tableHeader.getLength(); i++) {
headers[i] = tableHeader.item(i).getTextContent();
}
Then the records:
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList tableRecords = (NodeList)xPath.evaluate("//table/tbody/tr", xhtmlDoc, XPathConstants.NODESET);
List<String[]> records = new ArrayList<String[]>(tableRecords.getLength());
for(int i = 0; i < tableRecords.getLength(); i++) {
NodeList recordNodes = tableRecords.item(i).getChildNodes();
String[] record = new String[recordNodes.getLength()];
for(int j = 0; j < recordNodes.getLength(); j++)
record[j] = recordNodes.item(j).getTextContent();
records.add(record);
}
Finally, we put them together to form a CSV:
StringBuilder dbaseCsvStringBuilder = new StringBuilder(String.join(",", headers) + "\n");
for(String[] record : records)
dbaseCsvStringBuilder.append(String.join(",", record) + "\n");
String csvString = dbaseCsvStringBuilder.toString();
Here's the complete source code: https://github.com/Debojit/DbaseTranslater/blob/master/src/main/java/nom/side/poc/file/dbf/DbaseReader.java
I have two columns in a table whose datatypes are CLOB and BLOB. I insert CLOB data into the CLOB column and then insert it into the BLOB column by decoding the CLOB in Oracle, using the following code:
function decode_base64(p_clob_in in clob) return blob is
v_blob blob;
v_result blob;
v_offset integer;
v_buffer_size binary_integer := 48;
v_buffer_varchar varchar2(48);
v_buffer_raw raw(48);
begin
if p_clob_in is null then
return null;
end if;
dbms_lob.createtemporary(v_blob, true);
v_offset := 1;
for i in 1 .. ceil(dbms_lob.getlength(p_clob_in) / v_buffer_size) loop
dbms_lob.read(p_clob_in, v_buffer_size, v_offset, v_buffer_varchar);
v_buffer_raw := utl_raw.cast_to_raw(v_buffer_varchar);
v_buffer_raw := utl_encode.base64_decode(v_buffer_raw);
dbms_lob.writeappend(v_blob, utl_raw.length(v_buffer_raw), v_buffer_raw);
v_offset := v_offset + v_buffer_size;
end loop;
v_result := v_blob;
dbms_lob.freetemporary(v_blob);
return v_result;
end decode_base64;
Then I get this BLOB data in ASP.NET with the following code:
strSQL ="SELECT BIO_DATA FingerData , DATA_LENGTH len_of_data , SERIAL_NO sl_no FROM FP_BIOMETRIC_DATA WHERE CUST_NO =" & trim(Request("name")) & " "
Set objExec = Conn.Execute(strSQL)
fingerData1 = objExec("FingerData")
Then I am encoding this data into Base64 with the following code:
Function Base64Encode(sText)
Dim oXML, oNode
Set oXML = CreateObject("Msxml2.DOMDocument.3.0")
Set oNode = oXML.CreateElement("base64")
oNode.dataType = "bin.base64"
oNode.nodeTypedValue =sText
Base64Encode = oNode.text
Set oNode = Nothing
Set oXML = Nothing
End Function
Then I am trying to compare this data and the CLOB data in the Oracle database using this website. The website says that the two pieces of data are different. Why? Where is the error? How can I get BLOB data by decoding CLOB data in Oracle?
I think the problem is at this line
dbms_lob.read(p_clob_in, v_buffer_size, v_offset, v_buffer_varchar);
v_buffer_size is fixed at 48 characters; however, your Base64 string may contain newline characters, which are ignored for decoding but are counted toward v_buffer_size.
You have to remove all newline characters before you read your buffer, or increase the value of v_buffer_size by the number of newline characters in the substring.
Try this one:
CREATE OR REPLACE FUNCTION DecodeBASE64(InBase64Char IN OUT NOCOPY CLOB) RETURN BLOB IS
res BLOB;
clob_trim CLOB;
dest_offset INTEGER := 1;
src_offset INTEGER := 1;
read_offset INTEGER := 1;
ClobLen INTEGER;
amount INTEGER := 1440; -- must be a whole multiple of 4
buffer RAW(1440);
stringBuffer VARCHAR2(1440);
BEGIN
IF DBMS_LOB.GETLENGTH(InBase64Char) IS NULL THEN
RETURN NULL;
END IF;
-- Remove all NEW_LINE from base64 string
ClobLen := DBMS_LOB.GETLENGTH(InBase64Char);
DBMS_LOB.CREATETEMPORARY(clob_trim, TRUE);
LOOP
EXIT WHEN read_offset > ClobLen;
stringBuffer := REPLACE(REPLACE(DBMS_LOB.SUBSTR(InBase64Char, amount, read_offset), CHR(13), NULL), CHR(10), NULL);
DBMS_LOB.WRITEAPPEND(clob_trim, LENGTH(stringBuffer), stringBuffer);
read_offset := read_offset + amount;
END LOOP;
read_offset := 1;
ClobLen := DBMS_LOB.GETLENGTH(clob_trim);
DBMS_LOB.CREATETEMPORARY(res, TRUE);
LOOP
EXIT WHEN read_offset > ClobLen;
buffer := UTL_ENCODE.BASE64_DECODE(UTL_RAW.CAST_TO_RAW(DBMS_LOB.SUBSTR(clob_trim, amount, read_offset)));
DBMS_LOB.WRITEAPPEND(res, DBMS_LOB.GETLENGTH(buffer), buffer);
read_offset := read_offset + amount;
END LOOP;
RETURN res;
END DecodeBASE64;
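If it helps, here is a small usage sketch for the function above. The name of the CLOB column (BIO_DATA_B64) is made up because the question never shows it, so treat it and the sample key value as assumptions about your schema:
DECLARE
  v_clob CLOB;
  v_blob BLOB;
BEGIN
  -- read the Base64 text (hypothetical column BIO_DATA_B64), decode it,
  -- and write the binary result back into the BLOB column from the question
  SELECT bio_data_b64
    INTO v_clob
    FROM fp_biometric_data
   WHERE cust_no = 12345;            -- sample key value, adjust to your data
  v_blob := DecodeBASE64(v_clob);
  UPDATE fp_biometric_data
     SET bio_data = v_blob
   WHERE cust_no = 12345;
  COMMIT;
END;
/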
My problem is that for the table my user will be loading, I'm not sure what the columns will be, so how do I define these unknown elements (columns), and also what they will be read as (e.g. AsString, AsInteger, AsReal), etc.?
(I use SQLite)
So here's my code:
procedure TFrmsearchpage.btnloadClick(Sender: TObject);
var
con:tfdconnection;
loadquery:tfdquery;
i:integer;
j:integer;
row:integer;
col1,col2,col3,col4, col5, col6, col7 : string;
begin
con:=tfdconnection.Create(nil);
loadquery:=tfdquery.Create(con);
loadquery.Connection:=con;
con.DriverName:='SQL';
con.Open('DriverID=SQLite;Database='+Dir+'/Stock_V5;');
loadquery.SQL.Text:='SELECT * FROM ' + edtdatabasename.Text; //' Con Column';
loadquery.Open;
if loadquery.Eof then
ShowMessage('not exists')
else
ShowMessage('exists');
for i := 0 to sgdproduct.RowCount do
for j := 0 to sgdproduct.ColCount do
sgdproduct.Cells[i,j]:='';
showmessage(loadquery.SQL.Text);
Sgdproduct.colcount:=7;
sgdproduct.fixedcols:=0;
for i := 0 to 3 do
sgdproduct.colwidths[i]:=100;
sgdproduct.cells[0,0] := 'Col1'; //?
sgdproduct.cells[1,0] := 'Col2'; //?
sgdproduct.cells[2,0] := 'Col3';
sgdproduct.cells[3,0] := 'Col4'; //?
sgdproduct.cells[4,0] := 'Col5'; //?
sgdproduct.cells[5,0] := 'Col6'; //?
sgdproduct.cells[6,0] := 'Col7'; //?
row:=1;
while not loadquery.Eof do
begin
Col1:=query.FieldByName('Col1')//.As
Col2:=query.FieldByName('Col2')//.As
Col3:=query.FieldByName('Col3')//.As
Col4:=query.FieldByName('Col4')//.As
Col5:=query.FieldByName('Col5')//.As
Col6:=query.FieldByName('Col6')//.As
col7:=query.FieldByName('Col7')//.As
sgdproduct.Cells[0,row]:=Col1;
sgdproduct.Cells[1,row]:=Col2;
sgdproduct.Cells[2,row]:=Col3;
sgdproduct.Cells[3,row]:=Col4;
sgdproduct.Cells[4,row]:=Col5;
sgdproduct.Cells[5,row]:=Col6;
sgdproduct.Cells[6,row]:=Col7;
row:=row+1;
query.Next;
end;
end;
To give labels in the string grid based on the TDataSet generated by your SQL, take a look at the code below:
for i := 0 to DataSet.FieldCount - 1 do
begin
sgdproduct.cells[i,0] := DataSet.Fields[i].DisplayName;
end;
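If you also don't want to hard-code the number of columns or their types, a rough sketch along the same lines (assuming the loadquery, sgdproduct, i and row names from the question) is to size the grid from the dataset and read every field with AsString, which returns a text representation whatever the underlying type is (TField.DataType tells you the actual type if you ever need it):
// size the grid from whatever the SELECT returned
sgdproduct.ColCount := loadquery.FieldCount;
sgdproduct.FixedRows := 1;
for i := 0 to loadquery.FieldCount - 1 do
  sgdproduct.Cells[i, 0] := loadquery.Fields[i].DisplayName;
// fill the data rows; AsString works for any field type
row := 1;
loadquery.First;
while not loadquery.Eof do
begin
  sgdproduct.RowCount := row + 1;
  for i := 0 to loadquery.FieldCount - 1 do
    sgdproduct.Cells[i, row] := loadquery.Fields[i].AsString;
  Inc(row);
  loadquery.Next;
end;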