Oracle BLOBs larger than 2 KB get purged
Hello,
I am trying to read BLOBs back from my Oracle database.
To do this, I use the Hanson Char method found at
http://hansonchar.blogspot.com/2005/...o-byte-in.html
The idea is to give the POJO a private byte[] attribute and its accessors:
Code:
private byte[] text;

public java.lang.String getBody() throws UnsupportedEncodingException {
    // fileEncoding is a String field of the class (see the full class further down)
    String res = null;
    if (text != null) {
        res = new String(text, fileEncoding);
    }
    return res;
}

public void setBodyBytes(byte[] text) {
    this.text = text;
}
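Just to illustrate how these accessors behave on their own (a trivial sketch, no Hibernate involved; CP1252 is the encoding the test at the end of the post uses):
Code:
package test.junit.blob;

// Trivial usage sketch of the plain accessors.
public class BodyAccessorExample {
    public static void main(String[] args) throws Exception {
        EdiFileByteBlobVO vo = new EdiFileByteBlobVO();
        vo.setFileEncoding("CP1252");                 // encoding used to decode the bytes
        vo.setBodyBytes("hello".getBytes("CP1252"));  // raw bytes go in...
        System.out.println(vo.getBody());             // ...and come back out as "hello"
    }
}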
plus a pair of accessors for Hibernate:
Code:
/** Don't invoke this. Used by Hibernate only. */
public void setTextBlob(Blob TextBlob) {
    if (TextBlob == null) {
        logger.warn("cyrille => ca va planter");
        this.text = null;
        return;
    }
    this.text = this.toByteArray(TextBlob);
}

/** Don't invoke this. Used by Hibernate only.
 * @throws IOException */
public Blob getTextBlob() throws IOException {
    //InputStream is = new InputStream(text);
    ByteArrayInputStream bais = new ByteArrayInputStream(text);
    return Hibernate.createBlob(bais);
    //return Hibernate.createBlob(this.text);
}

private byte[] toByteArray(Blob fromBlob) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        return toByteArrayImpl(fromBlob, baos);
    } catch (SQLException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (baos != null) {
            try {
                baos.close();
            } catch (IOException ex) {
            }
        }
    }
}

private byte[] toByteArrayImpl(Blob fromBlob, ByteArrayOutputStream baos)
        throws SQLException, IOException {
    byte[] buf = new byte[4000];
    InputStream is = fromBlob.getBinaryStream();
    try {
        for (;;) {
            int dataSize = is.read(buf);
            if (dataSize == -1)
                break;
            baos.write(buf, 0, dataSize);
        }
    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (IOException ex) {
            }
        }
    }
    return baos.toByteArray();
}
and, in the mapping file, to map the property onto the Hibernate accessors:
Code:
<property
    column="FILESTORED"
    name="textBlob"
    not-null="false"
    type="blob"
/>
This works fine with PostgreSQL 8.2 and Oracle 10.
The problem is that it does not work with Oracle for BLOBs larger than 2 KB: the BLOB is purged from the table as soon as a transaction and a load are performed.
I am using the latest Oracle thin driver, and I cannot use the OCI driver.
Does anyone have a solution, please?
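For what it is worth, here is the kind of plain JDBC check that can be run to see whether the data is really gone from the table after testLoad() (just a sketch, independent of Hibernate; the JDBC URL and credentials are placeholders, the EDIFILE / FILESTORED names come from the mapping below):
Code:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Prints the stored length of each FILESTORED blob via dbms_lob.getlength
// (NULL for a NULL blob, 0 for an empty one).
public class CheckBlobLength {
    public static void main(String[] args) throws Exception {
        Class.forName("oracle.jdbc.OracleDriver");
        Connection con = DriverManager.getConnection(
                "jdbc:oracle:thin:@//host:1521/service", "user", "password"); // placeholders
        Statement st = con.createStatement();
        ResultSet rs = st.executeQuery(
                "select ediFilePK, dbms_lob.getlength(FILESTORED) from EDIFILE");
        while (rs.next()) {
            System.out.println("ediFilePK=" + rs.getInt(1)
                    + " blob length=" + rs.getObject(2));
        }
        rs.close();
        st.close();
        con.close();
    }
}
If the length only drops to 0 (or NULL) after testLoad() runs, that would confirm the data is wiped at load/commit time rather than truncated at insert time.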
Below is the complete XML mapping:
Code:
<?xml version="1.0"?>
<!DOCTYPE hibernate-mapping PUBLIC
    "-//Hibernate/Hibernate Mapping DTD//EN"
    "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd" >

<hibernate-mapping package="test.junit.blob">
    <class name="EdiFileByteBlobVO" table="EDIFILE" lazy="true">
        <id name="ediFilePK" type="integer">
            <generator class="sequence">
                <param name="sequence">SEQ_EDIFILEPK</param>
            </generator>
        </id>
        <property
            column="FILENAME"
            length="50"
            name="fileName"
            not-null="true"
            type="string"
        />
        <property
            column="FILE_ENCODING"
            length="15"
            name="fileEncoding"
            not-null="true"
            type="string"
        />
        <!-- you cannot map two different fields onto the same DB column -->
        <property
            column="FILESTORED"
            name="textBlob"
            not-null="false"
            type="blob"
        />
        <property
            column="IS_IN_CFEC"
            name="isInCFEC"
            not-null="true"
            type="boolean"
        />
        <!--
        <set name="invoiceSetByEdifiledisplayfk">
            <key column="EDIFILEDISPLAYFK" />
            <one-to-many class="model.InvoiceVO" />
        </set>
        <set name="invoiceSetByEdifilesentbytranslatorfk">
            <key column="EDIFILESENTBYTRANSLATORFK" />
            <one-to-many class="model.InvoiceVO" />
        </set>
        -->
    </class>
</hibernate-mapping>
and here is the class:
Code:
/*
 * Created on 17 Dec. 2004
 *
 * TODO To change the template for this generated file go to
 * Window - Preferences - Java - Code Style - Code Templates
 */
package test.junit.blob;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.sql.Blob;
import java.sql.SQLException;

import org.apache.log4j.Logger;
import org.hibernate.Hibernate;

/**
 * @author croy
 * Goal: use the same code to map BLOBs in Oracle and PostgreSQL.
 * Solution 1:
 *   use a byte[] in the object
 *   use the blob type in the Hibernate mapping
 *
 * Note => using the binary type with sql-type="raw" for Oracle or "bytea" for PostgreSQL works,
 * see => http://www.hibernate.org/236.html
 *
 * TODO To change the template for this generated type comment go to
 * Window - Preferences - Java - Code Style - Code Templates
 */
public class EdiFileByteBlobVO implements Serializable {

    private static Logger logger = Logger.getLogger(EdiFileByteBlobVO.class);

    private Integer ediFilePK;
    private String fileName;
    // encoding
    private String fileEncoding;
    // does not work with Hibernate because of bB ?
    private boolean isInCFEC;
    /*
    private Set<InvoiceVO> invoiceSetByEdifiledisplayfk;
    private Set<InvoiceVO> invoiceSetByEdifilesentbytranslatorfk;
    */
    private byte[] text;

    public java.lang.String getBody() throws UnsupportedEncodingException {
        String res = null;
        if (text != null) {
            res = new String(text, fileEncoding);
        }
        return res;
    }

    public void setBodyBytes(byte[] text) {
        this.text = text;
    }

    // no longer in use
    /*
    public void setBody(java.lang.String text) {
        if (text != null) {
            try {
                byte[] b = text.getBytes(fileEncoding);
                //this.text.setBytes(0, b);
                this.text = Hibernate.createBlob(b);
                //this.text = text.getBytes(fileEncoding);
            } catch (UnsupportedEncodingException e) {
                logger.error("Error while setting edifile content because of unknown encoding :" + fileEncoding, e);
                //this.text = text.getBytes();
            }
        }
        else
            this.text = null;
    }
    */

    /** Don't invoke this. Used by Hibernate only. */
    public void setTextBlob(Blob TextBlob) {
        if (TextBlob == null) {
            logger.warn("cyrille => ca va planter");
            this.text = null;
            return;
        }
        this.text = this.toByteArray(TextBlob);
    }

    /** Don't invoke this. Used by Hibernate only.
     * @throws IOException */
    public Blob getTextBlob() throws IOException {
        //InputStream is = new InputStream(text);
        ByteArrayInputStream bais = new ByteArrayInputStream(text);
        return Hibernate.createBlob(bais);
        //return Hibernate.createBlob(this.text);
    }

    private byte[] toByteArray(Blob fromBlob) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try {
            return toByteArrayImpl(fromBlob, baos);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (baos != null) {
                try {
                    baos.close();
                } catch (IOException ex) {
                }
            }
        }
    }

    private byte[] toByteArrayImpl(Blob fromBlob, ByteArrayOutputStream baos)
            throws SQLException, IOException {
        byte[] buf = new byte[4000];
        InputStream is = fromBlob.getBinaryStream();
        try {
            for (;;) {
                int dataSize = is.read(buf);
                if (dataSize == -1)
                    break;
                baos.write(buf, 0, dataSize);
            }
        } finally {
            if (is != null) {
                try {
                    is.close();
                } catch (IOException ex) {
                }
            }
        }
        return baos.toByteArray();
    }

    /**
     * @return Returns the ediFilePK.
     */
    public Integer getEdiFilePK() {
        return ediFilePK;
    }

    /**
     * @param ediFilePK The ediFilePK to set.
     */
    public void setEdiFilePK(Integer ediFilePK) {
        this.ediFilePK = ediFilePK;
    }

    /**
     * @return Returns the fileName.
     */
    public String getFileName() {
        return fileName;
    }

    /**
     * @param fileName The fileName to set.
     */
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    /**
     * @return Returns the isInCFEC.
     */
    public boolean getIsInCFEC() {
        return isInCFEC;
    }

    /**
     * @param isInCFEC The isInCFEC to set.
     */
    public void setIsInCFEC(boolean isInCFEC) {
        this.isInCFEC = isInCFEC;
    }

    /*
     * @author gma
     *
     * TODO To change the template for this generated type comment go to
     * Window - Preferences - Java - Code Style - Code Templates
     */
    public EdiFileByteBlobVO(Integer pEdiFilePK, String pFileName, byte[] pBody, String pFileEncoding) {
        this.setEdiFilePK(pEdiFilePK);
        this.setFileName(pFileName);
        //this.setBody(pBody);
        this.setBodyBytes(pBody);
        this.setFileEncoding(pFileEncoding);
    }

    public EdiFileByteBlobVO() {
        super();
    }

    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append("ediFilePK=").append(ediFilePK).append("; ");
        sb.append("isInCFEC=").append(isInCFEC).append("; ");
        sb.append("fileEncoding=").append(fileEncoding);
        return sb.toString();
    }

    public EdiFileByteBlobVO deepCopy() {
        return this;
    }

    /**
     * @return Returns the fileEncoding.
     */
    public String getFileEncoding() {
        return fileEncoding;
    }

    /**
     * @param fileEncoding The fileEncoding to set.
     */
    public void setFileEncoding(String fileEncoding) {
        this.fileEncoding = fileEncoding;
    }
}
and the test code:
Code:
package test.junit.blob;

import hibernate.HibernateUtil;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;

import org.apache.log4j.Logger;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.junit.Test;

/**
 * Test of solution 1.
 * @author croy
 *
 * Result => works with Oracle for small files < 2 KB.
 *
 */
public class TestBlobByte {

    Logger logger = Logger.getLogger(TestBlobByte.class);

    //@Test
    public void testInsert() throws IOException {
        Session session = HibernateUtil.currentSession();
        Transaction tx = session.beginTransaction();
        EdiFileByteBlobVO edifile = new EdiFileByteBlobVO();
        try {
            edifile.setFileEncoding("CP1252");
            edifile.setFileName("toto");
            edifile.setIsInCFEC(false);
            //edifile.setBodyBytes("toto".getBytes());
            File f = new File("c:/o.message.pf");
            byte[] fileContent = new byte[(int) f.length()];
            BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
            // note: a single read() is not guaranteed to fill the whole buffer
            bis.read(fileContent);
            bis.close();
            edifile.setBodyBytes(fileContent);
            session.save(edifile);
            logger.info("get edifile " + edifile.getEdiFilePK());
            logger.info("get edifile body =>" + edifile.getBody());
            //edifile.setBodyBytes("toto".getBytes());
            tx.commit();
        } catch (HibernateException e) {
            tx.rollback();
            throw e;
        } finally {
            HibernateUtil.closeSession();
        }
    }

    @Test
    /**
     * Loads an edifile to check whether the mapping purges it during the load.
     */
    public void testLoad() throws UnsupportedEncodingException {
        Session session = HibernateUtil.currentSession();
        Transaction tx = session.beginTransaction();
        Integer firstId = (Integer) session.createSQLQuery("select min(edifilePK) as firstId from edifile")
                .addScalar("firstId", Hibernate.INTEGER)
                .uniqueResult();
        logger.info("trying to load edifilepk " + firstId);
        EdiFileByteBlobVO edifile = new EdiFileByteBlobVO();
        try {
            session.load(edifile, firstId);
            logger.info("get edifile " + edifile.getEdiFilePK());
            logger.info("get edifile body =>" + edifile.getBody());
            //edifile.setBodyBytes("toto".getBytes());
            logger.info("avant commit");
            tx.commit();
            logger.info("apres commit");
        } catch (HibernateException e) {
            tx.rollback();
            throw e;
        } finally {
            HibernateUtil.closeSession();
        }

        // reopen the same record to check that the blob has not been purged
        session = HibernateUtil.currentSession();
        tx = session.beginTransaction();
        try {
            EdiFileByteBlobVO edifile2 = new EdiFileByteBlobVO();
            session.load(edifile2, edifile.getEdiFilePK());
            if (edifile2 != null) {
                logger.info("get edifile " + edifile2.getEdiFilePK());
                logger.info("get edifile body =>" + edifile2.getBody());
                if (!edifile.getBody().equals(edifile2.getBody()))
                    throw new RuntimeException("edifile has been corrupted while loading it");
            } else {
                throw new RuntimeException("no data found in EdiFileVO edifile2 is null");
            }
            tx.commit();
        } catch (HibernateException e) {
            tx.rollback();
            throw e;
        } finally {
            HibernateUtil.closeSession();
        }
    }
}
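(HibernateUtil is the usual currentSession()/closeSession() helper; its exact implementation is not shown in this post. The sketch below assumes the standard ThreadLocal version from the Hibernate reference documentation:)
Code:
package hibernate;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

// Sketch of the classic ThreadLocal session helper from the Hibernate 3 docs.
public class HibernateUtil {

    private static final SessionFactory sessionFactory;

    static {
        try {
            // reads hibernate.cfg.xml from the classpath
            sessionFactory = new Configuration().configure().buildSessionFactory();
        } catch (Throwable ex) {
            throw new ExceptionInInitializerError(ex);
        }
    }

    private static final ThreadLocal<Session> threadSession = new ThreadLocal<Session>();

    public static Session currentSession() {
        Session s = threadSession.get();
        if (s == null) {
            s = sessionFactory.openSession();
            threadSession.set(s);
        }
        return s;
    }

    public static void closeSession() {
        Session s = threadSession.get();
        threadSession.set(null);
        if (s != null) {
            s.close();
        }
    }
}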