Doubts about Raster and BufferedImage
hi,
What is the purpose of the optionally preallocated double array in the
method getPixels of class Raster?
public double[] getPixels(int x, int y, int w, int h, double[] dArray)
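To make sure I'm reading the javadoc right: as far as I can tell, passing null makes getPixels allocate a fresh array, while passing a large enough preallocated array just fills and returns that same array, so a buffer can be reused across calls. Here is a tiny sketch of the two calling styles as I understand them (the GetPixelsSketch class is just made up for illustration, and the last line prints what I expect, not what I have verified):
<code>
import java.awt.image.BufferedImage;
import java.awt.image.Raster;

public class GetPixelsSketch {
    public static void main(String[] args) {
        BufferedImage img = new BufferedImage(4, 5, BufferedImage.TYPE_BYTE_GRAY);
        Raster raster = img.getData();

        // variant 1: pass null and let getPixels allocate the result array
        // (the cast is needed to pick the double[] overload)
        double[] fresh = raster.getPixels(0, 0, 4, 5, (double[]) null);
        System.out.println(fresh.length);

        // variant 2: reuse my own buffer (must hold w*h*numBands samples)
        double[] buffer = new double[4 * 5 * raster.getNumBands()];
        double[] same = raster.getPixels(0, 0, 4, 5, buffer);
        System.out.println(same == buffer); // I expect this to print true
    }
}
</code>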
I was playing around with the API to create a BufferedImage from a
double array, and was trying to normalise the pixel values as shown below.
I expected to find the normalised values in the final array, but it
still contains the original image data. Obviously

this.bi.getRaster().setPixels(0, 0, this.getWidth(), this.getHeight(), ndata)

doesn't do what I thought it would do.
If somebody can tell me where I messed up, that would be great.
thanks and regards,
mark
<code>
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;

public class MyImage {

    private BufferedImage bi; // of type BufferedImage.TYPE_BYTE_GRAY
    private String fileName;

    public MyImage(String imageFileName) throws IOException {
        this.fileName = imageFileName;
        File imageFile = new File(imageFileName);
        this.bi = makeGrayScaleImage(ImageIO.read(imageFile));
    }

    public MyImage(String imageName, int width, int height, double[] data) {
        if (data.length != width * height) {
            throw new IllegalArgumentException("data size must be equal to " + width * height);
        }
        this.bi = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
        this.bi.getRaster().setPixels(0, 0, width, height, data);
        this.fileName = imageName;
    }

    private BufferedImage makeGrayScaleImage(BufferedImage img) {
        BufferedImage gray = null;
        try {
            gray = new BufferedImage(img.getWidth(), img.getHeight(),
                    BufferedImage.TYPE_BYTE_GRAY);
            ColorConvertOp ccop = new ColorConvertOp(
                    img.getColorModel().getColorSpace(),
                    gray.getColorModel().getColorSpace(), null);
            ccop.filter(img, gray);
        } catch (Exception e) {
            System.err.println("grayscale conversion failed");
        }
        return gray;
    }

    public int getWidth() {
        return this.bi.getWidth();
    }

    public int getHeight() {
        return this.bi.getHeight();
    }

    // read the pixel samples out of the image's raster into a double array
    public double[] getData() {
        int h = getHeight();
        int w = getWidth();
        double[] data = new double[h * w];
        double[] pdata = this.bi.getData().getPixels(0, 0, w, h, data);
        return pdata;
    }

    // write the normalised values back into the image's raster via setPixels
    public void normaliseImageData() {
        double[] ndata = getNormalisedData();
        this.bi.getRaster().setPixels(0, 0, this.getWidth(), this.getHeight(), ndata);
    }

    // scale all samples by the maximum value so they fall in [0, 1]
    private double[] getNormalisedData() {
        double[] d = getData();
        double maxval = max(d);
        for (int i = 0; i < d.length; i++) {
            d[i] /= maxval;
        }
        return d;
    }

    private static double max(double[] arr) {
        double m = Double.MIN_VALUE;
        for (int i = 0; i < arr.length; i++) {
            m = Math.max(m, arr[i]);
        }
        return m;
    }

    private static void printArray(double[] a) {
        for (double x : a) {
            System.out.print(x + " ");
        }
        System.out.println();
    }

    private static void debug(String msg) {
        System.out.println(msg);
    }

    public static void main(String[] args) {
        MyImage my = new MyImage("dummy.png", 4, 5, new double[]
                {23, 32, 13, 55, 65, 36, 46, 64, 27, 43, 71, 58, 38, 25, 62, 47, 19, 72, 37, 55});
        double[] data = my.getData();
        debug("original image data:");
        printArray(data);
        double[] ndata = my.getNormalisedData();
        debug("normalised data:");
        printArray(ndata);
        debug("image data after normalisation:");
        double[] newdata = my.getData();
        printArray(newdata);
    }
}
</code>