Use JCufft with an audio file

I’ve tried this: it seems to work, but it really does nothing:


import java.util.Random;
import jcuda.jcufft.*;
import edu.emory.mathcs.jtransforms.fft.FloatFFT_1D;

import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;

public class AePlayWave extends Thread {

    private String filename;
    private Position curPosition;
    private final int EXTERNAL_BUFFER_SIZE = 524288; // 512 KB

    enum Position {

        LEFT, RIGHT, NORMAL
    };

    public static void main(String[] args) {
        new AePlayWave("montefusco a 3.wav").start();
    }

    public AePlayWave(String wavfile) {
        filename = wavfile;
        curPosition = Position.NORMAL;
    }

    public AePlayWave(String wavfile, Position p) {
        filename = wavfile;
        curPosition = p;
    }

    public void run() {

        File soundFile = new File(filename);
        if (!soundFile.exists()) {
            System.err.println("Wave file not found: " + filename);
            return;
        }

        AudioInputStream audioInputStream = null;
        try {
            audioInputStream = AudioSystem.getAudioInputStream(soundFile);
        } catch (UnsupportedAudioFileException e1) {
            e1.printStackTrace();
            return;
        } catch (IOException e1) {
            e1.printStackTrace();
            return;
        }

        AudioFormat format = audioInputStream.getFormat();
        SourceDataLine auline = null;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

        try {
            auline = (SourceDataLine) AudioSystem.getLine(info);
            auline.open(format);
        } catch (LineUnavailableException e) {
            e.printStackTrace();
            return;
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        if (auline.isControlSupported(FloatControl.Type.PAN)) {
            FloatControl pan = (FloatControl) auline.getControl(FloatControl.Type.PAN);
            if (curPosition == Position.RIGHT) {
                pan.setValue(1.0f);
            } else if (curPosition == Position.LEFT) {
                pan.setValue(-1.0f);
            }
        }

        auline.start();
        int nBytesRead = 0;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];

        try {
            while (nBytesRead != -1) {
                nBytesRead = audioInputStream.read(abData, 0, abData.length);
                if (nBytesRead >= 0) {
                    auline.write(abData, 0, nBytesRead);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            return;
        } finally {
            auline.drain();
            auline.close();
        }

        int b = byteArrayToInt(abData);

        System.out.println("Performing 1D C2C transform with JCufft...");

        float outputJCufft[] = new float[EXTERNAL_BUFFER_SIZE];

        for (int i = 0; i < outputJCufft.length; i++) {
            outputJCufft[i] = byteArrayToFloat(abData);
        }

        long begin = System.currentTimeMillis();
        cufftHandle plan = new cufftHandle();
        JCufft.cufftPlan1d(plan, b, cufftType.CUFFT_C2C, 1);
        JCufft.cufftExecC2C(plan, outputJCufft, outputJCufft, JCufft.CUFFT_FORWARD);
        JCufft.cufftDestroy(plan);
        System.out.println("CUDA " + (System.currentTimeMillis() - begin));

    }

    public static final int byteArrayToInt(byte[] b) {
        return (b[0] << 24)
                + ((b[1] & 0xFF) << 16)
                + ((b[2] & 0xFF) << 8)
                + (b[3] & 0xFF);
    }

    public static final float byteArrayToFloat(byte[] b) {
        // INPUT : an array of 4 bytes
        // OUTPUT: the equivalent float value
        int bits = ((0xff & b[3]) | ((0xff & b[2]) << 8) | ((0xff & b[1]) << 16) | ((0xff & b[0]) << 24));
        return Float.intBitsToFloat(bits);
    }
}

It does nothing, doesn’t it?

Of course… you are transforming an empty buffer, and you only do it after the program has already finished playing the sound.
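
For example, here is a rough sketch, assuming 16-bit signed little-endian PCM (which is what a typical WAV file contains); the class name and the helper method are made up for illustration. The idea is to additionally write each chunk into a ByteArrayOutputStream inside the playback loop, via collected.write(abData, 0, nBytesRead), and to hand the collected bytes to JCufft only after the playback loop has finished:

import jcuda.jcufft.*;

public class CollectAndTransform {

    // In the playback loop, additionally write each chunk into a
    // ByteArrayOutputStream:
    //     collected.write(abData, 0, nBytesRead);
    // and call transform(collected.toByteArray()) after auline.drain().
    public static float[] transform(byte[] pcmBytes) {
        int numSamples = pcmBytes.length / 2; // 16-bit PCM: 2 bytes per sample

        // Interleaved complex input for CUFFT_C2C: [re0, im0, re1, im1, ...]
        float complexData[] = new float[numSamples * 2];
        for (int i = 0; i < numSamples; i++) {
            int lo = pcmBytes[2 * i] & 0xFF;        // low byte (unsigned)
            int hi = pcmBytes[2 * i + 1];           // high byte (keeps the sign)
            short sample = (short) ((hi << 8) | lo);
            complexData[2 * i] = sample / 32768.0f; // real part, scaled to [-1, 1)
            complexData[2 * i + 1] = 0.0f;          // imaginary part
        }

        cufftHandle plan = new cufftHandle();
        JCufft.cufftPlan1d(plan, numSamples, cufftType.CUFFT_C2C, 1);
        JCufft.cufftExecC2C(plan, complexData, complexData, JCufft.CUFFT_FORWARD);
        JCufft.cufftDestroy(plan);

        return complexData; // the interleaved complex spectrum
    }
}

Note that cufftPlan1d is called with the number of complex samples, not with the number of bytes. For a quick test you may also want to transform only a single block instead of the whole file.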

BTW: I don’t want to post the code of my “tests and experiments” here, but I can send it to you via mail (at least, after I’ve cleaned it up a little, maybe this week or early next week). Just drop me a note if you like.

OK. But here everyone can view the result (experimental or not). Thanks anyway.

Yes, I’ll try to clean it up a little, and maybe I’ll post it here or even on the website (I already wrote a component for painting the “wave” from the audio file, and so on… :wink: ), but I definitely have to get more familiar with the “audio meaning” of FFTs before I can create anything that really makes sense… (Any hints are welcome…)
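
One quick hint as a starting point (just a sketch, with illustrative names): for a real-valued signal, bin k of an N-point FFT corresponds to the frequency k * sampleRate / N, and the magnitude sqrt(re² + im²) of that bin tells you how strongly this frequency is present in the analyzed block. So a very first “audio meaning” experiment could be to search for the strongest bin:

public class SpectrumHint {

    // spectrum: interleaved complex FFT output [re0, im0, re1, im1, ...],
    // as produced by cufftExecC2C. sampleRate: e.g. 44100 for CD audio.
    public static void printStrongestFrequency(float spectrum[], float sampleRate) {
        int n = spectrum.length / 2; // number of complex bins
        int peakBin = 0;
        double peakMagnitude = 0.0;

        // For a real-valued input signal, only the first n/2 bins are unique;
        // the upper half mirrors the lower half. Bin 0 (the DC offset) is skipped.
        for (int k = 1; k < n / 2; k++) {
            double re = spectrum[2 * k];
            double im = spectrum[2 * k + 1];
            double magnitude = Math.sqrt(re * re + im * im);
            if (magnitude > peakMagnitude) {
                peakMagnitude = magnitude;
                peakBin = k;
            }
        }

        // Bin k of an n-point FFT corresponds to the frequency k * sampleRate / n
        double frequency = peakBin * sampleRate / n;
        System.out.println("Strongest frequency: " + frequency + " Hz");
    }
}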