I am getting the exception java.lang.ArrayStoreException.

I have posted my complete code below. This is the code that encodes a WAV file. Here is the code that raises the ArrayStoreException:
package org.xiph.speex;
import static java.nio.file.Files.size;
/**
* Main Speex Encoder class.
* This class encodes the given PCM 16bit samples into Speex packets.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.6 $
*/
public class SpeexEncoder
{
/**
* Version of the Speex Encoder
*/
public static final String VERSION = "Java Speex Encoder v0.9.7 ($Revision: 1.6 $)";
private Encoder encoder;
private Bits bits;
private float[] rawData;
private int sampleRate;
private int channels;
private int frameSize;
/**
* Constructor
*/
public SpeexEncoder()
{
bits = new Bits();
}
/**
* initialization
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param quality the quality setting of the encoder (between 0 and 10).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @return true if initialisation successful.
*/
public boolean init(final int mode,
final int quality,
final int sampleRate,
final int channels)
{
switch (mode) {
case 0:
encoder = new NbEncoder();
((NbEncoder)encoder).nbinit();
break;
//Wideband
case 1:
encoder = new SbEncoder();
((SbEncoder)encoder).wbinit();
break;
case 2:
encoder = new SbEncoder();
((SbEncoder)encoder).uwbinit();
break;
//*/
default:
return false;
}
/* initialize the speex decoder */
encoder.setQuality(quality);
/* set decoder format and properties */
this.frameSize = encoder.getFrameSize();
this.sampleRate = sampleRate;
this.channels = channels;
rawData = new float[channels*frameSize];
bits.init();
return true;
}
/**
* Returns the Encoder being used (Narrowband, Wideband or Ultrawideband).
* @return the Encoder being used (Narrowband, Wideband or Ultrawideband).
*/
public Encoder getEncoder()
{
return encoder;
}
/**
* Returns the sample rate.
* @return the sample rate.
*/
public int getSampleRate()
{
return sampleRate;
}
/**
* Returns the number of channels.
* @return the number of channels.
*/
public int getChannels()
{
return channels;
}
/**
* Returns the size of a frame.
* @return the size of a frame.
*/
public int getFrameSize()
{
return frameSize;
}
public void setComplexity(int complexity) {
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
/**
* Pull the decoded data out into a byte array at the given offset
* and returns the number of bytes of encoded data just read.
* @param offset
* @return the number of bytes of encoded data just read.
*/
public int getProcessedData(final byte[] data, final int offset)
{
int size = bits.getBufferSize();
System.out.println("value is:"+bits.getBuffer());
System.arraycopy(bits.getBuffer(),0, data, offset,size);
bits.init();
// System.out.println("size is:"+ size);
return size;
}
/**
* Returns the number of bytes of encoded data ready to be read.
* @return the number of bytes of encoded data ready to be read.
*/
public void getProcessedDataByteSize()
{
}
/**
* This is where the actual encoding takes place
* @param data
* @param offset
* @param len
* @return true if successful.
*/
public boolean processData(final byte[] data,
final int offset,
final int len)
{
// convert raw bytes into float samples
mapPcm16bitLittleEndian2Float(data, offset, rawData, 0, len/2);
// encode the bitstream
return processData(rawData, len/2);
}
/**
* Encode an array of shorts.
* @param data
* @param offset
* @param numShorts
* @return true if successful.
*/
public boolean processData(final short[] data,
final int offset,
final int numShorts)
{
int numSamplesRequired = channels * frameSize;
if (numShorts != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numShorts);
}
// convert shorts into float samples,
for (int i=0; i<numShorts; i++) {
rawData[i] = data[offset + i ];
}
// encode the bitstream
return processData(rawData, numShorts);
}
/**
* Encode an array of floats.
* @param data
* @param numSamples
* @return true if successful.
*/
public boolean processData(final float[] data, final int numSamples)
{
int numSamplesRequired = channels * frameSize;
if (numSamples != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numSamples );
}
// encode the bitstream
if (channels==2) {
Stereo.encode(bits, data, frameSize);
}
encoder.encode(bits, data);
return true;
}
/**
* Converts a 16 bit linear PCM stream (in the form of a byte array)
* into a floating point PCM stream (in the form of a float array).
* Here are some important details about the encoding:
* <ul>
* <li> Java uses big endian for shorts and ints, and Windows uses little Endian.
* Therefore, shorts and ints must be read as sequences of bytes and
* combined with shifting operations.
* </ul>
* @param pcm16bitBytes - byte array of linear 16-bit PCM formatted audio.
* @param offsetInput
* @param samples - float array to receive the 16-bit linear audio samples.
* @param offsetOutput
* @param length
*/
public static void mapPcm16bitLittleEndian2Float(final byte[] pcm16bitBytes,
final int offsetInput,
final float[] samples,
final int offsetOutput,
final int length)
{
if (pcm16bitBytes.length - offsetInput < 2 * length) {
throw new IllegalArgumentException("Insufficient Samples to convert to floats");
}
if (samples.length - offsetOutput < length) {
throw new IllegalArgumentException("Insufficient float buffer to convert the samples");
}
for (int i = 0; i < length; i++) {
samples[offsetOutput+i] = ((pcm16bitBytes[offsetInput+2*i] & 0xff) | (pcm16bitBytes[offsetInput+2*i+1] << 8)); // no & 0xff at the end to keep the sign
}
}
// public int getProcessedData(byte[] temp, int i) {
// return 0;
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
// }
private static class Stereo {
private static void encode(Bits bits, float[] data, int frameSize) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
private Stereo() {
}
}
static class Bits {
// private Object getBuffer;
// private Object getBufferSize;
// private void Bits() {
// }
private int getBufferSize() {
// System.out.println("hello");
return 0;
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
public int getBuffer() {
// return null;
return 0;
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
public int init() {
return 0;
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
}
}
Please help me fix this. This is my complete code; the ArrayStoreException is thrown in getProcessedData.

Answer 0 (score: 0)
Having seen the rest of the code you posted, Google found a copy of the implementation at https://github.com/phono/PhonoSDK/blob/master/modules/phono-java-audio/src/java/org/xiph/speex/. In fact, an even simpler solution is

git clone https://github.com/phono/PhonoSDK

and you will have a copy of all the code you are likely to need.
The return type of bits.getBuffer() is int. The method arraycopy only works with arrays, as its name suggests: you have to copy from one array to another array of a compatible type. You cannot copy from, say, an int[] to a byte[], and you certainly cannot copy from a single int to a byte[].
By the way, given that you have getBufferSize(), you probably want getBuffer() to return a byte[] buffer of size getBufferSize(). Something about your API looks off.
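To illustrate the failure mode (a minimal, hypothetical sketch, not taken from your code): System.arraycopy declares its source and destination parameters as Object, so passing the int returned by getBuffer() still compiles because the int is autoboxed to an Integer, but at run time the source is not an array and arraycopy throws ArrayStoreException.

public class ArrayStoreDemo {
    public static void main(String[] args) {
        byte[] dest = new byte[4];

        // Fine: source and destination are both byte arrays.
        byte[] src = {1, 2, 3, 4};
        System.arraycopy(src, 0, dest, 0, src.length);

        // Compiles (the int is autoboxed to an Integer), but at run time
        // the source is not an array, so this throws ArrayStoreException.
        int notAnArray = 42;
        System.arraycopy(notAnArray, 0, dest, 0, 1);
    }
}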
Answer 1 (score: 0)
Check the data types of the arrays. In the line

System.arraycopy(bits.getBuffer(), 0, data, offset, size);

look at the type of bits.getBuffer() and the type of data. The first argument must be an Object (specifically, an array), but in your code bits.getBuffer() returns a primitive int, not an array.
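As a minimal sketch of what a fixed Bits class might look like (this is an assumption about your intent, not the real JSpeex Bits implementation), getBuffer() could return the backing byte[] so that arraycopy copies between two byte arrays:

static class Bits {
    private byte[] buffer = new byte[2048]; // assumed capacity
    private int bufferSize;                 // bytes of encoded data currently held

    public byte[] getBuffer() {
        return buffer;      // an array, so System.arraycopy accepts it as a source
    }

    public int getBufferSize() {
        return bufferSize;
    }

    public void init() {
        bufferSize = 0;     // reset between frames
    }
}

With that in place, getProcessedData copies byte[] to byte[] and no ArrayStoreException is thrown:

System.arraycopy(bits.getBuffer(), 0, data, offset, bits.getBufferSize());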
Answer 2 (score: 0)
I'm not sure exactly what you are trying to do, but if I were you I would modify the code as follows. Since the code contains many placeholders, replace them with your own logic.
package org.xiph.speex;
import java.util.function.Supplier;
class Bits {
byte[] backingArray;
Bits() {
this.backingArray = new byte[65535];
}
Bits(int size) {
this.backingArray = new byte[size];
}
int getBufferSize() {
return this.backingArray.length;
}
public byte[] getBuffer() {
return this.backingArray;
}
public int feed(byte[] src, int offset, int sz) throws IllegalArgumentException {
if ( sz > this.backingArray.length ) {
throw new IllegalArgumentException("data is too big to fit in");
}
if ( src.length - offset < sz ) {
sz = src.length - offset;
}
System.arraycopy(src, offset, this.backingArray, 0, sz);
return sz;
}
public void init() {
// whatever
}
}
class Stereo {
static void encode(Bits bits, float[] data, int frameSize) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
Stereo() {
}
}
interface Encoder {
void init();
void setQuality(int quality);
int getFrameSize();
void encode(Bits bits, float[] data);
}
class NarrowBandEncoder implements Encoder {
@Override
public void init() {
// whatever
}
@Override
public void setQuality(int quality) {
// TODO Auto-generated method stub
}
@Override
public int getFrameSize() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void encode(Bits bits, float[] data) {
// TODO Auto-generated method stub
}
}
class WidebandEncoder implements Encoder {
@Override
public void init() {
// whatever
}
@Override
public void setQuality(int quality) {
// TODO Auto-generated method stub
}
@Override
public int getFrameSize() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void encode(Bits bits, float[] data) {
// TODO Auto-generated method stub
}
}
class UltraWidebandEncoder implements Encoder {
@Override
public void init() {
// whatever
}
@Override
public void setQuality(int quality) {
// TODO Auto-generated method stub
}
@Override
public int getFrameSize() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void encode(Bits bits, float[] data) {
// TODO Auto-generated method stub
}
}
enum EncodingMode {
NARROW_BAND(NarrowBandEncoder::new),
WIDE_BAND(WidebandEncoder::new),
ULTRA_WIDE_BAND(UltraWidebandEncoder::new);
private Supplier<? extends Encoder> encoder;
private EncodingMode(Supplier<? extends Encoder> supplier) {
this.encoder = supplier;
}
public Encoder createEncoder() {
return this.encoder.get();
}
}
/**
* Main Speex Encoder class.
* This class encodes the given PCM 16bit samples into Speex packets.
*
* @author Marc Gimpel, Wimba S.A. (mgimpel@horizonwimba.com)
* @version $Revision: 1.6 $
*/
public class SpeexEncoder
{
/**
* Version of the Speex Encoder
*/
public static final String VERSION = "Java Speex Encoder v0.9.7 ($Revision: 1.6 $)";
private Encoder encoder;
private Bits bits;
private float[] rawData;
private int sampleRate;
private int channels;
private int frameSize;
/**
* Constructor
*/
public SpeexEncoder()
{
bits = new Bits();
}
/**
* initialization
* @param mode the mode of the encoder (0=NB, 1=WB, 2=UWB).
* @param quality the quality setting of the encoder (between 0 and 10).
* @param sampleRate the number of samples per second.
* @param channels the number of audio channels (1=mono, 2=stereo, ...).
* @return true if initialisation successful.
*/
public boolean init(final EncodingMode mode,
final int quality,
final int sampleRate,
final int channels)
{
this.encoder = mode.createEncoder();
this.encoder.init();
/* initialize the speex decoder */
this.encoder.setQuality(quality);
/* set decoder format and properties */
this.frameSize = encoder.getFrameSize();
this.sampleRate = sampleRate;
this.channels = channels;
this.rawData = new float[channels*frameSize];
this.bits.init();
return true;
}
/**
* Returns the Encoder being used (Narrowband, Wideband or Ultrawideband).
* @return the Encoder being used (Narrowband, Wideband or Ultrawideband).
*/
public Encoder getEncoder()
{
return encoder;
}
/**
* Returns the sample rate.
* @return the sample rate.
*/
public int getSampleRate()
{
return sampleRate;
}
/**
* Returns the number of channels.
* @return the number of channels.
*/
public int getChannels()
{
return channels;
}
/**
* Returns the size of a frame.
* @return the size of a frame.
*/
public int getFrameSize()
{
return frameSize;
}
public void setComplexity(int complexity) {
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
/**
* Pull the decoded data out into a byte array at the given offset
* and returns the number of bytes of encoded data just read.
* @param offset
* @return the number of bytes of encoded data just read.
*/
public int getProcessedData(final byte[] data, final int offset)
{
int sz = this.bits.feed(data, offset, this.bits.getBufferSize());
this.bits.init();
return sz;
}
/**
* Returns the number of bytes of encoded data ready to be read.
* @return the number of bytes of encoded data ready to be read.
*/
public void getProcessedDataByteSize()
{
// ?
}
/**
* This is where the actual encoding takes place
* @param data
* @param offset
* @param len
* @return true if successful.
*/
public boolean processData(final byte[] data,
final int offset,
final int len)
{
// convert raw bytes into float samples
mapPcm16bitLittleEndian2Float(data, offset, rawData, 0, len/2);
// encode the bitstream
return processData(rawData, len/2);
}
/**
* Encode an array of shorts.
* @param data
* @param offset
* @param numShorts
* @return true if successful.
*/
public boolean processData(final short[] data,
final int offset,
final int numShorts)
{
int numSamplesRequired = channels * frameSize;
if (numShorts != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numShorts);
}
// convert shorts into float samples,
for (int i=0; i<numShorts; i++) {
rawData[i] = data[offset + i ];
}
// encode the bitstream
return processData(rawData, numShorts);
}
/**
* Encode an array of floats.
* @param data
* @param numSamples
* @return true if successful.
*/
public boolean processData(final float[] data, final int numSamples)
{
int numSamplesRequired = channels * frameSize;
if (numSamples != numSamplesRequired) {
throw new IllegalArgumentException("SpeexEncoder requires " + numSamplesRequired + " samples to process a Frame, not " + numSamples );
}
// encode the bitstream
if (channels==2) {
Stereo.encode(bits, data, frameSize);
}
encoder.encode(bits, data);
return true;
}
/**
* Converts a 16 bit linear PCM stream (in the form of a byte array)
* into a floating point PCM stream (in the form of a float array).
* Here are some important details about the encoding:
* <ul>
* <li> Java uses big endian for shorts and ints, and Windows uses little Endian.
* Therefore, shorts and ints must be read as sequences of bytes and
* combined with shifting operations.
* </ul>
* @param pcm16bitBytes - byte array of linear 16-bit PCM formatted audio.
* @param offsetInput
* @param samples - float array to receive the 16-bit linear audio samples.
* @param offsetOutput
* @param length
*/
public static void mapPcm16bitLittleEndian2Float(final byte[] pcm16bitBytes,
final int offsetInput,
final float[] samples,
final int offsetOutput,
final int length)
{
if (pcm16bitBytes.length - offsetInput < 2 * length) {
throw new IllegalArgumentException("Insufficient Samples to convert to floats");
}
if (samples.length - offsetOutput < length) {
throw new IllegalArgumentException("Insufficient float buffer to convert the samples");
}
for (int i = 0; i < length; i++) {
samples[offsetOutput+i] = ((pcm16bitBytes[offsetInput+2*i] & 0xff) | (pcm16bitBytes[offsetInput+2*i+1] << 8)); // no & 0xff at the end to keep the sign
}
}
// public int getProcessedData(byte[] temp, int i) {
// return 0;
// throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
// }
}
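For context, here is a hypothetical usage sketch of the rewritten API (the mode, quality, sample-rate and channel values are illustrative assumptions, and the encoder classes above are still stubs that need real logic):

// Hypothetical caller: encode one frame of 16-bit little-endian PCM bytes.
SpeexEncoder speex = new SpeexEncoder();
speex.init(EncodingMode.NARROW_BAND, 8, 8000, 1);  // mode, quality, sample rate, channels

byte[] pcmFrame = new byte[2 * speex.getChannels() * speex.getFrameSize()];
speex.processData(pcmFrame, 0, pcmFrame.length);   // fill pcmFrame from your WAV data first

byte[] encoded = new byte[1024];
int written = speex.getProcessedData(encoded, 0);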