
Recording audio in Unity and saving it locally


This article shares working example code for recording audio in Unity and saving it to local storage, for your reference.

We can use Unity's built-in Microphone class to record audio, play the recording back, and save it.
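
Before the full listing, here is a minimal sketch of the core round trip: record a few seconds from the default device, then play the captured clip back. The class and method names below are illustrative; Microphone.Start, Microphone.End, and AudioSource are Unity's built-in APIs.

using UnityEngine;

// Minimal sketch (illustrative names): record 5 seconds from the default
// microphone, then play the captured clip back.
public class MicSketch : MonoBehaviour
{
  AudioSource source;

  void Start()
  {
    source = gameObject.AddComponent<AudioSource>();
    // null device name = default microphone; no looping; 5 s buffer at 44.1 kHz
    source.clip = Microphone.Start(null, false, 5, 44100);
    Invoke(nameof(PlayBack), 5f);
  }

  void PlayBack()
  {
    Microphone.End(null); // stop capturing before playback
    source.Play();
  }
}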

The full code is as follows:

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;

public class MicrophoneManager : MonoBehaviour {

  public int deviceLength;
  /// <summary>
  /// Recording frequency (sample rate), edited via the GUI text field
  /// </summary>
  public string frequency = "44100";
  // Not used below; the frequency string above is what Microphone.Start parses
  public int sampleRate = 44100;
  /// <summary>
  /// Length of the looping recording buffer, in seconds
  /// </summary>
  public int micSecond = 2;
  string infoLog = "";

  AudioSource _curAudioSource;

  // Lazily adds an AudioSource to this GameObject on first use
  AudioSource curAudioSource
  {
    get
    {
      if (_curAudioSource == null)
      {
        _curAudioSource = gameObject.AddComponent<AudioSource>();
      }
      return _curAudioSource;
    }
  }

  #region [Public way]

  /// <summary>
  /// Get the available microphone devices
  /// </summary>
  public void GetMicrophoneDevice()
  {
    string[] mDevice = Microphone.devices;
    deviceLength = mDevice.Length;
    if (deviceLength == 0)
      ShowInfoLog("No microphone device found!");
  }

  /// <summary>
  /// Start recording
  /// </summary>
  public void StartRecordAudio()
  {
    curAudioSource.Stop();
    curAudioSource.loop = false;
    // Mute monitoring while capturing; unmuted again during playback
    curAudioSource.mute = true;
    // null = default device; loop = true records into a circular micSecond-long buffer
    curAudioSource.clip = Microphone.Start(null, true, micSecond, int.Parse(frequency));
    // Spin until the microphone actually starts delivering samples
    while (!(Microphone.GetPosition(null) > 0))
    {
    }
    curAudioSource.Play();
    ShowInfoLog("Recording started.....");
  }

  /// <summary>
  /// Stop recording
  /// </summary>
  public void StopRecordAudio()
  {
    ShowInfoLog("Recording stopped.....");
    if (!Microphone.IsRecording(null))
      return;
    Microphone.End(null);
    curAudioSource.Stop();
  }

  /// <summary>
  /// Play back the recording
  /// </summary>
  public void PlayRecordAudio()
  {
    if (Microphone.IsRecording(null))
      return;
    if (curAudioSource.clip == null)
      return;
    curAudioSource.mute = false;
    curAudioSource.loop = false;
    curAudioSource.Play();
    ShowInfoLog("Playing recording.....");
  }

  /// <summary>
  /// Trim, save, and log the recorded data
  /// </summary>
  public void PrintRecordData()
  {
    // Microphone.GetPosition only reports a valid write position while the
    // device is still recording, so this must run before Microphone.End
    if (!Microphone.IsRecording(null))
      return;
    #region Variable-length recording: trim the clip to what was actually recorded
    int position = Microphone.GetPosition(null);
    var soundData = new float[curAudioSource.clip.samples * curAudioSource.clip.channels];
    curAudioSource.clip.GetData(soundData, 0);

    // Copy only the samples captured so far into a shorter buffer
    var newData = new float[position * curAudioSource.clip.channels];
    for (int i = 0; i < newData.Length; i++) {
      newData[i] = soundData[i];
    }
    curAudioSource.clip = AudioClip.Create(curAudioSource.clip.name, position, curAudioSource.clip.channels, curAudioSource.clip.frequency, false);
    curAudioSource.clip.SetData(newData, 0);

    Microphone.End(null);
    #endregion
    byte[] data = GetClipData();
    // Write the PCM samples first, then seek back and fill in the 44-byte header
    using (FileStream fs = CreateEmpty(Application.persistentDataPath + "/dd.wav")) {
      ConvertAndWrite(fs, curAudioSource.clip);
      WriteHeader(fs, curAudioSource.clip);
    }

    string log = "total length:" + data.Length + " time:" + curAudioSource.time;
    ShowInfoLog(log);
  }
  private void WriteHeader(FileStream stream, AudioClip clip)
  {
    int hz = clip.frequency;
    int channels = clip.channels;
    int samples = clip.samples;

    stream.Seek(0, SeekOrigin.Begin);

    byte[] riff = System.Text.Encoding.UTF8.GetBytes("RIFF");
    stream.Write(riff, 0, 4);

    byte[] chunkSize = BitConverter.GetBytes((int)(stream.Length - 8));
    stream.Write(chunkSize, 0, 4);

    byte[] wave = System.Text.Encoding.UTF8.GetBytes("WAVE");
    stream.Write(wave, 0, 4);

    byte[] fmt = System.Text.Encoding.UTF8.GetBytes("fmt ");
    stream.Write(fmt, 0, 4);

    byte[] subChunk1 = BitConverter.GetBytes(16);
    stream.Write(subChunk1, 0, 4);

    UInt16 one = 1;

    byte[] audioFormat = BitConverter.GetBytes(one); // 1 = uncompressed PCM
    stream.Write(audioFormat, 0, 2);

    byte[] numChannels = BitConverter.GetBytes(channels);
    stream.Write(numChannels, 0, 2);

    byte[] sampleRate = BitConverter.GetBytes(hz);
    stream.Write(sampleRate, 0, 4);

    byte[] byteRate = BitConverter.GetBytes(hz * channels * 2); // sampleRate * bytesPerSample * channels, e.g. 44100 * 2 * 2
    stream.Write(byteRate, 0, 4);

    UInt16 blockAlign = (ushort)(channels * 2);
    stream.Write(BitConverter.GetBytes(blockAlign), 0, 2);

    UInt16 bps = 16;
    byte[] bitsPerSample = BitConverter.GetBytes(bps);
    stream.Write(bitsPerSample, 0, 2);

    byte[] dataString = System.Text.Encoding.UTF8.GetBytes("data");
    stream.Write(dataString, 0, 4);

    byte[] subChunk2 = BitConverter.GetBytes(samples * channels * 2);
    stream.Write(subChunk2, 0, 4);
  }
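
  // For reference, the canonical 44-byte PCM WAV header the method above
  // produces (all multi-byte fields little-endian):
  //   offset  0: "RIFF"                offset 22: channel count   (2 bytes)
  //   offset  4: file size - 8         offset 24: sample rate     (4 bytes)
  //   offset  8: "WAVE"                offset 28: byte rate = rate * channels * 2
  //   offset 12: "fmt "                offset 32: block align = channels * 2
  //   offset 16: 16 = fmt chunk size   offset 34: bits per sample = 16
  //   offset 20: 1 = PCM               offset 36: "data" + data size (4 bytes)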
  private FileStream CreateEmpty(string filepath)
  {
    FileStream fileStream = new FileStream(filepath, FileMode.Create);
    byte emptyByte = new byte();

    for (int i = 0; i < 44; i++) // reserve space for the 44-byte header
    {
      fileStream.WriteByte(emptyByte);
    }

    return fileStream;
  }
  private void ConvertAndWrite(FileStream fileStream, AudioClip clip)
  {
    // One float per sample per channel, each in the range [-1.0f, 1.0f]
    float[] samples = new float[clip.samples * clip.channels];

    clip.GetData(samples, 0);

    Int16[] intData = new Int16[samples.Length];

    byte[] bytesData = new byte[samples.Length * 2];

    int rescaleFactor = 32767; // to convert float to Int16

    for (int i = 0; i < samples.Length; i++)
    {
      intData[i] = (short)(samples[i] * rescaleFactor);
      byte[] byteArr = BitConverter.GetBytes(intData[i]);
      byteArr.CopyTo(bytesData, i * 2);
    }
    fileStream.Write(bytesData, 0, bytesData.Length);
  }
  /// <summary>
  /// Get the audio data as 16-bit PCM bytes
  /// </summary>
  /// <returns>The clip data.</returns>
  public byte[] GetClipData()
  {
    if (curAudioSource.clip == null)
    {
      ShowInfoLog("No audio clip available!");
      return null;
    }

    float[] samples = new float[curAudioSource.clip.samples * curAudioSource.clip.channels];
    curAudioSource.clip.GetData(samples, 0);

    byte[] outData = new byte[samples.Length * 2];
    int rescaleFactor = 32767;

    for (int i = 0; i < samples.Length; i++)
    {
      short tempShort = (short)(samples[i] * rescaleFactor);
      byte[] tempData = System.BitConverter.GetBytes(tempShort);

      outData[i * 2] = tempData[0];
      outData[i * 2 + 1] = tempData[1];
    }
    if (outData.Length <= 0)
    {
      ShowInfoLog("Failed to get audio data!");
      return null;
    }
    return outData;
  }

  #endregion


  void OnGUI()
  {
    if (deviceLength == 0)
    {
      if (ShowGUIButton("Get microphone devices"))
      {
        GetMicrophoneDevice();
      }
    }
    else if (deviceLength > 0)
    {
      GUILayout.Label("Recording frequency:");
      frequency = GUILayout.TextField(frequency, GUILayout.Width(Screen.width / 5), GUILayout.Height(Screen.height / 20));
      GUILayout.BeginVertical();

      if (ShowGUIButton("Start recording"))
      {
        StartRecordAudio();
      }
      if (ShowGUIButton("Stop recording"))
      {
        StopRecordAudio();
      }
      if (ShowGUIButton("Play back recording"))
      {
        PlayRecordAudio();
      }
      if (ShowGUIButton("Get recorded data"))
      {
        PrintRecordData();
      }

      GUILayout.EndVertical();
    }
    GUILayout.Label(infoLog);
  }

  #region [Private way]

  /// <summary>
  /// Show a GUI button
  /// </summary>
  /// <returns><c>true</c> if the button was clicked, <c>false</c> otherwise.</returns>
  /// <param name="buttonName">Button name.</param>
  bool ShowGUIButton(string buttonName)
  {
    return GUILayout.Button(buttonName, GUILayout.Height(Screen.height / 20), GUILayout.Width(Screen.width / 5));
  }

  void ShowInfoLog(string info)
  {
    infoLog += info;
    infoLog += "\r\n";
  }

  #endregion
}
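
To verify the saved file, the WAV can be loaded back into an AudioClip at runtime. The sketch below is one way to do that with Unity's built-in UnityWebRequestMultimedia API; it assumes Unity 2020.1 or newer (for UnityWebRequest.Result) and reuses the dd.wav path written by MicrophoneManager above.

using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

// Illustrative verification script: load the WAV written by MicrophoneManager
// back into an AudioClip and play it.
public class WavPlaybackCheck : MonoBehaviour
{
  IEnumerator Start()
  {
    // Local files need a file:// URL for UnityWebRequest
    string url = "file://" + Application.persistentDataPath + "/dd.wav";
    using (UnityWebRequest req = UnityWebRequestMultimedia.GetAudioClip(url, AudioType.WAV))
    {
      yield return req.SendWebRequest();
      if (req.result == UnityWebRequest.Result.Success)
      {
        AudioClip clip = DownloadHandlerAudioClip.GetContent(req);
        AudioSource source = gameObject.AddComponent<AudioSource>();
        source.clip = clip;
        source.Play();
      }
      else
      {
        Debug.LogError("Failed to load WAV: " + req.error);
      }
    }
  }
}

Loading the file through Unity's audio importer also exercises the header that WriteHeader produced: a malformed header would make the import fail.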

That is all for this article. I hope it helps with your learning.