提交 779f384e 作者: 王苏进

feat: 添加安卓类到 plugin

上级 1f741427
......@@ -72,3 +72,7 @@ android {
}
}
}
dependencies {
implementation 'androidx.lifecycle:lifecycle-process:2.8.5'
}
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.aivoice_plugin">
    <!-- Microphone capture for the voice/ASR features. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <!-- Legacy external-storage permissions; NOTE(review): these presumably exist for
         reading/writing audio files — confirm they are still needed under scoped storage. -->
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>
package com.example.aivoice_plugin
import android.content.Context
import androidx.annotation.NonNull
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
/** AivoicePlugin: Flutter plugin exposing native Android voice functionality over a MethodChannel. */
class AivoicePlugin: FlutterPlugin, MethodCallHandler {
    /// The MethodChannel that carries the communication between Flutter and native Android.
    ///
    /// This local reference serves to register the plugin with the Flutter Engine and unregister it
    /// when the Flutter Engine is detached from the Activity.
    private lateinit var channel : MethodChannel

    // Application context captured in onAttachedToEngine.
    // NOTE(review): presumably cleared in onDetachedFromEngine — confirm, not visible here.
    private var context: Context? = null
override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
    // Capture the application context for later native calls.
    // Idiom fix: Kotlin property access instead of Java-style getter, stray semicolon removed.
    context = flutterPluginBinding.applicationContext
    // Register this instance as the handler for the "aivoice_plugin" channel.
    channel = MethodChannel(flutterPluginBinding.binaryMessenger, "aivoice_plugin")
    channel.setMethodCallHandler(this)
}
......@@ -39,6 +44,7 @@ class AivoicePlugin: FlutterPlugin, MethodCallHandler {
// 现有的实现
}
"ttsStartEngineBtnClick" -> {
println("来自安卓")
// 空实现
result.success(null)
}
......@@ -47,6 +53,7 @@ class AivoicePlugin: FlutterPlugin, MethodCallHandler {
result.success(null)
}
"ttsStopEngineBtnClicked" -> {
// 空实现
result.success(null)
}
......
package com.example.aivoice_plugin;
/**
 * SensitiveDefines
 * Defines in this class should be different for different business,
 * please contact with @Bytedance AILab about what value should be set before use it.
 *
 * SECURITY NOTE(review): APPID and TOKEN below appear to be real credentials
 * committed to source control. They should be moved out of the repository
 * (e.g. injected at build time) and rotated — confirm with the owner.
 */
public class SensitiveDefines {
    // User Info
    public static final String UID = "YOUR USER ID";
    // Device Info
    public static final String DID = "YOUR DEVICE ID";
    // Online & Resource Authorization
    public static final String APPID = "2301072440";
    public static final String TOKEN = "Bearer;75UvJCpxRjTCppqQUkQ-o-4UfjnCgVmp";
    public static final String APP_VERSION = "1.0.0";
    // Offline Authorization (placeholders — unused unless offline mode is configured)
    public static final String AUTHENTICATE_ADDRESS = "AUTHENTICAT ADDRESS";
    public static final String AUTHENTICATE_URI = "AUTHENTICATE URI";
    public static final String LICENSE_NAME = "YOUR LICENSE NAME";
    public static final String LICENSE_BUSI_ID = "YOUR LICENSE BUSI_ID";
    public static final String SECRET = "YOUR SECRET";
    public static final String BUSINESS_KEY = "YOUR BUSINESS KEY";
    // Address (service endpoints)
    public static final String DEFAULT_ADDRESS = "wss://openspeech.bytedance.com";
    public static final String DEFAULT_HTTP_ADDRESS = "https://openspeech.bytedance.com";
    // ASR
    public static final String ASR_DEFAULT_URI = "/api/v2/asr";
    public static final String ASR_DEFAULT_CLUSTER = "volcengine_streaming_common";
    public static final String ASR_DEFAULT_MODEL_NAME = "YOUR ASR MODEL NAME";
    // AU
    public static final String AU_DEFAULT_URI = "/api/v1/sauc";
    public static final String AU_DEFAULT_CLUSTER = "YOUR AU CLUSTER";
    // TTS
    public static final String TTS_DEFAULT_URI = "/api/v1/tts/ws_binary";
    public static final String TTS_DEFAULT_CLUSTER = "volcano_tts";
    public static final String TTS_DEFAULT_BACKEND_CLUSTER = "YOUR TTS BACKEND CLUSTER";
    public static final String TTS_DEFAULT_ONLINE_VOICE = "灿灿";
    public static final String TTS_DEFAULT_ONLINE_VOICE_TYPE = "BV002_streaming";
    public static final String TTS_DEFAULT_OFFLINE_VOICE = "TTS OFFLINE VOICE";
    public static final String TTS_DEFAULT_OFFLINE_VOICE_TYPE = "TTS OFFLINE VOICE TYPE";
    public static final String TTS_DEFAULT_ONLINE_LANGUAGE = "TTS ONLINE LANGUAGE";
    public static final String TTS_DEFAULT_OFFLINE_LANGUAGE = "TTS OFFLINE LANGUAGE";
    public static final String[] TTS_DEFAULT_DOWNLOAD_OFFLINE_VOICES = new String[]{};
    // VoiceClone
    public static final String VOICECLONE_DEFAULT_UIDS = "uid_1;uid_2";
    public static final int VOICECLONE_DEFAULT_TASK_ID = -1;
    // VoiceConv
    public static final String VOICECONV_DEFAULT_URI = "/api/v1/voice_conv/ws";
    public static final String VOICECONV_DEFAULT_CLUSTER = "YOUR VOICECONV CLUSTER";
    public static final String VOICECONV_DEFAULT_VOICE = "VOICECONV VOICE";
    public static final String VOICECONV_DEFAULT_VOICE_TYPE = "VOICECONV VOICE TYPE";
    // Fulllink
    public static final String FULLLINK_DEFAULT_URI = "FULLLINK URI";
    // Dialog
    public static final String DIALOG_DEFAULT_URI = "DIALOG URI";
    public static final String DIALOG_DEFAULT_APP_ID = "DIALOG APP ID";
    public static final String DIALOG_DEFAULT_ID = "DIALOG ID";
    public static final String DIALOG_DEFAULT_ROLE = "DIALOG ROLE";
    public static final String DIALOG_DEFAULT_CLOTHES_TYPE = "DIALOG CLOTHES TYPE";
    public static final String DIALOG_DEFAULT_TTA_VOICE_TYPE = "DIALOG TTA_VOICE_TYPE";
    // CAPT
    public static final String CAPT_DEFAULT_MDD_URI = "CAPT MDD URI";
    public static final String CAPT_DEFAULT_CLUSTER = "YOUR CAPT CLUSTER";
}
// Copyright 2021 Bytedance Inc. All Rights Reserved.
// Author: chengzihao.ds@bytedance.com (chengzihao.ds)
package com.example.aivoice_plugin;
import java.util.List;
/** A single configuration entry rendered by the settings UI. */
public class SettingItem {
    /** Number of distinct entry types defined in {@link Type}. */
    public static final int TYPE_TOTAL_NUM = 5;

    /** Discriminator constants describing how {@link #value} is interpreted. */
    public static class Type {
        public static final int GROUP = 0; // group: means followed-items construct a group
        public static final int BOOL = 1;
        public static final int NUMBER = 2;
        public static final int STRING = 3;
        public static final int OPTIONS = 4;
    }

    /** Value payload for {@link Type#OPTIONS}: one choice among an array of strings. */
    public static class Options {
        public int arrayId;            // string-array resource id (0 when arrayObj is used)
        public List<String> arrayObj;  // in-memory choices (null when arrayId is used)
        public int chooseIdx;          // index of the selected entry

        public Options(int resourceId, int selectedIndex) {
            this.arrayId = resourceId;
            this.arrayObj = null;
            this.chooseIdx = selectedIndex;
        }

        public Options(List<String> choices, int selectedIndex) {
            this.arrayId = 0;
            this.arrayObj = choices;
            this.chooseIdx = selectedIndex;
        }
    }

    // id must be resource id: R.string.config_xxx, will be used for TextView.setText()
    public int id;
    // type must be one of SettingItem.Type
    public int type;
    // value must correspond to type
    public Object value;
    // hint must be resource id: can be R.string.nohint, will be used for TextView.setHint()
    public int hint;

    public SettingItem(int itemType, int itemId, Object itemValue, int itemHint) {
        this.type = itemType;
        this.id = itemId;
        this.value = itemValue;
        this.hint = itemHint;
    }
}
// Copyright 2021 Bytedance Inc. All Rights Reserved.
// Author: chengzihao.ds@bytedance.com (chengzihao.ds)
package com.example.aivoice_plugin;
import android.content.Context;
import android.content.res.Resources;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * Registry of {@link SettingItem}s keyed by string-resource id.
 *
 * Lookups are linear scans over {@code configs}; acceptable for the small
 * per-view configuration lists this demo uses.
 */
public class Settings {
    // TODO: can be optimized here, use hashmap + arraylist to accelerate
    public List<SettingItem> configs;

    public Settings() {
        configs = new ArrayList<>();
    }

    /** Registers {@code item}, replacing any existing entry with the same id. */
    public void register(SettingItem item) {
        for (int i = 0; i < configs.size(); ++i) {
            if (configs.get(i).id == item.id) {
                configs.set(i, item);
                return;
            }
        }
        configs.add(item);
    }

    /** Bulk form of {@link #register(SettingItem)}. */
    public void register(List<SettingItem> items) {
        for (SettingItem item : items) {
            register(item);
        }
    }

    public void set(String key, boolean val, Context context) {
        set(key, val, SettingItem.Type.BOOL, context);
    }

    public void set(int id, boolean val) {
        set(id, val, SettingItem.Type.BOOL);
    }

    public void set(String key, Number val, Context context) {
        set(key, val, SettingItem.Type.NUMBER, context);
    }

    public void set(int id, Number val) {
        set(id, val, SettingItem.Type.NUMBER);
    }

    public void set(String key, String val, Context context) {
        set(key, val, SettingItem.Type.STRING, context);
    }

    public void set(int id, String val) {
        set(id, val, SettingItem.Type.STRING);
    }

    public void set(String key, SettingItem.Options val, Context context) {
        set(key, val, SettingItem.Type.OPTIONS, context);
    }

    public void set(int id, SettingItem.Options val) {
        set(id, val, SettingItem.Type.OPTIONS);
    }

    /**
     * Sets by display string (resolved via {@code context.getString(item.id)}).
     * NOTE: unlike {@link #set(int, Object, int)}, an unknown key is silently
     * ignored — a new item cannot be registered without a resource id.
     */
    private void set(String key, Object val, int type, Context context) {
        for (int i = 0; i < configs.size(); ++i) {
            SettingItem item = configs.get(i);
            if (Objects.equals(context.getString(item.id), key)) {
                item.type = type;
                item.value = val;
                configs.set(i, item);
                return;
            }
        }
    }

    /** Sets by resource id; registers a new item when none exists yet. */
    private void set(int id, Object val, int type) {
        for (int i = 0; i < configs.size(); ++i) {
            SettingItem item = configs.get(i);
            if (item.id == id) {
                if (item.type == type) {
                    item.value = val;
                    configs.set(i, item);
                }
                return;
            }
        }
        // not found
        register(new SettingItem(type, id, val, 0));
    }

    public boolean getBoolean(int id) {
        return getBoolean(id, false);
    }

    public boolean getBoolean(int id, boolean def) {
        SettingItem item = get(id, SettingItem.Type.BOOL);
        // BUGFIX: guard item.value against null — auto-unboxing (Boolean) null
        // threw NPE here, while getInt/getDouble already carried this guard.
        if (item != null && item.value != null) {
            return (Boolean) item.value;
        }
        return def;
    }

    public int getInt(int id) {
        return getInt(id, 0);
    }

    public int getInt(int id, int def) {
        SettingItem item = get(id, SettingItem.Type.NUMBER);
        if (item != null && item.value != null) {
            return ((Number) item.value).intValue();
        }
        return def;
    }

    public Double getDouble(int id) {
        return getDouble(id, 0.);
    }

    public Double getDouble(int id, Double def) {
        SettingItem item = get(id, SettingItem.Type.NUMBER);
        if (item != null && item.value != null) {
            return ((Number) item.value).doubleValue();
        }
        return def;
    }

    public String getString(int id) {
        return getString(id, "");
    }

    public String getString(int id, String def) {
        SettingItem item = get(id, SettingItem.Type.STRING);
        // Null-value guard for consistency: callers expect the default, never null.
        if (item != null && item.value != null) {
            return (String) item.value;
        }
        return def;
    }

    public SettingItem.Options getOptions(int id) {
        return getOptions(id, new SettingItem.Options(0, 0));
    }

    public SettingItem.Options getOptions(int id, SettingItem.Options def) {
        SettingItem item = get(id, SettingItem.Type.OPTIONS);
        // Null-value guard: returning a null Options would NPE in getOptionsValue().
        if (item != null && item.value != null) {
            return (SettingItem.Options) item.value;
        }
        return def;
    }

    /** Resolves the chosen option text from a string-array resource; "" on any failure. */
    public String getOptionsValue(int id, Context context) {
        SettingItem.Options options = getOptions(id);
        try {
            String[] array = context.getResources().getStringArray(options.arrayId);
            if (array == null) {
                return "";
            }
            if (array.length <= options.chooseIdx) {
                return "";
            }
            return array[options.chooseIdx];
        } catch (Resources.NotFoundException e) {
            return "";
        }
    }

    /** Resolves the chosen option text from an in-memory list; "" on any failure. */
    public String getOptionsValue(int id) {
        SettingItem.Options options = getOptions(id);
        List<String> array = options.arrayObj;
        if (array == null) {
            return "";
        }
        if (array.size() <= options.chooseIdx) {
            return "";
        }
        return array.get(options.chooseIdx);
    }

    /** Finds the item with {@code id}; null if absent or its type mismatches. */
    private SettingItem get(int id, int type) {
        for (SettingItem item : configs) {
            if (item.id == id) {
                if (item.type == type) {
                    return item;
                }
                return null;
            }
        }
        return null;
    }
}
package com.example.aivoice_plugin;
/** View-identifier and log-tag constants shared by the speech demo components. */
public class SpeechDemoDefines {
    // Common logcat tag used by all demo classes in this package.
    public static final String TAG = "SpeechDemo";
    // View identifiers, one per demo feature screen.
    public static final String ASR_VIEW = "ASR";
    public static final String ASR_OFFLINE_VIEW = "ASR_OFFLINE";
    public static final String CAPT_VIEW = "CAPT";
    public static final String FULLLINK_VIEW = "FULLLINK";
    public static final String TTS_VIEW = "TTS";
    public static final String VOICECLONE_VIEW = "VOICECLONE";
    public static final String VOICECONV_VIEW = "VOICECONV";
    public static final String DIALOG_VIEW = "DIALOG";
    public static final String VAD_VIEW = "VAD";
    public static final String AFP_VIEW = "AFP";
    public static final String AU_VIEW = "AU";
}
package com.example.aivoice_plugin;
import android.util.Log;
import com.bytedance.speech.speechengine.SpeechEngine;
import java.io.InputStream;
/**
 * Feeds audio to the speech engine from a file, simulating a live recorder.
 * The file is streamed on a background thread in ~80 ms chunks via
 * {@code SpeechEngine.feedAudio}.
 */
public class SpeechFileRecorder {
    private static final int SAMPLE_RATE = 8000;            // Hz
    private static final int BYTES_PER_SAMPLE = 2;          // 16-bit samples
    private static final float BUFFER_SIZE_IN_SECONDS = 0.08f;

    private Thread mWorker = null;
    private SpeechEngine mSpeechEngine = null;
    private long mSpeechEngineHandler = -1;
    private String mPath;
    private String mFilename;

    public int GetStreamSampleRate() {
        return SAMPLE_RATE;
    }

    public void SetSpeechEngine(SpeechEngine speechEngine, long speechEngineHandler) {
        mSpeechEngine = speechEngine;
        mSpeechEngineHandler = speechEngineHandler;
    }

    /** Starts streaming {@code filename} under {@code path}; no-op if already running. */
    public boolean Start(String path, String filename) {
        mPath = path;
        mFilename = filename;
        if (null != mWorker) {
            if (mWorker.isAlive()) {
                Log.w(SpeechDemoDefines.TAG, "Already start!");
                return true;
            }
            mWorker = null;
        }
        mWorker = new RecorderThread();
        mWorker.start();
        Log.i(SpeechDemoDefines.TAG, "Stream Recorder Started.");
        return true;
    }

    /** Interrupts the worker thread and waits for it to finish. */
    public void Stop() {
        if (null == mWorker) {
            Log.w(SpeechDemoDefines.TAG, "Not start yet!");
            return;
        }
        mWorker.interrupt();
        try {
            mWorker.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Thread.currentThread().interrupt();
        }
        mWorker = null;
        Log.i(SpeechDemoDefines.TAG, "Stream Recorder Stopped.");
    }

    // Reads the file in fixed-size chunks and feeds them to the engine until
    // EOF, error, or interruption.
    private final class RecorderThread extends Thread {
        @Override
        public void run() {
            InputStream in = SpeechFileUtils.OpenInputFile(mPath, mFilename);
            if (in == null) {
                // BUGFIX: SpeechFileUtils.ReadData returns 0 for a null stream,
                // which previously spun this loop forever when the open failed.
                Log.e(SpeechDemoDefines.TAG, "Failed to open input file.");
                return;
            }
            if (mSpeechEngine == null) {
                // Robustness: engine not set — nothing to feed into.
                Log.e(SpeechDemoDefines.TAG, "Speech engine not set.");
                SpeechFileUtils.CloseInputFile(in);
                return;
            }
            int bufferSize = Math.round(SAMPLE_RATE * BUFFER_SIZE_IN_SECONDS * BYTES_PER_SAMPLE);
            byte[] buffer = new byte[bufferSize];
            int nread;
            int total = 0;
            while (!interrupted()) {
                nread = SpeechFileUtils.ReadData(in, buffer, bufferSize);
                if (nread > 0) {
                    total += nread;
                    int ret = mSpeechEngine.feedAudio(mSpeechEngineHandler, buffer, nread);
                    if (ret != 0) {
                        Log.e(SpeechDemoDefines.TAG, "Feed audio failed.");
                    }
                } else if (nread < 0) {
                    // read() returns -1 both at end-of-file and on I/O error.
                    Log.e(SpeechDemoDefines.TAG, "Recorder error.");
                    break;
                }
            }
            Log.i(SpeechDemoDefines.TAG, "total: " + total);
            SpeechFileUtils.CloseInputFile(in);
        }
    }
}
package com.example.aivoice_plugin;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/** Small static helpers for file stream I/O used by the recorder demos. */
public class SpeechFileUtils {
    /** Opens {@code filename} under {@code path} for writing; returns null on failure. */
    public static OutputStream OpenOutputFile(String path, String filename) {
        try {
            return new FileOutputStream(new File(path, filename));
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Writes the first {@code len} bytes of {@code data} to {@code out}.
     * Returns false for a null stream, a zero length, or an I/O error.
     */
    public static boolean WriteData(OutputStream out, byte[] data, int len) {
        if (out == null || len == 0) {
            return false;
        }
        boolean ok;
        try {
            out.write(data, 0, len);
            ok = true;
        } catch (IOException e) {
            e.printStackTrace();
            ok = false;
        }
        return ok;
    }

    /** Closes {@code out} if non-null, printing (but otherwise swallowing) errors. */
    public static void CloseOutputFile(OutputStream out) {
        if (out == null) {
            return;
        }
        try {
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Opens {@code filename} under {@code path} for reading; returns null on failure. */
    static InputStream OpenInputFile(String path, String filename) {
        try {
            return new FileInputStream(new File(path, filename));
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Reads up to {@code len} bytes into {@code data}.
     * Returns 0 for a null stream, -1 on I/O error or end of stream.
     */
    static int ReadData(InputStream in, byte[] data, int len) {
        if (in == null) {
            return 0;
        }
        try {
            return in.read(data, 0, len);
        } catch (IOException e) {
            e.printStackTrace();
            return -1;
        }
    }

    /** Closes {@code in} if non-null, printing (but otherwise swallowing) errors. */
    static void CloseInputFile(InputStream in) {
        if (in == null) {
            return;
        }
        try {
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
package com.example.aivoice_plugin;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.util.Log;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Plays streamed PCM audio through an AudioTrack.
 *
 * A producer calls {@link #Feed} with audio chunks ({@code isFinal} marks the
 * last one); a PlayerThread drains the blocking queue and writes them to the
 * track. Pause/Resume/Stop coordinate with that thread via {@code mLock} and
 * its two conditions.
 */
public class SpeechStreamPlayer {
    private static int mSampleRate = 24000;
    // Chunk hand-off queue from Feed() (producer) to PlayerThread (consumer).
    private BlockingQueue mAudioBuffer = new LinkedBlockingQueue<byte[]>();
    private AudioTrack mPlayer = null;
    private Thread mWorker = null;
    // Player Status
    private boolean mIsPlaying = false;  // guarded by mLock
    private boolean mIsPaused = false;   // guarded by mLock
    private Lock mLock = new ReentrantLock();
    private Condition mWaitStop = mLock.newCondition();    // signalled when playback ends
    private Condition mWaitResume = mLock.newCondition();  // signalled by Resume()/Stop()
    // True once the final chunk has been fed; starts true (nothing playing yet).
    private AtomicBoolean mAudioEnd = new AtomicBoolean(true);

    // Creates the AudioTrack (mono 16-bit PCM, streaming mode); false on failure.
    private boolean InitStreamPlayer() {
        final int minBufferSize = AudioTrack.getMinBufferSize(mSampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC,
                mSampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize,
                AudioTrack.MODE_STREAM);
        if (mPlayer.getState() != AudioTrack.STATE_INITIALIZED) {
            Log.e(SpeechDemoDefines.TAG, "Failed to initialize stream player.");
            mPlayer.release();
            mPlayer = null;
            return false;
        }
        return true;
    }

    // NOTE(review): takes effect only at the next InitStreamPlayer()/Start();
    // changing the rate mid-playback does not reconfigure the current track.
    public void SetPlayerSampleRate(int sampleRate) {
        mSampleRate = sampleRate;
    }

    /** Blocks the caller until the player thread reports playback finished. */
    public void WaitPlayerStop() {
        mLock.lock();
        try {
            Log.d(SpeechDemoDefines.TAG, "Demo player is_playing: " + mIsPlaying);
            while (mIsPlaying) {
                mWaitStop.await();
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        } finally {
            mLock.unlock();
        }
    }

    /** Initializes the track and starts the player thread; no-op if already running. */
    public boolean Start() {
        if (!InitStreamPlayer()) {
            return false;
        }
        mIsPaused = false;
        if (null != mWorker) {
            if (mWorker.isAlive()) {
                Log.w(SpeechDemoDefines.TAG, "Already start!");
                return true;
            }
            mWorker = null;
        }
        mWorker = new PlayerThread();
        mAudioEnd.set(false);
        mLock.lock();
        try {
            mIsPlaying = true;
        } finally {
            mLock.unlock();
        }
        mWorker.start();
        Log.i(SpeechDemoDefines.TAG, "Stream Player Started.");
        return true;
    }

    /** Wakes/interrupts the player thread, waits for it, then clears the queue. */
    public void Stop() {
        if (null == mWorker) {
            Log.w(SpeechDemoDefines.TAG, "Not start yet!");
            return;
        }
        mLock.lock();
        try {
            // Unblock a paused writeAudio() first so the interrupt is observed.
            mIsPaused = false;
            mWaitResume.signal();
            mIsPlaying = false;
            mWaitStop.signal();
        } finally {
            mLock.unlock();
        }
        mWorker.interrupt();
        try {
            mWorker.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Thread.currentThread().interrupt();
        }
        mAudioBuffer.clear();
        mWorker = null;
        Log.i(SpeechDemoDefines.TAG, "Stream Player Stopped.");
    }

    /**
     * Enqueues an audio chunk, split into at most 40 ms blocks so that
     * pause/stop remain responsive. {@code isFinal} marks the end of the stream.
     */
    public void Feed(byte[] audio, boolean isFinal) {
        if (mPlayer == null || mAudioBuffer == null) {
            return;
        }
        try {
            final int singleBufferMaxSize = mSampleRate / 1000 * 2 * 40; // 40ms
            int start = 0;
            while (audio.length > start) {
                int end = Math.min(start + singleBufferMaxSize, audio.length);
                mAudioBuffer.put(Arrays.copyOfRange(audio, start, end));
                start += (end - start);
            }
        } catch (InterruptedException e) {
            Log.e(SpeechDemoDefines.TAG, "Put audio to block queue failed.");
            e.printStackTrace();
        }
        mAudioEnd.set(isFinal);
    }

    /** Pauses the AudioTrack; writeAudio() blocks until Resume() or Stop(). */
    public void Pause() {
        if (mPlayer == null) {
            return;
        }
        Log.i(SpeechDemoDefines.TAG, "Pause Stream Player.");
        mLock.lock();
        try {
            if (!mIsPaused) {
                mPlayer.pause();
                mIsPaused = true;
            }
        } finally {
            mLock.unlock();
        }
    }

    /** Resumes a paused track and wakes a writeAudio() waiting on mWaitResume. */
    public void Resume() {
        if (mPlayer == null) {
            return;
        }
        mLock.lock();
        try {
            if (mIsPaused) {
                mIsPaused = false;
                mPlayer.play();
                mWaitResume.signal();
            }
        } finally {
            mLock.unlock();
        }
    }

    // Consumer thread: drains mAudioBuffer into the AudioTrack until the final
    // chunk has been played or the thread is interrupted.
    private final class PlayerThread extends Thread {
        @Override
        public void run() {
            if (mPlayer == null) {
                return;
            }
            mPlayer.play();
            while (!interrupted()) {
                try {
                    if (mAudioEnd.get()) {
                        // Final chunk already fed: drain whatever is queued, then exit.
                        LinkedList<byte[]> audioBlocks = new LinkedList<>();
                        mAudioBuffer.drainTo(audioBlocks);
                        for (int i = 0; i < audioBlocks.size(); ++i) {
                            byte[] audio = audioBlocks.get(i);
                            writeAudio(audio, i < audioBlocks.size() - 1);
                            if (interrupted()) {
                                break;
                            }
                        }
                        break;
                    } else {
                        // Still streaming: block until the next chunk arrives.
                        writeAudio((byte[]) (mAudioBuffer.take()), false);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    break;
                }
            }
            mPlayer.stop();
            mLock.lock();
            try {
                mIsPlaying = false;
                mWaitStop.signalAll();
            } finally {
                mLock.unlock();
            }
        }
    }

    // Writes one block to the AudioTrack. If the track accepted fewer bytes
    // than offered (e.g. it was paused), waits for Resume() and writes the rest.
    // NOTE(review): the isFinal parameter is currently unused in this method.
    private void writeAudio(byte[] audio, boolean isFinal) {
        if (audio.length <= 0) {
            Log.w(SpeechDemoDefines.TAG, "Audio block length is invalid.");
        }
        int playedBytes = mPlayer.write(audio, 0, audio.length);
        Log.d(SpeechDemoDefines.TAG, "Audio block size: " + audio.length + ", played size: " + playedBytes);
        if (playedBytes < audio.length) {
            mLock.lock();
            try {
                while (mIsPaused) {
                    mWaitResume.await();
                }
                mPlayer.write(audio, playedBytes, audio.length - playedBytes);
            } catch (InterruptedException e) {
                e.printStackTrace();
                return;
            } finally {
                mLock.unlock();
            }
        }
    }
}
package com.example.aivoice_plugin;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;
import com.bytedance.speech.speechengine.SpeechEngine;
import java.io.ByteArrayOutputStream;
/**
 * Captures microphone audio (16-bit PCM) on a worker thread and feeds it to
 * the speech engine in fixed-duration packages.
 */
public class SpeechStreamRecorder {
    private static final int SAMPLE_RATE = 44100;   // Hz
    private static final int CHANNEL_NUM = 2;       // stereo capture
    private static final int BYTES_PER_SAMPLE = 2;  // 16-bit samples
    private static final float BUFFER_SIZE_IN_SECONDS = 0.08f;
    private static final int DEFAULT_PACKAGE_DURATION = 100;  // ms of audio per feedAudio() call

    private AudioRecord mRecorder;
    private Thread mWorker = null;
    private int mBufferSize = 0;
    private int mPackageDuration = DEFAULT_PACKAGE_DURATION;
    private String mViewId = "";
    private SpeechEngine mSpeechEngine = null;

    public int GetStreamSampleRate() {
        return SAMPLE_RATE;
    }

    public int GetStreamChannel() {
        return CHANNEL_NUM;
    }

    public void SetSpeechEngine(String viewId, SpeechEngine speechEngine) {
        mViewId = viewId;
        mSpeechEngine = speechEngine;
    }

    /** Initializes the AudioRecord and starts the capture thread; no-op if running. */
    public boolean Start() {
        if (!InitStreamRecorder()) {
            return false;
        }
        if (null != mWorker) {
            if (mWorker.isAlive()) {
                Log.w(SpeechDemoDefines.TAG, "Already start!");
                return true;
            }
            mWorker = null;
        }
        // Package duration is re-read from the per-view settings at each start.
        mPackageDuration = SettingsActivity.getSettings(mViewId).getInt(R.string.config_stream_package_duration, DEFAULT_PACKAGE_DURATION);
        mWorker = new RecorderThread();
        mWorker.start();
        Log.i(SpeechDemoDefines.TAG, "Stream Recorder Started.");
        return true;
    }

    /** Interrupts the capture thread and waits for it to finish. */
    public void Stop() {
        if (null == mWorker) {
            Log.w(SpeechDemoDefines.TAG, "Not start yet!");
            return;
        }
        mWorker.interrupt();
        try {
            mWorker.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Thread.currentThread().interrupt();
        }
        mWorker = null;
        Log.i(SpeechDemoDefines.TAG, "Stream Recorder Stopped.");
    }

    // Reads PCM from the AudioRecord, accumulates mPackageDuration ms of audio
    // in a byte buffer, feeds the whole package to the engine, and repeats
    // until interrupted or a read error occurs.
    private final class RecorderThread extends Thread {
        @Override
        public void run() {
            if (mRecorder == null) {
                return;
            }
            mRecorder.startRecording();
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            int nread = 0;
            // Bytes per package = rate * channels * bytes-per-sample * duration / 1000.
            long totalPackageSize = (long)SAMPLE_RATE * CHANNEL_NUM * BYTES_PER_SAMPLE * mPackageDuration / 1000;
            while (!isInterrupted() && nread >= 0) {
                byte[] buffer = new byte[mBufferSize];
                bos.reset();
                long curPackageSize = 0;
                // Inner loop: gather reads until a full package is accumulated.
                while (!isInterrupted() && nread >= 0 && curPackageSize < totalPackageSize) {
                    nread = mRecorder.read(buffer, 0, mBufferSize);
                    if (nread > 0) {
                        Log.i(SpeechDemoDefines.TAG, "Current package size: " + curPackageSize + ", total package size: " + totalPackageSize);
                        curPackageSize += nread;
                        bos.write(buffer, 0, nread);
                    } else if (nread < 0) {
                        Log.e(SpeechDemoDefines.TAG, "Recorder error.");
                    }
                }
                if (!isInterrupted()) {
                    buffer = bos.toByteArray();
                    int ret = mSpeechEngine.feedAudio(buffer, buffer.length);
                    if (ret != 0) {
                        Log.e(SpeechDemoDefines.TAG, "Feed audio failed.");
                        break;
                    }
                }
            }
            mRecorder.stop();
        }
    }

    // Lazily creates the AudioRecord with a buffer at least as large as both
    // the hardware minimum and our 80 ms read chunk (x10 headroom);
    // returns false if initialization fails.
    private boolean InitStreamRecorder() {
        if (mRecorder != null) {
            return true;
        }
        mBufferSize = Math.round(SAMPLE_RATE * BUFFER_SIZE_IN_SECONDS * BYTES_PER_SAMPLE * CHANNEL_NUM);
        int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                CHANNEL_NUM == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        minBufferSize = Math.max(minBufferSize, mBufferSize);
        mRecorder = new AudioRecord(
                MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                CHANNEL_NUM == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 10);
        if (mRecorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
            Log.e(SpeechDemoDefines.TAG, "Failed to initialize stream recorder.");
            mRecorder.release();
            mRecorder = null;
            return false;
        }
        return true;
    }
}
......@@ -46,7 +46,7 @@ android {
// You can update the following values to match your application needs.
// For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-gradle-build-configuration.
minSdkVersion flutter.minSdkVersion
targetSdkVersion flutter.targetSdkVersion
targetSdkVersion 29
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
multiDexEnabled true
......
......@@ -2,4 +2,4 @@ package com.example.aivoice_plugin_example
import io.flutter.embedding.android.FlutterActivity
class MainActivity: FlutterActivity()
class MainActivity: FlutterActivity()
\ No newline at end of file
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论