提交 e0706898 作者: edy

feat: 安卓增加相关方法

上级 7bedddd4
......@@ -74,5 +74,5 @@ android {
}
dependencies {
implementation 'androidx.lifecycle:lifecycle-process:2.8.5'
// implementation 'androidx.lifecycle:lifecycle-process:2.8.5'
}
package com.example.aivoice_plugin
import android.app.Application
import android.content.Context
import androidx.annotation.NonNull
import com.bytedance.speech.speechengine.SpeechEngineGenerator
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.EventChannel.EventSink
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
......@@ -10,7 +14,7 @@ import io.flutter.plugin.common.MethodChannel.Result
/** AivoicePlugin */
class AivoicePlugin: FlutterPlugin, MethodCallHandler {
class AivoicePlugin: FlutterPlugin, MethodCallHandler, EventChannel.StreamHandler {
/// The MethodChannel that will the communication between Flutter and native Android
......@@ -20,28 +24,51 @@ class AivoicePlugin: FlutterPlugin, MethodCallHandler {
// MethodChannel carrying Flutter -> Android method calls on "aivoice_plugin".
private lateinit var channel : MethodChannel
// Application context captured in onAttachedToEngine; null until the plugin is attached.
private var context: Context? = null
// Configuration map received from the "initEngine" method call (string key/value pairs).
private var asrConfig : Map<String, String>? = null
// NOTE(review): never assigned (always null) and shadowed by a local `eventChannel`
// in onAttachedToEngine — looks like dead state; confirm and remove.
private val eventChannel: EventChannel? = null
// Active event sink while Flutter listens on "aivoice_plugin/events"; null otherwise.
private var events: EventSink? = null
// Lazily-created ASR wrapper; recognition results are forwarded to Flutter
// through sendMessageToFlutter().
// NOTE(review): `context` may still be null on first access — confirm AsrCenter
// tolerates a null Context.
private val asrCenter: AsrCenter by lazy {
AsrCenter(context, AsrCenter.AsrCenterCallback {
sendMessageToFlutter(it)
})
}
/**
 * Plugin attach hook: captures the application context and registers both the
 * MethodChannel ("aivoice_plugin") and the EventChannel ("aivoice_plugin/events").
 *
 * Fix: the diff residue kept BOTH the old `getApplicationContext();` line and the
 * new `applicationContext;` line back-to-back, assigning `context` twice; only the
 * idiomatic property access is kept, and stray semicolons are dropped.
 */
override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
    context = flutterPluginBinding.applicationContext
    channel = MethodChannel(flutterPluginBinding.binaryMessenger, "aivoice_plugin")
    channel.setMethodCallHandler(this)
    // Local val intentionally used; this plugin instance is the stream handler.
    val eventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "aivoice_plugin/events")
    eventChannel.setStreamHandler(this)
}
override fun onMethodCall(@NonNull call: MethodCall, @NonNull result: Result) {
when (call.method) {
"initEngine" -> {
println("来自安卓222")
asrConfig = call.arguments as Map<String, String>?
// 现有的实现
asrCenter.initEngineBtnClicked()
}
"stopEngine" -> {
// 现有的实现
asrCenter.stopEngineBtnClicked()
}
"uninitEngine" -> {
// 现有的实现
asrCenter.uninitEngineBtnClicked()
}
"startOrStopEngine" -> {
// 现有的实现
asrCenter.startEngineBtnClicked()
}
"prepareEnvironment" -> {
// 现有的实现
val re = SpeechEngineGenerator.PrepareEnvironment(context, context as Application?)
println("prepare:===>$re")
}
"ttsStartEngineBtnClick" -> {
println("来自安卓")
......@@ -87,7 +114,23 @@ class AivoicePlugin: FlutterPlugin, MethodCallHandler {
}
}
/**
 * Plugin detach hook: unregisters the method-call handler so no further calls
 * are dispatched to this instance.
 *
 * Fix: the diff residue kept two adjacent signatures (old `@NonNull` variant and
 * the new one), which cannot compile; only the newer signature is kept.
 * NOTE(review): the event channel's stream handler is not cleared here — confirm
 * whether `events` should also be nulled on detach.
 */
override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
    channel.setMethodCallHandler(null)
}
/**
 * EventChannel.StreamHandler: Flutter started listening on "aivoice_plugin/events".
 * Stores the sink so ASR results can later be pushed via sendMessageToFlutter().
 */
override fun onListen(arguments: Any?, events: EventSink) {
this.events = events
// You could emit an initial event here, e.g.:
// sendMessageToFlutter("Initial message from Android")
}
/**
 * EventChannel.StreamHandler: Flutter cancelled its subscription.
 * Dropping the sink makes sendMessageToFlutter() a silent no-op.
 */
override fun onCancel(arguments: Any?) {
    events = null
}
/**
 * Pushes an ASR result map to Flutter over the event channel.
 * Silently does nothing when Flutter is not currently listening (sink is null).
 *
 * Fix: the original wrapped a safe call in a redundant `if (events != null)`
 * check; the safe call alone covers both cases (and avoids the check-then-act
 * gap if `events` is cleared concurrently by onCancel).
 */
fun sendMessageToFlutter(message: Map<*, *>?) {
    events?.success(message)
}
}
......@@ -6,19 +6,17 @@ package com.example.aivoice_plugin;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.Bundle;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.MotionEvent;
import android.widget.Button;
import android.widget.TextView;
import android.content.ContextWrapper;
//import androidx.lifecycle.LifecycleObserver;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.ProcessLifecycleOwner;
//import androidx.lifecycle.ProcessLifecycleOwner;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import java.io.File;
......@@ -30,14 +28,21 @@ import com.bytedance.speech.speechengine.SpeechEngineGenerator;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class AsrCenter implements SpeechEngine.SpeechListener {
public class AsrActivity implements SpeechEngine.SpeechListener {
// Callback interface through which AsrCenter pushes recognition results
// (a map with "text" and "isFinal" entries) out to its owner, e.g. the Flutter plugin.
// NOTE(review): "onRevive" is most likely a typo of "onReceive" — renaming would
// touch all callers, so it is only flagged here.
public interface AsrCenterCallback {
void onRevive(Map<String, Object> message);
}
private Context _context;
private Context _context = null;
// Record
private Handler recordHandler = null;
......@@ -54,21 +59,24 @@ public class AsrActivity implements SpeechEngine.SpeechListener {
private SpeechEngine mSpeechEngine = null;
private boolean mEngineStarted = false;
public AsrCenterCallback _callback;
// Permissions
private static final List<String> ASR_PERMISSIONS = Collections.singletonList(
Manifest.permission.RECORD_AUDIO
);
// StreamRecorder
private SpeechStreamRecorder mStreamRecorder;
private final SpeechStreamRecorder mStreamRecorder;
// Statistics
private long mFinishTalkingTimestamp = -1;
@SuppressLint({"ClickableViewAccessibility", "InflateParams", "HardwareIds", "UseCompatLoadingForDrawables"})
public AsrActivity(Context context) {
public AsrCenter(Context context,AsrCenterCallback callback ) {
_context = context;
_callback = callback;
Log.i(SpeechDemoDefines.TAG, "Asr onCreate");
......@@ -300,21 +308,21 @@ public class AsrActivity implements SpeechEngine.SpeechListener {
}
}
private void initEngineBtnClicked() {
public void initEngineBtnClicked() {
if (mEngineStarted) {
return;
}
initEngine();
}
private void uninitEngineBtnClicked() {
public void uninitEngineBtnClicked() {
if (mEngineStarted) {
return;
}
uninitEngine();
}
private void startEngineBtnClicked() {
public void startEngineBtnClicked() {
Log.i(SpeechDemoDefines.TAG, "配置启动参数.");
configStartAsrParams();
//【可选配置】是否启用云端自动判停,仅一句话识别场景生效
......@@ -342,7 +350,7 @@ public class AsrActivity implements SpeechEngine.SpeechListener {
clearResultText();
}
private void stopEngineBtnClicked() {
public void stopEngineBtnClicked() {
Log.i(SpeechDemoDefines.TAG, "关闭引擎(异步)");
Log.i(SpeechDemoDefines.TAG, "Directive: DIRECTIVE_STOP_ENGINE");
mSpeechEngine.sendDirective(SpeechEngineDefines.DIRECTIVE_STOP_ENGINE, "");
......@@ -495,6 +503,10 @@ public class AsrActivity implements SpeechEngine.SpeechListener {
text += "\nreqid: " + reader.getString("reqid");
text += "\nresponse_delay: " + response_delay;
}
Map<String, Object> map = new HashMap<>();
map.put("text",text);
map.put("isFinal", isFinal);
_callback.onRevive(map);
setResultText(text);
} catch (JSONException e) {
e.printStackTrace();
......@@ -521,4 +533,28 @@ public class AsrActivity implements SpeechEngine.SpeechListener {
// Intentionally empty: presumably a leftover from the UI demo this class was
// ported from (which rendered results into a TextView). Results are instead
// delivered through _callback — TODO confirm and consider removing.
public void clearResultText() {
}
// /**
// * check and request multiple permissions
// * @param permissions: permission list
// * @return if all permissions already granted.
// */
// public boolean requestPermission(List<String> permissions) {
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// ArrayList<String> unAuthedPermission = new ArrayList<>();
// for (String permission : permissions) {
// if (ContextCompat.checkSelfPermission(_context, permission)
// != PackageManager.PERMISSION_GRANTED) {
// unAuthedPermission.add(permission);
// }
// }
// if (unAuthedPermission.isEmpty()) {
// return true;
// }
// ActivityCompat.requestPermissions(_context.getac, unAuthedPermission.toArray(new String[0]), 999);
// return false;
// } else {
// return true;
// }
// }
}
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<manifest xmlns:tools="http://schemas.android.com/tools"
xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:label="aivoice_plugin_example"
android:name="${applicationName}"
......@@ -43,8 +44,11 @@
</queries>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.RECORD_AUDIO"
tools:ignore="ManifestOrder" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"
tools:ignore="ScopedStorage" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE"/>
......
package com.example.aivoice_plugin_example
import io.flutter.embedding.android.FlutterActivity
class MainActivity: FlutterActivity()
\ No newline at end of file
class MainActivity: FlutterActivity()
......@@ -53,6 +53,88 @@ class _MyAppState extends State<MyApp> {
@override
Widget build(BuildContext context) {
var ttsView = Row(
children: [
TextButton(
onPressed: () {
_aivoicePlugin.ttsInitEngine(configMap);
},
child: const Text('init')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStartEngineBtnClick(
{"text": "引擎启动成功,收到该回调后,在单次合成场景下收到该回调时语音合成已经开始,同时数据字段为该次请求的请求 ID; 连续合成场景下还需要再发送合成指令,才真正的开始合成。"});
},
child: const Text('start')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStopEngineBtnClicked();
},
child: const Text('stop')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsSynthesis({});
},
child: const Text('合成')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStartEngineBtnClick(
{"text": "引擎启动成功,收到该回调后,在单次合成场景下收到该回调时语音合成已经开始,同时数据字段为该次请求的请求 ID; 连续合成场景下还需要再发送合成指令,才真正的开始合成。"});
_aivoicePlugin.ttsSynthesis({});
},
child: const Text('一件开始'))
// TextButton(
// onPressed: () {
// _aivoicePlugin.initEngine(configMap);
// },
// child: const Text('init')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.startOrStopEngine(true);
// },
// child: const Text('start')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.stopEngine();
// },
// child: const Text('stop')),
],
);
var asrView = Row(
children: [
TextButton(
onPressed: () {
_aivoicePlugin.initEngine(configMap);
},
child: const Text('init2')),
TextButton(
onPressed: () {
_aivoicePlugin.startOrStopEngine(true);
},
child: const Text('start')),
TextButton(
onPressed: () {
_aivoicePlugin.stopEngine();
},
child: const Text('stop')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.initEngine(configMap);
// },
// child: const Text('init')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.startOrStopEngine(true);
// },
// child: const Text('start')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.stopEngine();
// },
// child: const Text('stop')),
],
);
return MaterialApp(
home: Scaffold(
appBar: AppBar(
......@@ -65,53 +147,7 @@ class _MyAppState extends State<MyApp> {
],
),
),
floatingActionButton: Row(
children: [
TextButton(
onPressed: () {
_aivoicePlugin.ttsInitEngine(configMap);
},
child: const Text('init')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStartEngineBtnClick(
{"text": "引擎启动成功,收到该回调后,在单次合成场景下收到该回调时语音合成已经开始,同时数据字段为该次请求的请求 ID; 连续合成场景下还需要再发送合成指令,才真正的开始合成。"});
},
child: const Text('start')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStopEngineBtnClicked();
},
child: const Text('stop')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsSynthesis({});
},
child: const Text('合成')),
TextButton(
onPressed: () {
_aivoicePlugin.ttsStartEngineBtnClick(
{"text": "引擎启动成功,收到该回调后,在单次合成场景下收到该回调时语音合成已经开始,同时数据字段为该次请求的请求 ID; 连续合成场景下还需要再发送合成指令,才真正的开始合成。"});
_aivoicePlugin.ttsSynthesis({});
},
child: const Text('一件开始'))
// TextButton(
// onPressed: () {
// _aivoicePlugin.initEngine(configMap);
// },
// child: const Text('init')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.startOrStopEngine(true);
// },
// child: const Text('start')),
// TextButton(
// onPressed: () {
// _aivoicePlugin.stopEngine();
// },
// child: const Text('stop')),
],
),
floatingActionButton: asrView,
),
);
}
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论