Reworked the wake-up recognition initialization; wake-up recognition now works, but some issues remain. Added speech synthesis (TTS).

2022-01-24 22:37:20 +08:00
parent e33c5a9737
commit 9bf7634f58
4 changed files with 336 additions and 69 deletions

View File

@ -2,6 +2,7 @@ package cn.com.motse.iflytek_speech_demo;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
@ -24,12 +25,15 @@ import java.util.LinkedHashMap;
import java.util.Map;
import com.iflytek.cloud.GrammarListener;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechEvent;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.SpeechSynthesizer;
import com.iflytek.cloud.SpeechUtility;
import com.iflytek.cloud.SynthesizerListener;
import com.iflytek.cloud.VoiceWakeuper;
import com.iflytek.cloud.WakeuperListener;
import com.iflytek.cloud.WakeuperResult;
@ -71,12 +75,19 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
private VoiceWakeuper mIvw;
// Speech recognizer object
private SpeechRecognizer mAsr;
// Speech synthesizer (TTS) object
private SpeechSynthesizer mTts;
// ----- Wake-up recognition parameters -----
// Wake-up threshold; the lower the value, the easier it is to wake up [0-3000]
private final int curThresh = 1450;
private final String mEngineType = SpeechConstant.TYPE_LOCAL;
// Wake-up result content
private String resultString;
// Recognition result content
private String recoString;
// ----- Grammar parameters -----
// Local grammar id
private String mLocalGrammarID;
// Local grammar file
@ -84,12 +95,17 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// Local grammar build path
private String grmPath;
// Wake-up threshold; the lower the value, the easier it is to wake up [0-3000]
private final int curThresh = 1450;
// ----- Synthesis (TTS) parameters -----
// Default local voice (speaker)
public static String voicerLocal = "xiaoyan";
// Buffering progress
private int mPercentForBuffering = 0;
// Playback progress
private int mPercentForPlaying = 0;
// ----- Speech recognizer parameters -----
private final String mEngineType = SpeechConstant.TYPE_LOCAL;
private final String resultType = "json";
private Toast mToast;
/*
@ -160,14 +176,26 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
case "init":
initSpeechUtility(call, result);
break;
case "start":
start(call, result);
case "wakeupStart":
wakeupStart(call, result);
break;
case "stop":
stop(call, result);
case "wakeupStop":
wakeupStop(call, result);
break;
case "cancel":
cancel(call, result);
case "wakeupCancel":
wakeupCancel(call, result);
break;
case "ttsStart":
ttsStart(call, result);
break;
case "ttsCancel":
ttsCancel(call, result);
break;
case "ttsPause":
ttsPause(call, result);
break;
case "ttsResume":
ttsResume(call, result);
break;
default:
result.notImplemented();
@ -179,10 +207,18 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
channel.setMethodCallHandler(null);
// Release resources on exit
if (null != mIvw) {
mIvw.cancel();
mIvw.destroy();
}
if (null != mAsr) {
mAsr.cancel();
mAsr.destroy();
}
if (null != mTts) {
mTts.stopSpeaking();
mTts.destroy();
}
}
/*
@ -202,14 +238,15 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// Note: calling this API from a non-main process returns a null object; to use speech features in a non-main process, add the parameter SpeechConstant.FORCE_LOGIN + "=true"
SpeechUtility.createUtility(mActivity.get(), SpeechConstant.APPID + '=' + appId);
final ResultStateful resultStateful = ResultStateful.of(result);
Log.d(TAG, "ResultStateful.of(result); ++++++++++++++ appId = " + appId);
// Get the grammar build path
grmPath = mActivity.get().getExternalFilesDir("msc").getAbsolutePath() + "/test";
// 2. Initialize the wake-up object
// The InitListener parameter is written as a lambda
mIvw = VoiceWakeuper.createWakeuper(mActivity.get(), code -> {
Log.d(TAG, "VoiceWakeuper init() code = " + code);
if (code == ErrorCode.SUCCESS) {
Log.d(TAG, "VoiceWakeuper init() code = " + code);
resultStateful.success(0);
} else {
Log.d("TAG", "唤醒对象初始化失败,错误码:" + code);
@ -219,9 +256,8 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// 3. Initialize the recognizer object
mAsr = SpeechRecognizer.createRecognizer(mActivity.get(), code -> {
Log.d(TAG, "SpeechRecognizer init() code = " + code);
if (code == ErrorCode.SUCCESS) {
Log.d(TAG, "SpeechRecognizer init() code = " + code);
resultStateful.success(0);
} else {
Log.d("TAG", "识别对象初始化失败,错误码:" + code);
@ -232,8 +268,32 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// 4. Load the local grammar file
mLocalGrammar = readFile(mActivity.get(), "query.bnf", "utf-8");
// 5. Configure parameters
setParam();
// 5. Initialize the synthesizer object
mTts = SpeechSynthesizer.createSynthesizer(mActivity.get(), code -> {
Log.d(TAG, "SpeechSynthesizer init() code = " + code);
if (code == ErrorCode.SUCCESS) {
// Once initialization succeeds, startSpeaking can be called
// Note: some developers call startSpeaking right after creating the synthesizer in onCreate
// the correct approach is to move that startSpeaking call from onCreate to here
resultStateful.success(0);
} else {
Log.d("TAG", "合成对象初始化失败,错误码:" + code);
resultStateful.error("合成对象初始化失败:", "" + code, null);
}
});
// 6. Configure parameters
setTtsParam();
setAsrParam();
// Start building the grammar
int ret = 0;
ret = mAsr.buildGrammar("bnf", mLocalGrammar, grammarListener);
if (ret != ErrorCode.SUCCESS) {
Log.d(TAG, "mAsr.buildGrammar语法构建失败,错误码:" + ret);
Toast.makeText(mActivity.get(), "语法构建失败,错误码:" + ret, Toast.LENGTH_SHORT).show();
}
// Null check to keep a null pointer from crashing the app
mIvw = VoiceWakeuper.getWakeuper();
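mLocalGrammarID is consumed a few lines below but assigned inside the grammarListener callback, whose body falls outside this diff. A minimal sketch of that listener, assuming it follows the iFlytek GrammarListener shape and simply stores the returned grammar id:

// Sketch only: the real grammarListener is defined elsewhere in this file and is not
// part of this hunk. GrammarListener delivers the grammar id produced by buildGrammar().
private final GrammarListener grammarListener = new GrammarListener() {
    @Override
    public void onBuildFinish(String grammarId, SpeechError error) {
        if (error == null) {
            // Keep the id so the wakeuper/recognizer can reference the grammar later
            mLocalGrammarID = grammarId;
            Log.d(TAG, "grammar built, id = " + grammarId);
        } else {
            Log.d(TAG, "grammar build failed: " + error.getPlainDescription(true));
        }
    }
};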
@ -253,7 +313,7 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
* The demo only sets the threshold for the first wake-up keyword by default; set thresholds according to the number of keywords in your custom resource (a multi-keyword sketch follows this hunk)
*/
mIvw.setParameter(SpeechConstant.IVW_THRESHOLD, "0:"+ curThresh);
// Set wake-up + recognition mode
// Wake-up service type: wake-up + recognition mode
mIvw.setParameter(SpeechConstant.IVW_SST, "oneshot");
// Set the result format
mIvw.setParameter(SpeechConstant.RESULT_TYPE, "json");
@ -268,24 +328,17 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
mActivity.get().getExternalFilesDir("msc").getAbsolutePath() + "/ivw.wav");
mIvw.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
// Start building the grammar
int ret = 0;
ret = mAsr.buildGrammar("bnf", mLocalGrammar, grammarListener);
if (ret != ErrorCode.SUCCESS) {
Toast.makeText(mActivity.get(), "语法构建失败,错误码:" + ret, Toast.LENGTH_SHORT).show();
}
//
// If the grammar was built successfully, have the wake-up object use it
if (!TextUtils.isEmpty(mLocalGrammarID)) {
// Set the local recognition resource
mIvw.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
// Set the grammar build path
mIvw.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
// Set the grammar id used by local recognition
mIvw.setParameter(SpeechConstant.LOCAL_GRAMMAR,
mLocalGrammarID);
Log.d(TAG, "mLocalGrammarID 287:" + mLocalGrammarID);
mIvw.startListening(mWakeuperListener);
mIvw.setParameter(SpeechConstant.LOCAL_GRAMMAR, mLocalGrammarID);
// Start wake-up listening
// mIvw.startListening(mWakeuperListener);
} else {
Toast.makeText(mActivity.get(), "请先构建语法", Toast.LENGTH_SHORT).show();
}
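As the threshold comment above notes, the demo only configures keyword 0. For a wake-up resource containing several keywords, each keyword id would get its own id:threshold pair; the separator used below is an assumption based on iFlytek's documented "id:threshold" format, so verify it against the documentation for your ivw resource. keywordCount is an illustrative variable, not something defined in this file.

// Assumed format: one "id:threshold" pair per wake-up keyword, joined with ';'.
int keywordCount = 2; // number of keywords in the custom resource (example value)
StringBuilder thresholds = new StringBuilder();
for (int i = 0; i < keywordCount; i++) {
    if (i > 0) thresholds.append(";");
    thresholds.append(i).append(":").append(curThresh);
}
mIvw.setParameter(SpeechConstant.IVW_THRESHOLD, thresholds.toString()); // e.g. "0:1450;1:1450"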
@ -298,9 +351,8 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
});
}
private void start(@NonNull MethodCall call, @NonNull Result result) {
// Whether a view parameter was passed
// final Boolean haveView = call.argument("haveView");
private void wakeupStart(@NonNull MethodCall call, @NonNull Result result) {
if (mActivity == null) {
Log.e(TAG, "Ignored start, current activity is null.");
result.error("Ignored start, current activity is null.", null, null);
@ -308,8 +360,6 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
String[] permissions = {
Manifest.permission.RECORD_AUDIO,
// Manifest.permission.ACCESS_NETWORK_STATE,
// Manifest.permission.INTERNET,
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_EXTERNAL_STORAGE
};
@ -326,33 +376,100 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
ActivityCompat.requestPermissions(mActivity.get(), toApplyList.toArray(tmpList), REQUEST_CODE);
result.error("request permissons.", null, null);
} else {
Log.d(TAG, "int ret = mAsr.startListening(mRecognizerListener) 328");
// If the Flutter layer passed a haveView parameter
int ret = mAsr.startListening(mRecognizerListener);
Log.d(TAG, "start WakeuperListener............");
// Start wake-up listening
int ret = mIvw.startListening(mWakeuperListener);
if (ret != ErrorCode.SUCCESS) {
result.error("start listen error", ""+ret, null);
result.error("start wakeup listen error", ""+ret, null);
} else {
result.success(0);
}
}
}
private void stop(@NonNull MethodCall call, @NonNull Result result) {
if(mAsr != null) {
mAsr.stopListening();
private void wakeupStop(@NonNull MethodCall call, @NonNull Result result) {
if(mIvw != null) {
mIvw.stopListening();
}
}
private void cancel(@NonNull MethodCall call, @NonNull Result result) {
if(mAsr != null) {
mAsr.cancel();
private void wakeupCancel(@NonNull MethodCall call, @NonNull Result result) {
if(mIvw != null) {
mIvw.cancel();
}
}
private void ttsStart(@NonNull MethodCall call, @NonNull Result result) {
String text = call.argument("ttsText");
if (mActivity == null) {
Log.e(TAG, "Ignored start, current activity is null.");
result.error("Ignored start, current activity is null.", null, null);
}
Log.d(TAG, "start TtsListener............");
Log.d(TAG, "我想要合成的文本:" + text);
// 启动合成监听
int ret = mTts.startSpeaking(text, mTtsListener);
if (ret != ErrorCode.SUCCESS) {
result.error("start tts listen error", ""+ret, null);
} else {
result.success(0);
}
// String[] permissions = {
// Manifest.permission.RECORD_AUDIO,
// Manifest.permission.WRITE_EXTERNAL_STORAGE,
// Manifest.permission.READ_EXTERNAL_STORAGE
// };
//
// ArrayList<String> toApplyList = new ArrayList<String>();
// for (String perm :permissions){
// if (PackageManager.PERMISSION_GRANTED != ContextCompat.checkSelfPermission(mActivity.get(), perm)) {
// toApplyList.add(perm);
// }
// }
//
// String[] tmpList = new String[toApplyList.size()];
// if (!toApplyList.isEmpty()){
// ActivityCompat.requestPermissions(mActivity.get(), toApplyList.toArray(tmpList), REQUEST_CODE);
// result.error("request permissons.", null, null);
// } else {
// Log.d(TAG, "start TtsListener............");
//
// // Start synthesis with the listener
// int ret = mTts.startSpeaking(text, mTtsListener);
// if (ret != ErrorCode.SUCCESS) {
// result.error("start tts listen error", ""+ret, null);
// } else {
// result.success(0);
// }
// }
}
private void ttsCancel(@NonNull MethodCall call, @NonNull Result result) {
if(mTts != null) {
mTts.stopSpeaking();
}
}
private void ttsPause(@NonNull MethodCall call, @NonNull Result result) {
if(mTts != null) {
mTts.pauseSpeaking();
}
}
private void ttsResume(@NonNull MethodCall call, @NonNull Result result) {
if(mTts != null) {
mTts.resumeSpeaking();
}
}
/*
* Speech recognition callback ★★★★★
* */
private final RecognizerListener mRecognizerListener = new RecognizerListener() {
/*private final RecognizerListener mRecognizerListener = new RecognizerListener() {
@Override
public void onVolumeChanged(int volume, byte[] data) {
Log.d(TAG, "返回音频数据:"+ data.length);
@ -390,6 +507,7 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
@Override
public void onError(SpeechError speechError) {
Log.d(TAG, "++++++++++++SpeechError: " + speechError.getPlainDescription(true));
// Error code 10118 (no speech detected) usually means the recording permission is denied; prompt the user to enable the app's recording permission
Map ret = new HashMap();
ret.put("error 395:", speechError.getPlainDescription(true));
@ -405,7 +523,7 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// Log.d(TAG, "session id =" + sid);
// }
}
};
};*/
/*
* Grammar build callback
@ -424,7 +542,7 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
};
/*
* Wake-up callback
* Wake-up callback ★★★★★
* */
private WakeuperListener mWakeuperListener = new WakeuperListener() {
@ -452,18 +570,22 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
e.printStackTrace();
}
// Return the result to the Flutter layer
// TODO
// textView.setText(resultString);
channel.invokeMethod("onReceiveSpeechText", resultString);
}
@Override
public void onError(SpeechError error) {
// Log.d(TAG, "唤醒回调错误: " + error.getPlainDescription(true));
Toast.makeText(mActivity.get(), "唤醒回调错误: " + error.getPlainDescription(true), Toast.LENGTH_SHORT).show();
Log.d(TAG, "唤醒监听-----------------回调错误: " + error.getPlainDescription(true));
// Error code 10118 (no speech detected) usually means the recording permission is denied; prompt the user to enable it (see the sketch after this listener)
Map ret = new HashMap();
ret.put("唤醒监听error:", error.getPlainDescription(true));
channel.invokeMethod("onReceiveSpeechText", ret);
}
@Override
public void onBeginOfSpeech() {
Log.d(TAG, "唤醒监听--------------开始说话" );
Toast.makeText(mActivity.get(), "开始说话", Toast.LENGTH_SHORT).show();
}
@ -483,38 +605,97 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
@Override
public void onVolumeChanged(int volume) {
// TODO Auto-generated method stub
Log.d(TAG, "唤醒监听--------------音量变化:"+ volume);
}
};
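The onError comment above singles out error code 10118 (no speech), which typically points at a denied microphone permission. A minimal sketch of how that case could be surfaced to the Flutter layer, assuming the SDK's SpeechError.getErrorCode() accessor; describeSpeechError and the "error" map key are illustrative names, not part of this commit.

// Sketch: map a SpeechError to the payload sent back to Flutter, giving 10118
// (no speech detected) a friendlier hint about the recording permission.
private Map<String, Object> describeSpeechError(SpeechError error) {
    Map<String, Object> ret = new HashMap<>();
    if (error.getErrorCode() == 10118) {
        ret.put("error", "no speech detected - check that RECORD_AUDIO permission is granted");
    } else {
        ret.put("error", error.getPlainDescription(true));
    }
    return ret;
}

The onError callback above could then call channel.invokeMethod("onReceiveSpeechText", describeSpeechError(error)) instead of building the map inline.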
/*
* Synthesis (TTS) callback
* */
private SynthesizerListener mTtsListener = new SynthesizerListener() {
@Override
public void onSpeakBegin() {
//showTip("开始播放");
Log.d(TAG, "开始播放:" + System.currentTimeMillis());
}
@Override
public void onSpeakPaused() {
showTip("暂停播放");
}
@Override
public void onSpeakResumed() {
showTip("继续播放");
}
@SuppressLint("DefaultLocale")
@Override
public void onBufferProgress(int percent, int beginPos, int endPos, String info) {
// Synthesis buffering progress
mPercentForBuffering = percent;
showTip(String.format("缓冲进度为%d%%,播放进度为%d%%", mPercentForBuffering, mPercentForPlaying));
}
@SuppressLint("DefaultLocale")
@Override
public void onSpeakProgress(int percent, int beginPos, int endPos) {
// Playback progress
mPercentForPlaying = percent;
showTip(String.format("缓冲进度为%d%%,播放进度为%d%%", mPercentForBuffering, mPercentForPlaying));
}
@Override
public void onCompleted(SpeechError error) {
if (error == null) {
showTip("播放完成");
} else {
showTip(error.getPlainDescription(true));
}
}
@Override
public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
// The following code retrieves the cloud session id; if something goes wrong, give the session id to technical support so they can look up the session logs and locate the cause
// When using local (offline) capabilities, the session id is null
/*if (SpeechEvent.EVENT_SESSION_ID == eventType) {
String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
Log.d(TAG, "session id =" + sid);
}*/
// Reference for real-time audio stream output
/*if (SpeechEvent.EVENT_TTS_BUFFER == eventType) {
byte[] buf = obj.getByteArray(SpeechEvent.KEY_EVENT_TTS_BUFFER);
Log.e("MscSpeechLog", "buf is =" + buf);
}*/
}
};
/**
* Configure recognizer parameters
*/
public void setParam() {
public void setAsrParam() {
// Clear parameters
mAsr.setParameter(SpeechConstant.PARAMS, null);
// Offline dictation: set ENGINE_TYPE, RESULT_TYPE, ASR_RES_PATH, LANGUAGE, ACCENT, ASR_PTT, VAD_BOS, VAD_EOS
// Offline command-word recognition: set ENGINE_TYPE, RESULT_TYPE, ASR_THRESHOLD, GRM_BUILD_PATH, ASR_RES_PATH
// Set the recognition engine
mAsr.setParameter(SpeechConstant.TEXT_ENCODING, "utf-8");
mAsr.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
// Set the result format
mAsr.setParameter(SpeechConstant.RESULT_TYPE, resultType);
mAsr.setParameter(SpeechConstant.RESULT_TYPE, "json");
// Set the grammar build path
mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
Log.d(TAG, "grmPath 512:" + grmPath );
// Set the resource path
mAsr.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
Log.d(TAG, "getResourcePath() 516:" + getResourcePath() );
// This can be used to keep error-code info out of the dialog
//mIat.setParameter("view_tips_plain","false");
@ -530,7 +711,7 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
mAsr.setParameter(SpeechConstant.VAD_EOS, "2000");
// Punctuation: "0" returns results without punctuation, "1" returns results with punctuation
mAsr.setParameter(SpeechConstant.ASR_PTT, "0");
mAsr.setParameter(SpeechConstant.ASR_PTT, "1");
Log.d(TAG, "grmPath ++++++++:" + grmPath );
@ -540,6 +721,33 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
//mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory()+"/msc/iat.wav");
}
/*
* Configure synthesizer (TTS) parameters
* */
public void setTtsParam() {
// Clear parameters
mTts.setParameter(SpeechConstant.PARAMS, null);
// Use the local engine
mTts.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_LOCAL);
// Set the voice resource path
mTts.setParameter(ResourceUtil.TTS_RES_PATH, getTtsResourcePath());
// Set the voice (speaker)
mTts.setParameter(SpeechConstant.VOICE_NAME, voicerLocal);
// Set the synthesis speed
mTts.setParameter(SpeechConstant.SPEED, "50");
// Set the synthesis pitch
mTts.setParameter(SpeechConstant.PITCH, "50");
// Set the synthesis volume
mTts.setParameter(SpeechConstant.VOLUME, "50");
// Set the player audio stream type
mTts.setParameter(SpeechConstant.STREAM_TYPE, "3");
// Whether playing synthesized audio should interrupt music playback; default is true
mTts.setParameter(SpeechConstant.KEY_REQUEST_FOCUS, "true");
// Set the audio save path; supported formats are pcm and wav. When saving to the SD card, make sure WRITE_EXTERNAL_STORAGE is granted
mTts.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
mTts.setParameter(SpeechConstant.TTS_AUDIO_PATH, mActivity.get().getExternalFilesDir("msc").getAbsolutePath() + "/tts.wav");
}
/**
* Read a file from the assets directory
* @return content
@ -569,9 +777,24 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
// Common recognition resource
tempBuffer.append(ResourceUtil.generateResourcePath(mActivity.get(),
ResourceUtil.RESOURCE_TYPE.assets, "asr/common.jet"));
Log.d(TAG, "getResourcePath: " + ResourceUtil.RESOURCE_TYPE.assets + "asr/common.jet");
return tempBuffer.toString();
}
/*
* Get the voice (speaker) resource path
* */
private String getTtsResourcePath() {
StringBuffer tempBuffer = new StringBuffer();
String type = "tts";
// Common synthesis resource
tempBuffer.append(ResourceUtil.generateResourcePath(mActivity.get(), ResourceUtil.RESOURCE_TYPE.assets, type + "/common.jet"));
tempBuffer.append(";");
// Voice (speaker) resource
tempBuffer.append(ResourceUtil.generateResourcePath(mActivity.get(), ResourceUtil.RESOURCE_TYPE.assets, type + "/" + voicerLocal + ".jet"));
return tempBuffer.toString();
}
/*
* Format the output result
* */
@ -599,4 +822,17 @@ public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler
return resultBuffer.toString();
}
/*
* Show a tip on screen (toast)
* */
private void showTip(final String str) {
mActivity.get().runOnUiThread(() -> {
if (mToast != null) {
mToast.cancel();
}
mToast = Toast.makeText(mActivity.get().getApplicationContext(), str, Toast.LENGTH_SHORT);
mToast.show();
});
}
}

View File

@ -55,5 +55,5 @@ flutter {
}
dependencies {
implementation files('..\\..\\..\\android\\libs\\Msc.jar')
implementation files('../../../android/libs/Msc.jar')
}

View File

@ -62,6 +62,7 @@ class Content extends StatefulWidget {
class _ContentState extends State<Content> {
String appID = 'da2aaf9b';
String ttsText = '今天合肥天气很不好';
IflytekSpeechDemo iflytekSpeechDemo = IflytekSpeechDemo();
@ -82,13 +83,20 @@ class _ContentState extends State<Content> {
ElevatedButton(
child: Text("start"),
onPressed: () {
print('被点击了');
iflytekSpeechDemo.start((Map<String, dynamic> event) async {
print('要唤醒');
iflytekSpeechDemo.wakeupStart((Map<String, dynamic> event) async {
print("flutter onOpenNotification: $event");
});
},
),
ElevatedButton(
child: Text("tts"),
onPressed: () {
print('我要合成');
iflytekSpeechDemo.ttsStart(ttsText);
},
),
],
),
);

View File

@ -43,22 +43,45 @@ class IflytekSpeechDemo {
}
}
Future<String> start(EventHandler onReceiveSpeechText) async {
Future<String> wakeupStart(EventHandler onReceiveSpeechText) async {
_onReceiveSpeechText = onReceiveSpeechText;
try{
return await _channel.invokeMethod('start');
return await _channel.invokeMethod('wakeupStart');
}catch(e) {
print(e);
return "9999999";
}
}
Future<String> stop() async {
return await _channel.invokeMethod('stop');
Future<String> wakeupStop() async {
return await _channel.invokeMethod('wakeupStop');
}
Future<String> cancel() async {
return await _channel.invokeMethod('cancel');
Future<String> wakeupCancel() async {
return await _channel.invokeMethod('wakeupCancel');
}
Future<int> ttsStart(String ttsText) async {
try{
int ret = await _channel.invokeMethod('ttsStart', {
"ttsText": ttsText
});
return ret;
}catch(e) {
print(e);
return 9999999;
}
}
Future<String> ttsCancel() async {
return await _channel.invokeMethod('ttsCancel');
}
Future<String> ttsPause() async {
return await _channel.invokeMethod('ttsPause');
}
Future<String> ttsResume() async {
return await _channel.invokeMethod('ttsResume');
}
}