This commit is contained in:
hh
2022-01-24 16:10:41 +08:00
commit e33c5a9737
69 changed files with 2056 additions and 0 deletions

29
.gitignore vendored Normal file
View File

@ -0,0 +1,29 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock.
/pubspec.lock
**/doc/api/
.dart_tool/
.packages
build/

10
.metadata Normal file
View File

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 77d935af4db863f6abd0b9c31c7e6df2a13de57b
channel: unknown
project_type: plugin

3
CHANGELOG.md Normal file
View File

@ -0,0 +1,3 @@
## 0.0.1
* TODO: Describe initial release.

1
LICENSE Normal file
View File

@ -0,0 +1 @@
TODO: Add your license here.

15
README.md Normal file
View File

@ -0,0 +1,15 @@
# iflytek_speech_demo
语音插件
## Getting Started
This project is a starting point for a Flutter
[plug-in package](https://flutter.dev/developing-packages/),
a specialized package that includes platform-specific implementation code for
Android and/or iOS.
For help getting started with Flutter, view our
[online documentation](https://flutter.dev/docs), which offers tutorials,
samples, guidance on mobile development, and a full API reference.

4
analysis_options.yaml Normal file
View File

@ -0,0 +1,4 @@
include: package:flutter_lints/flutter.yaml
# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

8
android/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures

51
android/build.gradle Normal file
View File

@ -0,0 +1,51 @@
// Gradle build script for the iflytek_speech_demo Android plugin module.
group 'cn.com.motse.iflytek_speech_demo'
version '1.0'

buildscript {
    repositories {
        google()
        mavenCentral()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:4.1.3'
    }
}

rootProject.allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

apply plugin: 'com.android.library'

android {
    compileSdkVersion 30

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    defaultConfig {
        minSdkVersion 16
        //noinspection OldTargetApi
        targetSdkVersion 29
    }

    // Pick up the iFlytek native (.so) libraries bundled under libs/.
    sourceSets {
        main {
            jniLibs.srcDirs = ['libs']
        }
    }
}

// iFlytek SDK jar dependency; the last two dependencies are not necessarily required.
dependencies {
    implementation files('libs/Msc.jar')
    implementation 'androidx.legacy:legacy-support-v4:1.0.0'
    implementation 'com.google.android.material:material:1.4.0'
}

BIN
android/libs/Msc.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

1
android/settings.gradle Normal file
View File

@ -0,0 +1 @@
rootProject.name = 'iflytek_speech_demo'

View File

@ -0,0 +1,13 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="cn.com.motse.iflytek_speech_demo">
    <!-- Network access: used by the cloud speech capabilities -->
    <uses-permission android:name="android.permission.INTERNET"/>
    <!-- Microphone access: needed for dictation, recognition and semantic understanding -->
    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
    <!-- External storage write access: needed when building grammars -->
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
    <!-- External storage read access: needed when building grammars -->
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
</manifest>

Binary file not shown.

View File

@ -0,0 +1,12 @@
#BNF+IAT 1.0 UTF-8;
!grammar call;
!slot <contact>;
!slot <callPre>;
!slot <callPhone>;
!slot <callTo>;
!start <callStart>;
<callStart>:[<callPre>][<callTo>]<contact><callPhone>|[<callPre>]<callPhone>[<callTo>]<contact>;
<contact>:张海洋;
<callPre>:我要|我想|我想要;
<callPhone>:打电话;
<callTo>:给;

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 KiB

Binary file not shown.

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen container holding a single stretched image
     (id suggests it is the iFlytek notice image container — confirm usage). -->
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical">

    <ImageView
        android:id="@+id/ifly_mnotice_image_container"
        android:layout_width="match_parent"
        android:scaleType="fitXY"
        android:layout_height="match_parent"/>
</RelativeLayout>

View File

@ -0,0 +1,10 @@
#BNF+IAT 1.0 UTF-8;
!grammar command;
!slot <cmdPre>;
!slot <articleName>;
!slot <queryCmd>;
!start <cmdStart>;
<cmdStart>:[<cmdPre>]<queryCmd><articleName>;
<cmdPre>:我要|我想|我想要;
<queryCmd>:查询|查找|找一下;
<articleName>:鱼香肉丝饭!id(0)|麻辣鸡丁饭!id(1);

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1 @@
{"userword":[{"name":"我的常用词","words":["佳晨实业","蜀南庭苑","高兰路","复联二"]},{"name":"我的好友","words":["李馨琪","鹿晓雷","张集栋","周家莉","叶震珂","熊泽萌"]}]}

View File

@ -0,0 +1,8 @@
#BNF+IAT 1.0 UTF-8;
!grammar wake;
!slot <callCmd>;
!slot <callName>;
!start <callStart>;
<callStart>:[<callCmd>]<callName>;
<callCmd>:讯飞语音|讯飞语点|叮咚叮咚;
<callName>:张三|李四|张海洋;

View File

@ -0,0 +1,602 @@
package cn.com.motse.iflytek_speech_demo;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.Looper;
import android.os.Handler;
import android.text.TextUtils;
import android.util.Log;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import com.iflytek.cloud.GrammarListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechEvent;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.SpeechUtility;
import com.iflytek.cloud.VoiceWakeuper;
import com.iflytek.cloud.WakeuperListener;
import com.iflytek.cloud.WakeuperResult;
import com.iflytek.cloud.util.ResourceUtil;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;

import org.json.JSONException;
import org.json.JSONObject;

import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry;

/**
 * IflytekSpeechDemoPlugin
 *
 * <p>Flutter plugin bridging the iFlytek MSC speech SDK to Dart over the
 * "iflytek_speech_demo" MethodChannel. It wires up voice wake-up
 * ({@link VoiceWakeuper}) plus offline grammar-based recognition
 * ({@link SpeechRecognizer}) and streams results back to the Dart side
 * through the "onReceiveSpeechText" callback method.</p>
 */
public class IflytekSpeechDemoPlugin implements FlutterPlugin, MethodCallHandler, ActivityAware {
    private final static String TAG = IflytekSpeechDemoPlugin.class.getSimpleName();
    // Request code used when asking the user for runtime permissions.
    private final static int REQUEST_CODE = 123;

    /// The MethodChannel that handles the communication between Flutter and native Android.
    ///
    /// This local reference serves to register the plugin with the Flutter Engine and unregister it
    /// when the Flutter Engine is detached from the Activity.
    private MethodChannel channel;
    // Weak reference to the host Activity; set in onAttachedToActivity, cleared on detach.
    private WeakReference<Activity> mActivity;

    // ----- Speech SDK objects -----
    // Voice wake-up object.
    private VoiceWakeuper mIvw;
    // Speech recognizer object.
    private SpeechRecognizer mAsr;
    // Latest wake-up result text.
    private String resultString;
    // Latest recognition result text.
    private String recoString;
    // Id of the locally built grammar (assigned in grammarListener.onBuildFinish).
    private String mLocalGrammarID;
    // Contents of the local grammar (.bnf) file read from assets.
    private String mLocalGrammar = null;
    // Directory where the compiled grammar is stored.
    private String grmPath;
    // Wake-up threshold: the lower the value, the easier the wake-up triggers. Range [0-3000].
    private final int curThresh = 1450;

    // ----- Recognizer parameters -----
    private final String mEngineType = SpeechConstant.TYPE_LOCAL;
    private final String resultType = "json";

    /*
     * onAttachedToEngine / onDetachedFromEngine are the FlutterPlugin interface methods,
     * i.e. the new way Flutter loads plugins.
     * onAttachedToActivity / onDetachedFromActivity are the ActivityAware interface methods,
     * mainly used to obtain the Activity the current Flutter page belongs to.
     * onMethodCall is the MethodCallHandler interface method that receives method
     * invocations coming from the Flutter side.
     */
    @Override
    public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) {
        channel = new MethodChannel(flutterPluginBinding.getBinaryMessenger(), "iflytek_speech_demo");
        channel.setMethodCallHandler(this);
        // eg:context = flutterPluginBinding.applicationContext
    }

    @Override
    public void onAttachedToActivity(ActivityPluginBinding activityPluginBinding) {
        mActivity = new WeakReference<>(activityPluginBinding.getActivity());
        activityPluginBinding.addRequestPermissionsResultListener(new PluginRegistry.RequestPermissionsResultListener() {
            @Override
            public boolean onRequestPermissionsResult(int i, String[] permissions, int[] grantResults) {
                if(i == REQUEST_CODE) {
                    for (int ret :grantResults){
                        if (ret != PackageManager.PERMISSION_GRANTED) {
                            // Reaching here means at least one permission was denied.
                            Toast.makeText(mActivity.get(), "请先开通语音权限", Toast.LENGTH_SHORT).show();
                            return false;
                        }
                    }
                    // NOTE(review): false is also returned when every permission was
                    // granted, i.e. the event is reported as unhandled — confirm intended.
                    return false;
                }
                return true;
            }
        });
    }

    @Override
    public void onDetachedFromActivityForConfigChanges() {
        // TODO: the Activity your plugin was attached to was
        // destroyed to change configuration.
        // This call will be followed by onReattachedToActivityForConfigChanges().
        mActivity = null;
    }

    @Override
    public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding activityPluginBinding) {
        // TODO: your plugin is now attached to a new Activity
        // after a configuration change.
    }

    @Override
    public void onDetachedFromActivity() {
        // TODO: your plugin is no longer associated with an Activity.
        // Clean up references.
    }

    /*
     * MethodChannel dispatch: forwards SDK method invocations coming from the
     * Dart side (MethodChannel/EventChannel/BasicMessageChannel could be used).
     */
    @Override
    public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) {
        // Dispatch on the method name.
        switch (call.method) {
            case "getPlatformVersion":
                result.success("Android " + android.os.Build.VERSION.RELEASE);
                break;
            case "init":
                initSpeechUtility(call, result);
                break;
            case "start":
                start(call, result);
                break;
            case "stop":
                stop(call, result);
                break;
            case "cancel":
                cancel(call, result);
                break;
            default:
                result.notImplemented();
        }
    }

    @Override
    public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
        channel.setMethodCallHandler(null);
        // Release SDK resources on teardown.
        if (null != mAsr) {
            mAsr.cancel();
            mAsr.destroy();
        }
    }

    /*
     * Initializes the speech stack: SpeechUtility, the wake-up object, the
     * recognizer, the grammar build, and wake-up listening.
     * Expects the Dart call to carry an "appID" argument.
     */
    private void initSpeechUtility(@NonNull MethodCall call, @NonNull Result result) {
        // Read the arguments passed from the Flutter layer.
        final String appId = call.argument("appID");
        Log.d(TAG, "initSpeechUtility init() appId = " + appId);

        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                // 1. Create the speech configuration object.
                // Note: appid must match the downloaded SDK, otherwise error 10407 occurs.
                // Note: this call returns null outside the main process; to use speech in
                // another process add the parameter SpeechConstant.FORCE_LOGIN + "=true".
                SpeechUtility.createUtility(mActivity.get(), SpeechConstant.APPID + '=' + appId);
                // Wrap 'result' so the two init callbacks below cannot complete it twice.
                final ResultStateful resultStateful = ResultStateful.of(result);
                Log.d(TAG, "ResultStateful.of(result); ++++++++++++++ appId = " + appId);
                grmPath = mActivity.get().getExternalFilesDir("msc").getAbsolutePath() + "/test";
                // 2. Initialize the wake-up object (InitListener written as a lambda).
                mIvw = VoiceWakeuper.createWakeuper(mActivity.get(), code -> {
                    if (code == ErrorCode.SUCCESS) {
                        Log.d(TAG, "VoiceWakeuper init() code = " + code);
                        resultStateful.success(0);
                    } else {
                        // NOTE(review): literal "TAG" used as the log tag here —
                        // probably meant the TAG constant.
                        Log.d("TAG", "唤醒对象初始化失败,错误码:" + code);
                        resultStateful.error("唤醒对象初始化失败: ", "" + code, null);
                    }
                });
                // 3. Initialize the recognizer object.
                mAsr = SpeechRecognizer.createRecognizer(mActivity.get(), code -> {
                    if (code == ErrorCode.SUCCESS) {
                        Log.d(TAG, "SpeechRecognizer init() code = " + code);
                        resultStateful.success(0);
                    } else {
                        // NOTE(review): literal "TAG" log tag again — see above.
                        Log.d("TAG", "识别对象初始化失败,错误码:" + code);
                        resultStateful.error("识别对象初始化失败:", "" + code, null);
                    }
                });
                // 4. Load the grammar file from assets.
                mLocalGrammar = readFile(mActivity.get(), "query.bnf", "utf-8");
                // 5. Configure recognizer parameters.
                setParam();

                // Null check so a failed wake-up init does not crash the app.
                mIvw = VoiceWakeuper.getWakeuper();
                if (mIvw != null) {
                    resultString = "";
                    recoString = "";
                    // Local wake-up resource.
                    // NOTE(review): the resource name is derived from appId
                    // ("ivw/<appId>.jet") — confirm the asset is actually named this way.
                    final String resPath = ResourceUtil.generateResourcePath(mActivity.get(), ResourceUtil.RESOURCE_TYPE.assets, "ivw/" + appId + ".jet");
                    // Clear previous parameters.
                    mIvw.setParameter(SpeechConstant.PARAMS, null);
                    // Engine type (local).
                    mIvw.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
                    // Wake-up resource path.
                    mIvw.setParameter(ResourceUtil.IVW_RES_PATH, resPath);
                    /**
                     * Wake-up thresholds are passed as "id:threshold;id:threshold"
                     * according to how many wake words the resource carries.
                     * This demo only sets the first wake word; adjust to the number
                     * of wake words in your custom resource.
                     */
                    mIvw.setParameter(SpeechConstant.IVW_THRESHOLD, "0:"+ curThresh);
                    // Wake-up + recognition ("oneshot") mode.
                    mIvw.setParameter(SpeechConstant.IVW_SST, "oneshot");
                    // Result format.
                    mIvw.setParameter(SpeechConstant.RESULT_TYPE, "json");
                    // Whether the audio used for recognition includes the wake word (default 1).
                    // If included, the wake word is part of the grammar and must appear in it,
                    // otherwise there will be no match.
                    // If excluded, the wake word does not take part in recognition, and the
                    // full grammar sentence must be spoken after the wake word.
                    // The SDK demo keeps the default; results do not include the wake word.
                    // mIvw.setParameter(SpeechConstant.IVW_SHOT_WORD, "0");
                    // Save the last minute of wake-up audio to this path.
                    mIvw.setParameter(SpeechConstant.IVW_AUDIO_PATH,
                            mActivity.get().getExternalFilesDir("msc").getAbsolutePath() + "/ivw.wav");
                    mIvw.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
                    // Build the grammar.
                    int ret = 0;
                    ret = mAsr.buildGrammar("bnf", mLocalGrammar, grammarListener);
                    if (ret != ErrorCode.SUCCESS) {
                        Toast.makeText(mActivity.get(), "语法构建失败,错误码:" + ret, Toast.LENGTH_SHORT).show();
                    }
                    // NOTE(review): buildGrammar completes asynchronously via
                    // grammarListener.onBuildFinish, so mLocalGrammarID is presumably
                    // still empty the first time this branch runs — confirm the
                    // intended flow (wake-up listening may never start on first init).
                    if (!TextUtils.isEmpty(mLocalGrammarID)) {
                        // Local recognition resource.
                        mIvw.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
                        // Grammar build directory.
                        mIvw.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
                        // Grammar id to use for local recognition.
                        mIvw.setParameter(SpeechConstant.LOCAL_GRAMMAR,
                                mLocalGrammarID);
                        Log.d(TAG, "mLocalGrammarID 287:" + mLocalGrammarID);
                        mIvw.startListening(mWakeuperListener);
                    } else {
                        Toast.makeText(mActivity.get(), "请先构建语法", Toast.LENGTH_SHORT).show();
                    }
                } else {
                    Toast.makeText(mActivity.get(), "唤醒未初始化", Toast.LENGTH_SHORT).show();
                }
                // ------------------------ configuration complete
            }
        });
    }

    /*
     * Handles the "start" call: checks/requests runtime permissions and, when
     * all are granted, starts grammar-based recognition.
     */
    private void start(@NonNull MethodCall call, @NonNull Result result) {
        // Whether a UI view parameter was supplied:
        // final Boolean haveView = call.argument("haveView");
        if (mActivity == null) {
            Log.e(TAG, "Ignored start, current activity is null.");
            result.error("Ignored start, current activity is null.", null, null);
            // NOTE(review): no 'return' here — execution continues and
            // mActivity.get() below will NPE; confirm a return is intended.
        }
        String[] permissions = {
                Manifest.permission.RECORD_AUDIO,
                // Manifest.permission.ACCESS_NETWORK_STATE,
                // Manifest.permission.INTERNET,
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE
        };
        ArrayList<String> toApplyList = new ArrayList<String>();
        for (String perm :permissions){
            if (PackageManager.PERMISSION_GRANTED != ContextCompat.checkSelfPermission(mActivity.get(), perm)) {
                toApplyList.add(perm);
            }
        }
        String[] tmpList = new String[toApplyList.size()];
        if (!toApplyList.isEmpty()){
            // Some permissions are missing: request them and fail this call; the
            // Dart side is expected to retry after the user responds.
            ActivityCompat.requestPermissions(mActivity.get(), toApplyList.toArray(tmpList), REQUEST_CODE);
            result.error("request permissons.", null, null);
        } else {
            Log.d(TAG, "int ret = mAsr.startListening(mRecognizerListener) 328");
            // If the Flutter layer passed the haveView parameter:
            int ret = mAsr.startListening(mRecognizerListener);
            if (ret != ErrorCode.SUCCESS) {
                result.error("start listen error", ""+ret, null);
            } else {
                result.success(0);
            }
        }
    }

    // Handles the "stop" call: stops listening (the final result can still arrive).
    // NOTE(review): 'result' is never completed here — the Dart Future will never
    // resolve; confirm whether result.success should be called.
    private void stop(@NonNull MethodCall call, @NonNull Result result) {
        if(mAsr != null) {
            mAsr.stopListening();
        }
    }

    // Handles the "cancel" call: aborts the current recognition session.
    // NOTE(review): 'result' is never completed here either — see stop().
    private void cancel(@NonNull MethodCall call, @NonNull Result result) {
        if(mAsr != null) {
            mAsr.cancel();
        }
    }

    /*
     * Recognition callback ★★★★★ — forwards results/errors to the Dart side
     * via the "onReceiveSpeechText" channel method.
     */
    private final RecognizerListener mRecognizerListener = new RecognizerListener() {
        @Override
        public void onVolumeChanged(int volume, byte[] data) {
            Log.d(TAG, "返回音频数据:"+ data.length);
        }

        @Override
        public void onBeginOfSpeech() {
            Log.d(TAG, "onBeginOfSpeech 361" );
            // The SDK's internal recorder is ready; the user may start speaking.
        }

        @Override
        public void onEndOfSpeech() {
            Log.d(TAG, "onEndOfSpeech 367" );
            // Trailing end-point detected: recognition has started, no more input accepted.
        }

        @Override
        public void onResult(RecognizerResult results, boolean isLast) {
            Log.d(TAG, "onResult 375" + results.getResultString());
            String retString = "";
            retString = formatResult(results);
            // if (resultType.equals("json")) {
            //     retString = formatResult(results);
            // }else if(resultType.equals("plain")) {
            //     retString = results.getResultString();
            // }
            Map ret = new HashMap();
            ret.put("text", retString);
            ret.put("isLast", isLast);
            ret.put("type", 1);
            channel.invokeMethod("onReceiveSpeechText", ret);
        }

        @Override
        public void onError(SpeechError speechError) {
            // Error 10118 ("you did not speak") may mean the record permission is
            // disabled; prompt the user to enable recording for this app.
            Map ret = new HashMap();
            ret.put("error 395:", speechError.getPlainDescription(true));
            channel.invokeMethod("onReceiveSpeechText", ret);
        }

        @Override
        public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
            // The code below retrieves the cloud session id, which can be handed to
            // support staff to locate session logs when something goes wrong;
            // with the local engine the session id is null.
            // if (SpeechEvent.EVENT_SESSION_ID == eventType) {
            //     String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
            //     Log.d(TAG, "session id =" + sid);
            // }
        }
    };

    /*
     * Grammar build callback.
     * See https://www.xfyun.cn/document/error-code for error resolution.
     */
    GrammarListener grammarListener = new GrammarListener() {
        @Override
        public void onBuildFinish(String grammarId, SpeechError error) {
            if (error == null) {
                mLocalGrammarID = grammarId;
                Toast.makeText(mActivity.get(), "语法构建成功:" + grammarId, Toast.LENGTH_SHORT).show();
            } else {
                Toast.makeText(mActivity.get(), "语法构建失败,错误码:" + error.getErrorCode() + grammarId, Toast.LENGTH_SHORT).show();
            }
        }
    };

    /*
     * Wake-up callback.
     */
    private WakeuperListener mWakeuperListener = new WakeuperListener() {
        @Override
        public void onResult(WakeuperResult result) {
            try {
                String text = result.getResultString();
                JSONObject object;
                object = new JSONObject(text);
                StringBuffer buffer = new StringBuffer();
                buffer.append("【RAW】 ").append(text);
                buffer.append("\n");
                buffer.append("【操作类型】").append(object.optString("sst"));
                buffer.append("\n");
                buffer.append("【唤醒词id】").append(object.optString("id"));
                buffer.append("\n");
                buffer.append("【得分】").append(object.optString("score"));
                buffer.append("\n");
                buffer.append("【前端点】").append(object.optString("bos"));
                buffer.append("\n");
                buffer.append("【尾端点】").append(object.optString("eos"));
                resultString = buffer.toString();
            } catch (JSONException e) {
                resultString = "结果解析出错";
                e.printStackTrace();
            }
            // Return the result to the Flutter layer.
            //todo
            // textView.setText(resultString);
        }

        @Override
        public void onError(SpeechError error) {
            // Log.d(TAG, "唤醒回调错误: " + error.getPlainDescription(true));
            Toast.makeText(mActivity.get(), "唤醒回调错误: " + error.getPlainDescription(true), Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onBeginOfSpeech() {
            Toast.makeText(mActivity.get(), "开始说话", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onEvent(int eventType, int isLast, int arg2, Bundle obj) {
            Log.d(TAG, "eventType:" + eventType + "arg1:" + isLast + "arg2:" + arg2);
            // Recognition result delivered with the wake-up event ("oneshot" mode).
            if (SpeechEvent.EVENT_IVW_RESULT == eventType) {
                RecognizerResult reslut = ((RecognizerResult) obj.get(SpeechEvent.KEY_EVENT_IVW_RESULT));
                recoString += JsonParser.parseGrammarResult(reslut.getResultString());
                // Return the result to the Flutter layer.
                //todo
                // textView.setText(recoString);
                Toast.makeText(mActivity.get(), recoString, Toast.LENGTH_LONG).show();
            }
        }

        @Override
        public void onVolumeChanged(int volume) {
            // TODO Auto-generated method stub
        }
    };

    /**
     * Configures the recognizer parameters for offline command-word recognition.
     */
    public void setParam() {
        // Clear previous parameters.
        mAsr.setParameter(SpeechConstant.PARAMS, null);
        // Offline dictation needs: ENGINE_TYPE, RESULT_TYPE, ASR_RES_PATH, LANGUAGE, ACCENT, ASR_PTT, VAD_BOS, VAD_EOS.
        // Offline command-word recognition needs: ENGINE_TYPE, RESULT_TYPE, ASR_THRESHOLD, GRM_BUILD_PATH, ASR_RES_PATH.
        // Engine settings.
        mAsr.setParameter(SpeechConstant.TEXT_ENCODING, "utf-8");
        mAsr.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
        // Result format.
        mAsr.setParameter(SpeechConstant.RESULT_TYPE, resultType);
        // Grammar build directory.
        mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, grmPath);
        Log.d(TAG, "grmPath 512:" + grmPath );
        // Recognition resource path.
        mAsr.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
        Log.d(TAG, "getResourcePath() 516:" + getResourcePath() );
        // Used to hide error codes in the dialog:
        //mIat.setParameter("view_tips_plain","false");
        // Input language; zh_cn is simplified Chinese.
        mAsr.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        // Result language/accent.
        mAsr.setParameter(SpeechConstant.ACCENT, "mandarin");
        // Leading silence timeout: how long the user may stay silent before timing out.
        mAsr.setParameter(SpeechConstant.VAD_BOS, "4000");
        // Trailing silence timeout: how long after the user stops speaking the input is
        // considered finished and recording stops automatically.
        mAsr.setParameter(SpeechConstant.VAD_EOS, "2000");
        // Punctuation: "0" = no punctuation in results, "1" = with punctuation.
        mAsr.setParameter(SpeechConstant.ASR_PTT, "0");
        Log.d(TAG, "grmPath ++++++++:" + grmPath );
        Log.d(TAG, "getResourcePath() ++++++++::" + getResourcePath() );
        // Audio save path (pcm/wav supported); saving to the SD card requires the
        // WRITE_EXTERNAL_STORAGE permission:
        //mIat.setParameter(SpeechConstant.AUDIO_FORMAT,"wav");
        //mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory()+"/msc/iat.wav");
    }

    /**
     * Reads a file from the assets directory.
     *
     * @param mContext context used to access the assets
     * @param file     asset file name
     * @param code     character encoding of the file
     * @return content of the file, or "" on failure
     */
    public static String readFile(Context mContext, String file, String code) {
        int len = 0;
        byte[] buf = null;
        String result = "";
        try {
            InputStream in = mContext.getAssets().open(file);
            len = in.available();
            buf = new byte[len];
            in.read(buf, 0, len);
            result = new String(buf, code);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return result;
    }

    /*
     * Builds the recognition resource path (common recognition resource in assets).
     */
    private String getResourcePath() {
        StringBuffer tempBuffer = new StringBuffer();
        // Common recognition resource.
        tempBuffer.append(ResourceUtil.generateResourcePath(mActivity.get(),
                ResourceUtil.RESOURCE_TYPE.assets, "asr/common.jet"));
        return tempBuffer.toString();
    }

    /*
     * Formats a recognizer result: keeps the text keyed by sentence number ("sn")
     * and concatenates the pieces.
     */
    private String formatResult(RecognizerResult results) {
        String text = JsonParser.parseIatResult(results.getResultString());
        String sn = null;
        // Read the "sn" (sentence number) field from the JSON result.
        try {
            JSONObject resultJson = new JSONObject(results.getResultString());
            sn = resultJson.optString("sn");
        } catch (JSONException e) {
            e.printStackTrace();
        }
        HashMap<String, String> iatResult = new LinkedHashMap<String, String>();
        iatResult.put(sn, text);

        StringBuffer resultBuffer = new StringBuffer();
        for (String key : iatResult.keySet()) {
            resultBuffer.append(iatResult.get(key));
        }
        return resultBuffer.toString();
    }
}

View File

@ -0,0 +1,186 @@
package cn.com.motse.iflytek_speech_demo;

import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONTokener;

/**
 * Parsers for the JSON results returned by the iFlytek speech SDK.
 * modified by et
 * 2022-01-22
 */
public class JsonParser {

    /**
     * Parses a dictation ("iat") result: concatenates the best candidate word
     * ("cw"[0]."w") of every word segment in "ws".
     *
     * @param json raw JSON result string from the SDK
     * @return the recognized text (possibly partial if parsing failed midway)
     */
    public static String parseIatResult(String json) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONTokener tokener = new JSONTokener(json);
            JSONObject joResult = new JSONObject(tokener);

            JSONArray words = joResult.getJSONArray("ws");
            for (int i = 0; i < words.length(); i++) {
                // Best candidate for this word segment (index 0).
                JSONArray items = words.getJSONObject(i).getJSONArray("cw");
                JSONObject obj = items.getJSONObject(0);
                ret.append(obj.getString("w"));
                // To expose multiple candidates, iterate the remaining items:
                // for(int j = 0; j < items.length(); j++)
                // {
                //     JSONObject obj = items.getJSONObject(j);
                //     ret.append(obj.getString("w"));
                // }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return ret.toString();
    }

    /*
     * Grammar result parsing.
     * Cloud parsing is not needed in this application scenario;
     * the local parsing logic is still to be revised (see parseLocalGrammarResult).
     * TODO: revise the parsing logic
     */
    public static String parseGrammarResult(String json, String engType) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONTokener tokener = new JSONTokener(json);
            JSONObject joResult = new JSONObject(tokener);

            JSONArray words = joResult.getJSONArray("ws");
            // Cloud and local results use different layouts.
            if ("cloud".equals(engType)) {
                for (int i = 0; i < words.length(); i++) {
                    JSONArray items = words.getJSONObject(i).getJSONArray("cw");
                    for (int j = 0; j < items.length(); j++) {
                        JSONObject obj = items.getJSONObject(j);
                        if (obj.getString("w").contains("nomatch")) {
                            ret.append("没有匹配结果.");
                            return ret.toString();
                        }
                        ret.append("【结果】").append(obj.getString("w"));
                        ret.append("【置信度】").append(obj.getInt("sc"));
                        ret.append("\n");
                    }
                }
            } else if ("local".equals(engType)) {
                ret.append("【结果】");
                for (int i = 0; i < words.length(); i++) {
                    JSONObject wsItem = words.getJSONObject(i);
                    JSONArray items = wsItem.getJSONArray("cw");
                    if ("<contact>".equals(wsItem.getString("slot"))) {
                        // Several contact candidates may share the same confidence;
                        // wrap them in 【…】 separated by '|'.
                        // BUGFIX: the opening bracket was dropped (append("")),
                        // leaving the pair unbalanced with the setCharAt below.
                        ret.append("【");
                        for (int j = 0; j < items.length(); j++) {
                            JSONObject obj = items.getJSONObject(j);
                            if (obj.getString("w").contains("nomatch")) {
                                ret.append("没有匹配结果.");
                                return ret.toString();
                            }
                            ret.append(obj.getString("w")).append("|");
                        }
                        // Replace the trailing '|' with the closing bracket.
                        ret.setCharAt(ret.length() - 1, '】');
                    } else {
                        // Local candidates are ordered by confidence; take the best one.
                        JSONObject obj = items.getJSONObject(0);
                        if (obj.getString("w").contains("nomatch")) {
                            ret.append("没有匹配结果.");
                            return ret.toString();
                        }
                        ret.append(obj.getString("w"));
                    }
                }
                ret.append("【置信度】").append(joResult.getInt("sc"));
                ret.append("\n");
            }
        } catch (Exception e) {
            e.printStackTrace();
            ret.append("没有匹配结果.");
        }
        return ret.toString();
    }

    /**
     * Parses a grammar result, emitting every candidate of every word segment
     * with its confidence ("sc"); used for the oneshot wake-up recognition path.
     */
    public static String parseGrammarResult(String json) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONTokener tokener = new JSONTokener(json);
            JSONObject joResult = new JSONObject(tokener);

            JSONArray words = joResult.getJSONArray("ws");
            for (int i = 0; i < words.length(); i++) {
                JSONArray items = words.getJSONObject(i).getJSONArray("cw");
                for (int j = 0; j < items.length(); j++) {
                    JSONObject obj = items.getJSONObject(j);
                    if (obj.getString("w").contains("nomatch")) {
                        ret.append("没有匹配结果.");
                        return ret.toString();
                    }
                    ret.append("【结果】").append(obj.getString("w"));
                    ret.append("【置信度】").append(obj.getInt("sc"));
                    ret.append("\n");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            ret.append("没有匹配结果.");
        }
        return ret.toString();
    }

    /**
     * Parses a local grammar result: all candidates of all segments, followed
     * by the overall confidence ("sc", optional).
     */
    public static String parseLocalGrammarResult(String json) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONTokener tokener = new JSONTokener(json);
            JSONObject joResult = new JSONObject(tokener);

            JSONArray words = joResult.getJSONArray("ws");
            for (int i = 0; i < words.length(); i++) {
                JSONArray items = words.getJSONObject(i).getJSONArray("cw");
                for (int j = 0; j < items.length(); j++) {
                    JSONObject obj = items.getJSONObject(j);
                    if (obj.getString("w").contains("nomatch")) {
                        ret.append("没有匹配结果.");
                        return ret.toString();
                    }
                    ret.append("【结果】").append(obj.getString("w"));
                    ret.append("\n");
                }
            }
            ret.append("【置信度】").append(joResult.optInt("sc"));
        } catch (Exception e) {
            e.printStackTrace();
            ret.append("没有匹配结果.");
        }
        return ret.toString();
    }

    /**
     * Parses a translation result: returns "trans_result".key on success
     * ("ret" == "0"), otherwise the "errmsg" field.
     *
     * @param json raw JSON translation result
     * @param key  key to extract from the "trans_result" object
     * @return the extracted value, the error message, or "" on parse failure
     */
    public static String parseTransResult(String json, String key) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONTokener tokener = new JSONTokener(json);
            JSONObject joResult = new JSONObject(tokener);
            String errorCode = joResult.optString("ret");
            if (!errorCode.equals("0")) {
                return joResult.optString("errmsg");
            }
            JSONObject transResult = joResult.optJSONObject("trans_result");
            // Guard: "trans_result" may be absent even when ret == "0".
            if (transResult != null) {
                ret.append(transResult.optString(key));
            }
            /*JSONArray words = joResult.getJSONArray("results");
            for (int i = 0; i < words.length(); i++) {
                JSONObject obj = words.getJSONObject(i);
                ret.append(obj.getString(key));
            }*/
        } catch (Exception e) {
            e.printStackTrace();
        }
        return ret.toString();
    }
}

View File

@ -0,0 +1,56 @@
package cn.com.motse.iflytek_speech_demo;

import android.util.Log;

import androidx.annotation.Nullable;

import io.flutter.plugin.common.MethodChannel;

/**
 * Decorator around a {@link MethodChannel.Result} guaranteeing the wrapped
 * result is completed at most once; any extra completion attempt is logged
 * and silently dropped instead of crashing the channel.
 */
public class ResultStateful implements MethodChannel.Result {
    private final static String TAG = "ResultStateful";

    // The underlying result that receives the single allowed completion.
    private final MethodChannel.Result delegate;
    // Set to true once any of success/error/notImplemented has been delivered.
    private boolean completed;

    /** Factory wrapping the given result. */
    public static ResultStateful of(MethodChannel.Result result) {
        return new ResultStateful(result);
    }

    private ResultStateful(MethodChannel.Result result) {
        this.delegate = result;
    }

    @Override
    public void success(@Nullable Object o) {
        if (tryComplete()) {
            delegate.success(o);
        }
    }

    @Override
    public void error(String s, @Nullable String s1, @Nullable Object o) {
        if (tryComplete()) {
            delegate.error(s, s1, o);
        }
    }

    @Override
    public void notImplemented() {
        if (tryComplete()) {
            delegate.notImplemented();
        }
    }

    // Returns true exactly once; afterwards logs the double-completion and
    // returns false so callers skip the delegate call.
    private boolean tryComplete() {
        if (completed) {
            Log.e(TAG, "error:result called");
            return false;
        }
        completed = true;
        return true;
    }
}

View File

@ -0,0 +1,155 @@
# 笔记
## sdk说明
IatSettings和TtsSettings本场景不需要
SettingTextWatcher用于监听配置项变化自动保存或数据校验
IatDemo: 语音听写
AsrDemo: 语法识别
TtsDemo: 语音合成 & 增强版语音合成 xtts
IvwActivity 唤醒
- WakeDemo: 唤醒
- OneShotDemo: 唤醒+识别
ps: 本场景中主要是唤醒+命令词识别+合成
命令词识别和听写同样初始化识别对象 SpeechRecognizer.createRecognizer
## 识别
** 离线识别 - 无UI识别 **
1. 初始化
创建语音配置对象初始化后才可以使用MSC包的各项服务。初始化建议放在程序入口处如onCreate方法中
```
SpeechUtility.createUtility(context, SpeechConstant.APPID +"=12345678");
```
2. 配置
```
//使用SpeechRecognizer对象可根据回调消息自定义界面
mIat = SpeechRecognizer.createRecognizer(IatDemo.this, mInitListener);
//设置语法ID和 SUBJECT 为空,以免因之前有语法调用而设置了此参数;或直接清空所有参数,具体可参考 DEMO 的示例。
mIat.setParameter( SpeechConstant.CLOUD_GRAMMAR, null );
mIat.setParameter( SpeechConstant.SUBJECT, null );
//设置返回结果格式目前支持json,xml以及plain 三种格式其中plain为纯听写文本内容
mIat.setParameter(SpeechConstant.RESULT_TYPE, "json");
//此处engineType为“local”
mIat.setParameter( SpeechConstant.ENGINE_TYPE, "local" );
//设置语音输入语言zh_cn为简体中文
mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
//设置结果返回语言
mIat.setParameter(SpeechConstant.ACCENT, "mandarin");
// 设置语音前端点:静音超时时间单位ms即用户多长时间不说话则当做超时处理
//取值范围{100010000}
mIat.setParameter(SpeechConstant.VAD_BOS, "4000");
//设置语音后端点:后端点静音检测时间单位ms即用户停止说话多长时间内即认为不再输入
//自动停止录音,范围{0~10000}
mIat.setParameter(SpeechConstant.VAD_EOS, "2000");
//设置标点符号,设置为"0"返回结果无标点,设置为"1"返回结果有标点
mIat.setParameter(SpeechConstant.ASR_PTT,"0");
//开始识别,并设置监听器
mIat.startListening(mRecogListener);
```
3. 设置离线资源路径
## 离线命令词识别
1. 初始化
创建语音配置对象初始化后才可以使用MSC包的各项服务。初始化建议放在程序入口处如onCreate方法中
2. 配置
配置参数ENGINE_TYPE("local"), RESULT_TYPE("json"), ASR_THRESHOLD(25), GRM_BUILD_PATH("")
参数名称 名称 说明
ENGINE_TYPE 引擎类型 离线命令词识别为本地引擎。设置为local
RESULT_TYPE 返回结果格式 主要分为三种jsonxmlplain 默认json
ASR_THRESHOLD 识别门限值 门限值越高,则识别的成功率越低,但返回的结果与实际结果更接近 值范围:[0100] 默认值30
GRM_BUILD_PATH 语法构建目录 使用离线语法时,需要构建语法并保存到本地,在构建和使用语法时,都需要设置语法的构建目录 值范围:有效文件夹路径值(含文件名) 默认值null
ASR_RES_PATH 识别资源路径 离线命令词识别需要使用本地资源,通过此参数设置本地资源所在的路径 值范围:有效的资源文件路径 默认值null
3. 构建语法
编写bnf语法文件
```
// 设置引擎类型
mAsr.setParameter( SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_LOCAL );
// 设置语法结果文件保存路径,以在本地识别时使用
mAsr.setParameter( ResourceUtil.GRM_BUILD_PATH, grmPath );
//设置识别资源路径
mAsr.setParameter( ResourceUtil.ASR_RES_PATH, asrResPath );
/* 其中 "bnf" 指定语法类型为 BNF, grammarContent 为语法内容grammarListener 为构建结果监听器*/
ret = mAsr.buildGrammar( "bnf", grammarContent, grammarListener );
```
4. 更新词典
离线语法词典,旨在更新已构建的语法文件中某个规则里的内容,因此在更新时,需要指定识别资源路径,语法文件路径,语法列表(语法文件的语法名字)。
```
mAsr.setParameter( SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_LOCAL );
// 指定资源路径
mAsr.setParameter( ResourceUtil.ASR_RES_PATH, asrResPath );
// 指定语法路径
mAsr.setParameter( ResourceUtil.GRM_BUILD_PATH, grmPath );
// 指定语法名字
mAsr.setParameter( SpeechConstant.GRAMMAR_LIST, grammarName );
// lexiconName 为词典名字lexiconContents 为词典内容lexiconListener 为回调监听器
ret = mAsr.updateLexicon( lexiconName, lexiconContents, lexiconListener );
```
5. 离线命令词识别结果
json格式
```json
{
"sn":1,
"ls":true,
"bg":0,
"ed":0,
"ws":[
{
"bg":0,
"cw":[
{
"id":10001,
"sc":81,
"w":"打电话给",
"gm":0
}
],
"slot":"<dialpre>"
},
{
"bg":0,
"cw":[
{
"id":65535,
"sc":100,
"w":"丁伟",
"gm":0
}
],
"slot":"<contact>"
}
],
"sc":94
}
```
结果在ws节点中
## 合成
## 唤醒

46
example/.gitignore vendored Normal file
View File

@ -0,0 +1,46 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.packages
.pub-cache/
.pub/
/build/
# Web related
lib/generated_plugin_registrant.dart
# Symbolication related
app.*.symbols
# Obfuscation related
app.*.map.json
# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release

10
example/.metadata Normal file
View File

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 77d935af4db863f6abd0b9c31c7e6df2a13de57b
channel: unknown
project_type: app

16
example/README.md Normal file
View File

@ -0,0 +1,16 @@
# iflytek_speech_demo_example
Demonstrates how to use the iflytek_speech_demo plugin.
## Getting Started
This project is a starting point for a Flutter application.
A few resources to get you started if this is your first Flutter project:
- [Lab: Write your first Flutter app](https://flutter.dev/docs/get-started/codelab)
- [Cookbook: Useful Flutter samples](https://flutter.dev/docs/cookbook)
For help getting started with Flutter, view our
[online documentation](https://flutter.dev/docs), which offers tutorials,
samples, guidance on mobile development, and a full API reference.

View File

@ -0,0 +1,29 @@
# This file configures the analyzer, which statically analyzes Dart code to
# check for errors, warnings, and lints.
#
# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
# invoked from the command line by running `flutter analyze`.
# The following line activates a set of recommended lints for Flutter apps,
# packages, and plugins designed to encourage good coding practices.
include: package:flutter_lints/flutter.yaml
linter:
# The lint rules applied to this project can be customized in the
# section below to disable rules from the `package:flutter_lints/flutter.yaml`
# included above or to enable additional rules. A list of all available lints
# and their documentation is published at
# https://dart-lang.github.io/linter/lints/index.html.
#
# Instead of disabling a lint rule for the entire project in the
# section below, it can also be suppressed for a single line of code
# or a specific dart file by using the `// ignore: name_of_lint` and
# `// ignore_for_file: name_of_lint` syntax on the line or in the file
# producing the lint.
rules:
# avoid_print: false # Uncomment to disable the `avoid_print` rule
# prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

13
example/android/.gitignore vendored Normal file
View File

@ -0,0 +1,13 @@
gradle-wrapper.jar
/.gradle
/captures/
/gradlew
/gradlew.bat
/local.properties
GeneratedPluginRegistrant.java
# Remember to never publicly share your keystore.
# See https://flutter.dev/docs/deployment/android#reference-the-keystore-from-the-app
key.properties
**/*.keystore
**/*.jks

View File

@ -0,0 +1,59 @@
// Gradle build script for the example Android app (standard Flutter app template).

// Load developer-local settings (Flutter SDK location, version overrides).
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
    localPropertiesFile.withReader('UTF-8') { reader ->
        localProperties.load(reader)
    }
}

def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
    throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}

// Version code/name fall back to defaults when not supplied by the Flutter tool.
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
    flutterVersionCode = '1'
}

def flutterVersionName = localProperties.getProperty('flutter.versionName')
if (flutterVersionName == null) {
    flutterVersionName = '1.0'
}

apply plugin: 'com.android.application'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

android {
    compileSdkVersion flutter.compileSdkVersion

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    defaultConfig {
        // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
        applicationId "cn.com.motse.iflytek_speech_demo_example"
        minSdkVersion flutter.minSdkVersion
        targetSdkVersion flutter.targetSdkVersion
        versionCode flutterVersionCode.toInteger()
        versionName flutterVersionName
    }

    buildTypes {
        release {
            // TODO: Add your own signing config for the release build.
            // Signing with the debug keys for now, so `flutter run --release` works.
            signingConfig signingConfigs.debug
        }
    }
}

flutter {
    source '../..'
}

dependencies {
    // Bundle the iFlytek MSC SDK shipped with the plugin.
    // Use forward slashes so the relative path resolves on every host OS;
    // the original Windows-style backslashes ('..\\..\\..') break the build
    // on macOS/Linux.
    implementation files('../../../android/libs/Msc.jar')
}

View File

@ -0,0 +1,3 @@
# 代码混淆
-keep class com.iflytek.**{*;}
-keepattributes Signature

View File

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="cn.com.motse.iflytek_speech_demo_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View File

@ -0,0 +1,34 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="cn.com.motse.iflytek_speech_demo_example">
<application
android:label="iflytek_speech_demo_example"
android:name="${applicationName}"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data
android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme"
/>
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
</manifest>

View File

@ -0,0 +1,6 @@
package cn.com.motse.iflytek_speech_demo_example;
import io.flutter.embedding.android.FlutterActivity;
// Entry-point Activity of the example app. All UI and logic live on the
// Flutter side, so no Android-specific overrides are needed here.
public class MainActivity extends FlutterActivity {
}

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="?android:colorBackground" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown.

After

Width:  |  Height:  |  Size: 544 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 442 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 721 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
<style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View File

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="cn.com.motse.iflytek_speech_demo_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View File

@ -0,0 +1,29 @@
// Root Gradle build script for the example app (standard Flutter template).
buildscript {
    repositories {
        google()
        mavenCentral()
    }

    dependencies {
        // Android Gradle Plugin version used to build the :app module.
        classpath 'com.android.tools.build:gradle:4.1.0'
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

// Redirect all build output into a shared ../build directory (Flutter convention),
// one subdirectory per Gradle subproject.
rootProject.buildDir = '../build'
subprojects {
    project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
    // Ensure :app is evaluated first so plugin subprojects can read its config.
    project.evaluationDependsOn(':app')
}

task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,3 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true

View File

@ -0,0 +1,6 @@
#Fri Jun 23 08:50:38 CEST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-all.zip

View File

@ -0,0 +1,11 @@
include ':app'

// Locate the Flutter SDK via local.properties and apply its plugin loader,
// which wires Flutter plugin projects into this Gradle build.
def localPropertiesFile = new File(rootProject.projectDir, "local.properties")
def properties = new Properties()

// local.properties is generated by the Flutter tool; fail fast if missing.
assert localPropertiesFile.exists()
localPropertiesFile.withReader("UTF-8") { reader -> properties.load(reader) }

def flutterSdkPath = properties.getProperty("flutter.sdk")
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
apply from: "$flutterSdkPath/packages/flutter_tools/gradle/app_plugin_loader.gradle"

96
example/lib/main.dart Normal file
View File

@ -0,0 +1,96 @@
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:flutter/services.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:iflytek_speech_demo/iflytek_speech_demo.dart';
// Example app for the plugin: demonstrates calling the plugin's Dart API.
void main() => runApp(MyApp());
/// Root widget of the example app; builds a [MaterialApp] hosting [HomePage].
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      debugShowCheckedModeBanner: false,
      title: 'Flutter_ScreenUtil',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: HomePage(title: 'FlutterScreenUtil Demo'),
    );
  }
}
/// Home screen of the example app.
class HomePage extends StatefulWidget {
  const HomePage({Key? key, required this.title}) : super(key: key);

  // Title supplied by the caller. Note: the state's AppBar currently renders
  // a literal string instead of this field.
  final String title;

  @override
  _HomePageState createState() => _HomePageState();
}
class _HomePageState extends State<HomePage> {
  @override
  Widget build(BuildContext context) {
    // Initialize flutter_screenutil with the design size the layout was made
    // for (360dp x 690dp) so the .sw/.sh scaling extensions work below.
    ScreenUtil.init(
        BoxConstraints(
            maxWidth: MediaQuery.of(context).size.width,
            maxHeight: MediaQuery.of(context).size.height),
        designSize: Size(360, 690),
        context: context,
        minTextAdapt: true,
        orientation: Orientation.portrait);
    return Scaffold(
      appBar: AppBar(
        title: Text('测试'),
      ),
      body: Content(),
    );
  }
}
/// Stateful wrapper for the demo button list.
class Content extends StatefulWidget {
  const Content({Key? key}) : super(key: key);

  @override
  _ContentState createState() => _ContentState();
}
class _ContentState extends State<Content> {
  // iFlytek application ID used to initialize the native SDK.
  String appID = 'da2aaf9b';
  // Plugin facade that talks to the platform side over a MethodChannel.
  IflytekSpeechDemo iflytekSpeechDemo = IflytekSpeechDemo();

  @override
  Widget build(BuildContext context) {
    return Container(
      // 1.sh / 1.sw are full screen height/width from flutter_screenutil.
      height: 1.sh - 90,
      width: 1.sw,
      child: ListView(
        children: [
          ElevatedButton(
            child: Text("test"),
            onPressed: () {
              print('我被点击了');
              // Initialize the native speech SDK with the app ID above.
              iflytekSpeechDemo.init(appID);
            },
          ),
          ElevatedButton(
            child: Text("start"),
            onPressed: () {
              print('我被点击了');
              // Start recognition; recognized text events arrive here.
              iflytekSpeechDemo.start((Map<String, dynamic> event) async {
                print("flutter onOpenNotification: $event");
              });
            },
          ),
        ],
      ),
    );
  }
}

182
example/pubspec.lock Normal file
View File

@ -0,0 +1,182 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
async:
dependency: transitive
description:
name: async
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.8.2"
boolean_selector:
dependency: transitive
description:
name: boolean_selector
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.1.0"
characters:
dependency: transitive
description:
name: characters
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.2.0"
charcode:
dependency: transitive
description:
name: charcode
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.3.1"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.0"
collection:
dependency: transitive
description:
name: collection
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.15.0"
cupertino_icons:
dependency: "direct main"
description:
name: cupertino_icons
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.4"
fake_async:
dependency: transitive
description:
name: fake_async
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.2.0"
flutter:
dependency: "direct main"
description: flutter
source: sdk
version: "0.0.0"
flutter_lints:
dependency: "direct dev"
description:
name: flutter_lints
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.4"
flutter_screenutil:
dependency: "direct main"
description:
name: flutter_screenutil
url: "https://pub.flutter-io.cn"
source: hosted
version: "5.1.0"
flutter_test:
dependency: "direct dev"
description: flutter
source: sdk
version: "0.0.0"
iflytek_speech_demo:
dependency: "direct main"
description:
path: ".."
relative: true
source: path
version: "0.0.1"
lints:
dependency: transitive
description:
name: lints
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.1"
matcher:
dependency: transitive
description:
name: matcher
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.12.11"
meta:
dependency: transitive
description:
name: meta
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.7.0"
path:
dependency: transitive
description:
name: path
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.8.0"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
source_span:
dependency: transitive
description:
name: source_span
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.8.1"
stack_trace:
dependency: transitive
description:
name: stack_trace
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.10.0"
stream_channel:
dependency: transitive
description:
name: stream_channel
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.1.0"
string_scanner:
dependency: transitive
description:
name: string_scanner
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.0"
term_glyph:
dependency: transitive
description:
name: term_glyph
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.2.0"
test_api:
dependency: transitive
description:
name: test_api
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.4.3"
typed_data:
dependency: transitive
description:
name: typed_data
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.3.0"
vector_math:
dependency: transitive
description:
name: vector_math
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.1.1"
sdks:
dart: ">=2.15.1 <3.0.0"
flutter: ">=2.5.0"

85
example/pubspec.yaml Normal file
View File

@ -0,0 +1,85 @@
name: iflytek_speech_demo_example
description: Demonstrates how to use the iflytek_speech_demo plugin.
# The following line prevents the package from being accidentally published to
# pub.dev using `flutter pub publish`. This is preferred for private packages.
publish_to: 'none' # Remove this line if you wish to publish to pub.dev
environment:
sdk: ">=2.15.1 <3.0.0"
# Dependencies specify other packages that your package needs in order to work.
# To automatically upgrade your package dependencies to the latest versions
# consider running `flutter pub upgrade --major-versions`. Alternatively,
# dependencies can be manually updated by changing the version numbers below to
# the latest version available on pub.dev. To see which dependencies have newer
# versions available, run `flutter pub outdated`.
dependencies:
flutter:
sdk: flutter
flutter_screenutil: ^5.1.0
iflytek_speech_demo:
# When depending on this package from a real application you should use:
# iflytek_speech_demo: ^x.y.z
# See https://dart.dev/tools/pub/dependencies#version-constraints
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ../
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
cupertino_icons: ^1.0.2
dev_dependencies:
flutter_test:
sdk: flutter
# The "flutter_lints" package below contains a set of recommended lints to
# encourage good coding practices. The lint set provided by the package is
# activated in the `analysis_options.yaml` file located at the root of your
# package. See that file for information about deactivating specific lint
# rules and activating additional ones.
flutter_lints: ^1.0.0
# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec
# The following section is specific to Flutter.
flutter:
# The following line ensures that the Material Icons font is
# included with your application, so that you can use the icons in
# the material Icons class.
uses-material-design: true
# To add assets to your application, add an assets section, like this:
# assets:
# - images/a_dot_burr.jpeg
# - images/a_dot_ham.jpeg
# An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.dev/assets-and-images/#resolution-aware.
# For details regarding adding assets from package dependencies, see
# https://flutter.dev/assets-and-images/#from-packages
# To add custom fonts to your application, add a fonts section here,
# in this "flutter" section. Each entry in this list should have a
# "family" key with the font family name, and a "fonts" key with a
# list giving the asset and other descriptors for the font. For
# example:
# fonts:
# - family: Schyler
# fonts:
# - asset: fonts/Schyler-Regular.ttf
# - asset: fonts/Schyler-Italic.ttf
# style: italic
# - family: Trajan Pro
# fonts:
# - asset: fonts/TrajanPro.ttf
# - asset: fonts/TrajanPro_Bold.ttf
# weight: 700
#
# For details regarding fonts from package dependencies,
# see https://flutter.dev/custom-fonts/#from-packages

View File

@ -0,0 +1,27 @@
// This is a basic Flutter widget test.
//
// To perform an interaction with a widget in your test, use the WidgetTester
// utility that Flutter provides. For example, you can send tap and scroll
// gestures. You can also use WidgetTester to find child widgets in the widget
// tree, read text, and verify that the values of widget properties are correct.
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:iflytek_speech_demo_example/main.dart';
void main() {
  testWidgets('renders home page with action buttons', (WidgetTester tester) async {
    // Build our app and trigger a frame.
    await tester.pumpWidget(MyApp());

    // The stock template test looked for a Text starting with 'Running on:',
    // which this example app never renders, so the test always failed.
    // Assert on widgets the app actually builds instead: the AppBar title
    // and the two demo buttons.
    expect(find.text('测试'), findsOneWidget);
    expect(find.text('test'), findsOneWidget);
    expect(find.text('start'), findsOneWidget);
  });
}

View File

@ -0,0 +1,64 @@
import 'dart:async';
import 'package:flutter/services.dart';
typedef Future<dynamic> EventHandler(Map<String, dynamic> event);
/// Dart-side API of the iFlytek speech plugin.
///
/// Wraps a [MethodChannel] to the native implementation and forwards
/// recognized speech text back to the caller through an [EventHandler].
class IflytekSpeechDemo {
  static const MethodChannel _channel = MethodChannel('iflytek_speech_demo');

  // Callback invoked when the native side reports recognized speech text.
  // NOTE(review): being `late`, a native "onReceiveSpeechText" call arriving
  // before [start] throws a LateInitializationError — confirm the native side
  // never emits events before start().
  late EventHandler _onReceiveSpeechText;

  /// Returns the host platform version string (sample/test method).
  static Future<String?> get platformVersion async {
    final String? version = await _channel.invokeMethod('getPlatformVersion');
    return version;
  }

  /// Initializes the native SDK with [appId].
  ///
  /// Returns the native result code (0 on success) or the sentinel 9999999
  /// when the platform call itself fails. On success, installs the
  /// method-call handler so native events can reach Dart.
  Future<int> init(String appId) async {
    try {
      int ret = await _channel.invokeMethod('init', {
        "appID": appId
      });
      if (ret == 0) {
        _channel.setMethodCallHandler(_handleMethod);
      }
      return ret;
    } catch (e) {
      print(e);
      return 9999999;
    }
  }

  // Dispatches method calls arriving FROM the native side.
  // Fix: the original declared `Future<Future>`, which needlessly wrapped the
  // handler's Future in another Future; `Future<dynamic>` is the type the
  // MethodChannel handler contract expects.
  Future<dynamic> _handleMethod(MethodCall call) async {
    print("speech_handleMethod:");
    switch (call.method) {
      case "onReceiveSpeechText":
        return _onReceiveSpeechText(call.arguments.cast<String, dynamic>());
      default:
        throw UnsupportedError("Unrecognized Event:" + call.method);
    }
  }

  /// Starts recognition; [onReceiveSpeechText] receives recognized text.
  ///
  /// Returns the native result, or "9999999" if the platform call fails.
  Future<String> start(EventHandler onReceiveSpeechText) async {
    _onReceiveSpeechText = onReceiveSpeechText;
    try {
      return await _channel.invokeMethod('start');
    } catch (e) {
      print(e);
      return "9999999";
    }
  }

  /// Stops the current recognition session.
  ///
  /// Now guarded like [start] for consistency: platform errors are logged
  /// and reported as "9999999" instead of propagating.
  Future<String> stop() async {
    try {
      return await _channel.invokeMethod('stop');
    } catch (e) {
      print(e);
      return "9999999";
    }
  }

  /// Cancels the current recognition session.
  ///
  /// Guarded like [start] for consistency with the rest of the API.
  Future<String> cancel() async {
    try {
      return await _channel.invokeMethod('cancel');
    } catch (e) {
      print(e);
      return "9999999";
    }
  }
}

62
pubspec.yaml Normal file
View File

@ -0,0 +1,62 @@
name: iflytek_speech_demo
description: 语音插件
version: 0.0.1
homepage:
environment:
sdk: ">=2.15.1 <3.0.0"
flutter: ">=2.5.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
flutter_test:
sdk: flutter
flutter_lints: ^1.0.0
# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec
# The following section is specific to Flutter.
flutter:
# This section identifies this Flutter project as a plugin project.
# The 'pluginClass' and Android 'package' identifiers should not ordinarily
# be modified. They are used by the tooling to maintain consistency when
# adding or updating assets for this project.
plugin:
platforms:
android:
package: cn.com.motse.iflytek_speech_demo
pluginClass: IflytekSpeechDemoPlugin
# To add assets to your plugin package, add an assets section, like this:
# assets:
# - images/a_dot_burr.jpeg
# - images/a_dot_ham.jpeg
#
# For details regarding assets in packages, see
# https://flutter.dev/assets-and-images/#from-packages
#
# An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.dev/assets-and-images/#resolution-aware.
# To add custom fonts to your plugin package, add a fonts section here,
# in this "flutter" section. Each entry in this list should have a
# "family" key with the font family name, and a "fonts" key with a
# list giving the asset and other descriptors for the font. For
# example:
# fonts:
# - family: Schyler
# fonts:
# - asset: fonts/Schyler-Regular.ttf
# - asset: fonts/Schyler-Italic.ttf
# style: italic
# - family: Trajan Pro
# fonts:
# - asset: fonts/TrajanPro.ttf
# - asset: fonts/TrajanPro_Bold.ttf
# weight: 700
#
# For details regarding fonts in packages, see
# https://flutter.dev/custom-fonts/#from-packages

View File

@ -0,0 +1,23 @@
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:iflytek_speech_demo/iflytek_speech_demo.dart';
void main() {
  // Channel name must match the one used by IflytekSpeechDemo.
  const MethodChannel channel = MethodChannel('iflytek_speech_demo');

  TestWidgetsFlutterBinding.ensureInitialized();

  setUp(() {
    // Stub the platform side so every method call returns '42'.
    channel.setMockMethodCallHandler((MethodCall methodCall) async {
      return '42';
    });
  });

  tearDown(() {
    // Remove the stub so other tests see an unmocked channel.
    channel.setMockMethodCallHandler(null);
  });

  test('getPlatformVersion', () async {
    expect(await IflytekSpeechDemo.platformVersion, '42');
  });
}