new changes

merge-requests/34/merge
Sultan Khan 4 years ago
parent 9afeecf9fa
commit 4c1a8dc38f

@ -62,4 +62,5 @@ flutter {
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation "com.google.firebase:firebase-messaging:20.1.0"
implementation 'pub.devrel:easypermissions:0.4.0'
}

@ -11,6 +11,7 @@
<uses-permission android:name="android.permission.VIBRATE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

Binary file not shown.


@ -5,89 +5,99 @@ import 'package:diplomaticquarterapp/uitl/app_shared_preferences.dart';
const MAX_SMALL_SCREEN = 660;
const BASE_URL = 'https://hmgwebservices.com/Services';
const BASE_URL = 'https://hmgwebservices.com/';
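// NOTE (assumption): BaseAppClient is presumed to build each request URL as
// BASE_URL + <endpoint constant>, which is why BASE_URL drops its '/Services'
// suffix here while the endpoints below gain a 'Services/' prefix, e.g.
//   BASE_URL + GET_MY_DOCTOR
//   => 'https://hmgwebservices.com/Services/Doctors.svc/REST/GetPatientDoctorAppointmentResult'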
const GET_PROJECT = '/Lists.svc/REST/GetProject';
///Doctor
const GET_MY_DOCTOR = '/Doctors.svc/REST/GetPatientDoctorAppointmentResult';
const GET_DOCTOR_PROFILE = '/Doctors.svc/REST/GetDocProfiles';
const GET_DOCTOR_RATING = '/Doctors.svc/REST/dr_GetAvgDoctorRating';
const GET_MY_DOCTOR =
'Services/Doctors.svc/REST/GetPatientDoctorAppointmentResult';
const GET_DOCTOR_PROFILE = 'Services/Doctors.svc/REST/GetDocProfiles';
const GET_DOCTOR_RATING = 'Services/Doctors.svc/REST/dr_GetAvgDoctorRating';
///Prescriptions
const PRESCRIPTIONS = '/Patients.svc/REST/GetPrescriptionApptList';
const PRESCRIPTIONS = 'Services/Patients.svc/REST/GetPrescriptionApptList';
const GET_PRESCRIPTIONS_ALL_ORDERS =
'/Patients.svc/REST/PatientER_GetPatientAllPresOrders';
const GET_PRESCRIPTION_REPORT = '/Patients.svc/REST/INP_GetPrescriptionReport';
const SEND_PRESCRIPTION_EMAIL = '/Notifications.svc/REST/SendPrescriptionEmail';
'Services/Patients.svc/REST/PatientER_GetPatientAllPresOrders';
const GET_PRESCRIPTION_REPORT =
'Services/Patients.svc/REST/INP_GetPrescriptionReport';
const SEND_PRESCRIPTION_EMAIL =
'Services/Notifications.svc/REST/SendPrescriptionEmail';
const GET_PRESCRIPTION_REPORT_ENH =
'/Patients.svc/REST/GetPrescriptionReport_enh';
'Services/Patients.svc/REST/GetPrescriptionReport_enh';
///Lab Order
const GET_Patient_LAB_ORDERS = '/Patients.svc/REST/GetPatientLabOrders';
const GET_Patient_LAB_ORDERS = 'Services/Patients.svc/REST/GetPatientLabOrders';
const GET_Patient_LAB_SPECIAL_RESULT =
'/Patients.svc/REST/GetPatientLabSpecialResults';
'Services/Patients.svc/REST/GetPatientLabSpecialResults';
///
const GET_PATIENT_ORDERS = '/Patients.svc/REST/GetPatientRadOrders';
const GET_PATIENT_ORDERS = 'Services/Patients.svc/REST/GetPatientRadOrders';
const GET_PATIENT_ORDERS_DETAILS =
'/Patients.svc/REST/Rad_UpdatePatientRadOrdersToRead';
const GET_RAD_IMAGE_URL = '/Patients.svc/Rest/GetRadImageURL';
const SEND_RAD_REPORT_EMAIL = '/Notifications.svc/REST/SendRadReportEmail';
'Services/Patients.svc/REST/Rad_UpdatePatientRadOrdersToRead';
const GET_RAD_IMAGE_URL = 'Services/Patients.svc/Rest/GetRadImageURL';
const SEND_RAD_REPORT_EMAIL =
'Services/Notifications.svc/REST/SendRadReportEmail';
///Feedback
const SEND_FEEDBACK = '/COCWS.svc/REST/InsertCOCItemInSPList';
const GET_STATUS_FOR_COCO = '/COCWS.svc/REST/GetStatusforCOC';
const SEND_FEEDBACK = 'Services/COCWS.svc/REST/InsertCOCItemInSPList';
const GET_STATUS_FOR_COCO = 'Services/COCWS.svc/REST/GetStatusforCOC';
//const BASE_URL = 'https://hmgwebservices.com/Services'; // Production Environment
//const BASE_URL = 'https://uat.hmgwebservices.com/Services'; // UAT Environment
//URL to get clinic list
const GET_CLINICS_LIST_URL = "/lists.svc/REST/GetClinicCentralized";
const GET_CLINICS_LIST_URL = "Services/lists.svc/REST/GetClinicCentralized";
//URL to get doctors list
const GET_DOCTORS_LIST_URL = "/Doctors.svc/REST/SearchDoctorsByTime";
const GET_DOCTORS_LIST_URL = "Services/Doctors.svc/REST/SearchDoctorsByTime";
//URL to get doctor free slots
const GET_DOCTOR_FREE_SLOTS = "/Doctors.svc/REST/GetDoctorFreeSlots";
const GET_DOCTOR_FREE_SLOTS = "Services/Doctors.svc/REST/GetDoctorFreeSlots";
//URL to insert appointment
const INSERT_SPECIFIC_APPOINTMENT =
"/Doctors.svc/REST/InsertSpecificAppointment";
"Services/Doctors.svc/REST/InsertSpecificAppointment";
//URL to get patient share
const GET_PATIENT_SHARE =
"/Doctors.svc/REST/GetCheckinScreenAppointmentDetailsByAppointmentNO";
"Services/Doctors.svc/REST/GetCheckinScreenAppointmentDetailsByAppointmentNO";
//URL to get patient appointment history
const GET_PATIENT_APPOINTMENT_HISTORY =
"/Doctors.svc/REST/PateintHasAppoimentHistory";
"Services/Doctors.svc/REST/PateintHasAppoimentHistory";
//URL to get patient appointment curfew history
const GET_PATIENT_APPOINTMENT_CURFEW_HISTORY =
"/Doctors.svc/REST/AppoimentHistoryForCurfew";
"Services/Doctors.svc/REST/AppoimentHistoryForCurfew";
//URL to confirm appointment
const CONFIRM_APPOINTMENT = "/MobileNotifications.svc/REST/ConfirmAppointment";
const CONFIRM_APPOINTMENT =
"Services/MobileNotifications.svc/REST/ConfirmAppointment";
//URL to cancel appointment
const CANCEL_APPOINTMENT = "/Doctors.svc/REST/CancelAppointment";
const CANCEL_APPOINTMENT = "Services/Doctors.svc/REST/CancelAppointment";
//URL get appointment QR
const GENERATE_QR_APPOINTMENT = "/Doctors.svc/REST/GenerateQRAppointmentNo";
const GENERATE_QR_APPOINTMENT =
"Services/Doctors.svc/REST/GenerateQRAppointmentNo";
//URL send email appointment QR
const EMAIL_QR_APPOINTMENT = "/Notifications.svc/REST/sendEmailForOnLineCheckin";
const EMAIL_QR_APPOINTMENT =
"Services/Notifications.svc/REST/sendEmailForOnLineCheckin";
//URL check payment status
const CHECK_PAYMENT_STATUS = "/PayFort_Serv.svc/REST/GetRequestStatusByRequestID";
const CHECK_PAYMENT_STATUS =
"Services/PayFort_Serv.svc/REST/GetRequestStatusByRequestID";
//URL create advance payment
const CREATE_ADVANCE_PAYMENT = "/Doctors.svc/REST/CreateAdvancePayment";
const CREATE_ADVANCE_PAYMENT = "Services/Doctors.svc/REST/CreateAdvancePayment";
const ADD_ADVANCE_NUMBER_REQUEST = '/PayFort_Serv.svc/REST/AddAdvancedNumberRequest';
const ADD_ADVANCE_NUMBER_REQUEST =
'Services/PayFort_Serv.svc/REST/AddAdvancedNumberRequest';
const IS_ALLOW_ASK_DOCTOR = '/Doctors.svc/REST/GetPatientDoctorAppointmentResult';
const IS_ALLOW_ASK_DOCTOR =
'Services/Doctors.svc/REST/GetPatientDoctorAppointmentResult';
//URL to get medicine and pharmacies list
const CHANNEL = 3;
@ -104,12 +114,16 @@ const PATIENT_TYPE_ID = 1;
var DeviceTypeID = Platform.isIOS ? 1 : 2;
const LANGUAGE_ID = 2;
const GET_PHARMCY_ITEMS = "/Lists.svc/REST/GetPharmcyItems_Region";
const GET_PHARMACY_LIST = "/Patients.svc/REST/GetPharmcyList";
const GET_PAtIENTS_INSURANCE = "/Patients.svc/REST/Get_PatientInsuranceDetails";
const GET_PHARMCY_ITEMS = "Services/Lists.svc/REST/GetPharmcyItems_Region";
const GET_PHARMACY_LIST = "Services/Patients.svc/REST/GetPharmcyList";
const GET_PAtIENTS_INSURANCE =
"Services/Patients.svc/REST/Get_PatientInsuranceDetails";
const GET_PAtIENTS_INSURANCE_UPDATED =
"/Patients.svc/REST/PatientER_GetPatientInsuranceCardUpdateHistory";
const GET_PAtIENTS_INSURANCE_APPROVALS = "/Patients.svc/REST/GetApprovalStatus";
"Services/Patients.svc/REST/PatientER_GetPatientInsuranceCardUpdateHistory";
const GET_PAtIENTS_INSURANCE_APPROVALS =
"Services/Patients.svc/REST/GetApprovalStatus";
const SEARCH_BOT =
'Services/HabibiChatBotApi/BotInterface/GetVoiceCommandResponse';
class AppGlobal {
static var context;

@ -271,10 +271,7 @@ class _ConfirmLogin extends State<ConfirmLogin> {
if (type == 2 || type == 3) {
fingrePrintBefore = type;
}
this.selectedOption =
(user != null && (user.logInType == 3 || user.logInType == 2))
? user.logInType
: fingrePrintBefore != null ? fingrePrintBefore : type;
this.selectedOption = fingrePrintBefore != null ? fingrePrintBefore : type;
switch (type) {
case 1:
@ -411,21 +408,12 @@ class _ConfirmLogin extends State<ConfirmLogin> {
});
}
setUser() async {
setUser() async {}
setDefault() async {
if (await sharedPref.getObject(IMEI_USER_DATA) != null)
user = SelectDeviceIMEIRES.fromJson(
await sharedPref.getObject(IMEI_USER_DATA));
}
setDefault() async {
// this.user = this.cs.sharedService.getSharedData(
// AuthenticationService.IMEI_USER_DATA,
// false
// );
setState(() {
setUser();
});
if (await sharedPref.getObject(REGISTER_DATA_FOR_LOGIIN) != null) {
isMoreOption = true;
@ -488,10 +476,7 @@ class _ConfirmLogin extends State<ConfirmLogin> {
SMSOTP.showLoadingDialog(context, true);
var request = this.getCommonRequest().toJson();
this
.authService
.checkActivationCode(request, value)
.then((result) => {
this.authService.checkActivationCode(request, value).then((result) => {
if (result is Map)
{
result = CheckActivationCode.fromJson(result),
@ -540,7 +525,7 @@ class _ConfirmLogin extends State<ConfirmLogin> {
}
insertIMEI() {
authService.insertDeviceImei().then((value) => {goToHome()});
authService.insertDeviceImei(selectedOption).then((value) => {goToHome()});
}
goToHome() {

@ -18,22 +18,24 @@ import 'package:flutter/cupertino.dart';
enum APP_STATUS { LOADING, UNAUTHENTICATED, AUTHENTICATED }
AppSharedPreferences sharedPref = new AppSharedPreferences();
const String INSERT_DEVICE_IMEI = '/Patients.svc/REST/Patient_INSERTDeviceIMEI';
const String INSERT_DEVICE_IMEI =
'Services/Patients.svc/REST/Patient_INSERTDeviceIMEI';
const String SELECT_DEVICE_IMEI =
'/Patients.svc/REST/Patient_SELECTDeviceIMEIbyIMEI';
'Services/Patients.svc/REST/Patient_SELECTDeviceIMEIbyIMEI';
const String CHECK_PATIENT_AUTH =
'/Authentication.svc/REST/CheckPatientAuthentication';
const GET_MOBILE_INFO = '/Authentication.svc/REST/GetMobileLoginInfo';
'Services/Authentication.svc/REST/CheckPatientAuthentication';
const GET_MOBILE_INFO = 'Services/Authentication.svc/REST/GetMobileLoginInfo';
const SEND_ACTIVATION_CODE =
'/Authentication.svc/REST/SendActivationCodebyOTPNotificationType';
const CHECK_ACTIVATION_CODE = '/Authentication.svc/REST/CheckActivationCode';
'Services/Authentication.svc/REST/SendActivationCodebyOTPNotificationType';
const CHECK_ACTIVATION_CODE =
'Services/Authentication.svc/REST/CheckActivationCode';
const CHECK_PATIENT_FOR_REGISTRATION =
"/Authentication.svc/REST/CheckPatientForRegisteration";
"Services/Authentication.svc/REST/CheckPatientForRegisteration";
const CHECK_USER_STATUS = "/NHIC.svc/REST/GetPatientInfo";
const REGISTER_USER = '/Authentication.svc/REST/PatientRegistration';
const CHECK_USER_STATUS = "Services/NHIC.svc/REST/GetPatientInfo";
const REGISTER_USER = 'Services/Authentication.svc/REST/PatientRegistration';
const LOGGED_IN_USER_URL =
'/MobileNotifications.svc/REST/Insert_PatientMobileDeviceInfo';
'Services/MobileNotifications.svc/REST/Insert_PatientMobileDeviceInfo';
class AuthProvider with ChangeNotifier {
bool isLogin = false;
@ -85,11 +87,11 @@ class AuthProvider with ChangeNotifier {
// }
// }
Future<dynamic> insertDeviceImei() async {
Future<dynamic> insertDeviceImei(lstLogin) async {
try {
dynamic localRes;
var lastLogin = await sharedPref.getInt(
LAST_LOGIN); //this.cs.sharedService.getStorage(AuthenticationService.LAST_LOGIN); //this.cs.sharedService.getSharedData(AuthenticationService.LAST_LOGIN, false);
var lastLogin = lstLogin; //await sharedPref.getInt(
// LAST_LOGIN); //this.cs.sharedService.getStorage(AuthenticationService.LAST_LOGIN); //this.cs.sharedService.getSharedData(AuthenticationService.LAST_LOGIN, false);
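// The login type now comes from the caller (see insertDeviceImei(selectedOption)
// in the confirm_login change above) instead of the LAST_LOGIN shared preference.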
var request =
AuthenticatedUser.fromJson(await sharedPref.getObject(USER_PROFILE));

@ -11,7 +11,7 @@ enum APP_STATUS { LOADING, UNAUTHENTICATED, AUTHENTICATED }
AppSharedPreferences sharedPref = new AppSharedPreferences();
const String GET_SHARED_RECORD_BY_STATUS =
'/Authentication.svc/REST/GetAllSharedRecordsByStatus';
'Services/Authentication.svc/REST/GetAllSharedRecordsByStatus';
class FamilyFilesProvider with ChangeNotifier {
bool isLogin = false;

@ -0,0 +1,33 @@
import 'package:diplomaticquarterapp/config/config.dart';
import 'package:diplomaticquarterapp/core/service/client/base_app_client.dart';
import 'package:flutter/cupertino.dart';
class SearchProvider with ChangeNotifier {
bool isLogin = false;
bool isLoading = true;
dynamic pageData;
SearchProvider() {
getLisner();
}
Future getBotPages(request) async {
try {
await BaseAppClient().post(SEARCH_BOT,
onSuccess: (dynamic response, int statusCode) {
pageData = response;
notifyListeners();
}, onFailure: (String error, int statusCode) {
throw error;
}, body: request);
// notifyListeners();
return Future.value(pageData);
} catch (error) {
print(error);
throw error;
}
}
getLisner() {
return pageData;
}
}

@ -1,5 +1,6 @@
import 'package:diplomaticquarterapp/config/shared_pref_kay.dart';
import 'package:diplomaticquarterapp/core/service/client/base_app_client.dart';
import 'package:diplomaticquarterapp/core/viewModels/project_view_model.dart';
import 'package:diplomaticquarterapp/models/Authentication/authenticated_user.dart';
import 'package:diplomaticquarterapp/models/FamilyFiles/GetAllSharedRecordByStatusResponse.dart';
import 'package:diplomaticquarterapp/routes.dart';
@ -10,6 +11,7 @@ import 'package:diplomaticquarterapp/widgets/data_display/list/flexible_containe
import 'package:diplomaticquarterapp/widgets/data_display/text.dart';
import 'package:diplomaticquarterapp/widgets/text/app_texts_widget.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
import '../../config/size_config.dart';
import 'drawer_item_widget.dart';
@ -26,11 +28,14 @@ class _AppDrawerState extends State<AppDrawer> {
checkUserData();
}
ProjectViewModel projectProvider;
var sharedPref = new AppSharedPreferences();
var familyFileProvider = FamilyFilesProvider();
AuthenticatedUser user;
@override
Widget build(BuildContext context) {
projectProvider = Provider.of(context);
return SizedBox(
width: MediaQuery.of(context).size.width * 0.75,
child: Container(
@ -120,9 +125,14 @@ class _AppDrawerState extends State<AppDrawer> {
TranslationBase.of(context).arabicChange,
Icons.translate),
onTap: () {
Navigator.of(context).pushNamed(
WELCOME_LOGIN,
);
// Navigator.of(context).pushNamed(
// WELCOME_LOGIN,
// );
if (projectProvider.isArabic) {
projectProvider.changeLanguage('en');
} else {
projectProvider.changeLanguage('ar');
}
},
),
user != null

@ -1,6 +1,7 @@
import 'package:diplomaticquarterapp/config/config.dart';
import 'package:diplomaticquarterapp/core/viewModels/base_view_model.dart';
import 'package:diplomaticquarterapp/widgets/progress_indicator/app_loader_widget.dart';
import 'package:diplomaticquarterapp/widgets/robo-search/robosearch.dart';
import 'package:flutter/material.dart';
import 'package:font_awesome_flutter/font_awesome_flutter.dart';
@ -64,6 +65,14 @@ class AppScaffold extends StatelessWidget {
)
: buildBodyWidget(),
bottomSheet: bottomSheet,
floatingActionButton: Padding(
padding: const EdgeInsets.only(bottom: 80),
child: FloatingActionButton(
child: Icon(Icons.mic),
onPressed: () {
roboSearch(context);
},
)),
);
}
@ -74,4 +83,9 @@ class AppScaffold extends StatelessWidget {
buildBodyWidget() {
return Stack(children: <Widget>[body, buildAppLoaderWidget(isLoading)]);
}
roboSearch(context) {
var dialog = RoboSearch(context: context);
dialog.showAlertDialog(context);
}
}

@ -0,0 +1,58 @@
import 'package:diplomaticquarterapp/config/size_config.dart';
import 'package:diplomaticquarterapp/uitl/translations_delegate_base.dart';
import 'package:diplomaticquarterapp/widgets/robo-search/search.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
class RoboSearch {
final BuildContext context;
RoboSearch({
@required this.context,
});
showAlertDialog(BuildContext context) {
// set up the buttons
// set up the AlertDialog
AlertDialog alert = AlertDialog(
title: Center(child: Text(TranslationBase.of(context).search)),
content: SearchOption(),
);
// show the dialog
showDialog(
context: context,
builder: (BuildContext context) {
return alert;
},
);
}
static closeAlertDialog(BuildContext context) {
Navigator.of(context).pop();
}
}
class SearchOption extends StatefulWidget {
@override
_SearchOption createState() => _SearchOption();
}
class _SearchOption extends State<SearchOption> {
bool isLoading = true;
// var familyFileProvider = RobotProvider();
@override
void initState() {
isLoading = true;
super.initState();
}
Widget build(BuildContext context) {
return Container(
color: Colors.white,
height: SizeConfig.realScreenHeight * 0.5,
width: SizeConfig.realScreenWidth * 0.8,
child: SearchBot());
}
}

@ -0,0 +1,159 @@
import 'package:diplomaticquarterapp/config/size_config.dart';
import 'package:diplomaticquarterapp/services/robo_search/search_provider.dart';
import 'package:diplomaticquarterapp/uitl/translations_delegate_base.dart';
import 'package:diplomaticquarterapp/widgets/text/app_texts_widget.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter_tts/flutter_tts.dart';
import 'package:hexcolor/hexcolor.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
import 'dart:math';
class SearchBot extends StatefulWidget {
@override
_SearchBot createState() => _SearchBot();
}
class _SearchBot extends State<SearchBot> {
// DrAppSharedPreferances sharedPref = new DrAppSharedPreferances();
SearchProvider searchProvider = new SearchProvider();
bool isLoading = false;
bool isError = false;
final SpeechToText speech = SpeechToText();
String error = '';
String _currentLocaleId = "";
String lastError;
double level = 0.0;
double minSoundLevel = 50000;
double maxSoundLevel = -50000;
String reconizedWord = '';
FlutterTts flutterTts = FlutterTts();
bool isSearching = false;
String lastStatus;
bool _isInit = true;
@override
void didChangeDependencies() {
super.didChangeDependencies();
if (_isInit) {
requestPermissions();
initSpeechState();
}
_isInit = false;
}
@override
Widget build(BuildContext context) {
return Column(
children: <Widget>[
Image.asset('assets/images/siri-ianzhao.gif'),
AppText(
reconizedWord,
fontSize: 28,
),
SizedBox(
child: FlatButton(
child: Text('Start'),
onPressed: () {
startVoiceSearch();
},
),
height: 40,
width: 80)
],
);
}
startVoiceSearch() {
speech.listen(
onResult: resultListener,
listenFor: Duration(seconds: 10),
localeId: _currentLocaleId,
onSoundLevelChange: soundLevelListener,
cancelOnError: true,
partialResults: true,
onDevice: true,
listenMode: ListenMode.confirmation);
}
void resultListener(SpeechRecognitionResult result) {
// lastWords = "${result.recognizedWords} - ${result.finalResult}";
if (result.finalResult == true) {
setState(() {
reconizedWord = result.recognizedWords;
});
Future.delayed(const Duration(seconds: 1), () {
_speak(reconizedWord);
});
}
//});
}
Future _speak(reconizedWord) async {
await flutterTts.speak(reconizedWord);
getPages(reconizedWord);
//Future.delayed(const Duration(seconds: 2), () {
//
// });
}
void soundLevelListener(double level) {
minSoundLevel = min(minSoundLevel, level);
maxSoundLevel = max(maxSoundLevel, level);
// print("sound level $level: $minSoundLevel - $maxSoundLevel ");
setState(() {
this.level = level;
});
}
void requestPermissions() async {
Map<Permission, PermissionStatus> statuses = await [
Permission.microphone,
].request();
}
Future<void> initSpeechState() async {
await speech.initialize(onError: errorListener, onStatus: statusListener);
_currentLocaleId = TranslationBase.of(context).locale.languageCode == 'en'
? 'en-GB'
: 'ar-SA'; // systemLocale.localeId;
flutterTts.setLanguage(_currentLocaleId);
if (!mounted) return;
}
void errorListener(SpeechRecognitionError error) {
setState(() {
// reconizedWord = "${error.errorMsg} - ${error.permanent}";
});
}
void statusListener(String status) {
setState(() {
reconizedWord = status == 'listening' ? 'Listening...' : 'Wait...';
});
}
getPages(text) {
var request = {
'VoiceMessage': text,
'Lang':
TranslationBase.of(context).locale.languageCode == 'en' ? 'En' : 'Ar'
};
searchProvider.getBotPages(request).then((result) => getCommands(result));
}
getCommands(result) {
isSearching = false;
print(result);
}
}

@ -93,7 +93,11 @@ dependencies:
#hijri
hijri: ^2.0.3
#flutter tts
flutter_tts: ^1.2.6
speech_to_text:
path: speech_to_text
dev_dependencies:
flutter_test:

@ -0,0 +1,19 @@
name: build
on:
push:
branches:
- master
jobs:
test:
name: Test on Ubuntu
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: subosito/flutter-action@v1.3.2
with:
flutter-version: '1.17.1'
channel: 'stable'
- run: flutter pub get
- run: flutter test

@ -0,0 +1,11 @@
.DS_Store
.dart_tool/
.packages
.pub/
build/
coverage/
example/.flutter-plugins-dependencies
**/ios/Flutter/flutter_export_environment.sh
android/.idea/

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f
channel: stable
project_type: plugin

@ -0,0 +1,166 @@
# Changelog
## 2.3.0
### New
* new parameter `onDevice` on the `listen` method enforces on device recognition for sensitive content
* onSoundLevelChange now supported on iOS
* added compile troubleshooting help to README.md
* `SpeechToTextProvider` is an alternate and simpler way to interact with the `SpeechToText` plugin.
* new `provider_example.dart` example for usage of `SpeechToTextProvider`.
### Fix
* on iOS handles some conflicts with other applications better to keep speech working after calls for example
## 2.2.0
### New
* improved error handling and logging in the iOS implementation
* added general guides for iOS to the README
* moved stress testing out of the main example
* iOS now defaults to using the speaker rather than the receiver for start /stop sounds when no headphones
### Fix
* iOS now properly deactivates the audio session when no longer listening
* start and stop sounds on iOS should be more reliable when available
## 2.1.0
### Breaking
* `listenFor` now calls `stop` rather than `cancel` as this seems like more useful behaviour
### Fix
* Android no longer stops or cancels the speech recognizer if it has already been shutdown by a
timeout or other platform behaviour.
* Android no longer tries to restart the listener when it is already active
* Now properly notifies errors that happen after listening stops due to platform callback rather than
client request. See https://github.com/csdcorp/speech_to_text/issues/51
## 2.0.1
### Fix
* Resolves an issue with the Android implementation not handling permission requests properly on apps
that didn't use the 1.12.x plugin APIs for registration. The permission dialog would not appear and
permission was denied.
## 2.0.0
### Breaking
* Upgraded to New Swift 1.12 plugin structure, may work with older Flutter version but not guaranteed
### New
* the plugin now requests both speech and microphone permission on initialize on iOS
* added `debugLogging` parameter to the `initialize` method to control native logging
### Fix
* The Android implementation now blocks duplicate results notifications. It appears that at least on some
Android versions the final results notification onResults is notified twice when Android automatically
terminates the session due to a pause time. The de-duplication looks for successive notifications
with < 100 ms between them and blocks the second. If you miss any onResult notifications please post
an issue.
## 1.1.0
### New
* error_timeout has been separated into error_network_timeout and error_speech_timeout
## 1.0.0
### New
* hasPermission to check for the current permission without bringing up the system dialog
* `listen` has a new optional `cancelOnError` parameter to support automatically canceling
a listening session on a permanent error.
* `listen` has a new optional `partialResults` parameter that controls whether the callback
receives partial or only final results.
## 0.8.0
### New
* speech recognizer now exposes multiple possible transcriptions for each recognized speech
* alternates list on SpeechRecognitionResult exposes alternate transcriptions of voice
* confidence on SpeechRecognitionResult gives an estimate of confidence in the transcription
* isConfident on SpeechRecognitionResult supports testing confidence
* hasConfidenceRating on SpeechRecognitionResult indicates if confidence was provided from the device
* new SpeechRecognitionWords class gives details on per transcription words and confidence
### Fix
* speechRecognizer availabilityDidChange was crashing if invoked due to an invalid parameter type
* Added iOS platform 10 to example Podfile to resolve compilation warnings
## 0.7.2
### Breaking
* Upgrade Swift to version 5 to match Flutter. Projects using this plugin must now switch to 5.
## 0.7.1
### Fix
* Upgrade Kotlin to 1.3.5 to match the Flutter 1.12 version
* Upgrade Gradle build to 3.5.0 to match the Flutter 1.12 version
* Android version of the plugin was repeating the system default locale in the `locales` list
## 0.7.0
### New
* locales method returns the list of available languages for speech
* new optional localeId parameter on listen method supports choosing the comprehension language separately from the current system locale.
### Breaking
* `cancel` and `stop` are now async
## 0.6.3
### Fix
* request permission fix on Android to ensure it doesn't conflict with other requests
## 0.6.2
### Fix
* channel invoke wasn't being done on the main thread in iOS
## 0.6.1
### Fix
* listening sound was failing due to timing, now uses play and record mode on iOS.
## 0.6.0
### Breaking
* The filenames for the optional sounds for iOS have changed.
### New
* Added an optional listenFor parameter to set a max duration to listen for speech and then automatically cancel.
### Fix
* Was failing to play sounds because of record mode. Now plays sounds before going into record mode and after coming out.
* Status listener was being ignored, now properly notifies on status changes.
## 0.5.1
* Fixes a problem where the recognizer left the AVAudioSession in record mode which meant that subsequent sounds couldn't be played.
## 0.5.0
Initial draft with limited functionality, supports:
* initializing speech recognition
* asking the user for permission if required
* listening for recognized speech
* canceling the current recognition session
* stopping the current recognition session
* Android and iOS 10+ support
Missing:
* some error handling
* testing across multiple OS versions
* and more, to be discovered...

@ -0,0 +1,29 @@
BSD 3-Clause License
Copyright (c) 2019, Corner Software Development Corp.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,150 @@
# speech_to_text
[![pub package](https://img.shields.io/badge/pub-v2.3.0-blue)](https://pub.dartlang.org/packages/speech_to_text) [![build status](https://github.com/csdcorp/speech_to_text/workflows/build/badge.svg)](https://github.com/csdcorp/speech_to_text/actions?query=workflow%3Abuild)
A library that exposes device specific speech recognition capability.
This plugin contains a set of classes that make it easy to use the speech recognition
capabilities of the mobile device in Flutter. It supports both Android and iOS. The
target use cases for this library are commands and short phrases, not continuous spoken
conversion or always on listening.
## Recent Updates
The 2.3.0 version adds `SpeechToTextProvider` as a simpler way to interact with the plugin. Checkout
the new `provider_example.dart` for intended usage.
The 2.2.0 version improves audio session handling and start / stop sound playback on iOS.
*Note*: Feedback from any test devices is welcome.
## Using
To recognize text from the microphone import the package and call the plugin, like so:
```dart
import 'package:speech_to_text/speech_to_text.dart' as stt;
stt.SpeechToText speech = stt.SpeechToText();
bool available = await speech.initialize( onStatus: statusListener, onError: errorListener );
if ( available ) {
speech.listen( onResult: resultListener );
}
else {
print("The user has denied the use of speech recognition.");
}
// some time later...
speech.stop();
```
### Initialize once
The `initialize` method only needs to be called once per application session. After that `listen`,
`start`, `stop`, and `cancel` can be used to interact with the plugin. Subsequent calls to `initialize`
are ignored which is safe but does mean that the `onStatus` and `onError` callbacks cannot be reset after
the first call to `initialize`. For that reason there should be only one instance of the plugin per
application. The `SpeechToTextProvider` is one way to create a single instance and easily reuse it in
multiple widgets.
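As a minimal sketch of that single-instance pattern (`speech` and `ensureSpeechReady` are illustrative names, not part of the plugin API):
```dart
import 'package:speech_to_text/speech_to_text.dart' as stt;

// One shared plugin instance for the whole application.
final stt.SpeechToText speech = stt.SpeechToText();

Future<bool> ensureSpeechReady() async {
  // Safe to call repeatedly: after the first successful call, subsequent
  // initialize calls are ignored by the plugin (see the note above).
  return speech.initialize(
    onStatus: (String status) => print('speech status: $status'),
    onError: (error) => print('speech error: $error'),
  );
}
```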
## Permissions
Applications using this plugin require user permissions.
### iOS
Add the following keys to your _Info.plist_ file, located in `<project root>/ios/Runner/Info.plist`:
* `NSSpeechRecognitionUsageDescription` - describe why your app uses speech recognition. This is called _Privacy - Speech Recognition Usage Description_ in the visual editor.
* `NSMicrophoneUsageDescription` - describe why your app needs access to the microphone. This is called _Privacy - Microphone Usage Description_ in the visual editor.
### Android
Add the record audio permission to your _AndroidManifest.xml_ file, located in `<project root>/android/app/src/main/AndroidManifest.xml`.
* `android.permission.RECORD_AUDIO` - this permission is required for microphone access.
* `android.permission.INTERNET` - this permission is required because speech recognition may use remote services.
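The 1.0.0 changelog above mentions `hasPermission` for checking the current grant without bringing up the system dialog; assuming that API, a guarded listen could look like this sketch:
```dart
import 'package:speech_to_text/speech_to_text.dart' as stt;

Future<void> listenIfPermitted(stt.SpeechToText speech) async {
  // Assumption: hasPermission exposes the current permission state
  // (described in the 1.0.0 changelog above).
  bool permitted = await speech.hasPermission;
  if (!permitted) {
    // initialize requests speech and microphone permission on iOS
    // (see the 2.0.0 changelog above) and returns availability.
    permitted = await speech.initialize();
  }
  if (permitted) {
    speech.listen(onResult: (result) => print(result.recognizedWords));
  }
}
```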
## Adding Sounds for iOS (optional)
Android automatically plays system sounds when speech listening starts or stops but iOS does not. This plugin supports playing sounds to indicate listening status on iOS if sound files are available as assets in the application. To enable sounds in an application using this plugin add the sound files to the project and reference them in the assets section of the application `pubspec.yaml`. The location and filenames of the sound files must exactly match what
is shown below or they will not be found. The example application for the plugin shows the usage. *Note*: these files should be very short as they delay the start / end of the speech recognizer until the sound playback is complete.
```yaml
assets:
- assets/sounds/speech_to_text_listening.m4r
- assets/sounds/speech_to_text_cancel.m4r
- assets/sounds/speech_to_text_stop.m4r
```
* `speech_to_text_listening.m4r` - played when the listen method is called.
* `speech_to_text_cancel.m4r` - played when the cancel method is called.
* `speech_to_text_stop.m4r` - played when the stop method is called.
## Troubleshooting
### SDK version error trying to compile for Android
```
Manifest merger failed : uses-sdk:minSdkVersion 16 cannot be smaller than version 21 declared in library [:speech_to_text]
```
The speech_to_text plugin requires at least Android SDK 21 because some of the speech functions in Android
were only introduced in that version. To fix this error you need to change the `build.gradle` entry to reflect
this version. Here's what the relevant part of that file looked like as of this writing:
```
defaultConfig {
applicationId "com.example.app"
minSdkVersion 21
targetSdkVersion 28
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
```
### Incorrect Swift version trying to compile for iOS
```
/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:224:44: error: value of type 'SwiftSpeechToTextPlugin' has no member 'AVAudioSession'
rememberedAudioCategory = self.AVAudioSession.Category
~~~~ ^~~~~~~~~~~~~~
/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:227:63: error: type 'Int' has no member 'notifyOthersOnDeactivation'
try self.audioSession.setActive(true, withFlags: .notifyOthersOnDeactivation)
```
This happens when the Swift language version is not set correctly. See this thread for help https://github.com/csdcorp/speech_to_text/issues/45.
### Swift not supported trying to compile for iOS
```
`speech_to_text` does not specify a Swift version and none of the targets (`Runner`) integrating it have the `SWIFT_VERSION` attribute set.
```
This usually happens for older projects that only support Objective-C. See this thread for help https://github.com/csdcorp/speech_to_text/issues/88.
### Not working on a particular Android device
The symptom for this issue is that the `initialize` method will always fail. If you turn on debug logging
using the `debugLogging: true` flag on the `initialize` method you'll see `'Speech recognition unavailable'`
in the Android log. There's a lengthy discussion of this issue at https://github.com/csdcorp/speech_to_text/issues/36. The issue seems to be that the recognizer is not always automatically enabled on the device. Two
key things helped resolve it, at least in the case reported there.
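For example (a sketch; `diagnose` is just an illustrative name):
```dart
import 'package:speech_to_text/speech_to_text.dart';

Future<void> diagnose() async {
  final speech = SpeechToText();
  // debugLogging enables native-side log output; watch the Android log
  // (adb logcat) for 'Speech recognition unavailable'.
  final available = await speech.initialize(debugLogging: true);
  print('speech recognition available: $available');
}
```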
#### First
1. Go to Google Play
2. Search for 'Google'
3. You should find this app: https://play.google.com/store/apps/details?id=com.google.android.googlequicksearchbox
If it shows 'Disabled', enable it.
This is the SO post that helped: https://stackoverflow.com/questions/28769320/how-to-check-wether-speech-recognition-is-available-or-not
#### Second
Ensure the app has the required permissions. The symptom for this is that you get a permanent error notification
`error_audio_error` when starting a listen session. Here's a Stack Overflow post that addresses that:
https://stackoverflow.com/questions/46376193/android-speechrecognizer-audio-recording-error
Here's the important excerpt:
>You should go to system setting, Apps, Google app, then enable its permission of microphone.
### iOS recognition guidelines
Apple has quite a good guide on the user experience for using speech; the original is here:
https://developer.apple.com/documentation/speech/sfspeechrecognizer. This is the section that I think is particularly relevant:
>#### Create a Great User Experience for Speech Recognition
>Here are some tips to consider when adding speech recognition support to your app.
>**Be prepared to handle failures caused by speech recognition limits.** Because speech recognition is a network-based service, limits are enforced so that the service can remain freely available to all apps. Individual devices may be limited in the number of recognitions that can be performed per day, and each app may be throttled globally based on the number of requests it makes per day. If a recognition request fails quickly (within a second or two of starting), check to see if the recognition service became unavailable. If it is, you may want to ask users to try again later.
>**Plan for a one-minute limit on audio duration.** Speech recognition places a relatively high burden on battery life and network usage. To minimize this burden, the framework stops speech recognition tasks that last longer than one minute. This limit is similar to the one for keyboard-related dictation.
>**Remind the user when your app is recording.** For example, display a visual indicator and play sounds at the beginning and end of speech recognition to help users understand that they're being actively recorded. You can also display speech as it is being recognized so that users understand what your app is doing and see any mistakes made during the recognition process.
>**Do not perform speech recognition on private or sensitive information.** Some speech is not appropriate for recognition. Don't send passwords, health or financial data, and other sensitive speech for recognition.

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin/default"/>
</classpath>

@ -0,0 +1,8 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>speech_to_text</name>
<comment>Project android_____ created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

@ -0,0 +1,13 @@
arguments=
auto.sync=false
build.scans.enabled=false
connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(5.6.1))
connection.project.dir=
eclipse.preferences.version=1
gradle.user.home=
java.home=
jvm.arguments=
offline.mode=false
override.workspace.settings=true
show.console.view=true
show.executions.view=true

@ -0,0 +1,44 @@
group 'com.csdcorp.speech_to_text'
version '1.0-SNAPSHOT'
buildscript {
ext.kotlin_version = '1.3.50'
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.0'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
android {
compileSdkVersion 28
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
defaultConfig {
minSdkVersion 21
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
lintOptions {
disable 'InvalidPackage'
}
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}

@ -0,0 +1,3 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true

@ -0,0 +1,2 @@
sdk.dir=/Users/stephen.owens/Library/Android/sdk
flutter.sdk=/Users/stephen.owens/Documents/dev/flutter/sdk/flutter

@ -0,0 +1 @@
rootProject.name = 'speech_to_text'

@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip

@ -0,0 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text">
</manifest>

@ -0,0 +1,595 @@
package com.csdcorp.speech_to_text
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin
import android.Manifest
import android.annotation.TargetApi
import android.app.Activity
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Build
import android.os.Bundle
import android.speech.RecognitionListener
import android.speech.SpeechRecognizer.createSpeechRecognizer
import android.speech.RecognizerIntent
import android.speech.SpeechRecognizer
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry
import io.flutter.plugin.common.PluginRegistry.Registrar
import org.json.JSONObject
import android.content.Context
import android.content.BroadcastReceiver
import android.os.Handler
import android.os.Looper
import android.util.Log
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.BinaryMessenger
import org.json.JSONArray
import java.util.*
enum class SpeechToTextErrors {
multipleRequests,
unimplemented,
noLanguageIntent,
recognizerNotAvailable,
missingOrInvalidArg,
unknown
}
enum class SpeechToTextCallbackMethods {
textRecognition,
notifyStatus,
notifyError,
soundLevelChange,
}
enum class SpeechToTextStatus {
listening,
notListening,
unavailable,
available,
}
enum class ListenMode {
deviceDefault,
dictation,
search,
confirmation,
}
const val pluginChannelName = "plugin.csdcorp.com/speech_to_text"
@TargetApi(8)
/** SpeechToTextPlugin */
public class SpeechToTextPlugin :
MethodCallHandler, RecognitionListener,
PluginRegistry.RequestPermissionsResultListener, FlutterPlugin,
ActivityAware {
private var pluginContext: Context? = null
private var channel: MethodChannel? = null
private val minSdkForSpeechSupport = 21
private val speechToTextPermissionCode = 28521
private val missingConfidence: Double = -1.0
private val logTag = "SpeechToTextPlugin"
private var currentActivity: Activity? = null
private var activeResult: Result? = null
private var initializedSuccessfully: Boolean = false
private var permissionToRecordAudio: Boolean = false
private var listening = false
private var debugLogging: Boolean = false
private var speechRecognizer: SpeechRecognizer? = null
private var recognizerIntent: Intent? = null
private var previousRecognizerLang: String? = null
private var previousPartialResults: Boolean = true
private var previousListenMode: ListenMode = ListenMode.deviceDefault
private var lastFinalTime: Long = 0
private val handler: Handler = Handler(Looper.getMainLooper())
private val defaultLanguageTag: String = Locale.getDefault().toLanguageTag()
override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
onAttachedToEngine(flutterPluginBinding.getApplicationContext(), flutterPluginBinding.getBinaryMessenger());
}
// This static function is optional and equivalent to onAttachedToEngine. It supports the old
// pre-Flutter-1.12 Android projects. You are encouraged to continue supporting
// plugin registration via this function while apps migrate to use the new Android APIs
// post-flutter-1.12 via https://flutter.dev/go/android-project-migration.
//
// It is encouraged to share logic between onAttachedToEngine and registerWith to keep
// them functionally equivalent. Only one of onAttachedToEngine or registerWith will be called
// depending on the user's project. onAttachedToEngine or registerWith must both be defined
// in the same class.
companion object {
@JvmStatic
fun registerWith(registrar: Registrar) {
val speechPlugin = SpeechToTextPlugin()
speechPlugin.currentActivity = registrar.activity()
registrar.addRequestPermissionsResultListener(speechPlugin)
speechPlugin.onAttachedToEngine(registrar.context(), registrar.messenger())
}
}
private fun onAttachedToEngine(applicationContext: Context, messenger: BinaryMessenger) {
this.pluginContext = applicationContext;
channel = MethodChannel(messenger, pluginChannelName)
channel?.setMethodCallHandler(this)
}
override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
this.pluginContext = null;
channel?.setMethodCallHandler(null)
channel = null
}
override fun onDetachedFromActivity() {
currentActivity = null
}
override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
currentActivity = binding.activity
binding.addRequestPermissionsResultListener(this)
}
override fun onAttachedToActivity(binding: ActivityPluginBinding) {
currentActivity = binding.activity
binding.addRequestPermissionsResultListener(this)
}
override fun onDetachedFromActivityForConfigChanges() {
currentActivity = null
}
override fun onMethodCall(@NonNull call: MethodCall, @NonNull rawrResult: Result) {
val result = ChannelResultWrapper(rawrResult)
try {
when (call.method) {
"has_permission" -> hasPermission(result)
"initialize" -> {
var dlog = call.argument<Boolean>("debugLogging")
if (null != dlog) {
debugLogging = dlog
}
initialize(result)
}
"listen" -> {
var localeId = call.argument<String>("localeId")
if (null == localeId) {
localeId = defaultLanguageTag
}
var partialResults = call.argument<Boolean>("partialResults")
if (null == partialResults) {
partialResults = true
}
val listenModeIndex = call.argument<Int>("listenMode")
if ( null == listenModeIndex ) {
result.error(SpeechToTextErrors.missingOrInvalidArg.name,
"listenMode is required", null)
return
}
startListening(result, localeId, partialResults, listenModeIndex )
}
"stop" -> stopListening(result)
"cancel" -> cancelListening(result)
"locales" -> locales(result)
else -> result.notImplemented()
}
} catch (exc: Exception) {
Log.e(logTag, "Unexpected exception", exc)
result.error(SpeechToTextErrors.unknown.name,
"Unexpected exception", exc.localizedMessage)
}
}
private fun hasPermission(result: Result) {
if (sdkVersionTooLow(result)) {
return
}
debugLog("Start has_permission")
val localContext = pluginContext
if (localContext != null) {
val hasPerm = ContextCompat.checkSelfPermission(localContext,
Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
result.success(hasPerm)
}
}
private fun initialize(result: Result) {
if (sdkVersionTooLow(result)) {
return
}
debugLog("Start initialize")
if (null != activeResult) {
result.error(SpeechToTextErrors.multipleRequests.name,
"Only one initialize at a time", null)
return
}
activeResult = result
val localContext = pluginContext
initializeIfPermitted(pluginContext)
}
private fun sdkVersionTooLow(result: Result): Boolean {
if (Build.VERSION.SDK_INT < minSdkForSpeechSupport) {
result.success(false)
return true;
}
return false;
}
private fun isNotInitialized(result: Result): Boolean {
if (!initializedSuccessfully || null == pluginContext) {
result.success(false)
}
return !initializedSuccessfully
}
private fun isListening(): Boolean {
return listening
}
private fun isNotListening(): Boolean {
return !listening
}
private fun startListening(result: Result, languageTag: String, partialResults: Boolean,
listenModeIndex: Int) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isListening()) {
return
}
debugLog("Start listening")
var listenMode = ListenMode.deviceDefault
if ( listenModeIndex == ListenMode.dictation.ordinal) {
listenMode = ListenMode.dictation
}
setupRecognizerIntent(languageTag, partialResults, listenMode)
handler.post {
run {
speechRecognizer?.startListening(recognizerIntent)
}
}
notifyListening(isRecording = true)
result.success(true)
debugLog("Start listening done")
}
private fun stopListening(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) {
return
}
debugLog("Stop listening")
handler.post {
run {
speechRecognizer?.stopListening()
}
}
notifyListening(isRecording = false)
result.success(true)
debugLog("Stop listening done")
}
private fun cancelListening(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) {
return
}
debugLog("Cancel listening")
handler.post {
run {
speechRecognizer?.cancel()
}
}
notifyListening(isRecording = false)
result.success(true)
debugLog("Cancel listening done")
}
private fun locales(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result)) {
return
}
var detailsIntent = RecognizerIntent.getVoiceDetailsIntent(pluginContext)
if (null == detailsIntent) {
detailsIntent = Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS)
}
if (null == detailsIntent) {
result.error(SpeechToTextErrors.noLanguageIntent.name,
"Could not get voice details", null)
return
}
pluginContext?.sendOrderedBroadcast(
detailsIntent, null, LanguageDetailsChecker(result),
null, Activity.RESULT_OK, null, null)
}
private fun notifyListening(isRecording: Boolean) {
debugLog("Notify listening")
listening = isRecording
val status = when (isRecording) {
true -> SpeechToTextStatus.listening.name
false -> SpeechToTextStatus.notListening.name
}
channel?.invokeMethod(SpeechToTextCallbackMethods.notifyStatus.name, status)
debugLog("Notify listening done")
}
private fun updateResults(speechBundle: Bundle?, isFinal: Boolean) {
if (isDuplicateFinal( isFinal )) {
debugLog("Discarding duplicate final")
return
}
val userSaid = speechBundle?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)
if (null != userSaid && userSaid.isNotEmpty()) {
val speechResult = JSONObject()
speechResult.put("finalResult", isFinal)
val confidence = speechBundle?.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES)
val alternates = JSONArray()
for (resultIndex in 0..userSaid.size - 1) {
val speechWords = JSONObject()
speechWords.put("recognizedWords", userSaid[resultIndex])
if (null != confidence && confidence.size >= userSaid.size) {
speechWords.put("confidence", confidence[resultIndex])
} else {
speechWords.put("confidence", missingConfidence)
}
alternates.put(speechWords)
}
speechResult.put("alternates", alternates)
val jsonResult = speechResult.toString()
debugLog("Calling results callback")
channel?.invokeMethod(SpeechToTextCallbackMethods.textRecognition.name,
jsonResult)
}
}
private fun isDuplicateFinal( isFinal: Boolean ) : Boolean {
if ( !isFinal ) {
return false
}
val delta = System.currentTimeMillis() - lastFinalTime
lastFinalTime = System.currentTimeMillis()
return delta >= 0 && delta < 100
}
private fun initializeIfPermitted(context: Context?) {
val localContext = context
if (null == localContext) {
completeInitialize()
return
}
permissionToRecordAudio = ContextCompat.checkSelfPermission(localContext,
Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
debugLog("Checked permission")
if (!permissionToRecordAudio) {
val localActivity = currentActivity
if (null != localActivity) {
debugLog("Requesting permission")
ActivityCompat.requestPermissions(localActivity,
arrayOf(Manifest.permission.RECORD_AUDIO), speechToTextPermissionCode)
} else {
debugLog("no permission, no activity, completing")
completeInitialize()
}
} else {
debugLog("has permission, completing")
completeInitialize()
}
debugLog("leaving initializeIfPermitted")
}
private fun completeInitialize() {
debugLog("completeInitialize")
if (permissionToRecordAudio) {
debugLog("Testing recognition availability")
if (!SpeechRecognizer.isRecognitionAvailable(pluginContext)) {
Log.e(logTag, "Speech recognition not available on this device")
activeResult?.error(SpeechToTextErrors.recognizerNotAvailable.name,
"Speech recognition not available on this device", "")
activeResult = null
return
}
debugLog("Creating recognizer")
speechRecognizer = createSpeechRecognizer(pluginContext).apply {
debugLog("Setting listener")
setRecognitionListener(this@SpeechToTextPlugin)
}
if (null == speechRecognizer) {
Log.e(logTag, "Speech recognizer null")
activeResult?.error(
SpeechToTextErrors.recognizerNotAvailable.name,
"Speech recognizer null", "")
activeResult = null
}
debugLog("before setup intent")
setupRecognizerIntent(defaultLanguageTag, true, ListenMode.deviceDefault)
debugLog("after setup intent")
}
initializedSuccessfully = permissionToRecordAudio
debugLog("sending result")
activeResult?.success(permissionToRecordAudio)
debugLog("leaving complete")
activeResult = null
}
private fun setupRecognizerIntent(languageTag: String, partialResults: Boolean, listenMode: ListenMode) {
debugLog("setupRecognizerIntent")
if (previousRecognizerLang == null ||
previousRecognizerLang != languageTag ||
partialResults != previousPartialResults || previousListenMode != listenMode ) {
previousRecognizerLang = languageTag;
previousPartialResults = partialResults
previousListenMode = listenMode
handler.post {
run {
recognizerIntent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
debugLog("In RecognizerIntent apply")
putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
debugLog("put model")
val localContext = pluginContext
if (null != localContext) {
putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
localContext.applicationInfo.packageName)
}
debugLog("put package")
putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults)
debugLog("put partial")
if (languageTag != Locale.getDefault().toLanguageTag()) {
putExtra(RecognizerIntent.EXTRA_LANGUAGE, languageTag);
debugLog("put languageTag")
}
}
}
}
}
}
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>?,
grantResults: IntArray?): Boolean {
when (requestCode) {
speechToTextPermissionCode -> {
if (null != grantResults) {
permissionToRecordAudio = grantResults.isNotEmpty() &&
grantResults.get(0) == PackageManager.PERMISSION_GRANTED
}
completeInitialize()
return true
}
}
return false
}
override fun onPartialResults(results: Bundle?) = updateResults(results, false)
override fun onResults(results: Bundle?) = updateResults(results, true)
override fun onEndOfSpeech() = notifyListening(isRecording = false)
override fun onError(errorCode: Int) {
val errorMsg = when (errorCode) {
SpeechRecognizer.ERROR_AUDIO -> "error_audio_error"
SpeechRecognizer.ERROR_CLIENT -> "error_client"
SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "error_permission"
SpeechRecognizer.ERROR_NETWORK -> "error_network"
SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "error_network_timeout"
SpeechRecognizer.ERROR_NO_MATCH -> "error_no_match"
SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "error_busy"
SpeechRecognizer.ERROR_SERVER -> "error_server"
SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "error_speech_timeout"
else -> "error_unknown"
}
sendError(errorMsg)
}
private fun debugLog( msg: String ) {
if ( debugLogging ) {
Log.d( logTag, msg )
}
}
private fun sendError(errorMsg: String) {
val speechError = JSONObject()
speechError.put("errorMsg", errorMsg)
speechError.put("permanent", true)
handler.post {
run {
channel?.invokeMethod(SpeechToTextCallbackMethods.notifyError.name, speechError.toString())
}
}
}
override fun onRmsChanged(rmsdB: Float) {
handler.post {
run {
channel?.invokeMethod(SpeechToTextCallbackMethods.soundLevelChange.name, rmsdB)
}
}
}
override fun onReadyForSpeech(p0: Bundle?) {}
override fun onBufferReceived(p0: ByteArray?) {}
override fun onEvent(p0: Int, p1: Bundle?) {}
override fun onBeginningOfSpeech() {}
}
// See https://stackoverflow.com/questions/10538791/how-to-set-the-language-in-speech-recognition-on-android/10548680#10548680
class LanguageDetailsChecker(flutterResult: Result) : BroadcastReceiver() {
private val result: Result = flutterResult
private var supportedLanguages: List<String>? = null
private var languagePreference: String? = null
override fun onReceive(context: Context, intent: Intent) {
val results = getResultExtras(true)
if (results.containsKey(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)) {
languagePreference = results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)
}
if (results.containsKey(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)) {
supportedLanguages = results.getStringArrayList(
RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)
createResponse(supportedLanguages)
}
}
private fun createResponse(supportedLanguages: List<String>?) {
val currentLocale = Locale.getDefault()
val localeNames = ArrayList<String>()
localeNames.add(buildIdNameForLocale(currentLocale))
if (null != supportedLanguages) {
for (lang in supportedLanguages) {
if (currentLocale.toLanguageTag() == lang) {
continue
}
val locale = Locale.forLanguageTag(lang)
localeNames.add(buildIdNameForLocale(locale))
}
}
result.success(localeNames)
}
private fun buildIdNameForLocale(locale: Locale): String {
val name = locale.displayName.replace(':', ' ')
return "${locale.language}_${locale.country}:$name"
}
}
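// Wraps a MethodChannel Result so that every response is posted back on the main (caller) thread.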
private class ChannelResultWrapper(result: Result) : Result {
    // Caller handler
    val handler: Handler = Handler(Looper.getMainLooper())
    val result: Result = result
    // make sure to respond in the caller thread
    override fun success(results: Any?) {
        handler.post {
            result.success(results)
        }
    }
    override fun error(errorCode: String?, errorMessage: String?, data: Any?) {
        handler.post {
            result.error(errorCode, errorMessage, data)
        }
    }
    override fun notImplemented() {
        handler.post {
            result.notImplemented()
        }
    }
}

@ -0,0 +1,73 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
**/doc/api/
.dart_tool/
.flutter-plugins
.packages
.pub-cache/
.pub/
/build/
# Android related
**/android/**/gradle-wrapper.jar
**/android/.gradle
**/android/captures/
**/android/gradlew
**/android/gradlew.bat
**/android/local.properties
**/android/**/GeneratedPluginRegistrant.java
# iOS/XCode related
**/ios/**/*.mode1v3
**/ios/**/*.mode2v3
**/ios/**/*.moved-aside
**/ios/**/*.pbxuser
**/ios/**/*.perspectivev3
**/ios/**/*sync/
**/ios/**/.sconsign.dblite
**/ios/**/.tags*
**/ios/**/.vagrant/
**/ios/**/DerivedData/
**/ios/**/Icon?
**/ios/**/Pods/
**/ios/**/.symlinks/
**/ios/**/profile
**/ios/**/xcuserdata
**/ios/.generated/
**/ios/Flutter/App.framework
**/ios/Flutter/Flutter.framework
**/ios/Flutter/Generated.xcconfig
**/ios/Flutter/app.flx
**/ios/Flutter/app.zip
**/ios/Flutter/flutter_assets/
**/ios/Flutter/flutter_export_environment.sh
**/ios/ServiceDefinitions.json
**/ios/Runner/GeneratedPluginRegistrant.*
# Exceptions to above rules.
!**/ios/**/default.mode1v3
!**/ios/**/default.mode2v3
!**/ios/**/default.pbxuser
!**/ios/**/default.perspectivev3
!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f
channel: stable
project_type: app

@ -0,0 +1,155 @@
# speech_to_text_example
Demonstrates how to use the speech_to_text plugin. This example requires that the plugin is installed. It initializes speech recognition, listens for words, and displays them.
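Before the full source below, here is a minimal sketch of the core flow. It assumes `speech_to_text` is already declared as a dependency in `pubspec.yaml`; the `recognizeOnce` helper name is purely illustrative.
```dart
import 'package:speech_to_text/speech_to_text.dart';

Future<void> recognizeOnce() async {
  final SpeechToText speech = SpeechToText();
  // initialize() must complete successfully once before listen() is called.
  bool available = await speech.initialize();
  if (available) {
    // Recognized words arrive incrementally through onResult.
    speech.listen(onResult: (result) => print(result.recognizedWords));
  }
}
```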
## Source
```dart
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
void main() => runApp(MyApp());
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
bool _hasSpeech = false;
String lastWords = "";
String lastError = "";
String lastStatus = "";
final SpeechToText speech = SpeechToText();
@override
void initState() {
super.initState();
initSpeechState();
}
Future<void> initSpeechState() async {
bool hasSpeech = await speech.initialize(onError: errorListener, onStatus: statusListener);
if (!mounted) return;
setState(() {
_hasSpeech = hasSpeech;
});
}
@override
Widget build(BuildContext context) {
return MaterialApp(
home: Scaffold(
appBar: AppBar(
title: const Text('Speech to Text Example'),
),
body: _hasSpeech
? Column(children: [
Expanded(
child: Center(
child: Text('Speech recognition available'),
),
),
Expanded(
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
FlatButton(
child: Text('Start'),
onPressed: startListening,
),
FlatButton(
child: Text('Stop'),
onPressed: stopListening,
),
FlatButton(
child: Text('Cancel'),
onPressed: cancelListening,
),
],
),
),
Expanded(
child: Column(
children: <Widget>[
Center(
child: Text('Recognized Words'),
),
Center(
child: Text(lastWords),
),
],
),
),
Expanded(
child: Column(
children: <Widget>[
Center(
child: Text('Error'),
),
Center(
child: Text(lastError),
),
],
),
),
Expanded(
child: Center(
child: speech.isListening ? Text("I'm listening...") : Text('Not listening'),
),
),
])
: Center( child: Text('Speech recognition unavailable', style: TextStyle(fontSize: 20.0, fontWeight: FontWeight.bold))),
),
);
}
void startListening() {
  lastWords = "";
  lastError = "";
  speech.listen(onResult: resultListener);
  setState(() {});
}
void stopListening() {
  speech.stop();
  setState(() {});
}
void cancelListening() {
  speech.cancel();
  setState(() {});
}
void resultListener(SpeechRecognitionResult result) {
  setState(() {
    lastWords = "${result.recognizedWords} - ${result.finalResult}";
  });
}
void errorListener(SpeechRecognitionError error) {
  setState(() {
    lastError = "${error.errorMsg} - ${error.permanent}";
  });
}
void statusListener(String status) {
  setState(() {
    lastStatus = status;
  });
}
}
```

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>android___</name>
<comment>Project android___ created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

@ -0,0 +1,2 @@
connection.project.dir=
eclipse.preferences.version=1

@ -0,0 +1,67 @@
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader('UTF-8') { reader ->
localProperties.load(reader)
}
}
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
flutterVersionCode = '1'
}
def flutterVersionName = localProperties.getProperty('flutter.versionName')
if (flutterVersionName == null) {
flutterVersionName = '1.0'
}
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
android {
compileSdkVersion 28
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
lintOptions {
disable 'InvalidPackage'
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId "com.csdcorp.speech_to_text_example"
minSdkVersion 21
targetSdkVersion 28
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig signingConfigs.debug
}
}
}
flutter {
source '../..'
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test:runner:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
}

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

@ -0,0 +1,32 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<!-- io.flutter.app.FlutterApplication is an android.app.Application that
calls FlutterMain.startInitialization(this); in its onCreate method.
In most cases you can leave this as-is, but if you want to provide
additional functionality it is fine to subclass or reimplement
FlutterApplication and put your custom class here. -->
<application
android:name="io.flutter.app.FlutterApplication"
android:label="speech_to_text_example"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:launchMode="singleTop"
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
</manifest>

@ -0,0 +1,12 @@
package com.csdcorp.speech_to_text_example
import androidx.annotation.NonNull;
import io.flutter.embedding.android.FlutterActivity
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugins.GeneratedPluginRegistrant
class MainActivity: FlutterActivity() {
override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) {
GeneratedPluginRegistrant.registerWith(flutterEngine);
}
}

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown. (added; 544 B)
Binary file not shown. (added; 442 B)
Binary file not shown. (added; 721 B)
Binary file not shown. (added; 1.0 KiB)
Binary file not shown. (added; 1.4 KiB)

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
</resources>

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

@ -0,0 +1,31 @@
buildscript {
ext.kotlin_version = '1.3.50'
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.6.1'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects {
repositories {
google()
jcenter()
}
}
rootProject.buildDir = '../build'
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(':app')
}
task clean(type: Delete) {
delete rootProject.buildDir
}

@ -0,0 +1,4 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true
android.enableR8=true

@ -0,0 +1,6 @@
#Mon Mar 16 08:57:32 EDT 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip

@ -0,0 +1,15 @@
include ':app'
def flutterProjectRoot = rootProject.projectDir.parentFile.toPath()
def plugins = new Properties()
def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins')
if (pluginsFile.exists()) {
pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) }
}
plugins.each { name, path ->
def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile()
include ":$name"
project(":$name").projectDir = pluginDirectory
}

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>App</string>
<key>CFBundleIdentifier</key>
<string>io.flutter.flutter.app</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>App</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>MinimumOSVersion</key>
<string>8.0</string>
</dict>
</plist>

@ -0,0 +1,2 @@
#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
#include "Generated.xcconfig"

@ -0,0 +1,18 @@
#
# NOTE: This podspec is NOT to be published. It is only used as a local source!
#
Pod::Spec.new do |s|
s.name = 'Flutter'
s.version = '1.0.0'
s.summary = 'High-performance, high-fidelity mobile apps.'
s.description = <<-DESC
Flutter provides an easy and productive way to build and deploy high-performance mobile apps for Android and iOS.
DESC
s.homepage = 'https://flutter.io'
s.license = { :type => 'MIT' }
s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
s.source = { :git => 'https://github.com/flutter/engine', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.vendored_frameworks = 'Flutter.framework'
end

@ -0,0 +1,2 @@
#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
#include "Generated.xcconfig"

@ -0,0 +1,90 @@
# Uncomment this line to define a global platform for your project
platform :ios, '10.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
project 'Runner', {
'Debug' => :debug,
'Profile' => :release,
'Release' => :release,
}
def parse_KV_file(file, separator='=')
file_abs_path = File.expand_path(file)
if !File.exists? file_abs_path
return [];
end
generated_key_values = {}
skip_line_start_symbols = ["#", "/"]
File.foreach(file_abs_path) do |line|
next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ }
plugin = line.split(pattern=separator)
if plugin.length == 2
podname = plugin[0].strip()
path = plugin[1].strip()
podpath = File.expand_path("#{path}", file_abs_path)
generated_key_values[podname] = podpath
else
puts "Invalid plugin specification: #{line}"
end
end
generated_key_values
end
target 'Runner' do
use_frameworks!
use_modular_headers!
# Flutter Pod
copied_flutter_dir = File.join(__dir__, 'Flutter')
copied_framework_path = File.join(copied_flutter_dir, 'Flutter.framework')
copied_podspec_path = File.join(copied_flutter_dir, 'Flutter.podspec')
unless File.exist?(copied_framework_path) && File.exist?(copied_podspec_path)
# Copy Flutter.framework and Flutter.podspec to Flutter/ to have something to link against if the xcode backend script has not run yet.
# That script will copy the correct debug/profile/release version of the framework based on the currently selected Xcode configuration.
# CocoaPods will not embed the framework on pod install (before any build phases can generate) if the dylib does not exist.
generated_xcode_build_settings_path = File.join(copied_flutter_dir, 'Generated.xcconfig')
unless File.exist?(generated_xcode_build_settings_path)
raise "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter pub get is executed first"
end
generated_xcode_build_settings = parse_KV_file(generated_xcode_build_settings_path)
cached_framework_dir = generated_xcode_build_settings['FLUTTER_FRAMEWORK_DIR'];
unless File.exist?(copied_framework_path)
FileUtils.cp_r(File.join(cached_framework_dir, 'Flutter.framework'), copied_flutter_dir)
end
unless File.exist?(copied_podspec_path)
FileUtils.cp(File.join(cached_framework_dir, 'Flutter.podspec'), copied_flutter_dir)
end
end
# Keep pod path relative so it can be checked into Podfile.lock.
pod 'Flutter', :path => 'Flutter'
# Plugin Pods
# Prepare symlinks folder. We use symlinks to avoid having Podfile.lock
# referring to absolute paths on developers' machines.
system('rm -rf .symlinks')
system('mkdir -p .symlinks/plugins')
plugin_pods = parse_KV_file('../.flutter-plugins')
plugin_pods.each do |name, path|
symlink = File.join('.symlinks', 'plugins', name)
File.symlink(path, symlink)
pod name, :path => File.join(symlink, 'ios')
end
end
# Prevent Cocoapods from embedding a second Flutter framework and causing an error with the new Xcode build system.
install! 'cocoapods', :disable_input_output_paths => true
post_install do |installer|
installer.pods_project.targets.each do |target|
target.build_configurations.each do |config|
config.build_settings['ENABLE_BITCODE'] = 'NO'
end
end
end

@ -0,0 +1,29 @@
PODS:
- Flutter (1.0.0)
- speech_to_text (0.0.1):
- Flutter
- Try
- Try (2.1.1)
DEPENDENCIES:
- Flutter (from `Flutter`)
- speech_to_text (from `.symlinks/plugins/speech_to_text/ios`)
SPEC REPOS:
trunk:
- Try
EXTERNAL SOURCES:
Flutter:
:path: Flutter
speech_to_text:
:path: ".symlinks/plugins/speech_to_text/ios"
SPEC CHECKSUMS:
Flutter: 0e3d915762c693b495b44d77113d4970485de6ec
speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe
Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96
PODFILE CHECKSUM: 0ba44ad07df4ab62269dc769727cf0f12b1e453d
COCOAPODS: 1.9.3

@ -0,0 +1,578 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
9705A1C41CF9048500538489 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; };
E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
97C146EB1CF9000F007C117D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7937AF765430D66F28F7FEEF /* Frameworks */ = {
isa = PBXGroup;
children = (
E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
9740EEB11CF90186004384FC /* Flutter */ = {
isa = PBXGroup;
children = (
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
9740EEB21CF90195004384FC /* Debug.xcconfig */,
7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
9740EEB31CF90195004384FC /* Generated.xcconfig */,
);
name = Flutter;
sourceTree = "<group>";
};
97C146E51CF9000F007C117D = {
isa = PBXGroup;
children = (
9740EEB11CF90186004384FC /* Flutter */,
97C146F01CF9000F007C117D /* Runner */,
97C146EF1CF9000F007C117D /* Products */,
A68CCF1640763A551D35BD31 /* Pods */,
7937AF765430D66F28F7FEEF /* Frameworks */,
);
sourceTree = "<group>";
};
97C146EF1CF9000F007C117D /* Products */ = {
isa = PBXGroup;
children = (
97C146EE1CF9000F007C117D /* Runner.app */,
);
name = Products;
sourceTree = "<group>";
};
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
97C146FA1CF9000F007C117D /* Main.storyboard */,
97C146FD1CF9000F007C117D /* Assets.xcassets */,
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
97C147021CF9000F007C117D /* Info.plist */,
97C146F11CF9000F007C117D /* Supporting Files */,
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
);
path = Runner;
sourceTree = "<group>";
};
97C146F11CF9000F007C117D /* Supporting Files */ = {
isa = PBXGroup;
children = (
);
name = "Supporting Files";
sourceTree = "<group>";
};
A68CCF1640763A551D35BD31 /* Pods */ = {
isa = PBXGroup;
children = (
59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */,
6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */,
C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
97C146ED1CF9000F007C117D /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */,
9740EEB61CF901F6004384FC /* Run Script */,
97C146EA1CF9000F007C117D /* Sources */,
97C146EB1CF9000F007C117D /* Frameworks */,
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = Runner;
productName = Runner;
productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
97C146E61CF9000F007C117D /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1020;
ORGANIZATIONNAME = "The Chromium Authors";
TargetAttributes = {
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
DevelopmentTeam = 3X949YE9K2;
LastSwiftMigration = 0910;
};
};
};
buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 97C146E51CF9000F007C117D;
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
97C146ED1CF9000F007C117D /* Runner */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */,
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Thin Binary";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
};
8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Run Script";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
97C146EA1CF9000F007C117D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
97C146FA1CF9000F007C117D /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C146FB1CF9000F007C117D /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C147001CF9000F007C117D /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
249021D3217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Profile;
};
249021D4217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Profile;
};
97C147031CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
97C147041CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
97C147061CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Debug;
};
97C147071CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147031CF9000F007C117D /* Debug */,
97C147041CF9000F007C117D /* Release */,
249021D3217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147061CF9000F007C117D /* Debug */,
97C147071CF9000F007C117D /* Release */,
249021D4217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 97C146E61CF9000F007C117D /* Project object */;
}

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
</Workspace>

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1020"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Profile"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

@ -0,0 +1,13 @@
import UIKit
import Flutter
@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
override func application(
_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
) -> Bool {
GeneratedPluginRegistrant.register(with: self)
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
}

@ -0,0 +1,122 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@3x.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@1x.png",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@1x.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@1x.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-App-83.5x83.5@2x.png",
"scale" : "2x"
},
{
"size" : "1024x1024",
"idiom" : "ios-marketing",
"filename" : "Icon-App-1024x1024@1x.png",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown. (added; 564 B)
Binary file not shown. (added; 1.3 KiB)
Binary file not shown. (added; 1.6 KiB)
Binary file not shown. (added; 1.0 KiB)
Binary file not shown. (added; 1.7 KiB)
Binary file not shown. (added; 1.9 KiB)
Binary file not shown. (added; 1.3 KiB)
Binary file not shown. (added; 1.9 KiB)
Binary file not shown. (added; 2.6 KiB)
Binary file not shown. (added; 2.6 KiB)
Binary file not shown. (added; 3.7 KiB)
Binary file not shown. (added; 1.8 KiB)
Binary file not shown. (added; 3.2 KiB)

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "LaunchImage.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown. (added; 68 B)
Binary file not shown. (added; 68 B)
Binary file not shown. (added; 68 B)

@ -0,0 +1,5 @@
# Launch Screen Assets
You can customize the launch screen with your own desired assets by replacing the image files in this directory.
You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
<viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
</imageView>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
</constraints>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<resources>
<image name="LaunchImage" width="168" height="185"/>
</resources>
</document>

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
</dependencies>
<scenes>
<!--Flutter View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSMicrophoneUsageDescription</key>
<string>This example listens for speech on the device microphone on your request.</string>
<key>NSSpeechRecognitionUsageDescription</key>
<string>This example recognizes words as you speak them and displays them. </string>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>speech_to_text_example</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>$(FLUTTER_BUILD_NAME)</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>$(FLUTTER_BUILD_NUMBER)</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>

@ -0,0 +1 @@
#import "GeneratedPluginRegistrant.h"

@ -0,0 +1,275 @@
import 'dart:async';
import 'dart:math';
import 'package:flutter/material.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
void main() => runApp(MyApp());
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
bool _hasSpeech = false;
double level = 0.0;
double minSoundLevel = 50000;
double maxSoundLevel = -50000;
String lastWords = "";
String lastError = "";
String lastStatus = "";
String _currentLocaleId = "";
List<LocaleName> _localeNames = [];
final SpeechToText speech = SpeechToText();
@override
void initState() {
requestPermissions();
super.initState();
}
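// initialize() must succeed before listen() can be called; it also determines which locales are available.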
Future<void> initSpeechState() async {
bool hasSpeech = await speech.initialize(
onError: errorListener, onStatus: statusListener);
if (hasSpeech) {
_localeNames = await speech.locales();
var systemLocale = await speech.systemLocale();
_currentLocaleId = systemLocale.localeId;
}
if (!mounted) return;
setState(() {
_hasSpeech = hasSpeech;
});
}
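// Ask for microphone permission up front using permission_handler.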
void requestPermissions() async {
  await [
    Permission.microphone,
  ].request();
}
@override
Widget build(BuildContext context) {
return MaterialApp(
home: Scaffold(
appBar: AppBar(
title: const Text('Speech to Text CloudSolution'),
),
body: Column(children: [
Center(
child: Text(
'Speech recognition available',
style: TextStyle(fontSize: 22.0),
),
),
Container(
child: Column(
children: <Widget>[
Row(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: <Widget>[
FlatButton(
child: Text('Initialize'),
onPressed: _hasSpeech ? null : initSpeechState,
),
],
),
Row(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: <Widget>[
FlatButton(
child: Text('Start'),
onPressed: !_hasSpeech || speech.isListening
? null
: startListening,
),
FlatButton(
child: Text('Stop'),
onPressed: speech.isListening ? stopListening : null,
),
FlatButton(
child: Text('Cancel'),
onPressed: speech.isListening ? cancelListening : null,
),
],
),
Row(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: <Widget>[
DropdownButton(
onChanged: (selectedVal) => _switchLang(selectedVal),
value: _currentLocaleId,
items: _localeNames
.map(
(localeName) => DropdownMenuItem(
value: localeName.localeId,
child: Text(localeName.name),
),
)
.toList(),
),
],
)
],
),
),
Expanded(
flex: 4,
child: Column(
children: <Widget>[
Center(
child: Text(
'Recognized Words',
style: TextStyle(fontSize: 22.0),
),
),
Expanded(
child: Stack(
children: <Widget>[
Container(
color: Theme.of(context).selectedRowColor,
child: Center(
child: Text(
lastWords,
textAlign: TextAlign.center,
),
),
),
Positioned.fill(
bottom: 10,
child: Align(
alignment: Alignment.bottomCenter,
child: Container(
width: 40,
height: 40,
alignment: Alignment.center,
decoration: BoxDecoration(
boxShadow: [
BoxShadow(
blurRadius: .26,
spreadRadius: level * 1.5,
color: Colors.black.withOpacity(.05))
],
color: Colors.white,
borderRadius:
BorderRadius.all(Radius.circular(50)),
),
child: IconButton(icon: Icon(Icons.mic), onPressed: null),
),
),
),
],
),
),
],
),
),
Expanded(
flex: 1,
child: Column(
children: <Widget>[
Center(
child: Text(
'Error Status',
style: TextStyle(fontSize: 22.0),
),
),
Center(
child: Text(lastError),
),
],
),
),
Container(
padding: EdgeInsets.symmetric(vertical: 20),
color: Theme.of(context).backgroundColor,
child: Center(
child: speech.isListening
? Text(
"I'm listening...",
style: TextStyle(fontWeight: FontWeight.bold),
)
: Text(
'Not listening',
style: TextStyle(fontWeight: FontWeight.bold),
),
),
),
]),
),
);
}
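// Starts a 10 second listen session in the selected locale; partial results stream in through resultListener.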
void startListening() {
lastWords = "";
lastError = "";
speech.listen(
onResult: resultListener,
listenFor: Duration(seconds: 10),
localeId: _currentLocaleId,
onSoundLevelChange: soundLevelListener,
cancelOnError: true,
partialResults: true,
onDevice: true,
listenMode: ListenMode.confirmation);
setState(() {});
}
void stopListening() {
speech.stop();
setState(() {
level = 0.0;
});
}
void cancelListening() {
speech.cancel();
setState(() {
level = 0.0;
});
}
void resultListener(SpeechRecognitionResult result) {
setState(() {
lastWords = "${result.recognizedWords} - ${result.finalResult}";
});
}
void soundLevelListener(double level) {
minSoundLevel = min(minSoundLevel, level);
maxSoundLevel = max(maxSoundLevel, level);
// print("sound level $level: $minSoundLevel - $maxSoundLevel ");
setState(() {
this.level = level;
});
}
void errorListener(SpeechRecognitionError error) {
// print("Received error status: $error, listening: ${speech.isListening}");
setState(() {
lastError = "${error.errorMsg} - ${error.permanent}";
});
}
void statusListener(String status) {
// print(
// "Received listener status: $status, listening: ${speech.isListening}");
setState(() {
lastStatus = status;
});
}
_switchLang(selectedVal) {
setState(() {
_currentLocaleId = selectedVal;
});
print(selectedVal);
}
}

@ -0,0 +1,33 @@
name: speech_to_text_example
description: Demonstrates how to use the speech_to_text plugin.
version: 1.1.0
publish_to: 'none'
environment:
sdk: ">=2.1.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
cupertino_icons: ^0.1.2
permission_handler: ^5.0.1+1
provider:
dev_dependencies:
flutter_test:
sdk: flutter
speech_to_text:
path: ../
# The following section is specific to Flutter.
flutter:
uses-material-design: true
assets:
- assets/sounds/speech_to_text_listening.m4r
- assets/sounds/speech_to_text_cancel.m4r
- assets/sounds/speech_to_text_stop.m4r

@ -0,0 +1,27 @@
// This is a basic Flutter widget test.
//
// To perform an interaction with a widget in your test, use the WidgetTester
// utility that Flutter provides. For example, you can send tap and scroll
// gestures. You can also use WidgetTester to find child widgets in the widget
// tree, read text, and verify that the values of widget properties are correct.
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:speech_to_text_example/main.dart';
void main() {
testWidgets('Verify Platform version', (WidgetTester tester) async {
// Build our app and trigger a frame.
await tester.pumpWidget(MyApp());
// Verify that platform version is retrieved.
expect(
find.byWidgetPredicate(
(Widget widget) =>
widget is Text && widget.data.startsWith('Running on:'),
),
findsOneWidget,
);
});
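  // A minimal sketch (not part of the original example) of the tap/scroll
  // interactions described in the comment at the top of this file. The
  // 'mic' Icon lookup is an assumption about this app's widget tree;
  // adjust the finder to match the widgets actually rendered.
  testWidgets('Tap interaction sketch', (WidgetTester tester) async {
    await tester.pumpWidget(MyApp());
    // Find a widget, simulate a tap, then rebuild the frame.
    final micFinder = find.byIcon(Icons.mic);
    if (micFinder.evaluate().isNotEmpty) {
      await tester.tap(micFinder);
      await tester.pump();
    }
  });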
}

@ -0,0 +1,37 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m
.generated/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*
/Flutter/Generated.xcconfig
/Flutter/flutter_export_environment.sh

@ -0,0 +1,4 @@
#import <Flutter/Flutter.h>
@interface SpeechToTextPlugin : NSObject<FlutterPlugin>
@end

@ -0,0 +1,8 @@
#import "SpeechToTextPlugin.h"
#import <speech_to_text/speech_to_text-Swift.h>
@implementation SpeechToTextPlugin
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
[SwiftSpeechToTextPlugin registerWithRegistrar:registrar];
}
@end

@ -0,0 +1,580 @@
import Flutter
import UIKit
import Speech
import os.log
import Try
public enum SwiftSpeechToTextMethods: String {
case has_permission
case initialize
case listen
case stop
case cancel
case locales
case unknown // just for testing
}
public enum SwiftSpeechToTextCallbackMethods: String {
case textRecognition
case notifyStatus
case notifyError
case soundLevelChange
}
public enum SpeechToTextStatus: String {
case listening
case notListening
case unavailable
case available
}
public enum SpeechToTextErrors: String {
case onDeviceError
case noRecognizerError
case listenFailedError
case missingOrInvalidArg
}
public enum ListenMode: Int {
case deviceDefault = 0
case dictation = 1
case search = 2
case confirmation = 3
}
struct SpeechRecognitionWords : Codable {
let recognizedWords: String
let confidence: Decimal
}
struct SpeechRecognitionResult : Codable {
let alternates: [SpeechRecognitionWords]
let finalResult: Bool
}
struct SpeechRecognitionError : Codable {
let errorMsg: String
let permanent: Bool
}
enum SpeechToTextError: Error {
case runtimeError(String)
}
@available(iOS 10.0, *)
public class SwiftSpeechToTextPlugin: NSObject, FlutterPlugin {
private var channel: FlutterMethodChannel
private var registrar: FlutterPluginRegistrar
private var recognizer: SFSpeechRecognizer?
private var currentRequest: SFSpeechAudioBufferRecognitionRequest?
private var currentTask: SFSpeechRecognitionTask?
private var listeningSound: AVAudioPlayer?
private var successSound: AVAudioPlayer?
private var cancelSound: AVAudioPlayer?
private var rememberedAudioCategory: AVAudioSession.Category?
private var previousLocale: Locale?
private var onPlayEnd: (() -> Void)?
private var returnPartialResults: Bool = true
private var failedListen: Bool = false
private var listening = false
private let audioSession = AVAudioSession.sharedInstance()
private let audioEngine = AVAudioEngine()
private let jsonEncoder = JSONEncoder()
private let busForNodeTap = 0
private let speechBufferSize: AVAudioFrameCount = 1024
private static var subsystem = Bundle.main.bundleIdentifier!
private let pluginLog = OSLog(subsystem: "com.csdcorp.speechToText", category: "plugin")
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "plugin.csdcorp.com/speech_to_text", binaryMessenger: registrar.messenger())
let instance = SwiftSpeechToTextPlugin( channel, registrar: registrar )
registrar.addMethodCallDelegate(instance, channel: channel )
}
init( _ channel: FlutterMethodChannel, registrar: FlutterPluginRegistrar ) {
self.channel = channel
self.registrar = registrar
}
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
switch call.method {
case SwiftSpeechToTextMethods.has_permission.rawValue:
hasPermission( result )
case SwiftSpeechToTextMethods.initialize.rawValue:
initialize( result )
case SwiftSpeechToTextMethods.listen.rawValue:
guard let argsArr = call.arguments as? Dictionary<String,AnyObject>,
let partialResults = argsArr["partialResults"] as? Bool, let onDevice = argsArr["onDevice"] as? Bool, let listenModeIndex = argsArr["listenMode"] as? Int
else {
DispatchQueue.main.async {
result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue,
message:"Missing arg partialResults, onDevice, and listenMode are required",
details: nil ))
}
return
}
var localeStr: String? = nil
if let localeParam = argsArr["localeId"] as? String {
localeStr = localeParam
}
guard let listenMode = ListenMode(rawValue: listenModeIndex) else {
DispatchQueue.main.async {
result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue,
message:"invalid value for listenMode, must be 0-2, was \(listenModeIndex)",
details: nil ))
}
return
}
listenForSpeech( result, localeStr: localeStr, partialResults: partialResults, onDevice: onDevice, listenMode: listenMode )
case SwiftSpeechToTextMethods.stop.rawValue:
stopSpeech( result )
case SwiftSpeechToTextMethods.cancel.rawValue:
cancelSpeech( result )
case SwiftSpeechToTextMethods.locales.rawValue:
locales( result )
default:
os_log("Unrecognized method: %{PUBLIC}@", log: pluginLog, type: .error, call.method)
DispatchQueue.main.async {
result( FlutterMethodNotImplemented)
}
}
}
private func hasPermission( _ result: @escaping FlutterResult) {
let has = SFSpeechRecognizer.authorizationStatus() == SFSpeechRecognizerAuthorizationStatus.authorized &&
AVAudioSession.sharedInstance().recordPermission == AVAudioSession.RecordPermission.granted
DispatchQueue.main.async {
result( has )
}
}
private func initialize( _ result: @escaping FlutterResult) {
var success = false
let status = SFSpeechRecognizer.authorizationStatus()
switch status {
case SFSpeechRecognizerAuthorizationStatus.notDetermined:
SFSpeechRecognizer.requestAuthorization({(status)->Void in
success = status == SFSpeechRecognizerAuthorizationStatus.authorized
if ( success ) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
self.setupSpeechRecognition(result)
} else{
self.sendBoolResult( false, result );
os_log("User denied permission", log: self.pluginLog, type: .info)
}
})
}
else {
self.sendBoolResult( false, result );
}
});
case SFSpeechRecognizerAuthorizationStatus.denied:
os_log("Permission permanently denied", log: self.pluginLog, type: .info)
sendBoolResult( false, result );
case SFSpeechRecognizerAuthorizationStatus.restricted:
os_log("Device restriction prevented initialize", log: self.pluginLog, type: .info)
sendBoolResult( false, result );
default:
os_log("Has permissions continuing with setup", log: self.pluginLog, type: .debug)
setupSpeechRecognition(result)
}
}
fileprivate func sendBoolResult( _ value: Bool, _ result: @escaping FlutterResult) {
DispatchQueue.main.async {
result( value )
}
}
fileprivate func setupListeningSound() {
listeningSound = loadSound("assets/sounds/speech_to_text_listening.m4r")
successSound = loadSound("assets/sounds/speech_to_text_stop.m4r")
cancelSound = loadSound("assets/sounds/speech_to_text_cancel.m4r")
}
fileprivate func loadSound( _ assetPath: String ) -> AVAudioPlayer? {
var player: AVAudioPlayer? = nil
let soundKey = registrar.lookupKey(forAsset: assetPath )
guard !soundKey.isEmpty else {
return player
}
if let soundPath = Bundle.main.path(forResource: soundKey, ofType:nil) {
let soundUrl = URL(fileURLWithPath: soundPath )
do {
player = try AVAudioPlayer(contentsOf: soundUrl )
player?.delegate = self
} catch {
// no audio
}
}
return player
}
private func setupSpeechRecognition( _ result: @escaping FlutterResult) {
setupRecognizerForLocale( locale: Locale.current )
guard recognizer != nil else {
sendBoolResult( false, result );
return
}
recognizer?.delegate = self
setupListeningSound()
sendBoolResult( true, result );
}
private func setupRecognizerForLocale( locale: Locale ) {
if ( previousLocale == locale ) {
return
}
previousLocale = locale
recognizer = SFSpeechRecognizer( locale: locale )
}
private func getLocale( _ localeStr: String? ) -> Locale {
guard let aLocaleStr = localeStr else {
return Locale.current
}
let locale = Locale(identifier: aLocaleStr)
return locale
}
private func stopSpeech( _ result: @escaping FlutterResult) {
if ( !listening ) {
sendBoolResult( false, result );
return
}
stopAllPlayers()
if let sound = successSound {
onPlayEnd = {() -> Void in
self.currentTask?.finish()
self.stopCurrentListen( )
self.sendBoolResult( true, result )
return
}
sound.play()
}
else {
stopCurrentListen( )
sendBoolResult( true, result );
}
}
private func cancelSpeech( _ result: @escaping FlutterResult) {
if ( !listening ) {
sendBoolResult( false, result );
return
}
stopAllPlayers()
if let sound = cancelSound {
onPlayEnd = {() -> Void in
self.currentTask?.cancel()
self.stopCurrentListen( )
self.sendBoolResult( true, result )
return
}
sound.play()
}
else {
self.currentTask?.cancel()
stopCurrentListen( )
sendBoolResult( true, result );
}
}
private func stopAllPlayers() {
cancelSound?.stop()
successSound?.stop()
listeningSound?.stop()
}
private func stopCurrentListen( ) {
stopAllPlayers()
currentRequest?.endAudio()
do {
try trap {
self.audioEngine.stop()
}
}
catch {
os_log("Error stopping engine: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription)
}
do {
try trap {
let inputNode = self.audioEngine.inputNode
inputNode.removeTap(onBus: self.busForNodeTap);
}
}
catch {
os_log("Error removing trap: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription)
}
do {
if let rememberedAudioCategory = rememberedAudioCategory {
try self.audioSession.setCategory(rememberedAudioCategory)
}
}
catch {
os_log("Error stopping listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription)
}
do {
try self.audioSession.setActive(false, options: .notifyOthersOnDeactivation)
}
catch {
os_log("Error deactivation: %{PUBLIC}@", log: pluginLog, type: .info, error.localizedDescription)
}
currentRequest = nil
currentTask = nil
onPlayEnd = nil
listening = false
}
private func listenForSpeech( _ result: @escaping FlutterResult, localeStr: String?, partialResults: Bool, onDevice: Bool, listenMode: ListenMode ) {
if ( nil != currentTask || listening ) {
sendBoolResult( false, result );
return
}
do {
// let inErrorTest = true
failedListen = false
returnPartialResults = partialResults
setupRecognizerForLocale(locale: getLocale(localeStr))
guard let localRecognizer = recognizer else {
result(FlutterError( code: SpeechToTextErrors.noRecognizerError.rawValue,
message:"Failed to create speech recognizer",
details: nil ))
return
}
if ( onDevice ) {
if #available(iOS 13.0, *), !localRecognizer.supportsOnDeviceRecognition {
    result(FlutterError( code: SpeechToTextErrors.onDeviceError.rawValue,
                         message:"On-device recognition is not supported on this device",
                         details: nil ))
    return // without this, execution would continue and result would be called twice
}
}
rememberedAudioCategory = self.audioSession.category
try self.audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
// try self.audioSession.setMode(AVAudioSession.Mode.measurement)
try self.audioSession.setMode(AVAudioSession.Mode.default)
try self.audioSession.setActive(true, options: .notifyOthersOnDeactivation)
if let sound = listeningSound {
self.onPlayEnd = {()->Void in
if ( !self.failedListen ) {
self.listening = true
self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue )
}
}
sound.play()
}
self.audioEngine.reset();
let inputNode = self.audioEngine.inputNode
if(inputNode.inputFormat(forBus: 0).channelCount == 0){
throw SpeechToTextError.runtimeError("Not enough available inputs.")
}
self.currentRequest = SFSpeechAudioBufferRecognitionRequest()
guard let currentRequest = self.currentRequest else {
sendBoolResult( false, result );
return
}
currentRequest.shouldReportPartialResults = true
if #available(iOS 13.0, *), onDevice {
currentRequest.requiresOnDeviceRecognition = true
}
switch listenMode {
case ListenMode.dictation:
currentRequest.taskHint = SFSpeechRecognitionTaskHint.dictation
break
case ListenMode.search:
currentRequest.taskHint = SFSpeechRecognitionTaskHint.search
break
case ListenMode.confirmation:
currentRequest.taskHint = SFSpeechRecognitionTaskHint.confirmation
break
default:
break
}
self.currentTask = self.recognizer?.recognitionTask(with: currentRequest, delegate: self )
let recordingFormat = inputNode.outputFormat(forBus: self.busForNodeTap)
try trap {
inputNode.installTap(onBus: self.busForNodeTap, bufferSize: self.speechBufferSize, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
currentRequest.append(buffer)
self.updateSoundLevel( buffer: buffer )
}
}
// if ( inErrorTest ){
// throw SpeechToTextError.runtimeError("for testing only")
// }
self.audioEngine.prepare()
try self.audioEngine.start()
if nil == listeningSound {
listening = true
self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue )
}
sendBoolResult( true, result );
}
catch {
failedListen = true
os_log("Error starting listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription)
stopCurrentListen()
sendBoolResult( false, result );
invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue )
let speechError = SpeechRecognitionError(errorMsg: "error_listen_failed", permanent: true )
do {
let errorResult = try jsonEncoder.encode(speechError)
invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyError, arguments: String( data:errorResult, encoding: .utf8) )
} catch {
os_log("Could not encode JSON", log: pluginLog, type: .error)
}
}
}
private func updateSoundLevel( buffer: AVAudioPCMBuffer) {
guard
let channelData = buffer.floatChannelData
else {
return
}
let channelDataValue = channelData.pointee
let channelDataValueArray = stride(from: 0,
to: Int(buffer.frameLength),
by: buffer.stride).map{ channelDataValue[$0] }
let frameLength = Float(buffer.frameLength)
let rms = sqrt(channelDataValueArray.map{ $0 * $0 }.reduce(0, +) / frameLength )
let avgPower = 20 * log10(rms)
self.invokeFlutter( SwiftSpeechToTextCallbackMethods.soundLevelChange, arguments: avgPower )
}
/// Build a list of localId:name with the current locale first
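/// e.g. "en_US:English (United States)" (illustrative; the display name comes from localizedString(forIdentifier:))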
private func locales( _ result: @escaping FlutterResult ) {
var localeNames = [String]();
let locales = SFSpeechRecognizer.supportedLocales();
let currentLocale = Locale.current
if let idName = buildIdNameForLocale(forIdentifier: currentLocale.identifier ) {
localeNames.append(idName)
}
for locale in locales {
if ( locale.identifier == currentLocale.identifier) {
continue
}
if let idName = buildIdNameForLocale(forIdentifier: locale.identifier ) {
localeNames.append(idName)
}
}
DispatchQueue.main.async {
result(localeNames)
}
}
private func buildIdNameForLocale( forIdentifier: String ) -> String? {
var idName: String?
if let name = Locale.current.localizedString(forIdentifier: forIdentifier ) {
let sanitizedName = name.replacingOccurrences(of: ":", with: " ")
idName = "\(forIdentifier):\(sanitizedName)"
}
return idName
}
private func handleResult( _ transcriptions: [SFTranscription], isFinal: Bool ) {
if ( !isFinal && !returnPartialResults ) {
return
}
var speechWords: [SpeechRecognitionWords] = []
for transcription in transcriptions {
let words: SpeechRecognitionWords = SpeechRecognitionWords(recognizedWords: transcription.formattedString, confidence: confidenceIn( transcription))
speechWords.append( words )
}
let speechInfo = SpeechRecognitionResult(alternates: speechWords, finalResult: isFinal )
do {
let speechMsg = try jsonEncoder.encode(speechInfo)
if let speechStr = String( data:speechMsg, encoding: .utf8) {
os_log("Encoded JSON result: %{PUBLIC}@", log: pluginLog, type: .debug, speechStr )
invokeFlutter( SwiftSpeechToTextCallbackMethods.textRecognition, arguments: speechStr )
}
} catch {
os_log("Could not encode JSON", log: pluginLog, type: .error)
}
}
private func confidenceIn( _ transcription: SFTranscription ) -> Decimal {
guard ( transcription.segments.count > 0 ) else {
return 0;
}
var totalConfidence: Float = 0.0;
for segment in transcription.segments {
totalConfidence += segment.confidence
}
let avgConfidence: Float = totalConfidence / Float(transcription.segments.count )
let confidence: Float = (avgConfidence * 1000).rounded() / 1000
return Decimal( string: String( describing: confidence ) )!
}
private func invokeFlutter( _ method: SwiftSpeechToTextCallbackMethods, arguments: Any? ) {
DispatchQueue.main.async {
self.channel.invokeMethod( method.rawValue, arguments: arguments )
}
}
}
@available(iOS 10.0, *)
extension SwiftSpeechToTextPlugin : SFSpeechRecognizerDelegate {
public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
let availability = available ? SpeechToTextStatus.available.rawValue : SpeechToTextStatus.unavailable.rawValue
os_log("Availability changed: %{PUBLIC}@", log: pluginLog, type: .debug, availability)
invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: availability )
}
}
@available(iOS 10.0, *)
extension SwiftSpeechToTextPlugin : SFSpeechRecognitionTaskDelegate {
public func speechRecognitionDidDetectSpeech(_ task: SFSpeechRecognitionTask) {
// Do nothing for now
}
public func speechRecognitionTaskFinishedReadingAudio(_ task: SFSpeechRecognitionTask) {
reportError(source: "FinishedReadingAudio", error: task.error)
invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue )
}
public func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask) {
reportError(source: "TaskWasCancelled", error: task.error)
invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue )
}
public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) {
reportError(source: "FinishSuccessfully", error: task.error)
stopCurrentListen( )
}
public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription) {
reportError(source: "HypothesizeTranscription", error: task.error)
handleResult( [transcription], isFinal: false )
}
public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishRecognition recognitionResult: SFSpeechRecognitionResult) {
reportError(source: "FinishRecognition", error: task.error)
let isFinal = recognitionResult.isFinal
handleResult( recognitionResult.transcriptions, isFinal: isFinal )
}
private func reportError( source: String, error: Error?) {
if ( nil != error) {
os_log("%{PUBLIC}@ with error: %{PUBLIC}@", log: pluginLog, type: .debug, source, error.debugDescription)
}
}
}
@available(iOS 10.0, *)
extension SwiftSpeechToTextPlugin : AVAudioPlayerDelegate {
public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
successfully flag: Bool) {
if let playEnd = self.onPlayEnd {
playEnd()
}
}
}

@ -0,0 +1,22 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'speech_to_text'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => 'email@example.com' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.dependency 'Try'
s.ios.deployment_target = '8.0'
end
