diff --git a/assets/fonts/DoctorApp.ttf b/assets/fonts/DoctorApp.ttf
index 718a3afc..86289098 100644
Binary files a/assets/fonts/DoctorApp.ttf and b/assets/fonts/DoctorApp.ttf differ
diff --git a/assets/images/dashboard/1.png b/assets/images/dashboard/1.png
new file mode 100644
index 00000000..d917360b
Binary files /dev/null and b/assets/images/dashboard/1.png differ
diff --git a/assets/images/dashboard/2.png b/assets/images/dashboard/2.png
new file mode 100644
index 00000000..3789ddd6
Binary files /dev/null and b/assets/images/dashboard/2.png differ
diff --git a/assets/images/dashboard/3.png b/assets/images/dashboard/3.png
new file mode 100644
index 00000000..38585ec8
Binary files /dev/null and b/assets/images/dashboard/3.png differ
diff --git a/assets/images/dashboard/4.png b/assets/images/dashboard/4.png
new file mode 100644
index 00000000..46958ede
Binary files /dev/null and b/assets/images/dashboard/4.png differ
diff --git a/assets/images/dashboard/5.png b/assets/images/dashboard/5.png
new file mode 100644
index 00000000..2a64c27a
Binary files /dev/null and b/assets/images/dashboard/5.png differ
diff --git a/ios/Podfile.lock b/ios/Podfile.lock
index 70b9d103..cddfd571 100644
--- a/ios/Podfile.lock
+++ b/ios/Podfile.lock
@@ -33,7 +33,11 @@ PODS:
     - Flutter
   - shared_preferences_web (0.0.1):
     - Flutter
+  - speech_to_text (0.0.1):
+    - Flutter
+    - Try
   - SwiftProtobuf (1.9.0)
+  - Try (2.1.1)
   - url_launcher (0.0.1):
     - Flutter
   - url_launcher_macos (0.0.1):
@@ -58,6 +62,7 @@ DEPENDENCIES:
   - shared_preferences (from `.symlinks/plugins/shared_preferences/ios`)
   - shared_preferences_macos (from `.symlinks/plugins/shared_preferences_macos/ios`)
   - shared_preferences_web (from `.symlinks/plugins/shared_preferences_web/ios`)
+  - speech_to_text (from `.symlinks/plugins/speech_to_text/ios`)
   - url_launcher (from `.symlinks/plugins/url_launcher/ios`)
   - url_launcher_macos (from `.symlinks/plugins/url_launcher_macos/ios`)
   - url_launcher_web (from `.symlinks/plugins/url_launcher_web/ios`)
@@ -69,6 +74,7 @@ SPEC REPOS:
   - OpenTok
   - Reachability
   - SwiftProtobuf
+  - Try

 EXTERNAL SOURCES:
   barcode_scan:
@@ -99,6 +105,8 @@ EXTERNAL SOURCES:
     :path: ".symlinks/plugins/shared_preferences_macos/ios"
   shared_preferences_web:
     :path: ".symlinks/plugins/shared_preferences_web/ios"
+  speech_to_text:
+    :path: ".symlinks/plugins/speech_to_text/ios"
   url_launcher:
     :path: ".symlinks/plugins/url_launcher/ios"
   url_launcher_macos:
@@ -125,11 +133,13 @@ SPEC CHECKSUMS:
   shared_preferences: af6bfa751691cdc24be3045c43ec037377ada40d
   shared_preferences_macos: f3f29b71ccbb56bf40c9dd6396c9acf15e214087
   shared_preferences_web: 141cce0c3ed1a1c5bf2a0e44f52d31eeb66e5ea9
+  speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe
  SwiftProtobuf: ecbec1be9036d15655f6b3443a1c4ea693c97932
+  Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96
   url_launcher: 6fef411d543ceb26efce54b05a0a40bfd74cbbef
   url_launcher_macos: fd7894421cd39320dce5f292fc99ea9270b2a313
   url_launcher_web: e5527357f037c87560776e36436bf2b0288b965c

 PODFILE CHECKSUM: 649616dc336b3659ac6b2b25159d8e488e042b69

-COCOAPODS: 1.9.3
+COCOAPODS: 1.10.0.beta.1
diff --git a/lib/config/config.dart b/lib/config/config.dart
index 613839f0..e5b948ec 100644
--- a/lib/config/config.dart
+++ b/lib/config/config.dart
@@ -2,9 +2,9 @@ const MAX_SMALL_SCREEN = 660;
 const ONLY_NUMBERS = "[0-9]";
 const ONLY_LETTERS = "[a-zA-Z &'\"]";
 const ONLY_DATE = "[0-9/]";
-//const BASE_URL = 'https://hmgwebservices.com/';
-const BASE_URL = 'https://uat.hmgwebservices.com/';
-const PHARMACY_ITEMS_URL = "Services/Lists.svc/REST/GetPharmcyItems_Region";
+const BASE_URL = 'https://hmgwebservices.com/';
+//const BASE_URL = 'https://uat.hmgwebservices.com/';
+const PHARMACY_ITEMS_URL = "Services/Lists.svc/REST/GetPharmcyItems_Region_enh";
 const PHARMACY_LIST_URL = "Services/Patients.svc/REST/GetPharmcyList";
 const PATIENT_PROGRESS_NOTE_URL =
     "Services/DoctorApplication.svc/REST/GetProgressNoteForInPatient";
@@ -74,6 +74,9 @@ const SEND_ACTIVATION_CODE_BY_OTP_NOTIFICATION_TYPE =
 const MEMBER_CHECK_ACTIVATION_CODE_NEW =
     'Services/Sentry.svc/REST/MemberCheckActivationCode_New';
 const GET_DOC_PROFILES = 'Services/Doctors.svc/REST/GetDocProfiles';
+const TRANSFER_TO_ADMIN = 'LiveCareApi/DoctorApp/TransferToAdmin';
+const END_CALL = 'LiveCareApi/DoctorApp/EndCall';
+const END_CALL_WITH_CHARGE = 'LiveCareApi/DoctorApp/CompleteCallWithCharge';

 var selectedPatientType = 1;
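The three LiveCare endpoints added above are relative paths, like the other service constants in config.dart. A minimal sketch of how they resolve against BASE_URL (the Uri composition below is illustrative only; in this app requests actually go through BaseAppClient.post):

```dart
// Illustrative only: how the new LiveCare paths resolve against BASE_URL.
// The app itself posts through BaseAppClient.post(END_CALL, ...).
const BASE_URL = 'https://hmgwebservices.com/';
const END_CALL = 'LiveCareApi/DoctorApp/EndCall';

final Uri endCallUri = Uri.parse(BASE_URL + END_CALL);
// => https://hmgwebservices.com/LiveCareApi/DoctorApp/EndCall
```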
"Services/Lists.svc/REST/GetPharmcyItems_Region"; +const BASE_URL = 'https://hmgwebservices.com/'; +//const BASE_URL = 'https://uat.hmgwebservices.com/'; +const PHARMACY_ITEMS_URL = "Services/Lists.svc/REST/GetPharmcyItems_Region_enh"; const PHARMACY_LIST_URL = "Services/Patients.svc/REST/GetPharmcyList"; const PATIENT_PROGRESS_NOTE_URL = "Services/DoctorApplication.svc/REST/GetProgressNoteForInPatient"; @@ -74,6 +74,9 @@ const SEND_ACTIVATION_CODE_BY_OTP_NOTIFICATION_TYPE = const MEMBER_CHECK_ACTIVATION_CODE_NEW = 'Services/Sentry.svc/REST/MemberCheckActivationCode_New'; const GET_DOC_PROFILES = 'Services/Doctors.svc/REST/GetDocProfiles'; +const TRANSFERT_TO_ADMIN = 'LiveCareApi/DoctorApp/TransferToAdmin'; +const END_CALL = 'LiveCareApi/DoctorApp/EndCall'; +const END_CALL_WITH_CHARGE = 'LiveCareApi/DoctorApp/CompleteCallWithCharge'; var selectedPatientType = 1; diff --git a/lib/config/localized_values.dart b/lib/config/localized_values.dart index c5332425..93f4a6b8 100644 --- a/lib/config/localized_values.dart +++ b/lib/config/localized_values.dart @@ -9,7 +9,7 @@ const Map> localizedValues = { 'fileNo': {'en': 'File No:', 'ar': 'رقم الملف:'}, 'mobileNo': {'en': 'Mobile No', 'ar': 'رقم الموبايل'}, 'messagesScreenToolbarTitle': {'en': 'Messages', 'ar': 'الرسائل'}, - 'mySchedule': {'en': 'My Schedule', 'ar': 'جدولي'}, + 'mySchedule': {'en': 'Schedule', 'ar': 'جدولي'}, 'errorNoSchedule': { 'en': 'You don\'t have any Schedule', 'ar': 'ليس لديك أي جدول زمني' @@ -216,4 +216,8 @@ const Map> localizedValues = { 'beingGreat': {'en': 'being great', 'ar': 'رائع'}, 'cancel': {'en': 'CANCEL', 'ar': 'الغاء'}, 'done': {'en': 'DONE', 'ar': 'تأكيد'}, + 'resumecall': {'en': 'Resume call', 'ar': 'استئناف المكالمة'}, + 'endcallwithcharge': {'en': 'End with charge', 'ar': 'ينتهي مع الشحن'}, + 'endcall': {'en': 'End Call', 'ar': 'إنهاء المكالمة'}, + 'transfertoadmin': {'en': 'Transfer to admin', 'ar': 'نقل إلى المسؤول'}, }; diff --git a/lib/icons_app/config.json b/lib/icons_app/config.json index 0e33e54a..f5a6bca1 100644 --- a/lib/icons_app/config.json +++ b/lib/icons_app/config.json @@ -272,6 +272,160 @@ "search_patient" ] }, + { + "uid": "5a324eddf382f5a1167d7d40325f82c8", + "css": "reject_icon", + "code": 59394, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M681 626.7L554.3 500 681 373.3A38.5 38.5 0 0 0 626.7 318.9L500 445.7 373.3 319A38.5 38.5 0 1 0 318.9 373.3L445.7 500 318.9 626.8A37.2 37.2 0 0 0 318.9 681.1 38.2 38.2 0 0 0 373.3 681.1L500 554.4 626.7 681.1A38.5 38.5 0 0 0 681.1 681.1 38.2 38.2 0 0 0 681 626.7ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A500 500 0 1 0 1000 500 500 500 0 0 0 500 0Z", + "width": 1000 + }, + "search": [ + "reject_icon" + ] + }, + { + "uid": "2742f64b5e69cc4f39a2dcc5a081ad03", + "css": "approved_icon", + "code": 59403, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M756.3 348.3L714 304.8A9.1 9.1 0 0 0 707.2 302H707.2A8.7 8.7 0 0 0 700.5 304.8L407.2 600.3 300.5 493.5A9.3 9.3 0 0 0 287.1 493.5L244.2 536.3A9.6 9.6 0 0 0 244.2 550L378.8 684.5A42.6 42.6 0 0 0 406.9 698.2 44.6 44.6 0 0 0 434.8 685H435L756.5 362A10.3 10.3 0 0 0 756.3 348.3ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A499.9 499.9 0 1 0 999.9 500 499.9 499.9 0 0 0 500 0Z", + "width": 1000 + }, + "search": [ + "approved_icon" + ] + }, + { + "uid": "148de09f7fd22c378cdfdbaacaa8e205", + "css": "pending_icon", + "code": 59404, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M809.8 932H797.3C781.5 932 768.2 929.8 
766.5 914V913.7C731 583.9 523.3 577.6 523.3 499.9S731.1 415.9 766.5 86V85.9C768.2 70.1 781.5 68 797.3 68H809.8A34.8 34.8 0 0 0 844.7 35.7 34 34 0 0 0 810.7 0H35A34.8 34.8 0 0 0 0 32.3 34 34 0 0 0 34 68H47.2C63 68 76.3 70.2 78.1 85.9V86.2C113.7 416 321.5 422.3 321.5 500S113.7 584 78.3 913.8V914C76.6 929.8 63.3 932 47.5 932H35A34.8 34.8 0 0 0 0.1 964.3 34 34 0 0 0 34.1 1000H810.7A33.9 33.9 0 0 0 844.7 964.3 34.8 34.8 0 0 0 809.8 932ZM197.4 848.9C267 655.2 390.6 678.7 390.6 602.3V467.5C390.6 420.4 301.2 387.6 245.4 311.1A19 19 0 0 1 261.2 281H583.8A18.9 18.9 0 0 1 600 310.6C545.2 387.3 454.2 420.4 454.2 467.4V602.4C454.2 678.1 572.9 657.5 647.8 849 654.7 866.3 649.5 887.8 631.2 887.8H214.1C195.4 887.8 191.1 866.5 197.4 849Z", + "width": 845 + }, + "search": [ + "pending_icon" + ] + }, + { + "uid": "0bbb324cc39e62b3a4e05639a4f4008f", + "css": "home_icon", + "code": 59407, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M973.1 435L973.1 435 565 27A92 92 0 0 0 434.9 27L27.2 434.7 26.8 435.1A92 92 0 0 0 88 591.9C88.9 591.9 89.9 591.9 90.8 591.9H107.1V892.1A107.9 107.9 0 0 0 214.8 999.9H374.4A29.3 29.3 0 0 0 403.7 970.6V735.3A49.2 49.2 0 0 1 452.9 686.2H547A49.2 49.2 0 0 1 596.2 735.3V970.7A29.3 29.3 0 0 0 625.5 1000H785.1A107.9 107.9 0 0 0 892.8 892.3V592H907.8A92.1 92.1 0 0 0 973 434.9ZM931.6 523.7A33.4 33.4 0 0 1 907.9 533.5H863.5A29.3 29.3 0 0 0 834.2 562.8V892.3A49.2 49.2 0 0 1 785.1 941.4H654.9V735.3A107.9 107.9 0 0 0 547 627.5H452.9A107.9 107.9 0 0 0 345.1 735.2V941.3H215A49.2 49.2 0 0 1 165.8 892.1V562.8A29.3 29.3 0 0 0 136.5 533.5H92.8L91.4 533.5A33.4 33.4 0 0 1 68.4 476.4H68.4L476.3 68.4A33.4 33.4 0 0 1 523.6 68.4L931.4 476.2 931.6 476.4A33.4 33.4 0 0 1 931.6 523.7ZM931.6 523.7", + "width": 1000 + }, + "search": [ + "home_icon" + ] + }, + { + "uid": "d8fc8a6e9f001147307d87b3b620b86f", + "css": "mail", + "code": 59422, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M1055.6 667.9V111.1A111.2 111.2 0 0 0 944.4 0H111.1A111.2 111.2 0 0 0 0 111.1V666.7A111.2 111.2 0 0 0 111.1 777.8H777.8A46 46 0 0 0 789.7 810C819.2 841 869.8 889 914.5 931.2 936.9 952.3 957.3 971.7 972.1 986A48.3 48.3 0 0 0 1006 1000 50.2 50.2 0 0 0 1055.5 949.3V889.4C1198.9 894.6 1277.9 940.2 1277.9 972.2A27.8 27.8 0 1 0 1333.4 972.2 306 306 0 0 0 1055.6 667.9ZM111.1 55.7H944.4C945.6 55.7 946.6 56.2 947.7 56.3L551.3 381.8A46.3 46.3 0 0 1 506.8 383.6L107.9 56.2C109 56.1 110 55.6 111.1 55.6ZM111.1 722.3A55.6 55.6 0 0 1 55.6 666.8V111.1A55 55 0 0 1 60.3 89L474 428.4A98.2 98.2 0 0 0 527.8 444.5 100.7 100.7 0 0 0 584.2 426.5L995.3 89A55 55 0 0 1 1000 111.1V556.8A48.1 48.1 0 0 0 972.1 569.7C957.4 584.1 936.9 603.4 914.6 624.5 879.9 657.3 841.9 693.2 812.7 722.3H111.1ZM1027.9 833.3A27.8 27.8 0 0 0 1000.2 861.1V935.7C986.8 922.8 970.4 907.3 952.9 890.8 911.6 851.7 865 807.8 836 777.8 865 747.8 911.6 703.8 952.9 664.7 971.6 647.1 989 630.6 1000.2 618.2V694.4A27.8 27.8 0 0 0 1027.9 722.2 250.3 250.3 0 0 1 1259.3 877.5 525.9 525.9 0 0 0 1027.9 833.3Z", + "width": 1333 + }, + "search": [ + "mail" + ] + }, + { + "uid": "afe3a55435e46aeeeeae8f60731d4706", + "css": "medicinesearch", + "code": 59423, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M986 125.2A171.6 171.6 0 1 0 1157.6 296.8 171.8 171.8 0 0 0 986 125.2ZM986 449.3A152.6 152.6 0 1 1 1138.6 296.8 152.7 152.7 0 0 1 986 449.3ZM862.1 306.4H881.1A114.6 114.6 0 0 1 995.5 192V173A133.5 133.5 0 0 0 862.1 306.4ZM1332.6 589.6L1346.1 576.2 1209.6 439.6 1196.2 453 1172.3 429.1A228.3 228.3 0 1 0 1118.4 483.3L1142.3 507.2 1128.8 520.6 
1265.1 657 1278.6 643.5 1155.7 520.6 1209.6 466.7ZM985.9 506.6A209.7 209.7 0 1 1 1195.6 296.9 210 210 0 0 1 985.9 506.6ZM1155.7 493.5L1133.6 471.4A228.6 228.6 0 0 0 1160.6 444.5L1182.7 466.6ZM416.7 700V733.3H100A16.7 16.7 0 0 1 83.3 716.6V566.6A16.7 16.7 0 0 1 100 550H416.7V583.2H116.6V699.9ZM33.3 866.7H416.7V900H33.3A66.7 66.7 0 0 0 99.9 966.5H466.6A67.2 67.2 0 0 0 479.9 965.2L486.6 997.9A100.5 100.5 0 0 1 466.6 999.9H99.9A100.1 100.1 0 0 1-0.1 899.9V583.3H33.2ZM79.2 316.7H83.3V266.7H70.9A37.5 37.5 0 0 1 33.3 229.2V58.3A58.4 58.4 0 0 1 91.7 0H400V33.3H91.7A25 25 0 0 0 66.7 58.3V229.2A4.2 4.2 0 0 0 70.9 233.3H495.9A4.2 4.2 0 0 0 500.1 229.2V58.3A25 25 0 0 0 475.1 33.3H433.4V0H475.1A58.4 58.4 0 0 1 533.4 58.3V229.2A37.5 37.5 0 0 1 495.9 266.7H483.4V316.7H487.6A79.4 79.4 0 0 1 566.8 395.9V400H533.5V395.9A45.9 45.9 0 0 0 487.6 350H79.2A45.9 45.9 0 0 0 33.3 395.9V466.7H450V500H33.3V550H0V395.8A79.4 79.4 0 0 1 79.2 316.7ZM450 316.7V266.7H116.7V316.7ZM466.6 66.7H433.3V200H466.6ZM400 66.7H366.7V200H400ZM333.3 66.7H300V200H333.3ZM266.6 66.7H233.3V200H266.6ZM200 66.7H166.7V200H200ZM133.3 66.7H100V200H133.3ZM416.7 783.3H316.7V816.6H416.7ZM283.3 783.3H83.3V816.6H283.3ZM450 595.8A162.7 162.7 0 0 1 612.5 433.3H620.8A163 163 0 0 1 780 563.3L747.4 570A129.5 129.5 0 0 0 620.8 466.7H612.5A129.3 129.3 0 0 0 483.3 595.8V683.3H583.3V716.6H483.3V749.9H566.7V783.2H483.3V837.3A129.3 129.3 0 0 0 612.5 966.5H620.8A129.1 129.1 0 0 0 661.4 960L671.9 991.6A162.3 162.3 0 0 1 620.8 999.8H612.5A162.7 162.7 0 0 1 450 837.3ZM600 800A200 200 0 1 1 800 1000 200 200 0 0 1 600 800ZM633.3 800A165.6 165.6 0 0 0 652.2 877L877 652.2A166.6 166.6 0 0 0 633.3 800ZM694.5 929L838.2 785.3 861.7 808.9 722.9 947.8A166.6 166.6 0 0 0 947.7 722.9L892.2 778.5 868.6 754.9 928.9 694.6A168.3 168.3 0 0 0 905.4 671L670.9 905.4A168.1 168.1 0 0 0 694.5 929Z", + "width": 1346 + }, + "search": [ + "medicinesearch" + ] + }, + { + "uid": "c129853724095c9f90addf42325e14bc", + "css": "qr-code-1", + "code": 59424, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M970.7 746.1A29.3 29.3 0 0 0 941.4 775.4V941.4H775.4A29.3 29.3 0 1 0 775.4 1000H970.7A29.3 29.3 0 0 0 1000 970.7V775.4A29.3 29.3 0 0 0 970.7 746.1ZM224.6 941.4H58.6V775.4A29.3 29.3 0 1 0 0 775.4V970.7A29.3 29.3 0 0 0 29.3 1000H224.6A29.3 29.3 0 1 0 224.6 941.4ZM970.7 0H775.4A29.3 29.3 0 1 0 775.4 58.6H941.4V224.6A29.3 29.3 0 1 0 1000 224.6V29.3A29.3 29.3 0 0 0 970.7 0ZM29.3 253.9A29.3 29.3 0 0 0 58.6 224.6V58.6H224.6A29.3 29.3 0 1 0 224.6 0H29.3A29.3 29.3 0 0 0 0 29.3V224.6A29.3 29.3 0 0 0 29.3 253.9ZM617.2 412.1H851.6A29.3 29.3 0 0 0 880.8 382.8V148.4A29.3 29.3 0 0 0 851.6 119.2H617.2A29.3 29.3 0 0 0 587.9 148.4V382.8A29.3 29.3 0 0 0 617.2 412.1ZM646.5 177.8H822.2V353.5H646.5ZM382.8 587.9H148.4A29.3 29.3 0 0 0 119.2 617.2V851.6A29.3 29.3 0 0 0 148.4 880.8H382.8A29.3 29.3 0 0 0 412.1 851.6V617.2A29.3 29.3 0 0 0 382.8 587.9ZM353.5 822.3H177.8V646.5H353.5ZM880.9 851.6V734.4A29.3 29.3 0 0 0 822.3 734.4V822.3H734.4A29.3 29.3 0 1 0 734.4 880.9H851.6A29.3 29.3 0 0 0 880.9 851.6ZM617.2 529.3H822.2V617.2A29.3 29.3 0 1 0 880.8 617.2V500A29.3 29.3 0 0 0 851.6 470.7H617.2A29.3 29.3 0 0 0 617.2 529.3ZM617.2 822.3H529.3V734.4A29.3 29.3 0 1 0 470.7 734.4V851.6A29.3 29.3 0 0 0 500 880.8H617.2A29.3 29.3 0 0 0 617.2 822.2ZM382.8 119.2H148.4A29.3 29.3 0 0 0 119.2 148.4V382.8A29.3 29.3 0 0 0 148.4 412.1H382.8A29.3 29.3 0 0 0 412.1 382.8V148.4A29.3 29.3 0 0 0 382.8 119.2ZM353.5 353.5H177.8V177.8H353.5ZM734.4 705.1H617.2A29.3 29.3 0 0 0 617.2 763.7H734.4A29.3 29.3 0 0 0 734.4 705.1ZM500 
646.5H734.4A29.3 29.3 0 0 0 734.4 587.9H529.3V500A29.3 29.3 0 0 0 470.7 500V617.2A29.3 29.3 0 0 0 500 646.5ZM148.4 529.3H226.6A29.3 29.3 0 1 0 226.6 470.7H148.4A29.3 29.3 0 1 0 148.4 529.3ZM382.8 470.7H343.8A29.3 29.3 0 1 0 343.8 529.3H382.8A29.3 29.3 0 0 0 382.8 470.7ZM500 412.1A29.3 29.3 0 0 0 529.3 382.8V265.6A29.3 29.3 0 1 0 470.7 265.6V382.8A29.3 29.3 0 0 0 500 412.1Z", + "width": 1000 + }, + "search": [ + "qr-code" + ] + }, + { + "uid": "6df6856cee070f6f2f1be832aff1920e", + "css": "referral-1", + "code": 59425, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M111.9 752A107.4 107.4 0 0 1 55.5 658.2 110.6 110.6 0 0 1 276.6 658.2 107.4 107.4 0 0 1 220.1 752.1 166.2 166.2 0 0 1 332 908.9V983.4A16.7 16.7 0 0 1 315.3 1000.1H16.7A16.7 16.7 0 0 1 0 983.4V908.8A166.2 166.2 0 0 1 111.9 752ZM243.3 658.2A77.3 77.3 0 1 0 166 732.7 76 76 0 0 0 243.3 658.2ZM33.3 966.6H298.8V908.8A132.6 132.6 0 0 0 33.4 908.8ZM111.8 201.7A107.4 107.4 0 0 1 55.5 107.8 110.6 110.6 0 0 1 276.6 107.8 107.4 107.4 0 0 1 220.3 201.7 166.2 166.2 0 0 1 332.1 358.5V433A16.7 16.7 0 0 1 315.4 449.7H16.7A16.7 16.7 0 0 1 0 433V358.4A166.2 166.2 0 0 1 111.8 201.7ZM243.3 107.8A77.3 77.3 0 0 0 88.9 107.8 77.3 77.3 0 0 0 243.3 107.8ZM33.3 416.3H298.8V358.4A132.6 132.6 0 0 0 33.4 358.4ZM462.6 492.6L368.8 411.8A16.7 16.7 0 0 1 390.6 386.5L494.7 476.2H612.1A16.7 16.7 0 1 1 612.1 509.6H494.5L390.3 595A18.6 18.6 0 0 1 366.8 592.7 16.3 16.3 0 0 1 369.2 569.2ZM707.4 452.7A126.1 126.1 0 0 1 676.3 369.9 128.7 128.7 0 0 1 933.8 369.9 126 126 0 0 1 902.7 452.7 184 184 0 0 1 1000 613.3V711.4A16.7 16.7 0 0 1 983.3 728.1H626.8A16.7 16.7 0 0 1 610.1 711.4V613.3A184 184 0 0 1 707.4 452.7ZM805.1 275.8A94.1 94.1 0 1 0 900.5 369.9 94.8 94.8 0 0 0 805.1 275.8ZM823.1 495.9A130.7 130.7 0 0 1 805.1 497.3 128.3 128.3 0 0 1 787.1 495.9V573.8L805.1 596.8 823.1 573.8ZM643.5 694.7H695.9V613A16.7 16.7 0 0 1 729.3 613V694.7H880.8V613A16.7 16.7 0 0 1 914.2 613V694.7H966.7V613.3A150.6 150.6 0 0 0 875.1 476.6 129 129 0 0 1 856.4 486.7V585.4L818.2 634.3A16.7 16.7 0 0 1 791.9 634.3L753.6 585.4V486.7A129.8 129.8 0 0 1 734.9 476.6 150.6 150.6 0 0 0 643.4 613.3V694.7Z", + "width": 1000 + }, + "search": [ + "referral" + ] + }, + { + "uid": "609e74ef20b926b8e212d28a24bf0f36", + "css": "referred", + "code": 59426, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M887.8 751.4A107.7 107.7 0 0 0 944.3 657.2 110.9 110.9 0 0 0 722.6 657.2 107.7 107.7 0 0 0 779.1 751.4 166.6 166.6 0 0 0 666.9 908.6V983.2A16.7 16.7 0 0 0 683.7 999.9H983.3A16.7 16.7 0 0 0 1000 983.2V908.6A166.6 166.6 0 0 0 887.8 751.4ZM756.1 657.2A77.5 77.5 0 1 1 833.5 731.9 76.1 76.1 0 0 1 756.1 657.2ZM966.6 966.5H700.5V908.6A133 133 0 0 1 966.6 908.6ZM887.9 202.2A107.7 107.7 0 0 0 944.4 108.1 110.9 110.9 0 0 0 722.7 108.1 107.7 107.7 0 0 0 779.2 202.2 166.6 166.6 0 0 0 667.1 359.3V434.1A16.7 16.7 0 0 0 683.8 450.8H983.3A16.7 16.7 0 0 0 1000 434.1V359.3A166.6 166.6 0 0 0 887.9 202.2ZM756.2 108.1A77.5 77.5 0 0 1 911 108.1 77.5 77.5 0 0 1 756.2 108.1ZM966.6 417.4H700.5V359.3A133 133 0 0 1 966.6 359.3ZM537.5 492.7L631.5 411.6A16.7 16.7 0 0 0 609.7 386.3L505.4 476.3H387.6A16.7 16.7 0 1 0 387.6 509.7H505.5L610 595.4A18.7 18.7 0 0 0 633.5 593 16.3 16.3 0 0 0 631.1 569.5ZM293.3 452.6A126.3 126.3 0 0 0 324.5 369.6 129.1 129.1 0 0 0 66.4 369.6 126.3 126.3 0 0 0 97.5 452.6 184.4 184.4 0 0 0 0 613.7V712A16.7 16.7 0 0 0 16.7 728.7H374.1A16.7 16.7 0 0 0 390.8 712V613.7A184.4 184.4 0 0 0 293.3 452.6ZM195.5 275.3A94.3 94.3 0 1 1 99.9 369.6 95.1 95.1 0 0 1 195.4 275.3ZM177.5 496A131.1 131.1 0 
0 0 195.5 497.4 128.5 128.5 0 0 0 213.6 496V574.2L195.4 597.2 177.4 574.1ZM357.5 695.3H304.8V613.3A16.7 16.7 0 0 0 271.4 613.3V695.3H119.4V613.3A16.7 16.7 0 0 0 86 613.3V695.3H33.4V613.7A151 151 0 0 1 125.2 476.7 129.3 129.3 0 0 0 144 486.7V585.8L182.3 634.8A16.7 16.7 0 0 0 208.6 634.8L246.9 585.7V486.7A130.2 130.2 0 0 0 265.7 476.7 151 151 0 0 1 357.4 613.6V695.2Z", + "width": 1000 + }, + "search": [ + "referred" + ] + }, + { + "uid": "dea3e85838e83dcd8818ec36186eae5e", + "css": "searchpatient", + "code": 59427, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M1142.7 436.8A165.2 165.2 0 1 0 1307.8 601.9 165.4 165.4 0 0 0 1142.7 436.8ZM1142.7 748.8A146.9 146.9 0 1 1 1289.5 601.9 147 147 0 0 1 1142.7 748.8ZM1023.4 611.1H1041.7A110.3 110.3 0 0 1 1151.8 500.9V482.7A128.5 128.5 0 0 0 1023.4 611.1ZM1476.4 883.8L1489.3 870.9 1357.9 739.4 1345 752.4 1322 729.4A219.8 219.8 0 1 0 1270.1 781.5L1293.1 804.5 1280.2 817.4 1411.4 948.7 1424.4 935.7 1306 817.3 1357.9 765.4ZM1142.6 803.9A201.9 201.9 0 1 1 1344.4 602 202.2 202.2 0 0 1 1142.6 803.9ZM1306 791.3L1284.7 770.1A220.1 220.1 0 0 0 1310.7 744.1L1332.1 765.5ZM48.4 483.9H301.4A96.4 96.4 0 0 1 276.7 346.5 305.1 305.1 0 0 1 257.9 242V201.9A201.8 201.8 0 0 1 564.5 29.2 201.8 201.8 0 0 1 871 201.8V205.4L887 177.2 957.1 299.9A80.6 80.6 0 0 1 869.7 417.5 96.9 96.9 0 0 1 785.1 499.2 226.4 226.4 0 0 1 645.1 662.2V696.2L780.4 720.8A267.4 267.4 0 0 1 999.9 983.9V999.9H129V983.9A267.4 267.4 0 0 1 348.6 720.8L483.9 696.2V662.3A224.6 224.6 0 0 1 396.8 602.3C396 602.6 395.1 602.8 394.3 603.1A92.8 92.8 0 0 1 352.9 612.9H48.4A48.4 48.4 0 0 1 0 564.5V532.3A48.4 48.4 0 0 1 48.4 483.9ZM339.5 465.8C339.3 461.1 338.7 456.4 338.7 451.7V341A64.2 64.2 0 0 0 339.5 465.7ZM838.7 242V201.9A169.6 169.6 0 0 0 575.1 60.8L564.5 67.8 554 60.8A169.6 169.6 0 0 0 290.3 201.9V242A273.2 273.2 0 0 0 302.3 322.2 96.2 96.2 0 0 1 338.7 308.1V282.4L403.2 234V193.5A32.3 32.3 0 0 1 435.5 161.3H693.5A32.3 32.3 0 0 1 725.8 193.5V233.9L790.3 282.3V307.8A95.4 95.4 0 0 1 810.8 313.9 78.8 78.8 0 0 1 817 299.9L837.7 263.6C838.3 256.4 838.7 249.2 838.7 242ZM435.5 290.4H693.6V193.5H435.5ZM887.9 387.2A47.6 47.6 0 0 0 929.3 316L887.1 242.2 844.9 315.9A47.6 47.6 0 0 0 887.6 387.1ZM838.8 403.2A79.4 79.4 0 0 1 807.4 348.1 63.2 63.2 0 0 0 790.4 340.9V451.6C790.4 456.4 790 461 789.7 465.7A64.5 64.5 0 0 0 838.7 403.2ZM758.1 451.6V298.4L725.9 274.3V290.4A32.3 32.3 0 0 1 693.6 322.6H435.5A32.3 32.3 0 0 1 403.2 290.4V274.3L370.9 298.4V451.6A195.2 195.2 0 0 0 374.4 486.4 92.8 92.8 0 0 1 394.3 493.6 60.6 60.6 0 0 0 421.2 500H532.2A48.4 48.4 0 1 1 532.2 596.7H436.9A193.2 193.2 0 0 0 758.1 451.6ZM532.3 564.5A16 16 0 1 0 532.3 532.2H516.3V564.5ZM637.4 727.6A80.7 80.7 0 0 1 583.6 771.6 64.5 64.5 0 0 0 645.1 806.4C680.7 806.4 709.6 784.7 709.6 758A36.4 36.4 0 0 0 704.4 739.7ZM161.8 967.8H967.2A235 235 0 0 0 774.7 752.6L740.8 746.5A64.8 64.8 0 0 1 742 758.2C742 802.6 698.6 838.8 645.2 838.8A102.9 102.9 0 0 1 564.6 802.6 102.9 102.9 0 0 1 483.9 838.7C430.4 838.7 387.1 802.5 387.1 758.1A65.9 65.9 0 0 1 388.2 746.4L354.4 752.6A235 235 0 0 0 161.8 967.8ZM424.5 739.9A36.4 36.4 0 0 0 419.4 758.2C419.4 784.9 448.3 806.6 483.9 806.6A64.5 64.5 0 0 0 545.4 771.8 80.6 80.6 0 0 1 491.6 727.7ZM516 677.4V693.4A48.4 48.4 0 1 0 612.8 693.4V672.1A222.6 222.6 0 0 1 516 672.1ZM379.6 574.3A92.9 92.9 0 0 1 421 564.5H483.9V532.3H421.2A93.1 93.1 0 0 1 379.9 522.5 60.6 60.6 0 0 0 352.9 516H129V580.5H352.9A60.6 60.6 0 0 0 379.9 574.3ZM32.1 564.5A16 16 0 0 0 48.1 580.5H96.8V516H48.4A16 16 0 0 0 32.4 532ZM524.9 368.4L507 341.5 
410.3 406.1 428.2 432.9ZM718.5 405.8L621.8 341.3 603.9 368.1 700.6 432.7Z", + "width": 1489 + }, + "search": [ + "searchpatient" + ] + }, + { + "uid": "5af6e4cb377acd0ad97c464f6813ab1a", + "css": "sync", + "code": 59428, + "src": "custom_icons", + "selected": true, + "svg": { + "path": "M1000 168.7V346.9A62.5 62.5 0 0 1 937.5 409.4H759.3A62.5 62.5 0 0 1 759.3 284.4H807.1A375.4 375.4 0 0 0 150.8 363.4 62.5 62.5 0 0 1 34.3 317.9 500.4 500.4 0 0 1 875 169.1V169.1A62.5 62.5 0 0 1 1000 169.1ZM930.4 601.1A62.5 62.5 0 0 0 849.4 636.5 375.4 375.4 0 0 1 197.4 721.8H240.7A62.5 62.5 0 1 0 240.7 596.8H62.5A62.5 62.5 0 0 0 0 659.3V837.5A62.5 62.5 0 0 0 125 837.5V830.9A503 503 0 0 0 215.2 910.8 500.4 500.4 0 0 0 966 682.2 62.5 62.5 0 0 0 930.3 601.2Z", + "width": 1000 + }, + "search": [ + "sync" + ] + }, { "uid": "740f78c2b53c8cc100a8b0d283bbd34f", "css": "home_icon-1", @@ -369,62 +523,6 @@ "search": [ "scdedule_icon_active" ] - }, - { - "uid": "5a324eddf382f5a1167d7d40325f82c8", - "css": "reject_icon", - "code": 59402, - "src": "custom_icons", - "selected": true, - "svg": { - "path": "M681 626.7L554.3 500 681 373.3A38.5 38.5 0 0 0 626.7 318.9L500 445.7 373.3 319A38.5 38.5 0 1 0 318.9 373.3L445.7 500 318.9 626.8A37.2 37.2 0 0 0 318.9 681.1 38.2 38.2 0 0 0 373.3 681.1L500 554.4 626.7 681.1A38.5 38.5 0 0 0 681.1 681.1 38.2 38.2 0 0 0 681 626.7ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A500 500 0 1 0 1000 500 500 500 0 0 0 500 0Z", - "width": 1000 - }, - "search": [ - "reject_icon" - ] - }, - { - "uid": "2742f64b5e69cc4f39a2dcc5a081ad03", - "css": "approved_icon", - "code": 59403, - "src": "custom_icons", - "selected": true, - "svg": { - "path": "M756.3 348.3L714 304.8A9.1 9.1 0 0 0 707.2 302H707.2A8.7 8.7 0 0 0 700.5 304.8L407.2 600.3 300.5 493.5A9.3 9.3 0 0 0 287.1 493.5L244.2 536.3A9.6 9.6 0 0 0 244.2 550L378.8 684.5A42.6 42.6 0 0 0 406.9 698.2 44.6 44.6 0 0 0 434.8 685H435L756.5 362A10.3 10.3 0 0 0 756.3 348.3ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A499.9 499.9 0 1 0 999.9 500 499.9 499.9 0 0 0 500 0Z", - "width": 1000 - }, - "search": [ - "approved_icon" - ] - }, - { - "uid": "148de09f7fd22c378cdfdbaacaa8e205", - "css": "pending_icon", - "code": 59404, - "src": "custom_icons", - "selected": true, - "svg": { - "path": "M809.8 932H797.3C781.5 932 768.2 929.8 766.5 914V913.7C731 583.9 523.3 577.6 523.3 499.9S731.1 415.9 766.5 86V85.9C768.2 70.1 781.5 68 797.3 68H809.8A34.8 34.8 0 0 0 844.7 35.7 34 34 0 0 0 810.7 0H35A34.8 34.8 0 0 0 0 32.3 34 34 0 0 0 34 68H47.2C63 68 76.3 70.2 78.1 85.9V86.2C113.7 416 321.5 422.3 321.5 500S113.7 584 78.3 913.8V914C76.6 929.8 63.3 932 47.5 932H35A34.8 34.8 0 0 0 0.1 964.3 34 34 0 0 0 34.1 1000H810.7A33.9 33.9 0 0 0 844.7 964.3 34.8 34.8 0 0 0 809.8 932ZM197.4 848.9C267 655.2 390.6 678.7 390.6 602.3V467.5C390.6 420.4 301.2 387.6 245.4 311.1A19 19 0 0 1 261.2 281H583.8A18.9 18.9 0 0 1 600 310.6C545.2 387.3 454.2 420.4 454.2 467.4V602.4C454.2 678.1 572.9 657.5 647.8 849 654.7 866.3 649.5 887.8 631.2 887.8H214.1C195.4 887.8 191.1 866.5 197.4 849Z", - "width": 845 - }, - "search": [ - "pending_icon" - ] - }, - { - "uid": "0bbb324cc39e62b3a4e05639a4f4008f", - "css": "home_icon", - "code": 59394, - "src": "custom_icons", - "selected": true, - "svg": { - "path": "M973.1 435L973.1 435 565 27A92 92 0 0 0 434.9 27L27.2 434.7 26.8 435.1A92 92 0 0 0 88 591.9C88.9 591.9 89.9 591.9 90.8 591.9H107.1V892.1A107.9 107.9 0 0 0 214.8 999.9H374.4A29.3 29.3 0 0 0 403.7 970.6V735.3A49.2 49.2 0 0 1 452.9 686.2H547A49.2 49.2 0 0 1 596.2 
735.3V970.7A29.3 29.3 0 0 0 625.5 1000H785.1A107.9 107.9 0 0 0 892.8 892.3V592H907.8A92.1 92.1 0 0 0 973 434.9ZM931.6 523.7A33.4 33.4 0 0 1 907.9 533.5H863.5A29.3 29.3 0 0 0 834.2 562.8V892.3A49.2 49.2 0 0 1 785.1 941.4H654.9V735.3A107.9 107.9 0 0 0 547 627.5H452.9A107.9 107.9 0 0 0 345.1 735.2V941.3H215A49.2 49.2 0 0 1 165.8 892.1V562.8A29.3 29.3 0 0 0 136.5 533.5H92.8L91.4 533.5A33.4 33.4 0 0 1 68.4 476.4H68.4L476.3 68.4A33.4 33.4 0 0 1 523.6 68.4L931.4 476.2 931.6 476.4A33.4 33.4 0 0 1 931.6 523.7ZM931.6 523.7", - "width": 1000 - }, - "search": [ - "home_icon" - ] } ] } \ No newline at end of file diff --git a/lib/icons_app/doctor_app_icons.dart b/lib/icons_app/doctor_app_icons.dart index 272f3539..9180c929 100644 --- a/lib/icons_app/doctor_app_icons.dart +++ b/lib/icons_app/doctor_app_icons.dart @@ -11,7 +11,7 @@ /// fonts: /// - asset: fonts/DoctorApp.ttf /// -/// +/// /// import 'package:flutter/widgets.dart'; @@ -21,50 +21,31 @@ class DoctorApp { static const _kFontFam = 'DoctorApp'; static const _kFontPkg = null; - static const IconData femaleicon = - IconData(0xe800, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData male = - IconData(0xe801, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData home_icon = - IconData(0xe802, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData home_icon_active = - IconData(0xe803, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData menu_icon = - IconData(0xe804, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData menu_icon_active = - IconData(0xe805, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData message_icon = - IconData(0xe806, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData message_icon_active = - IconData(0xe807, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData scdedule_icon_active = - IconData(0xe808, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData schedule_icon = - IconData(0xe809, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData discharge_patient = - IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData in_patient_white = - IconData(0xe80d, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData lab_results = - IconData(0xe80e, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData operations = - IconData(0xe813, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData out_patient = - IconData(0xe814, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData patient = - IconData(0xe815, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData qr_code = - IconData(0xe816, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData radiology = - IconData(0xe817, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData referral = - IconData(0xe818, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData search_patient = - IconData(0xe81a, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData reject_icon = - IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData approved_icon = - IconData(0xe80b, fontFamily: _kFontFam, fontPackage: _kFontPkg); - static const IconData pending_icon = - IconData(0xe80c, fontFamily: _kFontFam, fontPackage: _kFontPkg); + static const IconData femaleicon = IconData(0xe800, fontFamily: _kFontFam, 
fontPackage: _kFontPkg);
+  static const IconData male = IconData(0xe801, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData reject_icon = IconData(0xe802, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData home_icon_active = IconData(0xe803, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData menu_icon = IconData(0xe804, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData menu_icon_active = IconData(0xe805, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData message_icon = IconData(0xe806, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData message_icon_active = IconData(0xe807, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData scdedule_icon_active = IconData(0xe808, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData schedule_icon = IconData(0xe809, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData discharge_patient = IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData approved_icon = IconData(0xe80b, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData pending_icon = IconData(0xe80c, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData in_patient_white = IconData(0xe80d, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData lab_results = IconData(0xe80e, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData home_icon = IconData(0xe80f, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData operations = IconData(0xe813, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData out_patient = IconData(0xe814, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData patient = IconData(0xe815, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData radiology = IconData(0xe817, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData mail = IconData(0xe81e, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData medicinesearch = IconData(0xe81f, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData qr_code = IconData(0xe820, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData referral = IconData(0xe821, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData referred = IconData(0xe822, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData search_patient = IconData(0xe823, fontFamily: _kFontFam, fontPackage: _kFontPkg);
+  static const IconData sync_icon = IconData(0xe824, fontFamily: _kFontFam, fontPackage: _kFontPkg);
 }
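The config.json entries and these Dart constants have to stay in step: each JSON "code" value is the same codepoint in decimal (for example 59428 == 0xe824, the sync glyph). A quick usage sketch, assuming nothing beyond the constants above and standard Flutter:

```dart
import 'package:flutter/widgets.dart';
import 'package:doctor_app_flutter/icons_app/doctor_app_icons.dart';

// A remapped glyph is used like any other IconData constant;
// 0xe824 here is the decimal 59428 declared for "sync" in config.json.
Widget syncIndicator() => Icon(DoctorApp.sync_icon, size: 18.0);
```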
diff --git a/lib/models/livecare/end_call_req.dart b/lib/models/livecare/end_call_req.dart
new file mode 100644
index 00000000..7a1ae8eb
--- /dev/null
+++ b/lib/models/livecare/end_call_req.dart
@@ -0,0 +1,28 @@
+class EndCallReq {
+  int vCID;
+  String tokenID;
+  String generalid;
+  int doctorId;
+  bool isDestroy;
+
+  EndCallReq(
+      {this.vCID, this.tokenID, this.generalid, this.doctorId, this.isDestroy});
+
+  EndCallReq.fromJson(Map<String, dynamic> json) {
+    vCID = json['VC_ID'];
+    tokenID = json['TokenID'];
+    generalid = json['generalid'];
+    doctorId = json['DoctorId'];
+    isDestroy = json['IsDestroy'];
+  }
+
+  Map<String, dynamic> toJson() {
+    final Map<String, dynamic> data = new Map<String, dynamic>();
+    data['VC_ID'] = this.vCID;
+    data['TokenID'] = this.tokenID;
+    data['generalid'] = this.generalid;
+    data['DoctorId'] = this.doctorId;
+    data['IsDestroy'] = this.isDestroy;
+    return data;
+  }
+}
diff --git a/lib/models/livecare/start_call_res.dart b/lib/models/livecare/start_call_res.dart
new file mode 100644
index 00000000..0566d76d
--- /dev/null
+++ b/lib/models/livecare/start_call_res.dart
@@ -0,0 +1,32 @@
+class StartCallRes {
+  String result;
+  String openSessionID;
+  String openTokenID;
+  bool isAuthenticated;
+  int messageStatus;
+
+  StartCallRes(
+      {this.result,
+      this.openSessionID,
+      this.openTokenID,
+      this.isAuthenticated,
+      this.messageStatus});
+
+  StartCallRes.fromJson(Map<String, dynamic> json) {
+    result = json['Result'];
+    openSessionID = json['OpenSessionID'];
+    openTokenID = json['OpenTokenID'];
+    isAuthenticated = json['IsAuthenticated'];
+    messageStatus = json['MessageStatus'];
+  }
+
+  Map<String, dynamic> toJson() {
+    final Map<String, dynamic> data = new Map<String, dynamic>();
+    data['Result'] = this.result;
+    data['OpenSessionID'] = this.openSessionID;
+    data['OpenTokenID'] = this.openTokenID;
+    data['IsAuthenticated'] = this.isAuthenticated;
+    data['MessageStatus'] = this.messageStatus;
+    return data;
+  }
+}
diff --git a/lib/models/livecare/transfer_to_admin.dart b/lib/models/livecare/transfer_to_admin.dart
new file mode 100644
index 00000000..841f5e7d
--- /dev/null
+++ b/lib/models/livecare/transfer_to_admin.dart
@@ -0,0 +1,36 @@
+class TransferToAdminReq {
+  int vCID;
+  String tokenID;
+  String generalid;
+  int doctorId;
+  bool isOutKsa;
+  String notes;
+
+  TransferToAdminReq(
+      {this.vCID,
+      this.tokenID,
+      this.generalid,
+      this.doctorId,
+      this.isOutKsa,
+      this.notes});
+
+  TransferToAdminReq.fromJson(Map<String, dynamic> json) {
+    vCID = json['VC_ID'];
+    tokenID = json['TokenID'];
+    generalid = json['generalid'];
+    doctorId = json['DoctorId'];
+    isOutKsa = json['IsOutKsa'];
+    notes = json['Notes'];
+  }
+
+  Map<String, dynamic> toJson() {
+    final Map<String, dynamic> data = new Map<String, dynamic>();
+    data['VC_ID'] = this.vCID;
+    data['TokenID'] = this.tokenID;
+    data['generalid'] = this.generalid;
+    data['DoctorId'] = this.doctorId;
+    data['IsOutKsa'] = this.isOutKsa;
+    data['Notes'] = this.notes;
+    return data;
+  }
+}
diff --git a/lib/models/pharmacies/pharmacies_List_request_model.dart b/lib/models/pharmacies/pharmacies_List_request_model.dart
index 70cda5d1..33588345 100644
--- a/lib/models/pharmacies/pharmacies_List_request_model.dart
+++ b/lib/models/pharmacies/pharmacies_List_request_model.dart
@@ -1,3 +1,4 @@
+
 /*
 *@author: Ibrahim Albitar
 *@Date:27/4/2020
@@ -25,7 +26,7 @@ class PharmaciesListRequestModel {
       this.languageID = 2,
       this.stamp = '2020-04-23T21:01:21.492Z',
       this.ipAdress = '11.11.11.11',
-      this.versionID = 1.2,
+      this.versionID = 5.3,
       this.tokenID,
       this.sessionID = 'e29zoooEJ4',
       this.isLoginForDoctorApp = true,
@@ -61,4 +62,4 @@ class PharmaciesListRequestModel {
     data['Channel'] = this.channel;
     return data;
   }
-}
+}
\ No newline at end of file
diff --git a/lib/models/pharmacies/pharmacies_items_request_model.dart b/lib/models/pharmacies/pharmacies_items_request_model.dart
index 84fc56ce..9602a803 100644
--- a/lib/models/pharmacies/pharmacies_items_request_model.dart
+++ b/lib/models/pharmacies/pharmacies_items_request_model.dart
@@ -10,7 +10,7 @@ class PharmaciesItemsRequestModel {
   String pHRItemName;
   int pageIndex = 0;
   int pageSize = 20;
-  double versionID = 5.2;
+  double versionID = 5.3;
   int channel = 3;
   int languageID = 2;
   String iPAdress = "10.20.10.20";
@@ -24,7 +24,7 @@ class PharmaciesItemsRequestModel {
       {this.pHRItemName,
       this.pageIndex = 0,
       this.pageSize = 20,
-      this.versionID = 5.2,
+      this.versionID = 5.3,
       this.channel = 3,
       this.languageID = 2,
       this.iPAdress = "10.20.10.20",
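A short serialization sketch for the new LiveCare models (field names come from the classes above; the sample values are made up):

```dart
import 'package:doctor_app_flutter/models/livecare/end_call_req.dart';

void main() {
  // Build the payload that the provider posts to END_CALL.
  final req = EndCallReq(
      vCID: 12345,      // sample call id
      doctorId: 678,    // sample doctor id
      generalid: '...', // filled in by the provider
      isDestroy: true); // the provider passes its isPatient flag here
  final body = req.toJson(); // {'VC_ID': 12345, 'DoctorId': 678, ...}
  // Round-trips back into a model.
  assert(EndCallReq.fromJson(body).vCID == req.vCID);
}
```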
diff --git a/lib/providers/livecare_provider.dart b/lib/providers/livecare_provider.dart
index 20ee8db0..7afb11af 100644
--- a/lib/providers/livecare_provider.dart
+++ b/lib/providers/livecare_provider.dart
@@ -2,9 +2,12 @@ import 'dart:convert';

 import 'package:doctor_app_flutter/client/base_app_client.dart';
 import 'package:doctor_app_flutter/config/config.dart';
+import 'package:doctor_app_flutter/models/livecare/end_call_req.dart';
 import 'package:doctor_app_flutter/models/livecare/get_panding_req_list.dart';
 import 'package:doctor_app_flutter/models/livecare/get_pending_res_list.dart';
 import 'package:doctor_app_flutter/models/livecare/start_call_req.dart';
+import 'package:doctor_app_flutter/models/livecare/start_call_res.dart';
+import 'package:doctor_app_flutter/models/livecare/transfer_to_admin.dart';
 import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart';
 import 'package:doctor_app_flutter/util/helpers.dart';
 import 'package:flutter/cupertino.dart';
@@ -14,7 +17,9 @@ class LiveCareProvider with ChangeNotifier {
   DrAppSharedPreferances sharedPref = new DrAppSharedPreferances();

   List liveCarePendingList = [];
-  var inCallResponse = {};
+  StartCallRes inCallResponse;
+  var transferToAdminResponse = {};
+  var endCallResponse = {};
   bool isFinished = true;
   bool hasError = false;
   String errorMsg = '';
@@ -45,27 +50,76 @@ class LiveCareProvider with ChangeNotifier {
     return Future.value(liveCarePendingList);
   }

-  Future startCall(request, bool isReCall) async {
+  Future<StartCallRes> startCall(request, bool isReCall) async {
     var profile = await sharedPref.getObj(DOCTOR_PROFILE);
     resetDefaultValues();
     /* the request model is not the same, hence mapped manually */
     var newRequest = new StartCallReq();
     newRequest.clinicId = profile["ClinicID"];
-    newRequest.vCID = request["VC_ID"];
+    newRequest.vCID = request.vCID;
     newRequest.isrecall = isReCall;
     newRequest.doctorId = profile["DoctorID"];
-    newRequest.isOutKsa = request["IsOutKSA"];
+    newRequest.isOutKsa = request.isOutKSA;
     newRequest.projectName = profile["ProjectName"];
     newRequest.docotrName = profile["DoctorName"];
     newRequest.clincName = profile["ClinicDescription"];
-    newRequest.clincName = profile["ClinicDescription"];
     newRequest.docSpec = profile["DoctorTitleForProfile"];
     newRequest.generalid = 'Cs2020@2016\$2958';
     isFinished = false;
     await BaseAppClient.post(START_LIVECARE_CALL,
         onSuccess: (response, statusCode) async {
       isFinished = true;
-      inCallResponse = response;
+      inCallResponse = StartCallRes.fromJson(response);
     }, onFailure: (String error, int statusCode) {
       isFinished = true;
       throw error;
     }, body: newRequest.toJson());
     return Future.value(inCallResponse);
   }
+
+  transferToAdmin(request, notes) async {
+    var profile = await sharedPref.getObj(DOCTOR_PROFILE);
+    var newRequest = new TransferToAdminReq();
+    newRequest.doctorId = profile["DoctorID"];
+    newRequest.isOutKsa = request.isOutKSA;
+    newRequest.generalid = 'Cs2020@2016\$2958';
+    newRequest.vCID = request.vCID;
+    newRequest.notes = notes;
+    await BaseAppClient.post(TRANSFER_TO_ADMIN,
+        onSuccess: (response, statusCode) async {
+      isFinished = true;
+      transferToAdminResponse = response;
+    }, onFailure: (String error, int statusCode) {
+      isFinished = true;
+      throw error;
+    }, body: newRequest.toJson());
+    return Future.value(transferToAdminResponse);
+  }
+
+  endCall(request, isPatient, doctorID) async {
+    var newRequest = new EndCallReq();
+    newRequest.doctorId = doctorID;
+    newRequest.generalid = 'Cs2020@2016\$2958';
+    newRequest.vCID = request.vCID;
+    newRequest.isDestroy = isPatient;
+    await BaseAppClient.post(END_CALL,
+        onSuccess: (response, statusCode) async {
+      isFinished = true;
+      endCallResponse = response;
+    }, onFailure: (String error, int statusCode) {
+      isFinished = true;
+      throw error;
+    }, body: newRequest.toJson());
+    return Future.value(endCallResponse);
+  }
+
+  endCallWithCharge(vcID, doctorID) async {
+    var newRequest = new EndCallReq();
+    newRequest.vCID = vcID;
+    newRequest.doctorId = doctorID;
+    newRequest.generalid = 'Cs2020@2016\$2958';
+    await BaseAppClient.post(END_CALL_WITH_CHARGE,
+        onSuccess: (response, statusCode) async {
+      isFinished = true;
+      endCallResponse = response;
     }, onFailure: (String error, int statusCode) {
       isFinished = true;
       throw error;
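A sketch of how a call screen might drive the new provider methods (the wrapper function and the shape of the pending-call item are assumptions; the method names and argument order come from the provider above):

```dart
// Sketch under assumptions: `call` is an item from liveCarePendingList
// exposing vCID/isOutKSA, as startCall above already expects.
Future<void> wrapUp(LiveCareProvider provider, dynamic call, int doctorId,
    {bool withCharge = false}) async {
  if (withCharge) {
    // Completes the visit and bills the patient.
    await provider.endCallWithCharge(call.vCID, doctorId);
  } else {
    // `true` is the isPatient flag forwarded as IsDestroy in the payload.
    await provider.endCall(call, true, doctorId);
  }
}
```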
diff --git a/lib/providers/medicine_provider.dart b/lib/providers/medicine_provider.dart
index 873d28a7..8a0bef74 100644
--- a/lib/providers/medicine_provider.dart
+++ b/lib/providers/medicine_provider.dart
@@ -15,16 +15,15 @@ class MedicineProvider with ChangeNotifier {
   String errorMsg = '';

   PharmaciesItemsRequestModel _itemsRequestModel =
-      PharmaciesItemsRequestModel();
+      PharmaciesItemsRequestModel();
   PharmaciesListRequestModel _listRequestModel = PharmaciesListRequestModel();
-
-  clearPharmacyItemsList(){
+  clearPharmacyItemsList() {
     pharmacyItemsList.clear();
     notifyListeners();
   }

-  getMedicineItem(String itemName) async {
+  getMedicineItem(String itemName) async {
     _itemsRequestModel.pHRItemName = itemName;
     resetDefaultValues();
     pharmacyItemsList.clear();
@@ -32,37 +31,37 @@
     try {
       await BaseAppClient.post(PHARMACY_ITEMS_URL,
           onSuccess: (dynamic response, int statusCode) {
-        pharmacyItemsList = response['ListPharmcy_Region'];
-        hasError = false;
-        isFinished = true;
-        errorMsg = "Done";
-      }, onFailure: (String error, int statusCode) {
-        isFinished = true;
-        hasError = true;
-        errorMsg = error;
-      }, body: _itemsRequestModel.toJson());
+        pharmacyItemsList = response['ListPharmcy_Region_enh'];
+        hasError = false;
+        isFinished = true;
+        errorMsg = "Done";
+      }, onFailure: (String error, int statusCode) {
+        isFinished = true;
+        hasError = true;
+        errorMsg = error;
+      }, body: _itemsRequestModel.toJson());
       notifyListeners();
     } catch (error) {
       throw error;
     }
   }

-  getPharmaciesList(int itemId) async {
+  getPharmaciesList(int itemId) async {
     resetDefaultValues();
     try {
       _listRequestModel.itemID = itemId;
       isFinished = true;
       await BaseAppClient.post(PHARMACY_LIST_URL,
           onSuccess: (dynamic response, int statusCode) {
-        pharmaciesList = response['PharmList'];
-        hasError = false;
-        isFinished = true;
-        errorMsg = "Done";
-      }, onFailure: (String error, int statusCode) {
-        isFinished = true;
-        hasError = true;
-        errorMsg = error;
-      }, body: _listRequestModel.toJson());
+        pharmaciesList = response['PharmList'];
+        hasError = false;
+        isFinished = true;
+        errorMsg = "Done";
+      }, onFailure: (String error, int statusCode) {
+        isFinished = true;
+        hasError = true;
+        errorMsg = error;
+      }, body: _listRequestModel.toJson());
       notifyListeners();
     } catch (error) {
       throw error;
diff --git a/lib/routes.dart b/lib/routes.dart
index 9280d95e..d820e83e 100644
--- a/lib/routes.dart
+++ b/lib/routes.dart
@@ -72,8 +72,9 @@ const String PATIENT_INSURANCE_APPROVALS =
 const String VITAL_SIGN_DETAILS = 'patients/vital-sign-details';
 const String BODY_MEASUREMENTS = 'patients/body-measurements';
 const String IN_PATIENT_PRESCRIPTIONS_DETAILS = 'patients/prescription-details';
-const String VIDEO_CALL = 'video-call';
+// const VIDEO_CALL = 'video-call';
 const String
LIVECARE_PENDING_LIST = 'livecare-pendinglist'; +// const String LIVECARE_END_DIALOG = 'video-call/EndCallDialogBox'; var routes = { ROOT: (_) => RootPage(), HOME: (_) => LandingPage(), @@ -108,6 +109,7 @@ var routes = { VITAL_SIGN_DETAILS: (_) => VitalSignDetailsScreen(), BODY_MEASUREMENTS: (_) => VitalSignItemDetailsScreen(), IN_PATIENT_PRESCRIPTIONS_DETAILS: (_) => InpatientPrescriptionDetailsScreen(), - VIDEO_CALL: (_) => VideoCallPage(), - LIVECARE_PENDING_LIST: (_) => LiveCarePandingListScreen() + // VIDEO_CALL: (_) => VideoCallPage(patientData: null), + LIVECARE_PENDING_LIST: (_) => LiveCarePandingListScreen(), + // LIVECARE_END_DIALOG: (_) => EndCallDialogBox() }; diff --git a/lib/screens/dashboard_screen.dart b/lib/screens/dashboard_screen.dart index 8743e6cb..4e2dbfc6 100644 --- a/lib/screens/dashboard_screen.dart +++ b/lib/screens/dashboard_screen.dart @@ -1,12 +1,9 @@ -import 'dart:convert'; - import 'package:doctor_app_flutter/config/shared_pref_kay.dart'; import 'package:doctor_app_flutter/config/size_config.dart'; import 'package:doctor_app_flutter/icons_app/doctor_app_icons.dart'; import 'package:doctor_app_flutter/models/doctor/clinic_model.dart'; import 'package:doctor_app_flutter/models/doctor/doctor_profile_model.dart'; import 'package:doctor_app_flutter/models/doctor/profile_req_Model.dart'; -import 'package:doctor_app_flutter/models/livecare/session_status_model.dart'; import 'package:doctor_app_flutter/providers/auth_provider.dart'; import 'package:doctor_app_flutter/providers/doctor_reply_provider.dart'; import 'package:doctor_app_flutter/providers/hospital_provider.dart'; @@ -14,27 +11,22 @@ import 'package:doctor_app_flutter/providers/medicine_provider.dart'; import 'package:doctor_app_flutter/providers/project_provider.dart'; import 'package:doctor_app_flutter/providers/referral_patient_provider.dart'; import 'package:doctor_app_flutter/providers/referred_patient_provider.dart'; -import 'package:doctor_app_flutter/screens/medicine/medicine_search_screen.dart'; -import 'package:doctor_app_flutter/util/VideoChannel.dart'; import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart'; import 'package:doctor_app_flutter/util/helpers.dart'; import 'package:doctor_app_flutter/util/translations_delegate_base.dart'; import 'package:doctor_app_flutter/widgets/shared/app_scaffold_widget.dart'; -import 'package:doctor_app_flutter/widgets/shared/user-guid/app_showcase.dart'; -import 'package:doctor_app_flutter/widgets/shared/user-guid/app_showcase_widget.dart'; import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; +import 'package:hexcolor/hexcolor.dart'; import 'package:percent_indicator/circular_percent_indicator.dart'; import 'package:provider/provider.dart'; import '../routes.dart'; -import '../widgets/dashboard/dashboard_item_icons_texts.dart'; -import '../widgets/dashboard/dashboard_item_texts_widget.dart'; import '../widgets/shared/app_texts_widget.dart'; -import '../widgets/shared/rounded_container_widget.dart'; import 'doctor/doctor_reply_screen.dart'; import 'doctor/my_referral_patient_screen.dart'; import 'doctor/my_referred_patient_screen.dart'; +import 'medicine/medicine_search_screen.dart'; DrAppSharedPreferances sharedPref = new DrAppSharedPreferances(); Helpers helpers = Helpers(); @@ -86,444 +78,759 @@ class _DashboardScreenState extends State { child: AppScaffold( isShowAppBar: false, isLoading: isLoading, - body: SingleChildScrollView( - child: SizedBox( - height: MediaQuery.of(context).size.height * 1.3, - child: Column( - 
children: [ - Row( - mainAxisAlignment: MainAxisAlignment.spaceBetween, - mainAxisSize: MainAxisSize.max, - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - Expanded( - flex: 1, - child: Container( - margin: EdgeInsets.only(left: 10, top: 10), - child: AppText( - authProvider.selectedClinicName != null - ? authProvider.selectedClinicName - : authProvider.doctorProfile.clinicDescription, - fontWeight: FontWeight.bold, - fontSize: SizeConfig.textMultiplier * 2.5, - ), - alignment: projectsProvider.isArabic - ? Alignment.topRight - : Alignment.topLeft, - ), - ), - Row( - mainAxisAlignment: MainAxisAlignment.start, - mainAxisSize: MainAxisSize.max, - crossAxisAlignment: CrossAxisAlignment.end, - children: [ - InkWell( - onTap: () async { - showCupertinoPicker( - decKey: '',context: context, - actionList: projectsProvider.doctorClinicsList); - }, - child: Container( - margin: - EdgeInsets.only(left: 5, top: 10, right: 5), - child: Icon( - Icons.settings, - size: SizeConfig.textMultiplier * 2.5, - )), - ), - ], - ), - InkWell( - onTap: () { - ShowCaseWidget.of(context).startShowCase([_one]); - }, - child: Container( - margin: EdgeInsets.only(left: 5, top: 10, right: 5), - child: Icon( - Icons.info, - size: SizeConfig.textMultiplier * 2.5, - )), - ) - ]), - Container( - margin: EdgeInsets.only(left: 10, top: 5), - child: AppText( - TranslationBase.of(context).todayStatistics, - fontWeight: FontWeight.bold, - ), - alignment: projectsProvider.isArabic - ? Alignment.topRight - : Alignment.topLeft, - ), - Expanded( - flex: 3, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 2, - child: AppShowcase( - key: _one, - description: "Test User Guid", - onSkipClick: () { - ShowCaseWidget.of(context).dismiss(); - }, - child: RoundedContainer( - child: CircularPercentIndicator( - radius: 100, - animation: true, - animationDuration: 1200, - lineWidth: 7.0, - percent: .75, - center: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - AppText("38", + body: ListView(children: [ + Column( + children: [ + Stack(children: [ + Column( + children: [ + Container( + height: 150, + color: Hexcolor('#515B5D'), + width: double.infinity, + child: FractionallySizedBox( + widthFactor: 0.9, + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisAlignment: MainAxisAlignment.start, + children: [ + Row( + children: [ + AppText( + "Welcome", + fontSize: SizeConfig.textMultiplier * 2, + color: Colors.white, + ) + ], + ), + Row( + mainAxisAlignment: MainAxisAlignment.start, + children: [ + AppText( + 'Dr. ${authProvider.doctorProfile + .doctorName}', fontWeight: FontWeight.bold, - fontSize: SizeConfig.textMultiplier * 4), - AppText( - TranslationBase.of(context).outPatients, - fontWeight: FontWeight.normal, - fontSize: SizeConfig.textMultiplier * 1.1, - color: Colors.grey[800]), + fontSize: SizeConfig.textMultiplier * 2.5, + color: Colors.white, + ) + ], + ), + Row( + mainAxisAlignment: + MainAxisAlignment.spaceBetween, + children: [ + Container( + child: AppText( + authProvider.selectedClinicName != + null + ? authProvider.selectedClinicName + : authProvider.doctorProfile + .clinicDescription, + fontSize: + SizeConfig.textMultiplier * 2, + color: Colors.white, + ), + alignment: projectsProvider.isArabic + ? 
Alignment.topRight + : Alignment.topLeft, + ), + Row( + mainAxisAlignment: + MainAxisAlignment.start, + mainAxisSize: MainAxisSize.max, + crossAxisAlignment: + CrossAxisAlignment.start, + children: [ + InkWell( + onTap: () async { + showCupertinoPicker( + decKey: '', + context: context, + actionList: projectsProvider + .doctorClinicsList); + }, + child: Container( + margin: EdgeInsets.only( + left: 5, top: 5, right: 10), + child: Icon( + DoctorApp.sync_icon, + + color: Colors.white, + size: SizeConfig + .textMultiplier * + 1.8, + )), + ), + ], + ), + ]) + ], + ), + Expanded( + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.end, + + children: [ + Container( + decoration: BoxDecoration( + color: Theme + .of(context) + .backgroundColor, + borderRadius: BorderRadius.all( + Radius.circular(10.0), + ), + ), + height: 50, + width: 60, + child: Image.network( + authProvider.doctorProfile + .doctorImageURL, +// fit: BoxFit.fill, + ), + ), ], ), - circularStrokeCap: CircularStrokeCap.butt, - backgroundColor: Colors.blueGrey[100], - progressColor: Colors.red, ), - ), + ], ), ), - Expanded( - flex: 2, - child: Padding( - padding: EdgeInsets.all(8), - child: Row( - crossAxisAlignment: CrossAxisAlignment.start, + ), + Container( + color: Colors.white, + height: 150, + ), + ], + ), + Positioned( + right: 0.0, + left: 0, + bottom: 40, + child: Container( + decoration: BoxDecoration( + color: Hexcolor("#DED8CF"), + borderRadius: BorderRadius.all( + Radius.circular(10.0), + ), + ), + margin: EdgeInsets.only(left: 10, right: 10), + height: 140, + width: double.infinity, + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Divider(), + Padding( + padding: const EdgeInsets.only( + top: 8.0, bottom: 8, right: 40, left: 8), + child: CircularPercentIndicator( + radius: 100, + animation: true, + animationDuration: 1200, + lineWidth: 7.0, + percent: .75, + center: Column( + mainAxisAlignment: MainAxisAlignment.center, children: [ - Expanded( - flex: 1, - child: Column( - crossAxisAlignment: - CrossAxisAlignment.stretch, - children: [ - Expanded( - child: DashboardItemTexts( + AppText("38", + fontSize: + SizeConfig.textMultiplier * 3.7, + color: Hexcolor('#5D4C35'), + fontWeight: FontWeight.bold,), + AppText(TranslationBase + .of(context) + .outPatients, + fontWeight: FontWeight.normal, + fontSize: SizeConfig.textMultiplier * 1.4, + color: Hexcolor('#5D4C35'), + ), + ], + ), + circularStrokeCap: CircularStrokeCap.butt, + backgroundColor: Colors.blueGrey[100], + progressColor: Hexcolor('#B8382C'), + ), + ), + Container( + child: Table( + defaultColumnWidth: FixedColumnWidth( + MediaQuery.of(context).size.width / 5), +// border: TableBorder.all( +// color: Colors.black26, +// width: 1, +// style: BorderStyle.solid), + border: TableBorder.symmetric( + inside: BorderSide( + width: 0.5, + color: Hexcolor('#5D4C35'), + ), + ), + children: [ + TableRow( + children: [ + TableCell( + child: Center( + child: Column( + children: [ + AppText( TranslationBase.of(context).arrived, - "23", + fontSize: + SizeConfig.textMultiplier * 1.5, + color: Hexcolor('#5D4C35'), ), - ), - Expanded( - child: DashboardItemTexts( - TranslationBase.of(context) - .notArrived, + AppText( "23", + fontSize: + SizeConfig.textMultiplier * 2.7, + color: Hexcolor('#5D4C35'), + fontWeight: FontWeight.bold, + ), + SizedBox( + height: 6, + ), + ], + ), + )), + TableCell( + child: Column( + children: [ + AppText( + 
TranslationBase.of(context).er, + fontSize: + SizeConfig.textMultiplier * 1.5, + color: Hexcolor('#5D4C35'), ), + AppText( + "03", + fontSize: + SizeConfig.textMultiplier * 2.7, + color: Hexcolor('#5D4C35'), + fontWeight: FontWeight.bold, + ), + SizedBox( + height: 6, + ), + ], + ), + ), + ], +// + ), + TableRow(children: [ + TableCell( + child: Column( + children: [ + SizedBox( + height: 6, + ), + AppText( + TranslationBase.of(context).notArrived, + fontSize: + SizeConfig.textMultiplier * 1.5, + color: Hexcolor('#5D4C35'), + ), + AppText( + "15", + fontSize: + SizeConfig.textMultiplier * 2.7, + color: Hexcolor('#5D4C35'), + fontWeight: FontWeight.bold, ), ], ), ), - Expanded( - child: Column( - crossAxisAlignment: - CrossAxisAlignment.stretch, - children: [ - Expanded( - child: DashboardItemTexts( - TranslationBase.of(context).er, - "23", - )), - Expanded( - child: DashboardItemTexts( - TranslationBase.of(context).walkIn, - "23", - )), - ], - )), - ], - ), - )) - ], - ), - ), - Expanded( - flex: 2, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 2, - child: new DashboardItemIconText( - DoctorApp.lab_results, - "08", - TranslationBase.of(context).labResult, - backgroundColor: Colors.black45, + TableCell( + child: Column( + children: [ + SizedBox( + height: 6, + ), + AppText( + TranslationBase.of(context).walkIn, + fontSize: + SizeConfig.textMultiplier * 1.5, + color: Hexcolor('#5D4C35'), + ), + AppText( + "04", + fontSize: + SizeConfig.textMultiplier * 2.7, + color: Hexcolor('#5D4C35'), + fontWeight: FontWeight.bold, + ), + ], + ), + ), + ]), + ], + ), ), - ), - Expanded( - flex: 2, - child: new DashboardItemIconText( - DoctorApp.radiology, - "10", - TranslationBase.of(context).radiology, - backgroundColor: Colors.black45, - )), - Expanded( - flex: 2, - child: new DashboardItemIconText( - DoctorApp.referral, - "05", - TranslationBase.of(context).referral, - backgroundColor: Colors.black45, - )), - ], - ), - ), - Expanded( - flex: 2, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 2, - child: InkWell( - onTap: () {}, - child: new DashboardItemIconText( - DoctorApp.in_patient_white, - "23", - TranslationBase.of(context).outPatients, - showBorder: true, - backgroundColor: Colors.red[900], - iconColor: Colors.white), - )), - Expanded( - flex: 2, - child: new DashboardItemIconText( - //widget.iconURL + 'operations.svg', - DoctorApp.operations, - "23", - TranslationBase.of(context).inPatient, - showBorder: true, - backgroundColor: Colors.red[900], - )), - ], - ), - ), - Expanded( - flex: 1, - child: Container( - margin: EdgeInsets.all(10), - child: AppText( - TranslationBase.of(context).patientServices, - fontWeight: FontWeight.bold, + Divider(), + ], ), - alignment: projectsProvider.isArabic - ? 
Alignment.topRight - : Alignment.topLeft, ), - ), - Expanded( - flex: 2, - child: Row( + ) + ]), + FractionallySizedBox( + widthFactor: 0.95, + child: Container( + color: Colors.white, + child: Column( children: [ - Expanded( - flex: 2, - child: InkWell( - child: DashboardItemIconText( - DoctorApp.search_patient, - "", - TranslationBase.of(context).searchPatient, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Container( + margin: EdgeInsets.only(top: 10), + child: Icon( + DoctorApp.lab_results, + size: 40, + color: Colors.white, + ), + ), + Container( + margin: EdgeInsets.only(bottom: 10), + child: Column( + children: [ + AppText( + "08", + fontSize: SizeConfig.textMultiplier * 6, + color: Colors.white, + ), + AppText( + TranslationBase + .of(context) + .labResult, + color: Colors.white, + ) + ], + ), + ), + + ], + ), + imageName: '1.png', + opacity: 0.82, ), - onTap: () { - Navigator.of(context).pushNamed(PATIENT_SEARCH); - }, - ), - ), - Expanded( - flex: 2, - child: InkWell( - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - builder: (context) => ChangeNotifierProvider( - create: (_) => DoctorReplyProvider(), - child: DoctorReplyScreen(), + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Container( + child: Icon( + DoctorApp.radiology, + size: 40, + color: Colors.white, + ), ), - ), - ); - }, - child: DashboardItemIconText( - DoctorApp.message_icon, - "", - TranslationBase.of(context).doctorReply, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + Container( + child: Column( + children: [ + AppText( + "10", + fontSize: SizeConfig.textMultiplier * 6, + color: Colors.white, + ), + AppText( + TranslationBase + .of(context) + .radiology, + color: Colors.white, + ) + ], + ), + ), + + ], + ), + imageName: '2.png', + opacity: 0.9, ), - ), + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Container( + margin: EdgeInsets.only(top: 10), + child: Icon( + DoctorApp.referral, + size: 40, + color: Colors.white, + ), + ), + + Container( + margin: EdgeInsets.only(bottom: 10), + + child: Column( + children: [ + AppText( + "05", + fontSize: SizeConfig.textMultiplier * 6, + color: Colors.white, + ), + AppText( + TranslationBase + .of(context) + .referral, + color: Colors.white, + ) + ], + ), + ), + + ], + ), + imageName: '3.png', + opacity: 0.9, + + ), + ], ), - ], - ), - ), - Expanded( - flex: 2, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 1, - child: InkWell( - child: DashboardItemIconText( - DoctorApp.search_patient, - "", - TranslationBase.of(context).searchMedicine, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + SizedBox( + height: 15, + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + DashboardItem( + 
child: Padding( + padding: const EdgeInsets.all(8.0), + child: Row( + mainAxisAlignment: + MainAxisAlignment.spaceBetween, + children: [ + Column( + children: [ + Icon( + DoctorApp.in_patient_white, + size: 40, + color: Colors.white, + ), + ], + ), + Column( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + AppText( + "14", + fontSize: SizeConfig.textMultiplier * 6, + color: Colors.white, + ), + AppText( + TranslationBase + .of(context) + .inPatient, + color: Colors.white, + ) + ], + ) + ], + ), + ), + imageName: '4.png', + color: Hexcolor('#B8382C'), + hasBorder: false, + width: MediaQuery + .of(context) + .size + .width * 0.45, + height: MediaQuery + .of(context) + .orientation == Orientation.portrait + ? MediaQuery + .of(context) + .size + .height * 0.13: + MediaQuery + .of(context) + .size + .height * 0.25, ), - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - builder: (context) => ChangeNotifierProvider( - create: (_) => MedicineProvider(), - child: MedicineSearchScreen(), - ), + DashboardItem( + child: Padding( + padding: const EdgeInsets.all(8.0), + child: Row( + mainAxisAlignment: + MainAxisAlignment.spaceBetween, + children: [ + Column( + children: [ + Icon( + DoctorApp.operations, + size: 40, + color: Colors.white, + ), + ], + ), + Column( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + AppText( + "07", + fontSize: SizeConfig.textMultiplier * 6, + color: Colors.white, + ), + AppText( + TranslationBase + .of(context) + .operations, + color: Colors.white, + ) + ], + ) + ], ), - ); - }, - ), + ), + imageName: '5.png', + color: Hexcolor('#B8382C'), + hasBorder: false, + width: MediaQuery + .of(context) + .size + .width * 0.45, + height: MediaQuery + .of(context) + .orientation == Orientation.portrait + ? MediaQuery + .of(context) + .size + .height * 0.13: + MediaQuery + .of(context) + .size + .height * 0.25, + ), + ], ), - Expanded( - flex: 1, - child: InkWell( - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - builder: (context) => ChangeNotifierProvider( - create: (_) => MyReferralPatientProvider(), - child: MyReferralPatient(), - ), - ), - ); - }, - child: DashboardItemIconText( - DoctorApp.referral, - "", - TranslationBase.of(context).myReferralPatient, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + SizedBox( + height: 15, + ), + Row( + children: [ + AppText( + TranslationBase + .of(context) + .patientServices, + fontSize: SizeConfig.textMultiplier * 3, ), - ), + ], ), - ], - ), - ), - Expanded( - flex: 2, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 1, - child: InkWell( + SizedBox( + height: 10, + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Center( + child: Icon( + DoctorApp.search_patient, + size: 50, + color: Colors.black, + ), + ), + AppText( + TranslationBase + .of(context) + .searchPatient, + color: Colors.black, + ) + ], + ), + hasBorder: true, + onTap: () { + Navigator.of(context).pushNamed(PATIENT_SEARCH); + }, + ), + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Icon( + DoctorApp.mail, + size: 50, + color: Colors.black, + ), + AppText( + TranslationBase + .of(context) + .doctorReply, + 
color: Colors.black, + ) + ], + ), + hasBorder: true, onTap: () { Navigator.push( context, MaterialPageRoute( - builder: (context) => ChangeNotifierProvider( - create: (_) => MyReferredPatientProvider(), - child: MyReferredPatient(), - ), + builder: (context) => + ChangeNotifierProvider( + create: (_) => DoctorReplyProvider(), + child: DoctorReplyScreen(), + ), ), ); }, - child: DashboardItemIconText( - DoctorApp.referral, - "", - TranslationBase.of(context).myReferredPatient, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + ), + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Icon( + DoctorApp.medicinesearch, + size: 50, + color: Colors.black, + ), + AppText( + TranslationBase + .of(context) + .searchMedicine, + color: Colors.black, + textAlign: TextAlign.center, + ) + ], ), + hasBorder: true, + onTap: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => + ChangeNotifierProvider( + create: (_) => MedicineProvider(), + child: MedicineSearchScreen(), + ), + ), + ); + }, ), - ), - Expanded( - flex: 1, - child: InkWell( + ], + ), + SizedBox( + height: 10, + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Icon( + DoctorApp.qr_code, + size: 50, + color: Colors.black, + ), + AppText( + TranslationBase + .of(context) + .qrReader, + color: Colors.black, + textAlign: TextAlign.center, + ) + ], + ), +// imageName: '1.png', + hasBorder: true, onTap: () { Navigator.of(context).pushNamed(QR_READER); }, - child: DashboardItemIconText( - DoctorApp.qr_code, - "", - TranslationBase.of(context).qrReader, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, - ), ), - ), - ], - )), - Expanded( - flex: 2, - child: Row( - crossAxisAlignment: CrossAxisAlignment.stretch, - children: [ - Expanded( - flex: 1, - child: InkWell( + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Icon( + DoctorApp.referral, + size: 50, + color: Colors.black, + ), + AppText( + TranslationBase + .of(context) + .myReferralPatient, + textAlign: TextAlign.center, + color: Colors.black, + ) + ], + ), + hasBorder: true, onTap: () { - // Navigator.of(context).pushNamed(VIDEO_CALL); - Navigator.of(context) - .pushNamed(LIVECARE_PENDING_LIST); + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => + ChangeNotifierProvider( + create: (_) => + MyReferralPatientProvider(), + child: MyReferralPatient(), + ), + ), + ); }, - child: DashboardItemIconText( - DoctorApp.referral, - "", - TranslationBase.of(context).livecare, - showBorder: true, - backgroundColor: Colors.white, - valueFontColor: Colors.black, - titleFontColor: Colors.black, - iconColor: Colors.black, - titleFontSize: SizeConfig.textMultiplier * 2, + ), + DashboardItem( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + Icon( + DoctorApp.referred, + size: 50, + color: Colors.black, + ), + AppText( + TranslationBase + .of(context) + .myReferredPatient, + color: Colors.black, + textAlign: TextAlign.center, + ) + ], ), + hasBorder: true, + onTap: () { + 
Navigator.push( + context, + MaterialPageRoute( + builder: (context) => + ChangeNotifierProvider( + create: (_) => + MyReferredPatientProvider(), + child: MyReferredPatient(), + ), + ), + ); + }, ), - ), - Expanded(flex: 1, child: Container()) - ], - )) - ], - ), + ], + ), + SizedBox( + height: 20, + ), + ], + ), + ), + ), + ], ), - ), + ]), ), ); } @@ -548,17 +855,21 @@ class _DashboardScreenState extends State { mainAxisAlignment: MainAxisAlignment.end, children: [ CupertinoButton( - child: Text(TranslationBase.of(context).cancel - // style: TextStyle(context) - ), + child: AppText(TranslationBase + .of(context) + .cancel + // style: TextStyle(context) + ), onPressed: () { Navigator.pop(context); }, ), CupertinoButton( - child: Text(TranslationBase.of(context).done - // style: textStyle(context), - ), + child: AppText(TranslationBase + .of(context) + .done + // style: textStyle(context), + ), onPressed: () { Navigator.pop(context); // onSelectFun(cupertinoPickerIndex); @@ -577,12 +888,13 @@ class _DashboardScreenState extends State { child: InkWell( onTap: () => changeClinic(e.clinicID, context), - child: Text( + child: AppText( e.clinicName, - style: TextStyle( - fontSize: - SizeConfig.textMultiplier * - 1.9), + fontSize: + SizeConfig.textMultiplier * + 1.9, + + )), ), )) @@ -623,3 +935,66 @@ class _DashboardScreenState extends State { }); } } +// TODO Move to it file +class DashboardItem extends StatelessWidget { + const DashboardItem({ + this.hasBorder = false, + this.imageName, + @required this.child, + this.onTap, + Key key, + this.width, + this.height, + this.color, + this.opacity = 0.4 + }) : super(key: key); + final bool hasBorder; + final String imageName; + final Widget child; + final Function onTap; + final double width; + final double height; + final Color color; + final double opacity; + + @override + Widget build(BuildContext context) { + return InkWell( + onTap: onTap, + child: Container( + width: width != null ? width : MediaQuery + .of(context) + .size + .width * 0.29, + height: height != null ? height : MediaQuery + .of(context) + .orientation == Orientation.portrait ? MediaQuery + .of(context) + .size + .height * 0.19 : MediaQuery + .of(context) + .size + .height * 0.35, + + decoration: BoxDecoration( + color: !hasBorder ? color != null ? color : Hexcolor('#050705') + .withOpacity(opacity) : Colors + .white, + borderRadius: BorderRadius.circular(6.0), + border: hasBorder ? Border.all( + width: 1.0, color: const Color(0xffcccccc)) : Border.all( + width: 0.0, color: Colors.transparent), + image: imageName != null ? 
DecorationImage( + image: AssetImage('assets/images/dashboard/${imageName}'), + fit: BoxFit.cover, + colorFilter: new ColorFilter.mode( + Colors.black.withOpacity(0.2), BlendMode.dstIn), + + ) : null, + + ), + child: Center(child: child,), + ), + ); + } +} diff --git a/lib/screens/live_care/panding_list.dart b/lib/screens/live_care/panding_list.dart index 7aea2fcd..ad129d55 100644 --- a/lib/screens/live_care/panding_list.dart +++ b/lib/screens/live_care/panding_list.dart @@ -1,5 +1,6 @@ import 'package:doctor_app_flutter/config/size_config.dart'; import 'package:doctor_app_flutter/providers/livecare_provider.dart'; +import 'package:doctor_app_flutter/screens/live_care/video_call.dart'; import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart'; import 'package:doctor_app_flutter/util/helpers.dart'; import 'package:doctor_app_flutter/widgets/shared/app_scaffold_widget.dart'; @@ -204,12 +205,20 @@ class _LiveCarePandingListState extends State { .green, //Colors.black, onPressed: () => { _isInit = true, - sharedPref.setObj( - LIVE_CARE_PATIENT, - item), - Navigator.of(context) - .pushNamed( - VIDEO_CALL) + // sharedPref.setObj( + // LIVE_CARE_PATIENT, + // item), + // Navigator.of(context) + // .pushNamed( + // VIDEO_CALL, + // item) + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => + VideoCallPage( + item, + context))) }, ), ) @@ -250,3 +259,13 @@ class _LiveCarePandingListState extends State { ); } } + +MyGlobals myGlobals = new MyGlobals(); + +class MyGlobals { + GlobalKey _scaffoldKey; + MyGlobals() { + _scaffoldKey = GlobalKey(); + } + GlobalKey get scaffoldKey => _scaffoldKey; +} diff --git a/lib/screens/live_care/video_call.dart b/lib/screens/live_care/video_call.dart index 4d7d4170..c4c59af1 100644 --- a/lib/screens/live_care/video_call.dart +++ b/lib/screens/live_care/video_call.dart @@ -1,16 +1,25 @@ import 'dart:async'; +import 'package:doctor_app_flutter/models/livecare/get_pending_res_list.dart'; import 'package:doctor_app_flutter/models/livecare/session_status_model.dart'; +import 'package:doctor_app_flutter/models/livecare/start_call_res.dart'; import 'package:doctor_app_flutter/providers/livecare_provider.dart'; +import 'package:doctor_app_flutter/screens/live_care/panding_list.dart'; import 'package:doctor_app_flutter/util/VideoChannel.dart'; import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart'; +import 'package:doctor_app_flutter/util/translations_delegate_base.dart'; import 'package:flutter/material.dart'; import 'package:doctor_app_flutter/config/shared_pref_kay.dart'; import 'package:provider/provider.dart'; import 'package:doctor_app_flutter/util/dr_app_toast_msg.dart'; import 'package:doctor_app_flutter/util/helpers.dart'; +import '../../routes.dart'; class VideoCallPage extends StatefulWidget { + final LiveCarePendingListResponse patientData; + final listContext; + VideoCallPage(this.patientData, this.listContext); + @override _VideoCallPageState createState() => _VideoCallPageState(); } @@ -24,65 +33,67 @@ class _VideoCallPageState extends State { LiveCareProvider _liveCareProvider; bool _isInit = true; var _tokenData; - var patientData = {}; + bool isTransfer = false; String image_url = 'https://hmgwebservices.com/Images/MobileImages/DUBAI/'; //bool _isOutOfStuck = false; Helpers helpers = new Helpers(); + var doctorprofile = {}; + var notes; @override void didChangeDependencies() { super.didChangeDependencies(); if (_isInit) { _liveCareProvider = Provider.of(context); - startCall(); + startCall(false); } _isInit = false; } - 
void connectOpenTok(tokenData) async { + void connectOpenTok(StartCallRes tokenData) async { _tokenData = tokenData; + //var profile = await sharedPref.getObj(DOCTOR_PROFILE); + + var token = await sharedPref.getString(TOKEN); + doctorprofile = await sharedPref.getObj(DOCTOR_PROFILE); + /* opentok functionalites need to be written */ await VideoChannel.openVideoCallScreen( - kToken: - 'T1==cGFydG5lcl9pZD00NjgwMzIyNCZzaWc9NWRhNmExMzU4ZDViZGU3OTA5NDY4ODRhNzI4ZGUxZTRmMjZmNzcwMjpzZXNzaW9uX2lkPTFfTVg0ME5qZ3dNekl5Tkg1LU1UVTVNelk0TXpZek9EWXdNMzV1Y0V4V1lWUlZTbTVIY3k5dVdHWm1NMWxPYTNjelpIVi1mZyZjcmVhdGVfdGltZT0xNTkzNjgzNjYyJm5vbmNlPTAuODAxMzMzMzUxMDQwNzE5NSZyb2xlPXB1Ymxpc2hlciZleHBpcmVfdGltZT0xNTk2Mjc1NjYyJmluaXRpYWxfbGF5b3V0X2NsYXNzX2xpc3Q9', - kSessionId: - '1_MX40NjgwMzIyNH5-MTU5MzY4MzYzODYwM35ucExWYVRVSm5Hcy9uWGZmM1lOa3czZHV-fg', - kApiKey: '46803224', - vcId: 3245, - tokenID: "hfkjshdf347r8743", + kToken: tokenData.openTokenID, + + //'T1==cGFydG5lcl9pZD00NjgwMzIyNCZzaWc9NWRhNmExMzU4ZDViZGU3OTA5NDY4ODRhNzI4ZGUxZTRmMjZmNzcwMjpzZXNzaW9uX2lkPTFfTVg0ME5qZ3dNekl5Tkg1LU1UVTVNelk0TXpZek9EWXdNMzV1Y0V4V1lWUlZTbTVIY3k5dVdHWm1NMWxPYTNjelpIVi1mZyZjcmVhdGVfdGltZT0xNTkzNjgzNjYyJm5vbmNlPTAuODAxMzMzMzUxMDQwNzE5NSZyb2xlPXB1Ymxpc2hlciZleHBpcmVfdGltZT0xNTk2Mjc1NjYyJmluaXRpYWxfbGF5b3V0X2NsYXNzX2xpc3Q9', + kSessionId: tokenData.openSessionID, + //'1_MX40NjgwMzIyNH5-MTU5MzY4MzYzODYwM35ucExWYVRVSm5Hcy9uWGZmM1lOa3czZHV-fg', + kApiKey: '46209962', + vcId: widget.patientData.vCID, + tokenID: token, //"hfkjshdf347r8743", generalId: "Cs2020@2016\$2958", - doctorId: 1485, + doctorId: doctorprofile['DoctorID'], onFailure: (String error) { //TODO handling Failure + //changeRoute(context); }, onCallEnd: () { //TODO handling onCallEnd + WidgetsBinding.instance.addPostFrameCallback((_) { + changeRoute(context); + }); }, onCallNotRespond: (SessionStatusModel sessionStatusModel) { //TODO handling onCalcallNotRespondlEnd + WidgetsBinding.instance.addPostFrameCallback((_) { + changeRoute(context); + }); }); } - String getTimerTime(int start) { - int minutes = (start ~/ 60); - String sMinute = ''; - if (minutes.toString().length == 1) { - sMinute = '0' + minutes.toString(); - } else - sMinute = minutes.toString(); - - int seconds = (start % 60); - String sSeconds = ''; - if (seconds.toString().length == 1) { - sSeconds = '0' + seconds.toString(); - } else - sSeconds = seconds.toString(); - - return sMinute + ':' + sSeconds; - } + startCall(bool isRecall) async { + //patientData = await sharedPref.getObj(LIVE_CARE_PATIENT); + _liveCareProvider.startCall(widget.patientData, isRecall).then((result) { + // //startTimmer(); + setState(() { + _start = 1; + }); - startCall() async { - patientData = await sharedPref.getObj(LIVE_CARE_PATIENT); - _liveCareProvider.startCall(patientData, false).then((result) { connectOpenTok(result); }).catchError((error) => {helpers.showErrorToast(error), Navigator.of(context).pop()}); @@ -96,134 +107,231 @@ class _VideoCallPageState extends State { @override Widget build(BuildContext context) { return Scaffold( - body: SafeArea( - child: Container( - height: MediaQuery.of(context).size.height, - width: MediaQuery.of(context).size.width, - decoration: BoxDecoration( - color: Colors.white, + body: Container( + height: MediaQuery.of(context).size.height, + width: MediaQuery.of(context).size.width, + decoration: BoxDecoration( + color: Colors.white, + ), + padding: EdgeInsets.all(50.0), + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + mainAxisSize: MainAxisSize.max, + crossAxisAlignment: 
CrossAxisAlignment.center, + children: [ + SizedBox( + height: 10.0, + ), - padding: EdgeInsets.all(50.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.start, - mainAxisSize: MainAxisSize.max, - crossAxisAlignment: CrossAxisAlignment.center, - children: [ - SizedBox( - height: 10.0, - ), - Text( - 'Dailing...', - style: TextStyle( - color: Colors.deepPurpleAccent, - fontWeight: FontWeight.w300, - fontSize: 15), - ), - SizedBox( - height: MediaQuery.of(context).size.height * 0.02, - ), - Text( - patientData["PatientName"], - style: TextStyle( - color: Colors.deepPurpleAccent, - fontWeight: FontWeight.w900, - fontSize: 20), - ), - SizedBox( - height: MediaQuery.of(context).size.height * 0.02, - ), - Container( - child: Text( - _timmer == '' ? 'Connecting' : 'Connected', - style: TextStyle( - color: Colors.deepPurpleAccent, - fontWeight: FontWeight.w300, - fontSize: 15), - )), - SizedBox( - height: MediaQuery.of(context).size.height * 0.02, - ), - ClipRRect( - borderRadius: BorderRadius.circular(200.0), - child: Image.network( - patientData["Gender"] == "1" - ? image_url + 'unkown.png' - : image_url + 'unkowwn_female.png', - height: 200.0, - width: 200.0, - ), - ), - SizedBox( - height: MediaQuery.of(context).size.height * .2, - ), - Container( - width: 70.0, - height: 70.0, - child: FloatingActionButton( - onPressed: () { - Navigator.of(context).pop(); - }, - elevation: 30.0, - shape: CircleBorder(side: BorderSide(color: Colors.red)), - mini: false, - child: Icon( - Icons.call_end, - color: Colors.red, - size: 35, - ), - backgroundColor: Colors.red[100], - )) - ], + Text( + _start == 0 ? 'Dialing' : 'Connected', + style: TextStyle( + color: Colors.deepPurpleAccent, + fontWeight: FontWeight.w300, + fontSize: 15), + ), + SizedBox( + height: MediaQuery.of(context).size.height * 0.02, + ), + Text( + widget.patientData.patientName, + style: TextStyle( + color: Colors.deepPurpleAccent, + fontWeight: FontWeight.w900, + fontSize: 20), + ), - ), + SizedBox( + height: MediaQuery.of(context).size.height * 0.02, + ), + Container( + child: Text( + _start == 0 ? 'Connecting...' 
: _timmer.toString(), + style: TextStyle( + color: Colors.deepPurpleAccent, + fontWeight: FontWeight.w300, + fontSize: 15), + )), + SizedBox( + height: MediaQuery.of(context).size.height * 0.02, + ), + ClipRRect( + borderRadius: BorderRadius.circular(200.0), + child: Image.network( + image_url + 'unkown.png', + height: 200.0, + width: 200.0, + ), + ), + SizedBox( + height: MediaQuery.of(context).size.height * .2, + ), + Container( + width: 70.0, + height: 70.0, + child: FloatingActionButton( + onPressed: () { + Navigator.of(context).pop(); + }, + elevation: 30.0, + shape: CircleBorder(side: BorderSide(color: Colors.red)), + mini: false, + child: Icon( + Icons.call_end, + color: Colors.red, + size: 35, + ), + backgroundColor: Colors.red[100], + )) + ], ), - ); + )); + } + + changeRoute(con) async { + // await Future.delayed(Duration(seconds: 1), () { + _showAlert(con); + //}); } -} -class FunctionalButton extends StatefulWidget { - final title; - final icon; - final Function() onPressed; + _showAlert(BuildContext context) async { + await showDialog( + context: context, + builder: (dialogContex) => AlertDialog(content: StatefulBuilder( + builder: (BuildContext context, StateSetter setState) { + return Container( + height: MediaQuery.of(context).size.height * 0.7, + width: MediaQuery.of(context).size.width * .9, + child: Stack( + fit: StackFit.loose, + overflow: Overflow.visible, + children: [ + Positioned( + right: -40.0, + top: -40.0, + child: InkResponse( + onTap: () { + Navigator.of(context, rootNavigator: true) + .pop('dialog'); + Navigator.of(context).pop(); + }, + child: CircleAvatar( + child: Icon(Icons.close), + backgroundColor: Colors.red, + ), + ), + ), + Center( + child: Column( + crossAxisAlignment: CrossAxisAlignment.center, + mainAxisSize: MainAxisSize.min, + children: [ + Padding( + padding: EdgeInsets.all(8.0), + child: RaisedButton( + onPressed: () => {endCall()}, + child: + Text(TranslationBase.of(context).endcall), + color: Colors.red, + textColor: Colors.white, + )), + Padding( + padding: EdgeInsets.all(8.0), + child: RaisedButton( + onPressed: () => {resumeCall()}, + child: + Text(TranslationBase.of(context).resumecall), + color: Colors.green[900], + textColor: Colors.white, + ), + ), + Padding( + padding: EdgeInsets.all(8.0), + child: RaisedButton( + onPressed: () => {endCallWithCharge()}, + child: Text(TranslationBase.of(context) + .endcallwithcharge), + textColor: Colors.white, + ), + ), + Padding( + padding: EdgeInsets.all(8.0), + child: RaisedButton( + onPressed: () => { + setState(() => {isTransfer = true}) + }, + child: Text( + TranslationBase.of(context).transfertoadmin), + color: Colors.yellow[900], + ), + ), + isTransfer == true + ? 
Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text('Notes'), + Padding( + padding: EdgeInsets.all(5.0), + child: TextField( + maxLines: 3, + controller: notes, + decoration: InputDecoration.collapsed( + hintText: + "Enter your notes here"), + )), + Center( + child: RaisedButton( + onPressed: () => + {this.transferToAdmin(notes)}, + child: Text('Transfer'), + color: Colors.yellow[900], + )) + ], + ) + : SizedBox() + ], + )) + ], + )); + }))); + Navigator.pop(context); + } - const FunctionalButton({Key key, this.title, this.icon, this.onPressed}) - : super(key: key); + resumeCall() { + closeRoute(); + startCall(true); + } - @override - _FunctionalButtonState createState() => _FunctionalButtonState(); -} + transferToAdmin(notes) { + closeRoute(); + _liveCareProvider + .transfterToAdmin(widget.patientData, notes) + .then((result) { + connectOpenTok(result); + }).catchError((error) => + {helpers.showErrorToast(error), Navigator.of(context).pop()}); + } -class _FunctionalButtonState extends State { - @override - Widget build(BuildContext context) { - return Column( - mainAxisAlignment: MainAxisAlignment.start, - crossAxisAlignment: CrossAxisAlignment.center, - mainAxisSize: MainAxisSize.min, - children: [ - RawMaterialButton( - onPressed: widget.onPressed, - splashColor: Colors.deepPurpleAccent, - fillColor: Colors.white, - elevation: 10.0, - shape: CircleBorder(), - child: Padding( - padding: const EdgeInsets.all(15.0), - child: Icon( - widget.icon, - size: 30.0, - color: Colors.deepPurpleAccent, - ), - ), - ), - Container( - margin: EdgeInsets.symmetric(vertical: 10.0, horizontal: 2.0), - child: Text( - widget.title, - style: TextStyle(fontSize: 15.0, color: Colors.deepPurpleAccent), - ), - ) - ], - ); + endCall() { + closeRoute(); + _liveCareProvider + .endCall(widget.patientData, false, doctorprofile['DoctorID']) + .then((result) { + print(result); + }).catchError((error) => + {helpers.showErrorToast(error), Navigator.of(context).pop()}); + } + + endCallWithCharge() { + _liveCareProvider + .endCallWithCharge(widget.patientData.vCID, doctorprofile['DoctorID']) + .then((result) { + closeRoute(); + print('end callwith charge'); + print(result); + }).catchError((error) => + {helpers.showErrorToast(error), Navigator.of(context).pop()}); + } + + closeRoute() { + Navigator.of(context).pop(); } } diff --git a/lib/screens/medicine/medicine_search_screen.dart b/lib/screens/medicine/medicine_search_screen.dart index af1eb11d..48a53af3 100644 --- a/lib/screens/medicine/medicine_search_screen.dart +++ b/lib/screens/medicine/medicine_search_screen.dart @@ -13,8 +13,14 @@ import 'package:doctor_app_flutter/widgets/shared/app_texts_widget.dart'; import 'package:doctor_app_flutter/widgets/shared/dr_app_circular_progress_Indeicator.dart'; import 'package:flutter/material.dart'; import 'package:provider/provider.dart'; + import '../../util/extenstions.dart'; import 'package:doctor_app_flutter/util/translations_delegate_base.dart'; +import 'package:permission_handler/permission_handler.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; +import 'dart:math'; DrAppSharedPreferances sharedPref = DrAppSharedPreferances(); @@ -30,20 +36,55 @@ class _MedicineSearchState extends State { var data; final myController = TextEditingController(); Helpers helpers = new Helpers(); - + bool _hasSpeech = false; MedicineProvider _medicineProvider; - + String 
_currentLocaleId = ""; bool _isInit = true; - + final SpeechToText speech = SpeechToText(); + String lastStatus = ''; + // String lastWords; + List _localeNames = []; + String lastError; + double level = 0.0; + double minSoundLevel = 50000; + double maxSoundLevel = -50000; + String reconizedWord; @override void didChangeDependencies() { super.didChangeDependencies(); if (_isInit) { _medicineProvider = Provider.of(context); + requestPermissions(); + initSpeechState(); } _isInit = false; } + void requestPermissions() async { + Map statuses = await [ + Permission.microphone, + ].request(); + } + + Future initSpeechState() async { + bool hasSpeech = await speech.initialize( + onError: errorListener, onStatus: statusListener); + // if (hasSpeech) { + // _localeNames = await speech.locales(); + + // var systemLocale = await speech.systemLocale(); + _currentLocaleId = TranslationBase.of(context).locale.languageCode == 'en' + ? 'en-GB' + : 'ar-SA'; // systemLocale.localeId; + // } + + if (!mounted) return; + + setState(() { + _hasSpeech = hasSpeech; + }); + } + @override Widget build(BuildContext context) { return AppScaffold( @@ -64,6 +105,18 @@ class _MedicineSearchState extends State { searchMedicine(context); }, textInputAction: TextInputAction.search, + prefix: IconButton( + icon: Icon(Icons.mic), + color: + lastStatus == 'listening' ? Colors.red : Colors.grey, + onPressed: () { + myController.text = ''; + setState(() { + lastStatus = 'listening'; + }); + + startVoiceSearch(); + }), inputFormatter: ONLY_LETTERS), ), Container( @@ -131,7 +184,7 @@ class _MedicineSearchState extends State { ["ItemDescription"], url: _medicineProvider.pharmacyItemsList[index] - ["ProductImageBase64"], + ["ImageThumbUrl"], ), onTap: () { Navigator.push( @@ -166,4 +219,62 @@ class _MedicineSearchState extends State { } _medicineProvider.getMedicineItem(myController.text); } + + startVoiceSearch() { + // lastWords = ""; + lastError = ""; + speech.listen( + onResult: resultListener, + listenFor: Duration(seconds: 10), + localeId: _currentLocaleId, + onSoundLevelChange: soundLevelListener, + cancelOnError: true, + partialResults: true, + onDevice: true, + listenMode: ListenMode.confirmation); + setState(() {}); + } + + void resultListener(SpeechRecognitionResult result) { + setState(() { + // lastWords = "${result.recognizedWords} - ${result.finalResult}"; + reconizedWord = result.recognizedWords; + lastStatus = ''; + myController.text = reconizedWord; + Future.delayed(const Duration(seconds: 2), () { + searchMedicine(context); + }); + }); + } + + void errorListener(SpeechRecognitionError error) { + // print("Received error status: $error, listening: ${speech.isListening}"); + setState(() { + lastError = "${error.errorMsg} - ${error.permanent}"; + }); + } + + void statusListener(String status) { + // print( + // "Received listener status: $status, listening: ${speech.isListening}"); + setState(() { + lastStatus = status; + }); + } + + // _switchLang(selectedVal) { + // setState(() { + // _currentLocaleId = selectedVal; + // }); + // print(selectedVal); + // } + + void soundLevelListener(double level) { + minSoundLevel = min(minSoundLevel, level); + maxSoundLevel = max(maxSoundLevel, level); + // print("sound level $level: $minSoundLevel - $maxSoundLevel "); + setState(() { + this.level = level; + }); + } } diff --git a/lib/util/translations_delegate_base.dart b/lib/util/translations_delegate_base.dart index ad52edd8..a8821dda 100644 --- a/lib/util/translations_delegate_base.dart +++ 
b/lib/util/translations_delegate_base.dart @@ -235,6 +235,12 @@ class TranslationBase { String get beingGreat => localizedValues['beingGreat'][locale.languageCode]; String get cancel => localizedValues['cancel'][locale.languageCode]; String get done => localizedValues['done'][locale.languageCode]; + String get resumecall => localizedValues['resumecall'][locale.languageCode]; + String get endcallwithcharge => + localizedValues['endcallwithcharge'][locale.languageCode]; + String get endcall => localizedValues['endcall'][locale.languageCode]; + String get transfertoadmin => + localizedValues['transfertoadmin'][locale.languageCode]; } class TranslationBaseDelegate extends LocalizationsDelegate { diff --git a/lib/widgets/auth/login_form.dart b/lib/widgets/auth/login_form.dart index a4b60be7..b87cc800 100644 --- a/lib/widgets/auth/login_form.dart +++ b/lib/widgets/auth/login_form.dart @@ -273,7 +273,7 @@ class _LoginFormState extends State { "ProjectName": "", "DoctorImageURL": "UNKNOWN", "LogInTokenID": preRes['LogInTokenID'], - "VersionID": 1.2 + "VersionID": 5.3 }; authProv.insertDeviceImei(imeiInfo).then((res) { if (res['MessageStatus'] == 1) { diff --git a/lib/widgets/medicine/medicine_item_widget.dart b/lib/widgets/medicine/medicine_item_widget.dart index ad15ddab..d3a3830c 100644 --- a/lib/widgets/medicine/medicine_item_widget.dart +++ b/lib/widgets/medicine/medicine_item_widget.dart @@ -47,8 +47,8 @@ class _MedicineItemWidgetState extends State { width: 39, child: ClipRRect( borderRadius: BorderRadius.all(Radius.circular(7)), - child: Image.memory( - dataFromBase64String(widget.url), + child: Image.network( + widget.url, height: SizeConfig.imageSizeMultiplier * 15, width: SizeConfig.imageSizeMultiplier * 15, fit: BoxFit.cover, diff --git a/lib/widgets/shared/app_text_form_field.dart b/lib/widgets/shared/app_text_form_field.dart index b0b9a8ec..efe8d389 100644 --- a/lib/widgets/shared/app_text_form_field.dart +++ b/lib/widgets/shared/app_text_form_field.dart @@ -8,61 +8,63 @@ import 'package:flutter/services.dart'; // DESCRIPTION : Custom Text Form Field for app. class AppTextFormField extends FormField { - - AppTextFormField({ - FormFieldSetter onSaved, - String inputFormatter, - FormFieldValidator validator, - ValueChanged onChanged, - GestureTapCallback onTap, - TextEditingController controller, - bool autovalidate = true, - TextInputType textInputType, - String hintText, - FocusNode focusNode, - TextInputAction textInputAction, - ValueChanged onFieldSubmitted, - }) : super( - onSaved: onSaved, - validator: validator, - autovalidate: autovalidate, - builder: (FormFieldState state) { - return Column( - children: [ - TextFormField( - focusNode: focusNode, - keyboardType: textInputType, - inputFormatters: [WhitelistingTextInputFormatter(RegExp(inputFormatter)),], - onChanged: onChanged?? (value){ - state.didChange(value); - }, - textInputAction: textInputAction, - onFieldSubmitted: onFieldSubmitted, - decoration: InputDecoration( - hintText: hintText, - hintStyle: TextStyle(fontSize: SizeConfig.textMultiplier * 2), - enabledBorder: OutlineInputBorder( - borderRadius: BorderRadius.all(Radius.circular(10)), - borderSide: BorderSide(color: Color(0xff707070)), - ), - focusedBorder: OutlineInputBorder( - borderRadius: BorderRadius.all(Radius.circular(10)), - ) - //BorderRadius.all(Radius.circular(20)); - ), - onTap: onTap, - controller: controller, - ), - state.hasError? 
- Text( - state.errorText, - style: TextStyle( - color: Colors.red - ), - ) : - Container() - ], - ); - } - ); + AppTextFormField( + {FormFieldSetter onSaved, + String inputFormatter, + FormFieldValidator validator, + ValueChanged onChanged, + GestureTapCallback onTap, + TextEditingController controller, + bool autovalidate = true, + TextInputType textInputType, + String hintText, + FocusNode focusNode, + TextInputAction textInputAction, + ValueChanged onFieldSubmitted, + IconButton prefix}) + : super( + onSaved: onSaved, + validator: validator, + autovalidate: autovalidate, + builder: (FormFieldState state) { + return Column( + children: [ + TextFormField( + focusNode: focusNode, + keyboardType: textInputType, + inputFormatters: [ + WhitelistingTextInputFormatter(RegExp(inputFormatter)), + ], + onChanged: onChanged ?? + (value) { + state.didChange(value); + }, + textInputAction: textInputAction, + onFieldSubmitted: onFieldSubmitted, + decoration: InputDecoration( + hintText: hintText, + suffixIcon: prefix, + hintStyle: + TextStyle(fontSize: SizeConfig.textMultiplier * 2), + enabledBorder: OutlineInputBorder( + borderRadius: BorderRadius.all(Radius.circular(10)), + borderSide: BorderSide(color: Color(0xff707070)), + ), + focusedBorder: OutlineInputBorder( + borderRadius: BorderRadius.all(Radius.circular(10)), + ) + //BorderRadius.all(Radius.circular(20)); + ), + onTap: onTap, + controller: controller, + ), + state.hasError + ? Text( + state.errorText, + style: TextStyle(color: Colors.red), + ) + : Container() + ], + ); + }); } diff --git a/lib/widgets/shared/bottom_navigation_item.dart b/lib/widgets/shared/bottom_navigation_item.dart index d35d66c3..501a354d 100644 --- a/lib/widgets/shared/bottom_navigation_item.dart +++ b/lib/widgets/shared/bottom_navigation_item.dart @@ -50,12 +50,14 @@ class BottomNavigationItem extends StatelessWidget { size: 22.0), ), SizedBox(height: 5,), - Text( - name, - style: TextStyle( - color: currentIndex == index - ? Theme.of(context).primaryColor - : Theme.of(context).dividerColor, + Expanded( + child: Text( + name, + style: TextStyle( + color: currentIndex == index + ? 
Theme.of(context).primaryColor + : Theme.of(context).dividerColor, + ), ), ), ], diff --git a/lib/widgets/shared/card_with_bg_widget.dart b/lib/widgets/shared/card_with_bg_widget.dart index 39468b83..a2bb0cfe 100644 --- a/lib/widgets/shared/card_with_bg_widget.dart +++ b/lib/widgets/shared/card_with_bg_widget.dart @@ -1,6 +1,4 @@ -import 'package:doctor_app_flutter/config/size_config.dart'; import 'package:doctor_app_flutter/providers/project_provider.dart'; -import 'package:doctor_app_flutter/widgets/shared/rounded_container_widget.dart'; import 'package:flutter/material.dart'; import 'package:hexcolor/hexcolor.dart'; import 'package:provider/provider.dart'; diff --git a/pubspec.lock b/pubspec.lock index 71445e35..7c695999 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -155,6 +155,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.0.2" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" code_builder: dependency: transitive description: @@ -635,6 +642,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.7.0" + speech_to_text: + dependency: "direct main" + description: + path: speech_to_text + relative: true + source: path + version: "0.0.0" stack_trace: dependency: transitive description: diff --git a/pubspec.yaml b/pubspec.yaml index c6e998f2..a061ba4a 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -52,6 +52,10 @@ dependencies: #flutter_svg: ^0.17.4 percent_indicator: "^2.1.1" + #speech to text + speech_to_text: + path: speech_to_text + dev_dependencies: flutter_test: sdk: flutter @@ -71,6 +75,7 @@ flutter: # To add assets to your application, add an assets section, like this: assets: - assets/images/ + - assets/images/dashboard/ # - images/a_dot_ham.jpeg # An image asset can refer to one or more resolution-specific "variants", see diff --git a/speech_to_text/.github/workflows/master.yml b/speech_to_text/.github/workflows/master.yml new file mode 100644 index 00000000..4d4cff1c --- /dev/null +++ b/speech_to_text/.github/workflows/master.yml @@ -0,0 +1,19 @@ +name: build + +on: + push: + branches: + - master + +jobs: + test: + name: Test on Ubuntu + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: subosito/flutter-action@v1.3.2 + with: + flutter-version: '1.17.1' + channel: 'stable' + - run: flutter pub get + - run: flutter test diff --git a/speech_to_text/.gitignore b/speech_to_text/.gitignore new file mode 100644 index 00000000..8969cbcd --- /dev/null +++ b/speech_to_text/.gitignore @@ -0,0 +1,11 @@ +.DS_Store +.dart_tool/ + +.packages +.pub/ + +build/ +coverage/ +example/.flutter-plugins-dependencies +**/ios/Flutter/flutter_export_environment.sh +android/.idea/ diff --git a/speech_to_text/.metadata b/speech_to_text/.metadata new file mode 100644 index 00000000..1940d996 --- /dev/null +++ b/speech_to_text/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f + channel: stable + +project_type: plugin diff --git a/speech_to_text/CHANGELOG.md b/speech_to_text/CHANGELOG.md new file mode 100644 index 00000000..477e110c --- /dev/null +++ b/speech_to_text/CHANGELOG.md @@ -0,0 +1,166 @@ +# Changelog + +## 2.3.0 + +### New + * new parameter `onDevice` on the `listen` method enforces on device recognition for sensitive content + * onSoundLevelChange now supported on iOS + * added compile troubleshooting help to README.md + * `SpeechToTextProvider` is an alternate and simpler way to interact with the `SpeechToText` plugin. + * new `provider_example.dart` example for usage of `SpeechToTextProvider`. +### Fix + * on iOS handles some conflicts with other applications better to keep speech working after calls for example + + +## 2.2.0 + +### New + * improved error handling and logging in the iOS implementation + * added general guides for iOS to the README + * moved stress testing out of the main example + * iOS now defaults to using the speaker rather than the receiver for start /stop sounds when no headphones +### Fix + * iOS now properly deactivates the audio session when no longer listening + * start and stop sounds on iOS should be more reliable when available + +## 2.1.0 +### Breaking + * `listenFor` now calls `stop` rather than `cancel` as this seems like more useful behaviour + +### Fix + * Android no longer stops or cancels the speech recognizer if it has already been shutdown by a + timeout or other platform behaviour. + * Android no longer tries to restart the listener when it is already active + * Now properly notifies errors that happen after listening stops due to platform callback rather than + client request. See https://github.com/csdcorp/speech_to_text/issues/51 + +## 2.0.1 +### Fix + * Resolves an issue with the Android implementation not handling permission requests properly on apps + that didn't use the 1.12.x plugin APIs for registration. The permission dialog would not appear and + permission was denied. + + +## 2.0.0 + +### Breaking + + * Upgraded to New Swift 1.12 plugin structure, may work with older Flutter version but not guaranteed + +### New + + * the plugin now requests both speech and microphone permission on initialize on iOS + * added `debugLogging` parameter to the `initialize` method to control native logging + +### Fix + + * The Android implementation now blocks duplicate results notifications. It appears that at least on some + Android versions the final results notification onResults is notified twice when Android automatically + terminates the session due to a pause time. The de-duplication looks for successive notifications + with < 100 ms between them and blocks the second. If you miss any onResult notifications please post + an issue. + +## 1.1.0 + +### New + + * error_timeout has been separated into error_network_timeout and error_speech_timeout + +## 1.0.0 + +### New + * hasPermission to check for the current permission without bringing up the system dialog + * `listen` has a new optional `cancelOnError` parameter to support automatically canceling + a listening session on a permanent error. + * `listen` has a new optional `partialResults` parameter that controls whether the callback + receives partial or only final results. 
+ +## 0.8.0 + +### New + + * speech recognizer now exposes multiple possible transcriptions for each recognized speech + * alternates list on SpeechRecognitionResult exposes alternate transcriptions of voice + * confidence on SpeechRecognitionResult gives an estimate of confidence in the transcription + * isConfident on SpeechRecognitionResult supports testing confidence + * hasConfidenceRating on SpeechRecognitionResult indicates if confidence was provided from the device + * new SpeechRecognitionWords class gives details on per transcription words and confidence + +### Fix + + * speechRecognizer availabilityDidChange was crashing if invoked due to an invalid parameter type + * Added iOS platform 10 to example Podfile to resolve compilation warnings + +## 0.7.2 + +### Breaking + + * Upgrade Swift to version 5 to match Flutter. Projects using this plugin must now switch to 5. + +## 0.7.1 + +### Fix + + * Upgrade Kotlin to 1.3.5 to match the Flutter 1.12 version + * Upgrade Gradle build to 3.5.0 to match the Flutter 1.12 version + * Android version of the plugin was repeating the system default locale in the `locales` list + +## 0.7.0 + +### New + + * locales method returns the list of available languages for speech + * new optional localeId parameter on listen method supports choosing the comprehension language separately from the current system locale. + +### Breaking + + * `cancel` and `stop` are now async + +## 0.6.3 + +### Fix + + * request permission fix on Android to ensure it doesn't conflict with other requests + +## 0.6.2 + +### Fix + + * channel invoke wasn't being done on the main thread in iOS + +## 0.6.1 + +### Fix + + * listening sound was failing due to timing, now uses play and record mode on iOS. + + ## 0.6.0 +### Breaking + + * The filenames for the optional sounds for iOS have changed. + +### New + + * Added an optional listenFor parameter to set a max duration to listen for speech and then automatically cancel. + +### Fix + + * Was failing to play sounds because of record mode. Now plays sounds before going into record mode and after coming out. + * Status listener was being ignored, now properly notifies on status changes. + +## 0.5.1 + * Fixes a problem where the recognizer left the AVAudioSession in record mode which meant that subsequent sounds couldn't be played. + +## 0.5.0 +Initial draft with limited functionality, supports: + * initializing speech recognition + * asking the user for permission if required + * listening for recognized speech + * canceling the current recognition session + * stopping the current recognition session +* Android and iOS 10+ support + +Missing: + * some error handling + * testing across multiple OS versions + * and more, to be discovered... diff --git a/speech_to_text/LICENSE b/speech_to_text/LICENSE new file mode 100644 index 00000000..7c3991c8 --- /dev/null +++ b/speech_to_text/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Corner Software Development Corp. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/speech_to_text/README.md b/speech_to_text/README.md new file mode 100644 index 00000000..af726f0e --- /dev/null +++ b/speech_to_text/README.md @@ -0,0 +1,150 @@ +# speech_to_text + +[![pub package](https://img.shields.io/badge/pub-v2.3.0-blue)](https://pub.dartlang.org/packages/speech_to_text) [![build status](https://github.com/csdcorp/speech_to_text/workflows/build/badge.svg)](https://github.com/csdcorp/speech_to_text/actions?query=workflow%3Abuild) + +A library that exposes device specific speech recognition capability. + +This plugin contains a set of classes that make it easy to use the speech recognition +capabilities of the mobile device in Flutter. It supports both Android and iOS. The +target use cases for this library are commands and short phrases, not continuous spoken +conversion or always on listening. + +## Recent Updates + +The 2.3.0 version adds `SpeechToTextProvider` as a simpler way to interact with the plugin. Checkout +the new `provider_example.dart` for intended usage. + +The 2.2.0 version improves audio session handling and start / stop sound playback on iOS. + +*Note*: Feedback from any test devices is welcome. + +## Using + +To recognize text from the microphone import the package and call the plugin, like so: + +```dart +import 'package:speech_to_text/speech_to_text.dart' as stt; + + stt.SpeechToText speech = stt.SpeechToText(); + bool available = await speech.initialize( onStatus: statusListener, onError: errorListener ); + if ( available ) { + speech.listen( onResult: resultListener ); + } + else { + print("The user has denied the use of speech recognition."); + } + // some time later... + speech.stop() +``` + +### Initialize once +The `initialize` method only needs to be called once per application session. After that `listen`, +`start`, `stop`, and `cancel` can be used to interact with the plugin. Subsequent calls to `initialize` +are ignored which is safe but does mean that the `onStatus` and `onError` callbacks cannot be reset after +the first call to `initialize`. For that reason there should be only one instance of the plugin per +application. The `SpeechToTextProvider` is one way to create a single instance and easily reuse it in +multiple widgets. + +## Permissions + +Applications using this plugin require user permissions. +### iOS + +Add the following keys to your _Info.plist_ file, located in `/ios/Runner/Info.plist`: + +* `NSSpeechRecognitionUsageDescription` - describe why your app uses speech recognition. 
This is called _Privacy - Speech Recognition Usage Description_ in the visual editor. +* `NSMicrophoneUsageDescription` - describe why your app needs access to the microphone. This is called _Privacy - Microphone Usage Description_ in the visual editor. + +### Android + +Add the record audio permission to your _AndroidManifest.xml_ file, located in `/android/app/src/main/AndroidManifest.xml`. + +* `android.permission.RECORD_AUDIO` - this permission is required for microphone access. +* `android.permission.INTERNET` - this permission is required because speech recognition may use remote services. + +## Adding Sounds for iOS (optional) + +Android automatically plays system sounds when speech listening starts or stops but iOS does not. This plugin supports playing sounds to indicate listening status on iOS if sound files are available as assets in the application. To enable sounds in an application using this plugin add the sound files to the project and reference them in the assets section of the application `pubspec.yaml`. The location and filenames of the sound files must exactly match what +is shown below or they will not be found. The example application for the plugin shows the usage. *Note* These files should be very short as they delay +the start / end of the speech recognizer until the sound playback is complete. +```yaml + assets: + - assets/sounds/speech_to_text_listening.m4r + - assets/sounds/speech_to_text_cancel.m4r + - assets/sounds/speech_to_text_stop.m4r +``` +* `speech_to_text_listening.m4r` - played when the listen method is called. +* `speech_to_text_cancel.m4r` - played when the cancel method is called. +* `speech_to_text_stop.m4r` - played when the stop method is called. + +## Troubleshooting + +### SDK version error trying to compile for Android +``` +Manifest merger failed : uses-sdk:minSdkVersion 16 cannot be smaller than version 21 declared in library [:speech_to_text] +``` +The speech_to_text plugin requires at least Android SDK 21 because some of the speech functions in Android +were only introduced in that version. To fix this error you need to change the `build.gradle` entry to reflect +this version. Here's what the relevant part of that file looked like as of this writing: +``` + defaultConfig { + applicationId "com.example.app" + minSdkVersion 21 + targetSdkVersion 28 + versionCode flutterVersionCode.toInteger() + versionName flutterVersionName + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } +``` + +### Incorrect Swift version trying to compile for iOS +``` +/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:224:44: error: value of type 'SwiftSpeechToTextPlugin' has no member 'AVAudioSession' + rememberedAudioCategory = self.AVAudioSession.Category + ~~~~ ^~~~~~~~~~~~~~ + /Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:227:63: error: type 'Int' has no member 'notifyOthersOnDeactivation' + try self.audioSession.setActive(true, withFlags: .notifyOthersOnDeactivation) +``` +This happens when the Swift language version is not set correctly. See this thread for help https://github.com/csdcorp/speech_to_text/issues/45. + +### Swift not supported trying to compile for iOS +``` +`speech_to_text` does not specify a Swift version and none of the targets (`Runner`) integrating it have the `SWIFT_VERSION` attribute set. +``` +This usually happens for older projects that only support Objective-C. 
See this thread for help https://github.com/csdcorp/speech_to_text/issues/88. + +### Not working on a particular Android device +The symptom for this issue is that the `initialize` method will always fail. If you turn on debug logging +using the `debugLogging: true` flag on the `initialize` method you'll see `'Speech recognition unavailable'` +in the Android log. There's a lengthy issue discussion here https://github.com/csdcorp/speech_to_text/issues/36 +about this. The issue seems to be that the recognizer is not always automatically enabled on the device. Two +key things helped resolve the issue in this case at least. + +#### First +1. Go to Google Play +2. Search for 'Google' +3. You should find this app: https://play.google.com/store/apps/details?id=com.google.android.googlequicksearchbox +If 'Disabled' enable it + +This is the SO post that helped: https://stackoverflow.com/questions/28769320/how-to-check-wether-speech-recognition-is-available-or-not + +#### Second +Ensure the app has the required permissions. The symptom for this that you get a permanent error notification + 'error_audio_error` when starting a listen session. Here's a Stack Overflow post that addresses that + https://stackoverflow.com/questions/46376193/android-speechrecognizer-audio-recording-error + Here's the important excerpt: + >You should go to system setting, Apps, Google app, then enable its permission of microphone. + +### iOS recognition guidelines +Apple has quite a good guide on the user experience for using speech, the original is here +https://developer.apple.com/documentation/speech/sfspeechrecognizer This is the section that I think is particularly relevant: + +>#### Create a Great User Experience for Speech Recognition +>Here are some tips to consider when adding speech recognition support to your app. + +>**Be prepared to handle failures caused by speech recognition limits.** Because speech recognition is a network-based service, limits are enforced so that the service can remain freely available to all apps. Individual devices may be limited in the number of recognitions that can be performed per day, and each app may be throttled globally based on the number of requests it makes per day. If a recognition request fails quickly (within a second or two of starting), check to see if the recognition service became unavailable. If it is, you may want to ask users to try again later. + +>**Plan for a one-minute limit on audio duration.** Speech recognition places a relatively high burden on battery life and network usage. To minimize this burden, the framework stops speech recognition tasks that last longer than one minute. This limit is similar to the one for keyboard-related dictation. +Remind the user when your app is recording. For example, display a visual indicator and play sounds at the beginning and end of speech recognition to help users understand that they're being actively recorded. You can also display speech as it is being recognized so that users understand what your app is doing and see any mistakes made during the recognition process. + +>**Do not perform speech recognition on private or sensitive information.** Some speech is not appropriate for recognition. Don't send passwords, health or financial data, and other sensitive speech for recognition. 
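As a quick reference for the app-side wiring, here is a minimal end-to-end sketch assembled only from the API already shown in this README and in the `medicine_search_screen.dart` changes above (initialize once, then guard each `listen` session). The listener bodies, the `print` diagnostics, and the default locale ID are illustrative, not part of the plugin API.

```dart
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';

// Keep a single plugin instance per application, as recommended above.
final SpeechToText speech = SpeechToText();

Future<void> startListening({String localeId = 'en-GB'}) async {
  // initialize only does real work on the first call; debugLogging
  // surfaces 'Speech recognition unavailable' in the native logs.
  final bool available = await speech.initialize(
    onStatus: (String status) => print('status: $status'),
    onError: (SpeechRecognitionError error) =>
        print('error: ${error.errorMsg} (permanent: ${error.permanent})'),
    debugLogging: true,
  );
  if (!available) {
    print('The user has denied the use of speech recognition.');
    return;
  }
  speech.listen(
    onResult: (SpeechRecognitionResult result) {
      if (result.finalResult) {
        print('heard: ${result.recognizedWords}');
      }
    },
    listenFor: Duration(seconds: 10), // stop well under the one-minute limit
    localeId: localeId, // the app above switches between 'en-GB' and 'ar-SA'
    partialResults: true,
    cancelOnError: true,
    listenMode: ListenMode.confirmation,
  );
}
```

This is the same shape as `startVoiceSearch` in `medicine_search_screen.dart`, which additionally requests microphone permission through `permission_handler` before initializing.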
diff --git a/speech_to_text/android/.classpath b/speech_to_text/android/.classpath new file mode 100644 index 00000000..eb19361b --- /dev/null +++ b/speech_to_text/android/.classpath @@ -0,0 +1,6 @@ + + + + + + diff --git a/speech_to_text/android/.gitignore b/speech_to_text/android/.gitignore new file mode 100644 index 00000000..c6cbe562 --- /dev/null +++ b/speech_to_text/android/.gitignore @@ -0,0 +1,8 @@ +*.iml +.gradle +/local.properties +/.idea/workspace.xml +/.idea/libraries +.DS_Store +/build +/captures diff --git a/speech_to_text/android/.project b/speech_to_text/android/.project new file mode 100644 index 00000000..3050653c --- /dev/null +++ b/speech_to_text/android/.project @@ -0,0 +1,23 @@ + + + speech_to_text + Project android_____ created by Buildship. + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.buildship.core.gradleprojectbuilder + + + + + + org.eclipse.jdt.core.javanature + org.eclipse.buildship.core.gradleprojectnature + + diff --git a/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs b/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs new file mode 100644 index 00000000..7a23d112 --- /dev/null +++ b/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs @@ -0,0 +1,13 @@ +arguments= +auto.sync=false +build.scans.enabled=false +connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(5.6.1)) +connection.project.dir= +eclipse.preferences.version=1 +gradle.user.home= +java.home= +jvm.arguments= +offline.mode=false +override.workspace.settings=true +show.console.view=true +show.executions.view=true diff --git a/speech_to_text/android/build.gradle b/speech_to_text/android/build.gradle new file mode 100644 index 00000000..cc06ea57 --- /dev/null +++ b/speech_to_text/android/build.gradle @@ -0,0 +1,44 @@ +group 'com.csdcorp.speech_to_text' +version '1.0-SNAPSHOT' + +buildscript { + ext.kotlin_version = '1.3.50' + repositories { + google() + jcenter() + } + + dependencies { + classpath 'com.android.tools.build:gradle:3.5.0' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + } +} + +rootProject.allprojects { + repositories { + google() + jcenter() + } +} + +apply plugin: 'com.android.library' +apply plugin: 'kotlin-android' + +android { + compileSdkVersion 28 + + sourceSets { + main.java.srcDirs += 'src/main/kotlin' + } + defaultConfig { + minSdkVersion 18 + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + lintOptions { + disable 'InvalidPackage' + } +} + +dependencies { + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" +} diff --git a/speech_to_text/android/gradle/gradle.properties b/speech_to_text/android/gradle/gradle.properties new file mode 100644 index 00000000..94adc3a3 --- /dev/null +++ b/speech_to_text/android/gradle/gradle.properties @@ -0,0 +1,3 @@ +org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true diff --git a/speech_to_text/android/gradle/local.properties b/speech_to_text/android/gradle/local.properties new file mode 100644 index 00000000..b85628e7 --- /dev/null +++ b/speech_to_text/android/gradle/local.properties @@ -0,0 +1,2 @@ +sdk.dir=/Users/stephen.owens/Library/Android/sdk +flutter.sdk=/Users/stephen.owens/Documents/dev/flutter/sdk/flutter \ No newline at end of file diff --git a/speech_to_text/android/gradle/settings.gradle b/speech_to_text/android/gradle/settings.gradle new file mode 100644 index 00000000..cdfc1c4b --- /dev/null +++ b/speech_to_text/android/gradle/settings.gradle @@ -0,0 +1 @@ 
+rootProject.name = 'speech_to_text'
diff --git a/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties b/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 00000000..674bdda0
--- /dev/null
+++ b/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
diff --git a/speech_to_text/android/src/main/AndroidManifest.xml b/speech_to_text/android/src/main/AndroidManifest.xml
new file mode 100644
index 00000000..61a73f32
--- /dev/null
+++ b/speech_to_text/android/src/main/AndroidManifest.xml
@@ -0,0 +1,3 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+  package="com.csdcorp.speech_to_text">
+</manifest>
diff --git a/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt b/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt
new file mode 100644
index 00000000..7954add3
--- /dev/null
+++ b/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt
@@ -0,0 +1,595 @@
+package com.csdcorp.speech_to_text
+
+import androidx.annotation.NonNull;
+import io.flutter.embedding.engine.plugins.FlutterPlugin
+import android.Manifest
+import android.annotation.TargetApi
+import android.app.Activity
+import android.content.Intent
+import android.content.pm.PackageManager
+import android.os.Build
+import android.os.Bundle
+import android.speech.RecognitionListener
+import android.speech.SpeechRecognizer.createSpeechRecognizer
+import android.speech.RecognizerIntent
+import android.speech.SpeechRecognizer
+import androidx.core.app.ActivityCompat
+import androidx.core.content.ContextCompat
+import io.flutter.plugin.common.MethodCall
+import io.flutter.plugin.common.MethodChannel
+import io.flutter.plugin.common.MethodChannel.MethodCallHandler
+import io.flutter.plugin.common.MethodChannel.Result
+import io.flutter.plugin.common.PluginRegistry
+import io.flutter.plugin.common.PluginRegistry.Registrar
+import org.json.JSONObject
+import android.content.Context
+import android.content.BroadcastReceiver
+import android.os.Handler
+import android.os.Looper
+import android.util.Log
+import io.flutter.embedding.engine.plugins.activity.ActivityAware
+import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
+import io.flutter.plugin.common.BinaryMessenger
+import org.json.JSONArray
+import java.util.*
+
+
+enum class SpeechToTextErrors {
+    multipleRequests,
+    unimplemented,
+    noLanguageIntent,
+    recognizerNotAvailable,
+    missingOrInvalidArg,
+    unknown
+}
+
+enum class SpeechToTextCallbackMethods {
+    textRecognition,
+    notifyStatus,
+    notifyError,
+    soundLevelChange,
+}
+
+enum class SpeechToTextStatus {
+    listening,
+    notListening,
+    unavailable,
+    available,
+}
+
+enum class ListenMode {
+    deviceDefault,
+    dictation,
+    search,
+    confirmation,
+}
+
+const val pluginChannelName = "plugin.csdcorp.com/speech_to_text"
+
+@TargetApi(8)
+/** SpeechToTextPlugin */
+public class SpeechToTextPlugin :
+        MethodCallHandler, RecognitionListener,
+        PluginRegistry.RequestPermissionsResultListener, FlutterPlugin,
+        ActivityAware {
+    private var pluginContext: Context? = null
+    private var channel: MethodChannel? = null
+    private val minSdkForSpeechSupport = 21
+    private val speechToTextPermissionCode = 28521
+    private val missingConfidence: Double = -1.0
+    private val logTag = "SpeechToTextPlugin"
+    private var currentActivity: Activity?
= null + private var activeResult: Result? = null + private var initializedSuccessfully: Boolean = false + private var permissionToRecordAudio: Boolean = false + private var listening = false + private var debugLogging: Boolean = false + private var speechRecognizer: SpeechRecognizer? = null + private var recognizerIntent: Intent? = null + private var previousRecognizerLang: String? = null + private var previousPartialResults: Boolean = true + private var previousListenMode: ListenMode = ListenMode.deviceDefault + private var lastFinalTime: Long = 0 + private val handler: Handler = Handler(Looper.getMainLooper()) + private val defaultLanguageTag: String = Locale.getDefault().toLanguageTag() + + override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) { + + onAttachedToEngine(flutterPluginBinding.getApplicationContext(), flutterPluginBinding.getBinaryMessenger()); + } + + // This static function is optional and equivalent to onAttachedToEngine. It supports the old + // pre-Flutter-1.12 Android projects. You are encouraged to continue supporting + // plugin registration via this function while apps migrate to use the new Android APIs + // post-flutter-1.12 via https://flutter.dev/go/android-project-migration. + // + // It is encouraged to share logic between onAttachedToEngine and registerWith to keep + // them functionally equivalent. Only one of onAttachedToEngine or registerWith will be called + // depending on the user's project. onAttachedToEngine or registerWith must both be defined + // in the same class. + companion object { + @JvmStatic + fun registerWith(registrar: Registrar) { + val speechPlugin = SpeechToTextPlugin() + speechPlugin.currentActivity = registrar.activity() + registrar.addRequestPermissionsResultListener(speechPlugin) + speechPlugin.onAttachedToEngine(registrar.context(), registrar.messenger()) + } + } + + private fun onAttachedToEngine(applicationContext: Context, messenger: BinaryMessenger) { + this.pluginContext = applicationContext; + channel = MethodChannel(messenger, pluginChannelName) + channel?.setMethodCallHandler(this) + } + + override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) { + this.pluginContext = null; + channel?.setMethodCallHandler(null) + channel = null + } + + override fun onDetachedFromActivity() { + currentActivity = null + } + + override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) { + currentActivity = binding.activity + binding.addRequestPermissionsResultListener(this) + } + + override fun onAttachedToActivity(binding: ActivityPluginBinding) { + currentActivity = binding.activity + binding.addRequestPermissionsResultListener(this) + } + + override fun onDetachedFromActivityForConfigChanges() { + currentActivity = null + } + + override fun onMethodCall(@NonNull call: MethodCall, @NonNull rawrResult: Result) { + val result = ChannelResultWrapper(rawrResult) + try { + when (call.method) { + "has_permission" -> hasPermission(result) + "initialize" -> { + var dlog = call.argument("debugLogging") + if (null != dlog) { + debugLogging = dlog + } + initialize(result) + } + "listen" -> { + var localeId = call.argument("localeId") + if (null == localeId) { + localeId = defaultLanguageTag + } + var partialResults = call.argument("partialResults") + if (null == partialResults) { + partialResults = true + } + val listenModeIndex = call.argument("listenMode") + if ( null == listenModeIndex ) { + 
result.error(SpeechToTextErrors.missingOrInvalidArg.name, + "listenMode is required", null) + return + } + startListening(result, localeId, partialResults, listenModeIndex ) + } + "stop" -> stopListening(result) + "cancel" -> cancelListening(result) + "locales" -> locales(result) + else -> result.notImplemented() + } + } catch (exc: Exception) { + Log.e(logTag, "Unexpected exception", exc) + result.error(SpeechToTextErrors.unknown.name, + "Unexpected exception", exc.localizedMessage) + } + } + + private fun hasPermission(result: Result) { + if (sdkVersionTooLow(result)) { + return + } + debugLog("Start has_permission") + val localContext = pluginContext + if (localContext != null) { + val hasPerm = ContextCompat.checkSelfPermission(localContext, + Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED + result.success(hasPerm) + } + } + + private fun initialize(result: Result) { + if (sdkVersionTooLow(result)) { + return + } + debugLog("Start initialize") + if (null != activeResult) { + result.error(SpeechToTextErrors.multipleRequests.name, + "Only one initialize at a time", null) + return + } + activeResult = result + val localContext = pluginContext + initializeIfPermitted(pluginContext) + } + + private fun sdkVersionTooLow(result: Result): Boolean { + if (Build.VERSION.SDK_INT < minSdkForSpeechSupport) { + result.success(false) + return true; + } + return false; + } + + private fun isNotInitialized(result: Result): Boolean { + if (!initializedSuccessfully || null == pluginContext) { + result.success(false) + } + return !initializedSuccessfully + } + + private fun isListening(): Boolean { + return listening + } + + private fun isNotListening(): Boolean { + return !listening + } + + private fun startListening(result: Result, languageTag: String, partialResults: Boolean, + listenModeIndex: Int) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isListening()) { + return + } + debugLog("Start listening") + var listenMode = ListenMode.deviceDefault + if ( listenModeIndex == ListenMode.dictation.ordinal) { + listenMode = ListenMode.dictation + } + setupRecognizerIntent(languageTag, partialResults, listenMode) + handler.post { + run { + speechRecognizer?.startListening(recognizerIntent) + } + } + notifyListening(isRecording = true) + result.success(true) + debugLog("Start listening done") + } + + private fun stopListening(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) { + return + } + debugLog("Stop listening") + handler.post { + run { + speechRecognizer?.stopListening() + } + } + notifyListening(isRecording = false) + result.success(true) + debugLog("Stop listening done") + } + + private fun cancelListening(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) { + return + } + debugLog("Cancel listening") + handler.post { + run { + speechRecognizer?.cancel() + } + } + notifyListening(isRecording = false) + result.success(true) + debugLog("Cancel listening done") + } + + private fun locales(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result)) { + return + } + var detailsIntent = RecognizerIntent.getVoiceDetailsIntent(pluginContext) + if (null == detailsIntent) { + detailsIntent = Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS) + } + if (null == detailsIntent) { + result.error(SpeechToTextErrors.noLanguageIntent.name, + "Could not get voice details", null) + return + } + pluginContext?.sendOrderedBroadcast( + detailsIntent, null, 
LanguageDetailsChecker(result), + null, Activity.RESULT_OK, null, null) + } + + private fun notifyListening(isRecording: Boolean) { + debugLog("Notify listening") + listening = isRecording + val status = when (isRecording) { + true -> SpeechToTextStatus.listening.name + false -> SpeechToTextStatus.notListening.name + } + channel?.invokeMethod(SpeechToTextCallbackMethods.notifyStatus.name, status) + debugLog("Notify listening done") + } + + private fun updateResults(speechBundle: Bundle?, isFinal: Boolean) { + if (isDuplicateFinal( isFinal )) { + debugLog("Discarding duplicate final") + return + } + val userSaid = speechBundle?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION) + if (null != userSaid && userSaid.isNotEmpty()) { + val speechResult = JSONObject() + speechResult.put("finalResult", isFinal) + val confidence = speechBundle?.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES) + val alternates = JSONArray() + for (resultIndex in 0..userSaid.size - 1) { + val speechWords = JSONObject() + speechWords.put("recognizedWords", userSaid[resultIndex]) + if (null != confidence && confidence.size >= userSaid.size) { + speechWords.put("confidence", confidence[resultIndex]) + } else { + speechWords.put("confidence", missingConfidence) + } + alternates.put(speechWords) + } + speechResult.put("alternates", alternates) + val jsonResult = speechResult.toString() + debugLog("Calling results callback") + channel?.invokeMethod(SpeechToTextCallbackMethods.textRecognition.name, + jsonResult) + } + } + + private fun isDuplicateFinal( isFinal: Boolean ) : Boolean { + if ( !isFinal ) { + return false + } + val delta = System.currentTimeMillis() - lastFinalTime + lastFinalTime = System.currentTimeMillis() + return delta >= 0 && delta < 100 + } + + private fun initializeIfPermitted(context: Context?) 
{ + val localContext = context + if (null == localContext) { + completeInitialize() + return + } + permissionToRecordAudio = ContextCompat.checkSelfPermission(localContext, + Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED + debugLog("Checked permission") + if (!permissionToRecordAudio) { + val localActivity = currentActivity + if (null != localActivity) { + debugLog("Requesting permission") + ActivityCompat.requestPermissions(localActivity, + arrayOf(Manifest.permission.RECORD_AUDIO), speechToTextPermissionCode) + } else { + debugLog("no permission, no activity, completing") + completeInitialize() + } + } else { + debugLog("has permission, completing") + completeInitialize() + } + debugLog("leaving initializeIfPermitted") + } + + private fun completeInitialize() { + + debugLog("completeInitialize") + if (permissionToRecordAudio) { + debugLog("Testing recognition availability") + if (!SpeechRecognizer.isRecognitionAvailable(pluginContext)) { + Log.e(logTag, "Speech recognition not available on this device") + activeResult?.error(SpeechToTextErrors.recognizerNotAvailable.name, + "Speech recognition not available on this device", "") + activeResult = null + return + } + + debugLog("Creating recognizer") + speechRecognizer = createSpeechRecognizer(pluginContext).apply { + debugLog("Setting listener") + setRecognitionListener(this@SpeechToTextPlugin) + } + if (null == speechRecognizer) { + Log.e(logTag, "Speech recognizer null") + activeResult?.error( + SpeechToTextErrors.recognizerNotAvailable.name, + "Speech recognizer null", "") + activeResult = null + } + + debugLog("before setup intent") + setupRecognizerIntent(defaultLanguageTag, true, ListenMode.deviceDefault) + debugLog("after setup intent") + } + + initializedSuccessfully = permissionToRecordAudio + debugLog("sending result") + activeResult?.success(permissionToRecordAudio) + debugLog("leaving complete") + activeResult = null + } + + private fun setupRecognizerIntent(languageTag: String, partialResults: Boolean, listenMode: ListenMode) { + debugLog("setupRecognizerIntent") + if (previousRecognizerLang == null || + previousRecognizerLang != languageTag || + partialResults != previousPartialResults || previousListenMode != listenMode ) { + previousRecognizerLang = languageTag; + previousPartialResults = partialResults + previousListenMode = listenMode + handler.post { + run { + recognizerIntent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply { + debugLog("In RecognizerIntent apply") + putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM) + debugLog("put model") + val localContext = pluginContext + if (null != localContext) { + putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, + localContext.applicationInfo.packageName) + } + debugLog("put package") + putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults) + debugLog("put partial") + if (languageTag != Locale.getDefault().toLanguageTag()) { + putExtra(RecognizerIntent.EXTRA_LANGUAGE, languageTag); + debugLog("put languageTag") + } + } + } + } + } + } + + override fun onRequestPermissionsResult(requestCode: Int, permissions: Array?, + grantResults: IntArray?): Boolean { + when (requestCode) { + speechToTextPermissionCode -> { + if (null != grantResults) { + permissionToRecordAudio = grantResults.isNotEmpty() && + grantResults.get(0) == PackageManager.PERMISSION_GRANTED + } + completeInitialize() + return true + } + } + return false + } + + + override fun onPartialResults(results: Bundle?) 
= updateResults(results, false) + override fun onResults(results: Bundle?) = updateResults(results, true) + override fun onEndOfSpeech() = notifyListening(isRecording = false) + + override fun onError(errorCode: Int) { + val errorMsg = when (errorCode) { + SpeechRecognizer.ERROR_AUDIO -> "error_audio_error" + SpeechRecognizer.ERROR_CLIENT -> "error_client" + SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "error_permission" + SpeechRecognizer.ERROR_NETWORK -> "error_network" + SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "error_network_timeout" + SpeechRecognizer.ERROR_NO_MATCH -> "error_no_match" + SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "error_busy" + SpeechRecognizer.ERROR_SERVER -> "error_server" + SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "error_speech_timeout" + else -> "error_unknown" + } + sendError(errorMsg) + } + + private fun debugLog( msg: String ) { + if ( debugLogging ) { + Log.d( logTag, msg ) + } + } + + private fun sendError(errorMsg: String) { + val speechError = JSONObject() + speechError.put("errorMsg", errorMsg) + speechError.put("permanent", true) + handler.post { + run { + channel?.invokeMethod(SpeechToTextCallbackMethods.notifyError.name, speechError.toString()) + } + } + } + + override fun onRmsChanged(rmsdB: Float) { + handler.post { + run { + channel?.invokeMethod(SpeechToTextCallbackMethods.soundLevelChange.name, rmsdB) + } + } + } + + override fun onReadyForSpeech(p0: Bundle?) {} + override fun onBufferReceived(p0: ByteArray?) {} + override fun onEvent(p0: Int, p1: Bundle?) {} + override fun onBeginningOfSpeech() {} +} + +// See https://stackoverflow.com/questions/10538791/how-to-set-the-language-in-speech-recognition-on-android/10548680#10548680 +class LanguageDetailsChecker(flutterResult: Result) : BroadcastReceiver() { + private val result: Result = flutterResult + private var supportedLanguages: List? = null + + private var languagePreference: String? = null + + override fun onReceive(context: Context, intent: Intent) { + val results = getResultExtras(true) + if (results.containsKey(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)) { + languagePreference = results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE) + } + if (results.containsKey(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)) { + supportedLanguages = results.getStringArrayList( + RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES) + createResponse(supportedLanguages) + } + } + + private fun createResponse(supportedLanguages: List?) { + val currentLocale = Locale.getDefault() + val localeNames = ArrayList() + localeNames.add(buildIdNameForLocale(currentLocale)) + if (null != supportedLanguages) { + for (lang in supportedLanguages) { + if (currentLocale.toLanguageTag() == lang) { + continue + } + val locale = Locale.forLanguageTag(lang) + localeNames.add(buildIdNameForLocale(locale)) + } + } + result.success(localeNames) + + } + + private fun buildIdNameForLocale(locale: Locale): String { + val name = locale.displayName.replace(':', ' ') + return "${locale.language}_${locale.country}:$name" + } +} + +private class ChannelResultWrapper(result: Result) : Result { + // Caller handler + val handler: Handler = Handler(Looper.getMainLooper()) + val result: Result = result + + // make sure to respond in the caller thread + override fun success(results: Any?) { + + handler.post { + run { + result.success(results); + } + } + } + + override fun error(errorCode: String?, errorMessage: String?, data: Any?) 
{ + handler.post { + run { + result.error(errorCode, errorMessage, data); + } + } + } + + override fun notImplemented() { + handler.post { + run { + result.notImplemented(); + } + } + } +} diff --git a/speech_to_text/example/.gitignore b/speech_to_text/example/.gitignore new file mode 100644 index 00000000..2ddde2a5 --- /dev/null +++ b/speech_to_text/example/.gitignore @@ -0,0 +1,73 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.packages +.pub-cache/ +.pub/ +/build/ + +# Android related +**/android/**/gradle-wrapper.jar +**/android/.gradle +**/android/captures/ +**/android/gradlew +**/android/gradlew.bat +**/android/local.properties +**/android/**/GeneratedPluginRegistrant.java + +# iOS/XCode related +**/ios/**/*.mode1v3 +**/ios/**/*.mode2v3 +**/ios/**/*.moved-aside +**/ios/**/*.pbxuser +**/ios/**/*.perspectivev3 +**/ios/**/*sync/ +**/ios/**/.sconsign.dblite +**/ios/**/.tags* +**/ios/**/.vagrant/ +**/ios/**/DerivedData/ +**/ios/**/Icon? +**/ios/**/Pods/ +**/ios/**/.symlinks/ +**/ios/**/profile +**/ios/**/xcuserdata +**/ios/.generated/ +**/ios/Flutter/App.framework +**/ios/Flutter/Flutter.framework +**/ios/Flutter/Generated.xcconfig +**/ios/Flutter/app.flx +**/ios/Flutter/app.zip +**/ios/Flutter/flutter_assets/ +**/ios/Flutter/flutter_export_environment.sh +**/ios/ServiceDefinitions.json +**/ios/Runner/GeneratedPluginRegistrant.* + +# Exceptions to above rules. +!**/ios/**/default.mode1v3 +!**/ios/**/default.mode2v3 +!**/ios/**/default.pbxuser +!**/ios/**/default.perspectivev3 +!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages diff --git a/speech_to_text/example/.metadata b/speech_to_text/example/.metadata new file mode 100644 index 00000000..aeb01ee2 --- /dev/null +++ b/speech_to_text/example/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f + channel: stable + +project_type: app diff --git a/speech_to_text/example/README.md b/speech_to_text/example/README.md new file mode 100644 index 00000000..92252821 --- /dev/null +++ b/speech_to_text/example/README.md @@ -0,0 +1,155 @@ +# speech_to_text_example + +Demonstrates how to use the speech_to_text plugin. This example requires +that the plugin has been installed. It initializes speech recognition, +listens for words and prints them. 
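+
+## Setup
+
+Speech recognition needs microphone access before it will work. The plugin
+checks the `android.permission.RECORD_AUDIO` permission at runtime and
+requests it if it has not been granted, and on Android it needs SDK 21 or
+later; on older devices `initialize` simply reports `false`. On iOS the
+Runner's `Info.plist` is assumed to carry usage strings for speech
+recognition and the microphone; a minimal sketch (the string values are
+placeholders, only the keys are fixed):
+
+```xml
+<!-- Placeholder usage strings; the two keys are what speech recognition
+     and microphone access require. -->
+<key>NSSpeechRecognitionUsageDescription</key>
+<string>This example recognizes the words you speak.</string>
+<key>NSMicrophoneUsageDescription</key>
+<string>This example listens to the microphone to capture speech.</string>
+```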
+ + +## Source + +```dart +import 'package:flutter/material.dart'; +import 'dart:async'; + +import 'package:speech_to_text/speech_to_text.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; + +void main() => runApp(MyApp()); + +class MyApp extends StatefulWidget { + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + bool _hasSpeech = false; + String lastWords = ""; + String lastError = ""; + String lastStatus = ""; + final SpeechToText speech = SpeechToText(); + + @override + void initState() { + super.initState(); + initSpeechState(); + } + + Future initSpeechState() async { + bool hasSpeech = await speech.initialize(onError: errorListener, onStatus: statusListener ); + + if (!mounted) return; + setState(() { + _hasSpeech = hasSpeech; + }); + } + + @override + Widget build(BuildContext context) { + return MaterialApp( + home: Scaffold( + appBar: AppBar( + title: const Text('Speech to Text Example'), + ), + body: _hasSpeech + ? Column(children: [ + Expanded( + child: Center( + child: Text('Speech recognition available'), + ), + ), + Expanded( + child: Row( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + FlatButton( + child: Text('Start'), + onPressed: startListening, + ), + FlatButton( + child: Text('Stop'), + onPressed: stopListening, + ), + FlatButton( + child: Text('Cancel'), + onPressed:cancelListening, + ), + ], + ), + ), + Expanded( + child: Column( + children: [ + Center( + child: Text('Recognized Words'), + ), + Center( + child: Text(lastWords), + ), + ], + ), + ), + Expanded( + child: Column( + children: [ + Center( + child: Text('Error'), + ), + Center( + child: Text(lastError), + ), + ], + ), + ), + Expanded( + child: Center( + child: speech.isListening ? Text("I'm listening...") : Text( 'Not listening' ), + ), + ), + ]) + : Center( child: Text('Speech recognition unavailable', style: TextStyle(fontSize: 20.0, fontWeight: FontWeight.bold))), + ), + ); + } + + void startListening() { + lastWords = ""; + lastError = ""; + speech.listen(onResult: resultListener ); + setState(() { + + }); + } + + void stopListening() { + speech.stop( ); + setState(() { + + }); + } + + void cancelListening() { + speech.cancel( ); + setState(() { + + }); + } + + void resultListener(SpeechRecognitionResult result) { + setState(() { + lastWords = "${result.recognizedWords} - ${result.finalResult}"; + }); + } + + void errorListener(SpeechRecognitionError error ) { + setState(() { + lastError = "${error.errorMsg} - ${error.permanent}"; + }); + } + void statusListener(String status ) { + setState(() { + lastStatus = "$status"; + }); + } +} +``` \ No newline at end of file diff --git a/speech_to_text/example/android/.project b/speech_to_text/example/android/.project new file mode 100644 index 00000000..d7d48141 --- /dev/null +++ b/speech_to_text/example/android/.project @@ -0,0 +1,17 @@ + + + android___ + Project android___ created by Buildship. 
+ + + + + org.eclipse.buildship.core.gradleprojectbuilder + + + + + + org.eclipse.buildship.core.gradleprojectnature + + diff --git a/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs b/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs new file mode 100644 index 00000000..e8895216 --- /dev/null +++ b/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs @@ -0,0 +1,2 @@ +connection.project.dir= +eclipse.preferences.version=1 diff --git a/speech_to_text/example/android/app/build.gradle b/speech_to_text/example/android/app/build.gradle new file mode 100644 index 00000000..104069d3 --- /dev/null +++ b/speech_to_text/example/android/app/build.gradle @@ -0,0 +1,67 @@ +def localProperties = new Properties() +def localPropertiesFile = rootProject.file('local.properties') +if (localPropertiesFile.exists()) { + localPropertiesFile.withReader('UTF-8') { reader -> + localProperties.load(reader) + } +} + +def flutterRoot = localProperties.getProperty('flutter.sdk') +if (flutterRoot == null) { + throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.") +} + +def flutterVersionCode = localProperties.getProperty('flutter.versionCode') +if (flutterVersionCode == null) { + flutterVersionCode = '1' +} + +def flutterVersionName = localProperties.getProperty('flutter.versionName') +if (flutterVersionName == null) { + flutterVersionName = '1.0' +} + +apply plugin: 'com.android.application' +apply plugin: 'kotlin-android' +apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" + +android { + compileSdkVersion 28 + + sourceSets { + main.java.srcDirs += 'src/main/kotlin' + } + + lintOptions { + disable 'InvalidPackage' + } + + defaultConfig { + // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). + applicationId "com.csdcorp.speech_to_text_example" + minSdkVersion 18 + targetSdkVersion 28 + versionCode flutterVersionCode.toInteger() + versionName flutterVersionName + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + // TODO: Add your own signing config for the release build. + // Signing with the debug keys for now, so `flutter run --release` works. + signingConfig signingConfigs.debug + } + } +} + +flutter { + source '../..' 
+} + +dependencies { + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test:runner:1.1.1' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' +} diff --git a/speech_to_text/example/android/app/src/debug/AndroidManifest.xml b/speech_to_text/example/android/app/src/debug/AndroidManifest.xml new file mode 100644 index 00000000..36edf838 --- /dev/null +++ b/speech_to_text/example/android/app/src/debug/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/speech_to_text/example/android/app/src/main/AndroidManifest.xml b/speech_to_text/example/android/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..b0912061 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt b/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt new file mode 100644 index 00000000..f44e470e --- /dev/null +++ b/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt @@ -0,0 +1,12 @@ +package com.csdcorp.speech_to_text_example + +import androidx.annotation.NonNull; +import io.flutter.embedding.android.FlutterActivity +import io.flutter.embedding.engine.FlutterEngine +import io.flutter.plugins.GeneratedPluginRegistrant + +class MainActivity: FlutterActivity() { + override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) { + GeneratedPluginRegistrant.registerWith(flutterEngine); + } +} diff --git a/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml b/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml new file mode 100644 index 00000000..304732f8 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 00000000..db77bb4b Binary files /dev/null and b/speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 00000000..17987b79 Binary files /dev/null and b/speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 00000000..09d43914 Binary files /dev/null and b/speech_to_text/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 00000000..d5f1c8d3 Binary files /dev/null and b/speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new 
file mode 100644 index 00000000..4d6372ee Binary files /dev/null and b/speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/speech_to_text/example/android/app/src/main/res/values/styles.xml b/speech_to_text/example/android/app/src/main/res/values/styles.xml new file mode 100644 index 00000000..00fa4417 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/res/values/styles.xml @@ -0,0 +1,8 @@ + + + + diff --git a/speech_to_text/example/android/app/src/profile/AndroidManifest.xml b/speech_to_text/example/android/app/src/profile/AndroidManifest.xml new file mode 100644 index 00000000..36edf838 --- /dev/null +++ b/speech_to_text/example/android/app/src/profile/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/speech_to_text/example/android/build.gradle b/speech_to_text/example/android/build.gradle new file mode 100644 index 00000000..13546311 --- /dev/null +++ b/speech_to_text/example/android/build.gradle @@ -0,0 +1,31 @@ +buildscript { + ext.kotlin_version = '1.3.50' + repositories { + google() + jcenter() + } + + dependencies { + classpath 'com.android.tools.build:gradle:3.6.1' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + } +} + +allprojects { + repositories { + google() + jcenter() + } +} + +rootProject.buildDir = '../build' +subprojects { + project.buildDir = "${rootProject.buildDir}/${project.name}" +} +subprojects { + project.evaluationDependsOn(':app') +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/speech_to_text/example/android/gradle.properties b/speech_to_text/example/android/gradle.properties new file mode 100644 index 00000000..a6738207 --- /dev/null +++ b/speech_to_text/example/android/gradle.properties @@ -0,0 +1,4 @@ +org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true +android.enableR8=true diff --git a/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties b/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..052e7951 --- /dev/null +++ b/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Mar 16 08:57:32 EDT 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/speech_to_text/example/android/settings.gradle b/speech_to_text/example/android/settings.gradle new file mode 100644 index 00000000..5a2f14fb --- /dev/null +++ b/speech_to_text/example/android/settings.gradle @@ -0,0 +1,15 @@ +include ':app' + +def flutterProjectRoot = rootProject.projectDir.parentFile.toPath() + +def plugins = new Properties() +def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins') +if (pluginsFile.exists()) { + pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) } +} + +plugins.each { name, path -> + def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile() + include ":$name" + project(":$name").projectDir = pluginDirectory +} diff --git a/speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r b/speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r new file mode 100644 index 00000000..ccb3afe3 Binary files /dev/null and b/speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r differ diff --git a/speech_to_text/example/assets/sounds/speech_to_text_listening.m4r 
b/speech_to_text/example/assets/sounds/speech_to_text_listening.m4r
new file mode 100644
index 00000000..3131d60f
Binary files /dev/null and b/speech_to_text/example/assets/sounds/speech_to_text_listening.m4r differ
diff --git a/speech_to_text/example/assets/sounds/speech_to_text_stop.m4r b/speech_to_text/example/assets/sounds/speech_to_text_stop.m4r
new file mode 100644
index 00000000..8817f01b
Binary files /dev/null and b/speech_to_text/example/assets/sounds/speech_to_text_stop.m4r differ
diff --git a/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist b/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist
new file mode 100644
index 00000000..6b4c0f78
--- /dev/null
+++ b/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>$(DEVELOPMENT_LANGUAGE)</string>
+  <key>CFBundleExecutable</key>
+  <string>App</string>
+  <key>CFBundleIdentifier</key>
+  <string>io.flutter.flutter.app</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>App</string>
+  <key>CFBundlePackageType</key>
+  <string>FMWK</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1.0</string>
+  <key>MinimumOSVersion</key>
+  <string>8.0</string>
+</dict>
+</plist>
diff --git a/speech_to_text/example/ios/Flutter/Debug.xcconfig b/speech_to_text/example/ios/Flutter/Debug.xcconfig
new file mode 100644
index 00000000..e8efba11
--- /dev/null
+++ b/speech_to_text/example/ios/Flutter/Debug.xcconfig
@@ -0,0 +1,2 @@
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Generated.xcconfig"
diff --git a/speech_to_text/example/ios/Flutter/Flutter.podspec b/speech_to_text/example/ios/Flutter/Flutter.podspec
new file mode 100644
index 00000000..5ca30416
--- /dev/null
+++ b/speech_to_text/example/ios/Flutter/Flutter.podspec
@@ -0,0 +1,18 @@
+#
+# NOTE: This podspec is NOT to be published. It is only used as a local source!
+#
+
+Pod::Spec.new do |s|
+  s.name             = 'Flutter'
+  s.version          = '1.0.0'
+  s.summary          = 'High-performance, high-fidelity mobile apps.'
+  s.description      = <<-DESC
+Flutter provides an easy and productive way to build and deploy high-performance mobile apps for Android and iOS.
+                       DESC
+  s.homepage         = 'https://flutter.io'
+  s.license          = { :type => 'MIT' }
+  s.author           = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
+  s.source           = { :git => 'https://github.com/flutter/engine', :tag => s.version.to_s }
+  s.ios.deployment_target = '8.0'
+  s.vendored_frameworks = 'Flutter.framework'
+end
diff --git a/speech_to_text/example/ios/Flutter/Release.xcconfig b/speech_to_text/example/ios/Flutter/Release.xcconfig
new file mode 100644
index 00000000..399e9340
--- /dev/null
+++ b/speech_to_text/example/ios/Flutter/Release.xcconfig
@@ -0,0 +1,2 @@
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Generated.xcconfig"
diff --git a/speech_to_text/example/ios/Podfile b/speech_to_text/example/ios/Podfile
new file mode 100644
index 00000000..ab7d5b46
--- /dev/null
+++ b/speech_to_text/example/ios/Podfile
@@ -0,0 +1,90 @@
+# Uncomment this line to define a global platform for your project
+platform :ios, '10.0'
+
+# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
+ENV['COCOAPODS_DISABLE_STATS'] = 'true'
+
+project 'Runner', {
+  'Debug' => :debug,
+  'Profile' => :release,
+  'Release' => :release,
+}
+
+def parse_KV_file(file, separator='=')
+  file_abs_path = File.expand_path(file)
+  if !File.exists? file_abs_path
+    return [];
+  end
+  generated_key_values = {}
+  skip_line_start_symbols = ["#", "/"]
+  File.foreach(file_abs_path) do |line|
+    next if skip_line_start_symbols.any?
{ |symbol| line =~ /^\s*#{symbol}/ } + plugin = line.split(pattern=separator) + if plugin.length == 2 + podname = plugin[0].strip() + path = plugin[1].strip() + podpath = File.expand_path("#{path}", file_abs_path) + generated_key_values[podname] = podpath + else + puts "Invalid plugin specification: #{line}" + end + end + generated_key_values +end + +target 'Runner' do + use_frameworks! + use_modular_headers! + + # Flutter Pod + + copied_flutter_dir = File.join(__dir__, 'Flutter') + copied_framework_path = File.join(copied_flutter_dir, 'Flutter.framework') + copied_podspec_path = File.join(copied_flutter_dir, 'Flutter.podspec') + unless File.exist?(copied_framework_path) && File.exist?(copied_podspec_path) + # Copy Flutter.framework and Flutter.podspec to Flutter/ to have something to link against if the xcode backend script has not run yet. + # That script will copy the correct debug/profile/release version of the framework based on the currently selected Xcode configuration. + # CocoaPods will not embed the framework on pod install (before any build phases can generate) if the dylib does not exist. + + generated_xcode_build_settings_path = File.join(copied_flutter_dir, 'Generated.xcconfig') + unless File.exist?(generated_xcode_build_settings_path) + raise "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter pub get is executed first" + end + generated_xcode_build_settings = parse_KV_file(generated_xcode_build_settings_path) + cached_framework_dir = generated_xcode_build_settings['FLUTTER_FRAMEWORK_DIR']; + + unless File.exist?(copied_framework_path) + FileUtils.cp_r(File.join(cached_framework_dir, 'Flutter.framework'), copied_flutter_dir) + end + unless File.exist?(copied_podspec_path) + FileUtils.cp(File.join(cached_framework_dir, 'Flutter.podspec'), copied_flutter_dir) + end + end + + # Keep pod path relative so it can be checked into Podfile.lock. + pod 'Flutter', :path => 'Flutter' + + # Plugin Pods + + # Prepare symlinks folder. We use symlinks to avoid having Podfile.lock + # referring to absolute paths on developers' machines. + system('rm -rf .symlinks') + system('mkdir -p .symlinks/plugins') + plugin_pods = parse_KV_file('../.flutter-plugins') + plugin_pods.each do |name, path| + symlink = File.join('.symlinks', 'plugins', name) + File.symlink(path, symlink) + pod name, :path => File.join(symlink, 'ios') + end +end + +# Prevent Cocoapods from embedding a second Flutter framework and causing an error with the new Xcode build system. +install! 
'cocoapods', :disable_input_output_paths => true + +post_install do |installer| + installer.pods_project.targets.each do |target| + target.build_configurations.each do |config| + config.build_settings['ENABLE_BITCODE'] = 'NO' + end + end +end diff --git a/speech_to_text/example/ios/Podfile.lock b/speech_to_text/example/ios/Podfile.lock new file mode 100644 index 00000000..60c8b57d --- /dev/null +++ b/speech_to_text/example/ios/Podfile.lock @@ -0,0 +1,29 @@ +PODS: + - Flutter (1.0.0) + - speech_to_text (0.0.1): + - Flutter + - Try + - Try (2.1.1) + +DEPENDENCIES: + - Flutter (from `Flutter`) + - speech_to_text (from `.symlinks/plugins/speech_to_text/ios`) + +SPEC REPOS: + trunk: + - Try + +EXTERNAL SOURCES: + Flutter: + :path: Flutter + speech_to_text: + :path: ".symlinks/plugins/speech_to_text/ios" + +SPEC CHECKSUMS: + Flutter: 0e3d915762c693b495b44d77113d4970485de6ec + speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe + Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96 + +PODFILE CHECKSUM: 0ba44ad07df4ab62269dc769727cf0f12b1e453d + +COCOAPODS: 1.9.3 diff --git a/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj b/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj new file mode 100644 index 00000000..c40af650 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj @@ -0,0 +1,578 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 46; + objects = { + +/* Begin PBXBuildFile section */ + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; }; + 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; }; + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */; }; +/* End PBXBuildFile section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 9705A1C41CF9048500538489 /* Embed Frameworks */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Embed Frameworks"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; + 3B3967151E833CAA004F5970 /* 
AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; + 59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; + 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = ""; }; + 74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; + 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; + 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = ""; }; + E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 97C146EB1CF9000F007C117D /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 7937AF765430D66F28F7FEEF /* Frameworks */ = { + isa = PBXGroup; + children = ( + E76E9615C6B4FABD88067D55 /* 
Pods_Runner.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 9740EEB11CF90186004384FC /* Flutter */ = { + isa = PBXGroup; + children = ( + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 9740EEB31CF90195004384FC /* Generated.xcconfig */, + ); + name = Flutter; + sourceTree = ""; + }; + 97C146E51CF9000F007C117D = { + isa = PBXGroup; + children = ( + 9740EEB11CF90186004384FC /* Flutter */, + 97C146F01CF9000F007C117D /* Runner */, + 97C146EF1CF9000F007C117D /* Products */, + A68CCF1640763A551D35BD31 /* Pods */, + 7937AF765430D66F28F7FEEF /* Frameworks */, + ); + sourceTree = ""; + }; + 97C146EF1CF9000F007C117D /* Products */ = { + isa = PBXGroup; + children = ( + 97C146EE1CF9000F007C117D /* Runner.app */, + ); + name = Products; + sourceTree = ""; + }; + 97C146F01CF9000F007C117D /* Runner */ = { + isa = PBXGroup; + children = ( + 97C146FA1CF9000F007C117D /* Main.storyboard */, + 97C146FD1CF9000F007C117D /* Assets.xcassets */, + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, + 97C147021CF9000F007C117D /* Info.plist */, + 97C146F11CF9000F007C117D /* Supporting Files */, + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, + 74858FAE1ED2DC5600515810 /* AppDelegate.swift */, + 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */, + ); + path = Runner; + sourceTree = ""; + }; + 97C146F11CF9000F007C117D /* Supporting Files */ = { + isa = PBXGroup; + children = ( + ); + name = "Supporting Files"; + sourceTree = ""; + }; + A68CCF1640763A551D35BD31 /* Pods */ = { + isa = PBXGroup; + children = ( + 59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */, + 6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */, + C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */, + ); + path = Pods; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 97C146ED1CF9000F007C117D /* Runner */ = { + isa = PBXNativeTarget; + buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; + buildPhases = ( + 949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */, + 9740EEB61CF901F6004384FC /* Run Script */, + 97C146EA1CF9000F007C117D /* Sources */, + 97C146EB1CF9000F007C117D /* Frameworks */, + 97C146EC1CF9000F007C117D /* Resources */, + 9705A1C41CF9048500538489 /* Embed Frameworks */, + 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = Runner; + productName = Runner; + productReference = 97C146EE1CF9000F007C117D /* Runner.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 97C146E61CF9000F007C117D /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 1020; + ORGANIZATIONNAME = "The Chromium Authors"; + TargetAttributes = { + 97C146ED1CF9000F007C117D = { + CreatedOnToolsVersion = 7.3.1; + DevelopmentTeam = 3X949YE9K2; + LastSwiftMigration = 0910; + }; + }; + }; + buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 3.2"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 97C146E51CF9000F007C117D; + productRefGroup = 97C146EF1CF9000F007C117D /* 
Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 97C146ED1CF9000F007C117D /* Runner */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 97C146EC1CF9000F007C117D /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, + 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */, + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Thin Binary"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; + }; + 8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "[CP] Embed Pods Frameworks"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; + 949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + 9740EEB61CF901F6004384FC /* Run Script */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Run Script"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 97C146EA1CF9000F007C117D /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */, + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + 97C146FA1CF9000F007C117D /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C146FB1CF9000F007C117D /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C147001CF9000F007C117D /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 249021D3217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Profile; + }; + 249021D4217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + 
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Profile; + }; + 97C147031CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 97C147041CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + 
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 97C147061CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Debug; + }; + 97C147071CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147031CF9000F007C117D /* Debug */, + 97C147041CF9000F007C117D /* Release */, + 249021D3217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147061CF9000F007C117D /* Debug */, + 97C147071CF9000F007C117D /* Release */, + 
249021D4217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 97C146E61CF9000F007C117D /* Project object */; +} diff --git a/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 00000000..1d526a16 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme new file mode 100644 index 00000000..a28140cf --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -0,0 +1,91 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata b/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata new file mode 100644 index 00000000..21a3cc14 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,10 @@ + + + + + + + diff --git a/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 00000000..18d98100 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/speech_to_text/example/ios/Runner/AppDelegate.swift b/speech_to_text/example/ios/Runner/AppDelegate.swift new file mode 100644 index 00000000..70693e4a --- /dev/null +++ b/speech_to_text/example/ios/Runner/AppDelegate.swift @@ -0,0 +1,13 @@ +import UIKit +import Flutter + +@UIApplicationMain +@objc class AppDelegate: FlutterAppDelegate { + override func application( + _ application: UIApplication, + didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]? 
+ ) -> Bool { + GeneratedPluginRegistrant.register(with: self) + return super.application(application, didFinishLaunchingWithOptions: launchOptions) + } +} diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000..d36b1fab --- /dev/null +++ b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,122 @@ +{ + "images" : [ + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@3x.png", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "Icon-App-40x40@3x.png", + "scale" : "3x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@3x.png", + "scale" : "3x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@1x.png", + "scale" : "1x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "Icon-App-40x40@1x.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@1x.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "Icon-App-83.5x83.5@2x.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "Icon-App-1024x1024@1x.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png new file mode 100644 index 00000000..dc9ada47 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png new file mode 100644 index 00000000..28c6bf03 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png 
b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png new file mode 100644 index 00000000..2ccbfd96 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png new file mode 100644 index 00000000..f091b6b0 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png new file mode 100644 index 00000000..4cde1211 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png new file mode 100644 index 00000000..d0ef06e7 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png new file mode 100644 index 00000000..dcdc2306 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png new file mode 100644 index 00000000..2ccbfd96 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png new file mode 100644 index 00000000..c8f9ed8f Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png new file mode 100644 index 00000000..a6d6b860 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png new file mode 100644 index 00000000..a6d6b860 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png new file mode 100644 index 00000000..75b2d164 Binary files /dev/null and 
b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png new file mode 100644 index 00000000..c4df70d3 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png new file mode 100644 index 00000000..6a84f41e Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png new file mode 100644 index 00000000..d0e1f585 Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json new file mode 100644 index 00000000..0bedcf2f --- /dev/null +++ b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json @@ -0,0 +1,23 @@ +{ + "images" : [ + { + "idiom" : "universal", + "filename" : "LaunchImage.png", + "scale" : "1x" + }, + { + "idiom" : "universal", + "filename" : "LaunchImage@2x.png", + "scale" : "2x" + }, + { + "idiom" : "universal", + "filename" : "LaunchImage@3x.png", + "scale" : "3x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png new file mode 100644 index 00000000..9da19eac Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png new file mode 100644 index 00000000..9da19eac Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png new file mode 100644 index 00000000..9da19eac Binary files /dev/null and b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png differ diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md new file mode 100644 index 00000000..89c2725b --- /dev/null +++ b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md @@ -0,0 +1,5 @@ +# Launch Screen Assets + +You can customize the launch screen with your own desired assets by replacing the image files in this directory. 
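+
+The image set expects three files, one per scale factor, matching the entries in the `Contents.json` next to this file: `LaunchImage.png` (1x), `LaunchImage@2x.png` (2x), and `LaunchImage@3x.png` (3x).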
+ +You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images. \ No newline at end of file diff --git a/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard b/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000..f2e259c7 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard b/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard new file mode 100644 index 00000000..f3c28516 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner/Info.plist b/speech_to_text/example/ios/Runner/Info.plist new file mode 100644 index 00000000..a69c0fce --- /dev/null +++ b/speech_to_text/example/ios/Runner/Info.plist @@ -0,0 +1,49 @@ + + + + + NSMicrophoneUsageDescription + This example listens for speech on the device microphone on your request. + NSSpeechRecognitionUsageDescription + This example recognizes words as you speak them and displays them. + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + speech_to_text_example + CFBundlePackageType + APPL + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleSignature + ???? + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + + diff --git a/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h b/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h new file mode 100644 index 00000000..7335fdf9 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h @@ -0,0 +1 @@ +#import "GeneratedPluginRegistrant.h" \ No newline at end of file diff --git a/speech_to_text/example/lib/main.dart b/speech_to_text/example/lib/main.dart new file mode 100644 index 00000000..0115b828 --- /dev/null +++ b/speech_to_text/example/lib/main.dart @@ -0,0 +1,275 @@ +import 'dart:async'; +import 'dart:math'; + +import 'package:flutter/material.dart'; +import 'package:permission_handler/permission_handler.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +void main() => runApp(MyApp()); + +class MyApp extends StatefulWidget { + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + bool _hasSpeech = false; + double level = 0.0; + double minSoundLevel = 50000; + double maxSoundLevel = -50000; + String lastWords = ""; + String lastError = ""; + String lastStatus = ""; + String _currentLocaleId = ""; + List 
_localeNames = []; + final SpeechToText speech = SpeechToText(); + + @override + void initState() { + requestPermissions(); + super.initState(); + } + + Future initSpeechState() async { + bool hasSpeech = await speech.initialize( + onError: errorListener, onStatus: statusListener); + if (hasSpeech) { + _localeNames = await speech.locales(); + + var systemLocale = await speech.systemLocale(); + _currentLocaleId = systemLocale.localeId; + } + + if (!mounted) return; + + setState(() { + _hasSpeech = hasSpeech; + }); + } + + void requestPermissions() async{ + Map statuses = await [ + Permission.microphone, + ].request(); + } + + @override + Widget build(BuildContext context) { + return MaterialApp( + home: Scaffold( + appBar: AppBar( + title: const Text('Speech to Text CloudSolution'), + ), + body: Column(children: [ + Center( + child: Text( + 'Speech recognition available', + style: TextStyle(fontSize: 22.0), + ), + ), + Container( + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + FlatButton( + child: Text('Initialize'), + onPressed: _hasSpeech ? null : initSpeechState, + ), + ], + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + FlatButton( + child: Text('Start'), + onPressed: !_hasSpeech || speech.isListening + ? null + : startListening, + ), + FlatButton( + child: Text('Stop'), + onPressed: speech.isListening ? stopListening : null, + ), + FlatButton( + child: Text('Cancel'), + onPressed: speech.isListening ? cancelListening : null, + ), + ], + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + DropdownButton( + onChanged: (selectedVal) => _switchLang(selectedVal), + value: _currentLocaleId, + items: _localeNames + .map( + (localeName) => DropdownMenuItem( + value: localeName.localeId, + child: Text(localeName.name), + ), + ) + .toList(), + ), + ], + ) + ], + ), + ), + Expanded( + flex: 4, + child: Column( + children: [ + Center( + child: Text( + 'Recognized Words', + style: TextStyle(fontSize: 22.0), + ), + ), + Expanded( + child: Stack( + children: [ + Container( + color: Theme.of(context).selectedRowColor, + child: Center( + child: Text( + lastWords, + textAlign: TextAlign.center, + ), + ), + ), + Positioned.fill( + bottom: 10, + child: Align( + alignment: Alignment.bottomCenter, + child: Container( + width: 40, + height: 40, + alignment: Alignment.center, + decoration: BoxDecoration( + boxShadow: [ + BoxShadow( + blurRadius: .26, + spreadRadius: level * 1.5, + color: Colors.black.withOpacity(.05)) + ], + color: Colors.white, + borderRadius: + BorderRadius.all(Radius.circular(50)), + ), + child: IconButton(icon: Icon(Icons.mic)), + ), + ), + ), + ], + ), + ), + ], + ), + ), + Expanded( + flex: 1, + child: Column( + children: [ + Center( + child: Text( + 'Error Status', + style: TextStyle(fontSize: 22.0), + ), + ), + Center( + child: Text(lastError), + ), + ], + ), + ), + Container( + padding: EdgeInsets.symmetric(vertical: 20), + color: Theme.of(context).backgroundColor, + child: Center( + child: speech.isListening + ? 
Text( + "I'm listening...", + style: TextStyle(fontWeight: FontWeight.bold), + ) + : Text( + 'Not listening', + style: TextStyle(fontWeight: FontWeight.bold), + ), + ), + ), + ]), + ), + ); + } + + void startListening() { + lastWords = ""; + lastError = ""; + speech.listen( + onResult: resultListener, + listenFor: Duration(seconds: 10), + localeId: _currentLocaleId, + onSoundLevelChange: soundLevelListener, + cancelOnError: true, + partialResults: true, + onDevice: true, + listenMode: ListenMode.confirmation); + setState(() {}); + } + + void stopListening() { + speech.stop(); + setState(() { + level = 0.0; + }); + } + + void cancelListening() { + speech.cancel(); + setState(() { + level = 0.0; + }); + } + + void resultListener(SpeechRecognitionResult result) { + setState(() { + lastWords = "${result.recognizedWords} - ${result.finalResult}"; + }); + } + + void soundLevelListener(double level) { + minSoundLevel = min(minSoundLevel, level); + maxSoundLevel = max(maxSoundLevel, level); + // print("sound level $level: $minSoundLevel - $maxSoundLevel "); + setState(() { + this.level = level; + }); + } + + void errorListener(SpeechRecognitionError error) { + // print("Received error status: $error, listening: ${speech.isListening}"); + setState(() { + lastError = "${error.errorMsg} - ${error.permanent}"; + }); + } + + void statusListener(String status) { + // print( + // "Received listener status: $status, listening: ${speech.isListening}"); + setState(() { + lastStatus = "$status"; + }); + } + + _switchLang(selectedVal) { + setState(() { + _currentLocaleId = selectedVal; + }); + print(selectedVal); + } +} diff --git a/speech_to_text/example/pubspec.lock b/speech_to_text/example/pubspec.lock new file mode 100644 index 00000000..e0e9b753 --- /dev/null +++ b/speech_to_text/example/pubspec.lock @@ -0,0 +1,245 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + archive: + dependency: transitive + description: + name: archive + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.13" + args: + dependency: transitive + description: + name: args + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.0" + async: + dependency: transitive + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.1" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + charcode: + dependency: transitive + description: + name: charcode + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.3" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.14.12" + convert: + dependency: transitive + description: + name: convert + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + cupertino_icons: + dependency: "direct main" + description: + name: cupertino_icons + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.3" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" + image: + dependency: transitive + 
description: + name: image + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.12" + json_annotation: + dependency: transitive + description: + name: json_annotation + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.6" + meta: + dependency: transitive + description: + name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.8" + nested: + dependency: transitive + description: + name: nested + url: "https://pub.dartlang.org" + source: hosted + version: "0.0.4" + path: + dependency: transitive + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.4" + permission_handler: + dependency: "direct main" + description: + name: permission_handler + url: "https://pub.dartlang.org" + source: hosted + version: "5.0.1+1" + permission_handler_platform_interface: + dependency: transitive + description: + name: permission_handler_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.1" + petitparser: + dependency: transitive + description: + name: petitparser + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.0" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + provider: + dependency: "direct main" + description: + name: provider + url: "https://pub.dartlang.org" + source: hosted + version: "4.3.1" + quiver: + dependency: transitive + description: + name: quiver + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + speech_to_text: + dependency: "direct dev" + description: + path: ".." 
+ relative: true + source: path + version: "0.0.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.15" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.6" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.8" + xml: + dependency: transitive + description: + name: xml + url: "https://pub.dartlang.org" + source: hosted + version: "3.6.1" +sdks: + dart: ">=2.7.0 <3.0.0" + flutter: ">=1.16.0 <2.0.0" diff --git a/speech_to_text/example/pubspec.yaml b/speech_to_text/example/pubspec.yaml new file mode 100644 index 00000000..d2bfcff7 --- /dev/null +++ b/speech_to_text/example/pubspec.yaml @@ -0,0 +1,33 @@ +name: speech_to_text_example +description: Demonstrates how to use the speech_to_text plugin. +version: 1.1.0 +publish_to: 'none' + +environment: + sdk: ">=2.1.0 <3.0.0" + +dependencies: + flutter: + sdk: flutter + + cupertino_icons: ^0.1.2 + permission_handler: ^5.0.1+1 + + provider: + +dev_dependencies: + flutter_test: + sdk: flutter + + speech_to_text: + path: ../ + +# The following section is specific to Flutter. +flutter: + + uses-material-design: true + + assets: + - assets/sounds/speech_to_text_listening.m4r + - assets/sounds/speech_to_text_cancel.m4r + - assets/sounds/speech_to_text_stop.m4r diff --git a/speech_to_text/example/test/widget_test.dart b/speech_to_text/example/test/widget_test.dart new file mode 100644 index 00000000..639a52fb --- /dev/null +++ b/speech_to_text/example/test/widget_test.dart @@ -0,0 +1,27 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility that Flutter provides. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import '../lib/main.dart'; + +void main() { + testWidgets('Verify Platform version', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(MyApp()); + + // Verify that platform version is retrieved. 
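+    // Note: this is the stock Flutter plugin template test. MyApp in
+    // example/lib/main.dart never renders a Text widget starting with
+    // 'Running on:', so this predicate will not find a match as written.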
+    expect(
+      find.byWidgetPredicate(
+        (Widget widget) =>
+            widget is Text && widget.data.startsWith('Running on:'),
+      ),
+      findsOneWidget,
+    );
+  });
+}
diff --git a/speech_to_text/ios/.gitignore b/speech_to_text/ios/.gitignore
new file mode 100644
index 00000000..aa479fd3
--- /dev/null
+++ b/speech_to_text/ios/.gitignore
@@ -0,0 +1,37 @@
+.idea/
+.vagrant/
+.sconsign.dblite
+.svn/
+
+.DS_Store
+*.swp
+profile
+
+DerivedData/
+build/
+GeneratedPluginRegistrant.h
+GeneratedPluginRegistrant.m
+
+.generated/
+
+*.pbxuser
+*.mode1v3
+*.mode2v3
+*.perspectivev3
+
+!default.pbxuser
+!default.mode1v3
+!default.mode2v3
+!default.perspectivev3
+
+xcuserdata
+
+*.moved-aside
+
+*.pyc
+*sync/
+Icon?
+.tags*
+
+/Flutter/Generated.xcconfig
+/Flutter/flutter_export_environment.sh
\ No newline at end of file
diff --git a/speech_to_text/ios/Assets/.gitkeep b/speech_to_text/ios/Assets/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/speech_to_text/ios/Classes/SpeechToTextPlugin.h b/speech_to_text/ios/Classes/SpeechToTextPlugin.h
new file mode 100644
index 00000000..1785eb8f
--- /dev/null
+++ b/speech_to_text/ios/Classes/SpeechToTextPlugin.h
@@ -0,0 +1,4 @@
+#import <Flutter/Flutter.h>
+
+@interface SpeechToTextPlugin : NSObject<FlutterPlugin>
+@end
diff --git a/speech_to_text/ios/Classes/SpeechToTextPlugin.m b/speech_to_text/ios/Classes/SpeechToTextPlugin.m
new file mode 100644
index 00000000..20d0327d
--- /dev/null
+++ b/speech_to_text/ios/Classes/SpeechToTextPlugin.m
@@ -0,0 +1,8 @@
+#import "SpeechToTextPlugin.h"
+#import <speech_to_text/speech_to_text-Swift.h>
+
+@implementation SpeechToTextPlugin
++ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
+  [SwiftSpeechToTextPlugin registerWithRegistrar:registrar];
+}
+@end
diff --git a/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift b/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift
new file mode 100644
index 00000000..68687967
--- /dev/null
+++ b/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift
@@ -0,0 +1,580 @@
+import Flutter
+import UIKit
+import Speech
+import os.log
+import Try
+
+public enum SwiftSpeechToTextMethods: String {
+    case has_permission
+    case initialize
+    case listen
+    case stop
+    case cancel
+    case locales
+    case unknown // just for testing
+}
+
+public enum SwiftSpeechToTextCallbackMethods: String {
+    case textRecognition
+    case notifyStatus
+    case notifyError
+    case soundLevelChange
+}
+
+public enum SpeechToTextStatus: String {
+    case listening
+    case notListening
+    case unavailable
+    case available
+}
+
+public enum SpeechToTextErrors: String {
+    case onDeviceError
+    case noRecognizerError
+    case listenFailedError
+    case missingOrInvalidArg
+}
+
+public enum ListenMode: Int {
+    case deviceDefault = 0
+    case dictation = 1
+    case search = 2
+    case confirmation = 3
+}
+
+struct SpeechRecognitionWords : Codable {
+    let recognizedWords: String
+    let confidence: Decimal
+}
+
+struct SpeechRecognitionResult : Codable {
+    let alternates: [SpeechRecognitionWords]
+    let finalResult: Bool
+}
+
+struct SpeechRecognitionError : Codable {
+    let errorMsg: String
+    let permanent: Bool
+}
+
+enum SpeechToTextError: Error {
+    case runtimeError(String)
+}
+
+
+@available(iOS 10.0, *)
+public class SwiftSpeechToTextPlugin: NSObject, FlutterPlugin {
+    private var channel: FlutterMethodChannel
+    private var registrar: FlutterPluginRegistrar
+    private var recognizer: SFSpeechRecognizer?
+    private var currentRequest: SFSpeechAudioBufferRecognitionRequest?
+    private var currentTask: SFSpeechRecognitionTask?
+    private var listeningSound: AVAudioPlayer?
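+    // Optional feedback sounds for listen start/stop/cancel; loadSound()
+    // leaves each of these nil when its asset cannot be resolved, in which
+    // case the corresponding playback is simply skipped.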
+ private var successSound: AVAudioPlayer? + private var cancelSound: AVAudioPlayer? + private var rememberedAudioCategory: AVAudioSession.Category? + private var previousLocale: Locale? + private var onPlayEnd: (() -> Void)? + private var returnPartialResults: Bool = true + private var failedListen: Bool = false + private var listening = false + private let audioSession = AVAudioSession.sharedInstance() + private let audioEngine = AVAudioEngine() + private let jsonEncoder = JSONEncoder() + private let busForNodeTap = 0 + private let speechBufferSize: AVAudioFrameCount = 1024 + private static var subsystem = Bundle.main.bundleIdentifier! + private let pluginLog = OSLog(subsystem: "com.csdcorp.speechToText", category: "plugin") + + public static func register(with registrar: FlutterPluginRegistrar) { + let channel = FlutterMethodChannel(name: "plugin.csdcorp.com/speech_to_text", binaryMessenger: registrar.messenger()) + let instance = SwiftSpeechToTextPlugin( channel, registrar: registrar ) + registrar.addMethodCallDelegate(instance, channel: channel ) + } + + init( _ channel: FlutterMethodChannel, registrar: FlutterPluginRegistrar ) { + self.channel = channel + self.registrar = registrar + } + + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { + switch call.method { + case SwiftSpeechToTextMethods.has_permission.rawValue: + hasPermission( result ) + case SwiftSpeechToTextMethods.initialize.rawValue: + initialize( result ) + case SwiftSpeechToTextMethods.listen.rawValue: + guard let argsArr = call.arguments as? Dictionary, + let partialResults = argsArr["partialResults"] as? Bool, let onDevice = argsArr["onDevice"] as? Bool, let listenModeIndex = argsArr["listenMode"] as? Int + else { + DispatchQueue.main.async { + result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue, + message:"Missing arg partialResults, onDevice, and listenMode are required", + details: nil )) + } + return + } + var localeStr: String? = nil + if let localeParam = argsArr["localeId"] as? 
String {
+            localeStr = localeParam
+        }
+        guard let listenMode = ListenMode(rawValue: listenModeIndex) else {
+            DispatchQueue.main.async {
+                result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue,
+                                     message:"invalid value for listenMode, must be 0-3, was \(listenModeIndex)",
+                                     details: nil ))
+            }
+            return
+        }
+
+        listenForSpeech( result, localeStr: localeStr, partialResults: partialResults, onDevice: onDevice, listenMode: listenMode )
+    case SwiftSpeechToTextMethods.stop.rawValue:
+        stopSpeech( result )
+    case SwiftSpeechToTextMethods.cancel.rawValue:
+        cancelSpeech( result )
+    case SwiftSpeechToTextMethods.locales.rawValue:
+        locales( result )
+    default:
+        os_log("Unrecognized method: %{PUBLIC}@", log: pluginLog, type: .error, call.method)
+        DispatchQueue.main.async {
+            result( FlutterMethodNotImplemented)
+        }
+    }
+    }
+
+    private func hasPermission( _ result: @escaping FlutterResult) {
+        let has = SFSpeechRecognizer.authorizationStatus() == SFSpeechRecognizerAuthorizationStatus.authorized &&
+            AVAudioSession.sharedInstance().recordPermission == AVAudioSession.RecordPermission.granted
+        DispatchQueue.main.async {
+            result( has )
+        }
+    }
+
+    private func initialize( _ result: @escaping FlutterResult) {
+        var success = false
+        let status = SFSpeechRecognizer.authorizationStatus()
+        switch status {
+        case SFSpeechRecognizerAuthorizationStatus.notDetermined:
+            SFSpeechRecognizer.requestAuthorization({(status)->Void in
+                success = status == SFSpeechRecognizerAuthorizationStatus.authorized
+                if ( success ) {
+                    AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
+                        if granted {
+                            self.setupSpeechRecognition(result)
+                        } else{
+                            self.sendBoolResult( false, result );
+                            os_log("User denied permission", log: self.pluginLog, type: .info)
+                        }
+                    })
+                }
+                else {
+                    self.sendBoolResult( false, result );
+                }
+            });
+        case SFSpeechRecognizerAuthorizationStatus.denied:
+            os_log("Permission permanently denied", log: self.pluginLog, type: .info)
+            sendBoolResult( false, result );
+        case SFSpeechRecognizerAuthorizationStatus.restricted:
+            os_log("Device restriction prevented initialize", log: self.pluginLog, type: .info)
+            sendBoolResult( false, result );
+        default:
+            os_log("Has permissions continuing with setup", log: self.pluginLog, type: .debug)
+            setupSpeechRecognition(result)
+        }
+    }
+
+    fileprivate func sendBoolResult( _ value: Bool, _ result: @escaping FlutterResult) {
+        DispatchQueue.main.async {
+            result( value )
+        }
+    }
+
+    fileprivate func setupListeningSound() {
+        listeningSound = loadSound("assets/sounds/speech_to_text_listening.m4r")
+        successSound = loadSound("assets/sounds/speech_to_text_stop.m4r")
+        cancelSound = loadSound("assets/sounds/speech_to_text_cancel.m4r")
+    }
+
+    fileprivate func loadSound( _ assetPath: String ) -> AVAudioPlayer? {
+        var player: AVAudioPlayer?
= nil + let soundKey = registrar.lookupKey(forAsset: assetPath ) + guard !soundKey.isEmpty else { + return player + } + if let soundPath = Bundle.main.path(forResource: soundKey, ofType:nil) { + let soundUrl = URL(fileURLWithPath: soundPath ) + do { + player = try AVAudioPlayer(contentsOf: soundUrl ) + player?.delegate = self + } catch { + // no audio + } + } + return player + } + + private func setupSpeechRecognition( _ result: @escaping FlutterResult) { + setupRecognizerForLocale( locale: Locale.current ) + guard recognizer != nil else { + sendBoolResult( false, result ); + return + } + recognizer?.delegate = self + setupListeningSound() + + sendBoolResult( true, result ); + } + + private func setupRecognizerForLocale( locale: Locale ) { + if ( previousLocale == locale ) { + return + } + previousLocale = locale + recognizer = SFSpeechRecognizer( locale: locale ) + } + + private func getLocale( _ localeStr: String? ) -> Locale { + guard let aLocaleStr = localeStr else { + return Locale.current + } + let locale = Locale(identifier: aLocaleStr) + return locale + } + + private func stopSpeech( _ result: @escaping FlutterResult) { + if ( !listening ) { + sendBoolResult( false, result ); + return + } + stopAllPlayers() + if let sound = successSound { + onPlayEnd = {() -> Void in + self.currentTask?.finish() + self.stopCurrentListen( ) + self.sendBoolResult( true, result ) + return + } + sound.play() + } + else { + stopCurrentListen( ) + sendBoolResult( true, result ); + } + } + + private func cancelSpeech( _ result: @escaping FlutterResult) { + if ( !listening ) { + sendBoolResult( false, result ); + return + } + stopAllPlayers() + if let sound = cancelSound { + onPlayEnd = {() -> Void in + self.currentTask?.cancel() + self.stopCurrentListen( ) + self.sendBoolResult( true, result ) + return + } + sound.play() + } + else { + self.currentTask?.cancel() + stopCurrentListen( ) + sendBoolResult( true, result ); + } + } + + private func stopAllPlayers() { + cancelSound?.stop() + successSound?.stop() + listeningSound?.stop() + } + + private func stopCurrentListen( ) { + stopAllPlayers() + currentRequest?.endAudio() + + do { + try trap { + self.audioEngine.stop() + } + } + catch { + os_log("Error stopping engine: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + try trap { + let inputNode = self.audioEngine.inputNode + inputNode.removeTap(onBus: self.busForNodeTap); + } + } + catch { + os_log("Error removing trap: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + if let rememberedAudioCategory = rememberedAudioCategory { + try self.audioSession.setCategory(rememberedAudioCategory) + } + } + catch { + os_log("Error stopping listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + try self.audioSession.setActive(false, options: .notifyOthersOnDeactivation) + } + catch { + os_log("Error deactivation: %{PUBLIC}@", log: pluginLog, type: .info, error.localizedDescription) + } + currentRequest = nil + currentTask = nil + onPlayEnd = nil + listening = false + } + + private func listenForSpeech( _ result: @escaping FlutterResult, localeStr: String?, partialResults: Bool, onDevice: Bool, listenMode: ListenMode ) { + if ( nil != currentTask || listening ) { + sendBoolResult( false, result ); + return + } + do { + // let inErrorTest = true + failedListen = false + returnPartialResults = partialResults + setupRecognizerForLocale(locale: getLocale(localeStr)) + guard let localRecognizer = recognizer else { + 
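+            // SFSpeechRecognizer(locale:) is a failable initializer that
+            // returns nil for unsupported locales, so report a FlutterError
+            // to the caller rather than proceeding with no recognizer.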
result(FlutterError( code: SpeechToTextErrors.noRecognizerError.rawValue, + message:"Failed to create speech recognizer", + details: nil )) + return + } + if ( onDevice ) { + if #available(iOS 13.0, *), !localRecognizer.supportsOnDeviceRecognition { + result(FlutterError( code: SpeechToTextErrors.onDeviceError.rawValue, + message:"on device recognition is not supported on this device", + details: nil )) + } + } + rememberedAudioCategory = self.audioSession.category + try self.audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker) + // try self.audioSession.setMode(AVAudioSession.Mode.measurement) + try self.audioSession.setMode(AVAudioSession.Mode.default) + try self.audioSession.setActive(true, options: .notifyOthersOnDeactivation) + if let sound = listeningSound { + self.onPlayEnd = {()->Void in + if ( !self.failedListen ) { + self.listening = true + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue ) + + } + } + sound.play() + } + self.audioEngine.reset(); + let inputNode = self.audioEngine.inputNode + if(inputNode.inputFormat(forBus: 0).channelCount == 0){ + throw SpeechToTextError.runtimeError("Not enough available inputs.") + } + self.currentRequest = SFSpeechAudioBufferRecognitionRequest() + guard let currentRequest = self.currentRequest else { + sendBoolResult( false, result ); + return + } + currentRequest.shouldReportPartialResults = true + if #available(iOS 13.0, *), onDevice { + currentRequest.requiresOnDeviceRecognition = true + } + switch listenMode { + case ListenMode.dictation: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.dictation + break + case ListenMode.search: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.search + break + case ListenMode.confirmation: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.confirmation + break + default: + break + } + self.currentTask = self.recognizer?.recognitionTask(with: currentRequest, delegate: self ) + let recordingFormat = inputNode.outputFormat(forBus: self.busForNodeTap) + try trap { + inputNode.installTap(onBus: self.busForNodeTap, bufferSize: self.speechBufferSize, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in + currentRequest.append(buffer) + self.updateSoundLevel( buffer: buffer ) + } + } + // if ( inErrorTest ){ + // throw SpeechToTextError.runtimeError("for testing only") + // } + self.audioEngine.prepare() + try self.audioEngine.start() + if nil == listeningSound { + listening = true + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue ) + } + sendBoolResult( true, result ); + } + catch { + failedListen = true + os_log("Error starting listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + stopCurrentListen() + sendBoolResult( false, result ); + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + let speechError = SpeechRecognitionError(errorMsg: "error_listen_failed", permanent: true ) + do { + let errorResult = try jsonEncoder.encode(speechError) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyError, arguments: String( data:errorResult, encoding: .utf8) ) + } catch { + os_log("Could not encode JSON", log: pluginLog, type: .error) + } + } + } + + private func updateSoundLevel( buffer: AVAudioPCMBuffer) { + guard + let channelData = buffer.floatChannelData + else { + return + } + + let channelDataValue = 
channelData.pointee + let channelDataValueArray = stride(from: 0, + to: Int(buffer.frameLength), + by: buffer.stride).map{ channelDataValue[$0] } + let frameLength = Float(buffer.frameLength) + let rms = sqrt(channelDataValueArray.map{ $0 * $0 }.reduce(0, +) / frameLength ) + let avgPower = 20 * log10(rms) + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.soundLevelChange, arguments: avgPower ) + } + + /// Build a list of localId:name with the current locale first + private func locales( _ result: @escaping FlutterResult ) { + var localeNames = [String](); + let locales = SFSpeechRecognizer.supportedLocales(); + let currentLocale = Locale.current + if let idName = buildIdNameForLocale(forIdentifier: currentLocale.identifier ) { + localeNames.append(idName) + } + for locale in locales { + if ( locale.identifier == currentLocale.identifier) { + continue + } + if let idName = buildIdNameForLocale(forIdentifier: locale.identifier ) { + localeNames.append(idName) + } + } + DispatchQueue.main.async { + result(localeNames) + } + } + + private func buildIdNameForLocale( forIdentifier: String ) -> String? { + var idName: String? + if let name = Locale.current.localizedString(forIdentifier: forIdentifier ) { + let sanitizedName = name.replacingOccurrences(of: ":", with: " ") + idName = "\(forIdentifier):\(sanitizedName)" + } + return idName + } + + private func handleResult( _ transcriptions: [SFTranscription], isFinal: Bool ) { + if ( !isFinal && !returnPartialResults ) { + return + } + var speechWords: [SpeechRecognitionWords] = [] + for transcription in transcriptions { + let words: SpeechRecognitionWords = SpeechRecognitionWords(recognizedWords: transcription.formattedString, confidence: confidenceIn( transcription)) + speechWords.append( words ) + } + let speechInfo = SpeechRecognitionResult(alternates: speechWords, finalResult: isFinal ) + do { + let speechMsg = try jsonEncoder.encode(speechInfo) + if let speechStr = String( data:speechMsg, encoding: .utf8) { + os_log("Encoded JSON result: %{PUBLIC}@", log: pluginLog, type: .debug, speechStr ) + invokeFlutter( SwiftSpeechToTextCallbackMethods.textRecognition, arguments: speechStr ) + } + } catch { + os_log("Could not encode JSON", log: pluginLog, type: .error) + } + } + + private func confidenceIn( _ transcription: SFTranscription ) -> Decimal { + guard ( transcription.segments.count > 0 ) else { + return 0; + } + var totalConfidence: Float = 0.0; + for segment in transcription.segments { + totalConfidence += segment.confidence + } + let avgConfidence: Float = totalConfidence / Float(transcription.segments.count ) + let confidence: Float = (avgConfidence * 1000).rounded() / 1000 + return Decimal( string: String( describing: confidence ) )! + } + + private func invokeFlutter( _ method: SwiftSpeechToTextCallbackMethods, arguments: Any? ) { + DispatchQueue.main.async { + self.channel.invokeMethod( method.rawValue, arguments: arguments ) + } + } + +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : SFSpeechRecognizerDelegate { + public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) { + let availability = available ? 
SpeechToTextStatus.available.rawValue : SpeechToTextStatus.unavailable.rawValue + os_log("Availability changed: %{PUBLIC}@", log: pluginLog, type: .debug, availability) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: availability ) + } +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : SFSpeechRecognitionTaskDelegate { + public func speechRecognitionDidDetectSpeech(_ task: SFSpeechRecognitionTask) { + // Do nothing for now + } + + public func speechRecognitionTaskFinishedReadingAudio(_ task: SFSpeechRecognitionTask) { + reportError(source: "FinishedReadingAudio", error: task.error) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + } + + public func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask) { + reportError(source: "TaskWasCancelled", error: task.error) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) { + reportError(source: "FinishSuccessfully", error: task.error) + stopCurrentListen( ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription) { + reportError(source: "HypothesizeTranscription", error: task.error) + handleResult( [transcription], isFinal: false ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishRecognition recognitionResult: SFSpeechRecognitionResult) { + reportError(source: "FinishRecognition", error: task.error) + let isFinal = recognitionResult.isFinal + handleResult( recognitionResult.transcriptions, isFinal: isFinal ) + } + + private func reportError( source: String, error: Error?) { + if ( nil != error) { + os_log("%{PUBLIC}@ with error: %{PUBLIC}@", log: pluginLog, type: .debug, source, error.debugDescription) + } + } +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : AVAudioPlayerDelegate { + + public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, + successfully flag: Bool) { + if let playEnd = self.onPlayEnd { + playEnd() + } + } +} diff --git a/speech_to_text/ios/speech_to_text.podspec b/speech_to_text/ios/speech_to_text.podspec new file mode 100644 index 00000000..1db79aa0 --- /dev/null +++ b/speech_to_text/ios/speech_to_text.podspec @@ -0,0 +1,22 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'speech_to_text' + s.version = '0.0.1' + s.summary = 'A new flutter plugin project.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'http://example.com' + s.license = { :file => '../LICENSE' } + s.author = { 'Your Company' => 'email@example.com' } + s.source = { :path => '.' } + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' + s.dependency 'Try' + + s.ios.deployment_target = '8.0' +end + diff --git a/speech_to_text/lib/speech_recognition_error.dart b/speech_to_text/lib/speech_recognition_error.dart new file mode 100644 index 00000000..2ab6cd4d --- /dev/null +++ b/speech_to_text/lib/speech_recognition_error.dart @@ -0,0 +1,44 @@ +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_error.g.dart'; + +/// A single error returned from the underlying speech services. +/// +/// Errors are either transient or permanent. 
Permanent errors +/// block speech recognition from continuing and must be +/// addressed before recognition will work. Transient errors +/// cause individual recognition sessions to fail but subsequent +/// attempts may well succeed. +@JsonSerializable() +class SpeechRecognitionError { + /// Use this to differentiate the various error conditions. + /// + /// Not meant for display to the user. + final String errorMsg; + + /// True means that recognition cannot continue until + /// the error is resolved. + final bool permanent; + + SpeechRecognitionError(this.errorMsg, this.permanent); + + factory SpeechRecognitionError.fromJson(Map<String, dynamic> json) => + _$SpeechRecognitionErrorFromJson(json); + Map<String, dynamic> toJson() => _$SpeechRecognitionErrorToJson(this); + + @override + String toString() { + return "SpeechRecognitionError msg: $errorMsg, permanent: $permanent"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionError && + errorMsg == other.errorMsg && + permanent == other.permanent; + } + + @override + int get hashCode => errorMsg.hashCode; +} diff --git a/speech_to_text/lib/speech_recognition_error.g.dart b/speech_to_text/lib/speech_recognition_error.g.dart new file mode 100644 index 00000000..65299f6d --- /dev/null +++ b/speech_to_text/lib/speech_recognition_error.g.dart @@ -0,0 +1,22 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_error.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionError _$SpeechRecognitionErrorFromJson( + Map<String, dynamic> json) { + return SpeechRecognitionError( + json['errorMsg'] as String, + json['permanent'] as bool, + ); +} + +Map<String, dynamic> _$SpeechRecognitionErrorToJson( + SpeechRecognitionError instance) => + <String, dynamic>{ + 'errorMsg': instance.errorMsg, + 'permanent': instance.permanent, + };
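For orientation, a minimal round trip of this error type through its JSON form, using only the API shown above (a sketch; the error string is illustrative):

```dart
import 'dart:convert';

import 'package:speech_to_text/speech_recognition_error.dart';

void main() {
  // Decode an error as it would arrive from the platform side.
  final error = SpeechRecognitionError.fromJson(
      jsonDecode('{"errorMsg":"error_network","permanent":true}'));
  print(error); // SpeechRecognitionError msg: error_network, permanent: true

  // Encode it back; toJson produces the same map shape.
  print(jsonEncode(error.toJson()));
}
```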
diff --git a/speech_to_text/lib/speech_recognition_event.dart b/speech_to_text/lib/speech_recognition_event.dart new file mode 100644 index 00000000..71729365 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_event.dart @@ -0,0 +1,30 @@ +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum SpeechRecognitionEventType { + finalRecognitionEvent, + partialRecognitionEvent, + errorEvent, + statusChangeEvent, + soundLevelChangeEvent, +} + +/// A single event in a stream of speech recognition events. +/// +/// Use [eventType] to determine what type of event it is and depending on that +/// use the other properties to get information about it. +class SpeechRecognitionEvent { + final SpeechRecognitionEventType eventType; + final SpeechRecognitionError _error; + final SpeechRecognitionResult _result; + final bool _listening; + final double _level; + + SpeechRecognitionEvent( + this.eventType, this._result, this._error, this._listening, this._level); + + bool get isListening => _listening; + double get level => _level; + SpeechRecognitionResult get recognitionResult => _result; + SpeechRecognitionError get error => _error; +}
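Consumers of the event stream (exposed later by SpeechToTextProvider) typically switch on the event type; a minimal sketch, assuming only the classes above:

```dart
import 'package:speech_to_text/speech_recognition_event.dart';

void handleEvent(SpeechRecognitionEvent event) {
  // Only some properties are meaningful for a given event type.
  switch (event.eventType) {
    case SpeechRecognitionEventType.finalRecognitionEvent:
    case SpeechRecognitionEventType.partialRecognitionEvent:
      print('words: ${event.recognitionResult.recognizedWords}');
      break;
    case SpeechRecognitionEventType.errorEvent:
      print('error: ${event.error.errorMsg}');
      break;
    case SpeechRecognitionEventType.statusChangeEvent:
      print('listening: ${event.isListening}');
      break;
    case SpeechRecognitionEventType.soundLevelChangeEvent:
      print('level: ${event.level}');
      break;
  }
}
```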
diff --git a/speech_to_text/lib/speech_recognition_result.dart b/speech_to_text/lib/speech_recognition_result.dart new file mode 100644 index 00000000..38509f65 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_result.dart @@ -0,0 +1,140 @@ +import 'dart:collection'; + +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_result.g.dart'; + +/// A sequence of recognized words from the speech recognition +/// service. +/// +/// Depending on the platform behaviour the words may come in all +/// at once at the end or as partial results as each word is +/// recognized. Use the [finalResult] flag to determine if the +/// result is considered final by the platform. +@JsonSerializable(explicitToJson: true) +class SpeechRecognitionResult { + List<SpeechRecognitionWords> _alternates; + + /// Returns a list of possible transcriptions of the speech. + /// + /// The first value is always the same as the [recognizedWords] + /// value. Use the confidence for each alternate transcription + /// to determine how likely it is. Note that not all platforms + /// do a good job with confidence; there are convenience methods + /// on [SpeechRecognitionWords] to work with possibly missing + /// confidence values. + List<SpeechRecognitionWords> get alternates => + UnmodifiableListView(_alternates); + + /// The sequence of words that is the best transcription of + /// what was said. + /// + /// This is the same as the first value of [alternates]. + String get recognizedWords => + _alternates.isNotEmpty ? _alternates.first.recognizedWords : ""; + + /// False means the words are an interim result, true means + /// they are the final recognition. + final bool finalResult; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. -1 + /// means that the confidence value was not available. + double get confidence => + _alternates.isNotEmpty ? _alternates.first.confidence : 0; + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal to + /// [threshold]. If [threshold] is not provided it defaults to 0.8. + bool isConfident( + {double threshold = SpeechRecognitionWords.confidenceThreshold}) => + _alternates.isNotEmpty + ? _alternates.first.isConfident(threshold: threshold) + : false; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => + _alternates.isNotEmpty ? _alternates.first.hasConfidenceRating : false; + + SpeechRecognitionResult(this._alternates, this.finalResult); + + @override + String toString() { + return "SpeechRecognitionResult words: $_alternates, final: $finalResult"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionResult && + recognizedWords == other.recognizedWords && + finalResult == other.finalResult; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionResult.fromJson(Map<String, dynamic> json) => + _$SpeechRecognitionResultFromJson(json); + Map<String, dynamic> toJson() => _$SpeechRecognitionResultToJson(this); +} + +/// A set of words recognized in a [SpeechRecognitionResult]. +/// +/// Each result will have one or more [SpeechRecognitionWords] +/// with a varying degree of confidence about each set of words. +@JsonSerializable() +class SpeechRecognitionWords { + /// The sequence of words recognized. + final String recognizedWords; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. -1 + /// means that the confidence value was not available. Use + /// [isConfident] which handles missing values automatically. + final double confidence; + + static const double confidenceThreshold = 0.8; + static const double missingConfidence = -1; + + const SpeechRecognitionWords(this.recognizedWords, this.confidence); + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal to + /// [threshold]. If [threshold] is not provided it defaults to 0.8. + bool isConfident({double threshold = confidenceThreshold}) => + confidence == missingConfidence || confidence >= threshold; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => confidence != missingConfidence; + + @override + String toString() { + return "SpeechRecognitionWords words: $recognizedWords, confidence: $confidence"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionWords && + recognizedWords == other.recognizedWords && + confidence == other.confidence; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionWords.fromJson(Map<String, dynamic> json) => + _$SpeechRecognitionWordsFromJson(json); + Map<String, dynamic> toJson() => _$SpeechRecognitionWordsToJson(this); +}
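Since the missing-confidence convention is easy to misread, a short worked example of the rules above (values are illustrative):

```dart
import 'package:speech_to_text/speech_recognition_result.dart';

void main() {
  const confident = SpeechRecognitionWords('hello', 0.9);
  const unsure = SpeechRecognitionWords('hello', 0.3);
  const unrated = SpeechRecognitionWords(
      'hello', SpeechRecognitionWords.missingConfidence);

  print(confident.isConfident()); // true: 0.9 >= default threshold of 0.8
  print(unsure.isConfident()); // false: 0.3 < 0.8
  print(unsure.isConfident(threshold: 0.25)); // true: custom threshold
  print(unrated.isConfident()); // true: -1 means no rating was supplied
  print(unrated.hasConfidenceRating); // false: the rating was absent
}
```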
diff --git a/speech_to_text/lib/speech_recognition_result.g.dart b/speech_to_text/lib/speech_recognition_result.g.dart new file mode 100644 index 00000000..023e5485 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_result.g.dart @@ -0,0 +1,41 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_result.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionResult _$SpeechRecognitionResultFromJson( + Map<String, dynamic> json) { + return SpeechRecognitionResult( + (json['alternates'] as List) + ?.map((e) => e == null + ? null + : SpeechRecognitionWords.fromJson(e as Map<String, dynamic>)) + ?.toList(), + json['finalResult'] as bool, + ); +} + +Map<String, dynamic> _$SpeechRecognitionResultToJson( + SpeechRecognitionResult instance) => + <String, dynamic>{ + 'alternates': instance.alternates?.map((e) => e?.toJson())?.toList(), + 'finalResult': instance.finalResult, + }; + +SpeechRecognitionWords _$SpeechRecognitionWordsFromJson( + Map<String, dynamic> json) { + return SpeechRecognitionWords( + json['recognizedWords'] as String, + (json['confidence'] as num)?.toDouble(), + ); +} + +Map<String, dynamic> _$SpeechRecognitionWordsToJson( + SpeechRecognitionWords instance) => + <String, dynamic>{ + 'recognizedWords': instance.recognizedWords, + 'confidence': instance.confidence, + }; diff --git a/speech_to_text/lib/speech_to_text.dart b/speech_to_text/lib/speech_to_text.dart new file mode 100644 index 00000000..343706e6 --- /dev/null +++ b/speech_to_text/lib/speech_to_text.dart @@ -0,0 +1,511 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:math'; + +import 'package:clock/clock.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum ListenMode { + deviceDefault, + dictation, + search, + confirmation, +} + +/// Notified as words are recognized with the current set of recognized words. +/// +/// See the [onResult] argument on the [listen] method for use. +typedef SpeechResultListener = void Function(SpeechRecognitionResult result); + +/// Notified if errors occur during recognition or initialization. +/// +/// Possible errors per the Android docs are described here: +/// https://developer.android.com/reference/android/speech/SpeechRecognizer +/// "error_audio_error" +/// "error_client" +/// "error_permission" +/// "error_network" +/// "error_network_timeout" +/// "error_no_match" +/// "error_busy" +/// "error_server" +/// "error_speech_timeout" +/// See the [onError] argument on the [initialize] method for use. +typedef SpeechErrorListener = void Function( + SpeechRecognitionError errorNotification); + +/// Notified when recognition status changes. +/// +/// See the [onStatus] argument on the [initialize] method for use. +typedef SpeechStatusListener = void Function(String status); + +/// Notified when the sound level changes during a listen method. +/// +/// [level] is a measure of the decibels of the current sound on +/// the recognition input. See the [onSoundLevelChange] argument on +/// the [listen] method for use. +typedef SpeechSoundLevelChange = Function(double level); + +/// An interface to device specific speech recognition services. +/// +/// The general flow of a speech recognition session is as follows: +/// ```Dart +/// SpeechToText speech = SpeechToText(); +/// bool isReady = await speech.initialize(); +/// if ( isReady ) { +/// await speech.listen( onResult: resultListener ); +/// } +/// ...
+/// // At some point later +/// speech.stop(); +/// ``` +class SpeechToText { + static const String listenMethod = 'listen'; + static const String textRecognitionMethod = 'textRecognition'; + static const String notifyErrorMethod = 'notifyError'; + static const String notifyStatusMethod = 'notifyStatus'; + static const String soundLevelChangeMethod = "soundLevelChange"; + static const String notListeningStatus = "notListening"; + static const String listeningStatus = "listening"; + + static const MethodChannel speechChannel = + const MethodChannel('plugin.csdcorp.com/speech_to_text'); + static final SpeechToText _instance = + SpeechToText.withMethodChannel(speechChannel); + bool _initWorked = false; + bool _recognized = false; + bool _listening = false; + bool _cancelOnError = false; + bool _partialResults = false; + int _listenStartedAt = 0; + int _lastSpeechEventAt = 0; + Duration _pauseFor; + Duration _listenFor; + + /// True if not listening or the user called cancel / stop, false + /// if cancel/stop were invoked by timeout or error condition. + bool _userEnded = false; + String _lastRecognized = ""; + String _lastStatus = ""; + double _lastSoundLevel = 0; + Timer _listenTimer; + LocaleName _systemLocale; + SpeechRecognitionError _lastError; + SpeechResultListener _resultListener; + SpeechErrorListener errorListener; + SpeechStatusListener statusListener; + SpeechSoundLevelChange _soundLevelChange; + + final MethodChannel channel; + factory SpeechToText() => _instance; + + @visibleForTesting + SpeechToText.withMethodChannel(this.channel); + + /// True if words have been recognized during the current [listen] call. + /// + /// Goes false as soon as [cancel] is called. + bool get hasRecognized => _recognized; + + /// The last set of recognized words received. + /// + /// This is maintained across [cancel] calls but cleared on the next + /// [listen]. + String get lastRecognizedWords => _lastRecognized; + + /// The last status update received, see [initialize] to register + /// an optional listener to be notified when this changes. + String get lastStatus => _lastStatus; + + /// The last sound level received during a listen event. + /// + /// The sound level is a measure of how loud the current + /// input is during listening. Use the [onSoundLevelChange] + /// argument in the [listen] method to get notified of + /// changes. + double get lastSoundLevel => _lastSoundLevel; + + /// True if [initialize] succeeded + bool get isAvailable => _initWorked; + + /// True if [listen] succeeded and [stop] or [cancel] has not been called. + /// + /// Also goes false when listening times out if listenFor was set. + bool get isListening => _listening; + bool get isNotListening => !isListening; + + /// The last error received or null if none, see [initialize] to + /// register an optional listener to be notified of errors. + SpeechRecognitionError get lastError => _lastError; + + /// True if an error has been received, see [lastError] for details + bool get hasError => null != lastError; + + /// Returns true if the user has already granted permission to access the + /// microphone, does not prompt the user. + /// + /// This method can be called before [initialize] to check if permission + /// has already been granted. If this returns false then the [initialize] + /// call will prompt the user for permission if it is allowed to do so. + /// Note that applications cannot ask for permission again if the user has + /// denied them permission in the past. 
+ Future<bool> get hasPermission async { + bool hasPermission = await channel.invokeMethod('has_permission'); + return hasPermission; + } + + /// Initialize speech recognition services, returns true if + /// successful, false if failed. + /// + /// This method must be called before any other speech functions. + /// If this method returns false no further [SpeechToText] methods + /// should be used. Should only be called once if successful but does protect + /// itself if called repeatedly. False usually means that the user has denied + /// permission to use speech. The usual option in that case is to give them + /// instructions on how to open system settings and grant permission. + /// + /// [onError] is an optional listener for errors like + /// timeout, or failure of the device speech recognition. + /// [onStatus] is an optional listener for status changes from + /// listening to not listening. + /// [debugLogging] controls whether there is detailed logging from the underlying + /// plugins. It is off by default and usually only useful for troubleshooting + /// issues with a particular OS version or device; the output is fairly verbose. + Future<bool> initialize( + {SpeechErrorListener onError, + SpeechStatusListener onStatus, + debugLogging = false}) async { + if (_initWorked) { + return Future.value(_initWorked); + } + errorListener = onError; + statusListener = onStatus; + channel.setMethodCallHandler(_handleCallbacks); + _initWorked = await channel + .invokeMethod('initialize', {"debugLogging": debugLogging}); + return _initWorked; + }
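+ + // Example (an illustrative sketch, not part of this change; handler names + // are hypothetical): + // + // final available = await speech.initialize( + // onError: (error) => print('speech error: ${error.errorMsg}'), + // onStatus: (status) => print('speech status: $status')); + // if (!available) { + // // Direct the user to system settings to grant permission. + // } +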
+ /// Stops the current listen for speech if active, does nothing if not. + /// + /// Stopping a listen session will cause a final result to be sent. Each + /// listen session should be ended with either [stop] or [cancel], for + /// example in the dispose method of a Widget. [cancel] is automatically + /// invoked by a permanent error if [cancelOnError] is set to true in the + /// [listen] call. + /// + /// *Note:* Cannot be used until a successful [initialize] call. Should + /// only be used after a successful [listen] call. + Future stop() async { + _userEnded = true; + return _stop(); + } + + Future _stop() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('stop'); + } + + /// Cancels the current listen for speech if active, does nothing if not. + /// + /// Canceling means that there will be no final result returned from the + /// recognizer. Each listen session should be ended with either [stop] or + /// [cancel], for example in the dispose method of a Widget. [cancel] is + /// automatically invoked by a permanent error if [cancelOnError] is set + /// to true in the [listen] call. + /// + /// *Note:* Cannot be used until a successful [initialize] call. Should only + /// be used after a successful [listen] call. + Future cancel() async { + _userEnded = true; + return _cancel(); + } + + Future _cancel() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('cancel'); + } + + /// Starts a listening session for speech and converts it to text, + /// invoking the provided [onResult] method as words are recognized. + /// + /// Cannot be used until a successful [initialize] call. There is a + /// time limit on listening imposed by both Android and iOS. The time + /// depends on the device, network, etc. Android is usually quite short, + /// especially if there is no active speech event detected, on the order + /// of ten seconds or so. + /// + /// When listening is done always invoke either [cancel] or [stop] to + /// end the session, even if it times out. [cancelOnError] provides an + /// automatic way to ensure this happens. + /// + /// [onResult] is an optional listener that is notified when words + /// are recognized. + /// + /// [listenFor] sets the maximum duration that it will listen for, after + /// that it automatically stops the listen for you. + /// + /// [pauseFor] sets the maximum duration of a pause in speech with no words + /// detected, after that it automatically stops the listen for you. + /// + /// [localeId] is an optional locale that can be used to listen in a language + /// other than the current system default. See [locales] to find the list of + /// supported languages for listening. + /// + /// [onSoundLevelChange] is an optional listener that is notified when the + /// sound level of the input changes. Use this to update the UI in response to + /// more or less input. The values currently differ between Android and iOS; + /// it has not yet been possible to determine from the Android documentation + /// what the value means. On iOS the value returned is in decibels. + /// + /// [cancelOnError] if true then listening is automatically canceled on a + /// permanent error. This defaults to false. When false cancel should be + /// called from the error handler. + /// + /// [partialResults] if true the listen reports results as they are recognized, + /// when false only final results are reported. Defaults to true. + /// + /// [onDevice] if true the listen attempts to recognize locally with speech never + /// leaving the device. If it cannot do this the listen attempt will fail. This is + /// usually only needed for sensitive content where privacy or security is a concern. + Future listen( + {SpeechResultListener onResult, + Duration listenFor, + Duration pauseFor, + String localeId, + SpeechSoundLevelChange onSoundLevelChange, + cancelOnError = false, + partialResults = true, + onDevice = false, + ListenMode listenMode = ListenMode.confirmation}) async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + _userEnded = false; + _cancelOnError = cancelOnError; + _recognized = false; + _resultListener = onResult; + _soundLevelChange = onSoundLevelChange; + _partialResults = partialResults; + Map<String, dynamic> listenParams = { + "partialResults": partialResults || null != pauseFor, + "onDevice": onDevice, + "listenMode": listenMode.index, + }; + if (null != localeId) { + listenParams["localeId"] = localeId; + } + try { + bool started = await channel.invokeMethod(listenMethod, listenParams); + if (started) { + _listenStartedAt = clock.now().millisecondsSinceEpoch; + _setupListenAndPause(pauseFor, listenFor); + } + } on PlatformException catch (e) { + throw ListenFailedException(e.details); + } + }
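+ + // Example (an illustrative sketch, not part of this change; durations and + // locale are hypothetical): + // + // await speech.listen( + // onResult: (result) => print(result.recognizedWords), + // listenFor: Duration(seconds: 30), + // pauseFor: Duration(seconds: 3), + // localeId: 'en_US', + // cancelOnError: true); +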
+ void _setupListenAndPause(Duration pauseFor, Duration listenFor) { + _pauseFor = null; + _listenFor = null; + if (null == pauseFor && null == listenFor) { + return; + } + var minDuration; + if (null == pauseFor) { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + minDuration = listenFor; + } else if (null == listenFor) { + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + minDuration = pauseFor; + } else { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + var minMillis = min(listenFor.inMilliseconds - _elapsedListenMillis, + pauseFor.inMilliseconds); + minDuration = Duration(milliseconds: minMillis); + } + _listenTimer = Timer(minDuration, _stopOnPauseOrListen); + } + + int get _elapsedListenMillis => + clock.now().millisecondsSinceEpoch - _listenStartedAt; + int get _elapsedSinceSpeechEvent => + clock.now().millisecondsSinceEpoch - _lastSpeechEventAt; + + void _stopOnPauseOrListen() { + if (null != _listenFor && + _elapsedListenMillis >= _listenFor.inMilliseconds) { + _stop(); + } else if (null != _pauseFor && + _elapsedSinceSpeechEvent >= _pauseFor.inMilliseconds) { + _stop(); + } else { + _setupListenAndPause(_pauseFor, _listenFor); + } + }
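+ + // Worked example of the timer logic above: with listenFor of 30s and + // pauseFor of 3s the timer first fires after min(30s, 3s) = 3s. If at that + // point the last speech event is more recent than 3s ago and less than 30s + // have elapsed overall, the timer is simply re-armed with the remaining + // minimum; otherwise _stop() ends the session. +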
+ /// Returns the list of speech locales available on the device. + /// + /// This method is useful to find the identifier to use + /// for the [listen] method; it is the [localeId] member of the + /// [LocaleName]. + /// + /// Each [LocaleName] in the returned list has the + /// identifier for the locale as well as a name for + /// display. The name is localized for the system locale on + /// the device. + Future<List<LocaleName>> locales() async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + final List<dynamic> locales = await channel.invokeMethod('locales'); + List<LocaleName> filteredLocales = locales + .map((locale) { + var components = locale.split(":"); + if (components.length != 2) { + return null; + } + return LocaleName(components[0], components[1]); + }) + .where((item) => item != null) + .toList(); + if (filteredLocales.isNotEmpty) { + _systemLocale = filteredLocales.first; + } else { + _systemLocale = null; + } + filteredLocales.sort((ln1, ln2) => ln1.name.compareTo(ln2.name)); + return filteredLocales; + } + + /// Returns the locale that will be used if no localeId is passed + /// to the [listen] method. + Future<LocaleName> systemLocale() async { + if (null == _systemLocale) { + await locales(); + } + return Future.value(_systemLocale); + } + + Future _handleCallbacks(MethodCall call) async { + // print("SpeechToText call: ${call.method} ${call.arguments}"); + switch (call.method) { + case textRecognitionMethod: + if (call.arguments is String) { + _onTextRecognition(call.arguments); + } + break; + case notifyErrorMethod: + if (call.arguments is String) { + await _onNotifyError(call.arguments); + } + break; + case notifyStatusMethod: + if (call.arguments is String) { + _onNotifyStatus(call.arguments); + } + break; + case soundLevelChangeMethod: + if (call.arguments is double) { + _onSoundLevelChange(call.arguments); + } + break; + default: + } + } + + void _onTextRecognition(String resultJson) { + _lastSpeechEventAt = clock.now().millisecondsSinceEpoch; + Map<String, dynamic> resultMap = jsonDecode(resultJson); + SpeechRecognitionResult speechResult = + SpeechRecognitionResult.fromJson(resultMap); + if (!_partialResults && !speechResult.finalResult) { + return; + } + _recognized = true; + // print("Recognized text $resultJson"); + + _lastRecognized = speechResult.recognizedWords; + if (null != _resultListener) { + _resultListener(speechResult); + } + } + + Future _onNotifyError(String errorJson) async { + if (isNotListening && _userEnded) { + return; + } + Map<String, dynamic> errorMap = jsonDecode(errorJson); + SpeechRecognitionError speechError = + SpeechRecognitionError.fromJson(errorMap); + _lastError = speechError; + if (null != errorListener) { + errorListener(speechError); + } + if (_cancelOnError && speechError.permanent) { + await _cancel(); + } + } + + void _onNotifyStatus(String status) { + _lastStatus = status; + _listening = status == listeningStatus; + // print(status); + if (null != statusListener) { + statusListener(status); + } + } + + void _onSoundLevelChange(double level) { + if (isNotListening) { + return; + } + _lastSoundLevel = level; + if (null != _soundLevelChange) { + _soundLevelChange(level); + } + } + + _shutdownListener() { + _listening = false; + _recognized = false; + _listenTimer?.cancel(); + _listenTimer = null; + } + + @visibleForTesting + Future processMethodCall(MethodCall call) async { + return await _handleCallbacks(call); + } +} + +/// A single locale with a [name], localized to the current system locale, +/// and a [localeId] which can be used in the [listen] method to choose a +/// locale for speech recognition. +class LocaleName { + final String localeId; + final String name; + LocaleName(this.localeId, this.name); +} + +/// Thrown when a method is called that requires successful +/// initialization first. +class SpeechToTextNotInitializedException implements Exception {} + +/// Thrown when listen fails to properly start a speech listening session +/// on the device. +class ListenFailedException implements Exception { + final String details; + ListenFailedException(this.details); +}
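Putting the pieces together, one plausible end-to-end session against the API above (a sketch, not part of this change; durations are illustrative):

```dart
import 'package:speech_to_text/speech_to_text.dart';

Future<void> dictate() async {
  final speech = SpeechToText();
  final available =
      await speech.initialize(onError: (error) => print(error.errorMsg));
  if (!available) {
    return; // Permission denied or recognition unavailable on this device.
  }
  try {
    await speech.listen(
        onResult: (result) => print(result.recognizedWords),
        pauseFor: Duration(seconds: 3),
        cancelOnError: true);
  } on ListenFailedException catch (e) {
    print('listen failed: ${e.details}');
  }
}
```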
diff --git a/speech_to_text/lib/speech_to_text_provider.dart b/speech_to_text/lib/speech_to_text_provider.dart new file mode 100644 index 00000000..91adf3b4 --- /dev/null +++ b/speech_to_text/lib/speech_to_text_provider.dart @@ -0,0 +1,201 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +/// Simplifies interaction with [SpeechToText] by handling all the callbacks and notifying +/// listeners as events happen. +/// +/// Here's an example of using the [SpeechToTextProvider] +/// ``` +/// var speechProvider = SpeechToTextProvider( SpeechToText()); +/// var available = await speechProvider.initialize(); +/// StreamSubscription _subscription; +/// _subscription = speechProvider.stream.listen((recognitionEvent) { +/// if (recognitionEvent.eventType == SpeechRecognitionEventType.finalRecognitionEvent ) { +/// print("I heard: ${recognitionEvent.recognitionResult.recognizedWords}"); +/// } +/// }); +/// speechProvider.addListener(() { +/// var lastResult = speechProvider.lastResult; +/// }); +/// ``` +class SpeechToTextProvider extends ChangeNotifier { + final StreamController<SpeechRecognitionEvent> _recognitionController = + StreamController.broadcast(); + final SpeechToText _speechToText; + SpeechRecognitionResult _lastResult; + double _lastLevel = 0; + List<LocaleName> _locales = []; + LocaleName _systemLocale; + + /// Only construct one instance in an application. + /// + /// Do not call `initialize` on the [SpeechToText] that is passed as a parameter, instead + /// call the [initialize] method on this class. + SpeechToTextProvider(this._speechToText); + + Stream<SpeechRecognitionEvent> get stream => _recognitionController.stream; + + /// Returns the last result received, may be null. + SpeechRecognitionResult get lastResult => _lastResult; + + /// Returns the last error received, may be null. + SpeechRecognitionError get lastError => _speechToText.lastError; + + /// Returns the last sound level received. + /// + /// Note this is only available when the `soundLevel` is set to true on + /// a call to [listen], will be 0 at all other times. + double get lastLevel => _lastLevel; + + /// Initializes the provider and the contained [SpeechToText] instance. + /// + /// Returns true if [SpeechToText] was initialized successfully and can now + /// be used, false otherwise. + Future<bool> initialize() async { + if (isAvailable) { + return isAvailable; + } + bool availableBefore = _speechToText.isAvailable; + bool available = + await _speechToText.initialize(onStatus: _onStatus, onError: _onError); + if (available) { + _locales = []; + _locales.addAll(await _speechToText.locales()); + _systemLocale = await _speechToText.systemLocale(); + } + if (availableBefore != available) { + notifyListeners(); + } + return available; + } + + /// Returns true if the provider has been initialized and can be used to recognize speech.
+ bool get isAvailable => _speechToText.isAvailable; + + /// Returns true if the provider cannot be used to recognize speech, either because it has not + /// yet been initialized or because initialization failed. + bool get isNotAvailable => !_speechToText.isAvailable; + + /// Returns true if [SpeechToText] is listening for new speech. + bool get isListening => _speechToText.isListening; + + /// Returns true if [SpeechToText] is not listening for new speech. + bool get isNotListening => _speechToText.isNotListening; + + /// Returns true if [SpeechToText] has a previous error. + bool get hasError => _speechToText.hasError; + + /// Returns true if [lastResult] has a last result. + bool get hasResults => null != _lastResult; + + /// Returns the list of locales that are available on the device for speech recognition. + List<LocaleName> get locales => _locales; + + /// Returns the locale that is currently set as active on the device. + LocaleName get systemLocale => _systemLocale; + + /// Start listening for new events, set [partialResults] to true to receive interim + /// recognition results. + /// + /// Set [soundLevel] to true to be notified of changes to the input sound level + /// on the microphone. + /// + /// [listenFor] sets the maximum duration that it will listen for, after + /// that it automatically stops the listen for you. + /// + /// [pauseFor] sets the maximum duration of a pause in speech with no words + /// detected, after that it automatically stops the listen for you. + /// + /// Call this only after a successful [initialize] call. + void listen( + {bool partialResults = false, + bool soundLevel = false, + Duration listenFor, + Duration pauseFor}) { + _lastLevel = 0; + _lastResult = null; + if (soundLevel) { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult, + // onSoundLevelChange: _onSoundLevelChange); + ); + } else { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult); + } + } + + /// Stops a currently active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and return the current result as final. + void stop() { + _speechToText.stop(); + notifyListeners(); + } + + /// Cancels a currently active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and ignore any results recognized so far. + void cancel() { + _speechToText.cancel(); + notifyListeners(); + } + + void _onError(SpeechRecognitionError errorNotification) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.errorEvent, + null, + errorNotification, + isListening, + null)); + notifyListeners(); + } + + void _onStatus(String status) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, + null, + null, + isListening, + null)); + notifyListeners(); + } + + void _onListenResult(SpeechRecognitionResult result) { + _lastResult = result; + _recognitionController.add(SpeechRecognitionEvent( + result.finalResult + ?
SpeechRecognitionEventType.finalRecognitionEvent + : SpeechRecognitionEventType.partialRecognitionEvent, + result, + null, + isListening, + null)); + notifyListeners(); + } + + // void _onSoundLevelChange(double level) { + // _lastLevel = level; + // _recognitionController.add(SpeechRecognitionEvent( + // SpeechRecognitionEventType.soundLevelChangeEvent, + // null, + // null, + // null, + // level)); + // notifyListeners(); + // } +} diff --git a/speech_to_text/pubspec.lock b/speech_to_text/pubspec.lock new file mode 100644 index 00000000..7877604f --- /dev/null +++ b/speech_to_text/pubspec.lock @@ -0,0 +1,483 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + _fe_analyzer_shared: + dependency: transitive + description: + name: _fe_analyzer_shared + url: "https://pub.dartlang.org" + source: hosted + version: "5.0.0" + analyzer: + dependency: transitive + description: + name: analyzer + url: "https://pub.dartlang.org" + source: hosted + version: "0.39.13" + archive: + dependency: transitive + description: + name: archive + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.13" + args: + dependency: transitive + description: + name: args + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.0" + async: + dependency: transitive + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.1" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + build: + dependency: transitive + description: + name: build + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.0" + build_config: + dependency: transitive + description: + name: build_config + url: "https://pub.dartlang.org" + source: hosted + version: "0.4.2" + build_daemon: + dependency: transitive + description: + name: build_daemon + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + build_resolvers: + dependency: transitive + description: + name: build_resolvers + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.10" + build_runner: + dependency: "direct dev" + description: + name: build_runner + url: "https://pub.dartlang.org" + source: hosted + version: "1.10.0" + build_runner_core: + dependency: transitive + description: + name: build_runner_core + url: "https://pub.dartlang.org" + source: hosted + version: "5.2.0" + built_collection: + dependency: transitive + description: + name: built_collection + url: "https://pub.dartlang.org" + source: hosted + version: "4.3.2" + built_value: + dependency: transitive + description: + name: built_value + url: "https://pub.dartlang.org" + source: hosted + version: "7.1.0" + charcode: + dependency: transitive + description: + name: charcode + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.3" + checked_yaml: + dependency: transitive + description: + name: checked_yaml + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + clock: + dependency: "direct main" + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + code_builder: + dependency: transitive + description: + name: code_builder + url: "https://pub.dartlang.org" + source: hosted + version: "3.4.0" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.14.12" + convert: + dependency: transitive + description: + name: convert + url: 
"https://pub.dartlang.org" + source: hosted + version: "2.1.1" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + csslib: + dependency: transitive + description: + name: csslib + url: "https://pub.dartlang.org" + source: hosted + version: "0.16.1" + dart_style: + dependency: transitive + description: + name: dart_style + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.6" + fake_async: + dependency: "direct dev" + description: + name: fake_async + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + fixnum: + dependency: transitive + description: + name: fixnum + url: "https://pub.dartlang.org" + source: hosted + version: "0.10.11" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" + glob: + dependency: transitive + description: + name: glob + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.0" + graphs: + dependency: transitive + description: + name: graphs + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.0" + html: + dependency: transitive + description: + name: html + url: "https://pub.dartlang.org" + source: hosted + version: "0.14.0+3" + http_multi_server: + dependency: transitive + description: + name: http_multi_server + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.0" + http_parser: + dependency: transitive + description: + name: http_parser + url: "https://pub.dartlang.org" + source: hosted + version: "3.1.4" + image: + dependency: transitive + description: + name: image + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.12" + io: + dependency: transitive + description: + name: io + url: "https://pub.dartlang.org" + source: hosted + version: "0.3.4" + js: + dependency: transitive + description: + name: js + url: "https://pub.dartlang.org" + source: hosted + version: "0.6.2" + json_annotation: + dependency: "direct main" + description: + name: json_annotation + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + json_serializable: + dependency: "direct dev" + description: + name: json_serializable + url: "https://pub.dartlang.org" + source: hosted + version: "3.3.0" + logging: + dependency: transitive + description: + name: logging + url: "https://pub.dartlang.org" + source: hosted + version: "0.11.4" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.6" + meta: + dependency: transitive + description: + name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.8" + mime: + dependency: transitive + description: + name: mime + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.6+3" + node_interop: + dependency: transitive + description: + name: node_interop + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + node_io: + dependency: transitive + description: + name: node_io + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + package_config: + dependency: transitive + description: + name: package_config + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + path: + dependency: transitive + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.4" + pedantic: + dependency: transitive + description: + name: pedantic + url: 
"https://pub.dartlang.org" + source: hosted + version: "1.9.0" + petitparser: + dependency: transitive + description: + name: petitparser + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.0" + pool: + dependency: transitive + description: + name: pool + url: "https://pub.dartlang.org" + source: hosted + version: "1.4.0" + pub_semver: + dependency: transitive + description: + name: pub_semver + url: "https://pub.dartlang.org" + source: hosted + version: "1.4.4" + pubspec_parse: + dependency: transitive + description: + name: pubspec_parse + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.5" + quiver: + dependency: transitive + description: + name: quiver + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + shelf: + dependency: transitive + description: + name: shelf + url: "https://pub.dartlang.org" + source: hosted + version: "0.7.7" + shelf_web_socket: + dependency: transitive + description: + name: shelf_web_socket + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.3" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_gen: + dependency: transitive + description: + name: source_gen + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.6" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + stream_transform: + dependency: transitive + description: + name: stream_transform + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.15" + timing: + dependency: transitive + description: + name: timing + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.1+2" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.6" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.8" + watcher: + dependency: transitive + description: + name: watcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.7+15" + web_socket_channel: + dependency: transitive + description: + name: web_socket_channel + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + xml: + dependency: transitive + description: + name: xml + url: "https://pub.dartlang.org" + source: hosted + version: "3.6.1" + yaml: + dependency: transitive + description: + name: yaml + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.1" +sdks: + dart: ">=2.7.0 <3.0.0" + flutter: ">=1.10.0" diff --git a/speech_to_text/pubspec.yaml b/speech_to_text/pubspec.yaml new file mode 100644 index 00000000..34b3da29 --- /dev/null +++ b/speech_to_text/pubspec.yaml @@ -0,0 +1,31 @@ +name: 
speech_to_text +description: A Flutter plugin that exposes device specific speech to text recognition capability. + + + +environment: + sdk: ">=2.1.0 <3.0.0" + flutter: ">=1.10.0" + +dependencies: + flutter: + sdk: flutter + json_annotation: ^3.0.0 + clock: ^1.0.1 + +dev_dependencies: + flutter_test: + sdk: flutter + build_runner: ^1.0.0 + json_serializable: ^3.0.0 + fake_async: ^1.0.1 + +flutter: + plugin: + platforms: + android: + package: com.csdcorp.speech_to_text + pluginClass: SpeechToTextPlugin + ios: + pluginClass: SpeechToTextPlugin + diff --git a/speech_to_text/test/speech_recognition_error_test.dart b/speech_to_text/test/speech_recognition_error_test.dart new file mode 100644 index 00000000..202ae4cd --- /dev/null +++ b/speech_to_text/test/speech_recognition_error_test.dart @@ -0,0 +1,65 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; + +void main() { + const String msg1 = "msg1"; + + setUp(() {}); + + group('properties', () { + test('equals true for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error, error); + }); + test('equals true for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError(msg1, false); + expect(error1, error2); + }); + test('equals false for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1, isNot(error2)); + }); + test('hash same for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error.hashCode, error.hashCode); + }); + test('hash same for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError(msg1, false); + expect(error1.hashCode, error2.hashCode); + }); + test('hash different for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1.hashCode, isNot(error2.hashCode)); + }); + test('toString as expected', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + expect(error1.toString(), + "SpeechRecognitionError msg: $msg1, permanent: false"); + }); + }); + group('json', () { + test('loads properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + expect(error.errorMsg, msg1); + expect(error.permanent, isTrue); + json = jsonDecode('{"errorMsg":"$msg1","permanent":false}'); + error = SpeechRecognitionError.fromJson(json); + expect(error.permanent, isFalse); + }); + test('roundtrips properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + var roundtripJson = error.toJson(); + SpeechRecognitionError roundtripError = + SpeechRecognitionError.fromJson(roundtripJson); + expect(error, roundtripError); + }); + }); +} diff --git a/speech_to_text/test/speech_recognition_event_test.dart b/speech_to_text/test/speech_recognition_event_test.dart new file mode 100644 index 00000000..ceaaab8a --- /dev/null +++ b/speech_to_text/test/speech_recognition_event_test.dart @@ -0,0 +1,42 @@ +import 
'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; + +import 'test_speech_channel_handler.dart'; + +void main() { + group('properties', () { + test('status listening matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, null, null, true, null); + expect(event.eventType, SpeechRecognitionEventType.statusChangeEvent); + expect(event.isListening, isTrue); + }); + test('result matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.finalRecognitionEvent, + TestSpeechChannelHandler.firstRecognizedResult, + null, + null, + null); + expect(event.eventType, SpeechRecognitionEventType.finalRecognitionEvent); + expect(event.recognitionResult, + TestSpeechChannelHandler.firstRecognizedResult); + }); + test('error matches', () { + var event = SpeechRecognitionEvent(SpeechRecognitionEventType.errorEvent, + null, TestSpeechChannelHandler.firstError, null, null); + expect(event.eventType, SpeechRecognitionEventType.errorEvent); + expect(event.error, TestSpeechChannelHandler.firstError); + }); + test('sound level matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.soundLevelChangeEvent, + null, + null, + null, + TestSpeechChannelHandler.level1); + expect(event.eventType, SpeechRecognitionEventType.soundLevelChangeEvent); + expect(event.level, TestSpeechChannelHandler.level1); + }); + }); +} diff --git a/speech_to_text/test/speech_recognition_result_test.dart b/speech_to_text/test/speech_recognition_result_test.dart new file mode 100644 index 00000000..1516779a --- /dev/null +++ b/speech_to_text/test/speech_recognition_result_test.dart @@ -0,0 +1,134 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"alternates":[{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}],"finalResult":false}'; + final String secondRecognizedJson = + '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":false}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + group('recognizedWords', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.recognizedWords, isEmpty); + }); + test('matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.recognizedWords, firstRecognizedWords); + }); + }); + group('alternates', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.alternates, isEmpty); + }); + test('expected contents', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.alternates, contains(firstWords)); + expect(result.alternates, contains(secondWords)); + }); + test('in order', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + 
expect(result.alternates.first, firstWords); + }); + }); + group('confidence', () { + test('0 if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.confidence, 0); + }); + test('isConfident false if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.isConfident(), isFalse); + }); + test('isConfident matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.isConfident(), firstWords.isConfident()); + }); + test('hasConfidenceRating false if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.hasConfidenceRating, isFalse); + }); + test('hasConfidenceRating matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.hasConfidenceRating, firstWords.hasConfidenceRating); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + expect(result.recognizedWords, firstRecognizedWords); + expect(result.confidence, firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + var roundTripJson = result.toJson(); + SpeechRecognitionResult roundtripResult = + SpeechRecognitionResult.fromJson(roundTripJson); + expect(result, roundtripResult); + }); + }); + group('overrides', () { + test('toString works with no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect( + result.toString(), "SpeechRecognitionResult words: [], final: true"); + }); + test('toString works with alternates', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result.toString(), + "SpeechRecognitionResult words: [SpeechRecognitionWords words: hello, confidence: 0.85], final: true"); + }); + test('hash same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result.hashCode, result.hashCode); + }); + test('hash differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1.hashCode, isNot(result2.hashCode)); + }); + test('equals same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result, result); + }); + test('equals same for different object same values', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result1a = + SpeechRecognitionResult([firstWords], true); + expect(result1, result1a); + }); + test('equals differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1, isNot(result2)); + }); + }); +} diff --git a/speech_to_text/test/speech_recognitions_words_test.dart b/speech_to_text/test/speech_recognitions_words_test.dart new file mode 100644 index 00000000..36a9ef0e --- /dev/null +++ b/speech_to_text/test/speech_recognitions_words_test.dart @@ -0,0 +1,86 @@ +import 
'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + group('properties', () { + test('words', () { + expect(firstWords.recognizedWords, firstRecognizedWords); + expect(secondWords.recognizedWords, secondRecognizedWords); + }); + test('confidence', () { + expect(firstWords.confidence, firstConfidence); + expect(secondWords.confidence, secondConfidence); + expect(firstWords.hasConfidenceRating, isTrue); + }); + test('equals true for same object', () { + expect(firstWords, firstWords); + }); + test('equals true for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords, firstWordsA); + }); + test('equals false for different results', () { + expect(firstWords, isNot(secondWords)); + }); + test('hash same for same object', () { + expect(firstWords.hashCode, firstWords.hashCode); + }); + test('hash same for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords.hashCode, firstWordsA.hashCode); + }); + test('hash different for different results', () { + expect(firstWords.hashCode, isNot(secondWords.hashCode)); + }); + }); + group('isConfident', () { + test('true when >= 0.8', () { + expect(firstWords.isConfident(), isTrue); + }); + test('false when < 0.8', () { + expect(secondWords.isConfident(), isFalse); + }); + test('respects threshold', () { + expect(secondWords.isConfident(threshold: 0.5), isTrue); + }); + test('true when missing', () { + SpeechRecognitionWords words = SpeechRecognitionWords( + firstRecognizedWords, SpeechRecognitionWords.missingConfidence); + expect(words.isConfident(), isTrue); + expect(words.hasConfidenceRating, isFalse); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + expect(words.recognizedWords, firstRecognizedWords); + expect(words.confidence, firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + var roundTripJson = words.toJson(); + SpeechRecognitionWords roundtripWords = + SpeechRecognitionWords.fromJson(roundTripJson); + expect(words, roundtripWords); + }); + }); +} diff --git a/speech_to_text/test/speech_to_text_provider_test.dart b/speech_to_text/test/speech_to_text_provider_test.dart new file mode 100644 index 00000000..25366b6f --- /dev/null +++ b/speech_to_text/test/speech_to_text_provider_test.dart @@ -0,0 +1,196 @@ +import 'package:fake_async/fake_async.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_to_text.dart'; +import 'package:speech_to_text/speech_to_text_provider.dart'; + +import 
diff --git a/speech_to_text/test/speech_to_text_provider_test.dart b/speech_to_text/test/speech_to_text_provider_test.dart
new file mode 100644
index 00000000..25366b6f
--- /dev/null
+++ b/speech_to_text/test/speech_to_text_provider_test.dart
@@ -0,0 +1,196 @@
+import 'package:fake_async/fake_async.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:speech_to_text/speech_to_text.dart';
+import 'package:speech_to_text/speech_to_text_provider.dart';
+
+import 'test_speech_channel_handler.dart';
+import 'test_speech_listener.dart';
+
+void main() {
+  SpeechToTextProvider provider;
+  SpeechToText speechToText;
+  TestSpeechChannelHandler speechHandler;
+  TestSpeechListener speechListener;
+
+  TestWidgetsFlutterBinding.ensureInitialized();
+
+  setUp(() {
+    speechToText = SpeechToText.withMethodChannel(SpeechToText.speechChannel);
+    speechHandler = TestSpeechChannelHandler(speechToText);
+    speechToText.channel
+        .setMockMethodCallHandler(speechHandler.methodCallHandler);
+    provider = SpeechToTextProvider(speechToText);
+    speechListener = TestSpeechListener(provider);
+    provider.addListener(speechListener.onNotify);
+  });
+
+  tearDown(() {
+    speechToText.channel.setMockMethodCallHandler(null);
+  });
+
+  group('delegates', () {
+    test('isListening matches delegate defaults', () {
+      expect(provider.isListening, speechToText.isListening);
+      expect(provider.isNotListening, speechToText.isNotListening);
+    });
+    test('isAvailable matches delegate defaults', () {
+      expect(provider.isAvailable, speechToText.isAvailable);
+      expect(provider.isNotAvailable, !speechToText.isAvailable);
+    });
+    test('isAvailable matches delegate after init', () async {
+      expect(await provider.initialize(), isTrue);
+      expect(provider.isAvailable, speechToText.isAvailable);
+      expect(provider.isNotAvailable, !speechToText.isAvailable);
+    });
+    test('hasError matches delegate after error', () async {
+      expect(await provider.initialize(), isTrue);
+      expect(provider.hasError, speechToText.hasError);
+    });
+  });
+  group('listening', () {
+    test('notifies on initialize', () async {
+      fakeAsync((fa) {
+        provider.initialize();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.isAvailable, isTrue);
+      });
+    });
+    test('notifies on listening', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener);
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.isListening, isTrue);
+        expect(provider.hasResults, isFalse);
+      });
+    });
+    test('notifies on final words', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener);
+        speechListener.reset();
+        speechHandler.notifyFinalWords();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(provider.hasResults, isTrue);
+        var result = speechListener.recognitionResult;
+        expect(result.recognizedWords,
+            TestSpeechChannelHandler.secondRecognizedWords);
+        expect(result.finalResult, isTrue);
+      });
+    });
+    test('hasResult false after listening before new results', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener);
+        speechHandler.notifyFinalWords();
+        provider.stop();
+        setupForListen(provider, fa, speechListener);
+        fa.flushMicrotasks();
+        expect(provider.hasResults, isFalse);
+      });
+    });
+    test('notifies on partial words', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener, partialResults: true);
+        speechListener.reset();
+        speechHandler.notifyPartialWords();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(provider.hasResults, isTrue);
+        var result = speechListener.recognitionResult;
+        expect(result.recognizedWords,
+            TestSpeechChannelHandler.firstRecognizedWords);
+        expect(result.finalResult, isFalse);
+      });
+    });
+  });
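The groups in this file all follow the same fake_async pattern: schedule un-awaited async work, then drive it with flushMicrotasks before asserting. A self-contained sketch of that pattern, where the Future.microtask body is a stand-in for calls like provider.initialize() above:

import 'package:fake_async/fake_async.dart';

void main() {
  fakeAsync((fa) {
    var notified = false;
    // Stand-in for an un-awaited async call such as provider.initialize().
    Future.microtask(() => notified = true);
    print(notified); // false: the microtask is queued, not yet run
    fa.flushMicrotasks(); // drives queued microtasks to completion
    print(notified); // true: assertions are now safe
  });
}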
+  group('soundLevel', () {
+    test('notifies when requested', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener,
+            partialResults: true, soundLevel: true);
+        speechListener.reset();
+        speechHandler.notifySoundLevel();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.soundLevel, TestSpeechChannelHandler.level2);
+      });
+    });
+    test('no notification by default', () async {
+      fakeAsync((fa) {
+        setupForListen(provider, fa, speechListener,
+            partialResults: true, soundLevel: false);
+        speechListener.reset();
+        speechHandler.notifySoundLevel();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isFalse);
+        expect(speechListener.soundLevel, 0);
+      });
+    });
+  });
+  group('stop/cancel', () {
+    test('notifies on stop', () async {
+      fakeAsync((fa) {
+        provider.initialize();
+        setupForListen(provider, fa, speechListener);
+        speechListener.reset();
+        provider.stop();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.isListening, isFalse);
+      });
+    });
+    test('notifies on cancel', () async {
+      fakeAsync((fa) {
+        provider.initialize();
+        setupForListen(provider, fa, speechListener);
+        speechListener.reset();
+        provider.cancel();
+        fa.flushMicrotasks();
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.isListening, isFalse);
+      });
+    });
+  });
+  group('error handling', () {
+    test('hasError matches delegate default', () async {
+      expect(await provider.initialize(), isTrue);
+      expect(provider.hasError, speechToText.hasError);
+    });
+    test('notifies on error', () async {
+      fakeAsync((fa) {
+        provider.initialize();
+        setupForListen(provider, fa, speechListener);
+        speechListener.reset();
+        speechHandler.notifyPermanentError();
+        expect(speechListener.notified, isTrue);
+        expect(speechListener.hasError, isTrue);
+      });
+    });
+  });
+  group('locale', () {
+    test('locales empty before init', () async {
+      expect(provider.systemLocale, isNull);
+      expect(provider.locales, isEmpty);
+    });
+    test('set from SpeechToText after init', () async {
+      fakeAsync((fa) {
+        speechHandler.setupLocales();
+        provider.initialize();
+        fa.flushMicrotasks();
+        expect(
+            provider.systemLocale.localeId, TestSpeechChannelHandler.localeId1);
+        expect(provider.locales, hasLength(speechHandler.locales.length));
+      });
+    });
+  });
+}
+
+void setupForListen(SpeechToTextProvider provider, FakeAsync fa,
+    TestSpeechListener speechListener,
+    {bool partialResults = false, bool soundLevel = false}) {
+  provider.initialize();
+  fa.flushMicrotasks();
+  speechListener.reset();
+  provider.listen(partialResults: partialResults, soundLevel: soundLevel);
+  fa.flushMicrotasks();
+}
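On the application side, the same notifications are consumed through an ordinary ChangeNotifier listener. A hedged sketch of that wiring, using only provider members exercised by this suite (isListening, hasResults, lastResult, listen's partialResults flag):

import 'package:speech_to_text/speech_to_text.dart';
import 'package:speech_to_text/speech_to_text_provider.dart';

Future<void> startDictation() async {
  final provider = SpeechToTextProvider(SpeechToText());
  provider.addListener(() {
    // Called on every notification: listening changes, results, errors.
    if (provider.hasResults) {
      print(provider.lastResult.recognizedWords);
    }
  });
  if (await provider.initialize()) {
    provider.listen(partialResults: true);
  }
}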
diff --git a/speech_to_text/test/speech_to_text_test.dart b/speech_to_text/test/speech_to_text_test.dart
new file mode 100644
index 00000000..7b4701ff
--- /dev/null
+++ b/speech_to_text/test/speech_to_text_test.dart
@@ -0,0 +1,425 @@
+import 'package:fake_async/fake_async.dart';
+import 'package:flutter/services.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:speech_to_text/speech_recognition_error.dart';
+import 'package:speech_to_text/speech_recognition_result.dart';
+import 'package:speech_to_text/speech_to_text.dart';
+
+import 'test_speech_channel_handler.dart';
+
+void main() {
+  TestWidgetsFlutterBinding.ensureInitialized();
+
+  TestSpeechListener listener;
+  TestSpeechChannelHandler speechHandler;
+  SpeechToText speech;
+
+  setUp(() {
+    listener = TestSpeechListener();
+    speech = SpeechToText.withMethodChannel(SpeechToText.speechChannel);
+    speechHandler = TestSpeechChannelHandler(speech);
+    speech.channel.setMockMethodCallHandler(speechHandler.methodCallHandler);
+  });
+
+  tearDown(() {
+    speech.channel.setMockMethodCallHandler(null);
+  });
+
+  group('hasPermission', () {
+    test('true if platform reports true', () async {
+      expect(await speech.hasPermission, true);
+    });
+    test('false if platform reports false', () async {
+      speechHandler.hasPermissionResult = false;
+      expect(await speech.hasPermission, false);
+    });
+  });
+  group('init', () {
+    test('succeeds on platform success', () async {
+      expect(await speech.initialize(), true);
+      expect(speechHandler.initInvoked, true);
+      expect(speech.isAvailable, true);
+    });
+    test('only invokes once', () async {
+      expect(await speech.initialize(), true);
+      speechHandler.initInvoked = false;
+      expect(await speech.initialize(), true);
+      expect(speechHandler.initInvoked, false);
+    });
+    test('fails on platform failure', () async {
+      speechHandler.initResult = false;
+      expect(await speech.initialize(), false);
+      expect(speech.isAvailable, false);
+    });
+  });
+
+  group('listen', () {
+    test('fails with exception if not initialized', () async {
+      try {
+        await speech.listen();
+        fail("Expected an exception.");
+      } on SpeechToTextNotInitializedException {
+        // This is a good result
+      }
+    });
+    test('fails with exception if init fails', () async {
+      try {
+        speechHandler.initResult = false;
+        await speech.initialize();
+        await speech.listen();
+        fail("Expected an exception.");
+      } on SpeechToTextNotInitializedException {
+        // This is a good result
+      }
+    });
+    test('invokes listen after successful init', () async {
+      await speech.initialize();
+      await speech.listen();
+      expect(speechHandler.listenLocale, isNull);
+      expect(speechHandler.listenInvoked, true);
+    });
+    test('converts platformException to listenFailed', () async {
+      await speech.initialize();
+      speechHandler.listenException = true;
+      try {
+        await speech.listen();
+        fail("Should have thrown");
+      } on ListenFailedException catch (e) {
+        expect(e.details, TestSpeechChannelHandler.listenExceptionDetails);
+      } catch (wrongE) {
+        fail("Should have been ListenFailedException");
+      }
+    });
+    test('stops listen after listenFor duration', () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(listenFor: Duration(seconds: 2));
+        fa.flushMicrotasks();
+        expect(speech.isListening, isTrue);
+        fa.elapse(Duration(seconds: 2));
+        expect(speech.isListening, isFalse);
+      });
+    });
+    test('stops listen after listenFor duration even with speech event',
+        () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(listenFor: Duration(seconds: 1));
+        speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod,
+            TestSpeechChannelHandler.firstRecognizedJson));
+        fa.flushMicrotasks();
+        expect(speech.isListening, isTrue);
+        fa.elapse(Duration(seconds: 1));
+        expect(speech.isListening, isFalse);
+      });
+    });
+    test('stops listen after pauseFor duration with no speech', () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(pauseFor: Duration(seconds: 2));
+        fa.flushMicrotasks();
+        expect(speech.isListening, isTrue);
+        fa.elapse(Duration(seconds: 2));
+        expect(speech.isListening, isFalse);
+      });
+    });
+    test('stops listen after pauseFor with longer listenFor duration',
+        () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(
+            pauseFor: Duration(seconds: 1), listenFor: Duration(seconds: 5));
+        fa.flushMicrotasks();
+        expect(speech.isListening, isTrue);
+        fa.elapse(Duration(seconds: 1));
+        expect(speech.isListening, isFalse);
+      });
+    });
+    test('stops listen after listenFor with longer pauseFor duration',
+        () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(
+            listenFor: Duration(seconds: 1), pauseFor: Duration(seconds: 5));
+        fa.flushMicrotasks();
+        expect(speech.isListening, isTrue);
+        fa.elapse(Duration(seconds: 1));
+        expect(speech.isListening, isFalse);
+      });
+    });
+    test('keeps listening after pauseFor with speech event', () async {
+      fakeAsync((fa) {
+        speech.initialize();
+        fa.flushMicrotasks();
+        speech.listen(pauseFor: Duration(seconds: 2));
+        fa.flushMicrotasks();
+        fa.elapse(Duration(seconds: 1));
+        speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod,
+            TestSpeechChannelHandler.firstRecognizedJson));
+        fa.flushMicrotasks();
+        fa.elapse(Duration(seconds: 1));
+        expect(speech.isListening, isTrue);
+      });
+    });
+    test('uses localeId if provided', () async {
+      await speech.initialize();
+      await speech.listen(localeId: TestSpeechChannelHandler.localeId1);
+      expect(speechHandler.listenInvoked, true);
+      expect(speechHandler.listenLocale, TestSpeechChannelHandler.localeId1);
+    });
+    test('calls speech listener', () async {
+      await speech.initialize();
+      await speech.listen(onResult: listener.onSpeechResult);
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.textRecognitionMethod,
+          TestSpeechChannelHandler.firstRecognizedJson));
+      expect(listener.speechResults, 1);
+      expect(
+          listener.results, [TestSpeechChannelHandler.firstRecognizedResult]);
+      expect(speech.lastRecognizedWords,
+          TestSpeechChannelHandler.firstRecognizedWords);
+    });
+    test('calls speech listener with multiple', () async {
+      await speech.initialize();
+      await speech.listen(onResult: listener.onSpeechResult);
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.textRecognitionMethod,
+          TestSpeechChannelHandler.firstRecognizedJson));
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.textRecognitionMethod,
+          TestSpeechChannelHandler.secondRecognizedJson));
+      expect(listener.speechResults, 2);
+      expect(listener.results, [
+        TestSpeechChannelHandler.firstRecognizedResult,
+        TestSpeechChannelHandler.secondRecognizedResult
+      ]);
+      expect(speech.lastRecognizedWords,
+          TestSpeechChannelHandler.secondRecognizedWords);
+    });
+  });
+
+  group('status callback', () {
+    test('invoked on listen', () async {
+      await speech.initialize(
+          onError: listener.onSpeechError, onStatus: listener.onSpeechStatus);
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.notifyStatusMethod, SpeechToText.listeningStatus));
+      expect(listener.speechStatus, 1);
+      expect(listener.statuses.contains(SpeechToText.listeningStatus), true);
+    });
+  });
+
+  group('soundLevel callback', () {
+    test('invoked on listen', () async {
+      await speech.initialize();
+      await speech.listen(onSoundLevelChange: listener.onSoundLevel);
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.soundLevelChangeMethod,
+          TestSpeechChannelHandler.level1));
+      expect(listener.soundLevel, 1);
+      expect(listener.soundLevels, contains(TestSpeechChannelHandler.level1));
+    });
+    test('sets lastLevel', () async {
+      await speech.initialize();
+      await speech.listen(onSoundLevelChange: listener.onSoundLevel);
+      await speech.processMethodCall(MethodCall(
+          SpeechToText.soundLevelChangeMethod,
+          TestSpeechChannelHandler.level1));
+      expect(speech.lastSoundLevel, TestSpeechChannelHandler.level1);
+    });
+  });
+
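Taken together, the duration tests above fix the timeout contract of listen: listenFor caps the whole session, pauseFor is a silence timeout reset by each speech event, and whichever fires first ends the session. A usage sketch under that reading:

import 'package:speech_to_text/speech_to_text.dart';

Future<void> listenBriefly(SpeechToText speech) async {
  // Stops after 30s total, or after 3s with no new speech events,
  // whichever comes first, matching the behavior pinned by the tests.
  await speech.listen(
    listenFor: Duration(seconds: 30),
    pauseFor: Duration(seconds: 3),
    onResult: (result) => print(result.recognizedWords),
  );
}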
+  group('cancel', () {
+    test('does nothing if not initialized', () async {
+      speech.cancel();
+      expect(speechHandler.cancelInvoked, false);
+    });
+    test('cancels an active listen', () async {
+      await speech.initialize();
+      await speech.listen();
+      await speech.cancel();
+      expect(speechHandler.cancelInvoked, true);
+      expect(speech.isListening, isFalse);
+    });
+  });
+  group('stop', () {
+    test('does nothing if not initialized', () async {
+      speech.stop();
+      expect(speechHandler.stopInvoked, false);
+    });
+    test('stops an active listen', () async {
+      await speech.initialize();
+      await speech.listen();
+      await speech.stop();
+      expect(speechHandler.stopInvoked, true);
+    });
+  });
+  group('error', () {
+    test('notifies handler with transient', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen();
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.transientErrorJson));
+      expect(listener.speechErrors, 1);
+      expect(listener.errors.first.permanent, isFalse);
+    });
+    test('notifies handler with permanent', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen();
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      expect(listener.speechErrors, 1);
+      expect(listener.errors.first.permanent, isTrue);
+    });
+    test('continues listening on transient', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen();
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.transientErrorJson));
+      expect(speech.isListening, isTrue);
+    });
+    test('continues listening on permanent if cancel not explicitly requested',
+        () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen();
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      expect(speech.isListening, isTrue);
+    });
+    test('stops listening on permanent if cancel explicitly requested',
+        () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen(cancelOnError: true);
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      expect(speech.isListening, isFalse);
+    });
+    test('Error not sent after cancel', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen();
+      await speech.cancel();
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      expect(speech.isListening, isFalse);
+      expect(listener.speechErrors, 0);
+    });
+    test('Error still sent after implicit cancel', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      await speech.listen(cancelOnError: true);
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod,
+          TestSpeechChannelHandler.permanentErrorJson));
+      expect(speech.isListening, isFalse);
+      expect(listener.speechErrors, 2);
+    });
+  });
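The error group encodes two easy-to-miss behaviors: a permanent error does not end the session unless cancelOnError: true was passed to listen, and an explicit cancel() suppresses later error callbacks. A sketch of the corresponding setup; the error field names (errorMsg, permanent) follow the JSON fixtures in this diff:

import 'package:speech_to_text/speech_to_text.dart';

Future<void> listenWithErrorHandling(SpeechToText speech) async {
  await speech.initialize(
    onError: (error) =>
        print('error: ${error.errorMsg}, permanent: ${error.permanent}'),
  );
  // cancelOnError ends the session on a permanent error instead of
  // leaving it listening, per the tests above.
  await speech.listen(cancelOnError: true);
}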
+  group('locales', () {
+    test('fails with exception if not initialized', () async {
+      try {
+        await speech.locales();
+        fail("Expected an exception.");
+      } on SpeechToTextNotInitializedException {
+        // This is a good result
+      }
+    });
+    test('system locale null if not initialized', () async {
+      LocaleName current;
+      try {
+        current = await speech.systemLocale();
+        fail("Expected an exception.");
+      } on SpeechToTextNotInitializedException {
+        expect(current, isNull);
+      }
+    });
+    test('handles an empty list', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      List<LocaleName> localeNames = await speech.locales();
+      expect(speechHandler.localesInvoked, isTrue);
+      expect(localeNames, isEmpty);
+    });
+    test('returns expected locales', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      speechHandler.locales.add(TestSpeechChannelHandler.locale1);
+      speechHandler.locales.add(TestSpeechChannelHandler.locale2);
+      List<LocaleName> localeNames = await speech.locales();
+      expect(localeNames, hasLength(speechHandler.locales.length));
+      expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1);
+      expect(localeNames[0].name, TestSpeechChannelHandler.name1);
+      expect(localeNames[1].localeId, TestSpeechChannelHandler.localeId2);
+      expect(localeNames[1].name, TestSpeechChannelHandler.name2);
+    });
+    test('skips incorrect locales', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      speechHandler.locales.add("InvalidJunk");
+      speechHandler.locales.add(TestSpeechChannelHandler.locale1);
+      List<LocaleName> localeNames = await speech.locales();
+      expect(localeNames, hasLength(1));
+      expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1);
+      expect(localeNames[0].name, TestSpeechChannelHandler.name1);
+    });
+    test('system locale matches first returned locale', () async {
+      await speech.initialize(onError: listener.onSpeechError);
+      speechHandler.locales.add(TestSpeechChannelHandler.locale1);
+      speechHandler.locales.add(TestSpeechChannelHandler.locale2);
+      LocaleName current = await speech.systemLocale();
+      expect(current.localeId, TestSpeechChannelHandler.localeId1);
+    });
+  });
+  group('status', () {
+    test('recognized false at start', () async {
+      expect(speech.hasRecognized, isFalse);
+    });
+    test('listening false at start', () async {
+      expect(speech.isListening, isFalse);
+    });
+  });
+  test('available false at start', () async {
+    expect(speech.isAvailable, isFalse);
+  });
+  test('hasError false at start', () async {
+    expect(speech.hasError, isFalse);
+  });
+  test('lastError null at start', () async {
+    expect(speech.lastError, isNull);
+  });
+  test('status empty at start', () async {
+    expect(speech.lastStatus, isEmpty);
+  });
+}
+
+class TestSpeechListener {
+  int speechResults = 0;
+  List<SpeechRecognitionResult> results = [];
+  int speechErrors = 0;
+  List<SpeechRecognitionError> errors = [];
+  int speechStatus = 0;
+  List<String> statuses = [];
+  int soundLevel = 0;
+  List<double> soundLevels = [];
+
+  void onSpeechResult(SpeechRecognitionResult result) {
+    ++speechResults;
+    results.add(result);
+  }
+
+  void onSpeechError(SpeechRecognitionError errorResult) {
+    ++speechErrors;
+    errors.add(errorResult);
+  }
+
+  void onSpeechStatus(String status) {
+    ++speechStatus;
+    statuses.add(status);
+  }
+
+  void onSoundLevel(double level) {
+    ++soundLevel;
+    soundLevels.add(level);
+  }
+}
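Where TestSpeechListener merely counts and records callbacks, the equivalent production wiring looks like the sketch below. Every parameter shown is exercised somewhere in this file; the print bodies are illustrative:

import 'package:speech_to_text/speech_to_text.dart';

Future<void> minimalWiring() async {
  final speech = SpeechToText();
  final available = await speech.initialize(
    onStatus: (status) => print('status: $status'),
    onError: (error) => print('error: ${error.errorMsg}'),
  );
  if (!available) return;
  await speech.listen(
    onResult: (result) =>
        print('${result.recognizedWords} (final: ${result.finalResult})'),
    onSoundLevelChange: (level) => print('level: $level'),
  );
}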
diff --git a/speech_to_text/test/test_speech_channel_handler.dart b/speech_to_text/test/test_speech_channel_handler.dart
new file mode 100644
index 00000000..a55f0670
--- /dev/null
+++ b/speech_to_text/test/test_speech_channel_handler.dart
@@ -0,0 +1,134 @@
+import 'package:flutter/services.dart';
+import 'package:speech_to_text/speech_recognition_error.dart';
+import 'package:speech_to_text/speech_recognition_result.dart';
+import 'package:speech_to_text/speech_to_text.dart';
+
+/// Holds a set of responses and acts as a mock for the platform-specific
+/// implementations, allowing test cases to determine what the result of
+/// a call should be.
+class TestSpeechChannelHandler {
+  final SpeechToText _speech;
+
+  bool listenException = false;
+
+  static const String listenExceptionCode = "listenFailedError";
+  static const String listenExceptionMessage = "Failed";
+  static const String listenExceptionDetails = "Device Listen Failure";
+
+  TestSpeechChannelHandler(this._speech);
+
+  bool initResult = true;
+  bool initInvoked = false;
+  bool listenInvoked = false;
+  bool cancelInvoked = false;
+  bool stopInvoked = false;
+  bool localesInvoked = false;
+  bool hasPermissionResult = true;
+  String listeningStatusResponse = SpeechToText.listeningStatus;
+  String listenLocale;
+  List<String> locales = [];
+  static const String localeId1 = "en_US";
+  static const String localeId2 = "fr_CA";
+  static const String name1 = "English US";
+  static const String name2 = "French Canada";
+  static const String locale1 = "$localeId1:$name1";
+  static const String locale2 = "$localeId2:$name2";
+  static const String firstRecognizedWords = 'hello';
+  static const String secondRecognizedWords = 'hello there';
+  static const double firstConfidence = 0.85;
+  static const double secondConfidence = 0.62;
+  static const String firstRecognizedJson =
+      '{"alternates":[{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}],"finalResult":false}';
+  static const String secondRecognizedJson =
+      '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":false}';
+  static const String finalRecognizedJson =
+      '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":true}';
+  static const SpeechRecognitionWords firstWords =
+      SpeechRecognitionWords(firstRecognizedWords, firstConfidence);
+  static const SpeechRecognitionWords secondWords =
+      SpeechRecognitionWords(secondRecognizedWords, secondConfidence);
+  static final SpeechRecognitionResult firstRecognizedResult =
+      SpeechRecognitionResult([firstWords], false);
+  static final SpeechRecognitionResult secondRecognizedResult =
+      SpeechRecognitionResult([secondWords], false);
+  static final SpeechRecognitionResult finalRecognizedResult =
+      SpeechRecognitionResult([secondWords], true);
+  static const String transientErrorJson =
+      '{"errorMsg":"network","permanent":false}';
+  static const String permanentErrorJson =
+      '{"errorMsg":"network","permanent":true}';
+  static final SpeechRecognitionError firstError =
+      SpeechRecognitionError("network", true);
+  static const double level1 = 0.5;
+  static const double level2 = 10;
+
+  Future methodCallHandler(MethodCall methodCall) async {
+    switch (methodCall.method) {
+      case "has_permission":
+        return hasPermissionResult;
+      case "initialize":
+        initInvoked = true;
+        return initResult;
+      case "cancel":
+        cancelInvoked = true;
+        return true;
+      case "stop":
+        stopInvoked = true;
+        return true;
+      case SpeechToText.listenMethod:
+        listenInvoked = true;
+        if (listenException) {
+          throw PlatformException(
+              code: listenExceptionCode,
+              message: listenExceptionMessage,
+              details: listenExceptionDetails);
+        }
+        listenLocale = methodCall.arguments["localeId"];
+        await _speech.processMethodCall(MethodCall(
+            SpeechToText.notifyStatusMethod, listeningStatusResponse));
+        return initResult;
+      case "locales":
+        localesInvoked = true;
+        return locales;
+      default:
+    }
+    return initResult;
+  }
+
+  void notifyFinalWords() {
+    _speech.processMethodCall(
+        MethodCall(SpeechToText.textRecognitionMethod, finalRecognizedJson));
+  }
+
+  void notifyPartialWords() {
+    _speech.processMethodCall(
+        MethodCall(SpeechToText.textRecognitionMethod, firstRecognizedJson));
+  }
+
+  void notifyPermanentError() {
+    _speech.processMethodCall(
+        MethodCall(SpeechToText.notifyErrorMethod, permanentErrorJson));
+  }
+
+  void notifyTransientError() {
+    _speech.processMethodCall(
+        MethodCall(SpeechToText.notifyErrorMethod, transientErrorJson));
+  }
+
+  void notifySoundLevel() {
+    _speech.processMethodCall(
+        MethodCall(SpeechToText.soundLevelChangeMethod, level2));
+  }
+
+  void setupLocales() {
+    locales.clear();
+    locales.add(locale1);
+    locales.add(locale2);
+  }
+}
diff --git a/speech_to_text/test/test_speech_listener.dart b/speech_to_text/test/test_speech_listener.dart
new file mode 100644
index 00000000..1efcd81c
--- /dev/null
+++ b/speech_to_text/test/test_speech_listener.dart
@@ -0,0 +1,36 @@
+import 'package:speech_to_text/speech_recognition_error.dart';
+import 'package:speech_to_text/speech_recognition_result.dart';
+import 'package:speech_to_text/speech_to_text_provider.dart';
+
+/// Holds the results of notification by the [SpeechToTextProvider]
+class TestSpeechListener {
+  final SpeechToTextProvider _speechProvider;
+
+  bool isListening = false;
+  bool isFinal = false;
+  bool isAvailable = false;
+  bool notified = false;
+  bool hasError = false;
+  SpeechRecognitionResult recognitionResult;
+  SpeechRecognitionError lastError;
+  double soundLevel;
+
+  TestSpeechListener(this._speechProvider);
+
+  void reset() {
+    isListening = false;
+    isFinal = false;
+    isAvailable = false;
+    notified = false;
+  }
+
+  void onNotify() {
+    notified = true;
+    isAvailable = _speechProvider.isAvailable;
+    isListening = _speechProvider.isListening;
+    recognitionResult = _speechProvider.lastResult;
+    hasError = _speechProvider.hasError;
+    lastError = _speechProvider.lastError;
+    soundLevel = _speechProvider.lastLevel;
+  }
+}
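For anyone extending this suite, the scaffolding pattern is uniform: build SpeechToText on its method channel, install TestSpeechChannelHandler as the mock handler, and push platform events through processMethodCall. A condensed sketch of a new test reusing that setup, with the expected words taken from the handler's fixtures:

import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:speech_to_text/speech_to_text.dart';

import 'test_speech_channel_handler.dart';

void main() {
  TestWidgetsFlutterBinding.ensureInitialized();

  test('delivers recognized words', () async {
    final speech = SpeechToText.withMethodChannel(SpeechToText.speechChannel);
    final handler = TestSpeechChannelHandler(speech);
    speech.channel.setMockMethodCallHandler(handler.methodCallHandler);

    await speech.initialize();
    await speech.listen();
    // Simulate a platform-side final result, as the suite above does.
    await speech.processMethodCall(MethodCall(
        SpeechToText.textRecognitionMethod,
        TestSpeechChannelHandler.finalRecognizedJson));
    expect(speech.lastRecognizedWords,
        TestSpeechChannelHandler.secondRecognizedWords);

    speech.channel.setMockMethodCallHandler(null);
  });
}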