Merge branch 'development' into 'master'

Development

See merge request Cloud_Solution/doctor_app_flutter!159
pull/164/head
Mohammad Aljammal 4 years ago
commit 885b212193

Binary files changed (one without preview; five new images, 577 to 814 KiB). Previews not shown.

@@ -33,7 +33,11 @@ PODS:
- Flutter
- shared_preferences_web (0.0.1):
- Flutter
- speech_to_text (0.0.1):
- Flutter
- Try
- SwiftProtobuf (1.9.0)
- Try (2.1.1)
- url_launcher (0.0.1):
- Flutter
- url_launcher_macos (0.0.1):
@@ -58,6 +62,7 @@ DEPENDENCIES:
- shared_preferences (from `.symlinks/plugins/shared_preferences/ios`)
- shared_preferences_macos (from `.symlinks/plugins/shared_preferences_macos/ios`)
- shared_preferences_web (from `.symlinks/plugins/shared_preferences_web/ios`)
- speech_to_text (from `.symlinks/plugins/speech_to_text/ios`)
- url_launcher (from `.symlinks/plugins/url_launcher/ios`)
- url_launcher_macos (from `.symlinks/plugins/url_launcher_macos/ios`)
- url_launcher_web (from `.symlinks/plugins/url_launcher_web/ios`)
@@ -69,6 +74,7 @@ SPEC REPOS:
- OpenTok
- Reachability
- SwiftProtobuf
- Try
EXTERNAL SOURCES:
barcode_scan:
@@ -99,6 +105,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/shared_preferences_macos/ios"
shared_preferences_web:
:path: ".symlinks/plugins/shared_preferences_web/ios"
speech_to_text:
:path: ".symlinks/plugins/speech_to_text/ios"
url_launcher:
:path: ".symlinks/plugins/url_launcher/ios"
url_launcher_macos:
@@ -125,11 +133,13 @@ SPEC CHECKSUMS:
shared_preferences: af6bfa751691cdc24be3045c43ec037377ada40d
shared_preferences_macos: f3f29b71ccbb56bf40c9dd6396c9acf15e214087
shared_preferences_web: 141cce0c3ed1a1c5bf2a0e44f52d31eeb66e5ea9
speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe
SwiftProtobuf: ecbec1be9036d15655f6b3443a1c4ea693c97932
Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96
url_launcher: 6fef411d543ceb26efce54b05a0a40bfd74cbbef
url_launcher_macos: fd7894421cd39320dce5f292fc99ea9270b2a313
url_launcher_web: e5527357f037c87560776e36436bf2b0288b965c
PODFILE CHECKSUM: 649616dc336b3659ac6b2b25159d8e488e042b69
COCOAPODS: 1.9.3
COCOAPODS: 1.10.0.beta.1

@@ -2,9 +2,9 @@ const MAX_SMALL_SCREEN = 660;
const ONLY_NUMBERS = "[0-9]";
const ONLY_LETTERS = "[a-zA-Z &'\"]";
const ONLY_DATE = "[0-9/]";
//const BASE_URL = 'https://hmgwebservices.com/';
const BASE_URL = 'https://uat.hmgwebservices.com/';
const PHARMACY_ITEMS_URL = "Services/Lists.svc/REST/GetPharmcyItems_Region";
const BASE_URL = 'https://hmgwebservices.com/';
//const BASE_URL = 'https://uat.hmgwebservices.com/';
const PHARMACY_ITEMS_URL = "Services/Lists.svc/REST/GetPharmcyItems_Region_enh";
const PHARMACY_LIST_URL = "Services/Patients.svc/REST/GetPharmcyList";
const PATIENT_PROGRESS_NOTE_URL =
"Services/DoctorApplication.svc/REST/GetProgressNoteForInPatient";
@@ -74,6 +74,9 @@ const SEND_ACTIVATION_CODE_BY_OTP_NOTIFICATION_TYPE =
const MEMBER_CHECK_ACTIVATION_CODE_NEW =
'Services/Sentry.svc/REST/MemberCheckActivationCode_New';
const GET_DOC_PROFILES = 'Services/Doctors.svc/REST/GetDocProfiles';
const TRANSFERT_TO_ADMIN = 'LiveCareApi/DoctorApp/TransferToAdmin';
const END_CALL = 'LiveCareApi/DoctorApp/EndCall';
const END_CALL_WITH_CHARGE = 'LiveCareApi/DoctorApp/CompleteCallWithCharge';
var selectedPatientType = 1;
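The three LiveCare endpoints added above are relative paths. A minimal sketch of how they resolve against BASE_URL, assuming plain string concatenation (the BaseAppClient implementation is not part of this diff):

// Illustrative only; assumes the client simply prepends BASE_URL.
void main() {
  print(BASE_URL + TRANSFERT_TO_ADMIN); // https://hmgwebservices.com/LiveCareApi/DoctorApp/TransferToAdmin
  print(BASE_URL + END_CALL);           // https://hmgwebservices.com/LiveCareApi/DoctorApp/EndCall
  print(BASE_URL + END_CALL_WITH_CHARGE); // .../LiveCareApi/DoctorApp/CompleteCallWithCharge
}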

@@ -9,7 +9,7 @@ const Map<String, Map<String, String>> localizedValues = {
'fileNo': {'en': 'File No:', 'ar': 'رقم الملف:'},
'mobileNo': {'en': 'Mobile No', 'ar': 'رقم الموبايل'},
'messagesScreenToolbarTitle': {'en': 'Messages', 'ar': 'الرسائل'},
'mySchedule': {'en': 'My Schedule', 'ar': 'جدولي'},
'mySchedule': {'en': 'Schedule', 'ar': 'جدولي'},
'errorNoSchedule': {
'en': 'You don\'t have any Schedule',
'ar': 'ليس لديك أي جدول زمني'
@@ -216,4 +216,8 @@ const Map<String, Map<String, String>> localizedValues = {
'beingGreat': {'en': 'being great', 'ar': 'رائع'},
'cancel': {'en': 'CANCEL', 'ar': 'الغاء'},
'done': {'en': 'DONE', 'ar': 'تأكيد'},
'resumecall': {'en': 'Resume call', 'ar': 'استئناف المكالمة'},
'endcallwithcharge': {'en': 'End with charge', 'ar': 'ينتهي مع الشحن'},
'endcall': {'en': 'End Call', 'ar': 'إنهاء المكالمة'},
'transfertoadmin': {'en': 'Transfer to admin', 'ar': 'نقل إلى المسؤول'},
};
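A minimal sketch of how the four new keys resolve per locale. The `tr` helper here is hypothetical and only illustrates the lookup pattern; the app itself goes through the TranslationBase getters added later in this diff:

// Hypothetical lookup helper; locale codes 'en'/'ar' follow the map above.
String tr(String key, String languageCode) =>
    localizedValues[key][languageCode];

void main() {
  print(tr('endcall', 'en'));         // End Call
  print(tr('transfertoadmin', 'ar')); // نقل إلى المسؤول
}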

@@ -272,6 +272,160 @@
"search_patient"
]
},
{
"uid": "5a324eddf382f5a1167d7d40325f82c8",
"css": "reject_icon",
"code": 59394,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M681 626.7L554.3 500 681 373.3A38.5 38.5 0 0 0 626.7 318.9L500 445.7 373.3 319A38.5 38.5 0 1 0 318.9 373.3L445.7 500 318.9 626.8A37.2 37.2 0 0 0 318.9 681.1 38.2 38.2 0 0 0 373.3 681.1L500 554.4 626.7 681.1A38.5 38.5 0 0 0 681.1 681.1 38.2 38.2 0 0 0 681 626.7ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A500 500 0 1 0 1000 500 500 500 0 0 0 500 0Z",
"width": 1000
},
"search": [
"reject_icon"
]
},
{
"uid": "2742f64b5e69cc4f39a2dcc5a081ad03",
"css": "approved_icon",
"code": 59403,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M756.3 348.3L714 304.8A9.1 9.1 0 0 0 707.2 302H707.2A8.7 8.7 0 0 0 700.5 304.8L407.2 600.3 300.5 493.5A9.3 9.3 0 0 0 287.1 493.5L244.2 536.3A9.6 9.6 0 0 0 244.2 550L378.8 684.5A42.6 42.6 0 0 0 406.9 698.2 44.6 44.6 0 0 0 434.8 685H435L756.5 362A10.3 10.3 0 0 0 756.3 348.3ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A499.9 499.9 0 1 0 999.9 500 499.9 499.9 0 0 0 500 0Z",
"width": 1000
},
"search": [
"approved_icon"
]
},
{
"uid": "148de09f7fd22c378cdfdbaacaa8e205",
"css": "pending_icon",
"code": 59404,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M809.8 932H797.3C781.5 932 768.2 929.8 766.5 914V913.7C731 583.9 523.3 577.6 523.3 499.9S731.1 415.9 766.5 86V85.9C768.2 70.1 781.5 68 797.3 68H809.8A34.8 34.8 0 0 0 844.7 35.7 34 34 0 0 0 810.7 0H35A34.8 34.8 0 0 0 0 32.3 34 34 0 0 0 34 68H47.2C63 68 76.3 70.2 78.1 85.9V86.2C113.7 416 321.5 422.3 321.5 500S113.7 584 78.3 913.8V914C76.6 929.8 63.3 932 47.5 932H35A34.8 34.8 0 0 0 0.1 964.3 34 34 0 0 0 34.1 1000H810.7A33.9 33.9 0 0 0 844.7 964.3 34.8 34.8 0 0 0 809.8 932ZM197.4 848.9C267 655.2 390.6 678.7 390.6 602.3V467.5C390.6 420.4 301.2 387.6 245.4 311.1A19 19 0 0 1 261.2 281H583.8A18.9 18.9 0 0 1 600 310.6C545.2 387.3 454.2 420.4 454.2 467.4V602.4C454.2 678.1 572.9 657.5 647.8 849 654.7 866.3 649.5 887.8 631.2 887.8H214.1C195.4 887.8 191.1 866.5 197.4 849Z",
"width": 845
},
"search": [
"pending_icon"
]
},
{
"uid": "0bbb324cc39e62b3a4e05639a4f4008f",
"css": "home_icon",
"code": 59407,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M973.1 435L973.1 435 565 27A92 92 0 0 0 434.9 27L27.2 434.7 26.8 435.1A92 92 0 0 0 88 591.9C88.9 591.9 89.9 591.9 90.8 591.9H107.1V892.1A107.9 107.9 0 0 0 214.8 999.9H374.4A29.3 29.3 0 0 0 403.7 970.6V735.3A49.2 49.2 0 0 1 452.9 686.2H547A49.2 49.2 0 0 1 596.2 735.3V970.7A29.3 29.3 0 0 0 625.5 1000H785.1A107.9 107.9 0 0 0 892.8 892.3V592H907.8A92.1 92.1 0 0 0 973 434.9ZM931.6 523.7A33.4 33.4 0 0 1 907.9 533.5H863.5A29.3 29.3 0 0 0 834.2 562.8V892.3A49.2 49.2 0 0 1 785.1 941.4H654.9V735.3A107.9 107.9 0 0 0 547 627.5H452.9A107.9 107.9 0 0 0 345.1 735.2V941.3H215A49.2 49.2 0 0 1 165.8 892.1V562.8A29.3 29.3 0 0 0 136.5 533.5H92.8L91.4 533.5A33.4 33.4 0 0 1 68.4 476.4H68.4L476.3 68.4A33.4 33.4 0 0 1 523.6 68.4L931.4 476.2 931.6 476.4A33.4 33.4 0 0 1 931.6 523.7ZM931.6 523.7",
"width": 1000
},
"search": [
"home_icon"
]
},
{
"uid": "d8fc8a6e9f001147307d87b3b620b86f",
"css": "mail",
"code": 59422,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M1055.6 667.9V111.1A111.2 111.2 0 0 0 944.4 0H111.1A111.2 111.2 0 0 0 0 111.1V666.7A111.2 111.2 0 0 0 111.1 777.8H777.8A46 46 0 0 0 789.7 810C819.2 841 869.8 889 914.5 931.2 936.9 952.3 957.3 971.7 972.1 986A48.3 48.3 0 0 0 1006 1000 50.2 50.2 0 0 0 1055.5 949.3V889.4C1198.9 894.6 1277.9 940.2 1277.9 972.2A27.8 27.8 0 1 0 1333.4 972.2 306 306 0 0 0 1055.6 667.9ZM111.1 55.7H944.4C945.6 55.7 946.6 56.2 947.7 56.3L551.3 381.8A46.3 46.3 0 0 1 506.8 383.6L107.9 56.2C109 56.1 110 55.6 111.1 55.6ZM111.1 722.3A55.6 55.6 0 0 1 55.6 666.8V111.1A55 55 0 0 1 60.3 89L474 428.4A98.2 98.2 0 0 0 527.8 444.5 100.7 100.7 0 0 0 584.2 426.5L995.3 89A55 55 0 0 1 1000 111.1V556.8A48.1 48.1 0 0 0 972.1 569.7C957.4 584.1 936.9 603.4 914.6 624.5 879.9 657.3 841.9 693.2 812.7 722.3H111.1ZM1027.9 833.3A27.8 27.8 0 0 0 1000.2 861.1V935.7C986.8 922.8 970.4 907.3 952.9 890.8 911.6 851.7 865 807.8 836 777.8 865 747.8 911.6 703.8 952.9 664.7 971.6 647.1 989 630.6 1000.2 618.2V694.4A27.8 27.8 0 0 0 1027.9 722.2 250.3 250.3 0 0 1 1259.3 877.5 525.9 525.9 0 0 0 1027.9 833.3Z",
"width": 1333
},
"search": [
"mail"
]
},
{
"uid": "afe3a55435e46aeeeeae8f60731d4706",
"css": "medicinesearch",
"code": 59423,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M986 125.2A171.6 171.6 0 1 0 1157.6 296.8 171.8 171.8 0 0 0 986 125.2ZM986 449.3A152.6 152.6 0 1 1 1138.6 296.8 152.7 152.7 0 0 1 986 449.3ZM862.1 306.4H881.1A114.6 114.6 0 0 1 995.5 192V173A133.5 133.5 0 0 0 862.1 306.4ZM1332.6 589.6L1346.1 576.2 1209.6 439.6 1196.2 453 1172.3 429.1A228.3 228.3 0 1 0 1118.4 483.3L1142.3 507.2 1128.8 520.6 1265.1 657 1278.6 643.5 1155.7 520.6 1209.6 466.7ZM985.9 506.6A209.7 209.7 0 1 1 1195.6 296.9 210 210 0 0 1 985.9 506.6ZM1155.7 493.5L1133.6 471.4A228.6 228.6 0 0 0 1160.6 444.5L1182.7 466.6ZM416.7 700V733.3H100A16.7 16.7 0 0 1 83.3 716.6V566.6A16.7 16.7 0 0 1 100 550H416.7V583.2H116.6V699.9ZM33.3 866.7H416.7V900H33.3A66.7 66.7 0 0 0 99.9 966.5H466.6A67.2 67.2 0 0 0 479.9 965.2L486.6 997.9A100.5 100.5 0 0 1 466.6 999.9H99.9A100.1 100.1 0 0 1-0.1 899.9V583.3H33.2ZM79.2 316.7H83.3V266.7H70.9A37.5 37.5 0 0 1 33.3 229.2V58.3A58.4 58.4 0 0 1 91.7 0H400V33.3H91.7A25 25 0 0 0 66.7 58.3V229.2A4.2 4.2 0 0 0 70.9 233.3H495.9A4.2 4.2 0 0 0 500.1 229.2V58.3A25 25 0 0 0 475.1 33.3H433.4V0H475.1A58.4 58.4 0 0 1 533.4 58.3V229.2A37.5 37.5 0 0 1 495.9 266.7H483.4V316.7H487.6A79.4 79.4 0 0 1 566.8 395.9V400H533.5V395.9A45.9 45.9 0 0 0 487.6 350H79.2A45.9 45.9 0 0 0 33.3 395.9V466.7H450V500H33.3V550H0V395.8A79.4 79.4 0 0 1 79.2 316.7ZM450 316.7V266.7H116.7V316.7ZM466.6 66.7H433.3V200H466.6ZM400 66.7H366.7V200H400ZM333.3 66.7H300V200H333.3ZM266.6 66.7H233.3V200H266.6ZM200 66.7H166.7V200H200ZM133.3 66.7H100V200H133.3ZM416.7 783.3H316.7V816.6H416.7ZM283.3 783.3H83.3V816.6H283.3ZM450 595.8A162.7 162.7 0 0 1 612.5 433.3H620.8A163 163 0 0 1 780 563.3L747.4 570A129.5 129.5 0 0 0 620.8 466.7H612.5A129.3 129.3 0 0 0 483.3 595.8V683.3H583.3V716.6H483.3V749.9H566.7V783.2H483.3V837.3A129.3 129.3 0 0 0 612.5 966.5H620.8A129.1 129.1 0 0 0 661.4 960L671.9 991.6A162.3 162.3 0 0 1 620.8 999.8H612.5A162.7 162.7 0 0 1 450 837.3ZM600 800A200 200 0 1 1 800 1000 200 200 0 0 1 600 800ZM633.3 800A165.6 165.6 0 0 0 652.2 877L877 652.2A166.6 166.6 0 0 0 633.3 800ZM694.5 929L838.2 785.3 861.7 808.9 722.9 947.8A166.6 166.6 0 0 0 947.7 722.9L892.2 778.5 868.6 754.9 928.9 694.6A168.3 168.3 0 0 0 905.4 671L670.9 905.4A168.1 168.1 0 0 0 694.5 929Z",
"width": 1346
},
"search": [
"medicinesearch"
]
},
{
"uid": "c129853724095c9f90addf42325e14bc",
"css": "qr-code-1",
"code": 59424,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M970.7 746.1A29.3 29.3 0 0 0 941.4 775.4V941.4H775.4A29.3 29.3 0 1 0 775.4 1000H970.7A29.3 29.3 0 0 0 1000 970.7V775.4A29.3 29.3 0 0 0 970.7 746.1ZM224.6 941.4H58.6V775.4A29.3 29.3 0 1 0 0 775.4V970.7A29.3 29.3 0 0 0 29.3 1000H224.6A29.3 29.3 0 1 0 224.6 941.4ZM970.7 0H775.4A29.3 29.3 0 1 0 775.4 58.6H941.4V224.6A29.3 29.3 0 1 0 1000 224.6V29.3A29.3 29.3 0 0 0 970.7 0ZM29.3 253.9A29.3 29.3 0 0 0 58.6 224.6V58.6H224.6A29.3 29.3 0 1 0 224.6 0H29.3A29.3 29.3 0 0 0 0 29.3V224.6A29.3 29.3 0 0 0 29.3 253.9ZM617.2 412.1H851.6A29.3 29.3 0 0 0 880.8 382.8V148.4A29.3 29.3 0 0 0 851.6 119.2H617.2A29.3 29.3 0 0 0 587.9 148.4V382.8A29.3 29.3 0 0 0 617.2 412.1ZM646.5 177.8H822.2V353.5H646.5ZM382.8 587.9H148.4A29.3 29.3 0 0 0 119.2 617.2V851.6A29.3 29.3 0 0 0 148.4 880.8H382.8A29.3 29.3 0 0 0 412.1 851.6V617.2A29.3 29.3 0 0 0 382.8 587.9ZM353.5 822.3H177.8V646.5H353.5ZM880.9 851.6V734.4A29.3 29.3 0 0 0 822.3 734.4V822.3H734.4A29.3 29.3 0 1 0 734.4 880.9H851.6A29.3 29.3 0 0 0 880.9 851.6ZM617.2 529.3H822.2V617.2A29.3 29.3 0 1 0 880.8 617.2V500A29.3 29.3 0 0 0 851.6 470.7H617.2A29.3 29.3 0 0 0 617.2 529.3ZM617.2 822.3H529.3V734.4A29.3 29.3 0 1 0 470.7 734.4V851.6A29.3 29.3 0 0 0 500 880.8H617.2A29.3 29.3 0 0 0 617.2 822.2ZM382.8 119.2H148.4A29.3 29.3 0 0 0 119.2 148.4V382.8A29.3 29.3 0 0 0 148.4 412.1H382.8A29.3 29.3 0 0 0 412.1 382.8V148.4A29.3 29.3 0 0 0 382.8 119.2ZM353.5 353.5H177.8V177.8H353.5ZM734.4 705.1H617.2A29.3 29.3 0 0 0 617.2 763.7H734.4A29.3 29.3 0 0 0 734.4 705.1ZM500 646.5H734.4A29.3 29.3 0 0 0 734.4 587.9H529.3V500A29.3 29.3 0 0 0 470.7 500V617.2A29.3 29.3 0 0 0 500 646.5ZM148.4 529.3H226.6A29.3 29.3 0 1 0 226.6 470.7H148.4A29.3 29.3 0 1 0 148.4 529.3ZM382.8 470.7H343.8A29.3 29.3 0 1 0 343.8 529.3H382.8A29.3 29.3 0 0 0 382.8 470.7ZM500 412.1A29.3 29.3 0 0 0 529.3 382.8V265.6A29.3 29.3 0 1 0 470.7 265.6V382.8A29.3 29.3 0 0 0 500 412.1Z",
"width": 1000
},
"search": [
"qr-code"
]
},
{
"uid": "6df6856cee070f6f2f1be832aff1920e",
"css": "referral-1",
"code": 59425,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M111.9 752A107.4 107.4 0 0 1 55.5 658.2 110.6 110.6 0 0 1 276.6 658.2 107.4 107.4 0 0 1 220.1 752.1 166.2 166.2 0 0 1 332 908.9V983.4A16.7 16.7 0 0 1 315.3 1000.1H16.7A16.7 16.7 0 0 1 0 983.4V908.8A166.2 166.2 0 0 1 111.9 752ZM243.3 658.2A77.3 77.3 0 1 0 166 732.7 76 76 0 0 0 243.3 658.2ZM33.3 966.6H298.8V908.8A132.6 132.6 0 0 0 33.4 908.8ZM111.8 201.7A107.4 107.4 0 0 1 55.5 107.8 110.6 110.6 0 0 1 276.6 107.8 107.4 107.4 0 0 1 220.3 201.7 166.2 166.2 0 0 1 332.1 358.5V433A16.7 16.7 0 0 1 315.4 449.7H16.7A16.7 16.7 0 0 1 0 433V358.4A166.2 166.2 0 0 1 111.8 201.7ZM243.3 107.8A77.3 77.3 0 0 0 88.9 107.8 77.3 77.3 0 0 0 243.3 107.8ZM33.3 416.3H298.8V358.4A132.6 132.6 0 0 0 33.4 358.4ZM462.6 492.6L368.8 411.8A16.7 16.7 0 0 1 390.6 386.5L494.7 476.2H612.1A16.7 16.7 0 1 1 612.1 509.6H494.5L390.3 595A18.6 18.6 0 0 1 366.8 592.7 16.3 16.3 0 0 1 369.2 569.2ZM707.4 452.7A126.1 126.1 0 0 1 676.3 369.9 128.7 128.7 0 0 1 933.8 369.9 126 126 0 0 1 902.7 452.7 184 184 0 0 1 1000 613.3V711.4A16.7 16.7 0 0 1 983.3 728.1H626.8A16.7 16.7 0 0 1 610.1 711.4V613.3A184 184 0 0 1 707.4 452.7ZM805.1 275.8A94.1 94.1 0 1 0 900.5 369.9 94.8 94.8 0 0 0 805.1 275.8ZM823.1 495.9A130.7 130.7 0 0 1 805.1 497.3 128.3 128.3 0 0 1 787.1 495.9V573.8L805.1 596.8 823.1 573.8ZM643.5 694.7H695.9V613A16.7 16.7 0 0 1 729.3 613V694.7H880.8V613A16.7 16.7 0 0 1 914.2 613V694.7H966.7V613.3A150.6 150.6 0 0 0 875.1 476.6 129 129 0 0 1 856.4 486.7V585.4L818.2 634.3A16.7 16.7 0 0 1 791.9 634.3L753.6 585.4V486.7A129.8 129.8 0 0 1 734.9 476.6 150.6 150.6 0 0 0 643.4 613.3V694.7Z",
"width": 1000
},
"search": [
"referral"
]
},
{
"uid": "609e74ef20b926b8e212d28a24bf0f36",
"css": "referred",
"code": 59426,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M887.8 751.4A107.7 107.7 0 0 0 944.3 657.2 110.9 110.9 0 0 0 722.6 657.2 107.7 107.7 0 0 0 779.1 751.4 166.6 166.6 0 0 0 666.9 908.6V983.2A16.7 16.7 0 0 0 683.7 999.9H983.3A16.7 16.7 0 0 0 1000 983.2V908.6A166.6 166.6 0 0 0 887.8 751.4ZM756.1 657.2A77.5 77.5 0 1 1 833.5 731.9 76.1 76.1 0 0 1 756.1 657.2ZM966.6 966.5H700.5V908.6A133 133 0 0 1 966.6 908.6ZM887.9 202.2A107.7 107.7 0 0 0 944.4 108.1 110.9 110.9 0 0 0 722.7 108.1 107.7 107.7 0 0 0 779.2 202.2 166.6 166.6 0 0 0 667.1 359.3V434.1A16.7 16.7 0 0 0 683.8 450.8H983.3A16.7 16.7 0 0 0 1000 434.1V359.3A166.6 166.6 0 0 0 887.9 202.2ZM756.2 108.1A77.5 77.5 0 0 1 911 108.1 77.5 77.5 0 0 1 756.2 108.1ZM966.6 417.4H700.5V359.3A133 133 0 0 1 966.6 359.3ZM537.5 492.7L631.5 411.6A16.7 16.7 0 0 0 609.7 386.3L505.4 476.3H387.6A16.7 16.7 0 1 0 387.6 509.7H505.5L610 595.4A18.7 18.7 0 0 0 633.5 593 16.3 16.3 0 0 0 631.1 569.5ZM293.3 452.6A126.3 126.3 0 0 0 324.5 369.6 129.1 129.1 0 0 0 66.4 369.6 126.3 126.3 0 0 0 97.5 452.6 184.4 184.4 0 0 0 0 613.7V712A16.7 16.7 0 0 0 16.7 728.7H374.1A16.7 16.7 0 0 0 390.8 712V613.7A184.4 184.4 0 0 0 293.3 452.6ZM195.5 275.3A94.3 94.3 0 1 1 99.9 369.6 95.1 95.1 0 0 1 195.4 275.3ZM177.5 496A131.1 131.1 0 0 0 195.5 497.4 128.5 128.5 0 0 0 213.6 496V574.2L195.4 597.2 177.4 574.1ZM357.5 695.3H304.8V613.3A16.7 16.7 0 0 0 271.4 613.3V695.3H119.4V613.3A16.7 16.7 0 0 0 86 613.3V695.3H33.4V613.7A151 151 0 0 1 125.2 476.7 129.3 129.3 0 0 0 144 486.7V585.8L182.3 634.8A16.7 16.7 0 0 0 208.6 634.8L246.9 585.7V486.7A130.2 130.2 0 0 0 265.7 476.7 151 151 0 0 1 357.4 613.6V695.2Z",
"width": 1000
},
"search": [
"referred"
]
},
{
"uid": "dea3e85838e83dcd8818ec36186eae5e",
"css": "searchpatient",
"code": 59427,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M1142.7 436.8A165.2 165.2 0 1 0 1307.8 601.9 165.4 165.4 0 0 0 1142.7 436.8ZM1142.7 748.8A146.9 146.9 0 1 1 1289.5 601.9 147 147 0 0 1 1142.7 748.8ZM1023.4 611.1H1041.7A110.3 110.3 0 0 1 1151.8 500.9V482.7A128.5 128.5 0 0 0 1023.4 611.1ZM1476.4 883.8L1489.3 870.9 1357.9 739.4 1345 752.4 1322 729.4A219.8 219.8 0 1 0 1270.1 781.5L1293.1 804.5 1280.2 817.4 1411.4 948.7 1424.4 935.7 1306 817.3 1357.9 765.4ZM1142.6 803.9A201.9 201.9 0 1 1 1344.4 602 202.2 202.2 0 0 1 1142.6 803.9ZM1306 791.3L1284.7 770.1A220.1 220.1 0 0 0 1310.7 744.1L1332.1 765.5ZM48.4 483.9H301.4A96.4 96.4 0 0 1 276.7 346.5 305.1 305.1 0 0 1 257.9 242V201.9A201.8 201.8 0 0 1 564.5 29.2 201.8 201.8 0 0 1 871 201.8V205.4L887 177.2 957.1 299.9A80.6 80.6 0 0 1 869.7 417.5 96.9 96.9 0 0 1 785.1 499.2 226.4 226.4 0 0 1 645.1 662.2V696.2L780.4 720.8A267.4 267.4 0 0 1 999.9 983.9V999.9H129V983.9A267.4 267.4 0 0 1 348.6 720.8L483.9 696.2V662.3A224.6 224.6 0 0 1 396.8 602.3C396 602.6 395.1 602.8 394.3 603.1A92.8 92.8 0 0 1 352.9 612.9H48.4A48.4 48.4 0 0 1 0 564.5V532.3A48.4 48.4 0 0 1 48.4 483.9ZM339.5 465.8C339.3 461.1 338.7 456.4 338.7 451.7V341A64.2 64.2 0 0 0 339.5 465.7ZM838.7 242V201.9A169.6 169.6 0 0 0 575.1 60.8L564.5 67.8 554 60.8A169.6 169.6 0 0 0 290.3 201.9V242A273.2 273.2 0 0 0 302.3 322.2 96.2 96.2 0 0 1 338.7 308.1V282.4L403.2 234V193.5A32.3 32.3 0 0 1 435.5 161.3H693.5A32.3 32.3 0 0 1 725.8 193.5V233.9L790.3 282.3V307.8A95.4 95.4 0 0 1 810.8 313.9 78.8 78.8 0 0 1 817 299.9L837.7 263.6C838.3 256.4 838.7 249.2 838.7 242ZM435.5 290.4H693.6V193.5H435.5ZM887.9 387.2A47.6 47.6 0 0 0 929.3 316L887.1 242.2 844.9 315.9A47.6 47.6 0 0 0 887.6 387.1ZM838.8 403.2A79.4 79.4 0 0 1 807.4 348.1 63.2 63.2 0 0 0 790.4 340.9V451.6C790.4 456.4 790 461 789.7 465.7A64.5 64.5 0 0 0 838.7 403.2ZM758.1 451.6V298.4L725.9 274.3V290.4A32.3 32.3 0 0 1 693.6 322.6H435.5A32.3 32.3 0 0 1 403.2 290.4V274.3L370.9 298.4V451.6A195.2 195.2 0 0 0 374.4 486.4 92.8 92.8 0 0 1 394.3 493.6 60.6 60.6 0 0 0 421.2 500H532.2A48.4 48.4 0 1 1 532.2 596.7H436.9A193.2 193.2 0 0 0 758.1 451.6ZM532.3 564.5A16 16 0 1 0 532.3 532.2H516.3V564.5ZM637.4 727.6A80.7 80.7 0 0 1 583.6 771.6 64.5 64.5 0 0 0 645.1 806.4C680.7 806.4 709.6 784.7 709.6 758A36.4 36.4 0 0 0 704.4 739.7ZM161.8 967.8H967.2A235 235 0 0 0 774.7 752.6L740.8 746.5A64.8 64.8 0 0 1 742 758.2C742 802.6 698.6 838.8 645.2 838.8A102.9 102.9 0 0 1 564.6 802.6 102.9 102.9 0 0 1 483.9 838.7C430.4 838.7 387.1 802.5 387.1 758.1A65.9 65.9 0 0 1 388.2 746.4L354.4 752.6A235 235 0 0 0 161.8 967.8ZM424.5 739.9A36.4 36.4 0 0 0 419.4 758.2C419.4 784.9 448.3 806.6 483.9 806.6A64.5 64.5 0 0 0 545.4 771.8 80.6 80.6 0 0 1 491.6 727.7ZM516 677.4V693.4A48.4 48.4 0 1 0 612.8 693.4V672.1A222.6 222.6 0 0 1 516 672.1ZM379.6 574.3A92.9 92.9 0 0 1 421 564.5H483.9V532.3H421.2A93.1 93.1 0 0 1 379.9 522.5 60.6 60.6 0 0 0 352.9 516H129V580.5H352.9A60.6 60.6 0 0 0 379.9 574.3ZM32.1 564.5A16 16 0 0 0 48.1 580.5H96.8V516H48.4A16 16 0 0 0 32.4 532ZM524.9 368.4L507 341.5 410.3 406.1 428.2 432.9ZM718.5 405.8L621.8 341.3 603.9 368.1 700.6 432.7Z",
"width": 1489
},
"search": [
"searchpatient"
]
},
{
"uid": "5af6e4cb377acd0ad97c464f6813ab1a",
"css": "sync",
"code": 59428,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M1000 168.7V346.9A62.5 62.5 0 0 1 937.5 409.4H759.3A62.5 62.5 0 0 1 759.3 284.4H807.1A375.4 375.4 0 0 0 150.8 363.4 62.5 62.5 0 0 1 34.3 317.9 500.4 500.4 0 0 1 875 169.1V169.1A62.5 62.5 0 0 1 1000 169.1ZM930.4 601.1A62.5 62.5 0 0 0 849.4 636.5 375.4 375.4 0 0 1 197.4 721.8H240.7A62.5 62.5 0 1 0 240.7 596.8H62.5A62.5 62.5 0 0 0 0 659.3V837.5A62.5 62.5 0 0 0 125 837.5V830.9A503 503 0 0 0 215.2 910.8 500.4 500.4 0 0 0 966 682.2 62.5 62.5 0 0 0 930.3 601.2Z",
"width": 1000
},
"search": [
"sync"
]
},
{
"uid": "740f78c2b53c8cc100a8b0d283bbd34f",
"css": "home_icon-1",
@@ -369,62 +523,6 @@
"search": [
"scdedule_icon_active"
]
},
{
"uid": "5a324eddf382f5a1167d7d40325f82c8",
"css": "reject_icon",
"code": 59402,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M681 626.7L554.3 500 681 373.3A38.5 38.5 0 0 0 626.7 318.9L500 445.7 373.3 319A38.5 38.5 0 1 0 318.9 373.3L445.7 500 318.9 626.8A37.2 37.2 0 0 0 318.9 681.1 38.2 38.2 0 0 0 373.3 681.1L500 554.4 626.7 681.1A38.5 38.5 0 0 0 681.1 681.1 38.2 38.2 0 0 0 681 626.7ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A500 500 0 1 0 1000 500 500 500 0 0 0 500 0Z",
"width": 1000
},
"search": [
"reject_icon"
]
},
{
"uid": "2742f64b5e69cc4f39a2dcc5a081ad03",
"css": "approved_icon",
"code": 59403,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M756.3 348.3L714 304.8A9.1 9.1 0 0 0 707.2 302H707.2A8.7 8.7 0 0 0 700.5 304.8L407.2 600.3 300.5 493.5A9.3 9.3 0 0 0 287.1 493.5L244.2 536.3A9.6 9.6 0 0 0 244.2 550L378.8 684.5A42.6 42.6 0 0 0 406.9 698.2 44.6 44.6 0 0 0 434.8 685H435L756.5 362A10.3 10.3 0 0 0 756.3 348.3ZM500 67.3A432.5 432.5 0 1 1 194 194 429.8 429.8 0 0 1 500 67.3M500 0A499.9 499.9 0 1 0 999.9 500 499.9 499.9 0 0 0 500 0Z",
"width": 1000
},
"search": [
"approved_icon"
]
},
{
"uid": "148de09f7fd22c378cdfdbaacaa8e205",
"css": "pending_icon",
"code": 59404,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M809.8 932H797.3C781.5 932 768.2 929.8 766.5 914V913.7C731 583.9 523.3 577.6 523.3 499.9S731.1 415.9 766.5 86V85.9C768.2 70.1 781.5 68 797.3 68H809.8A34.8 34.8 0 0 0 844.7 35.7 34 34 0 0 0 810.7 0H35A34.8 34.8 0 0 0 0 32.3 34 34 0 0 0 34 68H47.2C63 68 76.3 70.2 78.1 85.9V86.2C113.7 416 321.5 422.3 321.5 500S113.7 584 78.3 913.8V914C76.6 929.8 63.3 932 47.5 932H35A34.8 34.8 0 0 0 0.1 964.3 34 34 0 0 0 34.1 1000H810.7A33.9 33.9 0 0 0 844.7 964.3 34.8 34.8 0 0 0 809.8 932ZM197.4 848.9C267 655.2 390.6 678.7 390.6 602.3V467.5C390.6 420.4 301.2 387.6 245.4 311.1A19 19 0 0 1 261.2 281H583.8A18.9 18.9 0 0 1 600 310.6C545.2 387.3 454.2 420.4 454.2 467.4V602.4C454.2 678.1 572.9 657.5 647.8 849 654.7 866.3 649.5 887.8 631.2 887.8H214.1C195.4 887.8 191.1 866.5 197.4 849Z",
"width": 845
},
"search": [
"pending_icon"
]
},
{
"uid": "0bbb324cc39e62b3a4e05639a4f4008f",
"css": "home_icon",
"code": 59394,
"src": "custom_icons",
"selected": true,
"svg": {
"path": "M973.1 435L973.1 435 565 27A92 92 0 0 0 434.9 27L27.2 434.7 26.8 435.1A92 92 0 0 0 88 591.9C88.9 591.9 89.9 591.9 90.8 591.9H107.1V892.1A107.9 107.9 0 0 0 214.8 999.9H374.4A29.3 29.3 0 0 0 403.7 970.6V735.3A49.2 49.2 0 0 1 452.9 686.2H547A49.2 49.2 0 0 1 596.2 735.3V970.7A29.3 29.3 0 0 0 625.5 1000H785.1A107.9 107.9 0 0 0 892.8 892.3V592H907.8A92.1 92.1 0 0 0 973 434.9ZM931.6 523.7A33.4 33.4 0 0 1 907.9 533.5H863.5A29.3 29.3 0 0 0 834.2 562.8V892.3A49.2 49.2 0 0 1 785.1 941.4H654.9V735.3A107.9 107.9 0 0 0 547 627.5H452.9A107.9 107.9 0 0 0 345.1 735.2V941.3H215A49.2 49.2 0 0 1 165.8 892.1V562.8A29.3 29.3 0 0 0 136.5 533.5H92.8L91.4 533.5A33.4 33.4 0 0 1 68.4 476.4H68.4L476.3 68.4A33.4 33.4 0 0 1 523.6 68.4L931.4 476.2 931.6 476.4A33.4 33.4 0 0 1 931.6 523.7ZM931.6 523.7",
"width": 1000
},
"search": [
"home_icon"
]
}
]
}

@@ -11,7 +11,7 @@
/// fonts:
/// - asset: fonts/DoctorApp.ttf
///
///
///
///
import 'package:flutter/widgets.dart';
@@ -21,50 +21,31 @@ class DoctorApp {
static const _kFontFam = 'DoctorApp';
static const _kFontPkg = null;
static const IconData femaleicon =
IconData(0xe800, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData male =
IconData(0xe801, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData home_icon =
IconData(0xe802, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData home_icon_active =
IconData(0xe803, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData menu_icon =
IconData(0xe804, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData menu_icon_active =
IconData(0xe805, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData message_icon =
IconData(0xe806, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData message_icon_active =
IconData(0xe807, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData scdedule_icon_active =
IconData(0xe808, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData schedule_icon =
IconData(0xe809, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData discharge_patient =
IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData in_patient_white =
IconData(0xe80d, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData lab_results =
IconData(0xe80e, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData operations =
IconData(0xe813, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData out_patient =
IconData(0xe814, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData patient =
IconData(0xe815, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData qr_code =
IconData(0xe816, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData radiology =
IconData(0xe817, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData referral =
IconData(0xe818, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData search_patient =
IconData(0xe81a, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData reject_icon =
IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData approved_icon =
IconData(0xe80b, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData pending_icon =
IconData(0xe80c, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData femaleicon = IconData(0xe800, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData male = IconData(0xe801, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData reject_icon = IconData(0xe802, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData home_icon_active = IconData(0xe803, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData menu_icon = IconData(0xe804, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData menu_icon_active = IconData(0xe805, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData message_icon = IconData(0xe806, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData message_icon_active = IconData(0xe807, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData scdedule_icon_active = IconData(0xe808, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData schedule_icon = IconData(0xe809, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData discharge_patient = IconData(0xe80a, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData approved_icon = IconData(0xe80b, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData pending_icon = IconData(0xe80c, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData in_patient_white = IconData(0xe80d, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData lab_results = IconData(0xe80e, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData home_icon = IconData(0xe80f, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData operations = IconData(0xe813, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData out_patient = IconData(0xe814, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData patient = IconData(0xe815, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData radiology = IconData(0xe817, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData mail = IconData(0xe81e, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData medicinesearch = IconData(0xe81f, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData qr_code = IconData(0xe820, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData referral = IconData(0xe821, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData referred = IconData(0xe822, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData search_patient = IconData(0xe823, fontFamily: _kFontFam, fontPackage: _kFontPkg);
static const IconData sync_icon = IconData(0xe824, fontFamily: _kFontFam, fontPackage: _kFontPkg);
}
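A usage sketch for the regenerated glyph set. The widgets are standard Flutter; the import path for the DoctorApp class is an assumption, since this diff does not show where the file lives:

import 'package:flutter/material.dart';
// Assumed location of the generated icon class (not shown in this diff):
// import 'package:doctor_app_flutter/icons_app/doctor_app_icons.dart';

Widget liveCareIconsRow() => Row(
      children: const <Widget>[
        Icon(DoctorApp.qr_code),
        Icon(DoctorApp.referral),
        Icon(DoctorApp.medicinesearch),
        Icon(DoctorApp.sync_icon),
      ],
    );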

@@ -0,0 +1,28 @@
class EndCallReq {
int vCID;
String tokenID;
String generalid;
int doctorId;
bool isDestroy;
EndCallReq(
{this.vCID, this.tokenID, this.generalid, this.doctorId, this.isDestroy});
EndCallReq.fromJson(Map<String, dynamic> json) {
vCID = json['VC_ID'];
tokenID = json['TokenID'];
generalid = json['generalid'];
doctorId = json['DoctorId'];
isDestroy = json['IsDestroy'];
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['VC_ID'] = this.vCID;
data['TokenID'] = this.tokenID;
data['generalid'] = this.generalid;
data['DoctorId'] = this.doctorId;
data['IsDestroy'] = this.isDestroy;
return data;
}
}
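A round-trip sketch for the new model; all values are placeholders:

void main() {
  final req = EndCallReq(
      vCID: 3245,
      tokenID: '<token>',
      generalid: '<general-id>',
      doctorId: 1485,
      isDestroy: false);
  final json = req.toJson();
  // Keys follow the service contract above: VC_ID, TokenID, generalid, DoctorId, IsDestroy.
  final parsed = EndCallReq.fromJson(json);
  assert(parsed.vCID == req.vCID && parsed.isDestroy == req.isDestroy);
}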

@@ -0,0 +1,32 @@
class StartCallRes {
String result;
String openSessionID;
String openTokenID;
bool isAuthenticated;
int messageStatus;
StartCallRes(
{this.result,
this.openSessionID,
this.openTokenID,
this.isAuthenticated,
this.messageStatus});
StartCallRes.fromJson(Map<String, dynamic> json) {
result = json['Result'];
openSessionID = json['OpenSessionID'];
openTokenID = json['OpenTokenID'];
isAuthenticated = json['IsAuthenticated'];
messageStatus = json['MessageStatus'];
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['Result'] = this.result;
data['OpenSessionID'] = this.openSessionID;
data['OpenTokenID'] = this.openTokenID;
data['IsAuthenticated'] = this.isAuthenticated;
data['MessageStatus'] = this.messageStatus;
return data;
}
}
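A sketch of how the provider below turns a raw response map into this model; the field values are placeholders:

void main() {
  final response = <String, dynamic>{
    'Result': 'OK',
    'OpenSessionID': '<opentok-session-id>',
    'OpenTokenID': '<opentok-token>',
    'IsAuthenticated': true,
    'MessageStatus': 1,
  };
  final res = StartCallRes.fromJson(response);
  print(res.openSessionID); // passed to VideoChannel.openVideoCallScreen as kSessionId
}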

@@ -0,0 +1,36 @@
class TransferToAdminReq {
int vCID;
String tokenID;
String generalid;
int doctorId;
bool isOutKsa;
String notes;
TransferToAdminReq(
{this.vCID,
this.tokenID,
this.generalid,
this.doctorId,
this.isOutKsa,
this.notes});
TransferToAdminReq.fromJson(Map<String, dynamic> json) {
vCID = json['VC_ID'];
tokenID = json['TokenID'];
generalid = json['generalid'];
doctorId = json['DoctorId'];
isOutKsa = json['IsOutKsa'];
notes = json['Notes'];
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['VC_ID'] = this.vCID;
data['TokenID'] = this.tokenID;
data['generalid'] = this.generalid;
data['DoctorId'] = this.doctorId;
data['IsOutKsa'] = this.isOutKsa;
data['Notes'] = this.notes;
return data;
}
}

@@ -1,3 +1,4 @@
/*
*@author: Ibrahim Albitar
*@Date:27/4/2020
@@ -25,7 +26,7 @@ class PharmaciesListRequestModel {
this.languageID = 2,
this.stamp = '2020-04-23T21:01:21.492Z',
this.ipAdress = '11.11.11.11',
this.versionID = 1.2,
this.versionID = 5.3,
this.tokenID,
this.sessionID = 'e29zoooEJ4',
this.isLoginForDoctorApp = true,
@@ -61,4 +62,4 @@ class PharmaciesListRequestModel {
data['Channel'] = this.channel;
return data;
}
}
}

@@ -10,7 +10,7 @@ class PharmaciesItemsRequestModel {
String pHRItemName;
int pageIndex = 0;
int pageSize = 20;
double versionID = 5.2;
double versionID = 5.3;
int channel = 3;
int languageID = 2;
String iPAdress = "10.20.10.20";
@@ -24,7 +24,7 @@ class PharmaciesItemsRequestModel {
{this.pHRItemName,
this.pageIndex = 0,
this.pageSize = 20,
this.versionID = 5.2,
this.versionID = 5.3,
this.channel = 3,
this.languageID = 2,
this.iPAdress = "10.20.10.20",

@@ -2,9 +2,12 @@ import 'dart:convert';
import 'package:doctor_app_flutter/client/base_app_client.dart';
import 'package:doctor_app_flutter/config/config.dart';
import 'package:doctor_app_flutter/models/livecare/end_call_req.dart';
import 'package:doctor_app_flutter/models/livecare/get_panding_req_list.dart';
import 'package:doctor_app_flutter/models/livecare/get_pending_res_list.dart';
import 'package:doctor_app_flutter/models/livecare/start_call_req.dart';
import 'package:doctor_app_flutter/models/livecare/start_call_res.dart';
import 'package:doctor_app_flutter/models/livecare/transfer_to_admin.dart';
import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart';
import 'package:doctor_app_flutter/util/helpers.dart';
import 'package:flutter/cupertino.dart';
@@ -14,7 +17,9 @@ class LiveCareProvider with ChangeNotifier {
DrAppSharedPreferances sharedPref = new DrAppSharedPreferances();
List<LiveCarePendingListResponse> liveCarePendingList = [];
var inCallResponse = {};
StartCallRes inCallResponse;
var transferToAdmin = {};
var endCallResponse = {};
bool isFinished = true;
bool hasError = false;
String errorMsg = '';
@@ -45,27 +50,76 @@ class LiveCareProvider with ChangeNotifier {
return Future.value(liveCarePendingList);
}
Future<Map> startCall(request, bool isReCall) async {
Future<StartCallRes> startCall(request, bool isReCall) async {
var profile = await sharedPref.getObj(DOCTOR_PROFILE);
resetDefaultValues();
/* the request model is not same hence added manually */
var newRequest = new StartCallReq();
newRequest.clinicId = profile["ClinicID"];
newRequest.vCID = request["VC_ID"];
newRequest.vCID = request.vCID; //["VC_ID"];
newRequest.isrecall = isReCall;
newRequest.doctorId = profile["DoctorID"];
newRequest.isOutKsa = request["IsOutKSA"];
newRequest.isOutKsa = request.isOutKSA; //["IsOutKSA"];
newRequest.projectName = profile["ProjectName"];
newRequest.docotrName = profile["DoctorName"];
newRequest.clincName = profile["ClinicDescription"];
newRequest.clincName = profile["ClinicDescription"];
newRequest.docSpec = profile["DoctorTitleForProfile"];
newRequest.generalid = 'Cs2020@2016\$2958';
isFinished = false;
await BaseAppClient.post(START_LIVECARE_CALL,
onSuccess: (response, statusCode) async {
isFinished = true;
inCallResponse = response;
inCallResponse = StartCallRes.fromJson(response);
}, onFailure: (String error, int statusCode) {
isFinished = true;
throw error;
}, body: newRequest.toJson());
return Future.value(inCallResponse);
}
transfterToAdmin(request, notes) async {
var profile = await sharedPref.getObj(DOCTOR_PROFILE);
var newRequest = new TransferToAdminReq();
newRequest.doctorId = profile["DoctorID"];
newRequest.isOutKsa = request.isOutKSA;
newRequest.generalid = 'Cs2020@2016\$2958';
newRequest.vCID = request.vCID; //["VC_ID"];
newRequest.notes = await BaseAppClient.post(TRANSFERT_TO_ADMIN,
onSuccess: (response, statusCode) async {
isFinished = true;
transferToAdmin = response;
}, onFailure: (String error, int statusCode) {
isFinished = true;
throw error;
}, body: newRequest.toJson());
return Future.value(inCallResponse);
}
endCall(request, isPaitent, doctorID) async {
var newRequest = new EndCallReq();
newRequest.doctorId = doctorID; //profile["DoctorID"];
newRequest.generalid = 'Cs2020@2016\$2958';
newRequest.vCID = request.vCID; //["VC_ID"];
newRequest.isDestroy = isPaitent;
await BaseAppClient.post(END_CALL, onSuccess: (response, statusCode) async {
isFinished = true;
endCallResponse = response;
}, onFailure: (String error, int statusCode) {
isFinished = true;
throw error;
}, body: newRequest.toJson());
return Future.value(inCallResponse);
}
endCallWithCharge(vcID, doctorID) async {
var newRequest = new EndCallReq();
newRequest.vCID = vcID;
newRequest.doctorId = doctorID;
newRequest.generalid = 'Cs2020@2016\$2958';
await BaseAppClient.post(END_CALL_WITH_CHARGE,
onSuccess: (response, statusCode) async {
isFinished = true;
endCallResponse = response;
}, onFailure: (String error, int statusCode) {
isFinished = true;
throw error;
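In the transfterToAdmin method above, the notes argument is never copied onto the request; the awaited POST result is assigned to newRequest.notes instead, and the method returns inCallResponse. A minimal sketch of the presumably intended order, assuming the endpoint expects the notes in the request body (the service contract is not shown in this diff):

transfterToAdmin(request, notes) async {
  var profile = await sharedPref.getObj(DOCTOR_PROFILE);
  var newRequest = new TransferToAdminReq();
  newRequest.doctorId = profile["DoctorID"];
  newRequest.isOutKsa = request.isOutKSA;
  newRequest.generalid = 'Cs2020@2016\$2958';
  newRequest.vCID = request.vCID;
  newRequest.notes = notes; // carry the caller's notes on the request before posting
  isFinished = false;
  await BaseAppClient.post(TRANSFERT_TO_ADMIN,
      onSuccess: (response, statusCode) async {
    isFinished = true;
    transferToAdmin = response;
  }, onFailure: (String error, int statusCode) {
    isFinished = true;
    throw error;
  }, body: newRequest.toJson());
  return Future.value(transferToAdmin); // return the transfer response, not inCallResponse
}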

@@ -15,16 +15,15 @@ class MedicineProvider with ChangeNotifier {
String errorMsg = '';
PharmaciesItemsRequestModel _itemsRequestModel =
PharmaciesItemsRequestModel();
PharmaciesItemsRequestModel();
PharmaciesListRequestModel _listRequestModel = PharmaciesListRequestModel();
clearPharmacyItemsList(){
clearPharmacyItemsList() {
pharmacyItemsList.clear();
notifyListeners();
}
getMedicineItem(String itemName) async {
getMedicineItem(String itemName) async {
_itemsRequestModel.pHRItemName = itemName;
resetDefaultValues();
pharmacyItemsList.clear();
@@ -32,37 +31,37 @@ class MedicineProvider with ChangeNotifier {
try {
await BaseAppClient.post(PHARMACY_ITEMS_URL,
onSuccess: (dynamic response, int statusCode) {
pharmacyItemsList = response['ListPharmcy_Region'];
hasError = false;
isFinished = true;
errorMsg = "Done";
}, onFailure: (String error, int statusCode) {
isFinished = true;
hasError = true;
errorMsg = error;
}, body: _itemsRequestModel.toJson());
pharmacyItemsList = response['ListPharmcy_Region_enh'];
hasError = false;
isFinished = true;
errorMsg = "Done";
}, onFailure: (String error, int statusCode) {
isFinished = true;
hasError = true;
errorMsg = error;
}, body: _itemsRequestModel.toJson());
notifyListeners();
} catch (error) {
throw error;
}
}
getPharmaciesList(int itemId) async {
getPharmaciesList(int itemId) async {
resetDefaultValues();
try {
_listRequestModel.itemID = itemId;
isFinished = true;
await BaseAppClient.post(PHARMACY_LIST_URL,
onSuccess: (dynamic response, int statusCode) {
pharmaciesList = response['PharmList'];
hasError = false;
isFinished = true;
errorMsg = "Done";
}, onFailure: (String error, int statusCode) {
isFinished = true;
hasError = true;
errorMsg = error;
}, body: _listRequestModel.toJson());
pharmaciesList = response['PharmList'];
hasError = false;
isFinished = true;
errorMsg = "Done";
}, onFailure: (String error, int statusCode) {
isFinished = true;
hasError = true;
errorMsg = error;
}, body: _listRequestModel.toJson());
notifyListeners();
} catch (error) {
throw error;

@@ -72,8 +72,9 @@ const String PATIENT_INSURANCE_APPROVALS =
const String VITAL_SIGN_DETAILS = 'patients/vital-sign-details';
const String BODY_MEASUREMENTS = 'patients/body-measurements';
const String IN_PATIENT_PRESCRIPTIONS_DETAILS = 'patients/prescription-details';
const String VIDEO_CALL = 'video-call';
// const String VIDEO_CALL = 'video-call';
const String LIVECARE_PENDING_LIST = 'livecare-pendinglist';
// const String LIVECARE_END_DIALOG = 'video-call/EndCallDialogBox';
var routes = {
ROOT: (_) => RootPage(),
HOME: (_) => LandingPage(),
@@ -108,6 +109,7 @@ var routes = {
VITAL_SIGN_DETAILS: (_) => VitalSignDetailsScreen(),
BODY_MEASUREMENTS: (_) => VitalSignItemDetailsScreen(),
IN_PATIENT_PRESCRIPTIONS_DETAILS: (_) => InpatientPrescriptionDetailsScreen(),
VIDEO_CALL: (_) => VideoCallPage(),
LIVECARE_PENDING_LIST: (_) => LiveCarePandingListScreen()
// VIDEO_CALL: (_) => VideoCallPage(patientData: null),
LIVECARE_PENDING_LIST: (_) => LiveCarePandingListScreen(),
// LIVECARE_END_DIALOG: (_) => EndCallDialogBox()
};

File diff suppressed because it is too large.

@@ -1,5 +1,6 @@
import 'package:doctor_app_flutter/config/size_config.dart';
import 'package:doctor_app_flutter/providers/livecare_provider.dart';
import 'package:doctor_app_flutter/screens/live_care/video_call.dart';
import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart';
import 'package:doctor_app_flutter/util/helpers.dart';
import 'package:doctor_app_flutter/widgets/shared/app_scaffold_widget.dart';
@@ -204,12 +205,20 @@ class _LiveCarePandingListState extends State<LiveCarePandingListScreen> {
.green, //Colors.black,
onPressed: () => {
_isInit = true,
sharedPref.setObj(
LIVE_CARE_PATIENT,
item),
Navigator.of(context)
.pushNamed(
VIDEO_CALL)
// sharedPref.setObj(
// LIVE_CARE_PATIENT,
// item),
// Navigator.of(context)
// .pushNamed(
// VIDEO_CALL,
// item)
Navigator.push(
context,
MaterialPageRoute(
builder: (context) =>
VideoCallPage(
item,
context)))
},
),
)
@@ -250,3 +259,13 @@ class _LiveCarePandingListState extends State<LiveCarePandingListScreen> {
);
}
}
MyGlobals myGlobals = new MyGlobals();
class MyGlobals {
GlobalKey _scaffoldKey;
MyGlobals() {
_scaffoldKey = GlobalKey();
}
GlobalKey get scaffoldKey => _scaffoldKey;
}

@@ -1,16 +1,25 @@
import 'dart:async';
import 'package:doctor_app_flutter/models/livecare/get_pending_res_list.dart';
import 'package:doctor_app_flutter/models/livecare/session_status_model.dart';
import 'package:doctor_app_flutter/models/livecare/start_call_res.dart';
import 'package:doctor_app_flutter/providers/livecare_provider.dart';
import 'package:doctor_app_flutter/screens/live_care/panding_list.dart';
import 'package:doctor_app_flutter/util/VideoChannel.dart';
import 'package:doctor_app_flutter/util/dr_app_shared_pref.dart';
import 'package:doctor_app_flutter/util/translations_delegate_base.dart';
import 'package:flutter/material.dart';
import 'package:doctor_app_flutter/config/shared_pref_kay.dart';
import 'package:provider/provider.dart';
import 'package:doctor_app_flutter/util/dr_app_toast_msg.dart';
import 'package:doctor_app_flutter/util/helpers.dart';
import '../../routes.dart';
class VideoCallPage extends StatefulWidget {
final LiveCarePendingListResponse patientData;
final listContext;
VideoCallPage(this.patientData, this.listContext);
@override
_VideoCallPageState createState() => _VideoCallPageState();
}
@@ -24,65 +33,67 @@ class _VideoCallPageState extends State<VideoCallPage> {
LiveCareProvider _liveCareProvider;
bool _isInit = true;
var _tokenData;
var patientData = {};
bool isTransfer = false;
String image_url = 'https://hmgwebservices.com/Images/MobileImages/DUBAI/';
//bool _isOutOfStuck = false;
Helpers helpers = new Helpers();
var doctorprofile = {};
var notes;
@override
void didChangeDependencies() {
super.didChangeDependencies();
if (_isInit) {
_liveCareProvider = Provider.of<LiveCareProvider>(context);
startCall();
startCall(false);
}
_isInit = false;
}
void connectOpenTok(tokenData) async {
void connectOpenTok(StartCallRes tokenData) async {
_tokenData = tokenData;
//var profile = await sharedPref.getObj(DOCTOR_PROFILE);
var token = await sharedPref.getString(TOKEN);
doctorprofile = await sharedPref.getObj(DOCTOR_PROFILE);
/* opentok functionalites need to be written */
await VideoChannel.openVideoCallScreen(
kToken:
'T1==cGFydG5lcl9pZD00NjgwMzIyNCZzaWc9NWRhNmExMzU4ZDViZGU3OTA5NDY4ODRhNzI4ZGUxZTRmMjZmNzcwMjpzZXNzaW9uX2lkPTFfTVg0ME5qZ3dNekl5Tkg1LU1UVTVNelk0TXpZek9EWXdNMzV1Y0V4V1lWUlZTbTVIY3k5dVdHWm1NMWxPYTNjelpIVi1mZyZjcmVhdGVfdGltZT0xNTkzNjgzNjYyJm5vbmNlPTAuODAxMzMzMzUxMDQwNzE5NSZyb2xlPXB1Ymxpc2hlciZleHBpcmVfdGltZT0xNTk2Mjc1NjYyJmluaXRpYWxfbGF5b3V0X2NsYXNzX2xpc3Q9',
kSessionId:
'1_MX40NjgwMzIyNH5-MTU5MzY4MzYzODYwM35ucExWYVRVSm5Hcy9uWGZmM1lOa3czZHV-fg',
kApiKey: '46803224',
vcId: 3245,
tokenID: "hfkjshdf347r8743",
kToken: tokenData.openTokenID,
//'T1==cGFydG5lcl9pZD00NjgwMzIyNCZzaWc9NWRhNmExMzU4ZDViZGU3OTA5NDY4ODRhNzI4ZGUxZTRmMjZmNzcwMjpzZXNzaW9uX2lkPTFfTVg0ME5qZ3dNekl5Tkg1LU1UVTVNelk0TXpZek9EWXdNMzV1Y0V4V1lWUlZTbTVIY3k5dVdHWm1NMWxPYTNjelpIVi1mZyZjcmVhdGVfdGltZT0xNTkzNjgzNjYyJm5vbmNlPTAuODAxMzMzMzUxMDQwNzE5NSZyb2xlPXB1Ymxpc2hlciZleHBpcmVfdGltZT0xNTk2Mjc1NjYyJmluaXRpYWxfbGF5b3V0X2NsYXNzX2xpc3Q9',
kSessionId: tokenData.openSessionID,
//'1_MX40NjgwMzIyNH5-MTU5MzY4MzYzODYwM35ucExWYVRVSm5Hcy9uWGZmM1lOa3czZHV-fg',
kApiKey: '46209962',
vcId: widget.patientData.vCID,
tokenID: token, //"hfkjshdf347r8743",
generalId: "Cs2020@2016\$2958",
doctorId: 1485,
doctorId: doctorprofile['DoctorID'],
onFailure: (String error) {
//TODO handling Failure
//changeRoute(context);
},
onCallEnd: () {
//TODO handling onCallEnd
WidgetsBinding.instance.addPostFrameCallback((_) {
changeRoute(context);
});
},
onCallNotRespond: (SessionStatusModel sessionStatusModel) {
//TODO handling onCalcallNotRespondlEnd
WidgetsBinding.instance.addPostFrameCallback((_) {
changeRoute(context);
});
});
}
String getTimerTime(int start) {
int minutes = (start ~/ 60);
String sMinute = '';
if (minutes.toString().length == 1) {
sMinute = '0' + minutes.toString();
} else
sMinute = minutes.toString();
int seconds = (start % 60);
String sSeconds = '';
if (seconds.toString().length == 1) {
sSeconds = '0' + seconds.toString();
} else
sSeconds = seconds.toString();
return sMinute + ':' + sSeconds;
}
startCall(bool isRecall) async {
//patientData = await sharedPref.getObj(LIVE_CARE_PATIENT);
_liveCareProvider.startCall(widget.patientData, isRecall).then((result) {
// //startTimmer();
setState(() {
_start = 1;
});
startCall() async {
patientData = await sharedPref.getObj(LIVE_CARE_PATIENT);
_liveCareProvider.startCall(patientData, false).then((result) {
connectOpenTok(result);
}).catchError((error) =>
{helpers.showErrorToast(error), Navigator.of(context).pop()});
@@ -96,134 +107,231 @@ class _VideoCallPageState extends State<VideoCallPage> {
@override
Widget build(BuildContext context) {
return Scaffold(
body: SafeArea(
child: Container(
height: MediaQuery.of(context).size.height,
width: MediaQuery.of(context).size.width,
decoration: BoxDecoration(
color: Colors.white,
body: Container(
height: MediaQuery.of(context).size.height,
width: MediaQuery.of(context).size.width,
decoration: BoxDecoration(
color: Colors.white,
),
padding: EdgeInsets.all(50.0),
child: Column(
mainAxisAlignment: MainAxisAlignment.start,
mainAxisSize: MainAxisSize.max,
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
SizedBox(
height: 10.0,
),
padding: EdgeInsets.all(50.0),
child: Column(
mainAxisAlignment: MainAxisAlignment.start,
mainAxisSize: MainAxisSize.max,
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
SizedBox(
height: 10.0,
),
Text(
'Dailing...',
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w300,
fontSize: 15),
),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
Text(
patientData["PatientName"],
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w900,
fontSize: 20),
),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
Container(
child: Text(
_timmer == '' ? 'Connecting' : 'Connected',
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w300,
fontSize: 15),
)),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
ClipRRect(
borderRadius: BorderRadius.circular(200.0),
child: Image.network(
patientData["Gender"] == "1"
? image_url + 'unkown.png'
: image_url + 'unkowwn_female.png',
height: 200.0,
width: 200.0,
),
),
SizedBox(
height: MediaQuery.of(context).size.height * .2,
),
Container(
width: 70.0,
height: 70.0,
child: FloatingActionButton(
onPressed: () {
Navigator.of(context).pop();
},
elevation: 30.0,
shape: CircleBorder(side: BorderSide(color: Colors.red)),
mini: false,
child: Icon(
Icons.call_end,
color: Colors.red,
size: 35,
),
backgroundColor: Colors.red[100],
))
],
Text(
_start == 0 ? 'Dailing' : 'Connected',
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w300,
fontSize: 15),
),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
Text(
widget.patientData.patientName,
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w900,
fontSize: 20),
),
),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
Container(
child: Text(
_start == 0 ? 'Connecting...' : _timmer.toString(),
style: TextStyle(
color: Colors.deepPurpleAccent,
fontWeight: FontWeight.w300,
fontSize: 15),
)),
SizedBox(
height: MediaQuery.of(context).size.height * 0.02,
),
ClipRRect(
borderRadius: BorderRadius.circular(200.0),
child: Image.network(
image_url + 'unkown.png',
height: 200.0,
width: 200.0,
),
),
SizedBox(
height: MediaQuery.of(context).size.height * .2,
),
Container(
width: 70.0,
height: 70.0,
child: FloatingActionButton(
onPressed: () {
Navigator.of(context).pop();
},
elevation: 30.0,
shape: CircleBorder(side: BorderSide(color: Colors.red)),
mini: false,
child: Icon(
Icons.call_end,
color: Colors.red,
size: 35,
),
backgroundColor: Colors.red[100],
))
],
),
);
));
}
changeRoute(con) async {
// await Future.delayed(Duration(seconds: 1), () {
_showAlert(con);
//});
}
}
class FunctionalButton extends StatefulWidget {
final title;
final icon;
final Function() onPressed;
_showAlert(BuildContext context) async {
await showDialog(
context: context,
builder: (dialogContex) => AlertDialog(content: StatefulBuilder(
builder: (BuildContext context, StateSetter setState) {
return Container(
height: MediaQuery.of(context).size.height * 0.7,
width: MediaQuery.of(context).size.width * .9,
child: Stack(
fit: StackFit.loose,
overflow: Overflow.visible,
children: <Widget>[
Positioned(
right: -40.0,
top: -40.0,
child: InkResponse(
onTap: () {
Navigator.of(context, rootNavigator: true)
.pop('dialog');
Navigator.of(context).pop();
},
child: CircleAvatar(
child: Icon(Icons.close),
backgroundColor: Colors.red,
),
),
),
Center(
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisSize: MainAxisSize.min,
children: <Widget>[
Padding(
padding: EdgeInsets.all(8.0),
child: RaisedButton(
onPressed: () => {endCall()},
child:
Text(TranslationBase.of(context).endcall),
color: Colors.red,
textColor: Colors.white,
)),
Padding(
padding: EdgeInsets.all(8.0),
child: RaisedButton(
onPressed: () => {resumeCall()},
child:
Text(TranslationBase.of(context).resumecall),
color: Colors.green[900],
textColor: Colors.white,
),
),
Padding(
padding: EdgeInsets.all(8.0),
child: RaisedButton(
onPressed: () => {endCallWithCharge()},
child: Text(TranslationBase.of(context)
.endcallwithcharge),
textColor: Colors.white,
),
),
Padding(
padding: EdgeInsets.all(8.0),
child: RaisedButton(
onPressed: () => {
setState(() => {isTransfer = true})
},
child: Text(
TranslationBase.of(context).transfertoadmin),
color: Colors.yellow[900],
),
),
isTransfer == true
? Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
Text('Notes'),
Padding(
padding: EdgeInsets.all(5.0),
child: TextField(
maxLines: 3,
controller: notes,
decoration: InputDecoration.collapsed(
hintText:
"Enter your notes here"),
)),
Center(
child: RaisedButton(
onPressed: () =>
{this.transferToAdmin(notes)},
child: Text('Transfer'),
color: Colors.yellow[900],
))
],
)
: SizedBox()
],
))
],
));
})));
Navigator.pop(context);
}
const FunctionalButton({Key key, this.title, this.icon, this.onPressed})
: super(key: key);
resumeCall() {
closeRoute();
startCall(true);
}
@override
_FunctionalButtonState createState() => _FunctionalButtonState();
}
transferToAdmin(notes) {
closeRoute();
_liveCareProvider
.transfterToAdmin(widget.patientData, notes)
.then((result) {
connectOpenTok(result);
}).catchError((error) =>
{helpers.showErrorToast(error), Navigator.of(context).pop()});
}
class _FunctionalButtonState extends State<FunctionalButton> {
@override
Widget build(BuildContext context) {
return Column(
mainAxisAlignment: MainAxisAlignment.start,
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisSize: MainAxisSize.min,
children: <Widget>[
RawMaterialButton(
onPressed: widget.onPressed,
splashColor: Colors.deepPurpleAccent,
fillColor: Colors.white,
elevation: 10.0,
shape: CircleBorder(),
child: Padding(
padding: const EdgeInsets.all(15.0),
child: Icon(
widget.icon,
size: 30.0,
color: Colors.deepPurpleAccent,
),
),
),
Container(
margin: EdgeInsets.symmetric(vertical: 10.0, horizontal: 2.0),
child: Text(
widget.title,
style: TextStyle(fontSize: 15.0, color: Colors.deepPurpleAccent),
),
)
],
);
endCall() {
closeRoute();
_liveCareProvider
.endCall(widget.patientData, false, doctorprofile['DoctorID'])
.then((result) {
print(result);
}).catchError((error) =>
{helpers.showErrorToast(error), Navigator.of(context).pop()});
}
endCallWithCharge() {
_liveCareProvider
.endCallWithCharge(widget.patientData.vCID, doctorprofile['DoctorID'])
.then((result) {
closeRoute();
print('end callwith charge');
print(result);
}).catchError((error) =>
{helpers.showErrorToast(error), Navigator.of(context).pop()});
}
closeRoute() {
Navigator.of(context).pop();
}
}
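The transfer dialog above feeds the untyped `notes` field straight into TextField.controller and into transferToAdmin. A minimal sketch, assuming the intent is a TextEditingController whose text is forwarded on transfer; the class and parameter names here are illustrative, not part of the diff:

import 'package:flutter/material.dart';

// Hypothetical extraction of the notes section of the end-call dialog.
class TransferNotesSection extends StatelessWidget {
  final TextEditingController notesController;
  final void Function(String notes) onTransfer;

  const TransferNotesSection({Key key, this.notesController, this.onTransfer})
      : super(key: key);

  @override
  Widget build(BuildContext context) {
    return Column(
      crossAxisAlignment: CrossAxisAlignment.start,
      children: <Widget>[
        Text('Notes'),
        Padding(
          padding: EdgeInsets.all(5.0),
          child: TextField(
            maxLines: 3,
            controller: notesController,
            decoration:
                InputDecoration.collapsed(hintText: 'Enter your notes here'),
          ),
        ),
        Center(
          child: RaisedButton(
            onPressed: () => onTransfer(notesController.text),
            child: Text('Transfer'),
            color: Colors.yellow[900],
          ),
        ),
      ],
    );
  }
}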

@@ -13,8 +13,14 @@ import 'package:doctor_app_flutter/widgets/shared/app_texts_widget.dart';
import 'package:doctor_app_flutter/widgets/shared/dr_app_circular_progress_Indeicator.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
import '../../util/extenstions.dart';
import 'package:doctor_app_flutter/util/translations_delegate_base.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
import 'dart:math';
DrAppSharedPreferances sharedPref = DrAppSharedPreferances();
@@ -30,20 +36,55 @@ class _MedicineSearchState extends State<MedicineSearchScreen> {
var data;
final myController = TextEditingController();
Helpers helpers = new Helpers();
bool _hasSpeech = false;
MedicineProvider _medicineProvider;
String _currentLocaleId = "";
bool _isInit = true;
final SpeechToText speech = SpeechToText();
String lastStatus = '';
// String lastWords;
List<LocaleName> _localeNames = [];
String lastError;
double level = 0.0;
double minSoundLevel = 50000;
double maxSoundLevel = -50000;
String reconizedWord;
@override
void didChangeDependencies() {
super.didChangeDependencies();
if (_isInit) {
_medicineProvider = Provider.of<MedicineProvider>(context);
requestPermissions();
initSpeechState();
}
_isInit = false;
}
void requestPermissions() async {
Map<Permission, PermissionStatus> statuses = await [
Permission.microphone,
].request();
}
Future<void> initSpeechState() async {
bool hasSpeech = await speech.initialize(
onError: errorListener, onStatus: statusListener);
// if (hasSpeech) {
// _localeNames = await speech.locales();
// var systemLocale = await speech.systemLocale();
_currentLocaleId = TranslationBase.of(context).locale.languageCode == 'en'
? 'en-GB'
: 'ar-SA'; // systemLocale.localeId;
// }
if (!mounted) return;
setState(() {
_hasSpeech = hasSpeech;
});
}
@override
Widget build(BuildContext context) {
return AppScaffold(
@@ -64,6 +105,18 @@ class _MedicineSearchState extends State<MedicineSearchScreen> {
searchMedicine(context);
},
textInputAction: TextInputAction.search,
prefix: IconButton(
icon: Icon(Icons.mic),
color:
lastStatus == 'listening' ? Colors.red : Colors.grey,
onPressed: () {
myController.text = '';
setState(() {
lastStatus = 'listening';
});
startVoiceSearch();
}),
inputFormatter: ONLY_LETTERS),
),
Container(
@@ -131,7 +184,7 @@ class _MedicineSearchState extends State<MedicineSearchScreen> {
["ItemDescription"],
url:
_medicineProvider.pharmacyItemsList[index]
["ProductImageBase64"],
["ImageThumbUrl"],
),
onTap: () {
Navigator.push(
@@ -166,4 +219,62 @@ class _MedicineSearchState extends State<MedicineSearchScreen> {
}
_medicineProvider.getMedicineItem(myController.text);
}
startVoiceSearch() {
// lastWords = "";
lastError = "";
speech.listen(
onResult: resultListener,
listenFor: Duration(seconds: 10),
localeId: _currentLocaleId,
onSoundLevelChange: soundLevelListener,
cancelOnError: true,
partialResults: true,
onDevice: true,
listenMode: ListenMode.confirmation);
setState(() {});
}
void resultListener(SpeechRecognitionResult result) {
setState(() {
// lastWords = "${result.recognizedWords} - ${result.finalResult}";
reconizedWord = result.recognizedWords;
lastStatus = '';
myController.text = reconizedWord;
Future.delayed(const Duration(seconds: 2), () {
searchMedicine(context);
});
});
}
void errorListener(SpeechRecognitionError error) {
// print("Received error status: $error, listening: ${speech.isListening}");
setState(() {
lastError = "${error.errorMsg} - ${error.permanent}";
});
}
void statusListener(String status) {
// print(
// "Received listener status: $status, listening: ${speech.isListening}");
setState(() {
lastStatus = status;
});
}
// _switchLang(selectedVal) {
// setState(() {
// _currentLocaleId = selectedVal;
// });
// print(selectedVal);
// }
void soundLevelListener(double level) {
minSoundLevel = min(minSoundLevel, level);
maxSoundLevel = max(maxSoundLevel, level);
// print("sound level $level: $minSoundLevel - $maxSoundLevel ");
setState(() {
this.level = level;
});
}
}
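startVoiceSearch above begins listening regardless of whether speech.initialize succeeded. A guarded variant of the same call, as a sketch; it also drops onDevice: true, which restricts recognition to on-device engines and is an assumption about the original intent:

// Sketch: only listen when initialization succeeded and no session is running.
startVoiceSearchGuarded() {
  if (!_hasSpeech || speech.isListening) {
    return;
  }
  lastError = '';
  speech.listen(
      onResult: resultListener,
      listenFor: Duration(seconds: 10),
      localeId: _currentLocaleId,
      onSoundLevelChange: soundLevelListener,
      cancelOnError: true,
      partialResults: true,
      listenMode: ListenMode.confirmation);
  setState(() {
    lastStatus = 'listening';
  });
}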

@@ -235,6 +235,12 @@ class TranslationBase {
String get beingGreat => localizedValues['beingGreat'][locale.languageCode];
String get cancel => localizedValues['cancel'][locale.languageCode];
String get done => localizedValues['done'][locale.languageCode];
String get resumecall => localizedValues['resumecall'][locale.languageCode];
String get endcallwithcharge =>
localizedValues['endcallwithcharge'][locale.languageCode];
String get endcall => localizedValues['endcall'][locale.languageCode];
String get transfertoadmin =>
localizedValues['transfertoadmin'][locale.languageCode];
}
class TranslationBaseDelegate extends LocalizationsDelegate<TranslationBase> {

@ -273,7 +273,7 @@ class _LoginFormState extends State<LoginForm> {
"ProjectName": "",
"DoctorImageURL": "UNKNOWN",
"LogInTokenID": preRes['LogInTokenID'],
"VersionID": 1.2
"VersionID": 5.3
};
authProv.insertDeviceImei(imeiInfo).then((res) {
if (res['MessageStatus'] == 1) {

@ -47,8 +47,8 @@ class _MedicineItemWidgetState extends State<MedicineItemWidget> {
width: 39,
child: ClipRRect(
borderRadius: BorderRadius.all(Radius.circular(7)),
child: Image.memory(
dataFromBase64String(widget.url),
child: Image.network(
widget.url,
height: SizeConfig.imageSizeMultiplier * 15,
width: SizeConfig.imageSizeMultiplier * 15,
fit: BoxFit.cover,

@ -8,61 +8,63 @@ import 'package:flutter/services.dart';
// DESCRIPTION : Custom Text Form Field for app.
class AppTextFormField extends FormField<String> {
AppTextFormField({
FormFieldSetter<String> onSaved,
String inputFormatter,
FormFieldValidator<String> validator,
ValueChanged<String> onChanged,
GestureTapCallback onTap,
TextEditingController controller,
bool autovalidate = true,
TextInputType textInputType,
String hintText,
FocusNode focusNode,
TextInputAction textInputAction,
ValueChanged<String> onFieldSubmitted,
}) : super(
onSaved: onSaved,
validator: validator,
autovalidate: autovalidate,
builder: (FormFieldState<String> state) {
return Column(
children: <Widget>[
TextFormField(
focusNode: focusNode,
keyboardType: textInputType,
inputFormatters: [WhitelistingTextInputFormatter(RegExp(inputFormatter)),],
onChanged: onChanged?? (value){
state.didChange(value);
},
textInputAction: textInputAction,
onFieldSubmitted: onFieldSubmitted,
decoration: InputDecoration(
hintText: hintText,
hintStyle: TextStyle(fontSize: SizeConfig.textMultiplier * 2),
enabledBorder: OutlineInputBorder(
borderRadius: BorderRadius.all(Radius.circular(10)),
borderSide: BorderSide(color: Color(0xff707070)),
),
focusedBorder: OutlineInputBorder(
borderRadius: BorderRadius.all(Radius.circular(10)),
)
//BorderRadius.all(Radius.circular(20));
),
onTap: onTap,
controller: controller,
),
state.hasError?
Text(
state.errorText,
style: TextStyle(
color: Colors.red
),
) :
Container()
],
);
}
);
AppTextFormField(
{FormFieldSetter<String> onSaved,
String inputFormatter,
FormFieldValidator<String> validator,
ValueChanged<String> onChanged,
GestureTapCallback onTap,
TextEditingController controller,
bool autovalidate = true,
TextInputType textInputType,
String hintText,
FocusNode focusNode,
TextInputAction textInputAction,
ValueChanged<String> onFieldSubmitted,
IconButton prefix})
: super(
onSaved: onSaved,
validator: validator,
autovalidate: autovalidate,
builder: (FormFieldState<String> state) {
return Column(
children: <Widget>[
TextFormField(
focusNode: focusNode,
keyboardType: textInputType,
inputFormatters: [
WhitelistingTextInputFormatter(RegExp(inputFormatter)),
],
onChanged: onChanged ??
(value) {
state.didChange(value);
},
textInputAction: textInputAction,
onFieldSubmitted: onFieldSubmitted,
decoration: InputDecoration(
hintText: hintText,
                    suffixIcon: prefix, // the `prefix` parameter is rendered as a trailing icon
hintStyle:
TextStyle(fontSize: SizeConfig.textMultiplier * 2),
enabledBorder: OutlineInputBorder(
borderRadius: BorderRadius.all(Radius.circular(10)),
borderSide: BorderSide(color: Color(0xff707070)),
),
focusedBorder: OutlineInputBorder(
borderRadius: BorderRadius.all(Radius.circular(10)),
)
//BorderRadius.all(Radius.circular(20));
),
onTap: onTap,
controller: controller,
),
state.hasError
? Text(
state.errorText,
style: TextStyle(color: Colors.red),
)
: Container()
],
);
});
}

@ -50,12 +50,14 @@ class BottomNavigationItem extends StatelessWidget {
size: 22.0),
),
SizedBox(height: 5,),
Text(
name,
style: TextStyle(
color: currentIndex == index
? Theme.of(context).primaryColor
: Theme.of(context).dividerColor,
Expanded(
child: Text(
name,
style: TextStyle(
color: currentIndex == index
? Theme.of(context).primaryColor
: Theme.of(context).dividerColor,
),
),
),
],

@ -1,6 +1,4 @@
import 'package:doctor_app_flutter/config/size_config.dart';
import 'package:doctor_app_flutter/providers/project_provider.dart';
import 'package:doctor_app_flutter/widgets/shared/rounded_container_widget.dart';
import 'package:flutter/material.dart';
import 'package:hexcolor/hexcolor.dart';
import 'package:provider/provider.dart';

@ -155,6 +155,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.2"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.1"
code_builder:
dependency: transitive
description:
@ -635,6 +642,13 @@ packages:
url: "https://pub.dartlang.org"
source: hosted
version: "1.7.0"
speech_to_text:
dependency: "direct main"
description:
path: speech_to_text
relative: true
source: path
version: "0.0.0"
stack_trace:
dependency: transitive
description:

@ -52,6 +52,10 @@ dependencies:
#flutter_svg: ^0.17.4
percent_indicator: "^2.1.1"
#speech to text
speech_to_text:
path: speech_to_text
dev_dependencies:
flutter_test:
sdk: flutter
@ -71,6 +75,7 @@ flutter:
# To add assets to your application, add an assets section, like this:
assets:
- assets/images/
- assets/images/dashboard/
# - images/a_dot_ham.jpeg
# An image asset can refer to one or more resolution-specific "variants", see

@ -0,0 +1,19 @@
name: build
on:
push:
branches:
- master
jobs:
test:
name: Test on Ubuntu
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: subosito/flutter-action@v1.3.2
with:
flutter-version: '1.17.1'
channel: 'stable'
- run: flutter pub get
- run: flutter test

@ -0,0 +1,11 @@
.DS_Store
.dart_tool/
.packages
.pub/
build/
coverage/
example/.flutter-plugins-dependencies
**/ios/Flutter/flutter_export_environment.sh
android/.idea/

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f
channel: stable
project_type: plugin

@ -0,0 +1,166 @@
# Changelog
## 2.3.0
### New
* new parameter `onDevice` on the `listen` method enforces on device recognition for sensitive content
* onSoundLevelChange now supported on iOS
* added compile troubleshooting help to README.md
* `SpeechToTextProvider` is an alternate and simpler way to interact with the `SpeechToText` plugin.
* new `provider_example.dart` example for usage of `SpeechToTextProvider`.
### Fix
* iOS now handles conflicts with other applications better, so that speech keeps working after a phone call, for example
## 2.2.0
### New
* improved error handling and logging in the iOS implementation
* added general guides for iOS to the README
* moved stress testing out of the main example
* iOS now defaults to using the speaker rather than the receiver for start/stop sounds when no headphones are connected
### Fix
* iOS now properly deactivates the audio session when no longer listening
* start and stop sounds on iOS should be more reliable when available
## 2.1.0
### Breaking
* `listenFor` now calls `stop` rather than `cancel` as this seems like more useful behaviour
### Fix
* Android no longer stops or cancels the speech recognizer if it has already been shut down by a
timeout or other platform behaviour.
* Android no longer tries to restart the listener when it is already active
* Now properly notifies errors that happen after listening stops due to platform callback rather than
client request. See https://github.com/csdcorp/speech_to_text/issues/51
## 2.0.1
### Fix
* Resolves an issue with the Android implementation not handling permission requests properly on apps
that didn't use the 1.12.x plugin APIs for registration. The permission dialog would not appear and
permission was denied.
## 2.0.0
### Breaking
* Upgraded to New Swift 1.12 plugin structure, may work with older Flutter version but not guaranteed
### New
* the plugin now requests both speech and microphone permission on initialize on iOS
* added `debugLogging` parameter to the `initialize` method to control native logging
### Fix
* The Android implementation now blocks duplicate results notifications. It appears that at least on some
Android versions the final results notification onResults is notified twice when Android automatically
terminates the session due to a pause time. The de-duplication looks for successive notifications
with < 100 ms between them and blocks the second. If you miss any onResult notifications please post
an issue.
## 1.1.0
### New
* error_timeout has been separated into error_network_timeout and error_speech_timeout
## 1.0.0
### New
* hasPermission to check for the current permission without bringing up the system dialog
* `listen` has a new optional `cancelOnError` parameter to support automatically canceling
a listening session on a permanent error.
* `listen` has a new optional `partialResults` parameter that controls whether the callback
receives partial or only final results.
## 0.8.0
### New
* speech recognizer now exposes multiple possible transcriptions for each recognized speech
* alternates list on SpeechRecognitionResult exposes alternate transcriptions of voice
* confidence on SpeechRecognitionResult gives an estimate of confidence in the transcription
* isConfident on SpeechRecognitionResult supports testing confidence
* hasConfidenceRating on SpeechRecognitionResult indicates if confidence was provided from the device
* new SpeechRecognitionWords class gives details on per transcription words and confidence
### Fix
* speechRecognizer availabilityDidChange was crashing if invoked due to an invalid parameter type
* Added iOS platform 10 to example Podfile to resolve compilation warnings
## 0.7.2
### Breaking
* Upgrade Swift to version 5 to match Flutter. Projects using this plugin must now switch to 5.
## 0.7.1
### Fix
* Upgrade Kotlin to 1.3.5 to match the Flutter 1.12 version
* Upgrade Gradle build to 3.5.0 to match the Flutter 1.12 version
* Android version of the plugin was repeating the system default locale in the `locales` list
## 0.7.0
### New
* locales method returns the list of available languages for speech
* new optional localeId parameter on listen method supports choosing the comprehension language separately from the current system locale.
### Breaking
* `cancel` and `stop` are now async
## 0.6.3
### Fix
* request permission fix on Android to ensure it doesn't conflict with other requests
## 0.6.2
### Fix
* channel invoke wasn't being done on the main thread in iOS
## 0.6.1
### Fix
* listening sound was failing due to timing, now uses play and record mode on iOS.
## 0.6.0
### Breaking
* The filenames for the optional sounds for iOS have changed.
### New
* Added an optional listenFor parameter to set a max duration to listen for speech and then automatically cancel.
### Fix
* Was failing to play sounds because of record mode. Now plays sounds before going into record mode and after coming out.
* Status listener was being ignored, now properly notifies on status changes.
## 0.5.1
* Fixes a problem where the recognizer left the AVAudioSession in record mode which meant that subsequent sounds couldn't be played.
## 0.5.0
Initial draft with limited functionality, supports:
* initializing speech recognition
* asking the user for permission if required
* listening for recognized speech
* canceling the current recognition session
* stopping the current recognition session
* Android and iOS 10+ support
Missing:
* some error handling
* testing across multiple OS versions
* and more, to be discovered...

@ -0,0 +1,29 @@
BSD 3-Clause License
Copyright (c) 2019, Corner Software Development Corp.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,150 @@
# speech_to_text
[![pub package](https://img.shields.io/badge/pub-v2.3.0-blue)](https://pub.dartlang.org/packages/speech_to_text) [![build status](https://github.com/csdcorp/speech_to_text/workflows/build/badge.svg)](https://github.com/csdcorp/speech_to_text/actions?query=workflow%3Abuild)
A library that exposes device specific speech recognition capability.
This plugin contains a set of classes that make it easy to use the speech recognition
capabilities of the mobile device in Flutter. It supports both Android and iOS. The
target use cases for this library are commands and short phrases, not continuous spoken
conversation or always-on listening.
## Recent Updates
The 2.3.0 version adds `SpeechToTextProvider` as a simpler way to interact with the plugin. Checkout
the new `provider_example.dart` for intended usage.
The 2.2.0 version improves audio session handling and start / stop sound playback on iOS.
*Note*: Feedback from any test devices is welcome.
## Using
To recognize text from the microphone import the package and call the plugin, like so:
```dart
import 'package:speech_to_text/speech_to_text.dart' as stt;
stt.SpeechToText speech = stt.SpeechToText();
bool available = await speech.initialize( onStatus: statusListener, onError: errorListener );
if ( available ) {
speech.listen( onResult: resultListener );
}
else {
print("The user has denied the use of speech recognition.");
}
// some time later...
speech.stop()
```
### Initialize once
The `initialize` method only needs to be called once per application session. After that `listen`,
`start`, `stop`, and `cancel` can be used to interact with the plugin. Subsequent calls to `initialize`
are ignored which is safe but does mean that the `onStatus` and `onError` callbacks cannot be reset after
the first call to `initialize`. For that reason there should be only one instance of the plugin per
application. The `SpeechToTextProvider` is one way to create a single instance and easily reuse it in
multiple widgets.
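As a rough sketch of this single-instance advice (the `SpeechService` wrapper and all of its names are made up for illustration, not plugin API):
```dart
import 'package:speech_to_text/speech_to_text.dart';

/// Hypothetical app-side holder that keeps exactly one SpeechToText instance
/// and makes sure initialize() runs only once per application session.
class SpeechService {
  SpeechService._();
  static final SpeechService instance = SpeechService._();

  final SpeechToText _speech = SpeechToText();
  bool _ready = false;

  /// Runs initialize() on first use only; later calls return the cached result.
  Future<bool> ensureInitialized() async {
    if (_ready) return true;
    _ready = await _speech.initialize(
      onStatus: (status) => print('speech status: $status'),
      onError: (error) => print('speech error: ${error.errorMsg}'),
    );
    return _ready;
  }

  SpeechToText get recognizer => _speech;
}
```
Widgets then await `SpeechService.instance.ensureInitialized()` and call `listen`, `stop`, or `cancel` on `recognizer`, so the `onStatus` and `onError` callbacks remain the ones registered on the first call.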
## Permissions
Applications using this plugin require user permissions.
### iOS
Add the following keys to your _Info.plist_ file, located in `<project root>/ios/Runner/Info.plist`:
* `NSSpeechRecognitionUsageDescription` - describe why your app uses speech recognition. This is called _Privacy - Speech Recognition Usage Description_ in the visual editor.
* `NSMicrophoneUsageDescription` - describe why your app needs access to the microphone. This is called _Privacy - Microphone Usage Description_ in the visual editor.
### Android
Add the record audio permission to your _AndroidManifest.xml_ file, located in `<project root>/android/app/src/main/AndroidManifest.xml`.
* `android.permission.RECORD_AUDIO` - this permission is required for microphone access.
* `android.permission.INTERNET` - this permission is required because speech recognition may use remote services.
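Beyond these static entries, the host app in this merge request also checks the microphone permission at runtime with the `permission_handler` package (see the medicine search screen). A minimal sketch combining that runtime check with the plugin, assuming `permission_handler` 5.x; `listenIfPermitted` is a made-up helper, not part of either package:
```dart
import 'package:permission_handler/permission_handler.dart';
import 'package:speech_to_text/speech_to_text.dart';

final SpeechToText speech = SpeechToText();

/// Hypothetical helper: request the microphone permission first, then start a
/// listen session only if it was granted and the recognizer initialized.
Future<void> listenIfPermitted() async {
  final status = await Permission.microphone.request();
  if (!status.isGranted) {
    print('Microphone permission denied, voice search disabled.');
    return;
  }
  if (await speech.initialize()) {
    speech.listen(onResult: (result) => print(result.recognizedWords));
  }
}
```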
## Adding Sounds for iOS (optional)
Android automatically plays system sounds when speech listening starts or stops but iOS does not. This plugin supports playing sounds to indicate listening status on iOS if sound files are available as assets in the application. To enable sounds in an application using this plugin add the sound files to the project and reference them in the assets section of the application `pubspec.yaml`. The location and filenames of the sound files must exactly match what
is shown below or they will not be found. The example application for the plugin shows the usage. *Note*: These files should be very short as they delay
the start/end of the speech recognizer until the sound playback is complete.
```yaml
assets:
- assets/sounds/speech_to_text_listening.m4r
- assets/sounds/speech_to_text_cancel.m4r
- assets/sounds/speech_to_text_stop.m4r
```
* `speech_to_text_listening.m4r` - played when the listen method is called.
* `speech_to_text_cancel.m4r` - played when the cancel method is called.
* `speech_to_text_stop.m4r` - played when the stop method is called.
## Troubleshooting
### SDK version error trying to compile for Android
```
Manifest merger failed : uses-sdk:minSdkVersion 16 cannot be smaller than version 21 declared in library [:speech_to_text]
```
The speech_to_text plugin requires at least Android SDK 21 because some of the speech functions in Android
were only introduced in that version. To fix this error you need to change the `build.gradle` entry to reflect
this version. Here's what the relevant part of that file looked like as of this writing:
```
defaultConfig {
applicationId "com.example.app"
minSdkVersion 21
targetSdkVersion 28
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
```
### Incorrect Swift version trying to compile for iOS
```
/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:224:44: error: value of type 'SwiftSpeechToTextPlugin' has no member 'AVAudioSession'
rememberedAudioCategory = self.AVAudioSession.Category
~~~~ ^~~~~~~~~~~~~~
/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:227:63: error: type 'Int' has no member 'notifyOthersOnDeactivation'
try self.audioSession.setActive(true, withFlags: .notifyOthersOnDeactivation)
```
This happens when the Swift language version is not set correctly. See this thread for help https://github.com/csdcorp/speech_to_text/issues/45.
### Swift not supported trying to compile for iOS
```
`speech_to_text` does not specify a Swift version and none of the targets (`Runner`) integrating it have the `SWIFT_VERSION` attribute set.
```
This usually happens for older projects that only support Objective-C. See this thread for help https://github.com/csdcorp/speech_to_text/issues/88.
### Not working on a particular Android device
The symptom for this issue is that the `initialize` method will always fail. If you turn on debug logging
using the `debugLogging: true` flag on the `initialize` method you'll see `'Speech recognition unavailable'`
in the Android log. There's a lengthy issue discussion here https://github.com/csdcorp/speech_to_text/issues/36
about this. The issue seems to be that the recognizer is not always automatically enabled on the device. Two
key things helped resolve the issue in this case at least.
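Before trying the device-level fixes below, turning on that debug logging is a quick way to confirm the diagnosis; a small hypothetical probe (`checkRecognizer` is not plugin API):
```dart
import 'package:speech_to_text/speech_to_text.dart';

/// Hypothetical probe: initialize with native-side debug logging enabled so
/// the Android log explains why recognition is unavailable on this device.
Future<void> checkRecognizer() async {
  final speech = SpeechToText();
  final available = await speech.initialize(debugLogging: true);
  if (!available) {
    print('Speech recognition unavailable, check the Android log for details.');
  }
}
```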
#### First
1. Go to Google Play
2. Search for 'Google'
3. You should find this app: https://play.google.com/store/apps/details?id=com.google.android.googlequicksearchbox
If it is shown as 'Disabled', enable it
This is the SO post that helped: https://stackoverflow.com/questions/28769320/how-to-check-wether-speech-recognition-is-available-or-not
#### Second
Ensure the app has the required permissions. The symptom for this is a permanent `error_audio_error` error notification
when starting a listen session. Here's a Stack Overflow post that addresses it:
https://stackoverflow.com/questions/46376193/android-speechrecognizer-audio-recording-error
Here's the important excerpt:
>You should go to system setting, Apps, Google app, then enable its permission of microphone.
### iOS recognition guidelines
Apple has quite a good guide on the user experience for using speech; the original is here:
https://developer.apple.com/documentation/speech/sfspeechrecognizer. This is the section that I think is particularly relevant:
>#### Create a Great User Experience for Speech Recognition
>Here are some tips to consider when adding speech recognition support to your app.
>**Be prepared to handle failures caused by speech recognition limits.** Because speech recognition is a network-based service, limits are enforced so that the service can remain freely available to all apps. Individual devices may be limited in the number of recognitions that can be performed per day, and each app may be throttled globally based on the number of requests it makes per day. If a recognition request fails quickly (within a second or two of starting), check to see if the recognition service became unavailable. If it is, you may want to ask users to try again later.
>**Plan for a one-minute limit on audio duration.** Speech recognition places a relatively high burden on battery life and network usage. To minimize this burden, the framework stops speech recognition tasks that last longer than one minute. This limit is similar to the one for keyboard-related dictation.
>**Remind the user when your app is recording.** For example, display a visual indicator and play sounds at the beginning and end of speech recognition to help users understand that they're being actively recorded. You can also display speech as it is being recognized so that users understand what your app is doing and see any mistakes made during the recognition process.
>**Do not perform speech recognition on private or sensitive information.** Some speech is not appropriate for recognition. Don't send passwords, health or financial data, and other sensitive speech for recognition.
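The medicine search screen in this merge request follows this advice by turning its mic button red while the recognizer status is 'listening'; a stripped-down sketch of the same idea (the `ListeningIndicator` widget below is illustrative only, not part of the plugin):
```dart
import 'package:flutter/material.dart';
import 'package:speech_to_text/speech_to_text.dart';

/// Illustrative "recording" indicator: the mic turns red while the recognizer
/// reports that it is listening. The parent is expected to rebuild when the
/// plugin's status callback fires.
class ListeningIndicator extends StatelessWidget {
  final SpeechToText speech;
  final VoidCallback onPressed;

  const ListeningIndicator({Key key, this.speech, this.onPressed})
      : super(key: key);

  @override
  Widget build(BuildContext context) {
    return IconButton(
      icon: Icon(speech.isListening ? Icons.mic : Icons.mic_none),
      color: speech.isListening ? Colors.red : Colors.grey,
      onPressed: onPressed,
    );
  }
}
```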

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin/default"/>
</classpath>

@ -0,0 +1,8 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>speech_to_text</name>
<comment>Project android_____ created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

@ -0,0 +1,13 @@
arguments=
auto.sync=false
build.scans.enabled=false
connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(5.6.1))
connection.project.dir=
eclipse.preferences.version=1
gradle.user.home=
java.home=
jvm.arguments=
offline.mode=false
override.workspace.settings=true
show.console.view=true
show.executions.view=true

@ -0,0 +1,44 @@
group 'com.csdcorp.speech_to_text'
version '1.0-SNAPSHOT'
buildscript {
ext.kotlin_version = '1.3.50'
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.0'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
android {
compileSdkVersion 28
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
defaultConfig {
minSdkVersion 18
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
lintOptions {
disable 'InvalidPackage'
}
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}

@ -0,0 +1,3 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true

@ -0,0 +1,2 @@
sdk.dir=/Users/stephen.owens/Library/Android/sdk
flutter.sdk=/Users/stephen.owens/Documents/dev/flutter/sdk/flutter

@ -0,0 +1 @@
rootProject.name = 'speech_to_text'

@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip

@ -0,0 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text">
</manifest>

@ -0,0 +1,595 @@
package com.csdcorp.speech_to_text
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin
import android.Manifest
import android.annotation.TargetApi
import android.app.Activity
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Build
import android.os.Bundle
import android.speech.RecognitionListener
import android.speech.SpeechRecognizer.createSpeechRecognizer
import android.speech.RecognizerIntent
import android.speech.SpeechRecognizer
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry
import io.flutter.plugin.common.PluginRegistry.Registrar
import org.json.JSONObject
import android.content.Context
import android.content.BroadcastReceiver
import android.os.Handler
import android.os.Looper
import android.util.Log
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.BinaryMessenger
import org.json.JSONArray
import java.util.*
enum class SpeechToTextErrors {
multipleRequests,
unimplemented,
noLanguageIntent,
recognizerNotAvailable,
missingOrInvalidArg,
unknown
}
enum class SpeechToTextCallbackMethods {
textRecognition,
notifyStatus,
notifyError,
soundLevelChange,
}
enum class SpeechToTextStatus {
listening,
notListening,
unavailable,
available,
}
enum class ListenMode {
deviceDefault,
dictation,
search,
confirmation,
}
const val pluginChannelName = "plugin.csdcorp.com/speech_to_text"
@TargetApi(8)
/** SpeechToTextPlugin */
public class SpeechToTextPlugin :
MethodCallHandler, RecognitionListener,
PluginRegistry.RequestPermissionsResultListener, FlutterPlugin,
ActivityAware {
private var pluginContext: Context? = null
private var channel: MethodChannel? = null
private val minSdkForSpeechSupport = 21
private val speechToTextPermissionCode = 28521
private val missingConfidence: Double = -1.0
private val logTag = "SpeechToTextPlugin"
private var currentActivity: Activity? = null
private var activeResult: Result? = null
private var initializedSuccessfully: Boolean = false
private var permissionToRecordAudio: Boolean = false
private var listening = false
private var debugLogging: Boolean = false
private var speechRecognizer: SpeechRecognizer? = null
private var recognizerIntent: Intent? = null
private var previousRecognizerLang: String? = null
private var previousPartialResults: Boolean = true
private var previousListenMode: ListenMode = ListenMode.deviceDefault
private var lastFinalTime: Long = 0
private val handler: Handler = Handler(Looper.getMainLooper())
private val defaultLanguageTag: String = Locale.getDefault().toLanguageTag()
override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
onAttachedToEngine(flutterPluginBinding.getApplicationContext(), flutterPluginBinding.getBinaryMessenger());
}
// This static function is optional and equivalent to onAttachedToEngine. It supports the old
// pre-Flutter-1.12 Android projects. You are encouraged to continue supporting
// plugin registration via this function while apps migrate to use the new Android APIs
// post-flutter-1.12 via https://flutter.dev/go/android-project-migration.
//
// It is encouraged to share logic between onAttachedToEngine and registerWith to keep
// them functionally equivalent. Only one of onAttachedToEngine or registerWith will be called
// depending on the user's project. onAttachedToEngine or registerWith must both be defined
// in the same class.
companion object {
@JvmStatic
fun registerWith(registrar: Registrar) {
val speechPlugin = SpeechToTextPlugin()
speechPlugin.currentActivity = registrar.activity()
registrar.addRequestPermissionsResultListener(speechPlugin)
speechPlugin.onAttachedToEngine(registrar.context(), registrar.messenger())
}
}
private fun onAttachedToEngine(applicationContext: Context, messenger: BinaryMessenger) {
this.pluginContext = applicationContext;
channel = MethodChannel(messenger, pluginChannelName)
channel?.setMethodCallHandler(this)
}
override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
this.pluginContext = null;
channel?.setMethodCallHandler(null)
channel = null
}
override fun onDetachedFromActivity() {
currentActivity = null
}
override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
currentActivity = binding.activity
binding.addRequestPermissionsResultListener(this)
}
override fun onAttachedToActivity(binding: ActivityPluginBinding) {
currentActivity = binding.activity
binding.addRequestPermissionsResultListener(this)
}
override fun onDetachedFromActivityForConfigChanges() {
currentActivity = null
}
override fun onMethodCall(@NonNull call: MethodCall, @NonNull rawrResult: Result) {
val result = ChannelResultWrapper(rawrResult)
try {
when (call.method) {
"has_permission" -> hasPermission(result)
"initialize" -> {
var dlog = call.argument<Boolean>("debugLogging")
if (null != dlog) {
debugLogging = dlog
}
initialize(result)
}
"listen" -> {
var localeId = call.argument<String>("localeId")
if (null == localeId) {
localeId = defaultLanguageTag
}
var partialResults = call.argument<Boolean>("partialResults")
if (null == partialResults) {
partialResults = true
}
val listenModeIndex = call.argument<Int>("listenMode")
if ( null == listenModeIndex ) {
result.error(SpeechToTextErrors.missingOrInvalidArg.name,
"listenMode is required", null)
return
}
startListening(result, localeId, partialResults, listenModeIndex )
}
"stop" -> stopListening(result)
"cancel" -> cancelListening(result)
"locales" -> locales(result)
else -> result.notImplemented()
}
} catch (exc: Exception) {
Log.e(logTag, "Unexpected exception", exc)
result.error(SpeechToTextErrors.unknown.name,
"Unexpected exception", exc.localizedMessage)
}
}
private fun hasPermission(result: Result) {
if (sdkVersionTooLow(result)) {
return
}
debugLog("Start has_permission")
val localContext = pluginContext
if (localContext != null) {
val hasPerm = ContextCompat.checkSelfPermission(localContext,
Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
result.success(hasPerm)
}
}
private fun initialize(result: Result) {
if (sdkVersionTooLow(result)) {
return
}
debugLog("Start initialize")
if (null != activeResult) {
result.error(SpeechToTextErrors.multipleRequests.name,
"Only one initialize at a time", null)
return
}
activeResult = result
val localContext = pluginContext
initializeIfPermitted(pluginContext)
}
private fun sdkVersionTooLow(result: Result): Boolean {
if (Build.VERSION.SDK_INT < minSdkForSpeechSupport) {
result.success(false)
return true;
}
return false;
}
private fun isNotInitialized(result: Result): Boolean {
if (!initializedSuccessfully || null == pluginContext) {
result.success(false)
}
return !initializedSuccessfully
}
private fun isListening(): Boolean {
return listening
}
private fun isNotListening(): Boolean {
return !listening
}
private fun startListening(result: Result, languageTag: String, partialResults: Boolean,
listenModeIndex: Int) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isListening()) {
return
}
debugLog("Start listening")
var listenMode = ListenMode.deviceDefault
if ( listenModeIndex == ListenMode.dictation.ordinal) {
listenMode = ListenMode.dictation
}
setupRecognizerIntent(languageTag, partialResults, listenMode)
handler.post {
run {
speechRecognizer?.startListening(recognizerIntent)
}
}
notifyListening(isRecording = true)
result.success(true)
debugLog("Start listening done")
}
private fun stopListening(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) {
return
}
debugLog("Stop listening")
handler.post {
run {
speechRecognizer?.stopListening()
}
}
notifyListening(isRecording = false)
result.success(true)
debugLog("Stop listening done")
}
private fun cancelListening(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) {
return
}
debugLog("Cancel listening")
handler.post {
run {
speechRecognizer?.cancel()
}
}
notifyListening(isRecording = false)
result.success(true)
debugLog("Cancel listening done")
}
private fun locales(result: Result) {
if (sdkVersionTooLow(result) || isNotInitialized(result)) {
return
}
var detailsIntent = RecognizerIntent.getVoiceDetailsIntent(pluginContext)
if (null == detailsIntent) {
detailsIntent = Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS)
}
if (null == detailsIntent) {
result.error(SpeechToTextErrors.noLanguageIntent.name,
"Could not get voice details", null)
return
}
pluginContext?.sendOrderedBroadcast(
detailsIntent, null, LanguageDetailsChecker(result),
null, Activity.RESULT_OK, null, null)
}
private fun notifyListening(isRecording: Boolean) {
debugLog("Notify listening")
listening = isRecording
val status = when (isRecording) {
true -> SpeechToTextStatus.listening.name
false -> SpeechToTextStatus.notListening.name
}
channel?.invokeMethod(SpeechToTextCallbackMethods.notifyStatus.name, status)
debugLog("Notify listening done")
}
private fun updateResults(speechBundle: Bundle?, isFinal: Boolean) {
if (isDuplicateFinal( isFinal )) {
debugLog("Discarding duplicate final")
return
}
val userSaid = speechBundle?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)
if (null != userSaid && userSaid.isNotEmpty()) {
val speechResult = JSONObject()
speechResult.put("finalResult", isFinal)
val confidence = speechBundle?.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES)
val alternates = JSONArray()
for (resultIndex in 0..userSaid.size - 1) {
val speechWords = JSONObject()
speechWords.put("recognizedWords", userSaid[resultIndex])
if (null != confidence && confidence.size >= userSaid.size) {
speechWords.put("confidence", confidence[resultIndex])
} else {
speechWords.put("confidence", missingConfidence)
}
alternates.put(speechWords)
}
speechResult.put("alternates", alternates)
val jsonResult = speechResult.toString()
debugLog("Calling results callback")
channel?.invokeMethod(SpeechToTextCallbackMethods.textRecognition.name,
jsonResult)
}
}
private fun isDuplicateFinal( isFinal: Boolean ) : Boolean {
if ( !isFinal ) {
return false
}
val delta = System.currentTimeMillis() - lastFinalTime
lastFinalTime = System.currentTimeMillis()
return delta >= 0 && delta < 100
}
private fun initializeIfPermitted(context: Context?) {
val localContext = context
if (null == localContext) {
completeInitialize()
return
}
permissionToRecordAudio = ContextCompat.checkSelfPermission(localContext,
Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
debugLog("Checked permission")
if (!permissionToRecordAudio) {
val localActivity = currentActivity
if (null != localActivity) {
debugLog("Requesting permission")
ActivityCompat.requestPermissions(localActivity,
arrayOf(Manifest.permission.RECORD_AUDIO), speechToTextPermissionCode)
} else {
debugLog("no permission, no activity, completing")
completeInitialize()
}
} else {
debugLog("has permission, completing")
completeInitialize()
}
debugLog("leaving initializeIfPermitted")
}
private fun completeInitialize() {
debugLog("completeInitialize")
if (permissionToRecordAudio) {
debugLog("Testing recognition availability")
if (!SpeechRecognizer.isRecognitionAvailable(pluginContext)) {
Log.e(logTag, "Speech recognition not available on this device")
activeResult?.error(SpeechToTextErrors.recognizerNotAvailable.name,
"Speech recognition not available on this device", "")
activeResult = null
return
}
debugLog("Creating recognizer")
speechRecognizer = createSpeechRecognizer(pluginContext).apply {
debugLog("Setting listener")
setRecognitionListener(this@SpeechToTextPlugin)
}
if (null == speechRecognizer) {
Log.e(logTag, "Speech recognizer null")
activeResult?.error(
SpeechToTextErrors.recognizerNotAvailable.name,
"Speech recognizer null", "")
activeResult = null
}
debugLog("before setup intent")
setupRecognizerIntent(defaultLanguageTag, true, ListenMode.deviceDefault)
debugLog("after setup intent")
}
initializedSuccessfully = permissionToRecordAudio
debugLog("sending result")
activeResult?.success(permissionToRecordAudio)
debugLog("leaving complete")
activeResult = null
}
private fun setupRecognizerIntent(languageTag: String, partialResults: Boolean, listenMode: ListenMode) {
debugLog("setupRecognizerIntent")
if (previousRecognizerLang == null ||
previousRecognizerLang != languageTag ||
partialResults != previousPartialResults || previousListenMode != listenMode ) {
previousRecognizerLang = languageTag;
previousPartialResults = partialResults
previousListenMode = listenMode
handler.post {
run {
recognizerIntent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
debugLog("In RecognizerIntent apply")
putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
debugLog("put model")
val localContext = pluginContext
if (null != localContext) {
putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
localContext.applicationInfo.packageName)
}
debugLog("put package")
putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults)
debugLog("put partial")
if (languageTag != Locale.getDefault().toLanguageTag()) {
putExtra(RecognizerIntent.EXTRA_LANGUAGE, languageTag);
debugLog("put languageTag")
}
}
}
}
}
}
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>?,
grantResults: IntArray?): Boolean {
when (requestCode) {
speechToTextPermissionCode -> {
if (null != grantResults) {
permissionToRecordAudio = grantResults.isNotEmpty() &&
grantResults.get(0) == PackageManager.PERMISSION_GRANTED
}
completeInitialize()
return true
}
}
return false
}
override fun onPartialResults(results: Bundle?) = updateResults(results, false)
override fun onResults(results: Bundle?) = updateResults(results, true)
override fun onEndOfSpeech() = notifyListening(isRecording = false)
override fun onError(errorCode: Int) {
val errorMsg = when (errorCode) {
SpeechRecognizer.ERROR_AUDIO -> "error_audio_error"
SpeechRecognizer.ERROR_CLIENT -> "error_client"
SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "error_permission"
SpeechRecognizer.ERROR_NETWORK -> "error_network"
SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "error_network_timeout"
SpeechRecognizer.ERROR_NO_MATCH -> "error_no_match"
SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "error_busy"
SpeechRecognizer.ERROR_SERVER -> "error_server"
SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "error_speech_timeout"
else -> "error_unknown"
}
sendError(errorMsg)
}
private fun debugLog( msg: String ) {
if ( debugLogging ) {
Log.d( logTag, msg )
}
}
private fun sendError(errorMsg: String) {
val speechError = JSONObject()
speechError.put("errorMsg", errorMsg)
speechError.put("permanent", true)
handler.post {
run {
channel?.invokeMethod(SpeechToTextCallbackMethods.notifyError.name, speechError.toString())
}
}
}
override fun onRmsChanged(rmsdB: Float) {
handler.post {
run {
channel?.invokeMethod(SpeechToTextCallbackMethods.soundLevelChange.name, rmsdB)
}
}
}
override fun onReadyForSpeech(p0: Bundle?) {}
override fun onBufferReceived(p0: ByteArray?) {}
override fun onEvent(p0: Int, p1: Bundle?) {}
override fun onBeginningOfSpeech() {}
}
// See https://stackoverflow.com/questions/10538791/how-to-set-the-language-in-speech-recognition-on-android/10548680#10548680
class LanguageDetailsChecker(flutterResult: Result) : BroadcastReceiver() {
private val result: Result = flutterResult
private var supportedLanguages: List<String>? = null
private var languagePreference: String? = null
override fun onReceive(context: Context, intent: Intent) {
val results = getResultExtras(true)
if (results.containsKey(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)) {
languagePreference = results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)
}
if (results.containsKey(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)) {
supportedLanguages = results.getStringArrayList(
RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)
createResponse(supportedLanguages)
}
}
private fun createResponse(supportedLanguages: List<String>?) {
val currentLocale = Locale.getDefault()
val localeNames = ArrayList<String>()
localeNames.add(buildIdNameForLocale(currentLocale))
if (null != supportedLanguages) {
for (lang in supportedLanguages) {
if (currentLocale.toLanguageTag() == lang) {
continue
}
val locale = Locale.forLanguageTag(lang)
localeNames.add(buildIdNameForLocale(locale))
}
}
result.success(localeNames)
}
private fun buildIdNameForLocale(locale: Locale): String {
val name = locale.displayName.replace(':', ' ')
return "${locale.language}_${locale.country}:$name"
}
}
private class ChannelResultWrapper(result: Result) : Result {
// Caller handler
val handler: Handler = Handler(Looper.getMainLooper())
val result: Result = result
// make sure to respond in the caller thread
override fun success(results: Any?) {
handler.post {
run {
result.success(results);
}
}
}
override fun error(errorCode: String?, errorMessage: String?, data: Any?) {
handler.post {
run {
result.error(errorCode, errorMessage, data);
}
}
}
override fun notImplemented() {
handler.post {
run {
result.notImplemented();
}
}
}
}

@ -0,0 +1,73 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
**/doc/api/
.dart_tool/
.flutter-plugins
.packages
.pub-cache/
.pub/
/build/
# Android related
**/android/**/gradle-wrapper.jar
**/android/.gradle
**/android/captures/
**/android/gradlew
**/android/gradlew.bat
**/android/local.properties
**/android/**/GeneratedPluginRegistrant.java
# iOS/XCode related
**/ios/**/*.mode1v3
**/ios/**/*.mode2v3
**/ios/**/*.moved-aside
**/ios/**/*.pbxuser
**/ios/**/*.perspectivev3
**/ios/**/*sync/
**/ios/**/.sconsign.dblite
**/ios/**/.tags*
**/ios/**/.vagrant/
**/ios/**/DerivedData/
**/ios/**/Icon?
**/ios/**/Pods/
**/ios/**/.symlinks/
**/ios/**/profile
**/ios/**/xcuserdata
**/ios/.generated/
**/ios/Flutter/App.framework
**/ios/Flutter/Flutter.framework
**/ios/Flutter/Generated.xcconfig
**/ios/Flutter/app.flx
**/ios/Flutter/app.zip
**/ios/Flutter/flutter_assets/
**/ios/Flutter/flutter_export_environment.sh
**/ios/ServiceDefinitions.json
**/ios/Runner/GeneratedPluginRegistrant.*
# Exceptions to above rules.
!**/ios/**/default.mode1v3
!**/ios/**/default.mode2v3
!**/ios/**/default.pbxuser
!**/ios/**/default.perspectivev3
!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f
channel: stable
project_type: app

@ -0,0 +1,155 @@
# speech_to_text_example
Demonstrates how to use the speech_to_text plugin. This example requires
that the plugin has been installed. It initializes speech recognition,
listens for words and prints them.
## Source
```dart
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
void main() => runApp(MyApp());
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
bool _hasSpeech = false;
String lastWords = "";
String lastError = "";
String lastStatus = "";
final SpeechToText speech = SpeechToText();
@override
void initState() {
super.initState();
initSpeechState();
}
Future<void> initSpeechState() async {
bool hasSpeech = await speech.initialize(onError: errorListener, onStatus: statusListener );
if (!mounted) return;
setState(() {
_hasSpeech = hasSpeech;
});
}
@override
Widget build(BuildContext context) {
return MaterialApp(
home: Scaffold(
appBar: AppBar(
title: const Text('Speech to Text Example'),
),
body: _hasSpeech
? Column(children: [
Expanded(
child: Center(
child: Text('Speech recognition available'),
),
),
Expanded(
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
FlatButton(
child: Text('Start'),
onPressed: startListening,
),
FlatButton(
child: Text('Stop'),
onPressed: stopListening,
),
FlatButton(
child: Text('Cancel'),
onPressed:cancelListening,
),
],
),
),
Expanded(
child: Column(
children: <Widget>[
Center(
child: Text('Recognized Words'),
),
Center(
child: Text(lastWords),
),
],
),
),
Expanded(
child: Column(
children: <Widget>[
Center(
child: Text('Error'),
),
Center(
child: Text(lastError),
),
],
),
),
Expanded(
child: Center(
child: speech.isListening ? Text("I'm listening...") : Text( 'Not listening' ),
),
),
])
: Center( child: Text('Speech recognition unavailable', style: TextStyle(fontSize: 20.0, fontWeight: FontWeight.bold))),
),
);
}
void startListening() {
lastWords = "";
lastError = "";
speech.listen(onResult: resultListener );
setState(() {
});
}
void stopListening() {
speech.stop( );
setState(() {
});
}
void cancelListening() {
speech.cancel( );
setState(() {
});
}
void resultListener(SpeechRecognitionResult result) {
setState(() {
lastWords = "${result.recognizedWords} - ${result.finalResult}";
});
}
void errorListener(SpeechRecognitionError error ) {
setState(() {
lastError = "${error.errorMsg} - ${error.permanent}";
});
}
void statusListener(String status ) {
setState(() {
lastStatus = "$status";
});
}
}
```

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>android___</name>
<comment>Project android___ created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

@ -0,0 +1,2 @@
connection.project.dir=
eclipse.preferences.version=1

@ -0,0 +1,67 @@
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader('UTF-8') { reader ->
localProperties.load(reader)
}
}
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
flutterVersionCode = '1'
}
def flutterVersionName = localProperties.getProperty('flutter.versionName')
if (flutterVersionName == null) {
flutterVersionName = '1.0'
}
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
android {
compileSdkVersion 28
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
lintOptions {
disable 'InvalidPackage'
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId "com.csdcorp.speech_to_text_example"
minSdkVersion 18
targetSdkVersion 28
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig signingConfigs.debug
}
}
}
flutter {
source '../..'
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test:runner:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
}

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

@ -0,0 +1,32 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<!-- io.flutter.app.FlutterApplication is an android.app.Application that
calls FlutterMain.startInitialization(this); in its onCreate method.
In most cases you can leave this as-is, but you if you want to provide
additional functionality it is fine to subclass or reimplement
FlutterApplication and put your custom class here. -->
<application
android:name="io.flutter.app.FlutterApplication"
android:label="speech_to_text_example"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:launchMode="singleTop"
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
</manifest>

@ -0,0 +1,12 @@
package com.csdcorp.speech_to_text_example
import androidx.annotation.NonNull;
import io.flutter.embedding.android.FlutterActivity
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugins.GeneratedPluginRegistrant
class MainActivity: FlutterActivity() {
override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) {
GeneratedPluginRegistrant.registerWith(flutterEngine);
}
}

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown.

After

Width:  |  Height:  |  Size: 544 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 442 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 721 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
</resources>

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.csdcorp.speech_to_text_example">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

@ -0,0 +1,31 @@
buildscript {
ext.kotlin_version = '1.3.50'
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.6.1'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects {
repositories {
google()
jcenter()
}
}
rootProject.buildDir = '../build'
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(':app')
}
task clean(type: Delete) {
delete rootProject.buildDir
}

@ -0,0 +1,4 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true
android.enableR8=true

@ -0,0 +1,6 @@
#Mon Mar 16 08:57:32 EDT 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip

@ -0,0 +1,15 @@
include ':app'
def flutterProjectRoot = rootProject.projectDir.parentFile.toPath()
def plugins = new Properties()
def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins')
if (pluginsFile.exists()) {
pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) }
}
plugins.each { name, path ->
def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile()
include ":$name"
project(":$name").projectDir = pluginDirectory
}

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>App</string>
<key>CFBundleIdentifier</key>
<string>io.flutter.flutter.app</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>App</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>MinimumOSVersion</key>
<string>8.0</string>
</dict>
</plist>

@ -0,0 +1,2 @@
#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
#include "Generated.xcconfig"

@ -0,0 +1,18 @@
#
# NOTE: This podspec is NOT to be published. It is only used as a local source!
#
Pod::Spec.new do |s|
s.name = 'Flutter'
s.version = '1.0.0'
s.summary = 'High-performance, high-fidelity mobile apps.'
s.description = <<-DESC
Flutter provides an easy and productive way to build and deploy high-performance mobile apps for Android and iOS.
DESC
s.homepage = 'https://flutter.io'
s.license = { :type => 'MIT' }
s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
s.source = { :git => 'https://github.com/flutter/engine', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.vendored_frameworks = 'Flutter.framework'
end

@ -0,0 +1,2 @@
#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
#include "Generated.xcconfig"

@ -0,0 +1,90 @@
# Uncomment this line to define a global platform for your project
platform :ios, '10.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
project 'Runner', {
'Debug' => :debug,
'Profile' => :release,
'Release' => :release,
}
def parse_KV_file(file, separator='=')
file_abs_path = File.expand_path(file)
if !File.exists? file_abs_path
return [];
end
generated_key_values = {}
skip_line_start_symbols = ["#", "/"]
File.foreach(file_abs_path) do |line|
next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ }
plugin = line.split(pattern=separator)
if plugin.length == 2
podname = plugin[0].strip()
path = plugin[1].strip()
podpath = File.expand_path("#{path}", file_abs_path)
generated_key_values[podname] = podpath
else
puts "Invalid plugin specification: #{line}"
end
end
generated_key_values
end
target 'Runner' do
use_frameworks!
use_modular_headers!
# Flutter Pod
copied_flutter_dir = File.join(__dir__, 'Flutter')
copied_framework_path = File.join(copied_flutter_dir, 'Flutter.framework')
copied_podspec_path = File.join(copied_flutter_dir, 'Flutter.podspec')
unless File.exist?(copied_framework_path) && File.exist?(copied_podspec_path)
# Copy Flutter.framework and Flutter.podspec to Flutter/ to have something to link against if the xcode backend script has not run yet.
# That script will copy the correct debug/profile/release version of the framework based on the currently selected Xcode configuration.
# CocoaPods will not embed the framework on pod install (before any build phases can generate) if the dylib does not exist.
generated_xcode_build_settings_path = File.join(copied_flutter_dir, 'Generated.xcconfig')
unless File.exist?(generated_xcode_build_settings_path)
raise "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter pub get is executed first"
end
generated_xcode_build_settings = parse_KV_file(generated_xcode_build_settings_path)
cached_framework_dir = generated_xcode_build_settings['FLUTTER_FRAMEWORK_DIR']
unless File.exist?(copied_framework_path)
FileUtils.cp_r(File.join(cached_framework_dir, 'Flutter.framework'), copied_flutter_dir)
end
unless File.exist?(copied_podspec_path)
FileUtils.cp(File.join(cached_framework_dir, 'Flutter.podspec'), copied_flutter_dir)
end
end
# Keep pod path relative so it can be checked into Podfile.lock.
pod 'Flutter', :path => 'Flutter'
# Plugin Pods
# Prepare symlinks folder. We use symlinks to avoid having Podfile.lock
# referring to absolute paths on developers' machines.
system('rm -rf .symlinks')
system('mkdir -p .symlinks/plugins')
plugin_pods = parse_KV_file('../.flutter-plugins')
plugin_pods.each do |name, path|
symlink = File.join('.symlinks', 'plugins', name)
File.symlink(path, symlink)
pod name, :path => File.join(symlink, 'ios')
end
end
# Prevent Cocoapods from embedding a second Flutter framework and causing an error with the new Xcode build system.
install! 'cocoapods', :disable_input_output_paths => true
post_install do |installer|
installer.pods_project.targets.each do |target|
target.build_configurations.each do |config|
config.build_settings['ENABLE_BITCODE'] = 'NO'
end
end
end
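
The parse_KV_file helper above parses two key=value files: .flutter-plugins (plugin name to package path, as sketched earlier) and Flutter/Generated.xcconfig, whose FLUTTER_FRAMEWORK_DIR entry points at the cached engine framework to copy. A sketch of what a Generated.xcconfig may contain, with illustrative paths only (the real file is regenerated by flutter pub get):

FLUTTER_ROOT=/Users/dev/flutter
FLUTTER_APPLICATION_PATH=/Users/dev/projects/doctor_app_flutter
FLUTTER_FRAMEWORK_DIR=/Users/dev/flutter/bin/cache/artifacts/engine/ios
FLUTTER_BUILD_NAME=1.0.0
FLUTTER_BUILD_NUMBER=1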

@ -0,0 +1,29 @@
PODS:
- Flutter (1.0.0)
- speech_to_text (0.0.1):
- Flutter
- Try
- Try (2.1.1)
DEPENDENCIES:
- Flutter (from `Flutter`)
- speech_to_text (from `.symlinks/plugins/speech_to_text/ios`)
SPEC REPOS:
trunk:
- Try
EXTERNAL SOURCES:
Flutter:
:path: Flutter
speech_to_text:
:path: ".symlinks/plugins/speech_to_text/ios"
SPEC CHECKSUMS:
Flutter: 0e3d915762c693b495b44d77113d4970485de6ec
speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe
Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96
PODFILE CHECKSUM: 0ba44ad07df4ab62269dc769727cf0f12b1e453d
COCOAPODS: 1.9.3

@ -0,0 +1,578 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
9705A1C41CF9048500538489 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; };
E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
97C146EB1CF9000F007C117D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7937AF765430D66F28F7FEEF /* Frameworks */ = {
isa = PBXGroup;
children = (
E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
9740EEB11CF90186004384FC /* Flutter */ = {
isa = PBXGroup;
children = (
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
9740EEB21CF90195004384FC /* Debug.xcconfig */,
7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
9740EEB31CF90195004384FC /* Generated.xcconfig */,
);
name = Flutter;
sourceTree = "<group>";
};
97C146E51CF9000F007C117D = {
isa = PBXGroup;
children = (
9740EEB11CF90186004384FC /* Flutter */,
97C146F01CF9000F007C117D /* Runner */,
97C146EF1CF9000F007C117D /* Products */,
A68CCF1640763A551D35BD31 /* Pods */,
7937AF765430D66F28F7FEEF /* Frameworks */,
);
sourceTree = "<group>";
};
97C146EF1CF9000F007C117D /* Products */ = {
isa = PBXGroup;
children = (
97C146EE1CF9000F007C117D /* Runner.app */,
);
name = Products;
sourceTree = "<group>";
};
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
97C146FA1CF9000F007C117D /* Main.storyboard */,
97C146FD1CF9000F007C117D /* Assets.xcassets */,
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
97C147021CF9000F007C117D /* Info.plist */,
97C146F11CF9000F007C117D /* Supporting Files */,
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
);
path = Runner;
sourceTree = "<group>";
};
97C146F11CF9000F007C117D /* Supporting Files */ = {
isa = PBXGroup;
children = (
);
name = "Supporting Files";
sourceTree = "<group>";
};
A68CCF1640763A551D35BD31 /* Pods */ = {
isa = PBXGroup;
children = (
59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */,
6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */,
C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
97C146ED1CF9000F007C117D /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */,
9740EEB61CF901F6004384FC /* Run Script */,
97C146EA1CF9000F007C117D /* Sources */,
97C146EB1CF9000F007C117D /* Frameworks */,
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = Runner;
productName = Runner;
productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
97C146E61CF9000F007C117D /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1020;
ORGANIZATIONNAME = "The Chromium Authors";
TargetAttributes = {
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
DevelopmentTeam = 3X949YE9K2;
LastSwiftMigration = 0910;
};
};
};
buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 97C146E51CF9000F007C117D;
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
97C146ED1CF9000F007C117D /* Runner */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */,
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Thin Binary";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
};
8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Run Script";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
97C146EA1CF9000F007C117D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
97C146FA1CF9000F007C117D /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C146FB1CF9000F007C117D /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C147001CF9000F007C117D /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
249021D3217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Profile;
};
249021D4217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Profile;
};
97C147031CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
97C147041CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
97C147061CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Debug;
};
97C147071CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = 3X949YE9K2;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147031CF9000F007C117D /* Debug */,
97C147041CF9000F007C117D /* Release */,
249021D3217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147061CF9000F007C117D /* Debug */,
97C147071CF9000F007C117D /* Release */,
249021D4217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 97C146E61CF9000F007C117D /* Project object */;
}

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
</Workspace>

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1020"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Profile"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

@ -0,0 +1,13 @@
import UIKit
import Flutter
@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
override func application(
_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
) -> Bool {
GeneratedPluginRegistrant.register(with: self)
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
}

@ -0,0 +1,122 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@3x.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@1x.png",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@1x.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@1x.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-App-83.5x83.5@2x.png",
"scale" : "2x"
},
{
"size" : "1024x1024",
"idiom" : "ios-marketing",
"filename" : "Icon-App-1024x1024@1x.png",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary image files not shown: 13 added in this diff (sizes 564 B – 3.7 KiB).

Some files were not shown because too many files have changed in this diff.
