iOS - TextField Input via Speech (Speech To Text)?
I'm building an iOS app using Swift and Xcode 6, and I want to implement speech-to-text functionality in the app.
I googled and found some links, but they were not helpful, and the ones I did find, such as OpenEars, were in Objective-C.
I have 2 or 3 text fields where the user enters his/her name, age and location, and there is a mic button for speech entry into the text field, as in the image below.
Could someone show me how I can implement this functionality using Swift?
Any help is appreciated! Thanks in advance!
You can implement OpenEars this way in a Swift project:
First of all, add the framework, which can be downloaded here.
Bridging-Header.h
#import <OpenEars/OELanguageModelGenerator.h>
#import <OpenEars/OEAcousticModel.h>
#import <OpenEars/OEPocketsphinxController.h>
#import <OpenEars/OEEventsObserver.h>
#import <OpenEars/OELogging.h>
#import <OpenEars/OEFliteController.h>
#import <Slt/Slt.h>
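If Xcode did not offer to create the bridging header for you, you can point the target at it yourself under Build Settings via the Objective-C Bridging Header setting (SWIFT_OBJC_BRIDGING_HEADER). The path below is only an assumption for a project named SpeechToText; use whatever matches your own project layout:

SWIFT_OBJC_BRIDGING_HEADER = SpeechToText/Bridging-Header.h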
ViewController.swift
//
//  ViewController.swift
//  SpeechToText
//
//  Created by Anil on 08/07/15.
//  Copyright (c) 2015 Variya Soft Solutions. All rights reserved.
//

import UIKit

var lmPath: String!
var dicPath: String!
var words: Array<String> = []
var currentWord: String!

var kLevelUpdatesPerSecond = 18

class ViewController: UIViewController, OEEventsObserverDelegate {

    var openEarsEventsObserver = OEEventsObserver()
    var startupFailedDueToLackOfPermissions = Bool()

    var buttonFlashing = false

    @IBOutlet weak var recordButton: UIButton!
    @IBOutlet weak var heardTextView: UITextView!
    @IBOutlet weak var statusTextView: UITextView!

    override func viewDidLoad() {
        super.viewDidLoad()
        loadOpenEars()
    }

    @IBAction func record(sender: AnyObject) {
        if !buttonFlashing {
            startFlashingButton()
            startListening()
        } else {
            stopFlashingButton()
            stopListening()
        }
    }

    func startFlashingButton() {
        buttonFlashing = true
        recordButton.alpha = 1

        UIView.animateWithDuration(0.5, delay: 0.0, options: UIViewAnimationOptions.CurveEaseInOut | UIViewAnimationOptions.Repeat | UIViewAnimationOptions.Autoreverse | UIViewAnimationOptions.AllowUserInteraction, animations: {
            self.recordButton.alpha = 0.1
        }, completion: { Bool in })
    }

    func stopFlashingButton() {
        buttonFlashing = false

        UIView.animateWithDuration(0.1, delay: 0.0, options: UIViewAnimationOptions.CurveEaseInOut | UIViewAnimationOptions.BeginFromCurrentState, animations: {
            self.recordButton.alpha = 1
        }, completion: { Bool in })
    }

    // OpenEars methods begin

    func loadOpenEars() {
        self.openEarsEventsObserver = OEEventsObserver()
        self.openEarsEventsObserver.delegate = self

        var lmGenerator: OELanguageModelGenerator = OELanguageModelGenerator()

        addWords()
        var name = "LanguageModelFileStarSaver"
        lmGenerator.generateLanguageModelFromArray(words, withFilesNamed: name, forAcousticModelAtPath: OEAcousticModel.pathToModel("AcousticModelEnglish"))

        lmPath = lmGenerator.pathToSuccessfullyGeneratedLanguageModelWithRequestedName(name)
        dicPath = lmGenerator.pathToSuccessfullyGeneratedDictionaryWithRequestedName(name)
    }

    func pocketsphinxDidStartListening() {
        println("Pocketsphinx is now listening.")
        statusTextView.text = "Pocketsphinx is now listening."
    }

    func pocketsphinxDidDetectSpeech() {
        println("Pocketsphinx has detected speech.")
        statusTextView.text = "Pocketsphinx has detected speech."
    }

    func pocketsphinxDidDetectFinishedSpeech() {
        println("Pocketsphinx has detected a period of silence, concluding an utterance.")
        statusTextView.text = "Pocketsphinx has detected a period of silence, concluding an utterance."
    }

    func pocketsphinxDidStopListening() {
        println("Pocketsphinx has stopped listening.")
        statusTextView.text = "Pocketsphinx has stopped listening."
    }

    func pocketsphinxDidSuspendRecognition() {
        println("Pocketsphinx has suspended recognition.")
        statusTextView.text = "Pocketsphinx has suspended recognition."
    }

    func pocketsphinxDidResumeRecognition() {
        println("Pocketsphinx has resumed recognition.")
        statusTextView.text = "Pocketsphinx has resumed recognition."
    }

    func pocketsphinxDidChangeLanguageModelToFile(newLanguageModelPathAsString: String, newDictionaryPathAsString: String) {
        println("Pocketsphinx is now using the following language model: \(newLanguageModelPathAsString) and the following dictionary: \(newDictionaryPathAsString)")
    }

    func pocketSphinxContinuousSetupDidFailWithReason(reasonForFailure: String) {
        println("Listening setup wasn't successful and returned the failure reason: \(reasonForFailure)")
        statusTextView.text = "Listening setup wasn't successful and returned the failure reason: \(reasonForFailure)"
    }

    func pocketSphinxContinuousTeardownDidFailWithReason(reasonForFailure: String) {
        println("Listening teardown wasn't successful and returned the failure reason: \(reasonForFailure)")
        statusTextView.text = "Listening teardown wasn't successful and returned the failure reason: \(reasonForFailure)"
    }

    func testRecognitionCompleted() {
        println("A test file that was submitted for recognition is now complete.")
        statusTextView.text = "A test file that was submitted for recognition is now complete."
    }

    func startListening() {
        OEPocketsphinxController.sharedInstance().setActive(true, error: nil)
        OEPocketsphinxController.sharedInstance().startListeningWithLanguageModelAtPath(lmPath, dictionaryAtPath: dicPath, acousticModelAtPath: OEAcousticModel.pathToModel("AcousticModelEnglish"), languageModelIsJSGF: false)
    }

    func stopListening() {
        OEPocketsphinxController.sharedInstance().stopListening()
    }

    func addWords() {
        // Add anything here that you want to be recognized. It must be in capital letters.
        words.append("SUNDAY")
        words.append("MONDAY")
        words.append("TUESDAY")
        words.append("WEDNESDAY")
        words.append("THURSDAY")
        words.append("FRIDAY")
        words.append("SATURDAY")

        words.append("JANUARY")
        words.append("FEBRUARY")
        words.append("MARCH")
        words.append("APRIL")
        words.append("MAY")
        words.append("JUNE")
        words.append("JULY")
        words.append("AUGUST")
        words.append("SEPTEMBER")
        words.append("OCTOBER")
        words.append("NOVEMBER")
        words.append("DECEMBER")
    }

    func getNewWord() {
        var randomWord = Int(arc4random_uniform(UInt32(words.count)))
        currentWord = words[randomWord]
    }

    func pocketsphinxFailedNoMicPermissions() {
        NSLog("Local callback: The user has never set mic permissions or denied permission to this app's mic, so listening will not start.")
        self.startupFailedDueToLackOfPermissions = true
        if OEPocketsphinxController.sharedInstance().isListening {
            var error = OEPocketsphinxController.sharedInstance().stopListening() // Stop listening if we are listening.
            if (error != nil) {
                NSLog("Error while stopping listening in micPermissionCheckCompleted: %@", error)
            }
        }
    }

    func pocketsphinxDidReceiveHypothesis(hypothesis: String!, recognitionScore: String!, utteranceID: String!) {
        heardTextView.text = "Heard: \(hypothesis)"
    }
}
In the code above I have added one button. On pressing the button you can speak, and the library will recognise the words.
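Since the question asks for the recognised text to land in a UITextField rather than a UITextView, here is a minimal sketch of one way to do that: make the view controller a UITextFieldDelegate, remember which field is currently active, and write the hypothesis there. The outlet names below (nameTextField, ageTextField, locationTextField) and the activeTextField property are my assumptions, not part of the code above:

// Assumed outlets; wire them up in the storyboard and set the view
// controller as each field's delegate (UITextFieldDelegate).
@IBOutlet weak var nameTextField: UITextField!
@IBOutlet weak var ageTextField: UITextField!
@IBOutlet weak var locationTextField: UITextField!

// The field the user tapped before pressing the mic button.
var activeTextField: UITextField?

func textFieldDidBeginEditing(textField: UITextField) {
    activeTextField = textField
}

// Replace pocketsphinxDidReceiveHypothesis above with this version to send
// the result to the active field instead of the text view.
func pocketsphinxDidReceiveHypothesis(hypothesis: String!, recognitionScore: String!, utteranceID: String!) {
    activeTextField?.text = hypothesis
}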
And remember one thing: OpenEars can only recognise the words you add to the words array; it cannot recognise any other words.
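For the question's name, age and location fields, that means the vocabulary you expect the user to speak has to be added up front. A small sketch of an addWords() variant with a hypothetical word list (these particular names and places are just examples of mine, not something OpenEars ships with):

func addWords() {
    // Hypothetical vocabulary for the name / age / location fields.
    // OpenEars will only ever return words from this list.
    let names = ["JOHN", "MARY", "ANIL"]
    let places = ["LONDON", "DELHI", "MUMBAI"]
    let numbers = ["ONE", "TWO", "THREE", "TWENTY", "THIRTY"] // spoken ages

    for word in names + places + numbers {
        words.append(word)
    }
}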
And you can test this with the sample project.
Hope this will help.