RecognitionResult.cs source code in C# .NET

Source code for the .NET Framework in C#

Code:

Dotnetfx_Vista_SP2 / 8.0.50727.4016 / WIN_WINDOWS / lh_tools_devdiv_wpf / Windows / wcp / Speech / Src / Result / RecognitionResult.cs

                            //------------------------------------------------------------------ 
// 
//     Copyright (c) Microsoft Corporation.  All rights reserved.
// 
//----------------------------------------------------------------- 

 
using System; 
using System.Collections;
using System.Collections.Generic; 
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization;
using System.IO; 
using System.Reflection;
using System.Runtime.InteropServices; 
using COMTYPES = System.Runtime.InteropServices.ComTypes; 
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary; 
using System.Security.Permissions;
using System.Speech.Internal;
using System.Speech.Internal.SapiInterop;
using System.Speech.AudioFormat; 
using System.Text;
using System.Xml; 
using System.Xml.XPath; 

#pragma warning disable 1634, 1691 // Allows suppression of certain PreSharp messages. 

#pragma warning disable 56507 // check for null or empty strings

namespace System.Speech.Recognition 
{
    /// TODOC <_include file='doc\RecognitionResult.uex' path='docs/doc[@for="RecognitionResult"]/*' /> 
    [DebuggerDisplay ("{DebuggerDisplayString ()}")] 
    [Serializable]
    public sealed class RecognitionResult : RecognizedPhrase, ISerializable 
    {
        //*******************************************************************
        //
        // Constructors 
        //
        //******************************************************************* 
 
        #region Constructors
#if SPEECHSERVER 
        internal RecognitionResult (IRecognizerInternal recognizer, byte [] sapiResultBlob)
        {
            Initialize (recognizer, null, sapiResultBlob, 0);
        } 
#else
        internal RecognitionResult (IRecognizerInternal recognizer, ISpRecoResult recoResult, byte [] sapiResultBlob, int maxAlternates) 
        { 
            Initialize (recognizer, recoResult, sapiResultBlob, maxAlternates);
        } 
#endif

        // empty constructor needed for some MSS unit tests
        internal RecognitionResult () 
        {
        } 
 
        /// <summary>
        /// TODOC
        /// </summary>
        /// <param name="info"></param>
        /// <param name="context"></param>
        private RecognitionResult (SerializationInfo info, StreamingContext context) 
        {
            // Get the set of serializable members for our class and base classes 
            Type thisType = this.GetType (); 
            MemberInfo [] mis = FormatterServices.GetSerializableMembers (
               thisType, context); 

            // Do not copy all the fields for an App Domain transition
            bool appDomainTransition = context.State == StreamingContextStates.CrossAppDomain;
 
            // Deserialize the base class's fields from the info object
            foreach (MemberInfo mi in mis) 
            { 
                // To ease coding, treat the member as a FieldInfo object
                FieldInfo fi = (FieldInfo) mi; 

                // Set the field to the deserialized value
                if (!appDomainTransition || (mi.Name != "_recognizer" && mi.Name != "_grammar" && mi.Name != "_ruleList" && mi.Name != "_audio"))
                { 
                    fi.SetValue (this, info.GetValue (fi.Name, fi.FieldType));
                } 
            } 
        }
 
        #endregion
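
        // Illustrative sketch (not part of the original file): round-tripping a
        // result with BinaryFormatter drives the custom serialization in this class --
        // GetObjectData on the way out, the private serialization constructor on the
        // way back in. `result` is assumed to come from a recognition event.
        //
        //   BinaryFormatter formatter = new BinaryFormatter ();
        //   using (MemoryStream stream = new MemoryStream ())
        //   {
        //       formatter.Serialize (stream, result);
        //       stream.Position = 0;
        //       RecognitionResult copy = (RecognitionResult) formatter.Deserialize (stream);
        //   }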

        //********************************************************************
        // 
        // Public Methods
        // 
        //******************************************************************* 

        #region Public Methods 

#if !SPEECHSERVER

        /// TODOC <_include file='doc\RecognitionResult.uex' path='docs/doc[@for="RecognitionResult.GetAudioForWordRange"]/*' /> 
        public RecognizedAudio GetAudioForWordRange (RecognizedWordUnit firstWord, RecognizedWordUnit lastWord)
        { 
            Helpers.ThrowIfNull (firstWord, "firstWord"); 
            Helpers.ThrowIfNull (lastWord, "lastWord");
 
            return Audio.GetRange (firstWord._audioPosition, lastWord._audioPosition + lastWord._audioDuration - firstWord._audioPosition);
        }
#endif
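
        // Usage sketch (assumes retained audio and a result with at least one word;
        // Words is inherited from RecognizedPhrase):
        //
        //   RecognizedAudio clip = result.GetAudioForWordRange (
        //       result.Words [0], result.Words [result.Words.Count - 1]);
        //   using (FileStream file = new FileStream ("clip.wav", FileMode.Create))
        //   {
        //       clip.WriteToWaveStream (file);
        //   }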
 
        [SecurityPermissionAttribute (SecurityAction.Demand, SerializationFormatter = true)]
        void ISerializable.GetObjectData (SerializationInfo info, StreamingContext context) 
        { 
            Helpers.ThrowIfNull (info, "info");
 
            bool appDomainTransition = context.State == StreamingContextStates.CrossAppDomain;

            if (!appDomainTransition)
            { 
                // build all the properties
                foreach (RecognizedPhrase phrase in Alternates) 
                { 
                    try
                    { 
                        // Get the SML content and touch these variables so the compiler cannot optimize the computation away (hence the always-false checks below)
                        String sml = phrase.SmlContent;
#if !SPEECHSERVER
                        RecognizedAudio audio = Audio; 
#else
                        object audio = null; 
#endif 
                        if (phrase.Text == null || phrase.Homophones == null || phrase.Semantics == null || (sml == null && sml != null) || (audio == null && audio != null))
                        { 
                            throw new SerializationException ();
                        }
                    }
#pragma warning disable 56502 // Suppress the empty catch block warnings
                    catch (NotSupportedException)
                    { 
                    } 
#pragma warning restore 56502
                } 
            }

            // Get the set of serializable members for our class and base classes
            Type thisType = this.GetType (); 
            MemberInfo [] mis = FormatterServices.GetSerializableMembers (thisType, context);
 
            // Serialize the base class's fields to the info object 
            foreach (MemberInfo mi in mis)
            { 
                if (!appDomainTransition || (mi.Name != "_recognizer" && mi.Name != "_grammar" && mi.Name != "_ruleList" && mi.Name != "_audio"))
                {
                    info.AddValue (mi.Name, ((FieldInfo) mi).GetValue (this));
                } 
            }
        } 
#if !SPEECHSERVER 
        /// TODOC <_include file='doc\RecognitionResult.uex' path='docs/doc[@for="RecognitionResult.SetTextFeedback"]/*' />
        internal bool SetTextFeedback (string text, bool isSuccessfulAction) 
        {
            if (_sapiRecoResult == null)
            {
                throw new NotSupportedException (SR.Get (SRID.NotSupportedWithThisVersionOfSAPI)); 
            }
            try 
            { 
                _sapiRecoResult.SetTextFeedback (text, isSuccessfulAction);
            } 
            catch (COMException ex)
            {

                // If we failed to set the text feedback, it is likely an inproc Recognition result. 
                if (ex.ErrorCode == (int) SAPIErrorCodes.SPERR_NOT_SUPPORTED_FOR_INPROC_RECOGNIZER)
                { 
                    throw new NotSupportedException (SR.Get (SRID.SapiErrorNotSupportedForInprocRecognizer)); 
                }
 
                // Otherwise, this could also fail for various reasons, e.g. we have changed the recognizer under
                // the hood. In any case, we don't want this function to fail.
                return false;
            } 

            return true; 
        } 
#endif
        #endregion 

        //********************************************************************
        //
        // Public Properties 
        //
        //******************************************************************** 
 
        #region Public Properties
 
#if !SPEECHSERVER

        // Recognized Audio:
        /// TODOC <_include file='doc\RecognitionResult.uex' path='docs/doc[@for="RecognitionResult.Audio"]/*' /> 
        public RecognizedAudio Audio
        { 
            get 
            {
                if (_audio == null && _header.ulRetainedOffset > 0) 
                {
                    SpeechAudioFormatInfo audioFormat;
                    int audioLength = _sapiAudioBlob.Length;
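                    // Layout of _sapiAudioBlob, as parsed below:
                    //   [0, cbUsed)             - SPWAVEFORMATEX header (38 fixed bytes + cbSize codec-specific bytes)
                    //   [cbUsed, audioLength)   - raw retained audio samples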
 
                    GCHandle gc = GCHandle.Alloc (_sapiAudioBlob, GCHandleType.Pinned);
                    try 
                    { 
                        IntPtr audioBuffer = gc.AddrOfPinnedObject ();
 
                        SPWAVEFORMATEX audioHeader = (SPWAVEFORMATEX) Marshal.PtrToStructure (audioBuffer, typeof (SPWAVEFORMATEX));

                        IntPtr rawDataBuffer = new IntPtr ((long) audioBuffer + audioHeader.cbUsed);
                        byte [] rawAudioData = new byte [audioLength - audioHeader.cbUsed]; 
                        Marshal.Copy (rawDataBuffer, rawAudioData, 0, audioLength - (int) audioHeader.cbUsed);
 
                        byte [] formatSpecificData = new byte [audioHeader.cbSize]; 
                        if (audioHeader.cbSize > 0)
                        { 
                            IntPtr codecDataBuffer = new IntPtr ((long) audioBuffer + 38); // 38 is sizeof(SPWAVEFORMATEX) without padding.
                            Marshal.Copy (codecDataBuffer, formatSpecificData, 0, audioHeader.cbSize);
                        }
                        audioFormat = new SpeechAudioFormatInfo ((EncodingFormat) audioHeader.wFormatTag, 
                                                        (int) audioHeader.nSamplesPerSec, (short) audioHeader.wBitsPerSample, (short) audioHeader.nChannels, (int) audioHeader.nAvgBytesPerSec,
                                                        (short) audioHeader.nBlockAlign, 
                                                        formatSpecificData); 
                        DateTime startTime;
                        if (_header.times.dwTickCount == 0) 
                        {
                            startTime = _startTime - AudioDuration;
                        }
                        else 
                        {
                            startTime = DateTime.FromFileTime ((long) ((ulong) _header.times.ftStreamTime.dwHighDateTime << 32) + _header.times.ftStreamTime.dwLowDateTime); 
                        } 
                        _audio = new RecognizedAudio (rawAudioData, audioFormat, startTime, AudioPosition, AudioDuration);
                    } 
                    finally
                    {
                        gc.Free ();
                    } 
                }
 
                return _audio; // Will be null if there's no audio. 
            }
        } 

#endif

 
        // Alternates. This returns a list of Alternate recognitions.
        // We use the same class here for alternates as the main RecognitionResult class. This simplifies the API surface. Calling Alternates on a Result that's already an Alternate will throw a NotSupportedException. 
        /// TODOC <_include file='doc\RecognitionResult.uex' path='docs/doc[@for="RecognitionResult.Alternates"]/*' /> 
        public ReadOnlyCollection<RecognizedPhrase> Alternates
        { 
            get
            {
                return new ReadOnlyCollection<RecognizedPhrase> (GetAlternates ());
            } 
        }
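
        // Usage sketch: each alternate is a RecognizedPhrase, so the usual phrase
        // properties apply:
        //
        //   foreach (RecognizedPhrase alternate in result.Alternates)
        //   {
        //       Console.WriteLine ("{0} ({1})", alternate.Text, alternate.Confidence);
        //   }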
 
        #endregion 

        //******************************************************************* 
        //
        // Internal Methods
        //
        //******************************************************************** 

        #region Internal Methods 
 
        /// <summary>
        /// Converts a given pronunciation from the SAPI phonetic alphabet to IPA for a given language.
        /// </summary>
        /// <param name="pronunciation"></param>
        /// <param name="langId"></param>
        /// <returns>New pronunciation in the IPA alphabet</returns>
        internal string ConvertPronunciation (string pronunciation, int langId)
        { 
            if (_alphabetConverter == null) 
            {
                _alphabetConverter = new AlphabetConverter (langId); 
            }
            else
            {
                _alphabetConverter.SetLanguageId (langId); 
            }
 
            char [] ipa = _alphabetConverter.SapiToIpa (pronunciation.ToCharArray ()); 

            if (ipa != null) 
            {
                pronunciation = new string (ipa);
            }
            else 
            {
                Trace.TraceError ("Cannot convert the pronunciation to IPA alphabet."); 
            } 
            return pronunciation;
        } 
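
        // Illustrative call (the phone string below is an assumption; SAPI phone
        // strings are engine- and language-specific):
        //
        //   string ipa = result.ConvertPronunciation ("h eh l ow", 0x409); // 0x409 = en-US LANGID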

        #endregion

        //******************************************************************* 
        //
        // Internal Properties 
        // 
        //*******************************************************************
 
        #region Internal Properties

        internal IRecognizerInternal Recognizer
        { 
            get
            { 
                // If this recognition result comes from deserialization, then throw
                if (_recognizer == null)
                { 
                    throw new NotSupportedException (SR.Get (SRID.CantGetPropertyFromSerializedInfo, "Recognizer"));
                }
                return _recognizer;
            } 
        }
 
        internal TimeSpan AudioPosition 
        {
            get 
            {
                if (_audioPosition == null)
                {
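                    // ullStart and ullLength are treated as 100-nanosecond units,
                    // which is exactly TimeSpan's tick resolution, so no scaling is needed.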
                    _audioPosition = new TimeSpan ((long) _header.times.ullStart); 
                }
                return (TimeSpan) _audioPosition; 
            } 
        }
 
        internal TimeSpan AudioDuration
        {
            get
            { 
                if (_audioDuration == null)
                { 
                    _audioDuration = new TimeSpan ((long) _header.times.ullLength); 
                }
                return (TimeSpan) _audioDuration; 
            }
        }

        #endregion 

        //******************************************************************* 
        // 
        // Private Methods
        // 
        //********************************************************************

        #region Private Methods
 
        private void Initialize (IRecognizerInternal recognizer, ISpRecoResult recoResult, byte [] sapiResultBlob, int maxAlternates)
        { 
            // record parameters 
            _recognizer = recognizer;
            _maxAlternates = maxAlternates; 

#if !SPEECHSERVER
            try
            { 
                _sapiRecoResult = recoResult as ISpRecoResult2;
            } 
            catch (COMException) 
            {
                _sapiRecoResult = null; 
            }
#endif
            GCHandle gc = GCHandle.Alloc (sapiResultBlob, GCHandleType.Pinned);
            try 
            {
                IntPtr buffer = gc.AddrOfPinnedObject (); 
 
                int headerSize = Marshal.ReadInt32 (buffer, 4); // Read header size directly from buffer - 4 is the offset of cbHeaderSize.
#if VSCOMPILE // 
                if (headerSize == 0) // Force to SAPI 5.3
#else
                if (headerSize == Marshal.SizeOf (typeof (SPRESULTHEADER_Sapi51))) // SAPI 5.1 size
#endif 
                {
                    SPRESULTHEADER_Sapi51 legacyHeader = (SPRESULTHEADER_Sapi51) Marshal.PtrToStructure (buffer, typeof (SPRESULTHEADER_Sapi51)); 
                    _header = new SPRESULTHEADER (legacyHeader); 
                    _isSapi53Header = false;
                } 
                else
                {
                    _header = (SPRESULTHEADER) Marshal.PtrToStructure (buffer, typeof (SPRESULTHEADER));
                    _isSapi53Header = true; 
                }
 
                // Validate the header fields 
                _header.Validate ();
 
                // initialize the parent to be this result - this is needed for the homophones
                IntPtr phraseBuffer = new IntPtr ((long) buffer + (int) _header.ulPhraseOffset);

                SPSERIALIZEDPHRASE serializedPhrase = RecognizedPhrase.GetPhraseHeader (phraseBuffer, _header.ulPhraseDataSize, _isSapi53Header); 

                // Get the alphabet of the main phrase, which should be the same as the current alphabet selected by us (applications). 
                bool hasIPAPronunciation = (_header.fAlphabet & (uint) SPRESULTALPHABET.SPRA_APP_UPS) != 0; 

                InitializeFromSerializedBuffer (this, serializedPhrase, phraseBuffer, (int) _header.ulPhraseDataSize, _isSapi53Header, hasIPAPronunciation); 

#if !SPEECHSERVER
                if (recoResult != null)
                { 
                    ExtractDictationAlternates (recoResult, maxAlternates);
                    // Since we took ownership of this unmanaged object we can discard information that we don't need.
                    recoResult.Discard (SapiConstants.SPDF_ALL); 
                }
#endif 
            }
            finally
            {
                gc.Free (); 
            }
 
            // save the SAPI blob, splitting it into the relevant bits

            // audio 
            _sapiAudioBlob = new byte [(int) _header.ulRetainedDataSize];
            Array.Copy (sapiResultBlob, (int) _header.ulRetainedOffset, _sapiAudioBlob, 0, (int) _header.ulRetainedDataSize);

            // alternates 
            _sapiAlternatesBlob = new byte [(int) _header.ulPhraseAltDataSize];
            Array.Copy (sapiResultBlob, (int) _header.ulPhraseAltOffset, _sapiAlternatesBlob, 0, (int) _header.ulPhraseAltDataSize); 
        } 

        private Collection<RecognizedPhrase> ExtractAlternates (int numberOfAlternates, bool isSapi53Header)
        {
            Collection<RecognizedPhrase> alternates = new Collection<RecognizedPhrase> ();

            if (numberOfAlternates > 0) 
            {
                GCHandle gc = GCHandle.Alloc (_sapiAlternatesBlob, GCHandleType.Pinned); 
                try 
                {
                    IntPtr buffer = gc.AddrOfPinnedObject (); 

                    int sizeOfSpSerializedPhraseAlt = Marshal.SizeOf (typeof (SPSERIALIZEDPHRASEALT));
                    int offset = 0;
                    for (int i = 0; i < numberOfAlternates; i++) 
                    {
                        IntPtr altBuffer = new IntPtr ((long) buffer + offset); 
                        SPSERIALIZEDPHRASEALT alt = (SPSERIALIZEDPHRASEALT) Marshal.PtrToStructure (altBuffer, typeof (SPSERIALIZEDPHRASEALT)); 

                        offset += sizeOfSpSerializedPhraseAlt; // advance over SPSERIALIZEDPHRASEALT 
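                        // Note: (x + 7) & ~7 rounds x up to the next multiple of 8
                        // (e.g. 13 -> 16); SAPI 5.3 blobs 8-byte-align each variable-size block.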
                        if (isSapi53Header)
                        {
                            offset += (int) ((alt.cbAltExtra + 7) & ~7); // advance over extra data with alignment padding
                        } 
                        else
                        { 
                            offset += (int) alt.cbAltExtra; // no alignment padding 
                        }
 
                        // we cannot use a constructor parameter because RecognitionResult also derives from RecognizedPhrase
                        IntPtr phraseBuffer = new IntPtr ((long) buffer + (int) offset);
                        SPSERIALIZEDPHRASE serializedPhrase = RecognizedPhrase.GetPhraseHeader (phraseBuffer, _header.ulPhraseAltDataSize - (uint) offset, _isSapi53Header);
                        int serializedPhraseSize = (int) serializedPhrase.ulSerializedSize; 

                        RecognizedPhrase phrase = new RecognizedPhrase (); 
 
                        // Get the alphabet of the raw phrase alternate, which should be the same as the engine's
                        bool hasIPAPronunciation = (_header.fAlphabet & (uint) SPRESULTALPHABET.SPRA_ENGINE_UPS) != 0; 

                        phrase.InitializeFromSerializedBuffer (this, serializedPhrase, phraseBuffer, serializedPhraseSize, isSapi53Header, hasIPAPronunciation);

                        // 
                        if (isSapi53Header)
                        { 
                            offset += ((serializedPhraseSize + 7) & ~7); // advance over phrase with alignment padding 
                        }
                        else 
                        {
                            offset += serializedPhraseSize; // advance over phrase
                        }
 
                        alternates.Add (phrase);
                    } 
                } 
                finally
                { 
                    gc.Free ();
                }
            }
 
            return alternates;
        } 
 
#if !SPEECHSERVER
 
        private void ExtractDictationAlternates (ISpRecoResult recoResult, int maxAlternates)
        {
            // Get the alternates for dictation.
            // alternates for dictation are not part of the recognition results and must be pulled out 
            // from the recognition result bits.
 
            if (recoResult != null) // recoResult is null if we are in the case of our unit test. 
            {
                if (Grammar is DictationGrammar) 
                {
                    _alternates = new Collection<RecognizedPhrase> ();
                    IntPtr [] sapiAlternates = new IntPtr [maxAlternates];
                    try 
                    {
                        recoResult.GetAlternates (0, -1, maxAlternates, sapiAlternates, out maxAlternates); 
                    } 
                    catch (COMException)
                    { 
                        // In some cases such as when the dictation grammar has been unloaded, the engine may not be able
                        // to provide the alternates. We set the alternate list to empty.
                        maxAlternates = 0;
                    } 

                    //InnerList.Capacity = (int)numSapiAlternates; 
                    for (uint i = 0; i < maxAlternates; i++) 
                    {
                        ISpPhraseAlt phraseAlt = (ISpPhraseAlt) Marshal.GetObjectForIUnknown (sapiAlternates [i]); 
                        try
                        {
                            IntPtr coMemSerializedPhrase;
                            phraseAlt.GetSerializedPhrase (out coMemSerializedPhrase); 
                            try
                            { 
                                // Build a recognition phrase result 
                                RecognizedPhrase phrase = new RecognizedPhrase ();
 
                                // we cannot use a constructor parameter because RecognitionResult also derives from RecognizedPhrase
                                SPSERIALIZEDPHRASE serializedPhrase = RecognizedPhrase.GetPhraseHeader (coMemSerializedPhrase, uint.MaxValue, _isSapi53Header);

                                // 
                                // If we are getting the alternates from SAPI, the alphabet should have already been converted
                                // to the alphabet we (applications) want. 
                                // 
                                bool hasIPAPronunciation = (_header.fAlphabet & (uint) SPRESULTALPHABET.SPRA_APP_UPS) != 0;
 
                                phrase.InitializeFromSerializedBuffer (this, serializedPhrase, coMemSerializedPhrase, (int) serializedPhrase.ulSerializedSize, _isSapi53Header, hasIPAPronunciation);
                                _alternates.Add (phrase);
                            }
                            finally 
                            {
                                Marshal.FreeCoTaskMem (coMemSerializedPhrase); 
                            } 
                        }
                        finally 
                        {
                            Marshal.Release (sapiAlternates [i]);
                        }
                    } 
                }
            } 
        } 

#endif 

        private Collection<RecognizedPhrase> GetAlternates ()
        {
            if (_alternates == null) 
            {
                // extract alternates even if ulNumPhraseAlts is 0 so that the list gets initialized to empty 
                _alternates = ExtractAlternates ((int) _header.ulNumPhraseAlts, _isSapi53Header); 

                // If there are no alternates, create one from the top result
                if (_alternates.Count == 0 && _maxAlternates > 0)
                {
                    RecognizedPhrase alternate = new RecognizedPhrase ();
                    GCHandle gc = GCHandle.Alloc (_phraseBuffer, GCHandleType.Pinned); 
                    try
                    { 
                        alternate.InitializeFromSerializedBuffer (this, _serializedPhrase, gc.AddrOfPinnedObject (), _phraseBuffer.Length, _isSapi53Header, _hasIPAPronunciation); 
                    }
                    finally 
                    {
                        gc.Free ();
                    }
                    _alternates.Add (alternate); 
                }
            } 
            return _alternates; 
        }
 
        internal string DebuggerDisplayString ()
        {
            StringBuilder sb = new StringBuilder ("Recognized text: '");
            sb.Append (Text); 
            sb.Append ("'");
            if (Semantics.Value != null) 
            { 
                sb.Append (" - Semantic Value  = ");
                sb.Append (Semantics.Value.ToString ()); 
            }

            if (Semantics.Count > 0)
            { 
                sb.Append (" - Semantic children count = ");
                sb.Append (Semantics.Count.ToString (CultureInfo.InvariantCulture)); 
            } 

            if (Alternates.Count > 1) 
            {
                sb.Append (" - Alternate word count = ");
                sb.Append (Alternates.Count.ToString (CultureInfo.InvariantCulture));
            } 

            return sb.ToString (); 
        } 

        #endregion 

        //*******************************************************************
        //
        // Private Fields 
        //
        //******************************************************************** 
 
        #region Private Fields
 
        [field: NonSerialized]
        private IRecognizerInternal _recognizer;

        [field: NonSerialized] 
        private int _maxAlternates;
 
        [field: NonSerialized] 
        private AlphabetConverter _alphabetConverter;
 
        // sapi blobs
        byte [] _sapiAudioBlob;
        byte [] _sapiAlternatesBlob;
 
        private Collection<RecognizedPhrase> _alternates;
 
        private SPRESULTHEADER _header; 

#if !SPEECHSERVER 
        private RecognizedAudio _audio;
        private DateTime _startTime = DateTime.Now;

        [field: NonSerialized] 
        private ISpRecoResult2 _sapiRecoResult;
#endif 
        // Keep as members because MSS uses these fields: 
        private TimeSpan? _audioPosition;
        private TimeSpan? _audioDuration; 

        #endregion
    }
} 

// File provided for Reference Use Only by Microsoft Corporation (c) 2007.