Media Foundation: how can I get/set MFSampleExtension_BottomFieldFirst, MFSampleExtension_Interlaced, MFSampleExtension_SingleField?

0

I have written C# code to play a video with the help of WMF (Windows Media Foundation).

It works, but some video files contain interlaced video.

I do not know how to get/set MFSampleExtension_BottomFieldFirst, MFSampleExtension_Interlaced, or MFSampleExtension_SingleField.

I always get MF_E_ATTRIBUTENOTFOUND in attempts V1, V2, V3, and V4 (shown in the code below).

What is the best way to get/set these values?

    public bool Play( string aVideoFile)
    {
       m_Videofile = aVideoFile;

       // Create source object
       IMFSourceResolver sourceResolver;
       MFExtern.MFCreateSourceResolver( out sourceResolver );
       MFObjectType objType = MFObjectType.Invalid;

       if (sourceResolver == null)
       {  
          throw new Exception( "sourceResolver is null");
       }

       // Video Infos, can be null 
       MediaFoundation.Misc.IPropertyStore ps = null;
       MFExtern.CreatePropertyStore( out ps );

       object objSource = null;
       HResult objectFromUrl = sourceResolver.CreateObjectFromURL( m_Videofile.Filename, MFResolution.ContentDoesNotHaveToMatchExtensionOrMimeType, ps, out objType, out objSource );

       // Errorhandling ?
       if (objectFromUrl != HResult.S_OK)
       { 
          throw new Exception( "sourceResolver.CreateObjectFromURL(....) was not okay " + objectFromUrl.ToString() );
       }

       // Create topology
       m_mediaSource = objSource as IMFMediaSource;
       IMFTopology topology;
       IMFPresentationDescriptor presentationDescriptor;
       MFExtern.MFCreateTopology( out topology );

       if (m_mediaSource == null)
       {
          throw new Exception( "m_mediaSource is null" );
       }

       m_mediaSource.CreatePresentationDescriptor( out presentationDescriptor );


       // Stream einlesen und verarbeiten
       int streamDescriptorCount = 0;
       presentationDescriptor.GetStreamDescriptorCount( out streamDescriptorCount );
       for( int loop = 0; loop < streamDescriptorCount; loop++ )
       {
          bool selected = false;
          IMFStreamDescriptor streamDescriptor;
          presentationDescriptor.GetStreamDescriptorByIndex( loop, out selected, out streamDescriptor );
          if( selected )
          {
             // Create source node
             IMFTopologyNode sourceNode = null;
             MFExtern.MFCreateTopologyNode( MFTopologyType.SourcestreamNode, out sourceNode );
             sourceNode.SetUnknown( MFAttributesClsid.MF_TOPONODE_SOURCE, m_mediaSource );
             sourceNode.SetUnknown( MFAttributesClsid.MF_TOPONODE_PRESENTATION_DESCRIPTOR, presentationDescriptor );
             sourceNode.SetUnknown( MFAttributesClsid.MF_TOPONODE_STREAM_DESCRIPTOR, streamDescriptor );

             // Create output node
             IMFTopologyNode outputNode = null;
             IMFMediaTypeHandler mediaTypeHandler = null;
             streamDescriptor.GetMediaTypeHandler( out mediaTypeHandler );
             Guid majorType = Guid.Empty;
             mediaTypeHandler.GetMajorType( out majorType );
             MFExtern.MFCreateTopologyNode( MFTopologyType.OutputNode, out outputNode );

             if ( MFMediaType.Audio == majorType )
             {
                IMFActivate audioRenderer;
                MFExtern.MFCreateAudioRendererActivate( out audioRenderer );
                outputNode.SetObject( audioRenderer );
             }
             else if( MFMediaType.Video == majorType )
             {
                log.Debug( " MFMediaType.Video == majorType " + outputNode.ToString() );

                IMFActivate videoRenderer;
                MFExtern.MFCreateVideoRendererActivate( m_targetControl.Handle, out videoRenderer );
                outputNode.SetObject( videoRenderer );

                IMFMediaType pCurrentMediaType = null;
                mediaTypeHandler.GetCurrentMediaType( out pCurrentMediaType );

                // VideoInterlaceMode abfragen: 
                //MFVideoInterlace_Unknown = 0,                       // Interlaced
                //MFVideoInterlace_Progressive = 2,                   // not Interlaced -> h264 der Vitec
                //MFVideoInterlace_FieldInterleavedUpperFirst = 3,    // Interlaced
                //MFVideoInterlace_FieldInterleavedLowerFirst = 4,    // Interlaced
                //MFVideoInterlace_FieldSingleUpper = 5,              // Interlaced
                //MFVideoInterlace_FieldSingleLower = 6,              // Interlaced
                //MFVideoInterlace_MixedInterlaceOrProgressive = 7,   // Interlaced or/and not Interlaced -> h264 der Sensoray
                //MFVideoInterlace_Last,                              // ???
                //MFVideoInterlace_ForceDWORD = 0x7FFFFFFF            // ???

                int videoInterlaceMode;
                pCurrentMediaType.GetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, out videoInterlaceMode);
                if (videoInterlaceMode != 2)
                {
                   // Video Interlacing:
                   // https://msdn.microsoft.com/en-us/library/windows/desktop/aa367729%28v=vs.85%29.aspx
                   // media team:
                   // https://github.com/Microsoft/FFmpegInterop/issues/72
                   //Here's and answer from the media team:
                   //For deinterlacing the media type should set MF_MT_INTERLACE_MODE attribute to interlace format.
                   //   Most components don’t know whether the content will be interlaced or not and set this attribute = MFVideoInterlace_MixedInterlaceOrProgressive
                   //When a frame is decoded, each sample needs to have attribute which specifies whether that frame is interlaced or not. Each sample should have following attributes set so that downstream component can do proper deinterlacing:
                   //MFSampleExtension_Interlaced, MFSampleExtension_BottomFieldFirst, MFSampleExtension_RepeatFirstField
                   //For example, progressive frame : 
                   //pOutSample->SetUINT32( MFSampleExtension_Interlaced, FALSE );
                   //Top_field_first interlace frame:
                   //pOutSample->SetUINT32( MFSampleExtension_Interlaced, TRUE );
                   //pOutSample->SetUINT32( MFSampleExtension_BottomFieldFirst, FALSE );
                   //pOutSample->SetUINT32( MFSampleExtension_SingleField, FALSE );
                   //You might need to set the extra properties in the ExtendedProperty set in the Sample

                   HResult hResult1 = HResult.E_FAIL;

                   // V1 with pCurrentMediaType,  hResult1 = MF_E_ATTRIBUTENOTFOUND
                   int h = -1;
                   hResult1 = HResult.E_FAIL;
                   hResult1 = pCurrentMediaType.GetUINT32( MFAttributesClsid.MFSampleExtension_BottomFieldFirst, out h );
                   //  hResult1 = MF_E_ATTRIBUTENOTFOUND


                   // V2 with MFExtern.MFCreateSample
                   hResult1 = HResult.E_FAIL;
                   IMFSample mfSamlle;
                   hResult1 = MFExtern.MFCreateSample( out mfSamlle );
                   int result = -1;
                   hResult1 = HResult.E_FAIL;
                   hResult1 = mfSamlle.GetUINT32( MFAttributesClsid.MFSampleExtension_BottomFieldFirst, out result );
                   //  hResult1 = MF_E_ATTRIBUTENOTFOUND
                   Marshal.ReleaseComObject( mfSamlle );

                   // V3 with MFExtern.MFCreateAttributes
                   hResult1 = HResult.E_FAIL;
                   IMFAttributes mfAttribute;
                   int o = -1;
                   hResult1 = MFExtern.MFCreateAttributes( out mfAttribute, 1 );
                   hResult1 = mfAttribute.GetUINT32( MFAttributesClsid.MFSampleExtension_BottomFieldFirst, out o );
                   //  hResult1 = MF_E_ATTRIBUTENOTFOUND

                   // V4
                   hResult1 = HResult.E_FAIL;
                   IMFSourceReader mfSourceReader;
                   CreateSourceReader( out mfSourceReader, out hResult1 );
                   hResult1 = HResult.E_FAIL;
                   IMFSample tmpSample;
                   int streamIndex;
                   MF_SOURCE_READER_FLAG dFlags;
                   long timeStamp = -1;
                   hResult1 = mfSourceReader.ReadSample( (int)MF_SOURCE_READER.FirstVideoStream, MF_SOURCE_READER_CONTROL_FLAG.None, out streamIndex, out dFlags, out timeStamp, out tmpSample );

                   hResult1 = HResult.E_FAIL;
                   int outvalue = -1;
                   hResult1 = tmpSample.GetUINT32( MFAttributesClsid.MFSampleExtension_BottomFieldFirst, out outvalue );
                   //  hResult1 = MF_E_ATTRIBUTENOTFOUND



                   // fall back
                   hResult1 = HResult.E_FAIL;
                   videoInterlaceMode = 7;
                   hResult1 = pCurrentMediaType.SetUINT32( MFAttributesClsid.MF_MT_INTERLACE_MODE, videoInterlaceMode );
                }


                // Subtype ist das Videoformat
                Guid subType = Guid.Empty;
                pCurrentMediaType.GetGUID( MFAttributesClsid.MF_MT_SUBTYPE, out subType );
                try
                {
                   if ( MediaFoundation.Misc.FourCC.IsA4ccSubtype(subType))
                   {
                      //MF.MFMediaType.MP4S
                      FourCC fourCC = new FourCC(subType);
                      m_Videoformat = fourCC.ToString();
                   }
                   else if ( subType.ToString() == "3f40f4f0-5622-4ff8-b6d8-a17a584bee5e" )
                      m_Videoformat = "AVC";
                }
                catch (Exception)
                {
                }
             }

             // Append nodes to topology
             topology.AddNode( sourceNode );
             topology.AddNode( outputNode );
             HResult connectOutput = sourceNode.ConnectOutput( 0, outputNode, 0 );

             // Fehlerbehandlung ?
             if (connectOutput != HResult.S_OK)
             {
                throw new Exception( "connectOutput is not okay" + connectOutput.ToString() );
             }
          }
       }
media
foundation
asked on Stack Overflow Feb 15, 2018 by Mfe

0 Answers

Nobody has answered this question yet.


User contributions licensed under CC BY-SA 3.0