Agora Unity - PushAudioFrame - PullRequest
       70

Agora Unity - PushAudioFrame

0 голосов
/ 08 мая 2020

Я использую Agora SDK 2.9.2.2 на платформе Unity для интеграции прямой веб-трансляции.

Я попытался использовать функцию pushAudioFrame, чтобы:

  • Захватить float[] из AudioListener
  • Отправить его как byte[] в AudioFrame

Каким бы способом я ни пытался, SDK получает нулевые данные, или Unity просто вылетает. Я уверен, что в моём коде много проблем, но я не смог найти ни одного рабочего примера в Интернете — только ОДНО сообщение без ответа от китайского пользователя, а из Европы мне не удаётся зарегистрироваться, чтобы участвовать в той теме...

Вот код, который я использую сейчас:

// Перед присоединением к каналу

mRtcEngine.SetAudioProfile(AUDIO_PROFILE_TYPE.AUDIO_PROFILE_MUSIC_STANDARD,AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_EDUCATION); mRtcEngine.SetExternalAudioSource(true, 16000, 1);

// При чтении аудиофильтра

`
// Unity audio-filter callback: converts the float PCM buffer to 16-bit PCM bytes
// and forwards it to the SDK.
// NOTE(review): data[i] is not clamped to [-1, 1] before the (short) cast, so any
// sample outside that range overflows/wraps — a likely source of corrupted audio.
virtual public void OnAudioFilterRead(float[] data, int channels)
        {
            if (GrabSceneAudio)
            {   
                Int16[] intData = new Int16[data.Length];
                //converting in 2 steps : float[] to Int16[], //then Int16[] to Byte[]

                Byte[] bytesData = new Byte[data.Length * 2];
                //bytesData array is twice the size of
                //dataSource array because a float converted in Int16 is 2 bytes.

                var rescaleFactor = 32767; //to convert float to Int16

                for (int i = 0; i < data.Length; i++)
                {
                    intData[i] = (short)(data[i] * rescaleFactor);
                    // NOTE(review): the new Byte[2] allocation is dead — GetBytes
                    // immediately replaces the reference.
                    Byte[] byteArr = new Byte[2];
                    byteArr = BitConverter.GetBytes(intData[i]);
                    byteArr.CopyTo(bytesData, i * 2);
                }
                // NOTE(review): the frame is handed off via a shared field instead of a
                // parameter; OnAudioFilterRead runs on Unity's audio thread, so this
                // hand-off is racy if the field is read elsewhere — verify.
                _externalAudioFrameBuffer = bytesData;

                PushExternalAudioFrame();
            }
        }
`

// Наконец, push AudioFrame в SDK

`
// Wraps the shared byte buffer in an AudioFrame and pushes it into the SDK.
// NOTE(review): samples is hard-coded to 256, but the buffer produced by
// OnAudioFilterRead has a variable length (data.Length * 2 bytes). When the
// declared sample count disagrees with the actual buffer size, the SDK reads
// the wrong amount of data — a plausible cause of the "null data" / crash.
virtual protected void PushExternalAudioFrame()
        {        
            _externalAudioFrame = new AudioFrame();

            _externalAudioFrame.type = AUDIO_FRAME_TYPE.FRAME_TYPE_PCM16;
            _externalAudioFrame.samples = 256;
            _externalAudioFrame.bytesPerSample = 2;
            // NOTE(review): 16000 Hz must match the rate passed to
            // SetExternalAudioSource AND the rate of the captured audio; Unity's
            // audio pipeline typically runs at 44100/48000 Hz — confirm.
            _externalAudioFrame.samplesPerSec = 16000;
            _externalAudioFrame.channels = 1;
            _externalAudioFrame.buffer = _externalAudioFrameBuffer;

            if (mRtcEngine != null)
            {
                // NOTE(review): the return code is captured but never checked/logged,
                // so push failures are invisible.
                int a = mRtcEngine.PushAudioFrame(_externalAudioFrame);         
            }
        }
`

1 Ответ

0 голосов
/ 30 мая 2020

Привет, я добавил некоторые изменения в опубликованный вами код и протестировал его в демонстрационном приложении. Вы можете попробовать.

https://gist.github.com/icywind/8d1d6ad9831402bea8e5852933eb4eb9

или здесь:

using UnityEngine;
using UnityEngine.UI;

using agora_gaming_rtc;
using agora_utilities;
using System;
using System.Collections;


// this is an example of using Agora Unity SDK
// It demonstrates:
// How to enable video
// How to join/leave channel
// 
// Example of using the Agora Unity SDK with an external (pushed) audio source.
// Demonstrates:
//  - engine load/unload and channel join/leave,
//  - reading PCM data from a scene AudioSource clip,
//  - converting float samples to 16-bit PCM and pushing them via PushAudioFrame,
//  - creating VideoSurface objects for remote users.
public class TestHelloUnityVideo
{
    // instance of agora engine
    private IRtcEngine mRtcEngine;
    // scene audio source whose clip is streamed into the SDK (set in onSceneHelloVideoLoaded)
    AudioSource audioSource;
    // MonoBehaviour used only to host the CoAudioRender coroutine
    MonoBehaviour monoProxy;

    // Channel count announced to SetExternalAudioSource; the pushed frames must match it.
    public int AudioChannels = 1;

    // Sample rate announced to SetExternalAudioSource and stamped on every pushed frame.
    const int SAMPLE_RATE = 44100;

    // When true, OnAudioFilterRead converts and pushes incoming PCM data.
    public bool GrabSceneAudio = true;

    /// <summary>Creates the Agora engine for the given app id (no-op if one already exists).</summary>
    public void loadEngine(string appId)
    {
        Debug.Log("initializeEngine");

        if (mRtcEngine != null)
        {
            Debug.Log("Engine exists. Please unload it first!");
            return;
        }

        mRtcEngine = IRtcEngine.GetEngine(appId);

        // Verbose logging is useful while debugging the external audio pipeline.
        mRtcEngine.SetLogFilter(LOG_FILTER.DEBUG | LOG_FILTER.INFO | LOG_FILTER.WARNING | LOG_FILTER.ERROR | LOG_FILTER.CRITICAL);
    }

    /// <summary>
    /// Joins the given channel. The external audio source MUST be configured
    /// before JoinChannel, which is why SetExternalAudioSource is called here.
    /// </summary>
    public void join(string channel)
    {
        Debug.Log("calling join (channel = " + channel + ")");

        if (mRtcEngine == null)
            return;

        // set callbacks (optional)
        mRtcEngine.OnJoinChannelSuccess = onJoinChannelSuccess;
        mRtcEngine.OnUserJoined = onUserJoined;
        mRtcEngine.OnUserOffline = onUserOffline;

        // enable video
        mRtcEngine.EnableVideo();
        // allow camera output callback
        mRtcEngine.EnableVideoObserver();

        /// BEFORE JOINING CHANNEL

        // mRtcEngine.SetAudioProfile(AUDIO_PROFILE_TYPE.AUDIO_PROFILE_MUSIC_HIGH_QUALITY, AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_GAME_STREAMING);

        mRtcEngine.SetAudioProfile(AUDIO_PROFILE_TYPE.AUDIO_PROFILE_MUSIC_STANDARD, AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_EDUCATION);
        // Sample rate / channel count here must match the frames pushed later.
        mRtcEngine.SetExternalAudioSource(true, SAMPLE_RATE, AudioChannels);

        // join channel
        mRtcEngine.JoinChannel(channel, null, 0);

        // Optional: if a data stream is required, here is a good place to create it
        int streamID = mRtcEngine.CreateDataStream(true, true);
        Debug.Log("initializeEngine done, data stream id = " + streamID);
    }

    /// <summary>
    /// Converts a float PCM buffer to 16-bit little-endian PCM and pushes it to the SDK.
    /// Samples are clamped to [-1, 1] first so the short cast cannot overflow.
    /// </summary>
    /// <param name="data">Interleaved float samples in [-1, 1].</param>
    /// <param name="channels">Channel count of <paramref name="data"/>.</param>
    virtual public void OnAudioFilterRead(float[] data, int channels)
    {
        if (GrabSceneAudio)
        {
            // A float sample converted to Int16 occupies 2 bytes.
            byte[] bytesData = new byte[data.Length * 2];

            const int rescaleFactor = 32767; // Int16.MaxValue

            for (int i = 0; i < data.Length; i++)
            {
                // Clamp before casting: out-of-range floats would otherwise wrap.
                float sample = data[i];
                if (sample > 1) sample = 1;
                else if (sample < -1) sample = -1;

                short pcm = (short)(sample * rescaleFactor);
                byte[] byteArr = BitConverter.GetBytes(pcm);
                byteArr.CopyTo(bytesData, i * 2);
            }

            PushExternalAudioFrame(bytesData, channels);
        }
    }

    /// <summary>
    /// Coroutine that walks the AudioSource clip in real time and feeds slices of it
    /// through OnAudioFilterRead once per rendered frame, wrapping at the clip end.
    /// </summary>
    IEnumerator CoAudioRender()
    {
        // Guard: without a source or clip the original code threw NullReferenceException.
        if (audioSource == null || audioSource.clip == null)
        {
            Debug.LogWarning("CoAudioRender: no AudioSource or AudioClip available; nothing to stream.");
            yield break;
        }

        int channels = audioSource.clip.channels;
        float[] samples = new float[audioSource.clip.samples * channels];
        audioSource.clip.GetData(samples, 0);
        GrabSceneAudio = true;
        // Start reading from the clip's current playback position.
        int SourceDataIndex = channels * audioSource.timeSamples;
        Debug.LogWarning("CoAudioRender started. Found audio samples = " +
            samples.Length + " channels = " + audioSource.clip.channels);

        while (audioSource != null && audioSource.isActiveAndEnabled && audioSource.isPlaying)
        {
            // SampleRate * elapsedTime => number of samples to read this frame.
            int readSamples = (int)(SAMPLE_RATE * Time.deltaTime);
            int delta = channels * readSamples;
            float[] copySample = new float[delta];
            if (readSamples + SourceDataIndex / channels <= audioSource.clip.samples)
            {
                Array.Copy(samples, SourceDataIndex, copySample, 0, delta);
            }
            else // wrap around the end of the clip
            {
                int cur2EndCnt = samples.Length - SourceDataIndex;
                int wrap2HeadCnt = delta - cur2EndCnt;
                Array.Copy(samples, SourceDataIndex, copySample, 0, cur2EndCnt);
                Array.Copy(samples, 0, copySample, cur2EndCnt, wrap2HeadCnt);
            }
            SourceDataIndex = (SourceDataIndex + delta) % samples.Length;

            OnAudioFilterRead(copySample, channels);
            yield return new WaitForEndOfFrame();
        }
        GrabSceneAudio = false;
        Debug.LogWarning("Done Audio Render coroutine...");
    }

    /// <summary>
    /// Builds a PCM16 AudioFrame that describes the ACTUAL buffer (samples derived
    /// from its length, not hard-coded) and pushes it into the engine.
    /// buffer.Length = samples * channels * bytesPerSample.
    /// </summary>
    virtual protected void PushExternalAudioFrame(byte[] _externalAudioFrameBuffer, int channels)
    {
        if (mRtcEngine == null)
        {
            return;
        }

        const int bytesPerSample = 2; // PCM16

        AudioFrame _externalAudioFrame = new AudioFrame();
        _externalAudioFrame.type = AUDIO_FRAME_TYPE.FRAME_TYPE_PCM16;
        // Derive the sample count from the buffer so the declared size always matches.
        _externalAudioFrame.samples = _externalAudioFrameBuffer.Length / (channels * bytesPerSample);
        _externalAudioFrame.bytesPerSample = bytesPerSample;
        _externalAudioFrame.samplesPerSec = SAMPLE_RATE;
        _externalAudioFrame.channels = channels;
        _externalAudioFrame.buffer = _externalAudioFrameBuffer;

        // Surface push failures instead of silently discarding the return code.
        int rc = mRtcEngine.PushAudioFrame(_externalAudioFrame);
        if (rc < 0)
        {
            Debug.LogWarning("PushAudioFrame failed, code = " + rc);
        }
    }

    /// <summary>Maps internal SDK build numbers to their public version strings.</summary>
    public string getSdkVersion()
    {
        string ver = IRtcEngine.GetSdkVersion();
        switch (ver)
        {
            case "2.9.1.45":
                return "2.9.2";    // conversion for the current internal version#
            case "2.9.1.46":
                return "2.9.2.2";  // conversion for the current internal version#
            default:
                return ver;
        }
    }

    /// <summary>Leaves the channel and stops the native video observer.</summary>
    public void leave()
    {
        Debug.Log("calling leave");

        if (mRtcEngine == null)
            return;

        // leave channel
        mRtcEngine.LeaveChannel();
        // deregister video frame observers in native-c code
        mRtcEngine.DisableVideoObserver();
    }

    /// <summary>Destroys the engine. Call this on application quit.</summary>
    public void unloadEngine()
    {
        Debug.Log("calling unloadEngine");

        if (mRtcEngine != null)
        {
            IRtcEngine.Destroy();  // Place this call in ApplicationQuit
            mRtcEngine = null;
        }
    }

    /// <summary>Enables or pauses local video capture.</summary>
    public void EnableVideo(bool pauseVideo)
    {
        if (mRtcEngine != null)
        {
            if (!pauseVideo)
            {
                mRtcEngine.EnableVideo();
            }
            else
            {
                mRtcEngine.DisableVideo();
            }
        }
    }

    /// <summary>
    /// Scene-loaded hook: attaches VideoSurface renderers to the statically created
    /// Quad/Cube objects and starts the audio-render coroutine on GameController.
    /// </summary>
    public void onSceneHelloVideoLoaded()
    {
        // Attach the SDK Script VideoSurface for video rendering
        GameObject quad = GameObject.Find("Quad");
        if (ReferenceEquals(quad, null))
        {
            Debug.Log("BBBB: failed to find Quad");
            return;
        }
        else
        {
            quad.AddComponent<VideoSurface>();
        }

        GameObject cube = GameObject.Find("Cube");
        if (ReferenceEquals(cube, null))
        {
            Debug.Log("BBBB: failed to find Cube");
            return;
        }
        else
        {
            cube.AddComponent<VideoSurface>();
        }

        GameObject game = GameObject.Find("GameController");
        if (game != null)
        {
            monoProxy = game.GetComponent<MonoBehaviour>();
            audioSource = game.GetComponent<AudioSource>();
            monoProxy.StartCoroutine(CoAudioRender());
        }
    }

    // Engine callback: fired on the main thread after a successful join.
    private void onJoinChannelSuccess(string channelName, uint uid, int elapsed)
    {
        Debug.Log("JoinChannelSuccessHandler: uid = " + uid);
        GameObject textVersionGameObject = GameObject.Find("VersionText");
        // Guard: the label may not exist in every scene.
        if (textVersionGameObject != null)
        {
            textVersionGameObject.GetComponent<Text>().text = "SDK Version : " + getSdkVersion();
        }
    }

    // When a remote user joins, create (or reuse) a GameObject to render their video.
    // Called on the main thread.
    private void onUserJoined(uint uid, int elapsed)
    {
        Debug.Log("onUserJoined: uid = " + uid + " elapsed = " + elapsed);

        // find a game object to render video stream from 'uid'
        GameObject go = GameObject.Find(uid.ToString());
        if (!ReferenceEquals(go, null))
        {
            return; // reuse
        }

        // create a GameObject and assign to this new user
        VideoSurface videoSurface = makeImageSurface(uid.ToString());
        if (!ReferenceEquals(videoSurface, null))
        {
            // configure videoSurface
            videoSurface.SetForUser(uid);
            videoSurface.SetEnable(true);
            videoSurface.SetVideoSurfaceType(AgoraVideoSurfaceType.RawImage);
            videoSurface.SetGameFps(30);
        }
    }

    /// <summary>Creates a randomly placed Plane primitive carrying a VideoSurface.</summary>
    public VideoSurface makePlaneSurface(string goName)
    {
        GameObject go = GameObject.CreatePrimitive(PrimitiveType.Plane);

        if (go == null)
        {
            return null;
        }
        go.name = goName;
        // set up transform
        go.transform.Rotate(-90.0f, 0.0f, 0.0f);
        float yPos = UnityEngine.Random.Range(3.0f, 5.0f);
        float xPos = UnityEngine.Random.Range(-2.0f, 2.0f);
        go.transform.position = new Vector3(xPos, yPos, 0f);
        go.transform.localScale = new Vector3(0.25f, 0.5f, .5f);

        // configure videoSurface
        VideoSurface videoSurface = go.AddComponent<VideoSurface>();
        return videoSurface;
    }

    // Keeps randomly placed RawImages away from the screen edges.
    private const float Offset = 100;

    /// <summary>Creates a draggable RawImage under the Canvas carrying a VideoSurface.</summary>
    public VideoSurface makeImageSurface(string goName)
    {
        GameObject go = new GameObject();

        go.name = goName;

        // to be rendered onto
        go.AddComponent<RawImage>();

        // make the object draggable
        go.AddComponent<UIElementDragger>();
        GameObject canvas = GameObject.Find("Canvas");
        if (canvas != null)
        {
            go.transform.parent = canvas.transform;
        }
        // set up transform
        go.transform.Rotate(0f, 0.0f, 180.0f);
        float xPos = UnityEngine.Random.Range(Offset - Screen.width / 2f, Screen.width / 2f - Offset);
        float yPos = UnityEngine.Random.Range(Offset, Screen.height / 2f - Offset);
        go.transform.localPosition = new Vector3(xPos, yPos, 0f);
        go.transform.localScale = new Vector3(3f, 4f, 1f);

        // configure videoSurface
        VideoSurface videoSurface = go.AddComponent<VideoSurface>();
        return videoSurface;
    }

    // When a remote user goes offline, destroy the GameObject rendering their video.
    // Called on the main thread.
    private void onUserOffline(uint uid, USER_OFFLINE_REASON reason)
    {
        // remove video stream
        Debug.Log("onUserOffline: uid = " + uid + " reason = " + reason);
        GameObject go = GameObject.Find(uid.ToString());
        if (!ReferenceEquals(go, null))
        {
            GameObject.Destroy(go);
        }
    }
}
...