I am setting up the AudioGraph in App.xaml.cs because, if I try to do it in MainPage, the app hangs and never returns an AudioGraph.
I then want a variable frequency that is controlled by a slider in MainPage.xaml.cs.
Then, while the "A" key is held down, that frequency should play through the audio graph.
To make this work, I need to start the AudioGraph from MainPage.xaml.cs.
How do I take the AudioGraph, which I can only obtain in App.xaml.cs, and put it into the AudioGraph object in MainPage.xaml.cs?
I have tried initializing the AudioGraph in MainPage.xaml.cs, but it never returns and just hangs.
I have tried putting the frequency variable in App.xaml.cs and could not, because the class is sealed.
In fact, both classes are sealed, so I am not sure how to get the two of them to share variables with each other. Even making the members public does not work.
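For what it is worth, this is the kind of hand-off I am after: a sketch with a made-up public Graph property on App, read from the page through Application.Current. I have not gotten this working, so the names here are just placeholders:

// In App.xaml.cs ("Graph" is a made-up property name for this sketch)
public AudioGraph Graph { get; private set; }
// ... assigned inside InitAudioGraph(): Graph = result.Graph;

// In MainPage.xaml.cs, once the page is up:
AudioGraph audioGraph = ((App)Application.Current).Graph;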
Here is MainPage.xaml.cs:
using Windows.Media.Audio;
using Windows.System;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Input;

namespace FG
{
    /// <summary>
    /// An empty page that can be used on its own or navigated to within a Frame.
    /// </summary>
    public sealed partial class MainPage : Page
    {
        // For audio nodes through which audio data flows
        AudioGraph audioGraph;
        // Pushes audio data that is generated
        AudioFrameInputNode frameInputNode;
        double pitch = 1000; // choosing to generate a frequency of 1 kHz

        public MainPage()
        {
            this.InitializeComponent();
            this.Focus(FocusState.Keyboard);
        }

        public void setAudioGraph(AudioGraph ag)
        {
            audioGraph = ag;
        }

        public void setFrameInputNode(AudioFrameInputNode fin)
        {
            frameInputNode = fin;
        }

        private void Slider_ValueChanged(object sender, RangeBaseValueChangedEventArgs e)
        {
            Slider slider = sender as Slider;
            if (slider != null)
            {
                pitch = slider.Value;
            }
        }

        private void Key_Down(object sender, KeyRoutedEventArgs e)
        {
            if (e.Key == VirtualKey.A)
            {
                frameInputNode.Start();
                audioGraph.Start();
            }
        }

        private void Key_Up(object sender, KeyRoutedEventArgs e)
        {
            frameInputNode.Stop();
            audioGraph.Stop();
        }
    }
}
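For the setters above to ever run, I assume App has to call them after the page is created. This is a sketch of what I think is needed (not code I currently have), placed in the template's OnLaunched right after the navigation call:

// Sketch: hand the objects to the page from App.OnLaunched
rootFrame.Navigate(typeof(MainPage), e.Arguments);
MainPage page = rootFrame.Content as MainPage;
if (page != null)
{
    page.setAudioGraph(audioGraph);
    page.setFrameInputNode(frameInputNode);
}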
Here is App.xaml.cs:
using System;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Windows.Foundation;
using Windows.Media;
using Windows.Media.Audio;
using Windows.Media.MediaProperties;
using Windows.UI.Xaml;

namespace FG
{
    /// <summary>
    /// Provides application-specific behavior to supplement the default Application class.
    /// </summary>
    sealed partial class App : Application
    {
        // For audio nodes through which audio data flows
        AudioGraph audioGraph;
        AudioFrameInputNode frameInputNode;
        // For audio out
        AudioDeviceOutputNode deviceOutputNode;
        double pitch = 1000; // the value the slider on MainPage is supposed to control
        double audioWaveTheta; // sine phase, kept across quanta so the wave stays continuous

        // Access to the underlying memory buffer
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        /// <summary>
        /// Initializes the singleton application object. This is the first line of authored code
        /// executed, and as such is the logical equivalent of main() or WinMain().
        /// </summary>
        public App()
        {
            this.InitializeComponent();
            // Setup audio pipeline
            InitAudioGraph().Wait();
            CreateDeviceOutputNode().Wait();
            CreateFrameInputNode();
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            this.Suspending += OnSuspending;
        }

        public async Task InitAudioGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            audioGraph = result.Graph;
        }

        private async Task CreateDeviceOutputNode()
        {
            // Create a device output node
            CreateAudioDeviceOutputNodeResult result = await audioGraph.CreateDeviceOutputNodeAsync();
            deviceOutputNode = result.DeviceOutputNode;
        }

        private void CreateFrameInputNode()
        {
            // Create the FrameInputNode in the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = audioGraph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = audioGraph.CreateFrameInputNode(nodeEncodingProperties);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed.
            // This event is triggered when the node is required to provide data.
            frameInputNode.QuantumStarted += node_QuantumStarted;
        }

        private void node_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            // GenerateAudioData can provide PCM audio data by directly synthesizing it or reading from a file.
            // Need to know how many samples are required. In this case, the node is running at the same rate as the rest of the graph.
            // For minimum latency, only provide the required amount of samples. Extra samples will introduce additional latency.
            uint numSamplesNeeded = (uint)args.RequiredSamples;
            if (numSamplesNeeded != 0)
            {
                AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
                frameInputNode.AddFrame(audioData);
            }
        }

        unsafe private AudioFrame GenerateAudioData(uint samples)
        {
            // Buffer size is (number of samples) * (size of each sample).
            // We choose to generate single channel (mono) audio. For multi-channel, multiply by the number of channels.
            uint bufferSize = samples * sizeof(float);
            AudioFrame frame = new Windows.Media.AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                byte* dataInBytes;
                uint capacityInBytes;
                float* dataInFloat;

                // Get the buffer from the AudioFrame
                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                // Cast to float since the data we are generating is float
                dataInFloat = (float*)dataInBytes;

                float amplitude = 0.3f;
                int sampleRate = (int)audioGraph.EncodingProperties.SampleRate;
                double sampleIncrement = (pitch * (Math.PI * 2)) / sampleRate;

                // Generate a sine wave at the current pitch and populate the values in the memory buffer
                for (int i = 0; i < samples; i++)
                {
                    double sinValue = amplitude * Math.Sin(audioWaveTheta);
                    dataInFloat[i] = (float)sinValue;
                    audioWaveTheta += sampleIncrement;
                }
            }
            return frame;
        }

        // OnLaunched / OnSuspending from the project template are unchanged and omitted here.
    }
}
I expect to be able to initialize an AudioGraph (only works in App.xaml.cs) and use it to play a frequency that is set with a slider on the MainPage and triggered with a keydown event on the MainPage.
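The only other lead I have: the hang happens exactly where I block on the async creation with .Wait(). A sketch of what I suspect I should be doing instead, awaiting the setup from an async handler on the page (Page_Loaded is a made-up handler name wired to the page's Loaded event; this is not code I currently have):

private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Await instead of blocking the UI thread with .Wait()
    AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    audioGraph = result.Graph;

    CreateAudioDeviceOutputNodeResult outputResult = await audioGraph.CreateDeviceOutputNodeAsync();
    // ... then create the frame input node and connect it, as in App.xaml.cs above
}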