Processor editor and processor are now in separate files. Slider parameters can now be added simply by declaring them in the processor; everything else is dynamic.
This commit is contained in:
michalcourson
2025-11-04 20:04:07 -05:00
parent 3468c1f389
commit f5245eb557
11 changed files with 2771 additions and 2232 deletions

86
Source/CircularBuffer.h Normal file
View File

@ -0,0 +1,86 @@
/*
==============================================================================
CircularBuffer.h
Created: 4 Nov 2025 6:20:15pm
Author: mickl
==============================================================================
*/
#pragma once
#include <JuceHeader.h>
// Fixed-size multi-channel audio ring buffer backed by a single heap allocation.
// Writes advance a monotonically increasing absolute write index; reads address
// the buffer by absolute sample index modulo the buffer length.
// NOTE(review): not internally synchronized — callers must serialize access.
class CircularBuffer
{
public:
// numChannels/numSamples fix the capacity; the AudioBlock ctor allocates into `data`.
CircularBuffer(int numChannels, int numSamples)
: buffer(data, (size_t)numChannels, (size_t)numSamples)
{
}
// Appends a block, keeping only the most recent samples when the input is
// larger than the buffer, and wrapping around the end as needed.
template <typename T>
void push(dsp::AudioBlock<T> b)
{
jassert(b.getNumChannels() == buffer.getNumChannels());
// Drop all but the last `capacity` samples of the input — earlier ones
// would be overwritten anyway.
const auto trimmed = b.getSubBlock(b.getNumSamples()
- std::min(b.getNumSamples(), buffer.getNumSamples()));
const auto bufferLength = (int64)buffer.getNumSamples();
for (auto samplesRemaining = (int64)trimmed.getNumSamples(); samplesRemaining > 0;)
{
// Copy in contiguous runs: up to the end of the ring, then wrap.
const auto writeOffset = writeIx % bufferLength;
const auto numSamplesToWrite = std::min(samplesRemaining, bufferLength - writeOffset);
auto destSubBlock = buffer.getSubBlock((size_t)writeOffset, (size_t)numSamplesToWrite);
const auto sourceSubBlock = trimmed.getSubBlock(trimmed.getNumSamples() - (size_t)samplesRemaining,
(size_t)numSamplesToWrite);
destSubBlock.copyFrom(sourceSubBlock);
samplesRemaining -= numSamplesToWrite;
writeIx += numSamplesToWrite;
}
}
// Convenience overload: treats a span as a single-channel block.
// NOTE(review): the jassert in push() implies this expects a 1-channel buffer.
template <typename T>
void push(Span<T> s)
{
auto* ptr = s.begin();
dsp::AudioBlock<T> b(&ptr, 1, s.size());
push(b);
}
// Copies samples starting at absolute index `readIx` (wrapping) into `output`,
// using only the channels both blocks have in common.
void read(int64 readIx, dsp::AudioBlock<float> output) const
{
const auto numChannelsToUse = std::min(buffer.getNumChannels(), output.getNumChannels());
jassert(output.getNumChannels() == buffer.getNumChannels());
const auto bufferLength = (int64)buffer.getNumSamples();
for (auto outputOffset = (size_t)0; outputOffset < output.getNumSamples();)
{
// Same contiguous-run strategy as push(), on the read side.
const auto inputOffset = (size_t)((readIx + (int64)outputOffset) % bufferLength);
const auto numSamplesToRead = std::min(output.getNumSamples() - outputOffset,
(size_t)bufferLength - inputOffset);
auto destSubBlock = output.getSubBlock(outputOffset, numSamplesToRead)
.getSubsetChannelBlock(0, numChannelsToUse);
destSubBlock.copyFrom(buffer.getSubBlock(inputOffset, numSamplesToRead)
.getSubsetChannelBlock(0, numChannelsToUse));
outputOffset += numSamplesToRead;
}
}
// Total number of samples ever written (absolute, never wrapped).
int64 getWriteIndex() const noexcept { return writeIx; }
private:
HeapBlock<char> data;
dsp::AudioBlock<float> buffer;
int64 writeIx = 0;
};

185
Source/PluginEditor.cpp Normal file
View File

@ -0,0 +1,185 @@
/*
==============================================================================
PluginEditor.cpp
Created: 4 Nov 2025 6:20:46pm
Author: mickl
==============================================================================
*/
#include "PluginEditor.h"
#include "DemoUtilities.h"
// Lazily opens the bundled web-GUI zip archive.
// Returns nullptr when the asset is not present (e.g. dev-server-only builds).
static ZipFile* getZipFile()
{
    static auto assetStream = createAssetInputStream("webviewplugin-gui_1.0.0.zip", AssertAssetExists::no);

    if (assetStream != nullptr)
    {
        static ZipFile archive{ assetStream.get(), false };
        return &archive;
    }

    return nullptr;
}
// Maps a file extension (case-insensitive) to the MIME type the resource
// provider should serve it with. Unknown extensions fall back to the standard
// generic binary type instead of asserting and returning an empty string —
// an empty Content-Type confuses browsers, and unexpected-but-valid assets
// (fonts, images) are legitimate.
static const char* getMimeForExtension(const String& extension)
{
    static const std::unordered_map<String, const char*> mimeMap =
    {
        { { "htm" }, "text/html" },
        { { "html" }, "text/html" },
        { { "txt" }, "text/plain" },
        { { "jpg" }, "image/jpeg" },
        { { "jpeg" }, "image/jpeg" },
        { { "svg" }, "image/svg+xml" },
        { { "ico" }, "image/vnd.microsoft.icon" },
        { { "json" }, "application/json" },
        { { "png" }, "image/png" },
        { { "css" }, "text/css" },
        { { "map" }, "application/json" },
        { { "js" }, "text/javascript" },
        { { "woff2" }, "font/woff2" }
    };

    if (const auto it = mimeMap.find(extension.toLowerCase()); it != mimeMap.end())
        return it->second;

    // Previously: jassertfalse + "". Use the RFC 2046 catch-all instead.
    return "application/octet-stream";
}
// Returns the text after the final '.' in `filename` (the whole string when
// there is no dot — JUCE's fromLastOccurrenceOf semantics).
static String getExtension(String filename)
{
    const auto extension = filename.fromLastOccurrenceOf(".", false, false);
    return extension;
}
// Reads the entire InputStream into a contiguous byte vector.
static auto streamToVector(InputStream& stream)
{
    const auto totalBytes = (size_t) stream.getTotalLength();
    std::vector<std::byte> bytes(totalBytes);

    stream.setPosition(0);
    [[maybe_unused]] const auto numRead = stream.read(bytes.data(), bytes.size());
    jassert(numRead == (ssize_t) bytes.size());

    return bytes;
}
std::optional<WebBrowserComponent::Resource> WebViewPluginAudioProcessorEditor::getResource(const String& url)
{
const auto urlToRetrive = url == "/" ? String{ "index.html" }
: url.fromFirstOccurrenceOf("/", false, false);
if (auto* archive = getZipFile())
{
if (auto* entry = archive->getEntry(urlToRetrive))
{
auto stream = rawToUniquePtr(archive->createStreamForEntry(*entry));
auto v = streamToVector(*stream);
auto mime = getMimeForExtension(getExtension(entry->filename).toLowerCase());
return WebBrowserComponent::Resource{ std::move(v),
std::move(mime) };
}
}
if (urlToRetrive == "index.html")
{
auto fallbackIndexHtml = createAssetInputStream("webviewplugin-gui-fallback.html");
return WebBrowserComponent::Resource{ streamToVector(*fallbackIndexHtml),
String { "text/html" } };
}
if (urlToRetrive == "data.txt")
{
WebBrowserComponent::Resource resource;
static constexpr char testData[] = "testdata";
MemoryInputStream stream{ testData, numElementsInArray(testData) - 1, false };
return WebBrowserComponent::Resource{ streamToVector(stream), String { "text/html" } };
}
if (urlToRetrive == "midNoteData.json")
{
juce::Array<var> notes;
int voice_num = 0;
for (auto& voice : processorRef.shifter.voices) {
if (voice.onoff_) {
auto obj = new DynamicObject();
obj->setProperty("voice", voice_num);
obj->setProperty("midi", voice.GetMidiNote());
notes.add(var(obj));
}
voice_num++;
}
DynamicObject::Ptr d(new DynamicObject());
d->setProperty("notes", notes);
const auto s = JSON::toString(d.get());
MemoryInputStream stream{ s.getCharPointer(), s.getNumBytesAsUTF8(), false };
return WebBrowserComponent::Resource{ streamToVector(stream), String { "application/json" } };
}
return std::nullopt;
}
#if JUCE_ANDROID
// The localhost is available on this address to the emulator
const String localDevServerAddress = "http://10.0.2.2:3000/";
#else
// Desktop builds reach the GUI dev server directly on localhost.
const String localDevServerAddress = "http://localhost:3000/";
#endif
// Permit navigation only to the dev server or our own resource-provider root,
// keeping the plugin UI a single-page app.
bool SinglePageBrowser::pageAboutToLoad(const String& newURL)
{
    const bool isDevServer = (newURL == localDevServerAddress);
    const bool isResourceRoot = (newURL == getResourceProviderRoot());
    return isDevServer || isResourceRoot;
}
// Builds the WebView-based editor: one WebSliderRelay/attachment pair for each
// parameter id the processor exposes, then a single-page browser wired to the
// resource provider, refreshed by a 60 Hz timer.
WebViewPluginAudioProcessorEditor::WebViewPluginAudioProcessorEditor(WebViewPluginAudioProcessor& p)
    : AudioProcessorEditor(&p), processorRef(p)
{
    auto options = WebBrowserComponent::Options{}
        .withBackend(WebBrowserComponent::Options::Backend::webview2)
        .withWinWebView2Options(WebBrowserComponent::Options::WinWebView2{}
            .withUserDataFolder(File::getSpecialLocation(File::SpecialLocationType::tempDirectory)))
        .withNativeIntegrationEnabled()
        .withOptionsFrom(controlParameterIndexReceiver)
        .withResourceProvider([this](const auto& url) { return getResource(url); },
                              URL{ localDevServerAddress }.getOrigin());

    // Dynamically create one relay + attachment per declared slider parameter.
    for (const auto& paramId : p.parameters.sliderIds)
    {
        auto* relay = new WebSliderRelay{ paramId };
        slider_relays.push_back(relay);
        slider_attatchments.push_back(new WebSliderParameterAttachment(
            *processorRef.state.getParameter(paramId),
            *relay,
            processorRef.state.undoManager));
        options = options.withOptionsFrom(*relay);
    }

    webComponent = new SinglePageBrowser(options);
    addAndMakeVisible(*webComponent);
    webComponent->goToURL(localDevServerAddress);
    //webComponent.goToURL (WebBrowserComponent::getResourceProviderRoot());
    setSize(500, 500);
    startTimerHz(60);
}
// Fill the whole editor with the look-and-feel background colour
// (the component is opaque, so every pixel must be painted).
void WebViewPluginAudioProcessorEditor::paint(Graphics& g)
{
    const auto background = getLookAndFeel().findColour(ResizableWindow::backgroundColourId);
    g.fillAll(background);
}
// Keep the browser component filling the entire editor area.
void WebViewPluginAudioProcessorEditor::resized()
{
    if (webComponent != nullptr)
        webComponent->setBounds(getLocalBounds());
}

93
Source/PluginEditor.h Normal file
View File

@ -0,0 +1,93 @@
/*
==============================================================================
PluginEditor.h
Created: 4 Nov 2025 6:20:46pm
Author: mickl
==============================================================================
*/
#pragma once
#include <JuceHeader.h>
#include "PluginProcessor.h"
extern const String localDevServerAddress;
std::optional<WebBrowserComponent::Resource> getResource(const String& url);
//==============================================================================
// Browser component pinned to a single page: every attempted navigation is
// vetted by pageAboutToLoad (implemented in PluginEditor.cpp), which only
// admits the dev server and the resource-provider root.
class SinglePageBrowser : public WebBrowserComponent
{
public:
using WebBrowserComponent::WebBrowserComponent;
// Prevent page loads from navigating away from our single page web app
bool pageAboutToLoad(const String& newURL) override;
};
//==============================================================================
// WebView-based plugin editor. Hosts a SinglePageBrowser and pushes voice /
// pitch data to the web UI from a 60 Hz timer.
class WebViewPluginAudioProcessorEditor : public AudioProcessorEditor, private Timer
{
public:
    explicit WebViewPluginAudioProcessorEditor(WebViewPluginAudioProcessor&);

    // Relays, attachments and the web component are allocated with `new` in the
    // constructor, so release them manually: component first, then the
    // attachments, then the relays they reference.
    ~WebViewPluginAudioProcessorEditor() {
        delete webComponent;
        for (auto& attatchments : slider_attatchments) {
            delete attatchments;
        }
        for (auto& relays : slider_relays) {
            delete relays;
        }
    }

    // Resource provider for the embedded browser (defined in PluginEditor.cpp).
    std::optional<WebBrowserComponent::Resource> getResource(const String& url);

    //==============================================================================
    void paint(Graphics&) override;
    void resized() override;

    int getControlParameterIndex(Component&) override
    {
        return controlParameterIndexReceiver.getControlParameterIndex();
    }

    // Timer tick: under midiLock, snapshot the active shifter voices plus the
    // current input/output pitch and emit them to the web UI as "midNoteData".
    // (Removed two unused locals: numFramesBuffered and callbackCounter.)
    void timerCallback() override
    {
        SpinLock::ScopedLockType lock{ processorRef.midiLock };
        processorRef.new_midi = false;
        juce::Array<var> notes;
        int voice_num = 0;
        for (auto& voice : processorRef.shifter.voices) {
            if (voice.onoff_) {
                auto obj = new DynamicObject();   // ref-counted once wrapped in var
                obj->setProperty("voice", voice_num);
                obj->setProperty("midi", voice.GetMidiNote());
                notes.add(var(obj));
            }
            voice_num++;
        }
        DynamicObject::Ptr d(new DynamicObject());
        d->setProperty("notes", notes);
        d->setProperty("input_pitch", processorRef.shifter.getInputPitch());
        d->setProperty("output_pitch", processorRef.shifter.getOutputPitch());
        webComponent->emitEventIfBrowserIsVisible("midNoteData", d.get());
    }

private:
    WebViewPluginAudioProcessor& processorRef;

    // One relay + attachment per slider parameter; populated in the constructor.
    // TODO(review): prefer std::unique_ptr members over manual delete above.
    std::vector<WebSliderRelay*> slider_relays;
    std::vector<WebSliderParameterAttachment*> slider_attatchments;

    WebControlParameterIndexReceiver controlParameterIndexReceiver;
    SinglePageBrowser* webComponent = nullptr;

    // NOTE(review): currently unused in this file — confirm before removing.
    std::deque<Array<var>> spectrumDataFrames;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(WebViewPluginAudioProcessorEditor)
};

View File

@ -0,0 +1,96 @@
/*
==============================================================================
PluginProcessor.cpp
Created: 4 Nov 2025 6:20:37pm
Author: mickl
==============================================================================
*/
#include "PluginProcessor.h"
//==============================================================================
// Constructs the processor with stereo in/out buses. `parameters` (declared
// before `state`) registers every parameter into `layout` first; the populated
// layout is then moved into the APVTS — member declaration order is load-bearing.
WebViewPluginAudioProcessor::WebViewPluginAudioProcessor(AudioProcessorValueTreeState::ParameterLayout layout)
: AudioProcessor(BusesProperties()
.withInput("Input", juce::AudioChannelSet::stereo(), true)
.withOutput("Output", juce::AudioChannelSet::stereo(), true)
),
parameters(layout),
state(*this, nullptr, "STATE", std::move(layout))
{
// Placeholder rate/block size; prepareToPlay() re-initialises with real values.
shifter.Init(48000.0f, 48);
// Seed the shifter from the current parameter values.
shifter.SetFormantPreserve(state.getParameterAsValue("formantPreserve").getValue());
shifter.SetAutoTuneSpeed(state.getParameterAsValue("autoTuneSpeed").getValue());
shifter.SetAutoTuneDepth(state.getParameterAsValue("autoTuneDepth").getValue());
shifter.SetPortamentoTime(state.getParameterAsValue("portTime").getValue());
}
//==============================================================================
// Re-initialise the DSP for the host's actual sample rate and block size.
void WebViewPluginAudioProcessor::prepareToPlay(double sampleRate, int samplesPerBlock)
{
    shifter.Init((float)sampleRate, samplesPerBlock);

    const auto numChannels = std::max(getTotalNumInputChannels(), getTotalNumOutputChannels());
    if (numChannels > 0)
    {
        filter.prepare({ sampleRate, (uint32_t)samplesPerBlock, (uint32_t)numChannels });
        filter.reset();
    }
}
// Accept mono or stereo only, and require matching input/output layouts.
bool WebViewPluginAudioProcessor::isBusesLayoutSupported(const BusesLayout& layouts) const
{
    const auto mainOut = layouts.getMainOutputChannelSet();

    const bool monoOrStereo = mainOut == juce::AudioChannelSet::mono()
                           || mainOut == juce::AudioChannelSet::stereo();

    return monoOrStereo && mainOut == layouts.getMainInputChannelSet();
}
// Audio callback: copies the input, runs the pitch shifter into the output
// buffer, and forwards MIDI note on/off events to the shifter.
void WebViewPluginAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer,
juce::MidiBuffer& midi)
{
    juce::ScopedNoDenormals noDenormals;
    const auto totalNumInputChannels = getTotalNumInputChannels();
    const auto totalNumOutputChannels = getTotalNumOutputChannels();

    // Clear any output channels that have no matching input.
    for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i)
        buffer.clear(i, 0, buffer.getNumSamples());

    // Read parameters through the APVTS atomics: getParameterAsValue() builds
    // a Value object and is not suitable for the realtime audio thread.
    shifter.SetFormantPreserve(state.getRawParameterValue("formantPreserve")->load());
    shifter.SetAutoTuneSpeed(state.getRawParameterValue("autoTuneSpeed")->load());
    shifter.SetAutoTuneDepth(state.getRawParameterValue("autoTuneDepth")->load());
    shifter.SetPortamentoTime(state.getRawParameterValue("portTime")->load());

    // NOTE(review): this copy allocates on the audio thread every block —
    // consider preallocating the scratch buffer in prepareToPlay().
    juce::AudioBuffer<float> const_buff;
    const_buff.makeCopyOf(buffer);
    shifter.Process(const_buff.getArrayOfReadPointers(), (float**)buffer.getArrayOfWritePointers(), buffer.getNumSamples());

    // Forward note on/off to the shifter; new_midi tells the editor to refresh.
    for (const auto metadata : midi)
    {
        const auto msg = metadata.getMessage();
        if (msg.isNoteOn()) {
            shifter.AddMidiNote(msg.getNoteNumber());
            new_midi = true;
        }
        else if (msg.isNoteOff()) {
            shifter.RemoveMidiNote(msg.getNoteNumber());
            new_midi = true;
        }
    }
}
//==============================================================================
void WebViewPluginAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
{
juce::ignoreUnused(destData);
}
void WebViewPluginAudioProcessor::setStateInformation(const void* data, int sizeInBytes)
{
juce::ignoreUnused(data, sizeInBytes);
}

131
Source/PluginProcessor.h Normal file
View File

@ -0,0 +1,131 @@
/*
==============================================================================
PluginProcessor.h
Created: 4 Nov 2025 6:20:37pm
Author: mickl
==============================================================================
*/
#pragma once
#include <JuceHeader.h>
#include "Shifter.h"
// Pitch-shifting processor. Slider parameters are declared once in
// Parameters::Parameters(); the editor builds its web-UI controls dynamically
// from Parameters::sliderIds, so adding a slider only requires adding it there.
class WebViewPluginAudioProcessor : public AudioProcessor
{
public:
    //==============================================================================
    WebViewPluginAudioProcessor(AudioProcessorValueTreeState::ParameterLayout layout);

    //==============================================================================
    void prepareToPlay(double sampleRate, int samplesPerBlock) override;
    void releaseResources() override {}
    bool isBusesLayoutSupported(const BusesLayout& layouts) const override;
    void processBlock(AudioBuffer<float>&, MidiBuffer&) override;
    using AudioProcessor::processBlock;

    //==============================================================================
    const String getName() const override { return JucePlugin_Name; }
    // processBlock() consumes note on/off messages, so the plugin must report
    // that it accepts MIDI — otherwise hosts will not deliver any.
    bool acceptsMidi() const override { return true; }
    bool producesMidi() const override { return false; }
    bool isMidiEffect() const override { return false; }
    double getTailLengthSeconds() const override { return 0.0; }

    //==============================================================================
    int getNumPrograms() override { return 1; }
    int getCurrentProgram() override { return 0; }
    void setCurrentProgram(int) override {}
    const String getProgramName(int) override { return {}; }
    void changeProgramName(int, const String&) override {}

    //==============================================================================
    void getStateInformation(MemoryBlock& destData) override;
    void setStateInformation(const void* data, int sizeInBytes) override;

    // Set by the audio thread when a MIDI note arrives; cleared by the editor timer.
    bool new_midi = false;

    // Declares every automatable slider parameter and records its id so the
    // editor can create matching web controls without further code changes.
    struct Parameters
    {
    public:
        explicit Parameters(AudioProcessorValueTreeState::ParameterLayout& layout)
        {
            sliderIds.push_back("formantPreserve");
            addToLayout<AudioParameterFloat>(layout,
                ParameterID{ "formantPreserve" },
                "Formant Preserve",
                NormalisableRange<float> {0.0f, 1.0f, .01f},
                .5f);
            sliderIds.push_back("autoTuneDepth");
            addToLayout<AudioParameterFloat>(layout,
                ParameterID("autoTuneDepth"),
                "AutoTune Depth",
                NormalisableRange<float> {0.0f, 1.1f, .01f},
                .5f);
            sliderIds.push_back("autoTuneSpeed");
            // The default must lie inside the normalisable range; the previous
            // default of 0.5f was outside [0.001, 0.1].
            addToLayout<AudioParameterFloat>(layout,
                ParameterID("autoTuneSpeed"),
                "AutoTune Speed",
                NormalisableRange<float> {0.001f, 0.1f, .001f},
                .05f);
            sliderIds.push_back("portTime");
            addToLayout<AudioParameterFloat>(layout,
                ParameterID("portTime"),
                "Portamento Speed",
                NormalisableRange<float> {0.001f, 0.2f, .001f},
                .01f);
        }

        // Parameter ids in declaration order — consumed by the editor.
        std::vector<juce::String> sliderIds;

    private:
        template <typename Param>
        static void add(AudioProcessorParameterGroup& group, std::unique_ptr<Param> param)
        {
            group.addChild(std::move(param));
        }
        template <typename Param>
        static void add(AudioProcessorValueTreeState::ParameterLayout& group, std::unique_ptr<Param> param)
        {
            group.add(std::move(param));
        }
        // Constructs a parameter, registers it with the layout/group, and
        // returns a reference to the registered instance.
        template <typename Param, typename Group, typename... Ts>
        static Param& addToLayout(Group& layout, Ts&&... ts)
        {
            auto param = std::make_unique<Param>(std::forward<Ts>(ts)...);
            auto& ref = *param;
            add(layout, std::move(param));
            return ref;
        }
    };

    Parameters parameters;          // must precede `state`: it fills the layout first
    AudioProcessorValueTreeState state;
    SpinLock midiLock;              // intended to guard shifter state shared with the editor
    dsp::LadderFilter<float> filter;
    Shifter shifter;

private:
    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(WebViewPluginAudioProcessor)
};

View File

@ -52,652 +52,12 @@
#pragma once
#include "DemoUtilities.h"
#include <JuceHeader.h>
#include "Shifter.h"
#include "PluginEditor.h"
#include "PluginProcessor.h"
//using namespace juce::dsp;
// Stable parameter identifiers (version 1), generated via a stringifying macro
// so the C++ symbol and the host-visible id string always match.
namespace ID
{
#define PARAMETER_ID(str) static const ParameterID str { #str, 1 };
PARAMETER_ID(formantPreserve)
PARAMETER_ID(autoTuneSpeed)
PARAMETER_ID(autoTuneDepth)
PARAMETER_ID(portTime)
PARAMETER_ID(mute)
PARAMETER_ID(filterType)
#undef PARAMETER_ID
}
// Fixed-size multi-channel audio ring buffer (identical to Source/CircularBuffer.h).
// Writes advance a monotonically increasing absolute write index; reads address
// the buffer modulo the buffer length. Not internally synchronized.
class CircularBuffer
{
public:
CircularBuffer(int numChannels, int numSamples)
: buffer(data, (size_t)numChannels, (size_t)numSamples)
{
}
// Appends a block, keeping only the most recent `capacity` samples and
// wrapping the write position around the end of the ring.
template <typename T>
void push(dsp::AudioBlock<T> b)
{
jassert(b.getNumChannels() == buffer.getNumChannels());
const auto trimmed = b.getSubBlock(b.getNumSamples()
- std::min(b.getNumSamples(), buffer.getNumSamples()));
const auto bufferLength = (int64)buffer.getNumSamples();
for (auto samplesRemaining = (int64)trimmed.getNumSamples(); samplesRemaining > 0;)
{
// Copy in contiguous runs: up to the end of the ring, then wrap.
const auto writeOffset = writeIx % bufferLength;
const auto numSamplesToWrite = std::min(samplesRemaining, bufferLength - writeOffset);
auto destSubBlock = buffer.getSubBlock((size_t)writeOffset, (size_t)numSamplesToWrite);
const auto sourceSubBlock = trimmed.getSubBlock(trimmed.getNumSamples() - (size_t)samplesRemaining,
(size_t)numSamplesToWrite);
destSubBlock.copyFrom(sourceSubBlock);
samplesRemaining -= numSamplesToWrite;
writeIx += numSamplesToWrite;
}
}
// Convenience overload: treats a span as a single-channel block.
template <typename T>
void push(Span<T> s)
{
auto* ptr = s.begin();
dsp::AudioBlock<T> b(&ptr, 1, s.size());
push(b);
}
// Copies samples starting at absolute index `readIx` (wrapping) into `output`,
// using only the channels both blocks have in common.
void read(int64 readIx, dsp::AudioBlock<float> output) const
{
const auto numChannelsToUse = std::min(buffer.getNumChannels(), output.getNumChannels());
jassert(output.getNumChannels() == buffer.getNumChannels());
const auto bufferLength = (int64)buffer.getNumSamples();
for (auto outputOffset = (size_t)0; outputOffset < output.getNumSamples();)
{
const auto inputOffset = (size_t)((readIx + (int64)outputOffset) % bufferLength);
const auto numSamplesToRead = std::min(output.getNumSamples() - outputOffset,
(size_t)bufferLength - inputOffset);
auto destSubBlock = output.getSubBlock(outputOffset, numSamplesToRead)
.getSubsetChannelBlock(0, numChannelsToUse);
destSubBlock.copyFrom(buffer.getSubBlock(inputOffset, numSamplesToRead)
.getSubsetChannelBlock(0, numChannelsToUse));
outputOffset += numSamplesToRead;
}
}
// Total number of samples ever written (absolute, never wrapped).
int64 getWriteIndex() const noexcept { return writeIx; }
private:
HeapBlock<char> data;
dsp::AudioBlock<float> buffer;
int64 writeIx = 0;
};
//class SpectralBars
//{
//public:
// //template <typename T>
// void push(int data)
// {
// testQueue.push(data);
// }
//
// void compute(Span<int> output) {
// int index = 0;
// for (auto it = output.begin(); it != output.end(); ++it) {
// *it = testQueue.get(index++);
// }
// }
//
//
//private:
// circ_queue<int, 256> testQueue;
//};
//==============================================================================
// Pre-refactor processor declaration (removed by this commit in favour of
// Source/PluginProcessor.h). Parameters were held as named references rather
// than the dynamic sliderIds list used by the new version.
class WebViewPluginAudioProcessor : public AudioProcessor
{
public:
//==============================================================================
WebViewPluginAudioProcessor(AudioProcessorValueTreeState::ParameterLayout layout);
//==============================================================================
void prepareToPlay(double sampleRate, int samplesPerBlock) override;
void releaseResources() override {}
bool isBusesLayoutSupported(const BusesLayout& layouts) const override;
void processBlock(AudioBuffer<float>&, MidiBuffer&) override;
using AudioProcessor::processBlock;
//==============================================================================
const String getName() const override { return JucePlugin_Name; }
// NOTE(review): returns false although processBlock() consumes MIDI notes.
bool acceptsMidi() const override { return false; }
bool producesMidi() const override { return false; }
bool isMidiEffect() const override { return false; }
double getTailLengthSeconds() const override { return 0.0; }
//==============================================================================
int getNumPrograms() override { return 1; }
int getCurrentProgram() override { return 0; }
void setCurrentProgram(int) override {}
const String getProgramName(int) override { return {}; }
void changeProgramName(int, const String&) override {}
//==============================================================================
void getStateInformation(MemoryBlock& destData) override;
void setStateInformation(const void* data, int sizeInBytes) override;
bool new_midi = false;
// Registers each parameter into the layout and keeps a typed reference to it.
struct Parameters
{
public:
explicit Parameters(AudioProcessorValueTreeState::ParameterLayout& layout)
: formantPreserve(addToLayout<AudioParameterFloat>(layout,
ID::formantPreserve,
"Formant Preserve",
NormalisableRange<float> {0.0f, 1.0f, .01f},
.5f)),
// NOTE(review): default 0.5f lies outside the [0.001, 0.1] range below.
autoTuneSpeed(addToLayout<AudioParameterFloat>(layout,
ID::autoTuneSpeed,
"AutoTune Speed",
NormalisableRange<float> {0.001f, 0.1f, .001f},
.5f)),
autoTuneDepth(addToLayout<AudioParameterFloat>(layout,
ID::autoTuneDepth,
"AutoTune Depth",
NormalisableRange<float> {0.0f, 1.1f, .01f},
.5f)),
portTime(addToLayout<AudioParameterFloat>(layout,
ID::portTime,
"Portamento Speed",
NormalisableRange<float> {0.001f, 0.2f, .001f},
.01f)),
mute(addToLayout<AudioParameterBool>(layout, ID::mute, "Mute", false)),
filterType(addToLayout<AudioParameterChoice>(layout,
ID::filterType,
"Filter type",
StringArray{ "Low-pass", "High-pass", "Band-pass" },
0))
{
}
AudioParameterFloat& formantPreserve;
AudioParameterFloat& autoTuneSpeed;
AudioParameterFloat& autoTuneDepth;
AudioParameterFloat& portTime;
AudioParameterBool& mute;
AudioParameterChoice& filterType;
private:
template <typename Param>
static void add(AudioProcessorParameterGroup& group, std::unique_ptr<Param> param)
{
group.addChild(std::move(param));
}
template <typename Param>
static void add(AudioProcessorValueTreeState::ParameterLayout& group, std::unique_ptr<Param> param)
{
group.add(std::move(param));
}
// Constructs a parameter, registers it, and returns a reference to it.
template <typename Param, typename Group, typename... Ts>
static Param& addToLayout(Group& layout, Ts&&... ts)
{
auto param = std::make_unique<Param>(std::forward<Ts>(ts)...);
auto& ref = *param;
add(layout, std::move(param));
return ref;
}
};
Parameters parameters;
AudioProcessorValueTreeState state;
SpinLock midiLock;
/*std::vector<int> spectrumData = [] { return std::vector<int>(256, 0.0f); }();
SpinLock spectrumDataLock;
SpectralBars spectralBars;*/
dsp::LadderFilter<float> filter;
Shifter shifter;
private:
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(WebViewPluginAudioProcessor)
};
//==============================================================================
// Pre-refactor constructor (removed by this commit). Bus creation is guarded
// by the JucePlugin_IsMidiEffect / JucePlugin_IsSynth build macros; the layout
// is filled by `parameters` and then moved into the APVTS.
WebViewPluginAudioProcessor::WebViewPluginAudioProcessor(AudioProcessorValueTreeState::ParameterLayout layout)
: AudioProcessor(BusesProperties()
#if ! JucePlugin_IsMidiEffect
#if ! JucePlugin_IsSynth
.withInput("Input", juce::AudioChannelSet::stereo(), true)
#endif
.withOutput("Output", juce::AudioChannelSet::stereo(), true)
#endif
),
parameters(layout),
state(*this, nullptr, "STATE", std::move(layout))
{
// Placeholder rate/block size; prepareToPlay() re-initialises with real values.
shifter.Init(48000.0f, 48);
}
//==============================================================================
// Re-initialises the DSP for the host's sample rate and block size.
void WebViewPluginAudioProcessor::prepareToPlay(double sampleRate, int samplesPerBlock)
{
const auto channels = std::max(getTotalNumInputChannels(), getTotalNumOutputChannels());
shifter.Init((float)sampleRate, samplesPerBlock);
if (channels == 0)
return;
filter.prepare({ sampleRate, (uint32_t)samplesPerBlock, (uint32_t)channels });
filter.reset();
}
// Accepts mono or stereo only, and requires matching input/output layouts.
bool WebViewPluginAudioProcessor::isBusesLayoutSupported(const BusesLayout& layouts) const
{
if (layouts.getMainOutputChannelSet() != juce::AudioChannelSet::mono()
&& layouts.getMainOutputChannelSet() != juce::AudioChannelSet::stereo())
return false;
if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
return false;
return true;
}
// Pre-refactor audio callback (removed by this commit): copies the input, runs
// the pitch shifter into the output, and forwards MIDI notes to the shifter.
void WebViewPluginAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer,
juce::MidiBuffer& midi)
{
juce::ScopedNoDenormals noDenormals;
const auto totalNumInputChannels = getTotalNumInputChannels();
const auto totalNumOutputChannels = getTotalNumOutputChannels();
// Clear any output channels that have no matching input.
for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i)
buffer.clear(i, 0, buffer.getNumSamples());
shifter.SetFormantPreserve(parameters.formantPreserve.get());
shifter.SetAutoTuneSpeed(parameters.autoTuneSpeed.get());
shifter.SetAutoTuneDepth(parameters.autoTuneDepth.get());
shifter.SetPortamentoTime(parameters.portTime.get());
// NOTE(review): this copy allocates on the audio thread every block.
juce::AudioBuffer<float> const_buff;
const_buff.makeCopyOf(buffer);
shifter.Process(const_buff.getArrayOfReadPointers(), (float**)buffer.getArrayOfWritePointers(), buffer.getNumSamples());
for (const auto metadata : midi)
{
const auto msg = metadata.getMessage();
if (msg.isNoteOn()) {
shifter.AddMidiNote(msg.getNoteNumber());
new_midi = true;
//editor.webComponent.emitEventIfBrowserIsVisible("midNoteData", var{});
}
else if (msg.isNoteOff()) {
shifter.RemoveMidiNote(msg.getNoteNumber());
new_midi = true;
//editor.webComponent.emitEventIfBrowserIsVisible("midNoteData", var{});
}
}
// NOTE(review): vestigial — the lock is acquired and immediately released,
// and the early return at end-of-function has no effect.
{
//DBG(shifter.out_midi[MAX_VOICES]);
//push midi note
//spectralBars.push(shifter.out_midi[MAX_VOICES]);
const SpinLock::ScopedTryLockType lock(midiLock);
if (!lock.isLocked())
return;
}
/*for(auto i = 0; i < buffer.getNumSamples(); ++i)
{
bool process = (i % 256) == 0 && i != 0;
for(auto j = 0; j < totalNumInputChannels; ++j)
{
input[j][i] = buffer.getReadPointer(j)[i];
}
}
filter.setCutoffFrequencyHz (parameters.cutoffFreqHz.get());
const auto filterMode = [this]
{
switch (parameters.filterType.getIndex())
{
case 0:
return dsp::LadderFilter<float>::Mode::LPF12;
case 1:
return dsp::LadderFilter<float>::Mode::HPF12;
default:
return dsp::LadderFilter<float>::Mode::BPF12;
}
}();
filter.setMode (filterMode);
auto outBlock = dsp::AudioBlock<float> { buffer }.getSubsetChannelBlock (0, (size_t) getTotalNumOutputChannels());
if (parameters.mute.get())
outBlock.clear();
filter.process (dsp::ProcessContextReplacing<float> (outBlock));
spectralBars.push (Span { buffer.getReadPointer (0), (size_t) buffer.getNumSamples() });
{
const SpinLock::ScopedTryLockType lock (spectrumDataLock);
if (! lock.isLocked())
return;
spectralBars.compute ({ spectrumData.data(), spectrumData.size() });
}*/
}
//==============================================================================
// NOTE(review): stubs — no plugin state is persisted, so user settings are
// lost when the host session reloads.
void WebViewPluginAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
{
juce::ignoreUnused(destData);
}
void WebViewPluginAudioProcessor::setStateInformation(const void* data, int sizeInBytes)
{
juce::ignoreUnused(data, sizeInBytes);
}
extern const String localDevServerAddress;
std::optional<WebBrowserComponent::Resource> getResource(const String& url);
//==============================================================================
// Browser component pinned to a single page; navigation is vetted by
// pageAboutToLoad, which only admits the dev server and resource-provider root.
struct SinglePageBrowser : WebBrowserComponent
{
using WebBrowserComponent::WebBrowserComponent;
// Prevent page loads from navigating away from our single page web app
bool pageAboutToLoad(const String& newURL) override;
};
//==============================================================================
// Pre-refactor editor (removed by this commit): one hard-coded relay and
// attachment per control, superseded by the dynamic sliderIds-driven editor.
class WebViewPluginAudioProcessorEditor : public AudioProcessorEditor, private Timer
{
public:
explicit WebViewPluginAudioProcessorEditor(WebViewPluginAudioProcessor&);
// Resource provider for the embedded browser.
std::optional<WebBrowserComponent::Resource> getResource(const String& url);
//==============================================================================
void paint(Graphics&) override;
void resized() override;
int getControlParameterIndex(Component&) override
{
return controlParameterIndexReceiver.getControlParameterIndex();
}
// Timer tick: under midiLock, snapshot active voices and pitch data and emit
// them to the web UI as "midNoteData".
void timerCallback() override
{
// NOTE(review): numFramesBuffered and callbackCounter are unused.
static constexpr size_t numFramesBuffered = 5;
SpinLock::ScopedLockType lock{ processorRef.midiLock };
static int64 callbackCounter = 0;
processorRef.new_midi = false;
juce::Array<var> notes;
int voice_num = 0;
for (auto& voice : processorRef.shifter.voices) {
if (voice.onoff_) {
auto obj = new DynamicObject();
obj->setProperty("voice", voice_num);
obj->setProperty("midi", voice.GetMidiNote());
notes.add(var(obj));
}
voice_num++;
}
DynamicObject::Ptr d(new DynamicObject());
d->setProperty("notes", notes);
d->setProperty("input_pitch", processorRef.shifter.getInputPitch());
d->setProperty("output_pitch", processorRef.shifter.getOutputPitch());
webComponent.emitEventIfBrowserIsVisible("midNoteData", d.get());
}
private:
WebViewPluginAudioProcessor& processorRef;
// One relay per hard-coded web control.
WebSliderRelay formantSliderRelay{ "formantSlider" };
WebSliderRelay autoTuneSpeedSliderRelay{ "autoTuneSpeedSlider" };
WebSliderRelay autoTuneDepthSliderRelay{ "autoTuneDepthSlider" };
WebSliderRelay portTimeSliderRelay{ "portTimeSlider" };
WebToggleButtonRelay muteToggleRelay{ "muteToggle" };
WebComboBoxRelay filterTypeComboRelay{ "filterTypeCombo" };
WebControlParameterIndexReceiver controlParameterIndexReceiver;
// Browser configured inline with every relay plus the resource provider.
SinglePageBrowser webComponent{ WebBrowserComponent::Options{}
.withBackend(WebBrowserComponent::Options::Backend::webview2)
.withWinWebView2Options(WebBrowserComponent::Options::WinWebView2{}
.withUserDataFolder(File::getSpecialLocation(File::SpecialLocationType::tempDirectory)))
.withNativeIntegrationEnabled()
.withOptionsFrom(formantSliderRelay)
.withOptionsFrom(autoTuneSpeedSliderRelay)
.withOptionsFrom(autoTuneDepthSliderRelay)
.withOptionsFrom(portTimeSliderRelay)
.withOptionsFrom(muteToggleRelay)
.withOptionsFrom(filterTypeComboRelay)
.withOptionsFrom(controlParameterIndexReceiver)
.withNativeFunction("sayHello", [](auto& var, auto complete)
{
complete("Hello " + var[0].toString());
})
.withResourceProvider([this](const auto& url)
{
return getResource(url);
},
URL { localDevServerAddress }.getOrigin()) };
WebSliderParameterAttachment formantAttachment;
WebSliderParameterAttachment autoTuneSpeedAttachment;
WebSliderParameterAttachment autoTuneDepthAttachment;
WebSliderParameterAttachment portTimeAttachment;
WebToggleButtonParameterAttachment muteAttachment;
WebComboBoxParameterAttachment filterTypeAttachment;
std::deque<Array<var>> spectrumDataFrames;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(WebViewPluginAudioProcessorEditor)
};
// Lazily opens the bundled web-GUI archive; nullptr when the asset is absent.
static ZipFile* getZipFile()
{
static auto stream = createAssetInputStream("webviewplugin-gui_1.0.0.zip", AssertAssetExists::no);
if (stream == nullptr)
return nullptr;
static ZipFile f{ stream.get(), false };
return &f;
}
static const char* getMimeForExtension(const String& extension)
{
    // Lower-case file extension -> MIME type for the resources we ship.
    static const std::unordered_map<String, const char*> mimeMap =
    {
        { "htm",   "text/html" },
        { "html",  "text/html" },
        { "txt",   "text/plain" },
        { "jpg",   "image/jpeg" },
        { "jpeg",  "image/jpeg" },
        { "svg",   "image/svg+xml" },
        { "ico",   "image/vnd.microsoft.icon" },
        { "json",  "application/json" },
        { "png",   "image/png" },
        { "css",   "text/css" },
        { "map",   "application/json" },
        { "js",    "text/javascript" },
        { "woff2", "font/woff2" }
    };

    const auto entry = mimeMap.find(extension.toLowerCase());

    if (entry != mimeMap.end())
        return entry->second;

    jassertfalse; // unknown extension — add it to mimeMap above
    return "";
}
// Returns the extension of `filename` (text after the last '.').
// NOTE: JUCE's fromLastOccurrenceOf() returns the WHOLE string when no '.'
// is present; such names will then fail the jassert in getMimeForExtension.
// Taken by const reference to avoid copying the string (it is only read).
static String getExtension(const String& filename)
{
    return filename.fromLastOccurrenceOf(".", false, false);
}
// Reads the entire stream (from position 0) into a byte vector.
// Guards against InputStream::getTotalLength() returning -1 for streams of
// unknown length, which the previous code would have cast to a huge size_t
// and passed to the vector constructor.
static auto streamToVector(InputStream& stream)
{
    const auto totalLength = stream.getTotalLength();
    jassert(totalLength >= 0); // callers are expected to pass finite streams

    if (totalLength < 0)
        return std::vector<std::byte>{};

    std::vector<std::byte> result((size_t)totalLength);
    stream.setPosition(0);
    [[maybe_unused]] const auto bytesRead = stream.read(result.data(), result.size());
    jassert(bytesRead == (ssize_t)result.size());
    return result;
}
std::optional<WebBrowserComponent::Resource> WebViewPluginAudioProcessorEditor::getResource(const String& url)
{
const auto urlToRetrive = url == "/" ? String{ "index.html" }
: url.fromFirstOccurrenceOf("/", false, false);
if (auto* archive = getZipFile())
{
if (auto* entry = archive->getEntry(urlToRetrive))
{
auto stream = rawToUniquePtr(archive->createStreamForEntry(*entry));
auto v = streamToVector(*stream);
auto mime = getMimeForExtension(getExtension(entry->filename).toLowerCase());
return WebBrowserComponent::Resource{ std::move(v),
std::move(mime) };
}
}
if (urlToRetrive == "index.html")
{
auto fallbackIndexHtml = createAssetInputStream("webviewplugin-gui-fallback.html");
return WebBrowserComponent::Resource{ streamToVector(*fallbackIndexHtml),
String { "text/html" } };
}
if (urlToRetrive == "data.txt")
{
WebBrowserComponent::Resource resource;
static constexpr char testData[] = "testdata";
MemoryInputStream stream{ testData, numElementsInArray(testData) - 1, false };
return WebBrowserComponent::Resource{ streamToVector(stream), String { "text/html" } };
}
if (urlToRetrive == "midNoteData.json")
{
juce::Array<var> notes;
int voice_num = 0;
for (auto& voice : processorRef.shifter.voices) {
if (voice.onoff_) {
auto obj = new DynamicObject();
obj->setProperty("voice", voice_num);
obj->setProperty("midi", voice.GetMidiNote());
notes.add(var(obj));
}
voice_num++;
}
DynamicObject::Ptr d(new DynamicObject());
d->setProperty("notes", notes);
const auto s = JSON::toString(d.get());
MemoryInputStream stream{ s.getCharPointer(), s.getNumBytesAsUTF8(), false };
return WebBrowserComponent::Resource{ streamToVector(stream), String { "application/json" } };
}
return std::nullopt;
}
#if JUCE_ANDROID
// The localhost is available on this address to the emulator
const String localDevServerAddress = "http://10.0.2.2:3000/";
#else
// Hot-reload dev server (npm start) used while developing the web GUI.
const String localDevServerAddress = "http://localhost:3000/";
#endif
// Restricts navigation: the single-page GUI may only load from the local
// dev server or from our own resource provider. Everything else is blocked.
bool SinglePageBrowser::pageAboutToLoad(const String& newURL)
{
    const bool isDevServer    = (newURL == localDevServerAddress);
    const bool isResourceRoot = (newURL == getResourceProviderRoot());
    return isDevServer || isResourceRoot;
}
//==============================================================================
// Editor constructor: wires each web relay to its processor parameter via
// a WebSliderParameterAttachment (and toggle/combo equivalents), then shows
// the browser. NOTE: member-init order must follow the declaration order of
// the attachment members in the class.
WebViewPluginAudioProcessorEditor::WebViewPluginAudioProcessorEditor(WebViewPluginAudioProcessor& p)
: AudioProcessorEditor(&p), processorRef(p),
formantAttachment(*processorRef.state.getParameter(ID::formantPreserve.getParamID()),
formantSliderRelay,
processorRef.state.undoManager),
autoTuneSpeedAttachment(*processorRef.state.getParameter(ID::autoTuneSpeed.getParamID()),
autoTuneSpeedSliderRelay,
processorRef.state.undoManager),
autoTuneDepthAttachment(*processorRef.state.getParameter(ID::autoTuneDepth.getParamID()),
autoTuneDepthSliderRelay,
processorRef.state.undoManager),
portTimeAttachment(*processorRef.state.getParameter(ID::portTime.getParamID()),
portTimeSliderRelay,
processorRef.state.undoManager),
muteAttachment(*processorRef.state.getParameter(ID::mute.getParamID()),
muteToggleRelay,
processorRef.state.undoManager),
filterTypeAttachment(*processorRef.state.getParameter(ID::filterType.getParamID()),
filterTypeComboRelay,
processorRef.state.undoManager)
{
addAndMakeVisible(webComponent);
// Load from the live dev server; switch to the commented line below to
// serve the bundled zip through the resource provider instead.
webComponent.goToURL(localDevServerAddress);
//webComponent.goToURL (WebBrowserComponent::getResourceProviderRoot());
setSize(500, 500);
// 60 Hz timer drives the periodic UI data pushes (pitch/spectrum events).
startTimerHz(60);
}
//==============================================================================
// The editor is opaque, so every pixel must be painted each frame.
void WebViewPluginAudioProcessorEditor::paint(Graphics& g)
{
    const auto background = getLookAndFeel().findColour(ResizableWindow::backgroundColourId);
    g.fillAll(background);
}
// The web view occupies the whole editor area.
void WebViewPluginAudioProcessorEditor::resized()
{
    const auto fullArea = getLocalBounds();
    webComponent.setBounds(fullArea);
}
class WebViewPluginAudioProcessorWrapper : public WebViewPluginAudioProcessor
{
@ -711,3 +71,4 @@ public:
return new WebViewPluginAudioProcessorEditor(*this);
}
};