// NOTE(review): merge residue — this file was reassembled from commit
// e0840597e9 ("Merge branch 'master' of https://bitbucket.org/flucoma/flucoma-supercollider").
// Original hunk header: @@ -0,0 +1,78 @@ (DataSetWriter client header follows).
#pragma once

#include <algorithms/KDTree.hpp>
#include <clients/common/FluidBaseClient.hpp>
#include <clients/common/FluidNRTClientWrapper.hpp>
#include <clients/common/MessageSet.hpp>
#include <clients/common/OfflineClient.hpp>
#include <clients/common/ParameterSet.hpp>
#include <clients/common/ParameterTypes.hpp>
#include <clients/common/Result.hpp>
#include <clients/common/SharedClientUtils.hpp>
#include <clients/nrt/CommonResults.hpp>
#include <clients/nrt/DataSetClient.hpp>
#include <data/FluidDataSet.hpp>
#include <data/FluidIndex.hpp>
#include <data/FluidTensor.hpp>
#include <data/TensorTypes.hpp>
#include <string>

namespace fluid {
namespace client {
namespace datasetwr {

// Parameter indices into DataSetWrParams.
enum { kDataSet, kIDPrefix, kIDNumber, kBuffer };

// Default for the two-element idNumber array (see process() for its layout).
constexpr std::initializer_list<index> idNumberDefaults{0, 0};

constexpr auto DataSetWrParams = defineParameters(
    DataSetClientRef::makeParam("dataSet", "DataSet ID"),
    StringParam("idPrefix", "ID Prefix"),
    LongArrayParam("idNumber", "ID Counter Offset", idNumberDefaults),
    BufferParam("buf", "Data Buffer"));

/// Offline (NRT) client that writes a single point from a buffer into a
/// shared DataSet, under an ID built from an optional string prefix and an
/// optional numeric suffix.
class DataSetWriterClient : public FluidBaseClient, OfflineIn, OfflineOut {

public:
  using ParamDescType = decltype(DataSetWrParams);
  using ParamSetViewType = ParameterSetView<ParamDescType>;

  // Non-owning view onto the externally managed parameter set.
  std::reference_wrapper<ParamSetViewType> mParams;

  void setParams(ParamSetViewType &p) { mParams = p; }

  /// Access the N-th parameter of the set this client views.
  template <size_t N> auto &get() const {
    return mParams.get().template get<N>();
  }

  static constexpr auto &getParameterDescriptors() { return DataSetWrParams; }

  DataSetWriterClient(ParamSetViewType &p) : mParams(p) {}

  /// Write the point held in the data buffer into the target DataSet.
  /// @return kError if the DataSet is unavailable, the idNumber array is
  ///         malformed, or no usable ID can be formed; otherwise the result
  ///         of DataSet::setPoint.
  template <typename T> Result process(FluidContext &) {
    auto dataset = get<kDataSet>().get();
    if (auto datasetPtr = dataset.lock()) {
      std::string &idPrefix = get<kIDPrefix>();
      auto &idNumberArr = get<kIDNumber>();
      if (idNumberArr.size() != 2)
        return {Result::Status::kError, "ID number malformed"};
      // Need at least one source of identity: a prefix or a numeric ID.
      if (idPrefix.size() == 0 && idNumberArr[0] == 0)
        return {Result::Status::kError, "No ID supplied"};

      std::string id = idPrefix;

      // idNumberArr[0] appears to act as an enable flag and idNumberArr[1]
      // as the actual counter value appended to the prefix —
      // TODO(review): confirm this [flag, value] layout against callers.
      if (idNumberArr[0] > 0)
        id += std::to_string(idNumberArr[1]);

      auto buf = get<kBuffer>();
      return datasetPtr->setPoint(id, buf);
    } else
      return {Result::Status::kError, "No DataSet"};
  }
};
} // namespace datasetwr

using NRTThreadedDataSetWriter =
    NRTThreadingAdaptor<ClientWrapper<datasetwr::DataSetWriterClient>>;
} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,337 @@) ----
#pragma once

#include "Meta.hpp"

namespace fluid {
namespace client {

namespace impl {
/// Iterate over kr/ir inputs via callbacks from params object.
/// Wraps scsynth's float** control-input array with bounds-checked
/// sequential access.
struct FloatControlsIter
{
  FloatControlsIter(float** vals, index N) : mValues(vals), mSize(N) {}

  /// Next control value; returns 0 once the inputs are exhausted rather
  /// than reading out of bounds.
  float next() { return mCount >= mSize ? 0 : *mValues[mCount++]; }

  /// Point at a fresh set of control inputs and rewind.
  void reset(float** vals)
  {
    mValues = vals;
    mCount = 0;
  }

  index size() const noexcept { return mSize; }
  index remain() { return mSize - mCount; }

private:
  float** mValues;
  index   mSize;
  index   mCount{0};
};
} // namespace impl

// Specializations of param reader for RT and NRT cases (data encoded
// differently, buffer semantics differ because of local bufs).
template <typename ArgType> struct ParamReader;

// RT case: we're decoding data from float**, there will be a Unit, we can
// have LocalBufs.
// TODO: All the allocations should be using the SC RT allocator, but this
// won't work reliably until it propagates down through the param set.
template <>
struct ParamReader<impl::FloatControlsIter>
{
  using Controls = impl::FloatControlsIter;

  /// Decode a string: first value is the length, then one char per value.
  static auto fromArgs(Unit* /*x*/, Controls& args, std::string, int)
  {
    index size = static_cast<index>(args.next());
    std::string res;
    res.resize(asUnsigned(size));
    for (index i = 0; i < size; ++i)
      res[asUnsigned(i)] = static_cast<char>(args.next());
    return res;
  }

  /// Decode a long array: first value is the length, then the items.
  static auto fromArgs(Unit*, Controls& args, typename LongArrayT::type&, int)
  {
    using Container = typename LongArrayT::type;
    using Value = typename Container::type;
    index size = static_cast<index>(args.next());
    Container res(size);
    for (index i = 0; i < size; ++i)
      res[i] = static_cast<Value>(args.next());
    return res;
  }

  template <typename T>
  static std::enable_if_t<std::is_integral<T>::value, T>
  fromArgs(Unit*, Controls& args, T, int)
  {
    // Fixed: cast directly to the requested integral type T (previously
    // cast to index and relied on an implicit narrowing conversion).
    return static_cast<T>(args.next());
  }

  template <typename T>
  static std::enable_if_t<std::is_floating_point<T>::value, T>
  fromArgs(Unit*, Controls& args, T, int)
  {
    return args.next();
  }

  /// Resolve a buffer number to an adaptor. Numbers past the global count
  /// index into the parent Graph's LocalBufs; negative numbers yield null.
  static SCBufferAdaptor* fetchBuffer(Unit* x, index bufnum)
  {
    if (bufnum >= x->mWorld->mNumSndBufs)
    {
      index localBufNum = bufnum - x->mWorld->mNumSndBufs;

      Graph* parent = x->mParent;

      return localBufNum <= parent->localMaxBufNum
                 ? new SCBufferAdaptor(parent->mLocalSndBufs + localBufNum,
                                       x->mWorld, true)
                 : nullptr;
    }
    else
      return bufnum >= 0 ? new SCBufferAdaptor(bufnum, x->mWorld) : nullptr;
  }

  static auto fromArgs(Unit* x, Controls& args, BufferT::type&, int)
  {
    typename LongT::type bufnum = static_cast<typename LongT::type>(
        ParamReader::fromArgs(x, args, typename LongT::type(), -1));
    return BufferT::type(fetchBuffer(x, bufnum));
  }

  static auto fromArgs(Unit* x, Controls& args, InputBufferT::type&, int)
  {
    typename LongT::type bufnum =
        static_cast<LongT::type>(fromArgs(x, args, LongT::type(), -1));
    return InputBufferT::type(fetchBuffer(x, bufnum));
  }

  /// Shared-client params arrive as a numeric id; stringify it (empty
  /// string for negative ids).
  template <typename P>
  static std::enable_if_t<IsSharedClient<P>::value, P>
  fromArgs(Unit* x, Controls& args, P&, int)
  {
    auto id = fromArgs(x, args, index{}, 0);
    return {id >= 0 ? std::to_string(id).c_str() : ""};
  }
};

// NRT case: we're decoding data from sc_msg_iter*, there will be a World*,
// we can't have LocalBufs.
// TODO: All the allocations should be using the SC RT allocator (I guess:
// this will probably always run on the RT thread), but this won't work
// reliably until it propagates down through the param set.
template <>
struct ParamReader<sc_msg_iter>
{
  /// Human-readable name for an OSC type tag, for error reporting.
  static const char* oscTagToString(char tag)
  {
    switch (tag)
    {
      case 'i': return "integer"; break;
      case 'f': return "float"; break;
      case 'd': return "double"; break;
      case 's': return "string"; break;
      case 'b': return "blob"; break;
      case 't': return "time tag"; break;
      default: return "unknown type";
    }
  }

  // argTypeToString: human-readable name of the type we *expect* for a
  // given parameter, used alongside oscTagToString in error messages.

  static const char* argTypeToString(std::string&)
  {
    return "string";
  }

  template <typename T>
  static std::enable_if_t<std::is_integral<T>::value, const char*>
  argTypeToString(T&)
  {
    return "integer";
  }

  template <typename T>
  static std::enable_if_t<std::is_floating_point<T>::value, const char*>
  argTypeToString(T&)
  {
    return "float";
  }

  static const char* argTypeToString(BufferT::type&)
  {
    return "buffer";
  }

  static const char* argTypeToString(InputBufferT::type&)
  {
    return "buffer";
  }

  template <typename P>
  static std::enable_if_t<IsSharedClient<P>::value, const char*>
  argTypeToString(P&)
  {
    return "shared_object"; // not ideal
  }

  // argTypeOK: does the incoming OSC tag satisfy this parameter type?

  static bool argTypeOK(std::string&, char tag)
  {
    return tag == 's';
  }

  template <typename T>
  static std::enable_if_t<std::is_integral<T>::value
                              || std::is_floating_point<T>::value,
                          bool>
  argTypeOK(T&, char tag)
  {
    return tag == 'i' || tag == 'f' || tag == 'd';
  }

  static bool argTypeOK(BufferT::type&, char tag)
  {
    return tag == 'i'; // buffers are sent as integer bufnums
  }

  static bool argTypeOK(InputBufferT::type&, char tag)
  {
    return tag == 'i';
  }

  template <typename P>
  static std::enable_if_t<IsSharedClient<P>::value, bool>
  argTypeOK(P&, char tag)
  {
    return tag == 'i'; // shared objects are referenced by integer id
  }

  static auto fromArgs(World*, sc_msg_iter& args, std::string, int)
  {
    const char* recv = args.gets("");

    return std::string(recv ? recv : "");
  }

  template <typename T>
  static std::enable_if_t<std::is_integral<T>::value, T>
  fromArgs(World*, sc_msg_iter& args, T, int defVal)
  {
    return args.geti(defVal);
  }

  template <typename T>
  static std::enable_if_t<std::is_floating_point<T>::value, T>
  fromArgs(World*, sc_msg_iter& args, T, int)
  {
    return args.getf();
  }

  /// Resolve a global buffer number (no LocalBufs in the NRT path).
  static SCBufferAdaptor* fetchBuffer(World* x, index bufnum)
  {
    if (bufnum >= x->mNumSndBufs)
    {
      std::cout << "ERROR: bufnum " << bufnum
                << " is invalid for global buffers\n";
      return nullptr;
    }
    else
      return bufnum >= 0 ? new SCBufferAdaptor(bufnum, x) : nullptr;
  }

  static auto fromArgs(World* x, sc_msg_iter& args, BufferT::type&, int)
  {
    typename LongT::type bufnum = static_cast<typename LongT::type>(
        ParamReader::fromArgs(x, args, typename LongT::type(), -1));
    return BufferT::type(fetchBuffer(x, bufnum));
  }

  static auto fromArgs(World* x, sc_msg_iter& args, InputBufferT::type&, int)
  {
    typename LongT::type bufnum =
        static_cast<LongT::type>(fromArgs(x, args, LongT::type(), -1));
    return InputBufferT::type(fetchBuffer(x, bufnum));
  }

  template <typename P>
  static std::enable_if_t<IsSharedClient<P>::value, P>
  fromArgs(World* x, sc_msg_iter& args, P&, int)
  {
    auto id = fromArgs(x, args, index{}, 0);
    return {id >= 0 ? std::to_string(id).c_str() : ""};
  }

  /// Decode a long array from an OSC message: first the length, then items.
  static auto fromArgs(World*, sc_msg_iter& args,
                       typename LongArrayT::type&, int)
  {
    using Container = typename LongArrayT::type;
    using Value = typename Container::type;
    index size = static_cast<index>(args.geti());
    Container res(size);
    for (index i = 0; i < size; ++i)
      res[i] = static_cast<Value>(args.geti());
    return res;
  }
};

/// Per-parameter setter/getter functors, invoked by the parameter set for
/// each declared parameter of Wrapper's client.
template <typename Wrapper>
struct ClientParams
{
  // Iterate over arguments via callbacks from params object.
  template <typename ArgType, size_t N, typename T>
  struct Setter
  {
    static constexpr index argSize =
        Wrapper::Client::getParameterDescriptors().template get<N>().fixedSize;

    /// Grizzly enable_if hackage coming up. Need to brute force an int from
    /// incoming data into a string param for FluidDataSet / FluidLabelSet.
    /// This will go away one day.

    // General case: every parameter except the leading name of a named
    // shared client.
    template <typename Context, typename Client = typename Wrapper::Client,
              size_t Number = N>
    std::enable_if_t<!impl::IsNamedShared_v<Client> || Number != 0,
                     typename T::type>
    operator()(Context* x, ArgType& args)
    {
      // Just return default if there's nothing left to grab
      if (args.remain() == 0)
      {
        std::cout << "WARNING: " << Wrapper::getName()
                  << " received fewer parameters than expected\n";
        return Wrapper::Client::getParameterDescriptors()
            .template makeValue<N>();
      }

      ParamLiteralConvertor<T, argSize> a;
      using LiteralType =
          typename ParamLiteralConvertor<T, argSize>::LiteralType;

      // a[0] is passed purely as a type tag to pick the fromArgs overload.
      for (index i = 0; i < argSize; i++)
        a[i] = static_cast<LiteralType>(
            ParamReader<ArgType>::fromArgs(x, args, a[0], 0));

      return a.value();
    }

    // Special case: parameter 0 of a named shared client arrives as an
    // integer id but is stored as a string.
    template <typename Context, typename Client = typename Wrapper::Client,
              size_t Number = N>
    std::enable_if_t<impl::IsNamedShared_v<Client> && Number == 0,
                     typename T::type>
    operator()(Context* x, ArgType& args)
    {
      // Just return default if there's nothing left to grab
      if (args.remain() == 0)
      {
        std::cout << "WARNING: " << Wrapper::getName()
                  << " received fewer parameters than expected\n";
        return Wrapper::Client::getParameterDescriptors()
            .template makeValue<N>();
      }

      index id = ParamReader<ArgType>::fromArgs(x, args, index{}, 0);
      return std::to_string(id);
    }
  };

  template <typename ArgType, size_t N, typename T>
  struct Getter
  {
    static constexpr index argSize =
        Wrapper::Client::getParameterDescriptors().template get<N>().fixedSize;
  };
};

} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,200 @@) ----
#pragma once
|
||||||
|
|
||||||
|
namespace fluid {
|
||||||
|
namespace client {
|
||||||
|
|
||||||
|
struct ToFloatArray
|
||||||
|
{
|
||||||
|
static index allocSize(typename BufferT::type) { return 1; }
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<
|
||||||
|
std::is_integral<T>::value || std::is_floating_point<T>::value, index>
|
||||||
|
allocSize(T)
|
||||||
|
{
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
static index allocSize(std::string s)
|
||||||
|
{
|
||||||
|
return asSigned(s.size()) + 1;
|
||||||
|
} // put null char at end when we send
|
||||||
|
|
||||||
|
static index allocSize(FluidTensor<std::string, 1> s)
|
||||||
|
{
|
||||||
|
index count = 0;
|
||||||
|
for (auto& str : s) count += (str.size() + 1);
|
||||||
|
return count;
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static index allocSize(FluidTensor<T, 1> s)
|
||||||
|
{
|
||||||
|
return s.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts>
|
||||||
|
static std::tuple<std::array<index, sizeof...(Ts)>, index>
|
||||||
|
allocSize(std::tuple<Ts...>&& t)
|
||||||
|
{
|
||||||
|
return allocSizeImpl(std::forward<decltype(t)>(t),
|
||||||
|
std::index_sequence_for<Ts...>());
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts, size_t... Is>
|
||||||
|
static std::tuple<std::array<index, sizeof...(Ts)>, index>
|
||||||
|
allocSizeImpl(std::tuple<Ts...>&& t, std::index_sequence<Is...>)
|
||||||
|
{
|
||||||
|
index size{0};
|
||||||
|
std::array<index, sizeof...(Ts)> res;
|
||||||
|
(void) std::initializer_list<int>{
|
||||||
|
(res[Is] = size, size += ToFloatArray::allocSize(std::get<Is>(t)),
|
||||||
|
0)...};
|
||||||
|
return std::make_tuple(res,
|
||||||
|
size); // array of offsets into allocated buffer &
|
||||||
|
// total number of floats to alloc
|
||||||
|
}
|
||||||
|
|
||||||
|
static void convert(float* f, typename BufferT::type buf)
|
||||||
|
{
|
||||||
|
f[0] = static_cast<float>(static_cast<SCBufferAdaptor*>(buf.get())->bufnum());
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<std::is_integral<T>::value ||
|
||||||
|
std::is_floating_point<T>::value>
|
||||||
|
convert(float* f, T x)
|
||||||
|
{
|
||||||
|
f[0] = static_cast<float>(x);
|
||||||
|
}
|
||||||
|
|
||||||
|
static void convert(float* f, std::string s)
|
||||||
|
{
|
||||||
|
std::copy(s.begin(), s.end(), f);
|
||||||
|
f[s.size()] = 0; // terminate
|
||||||
|
}
|
||||||
|
static void convert(float* f, FluidTensor<std::string, 1> s)
|
||||||
|
{
|
||||||
|
for (auto& str : s)
|
||||||
|
{
|
||||||
|
std::copy(str.begin(), str.end(), f);
|
||||||
|
f += str.size();
|
||||||
|
*f++ = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
template <typename T>
|
||||||
|
static void convert(float* f, FluidTensor<T, 1> s)
|
||||||
|
{
|
||||||
|
static_assert(std::is_convertible<T, float>::value,
|
||||||
|
"Can't convert this to float output");
|
||||||
|
std::copy(s.begin(), s.end(), f);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts, size_t... Is>
|
||||||
|
static void convert(float* f, std::tuple<Ts...>&& t,
|
||||||
|
std::array<index, sizeof...(Ts)> offsets,
|
||||||
|
std::index_sequence<Is...>)
|
||||||
|
{
|
||||||
|
(void) std::initializer_list<int>{
|
||||||
|
(convert(f + offsets[Is], std::get<Is>(t)), 0)...};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<typename Packet>
|
||||||
|
struct ToOSCTypes
|
||||||
|
{
|
||||||
|
|
||||||
|
static index numTags(typename BufferT::type) { return 1; }
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<
|
||||||
|
std::is_integral<T>::value || std::is_floating_point<T>::value, index>
|
||||||
|
numTags(T)
|
||||||
|
{
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
static index numTags(std::string)
|
||||||
|
{
|
||||||
|
return 1;;
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static index numTags(FluidTensor<T, 1> s)
|
||||||
|
{
|
||||||
|
return s.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts>
|
||||||
|
static index numTags(std::tuple<Ts...>&&)
|
||||||
|
{
|
||||||
|
return std::tuple_size<std::tuple<Ts...>>::value;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void getTag(Packet& p, typename BufferT::type) { p.addtag('i'); }
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<std::is_integral<std::decay_t<T>>::value>
|
||||||
|
getTag(Packet& p, T&&) { p.addtag('i'); }
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<std::is_floating_point<std::decay_t<T>>::value>
|
||||||
|
getTag(Packet& p, T&&) { p.addtag('f'); }
|
||||||
|
|
||||||
|
static void getTag (Packet& p, std::string) { p.addtag('s'); }
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static void getTag(Packet& p, FluidTensor<T, 1> x)
|
||||||
|
{
|
||||||
|
T dummy{};
|
||||||
|
for (int i = 0; i < x.rows(); i++)
|
||||||
|
getTag(p, dummy);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts>
|
||||||
|
static void getTag(Packet& p, std::tuple<Ts...>&& t)
|
||||||
|
{
|
||||||
|
ForEach(t,[&p](auto& x){getTag(p,x);});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static void convert(Packet& p, typename BufferT::type buf)
|
||||||
|
{
|
||||||
|
p.addi(static_cast<int>(static_cast<SCBufferAdaptor*>(buf.get())->bufnum()));
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<std::is_integral<T>::value>
|
||||||
|
convert(Packet& p, T x)
|
||||||
|
{
|
||||||
|
p.addi(static_cast<int>(x));
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static std::enable_if_t<std::is_floating_point<T>::value>
|
||||||
|
convert(Packet& p, T x)
|
||||||
|
{
|
||||||
|
p.addf(static_cast<float>(x));
|
||||||
|
}
|
||||||
|
|
||||||
|
static void convert(Packet& p, std::string s)
|
||||||
|
{
|
||||||
|
p.adds(s.c_str());
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static void convert(Packet& p, FluidTensor<T, 1> s)
|
||||||
|
{
|
||||||
|
for(auto& x: s) convert(p,x);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename... Ts>
|
||||||
|
static void convert(Packet& p, std::tuple<Ts...>&& t)
|
||||||
|
{
|
||||||
|
ForEach(t,[&p](auto& x){ convert(p,x);});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
}
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,29 @@) ----
#pragma once

#include "SCBufferAdaptor.hpp"

namespace fluid {
namespace client {
namespace impl {

/// Functor applied per buffer parameter: hand the underlying SCBufferAdaptor
/// back to the RT thread (no-op on a null buffer).
template <size_t N, typename T>
struct AssignBuffer
{
  void operator()(const typename BufferT::type& p, World* w)
  {
    if (auto b = static_cast<SCBufferAdaptor*>(p.get())) b->assignToRT(w);
  }
};

/// Functor applied per buffer parameter: release per-command buffer state
/// (no-op on a null buffer).
template <size_t N, typename T>
struct CleanUpBuffer
{
  void operator()(const typename BufferT::type& p)
  {
    if (auto b = static_cast<SCBufferAdaptor*>(p.get())) b->cleanUp();
  }
};

} // namespace impl
} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,51 @@) ----
#include "CopyReplyAddress.hpp"
#include <SC_Win32Utils.h>
#include <SC_ReplyImpl.hpp>

namespace fluid {
namespace client {

/// Copy an opaque scsynth reply address using the server's RT allocator.
/// Returns nullptr for a null input; otherwise the caller owns the copy and
/// must release it with the matching (ft, inWorld) deleteReplyAddress.
void* copyReplyAddress(InterfaceTable* ft, World* inWorld, void* inreply)
{

  if (!inreply) return nullptr;

  ReplyAddress* reply =
      (ReplyAddress*) ft->fRTAlloc(inWorld, sizeof(ReplyAddress));

  *reply = *(static_cast<ReplyAddress*>(inreply));

  return reply;
}

/// Free a reply address previously copied via the RT-allocator overload.
void deleteReplyAddress(InterfaceTable* ft, World* inWorld, void* inreply)
{
  if (!inreply) return;
  ft->fRTFree(inWorld, (ReplyAddress*) inreply);
}

/// Copy an opaque reply address on the global heap (non-RT contexts).
/// Pair with the heap overload of deleteReplyAddress.
void* copyReplyAddress(void* inreply)
{

  if (!inreply) return nullptr;

  ReplyAddress* reply = new ReplyAddress();

  *reply = *(static_cast<ReplyAddress*>(inreply));

  return reply;
}

/// Free a reply address previously copied on the heap.
void deleteReplyAddress(void* inreply)
{
  if (!inreply) return;
  delete (ReplyAddress*) inreply;
}

/// Type-erased forwarder to scsynth's SendReply(ReplyAddress*, ...).
void SendReply(void* inReplyAddr, char* inBuf, int inSize)
{
  SendReply(static_cast<ReplyAddress*>(inReplyAddr), inBuf, inSize);
}

} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,16 @@) ----
#pragma once

#include <SC_PlugIn.h>

namespace fluid {
namespace client {

// Type-erased helpers for copying/freeing scsynth reply addresses.
// The (ft, inWorld) overloads use the server's RT allocator; the plain
// overloads use the global heap — always free with the matching overload.
void* copyReplyAddress(InterfaceTable* ft, World* inWorld, void* inreply);
void  deleteReplyAddress(InterfaceTable* ft, World* inWorld, void* inreply);
void* copyReplyAddress(void* inreply);
void  deleteReplyAddress(void* inreply);
// Forwards to scsynth's SendReply with the address cast back to its real type.
void  SendReply(void* inReplyAddr, char* inBuf, int inSize);

} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,49 @@) ----
#pragma once

#include "NonRealtime.hpp"
#include "Realtime.hpp"

namespace fluid {
namespace client {

template <typename Client> class FluidSCWrapper;

namespace impl {

// Compile-time selection of the wrapper base: RealTime for RT-capable
// clients, NonRealTime otherwise.
template <bool UseRealTime> struct ChooseRTOrNRT;

template <>
struct ChooseRTOrNRT<false>
{
  template <typename Client, typename Wrapper>
  using type = NonRealTime<Client, Wrapper>;
};

template <>
struct ChooseRTOrNRT<true>
{
  template <typename Client, typename Wrapper>
  using type = RealTime<Client, Wrapper>;
};

template <typename Client, typename Wrapper>
struct BaseChooser
{
  using RT = typename Client::isRealTime;

  // Models always go down the NRT path, even if nominally real-time.
  static constexpr bool UseRealTime = RT::value && !IsModel_t<Client>::value;

  using type =
      typename ChooseRTOrNRT<UseRealTime>::template type<Client, Wrapper>;
};

template <typename Client, typename Wrapper>
using BaseChooser_t = typename BaseChooser<Client, Wrapper>::type;

/// The base class FluidSCWrapper<Client> should derive from.
template <typename Client>
using FluidSCWrapperBase = BaseChooser_t<Client, FluidSCWrapper<Client>>;
} // namespace impl
} // namespace client
} // namespace fluid
// ---- merge-residue file boundary (was hunk header: @@ -0,0 +1,273 @@) ----
#pragma once
|
||||||
|
|
||||||
|
#include "ArgsFromClient.hpp"
|
||||||
|
#include "ArgsToClient.hpp"
|
||||||
|
#include "CopyReplyAddress.hpp"
|
||||||
|
#include <scsynthsend.h>
|
||||||
|
|
||||||
|
namespace fluid {
|
||||||
|
namespace client {
|
||||||
|
|
||||||
|
template<typename FluidSCWrapper, typename Client>
|
||||||
|
struct FluidSCMessaging{
|
||||||
|
|
||||||
|
// Forward to the wrapper's static accessors so messaging code can stay
// decoupled from FluidSCWrapper's internals.
static auto getInterfaceTable() { return FluidSCWrapper::getInterfaceTable(); }
static auto getName() { return FluidSCWrapper::getName(); }
|
||||||
|
|
||||||
|
|
||||||
|
template <size_t N>
|
||||||
|
struct MessageDispatchCmd
|
||||||
|
{
|
||||||
|
using Descriptor = typename Client::MessageSetType::template MessageDescriptorAt<N>;
|
||||||
|
using ArgTuple = typename Descriptor::ArgumentTypes;
|
||||||
|
using ReturnType = typename Descriptor::ReturnType;
|
||||||
|
using IndexList = typename Descriptor::IndexList;
|
||||||
|
|
||||||
|
static constexpr size_t Message = N;
|
||||||
|
index id;
|
||||||
|
ArgTuple args;
|
||||||
|
ReturnType result;
|
||||||
|
std::string name;
|
||||||
|
IndexList argIndices;
|
||||||
|
void* replyAddr{nullptr};
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
template <size_t N, typename T>
|
||||||
|
struct SetupMessageCmd
|
||||||
|
{
|
||||||
|
|
||||||
|
void operator()(const T& message)
|
||||||
|
{
|
||||||
|
static std::string messageName = std::string{getName()} + '/' + message.name;
|
||||||
|
auto ft = getInterfaceTable();
|
||||||
|
ft->fDefinePlugInCmd(messageName.c_str(), doMessage<N>,(void*)messageName.c_str());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
template <typename Message>
|
||||||
|
static bool validateMessageArgs(Message* msg, sc_msg_iter* inArgs)
|
||||||
|
{
|
||||||
|
using ArgTuple = decltype(msg->args);
|
||||||
|
|
||||||
|
std::string tags(inArgs->tags + inArgs->count);//evidently this needs commenting: construct string at pointer offset by tag count, to pick up args
|
||||||
|
bool willContinue = true;
|
||||||
|
bool typesMatch = true;
|
||||||
|
|
||||||
|
auto& args = msg->args;
|
||||||
|
|
||||||
|
constexpr size_t expectedArgCount = std::tuple_size<ArgTuple>::value;
|
||||||
|
|
||||||
|
/// TODO this squawks if we have a completion message, so maybe we can check if extra arg is a 'b' and squawk if not?
|
||||||
|
// if(tags.size() > expectedArgCount)
|
||||||
|
// {
|
||||||
|
// std::cout << "WARNING: " << msg->name << " received more arguments than expected (got "
|
||||||
|
// << tags.size() << ", expect " << expectedArgCount << ")\n";
|
||||||
|
// }
|
||||||
|
|
||||||
|
if(tags.size() < expectedArgCount)
|
||||||
|
{
|
||||||
|
std::cout << "ERROR: " << msg->name << " received fewer arguments than expected (got "
|
||||||
|
<< tags.size() << ", expect " << expectedArgCount << ")\n";
|
||||||
|
willContinue = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto tagsIter = tags.begin();
|
||||||
|
auto tagsEnd = tags.end();
|
||||||
|
ForEach(args,[&typesMatch,&tagsIter,&tagsEnd](auto& arg){
|
||||||
|
if(tagsIter == tagsEnd)
|
||||||
|
{
|
||||||
|
typesMatch = false;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
char t = *(tagsIter++);
|
||||||
|
typesMatch = typesMatch && ParamReader<sc_msg_iter>::argTypeOK(arg,t);
|
||||||
|
});
|
||||||
|
|
||||||
|
willContinue = willContinue && typesMatch;
|
||||||
|
|
||||||
|
if(!typesMatch)
|
||||||
|
{
|
||||||
|
auto& report = std::cout;
|
||||||
|
report << "ERROR: " << msg->name << " type signature incorrect.\nExpect: (";
|
||||||
|
size_t i{0};
|
||||||
|
ForEach(args, [&i](auto& x){
|
||||||
|
std::cout << ParamReader<sc_msg_iter>::argTypeToString(x);
|
||||||
|
if(i < (std::tuple_size<ArgTuple>::value - 1 ) )
|
||||||
|
{
|
||||||
|
std::cout << " ,";
|
||||||
|
}
|
||||||
|
i++;
|
||||||
|
});
|
||||||
|
report << ")\nReceived: (";
|
||||||
|
i = 0;
|
||||||
|
for(auto t: tags)
|
||||||
|
{
|
||||||
|
report << ParamReader<sc_msg_iter>::oscTagToString(t);
|
||||||
|
if( i < ( tags.size() - 1 ) )
|
||||||
|
{
|
||||||
|
report << ", ";
|
||||||
|
}
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
report << ")\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
return willContinue;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Plugin-command handler for the client's Nth message.
/// RT thread: parses the instance ID and message arguments from the OSC
/// stream, then hands a MessageDispatchCmd to scsynth's staged async
/// pipeline (NRT invoke -> RT buffer swap -> NRT reply -> RT cleanup).
template<size_t N>
static void doMessage(World* inWorld, void* inUserData, struct sc_msg_iter* args, void* replyAddr)
{
  using MessageData = MessageDispatchCmd<N>;

  auto msg = new MessageData();

  // First OSC argument is always the target instance's ID
  msg->id = args->geti();
  msg->replyAddr = copyReplyAddress(replyAddr);
  ///TODO make this step contingent on verbosity or something, in the name of efficiency
  bool willContinue = validateMessageArgs(msg, args);

  if(!willContinue)
  {
    delete msg;
    return;
  }

  // inUserData carries the message's name (set at registration time)
  msg->name = std::string{'/'} + (const char*)(inUserData);

  // Read each declared argument off the OSC iterator, in order
  ForEach(msg-> args,[inWorld,&args](auto& thisarg)
  {
    thisarg = ParamReader<sc_msg_iter>::fromArgs(inWorld, *args,thisarg,0);
  });

  // Any remaining blob is a completion message to forward to the async command
  size_t completionMsgSize{args ? args->getbsize() : 0};
  assert(completionMsgSize <= std::numeric_limits<int>::max());
  char* completionMsgData = nullptr;

  if (completionMsgSize) {
    completionMsgData = (char*)getInterfaceTable()->fRTAlloc(inWorld, completionMsgSize);
    args->getb(completionMsgData, completionMsgSize);
  }

  auto ft = getInterfaceTable();

  ft->fDoAsynchronousCommand(inWorld, replyAddr, getName(), msg,
      [](World* world, void* data) // NRT thread: invocation
      {
        MessageData* m = static_cast<MessageData*>(data);
        using ReturnType = typename MessageData::ReturnType;

        if(auto ptr = FluidSCWrapper::get(m->id).lock())
        {
          m->result =
              ReturnType{invokeImpl<N>(ptr->mClient, m->args,m->argIndices)};

          if (!m->result.ok())
            FluidSCWrapper::printResult(world, m->result);
        } else FluidSCWrapper::printNotFound(m->id);

        return true;
      },
      [](World* world, void* data) // RT thread: buffer swap (and possible completion messages)
      {
        MessageData* m = static_cast<MessageData*>(data);
        MessageData::Descriptor::template forEachArg<typename BufferT::type,
                                                     impl::AssignBuffer>(m->args,
                                                                         world);
        return true;
      },
      [](World*, void* data) // NRT Thread: Send reply
      {
        MessageData* m = static_cast<MessageData*>(data);
        if(m->result.status() != Result::Status::kError)
          messageOutput(m->name, m->id, m->result, m->replyAddr);
        return false;
      },
      [](World*, void* data) // RT thread: clean up
      {
        MessageData* m = static_cast<MessageData*>(data);
        delete m;
      },
      static_cast<int>(completionMsgSize), completionMsgData);

  // fDoAsynchronousCommand copies the completion message, so free ours now
  if(completionMsgSize) ft->fRTFree(inWorld, completionMsgData);
}
|
||||||
|
|
||||||
|
/// Unpacks the stored argument tuple and forwards to the client's Nth
/// message handler. Called only from the NRT thread (via doMessage stage 2).
template <size_t N, typename ArgsTuple, size_t... Is> // Call from NRT
static decltype(auto) invokeImpl(Client& x, ArgsTuple& args,
                                 std::index_sequence<Is...>)
{
  return x.template invoke<N>(x, std::get<Is>(args)...);
}
|
||||||
|
|
||||||
|
/// Serialise a single-valued message result as an OSC reply:
/// address string, ',i' + payload type tags, instance id, then the payload.
template <typename T> // call from RT
static void messageOutput(const std::string& s, index id, MessageResult<T>& result, void* replyAddr)
{
  index numTags = ToOSCTypes<small_scpacket>::numTags(static_cast<T>(result));
  // small_scpacket is a fixed-size buffer: refuse replies that would overflow it
  if(numTags > 2048)
  {
    std::cout << "ERROR: Message response too big to send (" << asUnsigned(numTags) * sizeof(float) << " bytes)." << std::endl;
    return;
  }

  small_scpacket packet;
  packet.adds(s.c_str());
  packet.maketags(static_cast<int>(numTags) + 2); // ',' + 'i' + payload tags
  packet.addtag(',');
  packet.addtag('i');
  ToOSCTypes<small_scpacket>::getTag(packet, static_cast<T>(result));

  packet.addi(static_cast<int>(id));
  ToOSCTypes<small_scpacket>::convert(packet, static_cast<T>(result));

  if(replyAddr)
    SendReply(replyAddr,packet.data(),static_cast<int>(packet.size()));
}
|
||||||
|
|
||||||
|
/// Reply for void-returning messages: just the address, ',i' and the
/// instance id (acknowledgement only, no payload).
static void messageOutput(const std::string& s,index id, MessageResult<void>&, void* replyAddr)
{
  small_scpacket packet;
  packet.adds(s.c_str());
  packet.maketags(2);
  packet.addtag(',');
  packet.addtag('i');
  packet.addi(static_cast<int>(id));

  if(replyAddr)
    SendReply(replyAddr,packet.data(),static_cast<int>(packet.size()));
}
|
||||||
|
|
||||||
|
/// Reply for tuple-returning messages: same layout as the scalar overload,
/// with one payload section per tuple element.
template <typename... Ts>
static void messageOutput(const std::string& s, index id, MessageResult<std::tuple<Ts...>>& result, void* replyAddr)
{
  using T = std::tuple<Ts...>;

  index numTags = ToOSCTypes<small_scpacket>::numTags(static_cast<T>(result));
  // small_scpacket is a fixed-size buffer: refuse replies that would overflow it
  if(numTags > 2048)
  {
    std::cout << "ERROR: Message response too big to send (" << asUnsigned(numTags) * sizeof(float) << " bytes)." << std::endl;
    return;
  }

  small_scpacket packet;
  packet.adds(s.c_str());
  // NOTE(review): the scalar overload reserves numTags + 2 (',' + 'i' + payload);
  // confirm whether the extra tag slot reserved here is intentional
  packet.maketags(static_cast<int>(numTags + 3));
  packet.addtag(',');
  packet.addtag('i');
  ToOSCTypes<small_scpacket>::getTag(packet,static_cast<T>(result));

  packet.addi(static_cast<int>(id));
  ToOSCTypes<small_scpacket>::convert(packet, static_cast<T>(result));

  if(replyAddr)
    SendReply(replyAddr,packet.data(),static_cast<int>(packet.size()));
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,59 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <clients/nrt/FluidSharedInstanceAdaptor.hpp>
|
||||||
|
#include <clients/common/FluidNRTClientWrapper.hpp>
|
||||||
|
#include <clients/common/SharedClientUtils.hpp>
|
||||||
|
|
||||||
|
namespace fluid {
namespace client {
namespace impl {
/// Named, shared clients already have a lookup table in their adaptor class
template <typename T>
struct IsNamedShared
{
  // Primary template: not a named/shared client
  using type = std::false_type;
};

//TODO: make less tied to current implementation
// Matches the exact wrapper stack used for named shared clients today
template <typename T>
struct IsNamedShared<NRTThreadingAdaptor<NRTSharedInstanceAdaptor<T>>>
{
  using type = std::true_type;
};

// Convenience alias / value shortcuts for the trait above
template<typename T>
using IsNamedShared_t = typename IsNamedShared<T>::type;

template<typename T>
constexpr bool IsNamedShared_v = IsNamedShared_t<T>::value;

/// Models don't, but still need to survive CMD-.
template<typename T>
struct IsModel
{
  // Primary template: not a model object
  using type = std::false_type;
};

// Defer to the wrapped client's own isModelObject flag, threaded or not
template<typename T>
struct IsModel<NRTThreadingAdaptor<ClientWrapper<T>>>
{
  using type = typename ClientWrapper<T>::isModelObject;
};

template<typename T>
struct IsModel<ClientWrapper<T>>
{
  using type = typename ClientWrapper<T>::isModelObject;
};

template<typename T>
using IsModel_t = typename IsModel<T>::type;

template<typename T>
constexpr bool IsModel_v = IsModel_t<T>::value;

}
}
}
|
||||||
@ -0,0 +1,993 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "BufferFuncs.hpp"
|
||||||
|
#include "CopyReplyAddress.hpp"
|
||||||
|
#include "Messaging.hpp"
|
||||||
|
#include "Meta.hpp"
|
||||||
|
#include "RealTimeBase.hpp"
|
||||||
|
#include "SCBufferAdaptor.hpp"
|
||||||
|
#include <clients/common/FluidBaseClient.hpp>
|
||||||
|
#include <data/FluidMeta.hpp>
|
||||||
|
#include <SC_PlugIn.hpp>
|
||||||
|
#include <scsynthsend.h>
|
||||||
|
#include <unordered_map>
|
||||||
|
|
||||||
|
namespace fluid {
|
||||||
|
namespace client {
|
||||||
|
namespace impl {
|
||||||
|
|
||||||
|
/// Non Real Time Processor
|
||||||
|
|
||||||
|
template <typename Client, typename Wrapper>
|
||||||
|
class NonRealTime : public SCUnit
|
||||||
|
{
|
||||||
|
using Params = typename Client::ParamSetType;
|
||||||
|
|
||||||
|
// Placement-new a T into scsynth's RT memory pool. The object must be
// destroyed explicitly and its storage released with fRTFree (see the
// cleanup stage of runAsyncCommand).
template<typename T,typename...Args>
static T* rtalloc(World* world,Args&&...args)
{
  void* space = getInterfaceTable()->fRTAlloc(world, sizeof(T));
  return new (space) T{std::forward<Args>(args)...};
}
|
||||||
|
|
||||||
|
/// Instance cache
// One cached instance: the parameter set plus the client bound to it.
// mClient holds a reference to mParams (see CommandNew::stage2 for the
// re-pointing that keeps this valid after the entry is cached).
struct CacheEntry
{

  CacheEntry(const Params& p):mParams{p},mClient{mParams}
  {}

  Params mParams;
  Client mClient;
  bool mDone{false}; // set once an async job has fully completed
};
|
||||||
|
|
||||||
|
using CacheEntryPointer = std::shared_ptr<CacheEntry>;
|
||||||
|
using WeakCacheEntryPointer = std::weak_ptr<CacheEntry>; //could use weak_type in 17
|
||||||
|
|
||||||
|
public:
|
||||||
|
using Cache = std::unordered_map<index,CacheEntryPointer>;
|
||||||
|
static Cache mCache;
|
||||||
|
private:
|
||||||
|
// True when `weak` shares ownership with nothing. A weak_ptr that never
// pointed anywhere compares owner-equivalent (neither before nor after)
// to a default-constructed weak_ptr.
static bool isNull(WeakCacheEntryPointer const& weak)
{
  WeakCacheEntryPointer empty{};
  bool ownersDiffer = weak.owner_before(empty) || empty.owner_before(weak);
  return !ownersDiffer;
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
public:
|
||||||
|
// Look up a cached instance by ID; an absent ID yields an empty handle.
static WeakCacheEntryPointer get(index id)
{
  auto found = mCache.find(id);
  if (found == mCache.end()) return WeakCacheEntryPointer();
  return found->second;
}
|
||||||
|
|
||||||
|
// Insert a new instance under `id`. Refuses (with a console error) when the
// ID is already in use; returns an empty handle on any failure.
static WeakCacheEntryPointer add(index id, const Params& params)
{
  if(isNull(get(id)))
  {
    auto result = mCache.emplace(id,
                                 std::make_shared<CacheEntry>(params));

    // result.second is false only if insertion raced/failed
    return result.second ? (result.first)->second : WeakCacheEntryPointer(); //sob
  }
  else //client has screwed up
  {
    std::cout << "ERROR: " << Wrapper::getName() << " ID " << id << " already in use\n";
    return {};
  }
}
|
||||||
|
|
||||||
|
// Drop the cache's owning reference; the instance is destroyed once no
// in-flight command still holds a lock on it.
static void remove(index id)
{
  mCache.erase(id);
}
|
||||||
|
|
||||||
|
// Console diagnostic for lookups against an ID that is not in the cache.
static void printNotFound(index id)
{
  std::cout << "ERROR: " << Wrapper::getName() << " no instance with ID " << id << std::endl;
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
static InterfaceTable* getInterfaceTable() { return Wrapper::getInterfaceTable() ;}

// Parameter setters specialised for the two input sources: OSC messages
// (plugin commands) ...
template <size_t N, typename T>
using ParamsFromOSC = typename ClientParams<Wrapper>::template Setter<sc_msg_iter, N, T>;

// ... and UGen control-rate inputs (synth-driven triggering)
template <size_t N, typename T>
using ParamsFromSynth = typename ClientParams<Wrapper>::template Setter<impl::FloatControlsIter, N, T>;
|
||||||
|
|
||||||
|
|
||||||
|
struct NRTCommand
|
||||||
|
{
|
||||||
|
NRTCommand(World*, sc_msg_iter* args, void* replyAddr, bool consumeID = true)
|
||||||
|
{
|
||||||
|
auto count = args->count;
|
||||||
|
auto pos = args->rdpos;
|
||||||
|
|
||||||
|
mID = args->geti();
|
||||||
|
|
||||||
|
if(!consumeID)
|
||||||
|
{
|
||||||
|
args->count = count;
|
||||||
|
args->rdpos = pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(replyAddr)
|
||||||
|
mReplyAddress = copyReplyAddress(replyAddr);
|
||||||
|
}
|
||||||
|
|
||||||
|
~NRTCommand()
|
||||||
|
{
|
||||||
|
if(mReplyAddress) deleteReplyAddress(mReplyAddress);
|
||||||
|
}
|
||||||
|
|
||||||
|
NRTCommand(){}
|
||||||
|
|
||||||
|
explicit NRTCommand(index id):mID{id}{}
|
||||||
|
|
||||||
|
bool stage2(World*) { return true; } //nrt
|
||||||
|
bool stage3(World*) { return true; } //rt
|
||||||
|
bool stage4(World*) { return false; } //nrt
|
||||||
|
void cleanup(World*) {} //rt
|
||||||
|
|
||||||
|
void sendReply(const char* name,bool success)
|
||||||
|
{
|
||||||
|
if(mReplyAddress)
|
||||||
|
{
|
||||||
|
std::string slash{"/"};
|
||||||
|
small_scpacket packet;
|
||||||
|
packet.adds((slash+name).c_str());
|
||||||
|
packet.maketags(3);
|
||||||
|
packet.addtag(',');
|
||||||
|
packet.addtag('i');
|
||||||
|
packet.addtag('i');
|
||||||
|
packet.addi(success);
|
||||||
|
packet.addi(static_cast<int>(mID));
|
||||||
|
|
||||||
|
SendReply(mReplyAddress,packet.data(), static_cast<int>(packet.size()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// protected:
|
||||||
|
index mID;
|
||||||
|
void* mReplyAddress{nullptr};
|
||||||
|
};
|
||||||
|
|
||||||
|
// "/new": construct a client instance and register it in the cache under mID.
struct CommandNew : public NRTCommand
{
  // OSC construction path. Named shared clients keep the ID in the stream
  // (it doubles as a parameter), hence !IsNamedShared_v for consumeID.
  CommandNew(World* world, sc_msg_iter* args,void* replyAddr)
      : NRTCommand{world,args, replyAddr, !IsNamedShared_v<Client>},
        mParams{Client::getParameterDescriptors()}
  {
    mParams.template setParameterValuesRT<ParamsFromOSC>(nullptr, world, *args);
  }

  // Construction from UGen control inputs (used by NRTTriggerUnit)
  CommandNew(index id, World*, FloatControlsIter& args, Unit* x)
      :NRTCommand{id},
       mParams{Client::getParameterDescriptors()}
  {
    mParams.template setParameterValuesRT<ParamsFromSynth>(nullptr, x, args);
  }

  static const char* name()
  {
    static std::string cmd = std::string(Wrapper::getName()) + "/new";
    return cmd.c_str();
  }

  // NRT: validate, insert into the cache, and fix up the client->params link
  bool stage2(World* w)
  {
    Result constraintsRes = validateParameters(mParams);

    // Constraint failures are reported but do not block creation
    if(!constraintsRes.ok()) Wrapper::printResult(w,constraintsRes);

    mResult = (!isNull(add(NRTCommand::mID, mParams)));

    //Sigh. The cache entry above has both the client instance and main params instance.
    // The client is linked to the params by reference; I've not got the in-place construction
    // working properly so that params are in their final resting place by the time we make the client
    // so (for) now we need to manually repoint the client to the correct place. Or badness.
    if(mResult)
    {
      auto ptr = get(NRTCommand::mID).lock();
      ptr->mClient.setParams(ptr->mParams);
    }

    NRTCommand::sendReply(name(),mResult);

    return mResult;
  }

private:
  bool mResult;
  Params mParams;
};
|
||||||
|
|
||||||
|
// "/free": drop an instance from the cache, warning if an async job was
// still in flight.
struct CommandFree: public NRTCommand
{
  using NRTCommand::NRTCommand;

  // Non-RT-query models: an async job still processing at free time is
  // effectively cancelled — tell the user
  void cancelCheck(std::false_type, index id)
  {
    if(auto ptr = get(id).lock())
    {
      auto& client = ptr->mClient;
      if(!client.synchronous() && client.state() == ProcessState::kProcessing)
        std::cout << Wrapper::getName()
                  << ": Processing cancelled"
                  << std::endl;
    }
  }

  // RT query models never run async jobs: nothing to check
  void cancelCheck(std::true_type, index){}

  static const char* name()
  {
    static std::string cmd = std::string(Wrapper::getName()) + "/free";
    return cmd.c_str();
  }

  bool stage2(World*)
  {
    cancelCheck(IsRTQueryModel_t(),NRTCommand::mID);
    remove(NRTCommand::mID);
    NRTCommand::sendReply(name(), true);
    return true;
  }

};
|
||||||
|
|
||||||
|
|
||||||
|
/// Not registered as a PlugInCmd. Triggered by worker thread callback
// Runs the tail end of an async "/process": checks the job's outcome (NRT),
// swaps buffers back (RT), cleans up and replies (NRT).
struct CommandAsyncComplete: public NRTCommand
{
  // Takes ownership of replyAddress: the base destructor frees it
  CommandAsyncComplete(World*, index id, void* replyAddress)
  {
    NRTCommand::mID = id;
    NRTCommand::mReplyAddress = replyAddress;
  }

  // Replies are sent under the /process command name
  static const char* name() { return CommandProcess::name(); }

  bool stage2(World* world)
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      Result r;
      auto& client = ptr->mClient;
      ProcessState s = client.checkProgress(r);
      if (s == ProcessState::kDone || s == ProcessState::kDoneStillProcessing)
      {
        if (r.status() == Result::Status::kCancelled)
        {
          std::cout << Wrapper::getName()
                    << ": Processing cancelled"
                    << std::endl;
          ptr->mDone = true;
          return false;
        }

        client.checkProgress(r);
        // NOTE(review): mSuccess is only assigned here — stage4 reads it, so
        // it relies on stage2 always running first; consider initialising it
        mSuccess = !(r.status() == Result::Status::kError);
        if (!r.ok())
        {
          Wrapper::printResult(world,r);
          if(r.status() == Result::Status::kError)
          {
            ptr->mDone = true;
            return false;
          }
        }

        return true;
      }
    }
    return false;
  }

  // RT: swap processed buffer contents back into the server's buffers
  bool stage3(World* world)
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      auto& params = ptr->mParams;
      params.template forEachParamType<BufferT, AssignBuffer>(world);
      return true;
    }
    return false;
  }

  bool stage4(World*) //nrt
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      ptr->mParams.template forEachParamType<BufferT, impl::CleanUpBuffer>();

      // Negative IDs are internal (synth-triggered) instances: no reply
      if(NRTCommand::mID >= 0 && NRTCommand::mReplyAddress)
      {
        NRTCommand::sendReply(name(),mSuccess);
      }
      ptr->mDone = true;
      return true;
    }
    return false;
  }

  bool mSuccess;
};
|
||||||
|
|
||||||
|
|
||||||
|
// Invoked from the client's worker thread when an async job finishes.
// Bounces a message through the NRT fifo and back to the RT thread so a
// CommandAsyncComplete can be constructed in RT memory and run as a normal
// staged async command.
static void doProcessCallback(World* world, index id,size_t completionMsgSize,char* completionMessage,void* replyAddress)
{
  auto ft = getInterfaceTable();
  // Heap-allocated so it survives both fifo hops; freed by the tidyup lambda
  struct Context{
    World* mWorld;
    index mID;
    size_t mCompletionMsgSize;
    char* mCompletionMessage;
    void* mReplyAddress;
  };

  Context* c = new Context{world,id,completionMsgSize,completionMessage,replyAddress};

  auto launchCompletionFromNRT = [](FifoMsg* inmsg)
  {
    // Runs on the RT thread: build and launch the completion command
    auto runCompletion = [](FifoMsg* msg){
      Context* c = static_cast<Context*>(msg->mData);
      World* world = c->mWorld;
      index id = c->mID;
      auto ft = getInterfaceTable();
      void* space = ft->fRTAlloc(world,sizeof(CommandAsyncComplete));
      CommandAsyncComplete* cmd = new (space) CommandAsyncComplete(world, id,c->mReplyAddress);
      runAsyncCommand(world, cmd, c->mReplyAddress, c->mCompletionMsgSize, c->mCompletionMessage);
      // runAsyncCommand copies the completion message; release our copy
      if(c->mCompletionMsgSize) ft->fRTFree(world,c->mCompletionMessage);
    };

    auto tidyup = [](FifoMsg* msg)
    {
      Context* c = static_cast<Context*>(msg->mData);
      delete c;
    };

    auto ft = getInterfaceTable();
    FifoMsg fwd = *inmsg;
    fwd.Set(inmsg->mWorld, runCompletion, tidyup, inmsg->mData);
    if(inmsg->mWorld->mRunning)
      ft->fSendMsgToRT(inmsg->mWorld,fwd);
  };

  FifoMsg msg;
  msg.Set(world, launchCompletionFromNRT, nullptr, c);

  // NOTE(review): if the world has stopped running, neither lambda fires and
  // the Context (plus any completion message) leaks — confirm acceptable at
  // shutdown
  if(world->mRunning) ft->fSendMsgFromRT(world,msg);
}
|
||||||
|
|
||||||
|
// "/process": run the cached client's job. Blocking jobs complete through
// stages 2-4 of this command; async jobs launch in stage2 and finish later
// via doProcessCallback -> CommandAsyncComplete.
struct CommandProcess: public NRTCommand
{
  // OSC construction path: read parameter values plus a trailing
  // "synchronous" flag from the message
  CommandProcess(World* world, sc_msg_iter* args, void* replyAddr): NRTCommand{world, args, replyAddr},mParams{Client::getParameterDescriptors()}
  {
    auto& ar = *args;

    if(auto ptr = get(NRTCommand::mID).lock())
    {
      ptr->mDone = false;
      mParams.template setParameterValuesRT<ParamsFromOSC>(nullptr, world, ar);
      mSynchronous = static_cast<bool>(ar.geti());
    } //if this fails, we'll hear about it in stage2 anyway
  }

  // Programmatic construction path (NRTTriggerUnit / CommandProcessNew);
  // an optional Params snapshot overrides the cached instance's params
  explicit CommandProcess(index id,bool synchronous,Params* params):NRTCommand{id},mSynchronous(synchronous),
      mParams{Client::getParameterDescriptors()}
  {
    if(params)
    {
      mParams = *params;
      mOverwriteParams = true;
    }
  }

  static const char* name()
  {
    static std::string cmd = std::string(Wrapper::getName()) + "/process";
    return cmd.c_str();
  }

  bool stage2(World* world)
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      auto& params = ptr->mParams;
      if(mOverwriteParams) params = mParams;
      auto& client = ptr->mClient;

      Result result = validateParameters(params);
      Wrapper::printResult(world, result);
      if (result.status() != Result::Status::kError)
      {
        client.setSynchronous(mSynchronous);
        // Capture by value: for async jobs the callback fires on the worker
        // thread after this command object has been destroyed
        index id = NRTCommand::mID;
        size_t completionMsgSize = mCompletionMsgSize;
        char* completionMessage = mCompletionMessage;
        void* replyAddress = copyReplyAddress(NRTCommand::mReplyAddress);

        auto callback = [world,id,completionMsgSize,completionMessage,replyAddress](){
          doProcessCallback(world,id,completionMsgSize,completionMessage,replyAddress);
        };

        result = mSynchronous ? client.enqueue(params) : client.enqueue(params,callback);
        Wrapper::printResult(world, result);

        if(result.ok())
        {
          ptr->mDone = false;
          mResult = client.process();
          Wrapper::printResult(world,mResult);

          bool error = mResult.status() == Result::Status::kError;

          if(error) ptr->mDone = true;
          // Continue to stages 3/4 only for successful blocking jobs;
          // async completion is handled by doProcessCallback
          return mSynchronous && !error;
        }
      }
    }
    else
    {
      mResult = Result{Result::Status::kError, "No ", Wrapper::getName(), " with ID ", NRTCommand::mID};
      Wrapper::printResult(world,mResult);
    }
    return false;
  }

  //Only for blocking execution
  bool stage3(World* world) //rt
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      // Swap processed buffer contents back into the server's buffers
      ptr->mParams.template forEachParamType<BufferT, AssignBuffer>(world);
      return true;
    }
    return false;
  }

  //Only for blocking execution
  bool stage4(World*) //nrt
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      ptr->mParams.template forEachParamType<BufferT, impl::CleanUpBuffer>();

      // Negative IDs are internal (synth-triggered) instances: no reply
      if(NRTCommand::mID >= 0 && mSynchronous)
        NRTCommand::sendReply(name(), mResult.ok());
      ptr->mDone = true;
      return true;
    }
    return false;
  }

  bool synchronous()
  {
    return mSynchronous;
  }

  // Stash a completion message to fire once an async job finishes
  void addCompletionMessage(size_t size, char* message)
  {
    mCompletionMsgSize = size;
    mCompletionMessage = message;
  }

  // private:
  Result mResult;
  bool mSynchronous;
  size_t mCompletionMsgSize{0};
  char* mCompletionMessage{nullptr};
  Params mParams;
  bool mOverwriteParams{false};
};
|
||||||
|
|
||||||
|
// "/processNew": fused "/new" + "/process" as a single async command.
struct CommandProcessNew: public NRTCommand
{
  CommandProcessNew(World* world, sc_msg_iter* args,void* replyAddr)
      : mNew{world, args, replyAddr},
        mProcess{mNew.mID,false,nullptr}
  {
    mProcess.mSynchronous = args->geti();
    // mProcess borrows mNew's reply address; cleanup() nulls it so only
    // mNew's destructor frees it (avoiding a double free)
    mProcess.mReplyAddress = mNew.mReplyAddress;
  }

  // NOTE(review): CommandProcess has no single-index constructor visible here,
  // so mProcess{id} looks ill-formed — presumably this overload is never
  // instantiated (class-template members instantiate lazily); confirm before use
  CommandProcessNew(index id, World* world, FloatControlsIter& args, Unit* x)
      : mNew{id, world, args, x},
        mProcess{id}
  {}

  static const char* name()
  {
    static std::string cmd = std::string(Wrapper::getName()) + "/processNew";
    return cmd.c_str();
  }

  // Delegate each pipeline stage: create first, then process
  bool stage2(World* world)
  {
    return mNew.stage2(world) ? mProcess.stage2(world) : false;
  }

  bool stage3(World* world) //rt
  {
    return mProcess.stage3(world);
  }

  bool stage4(World* world) //nrt
  {
    return mProcess.stage4(world);
  }

  void cleanup(World* world)
  {
    // Relinquish the shared reply address before mProcess is destroyed
    mProcess.mReplyAddress = nullptr;
    mProcess.cleanup(world);
  }

  bool synchronous()
  {
    return mProcess.synchronous();
  }

  void addCompletionMessage(size_t size, char* message)
  {
    mProcess.addCompletionMessage(size, message);
  }

private:
  CommandNew mNew;
  CommandProcess mProcess;
};
|
||||||
|
|
||||||
|
|
||||||
|
// "/cancel": request cancellation of a running async job.
struct CommandCancel: public NRTCommand
{
  CommandCancel(World* world, sc_msg_iter* args, void* replyAddr)
      : NRTCommand{world, args, replyAddr}
  {}

  static const char* name()
  {
    static std::string cmd = std::string(Wrapper::getName()) + "/cancel";
    return cmd.c_str();
  }

  bool stage2(World*)
  {
    if(auto ptr = get(NRTCommand::mID).lock())
    {
      auto& client = ptr->mClient;
      // Only async jobs can be cancelled; blocking ones have already finished
      if(!client.synchronous())
      {
        client.cancel();
        return true;
      }
    }
    return false;
  }
};
|
||||||
|
|
||||||
|
struct CommandSetParams: public NRTCommand
|
||||||
|
{
|
||||||
|
CommandSetParams(World* world, sc_msg_iter* args, void* replyAddr)
|
||||||
|
: NRTCommand{world, args, replyAddr}
|
||||||
|
{
|
||||||
|
auto& ar = *args;
|
||||||
|
if(auto ptr = get(NRTCommand::mID).lock())
|
||||||
|
{
|
||||||
|
ptr->mParams.template setParameterValuesRT<ParamsFromOSC>(nullptr, world, ar);
|
||||||
|
Result result = validateParameters(ptr->mParams);
|
||||||
|
ptr->mClient.setParams(ptr->mParams);
|
||||||
|
} else printNotFound(NRTCommand::mID);
|
||||||
|
}
|
||||||
|
|
||||||
|
static const char* name()
|
||||||
|
{
|
||||||
|
static std::string cmd = std::string(Wrapper::getName()) + "/setParams";
|
||||||
|
return cmd.c_str();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
// Bridge a Command object onto scsynth's staged async pipeline:
// stage2 (NRT) -> stage3 (RT) -> stage4 (NRT) -> cleanup + destroy (RT).
// A stage returning false short-circuits the remaining stages (cleanup
// always runs).
template<typename Command>
static auto runAsyncCommand(World* world, Command* cmd, void* replyAddr,
                            size_t completionMsgSize, char* completionMsgData)
{
  auto ft = getInterfaceTable();

  return ft->fDoAsynchronousCommand(world, replyAddr,Command::name(),cmd,
      [](World* w, void* d) { return static_cast<Command*>(d)->stage2(w); },
      [](World* w, void* d) { return static_cast<Command*>(d)->stage3(w); },
      [](World* w, void* d) { return static_cast<Command*>(d)->stage4(w); },
      [](World* w, void* d)
      {
        auto cmd = static_cast<Command*>(d);
        cmd->cleanup(w);
        // Commands are placement-new'd into RT memory: explicit dtor + RT free
        cmd->~Command();
        getInterfaceTable()->fRTFree(w,d);
      },
      static_cast<int>(completionMsgSize), completionMsgData);
}
|
||||||
|
|
||||||
|
|
||||||
|
static auto runAsyncCommand(World* world, CommandProcess* cmd, void* replyAddr,
|
||||||
|
size_t completionMsgSize, char* completionMsgData)
|
||||||
|
{
|
||||||
|
if(!cmd->synchronous())
|
||||||
|
{
|
||||||
|
|
||||||
|
auto msgcopy = (char*)getInterfaceTable()->fRTAlloc(world,completionMsgSize);
|
||||||
|
memcpy(msgcopy, completionMsgData, completionMsgSize);
|
||||||
|
cmd->addCompletionMessage(completionMsgSize,msgcopy);
|
||||||
|
return runAsyncCommand<CommandProcess>(world, cmd, replyAddr, 0, nullptr);
|
||||||
|
}
|
||||||
|
else return runAsyncCommand<CommandProcess>(world, cmd, replyAddr, completionMsgSize, completionMsgData);
|
||||||
|
}
|
||||||
|
|
||||||
|
static auto runAsyncCommand(World* world, CommandProcessNew* cmd, void* replyAddr,
|
||||||
|
size_t completionMsgSize, char* completionMsgData)
|
||||||
|
{
|
||||||
|
if(!cmd->synchronous())
|
||||||
|
{
|
||||||
|
auto msgcopy = (char*)getInterfaceTable()->fRTAlloc(world,completionMsgSize);
|
||||||
|
memcpy(msgcopy, completionMsgData, completionMsgSize);
|
||||||
|
cmd->addCompletionMessage(completionMsgSize,msgcopy);
|
||||||
|
return runAsyncCommand<CommandProcessNew>(world, cmd, replyAddr, 0, nullptr);
|
||||||
|
}
|
||||||
|
else return runAsyncCommand<CommandProcessNew>(world, cmd, replyAddr, completionMsgSize, completionMsgData);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Register Command::name() as a PlugInCmd whose handler constructs a Command
// in RT memory and launches it on the staged async pipeline.
template<typename Command>
static void defineNRTCommand()
{
  auto ft = getInterfaceTable();
  auto commandRunner = [](World* world, void*, struct sc_msg_iter* args, void* replyAddr)
  {
    auto ft = getInterfaceTable();
    void* space = ft->fRTAlloc(world,sizeof(Command));
    Command* cmd = new (space) Command(world, args, replyAddr);
    //This is brittle, but can't think of something better offhand
    //This is the only place we can check for a completion message at the end of the OSC packet
    //because it has to be passed on to DoAsynchronousCommand at this point. However, detecting correctly
    //relies on the Command type having fully consumed arguments from the args iterator in the constructor for cmd
    size_t completionMsgSize{args ? args->getbsize() : 0};
    assert(completionMsgSize <= std::numeric_limits<int>::max());
    char* completionMsgData = nullptr;

    if (completionMsgSize) {
      completionMsgData = (char*)ft->fRTAlloc(world, completionMsgSize);
      args->getb(completionMsgData, completionMsgSize);
    }
    runAsyncCommand(world, cmd, replyAddr, completionMsgSize, completionMsgData);

    // runAsyncCommand (or DoAsynchronousCommand) copies the message: free ours
    if(completionMsgSize) ft->fRTFree(world, completionMsgData);

  };
  ft->fDefinePlugInCmd(Command::name(),commandRunner,nullptr);
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
struct NRTProgressUnit: SCUnit
|
||||||
|
{
|
||||||
|
|
||||||
|
static const char* name()
|
||||||
|
{
|
||||||
|
static std::string n = std::string(Wrapper::getName()) + "Monitor";
|
||||||
|
return n.c_str();
|
||||||
|
}
|
||||||
|
|
||||||
|
NRTProgressUnit()
|
||||||
|
{
|
||||||
|
mInterval = static_cast<index>(0.02 / controlDur());
|
||||||
|
set_calc_function<NRTProgressUnit, &NRTProgressUnit::next>();
|
||||||
|
Wrapper::getInterfaceTable()->fClearUnitOutputs(this, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
void next(int)
|
||||||
|
{
|
||||||
|
if (0 == mCounter++)
|
||||||
|
{
|
||||||
|
index id = static_cast<index>(mInBuf[0][0]);
|
||||||
|
if(auto ptr = get(id).lock())
|
||||||
|
{
|
||||||
|
if(ptr->mClient.done()) mDone = 1;
|
||||||
|
out0(0) = static_cast<float>(ptr->mClient.progress());
|
||||||
|
}
|
||||||
|
else
|
||||||
|
std::cout << "WARNING: No " << Wrapper::getName() << " with ID " << id << std::endl;
|
||||||
|
}
|
||||||
|
mCounter %= mInterval;
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
index mInterval;
|
||||||
|
index mCounter{0};
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
struct NRTTriggerUnit: SCUnit
|
||||||
|
{
|
||||||
|
|
||||||
|
static index count(){
|
||||||
|
static index counter = -1;
|
||||||
|
return counter--;
|
||||||
|
}
|
||||||
|
|
||||||
|
index ControlOffset() { return mSpecialIndex + 1; }
|
||||||
|
|
||||||
|
index ControlSize()
|
||||||
|
{
|
||||||
|
return index(mNumInputs)
|
||||||
|
- mSpecialIndex //used for oddball cases
|
||||||
|
- 3; //id + trig + blocking;
|
||||||
|
}
|
||||||
|
|
||||||
|
static const char* name()
|
||||||
|
{
|
||||||
|
static std::string n = std::string(Wrapper::getName()) + "Trigger";
|
||||||
|
return n.c_str();
|
||||||
|
}
|
||||||
|
|
||||||
|
NRTTriggerUnit()
|
||||||
|
: mControlsIterator{mInBuf + ControlOffset(),ControlSize()},mParams{Client::getParameterDescriptors()}
|
||||||
|
{
|
||||||
|
mID = static_cast<index>(mInBuf[0][0]);
|
||||||
|
if(mID == -1) mID = count();
|
||||||
|
auto cmd = NonRealTime::rtalloc<CommandNew>(mWorld,mID,mWorld, mControlsIterator, this);
|
||||||
|
runAsyncCommand(mWorld, cmd, nullptr, 0, nullptr);
|
||||||
|
mInst = get(mID);
|
||||||
|
set_calc_function<NRTTriggerUnit, &NRTTriggerUnit::next>();
|
||||||
|
Wrapper::getInterfaceTable()->fClearUnitOutputs(this, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
~NRTTriggerUnit()
|
||||||
|
{
|
||||||
|
if(auto ptr = mInst.lock())
|
||||||
|
{
|
||||||
|
auto cmd = NonRealTime::rtalloc<CommandFree>(mWorld,mID);
|
||||||
|
runAsyncCommand(mWorld, cmd, nullptr, 0, nullptr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void next(int)
|
||||||
|
{
|
||||||
|
|
||||||
|
|
||||||
|
index triggerInput = static_cast<index>(mInBuf[static_cast<index>(mNumInputs) - 2][0]);
|
||||||
|
mTrigger = mTrigger || triggerInput;
|
||||||
|
|
||||||
|
// if(auto ptr = mInst->lock())
|
||||||
|
// if(auto ptr = get(mID).lock())
|
||||||
|
// {
|
||||||
|
bool trigger = (!mPreviousTrigger) && triggerInput;//mTrigger;
|
||||||
|
mPreviousTrigger = triggerInput;
|
||||||
|
mTrigger = 0;
|
||||||
|
// auto& client = ptr->mClient;
|
||||||
|
|
||||||
|
if(trigger)
|
||||||
|
{
|
||||||
|
mControlsIterator.reset(1 + mInBuf); //add one for ID
|
||||||
|
// auto& params = ptr->mParams;
|
||||||
|
Wrapper::setParams(this,mParams,mControlsIterator,true,false);
|
||||||
|
bool blocking = mInBuf[mNumInputs - 1][0] > 0;
|
||||||
|
CommandProcess* cmd = rtalloc<CommandProcess>(mWorld,mID,blocking,&mParams);
|
||||||
|
runAsyncCommand(mWorld,cmd, nullptr,0, nullptr);
|
||||||
|
mRunCount++;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if(auto ptr = get(mID).lock())
|
||||||
|
{
|
||||||
|
auto& client = ptr->mClient;
|
||||||
|
mDone = ptr->mDone;
|
||||||
|
out0(0) = mDone ? 1 : static_cast<float>(client.progress());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// }
|
||||||
|
// else printNotFound(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
bool mPreviousTrigger{false};
|
||||||
|
bool mTrigger{false};
|
||||||
|
Result mResult;
|
||||||
|
impl::FloatControlsIter mControlsIterator;
|
||||||
|
index mID;
|
||||||
|
index mRunCount{0};
|
||||||
|
WeakCacheEntryPointer mInst;
|
||||||
|
Params mParams;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct NRTModelQueryUnit: SCUnit
|
||||||
|
{
|
||||||
|
using Delegate = impl::RealTimeBase<Client,Wrapper>;
|
||||||
|
|
||||||
|
index ControlOffset() { return mSpecialIndex + 2; }
|
||||||
|
index ControlSize()
|
||||||
|
{
|
||||||
|
return index(mNumInputs)
|
||||||
|
- mSpecialIndex //used for oddball cases
|
||||||
|
- 2; // trig + id
|
||||||
|
}
|
||||||
|
|
||||||
|
static const char* name()
|
||||||
|
{
|
||||||
|
static std::string n = std::string(Wrapper::getName()) + "Query";
|
||||||
|
return n.c_str();
|
||||||
|
}
|
||||||
|
|
||||||
|
NRTModelQueryUnit()
|
||||||
|
//Offset controls by 1 to account for ID
|
||||||
|
: mControls{mInBuf + ControlOffset(),ControlSize()}
|
||||||
|
{
|
||||||
|
index id = static_cast<index>(in0(1));
|
||||||
|
mInst = get(id);
|
||||||
|
if(auto ptr = mInst.lock())
|
||||||
|
{
|
||||||
|
auto& client = ptr->mClient;
|
||||||
|
mDelegate.init(*this,client,mControls);
|
||||||
|
set_calc_function<NRTModelQueryUnit, &NRTModelQueryUnit::next>();
|
||||||
|
Wrapper::getInterfaceTable()->fClearUnitOutputs(this, 1);
|
||||||
|
}else printNotFound(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
void next(int)
|
||||||
|
{
|
||||||
|
index id = static_cast<index>(in0(1));
|
||||||
|
if(auto ptr = mInst.lock())
|
||||||
|
{
|
||||||
|
auto& client = ptr->mClient;
|
||||||
|
auto& params = ptr->mParams;
|
||||||
|
mControls.reset(mInBuf + ControlOffset());
|
||||||
|
mDelegate.next(*this,client,params,mControls);
|
||||||
|
}else printNotFound(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
Delegate mDelegate;
|
||||||
|
FloatControlsIter mControls;
|
||||||
|
index mID;
|
||||||
|
WeakCacheEntryPointer mInst;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
using ParamSetType = typename Client::ParamSetType;
|
||||||
|
|
||||||
|
template <size_t N, typename T>
|
||||||
|
using SetupMessageCmd = typename FluidSCMessaging<Wrapper,Client>::template SetupMessageCmd<N,T>;
|
||||||
|
|
||||||
|
|
||||||
|
template<bool, typename CommandType>
|
||||||
|
struct DefineCommandIf
|
||||||
|
{
|
||||||
|
void operator()() { }
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
template<typename CommandType>
|
||||||
|
struct DefineCommandIf<true, CommandType>
|
||||||
|
{
|
||||||
|
void operator()() {
|
||||||
|
// std::cout << CommandType::name() << std::endl;
|
||||||
|
defineNRTCommand<CommandType>();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<bool, typename UnitType>
|
||||||
|
struct RegisterUnitIf
|
||||||
|
{
|
||||||
|
void operator()(InterfaceTable*) {}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<typename UnitType>
|
||||||
|
struct RegisterUnitIf<true, UnitType>
|
||||||
|
{
|
||||||
|
void operator()(InterfaceTable* ft) { registerUnit<UnitType>(ft,UnitType::name()); }
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
using IsRTQueryModel_t = typename Client::isRealTime;
|
||||||
|
static constexpr bool IsRTQueryModel = IsRTQueryModel_t::value;
|
||||||
|
|
||||||
|
static constexpr bool IsModel = Client::isModelObject::value;
|
||||||
|
|
||||||
|
|
||||||
|
public:
|
||||||
|
static void setup(InterfaceTable* ft, const char*)
|
||||||
|
{
|
||||||
|
defineNRTCommand<CommandNew>();
|
||||||
|
DefineCommandIf<!IsRTQueryModel, CommandProcess>()();
|
||||||
|
DefineCommandIf<!IsRTQueryModel, CommandProcessNew>()();
|
||||||
|
DefineCommandIf<!IsRTQueryModel, CommandCancel>()();
|
||||||
|
|
||||||
|
DefineCommandIf<IsModel,CommandSetParams>()();
|
||||||
|
|
||||||
|
defineNRTCommand<CommandFree>();
|
||||||
|
RegisterUnitIf<!IsRTQueryModel,NRTProgressUnit>()(ft);
|
||||||
|
RegisterUnitIf<!IsRTQueryModel,NRTTriggerUnit>()(ft);
|
||||||
|
|
||||||
|
RegisterUnitIf<IsRTQueryModel,NRTModelQueryUnit>()(ft);
|
||||||
|
Client::getMessageDescriptors().template iterate<SetupMessageCmd>();
|
||||||
|
|
||||||
|
|
||||||
|
static std::string flushCmd = std::string(Wrapper::getName()) + "/flush";
|
||||||
|
|
||||||
|
ft->fDefinePlugInCmd(flushCmd.c_str(),[](World*, void*, struct sc_msg_iter*, void* ){
|
||||||
|
mCache.clear();
|
||||||
|
},nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void init(){};
|
||||||
|
|
||||||
|
private:
|
||||||
|
static Result validateParameters(ParamSetType& p)
|
||||||
|
{
|
||||||
|
auto results = p.constrainParameterValues();
|
||||||
|
for (auto& r : results)
|
||||||
|
{
|
||||||
|
if (!r.ok()) return r;
|
||||||
|
}
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
template <size_t N, typename T>
|
||||||
|
struct AssignBuffer
|
||||||
|
{
|
||||||
|
void operator()(const typename BufferT::type& p, World* w)
|
||||||
|
{
|
||||||
|
if (auto b = static_cast<SCBufferAdaptor*>(p.get())) b->assignToRT(w);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template <size_t N, typename T>
|
||||||
|
struct CleanUpBuffer
|
||||||
|
{
|
||||||
|
void operator()(const typename BufferT::type& p)
|
||||||
|
{
|
||||||
|
if (auto b = static_cast<SCBufferAdaptor*>(p.get())) b->cleanUp();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
FifoMsg mFifoMsg;
|
||||||
|
char* mCompletionMessage = nullptr;
|
||||||
|
void* mReplyAddr = nullptr;
|
||||||
|
const char* mName = nullptr;
|
||||||
|
index checkThreadInterval;
|
||||||
|
index pollCounter{0};
|
||||||
|
index mPreviousTrigger{0};
|
||||||
|
bool mSynchronous{true};
|
||||||
|
Result mResult;
|
||||||
|
};
|
||||||
|
|
||||||
|
template<typename Client, typename Wrapper>
|
||||||
|
typename NonRealTime<Client, Wrapper>::Cache NonRealTime<Client,Wrapper>::mCache{};
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,166 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <SC_PlugIn.hpp>
|
||||||
|
|
||||||
|
namespace fluid{
|
||||||
|
namespace client{
|
||||||
|
namespace impl{
|
||||||
|
template <typename Client, class Wrapper>
|
||||||
|
struct RealTimeBase
|
||||||
|
{
|
||||||
|
using HostVector = FluidTensorView<float, 1>;
|
||||||
|
using Params = typename Client::ParamSetType;
|
||||||
|
template<typename T, bool>
|
||||||
|
struct doExpectedCount;
|
||||||
|
|
||||||
|
template<typename T>
|
||||||
|
struct doExpectedCount<T, false>
|
||||||
|
{
|
||||||
|
static void count(const T& d,FloatControlsIter& c,Result& status)
|
||||||
|
{
|
||||||
|
if(!status.ok()) return;
|
||||||
|
|
||||||
|
if(c.remain())
|
||||||
|
{
|
||||||
|
index statedSize = d.fixedSize;
|
||||||
|
|
||||||
|
if(c.remain() < statedSize)
|
||||||
|
status = {Result::Status::kError,"Ran out of arguments at ", d.name};
|
||||||
|
|
||||||
|
//fastforward
|
||||||
|
for(index i=0; i < statedSize; ++i) c.next();
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template<typename T>
|
||||||
|
struct doExpectedCount<T, true>
|
||||||
|
{
|
||||||
|
static void count(const T& d,FloatControlsIter& c,Result& status)
|
||||||
|
{
|
||||||
|
if(!status.ok()) return;
|
||||||
|
|
||||||
|
if(c.remain())
|
||||||
|
{
|
||||||
|
index statedSize = 1;
|
||||||
|
|
||||||
|
if(c.remain() < statedSize)
|
||||||
|
status = {Result::Status::kError,"Ran out of arguments at ", d.name};
|
||||||
|
|
||||||
|
//fastforward
|
||||||
|
for(index i=0; i < statedSize; ++i) c.next();
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
template<size_t N, typename T>
|
||||||
|
struct ExpectedCount{
|
||||||
|
void operator ()(const T& descriptor,FloatControlsIter& c, Result& status)
|
||||||
|
{
|
||||||
|
doExpectedCount<T,IsSharedClientRef<typename T::type>::value>::count(descriptor,c,status);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Result expectedSize(FloatControlsIter& controls)
|
||||||
|
{
|
||||||
|
if(controls.size() < Client::getParameterDescriptors().count())
|
||||||
|
{
|
||||||
|
return {Result::Status::kError,"Fewer parameters than exepected. Got ", controls.size(), "expect at least", Client::getParameterDescriptors().count()};
|
||||||
|
}
|
||||||
|
|
||||||
|
Result countScan;
|
||||||
|
Client::getParameterDescriptors().template iterate<ExpectedCount>(
|
||||||
|
std::forward<FloatControlsIter&>(controls),
|
||||||
|
std::forward<Result&>(countScan));
|
||||||
|
return countScan;
|
||||||
|
}
|
||||||
|
|
||||||
|
// static index ControlOffset(Unit* unit) { return unit->mSpecialIndex + 1; }
|
||||||
|
// static index ControlSize(Unit* unit) { return static_cast<index>(unit->mNumInputs) - unit->mSpecialIndex - 1 -(IsModel_t<Client>::value ? 1 : 0); }
|
||||||
|
|
||||||
|
void init(SCUnit& unit, Client& client, FloatControlsIter& controls)
|
||||||
|
{
|
||||||
|
assert(!(client.audioChannelsOut() > 0 && client.controlChannelsOut() > 0) &&"Client can't have both audio and control outputs");
|
||||||
|
// consoltr.reset(unit.mInBuf + unit.mSpecialIndex + 1);
|
||||||
|
client.sampleRate(unit.fullSampleRate());
|
||||||
|
mInputConnections.reserve(asUnsigned(client.audioChannelsIn()));
|
||||||
|
mOutputConnections.reserve(asUnsigned(client.audioChannelsOut()));
|
||||||
|
mAudioInputs.reserve(asUnsigned(client.audioChannelsIn()));
|
||||||
|
mOutputs.reserve(asUnsigned(
|
||||||
|
std::max(client.audioChannelsOut(), client.controlChannelsOut())));
|
||||||
|
|
||||||
|
|
||||||
|
Result r;
|
||||||
|
if(!(r = expectedSize(controls)).ok())
|
||||||
|
{
|
||||||
|
// mCalcFunc = Wrapper::getInterfaceTable()->fClearUnitOutputs;
|
||||||
|
std::cout
|
||||||
|
<< "ERROR: " << Wrapper::getName()
|
||||||
|
<< " wrong number of arguments."
|
||||||
|
<< r.message()
|
||||||
|
<< std::endl;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
for (index i = 0; i < client.audioChannelsIn(); ++i)
|
||||||
|
{
|
||||||
|
mInputConnections.emplace_back(unit.isAudioRateIn(static_cast<int>(i)));
|
||||||
|
mAudioInputs.emplace_back(nullptr, 0, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (index i = 0; i < client.audioChannelsOut(); ++i)
|
||||||
|
{
|
||||||
|
mOutputConnections.emplace_back(true);
|
||||||
|
mOutputs.emplace_back(nullptr, 0, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (index i = 0; i < client.controlChannelsOut(); ++i)
|
||||||
|
{
|
||||||
|
mOutputs.emplace_back(nullptr, 0, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void next(SCUnit& unit, Client& client,Params& params,FloatControlsIter& controls)
|
||||||
|
{
|
||||||
|
bool trig = IsModel_t<Client>::value ? !mPrevTrig && unit.in0(0) > 0 : false;
|
||||||
|
|
||||||
|
mPrevTrig = trig;
|
||||||
|
|
||||||
|
Wrapper::setParams(&unit, params, controls);
|
||||||
|
params.constrainParameterValuesRT(nullptr);
|
||||||
|
|
||||||
|
for (index i = 0; i < client.audioChannelsIn(); ++i)
|
||||||
|
{
|
||||||
|
assert(i <= std::numeric_limits<int>::max());
|
||||||
|
if (mInputConnections[asUnsigned(i)])
|
||||||
|
mAudioInputs[asUnsigned(i)].reset(const_cast<float*>(unit.in(static_cast<int>(i))), 0,unit.fullBufferSize());
|
||||||
|
}
|
||||||
|
|
||||||
|
for (index i = 0; i < client.audioChannelsOut(); ++i)
|
||||||
|
{
|
||||||
|
assert(i <= std::numeric_limits<int>::max());
|
||||||
|
if (mOutputConnections[asUnsigned(i)])
|
||||||
|
mOutputs[asUnsigned(i)].reset(unit.out(static_cast<int>(i)), 0,
|
||||||
|
unit.fullBufferSize());
|
||||||
|
}
|
||||||
|
|
||||||
|
for (index i = 0; i < client.controlChannelsOut(); ++i)
|
||||||
|
{
|
||||||
|
assert(i <= std::numeric_limits<int>::max());
|
||||||
|
mOutputs[asUnsigned(i)].reset(unit.out(static_cast<int>(i)), 0, 1);
|
||||||
|
}
|
||||||
|
client.process(mAudioInputs, mOutputs, mContext);
|
||||||
|
}
|
||||||
|
private:
|
||||||
|
std::vector<bool> mInputConnections;
|
||||||
|
std::vector<bool> mOutputConnections;
|
||||||
|
std::vector<HostVector> mAudioInputs;
|
||||||
|
std::vector<HostVector> mOutputs;
|
||||||
|
FluidContext mContext;
|
||||||
|
bool mPrevTrig;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,129 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "ArgsFromClient.hpp"
|
||||||
|
#include "Meta.hpp"
|
||||||
|
#include "RealTimeBase.hpp"
|
||||||
|
#include <clients/common/FluidBaseClient.hpp>
|
||||||
|
#include <SC_PlugIn.hpp>
|
||||||
|
|
||||||
|
// Real Time Processor
|
||||||
|
namespace fluid {
|
||||||
|
namespace client {
|
||||||
|
namespace impl {
|
||||||
|
|
||||||
|
template <typename Client, class Wrapper>
|
||||||
|
class RealTime : public SCUnit
|
||||||
|
{
|
||||||
|
|
||||||
|
using Delegate = impl::RealTimeBase<Client,Wrapper>;
|
||||||
|
using Params = typename Client::ParamSetType;
|
||||||
|
|
||||||
|
public:
|
||||||
|
|
||||||
|
// static index ControlOffset(Unit* unit) { return Delegate::ControlOffset(unit); }
|
||||||
|
// static index ControlSize(Unit* unit) { return Delegate::ControlSize(unit); }
|
||||||
|
|
||||||
|
static index ControlOffset(Unit* unit) { return unit->mSpecialIndex + 1; }
|
||||||
|
static index ControlSize(Unit* unit)
|
||||||
|
{
|
||||||
|
return static_cast<index>(unit->mNumInputs)
|
||||||
|
- unit->mSpecialIndex
|
||||||
|
- 1
|
||||||
|
- (IsModel_t<Client>::value ? 1 : 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
static void setup(InterfaceTable* ft, const char* name)
|
||||||
|
{
|
||||||
|
|
||||||
|
registerUnit<RealTime>(ft,name);
|
||||||
|
ft->fDefineUnitCmd(name, "latency", doLatency);
|
||||||
|
}
|
||||||
|
|
||||||
|
static void doLatency(Unit* unit, sc_msg_iter*)
|
||||||
|
{
|
||||||
|
float l[]{
|
||||||
|
static_cast<float>(static_cast<RealTime*>(unit)->mClient.latency())
|
||||||
|
};
|
||||||
|
auto ft = Wrapper::getInterfaceTable();
|
||||||
|
|
||||||
|
std::stringstream ss;
|
||||||
|
ss << '/' << Wrapper::getName() << "_latency";
|
||||||
|
// std::cout << ss.str() << ": " << l[0] << std::endl;
|
||||||
|
ft->fSendNodeReply(&unit->mParent->mNode, -1, ss.str().c_str(), 1, l);
|
||||||
|
}
|
||||||
|
|
||||||
|
RealTime()
|
||||||
|
: mControls{mInBuf + ControlOffset(this),ControlSize(this)},
|
||||||
|
mClient{Wrapper::setParams(this, mParams, mControls,true)}
|
||||||
|
{
|
||||||
|
init();
|
||||||
|
}
|
||||||
|
|
||||||
|
void init()
|
||||||
|
{
|
||||||
|
// auto& client = mClient;
|
||||||
|
|
||||||
|
mDelegate.init(*this,mClient,mControls);
|
||||||
|
mCalcFunc = make_calc_function<RealTime, &RealTime::next>();
|
||||||
|
Wrapper::getInterfaceTable()->fClearUnitOutputs(this, 1);
|
||||||
|
|
||||||
|
// assert(
|
||||||
|
// !(client.audioChannelsOut() > 0 && client.controlChannelsOut() > 0) &&
|
||||||
|
// "Client can't have both audio and control outputs");
|
||||||
|
//
|
||||||
|
// Result r;
|
||||||
|
// if(!(r = expectedSize(mWrapper->mControlsIterator)).ok())
|
||||||
|
// {
|
||||||
|
// mCalcFunc = Wrapper::getInterfaceTable()->fClearUnitOutputs;
|
||||||
|
// std::cout
|
||||||
|
// << "ERROR: " << Wrapper::getName()
|
||||||
|
// << " wrong number of arguments."
|
||||||
|
// << r.message()
|
||||||
|
// << std::endl;
|
||||||
|
// return;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// mWrapper->mControlsIterator.reset(mInBuf + mSpecialIndex + 1);
|
||||||
|
//
|
||||||
|
// client.sampleRate(fullSampleRate());
|
||||||
|
// mInputConnections.reserve(asUnsigned(client.audioChannelsIn()));
|
||||||
|
// mOutputConnections.reserve(asUnsigned(client.audioChannelsOut()));
|
||||||
|
// mAudioInputs.reserve(asUnsigned(client.audioChannelsIn()));
|
||||||
|
// mOutputs.reserve(asUnsigned(
|
||||||
|
// std::max(client.audioChannelsOut(), client.controlChannelsOut())));
|
||||||
|
//
|
||||||
|
// for (index i = 0; i < client.audioChannelsIn(); ++i)
|
||||||
|
// {
|
||||||
|
// mInputConnections.emplace_back(isAudioRateIn(static_cast<int>(i)));
|
||||||
|
// mAudioInputs.emplace_back(nullptr, 0, 0);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// for (index i = 0; i < client.audioChannelsOut(); ++i)
|
||||||
|
// {
|
||||||
|
// mOutputConnections.emplace_back(true);
|
||||||
|
// mOutputs.emplace_back(nullptr, 0, 0);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// for (index i = 0; i < client.controlChannelsOut(); ++i)
|
||||||
|
// { mOutputs.emplace_back(nullptr, 0, 0); }
|
||||||
|
//
|
||||||
|
// mCalcFunc = make_calc_function<RealTime, &RealTime::next>();
|
||||||
|
// Wrapper::getInterfaceTable()->fClearUnitOutputs(this, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
void next(int)
|
||||||
|
{
|
||||||
|
mControls.reset(mInBuf + ControlOffset(this));
|
||||||
|
mDelegate.next(*this,mClient,mParams,mControls);
|
||||||
|
}
|
||||||
|
private:
|
||||||
|
Delegate mDelegate;
|
||||||
|
FloatControlsIter mControls;
|
||||||
|
Params mParams{Client::getParameterDescriptors()};
|
||||||
|
Client mClient;
|
||||||
|
Wrapper* mWrapper{static_cast<Wrapper*>(this)};
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,13 @@
|
|||||||
|
FluidAudioTransport : FluidRTUGen {
|
||||||
|
|
||||||
|
init { |...theInputs|
|
||||||
|
theInputs;
|
||||||
|
inputs = theInputs;
|
||||||
|
this.specialIndex = 1; //two audio inputs
|
||||||
|
// ^this.initOutputs(1,rate);
|
||||||
|
}
|
||||||
|
|
||||||
|
*ar { arg in = 0, in2 = 0, interpolation = 0.0, windowSize = 1024, hopSize = -1, fftSize = -1, maxFFTSize = 16384;
|
||||||
|
^this.multiNew('audio', in.asAudioRateInput, in2, interpolation, windowSize, hopSize, fftSize, maxFFTSize)
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,37 +1,44 @@
|
|||||||
FluidBufAmpGate : UGen {
|
FluidBufAmpGate : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, doneAction = 0, blocking|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, trig = 1, blocking = 0|
|
||||||
var maxSize = max(minLengthAbove + lookBack, max(minLengthBelow,lookAhead));
|
|
||||||
|
var maxSize = max(minLengthAbove + lookBack, max(minLengthBelow,lookAhead));
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source = source.asUGenInput;
|
||||||
indices = indices.asUGenInput;
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
source.isNil.if {"FluidBufAmpSlice: Invalid source buffer".throw};
|
^FluidProxyUgen.kr(\FluidBufAmpGateTrigger,-1, source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq,maxSize, trig, blocking);
|
||||||
indices.isNil.if {"FluidBufAmpSlice: Invalid features buffer".throw};
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq, maxSize, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, doneAction = 0|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, freeWhenDone = true, action |
|
||||||
|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq, doneAction,blocking:0);
|
|
||||||
}
|
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, action |
|
var maxSize = max(minLengthAbove + lookBack, max(minLengthBelow,lookAhead));
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
source = source ? -1;
|
||||||
server, this, action, [indices]
|
indices = indices ? -1;
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq
|
^this.new(
|
||||||
|
server, nil, [indices]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq, maxSize, 0],freeWhenDone,action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, rampUp = 10, rampDown = 10, onThreshold = -90, offThreshold = -90, minSliceLength = 1, minSilenceLength = 1, minLengthAbove = 1, minLengthBelow = 1, lookBack = 0, lookAhead = 0, highPassFreq = 85, freeWhenDone = true, action |
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [indices], blocking: 1
|
var maxSize = max(minLengthAbove + lookBack, max(minLengthBelow,lookAhead));
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq
|
source = source ? -1;
|
||||||
);
|
indices = indices ? -1;
|
||||||
}
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [indices]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, indices, rampUp, rampDown, onThreshold, offThreshold, minSliceLength, minSilenceLength, minLengthAbove, minLengthBelow, lookBack, lookAhead, highPassFreq, maxSize, 1],freeWhenDone,action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
FluidBufAmpGateTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,36 +1,40 @@
|
|||||||
FluidBufAmpSlice : UGen {
|
FluidBufAmpSlice : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, doneAction = 0, blocking|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, trig = 1, blocking = 0|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source.isNil.if {"FluidBufAmpSlice: Invalid source buffer".throw};
|
||||||
indices = indices.asUGenInput;
|
indices.isNil.if {"FluidBufAmpSlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
source.isNil.if {"FluidBufAmpSlice: Invalid source buffer".throw};
|
^FluidProxyUgen.kr(\FluidBufAmpSliceTrigger, -1, source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq, trig, blocking);
|
||||||
indices.isNil.if {"FluidBufAmpSlice: Invalid features buffer".throw};
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, doneAction = 0|
|
*process { |server,source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, freeWhenDone = true, action |
|
||||||
|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq, doneAction,blocking:0);
|
source = source.asUGenInput;
|
||||||
}
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
*process { |server,source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, action |
|
source.isNil.if {"FluidBufAmpSlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufAmpSlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
^this.new(server, nil, [indices]).processList(
|
||||||
server, this, action, [indices]
|
[source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq,0],freeWhenDone, action
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server,source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, action|
|
*processBlocking { |server,source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, fastRampUp = 1, fastRampDown = 1, slowRampUp = 100, slowRampDown = 100, onThreshold = -144, offThreshold = -144, floor = -144, minSliceLength = 2, highPassFreq = 85, freeWhenDone = true, action |
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufAmpSlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufAmpSlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
^this.new(server, nil, [indices]).processList(
|
||||||
server, this, action, [indices], blocking: 1
|
[source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq,1],freeWhenDone, action
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, fastRampUp, fastRampDown, slowRampUp, slowRampDown, onThreshold, offThreshold, floor, minSliceLength, highPassFreq
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufAmpSliceTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,59 @@
|
|||||||
|
FluidBufAudioTransport : FluidBufProcessor {
|
||||||
|
|
||||||
|
*objectClassName{
|
||||||
|
^\FluidBufAudioTransp
|
||||||
|
}
|
||||||
|
|
||||||
|
*kr { |source1, startFrame1 = 0, numFrames1 = -1, startChan1 = 0, numChans1 = -1, source2, startFrame2 = 0, numFrames2 = -1, startChan2 = 0, numChans2 = -1, destination, interpolation = 0.0, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
source1.isNil.if {"FluidAudioTransport: Invalid source 1 buffer".throw};
|
||||||
|
source2.isNil.if {"FluidAudioTransport: Invalid source 2 buffer".throw};
|
||||||
|
source1 = source1.asUGenInput;
|
||||||
|
source2 = source2.asUGenInput;
|
||||||
|
|
||||||
|
destination.isNil.if {"FluidAudioTransport: Invalid destination buffer".throw};
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(this.objectClassName++\Trigger,-1, source1, startFrame1, numFrames1, startChan1, numChans1, source2, startFrame1, numFrames1, startChan2, numChans2, destination, interpolation, windowSize, hopSize, fftSize, maxFFTSize, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
*process { |server, source1, startFrame1 = 0, numFrames1 = -1, startChan1 = 0, numChans1 = -1, source2, startFrame2 = 0, numFrames2 = -1, startChan2 = 0, numChans2 = -1, destination, interpolation=0.0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
source1.isNil.if {"FluidAudioTransport: Invalid source 1 buffer".throw};
|
||||||
|
source2.isNil.if {"FluidAudioTransport: Invalid source 2 buffer".throw};
|
||||||
|
source1 = source1.asUGenInput;
|
||||||
|
source2 = source2.asUGenInput;
|
||||||
|
|
||||||
|
destination.isNil.if {"FluidAudioTransport: Invalid destination buffer".throw};
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source1, startFrame1, numFrames1, startChan1, numChans1, source2, startFrame2, numFrames2, startChan2, numChans2, destination, interpolation, windowSize, hopSize, fftSize,maxFFTSize,0], freeWhenDone, action
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source1, startFrame1 = 0, numFrames1 = -1, startChan1 = 0, numChans1 = -1, source2, startFrame2 = 0, numFrames2 = -1, startChan2 = 0, numChans2 = -1, destination, interpolation=0.0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
source1.isNil.if {"FluidAudioTransport: Invalid source 1 buffer".throw};
|
||||||
|
source2.isNil.if {"FluidAudioTransport: Invalid source 2 buffer".throw};
|
||||||
|
source1 = source1.asUGenInput;
|
||||||
|
source2 = source2.asUGenInput;
|
||||||
|
|
||||||
|
destination.isNil.if {"FluidAudioTransport: Invalid destination buffer".throw};
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source1, startFrame1, numFrames1, startChan1, numChans1, source2, startFrame2, numFrames2, startChan2, numChans2, destination, interpolation, windowSize, hopSize, fftSize,maxFFTSize,1], freeWhenDone, action
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufAudioTransportTrigger : FluidProxyUgen {}
|
||||||
@ -1,34 +1,39 @@
|
|||||||
FluidBufCompose : UGen {
|
FluidBufCompose : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, doneAction = 0, blocking|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source.isNil.if {"FluidBufCompose: Invalid source buffer".throw};
|
||||||
destination = destination.asUGenInput;
|
destination.isNil.if {"FluidBufCompose: Invalid destination buffer".throw};
|
||||||
|
|
||||||
source.isNil.if {"FluidBufCompose: Invalid source buffer".throw};
|
^FluidProxyUgen.kr(\FluidBufComposeTrigger,-1, source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain, trig, blocking);
|
||||||
destination.isNil.if {"FluidBufCompose: Invalid destination buffer".throw};
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* *kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, doneAction = 0|
|
|
||||||
|
|
||||||
^this.multiNew('control', source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain, doneAction, blocking:1);
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, freeWhenDone = true, action|
|
||||||
}*/
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufCompose: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufCompose: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new( server, nil, [destination]).processList([source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain, 1], freeWhenDone, action);//NB always blocking
|
||||||
|
}
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [destination], blocking:1
|
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain
|
|
||||||
);
|
|
||||||
|
|
||||||
}
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, gain = 1, destination, destStartFrame = 0, destStartChan = 0, destGain = 0, action|
|
source.isNil.if {"FluidBufCompose: Invalid source buffer".throw};
|
||||||
^process(
|
destination.isNil.if {"FluidBufCompose: Invalid destination buffer".throw};
|
||||||
source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain
|
|
||||||
);
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList([source, startFrame, numFrames, startChan, numChans, gain, destination, destStartFrame, destStartChan, destGain, 1], freeWhenDone, action);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufComposeTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,47 @@
|
|||||||
|
FluidBufFlatten : FluidBufProcessor {
|
||||||
|
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, axis = 1, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufFlatten: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufFlatten: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufFlattenTrigger,-1, source, startFrame, numFrames, startChan, numChans, destination, axis, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, axis = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufFlatten: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufFlatten: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination],
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, axis,0],freeWhenDone,action
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, axis = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufFlatten: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufFlatten: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination],
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, axis,1],freeWhenDone,action
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufFlattenTrigger : FluidProxyUgen {}
|
||||||
@ -1,45 +1,52 @@
|
|||||||
FluidBufHPSS : UGen {
|
FluidBufHPSS : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 {|rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0, blocking|
|
*kr {|source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
source = source.asUGenInput;
|
harmonic = harmonic ? -1;
|
||||||
harmonic = harmonic.asUGenInput;
|
percussive = percussive ? -1;
|
||||||
percussive = percussive.asUGenInput;
|
residual = residual ? -1;
|
||||||
residual = residual.asUGenInput;
|
source.isNil.if {"FluidBufHPSS: Invalid source buffer".throw};
|
||||||
source.isNil.if {"FluidBufHPSS: Invalid source buffer".throw};
|
|
||||||
|
|
||||||
//NB For wrapped versions of NRT classes, we set the params for maxima to
|
^FluidProxyUgen.kr(\FluidBufHPSSTrigger, -1, source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize, maxFFTSize, harmFilterSize, percFilterSize, trig, blocking
|
||||||
//whatever has been passed in language-side (e.g maxFFTSize still exists as a parameter for the server plugin, but makes less sense here: it just needs to be set to a legal value)
|
);
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize, maxFFTSize, harmFilterSize, percFilterSize, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr {|source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0|
|
*process {|server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone=true, action|
|
||||||
|
|
||||||
^this.multiNew(
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
'control', source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize, doneAction, blocking:0
|
|
||||||
);
|
harmonic = harmonic ? -1;
|
||||||
}
|
percussive = percussive ? -1;
|
||||||
|
residual = residual ? -1;
|
||||||
|
source.isNil.if {"FluidBufHPSS: Invalid source buffer".throw};
|
||||||
|
|
||||||
*process {|server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
^this.new(
|
||||||
server, this, action, [harmonic, percussive, residual].select{|x| x!= -1}
|
server, nil, [harmonic, percussive, residual].select{|x| x!= -1}
|
||||||
).process(
|
).processList(
|
||||||
source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize
|
[source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize, maxFFTSize, harmFilterSize, percFilterSize,0], freeWhenDone,action
|
||||||
);
|
);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking {|server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
*processBlocking {|server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, harmonic = -1, percussive = -1, residual = -1, harmFilterSize = 17, percFilterSize = 31, maskingMode = 0, harmThreshFreq1 = 0.1, harmThreshAmp1 = 0, harmThreshFreq2 = 0.5, harmThreshAmp2 = 0, percThreshFreq1 = 0.1, percThreshAmp1 = 0, percThreshFreq2 = 0.5, percThreshAmp2 = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone=true, action|
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
server, this, action, [harmonic, percussive, residual].select{|x| x!= -1}, blocking:1
|
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize
|
|
||||||
);
|
|
||||||
|
|
||||||
}
|
harmonic = harmonic ? -1;
|
||||||
|
percussive = percussive ? -1;
|
||||||
|
residual = residual ? -1;
|
||||||
|
source.isNil.if {"FluidBufHPSS: Invalid source buffer".throw};
|
||||||
|
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [harmonic, percussive, residual].select{|x| x!= -1}
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, harmonic, percussive, residual, harmFilterSize, percFilterSize, maskingMode, harmThreshFreq1, harmThreshAmp1, harmThreshFreq2, harmThreshAmp2, percThreshFreq1, percThreshAmp1, percThreshFreq2, percThreshAmp2, windowSize, hopSize, fftSize, maxFFTSize,harmFilterSize, percFilterSize,1], freeWhenDone,action
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufHPSSTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,38 +1,51 @@
|
|||||||
FluidBufMelBands : UGen {
|
FluidBufMelBands : FluidBufProcessor {
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0, blocking = 0|
|
|
||||||
|
|
||||||
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
source = source.asUGenInput;
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
features = features.asUGenInput;
|
|
||||||
|
|
||||||
source.isNil.if {"FluidBufMelBands: Invalid source buffer".throw};
|
source = source.asUGenInput;
|
||||||
features.isNil.if {"FluidBufMelBands: Invalid features buffer".throw};
|
features = features.asUGenInput;
|
||||||
|
|
||||||
//NB For wrapped versions of NRT classes, we set the params for maxima to
|
source.isNil.if {"FluidBufMelBands: Invalid source buffer".throw};
|
||||||
//whatever has been passed in language-side (e.g maxFFTSize still exists as a parameter for the server plugin, but makes less sense here: it just needs to be set to a legal value)
|
features.isNil.if {"FluidBufMelBands: Invalid features buffer".throw};
|
||||||
// same for maxNumBands which is passed numBands
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, features, numBands, minFreq, maxFreq, numBands, normalize, windowSize, hopSize, fftSize, maxFFTSize, doneAction, blocking);
|
|
||||||
}
|
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0|
|
^FluidProxyUgen.kr(\FluidBufMelBandsTrigger,-1, source, startFrame, numFrames, startChan, numChans, features, padding, numBands, minFreq, maxFreq, numBands, normalize, windowSize, hopSize, fftSize, maxFFTSize, trig, blocking);
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, features, numBands, minFreq, maxFreq, numBands, normalize, windowSize, hopSize, fftSize, doneAction);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [features]
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, features, numBands, minFreq, maxFreq, normalize, windowSize, hopSize, fftSize
|
source = source.asUGenInput;
|
||||||
|
features = features.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufMelBands: Invalid source buffer".throw};
|
||||||
|
features.isNil.if {"FluidBufMelBands: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [features]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, features, padding, numBands, minFreq, maxFreq, numBands, normalize, windowSize, hopSize, fftSize, maxFFTSize, 0],freeWhenDone,action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, numBands = 40, minFreq = 20, maxFreq = 20000, normalize = 1, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [features], blocking:1
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, features, numBands, minFreq, maxFreq, normalize, windowSize, hopSize, fftSize
|
source = source.asUGenInput;
|
||||||
|
features = features.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufMelBands: Invalid source buffer".throw};
|
||||||
|
features.isNil.if {"FluidBufMelBands: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [features]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, features, padding, numBands, minFreq, maxFreq, numBands, normalize, windowSize, hopSize, fftSize, maxFFTSize, 1],freeWhenDone,action
|
||||||
);
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
FluidBufMelBandsTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,47 +1,37 @@
|
|||||||
FluidBufNMF : UGen {
|
FluidBufNMF : FluidBufProcessor
|
||||||
|
{
|
||||||
*new1 {|rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth, bases, basesMode = 0, activations, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1, doneAction = 0, blocking = 0|
|
*kr {|source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth, bases, basesMode = 0, activations, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
source = source.asUGenInput;
|
|
||||||
resynth = resynth.asUGenInput;
|
|
||||||
bases = bases.asUGenInput;
|
|
||||||
activations = activations.asUGenInput;
|
|
||||||
|
|
||||||
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
||||||
resynth = resynth ? -1;
|
resynth = resynth ? -1;
|
||||||
bases = bases ? -1;
|
bases = bases ? -1;
|
||||||
activations = activations ? -1;
|
activations = activations ? -1;
|
||||||
|
|
||||||
^super.new1(rate,source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components, iterations, windowSize, hopSize, fftSize, doneAction, blocking);
|
^FluidProxyUgen.kr(\FluidBufNMFTrigger,-1,source.asUGenInput, startFrame, numFrames, startChan, numChans, resynth.asUGenInput, bases.asUGenInput, basesMode, activations.asUGenInput, actMode, components, iterations, windowSize, hopSize, fftSize, trig, blocking);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth = -1, bases = -1, basesMode = 0, activations = -1, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1,freeWhenDone = true, action|
|
||||||
|
|
||||||
*kr {|source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth, bases, basesMode = 0, activations, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1, doneAction = 0|
|
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
||||||
^this.multiNew(\control,source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components, iterations, windowSize, hopSize, fftSize, doneAction);
|
resynth = resynth ? -1;
|
||||||
|
bases = bases ? -1;
|
||||||
}
|
activations = activations ? -1;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth = -1, bases = -1, basesMode = 0, activations = -1, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1, action|
|
server,nil,[resynth, bases, activations].select{|x| x!= -1}
|
||||||
|
).processList([source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components,iterations, windowSize, hopSize, fftSize,0],freeWhenDone,action);
|
||||||
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
|
||||||
|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [resynth, bases, activations].select{|x| x!= -1}
|
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components,iterations, windowSize, hopSize, fftSize, windowType, randomSeed
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth = -1, bases = -1, basesMode = 0, activations = -1, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1,freeWhenDone = true, action|
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, resynth = -1, bases = -1, basesMode = 0, activations = -1, actMode = 0, components = 1, iterations = 100, windowSize = 1024, hopSize = -1, fftSize = -1, windowType = 0, randomSeed = -1, action|
|
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
||||||
|
resynth = resynth ? -1;
|
||||||
source.isNil.if {"FluidBufNMF: Invalid source buffer".throw};
|
bases = bases ? -1;
|
||||||
|
activations = activations ? -1;
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [resynth, bases, activations].select{|x| x!= -1},blocking: 1
|
^this.new(
|
||||||
).process(
|
server,nil,[resynth, bases, activations].select{|x| x!= -1}
|
||||||
source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components,iterations, windowSize, hopSize, fftSize, windowType, randomSeed
|
).processList([source, startFrame, numFrames, startChan, numChans, resynth, bases, basesMode, activations, actMode, components,iterations, windowSize, hopSize, fftSize, 1],freeWhenDone,action);
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufNMFTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,49 @@
|
|||||||
|
FluidBufNMFCross : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, target, output , timeSparsity = 7, polyphony = 10, continuity = 7, iterations = 50, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
target = target.asUGenInput;
|
||||||
|
output = output.asUGenInput;
|
||||||
|
source.isNil.if {"FluidBufNMFCross: Invalid source buffer".throw};
|
||||||
|
target.isNil.if {"FluidBufNMFCross: Invalid target buffer".throw};
|
||||||
|
output.isNil.if {"FluidBufNMFCross: Invalid output buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufNMFCrossTrigger, -1, source, target, output, timeSparsity, polyphony, continuity, iterations, windowSize, hopSize, fftSize, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, target, output , timeSparsity = 7, polyphony = 10, continuity = 7, iterations = 50, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
target = target.asUGenInput;
|
||||||
|
output = output.asUGenInput;
|
||||||
|
source.isNil.if {"FluidBufNMFCross: Invalid source buffer".throw};
|
||||||
|
target.isNil.if {"FluidBufNMFCross: Invalid target buffer".throw};
|
||||||
|
output.isNil.if {"FluidBufNMFCross: Invalid output buffer".throw};
|
||||||
|
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [output]
|
||||||
|
).processList(
|
||||||
|
[source, target, output, timeSparsity, polyphony, continuity, iterations, windowSize, hopSize, fftSize,0],freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, target, output , timeSparsity = 7, polyphony = 10, continuity = 7, iterations = 50, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
target = target.asUGenInput;
|
||||||
|
output = output.asUGenInput;
|
||||||
|
source.isNil.if {"FluidBufNMFCross: Invalid source buffer".throw};
|
||||||
|
target.isNil.if {"FluidBufNMFCross: Invalid target buffer".throw};
|
||||||
|
output.isNil.if {"FluidBufNMFCross: Invalid output buffer".throw};
|
||||||
|
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [output]
|
||||||
|
).processList(
|
||||||
|
[source, target, output, timeSparsity, polyphony, continuity, iterations, windowSize, hopSize, fftSize,1],freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufNMFCrossTrigger : FluidProxyUgen {}
|
||||||
@ -0,0 +1,48 @@
|
|||||||
|
FluidBufNNDSVD : FluidBufProcessor{
|
||||||
|
|
||||||
|
*kr { |source, bases, activations, minComponents = 1, maxComponents = 200, coverage = 0.5, method = 0, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNNDSVD: Invalid source buffer".throw};
|
||||||
|
bases.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
activations.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
source = source.asUGenInput;
|
||||||
|
bases = bases.asUGenInput;
|
||||||
|
activations = activations.asUGenInput;
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr1(\FluidBufNNDSVDTrigger, -1, source, bases, activations, minComponents, maxComponents, coverage, method, windowSize, hopSize, fftSize, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
*process { |server, source, bases, activations, minComponents = 1, maxComponents = 200, coverage = 0.5, method = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNNDSVD: Invalid source buffer".throw};
|
||||||
|
bases.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
activations.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
source = source.asUGenInput;
|
||||||
|
bases = bases.asUGenInput;
|
||||||
|
activations = activations.asUGenInput;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [bases]
|
||||||
|
).processList(
|
||||||
|
[source, bases, activations, minComponents, maxComponents, coverage, method, windowSize, hopSize, fftSize,0],freeWhenDone, action
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, bases, activations, minComponents = 1, maxComponents = 200, coverage = 0.5, method = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNNDSVD: Invalid source buffer".throw};
|
||||||
|
bases.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
activations.isNil.if {"FluidBufNNDSVD: Invalid bases buffer".throw};
|
||||||
|
source = source.asUGenInput;
|
||||||
|
bases = bases.asUGenInput;
|
||||||
|
activations = activations.asUGenInput;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [bases]
|
||||||
|
).processList(
|
||||||
|
[source, bases, activations, minComponents, maxComponents, coverage, method, windowSize, hopSize, fftSize,1],freeWhenDone, action
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufNNDSVDTrigger : FluidProxyUgen {}
|
||||||
@ -1,37 +1,49 @@
|
|||||||
FluidBufOnsetSlice : UGen {
|
FluidBufOnsetSlice : FluidBufProcessor {
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0, blocking = 0|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source = source.asUGenInput;
|
||||||
indices = indices.asUGenInput;
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufOnsetSlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufOnsetSlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufOnsetSliceTrigger, -1, source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize, maxFFTSize, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
source.isNil.if {"FluidBufOnsetSlice: Invalid source buffer".throw};
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
indices.isNil.if {"FluidBufOnsetSlice: Invalid features buffer".throw};
|
|
||||||
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
//NB For wrapped versions of NRT classes, we set the params for maxima to
|
source = source.asUGenInput;
|
||||||
//whatever has been passed in language-side (e.g maxFFTSize still exists as a parameter for the server plugin, but makes less sense here: it just needs to be set to a legal value)
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize, maxFFTSize, doneAction, blocking);
|
source.isNil.if {"FluidBufOnsetSlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufOnsetSlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [indices]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize,maxFFTSize,0],freeWhenDone,action
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize, doneAction);
|
|
||||||
}
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
source = source.asUGenInput;
|
||||||
^FluidNRTProcess.new(
|
indices = indices.asUGenInput;
|
||||||
server, this, action, [indices]
|
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, metric = 0, threshold = 0.5, minSliceLength = 2, filterSize = 5, frameDelta = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
source.isNil.if {"FluidBufOnsetSlice: Invalid source buffer".throw};
|
||||||
^FluidNRTProcess.new(
|
indices.isNil.if {"FluidBufOnsetSlice: Invalid features buffer".throw};
|
||||||
server, this, action, [indices], blocking: 1
|
|
||||||
).process(
|
^this.new(
|
||||||
source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize
|
server, nil, [indices]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, indices, metric, threshold, minSliceLength, filterSize, frameDelta, windowSize, hopSize, fftSize,maxFFTSize,1],freeWhenDone,action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufOnsetSliceTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,39 +1,51 @@
|
|||||||
FluidBufPitch : UGen{
|
FluidBufPitch : FluidBufProcessor{
|
||||||
|
|
||||||
*new1 {|rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0, blocking = 0|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source = source.asUGenInput;
|
||||||
features = features.asUGenInput;
|
features = features.asUGenInput;
|
||||||
|
|
||||||
source.isNil.if {"FluidBufPitch: Invalid source buffer".throw};
|
source.isNil.if {"FluidBufPitch: Invalid source buffer".throw};
|
||||||
features.isNil.if {"FluidBufPitch: Invalid features buffer".throw};
|
features.isNil.if {"FluidBufPitch: Invalid features buffer".throw};
|
||||||
|
|
||||||
//NB For wrapped versions of NRT classes, we set the params for maxima to
|
^FluidProxyUgen.kr(\FluidBufPitchTrigger, -1, source, startFrame, numFrames, startChan, numChans, features, padding, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize, maxFFTSize, trig, blocking);
|
||||||
//whatever has been passed in language-side (e.g maxFFTSize still exists as a parameter for the server plugin, but makes less sense here: it just needs to be set to a legal value)
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, features, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize, maxFFTSize, doneAction, blocking);
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr {|source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, freeWhenDone = true, action|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, features, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize, doneAction);
|
|
||||||
|
|
||||||
}
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
features = features.asUGenInput;
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
source.isNil.if {"FluidBufPitch: Invalid source buffer".throw};
|
||||||
^FluidNRTProcess.new(
|
features.isNil.if {"FluidBufPitch: Invalid features buffer".throw};
|
||||||
server, this, action, [features]
|
|
||||||
).process(
|
^this.new(
|
||||||
source, startFrame, numFrames, startChan, numChans, features, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize
|
server, nil, [features]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, features, padding, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize, maxFFTSize, 0], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, features, algorithm = 2, minFreq = 20, maxFreq = 10000, unit = 0, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [features], blocking: 1
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, features, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize
|
source = source.asUGenInput;
|
||||||
|
features = features.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufPitch: Invalid source buffer".throw};
|
||||||
|
features.isNil.if {"FluidBufPitch: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [features]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, features, padding, algorithm, minFreq, maxFreq, unit, windowSize, hopSize, fftSize, maxFFTSize, 1], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufPitchTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,47 @@
|
|||||||
|
FluidBufSTFT : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, magnitude, phase, resynth, inverse = 0,windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
// source = source.asUGenInput;
|
||||||
|
|
||||||
|
// source.isNil.if {"FluidBufScale: Invalid source buffer".throw};
|
||||||
|
source = source ? -1;
|
||||||
|
magnitude = magnitude ? -1;
|
||||||
|
phase = phase ? -1;
|
||||||
|
resynth = resynth ? - 1;
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufSTFTTrigger, -1, source, startFrame, numFrames, startChan, magnitude, phase, resynth, inverse, padding, windowSize, hopSize, fftSize, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, magnitude, phase, resynth, inverse = 0, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
// source = source.asUGenInput;
|
||||||
|
|
||||||
|
// source.isNil.if {"FluidBufSTFT: Invalid source buffer".throw};
|
||||||
|
source = source ? -1;
|
||||||
|
magnitude = magnitude ? -1;
|
||||||
|
phase = phase ? -1;
|
||||||
|
resynth = resynth ? - 1;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [magnitude,phase,resynth].select{|b| b != -1}
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, magnitude, phase, resynth, inverse, padding, windowSize, hopSize, fftSize, 0], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, magnitude, phase, resynth, inverse = 0, windowSize = 1024, hopSize = -1, fftSize = -1, padding = 1,freeWhenDone = true, action|
|
||||||
|
|
||||||
|
// source = source.asUGenInput;
|
||||||
|
source = source ? -1;
|
||||||
|
magnitude = magnitude ? -1;
|
||||||
|
phase = phase ? -1;
|
||||||
|
resynth = resynth ? - 1;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [magnitude,phase,resynth].select{|b| b != -1}
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, magnitude, phase, resynth, inverse, padding, windowSize, hopSize, fftSize, 1], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,44 @@
|
|||||||
|
FluidBufScale : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, inputLow = 0, inputHigh = 1, outputLow = 0, outputHigh = 1, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufScale: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufScale: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufScaleTrigger, -1, source, startFrame, numFrames, startChan, numChans, destination, inputLow, inputHigh, outputLow, outputHigh, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, inputLow = 0, inputHigh = 1, outputLow = 0, outputHigh = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufScale: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufScale: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, inputLow, inputHigh, outputLow, outputHigh, 0], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, inputLow = 0, inputHigh = 1, outputLow = 0, outputHigh = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufScale: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufScale: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, inputLow, inputHigh, outputLow, outputHigh, 1], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufScaleTrigger : FluidProxyUgen {}
|
||||||
@ -0,0 +1,62 @@
|
|||||||
|
FluidBufSelect : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, destination, indices=#[-1], channels=#[-1], trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
var params;
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
indices = indices.asArray;
|
||||||
|
channels = channels.asArray;
|
||||||
|
|
||||||
|
indices = [indices.size] ++ indices;
|
||||||
|
channels = [channels.size] ++ channels;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelect: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelect: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
params = indices ++ channels ++ [trig, blocking]
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufSelectTrigger,-1, source, destination, *params);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
*process { |server, source, destination, indices=#[-1], channels=#[-1], freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelect: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelect: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
indices = indices.asArray;
|
||||||
|
channels = channels.asArray;
|
||||||
|
|
||||||
|
indices = [indices.size] ++ indices;
|
||||||
|
channels = [channels.size] ++ channels;
|
||||||
|
|
||||||
|
^this.new(server, nil, [destination]).processList([source, destination]++ indices ++ channels ++ [1], freeWhenDone, action);//NB always blocking
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, destination, indices=#[-1], channels=#[-1], freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelect: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelect: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
indices = indices.asArray;
|
||||||
|
channels = channels.asArray;
|
||||||
|
|
||||||
|
indices = [indices.size] ++ indices;
|
||||||
|
channels = [channels.size] ++ channels;
|
||||||
|
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList([source, destination]++ indices ++ channels ++ [1], freeWhenDone, action);//NB always blocking
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufSelectTrigger : FluidProxyUgen {}
|
||||||
@ -0,0 +1,44 @@
|
|||||||
|
FluidBufSelectEvery : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, frameHop = 1, channelHop = 1, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelectEvery: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelectEvery: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufSelectEveryTrigger, -1, source, startFrame, numFrames, startChan, numChans, destination, frameHop, channelHop, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, frameHop = 1, channelHop = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelectEvery: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelectEvery: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, frameHop, channelHop, 0], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, frameHop = 1, channelHop = 1, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSelectEvery: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufSelectEvery: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, frameHop, channelHop, 1], freeWhenDone, action
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufSelectEveryTrigger : FluidProxyUgen {}
|
||||||
@ -1,39 +1,51 @@
|
|||||||
FluidBufSines : UGen{
|
FluidBufSines : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0, blocking = 0|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, trig = 1, blocking = 0|
|
||||||
|
|
||||||
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source = source.asUGenInput;
|
||||||
sines = sines.asUGenInput;
|
sines = sines !? {sines.asUGenInput} ?? {-1};
|
||||||
residual = residual.asUGenInput;
|
residual = residual !? {residual.asUGenInput} ?? {-1};
|
||||||
|
|
||||||
source.isNil.if {"FluidBufSines: Invalid source buffer".throw};
|
source.isNil.if {"FluidBufSines: Invalid source buffer".throw};
|
||||||
|
|
||||||
//NB For wrapped versions of NRT classes, we set the params for maxima to
|
^FluidProxyUgen.multiNew(\FluidBufSinesTrigger, -1, source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize, maxFFTSize, trig, blocking);
|
||||||
//whatever has been passed in language-side (e.g maxFFTSize still exists as a parameter for the server plugin, but makes less sense here: it just needs to be set to a legal value)
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize, maxFFTSize, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, doneAction = 0|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize, doneAction);
|
|
||||||
}
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
sines = sines !? {sines.asUGenInput} ?? {-1};
|
||||||
|
residual = residual !? {residual.asUGenInput} ?? {-1};
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSines: Invalid source buffer".throw};
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
^this.new(
|
||||||
^FluidNRTProcess.new(
|
server, nil, [sines, residual].select{|x| x!= -1}
|
||||||
server, this, action, [sines, residual].select{|x| x!= -1}
|
).processList(
|
||||||
).process(
|
[source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize,maxFFTSize,0],freeWhenDone = true,action
|
||||||
source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, sines = -1, residual = -1, bandwidth = 76, detectionThreshold = -96, birthLowThreshold = -24, birthHighThreshold = -60, minTrackLen = 15, trackingMethod = 0, trackMagRange = 15, trackFreqRange = 50, trackProb = 0.5, windowSize = 1024, hopSize = -1, fftSize = -1, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [sines, residual].select{|x| x!= -1}, blocking: 1
|
var maxFFTSize = if (fftSize == -1) {windowSize.nextPowerOfTwo} {fftSize};
|
||||||
).process(
|
|
||||||
source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize
|
source = source.asUGenInput;
|
||||||
|
sines = sines !? {sines.asUGenInput} ?? {-1};
|
||||||
|
residual = residual !? {residual.asUGenInput} ?? {-1};
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufSines: Invalid source buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [sines, residual].select{|x| x!= -1}
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, sines, residual, bandwidth, detectionThreshold,birthLowThreshold, birthHighThreshold, minTrackLen, trackingMethod, trackMagRange, trackFreqRange, trackProb, windowSize, hopSize, fftSize,maxFFTSize,1],freeWhenDone,action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
FluidBufSinesTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,34 +1,52 @@
|
|||||||
FluidBufStats : UGen{
|
FluidBufStats : FluidBufProcessor {
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, doneAction=0, blocking = 0|
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, outliersCutoff = -1, weights, trig = 1, blocking = 0|
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source = source.asUGenInput;
|
||||||
stats = stats.asUGenInput;
|
stats = stats.asUGenInput;
|
||||||
|
weights = weights.asUGenInput;
|
||||||
source.isNil.if {"FluidBufStats: Invalid source buffer".throw};
|
|
||||||
stats.isNil.if {"FluidBufStats: Invalid stats buffer".throw};
|
source.isNil.if {"FluidBufStats: Invalid source buffer".throw};
|
||||||
|
stats.isNil.if {"FluidBufStats: Invalid stats buffer".throw};
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, stats, numDerivs, low, middle, high,doneAction, blocking);
|
weights = weights ? -1;
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufStatsTrigger, -1, source, startFrame, numFrames, startChan, numChans, stats, numDerivs, low, middle, high, outliersCutoff, weights, trig, blocking);
|
||||||
}
|
}
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, doneAction=0|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, outliersCutoff = -1, weights, freeWhenDone = true, action|
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, stats, numDerivs, low, middle, high,doneAction);
|
|
||||||
}
|
source = source.asUGenInput;
|
||||||
|
stats = stats.asUGenInput;
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, action|
|
weights = weights.asUGenInput;
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [stats]
|
source.isNil.if {"FluidBufStats: Invalid source buffer".throw};
|
||||||
).process(
|
stats.isNil.if {"FluidBufStats: Invalid stats buffer".throw};
|
||||||
source, startFrame, numFrames, startChan, numChans, stats,numDerivs, low, middle, high
|
weights = weights ? -1;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [stats]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, stats,numDerivs, low, middle, high, outliersCutoff, weights, 0], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, stats, numDerivs = 0, low = 0, middle = 50, high = 100, outliersCutoff = -1, weights, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [stats], blocking: 1
|
source = source.asUGenInput;
|
||||||
).process(
|
stats = stats.asUGenInput;
|
||||||
source, startFrame, numFrames, startChan, numChans, stats,numDerivs, low, middle, high
|
weights = weights.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufStats: Invalid source buffer".throw};
|
||||||
|
stats.isNil.if {"FluidBufStats: Invalid stats buffer".throw};
|
||||||
|
weights = weights ? -1;
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [stats]
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, stats,numDerivs, low, middle, high, outliersCutoff, weights, 1], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
FluidBufStatsTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,29 +1,34 @@
|
|||||||
FluidBufThreadDemo : UGen{
|
FluidBufThreadDemo : FluidBufProcessor{
|
||||||
|
|
||||||
*new1 {|rate, result, time, doneAction = 0, blocking = 0 |
|
*kr {|result, time, trig = 1, blocking = 0|
|
||||||
result = result.asUGenInput;
|
|
||||||
result.isNil.if {this.class.name+": Invalid output buffer".throw};
|
|
||||||
^super.new1(rate, result, time, doneAction, blocking);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
result = result.asUGenInput;
|
||||||
|
result.isNil.if {this.class.name+": Invalid output buffer".throw};
|
||||||
|
|
||||||
*kr {|result, time, doneAction = 0|
|
^FluidProxyUgen.kr(\FluidBufThreadDemoTrigger, -1, result, time, trig, blocking);
|
||||||
^this.new1(\control, result, time, doneAction);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*process { |server, result, time = 1000, action|
|
*process { |server, result, time = 1000, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [result]
|
|
||||||
).process(
|
result ?? {this.class.name+": Invalid output buffer".throw};
|
||||||
result, time
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [result]
|
||||||
|
).processList(
|
||||||
|
[result.asUGenInput, time, 0], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, result, time = 1000, action|
|
*processBlocking { |server, result, time = 1000, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action, [result], blocking: 1
|
result ?? {this.class.name+": Invalid output buffer".throw};
|
||||||
).process(
|
|
||||||
result, time
|
^this.new(
|
||||||
|
server, nil, [result]
|
||||||
|
).processList(
|
||||||
|
[result.asUGenInput, time, 1], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufThreadDemoTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,46 @@
|
|||||||
|
FluidBufThresh : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, threshold = 0, trig = 1, blocking = 1|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufThresh: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufThresh: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidBufThreshTrigger, -1, source, startFrame, numFrames, startChan, numChans, destination, threshold, trig, blocking);
|
||||||
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, threshold = 0, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufThresh: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufThresh: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination],
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, threshold, 0], freeWhenDone, action
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, destination, threshold = 0, freeWhenDone = true, action|
|
||||||
|
|
||||||
|
source = source.asUGenInput;
|
||||||
|
destination = destination.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufThresh: Invalid source buffer".throw};
|
||||||
|
destination.isNil.if {"FluidBufThresh: Invalid destination buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil, [destination],
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, destination, threshold, 1], freeWhenDone, action
|
||||||
|
);
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FluidBufThreshTrigger : FluidProxyUgen {}
|
||||||
@ -1,30 +1,44 @@
|
|||||||
FluidBufTransientSlice : UGen{
|
FluidBufTransientSlice : FluidBufProcessor {
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, doneAction = 0, blocking = 0|
|
|
||||||
|
|
||||||
source = source.asUGenInput;
|
*objectClassName{^\FluidBufTrSlice}
|
||||||
indices = indices.asUGenInput;
|
|
||||||
|
|
||||||
source.isNil.if {"FluidBufNoveltySlice: Invalid source buffer".throw};
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, trig = 1, blocking = 0|
|
||||||
indices.isNil.if {"FluidBufNoveltySlice: Invalid features buffer".throw};
|
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength, doneAction, blocking);
|
source = source.asUGenInput;
|
||||||
}
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNoveltySlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufNoveltySlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, doneAction = 0|
|
^FluidProxyUgen.kr(this.objectClassName++\Trigger, -1, source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength, trig, blocking);
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength, doneAction);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, action|
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action,[indices]
|
source = source.asUGenInput;
|
||||||
).process(source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNoveltySlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufNoveltySlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil,[indices]
|
||||||
|
).processList([source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength,0], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, indices, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, minSliceLength = 1000, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action,[indices], blocking: 1
|
source = source.asUGenInput;
|
||||||
).process(source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength
|
indices = indices.asUGenInput;
|
||||||
|
|
||||||
|
source.isNil.if {"FluidBufNoveltySlice: Invalid source buffer".throw};
|
||||||
|
indices.isNil.if {"FluidBufNoveltySlice: Invalid features buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil,[indices]
|
||||||
|
).processList([source, startFrame, numFrames, startChan, numChans, indices, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, minSliceLength,1], freeWhenDone, action
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufTransientSliceTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -1,36 +1,44 @@
|
|||||||
FluidBufTransients : UGen {
|
FluidBufTransients : FluidBufProcessor {
|
||||||
|
|
||||||
|
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, trig = 1, blocking = 0|
|
||||||
|
|
||||||
*new1 { |rate, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, doneAction = 0, blocking = 0 |
|
source = source.asUGenInput;
|
||||||
|
transients = transients ? -1;
|
||||||
|
residual = residual ? -1;
|
||||||
|
|
||||||
source = source.asUGenInput;
|
source.isNil.if {"FluidBufTransients: Invalid source buffer".throw};
|
||||||
transients = transients.asUGenInput;
|
|
||||||
residual = residual.asUGenInput;
|
|
||||||
|
|
||||||
source.isNil.if {"FluidBufTransients: Invalid source buffer".throw};
|
^FluidProxyUgen.kr(\FluidBufTransientsTrigger, -1, source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, trig, blocking);
|
||||||
|
|
||||||
^super.new1(rate, source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, doneAction, blocking);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, freeWhenDone = true, action|
|
||||||
|
|
||||||
*kr { |source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, doneAction = 0|
|
source = source.asUGenInput;
|
||||||
^this.multiNew(\control, source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength, doneAction);
|
transients = transients ? -1;
|
||||||
|
residual = residual ? -1;
|
||||||
|
|
||||||
}
|
source.isNil.if {"FluidBufTransients: Invalid source buffer".throw};
|
||||||
|
|
||||||
*process { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, action|
|
^this.new(
|
||||||
^FluidNRTProcess.new(
|
server, nil,[transients, residual].select{|x| x!= -1}
|
||||||
server, this, action,[transients, residual].select{|x| x!= -1}
|
).processList(
|
||||||
).process(
|
[source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength,0],freeWhenDone,action
|
||||||
source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, action|
|
*processBlocking { |server, source, startFrame = 0, numFrames = -1, startChan = 0, numChans = -1, transients = -1, residual = -1, order = 20, blockSize = 256, padSize = 128, skew = 0, threshFwd = 2, threshBack = 1.1, windowSize = 14, clumpLength = 25, freeWhenDone = true, action|
|
||||||
^FluidNRTProcess.new(
|
|
||||||
server, this, action,[transients, residual].select{|x| x!= -1}, blocking: 1
|
source = source.asUGenInput;
|
||||||
).process(
|
transients = transients ? -1;
|
||||||
source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength
|
residual = residual ? -1;
|
||||||
);
|
|
||||||
}
|
source.isNil.if {"FluidBufTransients: Invalid source buffer".throw};
|
||||||
|
|
||||||
|
^this.new(
|
||||||
|
server, nil,[transients, residual].select{|x| x!= -1}
|
||||||
|
).processList(
|
||||||
|
[source, startFrame, numFrames, startChan, numChans, transients, residual, order, blockSize, padSize, skew, threshFwd, threshBack, windowSize, clumpLength,1],freeWhenDone = true,action
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
FluidBufTransientsTrigger : FluidProxyUgen {}
|
||||||
|
|||||||
@ -0,0 +1,165 @@
|
|||||||
|
FluidLoadFolder {
|
||||||
|
var path, labelFunc,channelFunc;
|
||||||
|
var < files;
|
||||||
|
var < index;
|
||||||
|
var < buffer;
|
||||||
|
|
||||||
|
*new{ |path, labelFunc, channelFunc |
|
||||||
|
^super.newCopyArgs(path, labelFunc,channelFunc);
|
||||||
|
}
|
||||||
|
|
||||||
|
play { |server, action|
|
||||||
|
var sizes,channels,maxChan, startEnd,counter;
|
||||||
|
server ?? {server = Server.default};
|
||||||
|
files = SoundFile.collect(path +/+ '*');
|
||||||
|
sizes = files.collect{|f|f.numFrames()};
|
||||||
|
channels = files.collect{|f| f.numChannels()};
|
||||||
|
startEnd = sizes.inject([0],{|a,b| a ++ (b + a[a.size - 1])}).slide(2).clump(2);
|
||||||
|
maxChan = channels[channels.maxIndex];
|
||||||
|
counter = 0;
|
||||||
|
index = IdentityDictionary();
|
||||||
|
forkIfNeeded{
|
||||||
|
buffer = Buffer.alloc(server,sizes.reduce('+'),maxChan);
|
||||||
|
server.sync;
|
||||||
|
buffer.updateInfo;
|
||||||
|
buffer.query;
|
||||||
|
server.sync;
|
||||||
|
this.files.do{|f,i|
|
||||||
|
var channelMap,label,entry;
|
||||||
|
OSCFunc({
|
||||||
|
if(labelFunc.notNil)
|
||||||
|
{ label = labelFunc.value(path,i) }
|
||||||
|
{ label = (f.path.basename).asSymbol };
|
||||||
|
entry = IdentityDictionary();
|
||||||
|
entry.add(\bounds->startEnd[i]);
|
||||||
|
entry.add(\numchans->f.numChannels);
|
||||||
|
entry.add(\sr->f.sampleRate);
|
||||||
|
entry.add(\path->f.path);
|
||||||
|
index.add(label->entry);
|
||||||
|
counter = counter + 1;
|
||||||
|
if(counter == (files.size)) {action !? action.value(index)};
|
||||||
|
},"/done",server.addr,argTemplate:["/b_readChannel"]).oneShot;
|
||||||
|
if(channelFunc.notNil)
|
||||||
|
{ channelMap = channelFunc.value(channels[i],maxChan,i) }
|
||||||
|
{ channelMap = Array.series(channels[i]) ++ -1.dup(maxChan - channels[i])}; //using -1 as a silence channel ID to fill the blanks (see SC_BufReadCommand::CopyChannels)
|
||||||
|
buffer.readChannel(f.path,bufStartFrame:startEnd[i][0], channels:channelMap);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
FluidSliceCorpus {
|
||||||
|
var < sliceFunc, labelFunc;
|
||||||
|
var < index;
|
||||||
|
|
||||||
|
*new { |sliceFunc, labelFunc|
|
||||||
|
^super.newCopyArgs(sliceFunc,labelFunc);
|
||||||
|
}
|
||||||
|
|
||||||
|
play{ |server,sourceBuffer,bufIdx, action, tasks = 4|
|
||||||
|
var counter, tmpIndices,perf,jobs,total,uid, completed, pointstotal;
|
||||||
|
uid = UniqueID.next;
|
||||||
|
sourceBuffer ?? {"No buffer to slice".error; ^nil};
|
||||||
|
bufIdx ?? {"No slice point dictionary passed".error;^nil};
|
||||||
|
server ?? {server = Server.default};
|
||||||
|
index = IdentityDictionary();
|
||||||
|
counter = 0;
|
||||||
|
completed = 0;
|
||||||
|
jobs = List.newFrom(bufIdx.keys);
|
||||||
|
total = jobs.size;
|
||||||
|
pointstotal = 0;
|
||||||
|
perf = { |tmpIndices|
|
||||||
|
var idx,v,k = jobs.pop;
|
||||||
|
v = bufIdx[k];
|
||||||
|
counter = counter + 1;
|
||||||
|
idx = counter;
|
||||||
|
("Slicing" + counter ++ "/" ++ total).postln;
|
||||||
|
OSCFunc({
|
||||||
|
tmpIndices.loadToFloatArray(action:{ |a|
|
||||||
|
var sliceindex = 1;
|
||||||
|
completed = completed + 1;
|
||||||
|
("FluidSliceCorpus:" + ( completed.asString ++ "/" ++ total)).postln;
|
||||||
|
if(a[0] != -1){
|
||||||
|
var rawPoints = Array.newFrom(a).asInteger;
|
||||||
|
if(rawPoints[0] != [v[\bounds][0]]){rawPoints = [v[\bounds][0]] ++ rawPoints};
|
||||||
|
if(rawPoints.last != [v[\bounds][1]]){rawPoints=rawPoints ++ [v[\bounds][1]]};
|
||||||
|
|
||||||
|
rawPoints.doAdjacentPairs{|a,b|
|
||||||
|
var dict;
|
||||||
|
if ((b - a) >= 1){
|
||||||
|
dict = IdentityDictionary();
|
||||||
|
dict.putAll(v);
|
||||||
|
dict[\bounds] = [a,b];
|
||||||
|
index.add(((k ++ "-" ++sliceindex).asSymbol)->dict);
|
||||||
|
sliceindex = sliceindex + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}{
|
||||||
|
var dict = IdentityDictionary();
|
||||||
|
dict.putAll(v);
|
||||||
|
index.add((k ++ "-1").asSymbol->dict);
|
||||||
|
};
|
||||||
|
if(jobs.size > 0){perf.value(tmpIndices)}{ tmpIndices.free };
|
||||||
|
if(completed == total) {action !? action.value(index)};
|
||||||
|
})
|
||||||
|
},'/doneslice' ++ uid ++ counter,server.addr).oneShot;
|
||||||
|
{
|
||||||
|
var numframes,onsets;
|
||||||
|
numframes = v[\bounds].reverse.reduce('-');
|
||||||
|
onsets = sliceFunc.value(sourceBuffer, v[\bounds][0],numframes,tmpIndices);
|
||||||
|
SendReply.kr(Done.kr(onsets),'/doneslice' ++ uid ++ idx);
|
||||||
|
FreeSelfWhenDone.kr(onsets);
|
||||||
|
}.play;
|
||||||
|
};
|
||||||
|
tasks ?? {tasks = 4};
|
||||||
|
tasks.asInteger.min(jobs.size).do{perf.value(Buffer.new)};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
FluidProcessSlices{
|
||||||
|
var < featureFunc;
|
||||||
|
|
||||||
|
*new { |featureFunc|
|
||||||
|
^super.newCopyArgs(featureFunc);
|
||||||
|
}
|
||||||
|
|
||||||
|
play{ |server, sourceBuffer, bufIdx, action, tasks = 4|
|
||||||
|
var counter,perf,jobs,total,uid, completed;
|
||||||
|
|
||||||
|
sourceBuffer ?? {"No buffer to slice".error; ^nil};
|
||||||
|
bufIdx ?? {"No slice point dictionary passed".error;^nil};
|
||||||
|
server ?? {server = Server.default};
|
||||||
|
|
||||||
|
uid = UniqueID.next;
|
||||||
|
jobs = List.newFrom(bufIdx.keys);
|
||||||
|
total = jobs.size;
|
||||||
|
counter = 0;
|
||||||
|
completed = 0;
|
||||||
|
perf = {|jobID|
|
||||||
|
var idx,v, k = jobs.pop;
|
||||||
|
v = bufIdx[k];
|
||||||
|
counter = counter + 1;
|
||||||
|
("Processing" + counter ++ "/" ++ total).postln;
|
||||||
|
idx = counter;
|
||||||
|
v[\index] = counter;
|
||||||
|
v[\voice] = jobID;
|
||||||
|
OSCFunc({
|
||||||
|
completed = completed + 1;
|
||||||
|
("FluidProcessSlices:" + (completed.asString ++ "/" ++ total)).postln;
|
||||||
|
if(jobs.size > 0){perf.value(jobID)};
|
||||||
|
if(completed == total){action !? action.value(v);};
|
||||||
|
},"/doneFeature" ++ uid ++ counter,server.addr).oneShot;
|
||||||
|
|
||||||
|
{
|
||||||
|
var numframes,feature;
|
||||||
|
numframes = v[\bounds].reverse.reduce('-');
|
||||||
|
feature = featureFunc.value(sourceBuffer, v[\bounds][0], numframes, k->v);
|
||||||
|
SendReply.kr(Done.kr(feature),'/doneFeature' ++ uid ++ idx);
|
||||||
|
FreeSelfWhenDone.kr(feature);
|
||||||
|
}.play(server);
|
||||||
|
};
|
||||||
|
tasks ?? {tasks = 4};
|
||||||
|
tasks.asInteger.min(jobs.size).do{|jobIDs|perf.value(jobIDs)};
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,105 @@
|
|||||||
|
|
||||||
|
FluidDataSet : FluidDataObject
|
||||||
|
{
|
||||||
|
*new{|server| ^super.new(server) }
|
||||||
|
|
||||||
|
addPointMsg{|label,buffer|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\addPoint,id,label.asSymbol,buffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
addPoint{|label, buffer, action|
|
||||||
|
actions[\addPoint] = [nil,action];
|
||||||
|
this.prSendMsg(this.addPointMsg(label,buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
getPointMsg{|label,buffer|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\getPoint,id,label.asSymbol,buffer,["/b_query",buffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
getPoint{|label, buffer, action|
|
||||||
|
actions[\getPoint] = [nil,action];
|
||||||
|
this.prSendMsg(this.getPointMsg(label,buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
updatePointMsg{|label,buffer|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\updatePoint,id,label.asSymbol,buffer,["/b_query",buffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
updatePoint{|label, buffer, action|
|
||||||
|
actions[\updatePoint] = [nil,action];
|
||||||
|
this.prSendMsg(this.updatePointMsg(label,buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
deletePointMsg{|label| ^this.prMakeMsg(\deletePoint,id,label.asSymbol);}
|
||||||
|
|
||||||
|
deletePoint{|label, buffer, action|
|
||||||
|
actions[\deletePoint] = [nil,action];
|
||||||
|
this.prSendMsg(this.deletePointMsg(label));
|
||||||
|
}
|
||||||
|
|
||||||
|
setPointMsg{|label,buffer|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\setPoint,id,label.asSymbol,buffer,["/b_query",buffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
setPoint{|label, buffer, action|
|
||||||
|
actions[\setPoint] = [nil,action];
|
||||||
|
this.prSendMsg(this.setPointMsg(label,buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
clearMsg { ^this.prMakeMsg(\clear,id); }
|
||||||
|
|
||||||
|
clear { |action|
|
||||||
|
actions[\clear] = [nil,action];
|
||||||
|
this.prSendMsg(this.clearMsg);
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeMsg{|sourceDataSet, overwrite = 0|
|
||||||
|
^this.prMakeMsg(\merge,id,sourceDataSet.asUGenInput,overwrite);
|
||||||
|
}
|
||||||
|
|
||||||
|
merge{|sourceDataSet, overwrite = 0, action|
|
||||||
|
actions[\merge] = [nil,action];
|
||||||
|
this.prSendMsg(this.mergeMsg(sourceDataSet,overwrite));
|
||||||
|
}
|
||||||
|
|
||||||
|
printMsg { ^this.prMakeMsg(\print,id); }
|
||||||
|
|
||||||
|
print { |action=(postResponse)|
|
||||||
|
actions[\print] = [string(FluidMessageResponse,_,_),action];
|
||||||
|
this.prSendMsg(this.printMsg);
|
||||||
|
}
|
||||||
|
|
||||||
|
toBufferMsg{|buffer, transpose = 0, labelSet|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\toBuffer, id, buffer, transpose, labelSet.asUGenInput,["/b_query",buffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
toBuffer{|buffer, transpose = 0, labelSet, action|
|
||||||
|
actions[\toBuffer] = [nil,action];
|
||||||
|
this.prSendMsg(this.toBufferMsg(buffer, transpose, labelSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
fromBufferMsg{|buffer, transpose = 0, labelSet|
|
||||||
|
buffer = this.prEncodeBuffer(buffer);
|
||||||
|
^this.prMakeMsg(\fromBuffer, id, buffer, transpose, labelSet.asUGenInput,["/b_query",buffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
fromBuffer{|buffer, transpose = 0, labelSet, action|
|
||||||
|
actions[\fromBuffer] = [nil,action];
|
||||||
|
this.prSendMsg(this.fromBufferMsg(buffer, transpose, labelSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
getIdsMsg{|labelSet|
|
||||||
|
^this.prMakeMsg(\getIds, id, labelSet.asUGenInput);
|
||||||
|
}
|
||||||
|
|
||||||
|
getIds{|labelSet, action|
|
||||||
|
actions[\getIds] = [nil,action];
|
||||||
|
this.prSendMsg(this.getIdsMsg(labelSet));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@ -0,0 +1,84 @@
|
|||||||
|
|
||||||
|
FluidDataSetQuery : FluidDataObject {
|
||||||
|
|
||||||
|
addColumnMsg { |column|
|
||||||
|
^this.prMakeMsg(\addColumn,id,column);
|
||||||
|
}
|
||||||
|
|
||||||
|
addColumn{|column, action|
|
||||||
|
actions[\addColumn] = [nil,action];
|
||||||
|
this.prSendMsg(this.addColumnMsg(column));
|
||||||
|
}
|
||||||
|
|
||||||
|
addRangeMsg{|start,count|
|
||||||
|
^this.prMakeMsg(\addRange,id,start,count);
|
||||||
|
}
|
||||||
|
|
||||||
|
addRange{|start, count, action|
|
||||||
|
actions[\addRange] = [nil, action];
|
||||||
|
this.prSendMsg(this.addRangeMsg(start, count));
|
||||||
|
}
|
||||||
|
|
||||||
|
filterMsg{|column, condition, value, action|
|
||||||
|
^this.prMakeMsg(\filter,id,column,condition.asSymbol,value);
|
||||||
|
}
|
||||||
|
|
||||||
|
filter{|column, condition, value, action|
|
||||||
|
actions[\filter] = [nil, action];
|
||||||
|
this.prSendMsg(this.filterMsg(column, condition, value));
|
||||||
|
}
|
||||||
|
|
||||||
|
andMsg{ |column, condition, value|
|
||||||
|
^this.prMakeMsg(\and,id,column, condition.asSymbol, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
and{|column, condition, value, action|
|
||||||
|
actions[\and] = [nil, action];
|
||||||
|
this.prSendMsg(this.andMsg(column,condition,value));
|
||||||
|
}
|
||||||
|
|
||||||
|
orMsg{|column, condition, value|
|
||||||
|
^this.prMakeMsg(\or,id,column, condition.asSymbol, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
or{|column, condition, value, action|
|
||||||
|
actions[\or] = [nil,action];
|
||||||
|
this.prSendMsg(this.orMsg(column, condition, value));
|
||||||
|
}
|
||||||
|
|
||||||
|
clearMsg{
|
||||||
|
^this.prMakeMsg(\clear,id);
|
||||||
|
}
|
||||||
|
|
||||||
|
clear{|action|
|
||||||
|
actions[\clear] = [nil, action];
|
||||||
|
this.prSendMsg(this.clearMsg);
|
||||||
|
}
|
||||||
|
|
||||||
|
limitMsg{|rows|
|
||||||
|
^this.prMakeMsg(\limit,id,rows);
|
||||||
|
}
|
||||||
|
|
||||||
|
limit{|rows, action|
|
||||||
|
actions[\limit] = [nil,action];
|
||||||
|
this.prSendMsg(this.limitMsg(rows));
|
||||||
|
}
|
||||||
|
|
||||||
|
transformMsg{|sourceDataSet, destDataSet|
|
||||||
|
^this.prMakeMsg(\transform,id,sourceDataSet.id,destDataSet.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
transform{|sourceDataSet, destDataSet, action|
|
||||||
|
actions[\transform] = [nil,action];
|
||||||
|
this.prSendMsg(this.transformMsg(sourceDataSet,destDataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
transformJoinMsg{|source1DataSet, source2DataSet, destDataSet|
|
||||||
|
^this.prMakeMsg(\transformJoin,id,source1DataSet.id, source2DataSet.id, destDataSet.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
transformJoin{|source1DataSet, source2DataSet, destDataSet, action|
|
||||||
|
actions[\transformJoin] = [nil,action];
|
||||||
|
this.prSendMsg(this.transformJoinMsg(source1DataSet, source2DataSet, destDataSet));
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -0,0 +1,15 @@
|
|||||||
|
FluidDataSetWr : FluidBufProcessor {
|
||||||
|
*kr { |dataset,idPrefix = "", idNumber = 0,buf, trig=1, blocking = 1|
|
||||||
|
var args;
|
||||||
|
buf ?? {(this.class.name ++ ": No input buffer provided").error};
|
||||||
|
|
||||||
|
idNumber = idNumber !? {[2,1,idNumber.asInteger.asUGenInput]} ?? {[2,0,0]};
|
||||||
|
idPrefix = idPrefix !? {[idPrefix.asString.size] ++ idPrefix.asString.ascii} ?? {0};
|
||||||
|
|
||||||
|
args = [-1] ++ dataset.asUGenInput ++idPrefix ++ idNumber ++ buf.asUGenInput ++ trig ++ blocking;
|
||||||
|
|
||||||
|
^FluidProxyUgen.kr(\FluidDataSetWrTrigger,*args);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
FluidDataSetWrTrigger : FluidProxyUgen {}
|
||||||
@ -0,0 +1,62 @@
|
|||||||
|
FluidKDTree : FluidRealTimeModel
|
||||||
|
{
|
||||||
|
|
||||||
|
var neighbours,radius,lookup;
|
||||||
|
|
||||||
|
*new{ |server, numNeighbours = 1, radius = 0, lookupDataSet|
|
||||||
|
^super.new(server,[numNeighbours,radius,lookupDataSet ? -1])
|
||||||
|
.numNeighbours_(numNeighbours)
|
||||||
|
.radius_(radius)
|
||||||
|
.lookupDataSet_(lookupDataSet);
|
||||||
|
}
|
||||||
|
|
||||||
|
numNeighbours_{|k|neighbours = k.asInteger; }
|
||||||
|
numNeighbours{ ^neighbours; }
|
||||||
|
|
||||||
|
radius_{|r| radius = r.asUGenInput;}
|
||||||
|
radius{ ^radius; }
|
||||||
|
|
||||||
|
lookupDataSet_{|ds| lookup = ds ? -1; }
|
||||||
|
lookupDataSet{|ds| ^ (lookup ? -1) }
|
||||||
|
|
||||||
|
prGetParams{^[this.numNeighbours,this.radius,this.lookupDataSet,-1,-1];}
|
||||||
|
|
||||||
|
fitMsg{ |dataSet| ^this.prMakeMsg(\fit,this.id,dataSet.id);}
|
||||||
|
|
||||||
|
fit{|dataSet,action|
|
||||||
|
actions[\fit] = [nil,action];
|
||||||
|
this.prSendMsg(this.fitMsg(dataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
kNearestMsg{|buffer|
|
||||||
|
^this.prMakeMsg(\kNearest,id,this.prEncodeBuffer(buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
kNearest{ |buffer, action|
|
||||||
|
actions[\kNearest] = [strings(FluidMessageResponse,_,_),action];
|
||||||
|
this.prSendMsg(this.kNearestMsg(buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
kNearestDistMsg {|buffer|
|
||||||
|
^this.prMakeMsg(\kNearestDist,id,this.prEncodeBuffer(buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
kNearestDist { |buffer, action|
|
||||||
|
actions[\kNearestDist] = [numbers(FluidMessageResponse,_,nil,_),action];
|
||||||
|
this.prSendMsg(this.kNearestDistMsg(buffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
kr{|trig, inputBuffer,outputBuffer, numNeighbours = 1, lookupDataSet|
|
||||||
|
this.numNeighbours_(numNeighbours);
|
||||||
|
lookupDataSet = lookupDataSet ? -1;
|
||||||
|
this.lookupDataSet_(lookupDataSet);
|
||||||
|
|
||||||
|
^FluidKDTreeQuery.kr(K2A.ar(trig),
|
||||||
|
this, this.numNeighbours, this.radius, this.lookupDataSet.asUGenInput,
|
||||||
|
this.prEncodeBuffer(inputBuffer),
|
||||||
|
this.prEncodeBuffer(outputBuffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
FluidKDTreeQuery : FluidRTQuery {}
|
||||||
@ -0,0 +1,121 @@
|
|||||||
|
FluidKMeans : FluidRealTimeModel {
|
||||||
|
|
||||||
|
var clusters, maxiter;
|
||||||
|
|
||||||
|
*new {|server, numClusters = 4, maxIter = 100|
|
||||||
|
^super.new(server,[numClusters,maxIter])
|
||||||
|
.numClusters_(numClusters)
|
||||||
|
.maxIter_(maxIter);
|
||||||
|
}
|
||||||
|
|
||||||
|
numClusters_{|n| clusters = n.asInteger}
|
||||||
|
numClusters{ ^clusters }
|
||||||
|
|
||||||
|
maxIter_{|i| maxiter = i.asInteger}
|
||||||
|
maxIter{ ^maxiter }
|
||||||
|
|
||||||
|
prGetParams{^[this.numClusters,this.maxIter,-1,-1];}
|
||||||
|
|
||||||
|
fitMsg{ |dataSet| ^this.prMakeMsg(\fit,id,dataSet.id);}
|
||||||
|
|
||||||
|
fit{|dataSet, action|
|
||||||
|
actions[\fit] = [
|
||||||
|
numbers( FluidMessageResponse, _, this.numClusters ,_),
|
||||||
|
action
|
||||||
|
];
|
||||||
|
this.prSendMsg(this.fitMsg(dataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
fitPredictMsg{|dataSet, labelSet|
|
||||||
|
^this.prMakeMsg(\fitPredict, id, dataSet.id, labelSet.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
fitPredict{|dataSet, labelSet,action|
|
||||||
|
actions[\fitPredict] = [
|
||||||
|
numbers(FluidMessageResponse, _, this.numClusters, _),
|
||||||
|
action
|
||||||
|
];
|
||||||
|
this.prSendMsg(this.fitPredictMsg(dataSet,labelSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
predictMsg{|dataSet, labelSet|
|
||||||
|
^this.prMakeMsg(\predict, id, dataSet.id, labelSet.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
predict{ |dataSet, labelSet, action|
|
||||||
|
actions[\predict] = [
|
||||||
|
numbers(FluidMessageResponse, _, this.numClusters, _),
|
||||||
|
action
|
||||||
|
];
|
||||||
|
this.prSendMsg(this.predictMsg(dataSet,labelSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
predictPointMsg{|buffer|
|
||||||
|
^this.prMakeMsg(\predictPoint, id, this.prEncodeBuffer(buffer))
|
||||||
|
}
|
||||||
|
|
||||||
|
predictPoint { |buffer, action|
|
||||||
|
actions[\predictPoint] = [number(FluidMessageResponse,_,_),action];
|
||||||
|
this.prSendMsg(this.predictPointMsg(buffer))
|
||||||
|
}
|
||||||
|
|
||||||
|
fitTransformMsg{|srcDataSet, dstDataSet|
|
||||||
|
^this.prMakeMsg(\fitTransform, id, srcDataSet.id, dstDataSet.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
fitTransform{|srcDataSet, dstDataSet,action|
|
||||||
|
actions[\fitTransform] = [nil,action];
|
||||||
|
this.prSendMsg(this.fitTransformMsg(srcDataSet,dstDataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
transformMsg{|srcDataSet, dstDataSet|
|
||||||
|
^this.prMakeMsg(\transform, id, srcDataSet.id, dstDataSet.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
transform{ |srcDataSet, dstDataSet, action|
|
||||||
|
actions[\transform] = [nil,action];
|
||||||
|
this.prSendMsg(this.transformMsg(srcDataSet,dstDataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
transformPointMsg{ |sourceBuffer, targetBuffer|
|
||||||
|
^this.prMakeMsg(\transformPoint, id,
|
||||||
|
this.prEncodeBuffer(sourceBuffer),
|
||||||
|
this.prEncodeBuffer(targetBuffer),
|
||||||
|
["/b_query", targetBuffer.asUGenInput]);
|
||||||
|
}
|
||||||
|
|
||||||
|
transformPoint { |sourceBuffer, targetBuffer, action|
|
||||||
|
actions[\transformPoint] = [nil,{action.value(targetBuffer)}];
|
||||||
|
this.prSendMsg(this.transformPointMsg(sourceBuffer, targetBuffer));
|
||||||
|
}
|
||||||
|
|
||||||
|
getMeansMsg{|dataSet| ^this.prMakeMsg(\getMeans, id, dataSet.asUGenInput) }
|
||||||
|
|
||||||
|
getMeans{ |dataSet, action|
|
||||||
|
actions[\getMeans] = [nil, action];
|
||||||
|
this.prSendMsg(this.getMeansMsg(dataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
setMeansMsg{|dataSet| ^this.prMakeMsg(\setMeans, id, dataSet.asUGenInput) }
|
||||||
|
|
||||||
|
setMeans{ |dataSet, action|
|
||||||
|
actions[\setMeans] = [nil, action];
|
||||||
|
this.prSendMsg(this.setMeansMsg(dataSet));
|
||||||
|
}
|
||||||
|
|
||||||
|
clearMsg{ ^this.prMakeMsg(\clear, id) }
|
||||||
|
|
||||||
|
clear{ |action|
|
||||||
|
actions[\clear] = [nil, action];
|
||||||
|
this.prSendMsg(this.clearMsg);
|
||||||
|
}
|
||||||
|
|
||||||
|
kr{|trig, inputBuffer,outputBuffer|
|
||||||
|
^FluidKMeansQuery.kr(K2A.ar(trig),
|
||||||
|
this, clusters, maxiter,
|
||||||
|
this.prEncodeBuffer(inputBuffer),
|
||||||
|
this.prEncodeBuffer(outputBuffer));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
FluidKMeansQuery : FluidRTQuery {}
|
||||||
@ -0,0 +1,49 @@
|
|||||||
|
// Client for the FluCoMa k-nearest-neighbours classifier.
// Fits on a DataSet/LabelSet pair and predicts labels for data sets,
// single buffer points, or at control rate via the companion query UGen.
FluidKNNClassifier : FluidRealTimeModel {

	var <>numNeighbours, <>weight;

	*new { |server, numNeighbours = 3, weight = 1|
		^super.new(server, [numNeighbours, weight])
			.numNeighbours_(numNeighbours)
			.weight_(weight);
	}

	// Parameter vector sent to the server; trailing -1s are buffer placeholders.
	prGetParams { ^[this.numNeighbours, this.weight, -1, -1] }

	fitMsg { |dataSet, labelSet|
		^this.prMakeMsg(\fit, id, dataSet.id, labelSet.id)
	}

	// Train on the points of dataSet labelled by labelSet.
	fit { |dataSet, labelSet, action|
		var msg = this.fitMsg(dataSet, labelSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	predictMsg { |dataSet, labelSet|
		^this.prMakeMsg(\predict, id, dataSet.id, labelSet.id)
	}

	// Predict labels for every point of dataSet into labelSet.
	predict { |dataSet, labelSet, action|
		var msg = this.predictMsg(dataSet, labelSet);
		actions[\predict] = [nil, action];
		this.prSendMsg(msg);
	}

	predictPointMsg { |buffer|
		^this.prMakeMsg(\predictPoint, id, this.prEncodeBuffer(buffer))
	}

	// Predict the label of one point; the reply is parsed as a string.
	predictPoint { |buffer, action|
		var msg = this.predictPointMsg(buffer);
		actions[\predictPoint] = [string(FluidMessageResponse, _, _), action];
		this.prSendMsg(msg);
	}

	// Control-rate querying through the companion query UGen.
	kr { |trig, inputBuffer, outputBuffer|
		^FluidKNNClassifierQuery.kr(
			K2A.ar(trig),
			this, this.numNeighbours, this.weight,
			this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer));
	}
}
|
||||||
|
|
||||||
|
FluidKNNClassifierQuery : FluidRTQuery {} // server-side query UGen used by FluidKNNClassifier:kr
|
||||||
@ -0,0 +1,48 @@
|
|||||||
|
// Client for the FluCoMa k-nearest-neighbours regressor: maps points of a
// source DataSet to targets, for whole data sets, single points, or at
// control rate via the companion query UGen.
FluidKNNRegressor : FluidRealTimeModel {

	var <>numNeighbours, <>weight;

	*new { |server, numNeighbours = 3, weight = 1|
		^super.new(server, [numNeighbours, weight])
			.numNeighbours_(numNeighbours)
			.weight_(weight);
	}

	// Parameter vector sent to the server; trailing -1s are buffer placeholders.
	prGetParams { ^[this.numNeighbours, this.weight, -1, -1] }

	fitMsg { |sourceDataSet, targetDataSet|
		^this.prMakeMsg(\fit, this.id, sourceDataSet.id, targetDataSet.id)
	}

	// Train the regressor on corresponding points of the two data sets.
	fit { |sourceDataSet, targetDataSet, action|
		var msg = this.fitMsg(sourceDataSet, targetDataSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	predictMsg { |sourceDataSet, targetDataSet|
		^this.prMakeMsg(\predict, this.id, sourceDataSet.id, targetDataSet.id)
	}

	// Predict targets for every point of sourceDataSet into targetDataSet.
	predict { |sourceDataSet, targetDataSet, action|
		var msg = this.predictMsg(sourceDataSet, targetDataSet);
		actions[\predict] = [nil, action];
		this.prSendMsg(msg);
	}

	predictPointMsg { |buffer|
		^this.prMakeMsg(\predictPoint, id, this.prEncodeBuffer(buffer));
	}

	// Predict the value for one point; the reply is parsed as a number.
	predictPoint { |buffer, action|
		var msg = this.predictPointMsg(buffer);
		actions[\predictPoint] = [number(FluidMessageResponse, _, _), action];
		this.prSendMsg(msg);
	}

	// Control-rate querying through the companion query UGen.
	kr { |trig, inputBuffer, outputBuffer|
		^FluidKNNRegressorQuery.kr(
			K2A.ar(trig),
			this, this.numNeighbours, this.weight,
			this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer));
	}
}
|
||||||
|
|
||||||
|
FluidKNNRegressorQuery : FluidRTQuery {} // server-side query UGen used by FluidKNNRegressor:kr
|
||||||
@ -0,0 +1,54 @@
|
|||||||
|
// Client for a server-side label set: a mapping of identifiers to symbolic
// labels, used alongside FluidDataSet for classification tasks.
FluidLabelSet : FluidDataObject {

	*new { |server| ^super.new(server) }

	addLabelMsg { |identifier, label|
		^this.prMakeMsg(\addLabel, id, identifier.asSymbol, label.asSymbol);
	}

	// Associate label with identifier (error if identifier already present).
	addLabel { |identifier, label, action|
		var msg = this.addLabelMsg(identifier, label);
		actions[\addLabel] = [nil, action];
		this.prSendMsg(msg);
	}

	updateLabelMsg { |identifier, label|
		^this.prMakeMsg(\updateLabel, id, identifier.asSymbol, label.asSymbol);
	}

	// Replace the label stored under identifier.
	updateLabel { |identifier, label, action|
		var msg = this.updateLabelMsg(identifier, label);
		actions[\updateLabel] = [nil, action];
		this.prSendMsg(msg);
	}

	getLabelMsg { |identifier|
		^this.prMakeMsg(\getLabel, id, identifier.asSymbol);
	}

	// Retrieve the label for identifier; reply parsed as a string.
	getLabel { |identifier, action|
		var msg = this.getLabelMsg(identifier);
		actions[\getLabel] = [string(FluidMessageResponse, _, _), action];
		this.prSendMsg(msg);
	}

	// NOTE(review): the `action` argument is unused here; kept so existing
	// callers passing two arguments do not break.
	deleteLabelMsg { |identifier, action|
		^this.prMakeMsg(\deleteLabel, id, identifier.asSymbol);
	}

	// Remove the entry stored under identifier.
	deleteLabel { |identifier, action|
		var msg = this.deleteLabelMsg(identifier);
		actions[\deleteLabel] = [nil, action];
		this.prSendMsg(msg);
	}

	clearMsg { ^this.prMakeMsg(\clear, id); }

	// Remove all entries.
	clear { |action|
		actions[\clear] = [nil, action];
		this.prSendMsg(this.clearMsg);
	}

	printMsg { ^this.prMakeMsg(\print, id); }

	// Post a summary of the label set; defaults to the shared post-to-console action.
	print { |action = (postResponse)|
		actions[\print] = [string(FluidMessageResponse, _, _), action];
		this.prSendMsg(this.printMsg);
	}
}
|
||||||
@ -0,0 +1,39 @@
|
|||||||
|
// Client for the FluCoMa multidimensional-scaling dimensionality reducer.
// fitTransform projects a source DataSet into destDataSet with numDimensions
// dimensions, using one of the distance metrics below.
FluidMDS : FluidModelObject {

	// Distance metric identifiers (must match the server-side enumeration).
	classvar < manhattan = 0;
	classvar < euclidean = 1;
	classvar < sqeuclidean = 2;
	classvar < max = 3;
	classvar < min = 4;
	classvar < kl = 5;
	classvar < cosine = 6; // was 5, which duplicated `kl` and made cosine unselectable

	var <>numDimensions, <>distanceMetric;

	*new { |server, numDimensions = 2, distanceMetric = 1|
		^super.new(server, [numDimensions, distanceMetric])
			.numDimensions_(numDimensions)
			.distanceMetric_(distanceMetric);
	}

	// Parameter vector sent to the server on instantiation/update.
	prGetParams {
		^[this.numDimensions, this.distanceMetric];
	}

	fitTransformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id);
	}

	// Fit the embedding and write the projected points into destDataSet.
	// (A leftover debug `.postln` of the outgoing message was removed here.)
	fitTransform { |sourceDataSet, destDataSet, action|
		actions[\fitTransform] = [nil, action];
		this.prSendMsg(this.fitTransformMsg(sourceDataSet, destDataSet));
	}

	// Not implemented for this object on the server: deliberate no-ops.
	cols { |action| }
	read { |filename, action| }
	write { |filename, action| }
	size { |action| }
}
|
||||||
@ -0,0 +1,152 @@
|
|||||||
|
// Client for the FluCoMa multilayer-perceptron regressor. Supports batch
// fit/predict on data sets, single-point prediction between buffers, and
// control-rate querying (with selectable input/output taps into the network).
FluidMLPRegressor : FluidRealTimeModel {

	// Activation function identifiers (match the server-side enumeration).
	const <identity = 0;
	const <sigmoid = 1;
	const <relu = 2;
	const <tanh = 3;

	var <>hidden, <>activation, <>outputActivation, <>tapIn, <>tapOut, <>maxIter, <>learnRate, <>momentum, <>batchSize, <>validation;

	*new { |server, hidden = #[3, 3], activation = 2, outputActivation = 0, tapIn = 0, tapOut = -1, maxIter = 1000, learnRate = 0.0001, momentum = 0.9, batchSize = 50, validation = 0.2|
		// The server expects the hidden-layer list prefixed with its size.
		^super.new(server, [hidden.size] ++ hidden ++ [activation, outputActivation, tapIn, tapOut, maxIter, learnRate, momentum, batchSize, validation])
			.hidden_(hidden)
			.activation_(activation)
			.outputActivation_(outputActivation)
			.tapIn_(tapIn)
			.tapOut_(tapOut)
			.maxIter_(maxIter)
			.learnRate_(learnRate)
			.momentum_(momentum)
			.batchSize_(batchSize)
			.validation_(validation);
	}

	// Full parameter vector; trailing -1s are buffer placeholders.
	prGetParams {
		^[this.hidden.size] ++ this.hidden ++ [this.activation, this.outputActivation, this.tapIn, this.tapOut, this.maxIter, this.learnRate, this.momentum, this.batchSize, this.validation, -1, -1]
	}

	clearMsg { ^this.prMakeMsg(\clear, id) }

	// Reset the network weights.
	clear { |action|
		actions[\clear] = [nil, action];
		this.prSendMsg(this.clearMsg);
	}

	fitMsg { |sourceDataSet, targetDataSet|
		^this.prMakeMsg(\fit, id, sourceDataSet.id, targetDataSet.id);
	}

	// Train the network; the reply carries one number (the final loss).
	fit { |sourceDataSet, targetDataSet, action|
		actions[\fit] = [numbers(FluidMessageResponse, _, 1, _), action];
		this.prSendMsg(this.fitMsg(sourceDataSet, targetDataSet));
	}

	predictMsg { |sourceDataSet, targetDataSet|
		^this.prMakeMsg(\predict, id, sourceDataSet.id, targetDataSet.id);
	}

	// Run the network over every point of sourceDataSet into targetDataSet.
	predict { |sourceDataSet, targetDataSet, action|
		actions[\predict] = [nil, action];
		this.prSendMsg(this.predictMsg(sourceDataSet, targetDataSet));
	}

	predictPointMsg { |sourceBuffer, targetBuffer|
		// The trailing /b_query refreshes the client-side target Buffer object.
		^this.prMakeMsg(\predictPoint, id,
			this.prEncodeBuffer(sourceBuffer),
			this.prEncodeBuffer(targetBuffer),
			["/b_query", targetBuffer.asUGenInput]);
	}

	// Predict one point; the callback receives the refreshed target buffer.
	// (Removed a leftover line that built predictPointMsg a first time and
	// discarded the result before sending it.)
	predictPoint { |sourceBuffer, targetBuffer, action|
		actions[\predictPoint] = [nil, { action.value(targetBuffer) }];
		this.prSendMsg(this.predictPointMsg(sourceBuffer, targetBuffer));
	}

	// Control-rate querying; tapIn/tapOut select which network layers to
	// feed and read, and are persisted onto the client object.
	kr { |trig, inputBuffer, outputBuffer, tapIn = 0, tapOut = -1|
		var params;
		tapIn = tapIn ? this.tapIn;
		tapOut = tapOut ? this.tapOut;

		this.tapIn_(tapIn).tapOut_(tapOut);

		// Replace the two -1 placeholders with the actual I/O buffers.
		params = this.prGetParams.drop(-2) ++ [this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer)];

		^FluidMLPRegressorQuery.kr(K2A.ar(trig), this, *params);
	}
}
|
||||||
|
|
||||||
|
FluidMLPRegressorQuery : FluidRTQuery {} // server-side query UGen used by FluidMLPRegressor:kr
|
||||||
|
|
||||||
|
// Client for the FluCoMa multilayer-perceptron classifier: fit on a
// DataSet/LabelSet pair, predict labels for data sets or single buffer
// points, or query at control rate.
FluidMLPClassifier : FluidRealTimeModel {

	// Activation function identifiers (match the server-side enumeration).
	const <identity = 0;
	const <sigmoid = 1;
	const <relu = 2;
	const <tanh = 3;

	var <>hidden, <>activation, <>maxIter, <>learnRate, <>momentum, <>batchSize, <>validation;

	*new { |server, hidden = #[3, 3], activation = 2, maxIter = 1000, learnRate = 0.0001, momentum = 0.9, batchSize = 50, validation = 0.2|
		// The server expects the hidden-layer list prefixed with its size.
		^super.new(server, [hidden.size] ++ hidden ++ [activation, maxIter, learnRate, momentum, batchSize, validation])
			.hidden_(hidden)
			.activation_(activation)
			.maxIter_(maxIter)
			.learnRate_(learnRate)
			.momentum_(momentum)
			.batchSize_(batchSize)
			.validation_(validation);
	}

	// Full parameter vector; trailing -1s are buffer placeholders.
	prGetParams {
		^[this.hidden.size] ++ this.hidden ++ [this.activation, this.maxIter, this.learnRate, this.momentum, this.batchSize, this.validation, -1, -1];
	}

	clearMsg { ^this.prMakeMsg(\clear, id) }

	// Reset the network weights.
	clear { |action|
		actions[\clear] = [nil, action];
		this.prSendMsg(this.clearMsg);
	}

	fitMsg { |sourceDataSet, targetLabelSet|
		^this.prMakeMsg(\fit, id, sourceDataSet.id, targetLabelSet.id);
	}

	// Train the classifier; the reply carries one number (the final loss).
	fit { |sourceDataSet, targetLabelSet, action|
		var msg = this.fitMsg(sourceDataSet, targetLabelSet);
		actions[\fit] = [numbers(FluidMessageResponse, _, 1, _), action];
		this.prSendMsg(msg);
	}

	predictMsg { |sourceDataSet, targetLabelSet|
		^this.prMakeMsg(\predict, id, sourceDataSet.id, targetLabelSet.id);
	}

	// Predict labels for every point of sourceDataSet into targetLabelSet.
	predict { |sourceDataSet, targetLabelSet, action|
		var msg = this.predictMsg(sourceDataSet, targetLabelSet);
		actions[\predict] = [nil, action];
		this.prSendMsg(msg);
	}

	predictPointMsg { |sourceBuffer|
		^this.prMakeMsg(\predictPoint, id, this.prEncodeBuffer(sourceBuffer))
	}

	// Predict the label of one point; reply parsed as a string.
	predictPoint { |sourceBuffer, action|
		var msg = this.predictPointMsg(sourceBuffer);
		actions[\predictPoint] = [string(FluidMessageResponse, _, _), action];
		this.prSendMsg(msg);
	}

	// Control-rate querying through the companion query UGen.
	kr { |trig, inputBuffer, outputBuffer|
		var params = this.prGetParams.drop(-2) ++ [this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer)];
		^FluidMLPClassifierQuery.kr(K2A.ar(trig), this, *params);
	}
}
|
||||||
|
|
||||||
|
FluidMLPClassifierQuery : FluidRTQuery {} // server-side query UGen used by FluidMLPClassifier:kr
|
||||||
@ -0,0 +1,68 @@
|
|||||||
|
// Extension adding JSON-ish serialisation to FluidDataObject: dump/load go
// through a temporary file on disk, with a lightweight JSON <-> sclang
// Dictionary translation (parseJSON/asJSON).
+ FluidDataObject {

	// Unique temp-file path for one dump/load round trip.
	tmpJSONFilename {
		^Platform.defaultTempDir ++ "tmp_fluid_data_" ++
			Date.localtime.stamp ++ "_" ++ UniqueID.next ++ ".json";
	}

	// Write the object to a temp JSON file, parse it back into a Dictionary,
	// hand it to action, then delete the file. Default action posts it.
	dump { |action|
		var filename = this.tmpJSONFilename;
		action ?? { action = postResponse };
		this.write(filename, {
			action.value(this.parseJSON(File.readAllString(filename)));
			File.delete(filename);
		});
	}

	// Serialise dict to a temp JSON file and have the server read it in.
	load { |dict, action|
		var filename = this.tmpJSONFilename;
		File.use(filename, "wt", { |f| f.write(this.asJSON(dict)); });
		this.read(filename, {
			action.notNil.if { action.value; };
			File.delete(filename);
		});
	}

	// Recursively convert Events (from .interpret) and Arrays to Dictionaries.
	toDict { |obj|
		var converted;
		if(obj.class === Event) {
			converted = obj.as(Dictionary);
			converted.keysValuesChange { |k, v| this.toDict(v) };
			^converted;
		};
		if(obj.class === Array) {
			converted = obj.collect { |v| this.toDict(v) };
			^converted;
		};
		^obj;
	}

	// Turn a JSON string into a Dictionary by rewriting {} to () and
	// interpreting the result as sclang. Braces inside quoted strings are
	// left alone.
	parseJSON { |jsonStr|
		// Mutate a copy, not the caller's string (String assignment aliases).
		var parsed = jsonStr.copy;
		// BUG FIX: this flag was previously declared inside the per-character
		// function, so it reset to false on every character and the
		// inside-a-string tracking never worked. It must persist across the scan.
		var inString = false;
		jsonStr.do({ |char, pos|
			char.switch(
				// Toggle string state on unescaped double quotes.
				$", { (jsonStr[pos - 1] == $\\ && inString).not.if({ inString = inString.not }) },
				${, { if(inString.not) { parsed[pos] = $( } },
				$}, { if(inString.not) { parsed[pos] = $) } }
			)
		});
		^this.toDict(parsed.interpret);
	}

	// Minimal JSON serialiser for numbers, strings, symbols, dictionaries
	// and sequenceable collections. Returns nil for unsupported types.
	asJSON { |d|
		if(d.isNumber) { ^d };
		if(d.isString) { ^d.asString.asCompileString };
		if(d.isKindOf(Symbol)) { ^this.asJSON(d.asString) };
		if(d.isKindOf(Dictionary)) {
			^"{" ++ (
				d.keys.asList.collect { |k|
					k.asString.asCompileString ++ ":" + this.asJSON(d[k])
				}).join(", ") ++ "}"
		};
		if(d.isKindOf(SequenceableCollection)) {
			^"[" ++ d.collect({ |x| this.asJSON(x) }).join(", ") ++ "]";
		};
	}
}
|
||||||
@ -0,0 +1,41 @@
|
|||||||
|
// Parses heterogeneous OSC reply payloads. Each selector is a function (built
// by partial application where extra data is needed, e.g. *numbers) that
// consumes part of the argument array and returns [parsedThings, nextOffset].
FluidMessageResponse : Object {

	// Run each selector in turn over `a`, threading the offset through, and
	// concatenate what they produce. A single result is unwrapped.
	*collectArgs { |selectors, a|
		var response = [];
		var idx = 0;
		selectors.do { |selector|
			var newThings;
			# newThings, idx = selector.value(a, idx);
			response = response ++ newThings;
		};
		if(response.size == 1,
			{ ^response[0] }, { ^response })
	}

	// NOTE(review): unlike *numbers/*buffer, this returns no advanced offset
	// (idx becomes nil in collectArgs) — only safe as the last/only selector.
	*string { |a, offset|
		^[a]
	}

	// TODO add an n argument as with numbers() to make this less omnivorous
	*strings { |a, offset|
		^[a.drop(offset)];
	}

	// Consume n numbers from offset; pass n = nil to consume everything left.
	*numbers { |a, n, offset|
		n = n ? a.size - offset;
		^[a.copyRange(offset, offset + n), offset + n]
	}

	// NOTE(review): also returns no advanced offset — see *string.
	*number { |a, offset|
		^[a[offset]];
	}

	// Resolve one bufnum from the payload into a cached client-side Buffer.
	*buffer { |a, server, offset|
		server = server ? Server.default;
		^[Buffer.cachedBufferAt(server, a[offset]), offset + 1]
	}
}
|
||||||
@ -0,0 +1,120 @@
|
|||||||
|
// Exercises the language <-> server messaging round trips. Each test sends a
// /u_cmd to a running plugin instance identified by nodeID and installs a
// one-shot OSC listener for the typed reply.
// NOTE(review): the instance variable `server` is shadowed by every method's
// `server` argument and appears unused — confirm before removing.
FluidMessageTest : FluidModelObject {

	var server;

	// *kr { |trig = 1, blocking = 0|
	//
	// 	^this.multiNew('control', trig, blocking);
	// }

	// Expects four strings back on /testReturnStrings.
	testReturnStrings { |server, nodeID, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnStrings');
		OSCFunc(
			{ |msg|
				var resp = FluidMessageResponse.collectArgs(
					4.collect { string(FluidMessageResponse, _, _) }, msg.drop(3));
				if(action.notNil) { action.value(resp); };
			}, '/testReturnStrings').oneShot;
	}

	// Expects up to 100 numbers back on /testReturnNumbers.
	testReturnNumbers { |server, nodeID, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnNumbers');
		OSCFunc(
			{ |msg|
				var result = FluidMessageResponse.collectArgs(
					[numbers(FluidMessageResponse, _, 100, _)], msg.drop(3));
				if(action.notNil) { action.value(result); };
			}, '/testReturnNumbers').oneShot;
	}

	// Expects a single string back on /testReturnOneString.
	testReturnOneString { |server, nodeID, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnOneString');
		OSCFunc(
			{ |msg|
				var result = FluidMessageResponse.collectArgs(
					[string(FluidMessageResponse, _, _)], msg.drop(3));
				if(action.notNil) { action.value(result); };
			}, '/testReturnOneString').oneShot;
	}

	// Expects a single number back on /testReturnOneNumber (no parser needed).
	testReturnOneNumber { |server, nodeID, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnOneNumber');
		OSCFunc(
			{ |msg|
				var result = msg.drop(3);
				if(action.notNil) { action.value(result); };
			}, '/testReturnOneNumber').oneShot;
	}

	// Passes a bufnum to the server and expects one number back.
	testAccessBuffer { |server, nodeID, buf, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testAccessBuffer', buf.asUGenInput);
		OSCFunc(
			{ |msg|
				var result = FluidMessageResponse.collectArgs([numbers(FluidMessageResponse, _, 1, _)], msg.drop(3));
				if(action.notNil) { action.value(result); };
			}, '/testAccessBuffer').oneShot;
	}

	// Sends a string plus numeric args; only an acknowledgement comes back.
	// NOTE(review): argument `d` is accepted but never sent — confirm intent.
	testPassString { |server, nodeID, str, a, b, c, d, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testPassString', str, a, b, c);
		OSCFunc(
			{ |msg|
				if(action.notNil) { action.value; };
			}, '/testPassString').oneShot;
	}

	// Expects a bufnum back, resolved to a cached client-side Buffer.
	testReturnBuffer { |server, nodeID, b, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnBuffer', b.asUGenInput);
		OSCFunc(
			{ |msg|
				// was: var result = result = ... (duplicated assignment)
				var result = FluidMessageResponse.collectArgs([buffer(FluidMessageResponse, _, server, _)], msg.drop(3));
				if(action.notNil) { action.value(result); };
			}, '/testReturnBuffer').oneShot;
	}

	// Expects a mixed payload: one string followed by two numbers.
	testReturnHetero { |server, nodeID, action|
		server = server ? Server.default;
		server.sendMsg('/u_cmd', nodeID, this.synthIndex, 'testReturnHetero');
		OSCFunc(
			{ |msg|
				// was: var result = result = ... (duplicated assignment)
				var result = FluidMessageResponse.collectArgs([string(FluidMessageResponse, _, _), numbers(FluidMessageResponse, _, 2, _)], msg.drop(3));
				if(action.notNil) { action.value(result); };
			}, '/testReturnHetero').oneShot;
	}
}
|
||||||
@ -0,0 +1,23 @@
|
|||||||
|
// Audio-rate UGen interpolating between two sounds via their NMF
// decompositions (bases in `source`/`target`, activations buffer, interp 0-1).
FluidNMFMorph : FluidRTUGen {

	*ar { arg source = -1, target = -1, activations = -1, autoassign = 1, interp = 0, windowSize = 1024, hopSize = -1, fftSize = -1, maxFFTSize = 16384;

		// nil buffer arguments fall back to the -1 "no buffer" sentinel.
		source = source ?? { -1 };
		target = target ?? { -1 };
		activations = activations ?? { -1 };

		^this.new1('audio', source, target, activations, autoassign, interp, windowSize, hopSize, fftSize, maxFFTSize);
	}

	init { arg ...theInputs;
		inputs = theInputs;
		specialIndex = -1;
	}

	// Returning a String from checkInputs reports it as an error to the user.
	checkInputs {
		if(inputs.last.rate != 'scalar') {
			// Prefix with the UGen name so the error identifies its source
			// (previously the message began with a bare ": ").
			^(this.class.name.asString ++ ": maxFFTSize cannot be modulated.");
		};
		^this.checkValidInputs;
	}
}
|
||||||
@ -0,0 +1,70 @@
|
|||||||
|
// Client for the FluCoMa normaliser: rescales each data dimension into
// [min, max] (or inverts the mapping), for data sets, single points, or at
// control rate via the companion query UGen.
FluidNormalize : FluidRealTimeModel {

	var <>min, <>max, <>invert;

	*new { |server, min = 0, max = 1, invert = 0|
		^super.new(server, [min, max, invert])
			.min_(min).max_(max).invert_(invert);
	}

	// Parameter vector sent to the server; trailing -1s are buffer placeholders.
	prGetParams {
		^[this.min, this.max, this.invert, -1, -1];
	}

	fitMsg { |dataSet|
		^this.prMakeMsg(\fit, id, dataSet.id)
	}

	// Learn per-dimension ranges from dataSet.
	fit { |dataSet, action|
		var msg = this.fitMsg(dataSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	transformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
	}

	// Apply the fitted scaling to sourceDataSet, writing into destDataSet.
	transform { |sourceDataSet, destDataSet, action|
		var msg = this.transformMsg(sourceDataSet, destDataSet);
		actions[\transform] = [nil, action];
		this.prSendMsg(msg);
	}

	fitTransformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id)
	}

	// Fit and transform in a single server round trip.
	fitTransform { |sourceDataSet, destDataSet, action|
		var msg = this.fitTransformMsg(sourceDataSet, destDataSet);
		actions[\fitTransform] = [nil, action];
		this.prSendMsg(msg);
	}

	transformPointMsg { |sourceBuffer, destBuffer|
		// The trailing /b_query refreshes the client-side destination Buffer.
		^this.prMakeMsg(\transformPoint, id,
			this.prEncodeBuffer(sourceBuffer),
			this.prEncodeBuffer(destBuffer),
			["/b_query", destBuffer.asUGenInput]
		);
	}

	// Scale one point; the callback receives the refreshed destination buffer.
	transformPoint { |sourceBuffer, destBuffer, action|
		var msg = this.transformPointMsg(sourceBuffer, destBuffer);
		actions[\transformPoint] = [nil, { action.value(destBuffer) }];
		this.prSendMsg(msg);
	}

	// Control-rate querying; overrides (if given) are persisted on the client.
	kr { |trig, inputBuffer, outputBuffer, min, max, invert|
		min = min ? this.min;
		max = max ? this.max;
		invert = invert ? this.invert;

		this.min_(min).max_(max).invert_(invert);

		^FluidNormalizeQuery.kr(K2A.ar(trig),
			this, this.min, this.max, this.invert, this.prEncodeBuffer(inputBuffer), this.prEncodeBuffer(outputBuffer));
	}
}
|
||||||
|
|
||||||
|
FluidNormalizeQuery : FluidRTQuery {} // server-side query UGen used by FluidNormalize:kr
|
||||||
@ -0,0 +1,63 @@
|
|||||||
|
// Client for the FluCoMa principal-component-analysis reducer: fit on a
// DataSet, then project data sets or single points down to numDimensions.
FluidPCA : FluidRealTimeModel {

	var <>numDimensions;

	*new { |server, numDimensions = 2|
		^super.new(server, [numDimensions]).numDimensions_(numDimensions);
	}

	// Parameter vector sent to the server; trailing -1s are buffer placeholders.
	prGetParams {
		^[numDimensions, -1, -1];
	}

	fitMsg { |dataSet|
		^this.prMakeMsg(\fit, id, dataSet.id);
	}

	// Compute the principal components of dataSet.
	fit { |dataSet, action|
		var msg = this.fitMsg(dataSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	transformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
	}

	// Project sourceDataSet into destDataSet; the reply carries one number
	// (the variance retained by the kept components).
	transform { |sourceDataSet, destDataSet, action|
		var msg = this.transformMsg(sourceDataSet, destDataSet);
		actions[\transform] = [numbers(FluidMessageResponse, _, 1, _), action];
		this.prSendMsg(msg);
	}

	fitTransformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id);
	}

	// Fit and project in a single server round trip.
	fitTransform { |sourceDataSet, destDataSet, action|
		var msg = this.fitTransformMsg(sourceDataSet, destDataSet);
		actions[\fitTransform] = [numbers(FluidMessageResponse, _, 1, _), action];
		this.prSendMsg(msg);
	}

	transformPointMsg { |sourceBuffer, destBuffer|
		// The trailing /b_query refreshes the client-side destination Buffer.
		^this.prMakeMsg(\transformPoint, id,
			this.prEncodeBuffer(sourceBuffer),
			this.prEncodeBuffer(destBuffer),
			["/b_query", destBuffer.asUGenInput]
		);
	}

	// Project one point; the callback receives the refreshed destination buffer.
	transformPoint { |sourceBuffer, destBuffer, action|
		var msg = this.transformPointMsg(sourceBuffer, destBuffer);
		actions[\transformPoint] = [nil, { action.value(destBuffer) }];
		this.prSendMsg(msg);
	}

	// Control-rate querying; an override of numDimensions is persisted.
	kr { |trig, inputBuffer, outputBuffer, numDimensions|
		numDimensions = numDimensions ? this.numDimensions;
		this.numDimensions_(numDimensions);

		^FluidPCAQuery.kr(K2A.ar(trig), this, this.numDimensions, this.prEncodeBuffer(inputBuffer), this.prEncodeBuffer(outputBuffer));
	}
}
|
||||||
|
|
||||||
|
FluidPCAQuery : FluidRTQuery {} // server-side query UGen used by FluidPCA:kr
|
||||||
@ -0,0 +1,59 @@
|
|||||||
|
// Test harness for the point-provider plugin interface: sends add/update/
// delete point commands over /u_cmd and awaits the matching OSC reply.
FluidProviderTest : FluidModelObject {

	/* var <> server;
	var <> nodeID;

	*new{ |server, name|

	}*/

	// *kr{ |name,vals|
	// 	^this.new1('control',name,vals);
	// }
	//
	// *new1 { |rate, name,vals|
	// 	var ascii = name.ascii;
	// 	var args;
	// 	vals ?? {vals = []};
	// 	if(vals.isArray.not) {vals = [vals]};
	// 	args = ([rate, ascii.size].addAll(ascii) ++ vals.size).addAll(vals).addAll([1,1]);
	// 	args.postln;
	// 	^super.new1(*args);
	// }

	/* init { |size...chars|
	specialIndex = -1;
	inputs = [size].addAll(chars);
	}*/

	// Each public call delegates to prSendMsg with the matching command name.
	addPoint { |server, nodeID, args, action|
		this.prSendMsg(server, nodeID, 'addPoint', args, action);
	}

	updatePoint { |server, nodeID, args, action|
		this.prSendMsg(server, nodeID, 'updatePoint', args, action);
	}

	deletePoint { |server, nodeID, args, action|
		this.prSendMsg(server, nodeID, 'deletePoint', args, action);
	}

	// Send one command and install a one-shot listener on '/<msg>'; parser
	// (a FluidMessageResponse selector list) decodes the reply payload.
	prSendMsg { |server, nodeID, msg, args, action, parser|
		server = server ? Server.default;

		server.listSendMsg(['/u_cmd', nodeID.nodeID, this.synthIndex, msg].addAll(args));

		OSCFunc(
			{ |msg|
				var result = FluidMessageResponse.collectArgs(parser, msg.drop(3));
				if(action.notNil) { action.value(result) } { action.value };
			}, '/' ++ msg).oneShot;
	}
}
|
||||||
@ -0,0 +1,69 @@
|
|||||||
|
// Client for the FluCoMa robust scaler: centres on the median and scales by
// the low/high percentile range, for data sets, single points, or at control
// rate via the companion query UGen.
FluidRobustScale : FluidRealTimeModel {

	var <>low, <>high, <>invert;

	*new { |server, low = 25, high = 75, invert = 0|
		^super.new(server, [low, high, invert])
			.low_(low).high_(high).invert_(invert);
	}

	// Parameter vector sent to the server; trailing -1s are buffer placeholders.
	prGetParams {
		^[this.low, this.high, this.invert, -1, -1];
	}

	fitMsg { |dataSet|
		^this.prMakeMsg(\fit, id, dataSet.id)
	}

	// Learn medians and percentile ranges from dataSet.
	fit { |dataSet, action|
		var msg = this.fitMsg(dataSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	transformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
	}

	// Apply the fitted scaling to sourceDataSet, writing into destDataSet.
	transform { |sourceDataSet, destDataSet, action|
		var msg = this.transformMsg(sourceDataSet, destDataSet);
		actions[\transform] = [nil, action];
		this.prSendMsg(msg);
	}

	fitTransformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id)
	}

	// Fit and transform in a single server round trip.
	fitTransform { |sourceDataSet, destDataSet, action|
		var msg = this.fitTransformMsg(sourceDataSet, destDataSet);
		actions[\fitTransform] = [nil, action];
		this.prSendMsg(msg);
	}

	transformPointMsg { |sourceBuffer, destBuffer|
		// The trailing /b_query refreshes the client-side destination Buffer.
		^this.prMakeMsg(\transformPoint, id,
			this.prEncodeBuffer(sourceBuffer),
			this.prEncodeBuffer(destBuffer),
			["/b_query", destBuffer.asUGenInput]
		);
	}

	// Scale one point; the callback receives the refreshed destination buffer.
	transformPoint { |sourceBuffer, destBuffer, action|
		var msg = this.transformPointMsg(sourceBuffer, destBuffer);
		actions[\transformPoint] = [nil, { action.value(destBuffer) }];
		this.prSendMsg(msg);
	}

	// Control-rate querying; overrides (if given) are persisted on the client.
	kr { |trig, inputBuffer, outputBuffer, low, high, invert|
		low = low ? this.low;
		high = high ? this.high;
		invert = invert ? this.invert;

		this.low_(low).high_(high).invert_(invert);

		^FluidRobustScaleQuery.kr(K2A.ar(trig), this, this.low, this.high, this.invert, this.prEncodeBuffer(inputBuffer), this.prEncodeBuffer(outputBuffer));
	}
}
|
||||||
|
|
||||||
|
FluidRobustScaleQuery : FluidRTQuery {} // server-side query UGen used by FluidRobustScale:kr
|
||||||
@ -0,0 +1,93 @@
|
|||||||
|
// Control-rate proxy UGen: stands in for a named server-side plugin so that
// model objects can be passed straight into a synth graph.
FluidProxyUgen : UGen {

	var <>pluginname;

	*kr { |pluginname ... args|
		// BUG FIX: this statement previously lacked its terminating ';',
		// which is a parse error in sclang (newlines do not end statements).
		args = args.collect { |x| x.asUGenInput };
		^this.new1('control', pluginname, *args)
	}

	init { |pluginname ... args|
		this.pluginname = pluginname;
		inputs = args;
		rate = 'control';
	}

	// Report the proxied plugin's name instead of the sclang class name.
	name {
		^pluginname.asString;
	}

	// Poll with the plugin name as the default label.
	poll { |trig = 10, label, trigid = -1|
		^super.poll(trig, label ? this.name, trigid)
	}
}
|
||||||
|
|
||||||
|
// Per-server registry of client-side objects (models, OSCFuncs). Each server
// gets its own IdentityDictionary keyed by object id; clearCache frees all
// cached items (batching freeMsg-capable objects into one bundle) when the
// server's allocators are reset.
FluidServerCache {

	var <cache;

	*new { ^super.new.init }

	init {
		cache = IdentityDictionary.new;
	}

	// Iterate the entries cached for one server, if any.
	do { |server, func|
		cache[server] !? { cache[server].do { |x| func.value(x) } }
	}

	// Iterate every entry across every server.
	doAll { |func|
		cache.do { |subCache|
			subCache.do { |item|
				func.value(item)
			}
		}
	}

	postln {
		cache.postln;
	}

	// Look up one entry; nil-safe when the server has no cache yet.
	at { |server, id|
		^cache[server].tryPerform(\at, id)
	}

	includesKey { |server, key|
		^cache[server].tryPerform(\includesKey, key)
	}

	put { |server, id, x|
		cache[server][id] = x;
	}

	remove { |server, id|
		cache[server] !? { cache[server].removeAt(id) };
	}

	// Lazily create the per-server dictionary and arrange for it to be
	// cleared whenever that server's allocators are reset.
	initCache { |server|
		cache[server] ?? {
			cache[server] = IdentityDictionary.new;
			NotificationCenter.register(server, \newAllocators, this,
				{
					this.clearCache(server);
				});
		}
	}

	// Free everything cached for server: freeMsg-capable objects are freed
	// in one bundle, anything else (e.g. OSCFunc) is freed directly.
	clearCache { |server|
		cache[server] !?
		{
			var bundle = [];
			cache[server].values.do { |i|
				if(i.respondsTo(\freeMsg)) {
					bundle = bundle.add(i.freeMsg); // server objects
				} {
					i.free; // OSCFunc
				}
			};
			server.listSendBundle(nil, bundle);
			cache.removeAt(server);
		};
	}
}
|
||||||
@ -0,0 +1,344 @@
|
|||||||
|
// Base class for client-side mirrors of server-resident FluCoMa objects.
// Each instance is identified by an integer id and talks to its server
// counterpart via '/cmd' messages. Instances are tracked per subclass and
// per server in FluidServerCache instances.
FluidServerObject
{
	// class (subclass of this) -> FluidServerCache
	classvar serverCaches;
	// next auto-assigned integer id
	classvar count;
	// NOTE(review): 'persistent' is declared but not referenced in this file — confirm it is used elsewhere
	classvar persistent = true;
	var <server,<id;

	// Ask the server plugin to post its version string.
	*version{|server|
		server ?? {server = Server.default};
		server.sendMsg("/cmd","/"++this.objectClassName++'/version');
	}

	*initClass {
		serverCaches = IdentityDictionary.new;
		count = 0;
		// On internal-server boot, drop any stale cache entry for it.
		ServerBoot.add({serverCaches[this]!?{serverCaches[this].cache.put(Server.internal,nil);}},Server.internal);
	}

	// Lazily create this subclass's cache for `server`; flush the internal
	// server's plugin state on its first use; reset the id counter whenever
	// the server reallocates.
	*initCache {|server|
		serverCaches[this] ?? { serverCaches[this] = FluidServerCache.new};

		if(server === Server.internal and: serverCaches[this].cache[Server.internal].isNil)
		{
			this.flush(Server.internal)
		};

		serverCaches[this].initCache(server);
		NotificationCenter.register(server,\newAllocators,this,{ count = 0; });
	}

	// OSC message that creates the server-side counterpart with the given id.
	*newMsg{|id, params|
		params = params !? {params.collect(_.asUGenInput)};
		// ("Newms"++params).postln;
		^['/cmd',this.objectClassName ++ '/new',id] ++ params
	}

	// Create (and optionally announce to the server) a new instance.
	// id defaults to an auto-incrementing counter. Returns the new object
	// (`cache` returns its receiver, since sclang methods return `this`
	// when no explicit ^ is given).
	*new{ |server, id, params, action, callNew = true|
		var newObj;
		server ?? {server = Server.default};
		if(server.serverRunning.not){"Server not running".warn};
		id !? { id = id.asInteger }
		?? { id = count; count = count + 1; };
		// NOTE(review): the third newCopyArgs slot lands in the subclass's first
		// instance var (e.g. FluidBufProcessor's processAction) — confirm intended.
		newObj = super.newCopyArgs(server,id,action);
		// params.postln;
		if(callNew) {server.listSendMsg(this.newMsg(id,params))};
		^newObj.cache
	}

	// Register this instance in the per-class, per-server cache.
	cache {
		this.class.initCache(server);
		serverCaches[this.class].put(server,this.id,this);
	}

	uncache{
		serverCaches[this.class].remove(server,id);
	}

	// Build '/cmd' message ['/cmd', "Class/msg", id, ...args].
	prMakeMsg{|msg,id...args|
		^['/cmd',"%/%".format(this.class.objectClassName,msg),id].addAll(args);
	}

	// Build the free message, uncache, and invalidate the id (so double-free
	// only warns). Returns nil if already freed.
	freeMsg {
		var msg;
		id ?? {" % already freed".format(this.class.name).warn; ^nil};
		this.uncache;
		msg = this.prMakeMsg(\free,id);
		id = nil;
		^msg;
	}

	free{
		var msg = this.freeMsg;
		msg !? {server.listSendMsg(msg)} ?? {^nil};
	}

	// Free every cached instance of this subclass on the given server.
	*freeAll{|server|
		serverCaches[this] !? {|cache|
			cache.clearCache(server ? Server.default);
		};
		count = 0;
	}

	// Inside a synth graph, this object is represented by its id.
	asUGenInput{ ^id }

	asString {
		^"%(%)".format(this.class.name,id).asString;
	}

	asSymbol {
		^id.asSymbol
	}

	// Name used in OSC command paths; subclasses may override.
	*objectClassName { ^this.name.asSymbol }

	// Drop all server-side instances of this class.
	*flushMsg { ^['/cmd',this.objectClassName ++ '/flush'] }

	*flush {|server| server.listSendMsg(this.flushMsg)}
}
|
||||||
|
|
||||||
|
// Client mirror of a server-side buffer processor (e.g. FluidBufMFCC's NRT
// engine). Sends '/<Class>/processNew' jobs and fires a stored action when
// the matching '/<Class>/process' done message comes back.
FluidBufProcessor : FluidServerObject
{
	// function called when processing completes (receives output buffers)
	var <processAction;
	// buffers the job writes into; passed to processAction on completion
	var <outputBuffers;
	// whether to free the server object once its job is done
	var <freeWhenDone;
	classvar responder;
	classvar count;

	// Cmd-. handler: free every cached processor and forget the cache.
	*cmdPeriod {
		serverCaches[this] !? {|cache|
			cache.doAll{|processor| processor !? { processor.free;} };
			serverCaches[this] = nil;
		};
		count = 0;
	}

	// In addition to the base cache setup, install (once per server) a fixed
	// OSCFunc on the '/<Class>/process' done path that looks up the processor
	// by the trailing id, refreshes its buffer info, and runs its action.
	*initCache {|server|
		// "initcache".postln;
		// this.done.postln;
		super.initCache(server);
		CmdPeriod.add(this);
		if(serverCaches[this].includesKey(server,\processResponder).not)
		{
			serverCaches[this].put(server,\processResponder,OSCFunc({|m|
				var id = m.last.asInteger;
				// "I'm in the pizza hut".postln;
				serverCaches[this].at(server,id) !? {|p|
					// "I'm in the taco bell".postln ;
					p!?{
						p.processAction!?{|a|
							var bufs = p.outputBuffers;

							// Resolve bare bufnums to cached Buffer objects.
							bufs = bufs.collect{|b|
								if(b.isKindOf(Buffer))
								{b}
								{Buffer.cachedBufferAt(server,b)};
							};
							// NOTE(review): Object:valueArray returns its receiver, so the inner
							// valueArray(bufs) is a no-op — a.valueArray(bufs) would suffice.
							a.valueArray(valueArray(bufs));
						};
						if(p.freeWhenDone){p.free};
					}
				}
			},this.done ,server.addr).fix)
		}
	}

	// callNew:false — the server plugin creates processor instances lazily.
	*new {|server,id,outputBuffers|
		^super.new(server,id, nil, nil,false).init(outputBuffers);
	}

	init{ |ob|
		outputBuffers = ob;
	}

	// OSC path the server replies on when a job finishes.
	*done {
		^"/%/process".format(this.objectClassName);
	}

	// Block the calling routine until this instance's done message arrives.
	// Must be called inside a Routine/Task (Condition:hang).
	wait {
		var condition = Condition.new;
		id ?? {Error("% already freed".format(this.class.name)).throw};
		OSCFunc({
			condition.unhang;
		},this.class.done,server.addr,argTemplate:[nil,id]).oneShot;
		condition.hang;
	}

	// Build the processNew message; the completion element makes the server
	// sync and re-query the output buffers so client-side Buffer info is fresh.
	processMsg {|params|
		var msg;
		var completionMsg = outputBuffers !? {
			[["/sync"]] ++ outputBuffers.collect{|b| ["/b_query", b.asUGenInput]}
		} ?? {[]};

		// completionMsg.postln;
		id ?? {Error("% already freed".format(this.class.name)).throw};
		msg = this.prMakeMsg(\processNew,id).addAll(params).add(completionMsg);
		// msg.postln;
		^msg;
	}

	// Kick off a processing job; `action` fires from the done responder above.
	processList { |params,shouldFree,action|
		freeWhenDone = shouldFree;
		processAction = action;
		params = params.collect(_.asUGenInput);
		server.listSendMsg(this.processMsg(params));
	}

	cancelMsg{
		id ?? {Error("% already freed".format(this.class.name)).throw};
		^this.prMakeMsg(\cancel, id);
	}

	cancel{
		server.listSendMsg(this.cancelMsg);
	}

	// Control-rate progress monitor via the companion '<Class>Monitor' plugin.
	kr{ ^FluidProxyUgen.kr(this.class.objectClassName ++ "Monitor",id) }
}
|
||||||
|
|
||||||
|
// An OSC dispatcher that inverts the usual pattern-matching direction:
// the *registered keys* are treated as address patterns and matched
// against each concrete incoming message path.
FluidOSCPatternInversion : OSCMessageDispatcher
{
	value { |msg, time, addr, recvPort|
		var incomingPath = msg[0].asSymbol;
		active.keysValuesDo{ |registeredPattern, handler|
			if(incomingPath.matchOSCAddressPattern(registeredPattern)) {
				handler.value(msg, time, addr, recvPort);
			};
		}
	}
}
|
||||||
|
|
||||||
|
|
||||||
|
// Client mirror of server-side data objects (datasets, labelsets, models).
// Replies arrive as '/<Class>/<method>' messages; a single per-server
// OSCFunc routes each reply to the [parser, action] pair stored in
// `actions` under the method's name.
FluidDataObject : FluidServerObject
{
	// default reply action: just post the result
	classvar postResponse;

	// method symbol -> [parser, action]; parser may be nil (no payload)
	var <actions;

	*initClass{
		postResponse = _.postln;
	}

	// Install (once per server) a wildcard responder on '/<Class>/*'.
	// The FluidOSCPatternInversion dispatcher lets the registered wildcard
	// pattern match concrete incoming paths.
	*initCache{ |server|
		super.initCache(server);
		if(serverCaches[this].includesKey(server,\messageResponder).not)
		{
			serverCaches[this].put(server,\messageResponder,OSCFunc.new({|m|
				var id = m[1].asInteger;
				var method;
				serverCaches[this].at(server,id) !? { |p|
					// Recover the method name from the reply path.
					method = m[0].asString.findRegexp("/"++this.name++"/(.*)")[1][1].asSymbol;
					p.actions[method] !? {|a|
						//two items: parser and action
						var parser = a[0];
						var action = a[1];
						var result = FluidMessageResponse.collectArgs(parser,m[2..]);
						action.value(result);
					}
				}
			},'/' ++ this.objectClassName ++ '/*',server.addr, dispatcher:FluidOSCPatternInversion.new).fix)
		}
	}

	*new{|server...args|
		// args.flatten.postln;
		^super.new(server,params:args.flatten).init;
	}

	// Look up a live client object by server and id (creating the cache if needed).
	*cachedInstanceAt{|server,id|
		this.initCache(server);
		^serverCaches[this].at(server,id);
	}

	init {
		actions = IdentityDictionary.new;
	}

	// Buffers are sent as bufnums; nil becomes the server-side sentinel -1.
	prEncodeBuffer { |buf| buf !? {^buf.asUGenInput} ?? {^-1} }

	prSendMsg {|msg| server !? {server.listSendMsg(msg)};}

	colsMsg { ^this.prMakeMsg(\cols,id);}

	// Query the number of columns; reply payload is one number.
	cols{ |action=(postResponse)|
		actions[\cols] = [numbers(FluidMessageResponse,_,1,_),action];
		this.prSendMsg(this.colsMsg)
	}

	readMsg { |filename| ^this.prMakeMsg(\read,id,filename.asString);}

	// Load server-side state from file; action fires on completion (no payload).
	read{|filename, action|
		actions[\read] = [nil,action];
		this.prSendMsg(this.readMsg(filename));
	}

	writeMsg {|filename|
		// ^['/cmd',this.class.name ++ '/write',id,filename.asString]
		^this.prMakeMsg(\write,id,filename.asString);
	}

	// Save server-side state to file; action fires on completion (no payload).
	write{|filename, action|
		actions[\write] = [nil,action];
		this.prSendMsg(this.writeMsg(filename));
	}

	sizeMsg{
		// ^['/cmd',this.class.name ++ '/size',id]
		^this.prMakeMsg(\size,id);
	}

	// Query the number of entries; reply payload is one number.
	size {|action=(postResponse)|
		actions[\size] = [numbers(FluidMessageResponse,_,1,_),action];
		this.prSendMsg(this.sizeMsg);
	}
}
|
||||||
|
|
||||||
|
// A data object whose server counterpart carries tunable parameters:
// before every command, the current client-side parameter state is pushed
// to the server with a 'setParams' message.
FluidModelObject : FluidDataObject
{
	// Subclasses must return the ordered parameter array sent to the server.
	prGetParams{
		"Subclass should provide this";.throw;
	}

	// Build the setParams message from the subclass's current parameter state.
	prUpdateStateMsg{
		var params = this.prGetParams.value.collect(_.asUGenInput);
		^this.prMakeMsg(\setParams,id) ++ params;
	}

	prSendMsg {|msg|
		//These need to happen sequentially, but not simultaneously
		//sending as a bundle makes reasoning about timing w/r/t other
		//commands more awkward, unless we set the offset to 0 (in which case,
		//noisy 'late' messages)
		super.prSendMsg(this.prUpdateStateMsg);
		super.prSendMsg(msg);
	}
}
|
||||||
|
|
||||||
|
// Base for models that are also queryable at control rate. The server's
// 'new' for these expects two extra trailing values after the model
// parameters; -1 is the sentinel used throughout this library.
FluidRealTimeModel : FluidModelObject
{
	*new { |server, params|
		var fullParams = params ++ [-1, -1];
		^super.new(server, fullParams);
	}
}
|
||||||
|
|
||||||
|
// Proxy UGen that queries a server-side model in real time: the subclass
// name selects the plugin, and the model object is passed by id.
FluidRTQuery : FluidProxyUgen
{
	*kr { |trig, obj...args|
		var modelId = obj.asUGenInput;
		^super.kr(this.name, trig, modelId, *args)
	}
}
|
||||||
|
|
||||||
|
|
||||||
|
// Base class for single-output FluCoMa real-time UGens; adds a version query.
FluidRTUGen : UGen
{
	// Ask the server plugin to post its version string.
	*version { |server|
		var target = server ? Server.default;
		target.sendMsg("/cmd", "/" ++ this.name ++ '/version');
	}
}
|
||||||
|
|
||||||
|
// Base class for multi-output FluCoMa real-time UGens; adds a version query.
FluidRTMultiOutUGen : MultiOutUGen
{
	// Ask the server plugin to post its version string.
	*version { |server|
		var target = server ? Server.default;
		target.sendMsg("/cmd", "/" ++ this.name ++ '/version');
	}
}
|
||||||
@ -0,0 +1,59 @@
|
|||||||
|
// Client mirror of the server-side Standardize model: fit to a dataset,
// then transform datasets or single points (forward or inverse).
FluidStandardize : FluidRealTimeModel {

	// 0 = forward standardization, nonzero = inverse transform
	var <>invert;

	*new { |server, invert = 0|
		var obj = super.new(server, [invert]);
		^obj.invert_(invert);
	}

	// Parameter vector pushed to the server before each command
	// (see FluidModelObject:prSendMsg).
	prGetParams {
		^[this.invert, -1, 1];
	}

	fitMsg { |dataSet|
		^this.prMakeMsg(\fit, id, dataSet.id);
	}

	// Fit the model to a dataset; action fires when the server replies.
	fit { |dataSet, action|
		var msg = this.fitMsg(dataSet);
		actions[\fit] = [nil, action];
		this.prSendMsg(msg);
	}

	transformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
	}

	// Transform a whole dataset into another.
	transform { |sourceDataSet, destDataSet, action|
		var msg = this.transformMsg(sourceDataSet, destDataSet);
		actions[\transform] = [nil, action];
		this.prSendMsg(msg);
	}

	fitTransformMsg { |sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id)
	}

	// Fit and transform in one server round trip.
	fitTransform { |sourceDataSet, destDataSet, action|
		var msg = this.fitTransformMsg(sourceDataSet, destDataSet);
		actions[\fitTransform] = [nil, action];
		this.prSendMsg(msg);
	}

	// The trailing "/b_query" keeps the client's view of the destination
	// buffer up to date once the server has written into it.
	transformPointMsg { |sourceBuffer, destBuffer|
		var src = this.prEncodeBuffer(sourceBuffer);
		var dst = this.prEncodeBuffer(destBuffer);
		^this.prMakeMsg(\transformPoint, id, src, dst, ["/b_query", destBuffer.asUGenInput]);
	}

	// Transform one point (a buffer frame); action receives the destination buffer.
	transformPoint { |sourceBuffer, destBuffer, action|
		var msg = this.transformPointMsg(sourceBuffer, destBuffer);
		actions[\transformPoint] = [nil, { action.value(destBuffer) }];
		this.prSendMsg(msg);
	}

	// Control-rate query against this model; an invert supplied here
	// also updates the stored setting.
	kr { |trig, inputBuffer, outputBuffer, invert|
		this.invert_(invert ? this.invert);
		^FluidStandardizeQuery.kr(
			K2A.ar(trig),
			this,
			this.invert,
			this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer)
		);
	}
}
|
||||||
|
|
||||||
|
// Empty marker subclass: control-rate query UGen for FluidStandardize (behavior inherited from FluidRTQuery).
FluidStandardizeQuery : FluidRTQuery {}
|
||||||
@ -0,0 +1,40 @@
|
|||||||
|
// Experimental ('Test') subscriber that queries a running synth node via
// '/u_cmd' and collects the reply with a one-shot OSCFunc. The commented
// block below is an earlier UGen-based incarnation kept for reference.
FluidSubscriberTest : FluidModelObject {

	var <> providerName;
	var <> nodeID;

	// *kr { |provider|
	// ^this.multiNew('control',provider);
	// }
	//
	// *new1 { |rate, provider|
	// var ascii = provider.ascii;
	// ^super.new1(*[rate, ascii.size].addAll(ascii));
	// }
	//
	// init { |size...chars|
	// //Send the number of inputs (size of provider string) as specialIndex,
	// //so server plugin knows what's going on
	// specialIndex = -1;
	// inputs = [size].addAll(chars);
	// providerName = chars.collectAs({|x|x.asInteger.asAscii}, String);
	// }

	// Ask a node which provider a label maps to; the reply parser expects
	// a string followed by two numbers.
	providerLookup { |server, nodeID, label, action|
		this.prSendMsg(server, nodeID, 'providerLookup', label, action,
			[string(FluidMessageResponse,_,_),numbers(FluidMessageResponse,_,2,_)] );
	}

	// Send a '/u_cmd' to the node and install a one-shot listener on '/<msg>'.
	// NOTE(review): this.synthIndex is a UGen property, not one of this class's —
	// confirm how instances acquire it (likely a leftover from the UGen version above).
	prSendMsg { |server, nodeID, msg, args, action,parser|

		server = server ? Server.default;

		server.listSendMsg(['/u_cmd',nodeID.nodeID,this.synthIndex,msg].addAll(args));

		OSCFunc(
			{ |msg|
				var result = FluidMessageResponse.collectArgs(parser,msg.drop(3));
				if(action.notNil){action.value(result)}{action.value};
			},'/'++msg).oneShot;
	}
}
|
||||||
@ -0,0 +1,87 @@
|
|||||||
|
// Client mirror of the server-side UMAP dimensionality-reduction model.
// Follows the same fit/transform/fitTransform/transformPoint protocol as
// the other FluidRealTimeModel subclasses.
FluidUMAP : FluidRealTimeModel {

	// UMAP hyperparameters, mirrored client-side so they can be re-sent
	// ahead of every command (see FluidModelObject:prSendMsg).
	var <>numDimensions, <>numNeighbours, <>minDist, <>iterations, <>learnRate;

	*new {|server,numDimensions = 2, numNeighbours = 15, minDist = 0.1, iterations = 200, learnRate = 0.1|
		^super.new(server,[numDimensions, numNeighbours, minDist, iterations, learnRate])
		.numDimensions_(numDimensions)
		.numNeighbours_(numNeighbours)
		.minDist_(minDist)
		.iterations_(iterations)
		.learnRate_(learnRate);
	}

	// Ordered parameter vector; the two -1s are the trailing sentinels the
	// server-side setParams expects.
	prGetParams{
		^[
			this.numDimensions,
			this.numNeighbours,
			this.minDist,
			this.iterations,
			this.learnRate,
			-1,-1
		]
	}

	fitTransformMsg{|sourceDataSet, destDataSet|
		^this.prMakeMsg(\fitTransform, id, sourceDataSet.id, destDataSet.id)
	}

	// Fit to a dataset and write the embedding into destDataSet.
	fitTransform{|sourceDataSet, destDataSet, action|
		actions[\fitTransform] = [nil, action];
		this.prSendMsg(this.fitTransformMsg(sourceDataSet,destDataSet));
	}

	fitMsg{|dataSet|
		^this.prMakeMsg(\fit,id, dataSet.id);
	}

	// Fit only; action fires when the server replies.
	fit{|dataSet, action|
		actions[\fit] = [nil, action];
		this.prSendMsg(this.fitMsg(dataSet));
	}

	transformMsg{|sourceDataSet, destDataSet|
		^this.prMakeMsg(\transform, id, sourceDataSet.id, destDataSet.id);
	}

	// Transform a dataset through the already-fitted model.
	transform{|sourceDataSet, destDataSet, action|
		actions[\transform] = [nil, action];
		this.prSendMsg(this.transformMsg(sourceDataSet,destDataSet));
	}

	// The trailing "/b_query" refreshes the client's view of the destination buffer.
	transformPointMsg{|sourceBuffer, destBuffer|
		^this.prMakeMsg(\transformPoint,id,
			this.prEncodeBuffer(sourceBuffer),
			this.prEncodeBuffer(destBuffer),
			["/b_query",destBuffer.asUGenInput]
		);
	}

	// Transform one point; action receives the destination buffer.
	transformPoint{|sourceBuffer, destBuffer, action|
		actions[\transformPoint] = [nil,{action.value(destBuffer)}];
		this.prSendMsg(this.transformPointMsg(sourceBuffer,destBuffer));
	}

	// Control-rate query; a numDimensions supplied here also updates the
	// stored setting.
	kr{|trig, inputBuffer,outputBuffer,numDimensions|

		numDimensions = numDimensions ? this.numDimensions;
		this.numDimensions_(numDimensions);

		^FluidUMAPQuery.kr(K2A.ar(trig),
			this,
			this.numDimensions,
			this.numNeighbours,
			this.minDist,
			this.iterations,
			this.learnRate,
			this.prEncodeBuffer(inputBuffer),
			this.prEncodeBuffer(outputBuffer));
	}

	// not implemented server-side for UMAP: deliberately override to no-ops
	cols {|action|}
	size { |action|}
}
|
||||||
|
|
||||||
|
// Empty marker subclass: control-rate query UGen for FluidUMAP (behavior inherited from FluidRTQuery).
FluidUMAPQuery : FluidRTQuery {}
|
||||||
@ -0,0 +1,169 @@
|
|||||||
|
// Example workflow: load a folder of audio into one buffer, slice it,
// describe each slice with MFCC statistics into a FluidDataSet, then
// query nearest neighbours with a KDTree. Run each ( ... ) region in order.
// define a few processes
(
~ds = FluidDataSet(s);//no name needs to be provided
//define as many buffers as we have parallel voices/threads in the extractor processing (default is 4)
~mfccbuf = 4.collect{Buffer.new};
~statsbuf = 4.collect{Buffer.new};
~flatbuf = 4.collect{Buffer.new};

// here we instantiate a loader which creates a single large buffer with a dictionary of what was included in it
// ~loader = FluidLoadFolder("/Volumes/machins/projets/newsfeed/sons/smallnum/");
~loader = FluidLoadFolder(File.realpath(FluidLoadFolder.class.filenameSymbol).dirname +/+ "../AudioFiles");

// here we instantiate a further slicing step if needs be, which iterate through all the items of the FluidLoadFolder and slice the slices with the declared function.
~slicer = FluidSliceCorpus({ |src,start,num,dest|
	FluidBufOnsetSlice.kr(src, start, num, metric: 9, minSliceLength: 17, indices:dest, threshold:0.7, blocking: 1)
});

// here we instantiate a process of description and dataset writing, which will run each slice of the previous slice and write the entry. Note the chain of Done.kr triggers.
~extractor = FluidProcessSlices({|src,start,num,data|
	var mfcc, stats, writer, flatten,mfccBuf, statsBuf, flatBuf, label, voice;
	label = data.key;
	voice = data.value[\voice];
	mfcc = FluidBufMFCC.kr(src, startFrame:start, numFrames:num, numChans:1, features:~mfccbuf[voice], trig:1, blocking: 1);
	stats = FluidBufStats.kr(~mfccbuf[voice], stats:~statsbuf[voice], trig:Done.kr(mfcc), blocking: 1);
	flatten = FluidBufFlatten.kr(~statsbuf[voice], destination:~flatbuf[voice], trig:Done.kr(stats), blocking: 1);
	writer = FluidDataSetWr.kr(~ds, label, nil, ~flatbuf[voice], trig: Done.kr(flatten), blocking: 1)
});
)

//////////////////////////////////////////////////////////////////////////
//loading process

// just run the loader
(
t = Main.elapsedTime;
~loader.play(s,action:{(Main.elapsedTime - t).postln;"Loaded".postln;});
)

//load and play to test if it is that quick - it is!
(
t = Main.elapsedTime;
~loader.play(s,action:{(Main.elapsedTime - t).postln;"Loaded".postln;{var start, stop; PlayBuf.ar(~loader.index[~loader.index.keys.asArray.last.asSymbol][\numchans],~loader.buffer,startPos: ~loader.index[~loader.index.keys.asArray.last.asSymbol][\bounds][0])}.play;});
)

//ref to the buffer
~loader.buffer

//size of item
~loader.index.keys.size

//a way to get all keys info sorted by time
~stuff = Array.newFrom(~loader.index.keys).sort.collect{|x|~loader.index[x][\bounds]}.sort{|a,b| a[0]<b[0]};

//or to iterate in the underlying dictionary (unsorted)
(
~loader.index.pairsDo{ |k,v,i|
	k.postln;
	v.pairsDo{|l,u,j|
		"\t\t\t".post;
		(l->u).postln;
	}
}
)

// or write to file a human readable, sorted version of the database after sorting it by index.
(
a = File("/tmp/sc-loading.json","w");
~stuffsorted = Array.newFrom(~loader.index.keys).sort{|a,b| ~loader.index[a][\bounds][0]< ~loader.index[b][\bounds][0]}.do{|k|
	v = ~loader.index[k];
	a.write(k.asString ++ "\n");
	v.pairsDo{|l,u,j|
		a.write("\t\t\t" ++ (l->u).asString ++ "\n");
	}
};
a.close;
)

//////////////////////////////////////////////////////////////////////////
// slicing process

// just run the slicer
(
t = Main.elapsedTime;
~slicer.play(s,~loader.buffer,~loader.index,action:{(Main.elapsedTime - t).postln;"Slicing done".postln});
)

//slice count
~slicer.index.keys.size

// iterate
(
~slicer.index.pairsDo{ |k,v,i|
	k.postln;
	v.pairsDo{|l,u,j|
		"\t\t\t".post;
		(l->u).postln;
	}
}
)

///// write to file in human readable format, in order.
(
a = File("/tmp/sc-spliting.json","w");
~stuffsorted = Array.newFrom(~slicer.index.keys).sort{|a,b| ~slicer.index[a][\bounds][0]< ~slicer.index[b][\bounds][0]}.do{|k|
	v = ~slicer.index[k];
	a.write(k.asString ++ "\n");
	v.pairsDo{|l,u,j|
		a.write("\t\t\t" ++ (l->u).asString ++ "\n");
	}
};
a.close;
)

//////////////////////////////////////////////////////////////////////////
// description process

// just run the descriptor extractor
(
t = Main.elapsedTime;
~extractor.play(s,~loader.buffer,~slicer.index,action:{(Main.elapsedTime - t).postln;"Features done".postln});
)

// write the dataset to file with the native JSON
~ds.write("/tmp/sc-dataset.json")

// open the file in your default json editor
"open /tmp/sc-dataset.json".unixCmd

//////////////////////////////////////////////////////////////////////////
// manipulating and querying the data

//building a tree
~tree = FluidKDTree(s);
~tree.fit(~ds,{"Fitted".postln;});

//retrieve a sound to match
~targetsound = Buffer(s);
~targetname = ~slicer.index.keys.asArray.scramble[0].asSymbol;
#a,b = ~slicer.index[~targetname][\bounds];
FluidBufCompose.process(s,~loader.buffer,a,(b-a),numChans: 1, destination: ~targetsound,action: {~targetsound.play;})

//describe the sound to match
(
{
	var mfcc, stats, flatten;
	mfcc = FluidBufMFCC.kr(~targetsound,features:~mfccbuf[0],trig:1);
	stats = FluidBufStats.kr(~mfccbuf[0],stats:~statsbuf[0],trig:Done.kr(mfcc));
	flatten = FluidBufFlatten.kr(~statsbuf[0],destination:~flatbuf[0],trig:Done.kr(stats));
	FreeSelfWhenDone.kr(flatten);
}.play;
)

//find its nearest neighbours
~friends = Array;
~tree.numNeighbours = 5;
~tree.kNearest(~flatbuf[0],{|x| ~friends = x.postln;})

// play them in a row
(
Routine{
	5.do{|i|
		var dur;
		v = ~slicer.index[~friends[i].asSymbol];
		dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
		{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
		~friends[i].postln;
		dur.wait;
	};
}.play;
)
|
||||||
@ -0,0 +1,235 @@
|
|||||||
|
// Example workflow (variant of example 0): build three datasets — plain
// MFCC stats, loudness-weighted MFCC stats, and loudness stats — then
// curate dimensions and compare KDTree matches. Run each ( ... ) region in order.
// define a few processes
(
~ds = FluidDataSet(s);
~dsW = FluidDataSet(s);
~dsL = FluidDataSet(s);
//define as many buffers as we have parallel voices/threads in the extractor processing (default is 4)
~loudbuf = 4.collect{Buffer.new};
~weightbuf = 4.collect{Buffer.new};
~mfccbuf = 4.collect{Buffer.new};
~statsbuf = 4.collect{Buffer.new};
~flatbuf = 4.collect{Buffer.new};

// here we instantiate a loader as per example 0
~loader = FluidLoadFolder(File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/");

// here we instantiate a further slicing step as per example 0
~slicer = FluidSliceCorpus({ |src,start,num,dest|
	FluidBufOnsetSlice.kr(src,start,num,metric: 9, minSliceLength: 17, indices:dest, threshold:0.2,blocking: 1)
});

// here we instantiate a process of description and dataset writing, as per example 0
~extractor = FluidProcessSlices({|src,start,num,data|
	var label, voice, mfcc, stats, flatten;
	label = data.key;
	voice = data.value[\voice];
	mfcc = FluidBufMFCC.kr(src, startFrame:start, numFrames:num, numChans:1, features:~mfccbuf[voice], padding: 2, trig:1, blocking: 1);
	stats = FluidBufStats.kr(~mfccbuf[voice], stats:~statsbuf[voice], numDerivs: 1, trig:Done.kr(mfcc), blocking: 1);
	flatten = FluidBufFlatten.kr(~statsbuf[voice], destination:~flatbuf[voice], trig:Done.kr(stats), blocking: 1);
	FluidDataSetWr.kr(~ds, label, nil, ~flatbuf[voice], Done.kr(flatten), blocking: 1);
});

// here we make another processor, this time with doing an amplitude weighing
~extractorW = FluidProcessSlices({|src,start,num,data|
	var label, voice, loud, weights, mfcc, stats, flatten;
	label = data.key;
	voice = data.value[\voice];
	mfcc = FluidBufMFCC.kr(src, startFrame:start, numFrames:num, numChans:1, features:~mfccbuf[voice], padding: 2, trig:1, blocking: 1);
	loud = FluidBufLoudness.kr(src, startFrame:start, numFrames:num, numChans:1, features:~loudbuf[voice], padding: 2, trig:Done.kr(mfcc), blocking: 1);
	weights = FluidBufScale.kr(~loudbuf[voice], numChans: 1, destination: ~weightbuf[voice], inputLow: -70, inputHigh: 0, trig: Done.kr(loud), blocking: 1);
	stats = FluidBufStats.kr(~mfccbuf[voice], stats:~statsbuf[voice], numDerivs: 1, weights: ~weightbuf[voice], trig:Done.kr(weights), blocking: 1);
	flatten = FluidBufFlatten.kr(~statsbuf[voice], destination:~flatbuf[voice], trig:Done.kr(stats), blocking: 1);
	FluidDataSetWr.kr(~dsW, label, nil, ~flatbuf[voice], Done.kr(flatten), blocking: 1);
});

// and here we make a little processor for loudness if we want to poke at it
~extractorL = FluidProcessSlices({|src,start,num,data|
	var label, voice, loud, stats, flatten;
	label = data.key;
	voice = data.value[\voice];
	loud = FluidBufLoudness.kr(src, startFrame:start, numFrames:num, numChans:1, features:~mfccbuf[voice], trig:1, padding: 2, blocking: 1);
	stats = FluidBufStats.kr(~mfccbuf[voice], stats:~statsbuf[voice], numDerivs: 1, trig:Done.kr(loud), blocking: 1);
	flatten = FluidBufFlatten.kr(~statsbuf[voice], destination:~flatbuf[voice], trig:Done.kr(stats), blocking: 1);
	FluidDataSetWr.kr(~dsL, label, nil, ~flatbuf[voice], Done.kr(flatten), blocking: 1);
});
)

//////////////////////////////////////////////////////////////////////////
//loading process

//load and play to test if it is that quick - it is!
(
t = Main.elapsedTime;
~loader.play(s,action:{(Main.elapsedTime - t).postln;"Loaded".postln;{var start, stop; PlayBuf.ar(~loader.index[~loader.index.keys.asArray.last.asSymbol][\numchans],~loader.buffer,startPos: ~loader.index[~loader.index.keys.asArray.last.asSymbol][\bounds][0])}.play;});
)

//////////////////////////////////////////////////////////////////////////
// slicing process

// run the slicer
(
t = Main.elapsedTime;
~slicer.play(s,~loader.buffer,~loader.index,action:{(Main.elapsedTime - t).postln;"Slicing done".postln});
)

//slice count
~slicer.index.keys.size

//////////////////////////////////////////////////////////////////////////
// description process

// run both descriptor extractor - here they are separate to the batch process duration
(
t = Main.elapsedTime;
~extractor.play(s,~loader.buffer,~slicer.index,action:{(Main.elapsedTime - t).postln;"Features done".postln});
)

(
t = Main.elapsedTime;
~extractorW.play(s,~loader.buffer,~slicer.index,action:{(Main.elapsedTime - t).postln;"Features done".postln});
)

//////////////////////////////////////////////////////////////////////////
// manipulating and querying the data

// extracting whatever stats we want. In this case, mean/std/lowest/highest, and the same on the first derivative - excluding MFCC0 as it is mostly volume, keeping MFCC1-12

(
~curated = FluidDataSet(s);
~curatedW = FluidDataSet(s);
~curator = FluidDataSetQuery.new(s);
)

// the nested callbacks serialise the addRange calls so each completes before the next
(
~curator.addRange(1,12,{
	~curator.addRange(14,12,{
		~curator.addRange(53,12,{
			~curator.addRange(79,12,{
				~curator.addRange(92,12,{
					~curator.addRange(105,12,{
						~curator.addRange(144,12,{
							~curator.addRange(170,12);
						});
					});
				});
			});
		});
	});
});
)

~curator.transform(~ds,~curated)
~curator.transform(~dsW,~curatedW)

//check the dimension count
~ds.print
~dsW.print
~curated.print
~curatedW.print

//building a tree for each dataset
~tree = FluidKDTree(s,5);
~tree.fit(~ds,{"Fitted".postln;});
~treeW = FluidKDTree(s,5);
~treeW.fit(~dsW,{"Fitted".postln;});
~treeC = FluidKDTree(s,5);
~treeC.fit(~curated,{"Fitted".postln;});
~treeCW = FluidKDTree(s,5);
~treeCW.fit(~curatedW,{"Fitted".postln;});

//select a sound to match
// EITHER retrieve a random slice
~targetsound = Buffer(s);
~targetname = ~slicer.index.keys.asArray.scramble.last.asSymbol;
#a,b = ~slicer.index[~targetname][\bounds];
FluidBufCompose.process(s,~loader.buffer,a,(b-a),numChans: 1, destination: ~targetsound,action: {~targetsound.play;})

// OR just load a file in that buffer
~targetsound = Buffer.read(s,Platform.resourceDir +/+ "sounds/a11wlk01.wav");

//describe the sound to match
(
{
	var loud, weights, mfcc, stats, flatten, stats2, written;
	mfcc = FluidBufMFCC.kr(~targetsound,features:~mfccbuf[0],padding: 2, trig:1);
	stats = FluidBufStats.kr(~mfccbuf[0],stats:~statsbuf[0], numDerivs: 1,trig:Done.kr(mfcc));
	flatten = FluidBufFlatten.kr(~statsbuf[0],destination:~flatbuf[0],trig:Done.kr(stats));
	loud = FluidBufLoudness.kr(~targetsound,features:~loudbuf[0],padding: 2,trig:Done.kr(flatten),blocking: 1);
	weights = FluidBufScale.kr(~loudbuf[0],numChans: 1,destination: ~weightbuf[0],inputLow: -70,inputHigh: 0,trig: Done.kr(loud),blocking: 1);
	stats2 = FluidBufStats.kr(~mfccbuf[0],stats:~statsbuf[0], numDerivs: 1, weights: ~weightbuf[0], trig:Done.kr(weights),blocking: 1);
	written = FluidBufFlatten.kr(~statsbuf[0],destination:~flatbuf[1],trig:Done.kr(stats2));
	FreeSelf.kr(Done.kr(written));
}.play;
)
|
||||||
|
|
||||||
|
//go language side to extract the right dimensions
|
||||||
|
~flatbuf[0].getn(0,182,{|x|~curatedBuf = Buffer.loadCollection(s, x[[0,1,4,6,7,8,11,13].collect{|x|var y=x*13+1;(y..(y+11))}.flat].postln)})
|
||||||
|
~flatbuf[1].getn(0,182,{|x|~curatedWBuf = Buffer.loadCollection(s, x[[0,1,4,6,7,8,11,13].collect{|x|var y=x*13+1;(y..(y+11))}.flat].postln)})
|
||||||
|
|
||||||
|
//find its nearest neighbours
|
||||||
|
~tree.kNearest(~flatbuf[0],{|x| ~friends = x.postln;})
|
||||||
|
~treeW.kNearest(~flatbuf[1],{|x| ~friendsW = x.postln;})
|
||||||
|
~treeC.kNearest(~curatedBuf,{|x| ~friendsC = x.postln;})
|
||||||
|
~treeCW.kNearest(~curatedWBuf,{|x| ~friendsCW = x.postln;})
|
||||||
|
|
||||||
|
|
||||||
|
// play them in a row
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~friends[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~friends[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~friendsW[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~friendsW[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~friendsC[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~friendsC[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~friendsCW[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~friendsCW[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
//explore dynamic range (changing the weigting's value of 0 in lines 44 and 168 will change the various weights given to quieter parts of the signal
|
||||||
|
(
|
||||||
|
t = Main.elapsedTime;
|
||||||
|
~extractorL.play(s,~loader.buffer,~slicer.index,action:{(Main.elapsedTime - t).postln;"Features done".postln});
|
||||||
|
)
|
||||||
|
~norm = FluidNormalize.new(s)
|
||||||
|
~norm.fit(~dsL)
|
||||||
|
~norm.dump({|x|x["data_min"][[8,12]].postln;x["data_max"][[8,12]].postln;})//here we extract the stats from the dataset by retrieving the stored maxima of the fitting process in FluidNormalize
|
||||||
@ -0,0 +1,73 @@
|
|||||||
|
//load a part of a sound that has 3 clear components: a clear pitch component to start, a noisy pitchless ending and DC offset silence on both ends
|
||||||
|
(
|
||||||
|
b = Buffer.read(s,File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-ASWINE-ScratchySynth-M.wav",42250,44100);
|
||||||
|
~pitches = Buffer(s);
|
||||||
|
~stats = Buffer(s);
|
||||||
|
~loud = Buffer(s);
|
||||||
|
~scaled = Buffer(s);
|
||||||
|
~brutePitchStats = Array;
|
||||||
|
~loudnessWeighedPitchStats = Array;
|
||||||
|
~confidenceWeighedPitchStats = Array;
|
||||||
|
~condidenceWeightedPitchIQRStats = Array;
|
||||||
|
~pitchIQRStats = Array;
|
||||||
|
)
|
||||||
|
|
||||||
|
b.play
|
||||||
|
b.plot
|
||||||
|
|
||||||
|
FluidBufPitch.process(s,b,features: ~pitches)
|
||||||
|
FluidBufStats.process(s,~pitches,stats:~stats)
|
||||||
|
~stats.getn(0,14,{|x|~brutePitchStats = x; x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
|
||||||
|
//observe the data - there are something clearly "wrong" in there - mostly, should we consider the stats on pitch when the confidence is low?
|
||||||
|
~pitches.plot(separately: true)
|
||||||
|
|
||||||
|
//let's check the loudness
|
||||||
|
FluidBufLoudness.process(s,b,features: ~loud)
|
||||||
|
FluidBufStats.process(s,~loud,stats:~stats)
|
||||||
|
~stats.getn(0,14,{|x|x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
|
||||||
|
~loud.plot(separately: true)
|
||||||
|
|
||||||
|
//it seems the loudness of the noisy section will not help us reject the silence. But let's try
|
||||||
|
FluidBufScale.process(s,~loud,numChans: 1,destination: ~scaled,inputLow: -60,inputHigh: -20)
|
||||||
|
FluidBufStats.process(s,~pitches, stats:~stats,weights: ~scaled)
|
||||||
|
~stats.getn(0,14,{|x|~loudnessWeighedPitchStats = x; x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
//not much difference but let's listen
|
||||||
|
|
||||||
|
//average pitch
|
||||||
|
c = {SinOsc.ar(~brutePitchStats[0],mul: 0.05)}.play
|
||||||
|
//compare with the source
|
||||||
|
b.play
|
||||||
|
c.free
|
||||||
|
//loudness-weighted average
|
||||||
|
c = {SinOsc.ar(~loudnessWeighedPitchStats[0],mul: 0.05)}.play
|
||||||
|
//compare with the source
|
||||||
|
b.play
|
||||||
|
c.free
|
||||||
|
//hmmm, worse! That is because we did remove the low amplitude skewing to wards the default value (high) which was balancing our noisy peak with low pitch and low pitch confidence...
|
||||||
|
|
||||||
|
//let's instead weight against the pitch confidence, first applying a threshold to so we pull down any middle value we want to ignore
|
||||||
|
FluidBufThresh.process(s, ~pitches, startChan: 1, numChans: 1, destination: ~scaled, threshold: 0.8)
|
||||||
|
FluidBufStats.process(s,~pitches, stats:~stats,weights: ~scaled)
|
||||||
|
~stats.getn(0,14,{|x|~confidenceWeighedPitchStats = x;x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
|
||||||
|
//let's listen
|
||||||
|
c = {SinOsc.ar(~confidenceWeighedPitchStats[0],mul: 0.05)}.play
|
||||||
|
//compare with the source
|
||||||
|
b.play
|
||||||
|
c.free
|
||||||
|
// much better! it is even better when we move the threshold above but 0.8 confidence is quite high... If we look at our stats we see that there are still minima in the low hundreds, and maxima in the very top...These must be statistically far enough and few enough just to mess a bit our stats, so let's use the inter-quantile range to first remove them then compute the stats.
|
||||||
|
FluidBufStats.process(s,~pitches, stats:~stats,weights: ~scaled,outliersCutoff: 1.5)
|
||||||
|
~stats.getn(0,14,{|x|~confidenceWeightedPitchIQRStats = x;x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
//now that is impressive!
|
||||||
|
c = {SinOsc.ar(~confidenceWeightedPitchIQRStats[0],mul: 0.05)}.play
|
||||||
|
b.play
|
||||||
|
c.free
|
||||||
|
|
||||||
|
//for completion, here is just with rejection of outliers - not as good, but a decent second best!
|
||||||
|
FluidBufStats.process(s,~pitches, stats:~stats,outliersCutoff: 1.5)
|
||||||
|
~stats.getn(0,14,{|x|~pitchIQRStats = x;x.reshape(7,2).do{|y| "%\t\t\t%\n".postf(y[0].round(0.1),y[1].round(0.01))}})
|
||||||
|
c = {SinOsc.ar(~pitchIQRStats[0],mul: 0.05)}.play
|
||||||
|
b.play
|
||||||
|
c.free
|
||||||
@ -0,0 +1,353 @@
|
|||||||
|
// here we will define a process that creates and populates a series of parallel dataset, one of each 'feature-space' that we can then eventually manipulate more easily than individual dimensions.
|
||||||
|
|
||||||
|
// define a few datasets
|
||||||
|
(
|
||||||
|
~pitchDS = FluidDataSet(s);
|
||||||
|
~loudDS = FluidDataSet(s);
|
||||||
|
~mfccDS = FluidDataSet(s);
|
||||||
|
~durDS = FluidDataSet(s);
|
||||||
|
|
||||||
|
//define as many buffers as we have parallel voices/threads in the extractor processing (default is 4)
|
||||||
|
~pitchbuf = 4.collect{Buffer.new};
|
||||||
|
~statsPitchbuf = 4.collect{Buffer.new};
|
||||||
|
~weightPitchbuf = 4.collect{Buffer.new};
|
||||||
|
~flatPitchbuf = 4.collect{Buffer.new};
|
||||||
|
~loudbuf = 4.collect{Buffer.new};
|
||||||
|
~statsLoudbuf = 4.collect{Buffer.new};
|
||||||
|
~flatLoudbuf = 4.collect{Buffer.new};
|
||||||
|
~weightMFCCbuf = 4.collect{Buffer.new};
|
||||||
|
~mfccbuf = 4.collect{Buffer.new};
|
||||||
|
~statsMFCCbuf = 4.collect{Buffer.new};
|
||||||
|
~flatMFCCbuf = 4.collect{Buffer.new};
|
||||||
|
|
||||||
|
// here we instantiate a loader as per example 0
|
||||||
|
~loader = FluidLoadFolder(File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/");
|
||||||
|
|
||||||
|
// here we instantiate a further slicing step as per example 0
|
||||||
|
~slicer = FluidSliceCorpus({ |src,start,num,dest|
|
||||||
|
FluidBufOnsetSlice.kr(src ,start, num, indices:dest, metric: 9, threshold:0.2, minSliceLength: 17, blocking: 1)
|
||||||
|
});
|
||||||
|
|
||||||
|
// here we make the full processor building our 3 source datasets
|
||||||
|
~extractor = FluidProcessSlices({|src,start,num,data|
|
||||||
|
var label, voice, pitch, pitchweights, pitchstats, pitchflat, loud, statsLoud, flattenLoud, mfcc, mfccweights, mfccstats, mfccflat, writePitch, writeLoud;
|
||||||
|
label = data.key;
|
||||||
|
voice = data.value[\voice];
|
||||||
|
// the pitch computation is independant so it starts right away
|
||||||
|
pitch = FluidBufPitch.kr(src, startFrame:start, numFrames:num, numChans:1, features:~pitchbuf[voice], unit: 1, trig:1, blocking: 1);
|
||||||
|
pitchweights = FluidBufThresh.kr(~pitchbuf[voice], numChans: 1, startChan: 1, destination: ~weightPitchbuf[voice], threshold: 0.7, trig:Done.kr(pitch), blocking: 1);//pull down low conf
|
||||||
|
pitchstats = FluidBufStats.kr(~pitchbuf[voice], stats:~statsPitchbuf[voice], numDerivs: 1, weights: ~weightPitchbuf[voice], outliersCutoff: 1.5, trig:Done.kr(pitchweights), blocking: 1);
|
||||||
|
pitchflat = FluidBufFlatten.kr(~statsPitchbuf[voice],destination:~flatPitchbuf[voice],trig:Done.kr(pitchstats),blocking: 1);
|
||||||
|
writePitch = FluidDataSetWr.kr(~pitchDS,label, nil, ~flatPitchbuf[voice], Done.kr(pitchflat),blocking: 1);
|
||||||
|
// the mfcc need loudness to weigh, so let's start with that
|
||||||
|
loud = FluidBufLoudness.kr(src,startFrame:start, numFrames:num, numChans:1, features:~loudbuf[voice], trig:Done.kr(writePitch), blocking: 1);//here trig was 1
|
||||||
|
//we can now flatten and write Loudness in its own trigger tree
|
||||||
|
statsLoud = FluidBufStats.kr(~loudbuf[voice], stats:~statsLoudbuf[voice], numDerivs: 1, trig:Done.kr(loud), blocking: 1);
|
||||||
|
flattenLoud = FluidBufFlatten.kr(~statsLoudbuf[voice],destination:~flatLoudbuf[voice],trig:Done.kr(statsLoud),blocking: 1);
|
||||||
|
writeLoud = FluidDataSetWr.kr(~loudDS,label, nil, ~flatLoudbuf[voice], Done.kr(flattenLoud),blocking: 1);
|
||||||
|
//we can resume from the loud computation trigger
|
||||||
|
mfcc = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1,features:~mfccbuf[voice],trig:Done.kr(writeLoud),blocking: 1);//here trig was loud
|
||||||
|
mfccweights = FluidBufScale.kr(~loudbuf[voice],numChans: 1,destination: ~weightMFCCbuf[voice],inputLow: -70,inputHigh: 0, trig: Done.kr(mfcc), blocking: 1);
|
||||||
|
mfccstats = FluidBufStats.kr(~mfccbuf[voice], stats:~statsMFCCbuf[voice], startChan: 1, numDerivs: 1, weights: ~weightMFCCbuf[voice], trig:Done.kr(mfccweights), blocking: 1);//remove mfcc0 and weigh by loudness instead
|
||||||
|
mfccflat = FluidBufFlatten.kr(~statsMFCCbuf[voice],destination:~flatMFCCbuf[voice],trig:Done.kr(mfccstats),blocking: 1);
|
||||||
|
FluidDataSetWr.kr(~mfccDS,label, nil, ~flatMFCCbuf[voice], Done.kr(mfccflat),blocking: 1);
|
||||||
|
});
|
||||||
|
|
||||||
|
)
|
||||||
|
//////////////////////////////////////////////////////////////////////////
|
||||||
|
//loading process
|
||||||
|
|
||||||
|
//load and play to test if it is that quick - it is!
|
||||||
|
(
|
||||||
|
t = Main.elapsedTime;
|
||||||
|
~loader.play(s,action:{(Main.elapsedTime - t).postln;"Loaded".postln;{var start, stop; PlayBuf.ar(~loader.index[~loader.index.keys.asArray.last.asSymbol][\numchans],~loader.buffer,startPos: ~loader.index[~loader.index.keys.asArray.last.asSymbol][\bounds][0])}.play;});
|
||||||
|
)
|
||||||
|
|
||||||
|
//////////////////////////////////////////////////////////////////////////
|
||||||
|
// slicing process
|
||||||
|
|
||||||
|
// run the slicer
|
||||||
|
(
|
||||||
|
t = Main.elapsedTime;
|
||||||
|
~slicer.play(s,~loader.buffer,~loader.index,action:{(Main.elapsedTime - t).postln;"Slicing done".postln});
|
||||||
|
)
|
||||||
|
//slice count
|
||||||
|
~slicer.index.keys.size
|
||||||
|
|
||||||
|
//////////////////////////////////////////////////////////////////////////
|
||||||
|
// description process
|
||||||
|
|
||||||
|
// run the descriptor extractor (errors will be given, this is normal: the pitch conditions are quite exacting and therefore many slices are not valid)
|
||||||
|
(
|
||||||
|
t = Main.elapsedTime;
|
||||||
|
~extractor.play(s,~loader.buffer,~slicer.index,action:{(Main.elapsedTime - t).postln;"Features done".postln});
|
||||||
|
)
|
||||||
|
|
||||||
|
// make a dataset of durations for querying that too (it could have been made in the process loop, but hey, we have dictionaries we can manipulate too!)
|
||||||
|
(
|
||||||
|
~dict = Dictionary.new;
|
||||||
|
~temp = ~slicer.index.collect{ |k| [k[\bounds][1] - k[\bounds][0]]};
|
||||||
|
~dict.add(\data -> ~temp);
|
||||||
|
~dict.add(\cols -> 1);
|
||||||
|
~durDS.load(~dict)
|
||||||
|
)
|
||||||
|
|
||||||
|
//////////////////////////////////////////////////////////////////////////
|
||||||
|
// manipulating and querying the data
|
||||||
|
|
||||||
|
~pitchDS.print;
|
||||||
|
~loudDS.print;
|
||||||
|
~mfccDS.print;
|
||||||
|
~durDS.print;
|
||||||
|
|
||||||
|
///////////////////////////////////////////////////////
|
||||||
|
//reduce the MFCC timbral space stats (many potential ways to explore here... - 2 are provided to compare, with and without the derivatives before running a dimension reduction)
|
||||||
|
~tempDS = FluidDataSet(s);
|
||||||
|
|
||||||
|
~query = FluidDataSetQuery(s);
|
||||||
|
~query.addRange(0,24);//add only means and stddev of the 12 coeffs...
|
||||||
|
~query.addRange((7*12),24);// and the same stats of the first derivative (moving 7 stats x 12 mfccs to the right)
|
||||||
|
~query.transform(~mfccDS, ~tempDS);
|
||||||
|
|
||||||
|
//check that you end up with the expected 48 dimensions
|
||||||
|
~tempDS.print;
|
||||||
|
|
||||||
|
// standardizing before the PCA, as argued here:
|
||||||
|
// https://scikit-learn.org/stable/auto_examples/preprocessing/plot_scaling_importance.html
|
||||||
|
~stan = FluidStandardize(s);
|
||||||
|
~stanDS = FluidDataSet(s);
|
||||||
|
~stan.fitTransform(~tempDS,~stanDS)
|
||||||
|
|
||||||
|
//shrinking A: using 2 stats on the values, and 2 stats on the redivative (12 x 2 x 2 = 48 dim)
|
||||||
|
~pca = FluidPCA(s,4);//shrink to 4 dimensions
|
||||||
|
~timbreDSd = FluidDataSet(s);
|
||||||
|
~pca.fitTransform(~stanDS,~timbreDSd,{|x|x.postln;})//accuracy
|
||||||
|
|
||||||
|
//shrinking B: using only the 2 stats on the values
|
||||||
|
~query.clear;
|
||||||
|
~query.addRange(0,24);//add only means and stddev of the 12 coeffs...
|
||||||
|
~query.transform(~stanDS, ~tempDS);//retrieve the values from the already standardized dataset
|
||||||
|
|
||||||
|
//check you have the expected 24 dimensions
|
||||||
|
~tempDS.print;
|
||||||
|
|
||||||
|
//keep its own PCA so we can keep the various states for later transforms
|
||||||
|
~pca2 = FluidPCA(s,4);//shrink to 4 dimensions
|
||||||
|
~timbreDS = FluidDataSet(s);
|
||||||
|
~pca2.fitTransform(~tempDS,~timbreDS,{|x|x.postln;})//accuracy
|
||||||
|
|
||||||
|
// comparing NN for fun
|
||||||
|
~targetDSd = Buffer(s)
|
||||||
|
~targetDS = Buffer(s)
|
||||||
|
~tree = FluidKDTree(s,5)
|
||||||
|
|
||||||
|
// you can run this a few times to have fun
|
||||||
|
(
|
||||||
|
~target = ~slicer.index.keys.asArray.scramble.[0].asSymbol;
|
||||||
|
~timbreDSd.getPoint(~target, ~targetDSd);
|
||||||
|
~timbreDS.getPoint(~target, ~targetDS);
|
||||||
|
)
|
||||||
|
|
||||||
|
~tree.fit(~timbreDSd,{~tree.kNearest(~targetDSd,{|x|~nearestDSd = x.postln;})})
|
||||||
|
~tree.fit(~timbreDS,{~tree.kNearest(~targetDS,{|x|~nearestDS = x.postln;})})
|
||||||
|
|
||||||
|
// play them in a row
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~nearestDSd[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~nearestDSd[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~nearestDS[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~nearestDS[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
///////////////////////////////////////////////////////
|
||||||
|
// compositing queries - defining a target and analysing it
|
||||||
|
|
||||||
|
~globalDS = FluidDataSet(s);
|
||||||
|
|
||||||
|
// define a source
|
||||||
|
~targetsound = Buffer.read(s,File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/Tremblay-ASWINE-ScratchySynth-M.wav",42250,44100);
|
||||||
|
~targetsound.play
|
||||||
|
|
||||||
|
// analyse it as above, using voice 0 in the arrays of buffer to store the info
|
||||||
|
(
|
||||||
|
{
|
||||||
|
var label, voice, pitch, pitchweights, pitchstats, pitchflat, loud, statsLoud, flattenLoud, mfcc, mfccweights, mfccstats, mfccflat, writePitch, writeLoud;
|
||||||
|
pitch = FluidBufPitch.kr(~targetsound, numChans:1, features:~pitchbuf[0], unit: 1, trig:1, blocking: 1);
|
||||||
|
pitchweights = FluidBufThresh.kr(~pitchbuf[0], numChans: 1, startChan: 1, destination: ~weightPitchbuf[0], threshold: 0.7, trig:Done.kr(pitch), blocking: 1);
|
||||||
|
pitchstats = FluidBufStats.kr(~pitchbuf[0], stats:~statsPitchbuf[0], numDerivs: 1, weights: ~weightPitchbuf[0], outliersCutoff: 1.5, trig:Done.kr(pitchweights), blocking: 1);
|
||||||
|
pitchflat = FluidBufFlatten.kr(~statsPitchbuf[0],destination:~flatPitchbuf[0],trig:Done.kr(pitchstats),blocking: 1);
|
||||||
|
loud = FluidBufLoudness.kr(~targetsound, numChans:1, features:~loudbuf[0], trig:Done.kr(pitchflat), blocking: 1);
|
||||||
|
statsLoud = FluidBufStats.kr(~loudbuf[0], stats:~statsLoudbuf[0], numDerivs: 1, trig:Done.kr(loud), blocking: 1);
|
||||||
|
flattenLoud = FluidBufFlatten.kr(~statsLoudbuf[0],destination:~flatLoudbuf[0],trig:Done.kr(statsLoud),blocking: 1);
|
||||||
|
mfcc = FluidBufMFCC.kr(~targetsound,numChans:1,features:~mfccbuf[0],trig:Done.kr(flattenLoud),blocking: 1);
|
||||||
|
mfccweights = FluidBufScale.kr(~loudbuf[0],numChans: 1,destination: ~weightMFCCbuf[0],inputLow: -70,inputHigh: 0, trig: Done.kr(mfcc), blocking: 1);
|
||||||
|
mfccstats = FluidBufStats.kr(~mfccbuf[0], stats:~statsMFCCbuf[0], startChan: 1, numDerivs: 1, weights: ~weightMFCCbuf[0], trig:Done.kr(mfccweights), blocking: 1);
|
||||||
|
mfccflat = FluidBufFlatten.kr(~statsMFCCbuf[0],destination:~flatMFCCbuf[0],trig:Done.kr(mfccstats),blocking: 1);
|
||||||
|
FreeSelf.kr(Done.kr(mfccflat));
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
// a first query - length and pitch
|
||||||
|
~query.clear
|
||||||
|
~query.filter(0,"<",44100+22050)//column0 a little smaller than our source
|
||||||
|
~query.and(0,">", 44100-22050)//also as far as a little larger than the source
|
||||||
|
~query.transformJoin(~durDS, ~pitchDS, ~tempDS); //this passes to ~tempDS only the points that have the same label than those in ~durDS that satisfy the condition. No column were added so nothing from ~durDS is copied
|
||||||
|
|
||||||
|
// print to see how many slices (rows) we have
|
||||||
|
~tempDS.print
|
||||||
|
|
||||||
|
// further conditions to assemble the query
|
||||||
|
~query.clear
|
||||||
|
~query.filter(11,">",0.7)//column11 (median of pitch confidence) larger than 0.7
|
||||||
|
~query.addRange(0,4) //copy only mean and stddev of pitch and confidence
|
||||||
|
~query.transform(~tempDS, ~globalDS); // pass it to the final search
|
||||||
|
|
||||||
|
// print to see that we have less items, with only their pitch
|
||||||
|
~globalDS.print
|
||||||
|
|
||||||
|
// compare knearest on both globalDS and tempDS
|
||||||
|
// assemble search buffer
|
||||||
|
~targetPitch = Buffer(s)
|
||||||
|
FluidBufCompose.process(s, ~flatPitchbuf[0],numFrames: 4,destination: ~targetPitch)
|
||||||
|
|
||||||
|
// feed the trees
|
||||||
|
~tree.fit(~pitchDS,{~tree.kNearest(~flatPitchbuf[0],{|x|~nearestA = x.postln;})}) //all the points with all the stats
|
||||||
|
~tree.fit(~globalDS,{~tree.kNearest(~targetPitch,{|x|~nearestB = x.postln;})}) //just the points with the right lenght conditions, with the curated stats
|
||||||
|
|
||||||
|
// play them in a row
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~nearestA[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~nearestA[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
// with our duration limits, strange results appear eventually
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~nearestB[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~nearestB[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
///////////////////////////////////////////////////////
|
||||||
|
// compositing queries to weigh - defining a target and analysing it
|
||||||
|
|
||||||
|
// make sure to define and describe the source above (lines 178 to 201)
|
||||||
|
|
||||||
|
// let's make normalised versions of the 3 datasets, keeping the normalisers separate to query later
|
||||||
|
~loudDSn = FluidDataSet(s);
|
||||||
|
~pitchDSn = FluidDataSet(s);
|
||||||
|
~timbreDSn = FluidDataSet(s);
|
||||||
|
|
||||||
|
~normL = FluidNormalize(s)
|
||||||
|
~normP = FluidNormalize(s)
|
||||||
|
~normT = FluidNormalize(s)
|
||||||
|
|
||||||
|
~normL.fitTransform(~loudDS, ~loudDSn);
|
||||||
|
~normP.fitTransform(~pitchDS, ~pitchDSn);
|
||||||
|
~normT.fitTransform(~timbreDSd, ~timbreDSn);
|
||||||
|
|
||||||
|
// let's assemble these datasets
|
||||||
|
~query.clear
|
||||||
|
~query.addRange(0,4)
|
||||||
|
~query.transformJoin(~pitchDSn,~timbreDSn, ~tempDS) //appends 4 dims of pitch to 4 dims of timbre
|
||||||
|
~query.transformJoin(~loudDSn, ~tempDS, ~globalDS) // appends 4 dims of loud to the 8 dims above
|
||||||
|
|
||||||
|
~globalDS.print//12 dim: 4 timbre, 4 pitch, 4 loud, all normalised between 0 and 1
|
||||||
|
~globalDS.write("/tmp/test12dims.json") // write to file to look at the values
|
||||||
|
|
||||||
|
// let's assemble the query
|
||||||
|
// first let's normalise our target descriptors
|
||||||
|
(
|
||||||
|
~targetPitch = Buffer(s);
|
||||||
|
~targetLoud = Buffer(s);
|
||||||
|
~targetMFCC = Buffer(s);
|
||||||
|
~targetMFCCs = Buffer(s);
|
||||||
|
~targetMFCCsp = Buffer(s);
|
||||||
|
~targetTimbre = Buffer(s);
|
||||||
|
~targetAll= Buffer(s);
|
||||||
|
)
|
||||||
|
|
||||||
|
~normL.transformPoint(~flatLoudbuf[0], ~targetLoud) //normalise the loudness (all dims)
|
||||||
|
~normP.transformPoint(~flatPitchbuf[0], ~targetPitch) //normalise the pitch (all dims)
|
||||||
|
FluidBufCompose.process(s,~flatMFCCbuf[0],numFrames: 24,destination: ~targetMFCC) // copy the process of dimension reduction above
|
||||||
|
FluidBufCompose.process(s,~flatMFCCbuf[0],startFrame: (7*12), numFrames: 24, destination: ~targetMFCC,destStartFrame: 24) //keeping 48 dims
|
||||||
|
~stan.transformPoint(~targetMFCC,~targetMFCCs) //standardize with the same coeffs
|
||||||
|
~pca.transformPoint(~targetMFCCs, ~targetMFCCsp) //then down to 4
|
||||||
|
~normT.transformPoint(~targetMFCCsp, ~targetTimbre) //then normalised
|
||||||
|
FluidBufCompose.process(s, ~targetTimbre,destination: ~targetAll) // assembling the single query
|
||||||
|
FluidBufCompose.process(s, ~targetPitch, numFrames: 4, destination: ~targetAll, destStartFrame: 4) // copying the 4 stats of pitch we care about
|
||||||
|
FluidBufCompose.process(s, ~targetLoud, numFrames: 4, destination: ~targetAll, destStartFrame: 8) // same for loudness
|
||||||
|
//check the sanity
|
||||||
|
~targetAll.query
|
||||||
|
|
||||||
|
// now let's see which is nearest that point
|
||||||
|
~tree.fit(~globalDS,{~tree.kNearest(~targetAll,{|x|~nearest = x.postln;})}) //just the points with the right lenght conditions, with the curated stats
|
||||||
|
|
||||||
|
// play them in a row
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
5.do{|i|
|
||||||
|
var dur;
|
||||||
|
v = ~slicer.index[~nearest[i].asSymbol];
|
||||||
|
dur = (v[\bounds][1] - v[\bounds][0]) / s.sampleRate;
|
||||||
|
{BufRd.ar(v[\numchans],~loader.buffer,Line.ar(v[\bounds][0],v[\bounds][1],dur, doneAction: 2))}.play;
|
||||||
|
~nearest[i].postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
// to change the relative weight of each dataset, let's change the normalisation range. Larger ranges will mean larger distance, and therefore less importance for that parameter.
|
||||||
|
// for instance to downplay pitch, let's make it larger by a factor of 10 around the center of 0.5
|
||||||
|
~normP.max = 5.5
|
||||||
|
~normP.min = -4.5
|
||||||
|
~normP.fitTransform(~pitchDS, ~pitchDSn);
|
||||||
|
// here we can re-run just the part that composites the pitch
|
||||||
|
~normP.transformPoint(~flatPitchbuf[0], ~targetPitch) //normalise the pitch (all dims)
|
||||||
|
FluidBufCompose.process(s, ~targetPitch, numFrames: 4, destination: ~targetAll, destStartFrame: 4) // copying the 4 stats of pitch we care about
|
||||||
|
|
||||||
|
//see that the middle 4 values are much larger in range
|
||||||
|
~targetAll.getn(0,12,{|x|x.postln;})
|
||||||
|
|
||||||
|
// let's re-assemble these datasets
|
||||||
|
~query.transformJoin(~pitchDSn,~timbreDSn, ~tempDS) //appends 4 dims of pitch to 4 dims of timbre
|
||||||
|
~query.transformJoin(~loudDSn, ~tempDS, ~globalDS) // appends 4 dims of loud to the 8 dims above
|
||||||
|
|
||||||
|
// now let's see which is nearest that point
|
||||||
|
~tree.fit(~globalDS,{~tree.kNearest(~targetAll,{|x|~nearest = x.postln;})}) //just the points with the right lenght conditions, with the curated stats
|
||||||
|
|
||||||
|
///////////////////////////////////////////////
|
||||||
|
// todo: segment then query musaik
|
||||||
@ -0,0 +1,230 @@
|
|||||||
|
// load a source folder
|
||||||
|
~loader = FluidLoadFolder(File.realpath(FluidBufPitch.class.filenameSymbol).dirname.withTrailingSlash ++ "../AudioFiles/");
|
||||||
|
~loader.play;
|
||||||
|
|
||||||
|
//slightly oversegment with novelty
|
||||||
|
//segments should still make sense but might cut a few elements in 2 or 3
|
||||||
|
~slicer = FluidSliceCorpus({ |src,start,num,dest| FluidBufNoveltySlice.kr(src,start,num,indices:dest, feature: 1, kernelSize: 29, threshold: 0.1, filterSize: 5, hopSize: 128, blocking: 1)});
|
||||||
|
~slicer.play(s, ~loader.buffer,~loader.index);
|
||||||
|
|
||||||
|
//test the segmentation by looping them
|
||||||
|
(
|
||||||
|
~originalindices = Array.newFrom(~slicer.index.keys).sort{|a,b| ~slicer.index[a][\bounds][0]< ~slicer.index[b][\bounds][0]}.collect{|x|~slicer.index[x][\bounds]};
|
||||||
|
d = {arg start=0, end = 44100;
|
||||||
|
BufRd.ar(1, ~loader.buffer, Phasor.ar(0,1,start,end,start),0,1);
|
||||||
|
}.play;
|
||||||
|
|
||||||
|
w = Window.new(bounds:Rect(100,100,400,60)).front;
|
||||||
|
b = ControlSpec(0, ~originalindices.size - 1, \linear, 1); // min, max, mapping, step
|
||||||
|
c = StaticText(w, Rect(340, 20, 50, 20)).align_(\center);
|
||||||
|
a = Slider(w, Rect(10, 20, 330, 20))
|
||||||
|
.action_({var val = b.map(a.value).asInteger;
|
||||||
|
c.string_(val.asString);
|
||||||
|
d.set(\start,~originalindices[val][0], \end, ~originalindices[val][1]);
|
||||||
|
});
|
||||||
|
)
|
||||||
|
|
||||||
|
//analyse each segment with 20 MFCCs in a dataset and spectralshapes in another one
|
||||||
|
(
|
||||||
|
~featuresbuf = 4.collect{Buffer.new};
|
||||||
|
~statsbuf = 4.collect{Buffer.new};
|
||||||
|
~flatbuf = 4.collect{Buffer.new};
|
||||||
|
~slicesMFCC = FluidDataSet(s);
|
||||||
|
~slicesShapes = FluidDataSet(s);
|
||||||
|
~extractor = FluidProcessSlices({|src,start,num,data|
|
||||||
|
var features, stats, writer, flatten,mfccBuf, statsBuf, flatBuf, label, voice;
|
||||||
|
label = data.key;
|
||||||
|
voice = data.value[\voice];
|
||||||
|
features = FluidBufMFCC.kr(src,startFrame:start,numFrames:num,numChans:1, numCoeffs: 20, features:~featuresbuf[voice],trig:1,blocking: 1);
|
||||||
|
stats = FluidBufStats.kr(~featuresbuf[voice],stats:~statsbuf[voice],trig:Done.kr(features),blocking: 1);
|
||||||
|
flatten = FluidBufFlatten.kr(~statsbuf[voice],destination:~flatbuf[voice],trig:Done.kr(stats),blocking: 1);
|
||||||
|
writer = FluidDataSetWr.kr(~slicesMFCC,label, nil, ~flatbuf[voice], Done.kr(flatten),blocking: 1);
|
||||||
|
features = FluidBufSpectralShape.kr(src,startFrame:start,numFrames:num,numChans:1, features:~featuresbuf[voice],trig:Done.kr(writer),blocking: 1);
|
||||||
|
stats = FluidBufStats.kr(~featuresbuf[voice],stats:~statsbuf[voice],trig:Done.kr(features),blocking: 1);
|
||||||
|
flatten = FluidBufFlatten.kr(~statsbuf[voice],destination:~flatbuf[voice],trig:Done.kr(stats),blocking: 1);
|
||||||
|
writer = FluidDataSetWr.kr(~slicesShapes,label, nil, ~flatbuf[voice], Done.kr(flatten),blocking: 1);
|
||||||
|
});
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
t = Main.elapsedTime;
|
||||||
|
~extractor.play(s,~loader.buffer, ~slicer.index, action:{(Main.elapsedTime - t).postln;"Analysis done".postln});
|
||||||
|
)
|
||||||
|
|
||||||
|
~originalindices.size
|
||||||
|
~slicesMFCC.print
|
||||||
|
~slicesShapes.print
|
||||||
|
|
||||||
|
//run a window over consecutive segments, forcing them in 2 classes, and merging the consecutive segments of similar class
|
||||||
|
//we overlap the analysis with the last (original) slice to check for continuity
|
||||||
|
(
|
||||||
|
~winSize = 4;//the number of consecutive items to split in 2 classes;
|
||||||
|
~curated = FluidDataSet(s);
|
||||||
|
~query = FluidDataSetQuery(s);
|
||||||
|
~stan = FluidStandardize(s);
|
||||||
|
~kmeans = FluidKMeans(s,2,1000);
|
||||||
|
~windowDS = FluidDataSet(s);
|
||||||
|
~windowLS = FluidLabelSet(s);
|
||||||
|
)
|
||||||
|
|
||||||
|
//curate stats (MFCCs)
|
||||||
|
~query.clear
|
||||||
|
~query.addRange((0*20)+1,10);
|
||||||
|
~query.transform(~slicesMFCC,~curated);
|
||||||
|
|
||||||
|
//OR
|
||||||
|
//curate stats (moments)
|
||||||
|
~query.clear
|
||||||
|
~query.addRange(0,3);
|
||||||
|
~query.transform(~slicesShapes,~curated);
|
||||||
|
|
||||||
|
//OR
|
||||||
|
//curate both
|
||||||
|
~query.clear
|
||||||
|
~query.addColumn(0);//add col 0 (mean of mfcc0 as 'loudness')
|
||||||
|
~query.transform(~slicesMFCC,~curated);//mfcc0 as loudness
|
||||||
|
~query.clear;
|
||||||
|
~query.addRange(0,3);//add some spectral moments
|
||||||
|
~query.transformJoin(~slicesShapes, ~curated, ~curated);//join in centroids
|
||||||
|
|
||||||
|
//optionally standardize in place
|
||||||
|
~stan.fitTransform(~curated, ~curated);
|
||||||
|
|
||||||
|
~curated.print
|
||||||
|
|
||||||
|
//retrieve the dataset as dictionary
|
||||||
|
~curated.dump{|x|~sliceDict = x;};
|
||||||
|
|
||||||
|
~originalslicesarray = ~originalindices.flop[0] ++ ~loader.buffer.numFrames
|
||||||
|
~orginalkeys = Array.newFrom(~slicer.index.keys).sort{|a,b| ~slicer.index[a][\bounds][0]< ~slicer.index[b][\bounds][0]}
|
||||||
|
|
||||||
|
//the windowed function, recursive to deal with sync dependencies
|
||||||
|
(
|
||||||
|
~windowedFunct = {arg head, winSize, overlap;
|
||||||
|
var nbass = [], assignments = [], tempDict = ();
|
||||||
|
//check the size of everything to not overrun
|
||||||
|
winSize = (~originalslicesarray.size - head).min(winSize);
|
||||||
|
//copy the items to a subdataset from hear
|
||||||
|
winSize.do{|i|
|
||||||
|
tempDict.put((i.asString), ~sliceDict["data"][(~orginalkeys[(i+head)]).asString]);//here one could curate which stats to take
|
||||||
|
// "whichslices:%\n".postf(i+head);
|
||||||
|
};
|
||||||
|
~windowDS.load(Dictionary.newFrom([\cols, ~sliceDict["cols"].asInteger, \data, tempDict]), action: {
|
||||||
|
// "% - loaded\n".postf(head);
|
||||||
|
|
||||||
|
//kmeans 2 and retrieve ordered array of class assignations
|
||||||
|
~kmeans.fitPredict(~windowDS, ~windowLS, action: {|x|
|
||||||
|
nbass = x;
|
||||||
|
// "% - fitted1: ".postf(head); nbass.postln;
|
||||||
|
|
||||||
|
if (nbass.includes(winSize.asFloat), {
|
||||||
|
~kmeans.fitPredict(~windowDS, ~windowLS, {|x|
|
||||||
|
nbass = x;
|
||||||
|
// "% - fitted2: ".postf(head); nbass.postln;
|
||||||
|
if (nbass.includes(winSize.asFloat), {
|
||||||
|
~kmeans.fitPredict(~windowDS, ~windowLS, {|x|
|
||||||
|
nbass = x;
|
||||||
|
// "% - fitted3: ".postf(head); nbass.postln;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
~windowLS.dump{|x|
|
||||||
|
var assignments = x.at("data").asSortedArray.flop[1].flatten;
|
||||||
|
"% - assigned ".postf(head);
|
||||||
|
|
||||||
|
assignments.postln;
|
||||||
|
|
||||||
|
(winSize-1).do{|i|
|
||||||
|
if (assignments[i+1] != assignments[i], {
|
||||||
|
~newindices= ~newindices ++ (~originalslicesarray[head+i+1]).asInteger;
|
||||||
|
~newkeys = ~newkeys ++ (~orginalkeys[head+i+1]);
|
||||||
|
});
|
||||||
|
|
||||||
|
};
|
||||||
|
//if we still have some frames to do, do them
|
||||||
|
if (((winSize + head) < ~originalslicesarray.size), {
|
||||||
|
"-----------------".postln;
|
||||||
|
~windowedFunct.value(head + winSize - overlap, winSize, overlap);
|
||||||
|
}, {~newindices = (~newindices ++ ~loader.buffer.numFrames); "done".postln;});//if we're done close the books
|
||||||
|
};
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
//the job
|
||||||
|
|
||||||
|
//test 1 - start at the begining, consider 4 items at a time, make 2 clusters, overlap 1
|
||||||
|
~newindices = [~originalslicesarray[0]]; ~newkeys = [~orginalkeys[0]];
|
||||||
|
~windowedFunct.value(0, 4, 1);
|
||||||
|
|
||||||
|
//OPTIONAL: try again with more clusters (3) and a wider window (6) and more overlap (2)
|
||||||
|
~newindices = [~originalslicesarray[0]]; ~newkeys = [~orginalkeys[0]];
|
||||||
|
~kmeans.numClusters = 3;
|
||||||
|
~windowedFunct.value(0,6,2);
|
||||||
|
|
||||||
|
//compare sizes
|
||||||
|
~orginalkeys.size
|
||||||
|
~newkeys.size;
|
||||||
|
|
||||||
|
//export to reaper
|
||||||
|
(
|
||||||
|
//first create a new file that ends with rpp - it will overwrite if the file exists
|
||||||
|
f = File.new("/tmp/clusteredslices-" ++ Date.getDate.stamp ++".rpp","w+");
|
||||||
|
|
||||||
|
if (f.isOpen , {
|
||||||
|
var path, prevpath ="", sr, count, dur, realDur;
|
||||||
|
//write the header
|
||||||
|
f.write("<REAPER_PROJECT 0.1 \"5.99/OSX64\" 1603037150\n\n");
|
||||||
|
|
||||||
|
//a first track with the originalslicearray
|
||||||
|
//write the track header
|
||||||
|
f.write("<TRACK\nNAME \"novelty output\"\n");
|
||||||
|
// iterate through the items in the track
|
||||||
|
~orginalkeys.do{|v, i|
|
||||||
|
path = ~slicer.index[v][\path];
|
||||||
|
if (path != prevpath, {
|
||||||
|
sr = ~slicer.index[v][\sr];
|
||||||
|
prevpath = path;
|
||||||
|
count = 0;
|
||||||
|
});
|
||||||
|
dur = ~originalslicesarray[i+1] - ~originalslicesarray[i];
|
||||||
|
if ( dur > 0, {
|
||||||
|
f.write("<ITEM\nPOSITION " ++ (~originalslicesarray[i] / sr) ++ "\nLENGTH " ++ (dur / sr) ++ "\nNAME \"" ++ v ++ "\"\nSOFFS " ++ (count / sr) ++ "\n<SOURCE WAVE\nFILE \"" ++ path ++ "\"\n>\n>\n");
|
||||||
|
});
|
||||||
|
count = count + dur;
|
||||||
|
};
|
||||||
|
//write the track footer
|
||||||
|
f.write(">\n");
|
||||||
|
|
||||||
|
// a second track with the new ~indices
|
||||||
|
prevpath = "";
|
||||||
|
//write the track header
|
||||||
|
f.write("<TRACK\nNAME \"clustered output\"\n");
|
||||||
|
// iterate through the items in the track
|
||||||
|
~newkeys.do{|v, i|
|
||||||
|
dur = ~newindices[i+1] - ~newindices[i];
|
||||||
|
if (dur > 0, {
|
||||||
|
path = ~slicer.index[v][\path];
|
||||||
|
if (path != prevpath, {
|
||||||
|
sr = ~slicer.index[v][\sr];
|
||||||
|
prevpath = path;
|
||||||
|
count = 0;
|
||||||
|
});
|
||||||
|
f.write("<ITEM\nPOSITION " ++ (~newindices[i] / sr) ++ "\nLENGTH " ++ (dur / sr) ++ "\nNAME \"" ++ v ++ "\"\nSOFFS " ++ (count / sr) ++ "\n<SOURCE WAVE\nFILE \"" ++ path ++ "\"\n>\n>\n");
|
||||||
|
count = count + dur;
|
||||||
|
});
|
||||||
|
};
|
||||||
|
//write the track footer
|
||||||
|
f.write(">\n");
|
||||||
|
|
||||||
|
//write the footer
|
||||||
|
f.write(">\n");
|
||||||
|
f.close;
|
||||||
|
});
|
||||||
|
)
|
||||||
|
|
||||||
|
(then open the time-stamped reaper file clusterdslice in the folder tmp)
|
||||||
|
"open /tmp".unixCmd
|
||||||
@ -0,0 +1,324 @@
|
|||||||
|
// Lookup in a KDTree using melbands
|
||||||
|
// Demonstration of a massive parallel approach to batch process swiftly in SC
|
||||||
|
|
||||||
|
s.options.numBuffers = 16384 //The method below for doing the analysus quickly needs lots of buffers
|
||||||
|
s.reboot
|
||||||
|
|
||||||
|
//Step 0: Make a corpus
|
||||||
|
|
||||||
|
//We'll jam together some random flucoma sounds for illustrative purposes
|
||||||
|
//Get some files
|
||||||
|
(
|
||||||
|
~audioexamples_path = File.realpath(FluidBufMelBands.class.filenameSymbol).dirname.withTrailingSlash +/+ "../AudioFiles/*.wav";
|
||||||
|
~allTheSounds = SoundFile.collect(~audioexamples_path);
|
||||||
|
~testSounds = ~allTheSounds;
|
||||||
|
~testSounds.do{|f| f.path.postln}; // print out the files that are loaded
|
||||||
|
)
|
||||||
|
|
||||||
|
//Load the files into individual buffers:
|
||||||
|
(
|
||||||
|
~audio_buffers = ~testSounds.collect{|f|
|
||||||
|
Buffer.readChannel(
|
||||||
|
server: s,
|
||||||
|
path:f.path,
|
||||||
|
channels:[0],
|
||||||
|
action:{("Loaded" + f.path).postln;}
|
||||||
|
)
|
||||||
|
};
|
||||||
|
)
|
||||||
|
|
||||||
|
//Do a segmentation of each buffer, in parallel
|
||||||
|
(
|
||||||
|
fork{
|
||||||
|
~index_buffers = ~audio_buffers.collect{Buffer.new};
|
||||||
|
s.sync;
|
||||||
|
~count = ~audio_buffers.size;
|
||||||
|
~audio_buffers.do{|src,i|
|
||||||
|
FluidBufOnsetSlice.process(
|
||||||
|
server:s,
|
||||||
|
source:src,
|
||||||
|
indices:~index_buffers[i],
|
||||||
|
metric: 9,
|
||||||
|
threshold:0.2,
|
||||||
|
minSliceLength: 17,
|
||||||
|
action:{
|
||||||
|
(~testSounds[i].path ++ ":" + ~index_buffers[i].numFrames + "slices").postln;
|
||||||
|
~count = ~count - 1;
|
||||||
|
if(~count == 0){"Done slicing".postln};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
// we now have an array of index buffers, one per source buffer, each containing the segmentation points as a frame positions
|
||||||
|
// this allows us to make an array of sizes
|
||||||
|
~index_buffers.collect{|b| b.numFrames}.sum
|
||||||
|
|
||||||
|
//For each of these segments, let's make a datapoint using the mean melbands.
|
||||||
|
// There's a number of ways of skinning this cat w/r/t telling the server what to do, but here we want to minimize traffic between language and server, and also produce undertsandable code
|
||||||
|
|
||||||
|
//First, we'll grab the onset points as language-side arrays, then scroll through each slice getting the mean melbands
|
||||||
|
(
|
||||||
|
// - a dataset to keep the mean melbands in
|
||||||
|
~mels = FluidDataSet(s);
|
||||||
|
// - a dictionary to keep the slice points in for later playback
|
||||||
|
~slices = Dictionary();
|
||||||
|
//The code below (as well as needing lots of buffers), creates lots of threads and we need a big ass scheduling queue
|
||||||
|
~clock = TempoClock(queueSize:8192);
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
// Do the Mel analysis in a cunning parallel fashion
|
||||||
|
(
|
||||||
|
{
|
||||||
|
var counter, remaining;
|
||||||
|
var condition = Condition.new; // used to create a test condition to pause the routine ...
|
||||||
|
var index_arrays = Dictionary();
|
||||||
|
|
||||||
|
"Process started. Please wait.".postln;
|
||||||
|
|
||||||
|
~total_slice_count = ~index_buffers.collect{|b| b.numFrames}.sum + ~index_buffers.size; //we get an extra slice in buffer
|
||||||
|
~featurebuffers = ~total_slice_count.collect{Buffer.new}; // create a buffer per slice
|
||||||
|
|
||||||
|
//Make our dictionary FluidDataSet-shaped
|
||||||
|
~slices.put("cols",3);//[bufnum,start,end] for each slice
|
||||||
|
~slices.put("data",Dictionary());
|
||||||
|
|
||||||
|
//Collect each set of onsets into a language side array and store them in a dict
|
||||||
|
~index_buffers.do{|b,i| // iterate over the input buffer array
|
||||||
|
{
|
||||||
|
b.loadToFloatArray( // load to language side array
|
||||||
|
action:{|indices|
|
||||||
|
//Glue the first and last samples of the buffer on to the index list, and place in dictionary with the
|
||||||
|
//Buffer object as a key
|
||||||
|
|
||||||
|
index_arrays.put(~audio_buffers[i], Array.newFrom([0] ++ indices ++ (~audio_buffers[i].numFrames - 1)));
|
||||||
|
|
||||||
|
if(i==(~index_buffers.size-1)) {condition.unhang};
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}.fork(stackSize:~total_slice_count);
|
||||||
|
};
|
||||||
|
condition.hang; //Pause until all the callbacks above have completed
|
||||||
|
"Arrays loaded. Starting on the analysis, please wait.".postln;
|
||||||
|
|
||||||
|
//For each of these lists of points, we want to scroll over the indices in pairs and get some mel bands
|
||||||
|
counter = 0;
|
||||||
|
remaining = ~total_slice_count;
|
||||||
|
|
||||||
|
s.sync;
|
||||||
|
|
||||||
|
// now iterate over Dict and calc melbands
|
||||||
|
|
||||||
|
index_arrays.keysValuesDo{|buffer, indices|
|
||||||
|
indices.doAdjacentPairs{|start,end,num|
|
||||||
|
var analysis = Routine({|counter|
|
||||||
|
FluidBufMelBands.processBlocking(
|
||||||
|
server:s,
|
||||||
|
source:buffer,
|
||||||
|
startFrame:start,
|
||||||
|
numFrames:(end-1) - start,
|
||||||
|
features:~featurebuffers[counter],
|
||||||
|
action:{
|
||||||
|
remaining = remaining - 1;
|
||||||
|
if(remaining == 0) { ~numMelBands = ~featurebuffers[0].numChannels;condition.unhang };
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
~slices["data"].put(counter,[buffer.bufnum,start,end]);
|
||||||
|
|
||||||
|
//I'm spawning new threads to wait for the analysis callback from the server. The final callback will un-hang this thread
|
||||||
|
analysis.value(counter); //Done differently to other blocks because I need to pass in the value of counter
|
||||||
|
counter = counter + 1;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
condition.hang;
|
||||||
|
"Analysis of % slices done.\n".postf(~total_slice_count);
|
||||||
|
}.fork(clock:~clock);
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
// Run stats on each mel buffer
|
||||||
|
|
||||||
|
// create a stats buffer for each of the slices
|
||||||
|
~statsbuffers = ~total_slice_count.collect{Buffer.new}; // create n Slices buffers - to be filled with (40 mel bands * 7 stats)
|
||||||
|
|
||||||
|
// run stats on all the buffers
|
||||||
|
(
|
||||||
|
{
|
||||||
|
var remaining = ~total_slice_count;
|
||||||
|
~featurebuffers.do{|buffer,i|
|
||||||
|
FluidBufStats.processBlocking(
|
||||||
|
server:s,
|
||||||
|
source:buffer,
|
||||||
|
stats:~statsbuffers[i],
|
||||||
|
action:{
|
||||||
|
remaining = remaining - 1;
|
||||||
|
if(remaining == 0) { "done".postln};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
};
|
||||||
|
}.fork(clock:~clock);
|
||||||
|
)
|
||||||
|
|
||||||
|
~featurebuffers.size
|
||||||
|
|
||||||
|
//Flatten each stats buffer into a data point
|
||||||
|
~flatbuffers = ~total_slice_count.collect{Buffer.new};// create an array of flatten stats
|
||||||
|
|
||||||
|
(
|
||||||
|
{
|
||||||
|
var remaining = ~total_slice_count;
|
||||||
|
~statsbuffers.do{|buffer,i|
|
||||||
|
FluidBufFlatten.processBlocking(
|
||||||
|
server:s,
|
||||||
|
source:buffer,
|
||||||
|
destination:~flatbuffers[i],
|
||||||
|
action:{
|
||||||
|
remaining = remaining - 1;
|
||||||
|
if(remaining == 0) { "Got flat points".postln; };
|
||||||
|
}
|
||||||
|
);
|
||||||
|
};
|
||||||
|
}.fork(clock:~clock);
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
//Ram each flat point into a data set. At this point we have more data than we need, but we'll prune in moment
|
||||||
|
(
|
||||||
|
"Filling dataset".postln;
|
||||||
|
~mels.clear;
|
||||||
|
|
||||||
|
// ~flatbuffers = flatbuffers;
|
||||||
|
~flatbuffers.do{|buf,i|
|
||||||
|
~mels.addPoint(i,buf);
|
||||||
|
};
|
||||||
|
|
||||||
|
~mels.print;
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
// Prune & standardise
|
||||||
|
|
||||||
|
// Tidy up the temp arrays of buffers we do not need anymore
|
||||||
|
|
||||||
|
(
|
||||||
|
"Cleaning".postln;
|
||||||
|
(~featurebuffers ++ ~statsbuffers ++ ~flatbuffers).do{|buf| buf.free};
|
||||||
|
)
|
||||||
|
|
||||||
|
//Above we sneakily made a dictionary of slice data for playback (bufnum,start,end). Let's throw it in a dataset
|
||||||
|
~slicedata = FluidDataSet(s); // will hold slice data (bufnum,start,end) for playback
|
||||||
|
|
||||||
|
//dict -> dataset
|
||||||
|
(
|
||||||
|
~slicedata.load(~slices);
|
||||||
|
~slicedata.print;
|
||||||
|
)
|
||||||
|
|
||||||
|
// Step 1. Let's prune and standardize before fitting to a tree
|
||||||
|
(
|
||||||
|
~meanmels = FluidDataSet(s);//will hold pruned mel data
|
||||||
|
~stdmels = FluidDataSet(s);//will standardised, pruned mel data
|
||||||
|
~standardizer = FluidStandardize(s);
|
||||||
|
~pruner = FluidDataSetQuery(s);
|
||||||
|
~tree = FluidKDTree(s,numNeighbours:10,lookupDataSet:~slicedata);//we have to supply the lookup data set when we make the tree (boo!)
|
||||||
|
)
|
||||||
|
|
||||||
|
//Prune, standardize and fit KDTree
|
||||||
|
(
|
||||||
|
{
|
||||||
|
~meanmels.clear;
|
||||||
|
~stdmels.clear;
|
||||||
|
~pruner.addRange(0,~numMelBands).transform(~mels,~meanmels); //prune with a 'query' -- so this is dropping all but ~meanmels
|
||||||
|
~standardizer.fitTransform(~meanmels,~stdmels);
|
||||||
|
~tree.fit(~stdmels,{"KDTree ready".postln});
|
||||||
|
}.fork(clock:~clock);
|
||||||
|
)
|
||||||
|
|
||||||
|
~meanmels.print
|
||||||
|
|
||||||
|
//Step 2: Set the FluidStandardizer and FluidKDTree up for listening
|
||||||
|
//set the buffers and busses needed
|
||||||
|
(
|
||||||
|
~stdInputPoint = Buffer.alloc(s,40);
|
||||||
|
~stdOutputPoint = Buffer.alloc(s,40);
|
||||||
|
~treeOutputPoint = Buffer.alloc(s,3 * 10);//numNeighbours x triples of bufnum,start,end
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
// let's play a random sound (to make sure we understand our data structure!
|
||||||
|
(
|
||||||
|
{
|
||||||
|
var randPoint, buf, start, stop, dur;
|
||||||
|
|
||||||
|
randPoint = ~slices["data"].keys.asArray.scramble[0]; // this good way of getting - but recast as strong
|
||||||
|
|
||||||
|
buf= ~slices["data"][randPoint][0];
|
||||||
|
start = ~slices["data"][randPoint][1];
|
||||||
|
stop = ~slices["data"][randPoint][2];
|
||||||
|
|
||||||
|
dur = stop - start;
|
||||||
|
|
||||||
|
BufRd.ar(1,buf, Line.ar(start,stop,dur/s.sampleRate, doneAction: 2), 0, 2);
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
// Query KD tree
|
||||||
|
|
||||||
|
// a target sound from outside our dataset
|
||||||
|
~inBuf = Buffer.readChannel(s, Platform.resourceDir +/+ "sounds/a11wlk01.wav", numFrames:15000, channels:[0]);
|
||||||
|
~inBuf.play
|
||||||
|
|
||||||
|
//OR one from within (but just the begining so beware of the difference!)
|
||||||
|
~inBuf = Buffer.alloc(s,15000);
|
||||||
|
~randomSlice = ~slices["data"].keys.asArray.scramble[0];
|
||||||
|
~audio_buffers[~slices["data"][~randomSlice][0]].copyData(~inBuf,srcStartAt: ~slices["data"][~randomSlice][1], numSamples: 15000.min(~slices["data"][~randomSlice][2] - (~slices["data"][~randomSlice][1])));
|
||||||
|
~inBuf.play
|
||||||
|
|
||||||
|
// now try getting a point, playing it, grabbing nearest neighbour and playing it ...
|
||||||
|
|
||||||
|
(
|
||||||
|
~inBufMels = Buffer(s);
|
||||||
|
~inBufStats = Buffer(s);
|
||||||
|
~inBufFlat = Buffer(s);
|
||||||
|
~inBufComp = Buffer(s);
|
||||||
|
~inBufStand = Buffer(s);
|
||||||
|
)
|
||||||
|
|
||||||
|
// FluidBuf Compose is buf version of dataSetQuery
|
||||||
|
|
||||||
|
(
|
||||||
|
FluidBufMelBands.process(s, ~inBuf, features: ~inBufMels, action: {
|
||||||
|
FluidBufStats.process(s, ~inBufMels, stats:~inBufStats, action: {
|
||||||
|
FluidBufFlatten.process(s, ~inBufStats, destination:~inBufFlat, action: {
|
||||||
|
FluidBufCompose.process(s, ~inBufFlat, numFrames: ~numMelBands, destination: ~inBufComp, action: {
|
||||||
|
~standardizer.transformPoint(~inBufComp, ~inBufStand, {
|
||||||
|
~tree.kNearest(~inBufStand,{ |a|a.postln;~nearest = a;})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// playback nearest in order
|
||||||
|
(
|
||||||
|
fork{
|
||||||
|
~nearest.do{|i|
|
||||||
|
var buf, start, stop, dur;
|
||||||
|
|
||||||
|
buf= ~slices["data"][i.asInteger][0];
|
||||||
|
start = ~slices["data"][i.asInteger][1];
|
||||||
|
stop = ~slices["data"][i.asInteger][2];
|
||||||
|
dur = (stop - start)/ s.sampleRate;
|
||||||
|
{BufRd.ar(1,buf, Line.ar(start,stop,dur, doneAction: 2), 0, 2);}.play;
|
||||||
|
|
||||||
|
i.postln;
|
||||||
|
dur.wait;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
)
|
||||||
@ -0,0 +1,73 @@
|
|||||||
|
s.reboot
|
||||||
|
~ds = FluidDataSet.new(s)
|
||||||
|
~point = Buffer.alloc(s,1,1)
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
10.do{|i|
|
||||||
|
~point.set(0,i);
|
||||||
|
~ds.addPoint(i.asString,~point,{("addPoint"+i).postln}); //because buffer.set do an immediate update in the RT thread we can take for granted it'll be updated when we call addPoint
|
||||||
|
s.sync; //but we need to sync to make sure everything is done on the DataSet before the next iteration
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
~ds.print;
|
||||||
|
|
||||||
|
/*** KDTREE ***/
|
||||||
|
~tree = FluidKDTree.new(s)
|
||||||
|
~tree.fit(~ds,action:{"Done indexing".postln})
|
||||||
|
|
||||||
|
~tree.numNeighbours = 5; //play with this
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
10.do{|i|
|
||||||
|
~point.set(0,i);
|
||||||
|
~tree.kNearest(~point, {|x| "Neighbours for a value of % are ".postf(i); x.postln});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
/*** KMEANS ***/
|
||||||
|
|
||||||
|
~kmeans = FluidKMeans.new(s,maxIter:100);
|
||||||
|
~kmeans.numClusters = 2; //play with this
|
||||||
|
~kmeans.fit(~ds,action:{|x| "Done fitting with these number of items per cluster ".post;x.postln;})
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
10.do{|i|
|
||||||
|
~point.set(0,i);
|
||||||
|
~kmeans.predictPoint(~point,{|x| ("Predicted Cluster for a value of " + i ++ ":" + x).postln});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
~labels = FluidLabelSet(s);
|
||||||
|
|
||||||
|
~kmeans.predict(~ds,~labels, {|x| ("Size of each cluster" + x).postln})
|
||||||
|
|
||||||
|
(
|
||||||
|
~labels.size{|x|
|
||||||
|
Routine{x.asInteger.do{|i|
|
||||||
|
~labels.getLabel(i,action: {|l|
|
||||||
|
("Label for entry " + i ++ ":" + l).postln;
|
||||||
|
});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play;
|
||||||
|
};
|
||||||
|
)
|
||||||
|
|
||||||
|
// or simply print it
|
||||||
|
~labels.print
|
||||||
|
|
||||||
|
// or dump and format
|
||||||
|
(
|
||||||
|
~labels.dump{|x|
|
||||||
|
var keys = x["data"].keys.asArray.sort;
|
||||||
|
keys.do{|key|
|
||||||
|
"Label for entry % is %\n".postf(key, x["data"][key][0]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
@ -0,0 +1,67 @@
|
|||||||
|
s.reboot
|
||||||
|
~ds = FluidDataSet.new(s)
|
||||||
|
~point = Buffer.alloc(s,1,1)
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
10.do{|i|
|
||||||
|
var d;
|
||||||
|
if(i<=4,{d=i},{d=i+5});
|
||||||
|
~point.set(0,d);
|
||||||
|
~ds.addPoint(i.asString,~point,{("addPoint"+i).postln});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
~ds.print;
|
||||||
|
|
||||||
|
/*** KDTREE ***/
|
||||||
|
~tree = FluidKDTree.new(s)
|
||||||
|
~tree.fit(~ds,action:{"Done indexing".postln})
|
||||||
|
|
||||||
|
~tree.numNeighbours = 5; //play with this
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
15.do{|i|
|
||||||
|
~point.set(0,i);
|
||||||
|
~tree.kNearest(~point, {|x| "Neighbours for a value of % are ".postf(i); x.post;" with respective distances of ".post;});
|
||||||
|
~tree.kNearestDist(~point, {|x| x.postln});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
/*** KMEANS ***/
|
||||||
|
|
||||||
|
~kmeans = FluidKMeans.new(s,maxIter:100)
|
||||||
|
~kmeans.numClusters = 2; //play with this
|
||||||
|
~kmeans.fit(~ds, action:{|x| "Done fitting with these number of items per cluster ".post;x.postln;})
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
15.do{|i|
|
||||||
|
~point.set(0,i);
|
||||||
|
~kmeans.predictPoint(~point,{|x| ("Predicted Cluster for a value of " + i ++ ":" + x).postln});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
~labels = FluidLabelSet(s);
|
||||||
|
|
||||||
|
~kmeans.predict(~ds,~labels, {|x| ("Size of each cluster" + x).postln})
|
||||||
|
|
||||||
|
(
|
||||||
|
~labels.size{|x|
|
||||||
|
Routine{x.asInteger.do{|i| //size does not return a value, but we can retrieve it via a function
|
||||||
|
~labels.getLabel(i,action: {|l|
|
||||||
|
("Label for entry " + i ++ ":" + l).postln;
|
||||||
|
});
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play;
|
||||||
|
};
|
||||||
|
)
|
||||||
|
|
||||||
|
// or simply print it
|
||||||
|
~labels.print
|
||||||
@ -0,0 +1,64 @@
|
|||||||
|
(
|
||||||
|
~simpleInput = FluidDataSet(s);
|
||||||
|
~simpleOutput = FluidLabelSet(s);
|
||||||
|
b = Buffer.alloc(s,2);
|
||||||
|
~knn = FluidKNNClassifier(s);
|
||||||
|
~knn.numNeighbours = 3
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
var w,v,myx,myy;
|
||||||
|
|
||||||
|
//initialise the mouse position holder
|
||||||
|
myx=0;
|
||||||
|
myy=0;
|
||||||
|
|
||||||
|
//make a window and a full size view
|
||||||
|
w = Window.new("Viewer", Rect(100,Window.screenBounds.height - 400, 310, 310)).front;
|
||||||
|
v = View.new(w,Rect(0,0, 310, 310));
|
||||||
|
|
||||||
|
//creates a function that reacts to mousedown
|
||||||
|
v.mouseDownAction = {|view, x, y|myx=x;myy=y;w.refresh;
|
||||||
|
// myx.postln;myy.postln;
|
||||||
|
Routine{
|
||||||
|
b.setn(0,[myx,myy]);
|
||||||
|
~knn.predictPoint(b, action: {|x|x.postln;});
|
||||||
|
s.sync;
|
||||||
|
}.play;};
|
||||||
|
|
||||||
|
//custom redraw function
|
||||||
|
w.drawFunc = {
|
||||||
|
100.do { |i|
|
||||||
|
if (i < 50, {Pen.color = Color.white;} ,{Pen.color = Color.red;});
|
||||||
|
Pen.addRect(Rect(i.div(10)*30+10,i.mod(10)*30+10,20,20));
|
||||||
|
Pen.perform(\fill);
|
||||||
|
};
|
||||||
|
Pen.color = Color.black;
|
||||||
|
Pen.addOval(Rect(myx-5, myy-5,10,10));
|
||||||
|
Pen.perform(\stroke);
|
||||||
|
};
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
//populates a dataset with the same squares as the gui (their centres) (old method, iterating over buffers. A dictionary approach would be more efficient, see the example in this folder)
|
||||||
|
Routine{
|
||||||
|
50.do{|i|
|
||||||
|
var x = i.div(10)*30+20;
|
||||||
|
var y = i.mod(10)*30+20;
|
||||||
|
b.setn(0,[x,y]);
|
||||||
|
~simpleInput.addPoint(i.asString,b,{("Added Input" + i).postln});
|
||||||
|
~simpleOutput.addLabel(i.asString,"White",{("Added Output" + i).postln});
|
||||||
|
s.sync;
|
||||||
|
b.setn(0,[x+150,y]);
|
||||||
|
~simpleInput.addPoint((i+50).asString,b,{("Added Input" + (i+50)).postln});
|
||||||
|
~simpleOutput.addLabel((i+50).asString,"Red",{("Added Output" + (i+50)).postln});
|
||||||
|
s.sync;
|
||||||
|
};
|
||||||
|
\done.postln;
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
// fit the dataset
|
||||||
|
~knn.fit(~simpleInput,~simpleOutput, action:{"fitting done".postln})
|
||||||
|
|
||||||
|
// now click on the grid and read the estimated class according to the nearest K neighbours.
|
||||||
@ -0,0 +1,74 @@
|
|||||||
|
s.reboot
|
||||||
|
|
||||||
|
~urn = { |n=31416, min=0,max=31415| (min..max).scramble.keep(n) };
|
||||||
|
|
||||||
|
// creates 200 indices, then values of the output of a fundion with a predictable shape of a sinewave
|
||||||
|
n = 200
|
||||||
|
~idx = ~urn.value(n)
|
||||||
|
~data = n.collect{|i|sin(~idx[i]/5000)}
|
||||||
|
|
||||||
|
// creates the dataset with these associated indices and values
|
||||||
|
(
|
||||||
|
~simpleInput = FluidDataSet(s);
|
||||||
|
~simpleOutput = FluidDataSet(s);
|
||||||
|
b = Buffer.alloc(s,1);
|
||||||
|
c = Buffer.alloc(s,1);
|
||||||
|
~mappingviz = Buffer.alloc(s,512);
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
n.do{|i|
|
||||||
|
b.set(0,~idx[i]);
|
||||||
|
c.set(0,~data[i]);
|
||||||
|
~simpleInput.addPoint(i.asString,b,{("Added Input" + i).postln});
|
||||||
|
~simpleOutput.addPoint(i.asString,c,{("Added Output" + i).postln});
|
||||||
|
~mappingviz.set((~idx[i]/61.4).asInteger,~data[i]);
|
||||||
|
s.sync;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
~simpleInput.print
|
||||||
|
~simpleOutput.print
|
||||||
|
|
||||||
|
//look at the seeing material
|
||||||
|
~mappingviz.plot(minval:-1,maxval:1)
|
||||||
|
|
||||||
|
//create a buffer to query
|
||||||
|
~mappingresult = Buffer.alloc(s,512);
|
||||||
|
|
||||||
|
//make the process then fit the data
|
||||||
|
~knn = FluidKNNRegressor(s,3,1)
|
||||||
|
~knn.fit(~simpleInput, ~simpleOutput, action:{"fitting done".postln})
|
||||||
|
|
||||||
|
// query 512 points along the line (slow because of all that sync'ing)
|
||||||
|
(
|
||||||
|
~knn.numNeighbours = 1; // change to see how many points the system uses to regress
|
||||||
|
Routine{
|
||||||
|
512.do{|i|
|
||||||
|
b.set(0,i*61);
|
||||||
|
~knn.predictPoint(b,action:{|d|~mappingresult.set(i,d);});
|
||||||
|
s.sync;
|
||||||
|
i.postln;
|
||||||
|
}
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
// look at the interpolated values
|
||||||
|
~mappingresult.plot
|
||||||
|
|
||||||
|
// change the number of neighbours to regress on
|
||||||
|
~knn.numNeighbours_(5)
|
||||||
|
~knn.fit(~simpleInput, ~simpleOutput, action:{"fitting done".postln})
|
||||||
|
|
||||||
|
// instead of doing the mapping per point, let's do a dataset of 512 points
|
||||||
|
~target = FluidDataSet(s)
|
||||||
|
~target.load(Dictionary.newFrom([\cols, 1, \data, Dictionary.newFrom(512.collect{|i|[i.asString, [i.asFloat * 61]]}.flatten)]))
|
||||||
|
~regressed = FluidDataSet(s)
|
||||||
|
~knn.predict(~target, ~regressed, action:{"prediction done".postln})
|
||||||
|
|
||||||
|
//dump the regressed values
|
||||||
|
~outputArray = Array.newClear(512);
|
||||||
|
~regressed.dump{|x| x["data"].keysValuesDo{|key,val|~outputArray[key.asInteger] = val[0]}}
|
||||||
|
~outputArray.plot
|
||||||
@ -0,0 +1,120 @@
|
|||||||
|
(
|
||||||
|
// set some variables
|
||||||
|
~nb_of_dim = 10;
|
||||||
|
~dataset = FluidDataSet(s);
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
// fill up the dataset with 20 entries of 10 column/dimension/descriptor value each. The naming of the item's label is arbitrary as usual
|
||||||
|
Routine{
|
||||||
|
var buf = Buffer.alloc(s,~nb_of_dim);
|
||||||
|
20.do({ arg i;
|
||||||
|
buf.loadCollection(Array.fill(~nb_of_dim,{rrand(0.0,100.0)}));
|
||||||
|
~dataset.addPoint("point-"++i.asInteger.asString, buf);
|
||||||
|
s.sync;
|
||||||
|
});
|
||||||
|
buf.free;
|
||||||
|
\done.postln;
|
||||||
|
}.play
|
||||||
|
)
|
||||||
|
|
||||||
|
~dataset.print;
|
||||||
|
|
||||||
|
// make a buf for getting points back
|
||||||
|
~query_buf = Buffer.alloc(s,~nb_of_dim);
|
||||||
|
|
||||||
|
// look at a point to see that it has points in it
|
||||||
|
~dataset.getPoint("point-0",~query_buf,{~query_buf.getn(0,~nb_of_dim,{|x|x.postln;});});
|
||||||
|
|
||||||
|
// look at another point to make sure it's different...
|
||||||
|
~dataset.getPoint("point-7",~query_buf,{~query_buf.getn(0,~nb_of_dim,{|x|x.postln;});});
|
||||||
|
|
||||||
|
///////////////////////////////////////////////////////
|
||||||
|
// exploring full dataset normalization and standardization
|
||||||
|
|
||||||
|
// make a FluidNormalize
|
||||||
|
~normalize = FluidNormalize(s,0,1);
|
||||||
|
|
||||||
|
// fits the dataset to find the coefficients
|
||||||
|
~normalize.fit(~dataset,{"done".postln;});
|
||||||
|
|
||||||
|
// making an empty 'normed_dataset' which is required for the normalize function
|
||||||
|
~normed_dataset = FluidDataSet(s);
|
||||||
|
|
||||||
|
// normalize the full dataset
|
||||||
|
~normalize.transform(~dataset,~normed_dataset,{"done".postln;});
|
||||||
|
|
||||||
|
// look at a point to see that it has points in it
|
||||||
|
~normed_dataset.getPoint("point-0",~query_buf,{~query_buf.getn(0,~nb_of_dim,{|x|x.postln;});});
|
||||||
|
// 10 numbers between 0.0 and 1.0 where each column/dimension/descriptor is certain to have at least one item on which it is 0 and one on which it is 1
|
||||||
|
// query a few more for fun
|
||||||
|
|
||||||
|
// try FluidStandardize
|
||||||
|
~standardize = FluidStandardize(s);
|
||||||
|
|
||||||
|
// fits the dataset to find the coefficients
|
||||||
|
~standardize.fit(~dataset,{"done".postln;});
|
||||||
|
|
||||||
|
// standardize the full dataset
|
||||||
|
~standardized_dataset = FluidDataSet(s);
|
||||||
|
~standardize.transform(~dataset,~standardized_dataset,{"done".postln;});
|
||||||
|
|
||||||
|
// look at a point to see that it has points in it
|
||||||
|
~standardized_dataset.getPoint("point-0",~query_buf,{~query_buf.getn(0,~nb_of_dim,{|x|x.postln;});});
|
||||||
|
// 10 numbers that are standardize, which mean that, for each column/dimension/descriptor, the average of all the points will be 0. and the standard deviation 1.
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////
|
||||||
|
// exploring point querying concepts via norm and std
|
||||||
|
|
||||||
|
// Once a dataset is normalized / standardized, query points have to be scaled accordingly to be used in distance measurement. In our instance, values were originally between 0 and 100, and now they will be between 0 and 1 (norm), or their average will be 0. (std). If we have data that we want to match from a similar ranging input, which is usually the case, we will need to normalize the searching point in each dimension using the same coefficients.
|
||||||
|
|
||||||
|
// first, make sure you have run all the code above, since we will query these datasets
|
||||||
|
|
||||||
|
// get a know point as a query point
|
||||||
|
~dataset.getPoint("point-7",~query_buf);
|
||||||
|
|
||||||
|
// find the 2 points with the shortest distances in the dataset
|
||||||
|
~tree = FluidKDTree.new(s,numNeighbours:2);
|
||||||
|
~tree.fit(~dataset)
|
||||||
|
~tree.kNearest(~query_buf, {|x| ("Labels:" + x).postln});
|
||||||
|
~tree.kNearestDist(~query_buf, {|x| ("Distances:" + x).postln});
|
||||||
|
// its nearest neighbourg is itself: it should be itself and the distance should be 0. The second point is depending on your input dataset.
|
||||||
|
|
||||||
|
// normalise that point (~query_buf) to be at the right scale
|
||||||
|
~normbuf = Buffer.alloc(s,~nb_of_dim);
|
||||||
|
~normalize.transformPoint(~query_buf,~normbuf);
|
||||||
|
~normbuf.getn(0,~nb_of_dim,{arg vec;vec.postln;});
|
||||||
|
|
||||||
|
// make a tree of the normalized database and query with the normalize buffer
|
||||||
|
~normtree = FluidKDTree.new(s,numNeighbours:2);
|
||||||
|
~normtree.fit(~normed_dataset)
|
||||||
|
~normtree.kNearest(~normbuf, {|x| ("Labels:" + x).postln});
|
||||||
|
~normtree.kNearestDist(~normbuf, {|x| ("Distances:" + x).postln});
|
||||||
|
// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg might have changed. The distance is now different too
|
||||||
|
|
||||||
|
// standardize that same point (~query_buf) to be at the right scale
|
||||||
|
~stdbuf = Buffer.alloc(s,~nb_of_dim);
|
||||||
|
~standardize.transformPoint(~query_buf,~stdbuf);
|
||||||
|
~stdbuf.getn(0,~nb_of_dim,{arg vec;vec.postln;});
|
||||||
|
|
||||||
|
// make a tree of the standardized database and query with the normalize buffer
|
||||||
|
~stdtree = FluidKDTree.new(s, numNeighbours: 2);
|
||||||
|
~stdtree.fit(~standardized_dataset)
|
||||||
|
~stdtree.kNearest(~stdbuf, {|x| ("Labels:" + x).postln});
|
||||||
|
~stdtree.kNearestDist(~stdbuf, {|x| ("Distances:" + x).postln});
|
||||||
|
// its nearest neighbourg is still itself as it should be, but the 2nd neighbourg might have changed yet again. The distance is also different too
|
||||||
|
|
||||||
|
// where it starts to be interesting is when we query points that are not in our original dataset
|
||||||
|
|
||||||
|
// fill with known values (50.0 for each of the 10 column/dimension/descriptor, aka the theoretical middle point of the multidimension space) This could be anything but it is fun to aim in the middle.
|
||||||
|
~query_buf.fill(0,~nb_of_dim,50);
|
||||||
|
|
||||||
|
// normalize and standardize the query buffer. Note that we do not need to fit since we have not added a point to our reference dataset
|
||||||
|
~normalize.transformPoint(~query_buf,~normbuf);
|
||||||
|
~standardize.transformPoint(~query_buf,~stdbuf);
|
||||||
|
|
||||||
|
//query the single nearest neighbourg via 3 different data scaling. Depending on the random source at the begining, you should get (small or large) differences between the 3 answers!
|
||||||
|
[~tree,~normtree,~stdtree].do{|t| t.numNeighbours =1 };
|
||||||
|
~tree.kNearest(~query_buf, {|x| ("Original:" + x).post;~tree.kNearestDist(~query_buf, {|x| (" with a distance of " + x).postln});});
|
||||||
|
~normtree.kNearest(~normbuf, {|x| ("Normalized:" + x).post;~normtree.kNearestDist(~normbuf, {|x| (" with a distance of " + x).postln});});
|
||||||
|
~stdtree.kNearest(~stdbuf, {|x| ("Standardized:" + x).post; ~stdtree.kNearestDist(~stdbuf, {|x| (" with a distance of " + x).postln});});
|
||||||
@ -0,0 +1,52 @@
|
|||||||
|
// create the data dictionary
|
||||||
|
~data = Dictionary.new
|
||||||
|
7.do{|i| ~data.add(("entry-"++i).asSymbol -> 10.collect{|j|j/10 + i})}
|
||||||
|
|
||||||
|
// nest that dictionary in the dataset format, adding the number of columns
|
||||||
|
~dict = Dictionary.new
|
||||||
|
~dict.add(\data -> ~data)
|
||||||
|
~dict.add(\cols -> 10)
|
||||||
|
|
||||||
|
//create a dataset, then loading the dictionary
|
||||||
|
~ds = FluidDataSet(s);
|
||||||
|
~ds.load(~dict)
|
||||||
|
~ds.print
|
||||||
|
|
||||||
|
//fun with kdtree to see it actually works
|
||||||
|
~kdtree = FluidKDTree.new(s,numNeighbours:5)
|
||||||
|
~kdtree.fit(~ds,{\done.postln;})
|
||||||
|
~target = Buffer.loadCollection(s,(4).dup(10));
|
||||||
|
~kdtree.kNearest(~target, {|a|a.postln;})
|
||||||
|
~kdtree.kNearestDist(~target, {|a|a.postln;})
|
||||||
|
|
||||||
|
|
||||||
|
/////////////////////////////////////////////
|
||||||
|
// creating a labelset the same way
|
||||||
|
|
||||||
|
// creating the data dictionary
|
||||||
|
~data2 = Dictionary.new
|
||||||
|
7.do{|i| ~data2.add(("entry-"++i).asSymbol -> (if( i.odd, {["odd"]},{["even"]})))}
|
||||||
|
|
||||||
|
// nesting again
|
||||||
|
~dict2 = Dictionary.new
|
||||||
|
~dict2.add(\data -> ~data2)
|
||||||
|
~dict2.add(\cols -> 1)
|
||||||
|
|
||||||
|
// creating a labelset and loading the dictionary
|
||||||
|
~ls = FluidLabelSet(s);
|
||||||
|
~ls.load(~dict2)
|
||||||
|
~ls.print
|
||||||
|
|
||||||
|
// testin with a classifier toy example
|
||||||
|
~classifier = FluidKNNClassifier.new(s, numNeighbours:2);
|
||||||
|
~classifier.fit(~ds,~ls, {\done.postln;})
|
||||||
|
|
||||||
|
// run many times for random pleasure
|
||||||
|
(
|
||||||
|
fork{
|
||||||
|
var value = 7.rand;
|
||||||
|
~ds.getPoint(("entry-"++value).asSymbol,~target);
|
||||||
|
s.sync;
|
||||||
|
~classifier.predictPoint(~target, action: {|x|"entry % is an % entry.\n".postf(value,x);})
|
||||||
|
}
|
||||||
|
)
|
||||||
@ -0,0 +1,83 @@
|
|||||||
|
// basic xor to show non-linearity guestimating of MLP
|
||||||
|
// see https://medium.com/@jayeshbahire/the-xor-problem-in-neural-networks-50006411840b
|
||||||
|
// and https://youtu.be/188B6k_F9jU
|
||||||
|
|
||||||
|
// make 2 datasets as our truth
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
d = Dictionary.new;
|
||||||
|
d.add(\cols -> 2);
|
||||||
|
d.add(\data -> Dictionary.newFrom(["f-f", [0,0], "f-t", [0,1], "t-f", [1,0], "t-t", [1,1]]));
|
||||||
|
~mlpHelpSource = FluidDataSet.new(s);
|
||||||
|
s.sync;
|
||||||
|
~mlpHelpSource.load(d);
|
||||||
|
s.sync;
|
||||||
|
d.add(\cols -> 1);
|
||||||
|
d.add(\data -> Dictionary.newFrom(["f-f", [0], "f-t", [1], "t-f", [1], "t-t", [0]]));
|
||||||
|
~mlpHelpTarget = FluidDataSet.new(s);
|
||||||
|
s.sync;
|
||||||
|
~mlpHelpTarget.load(d);
|
||||||
|
s.sync;
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
//check our thruth tables
|
||||||
|
~mlpHelpSource.print
|
||||||
|
~mlpHelpTarget.print
|
||||||
|
|
||||||
|
// make an MLPregressor
|
||||||
|
~mlp = FluidMLPRegressor(s, [3], FluidMLPRegressor.sigmoid, FluidMLPRegressor.sigmoid,maxIter:1000,learnRate: 0.1,momentum: 0.1,batchSize: 1,validation: 0);//1000 epoch at a time
|
||||||
|
//train it by executing the following line multiple time, and observe the error
|
||||||
|
~mlp.fit(~mlpHelpSource,~mlpHelpTarget,{|x|x.postln;});
|
||||||
|
|
||||||
|
//to make a plot of the error let's do a classic 'shades of truth' (a grid of 11 x 11 with each values of truth between 0 and 1
|
||||||
|
(
|
||||||
|
Routine{
|
||||||
|
d = Dictionary.new;
|
||||||
|
|
||||||
|
d.add(\cols -> 2);
|
||||||
|
d.add(\data -> Dictionary.newFrom(121.collect{|x|[x.asString, [x.div(10)/10,x.mod(10)/10]]}.flatten));
|
||||||
|
~mlpHelpShades = FluidDataSet.new(s);
|
||||||
|
s.sync;
|
||||||
|
~mlpHelpShades.load(d);
|
||||||
|
s.sync;
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
// check it
|
||||||
|
~mlpHelpShades.print
|
||||||
|
|
||||||
|
// let's make a destination for our regressions
|
||||||
|
~mlpHelpRegressed = FluidDataSet.new(s);
|
||||||
|
|
||||||
|
// then predict the full DataSet in our trained network
|
||||||
|
~mlp.predict(~mlpHelpShades,~mlpHelpRegressed);
|
||||||
|
|
||||||
|
// estimated values
|
||||||
|
~mlpHelpRegressed.print;
|
||||||
|
|
||||||
|
// lets extract them as an array
|
||||||
|
~assignments = Array.new(121); ~mlpHelpRegressed.dump{|x|~assignments = x.at("data").atAll(x.at("data").keys.asArray.sort{|a,b|a.asInteger < b.asInteger}).flatten.postln;};
|
||||||
|
|
||||||
|
//and draw them
|
||||||
|
(
|
||||||
|
w = Window("scatter", Rect(128, 64, 200, 200));
|
||||||
|
w.drawFunc = {
|
||||||
|
Pen.use {
|
||||||
|
~assignments.do{|val,ind|
|
||||||
|
var x = (ind.div(10)*20);
|
||||||
|
var y = (ind.mod(10)*20);
|
||||||
|
var r = Rect(x,y,20,20);
|
||||||
|
Pen.fillColor = Color.grey(val);
|
||||||
|
Pen.fillRect(r);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
w.refresh;
|
||||||
|
w.front;
|
||||||
|
)
|
||||||
|
|
||||||
|
~mlp.free
|
||||||
|
~mlpHelpShades.free
|
||||||
|
~mlpHelpSource.free
|
||||||
|
~mlpHelpTarget.free
|
||||||
@ -0,0 +1,90 @@
|
|||||||
|
//1- make the gui then the synth below
|
||||||
|
(
|
||||||
|
var trained = 0, entering = 0;
|
||||||
|
var va = Array.fill(10,{0.5});
|
||||||
|
var input = Buffer.alloc(s,2);
|
||||||
|
var output = Buffer.alloc(s,10);
|
||||||
|
var mlp = FluidMLPRegressor(s,[6],activation: 1,outputActivation: 1,maxIter: 1000,learnRate: 0.1,momentum: 0,batchSize: 1,validation: 0);
|
||||||
|
var entry = 0;
|
||||||
|
|
||||||
|
~inData = FluidDataSet(s);
|
||||||
|
~outData = FluidDataSet(s);
|
||||||
|
|
||||||
|
w = Window("ChaosSynth", Rect(10, 10, 790, 320)).front;
|
||||||
|
a = MultiSliderView(w,Rect(10, 10, 400, 300)).elasticMode_(1).isFilled_(1);
|
||||||
|
a.value=va;
|
||||||
|
a.action = {arg q;
|
||||||
|
b.set(\val, q.value);
|
||||||
|
va = q.value;};
|
||||||
|
f = Slider2D(w,Rect(420,10,300, 300));
|
||||||
|
f.x = 0.5;
|
||||||
|
f.y = 0.5;
|
||||||
|
f.action = {arg x,y; //if trained, predict the point f.x f.y
|
||||||
|
if (entering == 1, { //if entering a point, add to the the database f.x f.y against the array va
|
||||||
|
input.setn(0, [f.x, f.y]);
|
||||||
|
output.setn(0, va);
|
||||||
|
~inData.addPoint(entry.asSymbol,input);
|
||||||
|
~outData.addPoint(entry.asSymbol,output);
|
||||||
|
entering = 0;
|
||||||
|
entry = entry + 1;
|
||||||
|
{d.value = 0;}.defer;
|
||||||
|
}, { //if not entering a point
|
||||||
|
if (trained == 1, { //if trained
|
||||||
|
input.setn(0, [f.x, f.y]);
|
||||||
|
mlp.predictPoint(input,output,{
|
||||||
|
output.getn(0,10,{
|
||||||
|
|x|va = x; b.set(\val, va); {a.value = va;}.defer;});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
c = Button(w, Rect(730,240,50, 20)).states_([["train", Color.red, Color.white], ["trained", Color.white, Color.grey]]).action_{
|
||||||
|
mlp.fit(~inData,~outData,{|x|
|
||||||
|
trained = 1;
|
||||||
|
{
|
||||||
|
c.value = 1;
|
||||||
|
e.value = x.round(0.001).asString;
|
||||||
|
}.defer;
|
||||||
|
});//train the network
|
||||||
|
};
|
||||||
|
d = Button(w, Rect(730,10,50, 20)).states_([["entry", Color.white, Color.grey], ["entry", Color.red, Color.white]]).action_{
|
||||||
|
entering = 1;
|
||||||
|
};
|
||||||
|
StaticText(w,Rect(732,260,50,20)).string_("Error:");
|
||||||
|
e = TextField(w,Rect(730,280,50,20)).string_(0.asString);
|
||||||
|
StaticText(w,Rect(732,70,50,20)).string_("rate:");
|
||||||
|
TextField(w,Rect(730,90,50,20)).string_(0.1.asString).action_{|in|mlp.learnRate = in.value.asFloat.postln;};
|
||||||
|
StaticText(w,Rect(732,110,50,20)).string_("momentum:");
|
||||||
|
TextField(w,Rect(730,130,50,20)).string_(0.0.asString).action_{|in|mlp.momentum = in.value.asFloat.postln;};
|
||||||
|
StaticText(w,Rect(732,150,50,20)).string_("maxIter:");
|
||||||
|
TextField(w,Rect(730,170,50,20)).string_(1000.asString).action_{|in| mlp.maxIter = in.value.asInteger.postln;};
|
||||||
|
StaticText(w,Rect(732,190,50,20)).string_("validation:");
|
||||||
|
TextField(w,Rect(730,210,50,20)).string_(0.0.asString).action_{|in|mlp.validation = in.value.asFloat.postln;};
|
||||||
|
)
|
||||||
|
|
||||||
|
//2- the synth
|
||||||
|
(
|
||||||
|
b = {
|
||||||
|
arg val = #[0,0,0,0,0,0,0,0,0,0];
|
||||||
|
var osc1, osc2, feed1, feed2, base1=69, base2=69, base3 = 130;
|
||||||
|
#feed2,feed1 = LocalIn.ar(2);
|
||||||
|
osc1 = MoogFF.ar(SinOsc.ar((((feed1 * val[0]) + val[1]) * base1).midicps,mul: (val[2] * 50).dbamp).atan,(base3 - (val[3] * (FluidLoudness.kr(feed2, 1, 0, hopSize: 64)[0].clip(-120,0) + 120))).lag(128/44100).midicps, val[4] * 3.5);
|
||||||
|
osc2 = MoogFF.ar(SinOsc.ar((((feed2 * val[5]) + val[6]) * base2).midicps,mul: (val[7] * 50).dbamp).atan,(base3 - (val[8] * (FluidLoudness.kr(feed1, 1, 0, hopSize: 64)[0].clip(-120,0) + 120))).lag(128/44100).midicps, val[9] * 3.5);
|
||||||
|
Out.ar(0,LeakDC.ar([osc1,osc2],mul: 0.1));
|
||||||
|
LocalOut.ar([osc1,osc2]);
|
||||||
|
}.play;
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
~inData.print;
|
||||||
|
~outData.print;
|
||||||
|
|
||||||
|
/////////
|
||||||
|
//3 - play with the multislider
|
||||||
|
//4 - when you like a spot, click entry (become read) then a position in the 2D graph where this point should be
|
||||||
|
//5 - do that for a few points
|
||||||
|
//6 - click train
|
||||||
|
//7 - the 2D graph controls the 10D
|
||||||
|
//8 - if you like a new sound and you want to update the graph, just click entry, then where it should be in the 2D, then retrain when you are happy
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue