Skip to content

Commit

Permalink
Add frame properties for AVS+ 3.6+
Browse files Browse the repository at this point in the history
  • Loading branch information
CrendKing committed Jul 24, 2021
1 parent b061824 commit 1b6a764
Show file tree
Hide file tree
Showing 9 changed files with 123 additions and 42 deletions.
76 changes: 65 additions & 11 deletions avisynth_filter/src/frame_handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,32 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {

PVideoFrame frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);

if (FrameServerCommon::GetInstance().IsFramePropsSupported()) {
AVSMap *frameProps = AVSF_AVS_API->getFramePropsRW(frame);

AVSF_AVS_API->propSetFloat(frameProps, FRAME_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), PROPAPPENDMODE_REPLACE);
AVSF_AVS_API->propSetInt(frameProps, "_SARNum", _filter._inputVideoFormat.pixelAspectRatioNum, PROPAPPENDMODE_REPLACE);
AVSF_AVS_API->propSetInt(frameProps, "_SARDen", _filter._inputVideoFormat.pixelAspectRatioDen, PROPAPPENDMODE_REPLACE);

if (const std::optional<int> &optColorRange = _filter._inputVideoFormat.colorSpaceInfo.colorRange) {
AVSF_AVS_API->propSetInt(frameProps, "_ColorRange", *optColorRange, PROPAPPENDMODE_REPLACE);
}
AVSF_AVS_API->propSetInt(frameProps, "_Primaries", _filter._inputVideoFormat.colorSpaceInfo.primaries, PROPAPPENDMODE_REPLACE);
AVSF_AVS_API->propSetInt(frameProps, "_Matrix", _filter._inputVideoFormat.colorSpaceInfo.matrix, PROPAPPENDMODE_REPLACE);
AVSF_AVS_API->propSetInt(frameProps, "_Transfer", _filter._inputVideoFormat.colorSpaceInfo.transfer, PROPAPPENDMODE_REPLACE);

const DWORD typeSpecificFlags = _filter.m_pInput->SampleProps()->dwTypeSpecificFlags;
int rfpFieldBased;
if (typeSpecificFlags & AM_VIDEO_FLAG_WEAVE) {
rfpFieldBased = 0;
} else if (typeSpecificFlags & AM_VIDEO_FLAG_FIELD1FIRST) {
rfpFieldBased = 2;
} else {
rfpFieldBased = 1;
}
AVSF_AVS_API->propSetInt(frameProps, FRAME_PROP_NAME_FIELD_BASED, rfpFieldBased, PROPAPPENDMODE_REPLACE);
}

std::unique_ptr<HDRSideData> hdrSideData = std::make_unique<HDRSideData>();
{
if (const ATL::CComQIPtr<IMediaSideData> inputSampleSideData(inputSample); inputSampleSideData != nullptr) {
Expand Down Expand Up @@ -218,8 +244,35 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, RE
} else {
try {
// some AviSynth internal filter (e.g. Subtitle) can't tolerate multi-thread access
const PVideoFrame scriptFrame = MainFrameServer::GetInstance().GetFrame(_nextOutputFrameNb);
Format::WriteSample(_filter._outputVideoFormat, scriptFrame, outputBuffer);
const PVideoFrame outputFrame = MainFrameServer::GetInstance().GetFrame(_nextOutputFrameNb);

if (const ATL::CComQIPtr<IMediaSample2> outSample2(outSample); outSample2 != nullptr) {
if (AM_SAMPLE2_PROPERTIES sampleProps; SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&sampleProps)))) {
if (FrameServerCommon::GetInstance().IsFramePropsSupported()) {
const AVSMap *frameProps = AVSF_AVS_API->getFramePropsRO(outputFrame);
int propGetError;

if (const int64_t rfpFieldBased = AVSF_AVS_API->propGetInt(frameProps, FRAME_PROP_NAME_FIELD_BASED, 0, &propGetError);
propGetError == GETPROPERROR_UNSET || rfpFieldBased == 0) {
sampleProps.dwTypeSpecificFlags = AM_VIDEO_FLAG_WEAVE;
} else if (rfpFieldBased == 2) {
sampleProps.dwTypeSpecificFlags = AM_VIDEO_FLAG_FIELD1FIRST;
} else {
sampleProps.dwTypeSpecificFlags = 0;
}
} else {
sampleProps.dwTypeSpecificFlags = AM_VIDEO_FLAG_WEAVE;
}

if (sourceTypeSpecificFlags & AM_VIDEO_FLAG_REPEAT_FIELD) {
sampleProps.dwTypeSpecificFlags |= AM_VIDEO_FLAG_REPEAT_FIELD;
}

outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&sampleProps));
}
}

Format::WriteSample(_filter._outputVideoFormat, outputFrame, outputBuffer);
} catch (AvisynthError) {
return false;
}
Expand Down Expand Up @@ -266,7 +319,7 @@ auto FrameHandler::WorkerProc() -> void {
* Therefore instead of directly using the stop time from the current sample, we use the start time of the next sample.
*/

std::array<decltype(_sourceFrames)::const_iterator, NUM_SRC_FRAMES_PER_PROCESSING> processSourceFrameIters;
std::array<decltype(_sourceFrames)::iterator, NUM_SRC_FRAMES_PER_PROCESSING> processSourceFrameIters;
std::array<REFERENCE_TIME, NUM_SRC_FRAMES_PER_PROCESSING - 1> outputFrameDurations;

{
Expand Down Expand Up @@ -316,20 +369,21 @@ auto FrameHandler::WorkerProc() -> void {
}
_nextOutputFrameStartTime = outputStopTime;

if (FrameServerCommon::GetInstance().IsFramePropsSupported()) {
AVSMap *frameProps = AVSF_AVS_API->getFramePropsRW(processSourceFrameIters[0]->second.frame);
REFERENCE_TIME frameDurationNum = processSourceFrameIters[1]->second.startTime - processSourceFrameIters[0]->second.startTime;
REFERENCE_TIME frameDurationDen = UNITS;
CoprimeIntegers(frameDurationNum, frameDurationDen);
AVSF_AVS_API->propSetInt(frameProps, FRAME_PROP_NAME_DURATION_NUM, frameDurationNum, PROPAPPENDMODE_REPLACE);
AVSF_AVS_API->propSetInt(frameProps, FRAME_PROP_NAME_DURATION_DEN, frameDurationDen, PROPAPPENDMODE_REPLACE);
}

Environment::GetInstance().Log(L"Processing output frame %6i for source frame %6i at %10lli ~ %10lli duration %10lli",
_nextOutputFrameNb, processSourceFrameIters[0]->first, outputStartTime, outputStopTime, outputStopTime - outputStartTime);

RefreshOutputFrameRates(_nextOutputFrameNb);

if (ATL::CComPtr<IMediaSample> outSample; PrepareOutputSample(outSample, outputStartTime, outputStopTime, processSourceFrameIters[0]->second.typeSpecificFlags)) {
if (const ATL::CComQIPtr<IMediaSample2> outSample2(outSample); outSample2 != nullptr) {
AM_SAMPLE2_PROPERTIES props;
if (SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
props.dwTypeSpecificFlags = processSourceFrameIters[0]->second.typeSpecificFlags;
outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
}
}

if (const ATL::CComQIPtr<IMediaSideData> sideData(outSample); sideData != nullptr) {
processSourceFrameIters[0]->second.hdrSideData->WriteTo(sideData);
}
Expand Down
10 changes: 10 additions & 0 deletions avisynth_filter/src/frameserver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,18 @@ FrameServerCommon::FrameServerCommon() {

IScriptEnvironment *env = CreateEnv();
AVS_linkage = env->GetAVSLinkage();

_versionString = env->Invoke("Eval", AVSValue("VersionString()")).AsString();
Environment::GetInstance().Log(L"AviSynth version: %S", GetVersionString().data());

try {
// AVS+ 3.6 is interface version 8
env->CheckVersion(8);
_isFramePropsSupported = true;
Environment::GetInstance().Log(L"AviSynth supports frame properties");
} catch (...) {
}

env->DeleteScriptEnvironment();

_sourceClip = new SourceClip(_sourceVideoInfo);
Expand Down
2 changes: 2 additions & 0 deletions avisynth_filter/src/frameserver.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,14 @@ class FrameServerCommon : public RefCountedSingleton<FrameServerCommon> {
auto SetScriptPath(const std::filesystem::path &scriptPath) -> void;
auto LinkFrameHandler(FrameHandler *frameHandler) const -> void;
constexpr auto GetVersionString() const -> std::string_view { return _versionString == nullptr ? "unknown AviSynth version" : _versionString; }
constexpr auto IsFramePropsSupported() const -> bool { return _isFramePropsSupported; }
constexpr auto GetScriptPath() const -> const std::filesystem::path & { return _scriptPath; }

private:
auto CreateEnv() const -> IScriptEnvironment *;

const char *_versionString;
bool _isFramePropsSupported = false;
std::filesystem::path _scriptPath = Environment::GetInstance().GetScriptPath();
VideoInfo _sourceVideoInfo = {};
PClip _sourceClip;
Expand Down
11 changes: 11 additions & 0 deletions filter_common/src/constants.h
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,17 @@ static constexpr const WCHAR *UNAVAILABLE_SOURCE_PATH = L"N/A";
*/
static constexpr const int NUM_FRAMES_FOR_INFINITE_STREAM = 10810800;

/*
 * AviSynth+ and VapourSynth frame property names.
 * Names prefixed with "AVSF_" are private properties specific to this filter,
 * shared by both the AviSynth and VapourSynth variants.
 */
static constexpr const char *FRAME_PROP_NAME_ABS_TIME = "_AbsoluteTime";
static constexpr const char *FRAME_PROP_NAME_DURATION_NUM = "_DurationNum";
static constexpr const char *FRAME_PROP_NAME_DURATION_DEN = "_DurationDen";
static constexpr const char *FRAME_PROP_NAME_FIELD_BASED = "_FieldBased";
static constexpr const char *FRAME_PROP_NAME_SOURCE_FRAME_NB = "AVSF_SourceFrameNb";
static constexpr const char *FRAME_PROP_NAME_TYPE_SPECIFIC_FLAGS = "AVSF_TypeSpecificFlags";

static constexpr const WCHAR *REGISTRY_KEY_NAME_PREFIX = L"Software\\AviSynthFilter\\";
static constexpr const WCHAR *SETTING_NAME_SCRIPT_FILE = L"ScriptFile";
static constexpr const WCHAR *SETTING_NAME_LOG_FILE = L"LogFile";
Expand Down
11 changes: 9 additions & 2 deletions filter_common/src/util.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ auto ConvertUtf8ToWide(std::string_view utf8String) -> std::wstring {
return ret;
}

auto DoubleToString(double d, int precision) -> std::wstring {
const std::wstring str = std::to_wstring(d);
/**
 * Format a double as a wide string truncated to the given number of digits
 * after the decimal point (no rounding).
 *
 * Note: std::to_wstring formats via swprintf "%f", whose decimal separator is
 * locale-dependent. If no L'.' is present in the result, the original code's
 * str.find(L'.') returned npos, and npos + 1 + precision wrapped around to
 * precision, silently truncating the number to its first few characters.
 * Guard that case by returning the unmodified string instead.
 */
auto DoubleToString(double num, int precision) -> std::wstring {
    const std::wstring str = std::to_wstring(num);
    const std::wstring::size_type dotPos = str.find(L'.');
    if (dotPos == std::wstring::npos) {
        return str;
    }
    return str.substr(0, dotPos + 1 + precision);
}
}

Expand All @@ -44,4 +44,11 @@ auto ReplaceSubstr(std::string &str, std::string_view from, std::string_view to)
return str;
}

/**
 * Reduce the pair (a, b) in place to its lowest terms by dividing both by
 * their greatest common divisor.
 *
 * When the GCD is 0 (both inputs are 0) or 1 (already coprime), the values
 * are left untouched, which also avoids a division by zero.
 */
auto CoprimeIntegers(int64_t &a, int64_t &b) -> void {
    const int64_t commonFactor = std::gcd(a, b);
    if (commonFactor <= 1) {
        return;
    }
    a /= commonFactor;
    b /= commonFactor;
}

}
3 changes: 2 additions & 1 deletion filter_common/src/util.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,10 @@ namespace SynthFilter {

auto ConvertWideToUtf8(std::wstring_view wideString) -> std::string;
auto ConvertUtf8ToWide(std::string_view utf8String) -> std::wstring;
auto DoubleToString(double d, int precision) -> std::wstring;
auto DoubleToString(double num, int precision) -> std::wstring;
auto JoinStrings(const std::vector<std::wstring> &input, std::wstring_view delimiter) -> std::wstring;
auto ReplaceSubstr(std::string &str, std::string_view from, std::string_view to) -> std::string &;
auto CoprimeIntegers(int64_t &a, int64_t &b) -> void;

/**
* ceil(dividend / divisor), assuming both oprands are positive
Expand Down
4 changes: 2 additions & 2 deletions vapoursynth_filter/src/format.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ auto Format::GetVideoFormat(const AM_MEDIA_TYPE &mediaType, const FrameServerBas
const VIDEOINFOHEADER *vih = reinterpret_cast<VIDEOINFOHEADER *>(mediaType.pbFormat);
REFERENCE_TIME fpsNum = UNITS;
REFERENCE_TIME fpsDen = vih->AvgTimePerFrame > 0 ? vih->AvgTimePerFrame : DEFAULT_AVG_TIME_PER_FRAME;
vs_normalizeRational(&fpsNum, &fpsDen);
CoprimeIntegers(fpsNum, fpsDen);
VSCore *vsCore= vsscript_getCore(frameServerInstance->GetVsScript());

VideoFormat ret {
Expand Down Expand Up @@ -73,7 +73,7 @@ auto Format::GetVideoFormat(const AM_MEDIA_TYPE &mediaType, const FrameServerBas
*/
ret.pixelAspectRatioNum = vih2->dwPictAspectRatioX * ret.videoInfo.height;
ret.pixelAspectRatioDen = vih2->dwPictAspectRatioY * ret.videoInfo.width;
vs_normalizeRational(&ret.pixelAspectRatioNum, &ret.pixelAspectRatioDen);
CoprimeIntegers(ret.pixelAspectRatioNum, ret.pixelAspectRatioDen);
}

if ((vih2->dwControlFlags & AMCONTROL_USED) && (vih2->dwControlFlags & AMCONTROL_COLORINFO_PRESENT)) {
Expand Down
42 changes: 22 additions & 20 deletions vapoursynth_filter/src/frame_handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,13 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
}

VSFrameRef *frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);

VSMap *frameProps = AVSF_VPS_API->getFramePropsRW(frame);
AVSF_VPS_API->propSetFloat(frameProps, VS_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), paReplace);

AVSF_VPS_API->propSetFloat(frameProps, FRAME_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), paReplace);
AVSF_VPS_API->propSetInt(frameProps, "_SARNum", _filter._inputVideoFormat.pixelAspectRatioNum, paReplace);
AVSF_VPS_API->propSetInt(frameProps, "_SARDen", _filter._inputVideoFormat.pixelAspectRatioDen, paReplace);
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, _nextSourceFrameNb, paReplace);
AVSF_VPS_API->propSetInt(frameProps, FRAME_PROP_NAME_SOURCE_FRAME_NB, _nextSourceFrameNb, paReplace);

if (const std::optional<int> &optColorRange = _filter._inputVideoFormat.colorSpaceInfo.colorRange) {
AVSF_VPS_API->propSetInt(frameProps, "_ColorRange", *optColorRange, paReplace);
Expand All @@ -84,8 +86,8 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
} else {
rfpFieldBased = 1;
}
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_FIELD_BASED, rfpFieldBased, paReplace);
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_TYPE_SPECIFIC_FLAGS, typeSpecificFlags, paReplace);
AVSF_VPS_API->propSetInt(frameProps, FRAME_PROP_NAME_FIELD_BASED, rfpFieldBased, paReplace);
AVSF_VPS_API->propSetInt(frameProps, FRAME_PROP_NAME_TYPE_SPECIFIC_FLAGS, typeSpecificFlags, paReplace);

std::unique_ptr<HDRSideData> hdrSideData = std::make_unique<HDRSideData>();
{
Expand Down Expand Up @@ -149,9 +151,9 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
frameProps = AVSF_VPS_API->getFramePropsRW(processSourceFrameIters[0]->second.frame);
REFERENCE_TIME frameDurationNum = processSourceFrameIters[1]->second.startTime - processSourceFrameIters[0]->second.startTime;
REFERENCE_TIME frameDurationDen = UNITS;
vs_normalizeRational(&frameDurationNum, &frameDurationDen);
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_NUM, frameDurationNum, paReplace);
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_DEN, frameDurationDen, paReplace);
CoprimeIntegers(frameDurationNum, frameDurationDen);
AVSF_VPS_API->propSetInt(frameProps, FRAME_PROP_NAME_DURATION_NUM, frameDurationNum, paReplace);
AVSF_VPS_API->propSetInt(frameProps, FRAME_PROP_NAME_DURATION_DEN, frameDurationDen, paReplace);
_newSourceFrameCv.notify_all();

const int maxRequestOutputFrameNb = static_cast<int>(llMulDiv(processSourceFrameIters[0]->first,
Expand Down Expand Up @@ -193,7 +195,7 @@ auto FrameHandler::GetSourceFrame(int frameNb) -> const VSFrameRef * {
}

const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(iter->second.frame);
return AVSF_VPS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_NUM) > 0 && AVSF_VPS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_DEN) > 0;
return AVSF_VPS_API->propNumElements(frameProps, FRAME_PROP_NAME_DURATION_NUM) > 0 && AVSF_VPS_API->propNumElements(frameProps, FRAME_PROP_NAME_DURATION_DEN) > 0;
});

if (_isFlushing) {
Expand Down Expand Up @@ -306,8 +308,8 @@ auto FrameHandler::ResetInput() -> void {
auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool {
const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(outputFrame);
int propGetError;
const int64_t frameDurationNum = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_NUM, 0, &propGetError);
const int64_t frameDurationDen = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_DEN, 0, &propGetError);
const int64_t frameDurationNum = AVSF_VPS_API->propGetInt(frameProps, FRAME_PROP_NAME_DURATION_NUM, 0, &propGetError);
const int64_t frameDurationDen = AVSF_VPS_API->propGetInt(frameProps, FRAME_PROP_NAME_DURATION_DEN, 0, &propGetError);
int64_t frameDuration;

if (frameDurationNum > 0 && frameDurationDen > 0) {
Expand All @@ -317,7 +319,7 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, in
}

if (_nextOutputFrameStartTime == 0) {
_nextOutputFrameStartTime = static_cast<REFERENCE_TIME>(AVSF_VPS_API->propGetFloat(frameProps, VS_PROP_NAME_ABS_TIME, 0, &propGetError) * UNITS);
_nextOutputFrameStartTime = static_cast<REFERENCE_TIME>(AVSF_VPS_API->propGetFloat(frameProps, FRAME_PROP_NAME_ABS_TIME, 0, &propGetError) * UNITS);
}

REFERENCE_TIME frameStartTime = _nextOutputFrameStartTime;
Expand Down Expand Up @@ -364,22 +366,22 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, in
}

if (const ATL::CComQIPtr<IMediaSample2> outSample2(outSample); outSample2 != nullptr) {
if (AM_SAMPLE2_PROPERTIES props; SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
if (const int64_t rfpFieldBased = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_FIELD_BASED, 0, &propGetError);
if (AM_SAMPLE2_PROPERTIES sampleProps; SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&sampleProps)))) {
if (const int64_t rfpFieldBased = AVSF_VPS_API->propGetInt(frameProps, FRAME_PROP_NAME_FIELD_BASED, 0, &propGetError);
propGetError == peUnset || rfpFieldBased == 0) {
props.dwTypeSpecificFlags = AM_VIDEO_FLAG_WEAVE;
sampleProps.dwTypeSpecificFlags = AM_VIDEO_FLAG_WEAVE;
} else if (rfpFieldBased == 2) {
props.dwTypeSpecificFlags = AM_VIDEO_FLAG_FIELD1FIRST;
sampleProps.dwTypeSpecificFlags = AM_VIDEO_FLAG_FIELD1FIRST;
} else {
props.dwTypeSpecificFlags = 0;
sampleProps.dwTypeSpecificFlags = 0;
}

if (const int64_t sourceTypeSpecificFlags = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_TYPE_SPECIFIC_FLAGS, 0, &propGetError);
if (const int64_t sourceTypeSpecificFlags = AVSF_VPS_API->propGetInt(frameProps, FRAME_PROP_NAME_TYPE_SPECIFIC_FLAGS, 0, &propGetError);
sourceTypeSpecificFlags & AM_VIDEO_FLAG_REPEAT_FIELD) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_REPEAT_FIELD;
sampleProps.dwTypeSpecificFlags |= AM_VIDEO_FLAG_REPEAT_FIELD;
}

outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&sampleProps));
}
}

Expand Down Expand Up @@ -455,7 +457,7 @@ auto FrameHandler::WorkerProc() -> void {

const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(iter->second);
int propGetError;
const int sourceFrameNb = static_cast<int>(AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, 0, &propGetError));
const int sourceFrameNb = static_cast<int>(AVSF_VPS_API->propGetInt(frameProps, FRAME_PROP_NAME_SOURCE_FRAME_NB, 0, &propGetError));

_lastUsedSourceFrameNb = sourceFrameNb;
_addInputSampleCv.notify_all();
Expand Down
6 changes: 0 additions & 6 deletions vapoursynth_filter/src/frame_handler.h
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,6 @@ class FrameHandler {
auto RefreshDeliveryFrameRates(int frameNb) -> void;

static constexpr const int NUM_SRC_FRAMES_PER_PROCESSING = 2;
static constexpr const char *VS_PROP_NAME_ABS_TIME = "_AbsoluteTime";
static constexpr const char *VS_PROP_NAME_DURATION_NUM = "_DurationNum";
static constexpr const char *VS_PROP_NAME_DURATION_DEN = "_DurationDen";
static constexpr const char *VS_PROP_NAME_FIELD_BASED = "_FieldBased";
static constexpr const char *VS_PROP_NAME_SOURCE_FRAME_NB = "VPSF_SourceFrameNb";
static constexpr const char *VS_PROP_NAME_TYPE_SPECIFIC_FLAGS = "VPSF_TypeSpecificFlags";

CSynthFilter &_filter;

Expand Down

0 comments on commit 1b6a764

Please sign in to comment.