Skip to content

Commit

Permalink
VPSF: Instead of passing through the dwTypeSpecificFlags, properly construct this data from related output frame information.
Browse files Browse the repository at this point in the history

AVSF: AVS+ does not have an API to support this until the introduction of frame properties in 3.6. AviSynth/AviSynthPlus#225
  • Loading branch information
CrendKing committed Jul 24, 2021
1 parent 10e1d8e commit b061824
Show file tree
Hide file tree
Showing 6 changed files with 74 additions and 61 deletions.
40 changes: 16 additions & 24 deletions avisynth_filter/src/frame_handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,15 +62,7 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
return S_FALSE;
}

DWORD typeSpecificFlags = AM_VIDEO_FLAG_INTERLEAVED_FRAME;
if (const ATL::CComQIPtr<IMediaSample2> mediaSample2(inputSample); mediaSample2 != nullptr) {
AM_SAMPLE2_PROPERTIES props;
if (SUCCEEDED(mediaSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
typeSpecificFlags = props.dwTypeSpecificFlags;
}
}

const PVideoFrame frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);
PVideoFrame frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);

std::unique_ptr<HDRSideData> hdrSideData = std::make_unique<HDRSideData>();
{
Expand Down Expand Up @@ -99,7 +91,7 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {

_sourceFrames.emplace(std::piecewise_construct,
std::forward_as_tuple(_nextSourceFrameNb),
std::forward_as_tuple(frame, inputSampleStartTime, typeSpecificFlags, std::move(hdrSideData)));
std::forward_as_tuple(frame, inputSampleStartTime, _filter.m_pInput->SampleProps()->dwTypeSpecificFlags, std::move(hdrSideData)));
}
_newSourceFrameCv.notify_all();

Expand Down Expand Up @@ -188,15 +180,15 @@ auto FrameHandler::ResetInput() -> void {
_currentInputFrameRate = 0;
}

auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, REFERENCE_TIME startTime, REFERENCE_TIME stopTime) -> bool {
if (FAILED(_filter.m_pOutput->GetDeliveryBuffer(&sample, &startTime, &stopTime, 0))) {
auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, REFERENCE_TIME startTime, REFERENCE_TIME stopTime, DWORD sourceTypeSpecificFlags) -> bool {
if (FAILED(_filter.m_pOutput->GetDeliveryBuffer(&outSample, &startTime, &stopTime, 0))) {
// avoid releasing the invalid pointer in case the function change it to some random invalid address
sample.Detach();
outSample.Detach();
return false;
}

AM_MEDIA_TYPE *pmtOut;
sample->GetMediaType(&pmtOut);
outSample->GetMediaType(&pmtOut);

if (const std::shared_ptr<AM_MEDIA_TYPE> pmtOutPtr(pmtOut, &DeleteMediaType);
pmtOut != nullptr && pmtOut->pbFormat != nullptr) {
Expand All @@ -206,22 +198,22 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, REFER
}

if (_notifyChangedOutputMediaType) {
sample->SetMediaType(&_filter.m_pOutput->CurrentMediaType());
outSample->SetMediaType(&_filter.m_pOutput->CurrentMediaType());
_notifyChangedOutputMediaType = false;

Environment::GetInstance().Log(L"New output format: name %s, width %5li, height %5li",
_filter._outputVideoFormat.pixelFormat->name, _filter._outputVideoFormat.bmi.biWidth, _filter._outputVideoFormat.bmi.biHeight);
}

if (FAILED(sample->SetTime(&startTime, &stopTime))) {
if (FAILED(outSample->SetTime(&startTime, &stopTime))) {
return false;
}

if (_nextOutputFrameNb == 0 && FAILED(sample->SetDiscontinuity(TRUE))) {
if (_nextOutputFrameNb == 0 && FAILED(outSample->SetDiscontinuity(TRUE))) {
return false;
}

if (BYTE *outputBuffer; FAILED(sample->GetPointer(&outputBuffer))) {
if (BYTE *outputBuffer; FAILED(outSample->GetPointer(&outputBuffer))) {
return false;
} else {
try {
Expand Down Expand Up @@ -329,20 +321,20 @@ auto FrameHandler::WorkerProc() -> void {

RefreshOutputFrameRates(_nextOutputFrameNb);

if (ATL::CComPtr<IMediaSample> outputSample; PrepareOutputSample(outputSample, outputStartTime, outputStopTime)) {
if (const ATL::CComQIPtr<IMediaSample2> mediaSample2(outputSample); mediaSample2 != nullptr) {
if (ATL::CComPtr<IMediaSample> outSample; PrepareOutputSample(outSample, outputStartTime, outputStopTime, processSourceFrameIters[0]->second.typeSpecificFlags)) {
if (const ATL::CComQIPtr<IMediaSample2> outSample2(outSample); outSample2 != nullptr) {
AM_SAMPLE2_PROPERTIES props;
if (SUCCEEDED(mediaSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
if (SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
props.dwTypeSpecificFlags = processSourceFrameIters[0]->second.typeSpecificFlags;
mediaSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
}
}

if (const ATL::CComQIPtr<IMediaSideData> sideData(outputSample); sideData != nullptr) {
if (const ATL::CComQIPtr<IMediaSideData> sideData(outSample); sideData != nullptr) {
processSourceFrameIters[0]->second.hdrSideData->WriteTo(sideData);
}

_filter.m_pOutput->Deliver(outputSample);
_filter.m_pOutput->Deliver(outSample);
RefreshDeliveryFrameRates(_nextOutputFrameNb);

Environment::GetInstance().Log(L"Delivered frame %6i", _nextOutputFrameNb);
Expand Down
2 changes: 1 addition & 1 deletion avisynth_filter/src/frame_handler.h
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ class FrameHandler {
static auto RefreshFrameRatesTemplate(int sampleNb, int &checkpointSampleNb, DWORD &checkpointTime, int &currentFrameRate) -> void;

auto ResetInput() -> void;
auto PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, REFERENCE_TIME startTime, REFERENCE_TIME stopTime) -> bool;
auto PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, REFERENCE_TIME startTime, REFERENCE_TIME stopTime, DWORD sourceTypeSpecificFlags) -> bool;
auto WorkerProc() -> void;
auto GarbageCollect(int srcFrameNb) -> void;
auto ChangeOutputFormat() -> bool;
Expand Down
2 changes: 1 addition & 1 deletion avisynth_filter/src/source_clip.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ class SourceClip : public IClip {
auto SetFrameHandler(FrameHandler *frameHandler) -> void;

auto __stdcall GetFrame(int frameNb, IScriptEnvironment *env) -> PVideoFrame override;
constexpr auto __stdcall GetParity(int frameNb) -> bool override { return false; }
constexpr auto __stdcall GetParity(int frameNb) -> bool override { return true; }
constexpr auto __stdcall GetAudio(void *buf, int64_t start, int64_t count, IScriptEnvironment *env) -> void override {}
constexpr auto __stdcall SetCacheHints(int cachehints, int frame_range) -> int override {
return cachehints == CACHE_GET_MTMODE ? MT_NICE_FILTER : 0;
Expand Down
4 changes: 4 additions & 0 deletions filter_common/src/filter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -271,6 +271,10 @@ auto CSynthFilter::CompleteConnect(PIN_DIRECTION direction, IPin *pReceivePin) -
auto CSynthFilter::Receive(IMediaSample *pSample) -> HRESULT {
HRESULT hr;

if (m_pInput->SampleProps()->dwStreamId != AM_STREAM_MEDIA) {
return m_pOutput->Deliver(pSample);
}

AM_MEDIA_TYPE *pmt;
pSample->GetMediaType(&pmt);
if (pmt != nullptr && pmt->pbFormat != nullptr) {
Expand Down
72 changes: 44 additions & 28 deletions vapoursynth_filter/src/frame_handler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -61,14 +61,6 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
return S_FALSE;
}

DWORD typeSpecificFlags = AM_VIDEO_FLAG_INTERLEAVED_FRAME;
if (const ATL::CComQIPtr<IMediaSample2> mediaSample2(inputSample); mediaSample2 != nullptr) {
AM_SAMPLE2_PROPERTIES props;
if (SUCCEEDED(mediaSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
typeSpecificFlags = props.dwTypeSpecificFlags;
}
}

VSFrameRef *frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);
VSMap *frameProps = AVSF_VPS_API->getFramePropsRW(frame);
AVSF_VPS_API->propSetFloat(frameProps, VS_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), paReplace);
Expand All @@ -83,6 +75,18 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
AVSF_VPS_API->propSetInt(frameProps, "_Matrix", _filter._inputVideoFormat.colorSpaceInfo.matrix, paReplace);
AVSF_VPS_API->propSetInt(frameProps, "_Transfer", _filter._inputVideoFormat.colorSpaceInfo.transfer, paReplace);

const DWORD typeSpecificFlags = _filter.m_pInput->SampleProps()->dwTypeSpecificFlags;
int rfpFieldBased;
if (typeSpecificFlags & AM_VIDEO_FLAG_WEAVE) {
rfpFieldBased = 0;
} else if (typeSpecificFlags & AM_VIDEO_FLAG_FIELD1FIRST) {
rfpFieldBased = 2;
} else {
rfpFieldBased = 1;
}
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_FIELD_BASED, rfpFieldBased, paReplace);
AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_TYPE_SPECIFIC_FLAGS, typeSpecificFlags, paReplace);

std::unique_ptr<HDRSideData> hdrSideData = std::make_unique<HDRSideData>();
{
if (const ATL::CComQIPtr<IMediaSideData> inputSampleSideData(inputSample); inputSampleSideData != nullptr) {
Expand Down Expand Up @@ -110,7 +114,7 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {

_sourceFrames.emplace(std::piecewise_construct,
std::forward_as_tuple(_nextSourceFrameNb),
std::forward_as_tuple(frame, inputSampleStartTime, typeSpecificFlags, std::move(hdrSideData)));
std::forward_as_tuple(frame, inputSampleStartTime, std::move(hdrSideData)));
}

Environment::GetInstance().Log(L"Stored source frame: %6i at %10lli ~ %10lli duration(literal) %10lli, last_used %6i, extra_buffer %6i",
Expand Down Expand Up @@ -299,7 +303,7 @@ auto FrameHandler::ResetInput() -> void {
_currentInputFrameRate = 0;
}

auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool {
auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool {
const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(outputFrame);
int propGetError;
const int64_t frameDurationNum = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_NUM, 0, &propGetError);
Expand All @@ -322,14 +326,14 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int o

Environment::GetInstance().Log(L"Output frame: frameNb %6i startTime %10lli stopTime %10lli duration %10lli", outputFrameNb, frameStartTime, frameStopTime, frameDuration);

if (FAILED(_filter.m_pOutput->GetDeliveryBuffer(&sample, &frameStartTime, &frameStopTime, 0))) {
if (FAILED(_filter.m_pOutput->GetDeliveryBuffer(&outSample, &frameStartTime, &frameStopTime, 0))) {
// avoid releasing the invalid pointer in case the function change it to some random invalid address
sample.Detach();
outSample.Detach();
return false;
}

AM_MEDIA_TYPE *pmtOut;
sample->GetMediaType(&pmtOut);
outSample->GetMediaType(&pmtOut);

if (const std::shared_ptr<AM_MEDIA_TYPE> pmtOutPtr(pmtOut, &DeleteMediaType);
pmtOut != nullptr && pmtOut->pbFormat != nullptr) {
Expand All @@ -339,40 +343,52 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int o
}

if (_notifyChangedOutputMediaType) {
sample->SetMediaType(&_filter.m_pOutput->CurrentMediaType());
outSample->SetMediaType(&_filter.m_pOutput->CurrentMediaType());
_notifyChangedOutputMediaType = false;

Environment::GetInstance().Log(L"New output format: name %s, width %5li, height %5li",
_filter._outputVideoFormat.pixelFormat->name, _filter._outputVideoFormat.bmi.biWidth, _filter._outputVideoFormat.bmi.biHeight);
}

if (FAILED(sample->SetTime(&frameStartTime, &frameStopTime))) {
if (FAILED(outSample->SetTime(&frameStartTime, &frameStopTime))) {
return false;
}

if (outputFrameNb == 0 && FAILED(sample->SetDiscontinuity(TRUE))) {
if (outputFrameNb == 0 && FAILED(outSample->SetDiscontinuity(TRUE))) {
return false;
}

BYTE *outputBuffer;
if (FAILED(sample->GetPointer(&outputBuffer))) {
if (FAILED(outSample->GetPointer(&outputBuffer))) {
return false;
}

Format::WriteSample(_filter._outputVideoFormat, outputFrame, outputBuffer);
if (const ATL::CComQIPtr<IMediaSample2> outSample2(outSample); outSample2 != nullptr) {
if (AM_SAMPLE2_PROPERTIES props; SUCCEEDED(outSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
if (const int64_t rfpFieldBased = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_FIELD_BASED, 0, &propGetError);
propGetError == peUnset || rfpFieldBased == 0) {
props.dwTypeSpecificFlags = AM_VIDEO_FLAG_WEAVE;
} else if (rfpFieldBased == 2) {
props.dwTypeSpecificFlags = AM_VIDEO_FLAG_FIELD1FIRST;
} else {
props.dwTypeSpecificFlags = 0;
}

const decltype(_sourceFrames)::const_iterator iter = _sourceFrames.find(sourceFrameNb);
ASSERT(iter != _sourceFrames.end());
if (const int64_t sourceTypeSpecificFlags = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_TYPE_SPECIFIC_FLAGS, 0, &propGetError);
sourceTypeSpecificFlags & AM_VIDEO_FLAG_REPEAT_FIELD) {
props.dwTypeSpecificFlags |= AM_VIDEO_FLAG_REPEAT_FIELD;
}

if (const ATL::CComQIPtr<IMediaSample2> mediaSample2(sample); mediaSample2 != nullptr) {
AM_SAMPLE2_PROPERTIES props;
if (SUCCEEDED(mediaSample2->GetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props)))) {
props.dwTypeSpecificFlags = iter->second.typeSpecificFlags;
mediaSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
outSample2->SetProperties(SAMPLE2_TYPE_SPECIFIC_FLAGS_SIZE, reinterpret_cast<BYTE *>(&props));
}
}

if (const ATL::CComQIPtr<IMediaSideData> sideData(sample); sideData != nullptr) {
Format::WriteSample(_filter._outputVideoFormat, outputFrame, outputBuffer);

const decltype(_sourceFrames)::const_iterator iter = _sourceFrames.find(sourceFrameNb);
ASSERT(iter != _sourceFrames.end());

if (const ATL::CComQIPtr<IMediaSideData> sideData(outSample); sideData != nullptr) {
iter->second.hdrSideData->WriteTo(sideData);
}

Expand Down Expand Up @@ -444,8 +460,8 @@ auto FrameHandler::WorkerProc() -> void {
_lastUsedSourceFrameNb = sourceFrameNb;
_addInputSampleCv.notify_all();

if (ATL::CComPtr<IMediaSample> outputSample; PrepareOutputSample(outputSample, iter->first, iter->second, sourceFrameNb)) {
_filter.m_pOutput->Deliver(outputSample);
if (ATL::CComPtr<IMediaSample> outSample; PrepareOutputSample(outSample, iter->first, iter->second, sourceFrameNb)) {
_filter.m_pOutput->Deliver(outSample);
RefreshDeliveryFrameRates(iter->first);

Environment::GetInstance().Log(L"Delivered output sample %6i from source frame %6i", iter->first, sourceFrameNb);
Expand Down
15 changes: 8 additions & 7 deletions vapoursynth_filter/src/frame_handler.h
Original file line number Diff line number Diff line change
Expand Up @@ -35,15 +35,14 @@ class FrameHandler {

VSFrameRef *frame;
REFERENCE_TIME startTime;
DWORD typeSpecificFlags;
std::unique_ptr<HDRSideData> hdrSideData;
};

static auto VS_CC VpsGetFrameCallback(void *userData, const VSFrameRef *f, int n, VSNodeRef *node, const char *errorMsg) -> void;
static auto RefreshFrameRatesTemplate(int sampleNb, int &checkpointSampleNb, DWORD &checkpointStartTime, int &currentFrameRate) -> void;

auto ResetInput() -> void;
auto PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool;
auto PrepareOutputSample(ATL::CComPtr<IMediaSample> &outSample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool;
auto WorkerProc() -> void;
auto GarbageCollect(int srcFrameNb) -> void;
auto ChangeOutputFormat() -> bool;
Expand All @@ -52,11 +51,13 @@ class FrameHandler {
auto RefreshOutputFrameRates(int frameNb) -> void;
auto RefreshDeliveryFrameRates(int frameNb) -> void;

static constexpr const int NUM_SRC_FRAMES_PER_PROCESSING = 2;
static constexpr const char *VS_PROP_NAME_ABS_TIME = "_AbsoluteTime";
static constexpr const char *VS_PROP_NAME_DURATION_NUM = "_DurationNum";
static constexpr const char *VS_PROP_NAME_DURATION_DEN = "_DurationDen";
static constexpr const char *VS_PROP_NAME_SOURCE_FRAME_NB = "VPSF_SourceFrameNb";
static constexpr const int NUM_SRC_FRAMES_PER_PROCESSING = 2;
static constexpr const char *VS_PROP_NAME_ABS_TIME = "_AbsoluteTime";
static constexpr const char *VS_PROP_NAME_DURATION_NUM = "_DurationNum";
static constexpr const char *VS_PROP_NAME_DURATION_DEN = "_DurationDen";
static constexpr const char *VS_PROP_NAME_FIELD_BASED = "_FieldBased";
static constexpr const char *VS_PROP_NAME_SOURCE_FRAME_NB = "VPSF_SourceFrameNb";
static constexpr const char *VS_PROP_NAME_TYPE_SPECIFIC_FLAGS = "VPSF_TypeSpecificFlags";

CSynthFilter &_filter;

Expand Down

0 comments on commit b061824

Please sign in to comment.