Create macro for AVS env
CrendKing committed Jul 24, 2021
1 parent 7f217dd commit 10e1d8e
Showing 6 changed files with 74 additions and 72 deletions.
14 changes: 7 additions & 7 deletions avisynth_filter/src/format.cpp
@@ -88,7 +88,7 @@ auto Format::WriteSample(const VideoFormat &videoFormat, const PVideoFrame &srcF
}

auto Format::CreateFrame(const VideoFormat &videoFormat, const BYTE *srcBuffer) -> PVideoFrame {
-PVideoFrame frame = MainFrameServer::GetInstance().GetEnv()->NewVideoFrame(videoFormat.videoInfo, static_cast<int>(_vectorSize));
+PVideoFrame frame = AVSF_AVS_API->NewVideoFrame(videoFormat.videoInfo, static_cast<int>(_vectorSize));

const std::array dstSlices = { frame->GetWritePtr(), frame->GetWritePtr(PLANAR_U), frame->GetWritePtr(PLANAR_V) };
const std::array dstStrides = { frame->GetPitch(), frame->GetPitch(PLANAR_U), frame->GetPitch(PLANAR_V) };
@@ -113,7 +113,7 @@ auto Format::CopyFromInput(const VideoFormat &videoFormat, const BYTE *srcBuffer
srcMainPlaneStride = -srcMainPlaneStride;
}

-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstSlices[0], dstStrides[0], srcMainPlane, srcMainPlaneStride, rowSize, height);
+AVSF_AVS_API->BitBlt(dstSlices[0], dstStrides[0], srcMainPlane, srcMainPlaneStride, rowSize, height);

if (videoFormat.pixelFormat->frameServerFormatId & VideoInfo::CS_INTERLEAVED) {
return;
@@ -161,8 +161,8 @@ auto Format::CopyFromInput(const VideoFormat &videoFormat, const BYTE *srcBuffer
srcV = srcUVPlane2;
}

-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstSlices[1], dstStrides[1], srcU, srcUVStride, srcUVRowSize, srcUVHeight);
-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstSlices[2], dstStrides[2], srcV, srcUVStride, srcUVRowSize, srcUVHeight);
+AVSF_AVS_API->BitBlt(dstSlices[1], dstStrides[1], srcU, srcUVStride, srcUVRowSize, srcUVHeight);
+AVSF_AVS_API->BitBlt(dstSlices[2], dstStrides[2], srcV, srcUVStride, srcUVRowSize, srcUVHeight);
}
}

@@ -179,7 +179,7 @@ auto Format::CopyToOutput(const VideoFormat &videoFormat, const std::array<const
dstMainPlaneStride = -dstMainPlaneStride;
}

-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstMainPlane, dstMainPlaneStride, srcSlices[0], srcStrides[0], rowSize, height);
+AVSF_AVS_API->BitBlt(dstMainPlane, dstMainPlaneStride, srcSlices[0], srcStrides[0], rowSize, height);

if (videoFormat.pixelFormat->frameServerFormatId & VideoInfo::CS_INTERLEAVED) {
return;
@@ -221,8 +221,8 @@ auto Format::CopyToOutput(const VideoFormat &videoFormat, const std::array<const
dstV = dstUVPlane2;
}

-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstU, dstUVStride, srcSlices[1], srcStrides[1], dstUVRowSize, dstUVHeight);
-MainFrameServer::GetInstance().GetEnv()->BitBlt(dstV, dstUVStride, srcSlices[2], srcStrides[2], dstUVRowSize, dstUVHeight);
+AVSF_AVS_API->BitBlt(dstU, dstUVStride, srcSlices[1], srcStrides[1], dstUVRowSize, dstUVHeight);
+AVSF_AVS_API->BitBlt(dstV, dstUVStride, srcSlices[2], srcStrides[2], dstUVRowSize, dstUVHeight);
}
}
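
For reference, every call site changed in this file funnels plane data through the frame server environment's BitBlt, which is a strided row-by-row copy. Below is a minimal plain C++ sketch of that operation, shown only to clarify what the slice/stride arguments mean; CopyPlane is an illustrative stand-in, not the AviSynth implementation.

#include <cstdint>
#include <cstring>

// Strided plane copy: the operation a BitBlt-style call performs.
// dstStride/srcStride may exceed rowSize because of row padding, and a
// negative stride walks the buffer bottom-up (as CopyFromInput does for
// bottom-up RGB input).
void CopyPlane(uint8_t *dst, int dstStride, const uint8_t *src, int srcStride, int rowSize, int height) {
    for (int y = 0; y < height; ++y) {
        std::memcpy(dst, src, rowSize);
        dst += dstStride;
        src += srcStride;
    }
}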

2 changes: 2 additions & 0 deletions avisynth_filter/src/frameserver.h
@@ -86,4 +86,6 @@ class AuxFrameServer
constexpr auto GetScriptPixelType() const -> int { return _scriptVideoInfo.pixel_type; }
};

+#define AVSF_AVS_API MainFrameServer::GetInstance().GetEnv()

}
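
The new AVSF_AVS_API macro simply collapses the repeated singleton lookup into a single token, mirroring the shorter macro name already used on the VapourSynth side. A minimal self-contained sketch of the pattern follows; ScriptEnv and the MainFrameServer shown here are simplified stand-ins, not the real avisynth_filter classes.

#include <cstdio>

// Simplified stand-ins for the real frame server and script environment.
struct ScriptEnv {
    void BitBlt(const char *what) { std::printf("BitBlt %s\n", what); }
};

class MainFrameServer {
public:
    static MainFrameServer &GetInstance() { static MainFrameServer instance; return instance; }
    ScriptEnv *GetEnv() { return &_env; }
private:
    ScriptEnv _env;
};

// Same shape as the macro added above.
#define AVSF_AVS_API MainFrameServer::GetInstance().GetEnv()

int main() {
    // Before this commit: MainFrameServer::GetInstance().GetEnv()->BitBlt("luma");
    // After:
    AVSF_AVS_API->BitBlt("luma");
}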
38 changes: 19 additions & 19 deletions vapoursynth_filter/src/format.cpp
@@ -53,7 +53,7 @@ auto Format::GetVideoFormat(const AM_MEDIA_TYPE &mediaType, const FrameServerBas
.frameServer = vsCore
};
ret.videoInfo = {
-.format = AVSF_VS_API->getFormatPreset(ret.pixelFormat->frameServerFormatId, ret.frameServer),
+.format = AVSF_VPS_API->getFormatPreset(ret.pixelFormat->frameServerFormatId, ret.frameServer),
.fpsNum = fpsNum,
.fpsDen = fpsDen,
.width = ret.bmi.biWidth,
@@ -85,29 +85,29 @@ auto Format::GetVideoFormat(const AM_MEDIA_TYPE &mediaType, const FrameServerBas
}

auto Format::WriteSample(const VideoFormat &videoFormat, const VSFrameRef *srcFrame, BYTE *dstBuffer) -> void {
-const std::array srcSlices = { AVSF_VS_API->getReadPtr(srcFrame, 0)
-, videoFormat.videoInfo.format->numPlanes < 2 ? nullptr : AVSF_VS_API->getReadPtr(srcFrame, 1)
-, videoFormat.videoInfo.format->numPlanes < 3 ? nullptr : AVSF_VS_API->getReadPtr(srcFrame, 2) };
-const std::array srcStrides = { AVSF_VS_API->getStride(srcFrame, 0)
-, videoFormat.videoInfo.format->numPlanes < 2 ? 0 : AVSF_VS_API->getStride(srcFrame, 1)
-, videoFormat.videoInfo.format->numPlanes < 3 ? 0 : AVSF_VS_API->getStride(srcFrame, 2) };
-const int rowSize = AVSF_VS_API->getFrameWidth(srcFrame, 0) * videoFormat.videoInfo.format->bytesPerSample;
-
-CopyToOutput(videoFormat, srcSlices, srcStrides, dstBuffer, rowSize, AVSF_VS_API->getFrameHeight(srcFrame, 0));
+const std::array srcSlices = { AVSF_VPS_API->getReadPtr(srcFrame, 0)
+, videoFormat.videoInfo.format->numPlanes < 2 ? nullptr : AVSF_VPS_API->getReadPtr(srcFrame, 1)
+, videoFormat.videoInfo.format->numPlanes < 3 ? nullptr : AVSF_VPS_API->getReadPtr(srcFrame, 2) };
+const std::array srcStrides = { AVSF_VPS_API->getStride(srcFrame, 0)
+, videoFormat.videoInfo.format->numPlanes < 2 ? 0 : AVSF_VPS_API->getStride(srcFrame, 1)
+, videoFormat.videoInfo.format->numPlanes < 3 ? 0 : AVSF_VPS_API->getStride(srcFrame, 2) };
+const int rowSize = AVSF_VPS_API->getFrameWidth(srcFrame, 0) * videoFormat.videoInfo.format->bytesPerSample;
+
+CopyToOutput(videoFormat, srcSlices, srcStrides, dstBuffer, rowSize, AVSF_VPS_API->getFrameHeight(srcFrame, 0));
}

auto Format::CreateFrame(const VideoFormat &videoFormat, const BYTE *srcBuffer) -> VSFrameRef * {
-VSFrameRef *frame = AVSF_VS_API->newVideoFrame(videoFormat.videoInfo.format, videoFormat.videoInfo.width, videoFormat.videoInfo.height, nullptr, videoFormat.frameServer);
+VSFrameRef *frame = AVSF_VPS_API->newVideoFrame(videoFormat.videoInfo.format, videoFormat.videoInfo.width, videoFormat.videoInfo.height, nullptr, videoFormat.frameServer);

-const std::array dstSlices = { AVSF_VS_API->getWritePtr(frame, 0)
-, videoFormat.videoInfo.format->numPlanes < 2 ? nullptr : AVSF_VS_API->getWritePtr(frame, 1)
-, videoFormat.videoInfo.format->numPlanes < 3 ? nullptr : AVSF_VS_API->getWritePtr(frame, 2) };
-const std::array dstStrides = { AVSF_VS_API->getStride(frame, 0)
-, videoFormat.videoInfo.format->numPlanes < 2 ? 0 : AVSF_VS_API->getStride(frame, 1)
-, videoFormat.videoInfo.format->numPlanes < 3 ? 0 : AVSF_VS_API->getStride(frame, 2) };
-const int rowSize = AVSF_VS_API->getFrameWidth(frame, 0) * videoFormat.videoInfo.format->bytesPerSample;
+const std::array dstSlices = { AVSF_VPS_API->getWritePtr(frame, 0)
+, videoFormat.videoInfo.format->numPlanes < 2 ? nullptr : AVSF_VPS_API->getWritePtr(frame, 1)
+, videoFormat.videoInfo.format->numPlanes < 3 ? nullptr : AVSF_VPS_API->getWritePtr(frame, 2) };
+const std::array dstStrides = { AVSF_VPS_API->getStride(frame, 0)
+, videoFormat.videoInfo.format->numPlanes < 2 ? 0 : AVSF_VPS_API->getStride(frame, 1)
+, videoFormat.videoInfo.format->numPlanes < 3 ? 0 : AVSF_VPS_API->getStride(frame, 2) };
+const int rowSize = AVSF_VPS_API->getFrameWidth(frame, 0) * videoFormat.videoInfo.format->bytesPerSample;

-CopyFromInput(videoFormat, srcBuffer, dstSlices, dstStrides, rowSize, AVSF_VS_API->getFrameHeight(frame, 0));
+CopyFromInput(videoFormat, srcBuffer, dstSlices, dstStrides, rowSize, AVSF_VPS_API->getFrameHeight(frame, 0));

return frame;
}
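
The renamed AVSF_VPS_API macro presumably still resolves to the filter's VSAPI accessor; its definition is not part of this diff. For clarity, here is a simplified sketch of the per-plane read pattern that WriteSample builds its slice/stride arrays from, written against the VapourSynth C API (API v3) with an explicit VSAPI pointer. The real code delegates the actual copying, packed-format handling, and UV plane ordering to CopyToOutput.

#include <VapourSynth.h>
#include <cstdint>
#include <cstring>

// Copy every plane of a VapourSynth frame into a tightly packed buffer.
// Simplified: the filter's CopyToOutput additionally handles interleaved
// formats and UV plane ordering.
void CopyFramePlanes(const VSAPI *vsapi, const VSFrameRef *frame, uint8_t *dstBuffer) {
    const VSFormat *format = vsapi->getFrameFormat(frame);

    for (int plane = 0; plane < format->numPlanes; ++plane) {
        const uint8_t *src = vsapi->getReadPtr(frame, plane);
        const int stride = vsapi->getStride(frame, plane);
        const int rowSize = vsapi->getFrameWidth(frame, plane) * format->bytesPerSample;
        const int height = vsapi->getFrameHeight(frame, plane);

        for (int y = 0; y < height; ++y) {
            std::memcpy(dstBuffer, src, rowSize);  // stride may exceed rowSize due to padding
            src += stride;
            dstBuffer += rowSize;
        }
    }
}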
50 changes: 25 additions & 25 deletions vapoursynth_filter/src/frame_handler.cpp
@@ -70,18 +70,18 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
}

VSFrameRef *frame = Format::CreateFrame(_filter._inputVideoFormat, sampleBuffer);
-VSMap *frameProps = AVSF_VS_API->getFramePropsRW(frame);
-AVSF_VS_API->propSetFloat(frameProps, VS_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), paReplace);
-AVSF_VS_API->propSetInt(frameProps, "_SARNum", _filter._inputVideoFormat.pixelAspectRatioNum, paReplace);
-AVSF_VS_API->propSetInt(frameProps, "_SARDen", _filter._inputVideoFormat.pixelAspectRatioDen, paReplace);
-AVSF_VS_API->propSetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, _nextSourceFrameNb, paReplace);
+VSMap *frameProps = AVSF_VPS_API->getFramePropsRW(frame);
+AVSF_VPS_API->propSetFloat(frameProps, VS_PROP_NAME_ABS_TIME, inputSampleStartTime / static_cast<double>(UNITS), paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_SARNum", _filter._inputVideoFormat.pixelAspectRatioNum, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_SARDen", _filter._inputVideoFormat.pixelAspectRatioDen, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, _nextSourceFrameNb, paReplace);

if (const std::optional<int> &optColorRange = _filter._inputVideoFormat.colorSpaceInfo.colorRange) {
-AVSF_VS_API->propSetInt(frameProps, "_ColorRange", *optColorRange, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_ColorRange", *optColorRange, paReplace);
}
-AVSF_VS_API->propSetInt(frameProps, "_Primaries", _filter._inputVideoFormat.colorSpaceInfo.primaries, paReplace);
-AVSF_VS_API->propSetInt(frameProps, "_Matrix", _filter._inputVideoFormat.colorSpaceInfo.matrix, paReplace);
-AVSF_VS_API->propSetInt(frameProps, "_Transfer", _filter._inputVideoFormat.colorSpaceInfo.transfer, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_Primaries", _filter._inputVideoFormat.colorSpaceInfo.primaries, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_Matrix", _filter._inputVideoFormat.colorSpaceInfo.matrix, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, "_Transfer", _filter._inputVideoFormat.colorSpaceInfo.transfer, paReplace);

std::unique_ptr<HDRSideData> hdrSideData = std::make_unique<HDRSideData>();
{
@@ -142,12 +142,12 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {
}
_nextProcessSourceFrameNb = processSourceFrameIters[1]->first;

-frameProps = AVSF_VS_API->getFramePropsRW(processSourceFrameIters[0]->second.frame);
+frameProps = AVSF_VPS_API->getFramePropsRW(processSourceFrameIters[0]->second.frame);
REFERENCE_TIME frameDurationNum = processSourceFrameIters[1]->second.startTime - processSourceFrameIters[0]->second.startTime;
REFERENCE_TIME frameDurationDen = UNITS;
vs_normalizeRational(&frameDurationNum, &frameDurationDen);
-AVSF_VS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_NUM, frameDurationNum, paReplace);
-AVSF_VS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_DEN, frameDurationDen, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_NUM, frameDurationNum, paReplace);
+AVSF_VPS_API->propSetInt(frameProps, VS_PROP_NAME_DURATION_DEN, frameDurationDen, paReplace);
_newSourceFrameCv.notify_all();

const int maxRequestOutputFrameNb = static_cast<int>(llMulDiv(processSourceFrameIters[0]->first,
@@ -163,7 +163,7 @@ auto FrameHandler::AddInputSample(IMediaSample *inputSample) -> HRESULT {

_outputFrames.emplace(_nextOutputFrameNb, nullptr);
}
-AVSF_VS_API->getFrameAsync(_nextOutputFrameNb, MainFrameServer::GetInstance().GetScriptClip(), VpsGetFrameCallback, this);
+AVSF_VPS_API->getFrameAsync(_nextOutputFrameNb, MainFrameServer::GetInstance().GetScriptClip(), VpsGetFrameCallback, this);

_nextOutputFrameNb += 1;
}
@@ -188,8 +188,8 @@ auto FrameHandler::GetSourceFrame(int frameNb) -> const VSFrameRef * {
return false;
}

-const VSMap *frameProps = AVSF_VS_API->getFramePropsRO(iter->second.frame);
-return AVSF_VS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_NUM) > 0 && AVSF_VS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_DEN) > 0;
+const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(iter->second.frame);
+return AVSF_VPS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_NUM) > 0 && AVSF_VPS_API->propNumElements(frameProps, VS_PROP_NAME_DURATION_DEN) > 0;
});

if (_isFlushing) {
@@ -243,7 +243,7 @@ auto FrameHandler::EndFlush(const std::function<void ()> &interim) -> void {
// only the current thread is active here, no need to lock

for (const VSFrameRef *frame : _outputFrames | std::views::values) {
-AVSF_VS_API->freeFrame(frame);
+AVSF_VPS_API->freeFrame(frame);
}
_outputFrames.clear();

@@ -257,7 +257,7 @@ }
}

FrameHandler::SourceFrameInfo::~SourceFrameInfo() {
-AVSF_VS_API->freeFrame(frame);
+AVSF_VPS_API->freeFrame(frame);
}

auto VS_CC FrameHandler::VpsGetFrameCallback(void *userData, const VSFrameRef *f, int n, VSNodeRef *node, const char *errorMsg) -> void {
@@ -277,7 +277,7 @@ auto VS_CC FrameHandler::VpsGetFrameCallback(void *userData, const VSFrameRef *f
}
frameHandler->_flushOutputSampleCv.notify_all();

-AVSF_VS_API->freeFrame(f);
+AVSF_VPS_API->freeFrame(f);
} else {
{
std::shared_lock sharedOutputLock(frameHandler->_outputMutex);
@@ -300,10 +300,10 @@ auto FrameHandler::ResetInput() -> void {
}

auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int outputFrameNb, const VSFrameRef *outputFrame, int sourceFrameNb) -> bool {
-const VSMap *frameProps = AVSF_VS_API->getFramePropsRO(outputFrame);
+const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(outputFrame);
int propGetError;
-const int64_t frameDurationNum = AVSF_VS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_NUM, 0, &propGetError);
-const int64_t frameDurationDen = AVSF_VS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_DEN, 0, &propGetError);
+const int64_t frameDurationNum = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_NUM, 0, &propGetError);
+const int64_t frameDurationDen = AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_DURATION_DEN, 0, &propGetError);
int64_t frameDuration;

if (frameDurationNum > 0 && frameDurationDen > 0) {
@@ -313,7 +313,7 @@ auto FrameHandler::PrepareOutputSample(ATL::CComPtr<IMediaSample> &sample, int o
}

if (_nextOutputFrameStartTime == 0) {
-_nextOutputFrameStartTime = static_cast<REFERENCE_TIME>(AVSF_VS_API->propGetFloat(frameProps, VS_PROP_NAME_ABS_TIME, 0, &propGetError) * UNITS);
+_nextOutputFrameStartTime = static_cast<REFERENCE_TIME>(AVSF_VPS_API->propGetFloat(frameProps, VS_PROP_NAME_ABS_TIME, 0, &propGetError) * UNITS);
}

REFERENCE_TIME frameStartTime = _nextOutputFrameStartTime;
@@ -437,9 +437,9 @@ auto FrameHandler::WorkerProc() -> void {
continue;
}

-const VSMap *frameProps = AVSF_VS_API->getFramePropsRO(iter->second);
+const VSMap *frameProps = AVSF_VPS_API->getFramePropsRO(iter->second);
int propGetError;
-const int sourceFrameNb = static_cast<int>(AVSF_VS_API->propGetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, 0, &propGetError));
+const int sourceFrameNb = static_cast<int>(AVSF_VPS_API->propGetInt(frameProps, VS_PROP_NAME_SOURCE_FRAME_NB, 0, &propGetError));

_lastUsedSourceFrameNb = sourceFrameNb;
_addInputSampleCv.notify_all();
@@ -454,7 +454,7 @@ auto FrameHandler::WorkerProc() -> void {
{
std::unique_lock uniqueOutputLock(_outputMutex);

-AVSF_VS_API->freeFrame(iter->second);
+AVSF_VPS_API->freeFrame(iter->second);
_outputFrames.erase(iter);
}
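
To illustrate the frame-property tagging that AddInputSample performs above, here is a short sketch using the VapourSynth C API (API v3) directly rather than the AVSF_VPS_API macro. The "_AbsoluteTime" and "AvsfSourceFrameNb" strings are illustrative stand-ins for the project's VS_PROP_NAME_ABS_TIME and VS_PROP_NAME_SOURCE_FRAME_NB constants, whose actual values are defined elsewhere; "_SARNum", "_SARDen" and paReplace are used exactly as in the diff.

#include <VapourSynth.h>
#include <cstdint>

// Attach the same kinds of properties that AddInputSample sets on each
// source frame. Property name strings marked below are stand-ins for the
// VS_PROP_NAME_* constants used by the filter.
void TagSourceFrame(const VSAPI *vsapi, VSFrameRef *frame,
                    double absTimeSeconds, int64_t sourceFrameNb,
                    int64_t sarNum, int64_t sarDen) {
    VSMap *props = vsapi->getFramePropsRW(frame);

    vsapi->propSetFloat(props, "_AbsoluteTime", absTimeSeconds, paReplace);   // stand-in for VS_PROP_NAME_ABS_TIME
    vsapi->propSetInt(props, "AvsfSourceFrameNb", sourceFrameNb, paReplace);  // stand-in for VS_PROP_NAME_SOURCE_FRAME_NB
    vsapi->propSetInt(props, "_SARNum", sarNum, paReplace);
    vsapi->propSetInt(props, "_SARDen", sarDen, paReplace);
}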
