
Commit

use Scope.safe to replace recent usages of 'unsafe' deleteTempFrameAndItsNonSharedVecs
sebhrusen committed Oct 24, 2023
1 parent 31fca13 commit b30fa86
Showing 9 changed files with 81 additions and 152 deletions.
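
The change applied across these files follows one pattern: manual cleanup via Frame.deleteTempFrameAndItsNonSharedVecs (and the null-checked finally branches around it) is replaced by a try-with-resources Scope. The sketch below is illustrative only; it uses just the calls visible in this diff (Scope.safe, Scope.track, Scope.untrack), while the method name, its arguments, and the helpers buildIntermediate/makeResult are hypothetical placeholders, not code from the repository.

    // Sketch only — illustrates the lifecycle pattern this commit adopts.
    Frame computeWithTemporaries(Frame frame, Frame backgroundFrame) {
        // Open a scope; the input frames are passed in so they survive the cleanup.
        try (Scope.Safe safe = Scope.safe(frame, backgroundFrame)) {
            // Temporaries created inside the scope are tracked and released
            // automatically when the scope closes, replacing the old
            // deleteTempFrameAndItsNonSharedVecs bookkeeping.
            Frame tmp = Scope.track(buildIntermediate(frame, backgroundFrame)); // buildIntermediate: hypothetical helper
            Frame result = makeResult(tmp);                                     // makeResult: hypothetical helper
            // Untrack the result so it is handed back to the caller instead of being deleted.
            return Scope.untrack(result);
        }
    }

With this shape, the separate success/failure branches in the old finally blocks disappear: whatever is still tracked when the scope closes is removed, and only the explicitly untracked result escapes.
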
@@ -200,9 +200,8 @@ public Frame runAndGetOutput(Job j, Key<Frame> destinationKey, String[] names) {

Log.warn("Not enough memory to calculate SHAP at once. Calculating in " + (nSubFrames) + " iterations.");
_isFrameBigger = false; // ensure we map over the BG frame so we can average over the results properly;
Frame result = null;
List<Frame> subFrames = new LinkedList<Frame>();
try {
try (Scope.Safe safe = Scope.safe()) {
List<Frame> subFrames = new LinkedList<Frame>();
for (int i = 0; i < nSubFrames; i++) {
setChunkRange(i * chunksPerIter, Math.min(nChunks - 1, (i + 1) * chunksPerIter - 1));
Frame indivContribs = clone().withPostMapAction(JobUpdatePostMap.forJob(j))
@@ -217,23 +216,14 @@ public Frame runAndGetOutput(Job j, Key<Frame> destinationKey, String[] names) {
indivContribs.delete();
}

result = concatFrames(subFrames, destinationKey);
Set<String> homes = new HashSet<>();
Frame result = concatFrames(subFrames, destinationKey);
Set<String> homes = new HashSet<>(); // not used?
for (int i = 0; i < result.anyVec().nChunks(); i++) {
for (int k = 0; k < result.numCols(); k++) {
homes.add(result.vec(k).chunkKey(i).home_node().getIpPortString());
}
}
return result;
} finally {
if (null != result) {
for (Frame fr : subFrames) {
Frame.deleteTempFrameAndItsNonSharedVecs(fr, result);
}
} else {
for (Frame fr : subFrames)
fr.delete();
}
return Scope.untrack(result);
}
} else {
Frame indivContribs = withPostMapAction(JobUpdatePostMap.forJob(j))
13 changes: 4 additions & 9 deletions h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
@@ -46,13 +46,10 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
if (null == backgroundFrame)
throw H2O.unimpl("DeepLearning supports contribution calculation only with a background frame.");
Log.info("Starting contributions calculation for "+this._key+"...");
List<Frame> tmpFrames = new LinkedList<>();
Frame adaptedFrame = null;
Frame adaptedBgFrame = null;
try {
adaptedBgFrame = adaptFrameForScore(backgroundFrame, false, tmpFrames);
try (Scope.Safe s = Scope.safe(frame, backgroundFrame)) {
Frame adaptedBgFrame = adaptFrameForScore(backgroundFrame, false);
DKV.put(adaptedBgFrame);
adaptedFrame = adaptFrameForScore(frame, false, tmpFrames);
Frame adaptedFrame = adaptFrameForScore(frame, false);
DKV.put(adaptedFrame);
DeepSHAPContributionsWithBackground contributions = new DeepSHAPContributionsWithBackground(this,
adaptedFrame._key,
@@ -70,10 +67,8 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra

System.arraycopy(cols, 0, colNames, 0, colNames.length - 1);
colNames[colNames.length - 1] = "BiasTerm";
return contributions.runAndGetOutput(j, destination_key, colNames);
return Scope.untrack(contributions.runAndGetOutput(j, destination_key, colNames));
} finally {
if (null != adaptedFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedFrame, frame);
if (null != adaptedBgFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedBgFrame, backgroundFrame);
Log.info("Finished contributions calculation for "+this._key+"...");
}
}
92 changes: 39 additions & 53 deletions h2o-algos/src/main/java/hex/ensemble/StackedEnsembleModel.java
@@ -177,12 +177,8 @@ private Frame baseLineContributions(Frame frame, Key<Frame> destination_key, Job
List<Integer> baseModelsIdx = new ArrayList<>();
String[] columns = null;
baseModelsIdx.add(0);
Frame fr = new Frame();
Frame levelOneFrame = null;
Frame levelOneFrameBg = null;
Frame adaptFr = null;
Frame adaptFrBg = null;
try {
try (Scope.Safe s = Scope.safe(frame, backgroundFrame)) {
Frame fr = new Frame();
for (Key<Model> bm : _parms._base_models) {
if (isUsefulBaseModel(bm)) {
baseModels.add(bm.toString());
@@ -238,13 +234,11 @@ private Frame baseLineContributions(Frame frame, Key<Frame> destination_key, Job
String[] colsWithRows = columns;
columns = Arrays.copyOfRange(columns, 0, columns.length - 3);

List<Frame> tmpFrames = new ArrayList<>();
adaptFr = adaptFrameForScore(frame, false, tmpFrames);
levelOneFrame = getLevelOnePredictFrame(frame, adaptFr, j);
Frame adaptFr = adaptFrameForScore(frame, false);
Frame levelOneFrame = getLevelOnePredictFrame(frame, adaptFr, j);

tmpFrames = new ArrayList<>();
adaptFrBg = adaptFrameForScore(backgroundFrame, false, tmpFrames);
levelOneFrameBg = getLevelOnePredictFrame(backgroundFrame, adaptFrBg, j);
Frame adaptFrBg = adaptFrameForScore(backgroundFrame, false);
Frame levelOneFrameBg = getLevelOnePredictFrame(backgroundFrame, adaptFrBg, j);

Frame metalearnerContrib = ((Model.Contributions) _output._metalearner).scoreContributions(levelOneFrame,
Key.make(destination_key + "_" + _output._metalearner._key), j,
@@ -258,22 +252,13 @@ private Frame baseLineContributions(Frame frame, Key<Frame> destination_key, Job
.map(name -> "metalearner_" + name)
.toArray(String[]::new));

fr.add(metalearnerContrib);
Frame.deleteTempFrameAndItsNonSharedVecs(metalearnerContrib, fr);


return new GDeepSHAP(columns, baseModels.toArray(new String[0]),
Scope.track(fr.add(metalearnerContrib));

return Scope.untrack(new GDeepSHAP(columns, baseModels.toArray(new String[0]),
fr._names, baseModelsIdx.toArray(new Integer[0]), _parms._metalearner_transform)
.withPostMapAction(JobUpdatePostMap.forJob(j))
.doAll(colsWithRows.length, Vec.T_NUM, fr)
.outputFrame(destination_key, colsWithRows, null);

} finally {
if (null != levelOneFrame) Frame.deleteTempFrameAndItsNonSharedVecs(levelOneFrame, frame);
if (null != levelOneFrameBg) Frame.deleteTempFrameAndItsNonSharedVecs(levelOneFrameBg, backgroundFrame);
Frame.deleteTempFrameAndItsNonSharedVecs(fr, frame);
if (null != adaptFr) Frame.deleteTempFrameAndItsNonSharedVecs(adaptFr, frame);
if (null != adaptFrBg) Frame.deleteTempFrameAndItsNonSharedVecs(adaptFrBg, backgroundFrame);
.outputFrame(destination_key, colsWithRows, null));
}
}

@@ -487,36 +472,37 @@ public int nfeatures() {
*/
@Override
protected PredictScoreResult predictScoreImpl(Frame fr, Frame adaptFrm, String destination_key, Job j, boolean computeMetrics, CFuncRef customMetricFunc) {
Frame levelOneFrame = getLevelOnePredictFrame(fr, adaptFrm, j);
// TODO: what if we're running multiple in parallel and have a name collision?
Log.info("Finished creating \"level one\" frame for scoring: " + levelOneFrame.toString());

// Score the dataset, building the class distribution & predictions

Model metalearner = this._output._metalearner;
Frame predictFr = metalearner.score(
levelOneFrame,
destination_key,
j,
computeMetrics,
CFuncRef.from(_parms._custom_metric_func)
);
ModelMetrics mmStackedEnsemble = null;
if (computeMetrics) {
// #score has just stored a ModelMetrics object for the (metalearner, preds_levelone) Model/Frame pair.
// We need to be able to look it up by the (this, fr) pair.
// The ModelMetrics object for the metalearner will be removed when the metalearner is removed.
Key<ModelMetrics>[] mms = metalearner._output.getModelMetrics();
ModelMetrics lastComputedMetric = mms[mms.length - 1].get();
mmStackedEnsemble = lastComputedMetric.deepCloneWithDifferentModelAndFrame(this, fr);
this.addModelMetrics(mmStackedEnsemble);
//now that we have the metric set on the SE model, removing the one we just computed on metalearner (otherwise it leaks in client mode)
for (Key<ModelMetrics> mm : metalearner._output.clearModelMetrics(true)) {
DKV.remove(mm);
try (Scope.Safe safe = Scope.safe(fr, adaptFrm)) {
Frame levelOneFrame = getLevelOnePredictFrame(fr, adaptFrm, j);
// TODO: what if we're running multiple in parallel and have a name collision?
Log.info("Finished creating \"level one\" frame for scoring: "+levelOneFrame.toString());

// Score the dataset, building the class distribution & predictions

Model metalearner = this._output._metalearner;
Frame predictFr = metalearner.score(
levelOneFrame,
destination_key,
j,
computeMetrics,
CFuncRef.from(_parms._custom_metric_func)
);
ModelMetrics mmStackedEnsemble = null;
if (computeMetrics) {
// #score has just stored a ModelMetrics object for the (metalearner, preds_levelone) Model/Frame pair.
// We need to be able to look it up by the (this, fr) pair.
// The ModelMetrics object for the metalearner will be removed when the metalearner is removed.
Key<ModelMetrics>[] mms = metalearner._output.getModelMetrics();
ModelMetrics lastComputedMetric = mms[mms.length-1].get();
mmStackedEnsemble = lastComputedMetric.deepCloneWithDifferentModelAndFrame(this, fr);
this.addModelMetrics(mmStackedEnsemble);
//now that we have the metric set on the SE model, removing the one we just computed on metalearner (otherwise it leaks in client mode)
for (Key<ModelMetrics> mm : metalearner._output.clearModelMetrics(true)) {
DKV.remove(mm);
}
}
return new StackedEnsemblePredictScoreResult(predictFr, mmStackedEnsemble);
}
Frame.deleteTempFrameAndItsNonSharedVecs(levelOneFrame, adaptFrm);
return new StackedEnsemblePredictScoreResult(predictFr, mmStackedEnsemble);
}

private Frame getLevelOnePredictFrame(Frame fr, Frame adaptFrm, Job j) {
14 changes: 4 additions & 10 deletions h2o-algos/src/main/java/hex/glm/GLMModel.java
@@ -243,15 +243,12 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
}
}

List<Frame> tmpFrames = new ArrayList<>();
Frame adaptedFrame = null;
Frame adaptedBgFrame = null;
if (null == backgroundFrame)
throw H2O.unimpl("GLM supports contribution calculation only with a background frame.");
Log.info("Starting contributions calculation for " + this._key + "...");
try {
adaptedBgFrame = adaptFrameForScore(backgroundFrame, false, tmpFrames);
adaptedFrame = adaptFrameForScore(frame, false, tmpFrames);
try (Scope.Safe s = Scope.safe(frame, backgroundFrame)) {
Frame adaptedBgFrame = adaptFrameForScore(backgroundFrame, false);
Frame adaptedFrame = adaptFrameForScore(frame, false);
DKV.put(adaptedBgFrame);
DKV.put(adaptedFrame);
DataInfo dinfo = _output._dinfo.clone();
@@ -277,13 +274,10 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
}).toArray(String[]::new)
: _output._coefficient_names, 0, colNames, 0, colNames.length - 1);
colNames[colNames.length - 1] = "BiasTerm";
return contributions.runAndGetOutput(j, destination_key, colNames);
return Scope.untrack(contributions.runAndGetOutput(j, destination_key, colNames));
} finally {
if (null != adaptedFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedFrame, frame);
if (null != adaptedBgFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedBgFrame, backgroundFrame);
Log.info("Finished contributions calculation for " + this._key + "...");
}

}

public static class RegularizationPath extends Iced {
10 changes: 3 additions & 7 deletions h2o-algos/src/main/java/hex/rulefit/RuleFitModel.java
@@ -135,8 +135,8 @@ public Frame score(Frame fr, String destination_key, Job j, boolean computeMetri
Frame adaptFrm = new Frame(fr);
adaptTestForTrain(adaptFrm, true, false);

Frame linearTest = new Frame();
try {
try (Scope.Safe safe = Scope.safe(adaptFrm)) {
Frame linearTest = new Frame();
if (ModelType.RULES_AND_LINEAR.equals(this._parms._model_type) || ModelType.RULES.equals(this._parms._model_type)) {
linearTest.add(ruleEnsemble.createGLMTrainFrame(adaptFrm, _parms._max_rule_length - _parms._min_rule_length + 1, _parms._rule_generation_ntrees, this._output.classNames(), _parms._weights_column, false));
}
@@ -147,12 +147,8 @@ public Frame score(Frame fr, String destination_key, Job j, boolean computeMetri
}

Frame scored = glmModel.score(linearTest, destination_key, null, true);

updateModelMetrics(glmModel, fr);

return scored;
} finally {
Frame.deleteTempFrameAndItsNonSharedVecs(linearTest, adaptFrm);
return Scope.untrack(scored);
}
}

@@ -119,12 +119,10 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
"Calculating contributions is currently not supported for multinomial models.");
}
Log.info("Starting contributions calculation for " + this._key + "...");
Frame adaptedFrame = null;
Frame adaptedBgFrame = null;
try {
try (Scope.Safe s = Scope.safe(frame, backgroundFrame)) {
if (options._outputFormat == ContributionsOutputFormat.Compact || _output._domains == null) {
adaptedFrame = removeSpecialColumns(frame);
adaptedBgFrame = removeSpecialColumns(backgroundFrame);
Frame adaptedFrame = removeSpecialColumns(frame);
Frame adaptedBgFrame = removeSpecialColumns(backgroundFrame);

DKV.put(adaptedFrame);
DKV.put(adaptedBgFrame);
@@ -133,8 +131,8 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
return getScoreContributionsWithBackgroundTask(this, adaptedFrame, adaptedBgFrame, false, null, options)
.runAndGetOutput(j, destination_key, outputNames);
} else {
adaptedFrame = removeSpecialColumns(frame);
adaptedBgFrame = removeSpecialColumns(backgroundFrame);
Frame adaptedFrame = removeSpecialColumns(frame);
Frame adaptedBgFrame = removeSpecialColumns(backgroundFrame);
DKV.put(adaptedFrame);
DKV.put(adaptedBgFrame);
assert Parameters.CategoricalEncodingScheme.Enum.equals(_parms._categorical_encoding) : "Unsupported categorical encoding. Only enum is supported.";
@@ -178,12 +176,10 @@ public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Fra
}
}

return getScoreContributionsWithBackgroundTask(this, adaptedFrame, adaptedBgFrame, true, catOffsets, options)
.runAndGetOutput(j, destination_key, outputNames);
return Scope.untrack(getScoreContributionsWithBackgroundTask(this, adaptedFrame, adaptedBgFrame, true, catOffsets, options)
.runAndGetOutput(j, destination_key, outputNames));
}
} finally {
if (null != adaptedFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedFrame, frame);
if (null != adaptedBgFrame) Frame.deleteTempFrameAndItsNonSharedVecs(adaptedBgFrame, backgroundFrame);
Log.info("Finished contributions calculation for " + this._key + "...");
}
}
28 changes: 6 additions & 22 deletions h2o-core/src/main/java/water/SplitToChunksApplyCombine.java
@@ -40,32 +40,16 @@ public static Frame createSubFrame(Frame fr, int cidx, String destinationKeyPref


public static Frame splitApplyCombine(Frame frameToSplit, Function<Frame, Frame> fun, Key<Frame> destinationKey) {
LinkedList<Frame> resultSubFrames = new LinkedList<>();
Frame result = null;
try {
try (Scope.Safe safe = Scope.safe(frameToSplit)) {
List<Frame> resultSubFrames = new LinkedList<>();
int nChunks = frameToSplit.anyVec().nChunks();
for (int cidx = 0; cidx < nChunks; cidx++) {
Frame subFrame = createSubFrame(frameToSplit, cidx, destinationKey.toString());
try {
if (subFrame.numRows() == 0) continue;
DKV.put(subFrame);
resultSubFrames.add(fun.apply(subFrame));
} finally {
for (Vec v : subFrame.vecs())
DKV.remove(v._key);
subFrame.delete();
}
}
result = concatFrames(resultSubFrames, destinationKey);
return result;
} finally {
if (null != result) {
for (Frame fr : resultSubFrames)
Frame.deleteTempFrameAndItsNonSharedVecs(fr, frameToSplit);
} else {
for (Frame fr : resultSubFrames)
fr.delete();
if (subFrame.numRows() == 0) continue;
DKV.put(subFrame);
resultSubFrames.add(Scope.track(fun.apply(subFrame)));
}
return Scope.untrack(concatFrames(resultSubFrames, destinationKey));
}
}
}
@@ -226,20 +226,16 @@ public Frame transform(Frame fr, BlendingParams blendingParams, double noiseLeve
*/
public Frame transform(Frame fr, boolean asTraining, int outOfFold, BlendingParams blendingParams, double noiseLevel) {
if (!canApplyTargetEncoding(fr)) return fr;
Frame adaptFr = null;
try {
adaptFr = adaptForEncoding(fr);
return applyTargetEncoding(
try (Scope.Safe safe = Scope.safe(fr)) {
Frame adaptFr = adaptForEncoding(fr);
return Scope.untrack(applyTargetEncoding(
adaptFr,
asTraining,
outOfFold,
blendingParams,
noiseLevel,
null
);
} finally {
if (adaptFr != null)
Frame.deleteTempFrameAndItsNonSharedVecs(adaptFr, fr);
));
}
}

@@ -258,20 +254,16 @@ public Frame score(Frame fr, String destination_key, Job j, boolean computeMetri
DKV.put(res);
return res;
}
Frame adaptFr = null;
try {
adaptFr = adaptForEncoding(fr);
return applyTargetEncoding(
try (Scope.Safe safe = Scope.safe(fr)) {
Frame adaptFr = adaptForEncoding(fr);
return Scope.untrack(applyTargetEncoding(
adaptFr,
false,
NO_FOLD,
_parms.getBlendingParameters(),
_parms._noise,
Key.make(destination_key)
);
} finally {
if (adaptFr != null)
Frame.deleteTempFrameAndItsNonSharedVecs(adaptFr, fr);
));
}
}

