Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
131791: sql: simplify generic query plans r=mgartner a=mgartner

#### sql: do not cache non-reusable plans

Fixes cockroachdb#132963

Release note (bug fix): A bug has been fixed that caused non-reusable
query plans, e.g., plans for DDL and `SHOW ...` statements, to be cached
and reused in future executions, possibly causing stale results to be
returned. This bug only occurred when `plan_cache_mode` was set to
`auto` or `force_generic_plan`, both of which are not currently the
default settings.

#### sql: remove legacy planning code path

An optimization code path from before the introduction of generic query
plans has been removed. This code path remained in the codebase to
de-risk the generic query plans backports. It is not needed in future
versions.

Release note: None

#### sql: remove unused context parameter in `reuseMemo`

Release note: None

#### sql: simplify generic query plans

All fully-optimized memos that can be reused without re-optimization are
now stored in `PreparedStatement.GenericMemo`. Prior to this commit,
some fully-optimized memos were stored in `PreparedStatement.BaseMemo`.

Release note: None


Co-authored-by: Marcus Gartner <[email protected]>
  • Loading branch information
craig[bot] and mgartner committed Oct 18, 2024
2 parents 7296e84 + cea8cf8 commit 57afbbe
Show file tree
Hide file tree
Showing 3 changed files with 118 additions and 136 deletions.
27 changes: 27 additions & 0 deletions pkg/ccl/logictestccl/testdata/logic_test/generic
Original file line number Diff line number Diff line change
Expand Up @@ -1193,3 +1193,30 @@ quality of service: regular
regions: <hidden>
actual row count: 1
size: 1 column, 1 row

statement ok
DEALLOCATE p

# Regression test for #132963. Do not cache non-reusable plans.
statement ok
SET plan_cache_mode = auto

statement ok
CREATE TABLE a (a INT PRIMARY KEY)

statement ok
PREPARE p AS SELECT create_statement FROM [SHOW CREATE TABLE a]

query T
EXECUTE p
----
CREATE TABLE public.a (
a INT8 NOT NULL,
CONSTRAINT a_pkey PRIMARY KEY (a ASC)
)

statement ok
ALTER TABLE a RENAME TO b

statement error pgcode 42P01 pq: relation \"a\" does not exist
EXECUTE p
210 changes: 85 additions & 125 deletions pkg/sql/plan_opt.go
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,14 @@ func (p *planner) prepareUsingOptimizer(
stmt.Prepared.StatementNoConstants = pm.StatementNoConstants
stmt.Prepared.Columns = pm.Columns
stmt.Prepared.Types = pm.Types
stmt.Prepared.BaseMemo = cachedData.Memo
if cachedData.Memo.IsOptimized() {
// A cached, fully optimized memo is an "ideal generic
// memo".
stmt.Prepared.GenericMemo = cachedData.Memo
stmt.Prepared.IdealGenericPlan = true
} else {
stmt.Prepared.BaseMemo = cachedData.Memo
}
return opc.flags, nil
}
opc.log(ctx, "query cache hit but memo is stale (prepare)")
Expand All @@ -163,7 +170,7 @@ func (p *planner) prepareUsingOptimizer(
}

// Build the memo. Do not attempt to build a generic plan at PREPARE-time.
memo, _, err := opc.buildReusableMemo(ctx, false /* buildGeneric */)
memo, _, err := opc.buildReusableMemo(ctx, false /* allowNonIdealGeneric */)
if err != nil {
return 0, err
}
Expand Down Expand Up @@ -213,7 +220,14 @@ func (p *planner) prepareUsingOptimizer(
stmt.Prepared.Columns = resultCols
stmt.Prepared.Types = p.semaCtx.Placeholders.Types
if opc.allowMemoReuse {
stmt.Prepared.BaseMemo = memo
if memo.IsOptimized() {
// A memo fully optimized at prepare time is an "ideal generic
// memo".
stmt.Prepared.GenericMemo = memo
stmt.Prepared.IdealGenericPlan = true
} else {
stmt.Prepared.BaseMemo = memo
}
if opc.useCache {
// execPrepare sets the PrepareMetadata.InferredTypes field after this
// point. However, once the PrepareMetadata goes into the cache, it
Expand Down Expand Up @@ -419,13 +433,13 @@ const (
// 1. The statement does not contain placeholders nor fold-able stable
// operators.
// 2. Or, the placeholder fast path is used.
// 3. Or, buildGeneric is true and the plan is fully optimized as best as
// possible in the presence of placeholders.
// 3. Or, allowNonIdealGeneric is true and the plan is fully optimized as best
// as possible in the presence of placeholders.
//
// The returned memo is fully detached from the planner and can be used with
// reuseMemo independently and concurrently by multiple threads.
func (opc *optPlanningCtx) buildReusableMemo(
ctx context.Context, buildGeneric bool,
ctx context.Context, allowNonIdealGeneric bool,
) (*memo.Memo, memoType, error) {
p := opc.p

Expand Down Expand Up @@ -506,7 +520,7 @@ func (opc *optPlanningCtx) buildReusableMemo(
opc.log(ctx, "placeholder fast path")
opc.flags.Set(planFlagOptimized)
return opc.optimizer.DetachMemo(ctx), memoTypeIdealGeneric, nil
} else if buildGeneric {
} else if allowNonIdealGeneric {
// Build a generic query plan if the placeholder fast path failed and a
// generic plan was requested.
opc.log(ctx, "optimizing (generic)")
Expand All @@ -530,9 +544,7 @@ func (opc *optPlanningCtx) buildReusableMemo(
//
// The returned memo is only safe to use in one thread, during execution of the
// current statement.
func (opc *optPlanningCtx) reuseMemo(
ctx context.Context, cachedMemo *memo.Memo,
) (*memo.Memo, error) {
func (opc *optPlanningCtx) reuseMemo(cachedMemo *memo.Memo) (*memo.Memo, error) {
opc.incPlanTypeTelemetry(cachedMemo)
if cachedMemo.IsOptimized() {
// The query could have been already fully optimized in
Expand Down Expand Up @@ -581,11 +593,15 @@ func (opc *optPlanningCtx) incPlanTypeTelemetry(cachedMemo *memo.Memo) {
// useGenericPlan returns true if a generic query plan should be used instead of
// a custom plan.
func (opc *optPlanningCtx) useGenericPlan() bool {
prep := opc.p.stmt.Prepared
// Always use an ideal generic plan.
if prep.IdealGenericPlan {
return true
}
switch opc.p.SessionData().PlanCacheMode {
case sessiondatapb.PlanCacheModeForceGeneric:
return true
case sessiondatapb.PlanCacheModeAuto:
prep := opc.p.stmt.Prepared
// We need to build CustomPlanThreshold custom plans before considering
// a generic plan.
if prep.Costs.NumCustom() < CustomPlanThreshold {
Expand All @@ -609,7 +625,7 @@ func (opc *optPlanningCtx) useGenericPlan() bool {
// from, baseMemo or genericMemo. It returns nil if both memos are stale. It
// selects baseMemo or genericMemo based on the following rules, in order:
//
// 1. If baseMemo is fully optimized and not stale, it is returned as-is.
// 1. If the generic memo is ideal, it is returned as-is.
// 2. If plan_cache_mode=force_generic_plan is true then genericMemo is
// returned as-is if it is not stale.
// 3. If plan_cache_mode=auto, there have been at least 5 custom plans
Expand All @@ -622,54 +638,37 @@ func (opc *optPlanningCtx) useGenericPlan() bool {
// stale.
// 5. Otherwise, nil is returned and the caller is responsible for building a
// new memo.
//
// The logic is structured to avoid unnecessary (*memo.Memo).IsStale calls,
// since they can be expensive.
func (opc *optPlanningCtx) chooseValidPreparedMemo(
ctx context.Context, baseMemo *memo.Memo, genericMemo *memo.Memo,
) (*memo.Memo, error) {
// First check for a fully optimized, non-stale, base memo.
if baseMemo != nil && baseMemo.IsOptimized() {
isStale, err := baseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return baseMemo, nil
}
}

func (opc *optPlanningCtx) chooseValidPreparedMemo(ctx context.Context) (*memo.Memo, error) {
prep := opc.p.stmt.Prepared
reuseGeneric := opc.useGenericPlan()

// Next check for a non-stale, generic memo.
if reuseGeneric && genericMemo != nil {
isStale, err := genericMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if opc.useGenericPlan() {
if prep.GenericMemo == nil {
// A generic plan does not yet exist.
return nil, nil
}
isStale, err := prep.GenericMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return genericMemo, nil
} else {
// Clear the generic cost if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearGeneric()
return prep.GenericMemo, nil
}
// Clear the generic cost if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearGeneric()
return nil, nil
}

// Next, check for a non-stale, normalized memo, if a generic memo should
// not be reused.
if !reuseGeneric && baseMemo != nil && !baseMemo.IsOptimized() {
isStale, err := baseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if prep.BaseMemo != nil {
isStale, err := prep.BaseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return baseMemo, nil
} else {
// Clear the custom costs if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearCustom()
return prep.BaseMemo, nil
}
// Clear the custom costs if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearCustom()
}

// A valid memo was not found.
Expand Down Expand Up @@ -707,13 +706,13 @@ func (opc *optPlanningCtx) fetchPreparedMemo(ctx context.Context) (_ *memo.Memo,

// If the statement was previously prepared, check for a reusable memo.
// First check for a valid (non-stale) memo.
validMemo, err := opc.chooseValidPreparedMemo(ctx, prep.BaseMemo, prep.GenericMemo)
validMemo, err := opc.chooseValidPreparedMemo(ctx)
if err != nil {
return nil, err
}
if validMemo != nil {
opc.log(ctx, "reusing cached memo")
return opc.reuseMemo(ctx, validMemo)
return opc.reuseMemo(validMemo)
}

// Otherwise, we need to rebuild the memo.
Expand All @@ -727,60 +726,34 @@ func (opc *optPlanningCtx) fetchPreparedMemo(ctx context.Context) (_ *memo.Memo,
if err != nil {
return nil, err
}
switch typ {
case memoTypeIdealGeneric:
// If we have an "ideal" generic memo, store it as a base memo. It will
// always be used regardless of plan_cache_mode, so there is no need to
// set GenericCost.
prep.BaseMemo = newMemo
case memoTypeGeneric:
prep.GenericMemo = newMemo
prep.Costs.SetGeneric(newMemo.RootExpr().(memo.RelExpr).Cost())
// Now that the cost of the generic plan is known, we need to
// re-evaluate the decision to use a generic or custom plan.
if !opc.useGenericPlan() {
// The generic plan that we just built is too expensive, so we need
// to build a custom plan. We recursively call fetchPreparedMemo in
// case we have a custom plan that can be reused as a starting point
// for optimization. The function should not recurse more than once.
return opc.fetchPreparedMemo(ctx)
}
case memoTypeCustom:
prep.BaseMemo = newMemo
default:
return nil, errors.AssertionFailedf("unexpected memo type %v", typ)
}

// Re-optimize the memo, if necessary.
return opc.reuseMemo(ctx, newMemo)
}

// fetchPreparedMemoLegacy attempts to fetch a prepared memo. If a valid (i.e.,
// non-stale) memo is found, it is used. Otherwise, a new statement will be
// built. If memo reuse is not allowed, nil is returned.
func (opc *optPlanningCtx) fetchPreparedMemoLegacy(ctx context.Context) (_ *memo.Memo, err error) {
prepared := opc.p.stmt.Prepared
p := opc.p
if opc.allowMemoReuse && prepared != nil && prepared.BaseMemo != nil {
// We are executing a previously prepared statement and a reusable memo is
// available.

// If the prepared memo has been invalidated by schema or other changes,
// re-prepare it.
if isStale, err := prepared.BaseMemo.IsStale(ctx, p.EvalContext(), opc.catalog); err != nil {
return nil, err
} else if isStale {
opc.log(ctx, "rebuilding cached memo")
prepared.BaseMemo, _, err = opc.buildReusableMemo(ctx, false /* buildGeneric */)
if err != nil {
return nil, err
if opc.allowMemoReuse {
switch typ {
case memoTypeIdealGeneric:
// An "ideal" generic memo will always be used regardless of
// plan_cache_mode, so there is no need to set GenericCost.
prep.GenericMemo = newMemo
prep.IdealGenericPlan = true
case memoTypeGeneric:
prep.GenericMemo = newMemo
prep.Costs.SetGeneric(newMemo.RootExpr().(memo.RelExpr).Cost())
// Now that the cost of the generic plan is known, we need to
// re-evaluate the decision to use a generic or custom plan.
if !opc.useGenericPlan() {
// The generic plan that we just built is too expensive, so we need
// to build a custom plan. We recursively call fetchPreparedMemo in
// case we have a custom plan that can be reused as a starting point
// for optimization. The function should not recurse more than once.
return opc.fetchPreparedMemo(ctx)
}
case memoTypeCustom:
prep.BaseMemo = newMemo
default:
return nil, errors.AssertionFailedf("unexpected memo type %v", typ)
}
opc.log(ctx, "reusing cached memo")
return opc.reuseMemo(ctx, prepared.BaseMemo)
}

return nil, nil
// Re-optimize the memo, if necessary.
return opc.reuseMemo(newMemo)
}

// buildExecMemo creates a fully optimized memo, possibly reusing a previously
Expand All @@ -794,32 +767,19 @@ func (opc *optPlanningCtx) buildExecMemo(ctx context.Context) (_ *memo.Memo, _ e
// rollback its transaction. Use resumeProc to resume execution in a new
// transaction where the control statement left off.
opc.log(ctx, "resuming stored procedure execution in a new transaction")
return opc.reuseMemo(ctx, resumeProc)
return opc.reuseMemo(resumeProc)
}

p := opc.p
if p.SessionData().PlanCacheMode == sessiondatapb.PlanCacheModeForceCustom {
// Fallback to the legacy logic for reusing memos if plan_cache_mode is
// set to force_custom_plan.
m, err := opc.fetchPreparedMemoLegacy(ctx)
if err != nil {
return nil, err
}
if m != nil {
return m, nil
}
} else {
// Use new logic for reusing memos if plan_cache_mode is set to
// force_generic_plan or auto.
m, err := opc.fetchPreparedMemo(ctx)
if err != nil {
return nil, err
}
if m != nil {
return m, nil
}
// Fetch and reuse a memo if a valid one is available.
m, err := opc.fetchPreparedMemo(ctx)
if err != nil {
return nil, err
}
if m != nil {
return m, nil
}

p := opc.p
if opc.useCache {
// Consult the query cache.
cachedData, ok := p.execCfg.QueryCache.Find(&p.queryCacheSession, opc.p.stmt.SQL)
Expand All @@ -828,7 +788,7 @@ func (opc *optPlanningCtx) buildExecMemo(ctx context.Context) (_ *memo.Memo, _ e
return nil, err
} else if isStale {
opc.log(ctx, "query cache hit but needed update")
cachedData.Memo, _, err = opc.buildReusableMemo(ctx, false /* buildGeneric */)
cachedData.Memo, _, err = opc.buildReusableMemo(ctx, false /* allowNonIdealGeneric */)
if err != nil {
return nil, err
}
Expand All @@ -841,7 +801,7 @@ func (opc *optPlanningCtx) buildExecMemo(ctx context.Context) (_ *memo.Memo, _ e
opc.log(ctx, "query cache hit")
opc.flags.Set(planFlagOptCacheHit)
}
return opc.reuseMemo(ctx, cachedData.Memo)
return opc.reuseMemo(cachedData.Memo)
}
opc.flags.Set(planFlagOptCacheMiss)
opc.log(ctx, "query cache miss")
Expand Down
Loading

0 comments on commit 57afbbe

Please sign in to comment.