You've already forked linux-packaging-mono
Imported Upstream version 4.6.0.125
Former-commit-id: a2155e9bd80020e49e72e86c44da02a8ac0e57a4
This commit is contained in:
parent
a569aebcfd
commit
e79aa3c0ed
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1 @@
|
||||
62aa38cecdc24233d13a7efe9a084b196edad72b
|
@@ -0,0 +1,151 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="CodeGen.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
using System.Globalization;
|
||||
using md = System.Data.Metadata.Edm;
|
||||
using System.Data.Common.CommandTrees;
|
||||
using System.Data.Query.InternalTrees;
|
||||
using System.Data.Query.PlanCompiler;
|
||||
|
||||
//
|
||||
// The CodeGen module is responsible for translating the ITree finally into a query
|
||||
// We assume that various tree transformations have taken place, and the tree
|
||||
// is finally ready to be executed. The CodeGen module
|
||||
// * converts the Itree into one or more CTrees (in S space)
|
||||
// * produces a ColumnMap to facilitate result assembly
|
||||
// * and wraps up everything in a plan object
|
||||
//
|
||||
//
|
||||
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
internal class CodeGen
|
||||
{
|
||||
#region public methods
|
||||
/// <summary>
|
||||
/// This involves
|
||||
/// * Converting the ITree into a set of ProviderCommandInfo objects
|
||||
/// * Creating a column map to enable result assembly
|
||||
/// Currently, we only produce a single ITree, and correspondingly, the
|
||||
/// following steps are trivial
|
||||
/// </summary>
|
||||
/// <param name="compilerState">current compiler state</param>
|
||||
/// <param name="childCommands">CQTs for each store command</param>
|
||||
/// <param name="resultColumnMap">column map to help in result assembly</param>
|
||||
internal static void Process(PlanCompiler compilerState, out List<ProviderCommandInfo> childCommands, out ColumnMap resultColumnMap, out int columnCount)
|
||||
{
|
||||
CodeGen codeGen = new CodeGen(compilerState);
|
||||
codeGen.Process(out childCommands, out resultColumnMap, out columnCount);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
private CodeGen(PlanCompiler compilerState)
|
||||
{
|
||||
m_compilerState = compilerState;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private methods
|
||||
|
||||
/// <summary>
|
||||
/// The real driver. This routine walks the tree, converts each subcommand
|
||||
/// into a CTree, and converts the columnmap into a real column map.
|
||||
/// Finally, it produces a "real" plan that can be used by the bridge execution, and
|
||||
/// returns this plan
|
||||
///
|
||||
/// The root of the tree must be a PhysicalProjectOp. Each child of this Op
|
||||
/// represents a command to be executed, and the ColumnMap of this Op represents
|
||||
/// the eventual columnMap to be used for result assembly
|
||||
/// </summary>
|
||||
/// <param name="childCommands">CQTs for store commands</param>
|
||||
/// <param name="resultColumnMap">column map for result assembly</param>
|
||||
private void Process(out List<ProviderCommandInfo> childCommands, out ColumnMap resultColumnMap, out int columnCount)
|
||||
{
|
||||
PhysicalProjectOp projectOp = (PhysicalProjectOp)this.Command.Root.Op;
|
||||
|
||||
this.m_subCommands = new List<Node>(new Node[] { this.Command.Root });
|
||||
childCommands = new List<ProviderCommandInfo>(new ProviderCommandInfo[] {
|
||||
ProviderCommandInfoUtils.Create(
|
||||
this.Command,
|
||||
this.Command.Root // input node
|
||||
)});
|
||||
|
||||
// Build the final column map, and count the columns we expect for it.
|
||||
resultColumnMap = BuildResultColumnMap(projectOp);
|
||||
|
||||
columnCount = projectOp.Outputs.Count;
|
||||
}
|
||||
|
||||
private ColumnMap BuildResultColumnMap(PhysicalProjectOp projectOp)
|
||||
{
|
||||
// convert the column map into a real column map
|
||||
// build up a dictionary mapping Vars to their real positions in the commands
|
||||
Dictionary<Var, KeyValuePair<int, int>> varMap = BuildVarMap();
|
||||
ColumnMap realColumnMap = ColumnMapTranslator.Translate(projectOp.ColumnMap, varMap);
|
||||
|
||||
return realColumnMap;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// For each subcommand, build up a "location-map" for each top-level var that
|
||||
/// is projected out. This location map will ultimately be used to convert VarRefColumnMap
|
||||
/// into SimpleColumnMap
|
||||
/// </summary>
|
||||
private Dictionary<Var, KeyValuePair<int, int>> BuildVarMap()
|
||||
{
|
||||
Dictionary<Var, KeyValuePair<int, int>> varMap =
|
||||
new Dictionary<Var, KeyValuePair<int, int>>();
|
||||
|
||||
int commandId = 0;
|
||||
foreach (Node subCommand in m_subCommands)
|
||||
{
|
||||
PhysicalProjectOp projectOp = (PhysicalProjectOp)subCommand.Op;
|
||||
|
||||
int columnPos = 0;
|
||||
foreach (Var v in projectOp.Outputs)
|
||||
{
|
||||
KeyValuePair<int, int> varLocation = new KeyValuePair<int, int>(commandId, columnPos);
|
||||
varMap[v] = varLocation;
|
||||
columnPos++;
|
||||
}
|
||||
|
||||
commandId++;
|
||||
}
|
||||
return varMap;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private PlanCompiler m_compilerState;
|
||||
private Command Command { get { return m_compilerState.Command; } }
|
||||
private List<Node> m_subCommands;
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,462 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="ColumnMapTranslator.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Data.Query.InternalTrees;
|
||||
using System.Data.Query.PlanCompiler;
|
||||
using System.Linq;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Delegate pattern that the ColumnMapTranslator uses to find its replacement
|
||||
/// columnMaps. Given a columnMap, return it's replacement.
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <returns></returns>
|
||||
internal delegate ColumnMap ColumnMapTranslatorTranslationDelegate(ColumnMap columnMap);
|
||||
|
||||
/// <summary>
|
||||
/// ColumnMapTranslator visits the ColumnMap hiearchy and runs the translation delegate
|
||||
/// you specify; There are some static methods to perform common translations, but you
|
||||
/// can bring your own translation if you desire.
|
||||
///
|
||||
/// This visitor only creates new ColumnMap objects when necessary; it attempts to
|
||||
/// replace-in-place, except when that is not possible because the field is not
|
||||
/// writable.
|
||||
///
|
||||
/// NOTE: over time, we should be able to modify the ColumnMaps to have more writable
|
||||
/// fields;
|
||||
/// </summary>
|
||||
internal class ColumnMapTranslator : ColumnMapVisitorWithResults<ColumnMap, ColumnMapTranslatorTranslationDelegate>
|
||||
{
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Singleton instance for the "public" methods to use;
|
||||
/// </summary>
|
||||
static private ColumnMapTranslator Instance = new ColumnMapTranslator();
|
||||
|
||||
/// <summary>
|
||||
/// Constructor; no one should use this.
|
||||
/// </summary>
|
||||
private ColumnMapTranslator()
|
||||
{
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Visitor Helpers
|
||||
|
||||
/// <summary>
|
||||
/// Returns the var to use in the copy, either the original or the
|
||||
/// replacement. Note that we will follow the chain of replacements, in
|
||||
/// case the replacement was also replaced.
|
||||
/// </summary>
|
||||
/// <param name="originalVar"></param>
|
||||
/// <param name="replacementVarMap"></param>
|
||||
/// <returns></returns>
|
||||
private static Var GetReplacementVar(Var originalVar, Dictionary<Var, Var> replacementVarMap)
|
||||
{
|
||||
// SQLBUDT #478509: Follow the chain of mapped vars, don't
|
||||
// just stop at the first one
|
||||
Var replacementVar = originalVar;
|
||||
|
||||
while (replacementVarMap.TryGetValue(replacementVar, out originalVar))
|
||||
{
|
||||
if (originalVar == replacementVar)
|
||||
{
|
||||
break;
|
||||
}
|
||||
replacementVar = originalVar;
|
||||
}
|
||||
return replacementVar;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region "Public" surface area
|
||||
|
||||
/// <summary>
|
||||
/// Bring-Your-Own-Replacement-Delegate method.
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal static ColumnMap Translate(ColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
return columnMap.Accept(ColumnMapTranslator.Instance, translationDelegate);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Replace VarRefColumnMaps with the specified ColumnMap replacement
|
||||
/// </summary>
|
||||
/// <param name="columnMapToTranslate"></param>
|
||||
/// <param name="varToColumnMap"></param>
|
||||
/// <returns></returns>
|
||||
internal static ColumnMap Translate(ColumnMap columnMapToTranslate, Dictionary<Var, ColumnMap> varToColumnMap)
|
||||
{
|
||||
ColumnMap result = Translate(columnMapToTranslate,
|
||||
delegate(ColumnMap columnMap)
|
||||
{
|
||||
VarRefColumnMap varRefColumnMap = columnMap as VarRefColumnMap;
|
||||
if (null != varRefColumnMap)
|
||||
{
|
||||
if (varToColumnMap.TryGetValue(varRefColumnMap.Var, out columnMap))
|
||||
{
|
||||
// perform fixups; only allow name changes when the replacement isn't
|
||||
// already named (and the original is named...)
|
||||
if (!columnMap.IsNamed && varRefColumnMap.IsNamed)
|
||||
{
|
||||
columnMap.Name = varRefColumnMap.Name;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
columnMap = varRefColumnMap;
|
||||
}
|
||||
}
|
||||
return columnMap;
|
||||
}
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Replace VarRefColumnMaps with new VarRefColumnMaps with the specified Var
|
||||
/// </summary>
|
||||
/// <param name="columnMapToTranslate"></param>
|
||||
/// <param name="varToVarMap"></param>
|
||||
/// <returns></returns>
|
||||
internal static ColumnMap Translate(ColumnMap columnMapToTranslate, Dictionary<Var, Var> varToVarMap)
|
||||
{
|
||||
ColumnMap result = Translate(columnMapToTranslate,
|
||||
delegate(ColumnMap columnMap)
|
||||
{
|
||||
VarRefColumnMap varRefColumnMap = columnMap as VarRefColumnMap;
|
||||
if (null != varRefColumnMap)
|
||||
{
|
||||
Var replacementVar = GetReplacementVar(varRefColumnMap.Var, varToVarMap);
|
||||
if (varRefColumnMap.Var != replacementVar)
|
||||
{
|
||||
columnMap = new VarRefColumnMap(varRefColumnMap.Type, varRefColumnMap.Name, replacementVar);
|
||||
}
|
||||
}
|
||||
return columnMap;
|
||||
}
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Replace VarRefColumnMaps with ScalarColumnMaps referring to the command and column
|
||||
/// </summary>
|
||||
/// <param name="columnMapToTranslate"></param>
|
||||
/// <param name="varToCommandColumnMap"></param>
|
||||
/// <returns></returns>
|
||||
internal static ColumnMap Translate(ColumnMap columnMapToTranslate, Dictionary<Var, KeyValuePair<int, int>> varToCommandColumnMap)
|
||||
{
|
||||
ColumnMap result = Translate(columnMapToTranslate,
|
||||
delegate(ColumnMap columnMap)
|
||||
{
|
||||
VarRefColumnMap varRefColumnMap = columnMap as VarRefColumnMap;
|
||||
if (null != varRefColumnMap)
|
||||
{
|
||||
KeyValuePair<int, int> commandAndColumn;
|
||||
|
||||
if (!varToCommandColumnMap.TryGetValue(varRefColumnMap.Var, out commandAndColumn))
|
||||
{
|
||||
throw EntityUtil.InternalError(EntityUtil.InternalErrorCode.UnknownVar, 1, varRefColumnMap.Var.Id); // shouldn't have gotten here without having a resolveable var
|
||||
}
|
||||
columnMap = new ScalarColumnMap(varRefColumnMap.Type, varRefColumnMap.Name, commandAndColumn.Key, commandAndColumn.Value);
|
||||
}
|
||||
|
||||
// While we're at it, we ensure that all columnMaps are named; we wait
|
||||
// until this point, because we don't want to assign names until after
|
||||
// we've gone through the transformations;
|
||||
if (!columnMap.IsNamed)
|
||||
{
|
||||
columnMap.Name = ColumnMap.DefaultColumnName;
|
||||
}
|
||||
return columnMap;
|
||||
}
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Visitor methods
|
||||
|
||||
#region List handling
|
||||
|
||||
/// <summary>
|
||||
/// List(ColumnMap)
|
||||
/// </summary>
|
||||
/// <typeparam name="TResultType"></typeparam>
|
||||
/// <param name="tList"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
private void VisitList<TResultType>(TResultType[] tList, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
where TResultType : ColumnMap
|
||||
{
|
||||
for (int i = 0; i < tList.Length; i++)
|
||||
{
|
||||
tList[i] = (TResultType)tList[i].Accept(this, translationDelegate);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EntityIdentity handling
|
||||
|
||||
/// <summary>
|
||||
/// DiscriminatedEntityIdentity
|
||||
/// </summary>
|
||||
/// <param name="entityIdentity"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
protected override EntityIdentity VisitEntityIdentity(DiscriminatedEntityIdentity entityIdentity, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
ColumnMap newEntitySetColumnMap = entityIdentity.EntitySetColumnMap.Accept(this, translationDelegate);
|
||||
VisitList(entityIdentity.Keys, translationDelegate);
|
||||
|
||||
if (newEntitySetColumnMap != entityIdentity.EntitySetColumnMap)
|
||||
{
|
||||
entityIdentity = new DiscriminatedEntityIdentity((SimpleColumnMap)newEntitySetColumnMap, entityIdentity.EntitySetMap, entityIdentity.Keys);
|
||||
}
|
||||
return entityIdentity;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SimpleEntityIdentity
|
||||
/// </summary>
|
||||
/// <param name="entityIdentity"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
protected override EntityIdentity VisitEntityIdentity(SimpleEntityIdentity entityIdentity, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
VisitList(entityIdentity.Keys, translationDelegate);
|
||||
return entityIdentity;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// ComplexTypeColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(ComplexTypeColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
SimpleColumnMap newNullSentinel = columnMap.NullSentinel;
|
||||
if (null != newNullSentinel)
|
||||
{
|
||||
newNullSentinel = (SimpleColumnMap)translationDelegate(newNullSentinel);
|
||||
}
|
||||
|
||||
VisitList(columnMap.Properties, translationDelegate);
|
||||
|
||||
if (columnMap.NullSentinel != newNullSentinel)
|
||||
{
|
||||
columnMap = new ComplexTypeColumnMap(columnMap.Type, columnMap.Name, columnMap.Properties, newNullSentinel);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DiscriminatedCollectionColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(DiscriminatedCollectionColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
ColumnMap newDiscriminator = columnMap.Discriminator.Accept(this, translationDelegate);
|
||||
VisitList(columnMap.ForeignKeys, translationDelegate);
|
||||
VisitList(columnMap.Keys, translationDelegate);
|
||||
ColumnMap newElement = columnMap.Element.Accept(this, translationDelegate);
|
||||
|
||||
if (newDiscriminator != columnMap.Discriminator || newElement != columnMap.Element)
|
||||
{
|
||||
columnMap = new DiscriminatedCollectionColumnMap(columnMap.Type, columnMap.Name, newElement, columnMap.Keys, columnMap.ForeignKeys,(SimpleColumnMap)newDiscriminator, columnMap.DiscriminatorValue);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// EntityColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(EntityColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
EntityIdentity newEntityIdentity = VisitEntityIdentity(columnMap.EntityIdentity, translationDelegate);
|
||||
VisitList(columnMap.Properties, translationDelegate);
|
||||
|
||||
if (newEntityIdentity != columnMap.EntityIdentity)
|
||||
{
|
||||
columnMap = new EntityColumnMap(columnMap.Type, columnMap.Name, columnMap.Properties, newEntityIdentity);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SimplePolymorphicColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(SimplePolymorphicColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
ColumnMap newTypeDiscriminator = columnMap.TypeDiscriminator.Accept(this, translationDelegate);
|
||||
|
||||
// NOTE: we're using Copy-On-Write logic to avoid allocation if we don't
|
||||
// need to change things.
|
||||
Dictionary<object, TypedColumnMap> newTypeChoices = columnMap.TypeChoices;
|
||||
foreach (KeyValuePair<object, TypedColumnMap> kv in columnMap.TypeChoices)
|
||||
{
|
||||
TypedColumnMap newTypeChoice = (TypedColumnMap)kv.Value.Accept(this, translationDelegate);
|
||||
|
||||
if (newTypeChoice != kv.Value)
|
||||
{
|
||||
if (newTypeChoices == columnMap.TypeChoices)
|
||||
{
|
||||
newTypeChoices = new Dictionary<object, TypedColumnMap>(columnMap.TypeChoices);
|
||||
}
|
||||
newTypeChoices[kv.Key] = newTypeChoice;
|
||||
}
|
||||
}
|
||||
VisitList(columnMap.Properties, translationDelegate);
|
||||
|
||||
if (newTypeDiscriminator != columnMap.TypeDiscriminator || newTypeChoices != columnMap.TypeChoices)
|
||||
{
|
||||
columnMap = new SimplePolymorphicColumnMap(columnMap.Type, columnMap.Name, columnMap.Properties, (SimpleColumnMap)newTypeDiscriminator, newTypeChoices);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MultipleDiscriminatorPolymorphicColumnMap
|
||||
/// </summary>
|
||||
internal override ColumnMap Visit(MultipleDiscriminatorPolymorphicColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
// At this time, we shouldn't ever see this type here; it's for SPROCS which don't use
|
||||
// the plan compiler.
|
||||
System.Data.Query.PlanCompiler.PlanCompiler.Assert(false, "unexpected MultipleDiscriminatorPolymorphicColumnMap in ColumnMapTranslator");
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RecordColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(RecordColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
SimpleColumnMap newNullSentinel = columnMap.NullSentinel;
|
||||
if (null != newNullSentinel)
|
||||
{
|
||||
newNullSentinel = (SimpleColumnMap)translationDelegate(newNullSentinel);
|
||||
}
|
||||
|
||||
VisitList(columnMap.Properties, translationDelegate);
|
||||
|
||||
if (columnMap.NullSentinel != newNullSentinel)
|
||||
{
|
||||
columnMap = new RecordColumnMap(columnMap.Type, columnMap.Name, columnMap.Properties, newNullSentinel);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// RefColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(RefColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
EntityIdentity newEntityIdentity = VisitEntityIdentity(columnMap.EntityIdentity, translationDelegate);
|
||||
|
||||
if (newEntityIdentity != columnMap.EntityIdentity)
|
||||
{
|
||||
columnMap = new RefColumnMap(columnMap.Type, columnMap.Name, newEntityIdentity);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ScalarColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(ScalarColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SimpleCollectionColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(SimpleCollectionColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
VisitList(columnMap.ForeignKeys, translationDelegate);
|
||||
VisitList(columnMap.Keys, translationDelegate);
|
||||
ColumnMap newElement = columnMap.Element.Accept(this, translationDelegate);
|
||||
|
||||
if (newElement != columnMap.Element)
|
||||
{
|
||||
columnMap = new SimpleCollectionColumnMap(columnMap.Type, columnMap.Name, newElement, columnMap.Keys, columnMap.ForeignKeys);
|
||||
}
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VarRefColumnMap
|
||||
/// </summary>
|
||||
/// <param name="columnMap"></param>
|
||||
/// <param name="translationDelegate"></param>
|
||||
/// <returns></returns>
|
||||
internal override ColumnMap Visit(VarRefColumnMap columnMap, ColumnMapTranslatorTranslationDelegate translationDelegate)
|
||||
{
|
||||
return translationDelegate(columnMap);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
@@ -0,0 +1,89 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="CommandPlan.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Data.Common;
|
||||
using md = System.Data.Metadata.Edm;
|
||||
using cqt = System.Data.Common.CommandTrees;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
|
||||
//
|
||||
// A CommandPlan represents the plan for a query.
|
||||
//
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
#region CommandInfo
|
||||
|
||||
/// <summary>
|
||||
/// Captures information about a single provider command
|
||||
/// </summary>
|
||||
internal sealed class ProviderCommandInfo
|
||||
{
|
||||
#region public apis
|
||||
|
||||
/// <summary>
|
||||
/// Internal methods to get the command tree
|
||||
/// </summary>
|
||||
internal cqt.DbCommandTree CommandTree
|
||||
{
|
||||
get { return _commandTree; }
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private cqt.DbCommandTree _commandTree;
|
||||
private ProviderCommandInfo _parent;
|
||||
private List<ProviderCommandInfo> _children;
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
|
||||
/// <summary>
|
||||
/// Internal constructor for a ProviderCommandInfo object
|
||||
/// </summary>
|
||||
/// <param name="commandTree">command tree for the provider command</param>
|
||||
/// <param name="children">children command infos</param>
|
||||
internal ProviderCommandInfo(cqt.DbCommandTree commandTree,
|
||||
List<ProviderCommandInfo> children)
|
||||
{
|
||||
_commandTree = commandTree;
|
||||
_children = children;
|
||||
|
||||
if (_children == null)
|
||||
{
|
||||
_children = new List<ProviderCommandInfo>();
|
||||
}
|
||||
|
||||
foreach (ProviderCommandInfo child in _children)
|
||||
{
|
||||
child._parent = this;
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
@@ -0,0 +1,318 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="ConstraintManager.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Data.Common;
|
||||
using System.Data.Query.InternalTrees;
|
||||
using md=System.Data.Metadata.Edm;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
//
|
||||
// The ConstraintManager module manages foreign key constraints for a query. It reshapes
|
||||
// referential constraints supplied by metadata into a more useful form.
|
||||
//
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
/// <summary>
|
||||
/// A simple class that represents a pair of extents
|
||||
/// </summary>
|
||||
internal class ExtentPair
|
||||
{
|
||||
#region public surface
|
||||
/// <summary>
|
||||
/// Return the left component of the pair
|
||||
/// </summary>
|
||||
internal md.EntitySetBase Left { get { return m_left; } }
|
||||
|
||||
/// <summary>
|
||||
/// Return the right component of the pair
|
||||
/// </summary>
|
||||
internal md.EntitySetBase Right { get { return m_right; } }
|
||||
|
||||
/// <summary>
|
||||
/// Equals
|
||||
/// </summary>
|
||||
/// <param name="obj"></param>
|
||||
/// <returns></returns>
|
||||
public override bool Equals(object obj)
|
||||
{
|
||||
ExtentPair other = obj as ExtentPair;
|
||||
return (other != null) && other.Left.Equals(this.Left) && other.Right.Equals(this.Right);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Hashcode
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
public override int GetHashCode()
|
||||
{
|
||||
return (this.Left.GetHashCode() << 4) ^ this.Right.GetHashCode();
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
internal ExtentPair(md.EntitySetBase left, md.EntitySetBase right)
|
||||
{
|
||||
m_left = left;
|
||||
m_right = right;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private md.EntitySetBase m_left;
|
||||
private md.EntitySetBase m_right;
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Information about a foreign-key constraint
|
||||
/// </summary>
|
||||
internal class ForeignKeyConstraint
|
||||
{
|
||||
#region public surface
|
||||
|
||||
/// <summary>
|
||||
/// Parent key properties
|
||||
/// </summary>
|
||||
internal List<string> ParentKeys { get { return m_parentKeys; } }
|
||||
/// <summary>
|
||||
/// Child key properties
|
||||
/// </summary>
|
||||
internal List<string> ChildKeys { get { return m_childKeys; } }
|
||||
|
||||
/// <summary>
|
||||
/// Get the parent-child pair
|
||||
/// </summary>
|
||||
internal ExtentPair Pair { get { return m_extentPair; } }
|
||||
|
||||
/// <summary>
|
||||
/// Return the child rowcount
|
||||
/// </summary>
|
||||
internal md.RelationshipMultiplicity ChildMultiplicity { get { return m_constraint.ToRole.RelationshipMultiplicity; } }
|
||||
|
||||
/// <summary>
|
||||
/// Get the corresponding parent (key) property, for a specific child (foreign key) property
|
||||
/// </summary>
|
||||
/// <param name="childPropertyName">child (foreign key) property name</param>
|
||||
/// <param name="parentPropertyName">corresponding parent property name</param>
|
||||
/// <returns>true, if the parent property was found</returns>
|
||||
internal bool GetParentProperty(string childPropertyName, out string parentPropertyName)
|
||||
{
|
||||
BuildKeyMap();
|
||||
return m_keyMap.TryGetValue(childPropertyName, out parentPropertyName);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
internal ForeignKeyConstraint(md.RelationshipType relType, md.RelationshipSet relationshipSet, md.ReferentialConstraint constraint)
|
||||
{
|
||||
md.AssociationSet assocSet = relationshipSet as md.AssociationSet;
|
||||
md.AssociationEndMember fromEnd = constraint.FromRole as md.AssociationEndMember;
|
||||
md.AssociationEndMember toEnd = constraint.ToRole as md.AssociationEndMember;
|
||||
|
||||
// Currently only Associations are supported
|
||||
if (null == assocSet || null == fromEnd || null == toEnd)
|
||||
{
|
||||
throw EntityUtil.NotSupported();
|
||||
}
|
||||
|
||||
m_constraint = constraint;
|
||||
md.EntitySet parent = System.Data.Common.Utils.MetadataHelper.GetEntitySetAtEnd(assocSet, fromEnd);// relationshipSet.GetRelationshipEndExtent(constraint.FromRole);
|
||||
md.EntitySet child = System.Data.Common.Utils.MetadataHelper.GetEntitySetAtEnd(assocSet, toEnd);// relationshipSet.GetRelationshipEndExtent(constraint.ToRole);
|
||||
m_extentPair = new ExtentPair(parent, child);
|
||||
m_childKeys = new List<string>();
|
||||
foreach (md.EdmProperty prop in constraint.ToProperties)
|
||||
{
|
||||
m_childKeys.Add(prop.Name);
|
||||
}
|
||||
|
||||
m_parentKeys = new List<string>();
|
||||
foreach (md.EdmProperty prop in constraint.FromProperties)
|
||||
{
|
||||
m_parentKeys.Add(prop.Name);
|
||||
}
|
||||
|
||||
PlanCompiler.Assert((md.RelationshipMultiplicity.ZeroOrOne == fromEnd.RelationshipMultiplicity || md.RelationshipMultiplicity.One == fromEnd.RelationshipMultiplicity), "from-end of relationship constraint cannot have multiplicity greater than 1");
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private ExtentPair m_extentPair;
|
||||
private List<string> m_parentKeys;
|
||||
private List<string> m_childKeys;
|
||||
private md.ReferentialConstraint m_constraint;
|
||||
private Dictionary<string, string> m_keyMap;
|
||||
#endregion
|
||||
|
||||
#region private methods
|
||||
|
||||
/// <summary>
|
||||
/// Build up an equivalence map of primary keys and foreign keys (ie) for each
|
||||
/// foreign key column, identify the corresponding primary key property
|
||||
/// </summary>
|
||||
private void BuildKeyMap()
|
||||
{
|
||||
if (m_keyMap != null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
m_keyMap = new Dictionary<string, string>();
|
||||
IEnumerator<md.EdmProperty> parentProps = m_constraint.FromProperties.GetEnumerator();
|
||||
IEnumerator<md.EdmProperty> childProps = m_constraint.ToProperties.GetEnumerator();
|
||||
while (true)
|
||||
{
|
||||
bool parentOver = !parentProps.MoveNext();
|
||||
bool childOver = !childProps.MoveNext();
|
||||
PlanCompiler.Assert(parentOver == childOver, "key count mismatch");
|
||||
if (parentOver)
|
||||
{
|
||||
break;
|
||||
}
|
||||
m_keyMap[childProps.Current.Name] = parentProps.Current.Name;
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Keeps track of all foreign key relationships
|
||||
/// </summary>
|
||||
internal class ConstraintManager
|
||||
{
|
||||
#region public methods
|
||||
/// <summary>
|
||||
/// Is there a parent child relationship between table1 and table2 ?
|
||||
/// </summary>
|
||||
/// <param name="table1">parent table ?</param>
|
||||
/// <param name="table2">child table ?</param>
|
||||
/// <param name="constraints">list of constraints ?</param>
|
||||
/// <returns>true if there is at least one constraint</returns>
|
||||
internal bool IsParentChildRelationship(md.EntitySetBase table1, md.EntitySetBase table2,
|
||||
out List<ForeignKeyConstraint> constraints)
|
||||
{
|
||||
LoadRelationships(table1.EntityContainer);
|
||||
LoadRelationships(table2.EntityContainer);
|
||||
|
||||
ExtentPair extentPair = new ExtentPair(table1, table2);
|
||||
return m_parentChildRelationships.TryGetValue(extentPair, out constraints);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Load all relationships in this entity container
|
||||
/// </summary>
|
||||
/// <param name="entityContainer"></param>
|
||||
internal void LoadRelationships(md.EntityContainer entityContainer)
|
||||
{
|
||||
// Check to see if I've already loaded information for this entity container
|
||||
if (m_entityContainerMap.ContainsKey(entityContainer))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Load all relationships from this entitycontainer
|
||||
foreach (md.EntitySetBase e in entityContainer.BaseEntitySets)
|
||||
{
|
||||
md.RelationshipSet relationshipSet = e as md.RelationshipSet;
|
||||
if (relationshipSet == null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Relationship sets can only contain relationships
|
||||
md.RelationshipType relationshipType = (md.RelationshipType)relationshipSet.ElementType;
|
||||
md.AssociationType assocType = relationshipType as md.AssociationType;
|
||||
|
||||
//
|
||||
// Handle only binary Association relationships for now
|
||||
//
|
||||
if (null == assocType || !IsBinary(relationshipType))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (md.ReferentialConstraint constraint in assocType.ReferentialConstraints)
|
||||
{
|
||||
List<ForeignKeyConstraint> fkConstraintList;
|
||||
ForeignKeyConstraint fkConstraint = new ForeignKeyConstraint(relationshipType, relationshipSet, constraint);
|
||||
if (!m_parentChildRelationships.TryGetValue(fkConstraint.Pair, out fkConstraintList))
|
||||
{
|
||||
fkConstraintList = new List<ForeignKeyConstraint>();
|
||||
m_parentChildRelationships[fkConstraint.Pair] = fkConstraintList;
|
||||
}
|
||||
//
|
||||
// Theoretically, we can have more than one fk constraint between
|
||||
// the 2 tables (though, it is unlikely)
|
||||
//
|
||||
fkConstraintList.Add(fkConstraint);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark this entity container as already loaded
|
||||
m_entityContainerMap[entityContainer] = entityContainer;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
internal ConstraintManager()
|
||||
{
|
||||
m_entityContainerMap = new Dictionary<md.EntityContainer, md.EntityContainer>();
|
||||
m_parentChildRelationships = new Dictionary<ExtentPair, List<ForeignKeyConstraint>>();
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private Dictionary<md.EntityContainer, md.EntityContainer> m_entityContainerMap;
|
||||
private Dictionary<ExtentPair, List<ForeignKeyConstraint>> m_parentChildRelationships;
|
||||
#endregion
|
||||
|
||||
#region private methods
|
||||
|
||||
/// <summary>
|
||||
/// Is this relationship a binary relationship (ie) does it have exactly 2 end points?
|
||||
///
|
||||
/// This should ideally be a method supported by RelationType itself
|
||||
/// </summary>
|
||||
/// <param name="relationshipType"></param>
|
||||
/// <returns>true, if this is a binary relationship</returns>
|
||||
private static bool IsBinary(md.RelationshipType relationshipType)
|
||||
{
|
||||
int endCount = 0;
|
||||
foreach(md.EdmMember member in relationshipType.Members)
|
||||
{
|
||||
if (member is md.RelationshipEndMember)
|
||||
{
|
||||
endCount++;
|
||||
if (endCount > 2)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return (endCount == 2);
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
}
|
@@ -0,0 +1 @@
|
||||
a324477779263ca181ec4ddd48934a24b6bdac99
|
@@ -0,0 +1,225 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="JoinElimination.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
using System.Globalization;
|
||||
|
||||
using System.Data.Query.InternalTrees;
|
||||
using System.Data.Metadata.Edm;
|
||||
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
/// <summary>
|
||||
/// The JoinElimination module is intended to do just that - eliminate unnecessary joins.
|
||||
/// This module deals with the following kinds of joins
|
||||
/// * Self-joins: The join can be eliminated, and either of the table instances can be
|
||||
/// used instead
|
||||
/// * Implied self-joins: Same as above
|
||||
/// * PK-FK joins: (More generally, UniqueKey-FK joins): Eliminate the join, and use just the FK table, if no
|
||||
/// column of the PK table is used (other than the join condition)
|
||||
/// * PK-PK joins: Eliminate the right side table, if we have a left-outer join
|
||||
/// </summary>
|
||||
internal class JoinElimination : BasicOpVisitorOfNode
|
||||
{
|
||||
#region private constants
|
||||
private const string SqlServerCeNamespaceName = "SqlServerCe";
|
||||
#endregion
|
||||
|
||||
#region private state
|
||||
private PlanCompiler m_compilerState;
|
||||
private Command Command { get { return m_compilerState.Command; } }
|
||||
private ConstraintManager ConstraintManager { get { return m_compilerState.ConstraintManager; } }
|
||||
private Dictionary<Node, Node> m_joinGraphUnnecessaryMap = new Dictionary<Node,Node>();
|
||||
private VarRemapper m_varRemapper;
|
||||
private bool m_treeModified = false;
|
||||
private VarRefManager m_varRefManager;
|
||||
private Nullable<bool> m_isSqlCe = null;
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
private JoinElimination(PlanCompiler compilerState)
|
||||
{
|
||||
m_compilerState = compilerState;
|
||||
m_varRemapper = new VarRemapper(m_compilerState.Command);
|
||||
m_varRefManager = new VarRefManager(m_compilerState.Command);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region public surface
|
||||
internal static bool Process(PlanCompiler compilerState)
|
||||
{
|
||||
JoinElimination je = new JoinElimination(compilerState);
|
||||
je.Process();
|
||||
return je.m_treeModified;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region private methods
|
||||
|
||||
/// <summary>
|
||||
/// Invokes the visitor
|
||||
/// </summary>
|
||||
private void Process()
|
||||
{
|
||||
this.Command.Root = VisitNode(this.Command.Root);
|
||||
}
|
||||
|
||||
#region JoinHelpers
|
||||
|
||||
#region Building JoinGraphs
|
||||
/// <summary>
|
||||
/// Do we need to build a join graph for this node - returns false, if we've already
|
||||
/// processed this
|
||||
/// </summary>
|
||||
/// <param name="joinNode"></param>
|
||||
/// <returns></returns>
|
||||
private bool NeedsJoinGraph(Node joinNode)
|
||||
{
|
||||
return !m_joinGraphUnnecessaryMap.ContainsKey(joinNode);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Do the real processing of the join graph.
|
||||
/// </summary>
|
||||
/// <param name="joinNode">current join node</param>
|
||||
/// <returns>modified join node</returns>
|
||||
private Node ProcessJoinGraph(Node joinNode)
|
||||
{
|
||||
// Build the join graph
|
||||
JoinGraph joinGraph = new JoinGraph(this.Command, this.ConstraintManager, this.m_varRefManager, joinNode, this.IsSqlCeProvider);
|
||||
|
||||
// Get the transformed node tree
|
||||
VarMap remappedVars;
|
||||
Dictionary<Node, Node> processedNodes;
|
||||
Node newNode = joinGraph.DoJoinElimination(out remappedVars, out processedNodes);
|
||||
|
||||
// Get the set of vars that need to be renamed
|
||||
foreach (KeyValuePair<Var, Var> kv in remappedVars)
|
||||
{
|
||||
m_varRemapper.AddMapping(kv.Key, kv.Value);
|
||||
}
|
||||
// get the set of nodes that have already been processed
|
||||
foreach (Node n in processedNodes.Keys)
|
||||
{
|
||||
m_joinGraphUnnecessaryMap[n] = n;
|
||||
}
|
||||
|
||||
return newNode;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether we are running against a SQL CE provider or not.
|
||||
/// </summary>
|
||||
private bool IsSqlCeProvider
|
||||
{
|
||||
get
|
||||
{
|
||||
if (!m_isSqlCe.HasValue)
|
||||
{
|
||||
// Figure out if we are using SQL CE by asking the store provider manifest for its namespace name.
|
||||
PlanCompiler.Assert(m_compilerState != null, "Plan compiler cannot be null");
|
||||
var sspace = (StoreItemCollection)m_compilerState.MetadataWorkspace.GetItemCollection(Metadata.Edm.DataSpace.SSpace);
|
||||
if (sspace != null)
|
||||
{
|
||||
m_isSqlCe = sspace.StoreProviderManifest.NamespaceName == JoinElimination.SqlServerCeNamespaceName;
|
||||
}
|
||||
}
|
||||
// If the sspace was null then m_isSqlCe still doesn't have a value. Use 'false' as default.
|
||||
return m_isSqlCe.HasValue ? m_isSqlCe.Value : false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default handler for a node. Simply visits the children, then handles any var
|
||||
/// remapping, and then recomputes the node info
|
||||
/// </summary>
|
||||
/// <param name="n"></param>
|
||||
/// <returns></returns>
|
||||
private Node VisitDefaultForAllNodes(Node n)
|
||||
{
|
||||
VisitChildren(n);
|
||||
m_varRemapper.RemapNode(n);
|
||||
this.Command.RecomputeNodeInfo(n);
|
||||
return n;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region Visitor overrides
|
||||
|
||||
/// <summary>
|
||||
/// Invokes default handling for a node and adds the child-parent tracking info to the VarRefManager.
|
||||
/// </summary>
|
||||
/// <param name="n"></param>
|
||||
/// <returns></returns>
|
||||
protected override Node VisitDefault(Node n)
|
||||
{
|
||||
m_varRefManager.AddChildren(n);
|
||||
return VisitDefaultForAllNodes(n);
|
||||
}
|
||||
|
||||
#region RelOps
|
||||
#region JoinOps
|
||||
|
||||
/// <summary>
|
||||
/// Build a join graph for this node for this node if necessary, and process it
|
||||
/// </summary>
|
||||
/// <param name="op">current join op</param>
|
||||
/// <param name="joinNode">current join node</param>
|
||||
/// <returns></returns>
|
||||
protected override Node VisitJoinOp(JoinBaseOp op, Node joinNode)
|
||||
{
|
||||
Node newNode;
|
||||
|
||||
// Build and process a join graph if necessary
|
||||
if (NeedsJoinGraph(joinNode))
|
||||
{
|
||||
newNode = ProcessJoinGraph(joinNode);
|
||||
if (newNode != joinNode)
|
||||
{
|
||||
m_treeModified = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
newNode = joinNode;
|
||||
}
|
||||
|
||||
// Now do the default processing (ie) visit the children, compute the nodeinfo etc.
|
||||
return VisitDefaultForAllNodes(newNode);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
||||
}
|
@@ -0,0 +1 @@
|
||||
170d81de80a5417365da37de844ac1f7e1c4ef56
|
@@ -0,0 +1,398 @@
|
||||
//---------------------------------------------------------------------
|
||||
// <copyright file="KeyPullup.cs" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// @owner [....]
|
||||
// @backupOwner [....]
|
||||
//---------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...
|
||||
|
||||
// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
|
||||
// to prevent from simple mistakes during development (e.g. method argument validation
|
||||
// in cases where it was you who created the variables or the variables had already been validated or
|
||||
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
|
||||
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
|
||||
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
|
||||
// the shipped product.
|
||||
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
|
||||
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
|
||||
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
|
||||
// or the tree was built/rewritten not the way we thought it was.
|
||||
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
|
||||
// PlanCompiler.Assert.
|
||||
|
||||
using System.Globalization;
|
||||
|
||||
using System.Data.Query.InternalTrees;
|
||||
|
||||
//
|
||||
// The KeyPullup module helps pull up keys from the leaves of a subtree.
|
||||
//
|
||||
namespace System.Data.Query.PlanCompiler
|
||||
{
|
||||
/// <summary>
|
||||
/// The KeyPullup class subclasses the default visitor and pulls up keys
|
||||
/// for the different node classes below.
|
||||
/// The only Op that really deserves special treatment is the ProjectOp.
|
||||
/// </summary>
|
||||
internal class KeyPullup : BasicOpVisitor
|
||||
{
|
||||
#region private state
|
||||
private Command m_command;
|
||||
#endregion
|
||||
|
||||
#region constructors
|
||||
internal KeyPullup(Command command)
|
||||
{
|
||||
m_command = command;
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region public methods
|
||||
/// <summary>
|
||||
/// Pull up keys (if possible) for the given node
|
||||
/// </summary>
|
||||
/// <param name="node">node to pull up keys for</param>
|
||||
/// <returns>Keys for the node</returns>
|
||||
internal KeyVec GetKeys(Node node)
|
||||
{
|
||||
ExtendedNodeInfo nodeInfo = node.GetExtendedNodeInfo(m_command);
|
||||
if (nodeInfo.Keys.NoKeys)
|
||||
{
|
||||
VisitNode(node);
|
||||
}
|
||||
return nodeInfo.Keys;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region private methods
|
||||
|
||||
#region Visitor Methods
|
||||
|
||||
#region general helpers
|
||||
/// <summary>
|
||||
/// Default visitor for children. Simply visit all children, and
|
||||
/// try to get keys for those nodes (relops, physicalOps) that
|
||||
/// don't have keys as yet.
|
||||
/// </summary>
|
||||
/// <param name="n">Current node</param>
|
||||
protected override void VisitChildren(Node n)
|
||||
{
|
||||
foreach (Node chi in n.Children)
|
||||
{
|
||||
if (chi.Op.IsRelOp || chi.Op.IsPhysicalOp)
|
||||
{
|
||||
GetKeys(chi);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region RelOp Visitors
|
||||
|
||||
/// <summary>
|
||||
/// Default visitor for RelOps. Simply visits the children, and
|
||||
/// then tries to recompute the NodeInfo (with the fond hope that
|
||||
/// some keys have now shown up)
|
||||
/// </summary>
|
||||
/// <param name="op"></param>
|
||||
/// <param name="n"></param>
|
||||
protected override void VisitRelOpDefault(RelOp op, Node n)
|
||||
{
|
||||
VisitChildren(n);
|
||||
m_command.RecomputeNodeInfo(n);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Visitor for a ScanTableOp. Simply ensures that the keys get
|
||||
/// added to the list of referenced columns
|
||||
/// </summary>
|
||||
/// <param name="op">current ScanTableOp</param>
|
||||
/// <param name="n">current subtree</param>
|
||||
public override void Visit(ScanTableOp op, Node n)
|
||||
{
|
||||
// find the keys of the table. Make sure that they are
|
||||
// all references
|
||||
op.Table.ReferencedColumns.Or(op.Table.Keys);
|
||||
// recompute the nodeinfo - keys won't get picked up otherwise
|
||||
m_command.RecomputeNodeInfo(n);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pulls up keys for a ProjectOp. First visits its children to pull
|
||||
/// up its keys; then identifies any keys from the input that it may have
|
||||
/// projected out - and adds them to the output list of vars
|
||||
/// </summary>
|
||||
/// <param name="op">Current ProjectOp</param>
|
||||
/// <param name="n">Current subtree</param>
|
||||
public override void Visit(ProjectOp op, Node n)
|
||||
{
|
||||
VisitChildren(n);
|
||||
|
||||
ExtendedNodeInfo childNodeInfo = n.Child0.GetExtendedNodeInfo(m_command);
|
||||
if (!childNodeInfo.Keys.NoKeys)
|
||||
{
|
||||
VarVec outputVars = m_command.CreateVarVec(op.Outputs);
|
||||
// NOTE: This code appears in NodeInfoVisitor as well. Try to see if we
|
||||
// can share this somehow.
|
||||
Dictionary<Var, Var> varRenameMap = NodeInfoVisitor.ComputeVarRemappings(n.Child1);
|
||||
VarVec mappedKeyVec = childNodeInfo.Keys.KeyVars.Remap(varRenameMap);
|
||||
outputVars.Or(mappedKeyVec);
|
||||
op.Outputs.InitFrom(outputVars);
|
||||
}
|
||||
m_command.RecomputeNodeInfo(n);
|
||||
}
|
||||
|
||||
/// <summary>
/// Comments from Murali:
///
/// There are several cases to consider here.
///
/// Case 0:
/// Let's assume that K1 is the set of keys ({k1, k2, ..., kn}) for the
/// first input, and K2 ({l1, l2, ...}) is the set of keys for the second
/// input.
///
/// The best case is when both K1 and K2 have the same cardinality (hopefully
/// greater than 0), and the keys are in the same locations (i.e., the corresponding
/// positions in the select-list). Even in this case, it's not enough to take
/// the keys, and treat them as the keys of the union-all. What we'll need to
/// do is to add a "branch" discriminator constant for each branch of the
/// union-all, and use this as the prefix for the keys.
///
/// For example, if I had:
///
/// Select c1, c2, c3... from ...
/// Union all
/// Select d1, d2, d3... from ...
///
/// And for the sake of argument, let's say that {c2} and {d2} are the keys of
/// each of the branches. What you'll need to do is to translate this into
///
/// Select 0 as bd, c1, c2, c3... from ...
/// Union all
/// Select 1 as bd, d1, d2, d3... from ...
///
/// And then treat {bd, c2/d2} as the key of the union-all
///
/// Case 1 (actually, a subcase of Case 0):
/// Now, if the keys don't align, then we can simply take the union of the
/// corresponding positions, and make them all the keys (we would still need
/// the branch discriminator)
///
/// Case 2:
/// Finally, if you need to "pull" up keys from either of the branches, it is
/// possible that the branches get out of whack. We will then need to push up
/// the keys (with nulls if the other branch doesn't have the corresponding key)
/// into the union-all. (We still need the branch discriminator).
///
/// Now, unfortunately, whenever we've got polymorphic entity types, we'll end up
/// in case 2 way more often than we really want to, because when we're pulling up
/// keys, we don't want to reason about a caseop (which is how polymorphic types
/// wrap their key value).
///
/// To simplify all of this, we:
///
/// (1) Pull up the keys for both branches of the UnionAll, and compute which
/// keys are in the outputs and which are missing from the outputs.
///
/// (2) Accumulate all the missing keys.
///
/// (3) Slap a projectOp around each branch, adding a branch discriminator
/// var and all the missing keys. When keys are missing from a different
/// branch, we'll construct null ops for them on the other branches. If
/// a branch already has a branch discriminator, we'll re-use it instead
/// of constructing a new one. (Of course, if there aren't any keys to
/// add and it's already including the branch discriminator we won't
/// need the projectOp)
///
/// </summary>
/// <param name="op">the UnionAllOp</param>
/// <param name="n">current subtree</param>
public override void Visit(UnionAllOp op, Node n)
{
#if DEBUG
string input = Dump.ToXml(m_command, n);
#endif //DEBUG

// Ensure we have keys pulled up on each branch of the union all.
VisitChildren(n);

// Create the setOp var we'll use to output the branch discriminator value; if
// any of the branches are already surfacing a branchDiscriminator var to the
// output of this operation then we won't need to use this but we construct it
// early to simplify logic.
Var outputBranchDiscriminatorVar = m_command.CreateSetOpVar(m_command.IntegerType);

// Now ensure that we're outputting the key vars from this op as well.
VarList allKeyVarsMissingFromOutput = Command.CreateVarList();
VarVec[] keyVarsMissingFromOutput = new VarVec[n.Children.Count];

for (int i = 0; i < n.Children.Count; i++)
{
Node branchNode = n.Children[i];
ExtendedNodeInfo branchNodeInfo = m_command.GetExtendedNodeInfo(branchNode);

// Identify keys that aren't in the output list of this operation. We
// determine these by remapping the keys that are found through the node's
// VarMap, which gives us the keys in the same "varspace" as the outputs
// of the UnionAll, then we subtract out the outputs of this UnionAll op,
// leaving things that are not in the output vars. Of course, if they're
// not in the output vars, then we didn't really remap.
VarVec existingKeyVars = branchNodeInfo.Keys.KeyVars.Remap(op.VarMap[i]);

keyVarsMissingFromOutput[i] = m_command.CreateVarVec(existingKeyVars);
keyVarsMissingFromOutput[i].Minus(op.Outputs);

// Special Case: if the branch is a UnionAll, it will already have its
// branch discriminator var added in the keys; we don't want to add that
// a second time...
if (OpType.UnionAll == branchNode.Op.OpType)
{
UnionAllOp branchUnionAllOp = (UnionAllOp)branchNode.Op;

keyVarsMissingFromOutput[i].Clear(branchUnionAllOp.BranchDiscriminator);
}

allKeyVarsMissingFromOutput.AddRange(keyVarsMissingFromOutput[i]);
}

// Construct the setOp vars we're going to map to output.
VarList allKeyVarsToAddToOutput = Command.CreateVarList();

foreach (Var v in allKeyVarsMissingFromOutput)
{
Var newKeyVar = m_command.CreateSetOpVar(v.Type);
allKeyVarsToAddToOutput.Add(newKeyVar);
}

// Now that we've identified all the keys we need to add, ensure that each branch
// has both the branch discriminator var and all the keys in it, even when
// the keys are just going to null (which we construct, as needed)
for (int i = 0; i < n.Children.Count; i++)
{
Node branchNode = n.Children[i];
ExtendedNodeInfo branchNodeInfo = m_command.GetExtendedNodeInfo(branchNode);

VarVec branchOutputVars = m_command.CreateVarVec();
List<Node> varDefNodes = new List<Node>();

// If the branch is a UnionAllOp that has a branch discriminator var then we can
// use it, otherwise we'll construct a new integer constant with the next value
// of the branch discriminator value from the command object.
Var branchDiscriminatorVar;

if (OpType.UnionAll == branchNode.Op.OpType && null != ((UnionAllOp)branchNode.Op).BranchDiscriminator)
{
branchDiscriminatorVar = ((UnionAllOp)branchNode.Op).BranchDiscriminator;

// If the branch has a discriminator var, but we haven't added it to the
// varmap yet, then we do so now.
if (!op.VarMap[i].ContainsValue(branchDiscriminatorVar))
{
op.VarMap[i].Add(outputBranchDiscriminatorVar, branchDiscriminatorVar);
// We don't need to add this to the branch outputs, because it's already there,
// otherwise we wouldn't have gotten here, yes?
}
else
{
// In this case, we're already outputting the branch discriminator var -- we'll
// just use it for both sides. We should never have a case where only one of the
// two branches are outputting the branch discriminator var, because it can only
// be constructed in this method, and we wouldn't need it for any other purpose.
PlanCompiler.Assert(0 == i, "right branch has a discriminator var that the left branch doesn't have?");
VarMap reverseVarMap = op.VarMap[i].GetReverseMap();
outputBranchDiscriminatorVar = reverseVarMap[branchDiscriminatorVar];
}
}
else
{
// Not a unionAll -- we have to add a BranchDiscriminator var.
varDefNodes.Add(
m_command.CreateVarDefNode(
m_command.CreateNode(
m_command.CreateConstantOp(m_command.IntegerType, m_command.NextBranchDiscriminatorValue)), out branchDiscriminatorVar));

branchOutputVars.Set(branchDiscriminatorVar);
op.VarMap[i].Add(outputBranchDiscriminatorVar, branchDiscriminatorVar);
}

// Append all the missing keys to the branch outputs. If the missing key
// is not from this branch then create a null.
for (int j = 0; j < allKeyVarsMissingFromOutput.Count; j++)
{
Var keyVar = allKeyVarsMissingFromOutput[j];

if (!keyVarsMissingFromOutput[i].IsSet(keyVar))
{
varDefNodes.Add(
m_command.CreateVarDefNode(
m_command.CreateNode(
m_command.CreateNullOp(keyVar.Type)), out keyVar));

branchOutputVars.Set(keyVar);
}

// In all cases, we're adding a key to the output so we need to update the
// varmap.
op.VarMap[i].Add(allKeyVarsToAddToOutput[j], keyVar);
}

// If we got this far and didn't add anything to the branch, then we're done.
// Otherwise we'll have to construct the new projectOp around the input branch
// to add the stuff we've added.
if (branchOutputVars.IsEmpty)
{
// Actually, we're not quite done -- we need to update the key vars for the
// branch to include the branch discriminator var we picked up above.
branchNodeInfo.Keys.KeyVars.Set(branchDiscriminatorVar);
}
else
{
PlanCompiler.Assert(varDefNodes.Count != 0, "no new nodes?");

// Start by ensuring all the existing outputs from the branch are in the list.
foreach (Var v in op.VarMap[i].Values)
{
branchOutputVars.Set(v);
}

// Now construct a project op to project out everything we've added, and
// replace the branchNode with it in the flattened ladder.
n.Children[i] = m_command.CreateNode(m_command.CreateProjectOp(branchOutputVars),
branchNode,
m_command.CreateNode(m_command.CreateVarDefListOp(), varDefNodes));

// Finally, ensure that we update the Key info for the projectOp to include
// the original branch's keys, along with the branch discriminator var.
m_command.RecomputeNodeInfo(n.Children[i]);
ExtendedNodeInfo projectNodeInfo = m_command.GetExtendedNodeInfo(n.Children[i]);
projectNodeInfo.Keys.KeyVars.InitFrom(branchNodeInfo.Keys.KeyVars);
projectNodeInfo.Keys.KeyVars.Set(branchDiscriminatorVar);
}
}

// All done with the branches, now it's time to update the UnionAll op to indicate
// that we've got a branch discriminator var.
n.Op = m_command.CreateUnionAllOp(op.VarMap[0], op.VarMap[1], outputBranchDiscriminatorVar);

// Finally, the thing we've all been waiting for -- computing the keys. We cheat here and let
// nodeInfo do it so we don't have to duplicate the logic...
m_command.RecomputeNodeInfo(n);

#if DEBUG
input = input.Trim();
string output = Dump.ToXml(m_command, n);
#endif //DEBUG
}
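
// Editorial sketch (not part of the original source): the 'bd' idea from the comment
// above, reduced to plain collections. Tagging each branch's rows with a branch
// discriminator makes the pair (bd, key) unique across the UNION ALL even when key
// values collide between branches. Names and types here are illustrative only.
internal static class BranchDiscriminatorSketch
{
// E.g. keys {1, 2} and {2, 3} become (0,1), (0,2), (1,2), (1,3) -- all distinct.
internal static System.Collections.Generic.List<System.Tuple<int, int>> TagBranches(int[] keysOfBranch0, int[] keysOfBranch1)
{
var result = new System.Collections.Generic.List<System.Tuple<int, int>>();
foreach (int k in keysOfBranch0) { result.Add(System.Tuple.Create(0, k)); }
foreach (int k in keysOfBranch1) { result.Add(System.Tuple.Create(1, k)); }
return result;
}
}
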
#endregion

#endregion
#endregion
}
}
@@ -0,0 +1 @@
41b2629eb7fed3c73e545bb1444227d163f72ea3
@@ -0,0 +1 @@
5cdb77d54936dba3669f3bfce9ca6d9517b16574
@@ -0,0 +1,270 @@
//---------------------------------------------------------------------
// <copyright file="Normalizer.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Metadata.Edm;
using System.Data.Query.InternalTrees;
//using System.Diagnostics; // Please use PlanCompiler.Assert instead of Debug.Assert in this class...

// It is fine to use Debug.Assert in cases where you assert an obvious thing that is supposed
// to prevent from simple mistakes during development (e.g. method argument validation
// in cases where it was you who created the variables or the variables had already been validated or
// in "else" clauses where due to code changes (e.g. adding a new value to an enum type) the default
// "else" block is chosen why the new condition should be treated separately). This kind of asserts are
// (can be) helpful when developing new code to avoid simple mistakes but have no or little value in
// the shipped product.
// PlanCompiler.Assert *MUST* be used to verify conditions in the trees. These would be assumptions
// about how the tree was built etc. - in these cases we probably want to throw an exception (this is
// what PlanCompiler.Assert does when the condition is not met) if either the assumption is not correct
// or the tree was built/rewritten not the way we thought it was.
// Use your judgment - if you rather remove an assert than ship it use Debug.Assert otherwise use
// PlanCompiler.Assert.

//
// The normalizer performs transformations of the tree to bring it to a 'normalized' format
// In particular it does the following:
// (a) Transforms collection aggregate functions into a GroupBy.
// (b) Translates Exists(X) into Exists(select 1 from X)
//
namespace System.Data.Query.PlanCompiler
{
/// <summary>
/// The normalizer performs transformations of the tree to bring it to a 'normalized' format
/// </summary>
internal class Normalizer : SubqueryTrackingVisitor
{
#region constructors
private Normalizer(PlanCompiler planCompilerState)
:base(planCompilerState)
{
}
#endregion

#region public methods
/// <summary>
/// The driver routine.
/// </summary>
/// <param name="planCompilerState">plan compiler state</param>
internal static void Process(PlanCompiler planCompilerState)
{
Normalizer normalizer = new Normalizer(planCompilerState);
normalizer.Process();
}

#endregion

#region private methods

#region driver
private void Process()
{
m_command.Root = VisitNode(m_command.Root);
}
#endregion

#region visitor methods

#region ScalarOps

/// <summary>
/// Translate Exists(X) into Exists(select 1 from X)
/// </summary>
/// <param name="op"></param>
/// <param name="n"></param>
/// <returns></returns>
public override Node Visit(ExistsOp op, Node n)
{
VisitChildren(n);

// Build up a dummy project node over the input
n.Child0 = BuildDummyProjectForExists(n.Child0);

return n;
}

/// <summary>
/// Build Project(select 1 from child).
/// </summary>
/// <param name="child"></param>
/// <returns></returns>
private Node BuildDummyProjectForExists(Node child)
{
Var newVar;
Node projectNode = m_command.BuildProject(
child,
m_command.CreateNode(m_command.CreateInternalConstantOp(m_command.IntegerType, 1)),
out newVar);
return projectNode;
}
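
// Editorial sketch (not part of the original source): the rewrite above only needs to
// know whether the input produces at least one row, so every surviving row is replaced
// by the constant 1. A standalone analogue over IEnumerable; names are illustrative only.
private static System.Collections.Generic.IEnumerable<int> SelectOneSketch<T>(System.Collections.Generic.IEnumerable<T> input)
{
// "select 1 from input": the row payload is discarded, only row existence matters.
foreach (T row in input) { yield return 1; }
}

private static bool ExistsSketch<T>(System.Collections.Generic.IEnumerable<T> input)
{
// exists(input) is equivalent to exists(select 1 from input).
foreach (int one in SelectOneSketch(input)) { return true; }
return false;
}
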
/// <summary>
/// Build up an unnest above a scalar op node
/// X => unnest(X)
/// </summary>
/// <param name="collectionNode">the scalarop collection node</param>
/// <returns>the unnest node</returns>
private Node BuildUnnest(Node collectionNode)
{
PlanCompiler.Assert(collectionNode.Op.IsScalarOp, "non-scalar usage of Unnest?");
PlanCompiler.Assert(TypeSemantics.IsCollectionType(collectionNode.Op.Type), "non-collection usage for Unnest?");

Var newVar;
Node varDefNode = m_command.CreateVarDefNode(collectionNode, out newVar);
UnnestOp unnestOp = m_command.CreateUnnestOp(newVar);
Node unnestNode = m_command.CreateNode(unnestOp, varDefNode);

return unnestNode;
}

/// <summary>
/// Converts a reference to a TVF as follows: Collect(PhysicalProject(Unnest(Func)))
/// </summary>
/// <param name="op">current function op</param>
/// <param name="n">current function subtree</param>
/// <returns>the new expression that corresponds to the TVF</returns>
private Node VisitCollectionFunction(FunctionOp op, Node n)
{
PlanCompiler.Assert(TypeSemantics.IsCollectionType(op.Type), "non-TVF function?");

Node unnestNode = BuildUnnest(n);
UnnestOp unnestOp = unnestNode.Op as UnnestOp;
PhysicalProjectOp projectOp = m_command.CreatePhysicalProjectOp(unnestOp.Table.Columns[0]);
Node projectNode = m_command.CreateNode(projectOp, unnestNode);
CollectOp collectOp = m_command.CreateCollectOp(n.Op.Type);
Node collectNode = m_command.CreateNode(collectOp, projectNode);

return collectNode;
}

/// <summary>
/// Converts a collection aggregate function count(X), where X is a collection, into
/// two parts. Part A is a groupby subquery that looks like
/// GroupBy(Unnest(X), empty, count(y))
/// where "empty" describes the fact that the groupby has no keys, and y is an
/// element var of the Unnest
///
/// Part B is a VarRef that refers to the aggregate var for count(y) described above.
///
/// Logically, we would replace the entire functionOp by element(GroupBy...). However,
/// since we also want to translate element() into single-row-subqueries, we do this
/// here as well.
///
/// The function itself is replaced by the VarRef, and the GroupBy is added to the list
/// of scalar subqueries for the current relOp node on the stack
///
/// </summary>
/// <param name="op">the functionOp for the collection agg</param>
/// <param name="n">current subtree</param>
/// <returns>the VarRef node that should replace the function</returns>
private Node VisitCollectionAggregateFunction(FunctionOp op, Node n)
{
TypeUsage softCastType = null;
Node argNode = n.Child0;
if (OpType.SoftCast == argNode.Op.OpType)
{
softCastType = TypeHelpers.GetEdmType<CollectionType>(argNode.Op.Type).TypeUsage;
argNode = argNode.Child0;

while (OpType.SoftCast == argNode.Op.OpType)
{
argNode = argNode.Child0;
}
}

Node unnestNode = BuildUnnest(argNode);
UnnestOp unnestOp = unnestNode.Op as UnnestOp;
Var unnestOutputVar = unnestOp.Table.Columns[0];

AggregateOp aggregateOp = m_command.CreateAggregateOp(op.Function, false);
VarRefOp unnestVarRefOp = m_command.CreateVarRefOp(unnestOutputVar);
Node unnestVarRefNode = m_command.CreateNode(unnestVarRefOp);
if (softCastType != null)
{
unnestVarRefNode = m_command.CreateNode(m_command.CreateSoftCastOp(softCastType), unnestVarRefNode);
}
Node aggExprNode = m_command.CreateNode(aggregateOp, unnestVarRefNode);

VarVec keyVars = m_command.CreateVarVec(); // empty keys
Node keyVarDefListNode = m_command.CreateNode(m_command.CreateVarDefListOp());

VarVec gbyOutputVars = m_command.CreateVarVec();
Var aggVar;
Node aggVarDefListNode = m_command.CreateVarDefListNode(aggExprNode, out aggVar);
gbyOutputVars.Set(aggVar);
GroupByOp gbyOp = m_command.CreateGroupByOp(keyVars, gbyOutputVars);
Node gbySubqueryNode = m_command.CreateNode(gbyOp, unnestNode, keyVarDefListNode, aggVarDefListNode);

// "Move" this subquery to my parent relop
Node ret = AddSubqueryToParentRelOp(aggVar, gbySubqueryNode);

return ret;
}
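
// Editorial sketch (not part of the original source): the shape produced above -- a
// group-by with no keys over the unnested collection -- reduced to a plain loop.
// Unnest(X) yields one row per element y; the single, key-less group accumulates count(y).
private static int CountViaKeylessGroupBySketch<T>(System.Collections.Generic.IEnumerable<T> collection)
{
int count = 0;               // the lone group's aggregate var
foreach (T y in collection)  // Unnest(X): one row per element y
{
count++;                     // count(y) accumulated into the single group
}
return count;                // the VarRef that replaces the functionOp reads this value
}
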
/// <summary>
/// Pre-processing for a function. Does the default scalar op processing.
/// If the function returns a collection (TVF), the method converts this expression into
/// Collect(PhysicalProject(Unnest(Func))).
/// If the function is a collection aggregate, converts it into the corresponding group aggregate.
/// </summary>
/// <param name="op"></param>
/// <param name="n"></param>
/// <returns></returns>
public override Node Visit(FunctionOp op, Node n)
{
VisitScalarOpDefault(op, n);
Node newNode = null;

// Is this a TVF?
if (TypeSemantics.IsCollectionType(op.Type))
{
newNode = VisitCollectionFunction(op, n);
}
// Is this a collection-aggregate function?
else if (PlanCompilerUtil.IsCollectionAggregateFunction(op, n))
{
newNode = VisitCollectionAggregateFunction(op, n);
}
else
{
newNode = n;
}

PlanCompiler.Assert(newNode != null, "failure to construct a functionOp?");
return newNode;
}

#endregion

#region RelOps
/// <summary>
/// Processing for all JoinOps
/// </summary>
/// <param name="op">JoinOp</param>
/// <param name="n">Current subtree</param>
/// <returns></returns>
protected override Node VisitJoinOp(JoinBaseOp op, Node n)
{
if (base.ProcessJoinOp(op, n))
{
// update the join condition
// #479372: Build up a dummy project node over the input, as we always wrap the child of exists
n.Child2.Child0 = BuildDummyProjectForExists(n.Child2.Child0);
}
return n;
}

#endregion

#endregion

#endregion
}
}
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,156 @@
//---------------------------------------------------------------------
// <copyright file="PlanCompilerUtil.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Data.Common.Utils;
using System.Data.Metadata.Edm;
using System.Data.Query.InternalTrees;

namespace System.Data.Query.PlanCompiler
{
/// <summary>
/// Utility class for the methods shared among the classes comprising the plan compiler
/// </summary>
internal static class PlanCompilerUtil
{
/// <summary>
/// Utility method that determines whether a given CaseOp subtree can be optimized.
/// Called by both PreProcessor and NominalTypeEliminator.
///
/// If the case statement is of the shape:
/// case when X then NULL else Y, or
/// case when X then Y else NULL,
/// where Y is of row type, and the types of the input CaseOp, the NULL and Y are the same,
/// return true
/// </summary>
/// <param name="op"></param>
/// <param name="n"></param>
/// <returns></returns>
internal static bool IsRowTypeCaseOpWithNullability(CaseOp op, Node n, out bool thenClauseIsNull)
{
thenClauseIsNull = false; //any default value will do

if (!TypeSemantics.IsRowType(op.Type))
{
return false;
}
if (n.Children.Count != 3)
{
return false;
}

//All three types must be equal
if (!n.Child1.Op.Type.EdmEquals(op.Type) || !n.Child2.Op.Type.EdmEquals(op.Type))
{
return false;
}

//At least one of Child1 and Child2 needs to be a null
if (n.Child1.Op.OpType == OpType.Null)
{
thenClauseIsNull = true;
return true;
}
if (n.Child2.Op.OpType == OpType.Null)
{
// thenClauseIsNull stays false
return true;
}

return false;
}

/// <summary>
/// Is this function a collection aggregate function? It is, if
/// - it has exactly one child
/// - that child is a collection type
/// - and the function has been marked with the aggregate attribute
/// </summary>
/// <param name="op">the function op</param>
/// <param name="n">the current subtree</param>
/// <returns>true, if this was a collection aggregate function</returns>
internal static bool IsCollectionAggregateFunction(FunctionOp op, Node n)
{
return ((n.Children.Count == 1) &&
TypeSemantics.IsCollectionType(n.Child0.Op.Type) &&
TypeSemantics.IsAggregateFunction(op.Function));
}

/// <summary>
/// Is the given op one of the ConstantBaseOp-s
/// </summary>
/// <param name="opType"></param>
/// <returns></returns>
internal static bool IsConstantBaseOp(OpType opType)
{
return opType == OpType.Constant ||
opType == OpType.InternalConstant ||
opType == OpType.Null ||
opType == OpType.NullSentinel;
}

/// <summary>
/// Combine two predicates by trying to avoid the predicate parts of the
/// second one that are already present in the first one.
///
/// In particular, given two nodes, predicate1 and predicate2,
/// it creates a combined predicate logically equivalent to
/// predicate1 AND predicate2,
/// but it does not include any AND parts of predicate2 that are present
/// in predicate1.
/// </summary>
/// <param name="predicate1"></param>
/// <param name="predicate2"></param>
/// <param name="command"></param>
/// <returns></returns>
internal static Node CombinePredicates(Node predicate1, Node predicate2, Command command)
{
IEnumerable<Node> andParts1 = BreakIntoAndParts(predicate1);
IEnumerable<Node> andParts2 = BreakIntoAndParts(predicate2);

Node result = predicate1;

foreach (Node predicatePart2 in andParts2)
{
bool foundMatch = false;
foreach (Node predicatePart1 in andParts1)
{
if (predicatePart1.IsEquivalent(predicatePart2))
{
foundMatch = true;
break;
}
}
if (!foundMatch)
{
result = command.CreateNode(command.CreateConditionalOp(OpType.And), result, predicatePart2);
}
}
return result;
}

/// <summary>
/// Create a list of AND parts for a given predicate.
/// For example, if the predicate is of the shape:
/// ((p1 and p2) and (p3 and p4)) the list is p1, p2, p3, p4
/// The predicates p1, p2, p3, p4 may be roots of subtrees that
/// have nodes with AND ops, but
/// would not be broken unless they are the AND nodes themselves.
/// </summary>
/// <param name="predicate"></param>
/// <returns>the list of AND parts</returns>
private static IEnumerable<Node> BreakIntoAndParts(Node predicate)
{
return Helpers.GetLeafNodes<Node>(predicate,
node => (node.Op.OpType != OpType.And),
node => (new[] {node.Child0, node.Child1}));
}
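
// Editorial sketch (not part of the original source): the two helpers above, reduced to a
// toy model in which a predicate is a binary tree of strings and leaf equivalence is plain
// string equality (a stand-in for Node.IsEquivalent). All names and types are illustrative only.
internal sealed class PredicateSketch
{
internal string Text;                 // leaf comparison text, e.g. "a = 1", or "AND"
internal PredicateSketch Left, Right; // children for "AND" nodes; null for leaves
}

// Mirrors BreakIntoAndParts: collect the non-AND leaves of the AND-tree.
private static List<PredicateSketch> AndPartsSketch(PredicateSketch p)
{
var parts = new List<PredicateSketch>();
if (p == null) { return parts; }
if (p.Text != "AND") { parts.Add(p); return parts; }
parts.AddRange(AndPartsSketch(p.Left));
parts.AddRange(AndPartsSketch(p.Right));
return parts;
}

// Mirrors CombinePredicates: AND onto p1 only those parts of p2 not already present in p1.
private static PredicateSketch CombineSketch(PredicateSketch p1, PredicateSketch p2)
{
PredicateSketch result = p1;
var parts1 = AndPartsSketch(p1);
foreach (PredicateSketch part2 in AndPartsSketch(p2))
{
bool found = false;
foreach (PredicateSketch part1 in parts1) { if (part1.Text == part2.Text) { found = true; break; } }
if (!found) { result = new PredicateSketch { Text = "AND", Left = result, Right = part2 }; }
}
return result;
}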
}
}
@@ -0,0 +1 @@
b3a359ed137dba35ac5aede1b24795a6259ca829
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff