You've already forked UnrealEngineUWP
mirror of
https://github.com/izzy2lost/UnrealEngineUWP.git
synced 2026-03-26 18:15:20 -07:00
* Rename blacklist to excludelist (requested by high management) * Support section exclusion rule to be able to exclude entire section of tests (ie PathTracing is only supported on Win64 for now) * Mark excluded test as skipped in the report instead of entirely removed for test list. Check for exclusion just before running the test. * Remove NotEnoughParticipant state in favor of Skipped (same conditions lead to Skipped state with appropriate messaging) * Add support for exclusion management from the Test Automation window. (added a column at the end of each row) * Expose device information to UE test report * Add support for metadata in Gauntlet test report for Horde Limitations: * Management through the UI is limited to which test is available through in the active worker node. That's mean Runtime only tests are not listed from a worker that is Editor(the default) and platform specific are not clearly identified. * For platforms, the mechanic to access their config and save it will remain to be done. In the meantime, it needs to be done manually through the target platform config file. #jira UE-125960 #jira UE-125974 #ROBOMERGE-AUTHOR: jerome.delattre #ROBOMERGE-SOURCE: CL 17607554 in //UE5/Main/... #ROBOMERGE-BOT: STARSHIP (Main -> Release-Engine-Test) (v871-17566257) [CL 17607557 by jerome delattre in ue5-release-engine-test branch]
923 lines
24 KiB
C++
923 lines
24 KiB
C++
// Copyright Epic Games, Inc. All Rights Reserved.
|
|
|
|
#include "AutomationReport.h"
|
|
#include "AutomationTestExcludelist.h"
|
|
#include "Misc/FilterCollection.h"
|
|
#include "HAL/FileManager.h"
|
|
#include "Misc/FileHelper.h"
|
|
#include "Misc/Paths.h"
|
|
|
|
/**
 * Constructs a report node for a single automation test (or a parent grouping node).
 * Smoke tests are enabled by default; tests found in the exclude list are flagged
 * to be skipped before execution.
 *
 * @param InTestInfo description of the test this report tracks
 * @param InIsParent true when this node groups child tests rather than being a runnable leaf
 */
FAutomationReport::FAutomationReport(FAutomationTestInfo& InTestInfo, bool InIsParent)
	: bEnabled( false )
	, bIsParent(InIsParent)
	, bNodeExpandInUI(false)
	, bSelfPassesFilter(false)
	, SupportFlags(0)
	, TestInfo( InTestInfo )
{
	// Enable smoke tests
	// NOTE(review): exact equality — a test combining SmokeFilter with other
	// flag bits is not auto-enabled here.
	if ( TestInfo.GetTestFlags() == EAutomationTestFlags::SmokeFilter )
	{
		bEnabled = true;
	}

	// Get exclude test info from Config/DefaultEngine.ini
	if (auto Entry = UAutomationTestExcludelist::Get()->GetExcludeTestEntry(TestInfo.GetFullTestPath()))
	{
		// Copy the matching entry and record whether the exclusion was inherited
		// from a parent path (propagated), so it can be distinguished later.
		ExcludeTestInfo = *Entry;
		ExcludeTestInfo.SetPropagation(TestInfo.GetFullTestPath());
		bNeedToSkip = true;
	}
}
|
|
|
|
void FAutomationReport::Empty()
|
|
{
|
|
//release references to all child tests
|
|
ChildReports.Empty();
|
|
ChildReportNameHashes.Empty();
|
|
FilteredChildReports.Empty();
|
|
}
|
|
|
|
/** Returns the parameter string recorded on the underlying test info. */
FString FAutomationReport::GetTestParameter() const
{
	const FString Parameter = TestInfo.GetTestParameter();
	return Parameter;
}
|
|
|
|
/** Returns the asset path associated with this test, as stored on the test info. */
FString FAutomationReport::GetAssetPath() const
{
	const FString AssetPath = TestInfo.GetAssetPath();
	return AssetPath;
}
|
|
|
|
/** Returns the command used to open this test's asset/context, from the test info. */
FString FAutomationReport::GetOpenCommand() const
{
	const FString OpenCommand = TestInfo.GetOpenCommand();
	return OpenCommand;
}
|
|
|
|
/** Returns the (non-display) test name, which doubles as the run command. */
FString FAutomationReport::GetCommand() const
{
	const FString TestName = TestInfo.GetTestName();
	return TestName;
}
|
|
|
|
/** Returns the human-readable name of this node by reference (no copy). */
const FString& FAutomationReport::GetDisplayName() const
{
	const FString& DisplayName = TestInfo.GetDisplayName();
	return DisplayName;
}
|
|
|
|
/** Returns the fully qualified (dotted) path of this test by reference. */
const FString& FAutomationReport::GetFullTestPath() const
{
	const FString& FullPath = TestInfo.GetFullTestPath();
	return FullPath;
}
|
|
|
|
/**
 * Returns the display name; parent nodes get the number of leaf tests
 * beneath them appended, e.g. "MyGroup (12)".
 */
FString FAutomationReport::GetDisplayNameWithDecoration() const
{
	// Internal nodes decorate their name with the leaf-descendant count.
	if (ChildReports.Num() > 0)
	{
		return FString::Printf(TEXT("%s (%d)"), *TestInfo.GetDisplayName(), GetTotalNumChildren());
	}

	// Leaf nodes use the plain display name.
	return TestInfo.GetDisplayName();
}
|
|
|
|
/**
 * Recursively counts the leaf tests beneath this node. Intermediate parent
 * nodes are not counted themselves.
 */
int32 FAutomationReport::GetTotalNumChildren() const
{
	int32 LeafCount = 0;
	for (const TSharedPtr<IAutomationReport>& Child : ChildReports)
	{
		const int32 GrandChildCount = Child->GetTotalNumChildren();
		// A child with no descendants is itself a leaf; otherwise add its leaves.
		LeafCount += (GrandChildCount == 0) ? 1 : GrandChildCount;
	}
	return LeafCount;
}
|
|
|
|
/**
 * Recursively counts the leaf tests in the filtered view of the tree.
 * Mirrors GetTotalNumChildren but walks FilteredChildReports instead.
 */
int32 FAutomationReport::GetTotalNumFilteredChildren() const
{
	int32 LeafCount = 0;
	for (const TSharedPtr<IAutomationReport>& Child : FilteredChildReports)
	{
		const int32 GrandChildCount = Child->GetTotalNumFilteredChildren();
		// Only leaves are counted; parents contribute their leaves' total.
		LeafCount += (GrandChildCount == 0) ? 1 : GrandChildCount;
	}
	return LeafCount;
}
|
|
|
|
/**
 * Collects the fully qualified (dot-separated) names of every enabled leaf
 * test in this subtree into OutEnabledTestNames.
 *
 * @param OutEnabledTestNames receives the qualified names of enabled leaves
 * @param CurrentPath dotted path of this node's parent (empty at the root)
 */
void FAutomationReport::GetEnabledTestNames(TArray<FString>& OutEnabledTestNames, FString CurrentPath) const
{
	// Extend the parent path with this node's display name.
	FString PathForThisNode = CurrentPath;
	if (!PathForThisNode.IsEmpty())
	{
		PathForThisNode += TEXT(".");
	}
	PathForThisNode += TestInfo.GetDisplayName();

	if (ChildReports.Num() == 0)
	{
		// Leaf: record the qualified name only when enabled.
		if (IsEnabled())
		{
			OutEnabledTestNames.Add(PathForThisNode);
		}
	}
	else
	{
		// Parent: recurse through the hierarchy with the extended path.
		for (const TSharedPtr<IAutomationReport>& Child : ChildReports)
		{
			Child->GetEnabledTestNames(OutEnabledTestNames, PathForThisNode);
		}
	}
}
|
|
|
|
|
|
void FAutomationReport::SetEnabledTests(const TArray<FString>& InEnabledTests, FString CurrentPath)
|
|
{
|
|
if (ChildReports.Num() == 0)
|
|
{
|
|
//Find of the full name of this test and see if it is in our list
|
|
const FString FullTestName = CurrentPath.Len() > 0 ? CurrentPath.AppendChar(TCHAR('.')) + TestInfo.GetDisplayName() : TestInfo.GetDisplayName();
|
|
const bool bNewEnabled = InEnabledTests.Contains(FullTestName);
|
|
SetEnabled(bNewEnabled);
|
|
}
|
|
else
|
|
{
|
|
if( !CurrentPath.IsEmpty() )
|
|
{
|
|
CurrentPath += TEXT(".");
|
|
}
|
|
CurrentPath += TestInfo.GetDisplayName();
|
|
|
|
//recurse through the hierarchy
|
|
for (int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex)
|
|
{
|
|
ChildReports[ChildIndex]->SetEnabledTests(InEnabledTests,CurrentPath);
|
|
}
|
|
|
|
//Parent nodes should be checked if all of its children are
|
|
const int32 TotalNumChildern = GetTotalNumChildren();
|
|
const int32 EnabledChildren = GetEnabledTestsNum();
|
|
bEnabled = (TotalNumChildern == EnabledChildren);
|
|
}
|
|
}
|
|
|
|
/** Recursively counts the enabled leaf tests in this subtree. */
int32 FAutomationReport::GetEnabledTestsNum() const
{
	// Leaves contribute 1 when enabled; parents sum over their children.
	if (ChildReports.Num() == 0)
	{
		return IsEnabled() ? 1 : 0;
	}

	int32 EnabledCount = 0;
	for (const TSharedPtr<IAutomationReport>& Child : ChildReports)
	{
		EnabledCount += Child->GetEnabledTestsNum();
	}
	return EnabledCount;
}
|
|
|
|
/** Whether this test is currently checked to run in the next execution. */
bool FAutomationReport::IsEnabled() const
{
	return bEnabled;
}
|
|
|
|
void FAutomationReport::SetEnabled(bool bShouldBeEnabled)
|
|
{
|
|
bEnabled = bShouldBeEnabled;
|
|
//set children to the same value
|
|
for (int32 ChildIndex = 0; ChildIndex < FilteredChildReports.Num(); ++ChildIndex)
|
|
{
|
|
FilteredChildReports[ChildIndex]->SetEnabled(bShouldBeEnabled);
|
|
}
|
|
}
|
|
|
|
void FAutomationReport::SetSupport(const int32 ClusterIndex)
|
|
{
|
|
SupportFlags |= (1<<ClusterIndex);
|
|
|
|
//ensure there is enough room in the array for status per platform
|
|
for (int32 i = 0; i <= ClusterIndex; ++i)
|
|
{
|
|
//Make sure we have enough results for a single pass
|
|
TArray<FAutomationTestResults> AutomationTestResult;
|
|
AutomationTestResult.Add( FAutomationTestResults() );
|
|
Results.Add( AutomationTestResult );
|
|
}
|
|
}
|
|
|
|
bool FAutomationReport::IsSupported(const int32 ClusterIndex) const
|
|
{
|
|
return (SupportFlags & (1<<ClusterIndex)) ? true : false;
|
|
}
|
|
|
|
|
|
/** Returns the EAutomationTestFlags bitmask describing this test's filters/context. */
uint32 FAutomationReport::GetTestFlags( ) const
{
	return TestInfo.GetTestFlags();
}
|
|
|
|
/** Returns the source file this test was declared in, per the test info. */
FString FAutomationReport::GetSourceFile() const
{
	const FString SourceFile = TestInfo.GetSourceFile();
	return SourceFile;
}
|
|
|
|
/** Returns the line number of the test's declaration in its source file. */
int32 FAutomationReport::GetSourceFileLine() const
{
	const int32 LineNumber = TestInfo.GetSourceFileLine();
	return LineNumber;
}
|
|
|
|
void FAutomationReport::SetTestFlags( const uint32 InTestFlags)
|
|
{
|
|
TestInfo.AddTestFlags( InTestFlags );
|
|
|
|
if ( InTestFlags == EAutomationTestFlags::SmokeFilter )
|
|
{
|
|
bEnabled = true;
|
|
}
|
|
}
|
|
|
|
/** True for grouping nodes created for intermediate path segments (non-runnable). */
const bool FAutomationReport::IsParent()
{
	return bIsParent;
}
|
|
|
|
const bool FAutomationReport::IsSmokeTest( )
|
|
{
|
|
return GetTestFlags( ) & EAutomationTestFlags::SmokeFilter ? true : false;
|
|
}
|
|
|
|
/**
 * Re-evaluates this node and its whole subtree against the filter collection.
 * Rebuilds FilteredChildReports and decides whether this node should start
 * expanded in the UI (it expands when any direct child passed).
 *
 * @param InFilter the active filter collection
 * @param ParentPassedFilter whether this node's parent passed the filter
 * @return true when this node or any descendant passes the filter
 */
bool FAutomationReport::SetFilter( TSharedPtr< AutomationFilterCollection > InFilter, const bool ParentPassedFilter )
{
	//assume that this node and all its children fail to pass the filter test
	bool bSelfOrChildPassedFilter = false;

	//assume this node should not be expanded in the UI
	bNodeExpandInUI = false;

	//test for empty search string or matching search string
	bSelfPassesFilter = InFilter->PassesAllFilters( SharedThis( this ) );

	// Parent nodes inherit a pass from their own parent so the subtree stays visible.
	if ( IsParent() && ParentPassedFilter )
	{
		bSelfPassesFilter = true;
	}

	//clear the currently filtered tests array
	FilteredChildReports.Empty();

	//see if any children pass the filter
	for( int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex )
	{
		bool ThisChildPassedFilter = ChildReports[ChildIndex]->SetFilter( InFilter, bSelfPassesFilter );

		if( ThisChildPassedFilter || bSelfPassesFilter || ParentPassedFilter )
		{
			// Keep passing leaves unconditionally; keep parent children only when
			// they still have visible (filtered) children of their own.
			if ( !ChildReports[ChildIndex]->IsParent() || ChildReports[ChildIndex]->GetFilteredChildren().Num() > 0 )
			{
				FilteredChildReports.Add(ChildReports[ChildIndex]);
			}
		}

		if ( bNodeExpandInUI == false && ThisChildPassedFilter == true )
		{
			// A child node has passed the filter, so we want to expand this node in the UI
			bNodeExpandInUI = true;
		}
	}

	//if we passed name, speed, and status tests
	if( bSelfPassesFilter || bNodeExpandInUI )
	{
		//Passed the filter!
		bSelfOrChildPassedFilter = true;
	}

	return bSelfOrChildPassedFilter;
}
|
|
|
|
/** Returns the children that passed the most recent SetFilter pass (mutable view). */
TArray<TSharedPtr<IAutomationReport> >& FAutomationReport::GetFilteredChildren()
{
	return FilteredChildReports;
}
|
|
|
|
/** Returns all child reports, regardless of filtering (mutable view). */
TArray<TSharedPtr<IAutomationReport> >& FAutomationReport::GetChildReports()
{
	return ChildReports;
}
|
|
|
|
void FAutomationReport::ClustersUpdated(const int32 NumClusters)
|
|
{
|
|
TestInfo.ResetNumDevicesRunningTest();
|
|
|
|
//Fixup Support flags
|
|
SupportFlags = 0;
|
|
for (int32 i = 0; i <= NumClusters; ++i)
|
|
{
|
|
SupportFlags |= (1<<i);
|
|
}
|
|
|
|
//Fixup Results array
|
|
if( NumClusters > Results.Num() )
|
|
{
|
|
for( int32 ClusterIndex = Results.Num(); ClusterIndex < NumClusters; ++ClusterIndex )
|
|
{
|
|
//Make sure we have enough results for a single pass
|
|
TArray<FAutomationTestResults> AutomationTestResult;
|
|
AutomationTestResult.Add( FAutomationTestResults() );
|
|
Results.Add( AutomationTestResult );
|
|
}
|
|
}
|
|
else if( NumClusters < Results.Num() )
|
|
{
|
|
Results.RemoveAt(NumClusters, Results.Num() - NumClusters);
|
|
}
|
|
|
|
//recurse to children
|
|
for (int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex)
|
|
{
|
|
ChildReports[ChildIndex]->ClustersUpdated(NumClusters);
|
|
}
|
|
}
|
|
|
|
void FAutomationReport::ResetForExecution(const int32 NumTestPasses)
|
|
{
|
|
TestInfo.ResetNumDevicesRunningTest();
|
|
|
|
//if this test is enabled
|
|
if (IsEnabled())
|
|
{
|
|
for (int32 ClusterIndex = 0; ClusterIndex < Results.Num(); ++ClusterIndex)
|
|
{
|
|
//Make sure we have enough results
|
|
if( NumTestPasses > Results[ClusterIndex].Num() )
|
|
{
|
|
for(int32 PassCount = Results[ClusterIndex].Num(); PassCount < NumTestPasses; ++PassCount)
|
|
{
|
|
Results[ClusterIndex].Add( FAutomationTestResults() );
|
|
}
|
|
}
|
|
else if( NumTestPasses < Results[ClusterIndex].Num() )
|
|
{
|
|
Results[ClusterIndex].RemoveAt(NumTestPasses, Results[ClusterIndex].Num() - NumTestPasses);
|
|
}
|
|
|
|
for( int32 PassIndex = 0; PassIndex < Results[ClusterIndex].Num(); ++PassIndex)
|
|
{
|
|
//reset all stats
|
|
Results[ClusterIndex][PassIndex].Reset();
|
|
}
|
|
}
|
|
}
|
|
|
|
//recurse to children
|
|
for (int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex)
|
|
{
|
|
ChildReports[ChildIndex]->ResetForExecution(NumTestPasses);
|
|
}
|
|
}
|
|
|
|
/**
 * Stores the results for one (cluster, pass) slot. Artifacts already gathered
 * for that slot (e.g. via AddArtifact) are preserved by appending them onto
 * the incoming results. A Fail result with zero logged errors gets a synthetic
 * error event so failures are never silent. Finally notifies listeners so the
 * UI can refresh the selected test's log.
 */
void FAutomationReport::SetResults( const int32 ClusterIndex, const int32 PassIndex, const FAutomationTestResults& InResults )
{
	//verify this is a platform this test is aware of
	check((ClusterIndex >= 0) && (ClusterIndex < Results.Num()));
	check((PassIndex >= 0) && (PassIndex < Results[ClusterIndex].Num()));

	if( InResults.State == EAutomationState::InProcess )
	{
		TestInfo.InformOfNewDeviceRunningTest();
	}

	// Copy out artifacts received before this call so overwriting the slot
	// below does not lose them.
	const TArray<FAutomationArtifact> ExistingArtifacts = Results[ClusterIndex][PassIndex].Artifacts;
	Results[ClusterIndex][PassIndex] = InResults;
	Results[ClusterIndex][PassIndex].Artifacts.Append(ExistingArtifacts);

	// Add an error report if none was received
	if ( InResults.State == EAutomationState::Fail && InResults.GetErrorTotal() == 0 )
	{
		FString Msg = FString::Printf(TEXT("Test %s failed, but no errors were logged."), *TestInfo.GetFullTestPath());
		Results[ClusterIndex][PassIndex].AddEvent(FAutomationEvent(EAutomationEventType::Error, *Msg));
	}

	// While setting the results of the test cause the log of any selected test to refresh
	OnSetResults.ExecuteIfBound(AsShared());
}
|
|
|
|
void FAutomationReport::AddArtifact(const int32 ClusterIndex, const int32 PassIndex, const FAutomationArtifact& Artifact)
|
|
{
|
|
//verify this is a platform this test is aware of
|
|
check(( ClusterIndex >= 0 ) && ( ClusterIndex < Results.Num() ));
|
|
check(( PassIndex >= 0 ) && ( PassIndex < Results[ClusterIndex].Num() ));
|
|
|
|
Results[ClusterIndex][PassIndex].Artifacts.Add(Artifact);
|
|
}
|
|
|
|
/**
 * Accumulates completion statistics for the given (cluster, pass) into
 * OutCompletionState, recursing over the whole subtree. Only supported leaf
 * nodes contribute; each counter is split between enabled and disabled tests.
 */
void FAutomationReport::GetCompletionStatus(const int32 ClusterIndex, const int32 PassIndex, FAutomationCompleteState& OutCompletionState)
{
	//if this test is enabled and a leaf test
	if (IsSupported(ClusterIndex) && (ChildReports.Num()==0))
	{
		EAutomationState CurrentState = Results[ClusterIndex][PassIndex].State;
		//Enabled and In-Process
		if (IsEnabled())
		{
			OutCompletionState.TotalEnabled++;
			if (CurrentState == EAutomationState::InProcess)
			{
				OutCompletionState.NumEnabledInProcess++;
			}
		}

		//Warnings
		if (Results[ClusterIndex][PassIndex].GetWarningTotal() > 0)
		{
			IsEnabled() ? OutCompletionState.NumEnabledTestsWarnings++ : OutCompletionState.NumDisabledTestsWarnings++;
		}

		//Test results
		if (CurrentState == EAutomationState::Success)
		{
			IsEnabled() ? OutCompletionState.NumEnabledTestsPassed++ : OutCompletionState.NumDisabledTestsPassed++;
		}
		else if (CurrentState == EAutomationState::Fail)
		{
			IsEnabled() ? OutCompletionState.NumEnabledTestsFailed++ : OutCompletionState.NumDisabledTestsFailed++;
		}
		else if( CurrentState == EAutomationState::Skipped )
		{
			// Skipped counts toward "couldn't be run" (per the changelist notes,
			// Skipped also covers the former NotEnoughParticipant state).
			IsEnabled() ? OutCompletionState.NumEnabledTestsCouldntBeRun++ : OutCompletionState.NumDisabledTestsCouldntBeRun++;
		}
	}
	//recurse to children
	for (int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex)
	{
		ChildReports[ChildIndex]->GetCompletionStatus(ClusterIndex,PassIndex, OutCompletionState);
	}
}
|
|
|
|
|
|
/**
 * Returns the state stored for the given (cluster, pass). Out-of-range
 * indices report NotRun rather than asserting.
 */
EAutomationState FAutomationReport::GetState(const int32 ClusterIndex, const int32 PassIndex) const
{
	const bool bValidCluster = (ClusterIndex >= 0) && (ClusterIndex < Results.Num());
	if (bValidCluster && (PassIndex >= 0) && (PassIndex < Results[ClusterIndex].Num()))
	{
		return Results[ClusterIndex][PassIndex].State;
	}

	return EAutomationState::NotRun;
}
|
|
|
|
void FAutomationReport::SetState(const EAutomationState State)
|
|
{
|
|
if (IsEnabled())
|
|
{
|
|
for (int32 ResultsIndex = 0; ResultsIndex < Results.Num(); ++ResultsIndex)
|
|
{
|
|
for (int32 PassIndex = 0; PassIndex < Results[ResultsIndex].Num(); ++PassIndex)
|
|
{
|
|
Results[ResultsIndex][PassIndex].State = State;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
/** Returns the results slot for the given cluster and pass (indices must be valid). */
const FAutomationTestResults& FAutomationReport::GetResults( const int32 ClusterIndex, const int32 PassIndex )
{
	return Results[ClusterIndex][PassIndex];
}
|
|
|
|
/** Returns how many pass-result slots exist for the given cluster. */
const int32 FAutomationReport::GetNumResults( const int32 ClusterIndex )
{
	const int32 NumPassSlots = Results[ClusterIndex].Num();
	return NumPassSlots;
}
|
|
|
|
/**
 * Returns the index of the pass currently in flight for the given cluster:
 * one before the first pass (scanning from index 1) still in the NotRun
 * state. Returns 0 for unsupported clusters or when pass 1 has not run, and
 * Num()-1 when every pass has been started. The scan deliberately starts at
 * pass 1 so the "PassIndex - 1" result is never negative.
 */
const int32 FAutomationReport::GetCurrentPassIndex( const int32 ClusterIndex )
{
	int32 PassIndex = 1;

	if( IsSupported(ClusterIndex) )
	{
		for(; PassIndex < Results[ClusterIndex].Num(); ++PassIndex )
		{
			if( Results[ClusterIndex][PassIndex].State == EAutomationState::NotRun )
			{
				break;
			}
		}
	}

	return PassIndex - 1;
}
|
|
|
|
/** Returns the game instance name recorded on the cluster's first-pass results. */
FString FAutomationReport::GetGameInstanceName( const int32 ClusterIndex )
{
	return Results[ClusterIndex][0].GameInstance;
}
|
|
|
|
/**
 * Finds or creates the child-report chain for InTestInfo. The display name is
 * split on the first "." to locate/create the immediate child, then recursion
 * continues with the remainder until the leaf is reached. Children are kept
 * sorted (parent nodes before leaves, alphabetical within each group) and a
 * hash set of child names is maintained to short-circuit lookups.
 *
 * @param InTestInfo test description; its display name is consumed segment by segment (mutated!)
 * @param ClusterIndex device cluster this test was discovered on
 * @param NumPasses total passes planned (forwarded through the recursion)
 * @return the leaf report corresponding to the full test
 */
TSharedPtr<IAutomationReport> FAutomationReport::EnsureReportExists(FAutomationTestInfo& InTestInfo, const int32 ClusterIndex, const int32 NumPasses)
{
	//Split New Test Name by the first "." found
	FString NameToMatch = InTestInfo.GetDisplayName();
	FString FullPath = InTestInfo.GetFullTestPath();
	FString NameRemainder;
	//if this is a leaf test (no ".")
	if (!InTestInfo.GetDisplayName().Split(TEXT("."), &NameToMatch, &NameRemainder))
	{
		NameToMatch = InTestInfo.GetDisplayName();
	}

	if ( NameRemainder.Len() != 0 )
	{
		// Set the test info name to be the remaining string
		InTestInfo.SetDisplayName( NameRemainder );
		// Update the fullpath
		// Truncate FullPath to just before this segment, then re-append it so
		// FullPath names the intermediate (parent) node being ensured here.
		int32 Pos = FullPath.Find(NameToMatch + TEXT("."));
		if (Pos >= 0)
		{
			FullPath.LeftChopInline(FullPath.Len() - Pos);
		}
		FullPath += NameToMatch;
	}

	uint32 NameToMatchHash = GetTypeHash(NameToMatch);

	TSharedPtr<IAutomationReport> MatchTest;
	//check hash table first to see if it exists yet
	if (ChildReportNameHashes.Contains(NameToMatchHash))
	{
		//go backwards. Most recent event most likely matches
		int32 TestIndex = ChildReports.Num() - 1;
		for (; TestIndex >= 0; --TestIndex)
		{
			//if the name matches
			if (ChildReports[TestIndex]->GetDisplayName() == NameToMatch)
			{
				MatchTest = ChildReports[TestIndex];
				break;
			}
		}
	}

	//if there isn't already a test like this
	if (!MatchTest.IsValid())
	{
		if ( NameRemainder.Len() == 0 )
		{
			// Create a new leaf node
			MatchTest = MakeShareable(new FAutomationReport(InTestInfo));
		}
		else
		{
			// Create a parent node
			FAutomationTestInfo ParentTestInfo(NameToMatch, FullPath, TEXT(""), InTestInfo.GetTestFlags(), InTestInfo.GetNumParticipantsRequired());
			MatchTest = MakeShareable(new FAutomationReport(ParentTestInfo, true));
		}

		// Sort tests alphabetically
		// Comparator: parents sort before leaves; within the same kind, by name.
		const int32 InsertAt = Algo::UpperBound<>(ChildReports, MatchTest, [](const TSharedPtr<IAutomationReport>& ReportA, const TSharedPtr<IAutomationReport>& ReportB)
		{
			bool AIsLeafNode = !ReportA->IsParent();
			bool BIsLeafNode = !ReportB->IsParent();

			if (AIsLeafNode == BIsLeafNode) // both leaves or both parents => normal comparison
			{
				return ReportA->GetDisplayName() < ReportB->GetDisplayName();
			}
			else // leaf and parent => A is less than B when B is the leaf
			{
				return BIsLeafNode;
			}
		}
		);

		// Insert new test (sorted)
		ChildReports.Insert(MatchTest, InsertAt);
		// NOTE(review): the map stores hash->hash; it is effectively a set of
		// known child-name hashes used only to skip the linear search above.
		ChildReportNameHashes.Add(NameToMatchHash, NameToMatchHash);
	}

	//mark this test as supported on a particular platform
	MatchTest->SetSupport(ClusterIndex);

	MatchTest->SetTestFlags( InTestInfo.GetTestFlags() );
	// Keep the maximum participant requirement seen for this node.
	MatchTest->SetNumParticipantsRequired( MatchTest->GetNumParticipantsRequired() > InTestInfo.GetNumParticipantsRequired() ? MatchTest->GetNumParticipantsRequired() : InTestInfo.GetNumParticipantsRequired() );

	TSharedPtr<IAutomationReport> FoundTest;
	//if this is a leaf node
	if (NameRemainder.Len() == 0)
	{
		FoundTest = MatchTest;
	}
	else
	{
		//recurse to add to the proper layer
		FoundTest = MatchTest->EnsureReportExists(InTestInfo, ClusterIndex, NumPasses);
	}

	return FoundTest;
}
|
|
|
|
|
|
/**
 * Depth-first search for the next enabled, supported leaf test that has not
 * run yet on the given (cluster, pass). Clears bOutAllTestsComplete whenever
 * any enabled test is still pending or in process.
 *
 * @return the next report to run, or an invalid pointer when none is pending
 */
TSharedPtr<IAutomationReport> FAutomationReport::GetNextReportToExecute(bool& bOutAllTestsComplete, const int32 ClusterIndex, const int32 PassIndex, const int32 NumDevicesInCluster)
{
	TSharedPtr<IAutomationReport> NextReport;

	if (ChildReports.Num() > 0)
	{
		// Parent node: delegate to children, stopping at the first match.
		for (const TSharedPtr<IAutomationReport>& Child : ChildReports)
		{
			NextReport = Child->GetNextReportToExecute(bOutAllTestsComplete, ClusterIndex, PassIndex, NumDevicesInCluster);
			if (NextReport.IsValid())
			{
				break;
			}
		}
		return NextReport;
	}

	// Leaf node: consider self.
	if (IsEnabled() && IsSupported(ClusterIndex))
	{
		const EAutomationState TestState = GetState(ClusterIndex, PassIndex);

		// A pending or in-flight enabled test means the run is not finished.
		if (TestState == EAutomationState::NotRun || TestState == EAutomationState::InProcess)
		{
			bOutAllTestsComplete = false;
		}

		// Only a test that has not started yet can be handed out next.
		if (TestState == EAutomationState::NotRun)
		{
			NextReport = AsShared();
		}
	}

	return NextReport;
}
|
|
|
|
/** Collects every enabled leaf report in this subtree into OutReports. */
void FAutomationReport::GetEnabledTestReports(TArray<TSharedPtr<IAutomationReport>>& OutReports)
{
	// Leaf node: collect self when enabled.
	if (ChildReports.Num() == 0)
	{
		if (IsEnabled())
		{
			OutReports.Add(AsShared());
		}
		return;
	}

	// Parent node: gather from every child.
	for (const TSharedPtr<IAutomationReport>& Child : ChildReports)
	{
		Child->GetEnabledTestReports(OutReports);
	}
}
|
|
|
|
|
|
const bool FAutomationReport::HasErrors()
|
|
{
|
|
bool bHasErrors = false;
|
|
for ( int32 ClusterIndex = 0; ClusterIndex < Results.Num(); ++ClusterIndex )
|
|
{
|
|
for ( int32 PassIndex = 0; PassIndex < Results[ClusterIndex].Num(); ++PassIndex )
|
|
{
|
|
//if we want tests with errors and this test had them OR we want tests warnings and this test had them
|
|
if ( Results[ClusterIndex][PassIndex].GetErrorTotal() > 0 )
|
|
{
|
|
//mark this test as having passed the results filter
|
|
bHasErrors = true;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
return bHasErrors;
|
|
}
|
|
|
|
const bool FAutomationReport::HasWarnings()
|
|
{
|
|
bool bHasWarnings = false;
|
|
for ( int32 ClusterIndex = 0; ClusterIndex < Results.Num(); ++ClusterIndex )
|
|
{
|
|
for ( int32 PassIndex = 0; PassIndex < Results[ClusterIndex].Num(); ++PassIndex )
|
|
{
|
|
//if we want tests with errors and this test had them OR we want tests warnings and this test had them
|
|
if ( Results[ClusterIndex][PassIndex].GetWarningTotal() > 0 )
|
|
{
|
|
//mark this test as having passed the results filter
|
|
bHasWarnings = true;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
return bHasWarnings;
|
|
}
|
|
|
|
/**
 * Computes the [min, max] duration range for this subtree. Child ranges are
 * summed (i.e. the totals across children), then this node's own completed
 * (Success/Fail) pass durations widen the range.
 *
 * @return false when no completed results exist anywhere in the subtree, in
 *         which case OutMinTime/OutMaxTime hold the MAX_FLT/0 sentinels.
 */
const bool FAutomationReport::GetDurationRange(float& OutMinTime, float& OutMaxTime)
{
	//assume we haven't found any tests that have completed successfully
	OutMinTime = MAX_FLT;
	OutMaxTime = 0.0f;
	bool bAnyResultsFound = false;

	//keep sum of all child tests
	float ChildTotalMinTime = 0.0f;
	float ChildTotalMaxTime = 0.0f;
	for (int32 ReportIndex = 0; ReportIndex < ChildReports.Num(); ++ReportIndex)
	{
		float ChildMinTime = MAX_FLT;
		float ChildMaxTime = 0.0f;
		if (ChildReports[ReportIndex]->GetDurationRange(ChildMinTime, ChildMaxTime))
		{
			// Sum child ranges: the subtree min/max are totals over children.
			ChildTotalMinTime += ChildMinTime;
			ChildTotalMaxTime += ChildMaxTime;
			bAnyResultsFound = true;
		}
	}

	//if any child test had valid timings
	if (bAnyResultsFound)
	{
		OutMinTime = ChildTotalMinTime;
		OutMaxTime = ChildTotalMaxTime;
	}

	// Widen the range with this node's own completed (Success/Fail) passes.
	for (int32 ClusterIndex = 0; ClusterIndex < Results.Num(); ++ClusterIndex )
	{
		for( int32 PassIndex = 0; PassIndex < Results[ClusterIndex].Num(); ++PassIndex)
		{
			if( Results[ClusterIndex][PassIndex].State == EAutomationState::Success ||
				Results[ClusterIndex][PassIndex].State == EAutomationState::Fail)
			{
				OutMinTime = FMath::Min(OutMinTime, Results[ClusterIndex][PassIndex].Duration );
				OutMaxTime = FMath::Max(OutMaxTime, Results[ClusterIndex][PassIndex].Duration );
				bAnyResultsFound = true;
			}
		}
	}
	return bAnyResultsFound;
}
|
|
|
|
|
|
/** Returns the number of devices currently reported as running this test. */
const int32 FAutomationReport::GetNumDevicesRunningTest() const
{
	return TestInfo.GetNumDevicesRunningTest();
}
|
|
|
|
|
|
/** Returns how many network participants this test needs before it can run. */
const int32 FAutomationReport::GetNumParticipantsRequired() const
{
	return TestInfo.GetNumParticipantsRequired();
}
|
|
|
|
|
|
/** Sets the required participant count; forwarded to the test info, which owns it. */
void FAutomationReport::SetNumParticipantsRequired( const int32 NewCount )
{
	TestInfo.SetNumParticipantsRequired( NewCount );
}
|
|
|
|
|
|
bool FAutomationReport::IncrementNetworkCommandResponses()
|
|
{
|
|
NumberNetworkResponsesReceived++;
|
|
return (NumberNetworkResponsesReceived == TestInfo.GetNumParticipantsRequired());
|
|
}
|
|
|
|
|
|
/** Resets the network response counter ahead of a new round of commands. */
void FAutomationReport::ResetNetworkCommandResponses()
{
	NumberNetworkResponsesReceived = 0;
}
|
|
|
|
|
|
/** Whether the UI should show this node expanded (set during SetFilter). */
const bool FAutomationReport::ExpandInUI() const
{
	return bNodeExpandInUI;
}
|
|
|
|
|
|
void FAutomationReport::StopRunningTest()
|
|
{
|
|
if( IsEnabled() )
|
|
{
|
|
for( int32 ResultsIndex = 0; ResultsIndex < Results.Num(); ++ResultsIndex )
|
|
{
|
|
for( int32 PassIndex = 0; PassIndex < Results[ResultsIndex].Num(); ++PassIndex)
|
|
{
|
|
if( Results[ResultsIndex][PassIndex].State == EAutomationState::InProcess )
|
|
{
|
|
Results[ResultsIndex][PassIndex].State = EAutomationState::NotRun;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Recurse to children
|
|
for( int32 ChildIndex = 0; ChildIndex < ChildReports.Num(); ++ChildIndex )
|
|
{
|
|
ChildReports[ChildIndex]->StopRunningTest();
|
|
}
|
|
}
|
|
|
|
/**
 * Whether this test is marked for exclusion. When skipped, optionally reports
 * the exclusion reason and whether a warning should be emitted.
 *
 * @param OutReason optional receiver for the exclusion reason
 * @param OutWarn optional receiver for the warn-on-skip flag
 */
bool FAutomationReport::IsToBeSkipped(FName* OutReason, bool* OutWarn) const
{
	if (!bNeedToSkip)
	{
		return false;
	}

	if (OutReason != nullptr)
	{
		*OutReason = ExcludeTestInfo.Reason;
	}

	if (OutWarn != nullptr)
	{
		*OutWarn = ExcludeTestInfo.Warn;
	}

	return true;
}
|
|
|
|
/** True when the skip comes from an excluded parent section rather than this test directly. */
bool FAutomationReport::IsToBeSkippedByPropagation() const
{
	return bNeedToSkip && ExcludeTestInfo.bIsPropagated;
}
|
|
|
|
/**
 * Adds or removes this test from the exclude (skip) list and propagates the
 * change to children. Direct (non-propagated) changes also update and save
 * the persistent exclude-list config; propagated changes only touch the
 * in-memory state of each node.
 *
 * @param bEnableSkip true to exclude the test, false to re-include it
 * @param Template exclusion entry to copy details from (required when enabling directly)
 * @param bFromPropagation true when invoked recursively from a parent node
 */
void FAutomationReport::SetSkipFlag(bool bEnableSkip, const FAutomationTestExcludelistEntry* Template, bool bFromPropagation)
{
	// Already in the requested state: nothing to do unless we are re-applying
	// an exclusion with a new template, in which case the stale config entry
	// is removed here and re-added below.
	if (IsToBeSkipped() == bEnableSkip)
	{
		if (!bEnableSkip || Template == nullptr)
			return;

		if (!bFromPropagation)
		{
			// Remove previous entry in the config
			UAutomationTestExcludelist::Get()->RemoveFromExcludeTest(TestInfo.GetFullTestPath());
		}
	}

	if (!bFromPropagation && !ExcludeTestInfo.IsEmpty() && ExcludeTestInfo.bIsPropagated)
		return; // Propagated exclusion can't be changed directly

	bNeedToSkip = bEnableSkip;

	if (Template != nullptr)
	{
		// Update the entry
		ExcludeTestInfo = *Template;
		ExcludeTestInfo.bIsPropagated = bFromPropagation;
	}

	if (bFromPropagation)
	{
		// Clearing a propagated skip also clears the cached entry details.
		if (!bNeedToSkip)
		{
			ExcludeTestInfo.Reset();
		}
	}
	else
	{
		// Direct change: persist it to the exclude-list config and save.
		auto ExcludedTestCached = UAutomationTestExcludelist::Get();
		if (bNeedToSkip)
		{
			check(Template != nullptr);
			ExcludedTestCached->AddToExcludeTest(TestInfo.GetFullTestPath(), *Template);
		}
		else
		{
			ExcludedTestCached->RemoveFromExcludeTest(TestInfo.GetFullTestPath());
		}

		ExcludedTestCached->SaveConfig();
	}

	// Propagate to children
	if (IsParent())
	{
		for (IAutomationReportPtr Child : GetChildReports())
		{
			Child->SetSkipFlag(bNeedToSkip, Template, true);
		}
	}

}
|
|
|
|
/**
 * Builds the exclusion options for this test. Stamps the entry with this
 * test's full path first so the options reflect the correct target.
 */
TSharedPtr<FAutomationTestExcludeOptions> FAutomationReport::GetExcludeOptions()
{
	ExcludeTestInfo.Test = *TestInfo.GetFullTestPath();
	return ExcludeTestInfo.GetOptions();
}
|
|
|