From 29caf18ec3aa55043bb7a90646d9f7229e4f744e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Raimund=20Andr=C3=A9e=20=5BMSFT=5D?=
Date: Sat, 23 Apr 2022 10:46:04 +0200
Subject: [PATCH] New major release (#137)

* Added required module versions of 'Datum', 'Datum.InvokeCommand' and 'Sampler.DscPipeline'
* Updated gitversion
* Dynamic data test
* More dynamic config data
* Added test for making sure DSC resource modules are specified correctly
* More test improvements
* Updated 'datum' and 'Datum.InvokeCommand'
* Updated tests
* Removed dependencies for now and added them to the repo
* Removed another redundancy
* Fixed NodeName
* Updated to a new datum version
* Added 'DatumHandlersThrowOnError'
* Key for DSC Meta Config can be configured in 'datum.yml' with 'DscLocalConfigurationManagerKeyName'
* Renamed 'Get-DatumSourceFile' to 'Get-RelativeFileName' in config data
* Updated to latest versions of datum and Datum.InvokeCommand handler
* Added error handling when loading nodes
* Fixed yaml syntax test which did not work yet
* Fixed variable
* Removed modules
* Restored gitignore to default
* Updated modules and removed many that are no longer needed
* Added 'xDscDiagnostics'
* Moved reports
* Replaced 'CommonTasks' in lab with 'DscConfig.Demo'
* Using latest Gallery releases
* Formatting
* Work in progress on documentation
* Work in progress on documentation
* Replaced gitversion task
* Finished updating exercises
* Enabled 'WithYAML'
* Undo last change
---
 .build/TestBuildAcceptance.build.ps1 | 91 -------------
 .build/TestConfigData.build.ps1 | 99 --------------
 .gitignore | 15 ---
 Exercises/README.md | 12 +-
 Exercises/Task2/Exercise1.md | 114 +++++++++-------
 Exercises/Task2/Exercise2.md | 74 ++++++-----
 Exercises/Task2/Exercise3.md | 38 +++---
 Exercises/Task2/Exercise4.md | 91 +++++++++----
 Exercises/Task2/StretchGoal.md | 122 ++++++++++++------
 Exercises/Task3/Exercise1.md | 12 +-
 Exercises/Task3/Exercise2.md | 53 ++------
 Exercises/Task3/Exercise3.md | 24 ++--
 Exercises/Task3/Exercise4.md | 10 +-
 Exercises/Task3/Exercise5.md | 14 +-
 Lab/20 Lab Customizations.ps1 | 4 +-
 Lab/31 New Release Pipeline CommonTasks.ps1 | 4 +-
 Lab/32 New Release Pipeline DscWorkshop.ps1 | 4 +-
 .../{harmonized => }/NodeAdditionalInfo.rdl | 0
 .../NodeConfigurationData.rdl | 0
 Lab/Reports/{harmonized => }/NodeMetaData.rdl | 0
 .../{harmonized => }/NodeStatusOverview.rdl | 0
 .../{ => old}/AdditionalNodeInformation.rdl | 0
 Lab/Reports/{ => old}/ConfigurationData.rdl | 0
 Lab/Reports/{ => old}/DSC Apply Status.rdl | 0
 Lab/Reports/{ => old}/Node Metadata.rdl | 0
 Lab/Reports/{ => old}/NodeStatusSimple.rdl | 0
 Lab/Reports/{ => old}/NotInDesiredState.rdl | 0
 README.md | 4 +-
 RequiredModules.psd1 | 37 +-----
 azure-pipelines.yml | 17 ++-
 source/AllNodes/Dev/DSCFile01.yml | 16 +--
 source/AllNodes/Dev/DSCWeb01.yml | 14 +-
 source/AllNodes/Prod/DSCFile03.yml | 14 +-
 source/AllNodes/Prod/DSCWeb03.yml | 14 +-
 source/AllNodes/Test/DSCFile02.yml | 14 +-
 source/AllNodes/Test/DSCWeb02.yml | 14 +-
 source/Baselines/DscLcm.yml | 2 +-
 source/Baselines/Security.yml | 2 +-
 source/Baselines/Server.yml | 18 ++-
 source/Datum.yml | 15 ++-
 source/Environment/Dev.yml | 10 +-
 source/Environment/Prod.yml | 8 +-
 source/Environment/Test.yml | 8 +-
 source/Global/Domain.yml | 6 +
 source/Locations/Frankfurt.yml | 4 +-
 source/Locations/London.yml | 4 +-
 source/Locations/Singapore.yml | 4 +-
 source/Locations/Tokio.yml | 4 +-
 source/Roles/DomainController.yml | 10 +-
 source/Roles/FileServer.yml | 4 +-
 source/Roles/WebServer.yml | 3 +-
tests/Acceptance/TestMofFiles.Tests.ps1 | 43 ++++-- tests/ConfigData/CompositeResources.Tests.ps1 | 100 ++++++++++++++ tests/ConfigData/ConfigData.Tests.ps1 | 81 ++++++------ 54 files changed, 625 insertions(+), 626 deletions(-) delete mode 100644 .build/TestBuildAcceptance.build.ps1 delete mode 100644 .build/TestConfigData.build.ps1 rename Lab/Reports/{harmonized => }/NodeAdditionalInfo.rdl (100%) rename Lab/Reports/{harmonized => }/NodeConfigurationData.rdl (100%) rename Lab/Reports/{harmonized => }/NodeMetaData.rdl (100%) rename Lab/Reports/{harmonized => }/NodeStatusOverview.rdl (100%) rename Lab/Reports/{ => old}/AdditionalNodeInformation.rdl (100%) rename Lab/Reports/{ => old}/ConfigurationData.rdl (100%) rename Lab/Reports/{ => old}/DSC Apply Status.rdl (100%) rename Lab/Reports/{ => old}/Node Metadata.rdl (100%) rename Lab/Reports/{ => old}/NodeStatusSimple.rdl (100%) rename Lab/Reports/{ => old}/NotInDesiredState.rdl (100%) create mode 100644 source/Global/Domain.yml create mode 100644 tests/ConfigData/CompositeResources.Tests.ps1 diff --git a/.build/TestBuildAcceptance.build.ps1 b/.build/TestBuildAcceptance.build.ps1 deleted file mode 100644 index d3b8df36..00000000 --- a/.build/TestBuildAcceptance.build.ps1 +++ /dev/null @@ -1,91 +0,0 @@ -param -( - # Project path - [Parameter()] - [System.String] - $ProjectPath = (property ProjectPath $BuildRoot), - - [Parameter()] - # Base directory of all output (default to 'output') - [System.String] - $OutputDirectory = (property OutputDirectory (Join-Path $BuildRoot 'output')), - - [Parameter()] - [string] - $DatumConfigDataDirectory = (property DatumConfigDataDirectory 'source'), - - [Parameter()] - [System.Object[]] - $PesterScript = (property PesterScript 'tests'), - - [Parameter()] - [System.Object[]] - $AcceptancePesterScript = (property AcceptancePesterScript 'Acceptance'), - - [Parameter()] - [string[]] - $excludeTag = (property excludeTag @()), - - [Parameter()] - [int] - $CurrentJobNumber = (property CurrentJobNumber 1), - - [Parameter()] - [int] - $TotalJobCount = (property TotalJobCount 1), - - # Build Configuration object - [Parameter()] - [System.Collections.Hashtable] - $BuildInfo = (property BuildInfo @{ }) -) - -task TestBuildAcceptance { - $PesterOutputFolder = Get-SamplerAbsolutePath -Path $PesterOutputFolder -RelativeTo $OutputDirectory - "`tPester Output Folder = '$PesterOutputFolder" - if (-not (Test-Path -Path $PesterOutputFolder)) - { - Write-Build -Color 'Yellow' -Text "Creating folder $PesterOutputFolder" - - $null = New-Item -Path $PesterOutputFolder -ItemType 'Directory' -Force -ErrorAction 'Stop' - } - - $DatumConfigDataDirectory = Get-SamplerAbsolutePath -Path $DatumConfigDataDirectory -RelativeTo $ProjectPath - $PesterScript = $PesterScript.Foreach({ - Get-SamplerAbsolutePath -Path $_ -RelativeTo $ProjectPath - }) - - $AcceptancePesterScript = $AcceptancePesterScript.Foreach({ - Get-SamplerAbsolutePath -Path $_ -RelativeTo $PesterScript[0] - }) - - Write-Build Green "Acceptance Data Pester Scripts = [$($AcceptancePesterScript -join ';')]" - - if (-not (Test-Path -Path $AcceptancePesterScript)) - { - Write-Build Yellow "Path for tests '$AcceptancePesterScript' does not exist" - return - } - - $testResultsPath = Get-SamplerAbsolutePath -Path AcceptanceTestResults.xml -RelativeTo $PesterOutputFolder - - Write-Build DarkGray "TestResultsPath is: $testResultsPath" - Write-Build DarkGray "BuildOutput is: $OutputDirectory" - - Import-Module -Name Pester - $po = $po = New-PesterConfiguration - $po.Run.PassThru = $true 
- $po.Run.Path = [string[]]$AcceptancePesterScript - $po.Output.Verbosity = 'Detailed' - if ($excludeTag) - { - $po.Filter.ExcludeTag = $excludeTag - } - $po.Filter.Tag = 'BuildAcceptance' - $po.TestResult.Enabled = $true - $po.TestResult.OutputFormat = 'NUnitXml' - $po.TestResult.OutputPath = $testResultsPath - $testResults = Invoke-Pester -Configuration $po - - assert ($testResults.FailedCount -eq 0 -and $testResults.FailedBlocksCount -eq 0 -and $testResults.FailedContainersCount -eq 0) -} diff --git a/.build/TestConfigData.build.ps1 b/.build/TestConfigData.build.ps1 deleted file mode 100644 index 7ff88ada..00000000 --- a/.build/TestConfigData.build.ps1 +++ /dev/null @@ -1,99 +0,0 @@ -param -( - # Project path - [Parameter()] - [System.String] - $ProjectPath = (property ProjectPath $BuildRoot), - - [Parameter()] - # Base directory of all output (default to 'output') - [System.String] - $OutputDirectory = (property OutputDirectory (Join-Path $BuildRoot 'output')), - - [Parameter()] - [System.String] - $PesterOutputFolder = (property PesterOutputFolder 'TestResults'), - - [Parameter()] - [System.String] - $PesterOutputFormat = (property PesterOutputFormat ''), - - [Parameter()] - [System.Object[]] - $PesterScript = (property PesterScript ''), - - [Parameter()] - [System.Object[]] - $ConfigDataPesterScript = (property ConfigDataPesterScript 'ConfigData'), - - [Parameter()] - [int] - $CurrentJobNumber = (property CurrentJobNumber 1), - - [Parameter()] - [int] - $TotalJobCount = (property TotalJobCount 1), - - # Build Configuration object - [Parameter()] - [System.Collections.Hashtable] - $BuildInfo = (property BuildInfo @{ }) -) - -task TestConfigData { - - $isWrongPesterVersion = (Get-Module -Name 'Pester' -ListAvailable | Select-Object -First 1).Version -lt [System.Version] '5.0.0' - - # If the correct module is not imported, then exit. - if ($isWrongPesterVersion) - { - "Pester 5 is not used in the pipeline, skipping task.`n" - - return - } - - . 
Set-SamplerTaskVariable -AsNewBuild - - $PesterOutputFolder = Get-SamplerAbsolutePath -Path $PesterOutputFolder -RelativeTo $OutputDirectory - "`tPester Output Folder = '$PesterOutputFolder" - if (-not (Test-Path -Path $PesterOutputFolder)) - { - Write-Build -Color 'Yellow' -Text "Creating folder $PesterOutputFolder" - - $null = New-Item -Path $PesterOutputFolder -ItemType 'Directory' -Force -ErrorAction 'Stop' - } - - $PesterScript = $PesterScript.Foreach( { - Get-SamplerAbsolutePath -Path $_ -RelativeTo $ProjectPath - }) - - $ConfigDataPesterScript = $ConfigDataPesterScript.Foreach( { - Get-SamplerAbsolutePath -Path $_ -RelativeTo $PesterScript[0] - }) - - Write-Build Green "Config Data Pester Scripts = [$($ConfigDataPesterScript -join ';')]" - - if (-not (Test-Path -Path $ConfigDataPesterScript)) - { - Write-Build Yellow "Path for tests '$ConfigDataPesterScript' does not exist" - return - } - - $testResultsPath = Get-SamplerAbsolutePath -Path IntegrationTestResults.xml -RelativeTo $PesterOutputFolder - - Write-Build DarkGray "TestResultsPath is: $TestResultsPath" - Write-Build DarkGray "OutputDirectory is: $PesterOutputFolder" - - Import-Module -Name Pester - $po = New-PesterConfiguration - $po.Run.PassThru = $true - $po.Run.Path = [string[]]$ConfigDataPesterScript - $po.Output.Verbosity = 'Detailed' - $po.Filter.Tag = 'Integration' - $po.TestResult.Enabled = $true - $po.TestResult.OutputFormat = 'NUnitXml' - $po.TestResult.OutputPath = $testResultsPath - $testResults = Invoke-Pester -Configuration $po - - assert ($testResults.FailedCount -eq 0 -and $testResults.FailedBlocksCount -eq 0 -and $testResults.FailedContainersCount -eq 0) -} diff --git a/.gitignore b/.gitignore index ab2af191..ea1472ec 100644 --- a/.gitignore +++ b/.gitignore @@ -1,16 +1 @@ output/ - -**.bak -*.local.* -!**/README.md -.kitchen/ - -*.suo -*.user -*.coverage -.vs -.psproj -.sln -markdownissues.txt -node_modules -package-lock.json diff --git a/Exercises/README.md b/Exercises/README.md index 8800e6bd..fb98e5d7 100644 --- a/Exercises/README.md +++ b/Exercises/README.md @@ -10,16 +10,16 @@ Before starting any exercise, please make sure you have met the following requir - Create a free Azure test account if you don't have one yet with enough credits left: . You may want to create a new email address if you already have one and used all your credits already (required to test Azure Automation DSC in a release pipeline) - Create one or two virtual machines in the test subscription and don’t forget to turn them off. The machine must have the status "deallocated", otherwise they are eating up your credits. - Have a notebook computer (ideally Windows 10) with you that has the following software installed: - - git: https://git-scm.com/downloads - - Visual Studio Code: https://code.visualstudio.com/Download + - [Download Git](https://git-scm.com/downloads) + - [Download Visual Studio Code](https://code.visualstudio.com/Download) - Install the PowerShell extensions - Install the RedHat yaml extension - - Install the Az module (Install-Module -Name Az) - - Test logging into your free Azure test subscription (Login-AzAccount) - + - Install the Az module (Install-Module -Name Az) + - Test logging into your free Azure test subscription (Login-AzAccount) + Please execute the [prerequisite check](CheckPrereq.ps1) in Windows PowerShell to check if everything is configured correctly on you computer. 
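If you would like a feel for what such a check covers before running it, the sketch below tests a few of the prerequisites listed above (git, the Az module and a sufficiently new Pester). It is an illustration only and not the actual CheckPrereq.ps1 script, whose checks may differ.

```powershell
# Illustrative sketch only - not the repository's CheckPrereq.ps1, whose checks may differ.
$checks = [ordered]@{
    'git on PATH' = [bool](Get-Command -Name git -ErrorAction SilentlyContinue)
    'Az module'   = [bool](Get-Module -Name Az -ListAvailable)
    'Pester > 4'  = [bool](Get-Module -Name Pester -ListAvailable |
            Where-Object { $_.Version -gt [version]'4.0.0' })
}
$checks
```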
-> ***Please not that this test requires Pester > 4 to run.*** +> ***Please note that this test requires Pester > 4 to run.*** ## Task 1 diff --git a/Exercises/Task2/Exercise1.md b/Exercises/Task2/Exercise1.md index c7c7fc71..b777525e 100644 --- a/Exercises/Task2/Exercise1.md +++ b/Exercises/Task2/Exercise1.md @@ -4,7 +4,7 @@ This task is about building the solution locally. For that, no infrastructure or service is required. All you need is having cloned the public DscWorkshop repository to your machine. -To kick off a new build, the script ```/DSC/Build.ps1``` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. +To kick off a new build, the script ```build.ps1``` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. After completing this task, you have a gone through the build process for all artifacts that are required for a DSC pull server scenario (on-prem or Azure). @@ -15,7 +15,7 @@ After completing this task, you have a gone through the build process for all ar ## 2.1 Running a manual build locally 1. Open Windows PowerShell as elevated Admin. Do this by pressing the Windows and then typing ```powershell.exe``` and then right-click select 'Run As Administrator' -2. Execute the ```Get-ExecutionPolicy``` cmdlet. The resulting execution policy should be either RemoteSigned, Unrestricted or Bypass: +1. Execute the ```Get-ExecutionPolicy``` cmdlet. The resulting execution policy should be either RemoteSigned, Unrestricted or Bypass: ```code Get-ExecutionPolicy @@ -29,14 +29,14 @@ After completing this task, you have a gone through the build process for all ar Set-ExecutionPolicy RemoteSigned -Force ``` -3. Change to a suitable directory in which to clone the workshop files. As you will navigate to that folder quite often, keep it easy like +1. Change to a suitable directory in which to clone the workshop files. As you will navigate to that folder quite often, keep it easy like ```powershell mkdir C:\Git Set-Location -Path C:\Git ``` -4. **Optional**: If you have not yet installed git, please do so now by executing the following lines of code: +1. **Optional**: If you have not yet installed git, please do so now by executing the following lines of code: ```powershell Install-PackageProvider -Name nuget -Force @@ -47,81 +47,92 @@ After completing this task, you have a gone through the build process for all ar If you do not want to install Chocolatey, you can also browse to and download and install git from there. -5. Ensure that the git executable is in your path to make the next exercises work. Otherwise, please use the full or relative path to git.exe in the following steps. +1. Ensure that the git executable is in your path to make the next exercises work. Otherwise, please use the full or relative path to git.exe in the following steps. > Note: After installing git, you may need to close and open VSCode or the ISE again to make the process read the new path environment variable. -6. In this and the following exercises we will be working with the open-source DscWorkshop repository hosted at . To clone this repository, please execute: +1. In this and the following exercises we will be working with the open-source DscWorkshop repository hosted at . To clone this repository, please execute: > Note: Please make sure you are in the 'C:\Git' folder or wherever you want to store project. 
- + ```powershell git clone https://github.com/dsccommunity/DscWorkshop ``` -7. Change into the newly cloned repository and checkout the dev branch to move into the development +1. Change into the newly cloned repository and checkout the dev branch to move into the development ```powershell Set-Location -Path .\dscworkshop ``` To get the branch you are currently using, just type: + ```powershell git branch ``` - If the command did not return 'dev', please switch for the 'dev' branch like this: + If the command did not return 'dev', please switch for the 'dev' branch like this. If the branch 'dev' does not exist yet, create one like done in the next code block: ```powershell git checkout dev + + #if the previous command failed with: error: pathspec 'dev' did not match any file(s) known to git + git branch dev + git checkout dev ``` - > Note: If you want to read more about this, have a look at the documentation about [git branches](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell) + > Note: If you want to read more about branches in git, have a look at the documentation about [git branches](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell) -8. Open the DscWorkshop folder in VSCode and examine the repository contents. The shortcut in VSCode to open a folder is ```CTRL+K CTRL+O```. You can also press ```F1``` and type in the command you are looking for. And of course there is the classical way using the file menu. +1. Open the DscWorkshop folder in VSCode and examine the repository contents. The shortcut in VSCode to open a folder is `CTRL+K CTRL+O`. You can also press `F1` and type in the command you are looking for. And of course there is the classical way using the file menu. -9. For a build to succeed, multiple dependencies have to be met. These are defined in files containing hashtables of key/value pairs much like a module manifest (*.psd1) file. Take a look at the content of these files by navigating to the DSC folder in VSCode and open the \*PSDepend\*.psd1 files: +1. For a build to succeed, multiple dependencies have to be met. These are defined in a file containing hashtables of key/value pairs much like a module manifest (*.psd1) file. Take a look at the content of the file `RequiredModules.psd1` by opening it from the project's root folder in VSCode: PSDepend is another PowerShell module being used here which can be leveraged to define project dependencies to PowerShell modules, GitHub repositories and more. - To learn more about PSDepend, have a look at - -10. Without modifying anything yet, start the build script by executing: + On learn more about PSDepend, have a look at - > Note: It is important to go into the DSC folder and start the build script form there. Don't invoke it like ```.\DSC\Build.ps1```. +1. Without modifying anything yet, start the build script by executing: - ```powershell - cd DSC - .\Build.ps1 -ResolveDependency + ```powershe + ll + .\build.ps1 ``` - This command will download all dependencies that have been defined first and then build the entire environment. Downloading all the dependencies can take This can take a short while. + This command will download all dependencies that have been defined in the previously mentioned file `RequiredModules.psd1` and then build the entire environment. Downloading all the dependencies can take a short while. - While the script is running, you may want to explore the following folders. 
The PSDepend module downloads and stores the dependencies into these folders based on the information in the files in brackets. - - DSC\BuildOutput (DSC\PSDepend.Build.psd1) - - DSC\DscConfigurations (DSC\PSDepend.DscConfigurations.psd1) - - DSC\DscResources (DSC\PSDepend.DscResources.psd1) - - >Note: Depending on you machine's speed, your internet connection and the performance of the PowerShell Gallery, the initial build with downloading all the resources may take 20 to 30 minutes. Subsequent builds should take around 3 minutes. + While the script is running, you may want to explore the following folders. The `PSDepend` module downloads and stores the dependencies into the folder defined as target in the `RequiredModules.psd1`, which is `output\RequiredModules`. + + >Note: Depending on you machine's speed, your internet connection and the performance of the PowerShell Gallery, the initial build with downloading all the resources may take 5 to 15 minutes. Subsequent builds should take around 2 to 4 minutes. + +1. After the build process has finished, a number of artifacts have been created. The artifacts that we need for DSC are the + - MOF files + - Meta.MOF files + - compressed modules -11. After the build process has finished, a number of artifacts have been created. The artifacts that we need for DSC are the MOF files, Meta.MOF files and the compressed modules. Before having a closer look at the artifacts, let's have a look how nodes are defined for the dev environment. In VSCode, please navigate to the folder 'DSC\DscConfigData\AllNodes\Dev. + Additionally, you have artifacts that help when investigating issues with the configuration data or when debugging something but which are not required for DSC. + - CompressedArtifacts + - Logs + - RSOP + - RsopWithSource + + Before having a closer look at the artifacts, let's have a look how nodes are defined for the dev environment. In VSCode, please navigate to the folder `source\AllNodes\Dev`. - You should see two files here for the DSCFile01 and DSCWeb01. + You should see two files here for the `DSCFile01.yml` and `DSCWeb01.yml`. -12. Please open the files 'DSCFile01.yml' and 'DSCWeb01.yml'. Both files are in the YAML format. YAML, like JSON, has been around since 2000/2001 and can be used to serialize data. +1. Please open the files `DSCFile01.yml` and `DSCWeb01.yml`. Both files are in the YAML format. [YAML](https://yaml.org/), like JSON, has been around since 2000 / 2001 and can be used to serialize data. 
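If you want to see how such a file looks on the PowerShell side, you can deserialize it into a hashtable yourself. The sketch below assumes the powershell-yaml module (which provides ConvertFrom-Yaml) is installed; during the build, Datum performs this deserialization for you.

```powershell
# Sketch, assuming the 'powershell-yaml' module is installed.
# Datum deserializes the YAML files for you during the build.
Install-Module -Name powershell-yaml -Scope CurrentUser
$yaml = Get-Content -Path .\source\AllNodes\Dev\DSCFile01.yml -Raw
$node = ConvertFrom-Yaml -Yaml $yaml

$node.Role       # FileServer
$node.Location   # Frankfurt
```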
The file server for example looks like this: ```yaml - NodeName: DSCFile01 - Environment: Dev + NodeName: '[x={ $Node.Name }=]' + Environment: '[x={ $File.Directory.BaseName } =]' Role: FileServer - Description: File Server in Dev + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt Baseline: Server ComputerSettings: - Name: DSCFile01 - Description: File Server in Dev + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -139,11 +150,11 @@ After completing this task, you have a gone through the build process for all ar LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCFile01 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Dev\DscFile01 + - '[x={ Get-DatumSourceFile -Path $File } =]' FilesAndFolders: Items: @@ -151,9 +162,11 @@ After completing this task, you have a gone through the build process for all ar Type: Directory ``` - A node's YAML will contain data that is unique to the node, like its name or IP address. It will also contain role assignments like 'FileServer', the location of the node as well as the optional LCM configuration name to pull. + >Note: The syntax `'[x={ } =]'` invokes PowerShell code for adding data to your yaml files during compilation. More information about this can be found on [Datum.InvokeCommand](https://github.com/raandree/Datum.InvokeCommand). + + A node's YAML will contain data that is unique to the node, like its name or IP address. It will also contain role assignments like `FileServer`, the location of the node as well as the optional LCM configuration name to pull. -13. The role of a node (FileServer) is effectively a link to another YAML file, in this case 'FileServer.yml' in the folder 'DSC\DscConfigData\Roles'. A role describes settings that are meant for a group of nodes and is the next level of generalization. Notice that the content starts with the 'Configurations' key. Nodes, Roles and Locations can all subscribe to DSC composite resources, which we call configurations: +1. The role of a node (`FileServer`) is effectively a link to another YAML file, in this case `FileServer.yml` in the folder `.\source\Roles\FileServer.yml`. A role describes settings that are meant for a group of nodes and is the next level of generalization. Notice that the content starts with the `Configurations` key. Nodes, Roles and Locations can all subscribe to DSC composite resources, which we call configurations: ```yaml Configurations: @@ -165,25 +178,30 @@ After completing this task, you have a gone through the build process for all ar ```yaml FilesAndFolders: - Items: - - DestinationPath: C:\GpoBackup - SourcePath: \\DSCDC01\SYSVOL\contoso.com\Policies - Type: Directory + Items: + - DestinationPath: C:\Test + Type: Directory ``` - In this case, the composite resource 'FilesAndFolders' accepts a (very generic) parameter called 'Items'. The 'Items' parameter is simply a hashtable expecting the same settings that the [File resource](https://docs.microsoft.com/en-us/powershell/scripting/dsc/reference/resources/windows/fileResource?view=powershell-7.1) would use as well. + In this case, the configuration (composite resource) 'FilesAndFolders' accepts a (very generic) parameter called 'Items'. 
The 'Items' parameter is simply a hashtable expecting the same settings that the [File resource](https://docs.microsoft.com/en-us/powershell/scripting/dsc/reference/resources/windows/fileResource?view=powershell-7.1) would use as well. The configuration is documented and you can find some examples how to use it in [DSC Resource 'FilesAndFolders'](https://github.com/raandree/DscConfig.Demo/blob/main/doc/FilesAndFolders.adoc). The location of a node is even more generic than the role and can be used to describe location-specific items like network topology and other settings. Same applies to the environment. -14. Now it's time to focus more on the artifacts. The build process created four types of artifacts: MOF files, Meta.MOF files, Compressed modules and RSoP YAML files. Among these, the RSoP (Resultant Set of Policy) will be very helpful as these files will show you what configuration items exactly will be applied to your nodes and the parameters given to them. The concept of RSoP is very similar to Windows Group Policies and how to [use Resultant Set of Policy to Manage Group Policy](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/dn789183(v=ws.11)). +1. Now it's time to focus more on the artifacts. The build process created four types of artifacts: + - MOF files + - Meta.MOF files + - Compressed modules + - RSoP YAML files with and without source level information + + Among these, the RSoP (Resultant Set of Policy) will be very helpful as these files will show you what configuration items exactly will be applied to your nodes and the parameters given to them. The concept of RSoP is very similar to Windows Group Policies and how to use [Resultant Set of Policy to Manage Group Policy](https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/dn789183(v=ws.11)). - Examine the RSoP files now which are in the folder 'DSC\BuildOutput\RSoP'. + Examine the RSoP files now which are in the folder `output\RSOP` and `output\RsopWithSource`. -15. Let's take the RSoP artifact for 'DSCFile01'. If you compare the RSoP output of this node (DSC\BuildOutput\RSoP\DSCFile01.yml) to the node's config file (DSC\DscConfigData\AllNodes\Dev\DSCFile01.yml), you will notice that there are many more properties defined than in the original 'DSCFile01.yml'. Where did these come from? They are defined the node's role and location YAML files. +1. Let's take the RSoP artifact for 'DSCFile01'. If you compare the RSoP output of this node (DSC\BuildOutput\RSoP\DSCFile01.yml) to the node's config file (DSC\DscConfigData\AllNodes\Dev\DSCFile01.yml), you will notice that there are many more properties defined than in the original 'DSCFile01.yml'. Where did these come from? They are defined the node's role and location YAML files. - For understanding how Datum merges different layers, please refer to [Lookup Merging Behaviour](https://github.com/gaelcolas/Datum#lookup-merging-behaviour). + For understanding how Datum merges different layers, please refer to [Lookup Merging Behavior](https://github.com/gaelcolas/Datum#lookup-merging-behaviour). -16. The usable artifacts are your MOF, meta.MOF files and compressed modules - these files will be part of your release pipeline. +1. The usable artifacts are your MOF, Meta.MOF files and compressed modules - these files will be part of your release pipeline. 
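To get a quick overview of everything the build produced, you can enumerate the output folder. The subfolder names (where the MOF files, Meta.MOF files and compressed modules end up) may differ slightly between versions of the pipeline, so treat the following as a sketch and adjust the path to what you find on disk.

```powershell
# Sketch: summarize what the build dropped into the output folder.
# Subfolder names may differ between versions of the pipeline.
Get-ChildItem -Path .\output -Directory | ForEach-Object {
    [pscustomobject]@{
        Artifact  = $_.Name
        FileCount = (Get-ChildItem -Path $_.FullName -Recurse -File).Count
    }
}
```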
--- diff --git a/Exercises/Task2/Exercise2.md b/Exercises/Task2/Exercise2.md index 53ca225c..ba2bf80b 100644 --- a/Exercises/Task2/Exercise2.md +++ b/Exercises/Task2/Exercise2.md @@ -2,7 +2,7 @@ *Estimated time to completion: 30-60 minutes* -To kick off a new build, the script 'Build.ps1' is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. +To kick off a new build, the script `build.ps1` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. ***Remember to check the [prerequisites](../CheckPrereq.ps1)!*** @@ -12,50 +12,64 @@ To kick off a new build, the script 'Build.ps1' is going to be used. Whether or You are tasked with on-boarding a new node (DSCFile04) to your environment. The node should be a file server (Role) in your branch office in Singapore (Location). You also know that it should be part of the Test servers or canaries that receive new DSC configurations before other production servers. -1. Make a copy of DSCFile02.yml (use as a template) inside the folder 'DSC\DscConfigData\AllNodes\Test' and call it 'DSCFile04.yml'. This new yml will represent your new node. You can do this in the VSCode (mark the file and press CTRL+C and then CTRL+V. Rename the new file) or you can use this PowerShell command. +1. Make a copy of `DSCFile02.yml` (use as a template) inside the folder 'source\AllNodes\Test' and call it `DSCFile04.yml`. This new yml will represent your new node. You can do this in the VSCode (mark the file and press CTRL+C and then CTRL+V. Rename the new file) or you can use this PowerShell command. ```powershell - Copy-Item -Path .\DscConfigData\AllNodes\Test\DSCFile02.yml -Destination .\DscConfigData\AllNodes\Test\DscFile04.yml + Copy-Item -Path .\source\AllNodes\Test\DSCFile02.yml -Destination .\source\AllNodes\Test\DSCFile04.yml ``` -2. Open the newly created file and modify the properties NodeName, Location, Description and ConfigurationNames with the below values. - *Please note that outside of a workshop environment, this step can easily be scripted to e.g. use a CMDB as the source for new nodes* +1. Please start the build job again by calling the `build.ps1` script and let's see if it was that easy to add a new node to the configuration data. + + You should see some red on the screen, always the wrong color. What is the problem? + + You should see an error message indicating that there is a IP address conflict. Before the build creates the RSOP files and compiles the MOF files, the configuration data is tested for integrity. There is a number of predefined tests and of course the list of tests can be extended depending on the complexity and design of your configuration data. The tests are invoked by [Pester](https://pester.dev/). + + ``` + [-] Should not have duplicate node names 19ms (17ms|2ms) + Expected $null or empty, but got DSCFile02. + at (Compare-Object -ReferenceObject $ReferenceNodes -DifferenceObject $DifferenceNodes).InputObject | Should -BeNullOrEmpty, D:\DscWorkshop\tests\ConfigData\ConfigData.Tests.ps1:127 + at , D:\DscWorkshop\tests\ConfigData\ConfigData.Tests.ps1:127 + ``` + +1. Open the newly created file and modify the IP address to `192.168.111.112`. All other fields are retrieved dynamically using the Datum handler [Datum.InvokeCommand](https://github.com/raandree/Datum.InvokeCommand). + + > Please note that outside of a workshop environment, this step can easily be scripted to e.g. 
use a CMDB as the source for new nodes*. ```yaml - NodeName: DSCFile04 - . - Description: 'SIN secondary file server' - . - Location: Singapore - . - ComputerSettings: - Name: DSCFile01 + NodeName: '[x={ $Node.Name }=]' + Environment: '[x={ $File.Directory.BaseName } =]' . + NetworkIpConfiguration: - Interfaces: - - InterfaceAlias: DscWorkshop 0 - IpAddress: 192.168.111.112 - . - LcmConfig: - ConfigurationRepositoryWeb: - Server: - ConfigurationNames: DSCFile01 + Interfaces: + - InterfaceAlias: DscWorkshop 0 + IpAddress: 192.168.111.112 + Prefix: 24 + Gateway: 192.168.111.50 + DnsServer: + - 192.168.111.10 + DisableNetbios: true + . DscTagging: Layers: - - AllNodes\Dev\DscFile04 + - '[x={ Get-DatumSourceFile -Path $File } =]' ``` -3. This simple file is already enough for your new node. Produce new artifacts now by committing your changes and running a build again. You can commit the change by means of the VSCode UI or using the git command. You can find some guidance here: -[Using Version Control in VS Code](https://code.visualstudio.com/Docs/editor/versioncontrol). After the commit, start a new build. The commands look like this: +1. After changing the IP address, this simple file is already enough for your new node. Produce new artifacts now by committing your changes and running a build again. You can commit the change by means of the VSCode UI or using the git command. You can find some guidance here: +[Using Version Control in VS Code](https://code.visualstudio.com/Docs/editor/versioncontrol). - ```powershell - git add . - git commit -m "Added node DSCFile04" - .\Build.ps1 - ``` +You can also trigger the commit using the terminal like this: + + ```powershell + git add . + git commit -m 'Added node DSCFile04' + .\Build.ps1 + ``` + + After the commit, start a new build. -4. If you now examine the contents of your BuildOutput folder, you will notice that your new node will have received an RSOP file, a MOF and Meta.MOF file. +1. If you now examine the contents of your `output` folder, you will notice that your new node will have received two RSOP files, a MOF and Meta.MOF file. ```powershell Get-ChildItem -Path .\BuildOutput -Recurse -Filter DSCFile04* -File diff --git a/Exercises/Task2/Exercise3.md b/Exercises/Task2/Exercise3.md index 84f81461..f73beb33 100644 --- a/Exercises/Task2/Exercise3.md +++ b/Exercises/Task2/Exercise3.md @@ -2,7 +2,7 @@ *Estimated time to completion: 30-60 minutes* -To kick off a new build, the script 'Build.ps1' is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. +To kick off a new build, the script `build.ps1` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. ***Remember to check the [prerequisites](../CheckPrereq.ps1)!*** @@ -14,9 +14,9 @@ Now, your branch office in Frankfurt has requested a new role for WSUS servers. This new role should enable WSUS administrators to build on top of the basic infrastructure. -1. Let us now create a new role for a WSUS Server in the 'DSC\DscConfigData\Roles' folder. This role's YAML will subscribe to the configuration "WindowsFeatures" and will define configuration data (Settings) for the configuration. +1. Let us now create a new role for a WSUS Server in the `source\Roles` folder. This role's YAML will subscribe to the configuration `WindowsFeatures` and will define configuration data (Settings) for the configuration. 
-Create a new file in 'DSC\DscConfigData\Roles' named 'WsusServer.yml'. Paste the following code into the new file and save it. +Create a new file in `source\Roles` named `WsusServer.yml`. Paste the following code into the new file and save it. ```yml Configurations: @@ -27,19 +27,19 @@ Create a new file in 'DSC\DscConfigData\Roles' named 'WsusServer.yml'. Paste the - +UpdateServices ``` -2. Now let us add a new node YAML (DSCWS01.yml) in the Test which is based on this Role. Create the new file 'DSCWS01.yml' in the folder 'DSC\DscConfigData\AllNodes\Test'. Paste the following content into the file and save it. +1. Now let us add a new node YAML `DSCWS01.yml` in the Test environment which is based on this role. Create the new file `DSCWS01.yml` in the folder `source\AllNodes\Test`. Paste the following content into the file and save it. ```yml - NodeName: DSCWS01 - Environment: Test + NodeName: '[x={ $Node.Name }=]' + Environment: '[x={ $File.Directory.BaseName } =]' Role: WsusServer - Description: WSUS Server in Test + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt Baseline: Server ComputerSettings: - Name: DSCWS01 - Description: WSUS Server in Test + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -57,11 +57,11 @@ Create a new file in 'DSC\DscConfigData\Roles' named 'WsusServer.yml'. Paste the LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCWS01 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Test\DSCWS01 + - '[x={ Get-DatumSourceFile -Path $File } =]' ``` > Note: The YAML rendering does not always show the indention correctly. Please have a look at another node file to check the indention. @@ -69,10 +69,10 @@ Create a new file in 'DSC\DscConfigData\Roles' named 'WsusServer.yml'. Paste the Once again, it is that easy. New roles (i.e. WsusServer), environments (i.e. Test) and nodes (i.e. DSCWS01) just require adding YAML files. The devil is in the details: Providing the appropriate configuration data for your configurations like the network configuration requires knowledge of the underlying infrastructure of course. -In order to build the new node 'DSCWS01' which uses the 'WsusServer' role, simply start up the build again. +In order to build the new node `DSCWS01` which uses the `WsusServer` role, simply start up the build again. ```powershell - .\Build.ps1 + .\build.ps1 ``` After the build has completed take a look at the new nodes resulting files. @@ -81,11 +81,11 @@ After the build has completed take a look at the new nodes resulting files. ## 2.4 Modify a role -Modifying a role is even easier as adding a new one. Let's try changing the default time server for all the file servers. If the setting effect all time servers, it must be defined in the 'FileServer' role +Modifying a role is even easier as adding a new one. Let's try changing the default time server for all the file servers. If the setting effect all time servers, it must be defined in the `FileServer` role. -1. Open the 'FileServer.yml' from your roles directory. We are modifying an already existing role definition now. +1. Open the `FileServer.yml` from your roles directory. We are modifying an already existing role definition now. -2. In order to change a configuration item, just modify or add to your YAML file: +1. 
In order to change a configuration item, just modify or add to your YAML file: ```yaml RegistryValues: @@ -97,12 +97,12 @@ Modifying a role is even easier as adding a new one. Let's try changing the defa Ensure: Present ``` -3. After committing your changes, you can restart the build again to see your results in action. All file server artifacts that have been created will now have a modified MOF and RSoP. You can either use the VSCode UI or the following commands: +1. After committing your changes, you can restart the build again to see your results in action. All file server artifacts that have been created will now have a modified MOF and RSoPs. You can either use the VSCode UI or the following commands: ```powershell git add . - git commit -m "Modified the ntp server setting for the file server role." - .\Build.ps1 + git commit -m 'Modified the ntp server setting for the file server role.' + .\build.ps1 ``` You should have a feeling now how easy it is to modify config data used by DSC when using Datum. diff --git a/Exercises/Task2/Exercise4.md b/Exercises/Task2/Exercise4.md index 6a18e252..44d550ce 100644 --- a/Exercises/Task2/Exercise4.md +++ b/Exercises/Task2/Exercise4.md @@ -2,7 +2,7 @@ *Estimated time to completion: 30-60 minutes* -To kick off a new build, the script 'Build.ps1' is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. +To kick off a new build, the script `build.ps1` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. ***Remember to check the [prerequisites](../CheckPrereq.ps1)!*** @@ -10,11 +10,11 @@ To kick off a new build, the script 'Build.ps1' is going to be used. Whether or ## 2.4 Add another layer to your hierarchy -You are tasked with creating another layer that better reflects separate fire sections for your locations. All locations have two fire sections that are physically kept apart from each other. Each computer should have the fire section information written to its registry in the key 'HKEY_LOCAL_MACHINE\SOFTWARE\Dsc'. +You are tasked with creating another layer that better reflects separate fire sections for your locations. All locations have two fire sections that are physically kept apart from each other. Each computer should have the fire section information written to its registry in the key `HKEY_LOCAL_MACHINE\SOFTWARE\Dsc`. -1. To create a new layer, you need to find an appropriate structure. Since the file system is already quite good when it comes to displaying hierarchical data, we can add a subfolder called FireSections which should contain for example Section1.yml and Section2.yml. The file subscribes to the 'RegistryValues' configuration to write a registry key to the nodes containing the fire section. You may either use VSCode to create the folder and the files or run the following commands: +1. To create a new layer, you need to find an appropriate structure. Since the file system is already quite good when it comes to displaying hierarchical data, we can add a subfolder called `FireSections` which should contain for example `Section1.yml` and `Section2.yml`. The file subscribes to the `RegistryValues` configuration to write a registry key to the nodes containing the fire section. You may either use VSCode to create the folder and the files or run the following commands: -> **Note: Before running the commands, make sure you are in the directory ```DscWorkshop\DSC```**. 
+> **Note: Before running the commands, make sure you are in the directory `DSC`**. ```powershell @' @@ -33,7 +33,7 @@ RegistryValues: DscTagging: Layers: - FireSections\Section1 -'@ | New-Item -Path .\DscConfigData\FireSections\Section1.yml -Force +'@ | New-Item -Path .\source\FireSections\Section1.yml -Force @' Configurations: @@ -51,45 +51,46 @@ RegistryValues: DscTagging: Layers: - FireSections\Section2 -'@ | New-Item -Path .\DscConfigData\FireSections\Section2.yml -Force +'@ | New-Item -Path .\source\FireSections\Section2.yml -Force ``` -2. Please start a new build and examine the RSoP files for the new fire section information once completed. Don't try too hard to find the information. It is expected that it's not there. Why? +1. Please start a new build and examine the RSoP files for the new fire section information once completed. Don't try too hard to find the information. It is expected that it's not there. Why? We have created config files containing the fire sections but the nodes have not been assigned a fire section yet. -3. To assign a node to a fire section, please open the files for the nodes 'DSCFile01' and DSCWeb01' in the dev environment. Like a node is assigned to a location or role, you can add a line containing the fire section like this: +1. To assign a node to a fire section, please open the files for the nodes `DSCFile01` and `DSCWeb01` in the dev environment. Like a node is assigned to a location or role, you can add a line containing the fire section like this: ```yml - NodeName: DSCFile01 - Environment: Dev + NodeName: '[x={ $Node.Name }=]' + Environment: '[x={ $File.Directory.BaseName } =]' Role: FileServer - Description: File Server in Dev + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt + Baseline: Server FireSection: Section1 ``` -4. Please build the project again. This time you will see that the fire section number has made it to the node's RSoP files. However, something important is missing: The data about the registry key to write. Why is it still missing? +1. Please build the project again. This time you will see that the fire section number has made it to the node's RSoP files. However, something important is missing: The data about the registry key to write. Why is it still missing? -5. In order to add completely new layers to your configuration, you need to tell Datum about it by modifying the lookup precedence. This is done in the global configuration file called 'Datum.yml' stored in the directory 'DscConfigData'. Please open the file. +1. In order to add completely new layers to your configuration, you need to tell Datum about it by modifying the lookup precedence. This is one in the global configuration file called `Datum.yml` stored in the directory `source`. Please open the file. -6. Examine the current contents of 'Datum.yml' and notice the resolution order for your files: +1. 
Examine the current contents of `Datum.yml` and notice the resolution order for your files: | Name | Description | |-|-| - | ```Baselines\Security``` | The security basline overwrites everything| - | ```AllNodes\$($Node.Environment)\$($Node.NodeName)``` | The settings unique to one node| - | ```Environment$($Node.Environment)``` | The settings that are environment specific| - | ```Environment$($Node.Location)``` | The settings that are location specific| - | ```Roles\$($Node.Role)``` | The settings unique to the role of a node| - | ```Baselines\$($Node.Baseline)``` | The baseline settings that should apply to all nodes and roles| - | ```Baselines\DscLcm``` | DSC specific settings like intervals, maintenance windows and version info + | `Baselines\Security` | The security basline overwrites everything| + | `AllNodes\$($Node.Environment)\$($Node.NodeName)` | The settings unique to one node| + | `Environment$($Node.Environment)` | The settings that are environment specific| + | `Environment$($Node.Location)` | The settings that are location specific| + | `Roles\$($Node.Role)` | The settings unique to the role of a node| + | `Baselines\$($Node.Baseline)` | The baseline settings that should apply to all nodes and roles| + | `Baselines\DscLcm` | DSC specific settings like intervals, maintenance windows and version info - The settings get more generic the further down you go in the list. This way, your node will always win and will always be able to override settings that have been defined on a more global scale like the environment. This is because the default lookup is set to 'MostSpecific', so the most specific setting wins. + The settings get more generic the further down you go in the list. This way, your node will always win and will always be able to override settings that have been defined on a more global scale like the environment. This is because the default lookup is set to `MostSpecific`, so the most specific setting wins. - Some paths are configured to have a different lookup option like 'merge_basetype_array: Unique' or 'merge_hash: deep'. This tells Datum not to override settings in lower levels but merge the content. An example: + Some paths are configured to have a different lookup option like `merge_basetype_array: Unique` or `merge_hash: deep`. This tells Datum not to override settings in lower levels but merge the content. An example: - The 'ServerBaseline.yml' adds the Windows feature 'Telnet-Client' to the list of windows features: + The `Security.yml` baseline adds the Windows feature `Telnet-Client` to the list of windows features (for removal): ```yaml WindowsFeatures: @@ -97,7 +98,7 @@ DscTagging: - -Telnet-Client ``` - And the web server role contains some other Windows features: + And the `WebServer` role contains some other Windows features: ```yaml WindowsFeatures: @@ -106,14 +107,14 @@ DscTagging: - -WoW64-Support ``` - The 'Datum.yml' defines the merge behavior for the path 'WindowsFeatures\Name': + The `Datum.yml` defines the merge behavior for the path `WindowsFeatures\Name`: ```yaml WindowsFeatures\Name: merge_basetype_array: Unique ``` - The result can be seen the RSoP files. After building the project, the Windows features config section in the 'DSCWeb01.yml' in the folder 'DSC\BuildOutput\RSOP' looks like this: + The result can be seen the RSoP files. 
After building the project, the Windows features config section in the `DSCWeb01.yml` in the folder `output\RSOP` looks like this: ```yaml WindowsFeatures: @@ -125,7 +126,7 @@ DscTagging: More complex merging scenarios are supported that will be explained in later articles. -7. Let's go back to the fire section task. A good place to add your new layer thus would be somewhere before the node-specific data is applied, since a separate fire section might mean different IP configurations. +1. Let's go back to the fire section task. A good place to add your new layer thus would be somewhere before the node-specific data is applied, since a separate fire section might mean different IP configurations. Let's add the new layer by adding an entry to Datum's global lookup precedence. Depending on when you want your new layer to apply, this could look like: @@ -144,6 +145,40 @@ DscTagging: In summary, adding new layers is a bit more involved than adding a new role. You need to think about the resolution precedence and the way your settings will be merged. Our project can serve as a good starting point, but you still need to take care of organizational requirements and so on. + If you start a new build now, you will find three references in the RSoP files to the fire section: + - The fire section assignment on the node level. + - The fire section reference in `DscTagging\Layers`. + - The fire section registry key. + + The RSOP file should look like this: + + ```yaml + FireSection: Section1 + . + DscTagging: + Environment: Dev + Version: 0.3.0 + Layers: + - AllNodes\Dev\DSCFile01 + - Environment\Dev + - Locations\Frankfurt + - FireSections\Section1 + - Roles\FileServer + - Baselines\Security + - Baselines\Server + - Baselines\DscLcm + . + RegistryValues: + DependsOn: '[FilesAndFolders]FilesAndFolders' + Values: + - ValueName: FireSection + ValueType: DWORD + ValueData: 1 + Ensure: Present + Force: true + Key: HKEY_LOCAL_MACHINE\SOFTWARE\Dsc + ``` + --- Please continue with [the stretch goal](StretchGoal.md) when your are ready. diff --git a/Exercises/Task2/StretchGoal.md b/Exercises/Task2/StretchGoal.md index 5f0aa8bd..a3051d7f 100644 --- a/Exercises/Task2/StretchGoal.md +++ b/Exercises/Task2/StretchGoal.md @@ -2,7 +2,7 @@ *Estimated time to completion: 30-60 minutes* -To kick off a new build, the script 'Build.ps1' is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. +To kick off a new build, the script `build.ps1` is going to be used. Whether or not you are in a build pipeline, the build script will create all artifacts in your current environment. ***Remember to check the [prerequisites](../CheckPrereq.ps1)!*** @@ -16,45 +16,45 @@ Extending configurations based on the customer's needs will eventually require y There should rarely be the need for hard-coded values in your composite resources. Keep in mind though that they should abstract some of the complexity of DSC. A composite resource that requires massive amounts of configuration data is probably not the best choice. -We cannot give you a blueprint that covers all your needs. However, the repository can serve as a starting point again. The CommonTasks module is our trusted module in the build and release pipeline and collects commonly used DSC composite resources. +We cannot give you a blueprint that covers all your needs. However, the repository can serve as a starting point again. 
The `DscConfig.Demo` module is our trusted module in the build and release pipeline and collects commonly used DSC composite resources. At your customer, this is all customer-specific code and should be collected in one or more separate PowerShell modules with their own build and release pipeline. This pipeline is trusted and will always deliver tested and working code to an internal gallery, for example [ProGet](https://inedo.com/proget), [Azure DevOps](https://dev.azure.com) or the free and open-source [NuGet](https://nuget.org). -1. To start we have to clone the repository 'CommonTasks' like we have cloned the 'DscWorkshop' project right at the beginning. +1. To start we have to clone the repository `DscConfig.Demo` like we have cloned the `DscWorkshop` project right at the beginning. - > Note: Before cloning, please switch to the same directory you cloned the 'DscWorkshop' project into. + > Note: Before cloning, please switch to the same directory you cloned the `DscWorkshop` project into. ```powershell - git clone https://github.com/dsccommunity/commontasks + git clone https://github.com/raandree/DscConfig.Demo.git ``` - After cloning, please open the 'CommonTasks' repository in VSCode. You may want to open a new VSCode window so you can switch between both projects. + After cloning, please open the `DscConfig.Demo` repository in VSCode. You may want to open a new VSCode window so you can switch between both projects. -2. This module contains many small DSC composite resources (in this context we call them configurations), that the 'DscWorkshop' project uses. Please open the folder 'CommonTasks\DscResources' and have a look at the composite resources defined there. +1. This module contains some small DSC composite resources (in this context we call them configurations), that the `DscWorkshop` project uses. Please open the folder `DscConfig.Demo\DscResources` and have a look at the composite resources defined there. You can get a list of all resources also with this command: ```powershell - Get-ChildItem -Directory -Path ./CommonTasks/CommonTasks/DSCResources + Get-ChildItem -Directory -Path .\source\DSCResources\ ``` -3. Now let's add your own composite resource / configuration by adding the following files to the structure: +1. Now let's add your own composite resource / configuration by adding the following files to the structure: - > Note: You can choose whatever name you like, but here are some recommendations. PowerShell function, cmdlet and parameter names are always in singular. To prevent conflicts, all the DSC composite resources in 'CommonTasks' are named in plural if they can effect one or multiple objects. + > Note: You can choose whatever name you like, but here are some recommendations. PowerShell function, cmdlet and parameter names are always in singular. To prevent conflicts, all the DSC composite resources in `DscConfig.Demo` are named in plural if they can effect one or multiple objects. The naming convention in PowerShell is naming cmdlets always in singular. As we are going to create a composite resource that is configuring disks, you may want to name this resource just 'Disks'. ```code - CommonTasks\ - DscResources\ - Disks\ - Disks.psd1 - Disks.schema.psm1 + source\ + DscResources\ + Disks\ + Disks.psd1 + Disks.schema.psm1 ``` > Note: Some people find it easier to duplicate an existing composite resource and replacing the content in the files. That's up to you. -4. 
Either copy the module manifest content from another resource or add your own minimal content, describing which DSC resource is exposed: +1. Either copy the module manifest content from another resource or add your own minimal content, describing which DSC resource is exposed: ```powershell @{ @@ -68,48 +68,63 @@ At your customer, this is all customer-specific code and should be collected in } ``` -5. Your .psm1 file now should only contain your DSC configuration element, the composite resource. Depending on the DSC resources that you use in this composite, you can make use of Datum's cmdlet ```Get-DscSplattedResource``` or its alias ```x``` to pass parameter values to the resource in a single, beautiful line of code. +1. Your .psm1 file now should only contain your DSC configuration element, the composite resource. Depending on the DSC resources that you use in this composite, you can make use of Datum's cmdlet `Get-DscSplattedResource` or its alias `x` to pass parameter values to the resource in a single, beautiful line of code. - > Note: The ['WindowsServices'](https://github.com/dsccommunity/CommonTasks/blob/master/CommonTasks/DscResources/WindowsServices/WindowsServices.schema.psm1) composite resource in 'CommonTasks' shows the difference of splatting vs. passing the parameters in the classical way. If you want to read more about how PowerShell supports splatting, have a look at [About Splatting](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_splatting?view=powershell-6). DSC does not support splatting out-of-the-box, but Datum adds that very usful feature. - - The following code uses the 'Disk' resource published in the 'StorageDsc' module to configure disk layouts. The '$DiskLayout' hashtable must have a pattern that matches exactly the parameter pattern defined in the 'StorageDsc\Disk' resource. + > Note: The ['WindowsServices'](https://github.com/dsccommunity/CommonTasks/blob/master/CommonTasks/DscResources/WindowsServices/WindowsServices.schema.psm1) composite resource in 'CommonTasks' shows the difference of splatting vs. passing the parameters in the classical way. If you want to read more about how PowerShell supports splatting, have a look at [About Splatting](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_splatting?view=powershell-6). DSC does not support splatting out-of-the-box, but Datum adds that very useful feature. - Please put this code into the file 'Disks.schema.psm1'. + The following code uses the `Disk` resource published in the [StorageDsc](https://github.com/dsccommunity/StorageDsc) module to configure disk layouts. The `$DiskLayout` hashtable must have a pattern that matches exactly the parameter pattern defined in the `StorageDsc\Disk` resource. + + Please put this code into the file `Disks.schema.psm1`. 
```powershell configuration Disks { - param - ( - [Parameter(Mandatory)] + param ( + [Parameter(Mandatory = $true)] [hashtable[]] - $DiskLayout + $Disks ) + Import-DscResource -ModuleName PSDesiredStateConfiguration Import-DscResource -ModuleName StorageDsc - foreach ($disk in $DiskLayout.GetEnumerator()) { - (Get-DscSplattedResource -ResourceName Disk -ExecutionName $disk.DiskId -Properties $disk -NoInvoke).Invoke($disk) + foreach ($disk in $Disks) + { + # convert string with KB/MB/GB into Uint64 + if ($null -ne $disk.Size) + { + $disk.Size = [Uint64] ($disk.Size / 1) + } + + # convert string with KB/MB/GB into Uint32 + if ($null -ne $disk.AllocationUnitSize) + { + $disk.AllocationUnitSize = [Uint32] ($disk.AllocationUnitSize / 1) + } + + $executionName = $disk.DiskId + (Get-DscSplattedResource -ResourceName Disk -ExecutionName $executionName -Properties $disk -NoInvoke).Invoke($disk) } } ``` - Great, you have greated the first composite resource that serves as a configuration. But this resource only exists in the 'CommonTasks' project. We want to use it in the 'Dscworkshop' project. In a real-life environment the build pipeline of the 'DscWorkshop' project would pull the 'CommonTasks' module from an interal gallery. In case of this exercise the build pipeline downloads the 'CommonTasks' module from the [PowerShell Gallery](https://www.powershellgallery.com/packages/CommonTasks), which of course doesn't know about the code that you want to add. To skip this step and inject your modified version which has the new 'Disks' resource directory, run the following commands: + Great, you have created the first composite resource that serves as a configuration. But this resource only exists in the `DscConfig.Demo` project. We want to use it in the `DscWorkshop` project. In a real-life environment the build pipeline of the `DscWorkshop` project would pull the `DscConfig.Demo` module from an internal gallery. In the case of this exercise, the build pipeline downloads the `DscConfig.Demo` module from the [PowerShell Gallery](https://www.powershellgallery.com/packages/DscConfig.Demo), which of course doesn't know about the code that you want to add. To skip this step and inject your modified version which has the new `Disks` resource directory, run the following commands: > Note: Please make sure you are in the directory you have cloned the repositories into. If you are not in the right location, these commands will fail. ```powershell - Remove-Item -Path .\DscWorkshop\DSC\DscConfigurations\CommonTasks\ -Recurse -Force + Remove-Item -Path .\DscWorkshop\output\RequiredModules\DscConfig.Demo\ -Recurse -Force - Copy-Item -Path .\CommonTasks\BuildOutput\Modules\CommonTasks\ -Destination .\DscWorkshop\DSC\DscConfigurations\ -Recurse + Copy-Item -Path .\DscConfig.Demo\output\Module\DscConfig.Demo\ -Destination .\DscWorkshop\output\RequiredModules\ -Recurse ``` - The folder 'C:\Git\DscWorkshopFork\DSC\DscConfigurations\CommonTasks\DscResources' should now contain your new 'Disks' composite resource. + The folder `C:\Git\DscWorkshop\output\RequiredModules\DscConfig.Demo\DscResources` should now contain your new `Disks` composite resource. ## 2.6 - Use a custom Configuration (DSC Composite Resource) -1. Let's suppose you want to manage the disk layout of all file servers with DSC. In this case the new config goes into the 'FileServer.yml' file. Please open it. The full path is '\DSC\DscConfigData\Roles\FileServer.yml'. - At the top of the file you have the configurations mapped to the file server role.
Please add the new 'Disks' configuration: +1. Let's suppose you want to manage the disk layout of all file servers with DSC. In this case the new config goes into the `FileServer.yml` file. Please open it. The full path is `\source\Roles\FileServer.yml`. + + At the top of the file you have the configurations mapped to the file server role. Please add the new `Disks` configuration: ```yaml Configurations: @@ -118,18 +133,41 @@ At your customer, this is all customer-specific code and should be collected in - Disks ``` - After saving the file, please start a new build using the script 'DSC\Build.ps1'. The build will not fail but wait for further input like this: + After saving the file, please start a new build using the script `build.ps1`. The build will fail because a Pester test has discovered that the DSC resource module `StorageDsc` is missing. + + Please add the following line to the file `.\RequiredModules.psd1`: + + ```powershell + StorageDsc = '5.0.1' + ``` + + Then start the build again and tell the build script via the `ResolveDependency` switch to download the dependencies again. + + ```powershell + .\build.ps1 -ResolveDependency + ``` + + > Note: This may take a while, so it is a good time to grab a coffee. + +1. The build will not fail but wait for further input like this: ```code - DSCFile01 : DSCFile01 : MOF__0.0.0 NA - DSCFile01 : FileServer ::> FilesAndFolders .....................................................OK - DSCFile01 : FileServer ::> RegistryValues ......................................................OK + Did not find 'RootConfiguration.ps1' and 'CompileRootConfiguration.ps1' in 'source', using the ones in 'Sampler.DscPipeline' + RootConfiguration will import these composite resource modules as defined in 'build.yaml': + - PSDesiredStateConfiguration + - DscConfig.Demo + + + --------------------------------------------------------------------------- + DSCFile02 : DSCFile02 : MOF__ NA + DSCFile02 : FileServer ::> FilesAndFolders .....................................................OK + DSCFile02 : FileServer ::> RegistryValues ......................................................OK cmdlet Disks at command pipeline position 1 Supply values for the following parameters: - DiskLayout[0]: + Disks[0]: ``` - So why does the build require additional data? Adding the 'Disks' resource to the configurations makes the build script calls it when compiling the MOF files. + So why does the build require additional data? Adding the `Disks` resource to the configurations makes the build script call it when compiling the MOF files. The resource has a mandatory parameter but no argument for this mandatory parameter is available in the configuration data. ```powershell param @@ -140,11 +178,11 @@ At your customer, this is all customer-specific code and should be collected in ) ``` -2. So let's add the configuration data so the 'Disks' resource knows what to do. Please add the following section to the file server role: +1. So let's add the configuration data so the 'Disks' resource knows what to do. Please add the following section to the file server role: ```yaml Disks: - DiskLayout: + Disks: - DiskId: 0 DiskIdType: Number DriveLetter: C @@ -155,7 +193,7 @@ At your customer, this is all customer-specific code and should be collected in FSLabel: Data ``` - If the build has finished, examine the MOF files in the 'BuildOutput' folder.
You should see the config you have made reflected there. + If the build has finished, examine the MOF files in the `output` folder. You should see the config you have made reflected there. Congratulations! You have walked through the entire process of making this repository your own! We hope you are successful with this concept - we certainly are. diff --git a/Exercises/Task3/Exercise1.md b/Exercises/Task3/Exercise1.md index b5bf0e06..cc3b7cb6 100644 --- a/Exercises/Task3/Exercise1.md +++ b/Exercises/Task3/Exercise1.md @@ -4,7 +4,7 @@ This task will guide you through the process of creating an infrastructure build and release pipeline. While the full project also creates a separate pipeline for the DSC Composite Resource module, the same principles apply so that we will concentrate on the build process of your IaaS workloads. -This task assumes that you have access to dev.azure.com in order to create your own project and your own pipeline. +This task assumes that you have access to `https://dev.azure.com` in order to create your own project and your own pipeline. > *By the way: You can use the PowerShell module [AutomatedLab.Common](https://github.com/automatedlab/automatedlab.common) to automate your interactions with TFS,VSTS and Azure DevOps* @@ -16,16 +16,16 @@ Whether you work with a test environment or at a customer location, you will nee 1. Navigate to in a browser of your choice and log in. -2. Click on the "New project" button in the upper right corner and fill out the basics. It does not matter if it is a public or private repository for this lab. - +1. Click on the `New project` button in the upper right corner and fill out the basics. It does not matter if it is a public or private repository for this lab. + >Note: New projects automatically use git as version control system.* -3. On the left-hand side, select Repos -> Files. The page says the project is empty. The easiest way to start is importing the project from GitHub. - +1. On the left-hand side, select Repos -> Files. The page says the project is empty. The easiest way to start is importing the project from GitHub. + Click on "Import" button. As the clone url, use without authorization. ![Repo import](./img/ImportRepo.png) -4. Wait for a couple of seconds. The page will automatically refresh once the import job is done. All project branches, most notably 'master' and 'dev', have been imported. You should already be familiar with the structure from [Task 1](../Task1/Exercise1.md). +1. Wait for a couple of seconds. The page will automatically refresh once the import job is done. All project branches, most notably `main`, have been imported. You should already be familiar with the structure from [Task 1](../Task1/Exercise1.md). At the moment, this project does not do anything. However, you have started with one important component: Source code control for your infrastructure. From now on, every change to an infrastructure component needs to be committed, and every change can be accounted for. diff --git a/Exercises/Task3/Exercise2.md b/Exercises/Task3/Exercise2.md index 8d5306f2..9014fe45 100644 --- a/Exercises/Task3/Exercise2.md +++ b/Exercises/Task3/Exercise2.md @@ -2,7 +2,7 @@ *Estimated time to completion: 35 minutes* -This task assumes that you have access to dev.azure.com in order to create your own project and your own pipeline. +This task assumes that you have access to `https://dev.azure.com` in order to create your own project and your own pipeline. 
*By the way: You can use the PowerShell module [AutomatedLab.Common](https://github.com/automatedlab/automatedlab.common) to automate your interactions with TFS, VSTS and Azure DevOps* @@ -12,7 +12,7 @@ This task assumes that you have access to dev.azure.com in order to create your > Note: In the exercise we do not use a YAML pipeline but the old graphical way of defining one. This way is better for learning how a pipeline can be defined and how things work. Later you surely want to switch to YAML. -Again, this step can easily be automated. ~~If you are interested in how this may look like, take a look at the [project code](../../Lab/31%20New%20Release%20Pipeline%20CommonTasks.ps1) in either of the pipeline lab scripts. We are using a hashtable containing all build tasks and pass it to the cmdlets exposed by AutomatedLab.Common.~~ +Again, this step can easily be automated. If you are interested in how this may look, take a look at the [project code](../../Lab/31%20New%20Release%20Pipeline%20CommonTasks.ps1) in either of the pipeline lab scripts. We are using a hashtable containing all build tasks and pass it to the cmdlets exposed by AutomatedLab.Common. Our template approach consists of using a trusted, internal (i.e. private) gallery for PowerShell modules. Internal does not necessarily mean on-premises, but means a gallery that you trust, which is usually self-hosted. @@ -22,54 +22,29 @@ In the previous exercise, you have created a new Azure DevOps project to collabo To create your own build (Continuous Integration) pipeline, follow the next steps: -1. In your repository, on the left side click on Pipelines -> Builds and then on the button 'New pipeline'. +1. In your repository, on the left side click on `Pipelines -> Builds` and then on the button `Create Pipeline`. -2. Now you are asked: Where is your code? Please choose 'Other Git'. +1. Now you are asked: Where is your code? Please choose `Azure Repos Git`. -3. The next menu lets you 'Select a source'. Please select 'Azure Repos Git'. The required information will be added automatically. Please switch the 'Default branch for manual and scheduled builds' to 'dev' and press the 'Continue' button. +1. In the next menu, select the repository that has the same name as your project. There should be just this single repository anyway. -4. On the "Select a template" page, select the "Empty pipeline" which is on the very bottom the last time this documentation was updated. +1. The next step shows you the pipeline as it is part of the DscWorkshop project. The pipeline contains eight steps: -You have created an empty pipeline now. The next tasks will give the pipeline some work to do. + 1. Evaluate the next version using [GitVersion](https://gitversion.net/). -Our build process can run on the hosted agent. A build agent is just a small service/daemon running on a VM that is capable of executing scripts and so on. A new VM is deployed every time you start a build and destroyed once the build is finished. + 1. Call the build script like you have done it locally before. As the build is running on a new worker, the `ResolveDependency` switch must be used. This step runs only the build. -On premises, you might want to select a dedicated agent pool for DSC configuration compilation jobs for example. + 1. The next step runs the pack task, which compresses the modules and previously created artifacts. -5. Add the first agent job by clicking the plus icon next to 'Agent job 1'.
From the list of tasks, select PowerShell and make sure that the following settings are correct: - Display name: Execute build.ps1 - Type: Inline - Script: .\Build.ps1 -ResolveDependency - Working Directory: DSC + 1. The next tasks are just uploading the artifacts into the Azure DevOps database. - ![Build task](./img/ExecuteBuild.png) + > Note: At the very top of the pipeline definition, there is the section `trigger`. This enables continuous integration, meaning that the build is started every time you make a change to any file in any branch. The only exception is the file `changelog.md`; this path is excluded. -6. Next, we would like to publish all test results. In the last task you have triggered a manual build and saw the test cases that were executed. On each build an NUnit XML file is generated that Azure DevOps can pick up. To do so, add another agent task, this time "Publish Test Results". Make sure that it is configured to use NUnit and to pick up the correct file: ```**/IntegrationTestResults.xml```. +1. Now, just click the `Run` button to kick off your first infrastructure build. The next page informs you about the current status of your job. Lie back and wait for the artifacts to be built. ![Test results](./img/PublishTests.png) + > Note: If you create and / or compile software on a dedicated development machine or your personal computer, you pile up a lot of dependencies: Programs, helper tools, DLLs, PowerShell modules, etc. All these things may be required to run your code. In the previous task we have introduced you to [PSDepend](https://github.com/RamblingCookieMonster/PSDepend). This helper module makes sure that we have all the dependencies downloaded that are defined in the `RequiredModules.psd1` file. If your software can be built on a standard build worker, it can be built everywhere and does not have any unwanted and undocumented dependencies. -7. Now we do exactly the same like in the previous step but for the build acceptance test results. You can clone the task "Publish Interation Test Results" and adapt the fields 'Display name' and 'Test results files'. The name of the file this task is looking for is ```**/BuildAcceptanceTestResults.xml```. -8. As a last step, we need to make sure that all build artifacts (MOF, meta.MOF and modules) are published. These artifacts will be used in the release and can be published on an Azure Automation DSC pull server, an on-premises pull server or actively pushed to your infrastructure. Add one "Publish Build Artifact" step for each of the following artifact types: |DisplayName|Path|Artifact name| |---|---|---| |MOF|$(Build.SourcesDirectory)\DSC\BuildOutput\MOF|MOF| |Meta.MOF|$(Build.SourcesDirectory)\DSC\BuildOutput\MetaMof|MetaMof| |Modules|$(Build.SourcesDirectory)\DSC\BuildOutput\CompressedModules|CompressedModules| |RSOP|$(Build.SourcesDirectory)\DSC\BuildOutput\RSOP|RSOP| -9. At the moment, our build has no triggers. Navigate to the 'Triggers' tab and enable the continuous integration trigger. The branch filters should include 'master' and 'dev' or just '*'. Setting up a CI trigger enables your project to be built every time someone checks in changes to code. This can be a new branch, a pull request from a fork or code committed to 'master' or 'dev'. -10. Once done, just select "Save & queue" to kick off your first infrastructure build. In the next 'Run pipeline dialog' just take the defaults and click on 'Save and run.
Once you have done this, the next page tells you about 'Preparing an agent for the job' and 'Waiting for the request to be queued'. Lie back and wait for the artifacts to be built. - > Note: If you create and / or compile software on a dedicated development machine or your personal computer, you pile up a lot of dependencies: Programs, helper tools, DLLs, PowerShell modules, etc. All these things may be required to run you code. In the previous task we have introduced you to [PSDepend](https://github.com/RamblingCookieMonster/PSDepend). This helper module makes sure that we have all the dependencies downloaded that are defined in the \*PSDepend*.psd1 file(s). If your software can be build on a standard build worker, it can be build everywhere and does not have any unwanted and undocumented dependencies. -11. Hopefully each build step is green. If the job is finished, you have the 'Artifacts' button in the upper right corner. Explore the build output a little while and move on to the next exercise once you are satisfied. Also quite interesting are the test results that you may want to examine. +1. Hopefully each build step is green. If the job is finished, you have the 'Artifacts' button in the upper right corner. Explore the build output for a little while and move on to the next exercise once you are satisfied. Also quite interesting are the test results that you may want to examine. >Note: All successful tests are hidden, only failed ones are shown by default. Just remove the filter to get the full view. diff --git a/Exercises/Task3/Exercise3.md index 92dbab88..96d59c14 100644 --- a/Exercises/Task3/Exercise3.md +++ b/Exercises/Task3/Exercise3.md @@ -12,39 +12,39 @@ This task assumes that you have access to dev.azure.com in order to create your With your CI trigger set up from the previous exercise, every change that is committed to the repository invariably starts a new build process - including all test cases. Try it yourself! -1. There are two ways to edit a file. Please change the location of the node 'DSCFile01' using one of the following methods: +1. There are two ways to edit a file. Please change the location of the node `DSCFile01` using one of the following methods: ### The Azure DevOps web editor - The quick one is just using the web editor in Azure DevOps. This is ok for a small test but this method does not scale and you can easily do mistakes. + The quick one is just using the web editor in Azure DevOps. This is ok for a small test but this method does not scale and you can easily make mistakes. ![UI file editing](./img/CommitChange.png) - If you have changed the location, press the 'Commit' button. By default, this change goes to the 'dev' branch. + If you have changed the location, press the 'Commit' button. By default, this change goes to the 'main' branch. ### Working with a local clone - If you really want to work with a source code repository, first thing is to create a local clone. You can do all you changes locally, review them and push them back to the repository. + If you really want to work with a source code repository, the first thing is to create a local clone. You can make all your changes locally, review them and push them back to the repository. - In Visual Studio Code you can clone a repository by pressing ```F1``` and navigating to the command 'Git: Clone'. + In Visual Studio Code you can clone a repository by pressing `F1` and navigating to the command 'Git: Clone'.
You need to provide the repository's URL and a local target folder. - You will get the URL from the Azure DevOps Repos -> Files page or you can start the clone project right from there. + You will get the URL from the Azure DevOps Repos -> Files page or you can start the clone process right from there. ![Clone a repository](img/CloneRepository.png) - After the cloning is finished, you will be asked if 'you [would] like to open the cloned repository, or add it to the current workspace?'. Please choose 'Open'. + After the cloning is finished, you will be asked if 'you [would] like to open the cloned repository, or add it to the current workspace?'. Please choose 'Open'. - After you have altered the location in the 'DSCFile01.yml', save the file and see the change in the source control area: + After you have altered the location in the `DSCFile01.yml`, save the file and see the change in the source control area: - >**Note: Please monitor the changes in the yellow marked are.** + > **Note: Please monitor the changes in the yellow-marked area.** ![Commit a local change](img/CommitLocalChange.png) - Enter a commit message and then click the 'Commit' button. You will be asked 'Would you like to automatically stage all your changes and commit them directly? ', please click 'Yes'. + Enter a commit message and then click the 'Commit' button. You will be asked 'Would you like to automatically stage all your changes and commit them directly?', please click 'Yes'. - Now you have done a change in the local repository. This is indicated in the area at the left bottom that was marked yellow in the screenshot. You see 0 incoming and 1 outgoing change. If you click on that area, you will be asked "This action will push and pull commits to and from 'origin/dev'". Click yes, to push the changes to the Azure DevOps repo. + Now you have made a change in the local repository. This is indicated in the area at the bottom left that was marked yellow in the screenshot. You see 0 incoming and 1 outgoing change. If you click on that area, you will be asked "This action will push and pull commits to and from 'origin/main'". Click 'Yes' to push the changes to the Azure DevOps repo. - >**Info:** Alternatively you could have used the git command to do the same on the command line like this: + > **Info:** Alternatively, you could have used the git command to do the same on the command line like this: ```powershell git add . diff --git a/Exercises/Task3/Exercise4.md index a368a62b..d9e73af1 100644 --- a/Exercises/Task3/Exercise4.md +++ b/Exercises/Task3/Exercise4.md @@ -27,13 +27,13 @@ The template selection will pop up. Select "Empty job" on the very top. > Note: The names may vary depending on the name you have given to the project. -2. Rename 'Stage 1' to 'Dev'. Add two additional stages (environemnts), called 'Test' and 'Production', each with an empty job. +1. Rename 'Stage 1' to 'Dev'. Add two additional stages (environments), called 'Test' and 'Production', each with an empty job. The design of the pipeline depends very much on where it should operate. Your build steps might have included copying the files to an Azure blob storage instead of an on-premises file share. This would be the recommended way in case you want your Azure Automation DSC pull server to host the MOF files. The release step would be to execute New-AzAutomationModule with the URIs of your uploaded, compressed modules.
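To make that last point a bit more concrete, here is a minimal, hedged sketch of such a release step using `New-AzAutomationModule` from the Az.Automation module. The resource group, Automation account name and blob URI are placeholders invented for illustration and are not values used by this project; the exact parameters may also differ between Az.Automation versions.

```powershell
# Sketch only - every name and the SAS-protected blob URI below are placeholders.
# Requires the Az.Automation module and an authenticated session (Connect-AzAccount).
$moduleUploadParams = @{
    ResourceGroupName     = 'rg-dscworkshop-dev'      # placeholder
    AutomationAccountName = 'aa-dscworkshop-dev'      # placeholder
    Name                  = 'DscConfig.Demo'
    ContentLinkUri        = 'https://example.blob.core.windows.net/modules/DscConfig.Demo.zip?<sasToken>'
}
New-AzAutomationModule @moduleUploadParams

# Compiled MOF files can be published in a comparable way, for example with
# Import-AzAutomationDscNodeConfiguration (again, all values would be placeholders).
```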
For now, we will only upload the MOF files to Azure Automation, but you can add a similar release task for uploading the modules for example. -3. Open your first stage, dev, and navigate to variables. For the dev stage, we want for example to deploy to the dev automation account. Variables you add here are available as environment variables in the release process. The environment variables are used later by the PowerShell scripts. By selecting the appropriate scope, you can control the variable contents for each stage. +1. Open your first stage, dev, and navigate to variables. For the dev stage, we want for example to deploy to the dev automation account. Variables you add here are available as environment variables in the release process. The environment variables are used later by the PowerShell scripts. By selecting the appropriate scope, you can control the variable contents for each stage. Please add the following variables to the 'Dev' stage: @@ -47,7 +47,7 @@ For now, we will only upload the MOF files to Azure Automation, but you can add --- -4. Add a new 'Azure PowerShell' task for uploading the MOF files to the Azure automation account. +1. Add a new 'Azure PowerShell' task for uploading the MOF files to the Azure automation account. > Note: In the task select your subscription and authorize Azure DevOps to access your subscription. @@ -82,7 +82,7 @@ For now, we will only upload the MOF files to Azure Automation, but you can add --- -5. Add a new 'Azure PowerShell' task for uploading the compressed modules to the Azure automation account +1. Add a new 'Azure PowerShell' task for uploading the compressed modules to the Azure automation account This is almost the same as for the previous task for uploading the MOF files but this time we want to upload the compressed modules. These modules are required to apply the DSC configuration on the nodes. After a node has downloaded its configuration (MOF) from the automation account, it will look for the modules referenced in the configuration. If the automation account does not provide the required modules, applying the configuration will fail. @@ -158,7 +158,7 @@ For now, we will only upload the MOF files to Azure Automation, but you can add You can trigger a new release either manually or automatically after a build has successfully finished. If you have an automation account set up, you can try it out! Simply set up your build variables properly and observe. -6. Examine what the release process has done in your Azure Automation Account. Please click on your Azure Automation Account and then on the menu 'Configuration Management -> State configuration (DSC)'. So far, no nodes have been onboarded yet so the numbers here are not that interesting. But if you click on 'Configurations', things get more interesting. +1. Examine what the release process has done in your Azure Automation Account. Please click on your Azure Automation Account and then on the menu 'Configuration Management -> State configuration (DSC)'. So far, no nodes have been onboarded yet so the numbers here are not that interesting. But if you click on 'Configurations', things get more interesting. ![Azure Automation Account MOF Files](img/DscResourcesOnAzureAA.png) diff --git a/Exercises/Task3/Exercise5.md b/Exercises/Task3/Exercise5.md index c07f99ae..1e564528 100644 --- a/Exercises/Task3/Exercise5.md +++ b/Exercises/Task3/Exercise5.md @@ -14,10 +14,10 @@ This task assumes that you have access to dev.azure.com in order to create your 1. 
Please create a machine in Azure or on your local Hyper-V named 'DSCFile01'. The operating system should be Windows 2016 or 2019. -2. Please open an PowerShell ISE on the new virtual machine and paste the following code into a new empty script window: +1. Please open a PowerShell ISE on the new virtual machine and paste the following code into a new empty script window: ```powershell - # The DSC configuration that will generate metaconfigurations + # The DSC configuration that will generate meta configurations [DscLocalConfigurationManager()] Configuration DscMetaConfigs { @@ -104,7 +104,7 @@ This task assumes that you have access to dev.azure.com in order to create your } } - # Create the metaconfigurations + # Create the meta configurations # NOTE: DSC Node Configuration names are case sensitive in the portal. # TODO: edit the below as needed for your use case $Params = @{ @@ -132,15 +132,15 @@ This task assumes that you have access to dev.azure.com in order to create your ![Azure Automation Account Keys](img/AzureAutomationAccountKeys.png) -3. Please execute the script by pressing the run button or ```F5```. The result is a Meta.MOF file that configures the machines DSC local configuration manager to pull configurations from the Azure Automation DSC pull server. +1. Please execute the script by pressing the run button or `F5`. The result is a Meta.MOF file that configures the machine's DSC Local Configuration Manager to pull configurations from the Azure Automation DSC pull server. -4. Use the following command to actually apply the Meta.MOF file where the path is the one that was returned in the previous activity. +1. Use the following command to actually apply the Meta.MOF file, where the path is the one that was returned in the previous activity. ```powershell Set-DscLocalConfigurationManager -Path C:\Users\Install.contoso\DscMetaConfigs -Verbose ``` - The output the command ```Set-DscLocalConfigurationManager``` returns should look like this: + The output the command `Set-DscLocalConfigurationManager` returns should look like this: ```code VERBOSE: Performing the operation "Start-DscConfiguration: SendMetaConfigurationApply" on target "MSFT_DSCLocalConfigurationManager". @@ -164,7 +164,7 @@ This task assumes that you have access to dev.azure.com in order to create your VERBOSE: Set-DscLocalConfigurationManager finished in 1.053 seconds. ``` -5. In max 30 minutes the node will pull the new configuration from the Azure pull server. If you don't want to wait that long, use this command: +1. Within a maximum of 30 minutes, the node will pull the new configuration from the Azure pull server.
If you don't want to wait that long, use this command: ```powershell Update-DscConfiguration -Wait -Verbose diff --git a/Lab/20 Lab Customizations.ps1 b/Lab/20 Lab Customizations.ps1 index 9751b497..7ba9b7b5 100644 --- a/Lab/20 Lab Customizations.ps1 +++ b/Lab/20 Lab Customizations.ps1 @@ -173,8 +173,8 @@ Invoke-LabCommand -ActivityName 'Create link on AzureDevOps desktop' -ComputerNa $shortcut.TargetPath = "https://$($devOpsServer):$($originalPort)/AutomatedLab/DscWorkshop" $shortcut.Save() - $shortcut = $shell.CreateShortcut("$desktopPath\CommonTasks Project.url") - $shortcut.TargetPath = "https://$($devOpsServer):$($originalPort)/AutomatedLab/CommonTasks" + $shortcut = $shell.CreateShortcut("$desktopPath\DscConfig.Demo Project.url") + $shortcut.TargetPath = "https://$($devOpsServer):$($originalPort)/AutomatedLab/DscConfig.Demo" $shortcut.Save() $shortcut = $shell.CreateShortcut("$desktopPath\PowerShell Feed.url") diff --git a/Lab/31 New Release Pipeline CommonTasks.ps1 b/Lab/31 New Release Pipeline CommonTasks.ps1 index 72de6dcf..34d1e4f3 100644 --- a/Lab/31 New Release Pipeline CommonTasks.ps1 +++ b/Lab/31 New Release Pipeline CommonTasks.ps1 @@ -3,7 +3,7 @@ Import-Lab -Name DscWorkshop -NoValidation -ErrorAction Stop } -$projectGitUrl = 'https://github.com/DscCommunity/CommonTasks' +$projectGitUrl = 'https://github.com/raandree/DscConfig.Demo' $projectName = $projectGitUrl.Substring($projectGitUrl.LastIndexOf('/') + 1) $collectionName = 'AutomatedLab' $gitVersion = @{ @@ -152,7 +152,7 @@ Invoke-LabCommand -ActivityName "Installing 'GitVersion' extension" -ComputerNam Invoke-LabCommand -ActivityName 'Set Repository and create Build Pipeline' -ScriptBlock { - Set-Location -Path C:\Git\CommonTasks + Set-Location -Path C:\Git\DscConfig.Demo git checkout main *>$null Remove-Item -Path '.\azure-pipelines.yml' (Get-Content -Path '.\azure-pipelines On-Prem.yml' -Raw) -replace 'RepositoryUri_WillBeChanged', $nugetFeed.NugetV2Url | Set-Content -Path .\azure-pipelines.yml diff --git a/Lab/32 New Release Pipeline DscWorkshop.ps1 b/Lab/32 New Release Pipeline DscWorkshop.ps1 index 0ac231b6..5c0a8b95 100644 --- a/Lab/32 New Release Pipeline DscWorkshop.ps1 +++ b/Lab/32 New Release Pipeline DscWorkshop.ps1 @@ -3,7 +3,7 @@ } $projectName = 'DscWorkshop' -$projectGitUrl = 'https://github.com/DscCommunity/DscWorkshop' +$projectGitUrl = 'https://github.com/raandree/DscWorkshop' $collectionName = 'AutomatedLab' $lab = Get-Lab @@ -47,5 +47,5 @@ Invoke-LabCommand -ActivityName 'Set RepositoryUri and create Build Pipeline' -S Write-ScreenInfo done # in case you screw something up -#Checkpoint-LabVM -All -SnapshotName AfterPipelines +Checkpoint-LabVM -All -SnapshotName AfterPipelines Write-Host "3. 
- Creating Snapshot 'AfterPipelines'" -ForegroundColor Magenta diff --git a/Lab/Reports/harmonized/NodeAdditionalInfo.rdl b/Lab/Reports/NodeAdditionalInfo.rdl similarity index 100% rename from Lab/Reports/harmonized/NodeAdditionalInfo.rdl rename to Lab/Reports/NodeAdditionalInfo.rdl diff --git a/Lab/Reports/harmonized/NodeConfigurationData.rdl b/Lab/Reports/NodeConfigurationData.rdl similarity index 100% rename from Lab/Reports/harmonized/NodeConfigurationData.rdl rename to Lab/Reports/NodeConfigurationData.rdl diff --git a/Lab/Reports/harmonized/NodeMetaData.rdl b/Lab/Reports/NodeMetaData.rdl similarity index 100% rename from Lab/Reports/harmonized/NodeMetaData.rdl rename to Lab/Reports/NodeMetaData.rdl diff --git a/Lab/Reports/harmonized/NodeStatusOverview.rdl b/Lab/Reports/NodeStatusOverview.rdl similarity index 100% rename from Lab/Reports/harmonized/NodeStatusOverview.rdl rename to Lab/Reports/NodeStatusOverview.rdl diff --git a/Lab/Reports/AdditionalNodeInformation.rdl b/Lab/Reports/old/AdditionalNodeInformation.rdl similarity index 100% rename from Lab/Reports/AdditionalNodeInformation.rdl rename to Lab/Reports/old/AdditionalNodeInformation.rdl diff --git a/Lab/Reports/ConfigurationData.rdl b/Lab/Reports/old/ConfigurationData.rdl similarity index 100% rename from Lab/Reports/ConfigurationData.rdl rename to Lab/Reports/old/ConfigurationData.rdl diff --git a/Lab/Reports/DSC Apply Status.rdl b/Lab/Reports/old/DSC Apply Status.rdl similarity index 100% rename from Lab/Reports/DSC Apply Status.rdl rename to Lab/Reports/old/DSC Apply Status.rdl diff --git a/Lab/Reports/Node Metadata.rdl b/Lab/Reports/old/Node Metadata.rdl similarity index 100% rename from Lab/Reports/Node Metadata.rdl rename to Lab/Reports/old/Node Metadata.rdl diff --git a/Lab/Reports/NodeStatusSimple.rdl b/Lab/Reports/old/NodeStatusSimple.rdl similarity index 100% rename from Lab/Reports/NodeStatusSimple.rdl rename to Lab/Reports/old/NodeStatusSimple.rdl diff --git a/Lab/Reports/NotInDesiredState.rdl b/Lab/Reports/old/NotInDesiredState.rdl similarity index 100% rename from Lab/Reports/NotInDesiredState.rdl rename to Lab/Reports/old/NotInDesiredState.rdl diff --git a/README.md b/README.md index eb07a5bc..951da151 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ In the past few years many projects using DSC have not produced the desired outp One of the goals of this project is to manage the complexity that comes with DSC. The needs to be proper tooling that solves these issues: - **Configuration Management** must be flexible and scalable. The DSC documentation is technically correct but does not lead people the right way. If one follows [Using configuration data in DSC](https://docs.microsoft.com/en-us/powershell/scripting/dsc/configurations/configData?view=powershell-7) and [Separating configuration and environment data](https://docs.microsoft.com/en-us/powershell/scripting/dsc/configurations/separatingenvdata?view=powershell-7), the outcome will be unmanageable if the configuration data gets more complex like dealing with roles, differences between locations and / or environments.. The solution to this problem is [Datum](https://github.com/gaelcolas/Datum), which is described in detail in the [Exercises](./Exercises). -- Building the solution and creating the artifacts requires a **Single Build Script**. This get very difficult if the build process has any manual steps or preparations that need to be done. 
After you have done your changes and want to create new artifacts, running the [Build.ps1 script](./DSC/Build.ps1). This build script runs locally or inside a release pipeline (tested on Azure DevOps, Azure DevOps Sever, AppVeyor, GitLab). +- Building the solution and creating the artifacts requires a **Single Build Script**. This get very difficult if the build process has any manual steps or preparations that need to be done. After you have done your changes and want to create new artifacts, running the [Build.ps1 script](./build.ps1). This build script runs locally or inside a release pipeline (tested on Azure DevOps, Azure DevOps Sever, AppVeyor, GitLab). - The lack of **Dependency Resolution** makes it impossible to move a solution from local build to a CI/CD pipeline. Many DSC solutions require downloading a bunch of dependencies prior being able to run the build. This project uses [PSDepend](https://github.com/RamblingCookieMonster/PSDepend/) to download all required resources from either the PowerShell gallery or your internal repository feed. - **Automated Testing** is essential to verify the integrity of the configuration data. This project uses [Pester](https://pester.dev/) for this. Additionally, the artifacts must be tested in the development as well as the test environment prior deploying them to them to the production environment. This process should be fully automated as well. @@ -61,4 +61,4 @@ The lab script are in [Lab](./Lab). ## YAML Reference Documentation -The [YAML reference documentation](https://github.com/dsccommunity/CommonTasks/blob/dev/doc/README.adoc) is located in the ./doc subfolder of the [CommonTasks](https://github.com/dsccommunity/CommonTasks) repository. +The [YAML reference documentation](https://github.com/raandree/DscConfig.Demo/tree/main/doc/README.adoc) is located in the `./doc` subfolder of the [DscConfig.Demo](https://github.com/raandree/DscConfig.Demo) repository. 
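As a rough illustration of the automated-testing point above (and explicitly not one of the tests shipped in this repository's `tests` folder), a minimal Pester 5 sketch could check that every node definition under `source\AllNodes` parses cleanly. It assumes Pester 5+ and the `powershell-yaml` module, which provides `ConvertFrom-Yaml`, are available.

```powershell
# Minimal sketch, assuming Pester 5+ and the powershell-yaml module are installed.
BeforeDiscovery {
    # Collect every node definition file as a Pester test case.
    $nodeFiles = Get-ChildItem -Path .\source\AllNodes -Filter *.yml -Recurse |
        ForEach-Object { @{ Name = $_.Name; Path = $_.FullName } }
}

Describe 'Node configuration data' {
    It 'parses <Name> as valid YAML' -TestCases $nodeFiles {
        { Get-Content -Path $Path -Raw | ConvertFrom-Yaml } | Should -Not -Throw
    }
}
```

Saved somewhere under the `tests` folder, a file like this would typically be picked up by the build's Pester run, but treat the snippet as a starting point rather than as part of the project.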
diff --git a/RequiredModules.psd1 b/RequiredModules.psd1 index d054ee57..50ffb11a 100644 --- a/RequiredModules.psd1 +++ b/RequiredModules.psd1 @@ -18,11 +18,11 @@ Sampler = 'latest' 'Sampler.GitHubTasks' = 'latest' PowerShellForGitHub = 'latest' - 'Sampler.DscPipeline' = '0.2.0-preview0001' + 'Sampler.DscPipeline' = 'latest' MarkdownLinkCheck = 'latest' 'DscResource.AnalyzerRules' = 'latest' DscBuildHelpers = 'latest' - Datum = '0.39.0' + Datum = 'latest' ProtectedData = 'latest' 'Datum.ProtectedData' = 'latest' 'Datum.InvokeCommand' = 'latest' @@ -33,41 +33,16 @@ 'DscResource.Test' = 'latest' # Composites - 'DscConfig.Demo' = '0.7.1-preview0002' + 'DscConfig.Demo' = '0.8.0' - # DSC Resources + #DSC Resources xPSDesiredStateConfiguration = '9.1.0' ComputerManagementDsc = '8.5.0' NetworkingDsc = '8.2.0' JeaDsc = '0.7.2' - XmlContentDsc = '0.0.1' xWebAdministration = '3.2.0' + FileSystemDsc = '1.1.1' SecurityPolicyDsc = '2.10.0.0' - StorageDsc = '5.0.1' - Chocolatey = '0.0.79' - ActiveDirectoryDsc = '6.0.1' - DfsDsc = '4.4.0.0' - WdsDsc = '0.11.0' - xDhcpServer = '3.0.0' xDscDiagnostics = '2.8.0' - DnsServerDsc = '3.0.0' - xFailoverCluster = '1.16.0' - GPRegistryPolicyDsc = '1.2.0' - AuditPolicyDsc = '1.4.0.0' - SharePointDSC = '4.8.0' - xExchange = '1.33.0' - SqlServerDsc = '15.2.0' - UpdateServicesDsc = '1.2.1' - xWindowsEventForwarding = '1.0.0.0' - OfficeOnlineServerDsc = '1.5.0' - xBitlocker = '1.4.0.0' - ActiveDirectoryCSDsc = '5.0.0' - 'xHyper-V' = '3.17.0.0' - DSCR_PowerPlan = '1.3.0' - FileSystemDsc = '1.1.1' - PackageManagement = '1.4.7' - PowerShellGet = '2.2.5' - ConfigMgrCBDsc = '3.0.0' - MmaDsc = '1.3.0' - CertificateDsc = '5.1.0' + } diff --git a/azure-pipelines.yml b/azure-pipelines.yml index d969f248..cef80ad3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -25,12 +25,15 @@ stages: vmImage: 'windows-2019' steps: - - task: GitVersion@5 - name: gitVersion - displayName: 'Evaluate Next Version' - inputs: - runtime: 'core' - configFilePath: 'GitVersion.yml' + - pwsh: | + dotnet tool install --global GitVersion.Tool + $gitVersionObject = dotnet-gitversion | ConvertFrom-Json + $gitVersionObject.PSObject.Properties.ForEach{ + Write-Host -Object "Setting Task Variable '$($_.Name)' with value '$($_.Value)'." 
+ Write-Host -Object "##vso[task.setvariable variable=$($_.Name);]$($_.Value)" + } + Write-Host -Object "##vso[build.updatebuildnumber]$($gitVersionObject.FullSemVer)" + displayName: Calculate ModuleVersion (GitVersion) - task: PowerShell@2 name: build @@ -40,7 +43,7 @@ stages: arguments: '-ResolveDependency -tasks build' pwsh: false env: - ModuleVersion: $(gitVersion.NuGetVersionV2) + ModuleVersion: $(NuGetVersionV2) - task: PowerShell@2 name: pack diff --git a/source/AllNodes/Dev/DSCFile01.yml b/source/AllNodes/Dev/DSCFile01.yml index 2e064615..e94d3e0b 100644 --- a/source/AllNodes/Dev/DSCFile01.yml +++ b/source/AllNodes/Dev/DSCFile01.yml @@ -1,13 +1,13 @@ -NodeName: DSCFile01 -Environment: Dev +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: FileServer -Description: File Server in Dev +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt Baseline: Server ComputerSettings: - Name: DSCFile01 - Description: File Server in Dev + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,15 +25,13 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCFile01 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Dev\DscFile01 + - '[x={ Get-DatumSourceFile -Path $File } =]' FilesAndFolders: Items: - DestinationPath: Z:\DoesNotWork Type: Directory - - DestinationPath: C:\Test\211209 DscWorkshop - Type: Directory diff --git a/source/AllNodes/Dev/DSCWeb01.yml b/source/AllNodes/Dev/DSCWeb01.yml index 2e0c0dc6..2dbb34ec 100644 --- a/source/AllNodes/Dev/DSCWeb01.yml +++ b/source/AllNodes/Dev/DSCWeb01.yml @@ -1,13 +1,13 @@ -NodeName: DSCWeb01 -Environment: Dev +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: WebServer -Description: Web Server in Dev +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Singapore Baseline: Server ComputerSettings: - Name: DSCWeb01 - Description: Web Server in Dev + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,8 +25,8 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCWeb01 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Dev\DscWeb01 + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/AllNodes/Prod/DSCFile03.yml b/source/AllNodes/Prod/DSCFile03.yml index 9eff8ee8..5abddfc8 100644 --- a/source/AllNodes/Prod/DSCFile03.yml +++ b/source/AllNodes/Prod/DSCFile03.yml @@ -1,13 +1,13 @@ -NodeName: DSCFile03 -Environment: Prod +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: FileServer -Description: File Server in Prod +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt Baseline: Server ComputerSettings: - Name: DSCFile03 - Description: File Server in Prod + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,8 +25,8 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCFile03 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Prod\DscFile03 + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/AllNodes/Prod/DSCWeb03.yml b/source/AllNodes/Prod/DSCWeb03.yml index 
6b3d597a..deb1ea5f 100644 --- a/source/AllNodes/Prod/DSCWeb03.yml +++ b/source/AllNodes/Prod/DSCWeb03.yml @@ -1,13 +1,13 @@ -NodeName: DSCWeb03 -Environment: Prod +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: WebServer -Description: Web Server in Prod +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Singapore Baseline: Server ComputerSettings: - Name: DSCWeb03 - Description: Web Server in Prod + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,8 +25,8 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCWeb03 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Prod\DscWeb03 + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/AllNodes/Test/DSCFile02.yml b/source/AllNodes/Test/DSCFile02.yml index cdb87c57..eea0a6c9 100644 --- a/source/AllNodes/Test/DSCFile02.yml +++ b/source/AllNodes/Test/DSCFile02.yml @@ -1,13 +1,13 @@ -NodeName: DSCFile02 -Environment: Test +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: FileServer -Description: File Server in Test +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Frankfurt Baseline: Server ComputerSettings: - Name: DSCFile02 - Description: File Server in Test + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,8 +25,8 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCFile02 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Test\DscFile02 + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/AllNodes/Test/DSCWeb02.yml b/source/AllNodes/Test/DSCWeb02.yml index e22c21be..6b33829d 100644 --- a/source/AllNodes/Test/DSCWeb02.yml +++ b/source/AllNodes/Test/DSCWeb02.yml @@ -1,13 +1,13 @@ -NodeName: DSCWeb02 -Environment: Test +NodeName: '[x={ $Node.Name }=]' +Environment: '[x={ $File.Directory.BaseName } =]' Role: WebServer -Description: Web Server in Test +Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' Location: Singapore Baseline: Server ComputerSettings: - Name: DSCWeb02 - Description: Web Server in Test + Name: '[x={ $Node.NodeName }=]' + Description: '[x= "$($Node.Role) in $($Node.Environment)" =]' NetworkIpConfiguration: Interfaces: @@ -25,8 +25,8 @@ PSDscAllowDomainUser: True LcmConfig: ConfigurationRepositoryWeb: Server: - ConfigurationNames: DSCWeb02 + ConfigurationNames: '[x={ $Node.NodeName }=]' DscTagging: Layers: - - AllNodes\Test\DscWeb02 + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Baselines/DscLcm.yml b/source/Baselines/DscLcm.yml index 88257c63..a6814187 100644 --- a/source/Baselines/DscLcm.yml +++ b/source/Baselines/DscLcm.yml @@ -7,7 +7,7 @@ Configurations: DscTagging: Version: 0.3.0 Layers: - - Baseline\DscLcm + - '[x={ Get-DatumSourceFile -Path $File } =]' DscLcmMaintenanceWindows: MaintenanceWindows: diff --git a/source/Baselines/Security.yml b/source/Baselines/Security.yml index 07e6adf1..1706501a 100644 --- a/source/Baselines/Security.yml +++ b/source/Baselines/Security.yml @@ -12,4 +12,4 @@ SecurityBase: DscTagging: Layers: - - Baselines\Security + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Baselines/Server.yml b/source/Baselines/Server.yml index d6f6fceb..b8a30fdc 100644 --- 
a/source/Baselines/Server.yml +++ b/source/Baselines/Server.yml @@ -3,6 +3,15 @@ Configurations: - NetworkIpConfiguration - WindowsEventLogs +NetworkIpConfiguration: + Interfaces: + - InterfaceAlias: DscWorkshop 0* + Prefix: 24 + Gateway: 192.168.111.50 + DnsServer: + - 192.168.111.10 + DisableNetbios: true + WindowsEventLogs: Logs: - LogName: System @@ -20,12 +29,11 @@ WindowsEventLogs: DependsOn: '[ComputerSettings]ComputerSettings' ComputerSettings: - DomainName: contoso.com - JoinOU: CN=Computers,DC=contoso,dc=com - #contoso\install : Somepass1 - Credential: '[ENC=PE9ianMgVmVyc2lvbj0iMS4xLjAuMSIgeG1sbnM9Imh0dHA6Ly9zY2hlbWFzLm1pY3Jvc29mdC5jb20vcG93ZXJzaGVsbC8yMDA0LzA0Ij4NCiAgPE9iaiBSZWZJZD0iMCI+DQogICAgPFROIFJlZklkPSIwIj4NCiAgICAgIDxUPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDdXN0b21PYmplY3Q8L1Q+DQogICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgIDwvVE4+DQogICAgPE1TPg0KICAgICAgPE9iaiBOPSJLZXlEYXRhIiBSZWZJZD0iMSI+DQogICAgICAgIDxUTiBSZWZJZD0iMSI+DQogICAgICAgICAgPFQ+U3lzdGVtLk9iamVjdFtdPC9UPg0KICAgICAgICAgIDxUPlN5c3RlbS5BcnJheTwvVD4NCiAgICAgICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgICAgICA8L1ROPg0KICAgICAgICA8TFNUPg0KICAgICAgICAgIDxPYmogUmVmSWQ9IjIiPg0KICAgICAgICAgICAgPFROUmVmIFJlZklkPSIwIiAvPg0KICAgICAgICAgICAgPE1TPg0KICAgICAgICAgICAgICA8UyBOPSJIYXNoIj4xRUZBNTk3QUVERTAyNkNCMDg2RTY4NTIwRjVDNjM2QTc3Q0U1QTNDMDA4RjhENkUxQjBEQjYyMkM5NzFCRkEyPC9TPg0KICAgICAgICAgICAgICA8STMyIE49Ikl0ZXJhdGlvbkNvdW50Ij41MDAwMDwvSTMyPg0KICAgICAgICAgICAgICA8QkEgTj0iS2V5Ij5KbHZqZDBsc2F0VDZxdFNBRXlQSGFEelhrRVJLVS82Z29rQ3QzZm4zRE1VNndSbTFiMy9RcFRBTDRBNk9BSXlsPC9CQT4NCiAgICAgICAgICAgICAgPEJBIE49Ikhhc2hTYWx0Ij5IMzYxc2ZzUENEVXk1Ri92NUN2eno1VTVQVk5KNjhuUWtQTFd5RVltRU44PTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJTYWx0Ij5jcHE3V1ZFRHVwWVhVbVlDbGVRWE0yVWkxN2lFa0s2ZTFrWTYyT2ZONXRJPTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJJViI+Q1VsZWIrbStjbHUvYVk0MmE1aWVTNWlwQWl5NEo1T2hZTmVUR1BSUkhCST08L0JBPg0KICAgICAgICAgICAgPC9NUz4NCiAgICAgICAgICA8L09iaj4NCiAgICAgICAgPC9MU1Q+DQogICAgICA8L09iaj4NCiAgICAgIDxCQSBOPSJDaXBoZXJUZXh0Ij5oLzlodnE5S2ViSnROUmE2NWc5NDVmUDlLU0FwRmYxZ3loaHBYc0s2TFBPZTF3WVRRaFFaTGRxN2lnYlJhWloydU9EeWF0d3diUFVGbFZRVWd2Tm5YWTUxNjUrbjhGYkZOUjd6bTlBWHVNVT08L0JBPg0KICAgICAgPEJBIE49IkhNQUMiPnE5R3BaZlp5U3pHaDByME1uaE1RM3V3S3ZudkdobnJxeFVneG9jS0RRUkU9PC9CQT4NCiAgICAgIDxTIE49IlR5cGUiPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDcmVkZW50aWFsPC9TPg0KICAgIDwvTVM+DQogIDwvT2JqPg0KPC9PYmpzPg==]' + DomainName: '[x={ $Datum.Global.Domain.DomainFqdn }=]' + JoinOU: '[x= "CN=Computers,$($Datum.Global.Domain.DomainDn)" =]' + Credential: '[x={ $Datum.Global.Domain.DomainJoinCredentials }=]' TimeZone: Greenwich Standard Time DscTagging: Layers: - - Baselines\Server + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Datum.yml b/source/Datum.yml index 58830571..6019b26f 100644 --- a/source/Datum.yml +++ b/source/Datum.yml @@ -7,10 +7,15 @@ ResolutionPrecedence: - Baselines\$($Node.Baseline) - Baselines\DscLcm +DatumHandlersThrowOnError: true DatumHandlers: Datum.ProtectedData::ProtectedDatum: CommandOptions: PlainTextPassword: SomeSecret + Datum.InvokeCommand::InvokeCommand: + SkipDuringLoad: true + +DscLocalConfigurationManagerKeyName: LcmConfig default_lookup_options: MostSpecific @@ -48,8 +53,14 @@ lookup_options: tuple_keys: - Key - NetworkIpConfiguration: - merge_hash: deep + #NetworkIpConfiguration: + # merge_hash: deep + #NetworkIpConfiguration\Interfaces: + # #merge_basetype_array: Deep + # merge_hash_array: DeepTuple + # merge_options: + # tuple_keys: + # - InterfaceAlias ComputerSettings: merge_hash: deep diff --git 
a/source/Environment/Dev.yml b/source/Environment/Dev.yml index 9deaffa5..e2ec2d2f 100644 --- a/source/Environment/Dev.yml +++ b/source/Environment/Dev.yml @@ -1,18 +1,18 @@ DscTagging: - Environment: Dev + Environment: '[x={ $Node.Environment } =]' Layers: - - Environment\Dev + - '[x={ Get-DatumSourceFile -Path $File } =]' FilesAndFolders: Items: - - DestinationPath: C:\Test\Dev-Environment + - DestinationPath: '[x= "C:\Test\$($File.BaseName)-Environment" =]' Type: Directory RegistryValues: Values: - - Key: HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters + - Key: HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters ValueName: DBFlag ValueData: 545325055 ValueType: DWORD Ensure: Present - Force: true \ No newline at end of file + Force: true diff --git a/source/Environment/Prod.yml b/source/Environment/Prod.yml index 2422fd34..7e645c3b 100644 --- a/source/Environment/Prod.yml +++ b/source/Environment/Prod.yml @@ -1,9 +1,9 @@ DscTagging: - Environment: Prod + Environment: '[x={ $Node.Environment } =]' Layers: - - Environment\Prod + - '[x={ Get-DatumSourceFile -Path $File } =]' FilesAndFolders: Items: - - DestinationPath: C:\Test\Prod-Environment - Type: Directory \ No newline at end of file + - DestinationPath: '[x= "C:\Test\$($File.BaseName)-Environment" =]' + Type: Directory diff --git a/source/Environment/Test.yml b/source/Environment/Test.yml index 8f22c883..7e645c3b 100644 --- a/source/Environment/Test.yml +++ b/source/Environment/Test.yml @@ -1,9 +1,9 @@ DscTagging: - Environment: Test + Environment: '[x={ $Node.Environment } =]' Layers: - - Environment\Test + - '[x={ Get-DatumSourceFile -Path $File } =]' FilesAndFolders: Items: - - DestinationPath: C:\Test\Test-Environment - Type: Directory \ No newline at end of file + - DestinationPath: '[x= "C:\Test\$($File.BaseName)-Environment" =]' + Type: Directory diff --git a/source/Global/Domain.yml b/source/Global/Domain.yml new file mode 100644 index 00000000..20385374 --- /dev/null +++ b/source/Global/Domain.yml @@ -0,0 +1,6 @@ +DomainFqdn: contoso.com +DomainDn: dc=contoso,dc=com +#contoso\install : Somepass1 +DomainAdminCredentials: 
'[ENC=PE9ianMgVmVyc2lvbj0iMS4xLjAuMSIgeG1sbnM9Imh0dHA6Ly9zY2hlbWFzLm1pY3Jvc29mdC5jb20vcG93ZXJzaGVsbC8yMDA0LzA0Ij4NCiAgPE9iaiBSZWZJZD0iMCI+DQogICAgPFROIFJlZklkPSIwIj4NCiAgICAgIDxUPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDdXN0b21PYmplY3Q8L1Q+DQogICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgIDwvVE4+DQogICAgPE1TPg0KICAgICAgPE9iaiBOPSJLZXlEYXRhIiBSZWZJZD0iMSI+DQogICAgICAgIDxUTiBSZWZJZD0iMSI+DQogICAgICAgICAgPFQ+U3lzdGVtLk9iamVjdFtdPC9UPg0KICAgICAgICAgIDxUPlN5c3RlbS5BcnJheTwvVD4NCiAgICAgICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgICAgICA8L1ROPg0KICAgICAgICA8TFNUPg0KICAgICAgICAgIDxPYmogUmVmSWQ9IjIiPg0KICAgICAgICAgICAgPFROUmVmIFJlZklkPSIwIiAvPg0KICAgICAgICAgICAgPE1TPg0KICAgICAgICAgICAgICA8UyBOPSJIYXNoIj4xRUZBNTk3QUVERTAyNkNCMDg2RTY4NTIwRjVDNjM2QTc3Q0U1QTNDMDA4RjhENkUxQjBEQjYyMkM5NzFCRkEyPC9TPg0KICAgICAgICAgICAgICA8STMyIE49Ikl0ZXJhdGlvbkNvdW50Ij41MDAwMDwvSTMyPg0KICAgICAgICAgICAgICA8QkEgTj0iS2V5Ij5KbHZqZDBsc2F0VDZxdFNBRXlQSGFEelhrRVJLVS82Z29rQ3QzZm4zRE1VNndSbTFiMy9RcFRBTDRBNk9BSXlsPC9CQT4NCiAgICAgICAgICAgICAgPEJBIE49Ikhhc2hTYWx0Ij5IMzYxc2ZzUENEVXk1Ri92NUN2eno1VTVQVk5KNjhuUWtQTFd5RVltRU44PTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJTYWx0Ij5jcHE3V1ZFRHVwWVhVbVlDbGVRWE0yVWkxN2lFa0s2ZTFrWTYyT2ZONXRJPTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJJViI+Q1VsZWIrbStjbHUvYVk0MmE1aWVTNWlwQWl5NEo1T2hZTmVUR1BSUkhCST08L0JBPg0KICAgICAgICAgICAgPC9NUz4NCiAgICAgICAgICA8L09iaj4NCiAgICAgICAgPC9MU1Q+DQogICAgICA8L09iaj4NCiAgICAgIDxCQSBOPSJDaXBoZXJUZXh0Ij5oLzlodnE5S2ViSnROUmE2NWc5NDVmUDlLU0FwRmYxZ3loaHBYc0s2TFBPZTF3WVRRaFFaTGRxN2lnYlJhWloydU9EeWF0d3diUFVGbFZRVWd2Tm5YWTUxNjUrbjhGYkZOUjd6bTlBWHVNVT08L0JBPg0KICAgICAgPEJBIE49IkhNQUMiPnE5R3BaZlp5U3pHaDByME1uaE1RM3V3S3ZudkdobnJxeFVneG9jS0RRUkU9PC9CQT4NCiAgICAgIDxTIE49IlR5cGUiPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDcmVkZW50aWFsPC9TPg0KICAgIDwvTVM+DQogIDwvT2JqPg0KPC9PYmpzPg==]' +#contoso\install : Somepass1 +DomainJoinCredentials: 
'[ENC=PE9ianMgVmVyc2lvbj0iMS4xLjAuMSIgeG1sbnM9Imh0dHA6Ly9zY2hlbWFzLm1pY3Jvc29mdC5jb20vcG93ZXJzaGVsbC8yMDA0LzA0Ij4NCiAgPE9iaiBSZWZJZD0iMCI+DQogICAgPFROIFJlZklkPSIwIj4NCiAgICAgIDxUPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDdXN0b21PYmplY3Q8L1Q+DQogICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgIDwvVE4+DQogICAgPE1TPg0KICAgICAgPE9iaiBOPSJLZXlEYXRhIiBSZWZJZD0iMSI+DQogICAgICAgIDxUTiBSZWZJZD0iMSI+DQogICAgICAgICAgPFQ+U3lzdGVtLk9iamVjdFtdPC9UPg0KICAgICAgICAgIDxUPlN5c3RlbS5BcnJheTwvVD4NCiAgICAgICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgICAgICA8L1ROPg0KICAgICAgICA8TFNUPg0KICAgICAgICAgIDxPYmogUmVmSWQ9IjIiPg0KICAgICAgICAgICAgPFROUmVmIFJlZklkPSIwIiAvPg0KICAgICAgICAgICAgPE1TPg0KICAgICAgICAgICAgICA8UyBOPSJIYXNoIj4xRUZBNTk3QUVERTAyNkNCMDg2RTY4NTIwRjVDNjM2QTc3Q0U1QTNDMDA4RjhENkUxQjBEQjYyMkM5NzFCRkEyPC9TPg0KICAgICAgICAgICAgICA8STMyIE49Ikl0ZXJhdGlvbkNvdW50Ij41MDAwMDwvSTMyPg0KICAgICAgICAgICAgICA8QkEgTj0iS2V5Ij5KbHZqZDBsc2F0VDZxdFNBRXlQSGFEelhrRVJLVS82Z29rQ3QzZm4zRE1VNndSbTFiMy9RcFRBTDRBNk9BSXlsPC9CQT4NCiAgICAgICAgICAgICAgPEJBIE49Ikhhc2hTYWx0Ij5IMzYxc2ZzUENEVXk1Ri92NUN2eno1VTVQVk5KNjhuUWtQTFd5RVltRU44PTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJTYWx0Ij5jcHE3V1ZFRHVwWVhVbVlDbGVRWE0yVWkxN2lFa0s2ZTFrWTYyT2ZONXRJPTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJJViI+Q1VsZWIrbStjbHUvYVk0MmE1aWVTNWlwQWl5NEo1T2hZTmVUR1BSUkhCST08L0JBPg0KICAgICAgICAgICAgPC9NUz4NCiAgICAgICAgICA8L09iaj4NCiAgICAgICAgPC9MU1Q+DQogICAgICA8L09iaj4NCiAgICAgIDxCQSBOPSJDaXBoZXJUZXh0Ij5oLzlodnE5S2ViSnROUmE2NWc5NDVmUDlLU0FwRmYxZ3loaHBYc0s2TFBPZTF3WVRRaFFaTGRxN2lnYlJhWloydU9EeWF0d3diUFVGbFZRVWd2Tm5YWTUxNjUrbjhGYkZOUjd6bTlBWHVNVT08L0JBPg0KICAgICAgPEJBIE49IkhNQUMiPnE5R3BaZlp5U3pHaDByME1uaE1RM3V3S3ZudkdobnJxeFVneG9jS0RRUkU9PC9CQT4NCiAgICAgIDxTIE49IlR5cGUiPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDcmVkZW50aWFsPC9TPg0KICAgIDwvTVM+DQogIDwvT2JqPg0KPC9PYmpzPg==]' diff --git a/source/Locations/Frankfurt.yml b/source/Locations/Frankfurt.yml index fd2648a7..7ced6a06 100644 --- a/source/Locations/Frankfurt.yml +++ b/source/Locations/Frankfurt.yml @@ -3,9 +3,9 @@ Configurations: FilesAndFolders: Items: - - DestinationPath: C:\Test\Frankfurt + - DestinationPath: '[x= "C:\Test\$($File.BaseName)" =]' Type: Directory DscTagging: Layers: - - Locations\Frankfurt \ No newline at end of file + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Locations/London.yml b/source/Locations/London.yml index b5087d27..7ced6a06 100644 --- a/source/Locations/London.yml +++ b/source/Locations/London.yml @@ -3,9 +3,9 @@ Configurations: FilesAndFolders: Items: - - DestinationPath: C:\Test\London + - DestinationPath: '[x= "C:\Test\$($File.BaseName)" =]' Type: Directory DscTagging: Layers: - - Locations\London + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Locations/Singapore.yml b/source/Locations/Singapore.yml index 49cfb9ff..7ced6a06 100644 --- a/source/Locations/Singapore.yml +++ b/source/Locations/Singapore.yml @@ -3,9 +3,9 @@ Configurations: FilesAndFolders: Items: - - DestinationPath: C:\Test\Singapore + - DestinationPath: '[x= "C:\Test\$($File.BaseName)" =]' Type: Directory DscTagging: Layers: - - Locations\Singapore + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Locations/Tokio.yml b/source/Locations/Tokio.yml index 2da15db2..7ced6a06 100644 --- a/source/Locations/Tokio.yml +++ b/source/Locations/Tokio.yml @@ -3,9 +3,9 @@ Configurations: FilesAndFolders: Items: - - DestinationPath: C:\Test\Tokio + - DestinationPath: '[x= "C:\Test\$($File.BaseName)" =]' Type: Directory DscTagging: Layers: - - Locations\Tokio + - '[x={ Get-DatumSourceFile -Path 
$File } =]' diff --git a/source/Roles/DomainController.yml b/source/Roles/DomainController.yml index 56fc8c1f..ed697d00 100644 --- a/source/Roles/DomainController.yml +++ b/source/Roles/DomainController.yml @@ -2,11 +2,9 @@ Configurations: - AddsDomainController AddsDomainController: - DomainName: contoso.com - #contoso\install : Somepass1 - Credential: '[ENC=PE9ianMgVmVyc2lvbj0iMS4xLjAuMSIgeG1sbnM9Imh0dHA6Ly9zY2hlbWFzLm1pY3Jvc29mdC5jb20vcG93ZXJzaGVsbC8yMDA0LzA0Ij4NCiAgPE9iaiBSZWZJZD0iMCI+DQogICAgPFROIFJlZklkPSIwIj4NCiAgICAgIDxUPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDdXN0b21PYmplY3Q8L1Q+DQogICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgIDwvVE4+DQogICAgPE1TPg0KICAgICAgPE9iaiBOPSJLZXlEYXRhIiBSZWZJZD0iMSI+DQogICAgICAgIDxUTiBSZWZJZD0iMSI+DQogICAgICAgICAgPFQ+U3lzdGVtLk9iamVjdFtdPC9UPg0KICAgICAgICAgIDxUPlN5c3RlbS5BcnJheTwvVD4NCiAgICAgICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgICAgICA8L1ROPg0KICAgICAgICA8TFNUPg0KICAgICAgICAgIDxPYmogUmVmSWQ9IjIiPg0KICAgICAgICAgICAgPFROUmVmIFJlZklkPSIwIiAvPg0KICAgICAgICAgICAgPE1TPg0KICAgICAgICAgICAgICA8UyBOPSJIYXNoIj4yRTdCRDJFOUMwOEJCRjgzRjJGMjdCMzVGRjE1ODlGMUQyQ0VFRjA1NjkyOTUwNDQ5REU1QUEwMUJBQkNGQTlGPC9TPg0KICAgICAgICAgICAgICA8STMyIE49Ikl0ZXJhdGlvbkNvdW50Ij41MDAwMDwvSTMyPg0KICAgICAgICAgICAgICA8QkEgTj0iS2V5Ij5KYjk3bkNwWTB5Q3A1eVdBNkZKMU44c1BNTHVhbUtDQ0hKRlhHOFNyTWg2WC95SjZmRXg3dHFnelNWWmMwM1RIPC9CQT4NCiAgICAgICAgICAgICAgPEJBIE49Ikhhc2hTYWx0Ij4vbjBsakhXaGI4NUJIT0x6cnhka2tleVZHNWFMWEk4Szh2TGJrT3RycDQ0PTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJTYWx0Ij55SlRucW9SSlpvYXNLRXQ0ZHVXSXA5d0wyZG5sMXkwb3hNUjJiY0FYSWxRPTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJJViI+MSt0L054WHBLQVRlVkFIY3ludmpoV1RDcXVSQ1E5QXk2QkJXUCttOEViTT08L0JBPg0KICAgICAgICAgICAgPC9NUz4NCiAgICAgICAgICA8L09iaj4NCiAgICAgICAgPC9MU1Q+DQogICAgICA8L09iaj4NCiAgICAgIDxCQSBOPSJDaXBoZXJUZXh0Ij5Dcm9Ga0pxSEh0RGN1MEc1dTA0WWVMVVFuZ3FrOENLMy9iMU5RdzdaY25hUnJRckdYNUxHQjA5amVKQXd6b2VjTDY4cjZHRWRYK2lmZ3ZIeHVFY25DVml1QTVLT2UzcjZtZk9YcFhvbzIrND08L0JBPg0KICAgICAgPEJBIE49IkhNQUMiPnZYNk9tQzlCcUlCb0lIVG9UcVEvZHhBdkJyR0tyNXNiZVBWZGd6VDBKRkk9PC9CQT4NCiAgICAgIDxTIE49IlR5cGUiPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDcmVkZW50aWFsPC9TPg0KICAgIDwvTVM+DQogIDwvT2JqPg0KPC9PYmpzPg==]' - #contoso\install : Somepass1 - SafeModeAdministratorPassword: 
'[ENC=PE9ianMgVmVyc2lvbj0iMS4xLjAuMSIgeG1sbnM9Imh0dHA6Ly9zY2hlbWFzLm1pY3Jvc29mdC5jb20vcG93ZXJzaGVsbC8yMDA0LzA0Ij4NCiAgPE9iaiBSZWZJZD0iMCI+DQogICAgPFROIFJlZklkPSIwIj4NCiAgICAgIDxUPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDdXN0b21PYmplY3Q8L1Q+DQogICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgIDwvVE4+DQogICAgPE1TPg0KICAgICAgPE9iaiBOPSJLZXlEYXRhIiBSZWZJZD0iMSI+DQogICAgICAgIDxUTiBSZWZJZD0iMSI+DQogICAgICAgICAgPFQ+U3lzdGVtLk9iamVjdFtdPC9UPg0KICAgICAgICAgIDxUPlN5c3RlbS5BcnJheTwvVD4NCiAgICAgICAgICA8VD5TeXN0ZW0uT2JqZWN0PC9UPg0KICAgICAgICA8L1ROPg0KICAgICAgICA8TFNUPg0KICAgICAgICAgIDxPYmogUmVmSWQ9IjIiPg0KICAgICAgICAgICAgPFROUmVmIFJlZklkPSIwIiAvPg0KICAgICAgICAgICAgPE1TPg0KICAgICAgICAgICAgICA8UyBOPSJIYXNoIj4yRTdCRDJFOUMwOEJCRjgzRjJGMjdCMzVGRjE1ODlGMUQyQ0VFRjA1NjkyOTUwNDQ5REU1QUEwMUJBQkNGQTlGPC9TPg0KICAgICAgICAgICAgICA8STMyIE49Ikl0ZXJhdGlvbkNvdW50Ij41MDAwMDwvSTMyPg0KICAgICAgICAgICAgICA8QkEgTj0iS2V5Ij5KYjk3bkNwWTB5Q3A1eVdBNkZKMU44c1BNTHVhbUtDQ0hKRlhHOFNyTWg2WC95SjZmRXg3dHFnelNWWmMwM1RIPC9CQT4NCiAgICAgICAgICAgICAgPEJBIE49Ikhhc2hTYWx0Ij4vbjBsakhXaGI4NUJIT0x6cnhka2tleVZHNWFMWEk4Szh2TGJrT3RycDQ0PTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJTYWx0Ij55SlRucW9SSlpvYXNLRXQ0ZHVXSXA5d0wyZG5sMXkwb3hNUjJiY0FYSWxRPTwvQkE+DQogICAgICAgICAgICAgIDxCQSBOPSJJViI+MSt0L054WHBLQVRlVkFIY3ludmpoV1RDcXVSQ1E5QXk2QkJXUCttOEViTT08L0JBPg0KICAgICAgICAgICAgPC9NUz4NCiAgICAgICAgICA8L09iaj4NCiAgICAgICAgPC9MU1Q+DQogICAgICA8L09iaj4NCiAgICAgIDxCQSBOPSJDaXBoZXJUZXh0Ij5Dcm9Ga0pxSEh0RGN1MEc1dTA0WWVMVVFuZ3FrOENLMy9iMU5RdzdaY25hUnJRckdYNUxHQjA5amVKQXd6b2VjTDY4cjZHRWRYK2lmZ3ZIeHVFY25DVml1QTVLT2UzcjZtZk9YcFhvbzIrND08L0JBPg0KICAgICAgPEJBIE49IkhNQUMiPnZYNk9tQzlCcUlCb0lIVG9UcVEvZHhBdkJyR0tyNXNiZVBWZGd6VDBKRkk9PC9CQT4NCiAgICAgIDxTIE49IlR5cGUiPlN5c3RlbS5NYW5hZ2VtZW50LkF1dG9tYXRpb24uUFNDcmVkZW50aWFsPC9TPg0KICAgIDwvTVM+DQogIDwvT2JqPg0KPC9PYmpzPg==]' + DomainName: '[x={ $Datum.Global.Domain.DomainFqdn }=]' + Credential: '[x={ $Datum.Global.Domain.DomainJoinCredentials }=]' + SafeModeAdministratorPassword: '[x={ $Datum.Global.Domain.DomainJoinCredentials }=]' DatabasePath: C:\Windows\NTDS LogPath: C:\Windows\Logs SysvolPath: C:\Windows\SYSVOL @@ -15,4 +13,4 @@ AddsDomainController: DscTagging: Layers: - - Roles\DomainControllers + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Roles/FileServer.yml b/source/Roles/FileServer.yml index 76220522..6feecd61 100644 --- a/source/Roles/FileServer.yml +++ b/source/Roles/FileServer.yml @@ -19,7 +19,7 @@ FilesAndFolders: Contents: Some test data DependsOn: '[File]file_C__Test' - DestinationPath: C:\GpoBackup - SourcePath: \\DSCDC01\SYSVOL\contoso.com\Policies + SourcePath: '[x= "\\DSCDC01\SYSVOL\$($Datum.Global.Domain.DomainFqdn)\Policies" =]' Type: Directory RegistryValues: @@ -38,4 +38,4 @@ SecurityBaseline: DscTagging: Layers: - - Roles\FileServer + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/source/Roles/WebServer.yml b/source/Roles/WebServer.yml index 4af22aba..b9094f8f 100644 --- a/source/Roles/WebServer.yml +++ b/source/Roles/WebServer.yml @@ -75,7 +75,6 @@ WindowsServices: StartupType: Automatic Path: C:\DummyService.exe DependsOn: '[RegistryValues]RegistryValues' - DscTagging: Layers: - - Roles\WebServer + - '[x={ Get-DatumSourceFile -Path $File } =]' diff --git a/tests/Acceptance/TestMofFiles.Tests.ps1 b/tests/Acceptance/TestMofFiles.Tests.ps1 index 3ac0d47e..cfa6a7cb 100644 --- a/tests/Acceptance/TestMofFiles.Tests.ps1 +++ b/tests/Acceptance/TestMofFiles.Tests.ps1 @@ -5,29 +5,39 @@ BeforeDiscovery { $Filter = $global:Filter } - $datumDefinitionFile = Join-Path $here ..\..\source\Datum.yml 
-    $nodeDefinitions = Get-ChildItem $here\..\..\source\AllNodes -Recurse -Include *.yml
-    $environments = (Get-ChildItem $here\..\..\source\AllNodes -Directory).BaseName
-    $roleDefinitions = Get-ChildItem $here\..\..\source\Roles -Recurse -Include *.yml
+    $datumDefinitionFile = Join-Path -Path $ProjectPath -ChildPath source\Datum.yml
+    $nodeDefinitions = Get-ChildItem $ProjectPath\source\AllNodes -Recurse -Include *.yml
+    $environments = (Get-ChildItem $ProjectPath\source\AllNodes -Directory -ErrorAction SilentlyContinue).BaseName
+    $roleDefinitions = Get-ChildItem $ProjectPath\source\Roles -Recurse -Include *.yml -ErrorAction SilentlyContinue
     $datum = New-DatumStructure -DefinitionFile $datumDefinitionFile
     $configurationData = Get-FilteredConfigurationData -Filter $Filter -CurrentJobNumber $currentJobNumber -TotalJobCount $totalJobCount

     $nodeNames = [System.Collections.ArrayList]::new()

     $mofFiles = Get-ChildItem -Path "$OutputDirectory\MOF" -Filter *.mof -Recurse -ErrorAction SilentlyContinue
+    $mofChecksumFiles = Get-ChildItem -Path "$OutputDirectory\MOF" -Filter *.mof.checksum -Recurse -ErrorAction SilentlyContinue
     $metaMofFiles = Get-ChildItem -Path "$OutputDirectory\MetaMOF" -Filter *.mof -Recurse -ErrorAction SilentlyContinue
     $nodes = $configurationData.AllNodes

     $allMofTests = @(
         @{
-            MofFiles     = $mofFiles
-            MetaMofFiles = $metaMofFiles
-            Nodes        = $nodes
+            MofFiles         = $mofFiles
+            MofChecksumFiles = $mofChecksumFiles
+            MetaMofFiles     = $metaMofFiles
+            Nodes            = $nodes
         }
     )

-    $individualTests = $nodes | Foreach-Object { @{NodeName = $_.Name; MofFiles = $mofFiles; MetaMofFiles = $metaMofFiles } }
+    $individualTests = $nodes | ForEach-Object {
+        @{
+            NodeName         = $_.Name
+            MofChecksumFiles = $mofChecksumFiles
+            MofFiles         = $mofFiles
+            MetaMofFiles     = $metaMofFiles
+        }
+    }
 }

 Describe 'MOF Files' -Tag BuildAcceptance {
+
     It 'All nodes have a MOF file' -TestCases $allMofTests {
         Write-Verbose "MOF File Count $($mofFiles.Count)"
         Write-Verbose "Node Count $($nodes.Count)"
@@ -35,8 +45,19 @@ Describe 'MOF Files' -Tag BuildAcceptance {
         $mofFiles.Count | Should -Be $nodes.Count
     }

+    It 'All nodes have a MOF Checksum file' -TestCases $allMofTests {
+        Write-Verbose "MOF Checksum File Count $($MofChecksumFiles.Count)"
+        Write-Verbose "Node Count $($nodes.Count)"
+
+        $MofChecksumFiles.Count | Should -Be $nodes.Count
+    }
+
     It "Node '<NodeName>' should have a MOF file" -TestCases $individualTests {
-        $MofFiles | Where-Object BaseName -eq $NodeName | Should -BeOfType System.IO.FileSystemInfo
+        $MofFiles | Where-Object BaseName -EQ $NodeName | Should -BeOfType System.IO.FileSystemInfo
+    }
+
+    It "Node '<NodeName>' should have a MOF Checksum file" -TestCases $individualTests {
+        $MofChecksumFiles | Where-Object BaseName -EQ "$NodeName.mof" | Should -BeOfType System.IO.FileSystemInfo
     }

     It 'All nodes have a Meta MOF file' -TestCases $allMofTests {
@@ -45,7 +66,9 @@ Describe 'MOF Files' -Tag BuildAcceptance {
         $metaMofFiles.Count | Should -BeIn $nodes.Count
     }

+
     It "Node '<NodeName>' should have a Meta MOF file" -TestCases $individualTests {
-        $metaMofFiles | Where-Object BaseName -eq "$($NodeName).meta" | Should -BeOfType System.IO.FileSystemInfo
+        $metaMofFiles | Where-Object BaseName -EQ "$($NodeName).meta" | Should -BeOfType System.IO.FileSystemInfo
     }
+
 }
diff --git a/tests/ConfigData/CompositeResources.Tests.ps1 b/tests/ConfigData/CompositeResources.Tests.ps1
new file mode 100644
index 00000000..add0664f
--- /dev/null
+++ b/tests/ConfigData/CompositeResources.Tests.ps1
@@ -0,0 +1,100 @@
+#TODO: Test if a composite resource needs/imports a dsc resource that is currently not available/physically existent
+
+BeforeDiscovery {
+    $here = $PSScriptRoot
+    # Named capture groups are referenced below via Groups['ModuleName'] and Groups['ModuleVersion'].
+    $moduleFindPattern = 'Import-DscResource -ModuleName (?<ModuleName>\w+)( -ModuleVersion (?<ModuleVersion>(\d|\.)+))?'
+    $rootPath = Split-Path -Path (Split-Path -Path $here -Parent) -Parent
+
+    $dscCompositeResourceModules = $BuildInfo.'Sampler.DscPipeline'.DscCompositeResourceModules
+
+    foreach ($dscCompositeResourceModule in $dscCompositeResourceModules.GetEnumerator())
+    {
+        $compositeResourceModuleName, $compositeResourceModuleVersion = if ($dscCompositeResourceModule -is [hashtable])
+        {
+            $dscCompositeResourceModule.Name
+            $dscCompositeResourceModule.Version
+        }
+        else
+        {
+            $dscCompositeResourceModule
+        }
+
+        if ($compositeResourceModuleName -eq 'PSDesiredStateConfiguration')
+        {
+            continue
+        }
+
+        if ($compositeResourceModuleVersion)
+        {
+            $compositeResourceModulePath = Join-Path -Path $RequiredModulesDirectory -ChildPath "$compositeResourceModuleName\$compositeResourceModuleVersion\DscResources"
+        }
+        else
+        {
+            $compositeResourceModulePath = Join-Path -Path $RequiredModulesDirectory -ChildPath "$compositeResourceModuleName\*\DscResources"
+        }
+
+        $compositeResourceModulePath = (Resolve-Path -Path $compositeResourceModulePath).Path
+        $compResources = (Get-ChildItem -Path $compositeResourceModulePath)
+        $psDependPath = Join-Path -Path $rootPath -ChildPath RequiredModules.psd1
+        $psDepend = Get-Item -Path $psDependPath
+
+        $dscResources = Import-PowerShellDataFile -Path $psDepend.FullName
+        $dscResources.Remove('PSDependOptions')
+
+        [hashtable[]]$testCases = @()
+
+        Write-Host "DSC Composite / Resource Module Table for '$compositeResourceModuleName' with version '$(if ($compositeResourceModuleVersion) { $compositeResourceModuleVersion } else { 'NA' })'" -ForegroundColor Green
+        Write-Host '-------------------------------------' -ForegroundColor Green
+        foreach ($compRes in $compResources)
+        {
+            $files = Get-ChildItem -Path $compRes.FullName -File -Recurse -Include '*.psm1'
+            foreach ($file in $files)
+            {
+                $importHash = @{}
+                $moduleMatches = Select-String -Path $file.FullName -Pattern $moduleFindPattern
+                foreach ($moduleMatch in $moduleMatches)
+                {
+                    $moduleVersion = $moduleMatch.Matches[0].Groups['ModuleVersion'].Value
+                    $importHash.Add($moduleMatch.Matches[0].Groups['ModuleName'].Value, $moduleVersion)
+                }
+
+                $importHash.Remove('PSDesiredStateConfiguration') #standard module available on every Windows machine.
+
+                [PSCustomObject]$dscResourceModuleTable = @()
+                if ($importHash -ne $null)
+                {
+                    $testCases += $importHash.GetEnumerator() | ForEach-Object {
+                        @{
+                            PSDependFileName           = $psDepend.Name
+                            DscResourceFileName        = $file.Name
+                            DscResourceName            = $_.Key
+                            VersionInCompositeResource = $_.Value
+                            VersionInPSDependFile      = $dscResources[$_.Key]
+                        }
+                    }
+                }
+            }
+        }
+        $testCases.GetEnumerator() | ForEach-Object { [pscustomobject]$_ } |
+            Sort-Object -Property DscResourceName |
+            Format-Table -Property DscResourceFileName, DscResourceName, VersionInCompositeResource, VersionInPSDependFile, PSDependFileName |
+            Out-String | Write-Host -ForegroundColor DarkGray
+        Write-Host '-------------------------------------' -ForegroundColor Green
+    }
+}
+
+Describe 'Resources matching between Composite Resources and PSDepend file' {
+
+    Context 'Composite Resources import correct DSC Resources' -Tag Integration {
+        It "DSC Resource Module '<DscResourceName>' is defined in '<PSDependFileName>'" -TestCases $testCases {
+            $VersionInPSDependFile | Should -Not -BeNullOrEmpty
+        }
+
+        It "Version of '<DscResourceName>' in '<DscResourceFileName>' is equal to version in '<PSDependFileName>'" -TestCases $testCases {
+            if ($VersionInCompositeResource)
+            {
+                $VersionInCompositeResource | Should -Be $VersionInPSDependFile
+            }
+        }
+    }
+}
diff --git a/tests/ConfigData/ConfigData.Tests.ps1 b/tests/ConfigData/ConfigData.Tests.ps1
index 70e71df3..33653f13 100644
--- a/tests/ConfigData/ConfigData.Tests.ps1
+++ b/tests/ConfigData/ConfigData.Tests.ps1
@@ -18,13 +18,13 @@ BeforeDiscovery {
     }

     $definitionTests = @{
-        datumDefinitionFile = "$ProjectPath\source\Datum.yml"
-        datumYamlContent    = Get-Content -Raw -Path "$ProjectPath\source\Datum.yml" -ErrorAction SilentlyContinue
+        datumDefinitionFile = Join-Path -Path $ProjectPath -ChildPath source\Datum.yml
+        datumYamlContent    = Get-Content -Raw -Path (Join-Path -Path $ProjectPath -ChildPath source\Datum.yml) -ErrorAction SilentlyContinue
         configurationData   = $configurationData
     }

     $nodeDefinitions = Get-ChildItem $ProjectPath\source\AllNodes -Recurse -Include *.yml | Where-Object {
-        $_.BaseName -in $configurationData.AllNodes.NodeName
+        $_.BaseName -in $configurationData.AllNodes.Name
     }
     $environments = (Get-ChildItem $ProjectPath\source\AllNodes -Directory -ErrorAction SilentlyContinue).BaseName
     $roleDefinitions = Get-ChildItem $ProjectPath\source\Roles -Recurse -Include *.yml -ErrorAction SilentlyContinue
@@ -39,12 +39,12 @@ BeforeDiscovery {
     $nodeGroups = $configurationData.AllNodes | Group-Object { $_.Environment }
     [hashtable[]]$allNodeTestsDuplicate = $nodeGroups | ForEach-Object {
         @{
-            ReferenceNodes  = $_.Group.NodeName
-            DifferenceNodes = $_.Group.NodeName | Sort-Object -Unique
+            ReferenceNodes  = $_.Group.NodeName | Where-Object { $_ -notlike '`[x=*' }
+            DifferenceNodes = $_.Group.NodeName | Where-Object { $_ -notlike '`[x=*' } | Sort-Object -Unique
         }
     }

-    $environments = Get-ChildItem $ProjectPath\source\Environment -ErrorAction SilentlyContinue | Select-Object -ExpandProperty BaseName
+    $environments = Get-ChildItem $ProjectPath\source\Environments -ErrorAction SilentlyContinue | Select-Object -ExpandProperty BaseName
     $locations = Get-ChildItem $ProjectPath\source\Locations -ErrorAction SilentlyContinue | Select-Object -ExpandProperty BaseName
     $roles = Get-ChildItem $ProjectPath\source\Roles -ErrorAction SilentlyContinue | Select-Object -ExpandProperty BaseName
     $baselines = Get-ChildItem $ProjectPath\source\Baselines -ErrorAction SilentlyContinue | Select-Object -ExpandProperty BaseName
@@ -55,6 +55,7 @@ BeforeDiscovery {
             Content  = $content
             Node     = $n
             NodeName = $n.NodeName
+            Name     = $_.BaseName
             Location = $n.Location
             Role     = $n.Role
             FullName = $_.FullName
@@ -90,6 +91,7 @@ BeforeDiscovery {
     $nodeTestsSingleNode = $nodes | ForEach-Object {
         @{
             NodeName          = $_.Name
+            Name              = $_.Name
             Node              = $_
             Datum             = $datum
             ConfigurationData = $configurationData
@@ -100,17 +102,16 @@ Describe 'Validate All Definition Files' -Tag Integration {

     It "'' is a valid yaml" -TestCases $allDefinitions {
-        { $content | ConvertFrom-Yaml } | Should -Not -Throw
+        { Get-Content -Path $FullName -Raw | ConvertFrom-Yaml } | Should -Not -Throw
     }
 }
-
 Describe 'Datum Tree Definition' -Tag Integration {
     It 'Exists in source Folder' -TestCases $definitionTests {
         Test-Path $datumDefinitionFile | Should -Be $true
     }

-    It 'is Valid Yaml' -TestCases $definitionTests {
+    It 'Is Valid Yaml' -TestCases $definitionTests {
         { $datumYamlContent | ConvertFrom-Yaml } | Should -Not -Throw
     }
@@ -124,72 +125,74 @@ Describe 'Node Definition Files' -Tag Integration {

     Context 'Testing for conflicts / duplicate data' {
         It 'Should not have duplicate node names' -TestCases $allNodeTestsDuplicate {
-            (Compare-Object -ReferenceObject $ReferenceNodes -DifferenceObject $DifferenceNodes).InputObject | Should -BeNullOrEmpty
+            if ($ReferenceNodes -and $DifferenceNodes)
+            {
+                (Compare-Object -ReferenceObject $ReferenceNodes -DifferenceObject $DifferenceNodes).InputObject | Should -BeNullOrEmpty
+            }
         }
     }

-    It "'<NodeName>' has valid yaml" -TestCases $allNodeTests {
+    It "'<Name>' has valid yaml" -TestCases $allNodeTests {
         { $content | ConvertFrom-Yaml } | Should -Not -Throw
     }

-    It "'<NodeName>' is in the right environment" -TestCases $allNodeTests {
-        if ($node.Environment)
+    It "'<Name>' is in the right environment" -TestCases $allNodeTests {
+        if ($node.Environment -and $node.Environment -notlike '`[x=*')
         {
             $pathElements = $FullName.Split('\')
             $pathElements -contains $node.Environment | Should -BeTrue
         }
     }

-    It "Location of '<NodeName>' is '<Location>' and does exist" -TestCases $allNodeTests {
+    It "Location of '<Name>' is '<Location>' and does exist" -TestCases $allNodeTests {
         if ($node.Location)
         {
             $node.Location -in $Locations | Should -BeTrue
         }
     }

-    It "Environment of '<NodeName>' is '<Environment>' and does exist" -TestCases $allNodeTests {
-        if ($node.Environment)
-        {
+    if ($node.Environment -and $node.Environment -notlike '`[x=*')
+    {
+        It "Environment of '<Name>' is '<Environment>' and does exist" -TestCases $allNodeTests {
             $node.Environment -in $Environments | Should -BeTrue
         }
     }

-    It "Role of '<NodeName>' is '<Role>' and does exist" -TestCases $allNodeTests {
-        if ($node.Role)
-        {
+    if ($node.Role)
+    {
+        It "Role of '<Name>' is '<Role>' and does exist" -TestCases $allNodeTests {
             $node.Role -in $Roles | Should -BeTrue
         }
     }

-    It "Baseline of '<NodeName>' is '<Baseline>' and does exist" -TestCases $allNodeTests {
-        if ($node.Baseline)
-        {
+    if ($node.Baseline)
+    {
+        It "Baseline of '<Name>' is '<Baseline>' and does exist" -TestCases $allNodeTests {
             $node.Baseline -in $Baselines | Should -BeTrue
         }
     }
-}
-
-Describe 'Roles Definition Files' -Tag Integration {
-    It ' has valid yaml' -TestCases $nodeRoleTests {
-        { $null = Get-Content -Raw -Path $FullName | ConvertFrom-Yaml } | Should -Not -Throw
+    Describe 'Roles Definition Files' -Tag Integration {
+        It ' has valid yaml' -TestCases $nodeRoleTests {
+            { $null = Get-Content -Raw -Path $FullName | ConvertFrom-Yaml } | Should -Not -Throw
+        }
     }
-}

-Describe 'Role Composition' -Tag Integration {
+    Describe 'Role Composition' -Tag Integration {

-    It "<NodeName> has a valid Configurations Setting (!`$null)" -TestCases $nodeTestsSingleNode {
-        { Resolve-Datum -PropertyPath Configurations -Node $node -DatumTree $datum } | Should -Not -Throw
-    }
+        It "<NodeName> has a valid Configurations Setting (!`$null)" -TestCases $nodeTestsSingleNode {
+            { Resolve-Datum -PropertyPath Configurations -Node $node -DatumTree $datum } | Should -Not -Throw
+        }

-    It 'No duplicate IP addresses should be used' -TestCases $nodeTestsAllNodes {
-        $allIps = $configurationData.AllNodes.NetworkIpConfiguration.Interfaces.IpAddress
-        $selectedIps = $allIps | Select-Object -Unique
+        It 'No duplicate IP addresses should be used' -TestCases $nodeTestsAllNodes {
+            $allIps = $configurationData.AllNodes.NetworkIpConfiguration.Interfaces.IpAddress
+            $selectedIps = $allIps | Select-Object -Unique

-        if ($allIps -and $selectedIps)
-        {
-            Compare-Object -ReferenceObject $allIps -DifferenceObject $selectedIps | Should -BeNull
+            if ($allIps -and $selectedIps)
+            {
+                Compare-Object -ReferenceObject $allIps -DifferenceObject $selectedIps | Should -BeNull
+            }
         }
     }
 }
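
Note on the '[x= ... =]' values introduced in the YAML above: these are Datum.InvokeCommand handler expressions that are evaluated when the Datum hierarchy is resolved during the build, not literal strings. The following is a minimal, illustrative sketch of resolving such an expression interactively; the working directory (the repository root), the empty filter, and the nested property path are assumptions, and the required modules are expected to have been restored first (for example with the Sampler bootstrap '.\build.ps1 -ResolveDependency -Tasks noop').

# Illustrative sketch only; assumes the dependency modules from RequiredModules.psd1 are available.
Import-Module -Name Datum
Import-Module -Name Datum.InvokeCommand   # provides the '[x= ... =]' handler
Import-Module -Name Sampler.DscPipeline   # provides Get-FilteredConfigurationData

# Build the Datum hierarchy from the same definition file the tests use.
$datum = New-DatumStructure -DefinitionFile .\source\Datum.yml

# Assumption: an empty filter resolves all nodes, mirroring the call in the acceptance tests.
$Filter = {}
$configurationData = Get-FilteredConfigurationData -Filter $Filter -CurrentJobNumber 1 -TotalJobCount 1
$node = $configurationData.AllNodes | Select-Object -First 1

# Grounded in the test code above: resolving the Configurations property for a node.
Resolve-Datum -PropertyPath Configurations -Node $node -DatumTree $datum

# Assumed nested lookup syntax: resolves the DscTagging layers, where the
# '[x={ Get-DatumSourceFile -Path $File } =]' expressions are evaluated to the
# relative source file name instead of a hard-coded 'Locations\...' or 'Roles\...' string.
Resolve-Datum -PropertyPath 'DscTagging\Layers' -Node $node -DatumTree $datum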