From 630619a7ab3030d633a8e1a429c257ad9cfa3310 Mon Sep 17 00:00:00 2001
From: tgrieger
Date: Mon, 12 Sep 2016 10:50:56 -0700
Subject: [PATCH] Initial Azure Automation Automated Export Sample

This contains the PowerShell files for the automated export, a README.md
file explaining how to set up the automated export in Azure Automation, and
an update to the README.md file in the directory above describing the
automated export sample.
---
 samples/manage/README.md                 |   3 +
 .../AutoExport.ps1                       | 280 ++++++++++++++++++
 .../AutoExportBlobRetention.ps1          |  23 ++
 .../README.md                            |  53 ++++
 4 files changed, 359 insertions(+)
 create mode 100644 samples/manage/azure-automation-automated-export/AutoExport.ps1
 create mode 100644 samples/manage/azure-automation-automated-export/AutoExportBlobRetention.ps1
 create mode 100644 samples/manage/azure-automation-automated-export/README.md

diff --git a/samples/manage/README.md b/samples/manage/README.md
index 6a106762..9ddaf4fb 100644
--- a/samples/manage/README.md
+++ b/samples/manage/README.md
@@ -2,6 +2,9 @@
 Contains samples for managing Microsoft's SQL databases including SQL Server, Azure SQL Database, and Azure SQL Data Warehouse.
 
+## Automatically export your databases with Azure Automation
+This includes samples for setting up Azure Automation and exporting your databases to Azure Blob Storage.
+
 ## Collect and monitor resource usage data across multiple pools in a subscription
 This Solution Quick Start provides a solution for collecting and monitoring Azure SQL Database resource usage across multiple pools in a subscription. When you have a large number of databases in a subscription, it is cumbersome to monitor each elastic pool separately. To solve this, you can combine SQL database PowerShell cmdlets and T-SQL queries to collect resource usage data from multiple pools and their databases for monitoring and analysis of resource usage.
 
diff --git a/samples/manage/azure-automation-automated-export/AutoExport.ps1 b/samples/manage/azure-automation-automated-export/AutoExport.ps1
new file mode 100644
index 00000000..691d1e9c
--- /dev/null
+++ b/samples/manage/azure-automation-automated-export/AutoExport.ps1
@@ -0,0 +1,280 @@
+# The array that will hold the list of database objects.
+$dbs = New-Object System.Collections.ArrayList;
+
+# The enum that describes what state a database is in.
+Add-Type -TypeDefinition @"
+    public enum DatabaseState
+    {
+        ToCopy,
+        Copying,
+        ToExport,
+        Exporting,
+        ToDrop,
+        Finished
+    }
+"@
+
+# The database and server pairs that will be exported.
+$databaseServerPairs =
+    @([pscustomobject]@{serverName="SAMPLESERVER1";databaseName="SAMPLEDATABASE1"},
+    [pscustomobject]@{serverName="SAMPLESERVER1";databaseName="SAMPLEDATABASE2"},
+    [pscustomobject]@{serverName="SAMPLESERVER2";databaseName="SAMPLEDATABASE3"});
+
+$serverCred = Get-AutomationPSCredential -Name 'NAMEOFSERVERCREDENTIAL1';
+$serverCred2 = Get-AutomationPSCredential -Name 'NAMEOFSERVERCREDENTIAL2';
+$serverCredentialsDictionary = @{'SAMPLESERVER1'=$serverCred;'SAMPLESERVER2'=$serverCred2}
+
+# The number of databases you want to have running at the same time.
+$batchingLimit = 10;
+# The number of times you want to retry if there is a failure.
+$retryLimit = 5;
+# The number of minutes you want to wait for an operation to finish before you fail.
+$waitInMinutes = 30;
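+
+# State flow for each database object:
+#   ToCopy -> Copying -> ToExport -> Exporting -> ToDrop -> Finished
+# A successful run passes through every state; a failure or timeout at any
+# stage jumps straight to ToDrop so the database copy is always cleaned up.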
+
+# This function takes the database and server names and creates a database object to use for the export.
+function CreateDatabaseObject($databaseName, $serverName)
+{
+    # Create the new object.
+    $dbObj = New-Object System.Object;
+    # Add the DatabaseName property and set it.
+    $dbObj | Add-Member -type NoteProperty -name DatabaseName -Value $databaseName;
+    # Add a unique time at the end of DatabaseCopyName so that we have a unique database name every time.
+    $currentTime = Get-Date -format "_yyyy-MM-dd_HH:mm.ss";
+    $dbCopyName = $databaseName + $currentTime;
+    # Add the DatabaseCopyName property and set it.
+    $dbObj | Add-Member -type NoteProperty -name DatabaseCopyName -Value $dbCopyName;
+    # Add the ServerName property and set it.
+    $dbObj | Add-Member -type NoteProperty -name ServerName -Value $serverName;
+    # Add the Export property and set it to $null for now. This will be used to look up the export after it has been started.
+    $dbObj | Add-Member -type NoteProperty -name Export -Value $null;
+    # Add the DatabaseState property and set it to ToCopy so that the "state machine" knows to start the copy of the database.
+    $dbObj | Add-Member -type NoteProperty -name DatabaseState -Value ([DatabaseState]::ToCopy);
+    # Add the RetryCount property and set it to 0. This will be used to count the number of times we retry each failable operation.
+    $dbObj | Add-Member -type NoteProperty -name RetryCount -Value 0;
+    # Add the OperationStartTime property and set it to $null for now. This will be used when an operation starts to correctly handle timeouts.
+    $dbObj | Add-Member -type NoteProperty -name OperationStartTime -Value $null;
+
+    # Return the newly created object.
+    return $dbObj;
+}
+
+# This function starts the copy of the database. If there is an error, we set the state to ToDrop. Otherwise, we set the state to Copying.
+function StartCopy($dbObj)
+{
+    # Start the copy of the database.
+    Start-AzureSqlDatabaseCopy -ServerName $dbObj.ServerName -DatabaseName $dbObj.DatabaseName -PartnerDatabase $dbObj.DatabaseCopyName;
+    # $? is true if the last command succeeded and false if it failed. If it failed and we are out of retries, go to the ToDrop state.
+    if(-not $? -and $global:retryLimit -le $dbObj.RetryCount)
+    {
+        echo ("Error occurred while starting copy of " + $dbObj.DatabaseName + ". It will not be copied. Deleting the database copy named " + $dbObj.DatabaseCopyName + ".");
+        # Set state to ToDrop in case something did get copied.
+        $dbObj.DatabaseState = ([DatabaseState]::ToDrop);
+        # Return so we don't execute the rest of the function.
+        return;
+    }
+    elseif(-not $?)
+    {
+        # We failed but we haven't hit the retry limit yet, so increment RetryCount and return so we try again.
+        echo ("Retrying with database " + $dbObj.DatabaseName);
+        $dbObj.RetryCount++;
+        return;
+    }
+    # Set the state of the database object to Copying.
+    $dbObj.DatabaseState = ([DatabaseState]::Copying);
+    echo ("Copying " + $dbObj.DatabaseName);
+    $dbObj.OperationStartTime = Get-Date;
+}
+
+# This function checks the progress of the copy. If there is an error, we set the state to ToDrop. Otherwise, we set the state to ToExport.
+function CheckCopy($dbObj)
+{
+    # Get the status of the database copy, and capture the cmdlet's success before another command overwrites $?.
+    $check = Get-AzureSqlDatabaseCopy -ServerName $dbObj.ServerName -DatabaseName $dbObj.DatabaseName;
+    $succeeded = $?;
+    $currentTime = Get-Date;
+    # If the status check failed and we are out of retries, or the copy has timed out, go to the ToDrop state.
+    if((-not $succeeded -and $global:retryLimit -le $dbObj.RetryCount) -or ($currentTime - $dbObj.OperationStartTime).TotalMinutes -gt $global:waitInMinutes)
+    {
+        echo ("Error occurred during copy of " + $dbObj.DatabaseName + ". It will not be exported. Deleting the database copy named " + $dbObj.DatabaseCopyName + ".");
+        # Set state to ToDrop in case something did get copied.
+        $dbObj.DatabaseState = ([DatabaseState]::ToDrop);
+        # Return so we don't execute the rest of the function.
+        return;
+    }
+    elseif(-not $succeeded)
+    {
+        # We failed but we haven't hit the retry limit yet, so increment RetryCount and return so we try again.
+        echo ("Retrying with database " + $dbObj.DatabaseName);
+        $dbObj.RetryCount++;
+        return;
+    }
+    # Get the percent complete from the status to check if the database copy is done.
+    $percentComplete = $check.PercentComplete
+    # PercentComplete will be $null when the copy is complete.
+    if($percentComplete -eq $null)
+    {
+        # The copy is complete so set the state to ToExport.
+        $dbObj.DatabaseState = ([DatabaseState]::ToExport);
+        $dbObj.RetryCount = 0;
+    }
+}
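+
+# Note: the script exports a copy rather than the live database; copying
+# first gives the export a transactionally consistent database to work from
+# while the source database stays online and in use.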
+
+# This function starts the export. If there is an error, we set the state to ToDrop. Otherwise, we set the state to Exporting.
+function StartExport($dbObj)
+{
+    # Get the current time to use as a unique identifier for the blob name.
+    $currentTime = Get-Date -format "_yyyy-MM-dd_HH:mm.ss";
+    $blobName = $dbObj.DatabaseName + "_ExportBlob" + $currentTime;
+    # Use the stored credential to create a server credential to use to log in to the server.
+    $servercredential = $global:serverCredentialsDictionary[$dbObj.ServerName];
+    # Set up a SQL connection context to use when exporting.
+    $ctx = New-AzureSqlDatabaseServerContext -ServerName $dbObj.ServerName -Credential $servercredential;
+    # Get the storage key to set up the storage context.
+    $storageKey = Get-AutomationVariable -Name "STORAGEKEYVARIABLENAME";
+    # Get the storage context.
+    $stgctx = New-AzureStorageContext -StorageAccountName "STORAGEACCOUNTNAME" -StorageAccountKey $storageKey;
+
+    # Start the export. If there is an error, stop the export and set the state to ToDrop.
+    $dbObj.Export = Start-AzureSqlDatabaseExport -SqlConnectionContext $ctx -StorageContext $stgctx -StorageContainerName autoexportcontainer -DatabaseName $dbObj.DatabaseCopyName -BlobName $blobName;
+    # $? is true if the last command succeeded and false if it failed. If it failed and we are out of retries, go to the ToDrop state.
+    if(-not $? -and $global:retryLimit -le $dbObj.RetryCount)
+    {
+        echo ("Error occurred while starting export of " + $dbObj.DatabaseName + ". It will not be exported. Deleting the database copy named " + $dbObj.DatabaseCopyName + ".");
+        # Set state to ToDrop so that we drop the copied database since there was an error exporting it.
+        $dbObj.DatabaseState = ([DatabaseState]::ToDrop);
+        # Return so we don't execute the rest of the function.
+        return;
+    }
+    elseif(-not $?)
+    {
+        # We failed but we haven't hit the retry limit yet, so increment RetryCount and return so we try again.
+        echo ("Retrying with database " + $dbObj.DatabaseName);
+        $dbObj.RetryCount++;
+        return;
+    }
+    # Set the state to Exporting.
+    $dbObj.DatabaseState = ([DatabaseState]::Exporting);
+    echo ("Exporting " + $dbObj.DatabaseCopyName);
+    $dbObj.OperationStartTime = Get-Date;
+}
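+
+# Start-AzureSqlDatabaseExport returns a request object, which is stored on
+# $dbObj.Export above so that CheckExport can poll the operation's progress
+# with Get-AzureSqlDatabaseImportExportStatus.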
+
+# This function monitors the export progress.
+function CheckExport($dbObj)
+{
+    # Get the progress of the database's export, and capture the cmdlet's success before another command overwrites $?.
+    $check = Get-AzureSqlDatabaseImportExportStatus -Request $dbObj.Export;
+    $succeeded = $?;
+    $currentTime = Get-Date;
+    # The export is complete when Status is "Completed". Wait for that to happen.
+    if($check.Status -eq "Completed")
+    {
+        # The export is done, so set the state to ToDrop to clean up the successfully exported copy.
+        $dbObj.DatabaseState = ([DatabaseState]::ToDrop);
+        $dbObj.RetryCount = 0;
+    }
+    elseif($check.Status -eq "Failed" -and $dbObj.RetryCount -lt $global:retryLimit)
+    {
+        # If the status is "Failed" and we have more retries left, try to export the database copy again.
+        echo ("The last export failed on database " + $dbObj.DatabaseName + ", going back to the ToExport state to try again");
+        echo $check
+        $dbObj.DatabaseState = ([DatabaseState]::ToExport);
+        $dbObj.RetryCount++;
+        return;
+    }
+    elseif($global:retryLimit -le $dbObj.RetryCount -or ($currentTime - $dbObj.OperationStartTime).TotalMinutes -gt $global:waitInMinutes)
+    {
+        echo ("Error occurred while exporting " + $dbObj.DatabaseName + ". Deleting the database copy named " + $dbObj.DatabaseCopyName + ".");
+        # The export failed or timed out, so set the state to ToDrop to clean up the database copy.
+        $dbObj.DatabaseState = ([DatabaseState]::ToDrop);
+    }
+    elseif(-not $succeeded)
+    {
+        # The status check failed but we haven't hit the retry limit yet, so increment RetryCount and return so we try again.
+        echo ("Retrying with database " + $dbObj.DatabaseName);
+        $dbObj.RetryCount++;
+        return;
+    }
+}
+
+# This function runs the command to drop the database copy and sets the state to Finished.
+function StartDrop($dbObj)
+{
+    # Start the delete.
+    Remove-AzureSqlDatabase -ServerName $dbObj.ServerName -DatabaseName $dbObj.DatabaseCopyName -Force;
+    # Set the state to Finished so it gets removed from the array.
+    $dbObj.DatabaseState = ([DatabaseState]::Finished);
+    echo ($dbObj.DatabaseCopyName + " dropped")
+}
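+
+# Each call to ExportProcess makes one pass over the tracked database
+# objects, moving any object whose current operation has finished on to its
+# next state. The loop at the bottom of the script keeps calling it until
+# every object reaches Finished and is removed from $dbs.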
+
+# Runs the "state machine" so that different databases can progress independently.
+function ExportProcess
+{
+    # Get all database objects in the ToCopy state and start the database copy.
+    $dbsToCopy = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::ToCopy);
+    for($i = 0; $i -lt $dbsToCopy.Count; $i++)
+    {
+        echo $dbsToCopy[$i];
+        StartCopy($dbsToCopy[$i]);
+    }
+
+    # Get all database objects in the Copying state and check on their copy progress.
+    $dbsCopying = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::Copying);
+    for($i = 0; $i -lt $dbsCopying.Count; $i++)
+    {
+        CheckCopy($dbsCopying[$i]);
+    }
+
+    # Get all database objects in the ToExport state and start their export.
+    $dbsToExport = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::ToExport);
+    for($i = 0; $i -lt $dbsToExport.Count; $i++)
+    {
+        echo $dbsToExport[$i];
+        StartExport($dbsToExport[$i]);
+    }
+
+    # Get all database objects in the Exporting state and check on their export progress.
+    $dbsExporting = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::Exporting);
+    for($i = 0; $i -lt $dbsExporting.Count; $i++)
+    {
+        CheckExport($dbsExporting[$i]);
+    }
+
+    # Get all database objects in the ToDrop state and start their drop.
+    $dbsToDrop = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::ToDrop);
+    for($i = 0; $i -lt $dbsToDrop.Count; $i++)
+    {
+        echo $dbsToDrop[$i];
+        StartDrop($dbsToDrop[$i]);
+    }
+
+    # Get all database objects in the Finished state and remove them from the array.
+    $dbsFinished = $global:dbs | Where-Object DatabaseState -eq ([DatabaseState]::Finished);
+    for($i = 0; $i -lt $dbsFinished.Count; $i++)
+    {
+        $global:dbs.Remove($dbsFinished[$i]);
+    }
+}
+
+# Get the certificate to authenticate the subscription.
+$cert = Get-AutomationCertificate -Name "CERTIFICATENAME";
+# Set the subscription to use.
+$subID = "00000000-0000-0000-0000-000000000000";
+$subName = "SUBSCRIPTIONNAME";
+Set-AzureSubscription -SubscriptionName $subName -Certificate $cert -SubscriptionId $subID;
+Select-AzureSubscription -Current $subName;
+
+# Work through $databaseServerPairs in batches of $batchingLimit so that no
+# more than $batchingLimit database copies exist at the same time.
+$currentIndex = 0;
+for($currentRun = 0; $currentRun -lt ([math]::Ceiling($databaseServerPairs.Length/$batchingLimit)); $currentRun++)
+{
+    # Loop through the next batch of databases in the $databaseServerPairs array and add corresponding database objects to the array.
+    for($currentIndex; $currentIndex -lt $global:databaseServerPairs.Length -and $currentIndex -lt ($currentRun*$batchingLimit + $batchingLimit); $currentIndex++)
+    {
+        $global:dbs.Add((CreateDatabaseObject $global:databaseServerPairs[$currentIndex].DatabaseName $global:databaseServerPairs[$currentIndex].ServerName))
+    }
+
+    # Continually call ExportProcess until all of the database objects have been removed from the array.
+    while($global:dbs.Count -gt 0)
+    {
+        ExportProcess
+    }
+}
\ No newline at end of file
diff --git a/samples/manage/azure-automation-automated-export/AutoExportBlobRetention.ps1 b/samples/manage/azure-automation-automated-export/AutoExportBlobRetention.ps1
new file mode 100644
index 00000000..7840a092
--- /dev/null
+++ b/samples/manage/azure-automation-automated-export/AutoExportBlobRetention.ps1
@@ -0,0 +1,23 @@
+# The storage key for the storage account you are using.
+$storageKey = Get-AutomationVariable -Name "STORAGEKEYVARIABLENAME";
+# The name of the storage container you are using.
+$storageContainer = "STORAGECONTAINERNAME";
+# Set up the storage context for the storage account.
+$context = New-AzureStorageContext -StorageAccountName "STORAGEACCOUNTNAME" -StorageAccountKey $storageKey
+# Get all of the blobs in the storage container.
+$blobs = Get-AzureStorageBlob -Container $storageContainer -Context $context
+# Set the number of days that you want the blobs to be stored for.
+$retentionInDays = 30
+
+foreach($blob in $blobs)
+{
+    # Get the current time to compare to the time that the blob was last modified.
+    $currentTime = Get-Date;
+    # If the blob is more than $retentionInDays old, delete it.
+    if(($currentTime - $blob.LastModified.DateTime).TotalDays -gt $retentionInDays)
+    {
+        echo ("Deleting blob " + $blob.Name)
+        # Delete the blob.
+        Remove-AzureStorageBlob -Container $storageContainer -Context $context -Blob $blob.Name;
+    }
+}
\ No newline at end of file
diff --git a/samples/manage/azure-automation-automated-export/README.md b/samples/manage/azure-automation-automated-export/README.md
new file mode 100644
index 00000000..1fd18674
--- /dev/null
+++ b/samples/manage/azure-automation-automated-export/README.md
@@ -0,0 +1,53 @@
+---
+services: azure automation
+platforms: azure
+author: trgrie-msft
+---
+
+# Setting up Auto Export in Azure Automation
+
+Provides the scripts and lists the steps to set up automatically exporting your databases to Azure Storage with Azure Automation.
+
+## Azure Automation Set Up
+
+1. Create and upload the certificates that you will use to authenticate your connection to Azure (a consolidated sketch of these commands appears after this list).
+   - Run PowerShell as administrator.
+   - Run the New-SelfSignedCertificate command: `New-SelfSignedCertificate -CertStoreLocation cert:\localmachine\my -DnsName <CertificateName>`
+   - Create a corresponding .pfx certificate by taking the thumbprint of the newly created certificate and running these commands:
+     - `$CertPassword = ConvertTo-SecureString -String <Password> -Force -AsPlainText`
+     - `Export-PfxCertificate -Cert cert:\localmachine\my\<Thumbprint> -FilePath <CertificateName>.pfx -Password $CertPassword`
+   - Upload the .cer file to your subscription [here](https://manage.windowsazure.com/).
+   - Upload the .pfx file to the certificates under Assets in the Automation account that you want to use on Azure. You will use the password you gave in the previous step to authenticate it.
+2. Create a new credential asset to authenticate your server with.
+   - Under Assets, click on Credentials, and then click on Add a credential.
+   - Name the credential and give the username and password that you will be logging into the server with.
+3. Create a new variable asset to pass the storage key of the Azure storage account you will be using.
+   - Under Assets, click on Variables and then Add a variable.
+   - Give the value of the storage key. You can mark the variable as encrypted so that only Azure Automation can read it and the key is not shown in plaintext to anyone who looks at the variable.
+4. Set up Log Analytics (OMS).
+   - If you don't have Log Analytics set up on your Azure account, follow [these](https://azure.microsoft.com/en-us/documentation/articles/automation-manage-send-joblogs-log-analytics/) instructions for setting it up.
+5. Set up Log Analytics alerts.
+   - To send yourself an email if an error occurs or one of the jobs fails, you need to set up alerts.
+   - Select the Log Analytics account that you want to use in the Azure portal and click on the OMS Portal box under Management.
+   - Click on Log Search and enter the queries you want to alert on. These two are suggested:
+     - Category=JobStreams "Error occurred*"
+     - Category=JobLogs ResultType=Failed
+   - The first alerts when the provided script reports that an error occurred, so you know if something didn't go quite right. The second alerts if the script fails entirely.
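+
+As a convenience, the certificate commands from step 1 can be run as a single script from an elevated PowerShell prompt. This is a minimal sketch; the certificate name and password below are placeholder values to replace with your own.
+
+```powershell
+# Placeholder values; substitute your own certificate name and password.
+$certName = "AutoExportCert"
+$certPassword = ConvertTo-SecureString -String "ReplaceWithAStrongPassword" -Force -AsPlainText
+
+# Create the self-signed certificate in the local machine store.
+$cert = New-SelfSignedCertificate -CertStoreLocation cert:\localmachine\my -DnsName $certName
+
+# Export the public .cer file to upload to your subscription.
+Export-Certificate -Cert $cert -FilePath ".\$certName.cer"
+
+# Export the .pfx file (using the new certificate's thumbprint) to upload to the Automation account.
+Export-PfxCertificate -Cert ("cert:\localmachine\my\" + $cert.Thumbprint) -FilePath ".\$certName.pfx" -Password $certPassword
+```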
+
+## Script Set Up
+
+1. In the AutoExport.ps1 script, here are the values that need to be modified (an example configuration follows at the end of this section):
+   - $databaseServerPairs: This is where you put the names of the databases you want to export along with the name of the server each is on.
+   - $serverCredentialsDictionary: If you are backing up from multiple servers, you can set up all of the credentials here and look them up by server name later.
+   - $batchingLimit: This tells the script how many databases can be worked on at the same time (essentially, the maximum number of database copies that will exist at once).
+   - $retryLimit: This tells the script how many times it can retry a failed operation.
+   - $waitInMinutes: This tells the script how long it can wait for an operation to complete before treating it as failed.
+   - -Name for Get-AutomationVariable: This is the name of the variable asset holding the storage key, created under the Automation account (probably the same one you are running the runbook under).
+   - -StorageAccountName for New-AzureStorageContext: This is the name of the storage account you are exporting to.
+   - -Name for Get-AutomationCertificate: This is the name of the certificate you set up to authenticate with Azure.
+   - $subID: The ID of the subscription you are using. This will be used to tell Azure Automation which subscription to use.
+   - $subName: The name of the subscription you are using. This will be used to tell Azure Automation which subscription to use.
+2. In the AutoExportBlobRetention.ps1 script, here are the values that need to be modified:
+   - -Name for Get-AutomationVariable: Same as for AutoExport.ps1, the name of the variable asset holding the storage key.
+   - $storageContainer: This is the name of the storage container holding the exported blobs.
+   - $retentionInDays: This is how many days you want to keep the exported blobs before they are deleted.
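+
+To make the list above concrete, here is a hypothetical configuration block for AutoExport.ps1; every server, database, credential, and subscription value is a placeholder to replace with your own.
+
+```powershell
+# Hypothetical example values for the AutoExport.ps1 settings described above.
+$databaseServerPairs =
+    @([pscustomobject]@{serverName="contoso-sql-01";databaseName="Inventory"},
+    [pscustomobject]@{serverName="contoso-sql-02";databaseName="Sales"});
+
+$serverCred = Get-AutomationPSCredential -Name 'ContosoSql01Credential';
+$serverCred2 = Get-AutomationPSCredential -Name 'ContosoSql02Credential';
+$serverCredentialsDictionary = @{'contoso-sql-01'=$serverCred;'contoso-sql-02'=$serverCred2}
+
+# Allow five concurrent database copies, three retries, and a one-hour timeout.
+$batchingLimit = 5;
+$retryLimit = 3;
+$waitInMinutes = 60;
+
+# Subscription used by Set-AzureSubscription/Select-AzureSubscription at the bottom of the script.
+$subID = "11111111-2222-3333-4444-555555555555";
+$subName = "Contoso Production";
+```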