## Wednesday, March 30, 2011

### Creating Dynamic User Objects in Active Directory

Account management in Active Directory comes at a cost of time and effort for an Administrator. This is especially burdensome for objects that are only needed for a limited timeframe such as testing objects. Active Directory provides a low cost method for dealing with temporary objects through the dynamic object class which provides automatic garbage collection based on a time-to-live value in seconds. This benefit does come with several limitations:
• The maximum TTL of a dynamic object is 31,556,926 seconds (1 year).
• A deleted dynamic object due to its TTL expiring does not leave a tombstone behind.
• All DCs holding replicas of dynamic objects must run on Windows Server 2003 or greater.
• Dynamic entries with TTL values are supported in all partitions except the Configuration partition and Schema partition.
• Active Directory Domain Services do not publish the optional dynamicSubtrees attribute, as described in the RFC 2589, in the root DSE object.
• Dynamic entries are handled similarly to non-dynamic entries when processing search, compare, add, delete, modify, and modifyDN operations.
• There is no way to change a static entry into a dynamic entry and vice-versa.
• A non-dynamic entry cannot be added subordinate to a dynamic entry.
In the code sample below, I take advantage of the dynamic object class to create 50 test accounts that will expire and delete themselves from the forest on May 1, 2011 at 2:30 pm. The accounts, created for a hypothetical finance application test, are placed in a specific OU, provided a unique sAMAccountName (tested to ensure this) and a pseudo-random password created for them. All this information is placed in a comma separated values text file so you can pass it over to your Quality Assurance team.

And remember, this script creates objects in Active Directory! Use at your own risk! I might have the best of intentions but my skill may betray you. Test, test and further test before implementing this code in a production environment.
Function Get-LocalDomainController($objectDomain) {
    # Pick a domain controller for $objectDomain from this computer's own AD
    # site, so queries stay site-local. Returns the first matching server name.
    $siteServers = ([System.DirectoryServices.ActiveDirectory.ActiveDirectorySite]::GetComputerSite()).Servers
    return $siteServers |
        Where-Object { $_.Domain.Name -eq $objectDomain } |
        ForEach-Object { $_.Name } |
        Select-Object -first 1
}

Function Get-ObjectADDomain($distinguishedName) {
    # Convert a distinguishedName ("CN=x,OU=y,DC=ad,DC=local") into its DNS
    # domain form ("ad.local") by discarding everything before the DC= parts.
    return ((($distinguishedName -replace "(.*?)DC=(.*)",'$2') -replace "DC=","") -replace ",",".")
}

Function Get-ActiveDirectoryObject($distinguishedName) {
    # Bind to the AD object via a domain controller in this computer's site.
    # Forward slashes are escaped because "/" is an ADsPath separator.
    return [ADSI]("LDAP://" + (Get-LocalDomainController (Get-ObjectADDomain $distinguishedName)) + "/" + ($distinguishedName -replace "/","\/"))
}

Function Get-DomainDistinguishedName($domain) {
    # Turn a DNS domain name ("ad.mycompany.local") into distinguished-name
    # form ("dc=ad,dc=mycompany,dc=local").
    $distinguishedName = "dc=" + $domain.Replace(".", ",dc=")
    return $distinguishedName
}

Function Test-sAMAccountName($sAMAccountName,$domain) {
    # Return $true when a user with this sAMAccountName already exists in
    # $domain. Performs an ADO subtree search via a site-local DC.
    $objectConnection = New-Object -comObject "ADODB.Connection"
    $objectConnection.Open("Provider=ADsDSOObject;")
    $objectCommand = New-Object -comObject "ADODB.Command"
    $objectCommand.ActiveConnection = $objectConnection

    $ldapBase = ("LDAP://" + (Get-LocalDomainController $domain) + "/" + (Get-DomainDistinguishedName $domain))
    $ldapAttr = "sAMAccountName"
    $ldapScope = "subtree"
    $ldapFilter = "(&(objectClass=user)(sAMAccountName=$sAMAccountName))"
    $ldapQuery = "<$ldapBase>;$ldapFilter;$ldapAttr;$ldapScope"

    $objectCommand.CommandText = $ldapQuery
    $objectRecordSet = $objectCommand.Execute()

    # Any matching record means the name is already taken.
    return ($objectRecordSet.RecordCount -gt 0)
}
#--------------------------------------------------------------------------------------------------#
Set-Variable -name destinationOu -option Constant -value "ou=Finance Testing,dc=americas,dc=ad,dc=mycompany,dc=local"
Set-Variable -name totalNumberOfAccounts -option Constant -value 50
Set-Variable -name givenName -option Constant -value "Test"
Set-Variable -name sn -option Constant -value "Account"
Set-Variable -name passwordSize -option Constant -value 12
#--------------------------------------------------------------------------------------------------#
$randomNumber = New-Object System.Random
$accounts = @()
$endDate = "05/01/2011 14:30:00"

# Seconds from now until the accounts should be garbage collected by AD.
$entryTtlSeconds = [int]((Get-Date $endDate) - (Get-Date)).TotalSeconds
#######################################################################
# Or you can do a time span instead for the account time to live i.e.,
# $entryTtlSeconds = [int](New-TimeSpan -days 90).TotalSeconds
# $entryTtlSeconds = [int](New-TimeSpan -minutes 60).TotalSeconds
#######################################################################

# dynamicObject entries cap out at 31,556,926 seconds (1 year).
if($entryTtlSeconds -gt 31556926) {
    Write-Host "Time-to-live greater than 1 year. Exiting!" -foregroundColor Red
    exit
}

$destinationOuObject = Get-ActiveDirectoryObject $destinationOu
if($destinationOuObject.distinguishedName -ne $destinationOu) {
    Write-Host "Unable to connect to $destinationOu. Exiting!" -foregroundColor Red
    exit
}

for($i = 1; $i -le $totalNumberOfAccounts; $i++) {
    $accountName = ($givenName + $sn + "$i")
    Write-Host "Creating $accountName..."
    if(Test-sAMAccountName $accountName (Get-ObjectADDomain $destinationOu)) {
        Write-Host "$accountName already exists, skipping..." -foregroundColor Yellow
        continue
    }

    $userObject = $destinationOuObject.Create("user","CN=$accountName")
    # 2 == ADS_PROPERTY_UPDATE; the object carries both classes to be dynamic.
    $userObject.PutEx(2,"objectClass",@("dynamicObject","user"))
    $userObject.Put("entryTTL",$entryTtlSeconds)
    $userObject.Put("sAMAccountName",$accountName) # Mandatory attribute
    $userObject.Put("givenName",$givenName)
    $userObject.Put("sn",($sn + "$i"))
    $userObject.Put("displayName",($givenName + " " + $sn + "$i"))
    $userObject.Put("description","Account Used for Application Testing")
    $userObject.Put("wWWHomePage","http://sharepointsite/projects/newfinancesystem")
    $userObject.Put("userPrincipalName",($accountName + "@" + (Get-ObjectADDomain $destinationOu)))
    $userObject.SetInfo()

    # Pseudo-random password from printable ASCII 33-125 (Next upper bound is exclusive).
    $password = ""
    for($x = 1; $x -le $passwordSize; $x++) {
        $password += [char]($randomNumber.Next(33,126))
    }

    $userObject.SetPassword($password)
    $userObject.Put("userAccountControl", 512) # 512 == NORMAL_ACCOUNT (enabled)
    $userObject.SetInfo()

    # Collect what QA needs to know about the account for the CSV hand-off.
    $account = New-Object -typeName PSObject
    Add-Member -inputObject $account -type NoteProperty -name "domain" -value (Get-ObjectADDomain $destinationOu).Split(".")[0]
    Add-Member -inputObject $account -type NoteProperty -name "sAMAccountName" -value ($userObject.sAMAccountName).ToString()
    Add-Member -inputObject $account -type NoteProperty -name "givenName" -value ($userObject.givenName).ToString()
    Add-Member -inputObject $account -type NoteProperty -name "sn" -value ($userObject.sn).ToString()
    Add-Member -inputObject $account -type NoteProperty -name "userPrincipalName" -value ($userObject.userPrincipalName).ToString()
    Add-Member -inputObject $account -type NoteProperty -name "password" -value $password
    $accounts += $account
}

$accounts | Export-Csv -path "$givenName$sn List.csv" -noTypeInformation


## Tuesday, March 29, 2011

### Retrieve the Fully Qualified Domain Name of a Local Active Directory Bound Computer

Here is the method I use to obtain the fully qualified domain name of a local Active Directory bound computer in my scripts.
$computerFqdn = (([System.Net.NetworkInformation.IPGlobalProperties]::GetIPGlobalProperties()).HostName + "." + ([System.Net.NetworkInformation.IPGlobalProperties]::GetIPGlobalProperties()).DomainName)


## Tuesday, March 22, 2011

### Scheduling Tasks in PowerShell for Windows Server 2008 -- Part 3: Daily Tasks

In my previous blog posts, "Scheduling Tasks in PowerShell for Windows Server 2008 -- Part 1: Monthly Task" and "Scheduling Tasks in PowerShell for Windows Server 2008 -- Part 2: Weekly Tasks", I explored the various settings required for scheduled tasks for those given date ranges. This post deals with the daily recurring tasks that administrators need to schedule, sometimes more than once a day. In the code sample below, I schedule a fictitious log cleanup PowerShell script to run three times a day; midnight, 8 am and 4 pm. For security reasons, after running this script, you should close the PowerShell command window as the password submitted for the Task Security Principal will remain in the command buffer, allowing anyone with access to your keyboard to "up arrow" and reveal it.

# Parameters to modify
$taskName = "Start-TriDailyLogCleanup"
$taskWorkingDirectory = "C:\PowerShell"
$taskPath = "%SystemRoot%\system32\WindowsPowerShell\v1.0\powershell.exe"
# Inner quotes must be backtick-escaped so the script path survives spaces.
$taskArguments = "-command `"$taskWorkingDirectory\$taskName.ps1`""
$taskAuthor = "ad\myaccount"
$taskDescription = "The Tri-Daily Log Cleanup on the servers."
$taskSecurityPrincipal = "ad\maintenance"
$taskShedulerTaskFolder = "\MyTasks"
# Midnight, 8 am and 4 pm start boundaries in ISO 8601 form (-Format s).
$startTimes = @((Get-Date "03/23/2011 00:00:00" -Format s),(Get-Date "03/23/2011 08:00:00" -Format s),(Get-Date "03/23/2011 16:00:00" -Format s))

# Would like to use -asSecureString but RegisterTaskDefinition does not accept it
# Look over your shoulder before typing
$password = Read-Host -prompt "$taskSecurityPrincipal Password"

# The meaty parts

$taskService = New-Object -ComObject Schedule.Service
$taskService.Connect()

$rootFolder = $taskService.GetFolder($taskShedulerTaskFolder)
$taskDefinition = $taskService.NewTask(0)

$registrationInformation = $taskDefinition.RegistrationInfo
$registrationInformation.Description = $taskDescription
$registrationInformation.Author = $taskAuthor

$taskPrincipal = $taskDefinition.Principal
$taskPrincipal.LogonType = 1  # 1 == TASK_LOGON_PASSWORD
$taskPrincipal.UserID = $taskSecurityPrincipal
$taskPrincipal.RunLevel = 0   # 0 == least privilege

$taskSettings = $taskDefinition.Settings
$taskSettings.StartWhenAvailable = $true
$taskSettings.RunOnlyIfNetworkAvailable = $true
$taskSettings.Priority = 7
$taskSettings.ExecutionTimeLimit = "PT2H"  # stop the task after two hours

# One daily trigger (type 2 == TASK_TRIGGER_DAILY) per start time.
$taskTriggers = $taskDefinition.Triggers
foreach($startTime in $startTimes) {
    $executionTrigger = $taskTriggers.Create(2)
    $executionTrigger.StartBoundary = $startTime
}

$taskAction = $taskDefinition.Actions.Create(0)  # 0 == TASK_ACTION_EXEC
$taskAction.Path = $taskPath
$taskAction.Arguments = $taskArguments
$taskAction.WorkingDirectory = $taskWorkingDirectory

# 6 == Task Create or Update
# 1 == Password must be supplied at registration
$rootFolder.RegisterTaskDefinition($taskName, $taskDefinition, 6, $taskSecurityPrincipal, $password, 1)

# Since we captured this in plain text I am going to nuke the value
# Not 100% security. Close the PowerShell command window to increase security.
$password = $null
## Monday, March 21, 2011

### MD5 Hash For PowerShell

For a project, I had a requirement to move selected files between two servers and provide proof that the copied matched exactly the original file. A common method to provide this proof is through a MD5 hash for each of the files to create a fingerprint. A MD5 hash provides the ability to compare two files without having to go to the trouble and slowness of comparing the files byte by byte using the algorithm created by Ron Rivest in 1991 to create a 128-bit hash. You can save the hash and use it for future reference to see if the file has changed.

In the code sample below, I use the System.Security.Cryptography.MD5CryptoServiceProvider class to create the hash in bytes then convert it to a string using the System.Bitconverter class and removing all dashes to create a 32 character representation of that hash. If you require even more bits to be present in your hash, the System.Security.Cryptography namespace provides up to 512 bits. To achieve this level of security, use System.Security.Cryptography.SHA512Managed class instead of System.Security.Cryptography.MD5CryptoServiceProvider in the function.

To test out this code, change out the file names below. Copy a file to a new location, run the function and see the results. Then open the copied file, make a minor edit and test again. It's interesting to see the changes in the returned hash.
Function Get-MD5Hash($fileName) {
    # Return the 32-character hex MD5 fingerprint of $fileName, or an
    # "ERROR: ..." string when the file does not exist.
    # Swap in System.Security.Cryptography.SHA512Managed for a longer hash.
    if([System.IO.File]::Exists($fileName)) {
        $fileStream = New-Object System.IO.FileStream($fileName,[System.IO.FileMode]::Open,[System.IO.FileAccess]::Read,[System.IO.FileShare]::ReadWrite)
        try {
            $MD5Hash = New-Object System.Security.Cryptography.MD5CryptoServiceProvider
            [byte[]] $fileByteChecksum = $MD5Hash.ComputeHash($fileStream)
            # BitConverter yields "AB-CD-..."; strip the dashes.
            $fileChecksum = ([System.BitConverter]::ToString($fileByteChecksum)).Replace("-","")
        } finally {
            # Close even when ComputeHash throws so the handle is not leaked.
            $fileStream.Close()
        }
    } else {
        $fileChecksum = "ERROR: $fileName Not Found"
    }
    return $fileChecksum
}

$fileName = "\\east-coast-fs.ad.mycompany.local\documents\Important Excel Spreadsheet.xlsx"
$fileChecksumOne = Get-MD5Hash $fileName
if($fileChecksumOne -match "^ERROR:") {
    Write-Host $fileChecksumOne -foregroundColor Red
    exit
}

$fileName = "\\west-coast-fs.ad.mycompany.local\documents\Important Excel Spreadsheet.xlsx"
$fileChecksumTwo = Get-MD5Hash $fileName
if($fileChecksumTwo -match "^ERROR:") {
    Write-Host $fileChecksumTwo -foregroundColor Red
    exit
}

if($fileChecksumOne -eq $fileChecksumTwo) {
    Write-Host "Files match!"
    Write-Host $fileChecksumOne -foregroundColor Green
    Write-Host $fileChecksumTwo -foregroundColor Green
} else {
    Write-Host "Files do not match!"
    Write-Host $fileChecksumOne -foregroundColor Red
    # Was Green in the original -- on a mismatch both hashes are shown in red.
    Write-Host $fileChecksumTwo -foregroundColor Red
}



## Thursday, March 17, 2011

### Add a New Mail Domain to Multiple Users

In my work, it is not uncommon to have new domains purchased for business units and employees immediately needing e-mail addresses with the new domain. So after the MX record is created and Exchange is updated to accept mail for the new domain, its time to target the users that require the new mail domain to be included in the proxyAddresses attribute. The proxyAddresses attribute is multivalued so you must use treat it as an array and use PutEx to append to the attribute with the new address. In the code sample below, I take a file of e-mail addresses that users already use, split off the "local-part" from the domain using the at sign as the delimiter and append the $newMailDomain constant to the local-part to create the new e-mail address for the user. When inserting the new e-mail address into the proxyAddresses attribute with PutEx, you will need to send it as an array even if it only has 1 element. To be on the safe side, which you should also do when modifying production data, a backup of all the current elements of the user's proxyAddresses attribute are dumped into a subdirectory called "backup" in a file name that matches their mail attribute (with ".txt" appended) and the script is set to not write to the directory by default. You must alter the$writeEnabled constant in order to actually update the user's object. The script also checks to see if the e-mail address you are trying to assign to the employee's account is already present and skips if so. I am using Write-Output to document the scripts behavior so you can easily pipe it out to a text file for review.

This script does not make the new address the employee's primary SMTP address. In order to do this, you would need to remove the current primary denoted by the "SMTP:" at the start of the proxyAddress and return it back with "smtp:" then add the new e-mail address with "SMTP:" at the start. I will provide a code sample in the future that will detail that swap out and how to do it safely.

And remember, this script modifies data! Use at your own risk! I might have the best of intentions but my skill may betray you. Test, test and further test before implementing this code in a production environment.
Function Get-LocalDomainController($objectDomain) {
    # Pick a domain controller for $objectDomain from this computer's own AD
    # site, so queries stay site-local. Returns the first matching server name.
    $siteServers = ([System.DirectoryServices.ActiveDirectory.ActiveDirectorySite]::GetComputerSite()).Servers
    return $siteServers |
        Where-Object { $_.Domain.Name -eq $objectDomain } |
        ForEach-Object { $_.Name } |
        Select-Object -first 1
}

Function Get-ObjectADDomain($distinguishedName) {
    # Convert a distinguishedName ("CN=x,OU=y,DC=ad,DC=local") into its DNS
    # domain form ("ad.local") by discarding everything before the DC= parts.
    return ((($distinguishedName -replace "(.*?)DC=(.*)",'$2') -replace "DC=","") -replace ",",".")
}

Function Get-ActiveDirectoryObject($distinguishedName) {
    # Bind to the AD object via a domain controller in this computer's site.
    # Forward slashes are escaped because "/" is an ADsPath separator.
    return [ADSI]("LDAP://" + (Get-LocalDomainController (Get-ObjectADDomain $distinguishedName)) + "/" + ($distinguishedName -replace "/","\/"))
}
#--------------------------------------------------------------------------------------------------#
Set-Variable -name forestRootDn -option Constant -value ([ADSI]("LDAP://" + (([System.DirectoryServices.ActiveDirectory.Forest]::GetCurrentForest()).name) + "/rootDSE")).defaultNamingContext
Set-Variable -name adsPropertyAppend -option Constant -value 3  # ADS_PROPERTY_APPEND
Set-Variable -name newMailDomain -option Constant -value "@newdomain.local"
Set-Variable -name inputFile -option Constant -value "userlist.txt"
Set-Variable -name writeEnabled -option Constant -value $false  # flip to $true to actually write to AD
#--------------------------------------------------------------------------------------------------#
if(Test-Path -path $inputFile) {
    $userList = Get-Content -path $inputFile
} else {
    Write-Host "Could not locate $inputFile. Exiting..." -foregroundColor Red
    exit
}

if(!(Test-Path -path "backup")) {
    New-Item -path "backup" -type directory | Out-Null
}

$objectConnection = New-Object -comObject "ADODB.Connection"
$objectCommand = New-Object -comObject "ADODB.Command"
$objectConnection.Open("Provider=ADsDSOObject;")
$objectCommand.ActiveConnection = $objectConnection

foreach($user in $userList) {

    # Search the global catalog for the user who owns this existing address.
    $ldapBase = "GC://$forestRootDn"
    $ldapAttr = "distinguishedName"
    $ldapScope = "subtree"
    $ldapFilter = "(&(objectClass=user)(proxyAddresses=smtp:$user))"
    $ldapQuery = "<$ldapBase>;$ldapFilter;$ldapAttr;$ldapScope"
    $objectCommand.CommandText = $ldapQuery
    $objectRecordSet = $objectCommand.Execute()

    if(!$objectRecordSet.EOF) {
        while(!$objectRecordSet.EOF) {
            $userObject = Get-ActiveDirectoryObject $objectRecordSet.Fields.Item('distinguishedName').Value
            # local-part of the input address + the new mail domain
            $newEmailAddress = ("smtp:" + ($user.Split("@")[0]).ToLower() + $newMailDomain)
            Write-Output ($userObject.displayName).ToString()
            Write-Output "New Address: $newEmailAddress"
            $notFound = $true
            # Backup of the current proxyAddresses, named after the mail attribute.
            Set-Content -path ("backup\" + ($userObject.mail).ToString() + ".txt") -value $userObject.proxyAddresses
            foreach($proxyAddress in $userObject.proxyAddresses) {
                if($proxyAddress -eq $newEmailAddress) {
                    $notFound = $false
                }
            }
            if($notFound -eq $true) {
                Write-Output "Adding $newEmailAddress"
                if($writeEnabled -eq $true) {
                    # PutEx requires an array even for a single value.
                    $userObject.PutEx($adsPropertyAppend, "proxyAddresses", @($newEmailAddress))
                    $userObject.SetInfo()
                }
            } else {
                Write-Output "Already has $newEmailAddress"
            }
            $objectRecordSet.MoveNext()
        }
    } else {
        Write-Output "Could not locate $user in the forest."
    }
    Write-Output ("-" * 50)
}


## Monday, March 14, 2011

### Unix Tail-like Functionality in PowerShell Revisited

My first attempt to replicate tail for PowerShell, which I wrote about in "Unix Tail-like Functionality in PowerShell", was horribly inefficient once you got past a couple dozen lines. This makes sense given the method I was using -- a byte by byte reverse read of a text file, converting each byte at a time to ASCII. I knew the solution was to "wolf down" large byte chunks and process them as a whole. Using System.Text.ASCIIEncoding.GetString, I am doing that just after reading into memory multiple bytes using System.IO.FileStream.Read. With this change in methodology, I am getting to within 3% of the speed of tail in UNIX in my tests. The largest test I've performed was returning 1,000,000 lines from a 850MB log file. A Mac OS X 10.6.6 workstation performed the task in 16 seconds using tail and a Windows Server 2003 server returned in 17 seconds using this method. Good enough for me. Most of my needs are in the thousands of lines which I am able to return in hundreds of milliseconds, which is perfect for my monitoring scripts in Nagios. Compared to my previous attempt, this is a Lockheed SR-71 vs. a Wright Brothers Flyer. A small 5,000 line tail using the old code took 5 1/2 minutes to return while this code returned the same lines in 200 milliseconds. Huge difference!

In the code sample below, I am using 10 kilobytes for that chunking. I found that number suited most of my needs. However, you can greatly increase that number for a large number of lines to be returned (I used 4MB for my million line test). You can also do a little automatic tuning by altering the number of bytes using the number of lines you are seeking. One thing to be aware of when passing files to this code: if you pass a file to System.IO.File/FileStream without a full path, it will not assume the file is located in the path of the executed script, so Test-Path is not a valid test. Using System.IO.Directory.GetCurrentDirectory, you can find this by running the following in PowerShell:
[System.IO.Directory]::GetCurrentDirectory()
More than likely, it will point to the home directory of the profile the shell is executed under.

Also be aware that this tail-like function does not handle unicode log files. The method I am using to decode the bytes is ASCII dependent. I am not using System.Text.UnicodeEncoding yet in the code. Currently ASCII meets all my needs for reading log files but I am still interested in adding compatibility to this function. I am also assuming that all log files denote the end of a line using carriage return & line feed (CHR 13 + CHR 10) which is how majority of text files are written in Windows. UNIX & old style Macintosh text files will not work properly with this code. You will need to modify line 23 to change the delimiter for the split for those text file formats.

UPDATE: I have now finished an update that provides the "tail -f" functionality for continuously reading the updates to a text file. Read about it in my blog post, Replicating UNIX "tail -f" in PowerShell.

UPDATE: I have updated the code to handle unicode text files and non-Windows new lines. You can review the code here.
Function Read-EndOfFileByByteChunk($fileName,$totalNumberOfLines,$byteChunk) {
    # Return the last $totalNumberOfLines lines of $fileName as an ArrayList,
    # reading backwards from the end of the file in $byteChunk-sized reads.
    # ASCII-only; lines are assumed to end with CRLF ("\r\n").
    if($totalNumberOfLines -lt 1) { $totalNumberOfLines = 1 }
    if($byteChunk -le 0) { $byteChunk = 10240 }

    # ArrayList so chunks can be inserted at the front as we read in reverse.
    $linesOfText = New-Object System.Collections.ArrayList

    if([System.IO.File]::Exists($fileName)) {
        $fileStream = New-Object System.IO.FileStream($fileName,[System.IO.FileMode]::Open,[System.IO.FileAccess]::Read,[System.IO.FileShare]::ReadWrite)
        $asciiEncoding = New-Object System.Text.ASCIIEncoding
        $fileSize = $fileStream.Length
        $byteOffset = $byteChunk
        [byte[]] $bytesRead = New-Object byte[] $byteChunk
        $totalBytesProcessed = 0
        $lastReadAttempt = $false
        do {
            # When the next seek would pass the start of the file, shrink the
            # chunk to exactly the unread remainder and mark this as the final read.
            if($byteOffset -ge $fileSize) {
                $byteChunk = $fileSize - $totalBytesProcessed
                [byte[]] $bytesRead = New-Object byte[] $byteChunk
                $byteOffset = $fileSize
                $lastReadAttempt = $true
            }
            $fileStream.Seek((-$byteOffset), [System.IO.SeekOrigin]::End) | Out-Null
            $fileStream.Read($bytesRead, 0, $byteChunk) | Out-Null

            $chunkOfText = New-Object System.Collections.ArrayList
            $chunkOfText.AddRange(([System.Text.RegularExpressions.Regex]::Split($asciiEncoding.GetString($bytesRead),"\r\n")))

            # The first split element may be a partial line; back the offset up
            # so the next read re-covers it, and drop it unless this is the end.
            $firstLineLength = ($chunkOfText[0].Length)
            $byteOffset = ($byteOffset + $byteChunk) - ($firstLineLength)
            if($lastReadAttempt -eq $false -and $chunkOfText.count -lt $totalNumberOfLines) {
                $chunkOfText.RemoveAt(0)
            }
            $totalBytesProcessed += ($byteChunk - $firstLineLength)
            $linesOfText.InsertRange(0, $chunkOfText)
        } while($totalNumberOfLines -ge $linesOfText.count -and $lastReadAttempt -eq $false -and $totalBytesProcessed -lt $fileSize)
        $fileStream.Close()

        # Drop the trailing empty element produced by the file's final CRLF,
        # then trim any excess lines beyond what was asked for.
        if($linesOfText.count -gt 1) {
            $linesOfText.RemoveAt($linesOfText.count-1)
        }
        $deltaLines = ($linesOfText.count - $totalNumberOfLines)
        if($deltaLines -gt 0) {
            $linesOfText.RemoveRange(0, $deltaLines)
        }
    } else {
        $linesOfText.Add("[ERROR] $fileName not found") | Out-Null
    }
    return $linesOfText
}
#--------------------------------------------------------------------------------------------------#
$fileName = "C:\Logs\really-huge.log" # Your really big log file
$numberOfLines = 100 # Number of lines from the end of the really big log file to return
$byteChunk = 10240 # Size of bytes read per seek during the search for lines to return
####################################################################################################
## This is a possible self-tuning method you can use but will blow up memory on an enormous
## number of lines to return
## $byteChunk = $numberOfLines * 256
####################################################################################################
$lastLines = @()

$lastLines = Read-EndOfFileByByteChunk $fileName $numberOfLines $byteChunk
foreach($lineOfText in $lastLines) {
    Write-Output $lineOfText
}


## Thursday, March 10, 2011

### Write Excel Spreadsheets Fast in PowerShell

If you try to populate an Excel spreadsheet cell by cell using PowerShell, expect it to take a long time. As I showed in a previous post, "Speed Up Reading Excel Files in PowerShell", when dealing with Excel work with comma separated value text files. The population of the cells appears to be the huge bottleneck in Excel performance but if you let Excel load a comma separated values file, you can quickly format and manipulate the prepopulated cells. One example from my own work involves a spreadsheet with ~3,500 rows. If I populate the rows directly through Excel it can take upwards of five minutes to complete the process. If I dump the data to a comma separated values text file then import it into Excel, it takes about 2 seconds -- a huge time savings. When you are running ad hoc reports, it pays off. If you want an excuse to head down to Starbucks from your office, it doesn't. To demonstrate this technique, I will take a previous post, "BlackBerry User Report", and change the report from a comma separated values text file to an Excel spreadsheet. First, to make the final result look nice, the NoteProperty names are cleaned up so they make attractive headers in the spreadsheet. Swap out the following lines to achieve this.
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Domain" -value $domain
Add-Member -inputObject $blackberryUser -type NoteProperty -name "User ID" -value $sAMAccountName
Add-Member -inputObject $blackberryUser -type NoteProperty -name "First Name" -value $firstName
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Last Name" -value $lastName # was $firstName -- copy/paste bug
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Display Name" -value $displayName
Add-Member -inputObject $blackberryUser -type NoteProperty -name "E-Mail Address" -value $proxyAddress
Add-Member -inputObject $blackberryUser -type NoteProperty -name "PIN" -value $pin
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Cell Phone Number" -value $phoneNumber
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Desk Phone Number" -value $telephoneNumber
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Street Address" -value $streetAddress
Add-Member -inputObject $blackberryUser -type NoteProperty -name "City" -value $city
Add-Member -inputObject $blackberryUser -type NoteProperty -name "State" -value $state
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Zip Code" -value $zipCode
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Country" -value $country
Add-Member -inputObject $blackberryUser -type NoteProperty -name "BlackBerry Model" -value $modelName
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Carrier" -value $homeNetwork
Add-Member -inputObject $blackberryUser -type NoteProperty -name "IMEI" -value $imei
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Password Enabled" -value $passwordEnabled
Add-Member -inputObject $blackberryUser -type NoteProperty -name "Exchange Server" -value $exchangeServer
Add-Member -inputObject $blackberryUser -type NoteProperty -name "BlackBerry Server" -value $blackberryServer

Now that you have nice and pretty column headers in your NoteProperty, it's
time to write out that Excel spreadsheet. This code assumes that you have at least Excel 2007 installed on your scripting workstation/server. First the code writes out to the temp directory using a GUID (just to be fancy) for the comma separated values file's name. We create an Excel object and load that temporary file. At that point, we can save out a fairly boring Excel spreadsheet. That won't impress anyone. If you want to look "pro" when you distribute reports, having a good layout is key. You want to save out a nicely formatted spreadsheet. So you autofit all the columns to the longest cell in each, populate the worksheet's name, update some of the metadata in the spreadsheet and then apply a table style. Table styles are a quick way to add color to your spreadsheet with minimal effort by creating a list object. I have found that if you want to apply your own custom style (a future blog post) there is no speed penalty as long as the data is prepopulated. And finally we perform a Save As, remind Excel that we are saved, quit Excel and do some garbage cleanup. If you just quit Excel programmatically, you will find that Excel will still be in memory.

$blackberryUsers = $blackberryUsers | Sort-Object "BlackBerry Server", "Last Name", "First Name"
$excelFile = ("\\fileserver.ad.mydomain.local\it_reports\blackberry\" + (Get-Date -format yyyyMMdd) + "-BlackBerry User Report.xlsx")

$temporaryCsvFile = ($env:temp + "\" + ([System.Guid]::NewGuid()).ToString() + ".csv")
$blackberryUsers | Export-Csv -path$temporaryCsvFile -noTypeInformation

if(Test-Path -path $excelFile) { Remove-Item -path$excelFile }

$excelObject = New-Object -comObject Excel.Application$excelObject.Visible = $false$workbookObject = $excelObject.Workbooks.Open($temporaryCsvFile)
$workbookObject.Title = ("BlackBerry User List for " + (Get-Date -Format D))$workbookObject.Author = "Robert M. Toups, Jr."

$worksheetObject =$workbookObject.Worksheets.Item(1)
$worksheetObject.UsedRange.Columns.Autofit() | Out-Null$worksheetObject.Name = "BlackBerry Users"

$listObject =$worksheetObject.ListObjects.Add([Microsoft.Office.Interop.Excel.XlListObjectSourceType]::xlSrcRange, $worksheetObject.UsedRange,$null,[Microsoft.Office.Interop.Excel.XlYesNoGuess]::xlYes,$null)$listObject.Name = "User Table"
$listObject.TableStyle = "TableStyleMedium4" # Style Cheat Sheet in French/English: http://msdn.microsoft.com/fr-fr/library/documentformat.openxml.spreadsheet.tablestyle.aspx$workbookObject.SaveAs($excelFile,51) # http://msdn.microsoft.com/en-us/library/bb241279.aspx$workbookObject.Saved = $true$workbookObject.Close()
[System.Runtime.Interopservices.Marshal]::ReleaseComObject($workbookObject) | Out-Null$excelObject.Quit()
[System.Runtime.Interopservices.Marshal]::ReleaseComObject($excelObject) | Out-Null
[System.GC]::Collect()
[System.GC]::WaitForPendingFinalizers()

if(Test-Path -path $temporaryCsvFile) { Remove-Item -path $temporaryCsvFile }


## Tuesday, March 8, 2011

### Speed Up Reading Excel Files in PowerShell

One of the most frustrating aspects of PowerShell for me is the extremely sluggish performance of the Excel ComObject. I deal with spreadsheets on a daily basis and have processes to read them and compare information. I have spent hours upon hours trying to figure out how to optimize the process of reading cells. One day, I said to myself, "Why can't I read Excel documents at near the speed I can read a text file?" (in more colorful language than this). The proverbial lightbulb went off above my head. That one question led to the code below. If I can't read Excel files at the speed I want and I can with text files, why not convert the Excel spreadsheet to a text based format? A comma separated value text file! Using PowerShell to load an Excel spreadsheet is quick. The slowdown is reading the cells/rows. So the solution is to load the Excel spreadsheet and immediately save it in the temp directory as a .csv file. PowerShell has a wonderful commandlet called Import-Csv that will allow you to read in a comma separated value text file as PSObject and associate the first row as a NoteProperty for each column in the spreadsheet. As long as your Excel spreadsheet is straightforward, this works perfectly and blazing fast. I have some processes that are now 20 times faster using this method.

Function Remove-File($fileName) {
if(Test-Path -path $fileName) { Remove-Item -path$fileName }
}
#--------------------------------------------------------------------------------------------------#
$excelFile = "\\server.ad.mydomain.local\excelreports\myreport.xlsx"

if(Test-Path -path $excelFile) {
    # Mirror the spreadsheet's file name with a .csv extension in the temp directory.
    $csvFile = ($env:temp + "\" + ((Get-Item -path $excelFile).name).Replace(((Get-Item -path $excelFile).extension),".csv"))

    Remove-File $csvFile

    $excelObject = New-Object -ComObject Excel.Application
    $excelObject.Visible = $false
    $workbookObject = $excelObject.Workbooks.Open($excelFile)
    $workbookObject.SaveAs($csvFile,6) # 6 == CSV; http://msdn.microsoft.com/en-us/library/bb241279.aspx
    $workbookObject.Saved = $true
    $workbookObject.Close()

    # Release COM references and force collection so Excel actually exits.
    [System.Runtime.Interopservices.Marshal]::ReleaseComObject($workbookObject) | Out-Null
    $excelObject.Quit()
    [System.Runtime.Interopservices.Marshal]::ReleaseComObject($excelObject) | Out-Null
    [System.GC]::Collect()
    [System.GC]::WaitForPendingFinalizers()

    $spreadsheetDataObject = Import-Csv -path $csvFile

    # Use the $spreadsheetDataObject for your analysis
Remove-File $csvFile }  ## Monday, March 7, 2011 ### Unix Tail-like Functionality in PowerShell A common tool I use in shell scripts on Unix/Linux/Mac OS X servers is tail. While there are command-line tail conversions for Windows, I need something I can integrate into a script for reading the end of large log files, search for information and act on that result. I don't want to distribute third party software along with the script to accomplish the task. Get-Content and Select-Object are not suitable for large files. After researching the capabilities of File IO in .Net, I found that System.IO.FileStream class had just what I needed. Using this class, I read the target text file byte by byte from the end of the file until I reach a selected number of lines of text delimited by a carriage return. The amount of time it takes to obtain the data is related to the number of characters per line. It works very well in 500 lines or less in my typical log files (I tested up to 1 gigabyte) and much faster than using: Get-Content "C:\Logs\really-huge.log" | Select-Object -last 100  The code meets 95% of my needs but I am sure I can optimize it so it comes close to matching the speed of tail from the Unix distributions I commonly use. It's my first stab at tackling the problem. One interesting part of the code is that I use System.Collections.ArrayList instead of a standard PowerShell array. The reason is since I am reading the file in reverse, I need to return the data back in the proper order. The ArrayList object allows me to insert into the first element so I don't have to re-write the array in the right order after collecting the data. Also I noticed that using System.Convert to covert the bytes to a character instead of using PowerShell's native [char] was faster. Returning large number of lines, it was significant -- about .5 seconds per 100 lines. 
I will keep working on this to close that 5% and update this post with a link to an updated blog post in the future with the improvements. UPDATE: I have rewritten this function in a new blog post and it is lightning fast. This code is deprecated and should only be used for amusement purposes. Function Read-EndOfFile($fileName,$totalNumberOfLines) {$fileStream = New-Object System.IO.FileStream($fileName,[System.IO.FileMode]::Open,[System.IO.FileAccess]::Read,[System.IO.FileShare]::ReadWrite)$linesOfText = New-Object System.Collections.ArrayList
# Tail-like: walk backwards from the end of the file one byte per iteration,
# accumulating characters into $lineOfText and prepending completed lines to
# $linesOfText so they come back in file order. Deprecated per the post above.
$byteOffset = 1$lineOfText = ""
do {
# Seek $byteOffset bytes back from the end, then read that single byte.
$fileStream.Seek((-$byteOffset), [System.IO.SeekOrigin]::End) | Out-Null
$byte =$fileStream.ReadByte()
# 13 = CR (ignored); 10 = LF closes the current line; any other byte is
# prepended to the line being built (byte-wise, so multi-byte UTF-8
# characters would be mangled -- ASCII/ANSI logs assumed).
if($byte -eq 13) { } elseif($byte -eq 10) {
$linesOfText.Insert(0,$lineOfText)
$lineOfText = "" } else {$lineOfText = [System.Convert]::ToChar($byte) +$lineOfText
}
# NOTE(review): '-le' keeps looping until count reaches N+1, so one extra line
# is returned (and the first entry is "" when the file ends with a newline).
# NOTE(review): Seek throws if the file holds fewer than the requested lines,
# since the offset eventually points before the start of the file.
$byteOffset++ } while ($linesOfText.count -le $totalNumberOfLines)$fileStream.Close()
return $linesOfText } #--------------------------------------------------------------------------------------------------#$fileName = "C:\Logs\really-huge.log" # Your really big log file
# Driver: guard against a missing file or a non-positive line count, then print
# the tail of the log line by line.
$numberOfLines = 100 # Number of lines from the end of the really big log file to return if([System.IO.File]::Exists($fileName) -and $numberOfLines -gt 0) {$lastLines = Read-EndOfFile $fileName$numberOfLines

foreach($lineOfText in$lastLines) {
Write-Output $lineOfText } }  ### SharePoint 2010 and Social Collaboration Below is a webinar that I attended hosted by Gig Werks focused on the Social Collaboration features in SharePoint 2010. I found it to be a good introduction to the built-in capabilities of the product. ## Thursday, March 3, 2011 ### Quick Organizational Chart from Active Directory Building on my blog post, Export Manager's Nested Direct Reports, I quickly threw together a revision of the code that will create visual (ASCII at least) organizational chart based on the directReports attribute of an employee in Active Directory. The nesting method I am using could easy be converted to simulate the "tree" DOS command using Get-ChildObject on file systems or about any programming challenge that deals with nested objects. The one main fault of the code, is that it is not pretty when it gets to the last nested object of a parent. It leaves the "|" in front of the last nested subordinate object. A little more work and I could clean this presentation issue, but not today. Function Get-DirectReports($distinguishedName,$level) {$managerObject = Get-ActiveDirectoryObject $distinguishedName if($managerObject.directReports.count -gt 0) {
# Recurse through each direct report, printing an ASCII branch indented by $level.
foreach($directReport in$managerObject.directReports) {
$directReportObject = Get-ActiveDirectoryObject$directReport
# NOTE(review): as the post admits, a "|" is still drawn ahead of the last
# subordinate of each parent. Also, .ToString() on givenName/sn will fail
# for accounts missing either attribute -- TODO confirm against your data.
Write-Output (("|  " * $level) + "|") Write-Output (("| " *$level) + "+-" + ($directReportObject.givenName).ToString() + " " + ($directReportObject.sn).ToString())
Get-DirectReports $directReport ($level + 1)
}
}
}

# Pick one domain controller for $objectDomain from the servers registered in
# this computer's Active Directory site (first match wins; $null when none).
Function Get-LocalDomainController($objectDomain) {
    $siteServers = ([System.DirectoryServices.ActiveDirectory.ActiveDirectorySite]::GetComputerSite()).Servers
    return $siteServers |
        Where-Object { $_.Domain.Name -eq $objectDomain } |
        ForEach-Object { $_.Name } |
        Select-Object -first 1
}

# Derive the DNS domain name from a distinguishedName: keep everything from the
# first "DC=" onward, strip the "DC=" markers, and join the pieces with dots.
Function Get-ObjectADDomain($distinguishedName) {
    $dcPortion = $distinguishedName -replace "(.*?)DC=(.*)",'$2'
    $dottedDomain = ($dcPortion -replace "DC=","") -replace ",","."
    return $dottedDomain
}

# Bind to the object via LDAP on a domain controller local to this machine's
# AD site; forward slashes in the DN are escaped for the ADsPath.
Function Get-ActiveDirectoryObject($distinguishedName) {
    $domainController = Get-LocalDomainController (Get-ObjectADDomain $distinguishedName)
    $escapedDn = $distinguishedName -replace "/","\/"
    return [ADSI]("LDAP://" + $domainController + "/" + $escapedDn)
}
#--------------------------------------------------------------------------------------------------#
# Resolve the forest root naming context once; it anchors the global-catalog search below.
Set-Variable -name forestRootDn -option Constant -value ([ADSI]("LDAP://" + (([System.DirectoryServices.ActiveDirectory.Forest]::GetCurrentForest()).name) + "/rootDSE")).defaultNamingContext
#--------------------------------------------------------------------------------------------------#
# ADO connection/command pair used to run the LDAP-dialect query against a global catalog.
$objectConnection = New-Object -comObject "ADODB.Connection"
$objectCommand = New-Object -comObject "ADODB.Command"
$objectConnection.Open("Provider=ADsDSOObject;")
$objectCommand.ActiveConnection = $objectConnection

# SMTP address of the person at the top of the chart.
$manager = "the.ceo@mycompany.local"

# Find the user object that owns the manager's smtp proxy address, anywhere in the forest.
$ldapBase = "GC://$forestRootDn"
$ldapAttr = "distinguishedName"
$ldapScope = "subtree"
$ldapFilter = "(&(objectClass=user)(proxyAddresses=smtp:$manager))"
$ldapQuery = "<$ldapBase>;$ldapFilter;$ldapAttr;$ldapScope"

$objectCommand.CommandText = $ldapQuery
$objectRecordSet = $objectCommand.Execute()

while(!$objectRecordSet.EOF) {
    # Print the top-level manager's name, then walk directReports recursively from level 0.
    $firstLevelObject = Get-ActiveDirectoryObject $objectRecordSet.Fields.Item('distinguishedName').Value
    $topLevelManager = (($firstLevelObject.givenName).ToString() + " " + ($firstLevelObject.sn).ToString())
    Write-Output $topLevelManager
    Get-DirectReports $firstLevelObject.distinguishedName 0
    $objectRecordSet.MoveNext()
}

# Release the ADO COM objects -- mirrors the COM cleanup the Excel scripts in this
# file already perform; previously these were leaked until process exit.
$objectRecordSet.Close()
$objectConnection.Close()
[System.Runtime.Interopservices.Marshal]::ReleaseComObject($objectCommand) | Out-Null
[System.Runtime.Interopservices.Marshal]::ReleaseComObject($objectConnection) | Out-Null