SQL Script: After Restore from PROD to TEST

Do you want PROD data in your TEST System? Easy, just restore a backup of PROD into your TEST-DB. Run the following script to clean up some AOS specific data:

-- Repoint batch server registrations from the PROD AOS instance (02) to the
-- TEST/DEV instance (01) after restoring the PROD backup into the TEST DB.
-- NOTE: the original post used typographic quotes (’ ′) and en-dashes (–);
-- these are plain ASCII so the script actually runs in SSMS.
UPDATE [AX09_TEST].[dbo].[BatchServerGroup] SET SERVERID = '01@SRVAOS1' WHERE SERVERID = '02@SRVAOS1'
--01: DEV, 02: PROD, 03: MIG

UPDATE [AX09_TEST].[dbo].[BATCHSERVERCONFIG] SET SERVERID = '01@SRVAOS1' WHERE SERVERID = '02@SRVAOS1'
--01: DEV, 02: PROD, 03: MIG

-- A fresh SERVERGUID prevents the TEST AOS from being mistaken for the PROD one.
UPDATE [AX09_TEST].[dbo].[SYSSERVERCONFIG] SET SERVERID = '01@SRVAOS1', SERVERGUID = newid() WHERE SERVERID = '02@SRVAOS1'
--01: DEV, 02: PROD, 03: MIG

UPDATE [AX09_TEST].[dbo].[Batch] SET SERVERID = '01@SRVAOS1' WHERE SERVERID = '02@SRVAOS1'
--01: DEV, 02: PROD, 03: MIG

-- Stale client sessions from PROD are meaningless on TEST; drop them all.
DELETE FROM [AX09_TEST].[dbo].[SYSCLIENTSESSIONS]

--the following has nothing to do with the restore to TEST
--remove obsolete [SYSCLIENTSESSIONS] (STATUS = 0 means the session has ended) from PROD

SELECT * FROM [AX09_LIVE].[dbo].[SYSCLIENTSESSIONS] WHERE STATUS = 0

DELETE FROM [AX09_LIVE].[dbo].[SYSCLIENTSESSIONS] WHERE STATUS = 0

Powershell script: Copy DEV Layers to LIVE

The following Powershell script copies the AX2009 DEV Layers to LIVE thereby checking that the services have been stopped and also copying the previous layers into old and backing up old in subfolders of old:

# Load WinForms so the function below can show MessageBox notifications.
[void][System.Reflection.Assembly]::Load("System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")

# DEV environment: AOS instance 01 on srvaos2.
# BUGFIX: the original wrote "AOS50?01" with a "? stands for $" placeholder;
# single quotes keep the literal $ in the service name, no placeholder needed.
$AXSERVER_DEV = "srvaos2"
$AXSERVICE_DEV = 'AOS50$01'
$AXPATH_DEV = "\\" + $AXSERVER_DEV + "\c$\Program Files\Microsoft Dynamics AX\50\Application\Appl\AX09_DEV"
$AXPATH_DEV_OLD = "\\" + $AXSERVER_DEV + "\c$\Program Files\Microsoft Dynamics AX\50\Application\Appl\AX09_DEV\old"

# PROD environment: AOS instance 02 on srvaos1.
$AXSERVER_PROD = "srvaos1"
$AXSERVICE_PROD = 'AOS50$02'
$AXPATH_PROD = "\\" + $AXSERVER_PROD + "\c$\Program Files\Microsoft Dynamics AX\50\Application\Appl\AX09_LIVE"
$AXPATH_PROD_OLD = "\\" + $AXSERVER_PROD + "\c$\Program Files\Microsoft Dynamics AX\50\Application\Appl\AX09_LIVE\old"

# File patterns: CUS-layer labels/code, plus the index caches to be deleted.
$LABELFILES = "axsa*.ald"
$LAYERFILES = "axcus*.aod"
$AOIFILES = "*.aoi"
$ALIFILES = "*.ali"
#######################################
# Copies the AX2009 CUS layer files (labels + code) from DEV to PROD.
# Prerequisite: both AOS services (DEV and PROD) must be stopped.
# Side effects:
#   - Backs up the current PROD and DEV CUS files into <appl>\old\YYYYMMDD
#   - Rolls the 14-day-old PROD backup folder back up into <appl>\old
#   - Copies ALL DEV layer/label files into DEV\old
#   - Copies the CUS label/layer files DEV -> PROD
#   - Deletes the *.aoi / *.ali index caches on both sides so the AOS
#     rebuilds them on next start
# Globals (read): $AXSERVER_DEV, $AXSERVICE_DEV, $AXPATH_DEV, $AXPATH_DEV_OLD,
#                 $AXSERVER_PROD, $AXSERVICE_PROD, $AXPATH_PROD, $AXPATH_PROD_OLD,
#                 $LABELFILES, $LAYERFILES, $AOIFILES, $ALIFILES
#######################################
function copyAXAppl
{
	# Backup folder names: today's and the 14-day-old one being rolled up.
	$today  = get-date -uformat "%Y%m%d"
	$cutoff = get-date (get-date).addDays(-14) -uformat "%Y%m%d"

	# --- Guard: DEV AOS must be stopped ---
	echo ("checking service " + $AXSERVICE_DEV + " on " + $AXSERVER_DEV)
	if((get-service -ComputerName $AXSERVER_DEV -name $AXSERVICE_DEV).status -ne "Stopped")
	{
		# BUGFIX: the original queried the LOCAL machine with a hard-coded
		# placeholder name; query the DEV server with the configured name.
		[System.Windows.Forms.MessageBox]::Show("First stop '" + (get-service -ComputerName $AXSERVER_DEV -name $AXSERVICE_DEV).Displayname + "'")
		return
	}
	# --- Guard: PROD AOS must be stopped ---
	echo "ok" ("checking service " + $AXSERVICE_PROD + " on " + $AXSERVER_PROD)
	if((get-service -ComputerName $AXSERVER_PROD -name $AXSERVICE_PROD).status -ne "Stopped")
	{
		[System.Windows.Forms.MessageBox]::Show("First stop '" + (get-service -ComputerName $AXSERVER_PROD -name $AXSERVICE_PROD).Displayname + "'")
		return
	}
	echo "ok" ("copying from " + $AXPATH_DEV + " to " + $AXPATH_PROD)

	# --- Backup PROD (CUS label + layer files only) into PROD\old\YYYYMMDD ---
	if(-not (test-path (join-path $AXPATH_PROD_OLD $today)))
	{
		new-item -path $AXPATH_PROD_OLD -name $today -itemtype directory
	}
	(dir -path $AXPATH_PROD -name $LABELFILES) |
		foreach-object {copy $_.pspath (join-path $AXPATH_PROD_OLD $today) }
	(dir -path $AXPATH_PROD -name $LAYERFILES) |
		foreach-object {copy $_.pspath (join-path $AXPATH_PROD_OLD $today) }

	# --- Roll the 14-day-old PROD backup up into PROD\old ---
	if(test-path (join-path $AXPATH_PROD_OLD $cutoff))
	{
		(dir -path (join-path $AXPATH_PROD_OLD $cutoff) -name "*.ald") |
			foreach-object {copy $_.pspath $AXPATH_PROD_OLD }
		(dir -path (join-path $AXPATH_PROD_OLD $cutoff) -name "*.aod") |
			foreach-object {copy $_.pspath $AXPATH_PROD_OLD }
	}

	# --- Backup DEV (CUS label + layer files only) into DEV\old\YYYYMMDD ---
	if(-not (test-path (join-path $AXPATH_DEV_OLD $today)))
	{
		new-item -path $AXPATH_DEV_OLD -name $today -itemtype directory
	}
	(dir -path $AXPATH_DEV -name $LABELFILES) |
		foreach-object {copy $_.pspath (join-path $AXPATH_DEV_OLD $today) }
	(dir -path $AXPATH_DEV -name $LAYERFILES) |
		foreach-object {copy $_.pspath (join-path $AXPATH_DEV_OLD $today) }

	# --- Copy ALL DEV layers (every .ald/.aod) into DEV\old ---
	(dir -path $AXPATH_DEV -name "*.ald") |
		foreach-object {copy $_.pspath $AXPATH_DEV_OLD }
	(dir -path $AXPATH_DEV -name "*.aod") |
		foreach-object {copy $_.pspath $AXPATH_DEV_OLD }

	# --- Deploy: copy the CUS files DEV -> PROD ---
	(dir -path $AXPATH_DEV -name $LABELFILES) |
		foreach-object {copy $_.pspath $AXPATH_PROD }
	(dir -path $AXPATH_DEV -name $LAYERFILES) |
		foreach-object {copy $_.pspath $AXPATH_PROD }

	# --- Delete index caches (.aoi application index, .ali label index) ---
	foreach($applPath in @($AXPATH_PROD, $AXPATH_DEV))
	{
		(dir -path $applPath -name $AOIFILES) |
			foreach-object {remove-item $_.pspath}
		(dir -path $applPath -name $ALIFILES) |
			foreach-object {remove-item $_.pspath}
	}
}

Edit 20.06.2014: variable servers

Reset program cache file for all clients

I had a problem with some clients running on old code (e.g. experiencing bugs that have already been fixed).

The following job resets the GUID which in turn forces all clients to create a new program cache file (AX_*.auc). A restart of the AOS is required.

// Forces every AX client to rebuild its application object cache file
// (AX_<GUID>.auc) by clearing the global GUID; the AOS generates a fresh
// one on its next restart, invalidating all cached client code.
static void forceNewAUC(Args _args)
{
    #AiF
    SysSQMSettings settings;
    ;
    ttsbegin;
    // Set-based update: blank the GUID on the (single) settings record.
    update_recordset settings setting GlobalGUID = str2Guid(#EmptyGuidString);
    ttscommit;
    info("Restart the AOS Service to generate a new GlobalGUID.");
}

(Source: http://blogs.msdn.com/emeadaxsupport/archive/2010/01/25/identical-ax-2009-auc-file-created-for-multiple-ax-installations.aspx)

Another option would be adding the following to the Info.startupPost() method; however, I’m not sure if this is a clever thing to do routinely, but if you cannot restart the AOS it is your only option.

// Flush the AOS caches from code (no service restart needed):
xSession::removeAOC();         // drop the current application object cache
SysTreeNode::refreshAll();     // refresh all AOT tree nodes
SysFlushDictionary::doFlush(); // flush the data dictionary cache
SysFlushAOD::doFlush();        // flush the AOD (layer file) cache
xSession::updateAOC();         // rebuild the application object cache

(Source: http://dynamics-ax.blogspot.com/2006/04/flush-aos-cache-from-code.html)

Multi-company Batch Processing

It is rather annoying that you have to run a special client to do batch processing and that you have to ensure that the client is always running, however even more annoying would be if you would have to run a client for every single company, which is what seems to be needed when you look at the standard system. Luckily however there is a no-modification-needed approach to avoid having to run multiple batch clients for multiple companies:
  • Create a new Virtual company e.g. BAT (Administration>>Setup>>Virtual company accounts)
  • On the Company accounts tab, add all companies to the Virtual company.
  • On the Table collections tab, pick the Batch table collection, which is available as a standard table collection.

That’s it. Easy, isn’t it?

Data import options

There are multiple options to import data. My favorite one when I still "owned" an installation and could take full responsibility was to import directly into the database using and updating the record-id stored in SYSTEMSEQUENCES. Now that I am a consultant and cannot take unreasonable risks, I had to find a new way of efficiently importing data. The standard options are:
  • Excel:
    Populating the Excel template is great, you have predefined drop-down lists for enum values. You can use all the tricks available in Excel to calculate fields in order to clean your data. However Excel has two significant disadvantages: Importing from Excel is sloooooooow. And tables that are linked via the Record-Id (e.g. Address, Notes) cannot be imported.
  • CSV:
    CSV import on the other hand is the exact opposite: importing e.g. thousands of customers takes only a few seconds, while the Excel import takes hours. The record-id field is available for auxiliary tables such as Address and Notes, and the relationship will remain even though the record-ids might be re-mapped during import. But again CSV has significant disadvantages: Enum values are just 0,1,2,… — you first have to find the mapping of these values. Editing CSV tables is not easy, as Excel does not save the quotation marks. Line breaks, as they might appear in the address field, cannot be imported.

My idea was to combine the two methodologies and get a new variant with all the advantages – the remaining disadvantage is that it takes longer to set up:

  1. Create both an Excel and a CSV template (export both with a couple of records so that you have examples to work with)
  2. In the Excel template create new worksheets for each table in which you will prepare the csv data file.
  3. Copy the sample CSV for the dat rows into the new worksheets. Use the menu Data>>Text to columns to convert the csv line (use Delimiter comma and text qualifier {none})
  4. Add another line and mark each column with one of the following data types: STRING, LIST, NUM, DATE
  5. The first five and the last columns contain system fields:
    • The first column always contains "RECORD"
    • The second column always contains the Table-Id. Copy the table Id you found in the exported sample record.
    • The third column contains the record-id which must be unique and can be simply calculated 1,2,3,4,… by using something like = C4+1 for cell C5.
    • The fourth and fifth columns are unused and contain 0
    • The last column contains the record version, which can be set to 1 for all records.
  6. All other fields should contain the following formula =IF(F$1="DATE",TEXT('SourceWS'!A7,"yyyy/mm/dd"),IF(F$1="NUM",'SourceWS'!A7,IF(F$1="LIST",VALUE(LEFT('SourceWS'!A7,3)),"""" & 'SourceWS'!A7 & """"))), whereas of course SourceWS is the original Excel template worksheet which you have populated and which provides you with all the advantages described above. Once you have linked the first field you can copy the formula throughout the csv worksheet. Note that the source data should not contain any line break; instead, each line break should be replaced by some tag, e.g. "-newline-".
  7. Now for creating the csv: copy-paste all the relevant data into a text editor. You now have tab-separated values.
  8. Replace the tabs by commas. Note: Notepad is frighteningly inefficient for doing this simple task. Use a more sophisticated text editor such as Textpad and you will have replaced all tabs within seconds.
  9. Save the generated csv as dat file.
  10. Import
  11. Write a job to replace the -newline- tag:
    // Replaces the "-newline-" placeholder tag (used during dat/CSV import,
    // where literal line breaks cannot be represented) with a real newline
    // in the address/name fields of the enabled tables. Toggle the #define
    // constants below to select which tables are processed.
    // BUGFIX: the subtraction in the Address delete report used an en-dash
    // (oldCount – address.RecId) instead of the minus operator.
    static void AddNewLine(Args _args)
    {
        //only modify the following constants
        #define.runCustTable(false)
        #define.runVendTable(false)
        #define.runInventTable(false)
        #define.runAddress(True)
        #define.pattern("-newline-")
        //only modify the constants above
        CustTable custTable;
        VendTable vendTable;
        Address address;
        InventTable inventTable;
        int64 oldCount;   // Address row count before orphan cleanup
        int i;            // per-table updated-record counter
        ;
        ttsbegin;
        if (#runCustTable)
        {
            while select forupdate custtable where custtable.Address like "*" + #pattern + "*"
                                                || custtable.Street like "*" + #pattern + "*"
            {
                custTable.Address = strreplace(custTable.Address,#pattern,"\n");
                custTable.Street = strreplace(custTable.Street,#pattern,"\n");
                custTable.update();
                i++;
            }
            info(strfmt("%1 CustTable records updated",i));
            i=0;
        }
        if (#runVendTable)
        {
            while select forupdate vendTable where vendTable.Address like "*" + #pattern + "*"
                                                || vendTable.Street like "*" + #pattern + "*"
            {
                vendTable.Address = strreplace(vendTable.Address,#pattern,"\n");
                vendTable.Street = strreplace(vendTable.Street,#pattern,"\n");
                vendTable.update();
                i++;
            }
            info(strfmt("%1 VendTable records updated",i));
            i=0;
        }
        if (#runAddress)
        {
            // First remove orphaned Address records (no AddrRecId link)
            // left behind by the import, then report how many went away.
            select count(recid) from address;
            oldCount = address.RecId;
            delete_from address where !address.AddrRecId;
            select count(recid) from address;
            if (oldCount != address.RecId)
                info(strfmt("%1 Address records deleted", oldCount - address.RecId));
            while select forupdate address where address.Address like "*" + #pattern + "*"
                                                || address.Street like "*" + #pattern + "*"
            {
                address.Address = strreplace(address.Address,#pattern,"\n");
                address.Street = strreplace(address.Street,#pattern,"\n");
                address.update();
                i++;
            }
            info(strfmt("%1 Address records updated",i));
            i=0;
        }
        if (#runInventTable)
        {
            while select forupdate inventTable where inventTable.ItemName like "*" + #pattern + "*"
            {
                inventTable.ItemName = strreplace(inventTable.ItemName,#pattern,"\n");
                inventTable.update();
                i++;
            }
            info(strfmt("%1 InventTable records updated",i));
            i=0;
        }
        ttscommit;
        info("complete");
    }