
Re: Performance results of an XML-based neural network

Subject: Re: Performance results of an XML-based neural network versus a map-based neural network
From: "Dr. Roger L Costello costello@xxxxxxxxx" <xsl-list-service@xxxxxxxxxxxxxxxxxxxxxx>
Date: Wed, 22 Jul 2020 12:29:30 -0000
Hello Martin,

> Does the profile not reveal where the
> map based solution spends most of
> the time?

Yes, it says that all of the time is spent in the train function and virtually
zero time in the matrix operations, which is strange, since the train function
primarily just calls the matrix operations. My train function is shown below.
Do you see anything suspiciously time-consuming in it?
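
As a sanity check on the profile, I may also try exercising one of the matrix
functions in isolation, outside of f:train, so that its cost has to be charged
to the matrix function itself. A rough sketch of what I have in mind (it is
hypothetical, not in my stylesheet; it assumes matrix:create(rows, cols, name,
flat-list-of-values) and the matrix:dot-product signature used in f:train
below, and wraps each call in count() just to force the result to be computed):

    <!-- Hypothetical isolation test, not part of the real stylesheet.
         Builds a 100 x 100 matrix and a 100 x 1 vector (assuming matrix:create
         accepts a flat, row-major list of values), then multiplies them 1000
         times so the cost shows up against matrix:dot-product itself. -->
    <xsl:template name="profile-dot-product">
        <xsl:variable name="A" select="matrix:create(100, 100, 'A',
            (for $i in 1 to 10000 return $i div 1.0e4))"/>
        <xsl:variable name="v" select="matrix:create(100, 1, 'v',
            (for $i in 1 to 100 return $i div 1.0e2))"/>
        <xsl:for-each select="1 to 1000">
            <!-- count() forces each product to be evaluated -->
            <xsl:value-of select="count(matrix:dot-product($A, $v, 'bench'))"/>
        </xsl:for-each>
    </xsl:template>

I would run that as the initial template (Saxon's -it option) and compare the
reported times.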

> Which Saxon version exactly is that?

The latest version: saxon-ee-10.1.jar

Here's my neural network train function:

<xsl:function name="f:train" as="map(xs:string, item()+)">
    <xsl:param name="self" as="map(xs:string, item()+)"/>   <!-- Current version of the neural network -->
    <xsl:param name="inputs_list" as="xs:double*"/>
    <xsl:param name="targets_list" as="xs:double*"/>

    <!-- Convert inputs_list to n x 1 matrix. -->
    <xsl:variable name="n" select="count($inputs_list)" as="xs:integer"/>
    <xsl:variable name="inputs" select="matrix:create($n, 1, 'input', $inputs_list)"/>

    <!-- Convert targets_list to n x 1 matrix. -->
    <xsl:variable name="n" select="count($targets_list)" as="xs:integer"/>
    <xsl:variable name="targets" select="matrix:create($n, 1, 'target', $targets_list)"/>

    <!-- Calculate signals into hidden layer. -->
    <xsl:variable name="hidden_inputs"
                  select="matrix:dot-product(map:get($self, 'wih'), $inputs, 'hidden_inputs')"
                  as="item()*"/>

    <!-- Calculate the signals emerging from hidden layer. -->
    <xsl:variable name="hidden_outputs"
                  select="activation:sigmoid($hidden_inputs, 'hidden_outputs')"
                  as="item()*"/>

    <!-- Calculate signals into final output layer. -->
    <xsl:variable name="final_inputs"
                  select="matrix:dot-product(map:get($self, 'who'), $hidden_outputs, 'final_inputs')"
                  as="item()*"/>

    <!-- Calculate the signals emerging from final output layer. -->
    <xsl:variable name="final_outputs"
                  select="activation:sigmoid($final_inputs, 'final_outputs')"
                  as="item()*"/>

    <!-- Output errors is (target - actual). -->
    <xsl:variable name="output_errors"
                  select="matrix:difference($targets, $final_outputs, 'output_errors')"
                  as="item()*"/>

    <!-- Hidden layer error is the output_errors, split by weights, recombined at hidden nodes. -->
    <xsl:variable name="hidden_errors"
                  select="matrix:dot-product(matrix:transpose(map:get($self, 'who')), $output_errors, 'hidden_errors')"
                  as="item()*"/>

    <!-- Update the weights for the links between the hidden and output layers. -->
    <xsl:variable name="Ek_times_Ok"
                  select="matrix:Hadamard_product($output_errors, $final_outputs, 'Ek_times_Ok')"
                  as="item()*"/>
    <xsl:variable name="One_minus_Ok"
                  select="matrix:scalar-difference(1, $final_outputs, 'One_minus_Ok')"
                  as="item()*"/>
    <xsl:variable name="updated-layer-values"
                  select="matrix:Hadamard_product($Ek_times_Ok, $One_minus_Ok, 'updated-layer-values')"
                  as="item()*"/>
    <xsl:variable name="output-transposed"
                  select="matrix:transpose($hidden_outputs)"
                  as="item()*"/>
    <xsl:variable name="weight-changes"
                  select="matrix:dot-product($updated-layer-values, $output-transposed, 'weight-changes')"
                  as="item()*"/>
    <xsl:variable name="learning-rate-multiplied-by-weight-changes"
                  select="matrix:scalar-multiplication(map:get($self, 'lr'), $weight-changes, 'learning-rate-times-weight-changes')"
                  as="item()*"/>
    <xsl:variable name="who" select="map:get($self, 'who')" as="item()*"/>
    <xsl:variable name="updated-who"
                  select="matrix:addition(map:get($self, 'who'), $learning-rate-multiplied-by-weight-changes, 'who')"
                  as="item()*"/>

    <!-- Update the weights for the links between the input and hidden layer. -->
    <xsl:variable name="Ek_times_Ok"
                  select="matrix:Hadamard_product($hidden_errors, $hidden_outputs, 'Ek_times_Ok')"
                  as="item()*"/>
    <xsl:variable name="One_minus_Ok"
                  select="matrix:scalar-difference(1, $hidden_outputs, 'One_minus_Ok')"
                  as="item()*"/>
    <xsl:variable name="updated-layer-values"
                  select="matrix:Hadamard_product($Ek_times_Ok, $One_minus_Ok, 'updated-layer-values')"
                  as="item()*"/>
    <xsl:variable name="output-transposed"
                  select="matrix:transpose($inputs)"
                  as="item()*"/>
    <xsl:variable name="weight-changes"
                  select="matrix:dot-product($updated-layer-values, $output-transposed, 'weight-changes')"
                  as="item()*"/>
    <xsl:variable name="learning-rate-multiplied-by-weight-changes"
                  select="matrix:scalar-multiplication(map:get($self, 'lr'), $weight-changes, 'learning-rate-times-weight-changes')"
                  as="item()*"/>
    <xsl:variable name="updated-wih"
                  select="matrix:addition(map:get($self, 'wih'), $learning-rate-multiplied-by-weight-changes, 'wih')"
                  as="item()*"/>

    <xsl:variable name="neural-network-with-new-wih"
                  select="map:put($self, 'wih', $updated-wih)"/>
    <xsl:variable name="neural-network-with-new-wih-and-new-who"
                  select="map:put($neural-network-with-new-wih, 'who', $updated-who)"/>
    <xsl:sequence select="$neural-network-with-new-wih-and-new-who"/>

</xsl:function>
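
For completeness, here is roughly how f:train gets driven: it returns the
updated network rather than modifying anything in place, so each call's result
is threaded into the next, e.g. with fold-left. This is only a sketch -- the
$samples value here (a sequence of maps with 'inputs' and 'targets' entries)
is an illustrative stand-in, not my actual data layout:

    <!-- Illustrative driver only. $nn is the network map (with 'wih', 'who',
         'lr'); each $sample is assumed to be
         map { 'inputs' : xs:double*, 'targets' : xs:double* }. -->
    <xsl:variable name="trained-network"
                  select="fold-left($samples, $nn, function($net, $sample) {
                              f:train($net, $sample?inputs, $sample?targets)
                          })"/>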

/Roger
