hasktorch-indef-0.0.1.0: Core Hasktorch abstractions wrapping FFI bindings

Copyright    (c) Sam Stites 2017
License      BSD3
Maintainer   sam@stites.io
Stability    experimental
Portability  non-portable
Safe Haskell None
Language     Haskell2010

Torch.Indef.Static.NN.Math

Description

Synopsis

Documentation

abs_updateOutput :: Tensor d -> IO (Tensor d) Source #

abs forward pass (allocates and returns the output tensor)

abs_updateGradInput Source #

Arguments

:: Product d ~ Product d' 
=> Tensor d

input

-> Tensor d'

gradOutput

-> IO (Tensor d)

gradInput

abs backward pass (computes and returns the gradient with respect to the input)
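
A minimal sketch of the non-mutating pair above, assuming this module's names are in scope through an instantiated backend and that a constant builder from the static tensor API is available (both assumptions):

  absExample :: IO ()
  absExample = do
    let x = constant (-2) :: Tensor '[3]  -- assumed helper: fill a 3-vector with -2
    y  <- abs_updateOutput x              -- forward: y = |x|
    gi <- abs_updateGradInput x y         -- backward, with y standing in as gradOutput
    print gi                              -- assumes the usual Show instance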

_sqrt_updateOutput :: Tensor d -> Tensor d -> Double -> IO () Source #

sqrt forward pass (updates the output tensor)

_sqrt_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO () Source #

sqrt backward pass (updates the gradInput tensor in place)
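
A hedged sketch of the in-place forward pass; reading the arguments as (input, destination, eps) follows the THNN convention and is an assumption, as is taking the Double to be a small epsilon guarding sqrt near zero:

  sqrtInPlace :: Tensor '[4] -> Tensor '[4] -> IO ()
  sqrtInPlace input out = _sqrt_updateOutput input out 1e-12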

_square_updateOutput :: Tensor d -> Tensor d -> IO () Source #

square forward pass (updates the output tensor)

_square_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #

square backward pass (updates the gradInput tensor in place)

_logSigmoid_updateOutput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #

logSigmoid forward pass (updates the output tensor)

_logSigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO () Source #

logSigmoid backward pass (updates the gradInput tensor in place)

_sigmoid_updateOutput :: Tensor d -> Tensor d -> IO () Source #

sigmoid forward pass (updates the output tensor)

_sigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #

sigmoid backward pass (updates the gradInput tensor in place)
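
The underscore-prefixed pair mutates caller-supplied tensors; a sketch of both passes, where the backward argument roles (gradOutput, gradInput, output) follow THNN and are an assumption:

  -- forward: write sigmoid(input) into out
  sigmoidForward :: Tensor '[4] -> Tensor '[4] -> IO ()
  sigmoidForward input out = _sigmoid_updateOutput input out

  -- backward: write gradOutput * out * (1 - out) into gradInput
  sigmoidBackward :: Tensor '[4] -> Tensor '[4] -> Tensor '[4] -> IO ()
  sigmoidBackward gradOutput gradInput out =
    _sigmoid_updateGradInput gradOutput gradInput out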

softmax Source #

Arguments

:: KnownDim n 
=> Reifies s W 
=> BVar s (Tensor '[n])

input

-> BVar s (Tensor '[n])

output

one dimensional version of softmaxN
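
Because softmax is written over backprop's BVar, a plain tensor goes through evalBP (forward only) or gradBP (gradient, which additionally needs the Backprop instance for Tensor that hasktorch supplies); a minimal sketch:

  import Numeric.Backprop (evalBP, gradBP)

  probs :: Tensor '[10] -> Tensor '[10]
  probs = evalBP softmax    -- forward pass only

  dProbs :: Tensor '[10] -> Tensor '[10]
  dProbs = gradBP softmax   -- gradient w.r.t. the input, seeded with ones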

softmaxBatch Source #

Arguments

:: KnownDim b 
=> KnownDim n 
=> Reifies s W 
=> BVar s (Tensor '[b, n])

input

-> BVar s (Tensor '[b, n])

output

softmaxN along the mini-batch dimension.
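
The batch variant runs the same way; a sketch assuming a batch of 32 rows with 10 classes each:

  batchProbs :: Tensor '[32, 10] -> Tensor '[32, 10]
  batchProbs = evalBP softmaxBatch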

softmaxN Source #

Arguments

:: Reifies s W 
=> (i < Length d) ~ True 
=> Dimensions d 
=> Dim i

dimension to softmax over

-> BVar s (Tensor d)

input

-> BVar s (Tensor d)

output

softmax along the given dimension of a tensor
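
For the general variant the dimension is a Dim singleton from the dimensions package; conjuring it with a dim value and a type annotation, as below, is an assumption about that package's API:

  -- softmax over dimension 1 of a rank-3 tensor, leaving dims 0 and 2 alone
  overDim1 :: Tensor '[2, 3, 4] -> Tensor '[2, 3, 4]
  overDim1 = evalBP (softmaxN (dim :: Dim 1))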

logSoftMax Source #

Arguments

:: KnownDim n 
=> Reifies s W 
=> BVar s (Tensor '[n])

input

-> BVar s (Tensor '[n])

output

one dimensional version of logSoftMaxN

logSoftMaxBatch Source #

Arguments

:: KnownDim n 
=> KnownDim b 
=> Reifies s W 
=> BVar s (Tensor '[b, n])

input

-> BVar s (Tensor '[b, n])

output

logSoftMaxN along the mini-batch dimension.
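
Log-softmax is the numerically stable choice when the output feeds a negative log-likelihood loss. A sketch of the forward pass and the gradient for the batch variant, under the same evalBP/gradBP and Backprop-instance assumptions as above:

  logProbs :: Tensor '[32, 10] -> Tensor '[32, 10]
  logProbs = evalBP logSoftMaxBatch

  dLogProbs :: Tensor '[32, 10] -> Tensor '[32, 10]
  dLogProbs = gradBP logSoftMaxBatch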

logSoftMaxN Source #

Arguments

:: Reifies s W 
=> (i < Length d) ~ True 
=> Dimensions d 
=> Dim i

dimension to logSoftMax over

-> BVar s (Tensor d)

input

-> BVar s (Tensor d)

output

log-softmax along the given dimension of a tensor

_softPlus_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> IO () Source #

softPlus forward pass (updates the output tensor)

_softPlus_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> Double -> Double -> IO () Source #

softPlus backward pass (updates the gradInput tensor in place)
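
softplus(x) = (1/beta) * log(1 + exp(beta * x)), reverting to the identity once beta * x exceeds the threshold; mapping the two Doubles to (beta, threshold) in that order follows THNN and is an assumption:

  softPlusInPlace :: Tensor '[4] -> Tensor '[4] -> IO ()
  softPlusInPlace input out = _softPlus_updateOutput input out 1 20
    -- beta = 1, threshold = 20 are the customary defaults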

_softShrink_updateOutput :: Tensor d -> Tensor d -> Double -> IO () Source #

softShrink forward pass (updates the output tensor)

_softShrink_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> IO () Source #

softShrink backward pass (updates the gradInput tensor in place)

_tanh_updateOutput :: Tensor d -> Tensor d -> IO () Source #

tanh forward pass (updates the output tensor)

_tanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #

tanh backward pass (updates the gradInput tensor in place)

_hardTanh_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> Bool -> IO () Source #

hardTanh forward pass (updates the output tensor)

_hardTanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> Double -> Bool -> IO () Source #

hardTanh backward pass (updates the gradInput tensor in place)
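
hardTanh clamps its input to [min_val, max_val]; reading the two Doubles as (min_val, max_val) and the Bool as an in-place flag follows THNN and is an assumption:

  hardTanhInPlace :: Tensor '[4] -> Tensor '[4] -> IO ()
  hardTanhInPlace input out = _hardTanh_updateOutput input out (-1) 1 False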