qualia_codegen_core.graph.layers package

Module contents

class qualia_codegen_core.graph.layers.TActivationLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation)[source]

Bases: TBaseLayer

activation: TActivation

class qualia_codegen_core.graph.layers.TAddLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation = <TActivation.LINEAR: 3>)[source]

Bases: TBaseLayer

activation: TActivation = TActivation.LINEAR

class qualia_codegen_core.graph.layers.TAvgPooling1DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TAvgPoolingLayer

class qualia_codegen_core.graph.layers.TAvgPooling2DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TAvgPoolingLayer

class qualia_codegen_core.graph.layers.TAvgPoolingLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TBaseLayer

activation: TActivation
pool_size: tuple[int, ...]
strides: tuple[int, ...]
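pool_size and strides follow the usual pooling semantics: each output element aggregates a pool_size window of the input, and consecutive windows start strides elements apart (the same fields appear on TMaxPoolingLayer below, which takes the maximum instead of the mean). As an illustration of the semantics only, not of this package's API, a minimal NumPy sketch of 1D average pooling over a channels-last array, assuming no padding:

    import numpy as np

    def avg_pool1d(x: np.ndarray, pool_size: int, stride: int) -> np.ndarray:
        """Average-pool a (length, channels) array; the output length is
        (length - pool_size) // stride + 1, the usual no-padding formula."""
        out_len = (x.shape[0] - pool_size) // stride + 1
        return np.stack([x[i * stride:i * stride + pool_size].mean(axis=0)
                         for i in range(out_len)])

    x = np.arange(12, dtype=np.float32).reshape(6, 2)   # length 6, 2 channels
    y = avg_pool1d(x, pool_size=2, stride=2)            # shape (3, 2)
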
class qualia_codegen_core.graph.layers.TBaseLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str)[source]

Bases: ABC

input_shape: Shapes
output_shape: Shapes
output_dtype: DTypes
name: str
property weights: OrderedDict[str, ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]]
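TBaseLayer carries the metadata shared by every layer (input and output shapes, output dtype, name), and the weights property exposes whatever trainable arrays a concrete subclass contributes, keyed by name. A hypothetical consumer sketch (the function below is not part of the package) shows how the property can be used generically:

    from qualia_codegen_core.graph.layers import TBaseLayer

    def describe_layer(layer: TBaseLayer) -> None:
        """Print each weight array of a layer; works for any subclass because
        weights is defined on TBaseLayer and overridden where needed."""
        print(f'{layer.name}: in={layer.input_shape} out={layer.output_shape}')
        for wname, array in layer.weights.items():
            print(f'  {wname}: shape={array.shape} dtype={array.dtype}')
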
class qualia_codegen_core.graph.layers.TBatchNormalization1DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', mean: 'NDArrayFloatOrInt', variance: 'NDArrayFloatOrInt', gamma: 'NDArrayFloatOrInt', beta: 'NDArrayFloatOrInt', epsilon: 'NDArrayFloatOrInt', _kernel: 'NDArrayFloatOrInt | None' = None, _bias: 'NDArrayFloatOrInt | None' = None)[source]

Bases: TBatchNormalizationLayer

class qualia_codegen_core.graph.layers.TBatchNormalization2DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', mean: 'NDArrayFloatOrInt', variance: 'NDArrayFloatOrInt', gamma: 'NDArrayFloatOrInt', beta: 'NDArrayFloatOrInt', epsilon: 'NDArrayFloatOrInt', _kernel: 'NDArrayFloatOrInt | None' = None, _bias: 'NDArrayFloatOrInt | None' = None)[source]

Bases: TBatchNormalizationLayer

class qualia_codegen_core.graph.layers.TBatchNormalizationLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', mean: 'NDArrayFloatOrInt', variance: 'NDArrayFloatOrInt', gamma: 'NDArrayFloatOrInt', beta: 'NDArrayFloatOrInt', epsilon: 'NDArrayFloatOrInt', _kernel: 'NDArrayFloatOrInt | None' = None, _bias: 'NDArrayFloatOrInt | None' = None)[source]

Bases: TBaseLayer

activation: TActivation
mean: NDArrayFloatOrInt
variance: NDArrayFloatOrInt
gamma: NDArrayFloatOrInt
beta: NDArrayFloatOrInt
epsilon: NDArrayFloatOrInt
property kernel: ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]
property bias: ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]
property weights: OrderedDict[str, ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]]
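The kernel and bias properties expose the batch-normalization parameters as an equivalent per-channel affine transform (y = kernel * x + bias), which is what an inference kernel actually needs. The standard folding is shown below as an illustrative NumPy sketch; the exact formulation used by this class may differ:

    import numpy as np

    gamma    = np.array([1.0, 0.5], dtype=np.float32)
    beta     = np.array([0.1, -0.2], dtype=np.float32)
    mean     = np.array([0.0, 1.0], dtype=np.float32)
    variance = np.array([1.0, 4.0], dtype=np.float32)
    epsilon  = 1e-5

    # Fold (gamma, beta, mean, variance, epsilon) into a single affine transform.
    kernel = gamma / np.sqrt(variance + epsilon)   # per-channel scale
    bias   = beta - mean * kernel                  # per-channel offset

    x = np.array([[0.5, 2.0]], dtype=np.float32)
    y = x * kernel + bias                          # matches batchnorm inference
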
class qualia_codegen_core.graph.layers.TConv1DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', kernel: 'NDArrayFloatOrInt', kernel_size: 'tuple[int, ...]', strides: 'tuple[int, ...]', filters: 'int', use_bias: 'bool', bias: 'NDArrayFloatOrInt', groups: 'int', padding: tuple[int, int])[source]

Bases: TConvLayer

padding: tuple[int, int]

class qualia_codegen_core.graph.layers.TConv2DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', kernel: 'NDArrayFloatOrInt', kernel_size: 'tuple[int, ...]', strides: 'tuple[int, ...]', filters: 'int', use_bias: 'bool', bias: 'NDArrayFloatOrInt', groups: 'int', padding: tuple[tuple[int, int], tuple[int, int]])[source]

Bases: TConvLayer

padding: tuple[tuple[int, int], tuple[int, int]]

class qualia_codegen_core.graph.layers.TConvLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', kernel: 'NDArrayFloatOrInt', kernel_size: 'tuple[int, ...]', strides: 'tuple[int, ...]', filters: 'int', use_bias: 'bool', bias: 'NDArrayFloatOrInt', groups: 'int')[source]

Bases: TBaseLayer

activation: TActivation
kernel: NDArrayFloatOrInt
kernel_size: tuple[int, ...]
strides: tuple[int, ...]
filters: int
use_bias: bool
bias: NDArrayFloatOrInt
groups: int
property weights: OrderedDict[str, ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]]
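kernel, kernel_size, strides, filters, groups and the optional bias describe a standard (possibly grouped) convolution; weights collects the kernel and, when use_bias is set, the bias. As an illustration of the semantics only (the channels-last layout and the kernel layout below are assumptions, not taken from this package), a minimal NumPy sketch of an ungrouped 1D convolution with explicit padding:

    import numpy as np

    def conv1d(x, kernel, stride, padding):
        """x: (length, in_channels); kernel: (kernel_size, in_channels, filters);
        padding: (left, right). The output length follows the usual formula
        (length + left + right - kernel_size) // stride + 1."""
        x = np.pad(x, (padding, (0, 0)))
        k, _, filters = kernel.shape
        out_len = (x.shape[0] - k) // stride + 1
        out = np.empty((out_len, filters), dtype=x.dtype)
        for i in range(out_len):
            window = x[i * stride:i * stride + k]        # (k, in_channels)
            out[i] = np.tensordot(window, kernel, axes=([0, 1], [0, 1]))
        return out

    x = np.random.rand(8, 3).astype(np.float32)          # length 8, 3 channels
    w = np.random.rand(3, 3, 4).astype(np.float32)       # kernel_size 3, 4 filters
    y = conv1d(x, w, stride=1, padding=(1, 1))           # shape (8, 4)
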
class qualia_codegen_core.graph.layers.TDenseLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: 'TActivation', kernel: 'NDArrayFloatOrInt', units: 'int', use_bias: 'bool', bias: 'NDArrayFloatOrInt')[source]

Bases: TBaseLayer

activation: TActivation
kernel: NDArrayFloatOrInt
units: int
use_bias: bool
bias: NDArrayFloatOrInt
property weights: OrderedDict[str, ndarray[Any, dtype[floating[Any]]] | ndarray[Any, dtype[integer[Any]]]]
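A dense (fully-connected) layer is an affine transform followed by the activation; units is the output width, and weights collects kernel and, if use_bias is set, bias. Illustrative NumPy sketch only, with ReLU shown as an example activation (the (inputs, units) kernel orientation is an assumption, not taken from this package):

    import numpy as np

    inputs, units = 4, 3
    kernel = np.random.rand(inputs, units).astype(np.float32)  # assumed orientation
    bias   = np.zeros(units, dtype=np.float32)

    x = np.random.rand(1, inputs).astype(np.float32)
    y = np.maximum(x @ kernel + bias, 0.0)   # affine transform + ReLU activation
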
class qualia_codegen_core.graph.layers.TDropoutLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, p: float)[source]

Bases: TBaseLayer

p: float

class qualia_codegen_core.graph.layers.TFlattenLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str)[source]

Bases: TBaseLayer

class qualia_codegen_core.graph.layers.TIdentityLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str)[source]

Bases: TBaseLayer

class qualia_codegen_core.graph.layers.TInputLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str)[source]

Bases: TBaseLayer

class qualia_codegen_core.graph.layers.TMaxPooling1DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TMaxPoolingLayer

class qualia_codegen_core.graph.layers.TMaxPooling2DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TMaxPoolingLayer

class qualia_codegen_core.graph.layers.TMaxPoolingLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, activation: qualia_codegen_core.graph.layers.TActivationLayer.TActivation, pool_size: tuple[int, ...], strides: tuple[int, ...])[source]

Bases: TBaseLayer

activation: TActivation
pool_size: tuple[int, ...]
strides: tuple[int, ...]

class qualia_codegen_core.graph.layers.TSumLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, dim: tuple[int, ...])[source]

Bases: TBaseLayer

dim: tuple[int, ...]
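dim lists the axes that are reduced: the layer sums its input over those axes. Illustrative NumPy sketch only, not this package's API:

    import numpy as np

    x = np.arange(24, dtype=np.float32).reshape(2, 3, 4)
    dim = (1, 2)                # axes to reduce, mirroring TSumLayer.dim
    y = x.sum(axis=dim)         # shape (2,)
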
class qualia_codegen_core.graph.layers.TZeroPadding1DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, padding: 'int | tuple[int, int]')[source]

Bases: TZeroPaddingLayer

class qualia_codegen_core.graph.layers.TZeroPadding2DLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, padding: 'int | tuple[int, int]')[source]

Bases: TZeroPaddingLayer

class qualia_codegen_core.graph.layers.TZeroPaddingLayer(input_shape: qualia_codegen_core.typing.Shapes, output_shape: qualia_codegen_core.typing.Shapes, output_dtype: qualia_codegen_core.typing.DTypes, name: str, padding: 'int | tuple[int, int]')[source]

Bases: TBaseLayer

padding: int | tuple[int, int]
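padding is either a single int (the same amount on both sides) or an explicit (before, after) pair. As an illustration only, NumPy's pad shows the effect on the padded axis; how this package normalizes the int form is not documented here, so the normalization in the sketch is an assumption:

    import numpy as np

    x = np.arange(6, dtype=np.float32).reshape(3, 2)   # (length, channels)

    padding = 1                                        # int form: assume (1, 1)
    before_after = (padding, padding) if isinstance(padding, int) else padding
    y = np.pad(x, (before_after, (0, 0)))              # shape (5, 2), zeros added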