Coverage for source/model/model_blue_prints/dnn_blue_print.py: 23%
31 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-08-30 09:28 +0000
1# model/model_blue_prints/dnn_blue_print.py
3# global imports
4import math
5from tensorflow.keras import layers, Model
6from typing import Optional
8# local imports
9from source.model import BluePrintBase, ModelAdapterBase, TFModelAdapter
class DnnBluePrint(BluePrintBase):
    """
    Blueprint for creating a DNN.

    This class implements a model blueprint that constructs a neural network using
    fully connected (dense) layers. Expected input is a 1D vector of features.
    """

    def __init__(self, dense_squeezing_coeff: int = 2, dense_repetition_coeff: int = 1) -> None:
        """
        Initializes the DnnBluePrint with the specified configuration parameters.

        Parameters:
            dense_squeezing_coeff (int): Factor by which dense layer sizes are reduced.
            dense_repetition_coeff (int): Number of dense layers of the same size to use.
        """
        self.__dense_squeezing_coeff = dense_squeezing_coeff
        self.__dense_repetition_coeff = dense_repetition_coeff

    def instantiate_model(self, input_shape: tuple[int, int], output_length: int,
                          dense_squeezing_coeff: Optional[int] = None,
                          dense_repetition_coeff: Optional[int] = None) -> ModelAdapterBase:
        """
        Creates and returns a DNN model according to specified parameters.

        The method constructs a neural network that:
        1. Processes a 1D input vector through multiple dense layers
        2. Produces a softmax output for classification

        Parameters:
            input_shape (tuple[int, int]): Shape of the input tensor; only
                input_shape[0] (the feature count) is used here.
            output_length (int): Number of output classes/actions (must be >= 1).
            dense_squeezing_coeff (Optional[int]): Factor by which dense layer
                sizes are reduced; defaults to the value given at construction.
            dense_repetition_coeff (Optional[int]): Number of dense layers of the
                same size to use; defaults to the value given at construction.

        Returns:
            ModelAdapterBase: Adapter wrapping the Keras model implementing the
            DNN architecture, to be compiled further.
        """
        if dense_squeezing_coeff is None:
            dense_squeezing_coeff = self.__dense_squeezing_coeff
        if dense_repetition_coeff is None:
            dense_repetition_coeff = self.__dense_repetition_coeff

        # Input arrives as (batch, 1, features); flatten to (batch, features).
        input_vector = layers.Input((1, input_shape[0]))
        reshaped_input_vector = layers.Reshape((input_shape[0],))(input_vector)

        # Largest power of the squeezing coefficient not exceeding the feature count;
        # the first dense layer snaps the width to this power so the subsequent
        # integer divisions stay exact.
        closest_smaller_power_of_coeff = int(math.pow(
            dense_squeezing_coeff,
            int(math.log(reshaped_input_vector.shape[-1], dense_squeezing_coeff))))
        dense = layers.Dense(closest_smaller_power_of_coeff, activation='relu')(reshaped_input_vector)
        dense = layers.BatchNormalization()(dense)

        number_of_nodes = closest_smaller_power_of_coeff // dense_squeezing_coeff
        nr_of_dense_layers = int(math.log(closest_smaller_power_of_coeff, dense_squeezing_coeff))
        for _ in range(nr_of_dense_layers):
            for _ in range(dense_repetition_coeff):
                dense = layers.Dense(number_of_nodes, activation='relu')(dense)
                dense = layers.BatchNormalization()(dense)
            number_of_nodes //= dense_squeezing_coeff
            # BUG FIX: the original evaluated math.log(number_of_nodes, 10) with no
            # guard — when the width underflows to 0 before the magnitude-match
            # break below fires (possible when output_length's magnitude exceeds
            # every width this loop produces), log(0) raised a ValueError.
            if number_of_nodes < 1:
                break
            # BUG FIX: math.log(x, 10) is a quotient of natural logs and is inexact
            # at exact powers of ten (math.log(1000, 10) == 2.999...96, truncating
            # to 2); math.log10 returns the exact integer there.
            if int(math.log10(number_of_nodes)) == int(math.log10(output_length)) + 1:
                # One order of magnitude above the output width: regularize once
                # before the classification head.
                dense = layers.Dropout(0.3)(dense)
            elif int(math.log10(number_of_nodes)) == int(math.log10(output_length)):
                # Width has reached the output's order of magnitude: stop squeezing.
                break

        output = layers.Dense(output_length, activation='softmax')(dense)

        return TFModelAdapter(Model(inputs=input_vector, outputs=output))