Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
15 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 27 additions & 1 deletion bindings/pyroot/pythonizations/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,33 @@ if(tmva)
ROOT/_pythonization/_tmva/_rtensor.py
ROOT/_pythonization/_tmva/_tree_inference.py
ROOT/_pythonization/_tmva/_utils.py
ROOT/_pythonization/_tmva/_gnn.py)
ROOT/_pythonization/_tmva/_gnn.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/__init__.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/generate_keras_functional.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/generate_keras_sequential.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/parser.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/parser_test_function.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/__init__.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/batchnorm.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/binary.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/concat.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/conv.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/dense.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/elu.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/flatten.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/identity.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/layernorm.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/leaky_relu.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/permute.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/pooling.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/reshape.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/relu.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/rnn.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/selu.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/sigmoid.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/softmax.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/swish.py
ROOT/_pythonization/_tmva/_sofie/_parser/_keras/layers/tanh.py)
if(dataframe)
list(APPEND PYROOT_EXTRA_PYTHON_SOURCES
ROOT/_pythonization/_tmva/_batchgenerator.py)
Expand Down
1 change: 1 addition & 0 deletions bindings/pyroot/pythonizations/python/ROOT/_facade.py
Original file line number Diff line number Diff line change
Expand Up @@ -427,6 +427,7 @@ def TMVA(self):
from ._pythonization import _tmva # noqa: F401

ns = self._fallback_getattr("TMVA")
setattr(ns.Experimental.SOFIE, "PyKeras", _tmva.PyKeras)
hasRDF = "dataframe" in self.gROOT.GetConfigFeatures()
if hasRDF:
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ def inject_rbatchgenerator(ns):


from ._gnn import RModel_GNN, RModel_GraphIndependent
from ._sofie._parser._keras.parser import PyKeras

hasRDF = "dataframe" in cppyy.gbl.ROOT.GetROOT().GetConfigFeatures()
if hasRDF:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
def get_keras_version() -> str:
    """Return the version string of the locally installed Keras package.

    Keras is imported lazily so that merely importing this module does
    not require Keras to be present.
    """
    import keras as _keras

    return _keras.__version__
Original file line number Diff line number Diff line change
@@ -0,0 +1,211 @@
def generate_keras_functional(dst_dir):
    """Generate, briefly train, and save a suite of small Keras
    functional-API models used as test fixtures for the SOFIE Keras parser.

    Each model is trained for one epoch on random data and saved as
    ``{dst_dir}/Functional_{name}_test.keras``. A failure on any single
    model is reported and skipped so the remaining fixtures are still
    produced.

    Parameters
    ----------
    dst_dir : str
        Destination directory for the generated ``.keras`` files
        (assumed to already exist).
    """
    from keras import models, layers
    import numpy as np

    # Helper training function
    def train_and_save(keras_model, model_name):
        """Fit *keras_model* on random data for one epoch and save it."""
        # Handle multiple inputs dynamically: a multi-input model reports
        # input_shape as a list of per-input shapes.
        if isinstance(keras_model.input_shape, list):
            x_train = [np.random.rand(32, *shape[1:]) for shape in keras_model.input_shape]
        else:
            x_train = np.random.rand(32, *keras_model.input_shape[1:])
        y_train = np.random.rand(32, *keras_model.output_shape[1:])

        try:
            keras_model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mae'])
            keras_model.fit(x_train, y_train, epochs=1, verbose=0)
            keras_model.save(f"{dst_dir}/Functional_{model_name}_test.keras")
        except Exception as error:
            # Best-effort fixture generation: report the failure and keep
            # producing the remaining models.
            print(f"Error while training the keras_model {model_name}: {error}")

    # Activation Functions
    for act in ['relu', 'elu', 'leaky_relu', 'selu', 'sigmoid', 'softmax', 'swish', 'tanh']:
        inp = layers.Input(shape=(10,))
        out = layers.Activation(act)(inp)
        keras_model = models.Model(inp, out)
        train_and_save(keras_model, f"Activation_layer_{act.capitalize()}")
    # Along with these, Keras allows explicit declaration of activation layers such as:
    # [ELU, ReLU, LeakyReLU, Softmax]

    # Add
    in1 = layers.Input(shape=(8,))
    in2 = layers.Input(shape=(8,))
    out = layers.Add()([in1, in2])
    keras_model = models.Model([in1, in2], out)
    train_and_save(keras_model, "Add")

    # AveragePooling2D channels_first
    inp = layers.Input(shape=(3, 8, 8))
    out = layers.AveragePooling2D(pool_size=(2, 2), data_format='channels_first')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "AveragePooling2D_channels_first")

    # AveragePooling2D channels_last
    inp = layers.Input(shape=(8, 8, 3))
    out = layers.AveragePooling2D(pool_size=(2, 2), data_format='channels_last')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "AveragePooling2D_channels_last")

    # BatchNorm (normalizing over a middle axis, not the default -1)
    inp = layers.Input(shape=(10, 3, 5))
    out = layers.BatchNormalization(axis=2)(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "BatchNorm")

    # Concat
    in1 = layers.Input(shape=(8,))
    in2 = layers.Input(shape=(8,))
    out = layers.Concatenate()([in1, in2])
    keras_model = models.Model([in1, in2], out)
    train_and_save(keras_model, "Concat")

    # Conv2D channels_first
    inp = layers.Input(shape=(3, 8, 8))
    out = layers.Conv2D(4, (3, 3), padding='same', data_format='channels_first', activation='relu')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Conv2D_channels_first")

    # Conv2D channels_last
    inp = layers.Input(shape=(8, 8, 3))
    out = layers.Conv2D(4, (3, 3), padding='same', data_format='channels_last', activation='leaky_relu')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Conv2D_channels_last")

    # Conv2D padding_same
    inp = layers.Input(shape=(8, 8, 3))
    out = layers.Conv2D(4, (3, 3), padding='same', data_format='channels_last')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Conv2D_padding_same")

    # Conv2D padding_valid
    inp = layers.Input(shape=(8, 8, 3))
    out = layers.Conv2D(4, (3, 3), padding='valid', data_format='channels_last', activation='elu')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Conv2D_padding_valid")

    # Dense
    inp = layers.Input(shape=(10,))
    out = layers.Dense(5, activation='tanh')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Dense")

    # ELU
    inp = layers.Input(shape=(10,))
    out = layers.ELU(alpha=0.5)(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "ELU")

    # Flatten
    inp = layers.Input(shape=(4, 5))
    out = layers.Flatten()(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Flatten")

    # GlobalAveragePooling2D channels first
    inp = layers.Input(shape=(3, 4, 6))
    out = layers.GlobalAveragePooling2D(data_format='channels_first')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "GlobalAveragePooling2D_channels_first")

    # GlobalAveragePooling2D channels last
    inp = layers.Input(shape=(4, 6, 3))
    out = layers.GlobalAveragePooling2D(data_format='channels_last')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "GlobalAveragePooling2D_channels_last")

    # LayerNorm
    inp = layers.Input(shape=(10, 3, 5))
    out = layers.LayerNormalization(axis=-1)(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "LayerNorm")

    # LeakyReLU
    inp = layers.Input(shape=(10,))
    out = layers.LeakyReLU()(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "LeakyReLU")

    # MaxPooling2D channels_first
    inp = layers.Input(shape=(3, 8, 8))
    out = layers.MaxPooling2D(pool_size=(2, 2), data_format='channels_first')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "MaxPool2D_channels_first")

    # MaxPooling2D channels_last
    inp = layers.Input(shape=(8, 8, 3))
    out = layers.MaxPooling2D(pool_size=(2, 2), data_format='channels_last')(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "MaxPool2D_channels_last")

    # Multiply
    in1 = layers.Input(shape=(8,))
    in2 = layers.Input(shape=(8,))
    out = layers.Multiply()([in1, in2])
    keras_model = models.Model([in1, in2], out)
    train_and_save(keras_model, "Multiply")

    # Permute
    inp = layers.Input(shape=(3, 4, 5))
    out = layers.Permute((2, 1, 3))(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Permute")

    # ReLU
    inp = layers.Input(shape=(10,))
    out = layers.ReLU()(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "ReLU")

    # Reshape
    inp = layers.Input(shape=(4, 5))
    out = layers.Reshape((2, 10))(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Reshape")

    # Softmax
    inp = layers.Input(shape=(10,))
    out = layers.Softmax()(inp)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Softmax")

    # Subtract
    in1 = layers.Input(shape=(8,))
    in2 = layers.Input(shape=(8,))
    out = layers.Subtract()([in1, in2])
    keras_model = models.Model([in1, in2], out)
    train_and_save(keras_model, "Subtract")

    # Layer Combination

    inp = layers.Input(shape=(32, 32, 3))
    x = layers.Conv2D(8, (3,3), padding="same", activation="relu")(inp)
    x = layers.MaxPooling2D((2,2))(x)
    x = layers.Reshape((16, 16, 8))(x)
    x = layers.Permute((3, 1, 2))(x)
    x = layers.Flatten()(x)
    out = layers.Dense(10, activation="softmax")(x)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Layer_Combination_1")

    inp = layers.Input(shape=(20,))
    x = layers.Dense(32, activation="tanh")(inp)
    x = layers.Dense(16)(x)
    x = layers.ELU()(x)
    x = layers.LayerNormalization()(x)
    out = layers.Dense(5, activation="sigmoid")(x)
    keras_model = models.Model(inp, out)
    train_and_save(keras_model, "Layer_Combination_2")

    inp1 = layers.Input(shape=(16,))
    inp2 = layers.Input(shape=(16,))
    d1 = layers.Dense(16, activation="relu")(inp1)
    d2 = layers.Dense(16, activation="selu")(inp2)
    add = layers.Add()([d1, d2])
    sub = layers.Subtract()([d1, d2])
    mul = layers.Multiply()([d1, d2])
    merged = layers.Concatenate()([add, sub, mul])
    # NOTE(review): Keras 3 renamed LeakyReLU's `alpha` argument to
    # `negative_slope` — confirm `alpha=` is still accepted by the Keras
    # versions this fixture generator targets.
    merged = layers.LeakyReLU(alpha=0.1)(merged)
    out = layers.Dense(4, activation="softmax")(merged)
    keras_model = models.Model([inp1, inp2], out)
    train_and_save(keras_model, "Layer_Combination_3")
Loading