Testing Colab + Gemini
Today we're going for something more practical and I'm letting my inner geek out... Here is what Gemini generated in Colab from the prompt below:

```python
# prompt: build a transformer with keras and explain each step
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Define the transformer encoder block
def transformer_encoder(inputs, head_size, num_heads, ff_dim, dropout=0):
    # Multi-head self-attention over the input sequence
    attention_output = layers.MultiHeadAttention(
        num_heads=num_heads, key_dim=head_size, dropout=dropout
    )(inputs, inputs)
    attention_output = layers.Dropout(dropout)(attention_output)
    # Residual connection + layer normalization
    attention_output = layers.LayerNormalization(epsilon=1e-6)(inputs + attention_output)

    # Position-wise feed-forward network, projecting back to the input width
    ffn = keras.Sequential(
        [
            layers.Dense(ff_dim, activation="relu"),
            layers.Dense(inputs.shape[-1]),
        ]
    )
    ffn_output = ffn(attention_output)
    ffn_output = layers.Dropout(dropout)(ffn_output)
    # Second residual connection + layer normalization
    encoder_output = layers.LayerNormalization(epsilon=1e-6)(attention_output + ffn_output)
    return encoder_output

# Define the transformer model
```
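The generated cell breaks off right after that last comment, so the model definition itself is missing. Below is a minimal sketch of how the encoder block could be stacked into a complete model, here for sequence classification; the `build_model` function, its hyperparameters, and the dummy shapes are my own illustrative assumptions, not Gemini's output:

```python
# A minimal sketch, assuming a sequence-classification setup; names and
# hyperparameters below are illustrative, not from the generated cell.
def build_model(input_shape, head_size, num_heads, ff_dim,
                num_blocks, num_classes, dropout=0.1):
    inputs = keras.Input(shape=input_shape)  # e.g. (sequence_length, features)
    x = inputs
    # Stack several encoder blocks on top of each other
    for _ in range(num_blocks):
        x = transformer_encoder(x, head_size, num_heads, ff_dim, dropout)
    # Pool over the time dimension, then classify
    x = layers.GlobalAveragePooling1D()(x)
    outputs = layers.Dense(num_classes, activation="softmax")(x)
    return keras.Model(inputs, outputs)

# Quick smoke test with made-up dimensions
model = build_model(input_shape=(128, 64), head_size=32, num_heads=4,
                    ff_dim=128, num_blocks=2, num_classes=10)
model.compile(optimizer="adam", loss="sparse_categorical_crossentropy")
model.summary()
```

Note that the generated encoder uses the post-layer-norm arrangement (normalize after each residual sum); many modern transformers normalize before the sublayer instead, but either variant trains fine at this scale.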