--- /dev/null
+++ b/man/compile_model.Rd
@@ -0,0 +1,64 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/create_model_utils.R
+\name{compile_model}
+\alias{compile_model}
+\title{Compile model}
+\usage{
+compile_model(
+  model,
+  solver,
+  learning_rate,
+  loss_fn,
+  label_smoothing = 0,
+  num_output_layers = 1,
+  label_noise_matrix = NULL,
+  bal_acc = FALSE,
+  f1_metric = FALSE,
+  auc_metric = FALSE,
+  layer_dense = NULL
+)
+}
+\arguments{
+\item{model}{A keras model.}
+
+\item{solver}{Optimization method; options are \verb{"adam", "adagrad", "rmsprop"} or \code{"sgd"}.}
+
+\item{learning_rate}{Learning rate for the optimizer.}
+
+\item{loss_fn}{Either \code{"categorical_crossentropy"} or \code{"binary_crossentropy"}. If \code{label_noise_matrix} is given, a custom \code{"noisy_loss"} is used.}
+
+\item{label_smoothing}{Float in [0, 1]. If 0, no smoothing is applied. If > 0, the loss is computed between the
+predicted labels and a smoothed version of the true labels, where the smoothing squeezes the labels towards 0.5.
+The closer the argument is to 1, the more the labels are smoothed.}
+
+\item{num_output_layers}{Number of output layers.}
+
+\item{label_noise_matrix}{Matrix of label noise. Every row stands for one class and every column for the percentage of labels in that class.
+If the first class contains 5 percent wrong labels and the second class no noise, then
+
+\code{label_noise_matrix <- matrix(c(0.95, 0.05, 0, 1), nrow = 2, byrow = TRUE)}}
+
+\item{bal_acc}{Whether to add balanced accuracy as a metric.}
+
+\item{f1_metric}{Whether to add the F1 metric.}
+
+\item{auc_metric}{Whether to add the AUC metric.}
+
+\item{layer_dense}{Vector specifying the number of neurons per dense layer after the last LSTM or CNN layer (if no LSTM is used).}
+}
+\value{
+A compiled keras model.
+}
+\description{
+Compile a keras model by adding an optimizer, loss function and metrics.
+}
+\examples{
+\dontshow{if (reticulate::py_module_available("tensorflow")) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
+
+model <- create_model_lstm_cnn(layer_lstm = 8, compile = FALSE)
+model <- compile_model(model = model,
+                       solver = 'adam',
+                       learning_rate = 0.01,
+                       loss_fn = 'categorical_crossentropy')
+\dontshow{\}) # examplesIf}
+}
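
For context, a call that exercises the label-noise and metric arguments could look like the sketch below. It only uses parameters documented above; create_model_lstm_cnn() and the particular values are taken from the \examples block and are otherwise illustrative, not a prescribed configuration.

# Sketch: compile with a 2-class label-noise matrix and extra metrics.
# Row 1: first class has 5 percent mislabeled samples; row 2: second class is clean.
label_noise_matrix <- matrix(c(0.95, 0.05,
                               0.00, 1.00), nrow = 2, byrow = TRUE)
model <- create_model_lstm_cnn(layer_lstm = 8, compile = FALSE)
model <- compile_model(model = model,
                       solver = 'adam',
                       learning_rate = 0.01,
                       loss_fn = 'categorical_crossentropy',
                       label_noise_matrix = label_noise_matrix,
                       f1_metric = TRUE,
                       auc_metric = TRUE)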