--- a
+++ b/man/layer_pos_embedding_wrapper.Rd
@@ -0,0 +1,36 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/custom_layers.R
+\name{layer_pos_embedding_wrapper}
+\alias{layer_pos_embedding_wrapper}
+\title{Layer for positional embedding}
+\usage{
+layer_pos_embedding_wrapper(
+  maxlen = 100,
+  vocabulary_size = 4,
+  load_r6 = FALSE,
+  embed_dim = 64
+)
+}
+\arguments{
+\item{maxlen}{Length of predictor sequence.}
+
+\item{vocabulary_size}{Number of unique characters in the vocabulary.}
+
+\item{load_r6}{Whether to load the R6 layer class.}
+
+\item{embed_dim}{Dimension of the token embedding. No embedding is applied if set to 0. Should be used when the input is not one-hot
+encoded (i.e. an integer sequence).}
+}
+\value{
+A keras layer implementing positional embedding.
+}
+\description{
+Positional encoding layer with learned embedding.
+}
+\examples{
+
+\donttest{
+library(keras)
+l <- layer_pos_embedding_wrapper()
+}
+}
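
For context, a short usage sketch expanding on the example in the diff above. It assumes the wrapper returns a callable keras layer object (as the existing example suggests) and that keras/TensorFlow are installed; the input wiring around the layer is illustrative and not taken from the package itself.

library(keras)

# Build the positional-embedding layer with explicit arguments
# (defaults shown in the \usage block above).
maxlen <- 100          # length of the predictor sequence
vocabulary_size <- 4   # number of unique characters in the vocabulary
embed_dim <- 64        # token embedding dimension; 0 would disable the token embedding

pos_emb <- layer_pos_embedding_wrapper(
  maxlen = maxlen,
  vocabulary_size = vocabulary_size,
  embed_dim = embed_dim
)

# With embed_dim > 0 the input is expected as an integer sequence rather than
# a one-hot encoding, so the input shape is just the sequence length.
inputs <- layer_input(shape = c(maxlen))
outputs <- pos_emb(inputs)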