man/layer_pos_embedding_wrapper.Rd
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/custom_layers.R
\name{layer_pos_embedding_wrapper}
\alias{layer_pos_embedding_wrapper}
\title{Layer for positional embedding}
\usage{
layer_pos_embedding_wrapper(
  maxlen = 100,
  vocabulary_size = 4,
  load_r6 = FALSE,
  embed_dim = 64
)
}
\arguments{
\item{maxlen}{Length of the predictor sequence.}

\item{vocabulary_size}{Number of unique characters in the vocabulary.}

\item{load_r6}{Whether to load the R6 layer class.}

\item{embed_dim}{Dimension of the token embedding. No embedding is applied if set to 0. Should be used when the input is an integer sequence rather than one-hot encoded.}
}
\value{
A keras layer implementing positional embedding.
}
\description{
Positional encoding layer with learned embedding.
}
\examples{

\donttest{
library(keras)
l <- layer_pos_embedding_wrapper()
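
# Sketch using only documented arguments: a layer for one-hot encoded input,
# where embed_dim = 0 disables the token embedding. The sequence length of
# 200 is an assumed value; adjust it to your predictor length.
l_onehot <- layer_pos_embedding_wrapper(
  maxlen = 200,
  vocabulary_size = 4,
  embed_dim = 0
)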
}
}