% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/custom_layers.R
\name{layer_pos_embedding_wrapper}
\alias{layer_pos_embedding_wrapper}
\title{Layer for positional embedding}
\usage{
layer_pos_embedding_wrapper(
  maxlen = 100,
  vocabulary_size = 4,
  load_r6 = FALSE,
  embed_dim = 64
)
}
\arguments{
\item{maxlen}{Length of the predictor sequence.}
\item{vocabulary_size}{Number of unique characters in the vocabulary.}
\item{load_r6}{Whether to load the R6 layer class.}
\item{embed_dim}{Dimension of the token embedding. No embedding is applied if set to 0. Should be used when the input is an integer
sequence rather than one-hot encoded.}
}
\value{
A keras layer implementing positional embedding.
}
\description{
Positional encoding layer with learned embedding.
}
\examples{
\donttest{
library(keras)
l <- layer_pos_embedding_wrapper()
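# A minimal, hedged sketch of applying the layer: it assumes the wrapper
# returns a Keras layer object that can be called on a tensor, and uses the
# defaults maxlen = 100 and embed_dim = 64 (integer-encoded input, so the
# input is a length-100 sequence of token indices).
inp <- layer_input(shape = c(100))
out <- l(inp)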
}
}