#include <Tokenizer.h>

| virtual             | ~Tokenizer() = default |
| virtual std::string | id() const = 0 |
| virtual size_t      | tokenize(const char *text, size_t text_len, int32_t *tokens, size_t max_tokens, size_t *n_tokens) const = 0 |
| virtual size_t      | detokenize(const int32_t *tokens, size_t n_tokens, char *text, size_t text_len) const = 0 |

Definition at line 25 of file Tokenizer.h.
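Since ggma::Tokenizer is an abstract base class with a protected default constructor, a concrete tokenizer derives from it and overrides the three pure-virtual members. Below is a minimal sketch; the class name `MyTokenizer` and the toy one-token-per-byte scheme are illustrative assumptions, not part of the documented API, which specifies only the signatures.

```cpp
#include <Tokenizer.h>

#include <cstddef>
#include <cstdint>
#include <string>

// Hypothetical concrete tokenizer. Only the overridden signatures come from
// the documented interface; the behaviour below is an illustrative assumption.
class MyTokenizer : public ggma::Tokenizer {
public:
    std::string id() const override { return "my-tokenizer"; }

    size_t tokenize(const char *text, size_t text_len,
                    int32_t *tokens, size_t max_tokens,
                    size_t *n_tokens) const override {
        // Toy scheme: one token per byte, truncated to max_tokens.
        size_t produced = text_len < max_tokens ? text_len : max_tokens;
        for (size_t i = 0; i < produced; ++i)
            tokens[i] = static_cast<int32_t>(static_cast<unsigned char>(text[i]));
        if (n_tokens) *n_tokens = produced;
        return produced;
    }

    size_t detokenize(const int32_t *tokens, size_t n_tokens,
                      char *text, size_t text_len) const override {
        // Inverse of the toy scheme above: one byte per token.
        size_t written = n_tokens < text_len ? n_tokens : text_len;
        for (size_t i = 0; i < written; ++i)
            text[i] = static_cast<char>(tokens[i]);
        return written;
    }
};
```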
◆ ~Tokenizer()

virtual ggma::Tokenizer::~Tokenizer ( )    [virtual, default]
◆ Tokenizer() [1/2]

ggma::Tokenizer::Tokenizer ( )    [protected, default]
◆ Tokenizer() [2/2]

ggma::Tokenizer::Tokenizer ( const Tokenizer & )    [protected, delete]
◆ detokenize()

virtual size_t ggma::Tokenizer::detokenize ( const int32_t *  tokens,
                                             size_t           n_tokens,
                                             char *           text,
                                             size_t           text_len
                                           ) const    [pure virtual]
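A short usage sketch for detokenize(). The page documents only the signature, so the buffer sizing, the assumption that the return value is the number of bytes written into `text`, and the assumption that no NUL terminator is appended are mine, not guaranteed by the API.

```cpp
#include <Tokenizer.h>

#include <cstdint>
#include <string>
#include <vector>

// Sketch only: assumes detokenize() returns the number of bytes written.
std::string detokenize_to_string(const ggma::Tokenizer &tok,
                                 const std::vector<int32_t> &tokens) {
    std::string text(1024, '\0');            // caller-provided output buffer (guessed size)
    size_t written = tok.detokenize(tokens.data(), tokens.size(),
                                    &text[0], text.size());
    text.resize(written);                    // keep only the bytes produced
    return text;
}
```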
◆ id()

virtual std::string ggma::Tokenizer::id ( ) const    [pure virtual]
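The identifier presumably names the concrete implementation (e.g., for logging or diagnostics); that purpose is an assumption, as the page documents only the signature. A trivial sketch:

```cpp
#include <Tokenizer.h>

#include <iostream>

// Print which tokenizer implementation is in use.
void report(const ggma::Tokenizer &tok) {
    std::cout << "tokenizer: " << tok.id() << "\n";
}
```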
◆ operator=()
◆ tokenize()

virtual size_t ggma::Tokenizer::tokenize ( const char *  text,
                                           size_t        text_len,
                                           int32_t *     tokens,
                                           size_t        max_tokens,
                                           size_t *      n_tokens
                                         ) const    [pure virtual]
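A usage sketch for tokenize(). Since only the signature is documented, the interpretation below (the call writes at most `max_tokens` token ids and reports the count through `*n_tokens`) and the guessed buffer capacity are assumptions.

```cpp
#include <Tokenizer.h>

#include <cstdint>
#include <string>
#include <vector>

// Sketch only: assumes *n_tokens receives the number of token ids produced,
// capped at max_tokens.
std::vector<int32_t> tokenize_string(const ggma::Tokenizer &tok,
                                     const std::string &text) {
    std::vector<int32_t> tokens(text.size() + 16);   // guessed upper bound
    size_t n_tokens = 0;
    tok.tokenize(text.c_str(), text.size(),
                 tokens.data(), tokens.size(), &n_tokens);
    tokens.resize(n_tokens);
    return tokens;
}
```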
The documentation for this class was generated from the following file:
Tokenizer.h