-
Notifications
You must be signed in to change notification settings - Fork 14
/
NAMESPACE
203 lines (202 loc) · 6.09 KB
/
NAMESPACE
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
# Generated by roxygen2: do not edit by hand
#
# NOTE(review): this file is produced from the @export / @importFrom /
# @useDynLib roxygen tags in the package's R/ sources. To change it, edit
# those tags and re-run document()/roxygenise(); hand edits will be lost.

# --- S3 method registrations ---------------------------------------------
# bake() methods: apply each trained step to new data.
S3method(bake,step_clean_levels)
S3method(bake,step_clean_names)
S3method(bake,step_dummy_hash)
S3method(bake,step_lda)
S3method(bake,step_lemma)
S3method(bake,step_ngram)
S3method(bake,step_pos_filter)
S3method(bake,step_sequence_onehot)
S3method(bake,step_stem)
S3method(bake,step_stopwords)
S3method(bake,step_text_normalization)
S3method(bake,step_textfeature)
S3method(bake,step_texthash)
S3method(bake,step_tf)
S3method(bake,step_tfidf)
S3method(bake,step_tokenfilter)
S3method(bake,step_tokenize)
S3method(bake,step_tokenize_bpe)
S3method(bake,step_tokenize_sentencepiece)
S3method(bake,step_tokenize_wordpiece)
S3method(bake,step_tokenmerge)
S3method(bake,step_untokenize)
S3method(bake,step_word_embeddings)
# vctrs print helpers for the package's tokenlist class.
S3method(format,textrecipes_tokenlist)
S3method(obj_print_footer,textrecipes_tokenlist)
# prep() methods: estimate/train each step from the training data.
S3method(prep,step_clean_levels)
S3method(prep,step_clean_names)
S3method(prep,step_dummy_hash)
S3method(prep,step_lda)
S3method(prep,step_lemma)
S3method(prep,step_ngram)
S3method(prep,step_pos_filter)
S3method(prep,step_sequence_onehot)
S3method(prep,step_stem)
S3method(prep,step_stopwords)
S3method(prep,step_text_normalization)
S3method(prep,step_textfeature)
S3method(prep,step_texthash)
S3method(prep,step_tf)
S3method(prep,step_tfidf)
S3method(prep,step_tokenfilter)
S3method(prep,step_tokenize)
S3method(prep,step_tokenize_bpe)
S3method(prep,step_tokenize_sentencepiece)
S3method(prep,step_tokenize_wordpiece)
S3method(prep,step_tokenmerge)
S3method(prep,step_untokenize)
S3method(prep,step_word_embeddings)
# print() methods: console display of each step.
S3method(print,step_clean_levels)
S3method(print,step_clean_names)
S3method(print,step_dummy_hash)
S3method(print,step_lda)
S3method(print,step_lemma)
S3method(print,step_ngram)
S3method(print,step_pos_filter)
S3method(print,step_sequence_onehot)
S3method(print,step_stem)
S3method(print,step_stopwords)
S3method(print,step_text_normalization)
S3method(print,step_textfeature)
S3method(print,step_texthash)
S3method(print,step_tf)
S3method(print,step_tfidf)
S3method(print,step_tokenfilter)
S3method(print,step_tokenize)
S3method(print,step_tokenize_bpe)
S3method(print,step_tokenize_sentencepiece)
S3method(print,step_tokenize_wordpiece)
S3method(print,step_tokenmerge)
S3method(print,step_untokenize)
S3method(print,step_word_embeddings)
# required_pkgs() methods: declare runtime package dependencies per step.
S3method(required_pkgs,step_clean_levels)
S3method(required_pkgs,step_clean_names)
S3method(required_pkgs,step_dummy_hash)
S3method(required_pkgs,step_lda)
S3method(required_pkgs,step_lemma)
S3method(required_pkgs,step_ngram)
S3method(required_pkgs,step_pos_filter)
S3method(required_pkgs,step_sequence_onehot)
S3method(required_pkgs,step_stem)
S3method(required_pkgs,step_stopwords)
S3method(required_pkgs,step_text_normalization)
S3method(required_pkgs,step_textfeature)
S3method(required_pkgs,step_texthash)
S3method(required_pkgs,step_tf)
S3method(required_pkgs,step_tfidf)
S3method(required_pkgs,step_tokenfilter)
S3method(required_pkgs,step_tokenize)
S3method(required_pkgs,step_tokenize_bpe)
S3method(required_pkgs,step_tokenize_sentencepiece)
S3method(required_pkgs,step_tokenize_wordpiece)
S3method(required_pkgs,step_tokenmerge)
S3method(required_pkgs,step_untokenize)
S3method(required_pkgs,step_word_embeddings)
# tidy() methods: tibble summaries of each step.
S3method(tidy,step_clean_levels)
S3method(tidy,step_clean_names)
S3method(tidy,step_dummy_hash)
S3method(tidy,step_lda)
S3method(tidy,step_lemma)
S3method(tidy,step_ngram)
S3method(tidy,step_pos_filter)
S3method(tidy,step_sequence_onehot)
S3method(tidy,step_stem)
S3method(tidy,step_stopwords)
S3method(tidy,step_text_normalization)
S3method(tidy,step_textfeature)
S3method(tidy,step_texthash)
S3method(tidy,step_tf)
S3method(tidy,step_tfidf)
S3method(tidy,step_tokenfilter)
S3method(tidy,step_tokenize)
S3method(tidy,step_tokenize_bpe)
S3method(tidy,step_tokenize_sentencepiece)
S3method(tidy,step_tokenize_wordpiece)
S3method(tidy,step_tokenmerge)
S3method(tidy,step_untokenize)
S3method(tidy,step_word_embeddings)
# tunable() methods: only the steps below expose tuning parameters.
S3method(tunable,step_dummy_hash)
S3method(tunable,step_ngram)
S3method(tunable,step_texthash)
S3method(tunable,step_tf)
S3method(tunable,step_tokenfilter)
S3method(tunable,step_tokenize)
S3method(tunable,step_tokenize_bpe)
# vctrs type methods for the tokenlist class.
S3method(vec_ptype_abbr,textrecipes_tokenlist)
S3method(vec_restore,textrecipes_tokenlist)

# --- Exported objects -----------------------------------------------------
export("%>%")
export(all_tokenized)
export(all_tokenized_predictors)
export(count_functions)
export(required_pkgs)
export(show_tokens)
export(step_clean_levels)
export(step_clean_names)
export(step_dummy_hash)
export(step_lda)
export(step_lemma)
export(step_ngram)
export(step_pos_filter)
export(step_sequence_onehot)
export(step_stem)
export(step_stopwords)
export(step_text_normalization)
export(step_textfeature)
export(step_texthash)
export(step_tf)
export(step_tfidf)
export(step_tokenfilter)
export(step_tokenize)
export(step_tokenize_bpe)
export(step_tokenize_sentencepiece)
export(step_tokenize_wordpiece)
export(step_tokenmerge)
export(step_untokenize)
export(step_word_embeddings)
export(tidy)
export(tokenlist)
export(tunable)

# --- Imports --------------------------------------------------------------
import(rlang)
importFrom(generics,required_pkgs)
importFrom(generics,tidy)
importFrom(generics,tunable)
importFrom(glue,glue)
importFrom(lifecycle,deprecated)
importFrom(magrittr,"%>%")
importFrom(purrr,map)
importFrom(purrr,map_chr)
importFrom(purrr,map_dfc)
importFrom(purrr,map_lgl)
importFrom(purrr,pmap)
importFrom(recipes,add_step)
importFrom(recipes,bake)
importFrom(recipes,check_new_data)
importFrom(recipes,check_type)
importFrom(recipes,get_keep_original_cols)
importFrom(recipes,is_trained)
importFrom(recipes,names0)
importFrom(recipes,prep)
importFrom(recipes,print_step)
importFrom(recipes,rand_id)
importFrom(recipes,recipes_eval_select)
importFrom(recipes,remove_original_cols)
importFrom(recipes,sel2char)
importFrom(recipes,step)
importFrom(rlang,"%||%")
importFrom(rlang,":=")
importFrom(rlang,caller_env)
importFrom(rlang,enquos)
importFrom(rlang,expr)
importFrom(rlang,na_chr)
importFrom(rlang,na_int)
importFrom(rlang,na_lgl)
importFrom(tibble,as_tibble)
importFrom(tibble,tibble)
importFrom(vctrs,obj_print_footer)
importFrom(vctrs,vec_assert)
importFrom(vctrs,vec_cast)
importFrom(vctrs,vec_cbind)
importFrom(vctrs,vec_ptype_abbr)
importFrom(vctrs,vec_restore)
# Load the package's compiled (C/C++) routines, registered via R_init_*.
useDynLib(textrecipes, .registration = TRUE)