@@ -53,17 +53,17 @@ setGeneric("collocationAnalysis", function(kco, ...) standardGeneric("collocati
 #' \dontrun{
 #'
 #' # Find top collocates of "Packung" inside and outside the sports domain.
-#' KorAPConnection(verbose = TRUE) %>%
+#' KorAPConnection(verbose = TRUE) |>
 #'   collocationAnalysis("Packung", vc=c("textClass=sport", "textClass!=sport"),
-#'     leftContextSize=1, rightContextSize=1, topCollocatesLimit=20) %>%
+#'     leftContextSize=1, rightContextSize=1, topCollocatesLimit=20) |>
 #'   dplyr::filter(logDice >= 5)
 #' }
 #'
 #' \dontrun{
 #'
 #' # Identify the most prominent light verb construction with "in ... setzen".
 #' # Note that, currently, the use of the focus function disallows exactFrequencies.
-#' KorAPConnection(verbose = TRUE) %>%
+#' KorAPConnection(verbose = TRUE) |>
 #'   collocationAnalysis("focus(in [tt/p=NN] {[tt/l=setzen]})",
 #'     leftContextSize=1, rightContextSize=0, exactFrequencies=FALSE, topCollocatesLimit=20)
 #' }
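The whole commit is a mechanical migration from magrittr's `%>%` to the native pipe `|>` introduced in R 4.1.0. None of the touched chains use magrittr's `.` placeholder, so the two operators behave identically here. A minimal sketch of the equivalence, using a made-up tibble rather than the package's API:

```r
# Both pipes feed the left-hand value in as the first argument; with no
# dot placeholder involved, the results are identical.
library(dplyr)

df <- tibble(word = c("Eis", "am", "Stiel"), logDice = c(6.2, 1.1, 5.7))

df %>% filter(logDice >= 5)  # magrittr pipe (needs magrittr/dplyr attached)
df |> filter(logDice >= 5)   # base pipe (R >= 4.1.0), same result
```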
@@ -124,7 +124,7 @@ setMethod("collocationAnalysis", "KorAPConnection",
         localStopwords = localStopwords,
         seed = seed,
         expand = expand,
-        ...)) %>%
+        ...)) |>
         bind_rows()
     } else {
       set.seed(seed)
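When several query/vc combinations are analysed, one result tibble is produced per combination and the mapped results are stacked with `bind_rows()`. A small sketch of that combine step with invented rows (only `bind_rows()` itself comes from the diff):

```r
# One result tibble per virtual-corpus condition, stacked into one frame.
library(dplyr)

results <- list(
  tibble(vc = "textClass=sport",  collocate = "Szene", logDice = 6.1),
  tibble(vc = "textClass!=sport", collocate = "Eis",   logDice = 5.4)
)
results |> bind_rows()
```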
@@ -142,8 +142,8 @@ setMethod("collocationAnalysis", "KorAPConnection",
       )

       if (nrow(candidates) > 0) {
-        candidates <- candidates %>%
-          filter(frequency >= minOccur) %>%
+        candidates <- candidates |>
+          filter(frequency >= minOccur) |>
           slice_head(n = topCollocatesLimit)
         collocationScoreQuery(
           kco,
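The converted chain prunes the candidate table in two steps: drop collocates below `minOccur`, then keep only the `topCollocatesLimit` most frequent ones. `slice_head()` picks the top because the table arrives sorted by descending frequency (see the `arrange(desc(frequency))` in `matches2FreqTable` further down). An illustration with invented numbers:

```r
# Candidate pruning: frequency floor first, then truncate to the top n.
library(dplyr)

candidates <- tibble(
  word      = c("Szene", "setzen", "und", "xyz"),
  frequency = c(120, 45, 30, 2)  # already sorted descending
)

candidates |>
  filter(frequency >= 5) |>  # minOccur = 5, assumed for the example
  slice_head(n = 2)          # topCollocatesLimit = 2, assumed
```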
@@ -164,7 +164,7 @@ setMethod("collocationAnalysis", "KorAPConnection",
       }
     }
     if (maxRecurse > 0 & length(result) > 0 && any(!!thresholdScore >= threshold)) {
-      recurseWith <- result %>%
+      recurseWith <- result |>
         filter(!!as.name(thresholdScore) >= threshold)
       result <- collocationAnalysis(
         kco,
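`thresholdScore` holds a column name as a string, so the recursion filter converts it to a symbol with `as.name()` and unquotes it with `!!` inside `filter()`; rows at or above `threshold` are fed back into another `collocationAnalysis()` round. A self-contained sketch of that tidy-eval pattern (data invented):

```r
# Filtering on a column whose name is only known as a string.
library(dplyr)

result <- tibble(collocate = c("Szene", "Gang"), logDice = c(7.3, 1.2))
thresholdScore <- "logDice"
threshold <- 2

result |> filter(!!as.name(thresholdScore) >= threshold)  # keeps "Szene"
```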
@@ -262,10 +262,10 @@ matches2FreqTable <- function(matches,
       )
     }
     log_info(verbose, paste("Aggregating", length(oldTable$word), "tokens\n"))
-    oldTable %>%
-      group_by(word) %>%
-      mutate(word = dplyr::case_when(ignoreCollocateCase ~ tolower(word), TRUE ~ word)) %>%
-      summarise(frequency = sum(frequency), .groups = "drop") %>%
+    oldTable |>
+      group_by(word) |>
+      mutate(word = dplyr::case_when(ignoreCollocateCase ~ tolower(word), TRUE ~ word)) |>
+      summarise(frequency = sum(frequency), .groups = "drop") |>
       arrange(desc(frequency))
   } else {
     stopwordsTable <- dplyr::tibble(word = stopwords)
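The aggregation lowercases collocates only when `ignoreCollocateCase` is set, then merges the resulting duplicates by summing their frequencies. Note that mutating a grouping variable and having `summarise()` honour the folded values relies on dplyr (>= 1.0.0) regrouping after the grouping column is modified. A toy run with invented tokens:

```r
# With ignoreCollocateCase = TRUE, "Haus" and "haus" collapse into one row.
library(dplyr)

oldTable <- tibble(word = c("Haus", "haus", "Tür"), frequency = c(3, 2, 4))
ignoreCollocateCase <- TRUE

oldTable |>
  group_by(word) |>
  mutate(word = dplyr::case_when(ignoreCollocateCase ~ tolower(word), TRUE ~ word)) |>
  summarise(frequency = sum(frequency), .groups = "drop") |>
  arrange(desc(frequency))
# -> two rows: haus (5) and Tür (4)
```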
@@ -281,11 +281,11 @@ matches2FreqTable <- function(matches,
     if (length(left) + length(right) == 0) {
       oldTable
     } else {
-      table(c(left, right)) %>%
-        dplyr::as_tibble(.name_repair = "minimal") %>%
-        dplyr::rename(word = 1, frequency = 2) %>%
-        dplyr::filter(str_detect(word, collocateFilterRegex)) %>%
-        dplyr::anti_join(stopwordsTable, by = "word") %>%
+      table(c(left, right)) |>
+        dplyr::as_tibble(.name_repair = "minimal") |>
+        dplyr::rename(word = 1, frequency = 2) |>
+        dplyr::filter(str_detect(word, collocateFilterRegex)) |>
+        dplyr::anti_join(stopwordsTable, by = "word") |>
         dplyr::bind_rows(oldTable)
     }
   }
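The `else` branch counts tokens from both context windows with `table()`, converts the counts to a tibble, names the two positional columns, applies the collocate shape filter, and removes stopwords. A self-contained sketch; the regex below is a deliberately simple stand-in, not the package's actual `collocateFilterRegex` default:

```r
# Count context tokens and clean the resulting frequency table.
library(dplyr)
library(stringr)

left  <- c("in", "Szene")
right <- c("gesetzt", "!")
stopwordsTable <- tibble(word = "in")
collocateFilterRegex <- "^[[:alnum:]-]+$"  # illustrative stand-in

table(c(left, right)) |>
  as_tibble(.name_repair = "minimal") |>
  rename(word = 1, frequency = 2) |>
  filter(str_detect(word, collocateFilterRegex)) |>  # drops "!"
  anti_join(stopwordsTable, by = "word")             # drops stopword "in"
```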
@@ -323,10 +323,10 @@ snippet2FreqTable <- function(snippet,
       )
     }
     log_info(verbose, paste("Aggregating", length(oldTable$word), "tokens\n"))
-    oldTable %>%
-      group_by(word) %>%
-      mutate(word = dplyr::case_when(ignoreCollocateCase ~ tolower(word), TRUE ~ word)) %>%
-      summarise(frequency = sum(frequency), .groups = "drop") %>%
+    oldTable |>
+      group_by(word) |>
+      mutate(word = dplyr::case_when(ignoreCollocateCase ~ tolower(word), TRUE ~ word)) |>
+      summarise(frequency = sum(frequency), .groups = "drop") |>
       arrange(desc(frequency))
   } else {
     stopwordsTable <- dplyr::tibble(word = stopwords)
@@ -351,11 +351,11 @@ snippet2FreqTable <- function(snippet,
     if (is.na(left[1]) || is.na(right[1]) || length(left) + length(right) == 0) {
       oldTable
     } else {
-      table(c(left, right)) %>%
-        dplyr::as_tibble(.name_repair = "minimal") %>%
-        dplyr::rename(word = 1, frequency = 2) %>%
-        dplyr::filter(str_detect(word, collocateFilterRegex)) %>%
-        dplyr::anti_join(stopwordsTable, by = "word") %>%
+      table(c(left, right)) |>
+        dplyr::as_tibble(.name_repair = "minimal") |>
+        dplyr::rename(word = 1, frequency = 2) |>
+        dplyr::filter(str_detect(word, collocateFilterRegex)) |>
+        dplyr::anti_join(stopwordsTable, by = "word") |>
         dplyr::bind_rows(oldTable)
     }
   }
@@ -487,8 +487,8 @@ collocatesQuery <-
         ignoreCollocateCase = ignoreCollocateCase,
         stopwords = stopwords,
         ...,
-        verbose = kco@verbose) %>%
-        mutate(frequency = frequency * q@totalResults / min(q@totalResults, searchHitsSampleLimit)) %>%
+        verbose = kco@verbose) |>
+        mutate(frequency = frequency * q@totalResults / min(q@totalResults, searchHitsSampleLimit)) |>
         filter(frequency >= minOccur)
     }
   }
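`collocatesQuery()` fetches at most `searchHitsSampleLimit` hits, so observed collocate counts are scaled up to the full result set before the `minOccur` filter is applied; when all hits fit within the limit, the scale factor is 1 and the counts stay exact. The arithmetic, with invented numbers:

```r
# Extrapolating sampled counts:
# frequency * totalResults / min(totalResults, searchHitsSampleLimit)
totalResults          <- 20000  # stands in for q@totalResults
searchHitsSampleLimit <- 2000   # only this many hits were actually fetched

scale <- totalResults / min(totalResults, searchHitsSampleLimit)  # = 10
observed <- c(Szene = 14, Gang = 3)  # counts within the sample
observed * scale                     # estimated counts in all 20000 hits
```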