automod/keyword/tokenize.go (+2, -2)

@@ -41,8 +41,8 @@
 	return tokenizeText(text, nonTokenCharsSkipCensorChars)
 }
 
-func TokenizeTextWithCustomNonTokenRegex(text string, regex *regexp.Regexp) []string {
-	return tokenizeText(text, regex)
+func TokenizeTextWithRegex(text string, nonTokenCharsRegex *regexp.Regexp) []string {
+	return tokenizeText(text, nonTokenCharsRegex)
 }
 
 func splitIdentRune(c rune) bool {
automod/keyword/tokenize_test.go (+1, -1)
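
For reference, a minimal sketch of calling the renamed helper from outside the package. The import path and the exact way the non-token-character regex is applied during splitting are assumptions based on the signature in the diff above, not something this change specifies.

```go
package main

import (
	"fmt"
	"regexp"

	// Assumed import path for the automod keyword package.
	"github.com/bluesky-social/indigo/automod/keyword"
)

func main() {
	// Hypothetical custom rule: treat anything that is not an ASCII
	// letter or digit as a non-token (separator) character.
	nonTokenChars := regexp.MustCompile(`[^a-zA-Z0-9]+`)

	// TokenizeTextWithRegex tokenizes the input using the supplied
	// non-token-character regex instead of the package default.
	tokens := keyword.TokenizeTextWithRegex("Hello, wörld! foo_bar", nonTokenChars)
	fmt.Println(tokens)
}
```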