/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "utils/bert_tokenizer.h"

#include <memory>
#include <string>
#include <vector>

#include "utils/test-data-test-utils.h"
#include "absl/strings/string_view.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"

namespace libtextclassifier3 {

using ::testing::ElementsAre;

namespace {
constexpr char kTestVocabPath[] = "annotator/pod_ner/test_data/vocab.txt";

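// Tokenizes "i'm question" with the given tokenizer and checks that the
// expected subwords come back; shared by the construction tests below.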
void AssertTokenizerResults(std::unique_ptr<BertTokenizer> tokenizer) {
  auto results = tokenizer->Tokenize("i'm question");

  EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question"));
}

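// The tokenizer can be constructed from an in-memory copy of the vocab file.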
TEST(BertTokenizerTest, TestTokenizerCreationFromBuffer) {
  std::string buffer = GetTestFileContent(kTestVocabPath);

  auto tokenizer =
      std::make_unique<BertTokenizer>(buffer.data(), buffer.size());

  AssertTokenizerResults(std::move(tokenizer));
}

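// The tokenizer can be constructed directly from a vocab file path.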
TEST(BertTokenizerTest, TestTokenizerCreationFromFile) {
  auto tokenizer =
      std::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));

  AssertTokenizerResults(std::move(tokenizer));
}

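// The tokenizer can be constructed from an explicit list of vocab entries.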
TEST(BertTokenizerTest, TestTokenizerCreationFromVector) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  AssertTokenizerResults(std::move(tokenizer));
}

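// A word that is not in the vocab as a whole is split into the longest
// matching prefix plus "##"-prefixed continuation wordpieces.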
TEST(BertTokenizerTest, TestTokenizerMultipleRows) {
  auto tokenizer =
      std::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));

  auto results = tokenizer->Tokenize("i'm questionansweraskask");

  EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question", "##ans",
                                            "##wer", "##ask", "##ask"));
}

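// TokenizeIntoWordpieces additionally reports each wordpiece's begin/end
// offsets into the input and, per pre-token, how many wordpieces it produced
// (row_lengths).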
TEST(BertTokenizerTest, TestTokenizeIntoWordpieces) {
  auto tokenizer =
      std::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));

  auto results = tokenizer->TokenizeIntoWordpieces("i'm questionansweraskask");

  EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question", "##ans",
                                            "##wer", "##ask", "##ask"));
  EXPECT_THAT(results.wp_begin_offset, ElementsAre(0, 1, 2, 4, 12, 15, 18, 21));
  EXPECT_THAT(results.wp_end_offset, ElementsAre(1, 2, 3, 12, 15, 18, 21, 24));
  EXPECT_THAT(results.row_lengths, ElementsAre(1, 1, 1, 5));
}

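// A 100-codepoint run of a non-ASCII character maps to a single [UNK]
// wordpiece; note the end offset counts codepoints (100), not UTF-8 bytes.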
TEST(BertTokenizerTest, TestTokenizeIntoWordpiecesLongNonAscii) {
  auto tokenizer =
      std::make_unique<BertTokenizer>(GetTestDataPath(kTestVocabPath));

  std::string token;
  for (int i = 0; i < 100; ++i) {
    token += "ń";
  }
  auto results = tokenizer->TokenizeIntoWordpieces(token);

  EXPECT_THAT(results.subwords, ElementsAre("[UNK]"));
  EXPECT_THAT(results.wp_begin_offset, ElementsAre(0));
  EXPECT_THAT(results.wp_end_offset, ElementsAre(100));
  EXPECT_THAT(results.row_lengths, ElementsAre(1));
}

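// With a four-entry vocab that lacks the needed continuation pieces, the
// whole out-of-vocabulary word collapses to kDefaultUnknownToken.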
TEST(BertTokenizerTest, TestTokenizerUnknownTokens) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  auto results = tokenizer->Tokenize("i'm questionansweraskask");

  EXPECT_THAT(results.subwords,
              ElementsAre("i", "'", "m", kDefaultUnknownToken));
}

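// LookupId fails for out-of-vocabulary words and otherwise yields the
// word's row index in the vocab.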
TEST(BertTokenizerTest, TestLookupId) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  int i;
  ASSERT_FALSE(tokenizer->LookupId("iDontExist", &i));

  ASSERT_TRUE(tokenizer->LookupId("i", &i));
  ASSERT_EQ(i, 0);
  ASSERT_TRUE(tokenizer->LookupId("'", &i));
  ASSERT_EQ(i, 1);
  ASSERT_TRUE(tokenizer->LookupId("m", &i));
  ASSERT_EQ(i, 2);
  ASSERT_TRUE(tokenizer->LookupId("question", &i));
  ASSERT_EQ(i, 3);
}

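// LookupWord is the inverse mapping, from id back to vocab entry, failing
// for out-of-range ids.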
TEST(BertTokenizerTest, TestLookupWord) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  absl::string_view result;
  ASSERT_FALSE(tokenizer->LookupWord(6, &result));

  ASSERT_TRUE(tokenizer->LookupWord(0, &result));
  ASSERT_EQ(result, "i");
  ASSERT_TRUE(tokenizer->LookupWord(1, &result));
  ASSERT_EQ(result, "'");
  ASSERT_TRUE(tokenizer->LookupWord(2, &result));
  ASSERT_EQ(result, "m");
  ASSERT_TRUE(tokenizer->LookupWord(3, &result));
  ASSERT_EQ(result, "question");
}

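// Contains reports via its out-parameter whether a word is in the vocab.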
TEST(BertTokenizerTest, TestContains) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  bool result;
  tokenizer->Contains("iDontExist", &result);
  ASSERT_FALSE(result);

  tokenizer->Contains("i", &result);
  ASSERT_TRUE(result);
  tokenizer->Contains("'", &result);
  ASSERT_TRUE(result);
  tokenizer->Contains("m", &result);
  ASSERT_TRUE(result);
  tokenizer->Contains("question", &result);
  ASSERT_TRUE(result);
}

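// VocabularySize reports the number of vocab entries.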
TEST(BertTokenizerTest, TestVocabularySize) {
  std::vector<std::string> vocab;
  vocab.emplace_back("i");
  vocab.emplace_back("'");
  vocab.emplace_back("m");
  vocab.emplace_back("question");
  auto tokenizer = std::make_unique<BertTokenizer>(vocab);

  ASSERT_EQ(tokenizer->VocabularySize(), 4);
}

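// The static PreTokenize splits on whitespace and treats punctuation as
// separate tokens, without consulting any vocab.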
TEST(BertTokenizerTest, SimpleEnglishWithPunctuation) {
  absl::string_view input = "I am fine, thanks!";

  std::vector<std::string> tokens = BertTokenizer::PreTokenize(input);

  EXPECT_THAT(tokens, testing::ElementsAreArray(
                          {"I", "am", "fine", ",", "thanks", "!"}));
}
}  // namespace
}  // namespace libtextclassifier3