// Source: DictionaryPC.git — src/com/hughes/android/dictionary/engine/IndexBuilder.java
// (commit: "Add parallel HashMap for faster lookups.")
1 // Copyright 2011 Google Inc. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //     http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 package com.hughes.android.dictionary.engine;
16
17 import java.util.*;
18
19 import com.hughes.android.dictionary.engine.Index.IndexEntry;
20 import com.hughes.android.dictionary.parser.DictFileParser;
21
22 public class IndexBuilder {
23
24     final DictionaryBuilder dictionaryBuilder;
25     public final Index index;
26     final Set<String> stoplist;
27
28     final Map<String, TokenData> fastTokenToData;
29     final SortedMap<FastCompareString, TokenData> tokenToData;
30
31     IndexBuilder(final DictionaryBuilder dictionaryBuilder, final String shortName, final String longName, final Language language, final String normalizerRules, final Set<String> stoplist, final boolean swapPairEntries) {
32         this.dictionaryBuilder = dictionaryBuilder;
33         index = new Index(dictionaryBuilder.dictionary, shortName, longName, language, normalizerRules, swapPairEntries, stoplist);
34         tokenToData = new TreeMap<>(new FastNormalizeComparator(index.getSortComparator()));
35         fastTokenToData = new HashMap<>();
36         this.stoplist = stoplist;
37     }
38
39     public void build() {
40         final Set<IndexedEntry> tokenIndexedEntries = new HashSet<>();
41         final List<RowBase> rows = index.rows;
42         index.mainTokenCount = 0;
43         for (final TokenData tokenData : tokenToData.values()) {
44             tokenIndexedEntries.clear();
45             final int indexIndex = index.sortedIndexEntries.size();
46             final int startRow = rows.size();
47
48             TokenRow tokenRow = null;
49             if (!tokenData.htmlEntries.isEmpty()) {
50                 tokenRow = new TokenRow(indexIndex, rows.size(), index, tokenData.hasMainEntry);
51                 rows.add(tokenRow);
52             }
53
54 //    System.out.println("Added TokenRow: " + rows.get(rows.size() - 1));
55
56             int numRows = 0;  // off by one--doesn't count the token row!
57 //      System.out.println("TOKEN: " + tokenData.token);
58             for (final Map.Entry<EntryTypeName, List<IndexedEntry>> typeToIndexedEntries : tokenData.typeToEntries.entrySet()) {
59                 for (final IndexedEntry indexedEntry : typeToIndexedEntries.getValue()) {
60                     if (!indexedEntry.isValid) {
61                         continue;
62                     }
63
64                     if (tokenRow == null) {
65                         tokenRow = new TokenRow(indexIndex, rows.size(), index, tokenData.hasMainEntry);
66                         rows.add(tokenRow);
67                     }
68
69                     if (indexedEntry.entry.index() == -1) {
70                         indexedEntry.entry.addToDictionary(dictionaryBuilder.dictionary);
71                         assert indexedEntry.entry.index() >= 0;
72                     }
73                     if (tokenIndexedEntries.add(indexedEntry) && !tokenData.htmlEntries.contains(indexedEntry.entry)) {
74                         rows.add(indexedEntry.entry.CreateRow(rows.size(), index));
75                         ++indexedEntry.entry.entrySource.numEntries;
76                         ++numRows;
77
78 //            System.out.print("  " + typeToEntry.getKey() + ": ");
79                         //          rows.get(rows.size() - 1).print(System.out);
80 //            System.out.println();
81                     }
82                 }
83             }
84
85             if (tokenRow != null) {
86                 if (tokenRow.hasMainEntry) {
87                     index.mainTokenCount++;
88                 }
89
90                 final Index.IndexEntry indexEntry = new Index.IndexEntry(index, tokenData.token, index
91                         .normalizer().transliterate(tokenData.token), startRow, numRows);
92                 indexEntry.htmlEntries.addAll(tokenData.htmlEntries);
93                 index.sortedIndexEntries.add(indexEntry);
94             }
95         }
96
97         final List<IndexEntry> entriesSortedByNumRows = new ArrayList<>(index.sortedIndexEntries);
98         entriesSortedByNumRows.sort((object1, object2) -> object2.numRows - object1.numRows);
99         System.out.println("Most common tokens:");
100         for (int i = 0; i < 50 && i < entriesSortedByNumRows.size(); ++i) {
101             System.out.println("  " + entriesSortedByNumRows.get(i));
102         }
103     }
104
105     public static class TokenData {
106         final String token;
107
108         final Map<EntryTypeName, List<IndexedEntry>> typeToEntries = new EnumMap<>(EntryTypeName.class);
109         public boolean hasMainEntry = false;
110
111         public final List<HtmlEntry> htmlEntries = new ArrayList<>();
112
113         TokenData(final String token) {
114             assert token.equals(token.trim());
115             assert token.length() > 0;
116             this.token = token;
117         }
118     }
119
120     public TokenData getOrCreateTokenData(final String token) {
121         TokenData tokenData = fastTokenToData.get(token);
122         if (tokenData != null) return tokenData;
123         tokenData = new TokenData(token);
124         final FastCompareString c = new FastCompareString(token);
125         if (tokenToData.put(c, tokenData) != null) {
126             // The parallel HashMap assumes that the TreeMap Comparator
127             // is compatible with the equals it uses to compare.
128             throw new RuntimeException("TokenData TreeMap and HashMap out of sync, Comparator may be broken?");
129         }
130         fastTokenToData.put(token, tokenData);
131         return tokenData;
132     }
133
134     private List<IndexedEntry> getOrCreateEntries(final String token, final EntryTypeName entryTypeName) {
135         final TokenData tokenData = getOrCreateTokenData(token);
136         List<IndexedEntry> entries = tokenData.typeToEntries.get(entryTypeName);
137         if (entryTypeName.mainWord) {
138             tokenData.hasMainEntry = true;
139         }
140         if (entries == null) {
141             entries = new ArrayList<>();
142             tokenData.typeToEntries.put(entryTypeName, entries);
143         }
144         return entries;
145     }
146
147     public void addEntryWithTokens(final IndexedEntry indexedEntry, final Set<String> tokens,
148                                    final EntryTypeName entryTypeName) {
149         if (indexedEntry == null) {
150             System.out.println("asdfasdf");
151         }
152         assert indexedEntry != null;
153         for (final String token : tokens) {
154             if (entryTypeName.overridesStopList || !stoplist.contains(token)) {
155                 getOrCreateEntries(token, entryTypeName).add(indexedEntry);
156             }
157         }
158     }
159
160     public void addEntryWithString(final IndexedEntry indexedEntry, final String untokenizedString,
161                                    final EntryTypeName entryTypeName) {
162         final Set<String> tokens = DictFileParser.tokenize(untokenizedString, DictFileParser.NON_CHAR);
163         addEntryWithTokens(indexedEntry, tokens, tokens.size() == 1 ? entryTypeName.singleWordInstance : entryTypeName);
164     }
165
166     public void addEntryWithStringNoSingle(final IndexedEntry indexedEntry, final String untokenizedString,
167                                            final EntryTypeName entryTypeName) {
168         final Set<String> tokens = DictFileParser.tokenize(untokenizedString, DictFileParser.NON_CHAR);
169         addEntryWithTokens(indexedEntry, tokens, entryTypeName);
170     }
171 }