// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.hughes.android.dictionary.engine;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

import com.hughes.android.dictionary.engine.Index.IndexEntry;
import com.hughes.android.dictionary.parser.DictFileParser;

/**
 * Accumulates token-to-entry mappings for one {@link Index} and, once all
 * entries have been added, materializes the index's sorted entry list and
 * row table via {@link #build()}.
 *
 * <p>NOTE(review): generic type parameters in this file were reconstructed
 * from usage (the original text had them stripped); verify against the rest
 * of the project if anything fails to resolve.
 */
public class IndexBuilder {

  final DictionaryBuilder dictionaryBuilder;
  public final Index index;

  /** Tokens collated per-language (via NormalizeComparator), mapped to their accumulated entries. */
  final SortedMap<String, TokenData> tokenToData;

  IndexBuilder(final DictionaryBuilder dictionaryBuilder, final String shortName,
      final String longName, final Language language, final String normalizerRules,
      final boolean swapPairEntries) {
    this.dictionaryBuilder = dictionaryBuilder;
    index = new Index(dictionaryBuilder.dictionary, shortName, longName, language,
        normalizerRules, swapPairEntries);
    // Language-aware ordering: keys are compared by their normalized form first.
    tokenToData = new TreeMap<String, TokenData>(
        new NormalizeComparator(index.normalizer(), language.getCollator()));
  }

  /**
   * Converts the accumulated {@link #tokenToData} map into the index's row
   * table and sorted entry list.  For each token this emits one TokenRow
   * followed by one PairEntry.Row per distinct entry (duplicates within a
   * token are suppressed).  Finally prints the 50 tokens with the most rows.
   */
  public void build() {
    // Entries already emitted under the current token; reset per token so the
    // same entry can still appear under multiple different tokens.
    final Set<IndexedEntry> tokenEntryDatas = new HashSet<IndexedEntry>();
    // NOTE(review): assumes index.rows is a List<RowBase> — confirm against Index.
    final List<RowBase> rows = index.rows;
    for (final TokenData tokenData : tokenToData.values()) {
      tokenEntryDatas.clear();
      final int indexIndex = index.sortedIndexEntries.size();
      final int startRow = rows.size();
      rows.add(new TokenRow(indexIndex, rows.size(), index));
      int numRows = 0;
      for (final Map.Entry<EntryTypeName, List<IndexedEntry>> typeToEntry
          : tokenData.typeToEntries.entrySet()) {
        for (final IndexedEntry entryData : typeToEntry.getValue()) {
          // Lazily assign the entry its position in the dictionary the first
          // time any index references it.
          if (entryData.index() == -1) {
            entryData.addToDictionary(dictionaryBuilder.dictionary);
            assert entryData.index() >= 0;
          }
          if (tokenEntryDatas.add(entryData)) {
            rows.add(new PairEntry.Row(entryData.index(), rows.size(), index));
            ++numRows;
          }
        }
      }
      index.sortedIndexEntries.add(new Index.IndexEntry(tokenData.token,
          index.normalizer().transliterate(tokenData.token), startRow, numRows));
    }

    // Diagnostic output: tokens ranked by row count, descending.
    final List<IndexEntry> sortedEntries = new ArrayList<IndexEntry>(index.sortedIndexEntries);
    Collections.sort(sortedEntries, new Comparator<IndexEntry>() {
      @Override
      public int compare(IndexEntry object1, IndexEntry object2) {
        // Both counts are non-negative, so this subtraction cannot overflow.
        return object2.numRows - object1.numRows;
      }});
    System.out.println("Most common tokens:");
    for (int i = 0; i < 50 && i < sortedEntries.size(); ++i) {
      System.out.println(" " + sortedEntries.get(i));
    }
  }

  /** One index token plus the entries filed under it, bucketed by entry type. */
  static class TokenData {
    final String token;
    // EnumMap keeps buckets in EntryTypeName declaration order.
    final Map<EntryTypeName, List<IndexedEntry>> typeToEntries =
        new EnumMap<EntryTypeName, List<IndexedEntry>>(EntryTypeName.class);

    TokenData(final String token) {
      assert token.equals(token.trim());
      assert token.length() > 0;
      this.token = token;
    }
  }

  /** Returns the {@link TokenData} for {@code token}, creating it on first use. */
  public TokenData getOrCreateTokenData(final String token) {
    TokenData tokenData = tokenToData.get(token);
    if (tokenData == null) {
      tokenData = new TokenData(token);
      tokenToData.put(token, tokenData);
    }
    return tokenData;
  }

  /**
   * Returns the mutable entry list for ({@code token}, {@code entryTypeName}),
   * creating the token bucket and list on first use.
   */
  public List<IndexedEntry> getOrCreateEntries(final String token,
      final EntryTypeName entryTypeName) {
    final TokenData tokenData = getOrCreateTokenData(token);
    List<IndexedEntry> entries = tokenData.typeToEntries.get(entryTypeName);
    if (entries == null) {
      entries = new ArrayList<IndexedEntry>();
      tokenData.typeToEntries.put(entryTypeName, entries);
    }
    return entries;
  }

  /** Files {@code indexedEntry} under every token in {@code tokens} with the given type. */
  public void addEntryWithTokens(final IndexedEntry indexedEntry, final Set<String> tokens,
      final EntryTypeName entryTypeName) {
    for (final String token : tokens) {
      getOrCreateEntries(token, entryTypeName).add(indexedEntry);
    }
  }

  /**
   * Tokenizes {@code untokenizedString} and files {@code indexedEntry} under
   * each token, using {@code singleTokenEntryTypeName} when the string
   * produced exactly one token and {@code multiTokenEntryTypeName} otherwise.
   */
  public void addEntryWithString(final IndexedEntry indexedEntry, final String untokenizedString,
      final EntryTypeName singleTokenEntryTypeName, final EntryTypeName multiTokenEntryTypeName) {
    final Set<String> tokens = DictFileParser.tokenize(untokenizedString, DictFileParser.NON_CHAR);
    addEntryWithTokens(indexedEntry, tokens,
        tokens.size() == 1 ? singleTokenEntryTypeName : multiTokenEntryTypeName);
  }

  /** Convenience overload: same entry type regardless of token count. */
  public void addEntryWithString(final IndexedEntry indexedEntry, final String untokenizedString,
      final EntryTypeName entryTypeName) {
    addEntryWithString(indexedEntry, untokenizedString, entryTypeName, entryTypeName);
  }
}