refactoring

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@2144 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 19 years ago
parent 24686e50a2
commit 196b8abb30

@@ -165,7 +165,7 @@ public class IndexControl_p {
                         switchboard.urlPool.loadedURL.remove(urlx[i]);
                     }
                 }
-                switchboard.wordIndex.deleteIndex(keyhash);
+                switchboard.wordIndex.deleteContainer(keyhash);
                 post.remove("keyhashdeleteall");
                 if (keystring.length() > 0 &&
                     indexEntryAttribute.word2hash(keystring).equals(keyhash)) {

@@ -92,8 +92,8 @@
   <td class="small">Empty<br>(avail.)<br>Slots</td>
   <td class="small">Used: High, Medium, Low Prio</td>
   <td class="small">Node-Cache<br>Hit:Miss<br>Uniq:Doub<br>Del:Flush<br></td>
-  <td class="small">&nbsp;<br>Size<br>Max</td>
-  <td class="small">&nbsp;<br>Size<br>Current</td>
+  <td class="small">&nbsp;<br>Max<br>Size</td>
+  <td class="small">&nbsp;<br>Hit-Size<br>Miss-Size</td>
   <td class="small">Hit-Cache<br>Hit:Miss<br>Uniq:Doub<br>Del:Flush<br></td>
   <td class="small">Miss-Cache<br>Hit:Miss<br>Uniq:Doub<br>Del:Flush<br></td>
   <td class="small">Used Now</td>

@@ -302,13 +302,13 @@ public class PerformanceMemory_p {
         prop.put("sluniqdoub" + db, slt[6] + ":" + slt[7]);
         prop.put("slflush" + db, slt[8] + ":" + slt[9]);
         prop.put("ochunkmax" + db, ost[0]);
-        prop.put("ochunkcur" + db, ost[1]);
-        prop.put("ohittmiss" + db, ost[5] + ":" + ost[6]);
-        prop.put("ouniqdoub" + db, ost[7] + ":" + ost[8]);
-        prop.put("oflush" + db, ost[9] + ":" + ost[10]);
-        prop.put("nhittmiss" + db, ost[11] + ":" + ost[12]);
-        prop.put("nuniqdoub" + db, ost[13] + ":" + ost[14]);
-        prop.put("nflush" + db, ost[15] + ":" + ost[16]);
+        prop.put("ochunkcur" + db, ost[1] + "<br>" + ost[2]);
+        prop.put("ohittmiss" + db, ost[6] + ":" + ost[7]);
+        prop.put("ouniqdoub" + db, ost[8] + ":" + ost[9]);
+        prop.put("oflush" + db, ost[10] + ":" + ost[11]);
+        prop.put("nhittmiss" + db, ost[12] + ":" + ost[13]);
+        prop.put("nuniqdoub" + db, ost[14] + ":" + ost[15]);
+        prop.put("nflush" + db, ost[16] + ":" + ost[17]);
         prop.put("used" + db, usd / KB);
         prop.put("good" + db, god / KB);
         prop.put("best" + db, bst / KB);

@@ -33,6 +33,7 @@ public interface indexEntry {
     public byte[] toEncodedByteArrayForm();
     public String toPropertyForm();
+    public String getUrlHash();
     public void combineDistance(indexEntry oe);
     public void min(indexEntry other);
     public void max(indexEntry other);

@@ -1,4 +1,4 @@
-// plasmaWordIndexInterface.java
+// indexRI.java
 // -----------------------------
 // part of YACY
 // (C) by Michael Peter Christen; mc@anomic.de
@@ -40,22 +40,25 @@
 // Contributions and changes to the program code must be marked as such.
-package de.anomic.plasma;
+package de.anomic.index;
 import java.util.Iterator;
-public interface plasmaWordIndexInterface {
+import de.anomic.plasma.plasmaWordIndexEntryContainer;
+import de.anomic.plasma.plasmaWordIndexEntryInstance;
+public interface indexRI {
     public int size();
     public Iterator wordHashes(String startWordHash, boolean rot);
     public long getUpdateTime(String wordHash);
-    public plasmaWordIndexEntryContainer getContainer(String wordHash, boolean deleteIfEmpty);
+    public plasmaWordIndexEntryContainer getContainer(String wordHash, boolean deleteIfEmpty, long maxtime);
     public plasmaWordIndexEntryContainer deleteContainer(String wordHash);
-    public int removeEntries(String wordHash, String[] urlHashes, boolean deleteComplete);
-    public boolean addEntry(String wordHash, plasmaWordIndexEntryInstance entry, long updateTime, boolean dhtCase);
+    public int removeEntries(String wordHash, String[] referenceHashes, boolean deleteComplete);
+    public boolean addEntry(String wordHash, indexEntry entry, long updateTime, boolean dhtCase);
     public int addEntries(plasmaWordIndexEntryContainer newEntries, long creationTime, boolean dhtCase);
     public void close(int waitingSeconds);

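For illustration only (not part of the commit): a minimal sketch of how a caller might use the refactored indexRI interface above. The IndexRIUsageSketch class is hypothetical; it assumes the de.anomic classes shown in this diff are on the classpath, and the 500 ms budget is an arbitrary example value for the new maxtime parameter.

    import java.util.Iterator;
    import de.anomic.index.indexRI;
    import de.anomic.plasma.plasmaWordIndexEntryContainer;

    class IndexRIUsageSketch {
        // walk the word hashes of any indexRI implementation and print container sizes
        static void dumpSizes(indexRI ri, String startHash) {
            Iterator it = ri.wordHashes(startHash, false);          // no rotation
            while (it.hasNext()) {
                String wordHash = (String) it.next();
                // new signature: the third argument is a time budget (ms) for the lookup
                plasmaWordIndexEntryContainer c = ri.getContainer(wordHash, false, 500);
                if (c != null) System.out.println(wordHash + ": " + c.size() + " entries");
            }
        }
    }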
@@ -1,4 +1,4 @@
-// kelondroCell.java
+// kelondroColumn.java
 // (C) 2006 by Michael Peter Christen; mc@anomic.de, Frankfurt a. M., Germany
 // first published 24.05.2006 on http://www.anomic.de
 //
@@ -28,7 +28,7 @@
 package de.anomic.kelondro;
-public class kelondroCell {
+public class kelondroColumn {
     public static int celltype_undefined = 0;
     public static int celltype_boolean = 1;
@@ -40,7 +40,7 @@ public class kelondroCell {
     private int celltype, dbwidth;
     private String nickname, description;
-    public kelondroCell(int celltype, int dbwidth, String nickname, String description) {
+    public kelondroColumn(int celltype, int dbwidth, String nickname, String description) {
         this.celltype = celltype;
         this.dbwidth = dbwidth;
         this.nickname = nickname;

@@ -123,14 +123,19 @@ public class kelondroObjectCache {
         return System.currentTimeMillis() - longEmit(ages.getMinScore());
     }
-    public int size() {
+    public int hitsize() {
         return cache.size();
     }
+    public int misssize() {
+        return hasnot.size();
+    }
     public String[] status() {
         return new String[]{
             Integer.toString(maxSize()),
-            Integer.toString(size()),
+            Integer.toString(hitsize()),
+            Integer.toString(misssize()),
             Long.toString(this.maxAge),
             Long.toString(minAge()),
             Long.toString(maxAge()),
@@ -153,10 +158,10 @@ public class kelondroObjectCache {
         return new String[]{
             Integer.toString(Integer.parseInt(a[0]) + Integer.parseInt(b[0])),
             Integer.toString(Integer.parseInt(a[1]) + Integer.parseInt(b[1])),
-            Long.toString(Math.max(Long.parseLong(a[2]), Long.parseLong(b[2]))),
-            Long.toString(Math.min(Long.parseLong(a[3]), Long.parseLong(b[3]))),
-            Long.toString(Math.max(Long.parseLong(a[4]), Long.parseLong(b[4]))),
-            Integer.toString(Integer.parseInt(a[5]) + Integer.parseInt(b[5])),
+            Integer.toString(Integer.parseInt(a[2]) + Integer.parseInt(b[2])),
+            Long.toString(Math.max(Long.parseLong(a[3]), Long.parseLong(b[3]))),
+            Long.toString(Math.min(Long.parseLong(a[4]), Long.parseLong(b[4]))),
+            Long.toString(Math.max(Long.parseLong(a[5]), Long.parseLong(b[5]))),
             Integer.toString(Integer.parseInt(a[6]) + Integer.parseInt(b[6])),
             Integer.toString(Integer.parseInt(a[7]) + Integer.parseInt(b[7])),
             Integer.toString(Integer.parseInt(a[8]) + Integer.parseInt(b[8])),
@@ -167,7 +172,8 @@ public class kelondroObjectCache {
             Integer.toString(Integer.parseInt(a[13]) + Integer.parseInt(b[13])),
             Integer.toString(Integer.parseInt(a[14]) + Integer.parseInt(b[14])),
             Integer.toString(Integer.parseInt(a[15]) + Integer.parseInt(b[15])),
-            Integer.toString(Integer.parseInt(a[16]) + Integer.parseInt(b[16]))
+            Integer.toString(Integer.parseInt(a[16]) + Integer.parseInt(b[16])),
+            Integer.toString(Integer.parseInt(a[17]) + Integer.parseInt(b[17]))
         };
     }

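For illustration only (not part of the commit): the object cache status() array now reports the hit cache and miss cache sizes as separate entries, which is why every ost[] index read by PerformanceMemory_p shifts by one from position 2 onward. A minimal sketch of reading the two new slots; the ObjectCacheStatusSketch class is hypothetical, and the meaning of slots beyond [2] is assumed from the hunks above rather than confirmed.

    import de.anomic.kelondro.kelondroObjectCache;

    class ObjectCacheStatusSketch {
        // status() now begins with [0] max size, [1] hit-cache size, [2] miss-cache size;
        // the age and hit/miss/uniq/doub/flush counters follow (exact order assumed).
        static String currentSizes(kelondroObjectCache cache) {
            String[] ost = cache.status();
            return ost[1] + "<br>" + ost[2];   // mirrors the "Hit-Size<br>Miss-Size" cell in PerformanceMemory_p
        }
    }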
@@ -1021,15 +1021,15 @@ public class kelondroRecords {
         for (int j = 0; j < chunk.length; j++) System.out.print(chunk[j] + ",");
     }
-    public synchronized kelondroRow row() {
+    public final kelondroRow row() {
         return this.ROW;
     }
-    public synchronized int columns() {
+    public final int columns() {
         return this.ROW.columns();
     }
-    public synchronized int columnSize(int column) {
+    public final int columnSize(int column) {
         if ((column < 0) || (column >= this.ROW.columns())) return -1;
         return ROW.width(column);
     }

@@ -29,15 +29,15 @@ package de.anomic.kelondro;
 public class kelondroRow {
-    private kelondroCell[] row;
-    public kelondroRow(kelondroCell[] row) {
+    private kelondroColumn[] row;
+    public kelondroRow(kelondroColumn[] row) {
         this.row = row;
     }
     public kelondroRow(int[] row) {
-        this.row = new kelondroCell[row.length];
-        for (int i = 0; i < row.length; i++) this.row[i] = new kelondroCell(kelondroCell.celltype_undefined, row[i], "", "");
+        this.row = new kelondroColumn[row.length];
+        for (int i = 0; i < row.length; i++) this.row[i] = new kelondroColumn(kelondroColumn.celltype_undefined, row[i], "", "");
     }
     public int columns() {

@@ -184,7 +184,7 @@ public class plasmaDbImporter extends AbstractImporter implements dbImporter {
             if (newContainer.size() > 0) { this.homeWordIndex.addEntries(newContainer, System.currentTimeMillis(), false); }
             // delete complete index entity file
-            this.importWordIndex.deleteIndex(this.wordHash);
+            this.importWordIndex.deleteContainer(this.wordHash);
             // print out some statistical information
             if (this.entryCounter % 500 == 0) {

@@ -233,7 +233,7 @@ public class plasmaDHTChunk {
                     tmpContainers.add(indexContainer);
                 } catch (kelondroException e) {
                     log.logSevere("plasmaWordIndexDistribution/2: deleted DB for word " + nexthash, e);
-                    wordIndex.deleteIndex(nexthash);
+                    wordIndex.deleteContainer(nexthash);
                 }
             }
             // create result

@@ -58,7 +58,10 @@ import java.util.TreeSet;
 import java.net.URL;
 import de.anomic.htmlFilter.htmlFilterContentScraper;
+import de.anomic.index.indexEntry;
 import de.anomic.index.indexEntryAttribute;
+import de.anomic.index.indexRI;
+import de.anomic.index.indexAbstractRI;
 import de.anomic.kelondro.kelondroBase64Order;
 import de.anomic.kelondro.kelondroException;
 import de.anomic.kelondro.kelondroMergeIterator;
@@ -66,7 +69,7 @@ import de.anomic.kelondro.kelondroNaturalOrder;
 import de.anomic.kelondro.kelondroOrder;
 import de.anomic.server.logging.serverLog;
-public final class plasmaWordIndex {
+public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
     private static final String indexAssortmentClusterPath = "ACLUSTER";
     private static final int assortmentCount = 64;
@@ -155,7 +158,7 @@ public final class plasmaWordIndex {
         }
     }
-    public boolean addEntry(String wordHash, plasmaWordIndexEntryInstance entry, long updateTime, boolean dhtCase) {
+    public boolean addEntry(String wordHash, indexEntry entry, long updateTime, boolean dhtCase) {
         if (ramCache.addEntry(wordHash, entry, updateTime, dhtCase)) {
             if (!dhtCase) flushControl();
             return true;
@@ -284,7 +287,7 @@ public final class plasmaWordIndex {
         // e.g. indexTransfer might keep this container for minutes while
         // several new pages could be added to the index, possibly with the same words that have
         // been selected for transfer
-        container.add(ramCache.getContainer(wordHash, true), (maxTime < 0) ? -1 : maxTime / 2);
+        container.add(ramCache.getContainer(wordHash, true, (maxTime < 0) ? -1 : maxTime / 2), (maxTime < 0) ? -1 : maxTime / 2);
         // get from assortments
         container.add(assortmentCluster.getFromAll(wordHash, (maxTime < 0) ? -1 : maxTime / 2), (maxTime < 0) ? -1 : maxTime / 2);
@@ -352,10 +355,11 @@ public final class plasmaWordIndex {
         backend.close(10);
     }
-    public synchronized void deleteIndex(String wordHash) {
-        ramCache.deleteContainer(wordHash);
-        assortmentCluster.removeFromAll(wordHash, -1);
-        backend.deleteIndex(wordHash);
+    public synchronized plasmaWordIndexEntryContainer deleteContainer(String wordHash) {
+        plasmaWordIndexEntryContainer c = ramCache.deleteContainer(wordHash);
+        c.add(assortmentCluster.removeFromAll(wordHash, -1), -1);
+        c.add(backend.deleteContainer(wordHash), -1);
+        return c;
     }
     public int removeEntries(String wordHash, String[] urlHashes, boolean deleteComplete) {
@@ -410,6 +414,14 @@ public final class plasmaWordIndex {
         return hashes;
     }
+    public Iterator wordHashes(String startHash, boolean rot) {
+        try {
+            return wordHashes(startHash, RL_WORDFILES, rot);
+        } catch (IOException e) {
+            return new HashSet().iterator();
+        }
+    }
     public Iterator wordHashes(String startHash, int resourceLevel, boolean rot) throws IOException {
         if (rot) return new rotatingWordIterator(startHash, resourceLevel);
         else return wordHashes(startHash, resourceLevel);

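For illustration only (not part of the commit): deleteContainer now removes a word from all three storage layers (RAM cache, assortment cluster, classic file backend) and returns the merged entries instead of returning void, so callers can still inspect what was dropped. A minimal sketch under that assumption; the DeleteContainerSketch class is hypothetical.

    import de.anomic.plasma.plasmaWordIndex;
    import de.anomic.plasma.plasmaWordIndexEntryContainer;

    class DeleteContainerSketch {
        // delete a word from every layer and report how many entries were removed
        static int deleteAndCount(plasmaWordIndex wordIndex, String wordHash) {
            plasmaWordIndexEntryContainer deleted = wordIndex.deleteContainer(wordHash);
            return deleted.size();
        }
    }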
@@ -49,6 +49,10 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.TreeMap;
+import de.anomic.index.indexEntry;
+import de.anomic.index.indexRI;
+import de.anomic.index.indexAbstractRI;
 import de.anomic.kelondro.kelondroArray;
 import de.anomic.kelondro.kelondroException;
 import de.anomic.kelondro.kelondroMScoreCluster;
@@ -56,7 +60,7 @@ import de.anomic.kelondro.kelondroRecords;
 import de.anomic.server.logging.serverLog;
 import de.anomic.yacy.yacySeedDB;
-public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
+public final class plasmaWordIndexCache extends indexAbstractRI implements indexRI {
     // environment constants
     private static final String indexArrayFileName = "indexDump1.array";
@@ -356,21 +360,10 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
         return (((long) intTime) * (long) 1000) + startTime;
     }
-    public plasmaWordIndexEntryContainer getContainer(String wordHash, boolean deleteIfEmpty) {
+    public plasmaWordIndexEntryContainer getContainer(String wordHash, boolean deleteIfEmpty, long maxtime_dummy) {
         return (plasmaWordIndexEntryContainer) wCache.get(wordHash);
     }
-    public long getUpdateTime(String wordHash) {
-        plasmaWordIndexEntryContainer entries = (plasmaWordIndexEntryContainer) wCache.get(wordHash);
-        if (entries == null) return 0;
-        return entries.updated();
-        /*
-        Long time = new Long(longTime(hashDate.getScore(wordHash)));
-        if (time == null) return 0;
-        return time.longValue();
-        */
-    }
     public plasmaWordIndexEntryContainer deleteContainer(String wordHash) {
         // returns the index that had been deleted
         synchronized (wCache) {
@@ -450,7 +443,7 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
         return added;
     }
-    public boolean addEntry(String wordHash, plasmaWordIndexEntryInstance newEntry, long updateTime, boolean dhtCase) {
+    public boolean addEntry(String wordHash, indexEntry newEntry, long updateTime, boolean dhtCase) {
         if (dhtCase) synchronized (kCache) {
             // put container into kCache
             plasmaWordIndexEntryContainer container = new plasmaWordIndexEntryContainer(wordHash);
@@ -462,7 +455,7 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
         } else synchronized (wCache) {
             plasmaWordIndexEntryContainer container = (plasmaWordIndexEntryContainer) wCache.get(wordHash);
             if (container == null) container = new plasmaWordIndexEntryContainer(wordHash);
-            plasmaWordIndexEntryInstance[] entries = new plasmaWordIndexEntryInstance[] { newEntry };
+            indexEntry[] entries = new indexEntry[] { newEntry };
             if (container.add(entries, updateTime) > 0) {
                 wCache.put(wordHash, container);
                 hashScore.incScore(wordHash);

@@ -49,11 +49,13 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.TreeSet;
+import de.anomic.index.indexRI;
+import de.anomic.index.indexAbstractRI;
 import de.anomic.kelondro.kelondroNaturalOrder;
 import de.anomic.server.logging.serverLog;
 import de.anomic.yacy.yacySeedDB;
-public class plasmaWordIndexClassicDB {
+public class plasmaWordIndexClassicDB extends indexAbstractRI implements indexRI {
     // class variables
     private final File databaseRoot;
@@ -70,6 +72,10 @@ public class plasmaWordIndexClassicDB {
         return size;
     }
+    public Iterator wordHashes(String startHash, boolean rot) {
+        return wordHashes(startHash, rot);
+    }
     public Iterator wordHashes(String startHash, boolean up, boolean rot) {
         if (rot) throw new UnsupportedOperationException("no rot allowed");
         return new iterateFiles(startHash, up);
@@ -208,8 +214,9 @@ public class plasmaWordIndexClassicDB {
         if (f.exists()) return f.lastModified(); else return -1;
     }
-    public void deleteIndex(String wordHash) {
+    public plasmaWordIndexEntryContainer deleteContainer(String wordHash) {
         plasmaWordIndexEntity.removePlasmaIndex(databaseRoot, wordHash);
+        return new plasmaWordIndexEntryContainer(wordHash);
     }
     public int removeEntries(String wordHash, String[] urlHashes, boolean deleteComplete) {
@@ -223,7 +230,7 @@ public class plasmaWordIndexClassicDB {
             int size = pi.size();
             pi.close(); pi = null;
             // check if we can remove the index completely
-            if ((deleteComplete) && (size == 0)) deleteIndex(wordHash);
+            if ((deleteComplete) && (size == 0)) deleteContainer(wordHash);
             return count;
         } catch (IOException e) {
             log.logSevere("plasmaWordIndexClassic.removeEntries: " + e.getMessage());

@@ -57,6 +57,7 @@ import java.util.Iterator;
 import java.util.Set;
 import java.util.TreeMap;
+import de.anomic.index.indexEntry;
 import de.anomic.kelondro.kelondroBase64Order;
 import de.anomic.kelondro.kelondroNaturalOrder;
 import de.anomic.kelondro.kelondroOrder;
@@ -100,16 +101,16 @@ public final class plasmaWordIndexEntryContainer {
         return wordHash;
     }
-    public int add(plasmaWordIndexEntryInstance entry) {
+    public int add(indexEntry entry) {
         return add(entry, System.currentTimeMillis());
     }
-    public int add(plasmaWordIndexEntryInstance entry, long updateTime) {
+    public int add(indexEntry entry, long updateTime) {
         this.updateTime = java.lang.Math.max(this.updateTime, updateTime);
         return (addi(entry)) ? 1 : 0;
     }
-    public int add(plasmaWordIndexEntryInstance[] entries, long updateTime) {
+    public int add(indexEntry[] entries, long updateTime) {
         int c = 0;
         for (int i = 0; i < entries.length; i++) if (addi(entries[i])) c++;
         this.updateTime = java.lang.Math.max(this.updateTime, updateTime);
@@ -131,7 +132,7 @@ public final class plasmaWordIndexEntryContainer {
         return x;
     }
-    private boolean addi(plasmaWordIndexEntryInstance entry) {
+    private boolean addi(indexEntry entry) {
         // returns true if the new entry was added, false if it already existed
         plasmaWordIndexEntryInstance oldEntry = (plasmaWordIndexEntryInstance) container.put(entry.getUrlHash(), entry);
         if ((oldEntry != null) && (entry.isOlder(oldEntry))) { // A more recent Entry is already in this container

@@ -892,7 +892,7 @@ public final class yacy {
                 homeWordIndex.addEntries(newContainer, System.currentTimeMillis(), true);
                 // delete complete index entity file
-                importWordIndex.deleteIndex(wordHash);
+                importWordIndex.deleteContainer(wordHash);
                 // print out some statistical information
                 if (wordCounter%500 == 0) {
