Start line:  
End line:  

Snippet Preview

Snippet HTML Code

Stack Overflow Questions
  /*-
   * See the file LICENSE for redistribution information.
   *
   * Copyright (c) 2002, 2013 Oracle and/or its affiliates.  All rights reserved.
   *
   */
  
  package com.sleepycat.je.util;
  
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/*
 * NOTE(review): the source-extraction this file was recovered from stripped
 * the com.sleepycat.* imports (e.g. DbInternal, DatabaseEntry,
 * EnvironmentFailureException, dbi.*, log.*, log.entry.*, tree.*, utilint.*).
 * They must be restored from the original JE distribution for this file to
 * compile; only the java.* imports are listed here.
 */
 
/**
 * Used to retrieve as much data as possible from a corrupted environment.
 * This utility is meant to be used programmatically, and is the equivalent
 * to the -R or -r options for DbDump.
 *
 * <p>To scavenge a database:</p>
 * <pre>
 *  DbScavenger scavenger =
 *      new DbScavenger(env, outputDirectory, formatUsingPrintable,
 *                      doAggressiveScavengerRun, verbose);
 *  scavenger.dump();
 * </pre>
 *
 * <p>The recovered databases will be placed in the outputDirectory with
 * ".dump" file suffixes.  The format of the .dump files will be suitable
 * for use with DbLoad.</p>
 */
 
 public class DbScavenger extends DbDump {
     private static final int FLUSH_INTERVAL = 100;
     private int readBufferSize;
     private EnvironmentImpl envImpl;
 
     /*
      * Set of committed txn ids that have been seen so far.
      */
     private BitMap committedTxnIdsSeen;
 
     /*
      * Set of LN Node Ids that have been seen so far.
      */
     private Set<CompareSlotlnNodesSeen;
 
     /*
      * Map of database id to database names.
      */
     private Map<LongStringdbIdToName;
 
     /*
      * Map of database id to DatabaseImpl.
      */
     private Map<LongDatabaseImpldbIdToImpl;
 
     /*
      * Map of database id to the .dump file output stream for that database.
      */
     private Map<LongPrintStreamdbIdToOutputStream;
 
     private boolean dumpCorruptedBounds = false;
 
     private int flushCounter = 0;
     private long lastTime;

    
Create a DbScavenger object for a specific environment.

Parameters:
env The Environment containing the database to dump.
outputDirectory The directory to create the .dump files in.
formatUsingPrintable true if the dump should use printable characters.
doAggressiveScavengerRun true if true, then all data records are dumped, regardless of whether they are the latest version or not.
verbose true if status output should be written to System.out during scavenging.
    public DbScavenger(Environment env,
                       String outputDirectory,
                       boolean formatUsingPrintable,
                       boolean doAggressiveScavengerRun,
                       boolean verbose) {
        super(envnullnullformatUsingPrintable);
        this. = doAggressiveScavengerRun;
        this. = new HashMap<LongString>();
        this. = new HashMap<LongDatabaseImpl>();
        this. = new HashMap<LongPrintStream>();
        this. = verbose;
        this. = outputDirectory;
    }

    
Set to true if corrupted boundaries should be dumped out.
    public void setDumpCorruptedBounds(boolean dumpCorruptedBounds) {
        this. = dumpCorruptedBounds;
    }

    
Start the scavenger run.
    @Override
    public void dump()
        throws EnvironmentNotFoundException,
               EnvironmentLockedException,
               IOException {
        openEnv(false);
         = DbInternal.getEnvironmentImpl();
        DbConfigManager cm = .getConfigManager();
        /*
         * Find the end of the log.
         */
        LastFileReader reader = new LastFileReader();
        while (reader.readNextEntry()) {
        }
        /* Tell the fileManager where the end of the log is. */
        long lastUsedLsn = reader.getLastValidLsn();
        long nextAvailableLsn = reader.getEndOfLog();
        .getFileManager().setLastPosition(nextAvailableLsn,
                                                 lastUsedLsn,
                                                 reader.getPrevOffset());
        try {
            /* Pass 1: Scavenge the dbtree. */
            if () {
                ..println("Pass 1: " + new Date());
            }
            scavengeDbTree(lastUsedLsnnextAvailableLsn);
            /* Pass 2: Scavenge the databases. */
            if () {
                ..println("Pass 2: " + new Date());
            }
            scavenge(lastUsedLsnnextAvailableLsn);
            if () {
                ..println("End: " + new Date());
            }
        } finally {
            closeOutputStreams();
        }
    }
    /*
     * Scan the log looking for records that are relevant for scavenging the db
     * tree.
     */
    private void scavengeDbTree(long lastUsedLsnlong nextAvailableLsn)
        throws DatabaseException {
         = new BitMap();
         = new TreeSet<CompareSlot>();
        final ScavengerFileReader scavengerReader =
            new ScavengerFileReader(lastUsedLsn,
                                    .nextAvailableLsn) {
                protected void processEntryCallback(LogEntry entry,
                                                    LogEntryType entryType)
                    throws DatabaseException {
                    processDbTreeEntry(entryentryType);
                }
            };
        scavengerReader.setTargetType(.);
        scavengerReader.setTargetType(.);
        scavengerReader.setTargetType(.);
        scavengerReader.setTargetType(.);
        scavengerReader.setTargetType(.);
        scavengerReader.setTargetType(.);
         = System.currentTimeMillis();
        long fileNum = -1;
        while (scavengerReader.readNextEntry()) {
            fileNum = reportProgress(fileNum,
                                     scavengerReader.getLastLsn());
        }
    }
    private long reportProgress(long fileNumlong lastLsn) {
        long currentFile = DbLsn.getFileNumber(lastLsn);
        if () {
            if (currentFile != fileNum) {
                long now = System.currentTimeMillis();
                ..println("processing file " +
                                   FileManager.getFileName(currentFile,
                                                           ".jdb  ") +
                                   (now-) + " ms");
                 = now;
            }
        }
        return currentFile;
    }
    /*
     * Look at an entry and determine if it should be processed for scavenging.
     */
    private boolean checkProcessEntry(LogEntry entry,
                                      LogEntryType entryType,
                                      boolean pass2) {
        boolean isTransactional = entryType.isTransactional();
        /*
         * If entry is txnal...
         *  if a commit record, add to committed txn id set
         *  if an abort record, ignore it and don't process.
         *  if an LN, check if it's in the committed txn id set.
         *     If it is, continue processing, otherwise ignore it.
         */
        if (isTransactional) {
            final long txnId = entry.getTransactionId();
            if (entryType.equals(.)) {
                .set(txnId);
                /* No need to process this entry further. */
                return false;
            }
            if (entryType.equals(.)) {
                /* No need to process this entry further. */
                return false;
            }
            if (!.get(txnId)) {
                return false;
            }
        }
        /*
         * Check the nodeId to see if we've already seen it or not.
         */
        if (entry instanceof LNLogEntry) {
            final LNLogEntry lnEntry = (LNLogEntryentry;
            final long dbId = lnEntry.getDbId().getId();
            final DatabaseImpl db = .get(dbId);
            /* Must call postFetchInit if true is returned. */
            if (db != null) {
                lnEntry.postFetchInit(db);
            } else {
                lnEntry.postFetchInit(false /*isDupDb*/);
            }
            /*
             * If aggressive or if processing DbTree entries, don't worry about
             * whether this node has been processed already.
             */
            if ( || !pass2) {
                return true;
            }
            if (db == null) {
                throw EnvironmentFailureException.unexpectedState
                    ("Database info not available for DB ID: " + dbId);
            }
            return .add(new CompareSlot(dblnEntry));
        }
        return false;
    }
    /*
     * Called once for each log entry during the pass 1 (dbtree).
     */
    private void processDbTreeEntry(LogEntry entryLogEntryType entryType)
        throws DatabaseException {
        boolean processThisEntry =
            checkProcessEntry(entryentryTypefalse);
        if (processThisEntry &&
            (entry instanceof LNLogEntry)) {
            LNLogEntry lnEntry = (LNLogEntryentry;
            LN ln = lnEntry.getLN();
            if (ln instanceof NameLN) {
                String name = StringUtils.fromUTF8(lnEntry.getKey());
                Long dbId = Long.valueOf(((NameLNln).getId().getId());
                if (.containsKey(dbId) &&
                    !.get(dbId).equals(name)) {
                    throw EnvironmentFailureException.unexpectedState
                        ("Already name mapped for dbId: " + dbId +
                         " changed from " + .get(dbId) +
                         " to " + name);
                } else {
                    .put(dbIdname);
                }
            }
            if (ln instanceof MapLN) {
                DatabaseImpl db = ((MapLNln).getDatabase();
                Long dbId = db.getId().getId();
                /* Use latest version to get most recent comparators. */
                if (!.containsKey(dbId)) {
                    .put(dbIddb);
                }
            }
        }
    }
    /*
     * Pass 2: scavenge the regular (non-dbtree) environment.
     */
    private void scavenge(long lastUsedLsnlong nextAvailableLsn)
        throws DatabaseException {
        final ScavengerFileReader scavengerReader =
            new ScavengerFileReader(lastUsedLsn,
                                    .nextAvailableLsn) {
                protected void processEntryCallback(LogEntry entry,
                                                    LogEntryType entryType)
                    throws DatabaseException {
                    processRegularEntry(entryentryType);
                }
            };
        /*
         * Note: committed transaction id map has been created already, no
         * need to read TXN_COMMITS on this pass.
         */
        for (LogEntryType entryType : LogEntryType.getAllTypes()) {
            if (entryType.isUserLNType()) {
                scavengerReader.setTargetType(entryType);
            }
        }
        scavengerReader.setDumpCorruptedBounds();
        long progressFileNum = -1;
        while (scavengerReader.readNextEntry()) {
            progressFileNum = reportProgress(progressFileNum,
                                             scavengerReader.getLastLsn());
        }
    }
    /*
     * Process an entry during pass 2.
     */
    private void processRegularEntry(LogEntry entryLogEntryType entryType)
        throws DatabaseException {
        boolean processThisEntry =
            checkProcessEntry(entryentryTypetrue);
        if (processThisEntry) {
            LNLogEntry lnEntry = (LNLogEntryentry;
            Long dbId = Long.valueOf(lnEntry.getDbId().getId());
            LN ln = lnEntry.getLN();
            /* Create output file even if we don't process a deleted entry. */
            PrintStream out = getOutputStream(dbId);
            if (!ln.isDeleted()) {
                DatabaseEntry key = new DatabaseEntry();
                DatabaseEntry data = new DatabaseEntry();
                lnEntry.getUserKeyData(keydata);
                dumpOne(outkey.getData(), );
                dumpOne(outdata.getData(), );
                if ((++ % ) == 0) {
                    out.flush();
                     = 0;
                }
            }
        }
    }
    /*
     * Return the output stream for the .dump file for database with id dbId.
     * If an output stream has not already been created, then create one.
     */
    private PrintStream getOutputStream(Long dbId)
        throws DatabaseException {
        PrintStream ret = .get(dbId);
        if (ret != null) {
            return ret;
        }
        String name = .get(dbId);
        if (name == null) {
            name = "db" + dbId;
        }
        File file = new File(name + ".dump");
        try {
            ret = new PrintStream(new FileOutputStream(file), false);
        } catch (FileNotFoundException e) {
            throw EnvironmentFailureException.unexpectedException(e);
        }
        .put(dbIdret);
        DatabaseImpl db = .get(dbId);
        boolean dupSort = (db != null) ? db.getSortedDuplicates() : false;
        printHeader(retdupSort);
        return ret;
    }
    private void closeOutputStreams() {
        Iterator<PrintStreamiter = .values().iterator();
        while (iter.hasNext()) {
            PrintStream s = iter.next();
            s.println("DATA=END");
            s.close();
        }
    }
New to GrepCode? Check out our FAQ X