Start the stopwatch.
This method starts a new timing session, clearing any previous values.
69 70 71 72 73 74 75 76 77 78 79
System.out.println(" Nocache took "+sw); long nocachetime = sw.getTime(); sw.reset(); sw.start(); for( int i = 0; i < 100; i++ ) { WikiPage page = m_engine.getPage( "TestPage" ); String pagedata = m_engine.getPureText( page );
81 82 83 84 85 86 87 88 89 90 91
} @Override public void run() { StopWatch watch = new StopWatch(); watch.start(); logger.debug("clean up expired or over maxOnlineTime session start ..."); Calendar calendar = Calendar.getInstance(); @SuppressWarnings("unchecked") OqlBuilder<Sessioninfo> builder = OqlBuilder.from(registry.getSessioninfoBuilder() .getSessioninfoClass(), "info");
71 72 73 74 75 76 77 78 79 80 81
public void doPropFind( HttpServletRequest req, HttpServletResponse res ) throws IOException,ServletException { StopWatch sw = new StopWatch(); sw.start(); // Do the "sanitize url" trick String p = new String(req.getPathInfo().getBytes("ISO-8859-1"), "UTF-8"); DavPath path = new DavPath( p );
202 203 204 205 206 207 208 209 210 211 212
public void initialize( Collection pages ) throws ProviderException { log.debug( "Initializing new ReferenceManager with "+pages.size()+" initial pages." ); StopWatch sw = new StopWatch(); sw.start(); log.info( "Starting cross reference scan of WikiPages" ); // // First, try to serialize old data from disk. If that fails, // we'll go and update the entire reference lists (which'll take
313 314 315 316 317 318 319 320 321 322 323
long saved = 0L; try { StopWatch sw = new StopWatch(); sw.start(); File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) );
356 357 358 359 360 361 362 363 364 365 366
ObjectOutputStream out = null; try { StopWatch sw = new StopWatch(); sw.start(); File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE ); out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) );
417 418 419 420 421 422 423 424 425 426 427
long saved = 0L; try { StopWatch sw = new StopWatch(); sw.start(); // // Find attribute cache, and check if it exists // File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
490 491 492 493 494 495 496 497 498 499 500
*/ private synchronized void serializeAttrsToDisk( WikiPage p ) { ObjectOutputStream out = null; StopWatch sw = new StopWatch(); sw.start(); try { File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );
103 104 105 106 107 108 109 110 111 112 113
* @return the suggestions */ public List getSuggestions( String wikiName, int maxLength ) { StopWatch sw = new StopWatch(); sw.start(); List<String> list = new ArrayList<String>(maxLength); if( wikiName.length() > 0 ) {
152 153 154 155 156 157 158 159 160 161 162
* @return the pages found */ public List findPages( String searchString, int maxLength ) { StopWatch sw = new StopWatch(); sw.start(); List<HashMap> list = new ArrayList<HashMap>(maxLength); if( searchString.length() > 0 ) {