
import os
from crawlerBase import *
class crawlerFile(crawlerBase):
    '''
    Crawler that, given a directory, collects the files matching a
    file extension.

    Use setSearchTerms(extension, subdir) to run a search, then drain
    the results with getNext() until isDone() is True.  Results are
    returned in LIFO order.
    '''

    # Class-level default so the attribute exists before any search runs.
    # NOTE: each search rebinds a fresh *instance* list in
    # findInSubdirectory -- the original code appended into this shared
    # class attribute, so every instance (and every repeated search)
    # accumulated into one global list.
    results = []

    def findInSubdirectory(self, extension, subdirectory=None):
        '''
        Walk subdirectory (default: the current working directory) and
        record the full path of every file whose extension equals
        `extension` (compared against os.path.splitext, so the leading
        dot must be included, e.g. ".txt").
        '''
        path = subdirectory if subdirectory else os.getcwd()
        # Rebind on the instance: resets state between searches and
        # avoids mutating the shared class-level list.
        self.results = []
        for root, dirs, names in os.walk(path):
            for filename in names:
                if extension == os.path.splitext(filename)[1]:
                    self.results.append(os.path.join(root, filename))

    def setSearchTerms(self, terms, subdir=None):
        '''
        Framework entry point: `terms` is the file extension to match
        (including the dot), `subdir` the directory to search
        (default: cwd).
        '''
        self.findInSubdirectory(terms, subdir)

    def getNext(self):
        '''
        Pop and return the most recently collected path (LIFO).
        Raises IndexError when no results remain -- check isDone()
        first.
        '''
        return self.results.pop()

    def isDone(self):
        '''Return True once every collected result has been consumed.'''
        return len(self.results) == 0

    def estimateItemsLeft(self):
        '''Return the number of results not yet returned by getNext().'''
        return len(self.results)