IPP Software Navigation Tools IPP Links Communication Pan-STARRS Links

Ignore:
Timestamp:
Jul 8, 2011, 2:59:10 PM (15 years ago)
Author:
rhenders
Message:

Removed the redundant alreadyProcessed() method; switched to the new logging method; no longer storing stack_id locally, since it is the 'id' field in the super-class; now deleting IPP tables prior to loading new ones, in case some are not overwritten and are then used to populate the wrong batch; updated for the skycell format change.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/ippToPsps/jython/stackbatch.py

    r31811 r31847  
    3030                 gpc1Db,
    3131                 ippToPspsDb,
    32                  skyID,
    33                  inputFile,
    34                  stackType):
     32                 stackID):
    3533
    3634       super(StackBatch, self).__init__(
     
    3937               gpc1Db,
    4038               ippToPspsDb,
    41                skyID,
     39               stackID,
    4240               "stack",
    43                inputFile,
    44                "MD04") # TODO
     41               gpc1Db.getStackStageCmf(stackID),
     42               "3PI") # TODO
    4543
    4644       if not self.everythingOK: return
    4745
    48        self.expTime = gpc1Db.getStackExpTime(self.stackID)
    49 
    50        self.logger.info("got exp time of %d" % self.expTime)
     46       self.stackType = "DEEP_STACK" # TODO
     47
     48       # get stack meta data
     49       meta = self.gpc1Db.getStackStageMeta(self.id)
     50       self.filter = meta[0];
     51       self.filter = self.filter[0:1]
     52       self.skycell = meta[1];
     53       self.skycell = self.skycell[8:]
     54       # mangling e.g. 0683.043 into 0683043 for now until we have a schema change
     55       self.skycell = self.skycell.replace(".", "")
     56       self.analysisVer = meta[2];
     57
     58       self.expTime = gpc1Db.getStackExpTime(self.id)
     59
     60       self.logger.debug("Got exp time of %d" % self.expTime)
    5161
    5262       # delete PSPS tables
     
    5969       self.scratchDb.dropTable("ObjectCalColor")
    6070
     71       # delete IPP tables
     72       self.scratchDb.dropTable("SkyChip_psf")
     73       self.scratchDb.dropTable("SkyChip_xrad")
     74       self.scratchDb.dropTable("SkyChip_xfit")
     75       self.scratchDb.dropTable("SkyChip_xsrc")
     76
    6177       # create an output filename, which is {filterID}{skycellID}.FITS
    62        self.outputFitsFile = "%08d.FITS" % self.stackID
     78       self.outputFitsFile = "%08d.FITS" % self.id
    6379       self.outputFitsPath = "%s/%s" % (self.localOutPath, self.outputFitsFile)
    6480
     
    6783
    6884       # insert what we know about this stack batch into the stack table
    69        self.ippToPspsDb.insertStackMeta(self.batchID, self.id, self.stackID, self.filter, self.stackType)
     85       self.ippToPspsDb.insertStackMeta(self.batchID, self.filter, self.stackType)
    7086
    7187       # insert sourceID/imageID combo so DVO can look it up
     
    8096       super(StackBatch, self).printMe()
    8197
    82        self.logger.info("Sky ID:     %d" % self.id)
    83        self.logger.info("Stack ID:   %d" % self.stackID)
    84        self.logger.info("Stack type: %s" % self.stackType)
    85        self.logger.info("Skycell:    %s" % self.skycell)
    86        self.logger.info("Filter:     %s" % self.filter)
     98       self.logger.infoPair("Stack ID", "%d" % self.id)
     99       self.logger.infoPair("Stack type", "%s" % self.stackType)
     100       self.logger.infoPair("Skycell", "%s" % self.skycell)
     101       self.logger.infoPair("Filter", "%s" % self.filter)
    87102
    88103
     
    92107    def updateStackMetaID(self, table):
    93108
    94         sql = "UPDATE " + table + "  SET stackMetaID=" + str(self.stackID)
     109        sql = "UPDATE " + table + "  SET stackMetaID=" + str(self.id)
    95110        self.scratchDb.execute(sql)
    96111
     
    225240    def populateStackMeta(self):
    226241
    227         self.logger.info("Procesing StackMeta table")
     242        self.logger.infoPair("Procesing table", "StackMeta")
    228243
    229244        sql = "INSERT INTO StackMeta (\
     
    247262        ,pc002002 \
    248263         ) VALUES ( \
    249         " + str(self.stackID) + " \
     264        " + str(self.id) + " \
    250265        ," + self.skycell + " \
    251266        ," + str(self.scratchDb.getPhotoCalID(self.header['SOURCEID'], self.header['IMAGEID'])) + " \
     
    278293    def populateStackDetection(self):
    279294
    280         self.logger.info("Procesing StackDetection table")
     295        self.logger.infoPair("Procesing table", "StackDetection")
    281296
    282297        # insert all the detections
     
    390405    def populateStackApFlx(self):
    391406
    392         self.logger.info("Procesing StackApFlx table")
     407        self.logger.infoPair("Procesing", "StackApFlx")
    393408 
    394409        sql = "INSERT INTO StackApFlx \
     
    403418
    404419        # TODO temporarily loading 1st convolved fluxes into unconvolved fields
    405         self.logger.info("Adding un-convolved fluxes")
     420        self.logger.infoPair("Adding fluxes", "un-convolved")
    406421        self.updateApFlxs("", "< 7.0")
    407         self.logger.info("Adding 1st convolved fluxes")
     422        self.logger.infoPair("Adding fluxes", "1st convolved")
    408423        self.updateApFlxs("c1", "< 7.0")
    409         self.logger.info("Adding 2nd convolved fluxes")
     424        self.logger.infoPair("Adding fluxes", "2nd convolved")
    410425        self.updateApFlxs("c2", "> 7.0")
    411426
    412         self.logger.info("Adding petrosians for extended sources")
     427        self.logger.infoPair("Adding", "petrosians for extended sources")
    413428        sql = "UPDATE StackApFlx AS a, SkyChip_xsrc AS b SET \
    414429        petRadius=b.PETRO_RADIUS \
     
    440455    def populateStackModelFit(self):
    441456
    442         self.logger.info("Procesing StackModelFit table")
     457        self.logger.infoPair("Procesing table", "StackModelFit")
    443458
    444459        # insert all the detections
     
    450465
    451466        # populate model parameters
    452         self.logger.info("Adding deVaucouleurs fit")
     467        self.logger.infoPair("Adding model fit", "deVaucouleurs")
    453468        self.updateModelFit("deV", "PS_MODEL_DEV")
    454         self.logger.info("Adding exponential fit")
     469        self.logger.infoPair("Adding model fit", "exponential")
    455470        self.updateModelFit("exp", "PS_MODEL_EXP")
    456         self.logger.info("Adding sersic fit")
     471        self.logger.infoPair("Adding model fit", "sersic")
    457472        self.updateModelFit("ser", "PS_MODEL_SERSIC")
    458473
     
    477492        rs.first()
    478493        nMissing = rs.getInt(1)
    479         self.logger.info("%5d detections in %s table that are not in StackDetection. Deleting" % (nMissing, table))
     494        self.logger.infoPair("Detections deleted from %s that are not in StackDetection" + table, "%5d" % nMissing)
    480495 
    481496        if nMissing < 1: return
     
    490505    def populateStackToImage(self):
    491506
    492         self.logger.info("Procesing StackToImage table")
    493 
    494         imageIDs = self.gpc1Db.getImageIDsForThisStackID(self.stackID)
     507        self.logger.infoPair("Procesing table", "StackToImage")
     508
     509        imageIDs = self.gpc1Db.getImageIDsForThisStackID(self.id)
    495510
    496511        for imageID in imageIDs:
    497512            sql = "INSERT INTO StackToImage (stackMetaID, imageID) \
    498513                   VALUES (\
    499                    " + str(self.stackID) + ", " + imageID + ")"
     514                   " + str(self.id) + ", " + imageID + ")"
    500515            self.scratchDb.execute(sql)
    501516
     
    509524    def populateSkinnyObject(self):
    510525
    511         self.logger.info("Procesing SkinnyObject table")
     526        self.logger.infoPair("Procesing table", "SkinnyObject")
    512527
    513528        sql = "INSERT INTO SkinnyObject (\
     
    529544    def populateObjectCalColor(self):
    530545
    531         self.logger.info("Procesing ObjectCalColor table")
     546        self.logger.infoPair("Procesing table", "ObjectCalColor table")
    532547
    533548        sql = "INSERT INTO ObjectCalColor (\
     
    550565    def alterPspsTables(self):
    551566
    552         self.logger.info("Altering PSPS tables")
     567        self.logger.debug("Altering PSPS tables")
    553568        #self.scratchDb.makeColumnUnique("StackDetection", "objID")
    554569        self.scratchDb.createIndex("StackDetection", "ippDetectID")
     
    561576    def indexIppTables(self):
    562577
    563         self.logger.info("Creating indexes on IPP tables")
     578        self.logger.infoPair("Creating indexes on", "IPP tables")
    564579        self.scratchDb.createIndex("SkyChip_psf", "IPP_IDET")
    565580        self.scratchDb.createIndex("SkyChip_xfit", "IPP_IDET")
     
    575590
    576591        self.logger.debug("Updating table '" + table + "' with DVO IDs...")
    577         sql = "UPDATE IGNORE " + table + " AS a, dvoDetectionFull AS b SET \
     592        sql = "UPDATE IGNORE " + table + " AS a, " + self.scratchDb.dvoDetection + " AS b SET \
    578593               a.ippObjID = b.ippObjID, \
    579594               a.stackDetectID = b.detectID, \
     
    613628        return True
    614629
    615     '''
    616     Checks whether this batch has already been processed and published
    617     '''
    618     def alreadyProcessed(self):
    619 
    620         # sadly, we have to read the FITS primary header first
    621         if not self.readPrimaryHeader(): return False
    622 
    623         # get filterID using init table
    624         self.filter = self.header['FPA.FILTER']
    625         self.filter = self.filter[0:1]
    626 
    627         self.stackType = stackType
    628         meta = self.gpc1Db.getStackStageMeta(self.id, self.header['FPA.FILTER'])
    629         if len(meta) < 1: return False
    630         self.stackID = meta[0];
    631         self.skycell = meta[1];
    632         self.skycell = self.skycell[8:]
    633         self.analysisVer = meta[2];
    634 
    635         #return self.ippToPspsDb.alreadyProcessed("stack", "stack_id", self.stackID)
    636         return False # TODOI
    637 
    638 
    639 logging.config.fileConfig("logging.conf")
    640 logger = logging.getLogger("stackbatch")
    641 logger.setLevel(logging.INFO)
    642 logger.info("Starting")
    643 
    644 gpc1Db = Gpc1Db(logger)
    645 ippToPspsDb = IppToPspsDb(logger)
    646 
    647 configDoc = ElementTree(file="config.xml")
    648 
    649 #stackType = "NIGHTLY_STACK"
    650 #skyIDs = gpc1Db.getIDsInThisDVODbForThisStageFudge()
    651 #skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.Staticsky", "staticsky")
    652 
    653 stackType = "DEEP_STACK"
    654 skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.GENE.PSPSDEEP", "staticsky")
    655 
    656 #skyIDs = [942]
    657 #skyIDs = [299]
    658 #skyIDs = [302]
    659 #skyIDs = [8508]
    660 #i = 0
    661 for skyID in skyIDs:
    662    
    663     #if skyID < 1340: continue # nightly
    664     #if skyID < 238: continue # deep
    665 
    666     cmfFiles = gpc1Db.getStackStageCmfs(skyID)
    667 
    668     for file in cmfFiles:
    669 
    670         stackBatch = StackBatch(logger,
    671                                 configDoc,
    672                                 gpc1Db,
    673                                 ippToPspsDb,
    674                                 skyID,
    675                                 file,
    676                                 stackType)
    677 
    678         stackBatch.run()
    679 
Note: See TracChangeset for help on using the changeset viewer.