Index: trunk/ippToPsps/jython/stackbatch.py
===================================================================
--- trunk/ippToPsps/jython/stackbatch.py	(revision 31402)
+++ trunk/ippToPsps/jython/stackbatch.py	(revision 31502)
@@ -8,6 +8,9 @@
 from java.sql import *
 
+from batch import Batch
 from gpc1db import Gpc1Db
-from batch import Batch
+from ipptopspsdb import IppToPspsDb
+from scratchdb import ScratchDb
+
 import logging.config
 
@@ -20,7 +23,20 @@
     Constructor
     '''
-    def __init__(self, logger, skyID, inputFile, stackType, useFullTables=False):
+    def __init__(self, 
+                 logger, 
+                 gpc1Db,
+                 ippToPspsDb,
+                 scratchDb,
+                 skyID, 
+                 inputFile, 
+                 stackType, 
+                 useFullTables=False):
+
        super(StackBatch, self).__init__(
                logger,
+               gpc1Db,
+               ippToPspsDb,
+               scratchDb,
+               skyID,
                "stack", 
                inputFile, 
@@ -28,24 +44,18 @@
                useFullTables) # TODO
 
-       self.logger.info("StackBatch constructor. Creating batch from: '" + inputFile + "'")
-
-       self.skyID = skyID
-
-       # get filterID using init table
-       self.filter = self.header['FPA.FILTER']
-       self.filter = self.filter[0:1]
-
-       self.stackType = stackType
-       meta = self.gpc1Db.getStackStageMeta(self.skyID, self.header['FPA.FILTER'])
-       if len(meta) < 1: return
-       self.stackID = meta[0];
-       self.skycell = meta[1];
-
-       # determine skycell from header value
-       #self.skycell = "skycell.34" #= self.header['SKYCELL']
-       self.skycell = self.skycell[8:]
-
-       self.logger.info("Processing stack with ID: %d, type: %s and skycell: %s filter: %s" % (self.stackID, self.stackType, self.skycell, self.filter))
-
+       if not self.everythingOK: return
+
+       self.expTime = gpc1Db.getStackExpTime(self.stackID)
+
+       self.logger.info("got exp time of %d" % self.expTime)
+
+       # meta data to the log
+       self.logger.info("New Stack Batch:")
+       self.logger.info("Sky ID:     %d" % self.id)
+       self.logger.info("File:       %s" % inputFile)
+       self.logger.info("Stack ID:   %d" % self.stackID)
+       self.logger.info("Stack type: %s" % self.stackType)
+       self.logger.info("Skycell:    %s" % self.skycell)
+       self.logger.info("Filter:     %s" % self.filter)
 
        # delete PSPS tables
@@ -58,16 +68,6 @@
        self.scratchDb.dropTable("ObjectCalColor")
 
-       # delete IPP tables
-       #self.scratchDb.dropTable("SkyChip_psf")
-       #self.scratchDb.dropTable("SkyChip_xsrc")
-       #self.scratchDb.dropTable("SkyChip_xfit")
-       #self.scratchDb.dropTable("SkyChip_xrad")
-
-       self.logger.info("Stack type: " + self.safeDictionaryAccess(self.header, self.stackType))
-       # obs time makes no sense except for nightly stacks
-       #if self.header['STK_TYPE'] != "NIGHTLY_STACK": self.header['MJD-OBS'] = "-999"
-
        # create an output filename, which is {filterID}{skycellID}.FITS
-       self.outputFitsFile = "%s%07d.FITS" % (self.filter, int(self.skycell))
+       self.outputFitsFile = "%08d.FITS" % self.stackID
        self.outputFitsPath = "%s/%s" % (self.localOutPath, self.outputFitsFile)
 
@@ -77,5 +77,5 @@
 
        # insert what we know about this stack batch into the stack table
-       self.ippToPspsDb.insertStackMeta(self.batchID, self.skyID, self.stackID, self.filter, self.stackType)
+       self.ippToPspsDb.insertStackMeta(self.batchID, self.id, self.stackID, self.filter, self.stackType)
 
        # insert sourceID/imageID combo so DVO can look it up
@@ -89,5 +89,5 @@
 
         sql = "UPDATE " + table + "  SET stackMetaID=" + str(self.stackID)
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
     '''
@@ -97,5 +97,5 @@
 
         sql = "UPDATE "+table+" AS a, StackType AS b SET a.stackTypeID=b.stackTypeID WHERE b.name = '" + self.stackType + "'"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
 
@@ -148,5 +148,5 @@
         WHERE a.ippDetectID=b.IPP_IDET AND b.PSF_FWHM "+psfCondition
 
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
     '''
@@ -196,5 +196,5 @@
         WHERE a.ippDetectID=b.IPP_IDET AND b.MODEL_TYPE = '"+ippModelType+"'"
 
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         # sersic fit has an extra parameter
@@ -213,5 +213,5 @@
             WHERE a.ippDetectID=b.IPP_IDET AND b.MODEL_TYPE = '"+ippModelType+"'"
 
-            self.scratchDb.stmt.execute(sql)
+            self.scratchDb.execute(sql)
 
 
@@ -220,4 +220,5 @@
     '''
     def populateStackMeta(self):
+
         self.logger.info("Procesing StackMeta table")
 
@@ -246,5 +247,5 @@
         ," + str(self.scratchDb.getPhotoCalID(self.header['SOURCEID'], self.header['IMAGEID'])) + " \
         ," + self.header['FPA.ZP'] + " \
-        ," + self.header['EXPTIME'] + " \
+        ," + str(self.expTime) + " \
         ,'" + self.safeDictionaryAccess(self.header, 'PSFMODEL') + "' \
         ,'" + self.header['CTYPE1'] + "' \
@@ -261,5 +262,5 @@
         ," + self.header['PC002002'] + " \
         )"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         self.scratchDb.updateAllRows("StackMeta", "surveyID", str(self.surveyID))
@@ -272,4 +273,5 @@
     '''
     def populateStackDetection(self):
+
         self.logger.info("Procesing StackDetection table")
 
@@ -321,7 +323,7 @@
                ,X_PSF_SIG \
                ,Y_PSF_SIG \
-               ,POW(10.0, (-0.4*PSF_INST_MAG)) / "+self.header['EXPTIME']+" \
-               ,ABS((PSF_INST_MAG_SIG*(POW(10.0, (-0.4*PSF_INST_MAG)) / "+self.header['EXPTIME']+")) / 1.085736) \
-               ,POW(10.0, (-0.4*PEAK_FLUX_AS_MAG)) / "+self.header['EXPTIME']+" \
+               ,POW(10.0, (-0.4*PSF_INST_MAG)) / "+str(self.expTime)+" \
+               ,ABS((PSF_INST_MAG_SIG*(POW(10.0, (-0.4*PSF_INST_MAG)) / "+str(self.expTime)+")) / 1.085736) \
+               ,POW(10.0, (-0.4*PEAK_FLUX_AS_MAG)) / "+str(self.expTime)+" \
                ,SKY \
                ,SKY_SIGMA \
@@ -352,5 +354,5 @@
                FROM SkyChip_psf"
 
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         self.scratchDb.updateAllRows("StackDetection", "surveyID", str(self.surveyID))
@@ -362,6 +364,17 @@
         self.updateStackTypeID("StackDetection")
         self.updateDvoIDs("StackDetection")
-
-        # now delete bad flux
+        sql = "ALTER TABLE StackDetection ADD PRIMARY KEY (objID, stackDetectID)"
+        self.scratchDb.execute(sql)
+
+        if self.stackType == "DEEP_STACK": 
+
+            #if deep stack and instFlux = null and err not null
+            sql = "UPDATE StackDetection AS a, SkyChip_psf AS b \
+                   SET instFlux = 2*b.PSF_INST_FLUX_SIG \
+                   WHERE instFlux IS NULL \
+                   AND b.PSF_INST_FLUX_SIG IS NOT NULL"
+            self.scratchDb.execute(sql)
+            #    instFlux = 2*PSF_INST_FLUX_SIG
+            
         self.scratchDb.reportAndDeleteRowsWithNULLS("StackDetection", "instFlux")
         self.scratchDb.reportAndDeleteRowsWithNULLS("StackDetection", "objID")
@@ -372,4 +385,5 @@
     '''
     def populateStackApFlx(self):
+
         self.logger.info("Procesing StackApFlx table")
  
@@ -381,16 +395,16 @@
 
         try:
-            self.scratchDb.stmt.execute(sql)
+            self.scratchDb.execute(sql)
         except: return
 
         # TODO temporarily loading 1st convolved fluxes into unconvolved fields
-        self.logger.info("    Adding un-convolved fluxes")
+        self.logger.info("Adding un-convolved fluxes")
         self.updateApFlxs("", "< 7.0")
-        self.logger.info("    Adding 1st convolved fluxes")
+        self.logger.info("Adding 1st convolved fluxes")
         self.updateApFlxs("c1", "< 7.0")
-        self.logger.info("    Adding 2nd convolved fluxes")
+        self.logger.info("Adding 2nd convolved fluxes")
         self.updateApFlxs("c2", "> 7.0")
 
-        self.logger.info("    Adding petrosians for extended sources")
+        self.logger.info("Adding petrosians for extended sources")
         sql = "UPDATE StackApFlx AS a, SkyChip_xsrc AS b SET \
         petRadius=b.PETRO_RADIUS \
@@ -403,5 +417,5 @@
         ,petR90Err=b.PETRO_RADIUS_90_ERR \
         WHERE a.ippDetectID=b.IPP_IDET"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         self.scratchDb.updateAllRows("StackApFlx", "surveyID", str(self.surveyID))
@@ -413,4 +427,7 @@
         self.updateStackTypeID("StackApFlx")
         self.updateDvoIDs("StackApFlx")
+        self.scratchDb.reportAndDeleteRowsWithNULLS("StackApFlx", "objID")
+        self.deleteDetectionsNotInStackDetection("StackApFlx")
+
 
     '''
@@ -418,4 +435,5 @@
     '''
     def populateStackModelFit(self):
+
         self.logger.info("Procesing StackModelFit table")
 
@@ -423,15 +441,14 @@
         sql = "INSERT INTO StackModelFit (ippDetectID) SELECT DISTINCT IPP_IDET from SkyChip_xfit"
         try:
-            self.scratchDb.stmt.execute(sql)
+            self.scratchDb.execute(sql)
         except:
             return
 
-
         # populate model parameters
-        self.logger.info("    Adding deVaucouleurs fit")
+        self.logger.info("Adding deVaucouleurs fit")
         self.updateModelFit("deV", "PS_MODEL_DEV")
-        self.logger.info("    Adding exponential fit")
+        self.logger.info("Adding exponential fit")
         self.updateModelFit("exp", "PS_MODEL_EXP")
-        self.logger.info("    Adding sersic fit")
+        self.logger.info("Adding sersic fit")
         self.updateModelFit("ser", "PS_MODEL_SERSIC")
 
@@ -444,4 +461,23 @@
         self.updateStackTypeID("StackModelFit")
         self.updateDvoIDs("StackModelFit")
+        self.scratchDb.reportAndDeleteRowsWithNULLS("StackModelFit", "objID")
+        self.deleteDetectionsNotInStackDetection("StackModelFit")
+
+    '''
+    Reports and deletes detections in this table that are not in StackDetection
+    '''
+    def deleteDetectionsNotInStackDetection(self, table):
+
+        sql = "SELECT COUNT(*) FROM " + table + " WHERE ippDetectID NOT IN (SELECT ippDetectID FROM StackDetection)"
+        rs = self.scratchDb.executeQuery(sql)
+        rs.first()
+        nMissing = rs.getInt(1)
+        self.logger.info("%5d detections in %s table that are not in StackDetection. Deleting" % (nMissing, table))
+  
+        if nMissing < 1: return
+        
+        sql = "DELETE FROM " + table + " WHERE ippDetectID NOT IN (SELECT ippDetectID FROM StackDetection)"
+        self.scratchDb.execute(sql)
+       
 
     '''
@@ -449,4 +485,5 @@
     '''
     def populateStackToImage(self):
+
         self.logger.info("Procesing StackToImage table")
 
@@ -457,9 +494,9 @@
                    VALUES (\
                    " + str(self.stackID) + ", " + imageID + ")"
-            self.scratchDb.stmt.execute(sql)
+            self.scratchDb.execute(sql)
 
         # now update StackMeta with correct number of inputs
         sql = "UPDATE StackMeta SET nP2Images = (SELECT COUNT(*) FROM StackToImage)"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
     '''
@@ -467,4 +504,5 @@
     '''
     def populateSkinnyObject(self):
+
         self.logger.info("Procesing SkinnyObject table")
 
@@ -474,8 +512,8 @@
                ) \
                SELECT \
-               objID \
+               DISTINCT objID \
                ,ippObjID \
                FROM StackDetection"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         self.scratchDb.updateAllRows("SkinnyObject", "surveyID", str(self.surveyID))
@@ -486,4 +524,5 @@
     '''
     def populateObjectCalColor(self):
+
         self.logger.info("Procesing ObjectCalColor table")
 
@@ -493,8 +532,8 @@
                ) \
                SELECT \
-               objID \
+               DISTINCT objID \
                ,ippObjID \
                FROM StackDetection"
-        self.scratchDb.stmt.execute(sql)
+        self.scratchDb.execute(sql)
 
         self.scratchDb.updateFilterID("ObjectCalColor", self.filter)
@@ -508,5 +547,5 @@
 
         self.logger.info("Altering PSPS tables")
-        self.scratchDb.makeColumnUnique("StackDetection", "objID")
+        #self.scratchDb.makeColumnUnique("StackDetection", "objID")
         self.scratchDb.createIndex("StackDetection", "ippDetectID")
         self.scratchDb.createIndex("StackApFlx", "ippDetectID")
@@ -531,5 +570,5 @@
         imageID = self.scratchDb.getImageIDFromExternID(self.header['SOURCEID'], self.header['IMAGEID'])
 
-        self.logger.info("Updating table '" + table + "' with DVO IDs...")
+        self.logger.debug("Updating table '" + table + "' with DVO IDs...")
         sql = "UPDATE IGNORE " + table + " AS a, dvoDetectionFull AS b SET \
                a.ippObjID = b.ippObjID, \
@@ -539,6 +578,5 @@
                AND b.sourceID = " + self.header['SOURCEID'] + "\
                AND b.imageID = " + str(imageID)
-        self.scratchDb.stmt.execute(sql)
-
+        self.scratchDb.execute(sql)
 
     '''
@@ -553,6 +591,9 @@
         self.populateStackMeta()
         self.populateStackDetection()
-        self.populateStackModelFit()
-        self.populateStackApFlx()
+
+        if self.stackType != "NIGHTLY_STACK": 
+            self.populateStackModelFit()
+            self.populateStackApFlx()
+   
         self.populateStackToImage()
         self.populateSkinnyObject()
@@ -560,5 +601,10 @@
 
         self.setMinMaxObjID(["StackDetection"])
-        
+       
+        if self.totalDetections < 1: 
+
+            self.logger.error("No detections to publish")
+            return False
+
         return True
 
@@ -568,22 +614,49 @@
     def alreadyProcessed(self):
 
-        return self.ippToPspsDb.alreadyProcessed("stack", "stack_id", self.stackID)
+        # sadly, we have to read the FITS primary header first
+        if not self.readPrimaryHeader(): return False
+
+        # get filterID using init table
+        self.filter = self.header['FPA.FILTER']
+        self.filter = self.filter[0:1]
+
+        self.stackType = stackType  # NOTE(review): 'stackType' is neither a parameter nor a local here; it resolves to the module-level global — confirm intended
+        meta = self.gpc1Db.getStackStageMeta(self.id, self.header['FPA.FILTER'])
+        if len(meta) < 1: return False
+        self.stackID = meta[0];
+        self.skycell = meta[1];
+        self.skycell = self.skycell[8:]
+
+        #return self.ippToPspsDb.alreadyProcessed("stack", "stack_id", self.stackID)
+        return False # TODO: re-enable the alreadyProcessed check above
+
+
+useFullTables=True
 
 logging.config.fileConfig("logging.conf")
 logger = logging.getLogger("stackbatch")
+logger.setLevel(logging.INFO)
 logger.info("Starting")
+
 gpc1Db = Gpc1Db(logger)
-stackType = "NIGHTLY_STACK"
-skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.Staticsky", "staticsky")
-#skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.GENE.PSPSDEEP", "staticsky")
-#stackType = "DEEP_STACK"
-#skyIDs = [689]
+ippToPspsDb = IppToPspsDb(logger)
+scratchDb = ScratchDb(logger, useFullTables)
+
+#stackType = "NIGHTLY_STACK"
+#skyIDs = gpc1Db.getIDsInThisDVODbForThisStageFudge()
+#skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.Staticsky", "staticsky")
+
+stackType = "DEEP_STACK"
+skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.GENE.PSPSDEEP", "staticsky")
+
+#skyIDs = [942]
 #skyIDs = [299]
 #skyIDs = [302]
 #skyIDs = [8508]
-i = 0
+#i = 0
 for skyID in skyIDs:
-
-    logger.info("-------------------------------------------------- sky ID: %d" % skyID)
+    
+    #if skyID < 1340: continue # nightly
+    #if skyID < 238: continue # deep
 
     cmfFiles = gpc1Db.getStackStageCmfs(skyID)
@@ -591,24 +664,13 @@
     for file in cmfFiles:
 
-        if not os.path.isfile(file):
-            logger.error("Cannot read file at '" + file)
-            continue
-
-        stackBatch = StackBatch(logger, skyID, file, stackType, True)
-
-        if not stackBatch.alreadyProcessed():
-
-            stackBatch.createEmptyPspsTables()
-            stackBatch.importIppTables("")
-            if stackBatch.populatePspsTables():
- 
-                #stackBatch.reportNullsInAllPspsTables(False)
-                stackBatch.exportPspsTablesToFits()
-                stackBatch.writeBatchManifest()
-                #stackBatch.createTarball()
-                #stackBatch.publishToDatastore()
-
-                i = i + 1
-                #if i > 0: sys.exit()
-
-logger.info("Finished")
+        stackBatch = StackBatch(logger,
+                                gpc1Db,
+                                ippToPspsDb,
+                                scratchDb,
+                                skyID, 
+                                file, 
+                                stackType, 
+                                useFullTables)
+
+        stackBatch.run()
+
